Error out for Cilk_spawn or array expression in forbidden places
[official-gcc.git] / gcc / lra.c
blob3ae47e86ba641fe68245ded4b5dbb5fdcf56787e
1 /* LRA (local register allocator) driver and LRA utilities.
2 Copyright (C) 2010-2014 Free Software Foundation, Inc.
3 Contributed by Vladimir Makarov <vmakarov@redhat.com>.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
22 /* The Local Register Allocator (LRA) is a replacement of former
23 reload pass. It is focused to simplify code solving the reload
24 pass tasks, to make the code maintenance easier, and to implement new
25 perspective optimizations.
27 The major LRA design solutions are:
28    o division into small manageable, separated sub-tasks
29    o reflection of all transformations and decisions in RTL as much
30 as possible
31 o insn constraints as a primary source of the info (minimizing
32 number of target-depended macros/hooks)
34 In brief LRA works by iterative insn process with the final goal is
35 to satisfy all insn and address constraints:
36 o New reload insns (in brief reloads) and reload pseudos might be
37 generated;
38 o Some pseudos might be spilled to assign hard registers to
39 new reload pseudos;
40 o Changing spilled pseudos to stack memory or their equivalences;
41 o Allocation stack memory changes the address displacement and
42 new iteration is needed.
44 Here is block diagram of LRA passes:
46 ------------------------
47 --------------- | Undo inheritance for | ---------------
48 | Memory-memory | | spilled pseudos, | | New (and old) |
49 | move coalesce |<---| splits for pseudos got |<-- | pseudos |
50 --------------- | the same hard regs, | | assignment |
51 Start | | and optional reloads | ---------------
52 | | ------------------------ ^
53 V | ---------------- |
54 ----------- V | Update virtual | |
55 | Remove |----> ------------>| register | |
56 | scratches | ^ | displacements | |
57 ----------- | ---------------- |
58 | | |
59 | V New |
60 ---------------- No ------------ pseudos -------------------
61 | Spilled pseudo | change |Constraints:| or insns | Inheritance/split |
62 | to memory |<-------| RTL |--------->| transformations |
63 | substitution | | transfor- | | in EBB scope |
64 ---------------- | mations | -------------------
65 | ------------
67 -------------------------
68 | Hard regs substitution, |
69 | devirtualization, and |------> Finish
70 | restoring scratches got |
71 | memory |
72 -------------------------
74 To speed up the process:
75 o We process only insns affected by changes on previous
76 iterations;
77 o We don't use DFA-infrastructure because it results in much slower
78 compiler speed than a special IR described below does;
79 o We use a special insn representation for quick access to insn
80 info which is always *synchronized* with the current RTL;
81 o Insn IR is minimized by memory. It is divided on three parts:
82 o one specific for each insn in RTL (only operand locations);
83 o one common for all insns in RTL with the same insn code
84 (different operand attributes from machine descriptions);
85 o one oriented for maintenance of live info (list of pseudos).
86 o Pseudo data:
87 o all insns where the pseudo is referenced;
88 o live info (conflicting hard regs, live ranges, # of
89 references etc);
90 o data used for assigning (preferred hard regs, costs etc).
92 This file contains LRA driver, LRA utility functions and data, and
93 code for dealing with scratches. */
95 #include "config.h"
96 #include "system.h"
97 #include "coretypes.h"
98 #include "tm.h"
99 #include "hard-reg-set.h"
100 #include "rtl.h"
101 #include "tm_p.h"
102 #include "regs.h"
103 #include "insn-config.h"
104 #include "insn-codes.h"
105 #include "recog.h"
106 #include "output.h"
107 #include "addresses.h"
108 #include "flags.h"
109 #include "hashtab.h"
110 #include "hash-set.h"
111 #include "vec.h"
112 #include "machmode.h"
113 #include "input.h"
114 #include "function.h"
115 #include "tree-core.h"
116 #include "optabs.h"
117 #include "expr.h"
118 #include "predict.h"
119 #include "dominance.h"
120 #include "cfg.h"
121 #include "cfgrtl.h"
122 #include "cfgbuild.h"
123 #include "basic-block.h"
124 #include "except.h"
125 #include "tree-pass.h"
126 #include "timevar.h"
127 #include "target.h"
128 #include "ira.h"
129 #include "lra-int.h"
130 #include "df.h"
132 /* Dump bitmap SET with TITLE and BB INDEX. */
133 void
134 lra_dump_bitmap_with_title (const char *title, bitmap set, int index)
136 unsigned int i;
137 int count;
138 bitmap_iterator bi;
139 static const int max_nums_on_line = 10;
141 if (bitmap_empty_p (set))
142 return;
143 fprintf (lra_dump_file, " %s %d:", title, index);
144 fprintf (lra_dump_file, "\n");
145 count = max_nums_on_line + 1;
146 EXECUTE_IF_SET_IN_BITMAP (set, 0, i, bi)
148 if (count > max_nums_on_line)
150 fprintf (lra_dump_file, "\n ");
151 count = 0;
153 fprintf (lra_dump_file, " %4u", i);
154 count++;
156 fprintf (lra_dump_file, "\n");
159 /* Hard registers currently not available for allocation. It can
160    be changed after some hard registers become not eliminable.  */
161 HARD_REG_SET lra_no_alloc_regs;
163 static int get_new_reg_value (void);
164 static void expand_reg_info (void);
165 static void invalidate_insn_recog_data (int);
166 static int get_insn_freq (rtx_insn *);
167 static void invalidate_insn_data_regno_info (lra_insn_recog_data_t,
168 rtx_insn *, int);
170 /* Expand all regno related info needed for LRA. */
171 static void
172 expand_reg_data (int old)
174 resize_reg_info ();
175 expand_reg_info ();
176 ira_expand_reg_equiv ();
177 for (int i = (int) max_reg_num () - 1; i >= old; i--)
178 lra_change_class (i, ALL_REGS, " Set", true);
181 /* Create and return a new reg of ORIGINAL mode. If ORIGINAL is NULL
182 or of VOIDmode, use MD_MODE for the new reg. Initialize its
183 register class to RCLASS. Print message about assigning class
184 RCLASS containing new register name TITLE unless it is NULL. Use
185 attributes of ORIGINAL if it is a register. The created register
186 will have unique held value. */
188 lra_create_new_reg_with_unique_value (machine_mode md_mode, rtx original,
189 enum reg_class rclass, const char *title)
191 machine_mode mode;
192 rtx new_reg;
194 if (original == NULL_RTX || (mode = GET_MODE (original)) == VOIDmode)
195 mode = md_mode;
196 lra_assert (mode != VOIDmode);
197 new_reg = gen_reg_rtx (mode);
198 if (original == NULL_RTX || ! REG_P (original))
200 if (lra_dump_file != NULL)
201 fprintf (lra_dump_file, " Creating newreg=%i", REGNO (new_reg));
203 else
205 if (ORIGINAL_REGNO (original) >= FIRST_PSEUDO_REGISTER)
206 ORIGINAL_REGNO (new_reg) = ORIGINAL_REGNO (original);
207 REG_USERVAR_P (new_reg) = REG_USERVAR_P (original);
208 REG_POINTER (new_reg) = REG_POINTER (original);
209 REG_ATTRS (new_reg) = REG_ATTRS (original);
210 if (lra_dump_file != NULL)
211 fprintf (lra_dump_file, " Creating newreg=%i from oldreg=%i",
212 REGNO (new_reg), REGNO (original));
214 if (lra_dump_file != NULL)
216 if (title != NULL)
217 fprintf (lra_dump_file, ", assigning class %s to%s%s r%d",
218 reg_class_names[rclass], *title == '\0' ? "" : " ",
219 title, REGNO (new_reg));
220 fprintf (lra_dump_file, "\n");
222 expand_reg_data (max_reg_num ());
223 setup_reg_classes (REGNO (new_reg), rclass, NO_REGS, rclass);
224 return new_reg;
227 /* Analogous to the previous function but also inherits value of
228 ORIGINAL. */
230 lra_create_new_reg (machine_mode md_mode, rtx original,
231 enum reg_class rclass, const char *title)
233 rtx new_reg;
235 new_reg
236 = lra_create_new_reg_with_unique_value (md_mode, original, rclass, title);
237 if (original != NULL_RTX && REG_P (original))
238 lra_assign_reg_val (REGNO (original), REGNO (new_reg));
239 return new_reg;
242 /* Set up for REGNO unique hold value. */
243 void
244 lra_set_regno_unique_value (int regno)
246 lra_reg_info[regno].val = get_new_reg_value ();
249 /* Invalidate INSN related info used by LRA. The info should never be
250 used after that. */
251 void
252 lra_invalidate_insn_data (rtx_insn *insn)
254 lra_invalidate_insn_regno_info (insn);
255 invalidate_insn_recog_data (INSN_UID (insn));
258 /* Mark INSN deleted and invalidate the insn related info used by
259 LRA. */
260 void
261 lra_set_insn_deleted (rtx_insn *insn)
263 lra_invalidate_insn_data (insn);
264 SET_INSN_DELETED (insn);
267 /* Delete an unneeded INSN and any previous insns who sole purpose is
268 loading data that is dead in INSN. */
269 void
270 lra_delete_dead_insn (rtx_insn *insn)
272 rtx_insn *prev = prev_real_insn (insn);
273 rtx prev_dest;
275 /* If the previous insn sets a register that dies in our insn,
276 delete it too. */
277 if (prev && GET_CODE (PATTERN (prev)) == SET
278 && (prev_dest = SET_DEST (PATTERN (prev)), REG_P (prev_dest))
279 && reg_mentioned_p (prev_dest, PATTERN (insn))
280 && find_regno_note (insn, REG_DEAD, REGNO (prev_dest))
281 && ! side_effects_p (SET_SRC (PATTERN (prev))))
282 lra_delete_dead_insn (prev);
284 lra_set_insn_deleted (insn);
287 /* Emit insn x = y + z. Return NULL if we failed to do it.
288 Otherwise, return the insn. We don't use gen_add3_insn as it might
289 clobber CC. */
290 static rtx
291 emit_add3_insn (rtx x, rtx y, rtx z)
293 rtx_insn *last;
295 last = get_last_insn ();
297 if (have_addptr3_insn (x, y, z))
299 rtx insn = gen_addptr3_insn (x, y, z);
301 /* If the target provides an "addptr" pattern it hopefully does
302 for a reason. So falling back to the normal add would be
303 a bug. */
304 lra_assert (insn != NULL_RTX);
305 emit_insn (insn);
306 return insn;
309 rtx_insn *insn = emit_insn (gen_rtx_SET (VOIDmode, x,
310 gen_rtx_PLUS (GET_MODE (y), y, z)));
311 if (recog_memoized (insn) < 0)
313 delete_insns_since (last);
314 insn = NULL;
316 return insn;
319 /* Emit insn x = x + y. Return the insn. We use gen_add2_insn as the
320 last resort. */
321 static rtx
322 emit_add2_insn (rtx x, rtx y)
324 rtx insn;
326 insn = emit_add3_insn (x, x, y);
327 if (insn == NULL_RTX)
329 insn = gen_add2_insn (x, y);
330 if (insn != NULL_RTX)
331 emit_insn (insn);
333 return insn;
336 /* Target checks operands through operand predicates to recognize an
337 insn. We should have a special precaution to generate add insns
338 which are frequent results of elimination.
340 Emit insns for x = y + z. X can be used to store intermediate
341 values and should be not in Y and Z when we use X to store an
342 intermediate value. Y + Z should form [base] [+ index[ * scale]] [
343 + disp] where base and index are registers, disp and scale are
344 constants. Y should contain base if it is present, Z should
345 contain disp if any. index[*scale] can be part of Y or Z. */
346 void
347 lra_emit_add (rtx x, rtx y, rtx z)
349 int old;
350 rtx_insn *last;
351 rtx a1, a2, base, index, disp, scale, index_scale;
352 bool ok_p;
354 rtx add3_insn = emit_add3_insn (x, y, z);
355 old = max_reg_num ();
356 if (add3_insn != NULL)
358 else
360 disp = a2 = NULL_RTX;
361 if (GET_CODE (y) == PLUS)
363 a1 = XEXP (y, 0);
364 a2 = XEXP (y, 1);
365 disp = z;
367 else
369 a1 = y;
370 if (CONSTANT_P (z))
371 disp = z;
372 else
373 a2 = z;
375 index_scale = scale = NULL_RTX;
376 if (GET_CODE (a1) == MULT)
378 index_scale = a1;
379 index = XEXP (a1, 0);
380 scale = XEXP (a1, 1);
381 base = a2;
383 else if (a2 != NULL_RTX && GET_CODE (a2) == MULT)
385 index_scale = a2;
386 index = XEXP (a2, 0);
387 scale = XEXP (a2, 1);
388 base = a1;
390 else
392 base = a1;
393 index = a2;
395 if (! (REG_P (base) || GET_CODE (base) == SUBREG)
396 || (index != NULL_RTX
397 && ! (REG_P (index) || GET_CODE (index) == SUBREG))
398 || (disp != NULL_RTX && ! CONSTANT_P (disp))
399 || (scale != NULL_RTX && ! CONSTANT_P (scale)))
401 /* Probably we have no 3 op add. Last chance is to use 2-op
402 add insn. To succeed, don't move Z to X as an address
403 segment always comes in Y. Otherwise, we might fail when
404 adding the address segment to register. */
405 lra_assert (x != y && x != z);
406 emit_move_insn (x, y);
407 rtx insn = emit_add2_insn (x, z);
408 lra_assert (insn != NULL_RTX);
410 else
412 if (index_scale == NULL_RTX)
413 index_scale = index;
414 if (disp == NULL_RTX)
416 /* Generate x = index_scale; x = x + base. */
417 lra_assert (index_scale != NULL_RTX && base != NULL_RTX);
418 emit_move_insn (x, index_scale);
419 rtx insn = emit_add2_insn (x, base);
420 lra_assert (insn != NULL_RTX);
422 else if (scale == NULL_RTX)
424 /* Try x = base + disp. */
425 lra_assert (base != NULL_RTX);
426 last = get_last_insn ();
427 rtx_insn *move_insn =
428 emit_move_insn (x, gen_rtx_PLUS (GET_MODE (base), base, disp));
429 if (recog_memoized (move_insn) < 0)
431 delete_insns_since (last);
432 /* Generate x = disp; x = x + base. */
433 emit_move_insn (x, disp);
434 rtx add2_insn = emit_add2_insn (x, base);
435 lra_assert (add2_insn != NULL_RTX);
437 /* Generate x = x + index. */
438 if (index != NULL_RTX)
440 rtx insn = emit_add2_insn (x, index);
441 lra_assert (insn != NULL_RTX);
444 else
446 /* Try x = index_scale; x = x + disp; x = x + base. */
447 last = get_last_insn ();
448 rtx_insn *move_insn = emit_move_insn (x, index_scale);
449 ok_p = false;
450 if (recog_memoized (move_insn) >= 0)
452 rtx insn = emit_add2_insn (x, disp);
453 if (insn != NULL_RTX)
455 insn = emit_add2_insn (x, disp);
456 if (insn != NULL_RTX)
457 ok_p = true;
460 if (! ok_p)
462 delete_insns_since (last);
463 /* Generate x = disp; x = x + base; x = x + index_scale. */
464 emit_move_insn (x, disp);
465 rtx insn = emit_add2_insn (x, base);
466 lra_assert (insn != NULL_RTX);
467 insn = emit_add2_insn (x, index_scale);
468 lra_assert (insn != NULL_RTX);
473 /* Functions emit_... can create pseudos -- so expand the pseudo
474 data. */
475 if (old != max_reg_num ())
476 expand_reg_data (old);
479 /* The number of emitted reload insns so far. */
480 int lra_curr_reload_num;
482 /* Emit x := y, processing special case when y = u + v or y = u + v *
483 scale + w through emit_add (Y can be an address which is base +
484 index reg * scale + displacement in general case). X may be used
485 as intermediate result therefore it should be not in Y. */
486 void
487 lra_emit_move (rtx x, rtx y)
489 int old;
491 if (GET_CODE (y) != PLUS)
493 if (rtx_equal_p (x, y))
494 return;
495 old = max_reg_num ();
496 emit_move_insn (x, y);
497 if (REG_P (x))
498 lra_reg_info[ORIGINAL_REGNO (x)].last_reload = ++lra_curr_reload_num;
499 /* Function emit_move can create pseudos -- so expand the pseudo
500 data. */
501 if (old != max_reg_num ())
502 expand_reg_data (old);
503 return;
505 lra_emit_add (x, XEXP (y, 0), XEXP (y, 1));
508 /* Update insn operands which are duplication of operands whose
509 numbers are in array of NOPS (with end marker -1). The insn is
510 represented by its LRA internal representation ID. */
511 void
512 lra_update_dups (lra_insn_recog_data_t id, signed char *nops)
514 int i, j, nop;
515 struct lra_static_insn_data *static_id = id->insn_static_data;
517 for (i = 0; i < static_id->n_dups; i++)
518 for (j = 0; (nop = nops[j]) >= 0; j++)
519 if (static_id->dup_num[i] == nop)
520 *id->dup_loc[i] = *id->operand_loc[nop];
525 /* This page contains code dealing with info about registers in the
526 insns. */
528 /* Pools for insn reg info. */
529 static alloc_pool insn_reg_pool;
531 /* Initiate pool for insn reg info. */
532 static void
533 init_insn_regs (void)
535 insn_reg_pool
536 = create_alloc_pool ("insn regs", sizeof (struct lra_insn_reg), 100);
539 /* Create LRA insn related info about a reference to REGNO in INSN with
540 TYPE (in/out/inout), biggest reference mode MODE, flag that it is
541 reference through subreg (SUBREG_P), flag that is early clobbered
542 in the insn (EARLY_CLOBBER), and reference to the next insn reg
543 info (NEXT). */
544 static struct lra_insn_reg *
545 new_insn_reg (rtx_insn *insn, int regno, enum op_type type,
546 machine_mode mode,
547 bool subreg_p, bool early_clobber, struct lra_insn_reg *next)
549 struct lra_insn_reg *ir;
551 ir = (struct lra_insn_reg *) pool_alloc (insn_reg_pool);
552 ir->type = type;
553 ir->biggest_mode = mode;
554 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (lra_reg_info[regno].biggest_mode)
555 && NONDEBUG_INSN_P (insn))
556 lra_reg_info[regno].biggest_mode = mode;
557 ir->subreg_p = subreg_p;
558 ir->early_clobber = early_clobber;
559 ir->regno = regno;
560 ir->next = next;
561 return ir;
564 /* Free insn reg info IR. */
565 static void
566 free_insn_reg (struct lra_insn_reg *ir)
568 pool_free (insn_reg_pool, ir);
571 /* Free insn reg info list IR. */
572 static void
573 free_insn_regs (struct lra_insn_reg *ir)
575 struct lra_insn_reg *next_ir;
577 for (; ir != NULL; ir = next_ir)
579 next_ir = ir->next;
580 free_insn_reg (ir);
584 /* Finish pool for insn reg info. */
585 static void
586 finish_insn_regs (void)
588 free_alloc_pool (insn_reg_pool);
593 /* This page contains code dealing LRA insn info (or in other words
594 LRA internal insn representation). */
596 /* Map INSN_CODE -> the static insn data. This info is valid during
597 the whole translation unit. */
598 struct lra_static_insn_data *insn_code_data[LAST_INSN_CODE];
600 /* Debug insns are represented as a special insn with one input
601 operand which is RTL expression in var_location. */
603 /* The following data are used as static insn operand data for all
604 debug insns. If structure lra_operand_data is changed, the
605 initializer should be changed too. */
606 static struct lra_operand_data debug_operand_data =
608 NULL, /* alternative */
609 VOIDmode, /* We are not interesting in the operand mode. */
610 OP_IN,
611 0, 0, 0, 0
614 /* The following data are used as static insn data for all debug
615 insns. If structure lra_static_insn_data is changed, the
616 initializer should be changed too. */
617 static struct lra_static_insn_data debug_insn_static_data =
619 &debug_operand_data,
620 0, /* Duplication operands #. */
621 -1, /* Commutative operand #. */
622 1, /* Operands #. There is only one operand which is debug RTL
623 expression. */
624 0, /* Duplications #. */
625 0, /* Alternatives #. We are not interesting in alternatives
626 because we does not proceed debug_insns for reloads. */
627 NULL, /* Hard registers referenced in machine description. */
628 NULL /* Descriptions of operands in alternatives. */
631 /* Called once per compiler work to initialize some LRA data related
632 to insns. */
633 static void
634 init_insn_code_data_once (void)
636 memset (insn_code_data, 0, sizeof (insn_code_data));
639 /* Called once per compiler work to finalize some LRA data related to
640 insns. */
641 static void
642 finish_insn_code_data_once (void)
644 int i;
646 for (i = 0; i < LAST_INSN_CODE; i++)
648 if (insn_code_data[i] != NULL)
649 free (insn_code_data[i]);
653 /* Return static insn data, allocate and setup if necessary. Although
654 dup_num is static data (it depends only on icode), to set it up we
655 need to extract insn first. So recog_data should be valid for
656 normal insn (ICODE >= 0) before the call. */
657 static struct lra_static_insn_data *
658 get_static_insn_data (int icode, int nop, int ndup, int nalt)
660 struct lra_static_insn_data *data;
661 size_t n_bytes;
663 lra_assert (icode < LAST_INSN_CODE);
664 if (icode >= 0 && (data = insn_code_data[icode]) != NULL)
665 return data;
666 lra_assert (nop >= 0 && ndup >= 0 && nalt >= 0);
667 n_bytes = sizeof (struct lra_static_insn_data)
668 + sizeof (struct lra_operand_data) * nop
669 + sizeof (int) * ndup;
670 data = XNEWVAR (struct lra_static_insn_data, n_bytes);
671 data->operand_alternative = NULL;
672 data->n_operands = nop;
673 data->n_dups = ndup;
674 data->n_alternatives = nalt;
675 data->operand = ((struct lra_operand_data *)
676 ((char *) data + sizeof (struct lra_static_insn_data)));
677 data->dup_num = ((int *) ((char *) data->operand
678 + sizeof (struct lra_operand_data) * nop));
679 if (icode >= 0)
681 int i;
683 insn_code_data[icode] = data;
684 for (i = 0; i < nop; i++)
686 data->operand[i].constraint
687 = insn_data[icode].operand[i].constraint;
688 data->operand[i].mode = insn_data[icode].operand[i].mode;
689 data->operand[i].strict_low = insn_data[icode].operand[i].strict_low;
690 data->operand[i].is_operator
691 = insn_data[icode].operand[i].is_operator;
692 data->operand[i].type
693 = (data->operand[i].constraint[0] == '=' ? OP_OUT
694 : data->operand[i].constraint[0] == '+' ? OP_INOUT
695 : OP_IN);
696 data->operand[i].is_address = false;
698 for (i = 0; i < ndup; i++)
699 data->dup_num[i] = recog_data.dup_num[i];
701 return data;
704 /* The current length of the following array. */
705 int lra_insn_recog_data_len;
707 /* Map INSN_UID -> the insn recog data (NULL if unknown). */
708 lra_insn_recog_data_t *lra_insn_recog_data;
710 /* Initialize LRA data about insns. */
711 static void
712 init_insn_recog_data (void)
714 lra_insn_recog_data_len = 0;
715 lra_insn_recog_data = NULL;
716 init_insn_regs ();
719 /* Expand, if necessary, LRA data about insns. */
720 static void
721 check_and_expand_insn_recog_data (int index)
723 int i, old;
725 if (lra_insn_recog_data_len > index)
726 return;
727 old = lra_insn_recog_data_len;
728 lra_insn_recog_data_len = index * 3 / 2 + 1;
729 lra_insn_recog_data = XRESIZEVEC (lra_insn_recog_data_t,
730 lra_insn_recog_data,
731 lra_insn_recog_data_len);
732 for (i = old; i < lra_insn_recog_data_len; i++)
733 lra_insn_recog_data[i] = NULL;
736 /* Finish LRA DATA about insn. */
737 static void
738 free_insn_recog_data (lra_insn_recog_data_t data)
740 if (data->operand_loc != NULL)
741 free (data->operand_loc);
742 if (data->dup_loc != NULL)
743 free (data->dup_loc);
744 if (data->arg_hard_regs != NULL)
745 free (data->arg_hard_regs);
746 if (data->icode < 0 && NONDEBUG_INSN_P (data->insn))
748 if (data->insn_static_data->operand_alternative != NULL)
749 free (const_cast <operand_alternative *>
750 (data->insn_static_data->operand_alternative));
751 free_insn_regs (data->insn_static_data->hard_regs);
752 free (data->insn_static_data);
754 free_insn_regs (data->regs);
755 data->regs = NULL;
756 free (data);
759 /* Finish LRA data about all insns. */
760 static void
761 finish_insn_recog_data (void)
763 int i;
764 lra_insn_recog_data_t data;
766 for (i = 0; i < lra_insn_recog_data_len; i++)
767 if ((data = lra_insn_recog_data[i]) != NULL)
768 free_insn_recog_data (data);
769 finish_insn_regs ();
770 free (lra_insn_recog_data);
773 /* Setup info about operands in alternatives of LRA DATA of insn. */
774 static void
775 setup_operand_alternative (lra_insn_recog_data_t data,
776 const operand_alternative *op_alt)
778 int i, j, nop, nalt;
779 int icode = data->icode;
780 struct lra_static_insn_data *static_data = data->insn_static_data;
782 static_data->commutative = -1;
783 nop = static_data->n_operands;
784 nalt = static_data->n_alternatives;
785 static_data->operand_alternative = op_alt;
786 for (i = 0; i < nop; i++)
788 static_data->operand[i].early_clobber = false;
789 static_data->operand[i].is_address = false;
790 if (static_data->operand[i].constraint[0] == '%')
792 /* We currently only support one commutative pair of operands. */
793 if (static_data->commutative < 0)
794 static_data->commutative = i;
795 else
796 lra_assert (icode < 0); /* Asm */
797 /* The last operand should not be marked commutative. */
798 lra_assert (i != nop - 1);
801 for (j = 0; j < nalt; j++)
802 for (i = 0; i < nop; i++, op_alt++)
804 static_data->operand[i].early_clobber |= op_alt->earlyclobber;
805 static_data->operand[i].is_address |= op_alt->is_address;
809 /* Recursively process X and collect info about registers, which are
810 not the insn operands, in X with TYPE (in/out/inout) and flag that
811 it is early clobbered in the insn (EARLY_CLOBBER) and add the info
812 to LIST. X is a part of insn given by DATA. Return the result
813 list. */
814 static struct lra_insn_reg *
815 collect_non_operand_hard_regs (rtx *x, lra_insn_recog_data_t data,
816 struct lra_insn_reg *list,
817 enum op_type type, bool early_clobber)
819 int i, j, regno, last;
820 bool subreg_p;
821 machine_mode mode;
822 struct lra_insn_reg *curr;
823 rtx op = *x;
824 enum rtx_code code = GET_CODE (op);
825 const char *fmt = GET_RTX_FORMAT (code);
827 for (i = 0; i < data->insn_static_data->n_operands; i++)
828 if (x == data->operand_loc[i])
829 /* It is an operand loc. Stop here. */
830 return list;
831 for (i = 0; i < data->insn_static_data->n_dups; i++)
832 if (x == data->dup_loc[i])
833 /* It is a dup loc. Stop here. */
834 return list;
835 mode = GET_MODE (op);
836 subreg_p = false;
837 if (code == SUBREG)
839 op = SUBREG_REG (op);
840 code = GET_CODE (op);
841 if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (op)))
843 mode = GET_MODE (op);
844 if (GET_MODE_SIZE (mode) > REGMODE_NATURAL_SIZE (mode))
845 subreg_p = true;
848 if (REG_P (op))
850 if ((regno = REGNO (op)) >= FIRST_PSEUDO_REGISTER)
851 return list;
852 for (last = regno + hard_regno_nregs[regno][mode];
853 regno < last;
854 regno++)
855 if (! TEST_HARD_REG_BIT (lra_no_alloc_regs, regno)
856 || TEST_HARD_REG_BIT (eliminable_regset, regno))
858 for (curr = list; curr != NULL; curr = curr->next)
859 if (curr->regno == regno && curr->subreg_p == subreg_p
860 && curr->biggest_mode == mode)
862 if (curr->type != type)
863 curr->type = OP_INOUT;
864 if (curr->early_clobber != early_clobber)
865 curr->early_clobber = true;
866 break;
868 if (curr == NULL)
870 /* This is a new hard regno or the info can not be
871 integrated into the found structure. */
872 #ifdef STACK_REGS
873 early_clobber
874 = (early_clobber
875 /* This clobber is to inform popping floating
876 point stack only. */
877 && ! (FIRST_STACK_REG <= regno
878 && regno <= LAST_STACK_REG));
879 #endif
880 list = new_insn_reg (data->insn, regno, type, mode, subreg_p,
881 early_clobber, list);
884 return list;
886 switch (code)
888 case SET:
889 list = collect_non_operand_hard_regs (&SET_DEST (op), data,
890 list, OP_OUT, false);
891 list = collect_non_operand_hard_regs (&SET_SRC (op), data,
892 list, OP_IN, false);
893 break;
894 case CLOBBER:
895 /* We treat clobber of non-operand hard registers as early
896 clobber (the behavior is expected from asm). */
897 list = collect_non_operand_hard_regs (&XEXP (op, 0), data,
898 list, OP_OUT, true);
899 break;
900 case PRE_INC: case PRE_DEC: case POST_INC: case POST_DEC:
901 list = collect_non_operand_hard_regs (&XEXP (op, 0), data,
902 list, OP_INOUT, false);
903 break;
904 case PRE_MODIFY: case POST_MODIFY:
905 list = collect_non_operand_hard_regs (&XEXP (op, 0), data,
906 list, OP_INOUT, false);
907 list = collect_non_operand_hard_regs (&XEXP (op, 1), data,
908 list, OP_IN, false);
909 break;
910 default:
911 fmt = GET_RTX_FORMAT (code);
912 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
914 if (fmt[i] == 'e')
915 list = collect_non_operand_hard_regs (&XEXP (op, i), data,
916 list, OP_IN, false);
917 else if (fmt[i] == 'E')
918 for (j = XVECLEN (op, i) - 1; j >= 0; j--)
919 list = collect_non_operand_hard_regs (&XVECEXP (op, i, j), data,
920 list, OP_IN, false);
923 return list;
926 /* Set up and return info about INSN. Set up the info if it is not set up
927 yet. */
928 lra_insn_recog_data_t
929 lra_set_insn_recog_data (rtx_insn *insn)
931 lra_insn_recog_data_t data;
932 int i, n, icode;
933 rtx **locs;
934 unsigned int uid = INSN_UID (insn);
935 struct lra_static_insn_data *insn_static_data;
937 check_and_expand_insn_recog_data (uid);
938 if (DEBUG_INSN_P (insn))
939 icode = -1;
940 else
942 icode = INSN_CODE (insn);
943 if (icode < 0)
944 /* It might be a new simple insn which is not recognized yet. */
945 INSN_CODE (insn) = icode = recog_memoized (insn);
947 data = XNEW (struct lra_insn_recog_data);
948 lra_insn_recog_data[uid] = data;
949 data->insn = insn;
950 data->used_insn_alternative = -1;
951 data->icode = icode;
952 data->regs = NULL;
953 if (DEBUG_INSN_P (insn))
955 data->insn_static_data = &debug_insn_static_data;
956 data->dup_loc = NULL;
957 data->arg_hard_regs = NULL;
958 data->preferred_alternatives = ALL_ALTERNATIVES;
959 data->operand_loc = XNEWVEC (rtx *, 1);
960 data->operand_loc[0] = &INSN_VAR_LOCATION_LOC (insn);
961 return data;
963 if (icode < 0)
965 int nop, nalt;
966 machine_mode operand_mode[MAX_RECOG_OPERANDS];
967 const char *constraints[MAX_RECOG_OPERANDS];
969 nop = asm_noperands (PATTERN (insn));
970 data->operand_loc = data->dup_loc = NULL;
971 nalt = 1;
972 if (nop < 0)
974 /* It is a special insn like USE or CLOBBER. We should
975 recognize any regular insn otherwise LRA can do nothing
976 with this insn. */
977 gcc_assert (GET_CODE (PATTERN (insn)) == USE
978 || GET_CODE (PATTERN (insn)) == CLOBBER
979 || GET_CODE (PATTERN (insn)) == ASM_INPUT);
980 data->insn_static_data = insn_static_data
981 = get_static_insn_data (-1, 0, 0, nalt);
983 else
985 /* expand_asm_operands makes sure there aren't too many
986 operands. */
987 lra_assert (nop <= MAX_RECOG_OPERANDS);
988 if (nop != 0)
989 data->operand_loc = XNEWVEC (rtx *, nop);
990 /* Now get the operand values and constraints out of the
991 insn. */
992 decode_asm_operands (PATTERN (insn), NULL,
993 data->operand_loc,
994 constraints, operand_mode, NULL);
995 if (nop > 0)
997 const char *p = recog_data.constraints[0];
999 for (p = constraints[0]; *p; p++)
1000 nalt += *p == ',';
1002 data->insn_static_data = insn_static_data
1003 = get_static_insn_data (-1, nop, 0, nalt);
1004 for (i = 0; i < nop; i++)
1006 insn_static_data->operand[i].mode = operand_mode[i];
1007 insn_static_data->operand[i].constraint = constraints[i];
1008 insn_static_data->operand[i].strict_low = false;
1009 insn_static_data->operand[i].is_operator = false;
1010 insn_static_data->operand[i].is_address = false;
1013 for (i = 0; i < insn_static_data->n_operands; i++)
1014 insn_static_data->operand[i].type
1015 = (insn_static_data->operand[i].constraint[0] == '=' ? OP_OUT
1016 : insn_static_data->operand[i].constraint[0] == '+' ? OP_INOUT
1017 : OP_IN);
1018 data->preferred_alternatives = ALL_ALTERNATIVES;
1019 if (nop > 0)
1021 operand_alternative *op_alt = XCNEWVEC (operand_alternative,
1022 nalt * nop);
1023 preprocess_constraints (nop, nalt, constraints, op_alt);
1024 setup_operand_alternative (data, op_alt);
1027 else
1029 insn_extract (insn);
1030 data->insn_static_data = insn_static_data
1031 = get_static_insn_data (icode, insn_data[icode].n_operands,
1032 insn_data[icode].n_dups,
1033 insn_data[icode].n_alternatives);
1034 n = insn_static_data->n_operands;
1035 if (n == 0)
1036 locs = NULL;
1037 else
1039 locs = XNEWVEC (rtx *, n);
1040 memcpy (locs, recog_data.operand_loc, n * sizeof (rtx *));
1042 data->operand_loc = locs;
1043 n = insn_static_data->n_dups;
1044 if (n == 0)
1045 locs = NULL;
1046 else
1048 locs = XNEWVEC (rtx *, n);
1049 memcpy (locs, recog_data.dup_loc, n * sizeof (rtx *));
1051 data->dup_loc = locs;
1052 data->preferred_alternatives = get_preferred_alternatives (insn);
1053 const operand_alternative *op_alt = preprocess_insn_constraints (icode);
1054 if (!insn_static_data->operand_alternative)
1055 setup_operand_alternative (data, op_alt);
1056 else if (op_alt != insn_static_data->operand_alternative)
1057 insn_static_data->operand_alternative = op_alt;
1059 if (GET_CODE (PATTERN (insn)) == CLOBBER || GET_CODE (PATTERN (insn)) == USE)
1060 insn_static_data->hard_regs = NULL;
1061 else
1062 insn_static_data->hard_regs
1063 = collect_non_operand_hard_regs (&PATTERN (insn), data,
1064 NULL, OP_IN, false);
1065 data->arg_hard_regs = NULL;
1066 if (CALL_P (insn))
1068 rtx link;
1069 int n_hard_regs, regno, arg_hard_regs[FIRST_PSEUDO_REGISTER];
1071 n_hard_regs = 0;
1072 /* Finding implicit hard register usage. We believe it will be
1073 not changed whatever transformations are used. Call insns
1074 are such example. */
1075 for (link = CALL_INSN_FUNCTION_USAGE (insn);
1076 link != NULL_RTX;
1077 link = XEXP (link, 1))
1078 if (GET_CODE (XEXP (link, 0)) == USE
1079 && REG_P (XEXP (XEXP (link, 0), 0)))
1081 regno = REGNO (XEXP (XEXP (link, 0), 0));
1082 lra_assert (regno < FIRST_PSEUDO_REGISTER);
1083 /* It is an argument register. */
1084 for (i = (hard_regno_nregs
1085 [regno][GET_MODE (XEXP (XEXP (link, 0), 0))]) - 1;
1086 i >= 0;
1087 i--)
1088 arg_hard_regs[n_hard_regs++] = regno + i;
1090 if (n_hard_regs != 0)
1092 arg_hard_regs[n_hard_regs++] = -1;
1093 data->arg_hard_regs = XNEWVEC (int, n_hard_regs);
1094 memcpy (data->arg_hard_regs, arg_hard_regs,
1095 sizeof (int) * n_hard_regs);
1098 /* Some output operand can be recognized only from the context not
1099 from the constraints which are empty in this case. Call insn may
1100 contain a hard register in set destination with empty constraint
1101 and extract_insn treats them as an input. */
1102 for (i = 0; i < insn_static_data->n_operands; i++)
1104 int j;
1105 rtx pat, set;
1106 struct lra_operand_data *operand = &insn_static_data->operand[i];
1108 /* ??? Should we treat 'X' the same way. It looks to me that
1109 'X' means anything and empty constraint means we do not
1110 care. */
1111 if (operand->type != OP_IN || *operand->constraint != '\0'
1112 || operand->is_operator)
1113 continue;
1114 pat = PATTERN (insn);
1115 if (GET_CODE (pat) == SET)
1117 if (data->operand_loc[i] != &SET_DEST (pat))
1118 continue;
1120 else if (GET_CODE (pat) == PARALLEL)
1122 for (j = XVECLEN (pat, 0) - 1; j >= 0; j--)
1124 set = XVECEXP (PATTERN (insn), 0, j);
1125 if (GET_CODE (set) == SET
1126 && &SET_DEST (set) == data->operand_loc[i])
1127 break;
1129 if (j < 0)
1130 continue;
1132 else
1133 continue;
1134 operand->type = OP_OUT;
1136 return data;
1139 /* Return info about insn give by UID. The info should be already set
1140 up. */
1141 static lra_insn_recog_data_t
1142 get_insn_recog_data_by_uid (int uid)
1144 lra_insn_recog_data_t data;
1146 data = lra_insn_recog_data[uid];
1147 lra_assert (data != NULL);
1148 return data;
1151 /* Invalidate all info about insn given by its UID. */
1152 static void
1153 invalidate_insn_recog_data (int uid)
1155 lra_insn_recog_data_t data;
1157 data = lra_insn_recog_data[uid];
1158 lra_assert (data != NULL);
1159 free_insn_recog_data (data);
1160 lra_insn_recog_data[uid] = NULL;
1163 /* Update all the insn info about INSN. It is usually called when
1164 something in the insn was changed. Return the updated info. */
1165 lra_insn_recog_data_t
1166 lra_update_insn_recog_data (rtx_insn *insn)
1168 lra_insn_recog_data_t data;
1169 int n;
1170 unsigned int uid = INSN_UID (insn);
1171 struct lra_static_insn_data *insn_static_data;
1172 HOST_WIDE_INT sp_offset = 0;
1174 check_and_expand_insn_recog_data (uid);
1175 if ((data = lra_insn_recog_data[uid]) != NULL
1176 && data->icode != INSN_CODE (insn))
1178 sp_offset = data->sp_offset;
1179 invalidate_insn_data_regno_info (data, insn, get_insn_freq (insn));
1180 invalidate_insn_recog_data (uid);
1181 data = NULL;
1183 if (data == NULL)
1185 data = lra_get_insn_recog_data (insn);
1186 /* Initiate or restore SP offset. */
1187 data->sp_offset = sp_offset;
1188 return data;
1190 insn_static_data = data->insn_static_data;
1191 data->used_insn_alternative = -1;
1192 if (DEBUG_INSN_P (insn))
1193 return data;
1194 if (data->icode < 0)
1196 int nop;
1197 machine_mode operand_mode[MAX_RECOG_OPERANDS];
1198 const char *constraints[MAX_RECOG_OPERANDS];
1200 nop = asm_noperands (PATTERN (insn));
1201 if (nop >= 0)
1203 lra_assert (nop == data->insn_static_data->n_operands);
1204 /* Now get the operand values and constraints out of the
1205 insn. */
1206 decode_asm_operands (PATTERN (insn), NULL,
1207 data->operand_loc,
1208 constraints, operand_mode, NULL);
1209 #ifdef ENABLE_CHECKING
1211 int i;
1213 for (i = 0; i < nop; i++)
1214 lra_assert
1215 (insn_static_data->operand[i].mode == operand_mode[i]
1216 && insn_static_data->operand[i].constraint == constraints[i]
1217 && ! insn_static_data->operand[i].is_operator);
1219 #endif
1221 #ifdef ENABLE_CHECKING
1223 int i;
1225 for (i = 0; i < insn_static_data->n_operands; i++)
1226 lra_assert
1227 (insn_static_data->operand[i].type
1228 == (insn_static_data->operand[i].constraint[0] == '=' ? OP_OUT
1229 : insn_static_data->operand[i].constraint[0] == '+' ? OP_INOUT
1230 : OP_IN));
1232 #endif
1234 else
1236 insn_extract (insn);
1237 n = insn_static_data->n_operands;
1238 if (n != 0)
1239 memcpy (data->operand_loc, recog_data.operand_loc, n * sizeof (rtx *));
1240 n = insn_static_data->n_dups;
1241 if (n != 0)
1242 memcpy (data->dup_loc, recog_data.dup_loc, n * sizeof (rtx *));
1243 lra_assert (check_bool_attrs (insn));
1245 return data;
1248 /* Set up that INSN is using alternative ALT now. */
1249 void
1250 lra_set_used_insn_alternative (rtx_insn *insn, int alt)
1252 lra_insn_recog_data_t data;
1254 data = lra_get_insn_recog_data (insn);
1255 data->used_insn_alternative = alt;
1258 /* Set up that insn with UID is using alternative ALT now. The insn
1259 info should be already set up. */
1260 void
1261 lra_set_used_insn_alternative_by_uid (int uid, int alt)
1263 lra_insn_recog_data_t data;
1265 check_and_expand_insn_recog_data (uid);
1266 data = lra_insn_recog_data[uid];
1267 lra_assert (data != NULL);
1268 data->used_insn_alternative = alt;
1273 /* This page contains code dealing with common register info and
1274 pseudo copies. */
1276 /* The size of the following array. */
1277 static int reg_info_size;
1278 /* Common info about each register. */
1279 struct lra_reg *lra_reg_info;
1281 /* Last register value. */
1282 static int last_reg_value;
1284 /* Return new register value. */
1285 static int
1286 get_new_reg_value (void)
1288 return ++last_reg_value;
1291 /* Pools for copies. */
1292 static alloc_pool copy_pool;
1294 /* Vec referring to pseudo copies. */
1295 static vec<lra_copy_t> copy_vec;
1297 /* Initialize I-th element of lra_reg_info. */
1298 static inline void
1299 initialize_lra_reg_info_element (int i)
1301 bitmap_initialize (&lra_reg_info[i].insn_bitmap, &reg_obstack);
1302 #ifdef STACK_REGS
1303 lra_reg_info[i].no_stack_p = false;
1304 #endif
1305 CLEAR_HARD_REG_SET (lra_reg_info[i].conflict_hard_regs);
1306 CLEAR_HARD_REG_SET (lra_reg_info[i].actual_call_used_reg_set);
1307 lra_reg_info[i].preferred_hard_regno1 = -1;
1308 lra_reg_info[i].preferred_hard_regno2 = -1;
1309 lra_reg_info[i].preferred_hard_regno_profit1 = 0;
1310 lra_reg_info[i].preferred_hard_regno_profit2 = 0;
1311 lra_reg_info[i].biggest_mode = VOIDmode;
1312 lra_reg_info[i].live_ranges = NULL;
1313 lra_reg_info[i].nrefs = lra_reg_info[i].freq = 0;
1314 lra_reg_info[i].last_reload = 0;
1315 lra_reg_info[i].restore_regno = -1;
1316 lra_reg_info[i].val = get_new_reg_value ();
1317 lra_reg_info[i].offset = 0;
1318 lra_reg_info[i].copies = NULL;
1321 /* Initialize common reg info and copies. */
1322 static void
1323 init_reg_info (void)
1325 int i;
1327 last_reg_value = 0;
1328 reg_info_size = max_reg_num () * 3 / 2 + 1;
1329 lra_reg_info = XNEWVEC (struct lra_reg, reg_info_size);
1330 for (i = 0; i < reg_info_size; i++)
1331 initialize_lra_reg_info_element (i);
1332 copy_pool
1333 = create_alloc_pool ("lra copies", sizeof (struct lra_copy), 100);
1334 copy_vec.create (100);
1338 /* Finish common reg info and copies. */
1339 static void
1340 finish_reg_info (void)
1342 int i;
1344 for (i = 0; i < reg_info_size; i++)
1345 bitmap_clear (&lra_reg_info[i].insn_bitmap);
1346 free (lra_reg_info);
1347 reg_info_size = 0;
1348 free_alloc_pool (copy_pool);
1349 copy_vec.release ();
1352 /* Expand common reg info if it is necessary. */
1353 static void
1354 expand_reg_info (void)
1356 int i, old = reg_info_size;
1358 if (reg_info_size > max_reg_num ())
1359 return;
1360 reg_info_size = max_reg_num () * 3 / 2 + 1;
1361 lra_reg_info = XRESIZEVEC (struct lra_reg, lra_reg_info, reg_info_size);
1362 for (i = old; i < reg_info_size; i++)
1363 initialize_lra_reg_info_element (i);
1366 /* Free all copies. */
1367 void
1368 lra_free_copies (void)
1370 lra_copy_t cp;
1372 while (copy_vec.length () != 0)
1374 cp = copy_vec.pop ();
1375 lra_reg_info[cp->regno1].copies = lra_reg_info[cp->regno2].copies = NULL;
1376 pool_free (copy_pool, cp);
1380 /* Create copy of two pseudos REGNO1 and REGNO2. The copy execution
1381 frequency is FREQ. */
1382 void
1383 lra_create_copy (int regno1, int regno2, int freq)
1385 bool regno1_dest_p;
1386 lra_copy_t cp;
1388 lra_assert (regno1 != regno2);
1389 regno1_dest_p = true;
1390 if (regno1 > regno2)
1392 int temp = regno2;
1394 regno1_dest_p = false;
1395 regno2 = regno1;
1396 regno1 = temp;
1398 cp = (lra_copy_t) pool_alloc (copy_pool);
1399 copy_vec.safe_push (cp);
1400 cp->regno1_dest_p = regno1_dest_p;
1401 cp->freq = freq;
1402 cp->regno1 = regno1;
1403 cp->regno2 = regno2;
1404 cp->regno1_next = lra_reg_info[regno1].copies;
1405 lra_reg_info[regno1].copies = cp;
1406 cp->regno2_next = lra_reg_info[regno2].copies;
1407 lra_reg_info[regno2].copies = cp;
1408 if (lra_dump_file != NULL)
1409 fprintf (lra_dump_file, " Creating copy r%d%sr%d@%d\n",
1410 regno1, regno1_dest_p ? "<-" : "->", regno2, freq);
1413 /* Return N-th (0, 1, ...) copy. If there is no copy, return
1414 NULL. */
1415 lra_copy_t
1416 lra_get_copy (int n)
1418 if (n >= (int) copy_vec.length ())
1419 return NULL;
1420 return copy_vec[n];
1425 /* This page contains code dealing with info about registers in
1426 insns. */
1428 /* Process X of insn UID recursively and add info (operand type is
1429 given by TYPE, flag of that it is early clobber is EARLY_CLOBBER)
1430 about registers in X to the insn DATA. */
1431 static void
1432 add_regs_to_insn_regno_info (lra_insn_recog_data_t data, rtx x, int uid,
1433 enum op_type type, bool early_clobber)
1435 int i, j, regno;
1436 bool subreg_p;
1437 machine_mode mode;
1438 const char *fmt;
1439 enum rtx_code code;
1440 struct lra_insn_reg *curr;
1442 code = GET_CODE (x);
1443 mode = GET_MODE (x);
1444 subreg_p = false;
1445 if (GET_CODE (x) == SUBREG)
1447 x = SUBREG_REG (x);
1448 code = GET_CODE (x);
1449 if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (x)))
1451 mode = GET_MODE (x);
1452 if (GET_MODE_SIZE (mode) > REGMODE_NATURAL_SIZE (mode))
1453 subreg_p = true;
1456 if (REG_P (x))
1458 regno = REGNO (x);
1459 if (regno < FIRST_PSEUDO_REGISTER
1460 && TEST_HARD_REG_BIT (lra_no_alloc_regs, regno)
1461 && ! TEST_HARD_REG_BIT (eliminable_regset, regno))
1462 return;
1463 expand_reg_info ();
1464 if (bitmap_set_bit (&lra_reg_info[regno].insn_bitmap, uid))
1466 data->regs = new_insn_reg (data->insn, regno, type, mode, subreg_p,
1467 early_clobber, data->regs);
1468 return;
1470 else
1472 for (curr = data->regs; curr != NULL; curr = curr->next)
1473 if (curr->regno == regno)
1475 if (curr->subreg_p != subreg_p || curr->biggest_mode != mode)
1476 /* The info can not be integrated into the found
1477 structure. */
1478 data->regs = new_insn_reg (data->insn, regno, type, mode,
1479 subreg_p, early_clobber,
1480 data->regs);
1481 else
1483 if (curr->type != type)
1484 curr->type = OP_INOUT;
1485 if (curr->early_clobber != early_clobber)
1486 curr->early_clobber = true;
1488 return;
1490 gcc_unreachable ();
1494 switch (code)
1496 case SET:
1497 add_regs_to_insn_regno_info (data, SET_DEST (x), uid, OP_OUT, false);
1498 add_regs_to_insn_regno_info (data, SET_SRC (x), uid, OP_IN, false);
1499 break;
1500 case CLOBBER:
1501 /* We treat clobber of non-operand hard registers as early
1502 clobber (the behavior is expected from asm). */
1503 add_regs_to_insn_regno_info (data, XEXP (x, 0), uid, OP_OUT, true);
1504 break;
1505 case PRE_INC: case PRE_DEC: case POST_INC: case POST_DEC:
1506 add_regs_to_insn_regno_info (data, XEXP (x, 0), uid, OP_INOUT, false);
1507 break;
1508 case PRE_MODIFY: case POST_MODIFY:
1509 add_regs_to_insn_regno_info (data, XEXP (x, 0), uid, OP_INOUT, false);
1510 add_regs_to_insn_regno_info (data, XEXP (x, 1), uid, OP_IN, false);
1511 break;
1512 default:
1513 if ((code != PARALLEL && code != EXPR_LIST) || type != OP_OUT)
1514 /* Some targets place small structures in registers for return
1515 values of functions, and those registers are wrapped in
1516 PARALLEL that we may see as the destination of a SET. Here
1517 is an example:
1519 (call_insn 13 12 14 2 (set (parallel:BLK [
1520 (expr_list:REG_DEP_TRUE (reg:DI 0 ax)
1521 (const_int 0 [0]))
1522 (expr_list:REG_DEP_TRUE (reg:DI 1 dx)
1523 (const_int 8 [0x8]))
1525 (call (mem:QI (symbol_ref:DI (... */
1526 type = OP_IN;
1527 fmt = GET_RTX_FORMAT (code);
1528 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1530 if (fmt[i] == 'e')
1531 add_regs_to_insn_regno_info (data, XEXP (x, i), uid, type, false);
1532 else if (fmt[i] == 'E')
1534 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1535 add_regs_to_insn_regno_info (data, XVECEXP (x, i, j), uid,
1536 type, false);
1542 /* Return execution frequency of INSN. */
1543 static int
1544 get_insn_freq (rtx_insn *insn)
1546 basic_block bb = BLOCK_FOR_INSN (insn);
1548 gcc_checking_assert (bb != NULL);
1549 return REG_FREQ_FROM_BB (bb);
1552 /* Invalidate all reg info of INSN with DATA and execution frequency
1553 FREQ. Update common info about the invalidated registers. */
1554 static void
1555 invalidate_insn_data_regno_info (lra_insn_recog_data_t data, rtx_insn *insn,
1556 int freq)
1558 int uid;
1559 bool debug_p;
1560 unsigned int i;
1561 struct lra_insn_reg *ir, *next_ir;
1563 uid = INSN_UID (insn);
1564 debug_p = DEBUG_INSN_P (insn);
1565 for (ir = data->regs; ir != NULL; ir = next_ir)
1567 i = ir->regno;
1568 next_ir = ir->next;
1569 free_insn_reg (ir);
1570 bitmap_clear_bit (&lra_reg_info[i].insn_bitmap, uid);
1571 if (i >= FIRST_PSEUDO_REGISTER && ! debug_p)
1573 lra_reg_info[i].nrefs--;
1574 lra_reg_info[i].freq -= freq;
1575 lra_assert (lra_reg_info[i].nrefs >= 0 && lra_reg_info[i].freq >= 0);
1578 data->regs = NULL;
1581 /* Invalidate all reg info of INSN. Update common info about the
1582 invalidated registers. */
1583 void
1584 lra_invalidate_insn_regno_info (rtx_insn *insn)
1586 invalidate_insn_data_regno_info (lra_get_insn_recog_data (insn), insn,
1587 get_insn_freq (insn));
1590 /* Update common reg info from reg info of insn given by its DATA and
1591 execution frequency FREQ. */
1592 static void
1593 setup_insn_reg_info (lra_insn_recog_data_t data, int freq)
1595 unsigned int i;
1596 struct lra_insn_reg *ir;
1598 for (ir = data->regs; ir != NULL; ir = ir->next)
1599 if ((i = ir->regno) >= FIRST_PSEUDO_REGISTER)
1601 lra_reg_info[i].nrefs++;
1602 lra_reg_info[i].freq += freq;
1606 /* Set up insn reg info of INSN. Update common reg info from reg info
1607 of INSN. */
1608 void
1609 lra_update_insn_regno_info (rtx_insn *insn)
1611 int i, uid, freq;
1612 lra_insn_recog_data_t data;
1613 struct lra_static_insn_data *static_data;
1614 enum rtx_code code;
1616 if (! INSN_P (insn))
1617 return;
1618 data = lra_get_insn_recog_data (insn);
1619 static_data = data->insn_static_data;
1620 freq = get_insn_freq (insn);
1621 invalidate_insn_data_regno_info (data, insn, freq);
1622 uid = INSN_UID (insn);
1623 for (i = static_data->n_operands - 1; i >= 0; i--)
1624 add_regs_to_insn_regno_info (data, *data->operand_loc[i], uid,
1625 static_data->operand[i].type,
1626 static_data->operand[i].early_clobber);
1627 if ((code = GET_CODE (PATTERN (insn))) == CLOBBER || code == USE)
1628 add_regs_to_insn_regno_info (data, XEXP (PATTERN (insn), 0), uid,
1629 code == USE ? OP_IN : OP_OUT, false);
1630 if (NONDEBUG_INSN_P (insn))
1631 setup_insn_reg_info (data, freq);
1634 /* Return reg info of insn given by it UID. */
1635 struct lra_insn_reg *
1636 lra_get_insn_regs (int uid)
1638 lra_insn_recog_data_t data;
1640 data = get_insn_recog_data_by_uid (uid);
1641 return data->regs;
1646 /* This page contains code dealing with stack of the insns which
1647 should be processed by the next constraint pass. */
1649 /* Bitmap used to put an insn on the stack only in one exemplar. */
1650 static sbitmap lra_constraint_insn_stack_bitmap;
1652 /* The stack itself. */
1653 vec<rtx_insn *> lra_constraint_insn_stack;
1655 /* Put INSN on the stack. If ALWAYS_UPDATE is true, always update the reg
1656 info for INSN, otherwise only update it if INSN is not already on the
1657 stack. */
1658 static inline void
1659 lra_push_insn_1 (rtx_insn *insn, bool always_update)
1661 unsigned int uid = INSN_UID (insn);
1662 if (always_update)
1663 lra_update_insn_regno_info (insn);
1664 if (uid >= SBITMAP_SIZE (lra_constraint_insn_stack_bitmap))
1665 lra_constraint_insn_stack_bitmap =
1666 sbitmap_resize (lra_constraint_insn_stack_bitmap, 3 * uid / 2, 0);
1667 if (bitmap_bit_p (lra_constraint_insn_stack_bitmap, uid))
1668 return;
1669 bitmap_set_bit (lra_constraint_insn_stack_bitmap, uid);
1670 if (! always_update)
1671 lra_update_insn_regno_info (insn);
1672 lra_constraint_insn_stack.safe_push (insn);
1675 /* Put INSN on the stack. */
1676 void
1677 lra_push_insn (rtx_insn *insn)
1679 lra_push_insn_1 (insn, false);
1682 /* Put INSN on the stack and update its reg info. */
1683 void
1684 lra_push_insn_and_update_insn_regno_info (rtx_insn *insn)
1686 lra_push_insn_1 (insn, true);
1689 /* Put insn with UID on the stack. */
1690 void
1691 lra_push_insn_by_uid (unsigned int uid)
1693 lra_push_insn (lra_insn_recog_data[uid]->insn);
1696 /* Take the last-inserted insns off the stack and return it. */
1697 rtx_insn *
1698 lra_pop_insn (void)
1700 rtx_insn *insn = lra_constraint_insn_stack.pop ();
1701 bitmap_clear_bit (lra_constraint_insn_stack_bitmap, INSN_UID (insn));
1702 return insn;
1705 /* Return the current size of the insn stack. */
1706 unsigned int
1707 lra_insn_stack_length (void)
1709 return lra_constraint_insn_stack.length ();
1712 /* Push insns FROM to TO (excluding it) going in reverse order. */
1713 static void
1714 push_insns (rtx_insn *from, rtx_insn *to)
1716 rtx_insn *insn;
1718 if (from == NULL_RTX)
1719 return;
1720 for (insn = from; insn != to; insn = PREV_INSN (insn))
1721 if (INSN_P (insn))
1722 lra_push_insn (insn);
1725 /* Set up sp offset for insn in range [FROM, LAST]. The offset is
1726 taken from the next BB insn after LAST or zero if there in such
1727 insn. */
1728 static void
1729 setup_sp_offset (rtx_insn *from, rtx_insn *last)
1731 rtx_insn *before = next_nonnote_insn_bb (last);
1732 HOST_WIDE_INT offset = (before == NULL_RTX || ! INSN_P (before)
1733 ? 0 : lra_get_insn_recog_data (before)->sp_offset);
1735 for (rtx_insn *insn = from; insn != NEXT_INSN (last); insn = NEXT_INSN (insn))
1736 lra_get_insn_recog_data (insn)->sp_offset = offset;
1739 /* Emit insns BEFORE before INSN and insns AFTER after INSN. Put the
1740 insns onto the stack. Print about emitting the insns with
1741 TITLE. */
1742 void
1743 lra_process_new_insns (rtx_insn *insn, rtx_insn *before, rtx_insn *after,
1744 const char *title)
1746 rtx_insn *last;
1748 if (before == NULL_RTX && after == NULL_RTX)
1749 return;
1750 if (lra_dump_file != NULL)
1752 dump_insn_slim (lra_dump_file, insn);
1753 if (before != NULL_RTX)
1755 fprintf (lra_dump_file," %s before:\n", title);
1756 dump_rtl_slim (lra_dump_file, before, NULL, -1, 0);
1758 if (after != NULL_RTX)
1760 fprintf (lra_dump_file, " %s after:\n", title);
1761 dump_rtl_slim (lra_dump_file, after, NULL, -1, 0);
1763 fprintf (lra_dump_file, "\n");
1765 if (before != NULL_RTX)
1767 emit_insn_before (before, insn);
1768 push_insns (PREV_INSN (insn), PREV_INSN (before));
1769 setup_sp_offset (before, PREV_INSN (insn));
1771 if (after != NULL_RTX)
1773 for (last = after; NEXT_INSN (last) != NULL_RTX; last = NEXT_INSN (last))
1775 emit_insn_after (after, insn);
1776 push_insns (last, insn);
1777 setup_sp_offset (after, last);
1783 /* Replace all references to register OLD_REGNO in *LOC with pseudo
1784 register NEW_REG. Return true if any change was made. */
1785 bool
1786 lra_substitute_pseudo (rtx *loc, int old_regno, rtx new_reg)
1788 rtx x = *loc;
1789 bool result = false;
1790 enum rtx_code code;
1791 const char *fmt;
1792 int i, j;
1794 if (x == NULL_RTX)
1795 return false;
1797 code = GET_CODE (x);
1798 if (code == REG && (int) REGNO (x) == old_regno)
1800 machine_mode mode = GET_MODE (*loc);
1801 machine_mode inner_mode = GET_MODE (new_reg);
1803 if (mode != inner_mode)
1805 if (GET_MODE_SIZE (mode) >= GET_MODE_SIZE (inner_mode)
1806 || ! SCALAR_INT_MODE_P (inner_mode))
1807 new_reg = gen_rtx_SUBREG (mode, new_reg, 0);
1808 else
1809 new_reg = gen_lowpart_SUBREG (mode, new_reg);
1811 *loc = new_reg;
1812 return true;
1815 /* Scan all the operand sub-expressions. */
1816 fmt = GET_RTX_FORMAT (code);
1817 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1819 if (fmt[i] == 'e')
1821 if (lra_substitute_pseudo (&XEXP (x, i), old_regno, new_reg))
1822 result = true;
1824 else if (fmt[i] == 'E')
1826 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1827 if (lra_substitute_pseudo (&XVECEXP (x, i, j), old_regno, new_reg))
1828 result = true;
1831 return result;
1834 /* Call lra_substitute_pseudo within an insn. This won't update the insn ptr,
1835 just the contents of the insn. */
1836 bool
1837 lra_substitute_pseudo_within_insn (rtx_insn *insn, int old_regno, rtx new_reg)
1839 rtx loc = insn;
1840 return lra_substitute_pseudo (&loc, old_regno, new_reg);
1845 /* This page contains code dealing with scratches (changing them onto
1846 pseudos and restoring them from the pseudos).
1848 We change scratches into pseudos at the beginning of LRA to
1849 simplify dealing with them (conflicts, hard register assignments).
1851 If the pseudo denoting scratch was spilled it means that we do need
1852 a hard register for it. Such pseudos are transformed back to
1853 scratches at the end of LRA. */
1855 /* Description of location of a former scratch operand. */
1856 struct sloc
1858 rtx_insn *insn; /* Insn where the scratch was. */
1859 int nop; /* Number of the operand which was a scratch. */
1862 typedef struct sloc *sloc_t;
1864 /* Locations of the former scratches. */
1865 static vec<sloc_t> scratches;
1867 /* Bitmap of scratch regnos. */
1868 static bitmap_head scratch_bitmap;
1870 /* Bitmap of scratch operands. */
1871 static bitmap_head scratch_operand_bitmap;
1873 /* Return true if pseudo REGNO is made of SCRATCH. */
1874 bool
1875 lra_former_scratch_p (int regno)
1877 return bitmap_bit_p (&scratch_bitmap, regno);
1880 /* Return true if the operand NOP of INSN is a former scratch. */
1881 bool
1882 lra_former_scratch_operand_p (rtx_insn *insn, int nop)
1884 return bitmap_bit_p (&scratch_operand_bitmap,
1885 INSN_UID (insn) * MAX_RECOG_OPERANDS + nop) != 0;
1888 /* Change scratches onto pseudos and save their location. */
1889 static void
1890 remove_scratches (void)
1892 int i;
1893 bool insn_changed_p;
1894 basic_block bb;
1895 rtx_insn *insn;
1896 rtx reg;
1897 sloc_t loc;
1898 lra_insn_recog_data_t id;
1899 struct lra_static_insn_data *static_id;
1901 scratches.create (get_max_uid ());
1902 bitmap_initialize (&scratch_bitmap, &reg_obstack);
1903 bitmap_initialize (&scratch_operand_bitmap, &reg_obstack);
1904 FOR_EACH_BB_FN (bb, cfun)
1905 FOR_BB_INSNS (bb, insn)
1906 if (INSN_P (insn))
1908 id = lra_get_insn_recog_data (insn);
1909 static_id = id->insn_static_data;
1910 insn_changed_p = false;
1911 for (i = 0; i < static_id->n_operands; i++)
1912 if (GET_CODE (*id->operand_loc[i]) == SCRATCH
1913 && GET_MODE (*id->operand_loc[i]) != VOIDmode)
1915 insn_changed_p = true;
1916 *id->operand_loc[i] = reg
1917 = lra_create_new_reg (static_id->operand[i].mode,
1918 *id->operand_loc[i], ALL_REGS, NULL);
1919 add_reg_note (insn, REG_UNUSED, reg);
1920 lra_update_dup (id, i);
1921 loc = XNEW (struct sloc);
1922 loc->insn = insn;
1923 loc->nop = i;
1924 scratches.safe_push (loc);
1925 bitmap_set_bit (&scratch_bitmap, REGNO (*id->operand_loc[i]));
1926 bitmap_set_bit (&scratch_operand_bitmap,
1927 INSN_UID (insn) * MAX_RECOG_OPERANDS + i);
1928 if (lra_dump_file != NULL)
1929 fprintf (lra_dump_file,
1930 "Removing SCRATCH in insn #%u (nop %d)\n",
1931 INSN_UID (insn), i);
1933 if (insn_changed_p)
1934 /* Because we might use DF right after caller-saves sub-pass
1935 we need to keep DF info up to date. */
1936 df_insn_rescan (insn);
1940 /* Changes pseudos created by function remove_scratches onto scratches. */
1941 static void
1942 restore_scratches (void)
1944 int regno;
1945 unsigned i;
1946 sloc_t loc;
1947 rtx_insn *last = NULL;
1948 lra_insn_recog_data_t id = NULL;
1950 for (i = 0; scratches.iterate (i, &loc); i++)
1952 if (last != loc->insn)
1954 last = loc->insn;
1955 id = lra_get_insn_recog_data (last);
1957 if (REG_P (*id->operand_loc[loc->nop])
1958 && ((regno = REGNO (*id->operand_loc[loc->nop]))
1959 >= FIRST_PSEUDO_REGISTER)
1960 && lra_get_regno_hard_regno (regno) < 0)
1962 /* It should be only case when scratch register with chosen
1963 constraint 'X' did not get memory or hard register. */
1964 lra_assert (lra_former_scratch_p (regno));
1965 *id->operand_loc[loc->nop]
1966 = gen_rtx_SCRATCH (GET_MODE (*id->operand_loc[loc->nop]));
1967 lra_update_dup (id, loc->nop);
1968 if (lra_dump_file != NULL)
1969 fprintf (lra_dump_file, "Restoring SCRATCH in insn #%u(nop %d)\n",
1970 INSN_UID (loc->insn), loc->nop);
1973 for (i = 0; scratches.iterate (i, &loc); i++)
1974 free (loc);
1975 scratches.release ();
1976 bitmap_clear (&scratch_bitmap);
1977 bitmap_clear (&scratch_operand_bitmap);
1982 #ifdef ENABLE_CHECKING
1984 /* Function checks RTL for correctness. If FINAL_P is true, it is
1985 done at the end of LRA and the check is more rigorous. */
1986 static void
1987 check_rtl (bool final_p)
1989 basic_block bb;
1990 rtx_insn *insn;
1992 lra_assert (! final_p || reload_completed);
1993 FOR_EACH_BB_FN (bb, cfun)
1994 FOR_BB_INSNS (bb, insn)
1995 if (NONDEBUG_INSN_P (insn)
1996 && GET_CODE (PATTERN (insn)) != USE
1997 && GET_CODE (PATTERN (insn)) != CLOBBER
1998 && GET_CODE (PATTERN (insn)) != ASM_INPUT)
2000 if (final_p)
2002 #ifdef ENABLED_CHECKING
2003 extract_constrain_insn (insn);
2004 #endif
2005 continue;
2007 /* LRA code is based on assumption that all addresses can be
2008 correctly decomposed. LRA can generate reloads for
2009 decomposable addresses. The decomposition code checks the
2010 correctness of the addresses. So we don't need to check
2011 the addresses here. Don't call insn_invalid_p here, it can
2012 change the code at this stage. */
2013 if (recog_memoized (insn) < 0 && asm_noperands (PATTERN (insn)) < 0)
2014 fatal_insn_not_found (insn);
2017 #endif /* #ifdef ENABLE_CHECKING */
2019 /* Determine if the current function has an exception receiver block
2020 that reaches the exit block via non-exceptional edges */
2021 static bool
2022 has_nonexceptional_receiver (void)
2024 edge e;
2025 edge_iterator ei;
2026 basic_block *tos, *worklist, bb;
2028 /* If we're not optimizing, then just err on the safe side. */
2029 if (!optimize)
2030 return true;
2032 /* First determine which blocks can reach exit via normal paths. */
2033 tos = worklist = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun) + 1);
2035 FOR_EACH_BB_FN (bb, cfun)
2036 bb->flags &= ~BB_REACHABLE;
2038 /* Place the exit block on our worklist. */
2039 EXIT_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_REACHABLE;
2040 *tos++ = EXIT_BLOCK_PTR_FOR_FN (cfun);
2042 /* Iterate: find everything reachable from what we've already seen. */
2043 while (tos != worklist)
2045 bb = *--tos;
2047 FOR_EACH_EDGE (e, ei, bb->preds)
2048 if (e->flags & EDGE_ABNORMAL)
2050 free (worklist);
2051 return true;
2053 else
2055 basic_block src = e->src;
2057 if (!(src->flags & BB_REACHABLE))
2059 src->flags |= BB_REACHABLE;
2060 *tos++ = src;
2064 free (worklist);
2065 /* No exceptional block reached exit unexceptionally. */
2066 return false;
2069 #ifdef AUTO_INC_DEC
2071 /* Process recursively X of INSN and add REG_INC notes if necessary. */
2072 static void
2073 add_auto_inc_notes (rtx_insn *insn, rtx x)
2075 enum rtx_code code = GET_CODE (x);
2076 const char *fmt;
2077 int i, j;
2079 if (code == MEM && auto_inc_p (XEXP (x, 0)))
2081 add_reg_note (insn, REG_INC, XEXP (XEXP (x, 0), 0));
2082 return;
2085 /* Scan all X sub-expressions. */
2086 fmt = GET_RTX_FORMAT (code);
2087 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2089 if (fmt[i] == 'e')
2090 add_auto_inc_notes (insn, XEXP (x, i));
2091 else if (fmt[i] == 'E')
2092 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2093 add_auto_inc_notes (insn, XVECEXP (x, i, j));
2097 #endif
2099 /* Remove all REG_DEAD and REG_UNUSED notes and regenerate REG_INC.
2100 We change pseudos by hard registers without notification of DF and
2101 that can make the notes obsolete. DF-infrastructure does not deal
2102 with REG_INC notes -- so we should regenerate them here. */
2103 static void
2104 update_inc_notes (void)
2106 rtx *pnote;
2107 basic_block bb;
2108 rtx_insn *insn;
2110 FOR_EACH_BB_FN (bb, cfun)
2111 FOR_BB_INSNS (bb, insn)
2112 if (NONDEBUG_INSN_P (insn))
2114 pnote = &REG_NOTES (insn);
2115 while (*pnote != 0)
2117 if (REG_NOTE_KIND (*pnote) == REG_DEAD
2118 || REG_NOTE_KIND (*pnote) == REG_UNUSED
2119 || REG_NOTE_KIND (*pnote) == REG_INC)
2120 *pnote = XEXP (*pnote, 1);
2121 else
2122 pnote = &XEXP (*pnote, 1);
2124 #ifdef AUTO_INC_DEC
2125 add_auto_inc_notes (insn, PATTERN (insn));
2126 #endif
2130 /* Set to 1 while in lra. */
2131 int lra_in_progress;
2133 /* Start of pseudo regnos before the LRA. */
2134 int lra_new_regno_start;
2136 /* Start of reload pseudo regnos before the new spill pass. */
2137 int lra_constraint_new_regno_start;
2139 /* Inheritance pseudo regnos before the new spill pass. */
2140 bitmap_head lra_inheritance_pseudos;
2142 /* Split regnos before the new spill pass. */
2143 bitmap_head lra_split_regs;
2145 /* Reload pseudo regnos before the new assignmnet pass which still can
2146 be spilled after the assinment pass as memory is also accepted in
2147 insns for the reload pseudos. */
2148 bitmap_head lra_optional_reload_pseudos;
2150 /* Pseudo regnos used for subreg reloads before the new assignment
2151 pass. Such pseudos still can be spilled after the assinment
2152 pass. */
2153 bitmap_head lra_subreg_reload_pseudos;
2155 /* First UID of insns generated before a new spill pass. */
2156 int lra_constraint_new_insn_uid_start;
2158 /* File used for output of LRA debug information. */
2159 FILE *lra_dump_file;
2161 /* True if we should try spill into registers of different classes
2162 instead of memory. */
2163 bool lra_reg_spill_p;
2165 /* Set up value LRA_REG_SPILL_P. */
2166 static void
2167 setup_reg_spill_flag (void)
2169 int cl, mode;
2171 if (targetm.spill_class != NULL)
2172 for (cl = 0; cl < (int) LIM_REG_CLASSES; cl++)
2173 for (mode = 0; mode < MAX_MACHINE_MODE; mode++)
2174 if (targetm.spill_class ((enum reg_class) cl,
2175 (machine_mode) mode) != NO_REGS)
2177 lra_reg_spill_p = true;
2178 return;
2180 lra_reg_spill_p = false;
2183 /* True if the current function is too big to use regular algorithms
2184 in LRA. In other words, we should use simpler and faster algorithms
2185 in LRA. It also means we should not worry about generation code
2186 for caller saves. The value is set up in IRA. */
2187 bool lra_simple_p;
2189 /* Major LRA entry function. F is a file should be used to dump LRA
2190 debug info. */
2191 void
2192 lra (FILE *f)
2194 int i;
2195 bool live_p, scratch_p, inserted_p;
2197 lra_dump_file = f;
2199 timevar_push (TV_LRA);
2201 /* Make sure that the last insn is a note. Some subsequent passes
2202 need it. */
2203 emit_note (NOTE_INSN_DELETED);
2205 COPY_HARD_REG_SET (lra_no_alloc_regs, ira_no_alloc_regs);
2207 init_reg_info ();
2208 expand_reg_info ();
2210 init_insn_recog_data ();
2212 #ifdef ENABLE_CHECKING
2213 /* Some quick check on RTL generated by previous passes. */
2214 check_rtl (false);
2215 #endif
2217 lra_in_progress = 1;
2219 lra_live_range_iter = lra_coalesce_iter = lra_constraint_iter = 0;
2220 lra_assignment_iter = lra_assignment_iter_after_spill = 0;
2221 lra_inheritance_iter = lra_undo_inheritance_iter = 0;
2223 setup_reg_spill_flag ();
2225 /* Function remove_scratches can creates new pseudos for clobbers --
2226 so set up lra_constraint_new_regno_start before its call to
2227 permit changing reg classes for pseudos created by this
2228 simplification. */
2229 lra_constraint_new_regno_start = lra_new_regno_start = max_reg_num ();
2230 remove_scratches ();
2231 scratch_p = lra_constraint_new_regno_start != max_reg_num ();
2233 /* A function that has a non-local label that can reach the exit
2234 block via non-exceptional paths must save all call-saved
2235 registers. */
2236 if (cfun->has_nonlocal_label && has_nonexceptional_receiver ())
2237 crtl->saves_all_registers = 1;
2239 if (crtl->saves_all_registers)
2240 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2241 if (! call_used_regs[i] && ! fixed_regs[i] && ! LOCAL_REGNO (i))
2242 df_set_regs_ever_live (i, true);
2244 /* We don't DF from now and avoid its using because it is to
2245 expensive when a lot of RTL changes are made. */
2246 df_set_flags (DF_NO_INSN_RESCAN);
2247 lra_constraint_insn_stack.create (get_max_uid ());
2248 lra_constraint_insn_stack_bitmap = sbitmap_alloc (get_max_uid ());
2249 bitmap_clear (lra_constraint_insn_stack_bitmap);
2250 lra_live_ranges_init ();
2251 lra_constraints_init ();
2252 lra_curr_reload_num = 0;
2253 push_insns (get_last_insn (), NULL);
2254 /* It is needed for the 1st coalescing. */
2255 lra_constraint_new_insn_uid_start = get_max_uid ();
2256 bitmap_initialize (&lra_inheritance_pseudos, &reg_obstack);
2257 bitmap_initialize (&lra_split_regs, &reg_obstack);
2258 bitmap_initialize (&lra_optional_reload_pseudos, &reg_obstack);
2259 bitmap_initialize (&lra_subreg_reload_pseudos, &reg_obstack);
2260 live_p = false;
2261 if (get_frame_size () != 0 && crtl->stack_alignment_needed)
2262 /* If we have a stack frame, we must align it now. The stack size
2263 may be a part of the offset computation for register
2264 elimination. */
2265 assign_stack_local (BLKmode, 0, crtl->stack_alignment_needed);
2266 lra_init_equiv ();
2267 for (;;)
2269 for (;;)
2271 /* We should try to assign hard registers to scratches even
2272 if there were no RTL transformations in
2273 lra_constraints. */
2274 if (! lra_constraints (lra_constraint_iter == 0)
2275 && (lra_constraint_iter > 1
2276 || (! scratch_p && ! caller_save_needed)))
2277 break;
2278 /* Constraint transformations may result in that eliminable
2279 hard regs become uneliminable and pseudos which use them
2280 should be spilled. It is better to do it before pseudo
2281 assignments.
2283 For example, rs6000 can make
2284 RS6000_PIC_OFFSET_TABLE_REGNUM uneliminable if we started
2285 to use a constant pool. */
2286 lra_eliminate (false, false);
2287 /* Do inheritance only for regular algorithms. */
2288 if (! lra_simple_p)
2290 if (flag_use_caller_save)
2292 if (live_p)
2293 lra_clear_live_ranges ();
2294 /* As a side-effect of lra_create_live_ranges, we calculate
2295 actual_call_used_reg_set, which is needed during
2296 lra_inheritance. */
2297 lra_create_live_ranges (true);
2299 lra_inheritance ();
2301 if (live_p)
2302 lra_clear_live_ranges ();
2303 /* We need live ranges for lra_assign -- so build them. */
2304 lra_create_live_ranges (true);
2305 live_p = true;
2306 /* If we don't spill non-reload and non-inheritance pseudos,
2307 there is no sense to run memory-memory move coalescing.
2308 If inheritance pseudos were spilled, the memory-memory
2309 moves involving them will be removed by pass undoing
2310 inheritance. */
2311 if (lra_simple_p)
2312 lra_assign ();
2313 else
2315 bool spill_p = !lra_assign ();
2317 if (lra_undo_inheritance ())
2318 live_p = false;
2319 if (spill_p)
2321 if (! live_p)
2323 lra_create_live_ranges (true);
2324 live_p = true;
2326 if (lra_coalesce ())
2327 live_p = false;
2329 if (! live_p)
2330 lra_clear_live_ranges ();
2333 /* Don't clear optional reloads bitmap until all constraints are
2334 satisfied as we need to differ them from regular reloads. */
2335 bitmap_clear (&lra_optional_reload_pseudos);
2336 bitmap_clear (&lra_subreg_reload_pseudos);
2337 bitmap_clear (&lra_inheritance_pseudos);
2338 bitmap_clear (&lra_split_regs);
2339 if (! lra_need_for_spills_p ())
2340 break;
2341 if (! live_p)
2343 /* We need full live info for spilling pseudos into
2344 registers instead of memory. */
2345 lra_create_live_ranges (lra_reg_spill_p);
2346 live_p = true;
2348 lra_spill ();
2349 /* Assignment of stack slots changes elimination offsets for
2350 some eliminations. So update the offsets here. */
2351 lra_eliminate (false, false);
2352 lra_constraint_new_regno_start = max_reg_num ();
2353 lra_constraint_new_insn_uid_start = get_max_uid ();
2354 lra_assignment_iter_after_spill = 0;
2356 restore_scratches ();
2357 lra_eliminate (true, false);
2358 lra_final_code_change ();
2359 lra_in_progress = 0;
2360 if (live_p)
2361 lra_clear_live_ranges ();
2362 lra_live_ranges_finish ();
2363 lra_constraints_finish ();
2364 finish_reg_info ();
2365 sbitmap_free (lra_constraint_insn_stack_bitmap);
2366 lra_constraint_insn_stack.release ();
2367 finish_insn_recog_data ();
2368 regstat_free_n_sets_and_refs ();
2369 regstat_free_ri ();
2370 reload_completed = 1;
2371 update_inc_notes ();
2373 inserted_p = fixup_abnormal_edges ();
2375 /* We've possibly turned single trapping insn into multiple ones. */
2376 if (cfun->can_throw_non_call_exceptions)
2378 sbitmap blocks;
2379 blocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
2380 bitmap_ones (blocks);
2381 find_many_sub_basic_blocks (blocks);
2382 sbitmap_free (blocks);
2385 if (inserted_p)
2386 commit_edge_insertions ();
2388 /* Replacing pseudos with their memory equivalents might have
2389 created shared rtx. Subsequent passes would get confused
2390 by this, so unshare everything here. */
2391 unshare_all_rtl_again (get_insns ());
2393 #ifdef ENABLE_CHECKING
2394 check_rtl (true);
2395 #endif
2397 timevar_pop (TV_LRA);
/* Called once per compiler run to initialize LRA data once.  */
void
lra_init_once (void)
{
  init_insn_code_data_once ();
}
/* Called once per compiler run to finish LRA data which are
   initialized once.  */
void
lra_finish_once (void)
{
  finish_insn_code_data_once ();
}