PR c/61852
[official-gcc.git] / gcc / lra-constraints.c
bloba43f8dc420886f141ef16309415de0ac3eac4376
1 /* Code for RTL transformations to satisfy insn constraints.
2 Copyright (C) 2010-2014 Free Software Foundation, Inc.
3 Contributed by Vladimir Makarov <vmakarov@redhat.com>.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
22 /* This file contains code for 3 passes: constraint pass,
23 inheritance/split pass, and pass for undoing failed inheritance and
24 split.
26 The major goal of constraint pass is to transform RTL to satisfy
27 insn and address constraints by:
28 o choosing insn alternatives;
29 o generating *reload insns* (or reloads in brief) and *reload
30 pseudos* which will get necessary hard registers later;
31 o substituting pseudos with equivalent values and removing the
32 instructions that initialized those pseudos.
34   The constraint pass has the biggest and most complicated code in LRA.
35 There are a lot of important details like:
36 o reuse of input reload pseudos to simplify reload pseudo
37 allocations;
38 o some heuristics to choose insn alternative to improve the
39 inheritance;
40 o early clobbers etc.
42 The pass is mimicking former reload pass in alternative choosing
43 because the reload pass is oriented to current machine description
44 model. It might be changed if the machine description model is
45 changed.
47 There is special code for preventing all LRA and this pass cycling
48 in case of bugs.
50 On the first iteration of the pass we process every instruction and
51 choose an alternative for each one. On subsequent iterations we try
52 to avoid reprocessing instructions if we can be sure that the old
53 choice is still valid.
55   The inheritance/split pass is to transform code to achieve
56   inheritance and live range splitting.  It is done on backward
57 traversal of EBBs.
59 The inheritance optimization goal is to reuse values in hard
60 registers. There is analogous optimization in old reload pass. The
61 inheritance is achieved by following transformation:
63 reload_p1 <- p reload_p1 <- p
64 ... new_p <- reload_p1
65 ... => ...
66 reload_p2 <- p reload_p2 <- new_p
68 where p is spilled and not changed between the insns. Reload_p1 is
69 also called *original pseudo* and new_p is called *inheritance
70 pseudo*.
72 The subsequent assignment pass will try to assign the same (or
73 another if it is not possible) hard register to new_p as to
74 reload_p1 or reload_p2.
76 If the assignment pass fails to assign a hard register to new_p,
77 this file will undo the inheritance and restore the original code.
78 This is because implementing the above sequence with a spilled
79 new_p would make the code much worse. The inheritance is done in
80 EBB scope. The above is just a simplified example to get an idea
81 of the inheritance as the inheritance is also done for non-reload
82 insns.
84 Splitting (transformation) is also done in EBB scope on the same
85 pass as the inheritance:
87 r <- ... or ... <- r r <- ... or ... <- r
88 ... s <- r (new insn -- save)
89 ... =>
90 ... r <- s (new insn -- restore)
91 ... <- r ... <- r
93 The *split pseudo* s is assigned to the hard register of the
94 original pseudo or hard register r.
96 Splitting is done:
97 o In EBBs with high register pressure for global pseudos (living
98 in at least 2 BBs) and assigned to hard registers when there
99	 are more than one reload needing the hard registers;
100 o for pseudos needing save/restore code around calls.
102 If the split pseudo still has the same hard register as the
103 original pseudo after the subsequent assignment pass or the
104 original pseudo was split, the opposite transformation is done on
105 the same pass for undoing inheritance. */
107 #undef REG_OK_STRICT
109 #include "config.h"
110 #include "system.h"
111 #include "coretypes.h"
112 #include "tm.h"
113 #include "hard-reg-set.h"
114 #include "rtl.h"
115 #include "tm_p.h"
116 #include "regs.h"
117 #include "insn-config.h"
118 #include "insn-codes.h"
119 #include "recog.h"
120 #include "output.h"
121 #include "addresses.h"
122 #include "target.h"
123 #include "function.h"
124 #include "expr.h"
125 #include "basic-block.h"
126 #include "except.h"
127 #include "optabs.h"
128 #include "df.h"
129 #include "ira.h"
130 #include "rtl-error.h"
131 #include "lra-int.h"
133 /* Value of LRA_CURR_RELOAD_NUM at the beginning of BB of the current
134 insn. Remember that LRA_CURR_RELOAD_NUM is the number of emitted
135 reload insns. */
136 static int bb_reload_num;
138 /* The current insn being processed and corresponding its single set
139 (NULL otherwise), its data (basic block, the insn data, the insn
140 static data, and the mode of each operand). */
141 static rtx curr_insn;
142 static rtx curr_insn_set;
143 static basic_block curr_bb;
144 static lra_insn_recog_data_t curr_id;
145 static struct lra_static_insn_data *curr_static_id;
146 static enum machine_mode curr_operand_mode[MAX_RECOG_OPERANDS];
150 /* Start numbers for new registers and insns at the current constraints
151 pass start. */
152 static int new_regno_start;
153 static int new_insn_uid_start;
155 /* If LOC is nonnull, strip any outer subreg from it. */
156 static inline rtx *
157 strip_subreg (rtx *loc)
159 return loc && GET_CODE (*loc) == SUBREG ? &SUBREG_REG (*loc) : loc;
162 /* Return hard regno of REGNO or if it is was not assigned to a hard
163 register, use a hard register from its allocno class. */
164 static int
165 get_try_hard_regno (int regno)
167 int hard_regno;
168 enum reg_class rclass;
170 if ((hard_regno = regno) >= FIRST_PSEUDO_REGISTER)
171 hard_regno = lra_get_regno_hard_regno (regno);
172 if (hard_regno >= 0)
173 return hard_regno;
174 rclass = lra_get_allocno_class (regno);
175 if (rclass == NO_REGS)
176 return -1;
177 return ira_class_hard_regs[rclass][0];
180 /* Return final hard regno (plus offset) which will be after
181 elimination. We do this for matching constraints because the final
182 hard regno could have a different class. */
183 static int
184 get_final_hard_regno (int hard_regno, int offset)
186 if (hard_regno < 0)
187 return hard_regno;
188 hard_regno = lra_get_elimination_hard_regno (hard_regno);
189 return hard_regno + offset;
192 /* Return hard regno of X after removing subreg and making
193 elimination. If X is not a register or subreg of register, return
194 -1. For pseudo use its assignment. */
195 static int
196 get_hard_regno (rtx x)
198 rtx reg;
199 int offset, hard_regno;
201 reg = x;
202 if (GET_CODE (x) == SUBREG)
203 reg = SUBREG_REG (x);
204 if (! REG_P (reg))
205 return -1;
206 if ((hard_regno = REGNO (reg)) >= FIRST_PSEUDO_REGISTER)
207 hard_regno = lra_get_regno_hard_regno (hard_regno);
208 if (hard_regno < 0)
209 return -1;
210 offset = 0;
211 if (GET_CODE (x) == SUBREG)
212 offset += subreg_regno_offset (hard_regno, GET_MODE (reg),
213 SUBREG_BYTE (x), GET_MODE (x));
214 return get_final_hard_regno (hard_regno, offset);
217 /* If REGNO is a hard register or has been allocated a hard register,
218 return the class of that register. If REGNO is a reload pseudo
219 created by the current constraints pass, return its allocno class.
220 Return NO_REGS otherwise. */
221 static enum reg_class
222 get_reg_class (int regno)
224 int hard_regno;
226 if ((hard_regno = regno) >= FIRST_PSEUDO_REGISTER)
227 hard_regno = lra_get_regno_hard_regno (regno);
228 if (hard_regno >= 0)
230 hard_regno = get_final_hard_regno (hard_regno, 0);
231 return REGNO_REG_CLASS (hard_regno);
233 if (regno >= new_regno_start)
234 return lra_get_allocno_class (regno);
235 return NO_REGS;
238 /* Return true if REG satisfies (or will satisfy) reg class constraint
239 CL. Use elimination first if REG is a hard register. If REG is a
240 reload pseudo created by this constraints pass, assume that it will
241 be allocated a hard register from its allocno class, but allow that
242 class to be narrowed to CL if it is currently a superset of CL.
244 If NEW_CLASS is nonnull, set *NEW_CLASS to the new allocno class of
245 REGNO (reg), or NO_REGS if no change in its class was needed. */
246 static bool
247 in_class_p (rtx reg, enum reg_class cl, enum reg_class *new_class)
249 enum reg_class rclass, common_class;
250 enum machine_mode reg_mode;
251 int class_size, hard_regno, nregs, i, j;
252 int regno = REGNO (reg);
254 if (new_class != NULL)
255 *new_class = NO_REGS;
256 if (regno < FIRST_PSEUDO_REGISTER)
258 rtx final_reg = reg;
259 rtx *final_loc = &final_reg;
261 lra_eliminate_reg_if_possible (final_loc);
262 return TEST_HARD_REG_BIT (reg_class_contents[cl], REGNO (*final_loc));
264 reg_mode = GET_MODE (reg);
265 rclass = get_reg_class (regno);
266 if (regno < new_regno_start
267 /* Do not allow the constraints for reload instructions to
268 influence the classes of new pseudos. These reloads are
269 typically moves that have many alternatives, and restricting
270 reload pseudos for one alternative may lead to situations
271 where other reload pseudos are no longer allocatable. */
272 || (INSN_UID (curr_insn) >= new_insn_uid_start
273 && curr_insn_set != NULL
274 && ((OBJECT_P (SET_SRC (curr_insn_set))
275 && ! CONSTANT_P (SET_SRC (curr_insn_set)))
276 || (GET_CODE (SET_SRC (curr_insn_set)) == SUBREG
277 && OBJECT_P (SUBREG_REG (SET_SRC (curr_insn_set)))
278 && ! CONSTANT_P (SUBREG_REG (SET_SRC (curr_insn_set)))))))
279 /* When we don't know what class will be used finally for reload
280 pseudos, we use ALL_REGS. */
281 return ((regno >= new_regno_start && rclass == ALL_REGS)
282 || (rclass != NO_REGS && ira_class_subset_p[rclass][cl]
283 && ! hard_reg_set_subset_p (reg_class_contents[cl],
284 lra_no_alloc_regs)));
285 else
287 common_class = ira_reg_class_subset[rclass][cl];
288 if (new_class != NULL)
289 *new_class = common_class;
290 if (hard_reg_set_subset_p (reg_class_contents[common_class],
291 lra_no_alloc_regs))
292 return false;
293 /* Check that there are enough allocatable regs. */
294 class_size = ira_class_hard_regs_num[common_class];
295 for (i = 0; i < class_size; i++)
297 hard_regno = ira_class_hard_regs[common_class][i];
298 nregs = hard_regno_nregs[hard_regno][reg_mode];
299 if (nregs == 1)
300 return true;
301 for (j = 0; j < nregs; j++)
302 if (TEST_HARD_REG_BIT (lra_no_alloc_regs, hard_regno + j)
303 || ! TEST_HARD_REG_BIT (reg_class_contents[common_class],
304 hard_regno + j))
305 break;
306 if (j >= nregs)
307 return true;
309 return false;
313 /* Return true if REGNO satisfies a memory constraint. */
314 static bool
315 in_mem_p (int regno)
317 return get_reg_class (regno) == NO_REGS;
320 /* Return 1 if ADDR is a valid memory address for mode MODE in address
321 space AS, and check that each pseudo has the proper kind of hard
322 reg. */
323 static int
324 valid_address_p (enum machine_mode mode ATTRIBUTE_UNUSED,
325 rtx addr, addr_space_t as)
327 #ifdef GO_IF_LEGITIMATE_ADDRESS
328 lra_assert (ADDR_SPACE_GENERIC_P (as));
329 GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
330 return 0;
332 win:
333 return 1;
334 #else
335 return targetm.addr_space.legitimate_address_p (mode, addr, 0, as);
336 #endif
339 namespace {
340 /* Temporarily eliminates registers in an address (for the lifetime of
341 the object). */
342 class address_eliminator {
343 public:
344 address_eliminator (struct address_info *ad);
345 ~address_eliminator ();
347 private:
348 struct address_info *m_ad;
349 rtx *m_base_loc;
350 rtx m_base_reg;
351 rtx *m_index_loc;
352 rtx m_index_reg;
356 address_eliminator::address_eliminator (struct address_info *ad)
357 : m_ad (ad),
358 m_base_loc (strip_subreg (ad->base_term)),
359 m_base_reg (NULL_RTX),
360 m_index_loc (strip_subreg (ad->index_term)),
361 m_index_reg (NULL_RTX)
363 if (m_base_loc != NULL)
365 m_base_reg = *m_base_loc;
366 lra_eliminate_reg_if_possible (m_base_loc);
367 if (m_ad->base_term2 != NULL)
368 *m_ad->base_term2 = *m_ad->base_term;
370 if (m_index_loc != NULL)
372 m_index_reg = *m_index_loc;
373 lra_eliminate_reg_if_possible (m_index_loc);
377 address_eliminator::~address_eliminator ()
379 if (m_base_loc && *m_base_loc != m_base_reg)
381 *m_base_loc = m_base_reg;
382 if (m_ad->base_term2 != NULL)
383 *m_ad->base_term2 = *m_ad->base_term;
385 if (m_index_loc && *m_index_loc != m_index_reg)
386 *m_index_loc = m_index_reg;
389 /* Return true if the eliminated form of AD is a legitimate target address. */
390 static bool
391 valid_address_p (struct address_info *ad)
393 address_eliminator eliminator (ad);
394 return valid_address_p (ad->mode, *ad->outer, ad->as);
397 /* Return true if the eliminated form of memory reference OP satisfies
398 extra memory constraint CONSTRAINT. */
399 static bool
400 satisfies_memory_constraint_p (rtx op, enum constraint_num constraint)
402 struct address_info ad;
404 decompose_mem_address (&ad, op);
405 address_eliminator eliminator (&ad);
406 return constraint_satisfied_p (op, constraint);
409 /* Return true if the eliminated form of address AD satisfies extra
410 address constraint CONSTRAINT. */
411 static bool
412 satisfies_address_constraint_p (struct address_info *ad,
413 enum constraint_num constraint)
415 address_eliminator eliminator (ad);
416 return constraint_satisfied_p (*ad->outer, constraint);
419 /* Return true if the eliminated form of address OP satisfies extra
420 address constraint CONSTRAINT. */
421 static bool
422 satisfies_address_constraint_p (rtx op, enum constraint_num constraint)
424 struct address_info ad;
426 decompose_lea_address (&ad, &op);
427 return satisfies_address_constraint_p (&ad, constraint);
430 /* Initiate equivalences for LRA. As we keep original equivalences
431 before any elimination, we need to make copies otherwise any change
432 in insns might change the equivalences. */
433 void
434 lra_init_equiv (void)
436 ira_expand_reg_equiv ();
437 for (int i = FIRST_PSEUDO_REGISTER; i < max_reg_num (); i++)
439 rtx res;
441 if ((res = ira_reg_equiv[i].memory) != NULL_RTX)
442 ira_reg_equiv[i].memory = copy_rtx (res);
443 if ((res = ira_reg_equiv[i].invariant) != NULL_RTX)
444 ira_reg_equiv[i].invariant = copy_rtx (res);
448 static rtx loc_equivalence_callback (rtx, const_rtx, void *);
450 /* Update equivalence for REGNO. We need to this as the equivalence
451 might contain other pseudos which are changed by their
452 equivalences. */
453 static void
454 update_equiv (int regno)
456 rtx x;
458 if ((x = ira_reg_equiv[regno].memory) != NULL_RTX)
459 ira_reg_equiv[regno].memory
460 = simplify_replace_fn_rtx (x, NULL_RTX, loc_equivalence_callback,
461 NULL_RTX);
462 if ((x = ira_reg_equiv[regno].invariant) != NULL_RTX)
463 ira_reg_equiv[regno].invariant
464 = simplify_replace_fn_rtx (x, NULL_RTX, loc_equivalence_callback,
465 NULL_RTX);
468 /* If we have decided to substitute X with another value, return that
469 value, otherwise return X. */
470 static rtx
471 get_equiv (rtx x)
473 int regno;
474 rtx res;
476 if (! REG_P (x) || (regno = REGNO (x)) < FIRST_PSEUDO_REGISTER
477 || ! ira_reg_equiv[regno].defined_p
478 || ! ira_reg_equiv[regno].profitable_p
479 || lra_get_regno_hard_regno (regno) >= 0)
480 return x;
481 if ((res = ira_reg_equiv[regno].memory) != NULL_RTX)
482 return res;
483 if ((res = ira_reg_equiv[regno].constant) != NULL_RTX)
484 return res;
485 if ((res = ira_reg_equiv[regno].invariant) != NULL_RTX)
486 return res;
487 gcc_unreachable ();
490 /* If we have decided to substitute X with the equivalent value,
491 return that value after elimination for INSN, otherwise return
492 X. */
493 static rtx
494 get_equiv_with_elimination (rtx x, rtx insn)
496 rtx res = get_equiv (x);
498 if (x == res || CONSTANT_P (res))
499 return res;
500 return lra_eliminate_regs_1 (insn, res, GET_MODE (res), false, false, true);
503 /* Set up curr_operand_mode. */
504 static void
505 init_curr_operand_mode (void)
507 int nop = curr_static_id->n_operands;
508 for (int i = 0; i < nop; i++)
510 enum machine_mode mode = GET_MODE (*curr_id->operand_loc[i]);
511 if (mode == VOIDmode)
513 /* The .md mode for address operands is the mode of the
514 addressed value rather than the mode of the address itself. */
515 if (curr_id->icode >= 0 && curr_static_id->operand[i].is_address)
516 mode = Pmode;
517 else
518 mode = curr_static_id->operand[i].mode;
520 curr_operand_mode[i] = mode;
526 /* The page contains code to reuse input reloads. */
528 /* Structure describes input reload of the current insns. */
529 struct input_reload
531 /* Reloaded value. */
532 rtx input;
533 /* Reload pseudo used. */
534 rtx reg;
537 /* The number of elements in the following array. */
538 static int curr_insn_input_reloads_num;
539 /* Array containing info about input reloads. It is used to find the
540 same input reload and reuse the reload pseudo in this case. */
541 static struct input_reload curr_insn_input_reloads[LRA_MAX_INSN_RELOADS];
543 /* Initiate data concerning reuse of input reloads for the current
544 insn. */
545 static void
546 init_curr_insn_input_reloads (void)
548 curr_insn_input_reloads_num = 0;
551 /* Create a new pseudo using MODE, RCLASS, ORIGINAL or reuse already
552 created input reload pseudo (only if TYPE is not OP_OUT). Don't
553 reuse pseudo if IN_SUBREG_P is true and the reused pseudo should be
554 wrapped up in SUBREG. The result pseudo is returned through
555 RESULT_REG. Return TRUE if we created a new pseudo, FALSE if we
556 reused the already created input reload pseudo. Use TITLE to
557 describe new registers for debug purposes. */
558 static bool
559 get_reload_reg (enum op_type type, enum machine_mode mode, rtx original,
560 enum reg_class rclass, bool in_subreg_p,
561 const char *title, rtx *result_reg)
563 int i, regno;
564 enum reg_class new_class;
566 if (type == OP_OUT)
568 *result_reg
569 = lra_create_new_reg_with_unique_value (mode, original, rclass, title);
570 return true;
572 /* Prevent reuse value of expression with side effects,
573 e.g. volatile memory. */
574 if (! side_effects_p (original))
575 for (i = 0; i < curr_insn_input_reloads_num; i++)
576 if (rtx_equal_p (curr_insn_input_reloads[i].input, original)
577 && in_class_p (curr_insn_input_reloads[i].reg, rclass, &new_class))
579 rtx reg = curr_insn_input_reloads[i].reg;
580 regno = REGNO (reg);
581 /* If input is equal to original and both are VOIDmode,
582 GET_MODE (reg) might be still different from mode.
583 Ensure we don't return *result_reg with wrong mode. */
584 if (GET_MODE (reg) != mode)
586 if (in_subreg_p)
587 continue;
588 if (GET_MODE_SIZE (GET_MODE (reg)) < GET_MODE_SIZE (mode))
589 continue;
590 reg = lowpart_subreg (mode, reg, GET_MODE (reg));
591 if (reg == NULL_RTX || GET_CODE (reg) != SUBREG)
592 continue;
594 *result_reg = reg;
595 if (lra_dump_file != NULL)
597 fprintf (lra_dump_file, " Reuse r%d for reload ", regno);
598 dump_value_slim (lra_dump_file, original, 1);
600 if (new_class != lra_get_allocno_class (regno))
601 lra_change_class (regno, new_class, ", change to", false);
602 if (lra_dump_file != NULL)
603 fprintf (lra_dump_file, "\n");
604 return false;
606 *result_reg = lra_create_new_reg (mode, original, rclass, title);
607 lra_assert (curr_insn_input_reloads_num < LRA_MAX_INSN_RELOADS);
608 curr_insn_input_reloads[curr_insn_input_reloads_num].input = original;
609 curr_insn_input_reloads[curr_insn_input_reloads_num++].reg = *result_reg;
610 return true;
615 /* The page contains code to extract memory address parts. */
617 /* Wrapper around REGNO_OK_FOR_INDEX_P, to allow pseudos. */
618 static inline bool
619 ok_for_index_p_nonstrict (rtx reg)
621 unsigned regno = REGNO (reg);
623 return regno >= FIRST_PSEUDO_REGISTER || REGNO_OK_FOR_INDEX_P (regno);
626 /* A version of regno_ok_for_base_p for use here, when all pseudos
627 should count as OK. Arguments as for regno_ok_for_base_p. */
628 static inline bool
629 ok_for_base_p_nonstrict (rtx reg, enum machine_mode mode, addr_space_t as,
630 enum rtx_code outer_code, enum rtx_code index_code)
632 unsigned regno = REGNO (reg);
634 if (regno >= FIRST_PSEUDO_REGISTER)
635 return true;
636 return ok_for_base_p_1 (regno, mode, as, outer_code, index_code);
641 /* The page contains major code to choose the current insn alternative
642 and generate reloads for it. */
644 /* Return the offset from REGNO of the least significant register
645 in (reg:MODE REGNO).
647 This function is used to tell whether two registers satisfy
648 a matching constraint. (reg:MODE1 REGNO1) matches (reg:MODE2 REGNO2) if:
650 REGNO1 + lra_constraint_offset (REGNO1, MODE1)
651 == REGNO2 + lra_constraint_offset (REGNO2, MODE2) */
653 lra_constraint_offset (int regno, enum machine_mode mode)
655 lra_assert (regno < FIRST_PSEUDO_REGISTER);
656 if (WORDS_BIG_ENDIAN && GET_MODE_SIZE (mode) > UNITS_PER_WORD
657 && SCALAR_INT_MODE_P (mode))
658 return hard_regno_nregs[regno][mode] - 1;
659 return 0;
662 /* Like rtx_equal_p except that it allows a REG and a SUBREG to match
663 if they are the same hard reg, and has special hacks for
664 auto-increment and auto-decrement. This is specifically intended for
665 process_alt_operands to use in determining whether two operands
666 match. X is the operand whose number is the lower of the two.
668 It is supposed that X is the output operand and Y is the input
669 operand. Y_HARD_REGNO is the final hard regno of register Y or
670 register in subreg Y as we know it now. Otherwise, it is a
671 negative value. */
672 static bool
673 operands_match_p (rtx x, rtx y, int y_hard_regno)
675 int i;
676 RTX_CODE code = GET_CODE (x);
677 const char *fmt;
679 if (x == y)
680 return true;
681 if ((code == REG || (code == SUBREG && REG_P (SUBREG_REG (x))))
682 && (REG_P (y) || (GET_CODE (y) == SUBREG && REG_P (SUBREG_REG (y)))))
684 int j;
686 i = get_hard_regno (x);
687 if (i < 0)
688 goto slow;
690 if ((j = y_hard_regno) < 0)
691 goto slow;
693 i += lra_constraint_offset (i, GET_MODE (x));
694 j += lra_constraint_offset (j, GET_MODE (y));
696 return i == j;
699 /* If two operands must match, because they are really a single
700 operand of an assembler insn, then two post-increments are invalid
701 because the assembler insn would increment only once. On the
702 other hand, a post-increment matches ordinary indexing if the
703 post-increment is the output operand. */
704 if (code == POST_DEC || code == POST_INC || code == POST_MODIFY)
705 return operands_match_p (XEXP (x, 0), y, y_hard_regno);
707 /* Two pre-increments are invalid because the assembler insn would
708 increment only once. On the other hand, a pre-increment matches
709 ordinary indexing if the pre-increment is the input operand. */
710 if (GET_CODE (y) == PRE_DEC || GET_CODE (y) == PRE_INC
711 || GET_CODE (y) == PRE_MODIFY)
712 return operands_match_p (x, XEXP (y, 0), -1);
714 slow:
716 if (code == REG && GET_CODE (y) == SUBREG && REG_P (SUBREG_REG (y))
717 && x == SUBREG_REG (y))
718 return true;
719 if (GET_CODE (y) == REG && code == SUBREG && REG_P (SUBREG_REG (x))
720 && SUBREG_REG (x) == y)
721 return true;
723 /* Now we have disposed of all the cases in which different rtx
724 codes can match. */
725 if (code != GET_CODE (y))
726 return false;
728 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
729 if (GET_MODE (x) != GET_MODE (y))
730 return false;
732 switch (code)
734 CASE_CONST_UNIQUE:
735 return false;
737 case LABEL_REF:
738 return XEXP (x, 0) == XEXP (y, 0);
739 case SYMBOL_REF:
740 return XSTR (x, 0) == XSTR (y, 0);
742 default:
743 break;
746 /* Compare the elements. If any pair of corresponding elements fail
747 to match, return false for the whole things. */
749 fmt = GET_RTX_FORMAT (code);
750 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
752 int val, j;
753 switch (fmt[i])
755 case 'w':
756 if (XWINT (x, i) != XWINT (y, i))
757 return false;
758 break;
760 case 'i':
761 if (XINT (x, i) != XINT (y, i))
762 return false;
763 break;
765 case 'e':
766 val = operands_match_p (XEXP (x, i), XEXP (y, i), -1);
767 if (val == 0)
768 return false;
769 break;
771 case '0':
772 break;
774 case 'E':
775 if (XVECLEN (x, i) != XVECLEN (y, i))
776 return false;
777 for (j = XVECLEN (x, i) - 1; j >= 0; --j)
779 val = operands_match_p (XVECEXP (x, i, j), XVECEXP (y, i, j), -1);
780 if (val == 0)
781 return false;
783 break;
785 /* It is believed that rtx's at this level will never
786 contain anything but integers and other rtx's, except for
787 within LABEL_REFs and SYMBOL_REFs. */
788 default:
789 gcc_unreachable ();
792 return true;
795 /* True if X is a constant that can be forced into the constant pool.
796 MODE is the mode of the operand, or VOIDmode if not known. */
797 #define CONST_POOL_OK_P(MODE, X) \
798 ((MODE) != VOIDmode \
799 && CONSTANT_P (X) \
800 && GET_CODE (X) != HIGH \
801 && !targetm.cannot_force_const_mem (MODE, X))
803 /* True if C is a non-empty register class that has too few registers
804 to be safely used as a reload target class. */
805 #define SMALL_REGISTER_CLASS_P(C) \
806 (ira_class_hard_regs_num [(C)] == 1 \
807 || (ira_class_hard_regs_num [(C)] >= 1 \
808 && targetm.class_likely_spilled_p (C)))
810 /* If REG is a reload pseudo, try to make its class satisfying CL. */
811 static void
812 narrow_reload_pseudo_class (rtx reg, enum reg_class cl)
814 enum reg_class rclass;
816 /* Do not make more accurate class from reloads generated. They are
817 mostly moves with a lot of constraints. Making more accurate
818 class may results in very narrow class and impossibility of find
819 registers for several reloads of one insn. */
820 if (INSN_UID (curr_insn) >= new_insn_uid_start)
821 return;
822 if (GET_CODE (reg) == SUBREG)
823 reg = SUBREG_REG (reg);
824 if (! REG_P (reg) || (int) REGNO (reg) < new_regno_start)
825 return;
826 if (in_class_p (reg, cl, &rclass) && rclass != cl)
827 lra_change_class (REGNO (reg), rclass, " Change to", true);
830 /* Generate reloads for matching OUT and INS (array of input operand
831 numbers with end marker -1) with reg class GOAL_CLASS. Add input
832 and output reloads correspondingly to the lists *BEFORE and *AFTER.
833 OUT might be negative. In this case we generate input reloads for
834 matched input operands INS. */
835 static void
836 match_reload (signed char out, signed char *ins, enum reg_class goal_class,
837 rtx *before, rtx *after)
839 int i, in;
840 rtx new_in_reg, new_out_reg, reg, clobber;
841 enum machine_mode inmode, outmode;
842 rtx in_rtx = *curr_id->operand_loc[ins[0]];
843 rtx out_rtx = out < 0 ? in_rtx : *curr_id->operand_loc[out];
845 inmode = curr_operand_mode[ins[0]];
846 outmode = out < 0 ? inmode : curr_operand_mode[out];
847 push_to_sequence (*before);
848 if (inmode != outmode)
850 if (GET_MODE_SIZE (inmode) > GET_MODE_SIZE (outmode))
852 reg = new_in_reg
853 = lra_create_new_reg_with_unique_value (inmode, in_rtx,
854 goal_class, "");
855 if (SCALAR_INT_MODE_P (inmode))
856 new_out_reg = gen_lowpart_SUBREG (outmode, reg);
857 else
858 new_out_reg = gen_rtx_SUBREG (outmode, reg, 0);
859 LRA_SUBREG_P (new_out_reg) = 1;
860 /* If the input reg is dying here, we can use the same hard
861 register for REG and IN_RTX. We do it only for original
862 pseudos as reload pseudos can die although original
863 pseudos still live where reload pseudos dies. */
864 if (REG_P (in_rtx) && (int) REGNO (in_rtx) < lra_new_regno_start
865 && find_regno_note (curr_insn, REG_DEAD, REGNO (in_rtx)))
866 lra_assign_reg_val (REGNO (in_rtx), REGNO (reg));
868 else
870 reg = new_out_reg
871 = lra_create_new_reg_with_unique_value (outmode, out_rtx,
872 goal_class, "");
873 if (SCALAR_INT_MODE_P (outmode))
874 new_in_reg = gen_lowpart_SUBREG (inmode, reg);
875 else
876 new_in_reg = gen_rtx_SUBREG (inmode, reg, 0);
877 /* NEW_IN_REG is non-paradoxical subreg. We don't want
878 NEW_OUT_REG living above. We add clobber clause for
879 this. This is just a temporary clobber. We can remove
880 it at the end of LRA work. */
881 clobber = emit_clobber (new_out_reg);
882 LRA_TEMP_CLOBBER_P (PATTERN (clobber)) = 1;
883 LRA_SUBREG_P (new_in_reg) = 1;
884 if (GET_CODE (in_rtx) == SUBREG)
886 rtx subreg_reg = SUBREG_REG (in_rtx);
888 /* If SUBREG_REG is dying here and sub-registers IN_RTX
889 and NEW_IN_REG are similar, we can use the same hard
890 register for REG and SUBREG_REG. */
891 if (REG_P (subreg_reg)
892 && (int) REGNO (subreg_reg) < lra_new_regno_start
893 && GET_MODE (subreg_reg) == outmode
894 && SUBREG_BYTE (in_rtx) == SUBREG_BYTE (new_in_reg)
895 && find_regno_note (curr_insn, REG_DEAD, REGNO (subreg_reg)))
896 lra_assign_reg_val (REGNO (subreg_reg), REGNO (reg));
900 else
902 /* Pseudos have values -- see comments for lra_reg_info.
903 Different pseudos with the same value do not conflict even if
904 they live in the same place. When we create a pseudo we
905 assign value of original pseudo (if any) from which we
906 created the new pseudo. If we create the pseudo from the
907 input pseudo, the new pseudo will no conflict with the input
908 pseudo which is wrong when the input pseudo lives after the
909 insn and as the new pseudo value is changed by the insn
910 output. Therefore we create the new pseudo from the output.
912 We cannot reuse the current output register because we might
913 have a situation like "a <- a op b", where the constraints
914 force the second input operand ("b") to match the output
915 operand ("a"). "b" must then be copied into a new register
916 so that it doesn't clobber the current value of "a". */
918 new_in_reg = new_out_reg
919 = lra_create_new_reg_with_unique_value (outmode, out_rtx,
920 goal_class, "");
922 /* In operand can be got from transformations before processing insn
923 constraints. One example of such transformations is subreg
924 reloading (see function simplify_operand_subreg). The new
925 pseudos created by the transformations might have inaccurate
926 class (ALL_REGS) and we should make their classes more
927 accurate. */
928 narrow_reload_pseudo_class (in_rtx, goal_class);
929 lra_emit_move (copy_rtx (new_in_reg), in_rtx);
930 *before = get_insns ();
931 end_sequence ();
932 for (i = 0; (in = ins[i]) >= 0; i++)
934 lra_assert
935 (GET_MODE (*curr_id->operand_loc[in]) == VOIDmode
936 || GET_MODE (new_in_reg) == GET_MODE (*curr_id->operand_loc[in]));
937 *curr_id->operand_loc[in] = new_in_reg;
939 lra_update_dups (curr_id, ins);
940 if (out < 0)
941 return;
942 /* See a comment for the input operand above. */
943 narrow_reload_pseudo_class (out_rtx, goal_class);
944 if (find_reg_note (curr_insn, REG_UNUSED, out_rtx) == NULL_RTX)
946 start_sequence ();
947 lra_emit_move (out_rtx, copy_rtx (new_out_reg));
948 emit_insn (*after);
949 *after = get_insns ();
950 end_sequence ();
952 *curr_id->operand_loc[out] = new_out_reg;
953 lra_update_dup (curr_id, out);
956 /* Return register class which is union of all reg classes in insn
957 constraint alternative string starting with P. */
958 static enum reg_class
959 reg_class_from_constraints (const char *p)
961 int c, len;
962 enum reg_class op_class = NO_REGS;
965 switch ((c = *p, len = CONSTRAINT_LEN (c, p)), c)
967 case '#':
968 case ',':
969 return op_class;
971 case 'g':
972 op_class = reg_class_subunion[op_class][GENERAL_REGS];
973 break;
975 default:
976 enum constraint_num cn = lookup_constraint (p);
977 enum reg_class cl = reg_class_for_constraint (cn);
978 if (cl == NO_REGS)
980 if (insn_extra_address_constraint (cn))
981 op_class
982 = (reg_class_subunion
983 [op_class][base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
984 ADDRESS, SCRATCH)]);
985 break;
988 op_class = reg_class_subunion[op_class][cl];
989 break;
991 while ((p += len), c);
992 return op_class;
995 /* If OP is a register, return the class of the register as per
996 get_reg_class, otherwise return NO_REGS. */
997 static inline enum reg_class
998 get_op_class (rtx op)
1000 return REG_P (op) ? get_reg_class (REGNO (op)) : NO_REGS;
1003 /* Return generated insn mem_pseudo:=val if TO_P or val:=mem_pseudo
1004 otherwise. If modes of MEM_PSEUDO and VAL are different, use
1005 SUBREG for VAL to make them equal. */
1006 static rtx
1007 emit_spill_move (bool to_p, rtx mem_pseudo, rtx val)
1009 if (GET_MODE (mem_pseudo) != GET_MODE (val))
1011 /* Usually size of mem_pseudo is greater than val size but in
1012 rare cases it can be less as it can be defined by target
1013 dependent macro HARD_REGNO_CALLER_SAVE_MODE. */
1014 if (! MEM_P (val))
1016 val = gen_rtx_SUBREG (GET_MODE (mem_pseudo),
1017 GET_CODE (val) == SUBREG ? SUBREG_REG (val) : val,
1019 LRA_SUBREG_P (val) = 1;
1021 else
1023 mem_pseudo = gen_lowpart_SUBREG (GET_MODE (val), mem_pseudo);
1024 LRA_SUBREG_P (mem_pseudo) = 1;
1027 return (to_p
1028 ? gen_move_insn (mem_pseudo, val)
1029 : gen_move_insn (val, mem_pseudo));
1032 /* Process a special case insn (register move), return true if we
1033 don't need to process it anymore. INSN should be a single set
1034 insn. Set up that RTL was changed through CHANGE_P and macro
1035 SECONDARY_MEMORY_NEEDED says to use secondary memory through
1036 SEC_MEM_P. */
1037 static bool
1038 check_and_process_move (bool *change_p, bool *sec_mem_p ATTRIBUTE_UNUSED)
1040 int sregno, dregno;
1041 rtx dest, src, dreg, sreg, old_sreg, new_reg, before, scratch_reg;
1042 enum reg_class dclass, sclass, secondary_class;
1043 enum machine_mode sreg_mode;
1044 secondary_reload_info sri;
1046 lra_assert (curr_insn_set != NULL_RTX);
1047 dreg = dest = SET_DEST (curr_insn_set);
1048 sreg = src = SET_SRC (curr_insn_set);
/* Look through SUBREGs so the class checks below see the underlying
   register or memory.  */
1049 if (GET_CODE (dest) == SUBREG)
1050 dreg = SUBREG_REG (dest);
1051 if (GET_CODE (src) == SUBREG)
1052 sreg = SUBREG_REG (src);
/* This fast path only handles moves between registers/memory.  */
1053 if (! (REG_P (dreg) || MEM_P (dreg)) || ! (REG_P (sreg) || MEM_P (sreg)))
1054 return false;
1055 sclass = dclass = NO_REGS;
1056 if (REG_P (dreg))
1057 dclass = get_reg_class (REGNO (dreg));
1058 if (dclass == ALL_REGS)
1059 /* ALL_REGS is used for new pseudos created by transformations
1060 like reload of SUBREG_REG (see function
1061 simplify_operand_subreg). We don't know their class yet. We
1062 should figure out the class from processing the insn
1063 constraints not in this fast path function. Even if ALL_REGS
1064 were a right class for the pseudo, secondary_... hooks usually
1065 are not define for ALL_REGS. */
1066 return false;
1067 sreg_mode = GET_MODE (sreg);
1068 old_sreg = sreg;
1069 if (REG_P (sreg))
1070 sclass = get_reg_class (REGNO (sreg));
1071 if (sclass == ALL_REGS)
1072 /* See comments above. */
1073 return false;
1074 if (sclass == NO_REGS && dclass == NO_REGS)
1075 return false;
/* If the target needs a memory intermediary for this pair of
   classes, just flag it and let the caller handle the insn.  */
1076 #ifdef SECONDARY_MEMORY_NEEDED
1077 if (SECONDARY_MEMORY_NEEDED (sclass, dclass, GET_MODE (src))
1078 #ifdef SECONDARY_MEMORY_NEEDED_MODE
1079 && ((sclass != NO_REGS && dclass != NO_REGS)
1080 || GET_MODE (src) != SECONDARY_MEMORY_NEEDED_MODE (GET_MODE (src)))
1081 #endif
1084 *sec_mem_p = true;
1085 return false;
1087 #endif
1088 if (! REG_P (dreg) || ! REG_P (sreg))
1089 return false;
1090 sri.prev_sri = NULL;
1091 sri.icode = CODE_FOR_nothing;
1092 sri.extra_cost = 0;
1093 secondary_class = NO_REGS;
1094 /* Set up hard register for a reload pseudo for hook
1095 secondary_reload because some targets just ignore unassigned
1096 pseudos in the hook. */
1097 if (dclass != NO_REGS && lra_get_regno_hard_regno (REGNO (dreg)) < 0)
1099 dregno = REGNO (dreg);
1100 reg_renumber[dregno] = ira_class_hard_regs[dclass][0];
1102 else
1103 dregno = -1;
1104 if (sclass != NO_REGS && lra_get_regno_hard_regno (REGNO (sreg)) < 0)
1106 sregno = REGNO (sreg);
1107 reg_renumber[sregno] = ira_class_hard_regs[sclass][0];
1109 else
1110 sregno = -1;
/* Ask the target whether this copy needs a secondary reload class
   (and possibly a special icode with a scratch operand).  */
1111 if (sclass != NO_REGS)
1112 secondary_class
1113 = (enum reg_class) targetm.secondary_reload (false, dest,
1114 (reg_class_t) sclass,
1115 GET_MODE (src), &sri);
/* Query the other direction too when needed; the assert below checks
   the two answers agree.  */
1116 if (sclass == NO_REGS
1117 || ((secondary_class != NO_REGS || sri.icode != CODE_FOR_nothing)
1118 && dclass != NO_REGS))
1120 enum reg_class old_sclass = secondary_class;
1121 secondary_reload_info old_sri = sri;
1123 sri.prev_sri = NULL;
1124 sri.icode = CODE_FOR_nothing;
1125 sri.extra_cost = 0;
1126 secondary_class
1127 = (enum reg_class) targetm.secondary_reload (true, sreg,
1128 (reg_class_t) dclass,
1129 sreg_mode, &sri);
1130 /* Check the target hook consistency. */
1131 lra_assert
1132 ((secondary_class == NO_REGS && sri.icode == CODE_FOR_nothing)
1133 || (old_sclass == NO_REGS && old_sri.icode == CODE_FOR_nothing)
1134 || (secondary_class == old_sclass && sri.icode == old_sri.icode));
/* Undo the temporary hard register assignments made above.  */
1136 if (sregno >= 0)
1137 reg_renumber [sregno] = -1;
1138 if (dregno >= 0)
1139 reg_renumber [dregno] = -1;
1140 if (secondary_class == NO_REGS && sri.icode == CODE_FOR_nothing)
1141 return false;
1142 *change_p = true;
1143 new_reg = NULL_RTX;
1144 if (secondary_class != NO_REGS)
1145 new_reg = lra_create_new_reg_with_unique_value (sreg_mode, NULL_RTX,
1146 secondary_class,
1147 "secondary");
/* Emit the secondary-reload move(s): either a plain move into the
   intermediate register or the target-provided pattern, which may
   need a scratch of the class taken from its operand 2 constraint.  */
1148 start_sequence ();
1149 if (old_sreg != sreg)
1150 sreg = copy_rtx (sreg);
1151 if (sri.icode == CODE_FOR_nothing)
1152 lra_emit_move (new_reg, sreg);
1153 else
1155 enum reg_class scratch_class;
1157 scratch_class = (reg_class_from_constraints
1158 (insn_data[sri.icode].operand[2].constraint));
1159 scratch_reg = (lra_create_new_reg_with_unique_value
1160 (insn_data[sri.icode].operand[2].mode, NULL_RTX,
1161 scratch_class, "scratch"));
1162 emit_insn (GEN_FCN (sri.icode) (new_reg != NULL_RTX ? new_reg : dest,
1163 sreg, scratch_reg));
1165 before = get_insns ();
1166 end_sequence ();
1167 lra_process_new_insns (curr_insn, before, NULL_RTX, "Inserting the move");
/* Redirect the original move to read from the intermediate reg.  */
1168 if (new_reg != NULL_RTX)
1170 if (GET_CODE (src) == SUBREG)
1171 SUBREG_REG (src) = new_reg;
1172 else
1173 SET_SRC (curr_insn_set) = new_reg;
/* No intermediate register: the pattern emitted above already wrote
   straight into DEST, so the original move is redundant.  */
1175 else
1177 if (lra_dump_file != NULL)
1179 fprintf (lra_dump_file, "Deleting move %u\n", INSN_UID (curr_insn));
1180 dump_insn_slim (lra_dump_file, curr_insn);
1182 lra_set_insn_deleted (curr_insn);
1183 return true;
1185 return false;
1188 /* The following data describe the result of process_alt_operands.
1189 The data are used in curr_insn_transform to generate reloads. */
1191 /* The chosen reg classes which should be used for the corresponding
1192 operands. */
1193 static enum reg_class goal_alt[MAX_RECOG_OPERANDS];
1194 /* True if the operand should be the same as another operand and that
1195 other operand does not need a reload. */
1196 static bool goal_alt_match_win[MAX_RECOG_OPERANDS];
1197 /* True if the operand does not need a reload. */
1198 static bool goal_alt_win[MAX_RECOG_OPERANDS];
1199 /* True if the operand can be offsetable memory. */
1200 static bool goal_alt_offmemok[MAX_RECOG_OPERANDS];
1201 /* The number of an operand to which given operand can be matched to. */
1202 static int goal_alt_matches[MAX_RECOG_OPERANDS];
1203 /* The number of elements in the following array. */
1204 static int goal_alt_dont_inherit_ops_num;
1205 /* Numbers of operands whose reload pseudos should not be inherited. */
1206 static int goal_alt_dont_inherit_ops[MAX_RECOG_OPERANDS];
1207 /* True if the insn commutative operands should be swapped. */
1208 static bool goal_alt_swapped;
1209 /* The chosen insn alternative. */
1210 static int goal_alt_number;
1212 /* The following five variables are used to choose the best insn
1213 alternative. They reflect final characteristics of the best
1214 alternative. */
/* NOTE(review): only four variables are declared below (BEST_LOSERS,
   BEST_OVERALL, BEST_RELOAD_NREGS, BEST_RELOAD_SUM); the count in the
   comment above looks stale.  */
1216 /* Number of necessary reloads and overall cost reflecting the
1217 previous value and other unpleasantness of the best alternative. */
1218 static int best_losers, best_overall;
1219 /* Overall number hard registers used for reloads. For example, on
1220 some targets we need 2 general registers to reload DFmode and only
1221 one floating point register. */
1222 static int best_reload_nregs;
1223 /* Overall number reflecting distances of previous reloading the same
1224 value. The distances are counted from the current BB start. It is
1225 used to improve inheritance chances. */
1226 static int best_reload_sum;
1228 /* True if the current insn should have no correspondingly input or
1229 output reloads. */
1230 static bool no_input_reloads_p, no_output_reloads_p;
1232 /* True if we swapped the commutative operands in the current
1233 insn. */
1234 static int curr_swapped;
/* NOTE(review): CURR_SWAPPED is declared int but documented and used
   as a flag; bool would match GOAL_ALT_SWAPPED above.  */
1236 /* Arrange for address element *LOC to be a register of class CL.
1237 Add any input reloads to list BEFORE. AFTER is nonnull if *LOC is an
1238 automodified value; handle that case by adding the required output
1239 reloads to list AFTER. Return true if the RTL was changed. */
1240 static bool
1241 process_addr_reg (rtx *loc, rtx *before, rtx *after, enum reg_class cl)
1243 int regno;
1244 enum reg_class rclass, new_class;
1245 rtx reg;
1246 rtx new_reg;
1247 enum machine_mode mode;
1248 bool subreg_p, before_p = false;
/* Work on the inner expression of a SUBREG; the reload register is
   created in the inner mode.  */
1250 subreg_p = GET_CODE (*loc) == SUBREG;
1251 if (subreg_p)
1252 loc = &SUBREG_REG (*loc);
1253 reg = *loc;
1254 mode = GET_MODE (reg);
1255 if (! REG_P (reg))
1257 /* Always reload memory in an address even if the target supports
1258 such addresses. */
1259 new_reg = lra_create_new_reg_with_unique_value (mode, reg, cl, "address");
1260 before_p = true;
1262 else
1264 regno = REGNO (reg);
1265 rclass = get_reg_class (regno);
/* Substitute the pseudo's (eliminated) equivalence, if any, and work
   on an unshared copy of it.  */
1266 if ((*loc = get_equiv_with_elimination (reg, curr_insn)) != reg)
1268 if (lra_dump_file != NULL)
1270 fprintf (lra_dump_file,
1271 "Changing pseudo %d in address of insn %u on equiv ",
1272 REGNO (reg), INSN_UID (curr_insn));
1273 dump_value_slim (lra_dump_file, *loc, 1);
1274 fprintf (lra_dump_file, "\n");
1276 *loc = copy_rtx (*loc);
/* A reload is needed if an equivalence was substituted or the pseudo
   cannot simply be given class CL.  */
1278 if (*loc != reg || ! in_class_p (reg, cl, &new_class))
1280 reg = *loc;
/* OP_INOUT when the address is automodified, so that an output
   reload is generated as well.  */
1281 if (get_reload_reg (after == NULL ? OP_IN : OP_INOUT,
1282 mode, reg, cl, subreg_p, "address", &new_reg))
1283 before_p = true;
1285 else if (new_class != NO_REGS && rclass != new_class)
/* No reload insn needed: just narrow the pseudo's class.  */
1287 lra_change_class (regno, new_class, " Change to", true);
1288 return false;
1290 else
1291 return false;
/* Emit the input load in front of the insn ...  */
1293 if (before_p)
1295 push_to_sequence (*before);
1296 lra_emit_move (new_reg, reg);
1297 *before = get_insns ();
1298 end_sequence ();
1300 *loc = new_reg;
/* ... and, for an automodified address, copy the updated value back
   after the insn.  */
1301 if (after != NULL)
1303 start_sequence ();
1304 lra_emit_move (reg, new_reg);
1305 emit_insn (*after);
1306 *after = get_insns ();
1307 end_sequence ();
1309 return true;
1312 /* Insert move insn in simplify_operand_subreg. BEFORE returns
1313 the insn to be inserted before curr insn. AFTER returns the
1314 the insn to be inserted after curr insn. ORIGREG and NEWREG
1315 are the original reg and new reg for reload. */
1316 static void
1317 insert_move_for_subreg (rtx *before, rtx *after, rtx origreg, rtx newreg)
1319 if (before)
1321 push_to_sequence (*before);
1322 lra_emit_move (newreg, origreg);
1323 *before = get_insns ();
1324 end_sequence ();
1326 if (after)
1328 start_sequence ();
1329 lra_emit_move (origreg, newreg);
1330 emit_insn (*after);
1331 *after = get_insns ();
1332 end_sequence ();
1336 static int valid_address_p (enum machine_mode mode, rtx addr, addr_space_t as);
1338 /* Make reloads for subreg in operand NOP with internal subreg mode
1339 REG_MODE, add new reloads for further processing. Return true if
1340 any reload was generated. */
1341 static bool
1342 simplify_operand_subreg (int nop, enum machine_mode reg_mode)
1344 int hard_regno;
1345 rtx before, after;
1346 enum machine_mode mode;
1347 rtx reg, new_reg;
1348 rtx operand = *curr_id->operand_loc[nop];
1349 enum reg_class regclass;
1350 enum op_type type;
1352 before = after = NULL_RTX;
1354 if (GET_CODE (operand) != SUBREG)
1355 return false;
1357 mode = GET_MODE (operand);
1358 reg = SUBREG_REG (operand);
1359 type = curr_static_id->operand[nop].type;
1360 /* If we change address for paradoxical subreg of memory, the
1361 address might violate the necessary alignment or the access might
1362 be slow. So take this into consideration. We should not worry
1363 about access beyond allocated memory for paradoxical memory
1364 subregs as we don't substitute such equiv memory (see processing
1365 equivalences in function lra_constraints) and because for spilled
1366 pseudos we allocate stack memory enough for the biggest
1367 corresponding paradoxical subreg. */
1368 if (MEM_P (reg)
1369 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (reg))
1370 || MEM_ALIGN (reg) >= GET_MODE_ALIGNMENT (mode)))
1372 rtx subst, old = *curr_id->operand_loc[nop];
/* Try folding the subreg into the memory reference; keep the result
   only if it does not make a previously-valid address invalid.  */
1374 alter_subreg (curr_id->operand_loc[nop], false);
1375 subst = *curr_id->operand_loc[nop];
1376 lra_assert (MEM_P (subst));
1377 if (! valid_address_p (GET_MODE (reg), XEXP (reg, 0),
1378 MEM_ADDR_SPACE (reg))
1379 || valid_address_p (GET_MODE (subst), XEXP (subst, 0),
1380 MEM_ADDR_SPACE (subst)))
1381 return true;
1382 /* If the address was valid and became invalid, prefer to reload
1383 the memory. Typical case is when the index scale should
1384 correspond the memory. */
1385 *curr_id->operand_loc[nop] = old;
/* A subreg of a hard register is simplified in place.  */
1387 else if (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER)
1389 alter_subreg (curr_id->operand_loc[nop], false);
1390 return true;
1392 /* Put constant into memory when we have mixed modes. It generates
1393 a better code in most cases as it does not need a secondary
1394 reload memory. It also prevents LRA looping when LRA is using
1395 secondary reload memory again and again. */
1396 if (CONSTANT_P (reg) && CONST_POOL_OK_P (reg_mode, reg)
1397 && SCALAR_INT_MODE_P (reg_mode) != SCALAR_INT_MODE_P (mode))
1399 SUBREG_REG (operand) = force_const_mem (reg_mode, reg);
1400 alter_subreg (curr_id->operand_loc[nop], false);
1401 return true;
1403 /* Force a reload of the SUBREG_REG if this is a constant or PLUS or
1404 if there may be a problem accessing OPERAND in the outer
1405 mode. */
1406 if ((REG_P (reg)
1407 && REGNO (reg) >= FIRST_PSEUDO_REGISTER
1408 && (hard_regno = lra_get_regno_hard_regno (REGNO (reg))) >= 0
1409 /* Don't reload paradoxical subregs because we could be looping
1410 having repeatedly final regno out of hard regs range. */
1411 && (hard_regno_nregs[hard_regno][GET_MODE (reg)]
1412 >= hard_regno_nregs[hard_regno][mode])
1413 && simplify_subreg_regno (hard_regno, GET_MODE (reg),
1414 SUBREG_BYTE (operand), mode) < 0
1415 /* Don't reload subreg for matching reload. It is actually
1416 valid subreg in LRA. */
1417 && ! LRA_SUBREG_P (operand))
1418 || CONSTANT_P (reg) || GET_CODE (reg) == PLUS || MEM_P (reg))
1420 enum reg_class rclass;
1422 if (REG_P (reg))
1423 /* There is a big probability that we will get the same class
1424 for the new pseudo and we will get the same insn which
1425 means infinite looping. So spill the new pseudo. */
1426 rclass = NO_REGS;
1427 else
1428 /* The class will be defined later in curr_insn_transform. */
1429 rclass
1430 = (enum reg_class) targetm.preferred_reload_class (reg, ALL_REGS);
1432 if (get_reload_reg (curr_static_id->operand[nop].type, reg_mode, reg,
1433 rclass, TRUE, "subreg reg", &new_reg))
1435 bool insert_before, insert_after;
1436 bitmap_set_bit (&lra_subreg_reload_pseudos, REGNO (new_reg));
/* Load the value before the insn for inputs, and also for an output
   whose inner reg is wider than the written subreg, since such a
   write only partially updates the inner value.  */
1438 insert_before = (type != OP_OUT
1439 || GET_MODE_SIZE (GET_MODE (reg)) > GET_MODE_SIZE (mode));
1440 insert_after = (type != OP_IN);
1441 insert_move_for_subreg (insert_before ? &before : NULL,
1442 insert_after ? &after : NULL,
1443 reg, new_reg);
1445 SUBREG_REG (operand) = new_reg;
1446 lra_process_new_insns (curr_insn, before, after,
1447 "Inserting subreg reload");
1448 return true;
1450 /* Force a reload for a paradoxical subreg. For paradoxical subreg,
1451 IRA allocates hardreg to the inner pseudo reg according to its mode
1452 instead of the outermode, so the size of the hardreg may not be enough
1453 to contain the outermode operand, in that case we may need to insert
1454 reload for the reg. For the following two types of paradoxical subreg,
1455 we need to insert reload:
1456 1. If the op_type is OP_IN, and the hardreg could not be paired with
1457 other hardreg to contain the outermode operand
1458 (checked by in_hard_reg_set_p), we need to insert the reload.
1459 2. If the op_type is OP_OUT or OP_INOUT.
1461 Here is a paradoxical subreg example showing how the reload is generated:
1463 (insn 5 4 7 2 (set (reg:TI 106 [ __comp ])
1464 (subreg:TI (reg:DI 107 [ __comp ]) 0)) {*movti_internal_rex64}
1466 In IRA, reg107 is allocated to a DImode hardreg. We use x86-64 as example
1467 here, if reg107 is assigned to hardreg R15, because R15 is the last
1468 hardreg, compiler cannot find another hardreg to pair with R15 to
1469 contain TImode data. So we insert a TImode reload reg180 for it.
1470 After reload is inserted:
1472 (insn 283 0 0 (set (subreg:DI (reg:TI 180 [orig:107 __comp ] [107]) 0)
1473 (reg:DI 107 [ __comp ])) -1
1474 (insn 5 4 7 2 (set (reg:TI 106 [ __comp ])
1475 (subreg:TI (reg:TI 180 [orig:107 __comp ] [107]) 0)) {*movti_internal_rex64}
1477 Two reload hard registers will be allocated to reg180 to save TImode data
1478 in LRA_assign. */
1479 else if (REG_P (reg)
1480 && REGNO (reg) >= FIRST_PSEUDO_REGISTER
1481 && (hard_regno = lra_get_regno_hard_regno (REGNO (reg))) >= 0
1482 && (hard_regno_nregs[hard_regno][GET_MODE (reg)]
1483 < hard_regno_nregs[hard_regno][mode])
1484 && (regclass = lra_get_allocno_class (REGNO (reg)))
1485 && (type != OP_IN
1486 || !in_hard_reg_set_p (reg_class_contents[regclass],
1487 mode, hard_regno)))
1489 /* The class will be defined later in curr_insn_transform. */
1490 enum reg_class rclass
1491 = (enum reg_class) targetm.preferred_reload_class (reg, ALL_REGS);
1493 if (get_reload_reg (curr_static_id->operand[nop].type, mode, reg,
1494 rclass, TRUE, "paradoxical subreg", &new_reg))
1496 rtx subreg;
1497 bool insert_before, insert_after;
/* Give the reload pseudo the wide outer mode and access the original
   (narrower) value through a subreg of it.  */
1499 PUT_MODE (new_reg, mode);
1500 subreg = simplify_gen_subreg (GET_MODE (reg), new_reg, mode, 0);
1501 bitmap_set_bit (&lra_subreg_reload_pseudos, REGNO (new_reg));
1503 insert_before = (type != OP_OUT);
1504 insert_after = (type != OP_IN);
1505 insert_move_for_subreg (insert_before ? &before : NULL,
1506 insert_after ? &after : NULL,
1507 reg, subreg);
1509 SUBREG_REG (operand) = new_reg;
1510 lra_process_new_insns (curr_insn, before, after,
1511 "Inserting paradoxical subreg reload");
1512 return true;
1514 return false;
1517 /* Return TRUE if X refers for a hard register from SET. */
1518 static bool
1519 uses_hard_regs_p (rtx x, HARD_REG_SET set)
1521 int i, j, x_hard_regno;
1522 enum machine_mode mode;
1523 const char *fmt;
1524 enum rtx_code code;
1526 if (x == NULL_RTX)
1527 return false;
1528 code = GET_CODE (x);
1529 mode = GET_MODE (x);
1530 if (code == SUBREG)
1532 x = SUBREG_REG (x);
1533 code = GET_CODE (x);
1534 if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (mode))
1535 mode = GET_MODE (x);
1538 if (REG_P (x))
1540 x_hard_regno = get_hard_regno (x);
1541 return (x_hard_regno >= 0
1542 && overlaps_hard_reg_set_p (set, mode, x_hard_regno));
1544 if (MEM_P (x))
1546 struct address_info ad;
1548 decompose_mem_address (&ad, x);
1549 if (ad.base_term != NULL && uses_hard_regs_p (*ad.base_term, set))
1550 return true;
1551 if (ad.index_term != NULL && uses_hard_regs_p (*ad.index_term, set))
1552 return true;
1554 fmt = GET_RTX_FORMAT (code);
1555 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1557 if (fmt[i] == 'e')
1559 if (uses_hard_regs_p (XEXP (x, i), set))
1560 return true;
1562 else if (fmt[i] == 'E')
1564 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1565 if (uses_hard_regs_p (XVECEXP (x, i, j), set))
1566 return true;
1569 return false;
1572 /* Return true if OP is a spilled pseudo. */
1573 static inline bool
1574 spilled_pseudo_p (rtx op)
1576 return (REG_P (op)
1577 && REGNO (op) >= FIRST_PSEUDO_REGISTER && in_mem_p (REGNO (op)));
1580 /* Return true if X is a general constant. */
1581 static inline bool
1582 general_constant_p (rtx x)
1584 return CONSTANT_P (x) && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (x));
1587 static bool
1588 reg_in_class_p (rtx reg, enum reg_class cl)
1590 if (cl == NO_REGS)
1591 return get_reg_class (REGNO (reg)) == NO_REGS;
1592 return in_class_p (reg, cl, NULL);
1595 /* Major function to choose the current insn alternative and what
1596 operands should be reloaded and how. If ONLY_ALTERNATIVE is not
1597 negative we should consider only this alternative. Return false if
1598 we can not choose the alternative or find how to reload the
1599 operands. */
1600 static bool
1601 process_alt_operands (int only_alternative)
1603 bool ok_p = false;
1604 int nop, overall, nalt;
1605 int n_alternatives = curr_static_id->n_alternatives;
1606 int n_operands = curr_static_id->n_operands;
1607 /* LOSERS counts the operands that don't fit this alternative and
1608 would require loading. */
1609 int losers;
1610 /* REJECT is a count of how undesirable this alternative says it is
1611 if any reloading is required. If the alternative matches exactly
1612 then REJECT is ignored, but otherwise it gets this much counted
1613 against it in addition to the reloading needed. */
1614 int reject;
1615 /* The number of elements in the following array. */
1616 int early_clobbered_regs_num;
1617 /* Numbers of operands which are early clobber registers. */
1618 int early_clobbered_nops[MAX_RECOG_OPERANDS];
1619 enum reg_class curr_alt[MAX_RECOG_OPERANDS];
1620 HARD_REG_SET curr_alt_set[MAX_RECOG_OPERANDS];
1621 bool curr_alt_match_win[MAX_RECOG_OPERANDS];
1622 bool curr_alt_win[MAX_RECOG_OPERANDS];
1623 bool curr_alt_offmemok[MAX_RECOG_OPERANDS];
1624 int curr_alt_matches[MAX_RECOG_OPERANDS];
1625 /* The number of elements in the following array. */
1626 int curr_alt_dont_inherit_ops_num;
1627 /* Numbers of operands whose reload pseudos should not be inherited. */
1628 int curr_alt_dont_inherit_ops[MAX_RECOG_OPERANDS];
1629 rtx op;
1630 /* The register when the operand is a subreg of register, otherwise the
1631 operand itself. */
1632 rtx no_subreg_reg_operand[MAX_RECOG_OPERANDS];
1633 /* The register if the operand is a register or subreg of register,
1634 otherwise NULL. */
1635 rtx operand_reg[MAX_RECOG_OPERANDS];
1636 int hard_regno[MAX_RECOG_OPERANDS];
1637 enum machine_mode biggest_mode[MAX_RECOG_OPERANDS];
1638 int reload_nregs, reload_sum;
1639 bool costly_p;
1640 enum reg_class cl;
1642 /* Calculate some data common for all alternatives to speed up the
1643 function. */
1644 for (nop = 0; nop < n_operands; nop++)
1646 rtx reg;
1648 op = no_subreg_reg_operand[nop] = *curr_id->operand_loc[nop];
1649 /* The real hard regno of the operand after the allocation. */
1650 hard_regno[nop] = get_hard_regno (op);
1652 operand_reg[nop] = reg = op;
1653 biggest_mode[nop] = GET_MODE (op);
1654 if (GET_CODE (op) == SUBREG)
1656 operand_reg[nop] = reg = SUBREG_REG (op);
1657 if (GET_MODE_SIZE (biggest_mode[nop])
1658 < GET_MODE_SIZE (GET_MODE (reg)))
1659 biggest_mode[nop] = GET_MODE (reg);
1661 if (! REG_P (reg))
1662 operand_reg[nop] = NULL_RTX;
1663 else if (REGNO (reg) >= FIRST_PSEUDO_REGISTER
1664 || ((int) REGNO (reg)
1665 == lra_get_elimination_hard_regno (REGNO (reg))))
1666 no_subreg_reg_operand[nop] = reg;
1667 else
1668 operand_reg[nop] = no_subreg_reg_operand[nop]
1669 /* Just use natural mode for elimination result. It should
1670 be enough for extra constraints hooks. */
1671 = regno_reg_rtx[hard_regno[nop]];
1674 /* The constraints are made of several alternatives. Each operand's
1675 constraint looks like foo,bar,... with commas separating the
1676 alternatives. The first alternatives for all operands go
1677 together, the second alternatives go together, etc.
1679 First loop over alternatives. */
1680 alternative_mask enabled = curr_id->enabled_alternatives;
1681 if (only_alternative >= 0)
1682 enabled &= ALTERNATIVE_BIT (only_alternative);
1684 for (nalt = 0; nalt < n_alternatives; nalt++)
1686 /* Loop over operands for one constraint alternative. */
1687 if (!TEST_BIT (enabled, nalt))
1688 continue;
1690 overall = losers = reject = reload_nregs = reload_sum = 0;
1691 for (nop = 0; nop < n_operands; nop++)
1693 int inc = (curr_static_id
1694 ->operand_alternative[nalt * n_operands + nop].reject);
1695 if (lra_dump_file != NULL && inc != 0)
1696 fprintf (lra_dump_file,
1697 " Staticly defined alt reject+=%d\n", inc);
1698 reject += inc;
1700 early_clobbered_regs_num = 0;
1702 for (nop = 0; nop < n_operands; nop++)
1704 const char *p;
1705 char *end;
1706 int len, c, m, i, opalt_num, this_alternative_matches;
1707 bool win, did_match, offmemok, early_clobber_p;
1708 /* false => this operand can be reloaded somehow for this
1709 alternative. */
1710 bool badop;
1711 /* true => this operand can be reloaded if the alternative
1712 allows regs. */
1713 bool winreg;
1714 /* True if a constant forced into memory would be OK for
1715 this operand. */
1716 bool constmemok;
1717 enum reg_class this_alternative, this_costly_alternative;
1718 HARD_REG_SET this_alternative_set, this_costly_alternative_set;
1719 bool this_alternative_match_win, this_alternative_win;
1720 bool this_alternative_offmemok;
1721 bool scratch_p;
1722 enum machine_mode mode;
1723 enum constraint_num cn;
1725 opalt_num = nalt * n_operands + nop;
1726 if (curr_static_id->operand_alternative[opalt_num].anything_ok)
1728 /* Fast track for no constraints at all. */
1729 curr_alt[nop] = NO_REGS;
1730 CLEAR_HARD_REG_SET (curr_alt_set[nop]);
1731 curr_alt_win[nop] = true;
1732 curr_alt_match_win[nop] = false;
1733 curr_alt_offmemok[nop] = false;
1734 curr_alt_matches[nop] = -1;
1735 continue;
1738 op = no_subreg_reg_operand[nop];
1739 mode = curr_operand_mode[nop];
1741 win = did_match = winreg = offmemok = constmemok = false;
1742 badop = true;
1744 early_clobber_p = false;
1745 p = curr_static_id->operand_alternative[opalt_num].constraint;
1747 this_costly_alternative = this_alternative = NO_REGS;
1748 /* We update set of possible hard regs besides its class
1749 because reg class might be inaccurate. For example,
1750 union of LO_REGS (l), HI_REGS(h), and STACK_REG(k) in ARM
1751 is translated in HI_REGS because classes are merged by
1752 pairs and there is no accurate intermediate class. */
1753 CLEAR_HARD_REG_SET (this_alternative_set);
1754 CLEAR_HARD_REG_SET (this_costly_alternative_set);
1755 this_alternative_win = false;
1756 this_alternative_match_win = false;
1757 this_alternative_offmemok = false;
1758 this_alternative_matches = -1;
1760 /* An empty constraint should be excluded by the fast
1761 track. */
1762 lra_assert (*p != 0 && *p != ',');
1764 /* Scan this alternative's specs for this operand; set WIN
1765 if the operand fits any letter in this alternative.
1766 Otherwise, clear BADOP if this operand could fit some
1767 letter after reloads, or set WINREG if this operand could
1768 fit after reloads provided the constraint allows some
1769 registers. */
1770 costly_p = false;
1773 switch ((c = *p, len = CONSTRAINT_LEN (c, p)), c)
1775 case '\0':
1776 len = 0;
1777 break;
1778 case ',':
1779 c = '\0';
1780 break;
1782 case '&':
1783 early_clobber_p = true;
1784 break;
1786 case '#':
1787 /* Ignore rest of this alternative. */
1788 c = '\0';
1789 break;
1791 case '0': case '1': case '2': case '3': case '4':
1792 case '5': case '6': case '7': case '8': case '9':
1794 int m_hregno;
1795 bool match_p;
1797 m = strtoul (p, &end, 10);
1798 p = end;
1799 len = 0;
1800 lra_assert (nop > m);
1802 this_alternative_matches = m;
1803 m_hregno = get_hard_regno (*curr_id->operand_loc[m]);
1804 /* We are supposed to match a previous operand.
1805 If we do, we win if that one did. If we do
1806 not, count both of the operands as losers.
1807 (This is too conservative, since most of the
1808 time only a single reload insn will be needed
1809 to make the two operands win. As a result,
1810 this alternative may be rejected when it is
1811 actually desirable.) */
1812 match_p = false;
1813 if (operands_match_p (*curr_id->operand_loc[nop],
1814 *curr_id->operand_loc[m], m_hregno))
1816 /* We should reject matching of an early
1817 clobber operand if the matching operand is
1818 not dying in the insn. */
1819 if (! curr_static_id->operand[m].early_clobber
1820 || operand_reg[nop] == NULL_RTX
1821 || (find_regno_note (curr_insn, REG_DEAD,
1822 REGNO (op))
1823 || REGNO (op) == REGNO (operand_reg[m])))
1824 match_p = true;
1826 if (match_p)
1828 /* If we are matching a non-offsettable
1829 address where an offsettable address was
1830 expected, then we must reject this
1831 combination, because we can't reload
1832 it. */
1833 if (curr_alt_offmemok[m]
1834 && MEM_P (*curr_id->operand_loc[m])
1835 && curr_alt[m] == NO_REGS && ! curr_alt_win[m])
1836 continue;
1838 else
1840 /* Operands don't match. Both operands must
1841 allow a reload register, otherwise we
1842 cannot make them match. */
1843 if (curr_alt[m] == NO_REGS)
1844 break;
1845 /* Retroactively mark the operand we had to
1846 match as a loser, if it wasn't already and
1847 it wasn't matched to a register constraint
1848 (e.g it might be matched by memory). */
1849 if (curr_alt_win[m]
1850 && (operand_reg[m] == NULL_RTX
1851 || hard_regno[m] < 0))
1853 losers++;
1854 reload_nregs
1855 += (ira_reg_class_max_nregs[curr_alt[m]]
1856 [GET_MODE (*curr_id->operand_loc[m])]);
1859 /* Prefer matching earlyclobber alternative as
1860 it results in less hard regs required for
1861 the insn than a non-matching earlyclobber
1862 alternative. */
1863 if (curr_static_id->operand[m].early_clobber)
1865 if (lra_dump_file != NULL)
1866 fprintf
1867 (lra_dump_file,
1868 " %d Matching earlyclobber alt:"
1869 " reject--\n",
1870 nop);
1871 reject--;
1873 /* Otherwise we prefer no matching
1874 alternatives because it gives more freedom
1875 in RA. */
1876 else if (operand_reg[nop] == NULL_RTX
1877 || (find_regno_note (curr_insn, REG_DEAD,
1878 REGNO (operand_reg[nop]))
1879 == NULL_RTX))
1881 if (lra_dump_file != NULL)
1882 fprintf
1883 (lra_dump_file,
1884 " %d Matching alt: reject+=2\n",
1885 nop);
1886 reject += 2;
1889 /* If we have to reload this operand and some
1890 previous operand also had to match the same
1891 thing as this operand, we don't know how to do
1892 that. */
1893 if (!match_p || !curr_alt_win[m])
1895 for (i = 0; i < nop; i++)
1896 if (curr_alt_matches[i] == m)
1897 break;
1898 if (i < nop)
1899 break;
1901 else
1902 did_match = true;
1904 /* This can be fixed with reloads if the operand
1905 we are supposed to match can be fixed with
1906 reloads. */
1907 badop = false;
1908 this_alternative = curr_alt[m];
1909 COPY_HARD_REG_SET (this_alternative_set, curr_alt_set[m]);
1910 winreg = this_alternative != NO_REGS;
1911 break;
1914 case 'g':
1915 if (MEM_P (op)
1916 || general_constant_p (op)
1917 || spilled_pseudo_p (op))
1918 win = true;
1919 cl = GENERAL_REGS;
1920 goto reg;
1922 default:
1923 cn = lookup_constraint (p);
1924 switch (get_constraint_type (cn))
1926 case CT_REGISTER:
1927 cl = reg_class_for_constraint (cn);
1928 if (cl != NO_REGS)
1929 goto reg;
1930 break;
1932 case CT_CONST_INT:
1933 if (CONST_INT_P (op)
1934 && insn_const_int_ok_for_constraint (INTVAL (op), cn))
1935 win = true;
1936 break;
1938 case CT_MEMORY:
1939 if (MEM_P (op)
1940 && satisfies_memory_constraint_p (op, cn))
1941 win = true;
1942 else if (spilled_pseudo_p (op))
1943 win = true;
1945 /* If we didn't already win, we can reload constants
1946 via force_const_mem or put the pseudo value into
1947 memory, or make other memory by reloading the
1948 address like for 'o'. */
1949 if (CONST_POOL_OK_P (mode, op)
1950 || MEM_P (op) || REG_P (op))
1951 badop = false;
1952 constmemok = true;
1953 offmemok = true;
1954 break;
1956 case CT_ADDRESS:
1957 /* If we didn't already win, we can reload the address
1958 into a base register. */
1959 if (satisfies_address_constraint_p (op, cn))
1960 win = true;
1961 cl = base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
1962 ADDRESS, SCRATCH);
1963 badop = false;
1964 goto reg;
1966 case CT_FIXED_FORM:
1967 if (constraint_satisfied_p (op, cn))
1968 win = true;
1969 break;
1971 break;
1973 reg:
1974 this_alternative = reg_class_subunion[this_alternative][cl];
1975 IOR_HARD_REG_SET (this_alternative_set,
1976 reg_class_contents[cl]);
1977 if (costly_p)
1979 this_costly_alternative
1980 = reg_class_subunion[this_costly_alternative][cl];
1981 IOR_HARD_REG_SET (this_costly_alternative_set,
1982 reg_class_contents[cl]);
1984 if (mode == BLKmode)
1985 break;
1986 winreg = true;
1987 if (REG_P (op))
1989 if (hard_regno[nop] >= 0
1990 && in_hard_reg_set_p (this_alternative_set,
1991 mode, hard_regno[nop]))
1992 win = true;
1993 else if (hard_regno[nop] < 0
1994 && in_class_p (op, this_alternative, NULL))
1995 win = true;
1997 break;
1999 if (c != ' ' && c != '\t')
2000 costly_p = c == '*';
2002 while ((p += len), c);
2004 scratch_p = (operand_reg[nop] != NULL_RTX
2005 && lra_former_scratch_p (REGNO (operand_reg[nop])));
2006 /* Record which operands fit this alternative. */
2007 if (win)
2009 this_alternative_win = true;
2010 if (operand_reg[nop] != NULL_RTX)
2012 if (hard_regno[nop] >= 0)
2014 if (in_hard_reg_set_p (this_costly_alternative_set,
2015 mode, hard_regno[nop]))
2017 if (lra_dump_file != NULL)
2018 fprintf (lra_dump_file,
2019 " %d Costly set: reject++\n",
2020 nop);
2021 reject++;
2024 else
2026 /* Prefer won reg to spilled pseudo under other
2027 equal conditions for possibe inheritance. */
2028 if (! scratch_p)
2030 if (lra_dump_file != NULL)
2031 fprintf
2032 (lra_dump_file,
2033 " %d Non pseudo reload: reject++\n",
2034 nop);
2035 reject++;
2037 if (in_class_p (operand_reg[nop],
2038 this_costly_alternative, NULL))
2040 if (lra_dump_file != NULL)
2041 fprintf
2042 (lra_dump_file,
2043 " %d Non pseudo costly reload:"
2044 " reject++\n",
2045 nop);
2046 reject++;
2049 /* We simulate the behaviour of old reload here.
2050 Although scratches need hard registers and it
2051 might result in spilling other pseudos, no reload
2052 insns are generated for the scratches. So it
2053 might cost something but probably less than old
2054 reload pass believes. */
2055 if (scratch_p)
2057 if (lra_dump_file != NULL)
2058 fprintf (lra_dump_file,
2059 " %d Scratch win: reject+=2\n",
2060 nop);
2061 reject += 2;
2065 else if (did_match)
2066 this_alternative_match_win = true;
2067 else
2069 int const_to_mem = 0;
2070 bool no_regs_p;
2072 /* Never do output reload of stack pointer. It makes
2073 impossible to do elimination when SP is changed in
2074 RTL. */
2075 if (op == stack_pointer_rtx && ! frame_pointer_needed
2076 && curr_static_id->operand[nop].type != OP_IN)
2077 goto fail;
2079 /* If this alternative asks for a specific reg class, see if there
2080 is at least one allocatable register in that class. */
2081 no_regs_p
2082 = (this_alternative == NO_REGS
2083 || (hard_reg_set_subset_p
2084 (reg_class_contents[this_alternative],
2085 lra_no_alloc_regs)));
2087 /* For asms, verify that the class for this alternative is possible
2088 for the mode that is specified. */
2089 if (!no_regs_p && INSN_CODE (curr_insn) < 0)
2091 int i;
2092 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2093 if (HARD_REGNO_MODE_OK (i, mode)
2094 && in_hard_reg_set_p (reg_class_contents[this_alternative],
2095 mode, i))
2096 break;
2097 if (i == FIRST_PSEUDO_REGISTER)
2098 winreg = false;
2101 /* If this operand accepts a register, and if the
2102 register class has at least one allocatable register,
2103 then this operand can be reloaded. */
2104 if (winreg && !no_regs_p)
2105 badop = false;
2107 if (badop)
2109 if (lra_dump_file != NULL)
2110 fprintf (lra_dump_file,
2111 " alt=%d: Bad operand -- refuse\n",
2112 nalt);
2113 goto fail;
2116 /* If not assigned pseudo has a class which a subset of
2117 required reg class, it is a less costly alternative
2118 as the pseudo still can get a hard reg of necessary
2119 class. */
2120 if (! no_regs_p && REG_P (op) && hard_regno[nop] < 0
2121 && (cl = get_reg_class (REGNO (op))) != NO_REGS
2122 && ira_class_subset_p[this_alternative][cl])
2124 if (lra_dump_file != NULL)
2125 fprintf
2126 (lra_dump_file,
2127 " %d Super set class reg: reject-=3\n", nop);
2128 reject -= 3;
2131 this_alternative_offmemok = offmemok;
2132 if (this_costly_alternative != NO_REGS)
2134 if (lra_dump_file != NULL)
2135 fprintf (lra_dump_file,
2136 " %d Costly loser: reject++\n", nop);
2137 reject++;
2139 /* If the operand is dying, has a matching constraint,
2140 and satisfies constraints of the matched operand
2141 which failed to satisfy the own constraints, most probably
2142 the reload for this operand will be gone. */
2143 if (this_alternative_matches >= 0
2144 && !curr_alt_win[this_alternative_matches]
2145 && REG_P (op)
2146 && find_regno_note (curr_insn, REG_DEAD, REGNO (op))
2147 && (hard_regno[nop] >= 0
2148 ? in_hard_reg_set_p (this_alternative_set,
2149 mode, hard_regno[nop])
2150 : in_class_p (op, this_alternative, NULL)))
2152 if (lra_dump_file != NULL)
2153 fprintf
2154 (lra_dump_file,
2155 " %d Dying matched operand reload: reject++\n",
2156 nop);
2157 reject++;
2159 else
2161 /* Strict_low_part requires to reload the register
2162 not the sub-register. In this case we should
2163 check that a final reload hard reg can hold the
2164 value mode. */
2165 if (curr_static_id->operand[nop].strict_low
2166 && REG_P (op)
2167 && hard_regno[nop] < 0
2168 && GET_CODE (*curr_id->operand_loc[nop]) == SUBREG
2169 && ira_class_hard_regs_num[this_alternative] > 0
2170 && ! HARD_REGNO_MODE_OK (ira_class_hard_regs
2171 [this_alternative][0],
2172 GET_MODE
2173 (*curr_id->operand_loc[nop])))
2175 if (lra_dump_file != NULL)
2176 fprintf
2177 (lra_dump_file,
2178 " alt=%d: Strict low subreg reload -- refuse\n",
2179 nalt);
2180 goto fail;
2182 losers++;
2184 if (operand_reg[nop] != NULL_RTX
2185 /* Output operands and matched input operands are
2186 not inherited. The following conditions do not
2187 exactly describe the previous statement but they
2188 are pretty close. */
2189 && curr_static_id->operand[nop].type != OP_OUT
2190 && (this_alternative_matches < 0
2191 || curr_static_id->operand[nop].type != OP_IN))
2193 int last_reload = (lra_reg_info[ORIGINAL_REGNO
2194 (operand_reg[nop])]
2195 .last_reload);
2197 /* The value of reload_sum has sense only if we
2198 process insns in their order. It happens only on
2199 the first constraints sub-pass when we do most of
2200 reload work. */
2201 if (lra_constraint_iter == 1 && last_reload > bb_reload_num)
2202 reload_sum += last_reload - bb_reload_num;
2204 /* If this is a constant that is reloaded into the
2205 desired class by copying it to memory first, count
2206 that as another reload. This is consistent with
2207 other code and is required to avoid choosing another
2208 alternative when the constant is moved into memory.
2209 Note that the test here is precisely the same as in
2210 the code below that calls force_const_mem. */
2211 if (CONST_POOL_OK_P (mode, op)
2212 && ((targetm.preferred_reload_class
2213 (op, this_alternative) == NO_REGS)
2214 || no_input_reloads_p))
2216 const_to_mem = 1;
2217 if (! no_regs_p)
2218 losers++;
2221 /* Alternative loses if it requires a type of reload not
2222 permitted for this insn. We can always reload
2223 objects with a REG_UNUSED note. */
2224 if ((curr_static_id->operand[nop].type != OP_IN
2225 && no_output_reloads_p
2226 && ! find_reg_note (curr_insn, REG_UNUSED, op))
2227 || (curr_static_id->operand[nop].type != OP_OUT
2228 && no_input_reloads_p && ! const_to_mem)
2229 || (this_alternative_matches >= 0
2230 && (no_input_reloads_p
2231 || (no_output_reloads_p
2232 && (curr_static_id->operand
2233 [this_alternative_matches].type != OP_IN)
2234 && ! find_reg_note (curr_insn, REG_UNUSED,
2235 no_subreg_reg_operand
2236 [this_alternative_matches])))))
2238 if (lra_dump_file != NULL)
2239 fprintf
2240 (lra_dump_file,
2241 " alt=%d: No input/otput reload -- refuse\n",
2242 nalt);
2243 goto fail;
2246 /* Check strong discouragement of reload of non-constant
2247 into class THIS_ALTERNATIVE. */
2248 if (! CONSTANT_P (op) && ! no_regs_p
2249 && (targetm.preferred_reload_class
2250 (op, this_alternative) == NO_REGS
2251 || (curr_static_id->operand[nop].type == OP_OUT
2252 && (targetm.preferred_output_reload_class
2253 (op, this_alternative) == NO_REGS))))
2255 if (lra_dump_file != NULL)
2256 fprintf (lra_dump_file,
2257 " %d Non-prefered reload: reject+=%d\n",
2258 nop, LRA_MAX_REJECT);
2259 reject += LRA_MAX_REJECT;
2262 if (! (MEM_P (op) && offmemok)
2263 && ! (const_to_mem && constmemok))
2265 /* We prefer to reload pseudos over reloading other
2266 things, since such reloads may be able to be
2267 eliminated later. So bump REJECT in other cases.
2268 Don't do this in the case where we are forcing a
2269 constant into memory and it will then win since
2270 we don't want to have a different alternative
2271 match then. */
2272 if (! (REG_P (op) && REGNO (op) >= FIRST_PSEUDO_REGISTER))
2274 if (lra_dump_file != NULL)
2275 fprintf
2276 (lra_dump_file,
2277 " %d Non-pseudo reload: reject+=2\n",
2278 nop);
2279 reject += 2;
2282 if (! no_regs_p)
2283 reload_nregs
2284 += ira_reg_class_max_nregs[this_alternative][mode];
2286 if (SMALL_REGISTER_CLASS_P (this_alternative))
2288 if (lra_dump_file != NULL)
2289 fprintf
2290 (lra_dump_file,
2291 " %d Small class reload: reject+=%d\n",
2292 nop, LRA_LOSER_COST_FACTOR / 2);
2293 reject += LRA_LOSER_COST_FACTOR / 2;
2297 /* We are trying to spill pseudo into memory. It is
2298 usually more costly than moving to a hard register
2299 although it might takes the same number of
2300 reloads. */
2301 if (no_regs_p && REG_P (op) && hard_regno[nop] >= 0)
2303 if (lra_dump_file != NULL)
2304 fprintf
2305 (lra_dump_file,
2306 " %d Spill pseudo into memory: reject+=3\n",
2307 nop);
2308 reject += 3;
2309 if (VECTOR_MODE_P (mode))
2311 /* Spilling vectors into memory is usually more
2312 costly as they contain big values. */
2313 if (lra_dump_file != NULL)
2314 fprintf
2315 (lra_dump_file,
2316 " %d Spill vector pseudo: reject+=2\n",
2317 nop);
2318 reject += 2;
2322 #ifdef SECONDARY_MEMORY_NEEDED
2323 /* If reload requires moving value through secondary
2324 memory, it will need one more insn at least. */
2325 if (this_alternative != NO_REGS
2326 && REG_P (op) && (cl = get_reg_class (REGNO (op))) != NO_REGS
2327 && ((curr_static_id->operand[nop].type != OP_OUT
2328 && SECONDARY_MEMORY_NEEDED (cl, this_alternative,
2329 GET_MODE (op)))
2330 || (curr_static_id->operand[nop].type != OP_IN
2331 && SECONDARY_MEMORY_NEEDED (this_alternative, cl,
2332 GET_MODE (op)))))
2333 losers++;
2334 #endif
2335 /* Input reloads can be inherited more often than output
2336 reloads can be removed, so penalize output
2337 reloads. */
2338 if (!REG_P (op) || curr_static_id->operand[nop].type != OP_IN)
2340 if (lra_dump_file != NULL)
2341 fprintf
2342 (lra_dump_file,
2343 " %d Non input pseudo reload: reject++\n",
2344 nop);
2345 reject++;
2349 if (early_clobber_p && ! scratch_p)
2351 if (lra_dump_file != NULL)
2352 fprintf (lra_dump_file,
2353 " %d Early clobber: reject++\n", nop);
2354 reject++;
2356 /* ??? We check early clobbers after processing all operands
2357 (see loop below) and there we update the costs more.
2358 Should we update the cost (may be approximately) here
2359 because of early clobber register reloads or it is a rare
2360 or non-important thing to be worth to do it. */
2361 overall = losers * LRA_LOSER_COST_FACTOR + reject;
2362 if ((best_losers == 0 || losers != 0) && best_overall < overall)
2364 if (lra_dump_file != NULL)
2365 fprintf (lra_dump_file,
2366 " alt=%d,overall=%d,losers=%d -- refuse\n",
2367 nalt, overall, losers);
2368 goto fail;
2371 curr_alt[nop] = this_alternative;
2372 COPY_HARD_REG_SET (curr_alt_set[nop], this_alternative_set);
2373 curr_alt_win[nop] = this_alternative_win;
2374 curr_alt_match_win[nop] = this_alternative_match_win;
2375 curr_alt_offmemok[nop] = this_alternative_offmemok;
2376 curr_alt_matches[nop] = this_alternative_matches;
2378 if (this_alternative_matches >= 0
2379 && !did_match && !this_alternative_win)
2380 curr_alt_win[this_alternative_matches] = false;
2382 if (early_clobber_p && operand_reg[nop] != NULL_RTX)
2383 early_clobbered_nops[early_clobbered_regs_num++] = nop;
2385 if (curr_insn_set != NULL_RTX && n_operands == 2
2386 /* Prevent processing non-move insns. */
2387 && (GET_CODE (SET_SRC (curr_insn_set)) == SUBREG
2388 || SET_SRC (curr_insn_set) == no_subreg_reg_operand[1])
2389 && ((! curr_alt_win[0] && ! curr_alt_win[1]
2390 && REG_P (no_subreg_reg_operand[0])
2391 && REG_P (no_subreg_reg_operand[1])
2392 && (reg_in_class_p (no_subreg_reg_operand[0], curr_alt[1])
2393 || reg_in_class_p (no_subreg_reg_operand[1], curr_alt[0])))
2394 || (! curr_alt_win[0] && curr_alt_win[1]
2395 && REG_P (no_subreg_reg_operand[1])
2396 && reg_in_class_p (no_subreg_reg_operand[1], curr_alt[0]))
2397 || (curr_alt_win[0] && ! curr_alt_win[1]
2398 && REG_P (no_subreg_reg_operand[0])
2399 && reg_in_class_p (no_subreg_reg_operand[0], curr_alt[1])
2400 && (! CONST_POOL_OK_P (curr_operand_mode[1],
2401 no_subreg_reg_operand[1])
2402 || (targetm.preferred_reload_class
2403 (no_subreg_reg_operand[1],
2404 (enum reg_class) curr_alt[1]) != NO_REGS))
2405 /* If it is a result of recent elimination in move
2406 insn we can transform it into an add still by
2407 using this alternative. */
2408 && GET_CODE (no_subreg_reg_operand[1]) != PLUS)))
2410 /* We have a move insn and a new reload insn will be similar
2411 to the current insn. We should avoid such situation as it
2412 results in LRA cycling. */
2413 overall += LRA_MAX_REJECT;
2415 ok_p = true;
2416 curr_alt_dont_inherit_ops_num = 0;
2417 for (nop = 0; nop < early_clobbered_regs_num; nop++)
2419 int i, j, clobbered_hard_regno, first_conflict_j, last_conflict_j;
2420 HARD_REG_SET temp_set;
2422 i = early_clobbered_nops[nop];
2423 if ((! curr_alt_win[i] && ! curr_alt_match_win[i])
2424 || hard_regno[i] < 0)
2425 continue;
2426 lra_assert (operand_reg[i] != NULL_RTX);
2427 clobbered_hard_regno = hard_regno[i];
2428 CLEAR_HARD_REG_SET (temp_set);
2429 add_to_hard_reg_set (&temp_set, biggest_mode[i], clobbered_hard_regno);
2430 first_conflict_j = last_conflict_j = -1;
2431 for (j = 0; j < n_operands; j++)
2432 if (j == i
2433 /* We don't want process insides of match_operator and
2434 match_parallel because otherwise we would process
2435 their operands once again generating a wrong
2436 code. */
2437 || curr_static_id->operand[j].is_operator)
2438 continue;
2439 else if ((curr_alt_matches[j] == i && curr_alt_match_win[j])
2440 || (curr_alt_matches[i] == j && curr_alt_match_win[i]))
2441 continue;
2442 /* If we don't reload j-th operand, check conflicts. */
2443 else if ((curr_alt_win[j] || curr_alt_match_win[j])
2444 && uses_hard_regs_p (*curr_id->operand_loc[j], temp_set))
2446 if (first_conflict_j < 0)
2447 first_conflict_j = j;
2448 last_conflict_j = j;
2450 if (last_conflict_j < 0)
2451 continue;
2452 /* If earlyclobber operand conflicts with another
2453 non-matching operand which is actually the same register
2454 as the earlyclobber operand, it is better to reload the
2455 another operand as an operand matching the earlyclobber
2456 operand can be also the same. */
2457 if (first_conflict_j == last_conflict_j
2458 && operand_reg[last_conflict_j]
2459 != NULL_RTX && ! curr_alt_match_win[last_conflict_j]
2460 && REGNO (operand_reg[i]) == REGNO (operand_reg[last_conflict_j]))
2462 curr_alt_win[last_conflict_j] = false;
2463 curr_alt_dont_inherit_ops[curr_alt_dont_inherit_ops_num++]
2464 = last_conflict_j;
2465 losers++;
2466 /* Early clobber was already reflected in REJECT. */
2467 lra_assert (reject > 0);
2468 if (lra_dump_file != NULL)
2469 fprintf
2470 (lra_dump_file,
2471 " %d Conflict early clobber reload: reject--\n",
2473 reject--;
2474 overall += LRA_LOSER_COST_FACTOR - 1;
2476 else
2478 /* We need to reload early clobbered register and the
2479 matched registers. */
2480 for (j = 0; j < n_operands; j++)
2481 if (curr_alt_matches[j] == i)
2483 curr_alt_match_win[j] = false;
2484 losers++;
2485 overall += LRA_LOSER_COST_FACTOR;
2487 if (! curr_alt_match_win[i])
2488 curr_alt_dont_inherit_ops[curr_alt_dont_inherit_ops_num++] = i;
2489 else
2491 /* Remember pseudos used for match reloads are never
2492 inherited. */
2493 lra_assert (curr_alt_matches[i] >= 0);
2494 curr_alt_win[curr_alt_matches[i]] = false;
2496 curr_alt_win[i] = curr_alt_match_win[i] = false;
2497 losers++;
2498 /* Early clobber was already reflected in REJECT. */
2499 lra_assert (reject > 0);
2500 if (lra_dump_file != NULL)
2501 fprintf
2502 (lra_dump_file,
2503 " %d Matched conflict early clobber reloads:"
2504 "reject--\n",
2506 reject--;
2507 overall += LRA_LOSER_COST_FACTOR - 1;
2510 if (lra_dump_file != NULL)
2511 fprintf (lra_dump_file, " alt=%d,overall=%d,losers=%d,rld_nregs=%d\n",
2512 nalt, overall, losers, reload_nregs);
2514 /* If this alternative can be made to work by reloading, and it
2515 needs less reloading than the others checked so far, record
2516 it as the chosen goal for reloading. */
2517 if ((best_losers != 0 && losers == 0)
2518 || (((best_losers == 0 && losers == 0)
2519 || (best_losers != 0 && losers != 0))
2520 && (best_overall > overall
2521 || (best_overall == overall
2522 /* If the cost of the reloads is the same,
2523 prefer alternative which requires minimal
2524 number of reload regs. */
2525 && (reload_nregs < best_reload_nregs
2526 || (reload_nregs == best_reload_nregs
2527 && (best_reload_sum < reload_sum
2528 || (best_reload_sum == reload_sum
2529 && nalt < goal_alt_number))))))))
2531 for (nop = 0; nop < n_operands; nop++)
2533 goal_alt_win[nop] = curr_alt_win[nop];
2534 goal_alt_match_win[nop] = curr_alt_match_win[nop];
2535 goal_alt_matches[nop] = curr_alt_matches[nop];
2536 goal_alt[nop] = curr_alt[nop];
2537 goal_alt_offmemok[nop] = curr_alt_offmemok[nop];
2539 goal_alt_dont_inherit_ops_num = curr_alt_dont_inherit_ops_num;
2540 for (nop = 0; nop < curr_alt_dont_inherit_ops_num; nop++)
2541 goal_alt_dont_inherit_ops[nop] = curr_alt_dont_inherit_ops[nop];
2542 goal_alt_swapped = curr_swapped;
2543 best_overall = overall;
2544 best_losers = losers;
2545 best_reload_nregs = reload_nregs;
2546 best_reload_sum = reload_sum;
2547 goal_alt_number = nalt;
2549 if (losers == 0)
2550 /* Everything is satisfied. Do not process alternatives
2551 anymore. */
2552 break;
2553 fail:
2556 return ok_p;
2559 /* Make reload base reg from address AD. */
2560 static rtx
2561 base_to_reg (struct address_info *ad)
2563 enum reg_class cl;
2564 int code = -1;
2565 rtx new_inner = NULL_RTX;
2566 rtx new_reg = NULL_RTX;
2567 rtx insn;
2568 rtx last_insn = get_last_insn();
2570 lra_assert (ad->base == ad->base_term && ad->disp == ad->disp_term);
2571 cl = base_reg_class (ad->mode, ad->as, ad->base_outer_code,
2572 get_index_code (ad));
2573 new_reg = lra_create_new_reg (GET_MODE (*ad->base_term), NULL_RTX,
2574 cl, "base");
2575 new_inner = simplify_gen_binary (PLUS, GET_MODE (new_reg), new_reg,
2576 ad->disp_term == NULL
2577 ? gen_int_mode (0, ad->mode)
2578 : *ad->disp_term);
2579 if (!valid_address_p (ad->mode, new_inner, ad->as))
2580 return NULL_RTX;
2581 insn = emit_insn (gen_rtx_SET (ad->mode, new_reg, *ad->base_term));
2582 code = recog_memoized (insn);
2583 if (code < 0)
2585 delete_insns_since (last_insn);
2586 return NULL_RTX;
2589 return new_inner;
2592 /* Make reload base reg + disp from address AD. Return the new pseudo. */
2593 static rtx
2594 base_plus_disp_to_reg (struct address_info *ad)
2596 enum reg_class cl;
2597 rtx new_reg;
2599 lra_assert (ad->base == ad->base_term && ad->disp == ad->disp_term);
2600 cl = base_reg_class (ad->mode, ad->as, ad->base_outer_code,
2601 get_index_code (ad));
2602 new_reg = lra_create_new_reg (GET_MODE (*ad->base_term), NULL_RTX,
2603 cl, "base + disp");
2604 lra_emit_add (new_reg, *ad->base_term, *ad->disp_term);
2605 return new_reg;
2608 /* Make reload of index part of address AD. Return the new
2609 pseudo. */
2610 static rtx
2611 index_part_to_reg (struct address_info *ad)
2613 rtx new_reg;
2615 new_reg = lra_create_new_reg (GET_MODE (*ad->index), NULL_RTX,
2616 INDEX_REG_CLASS, "index term");
2617 expand_mult (GET_MODE (*ad->index), *ad->index_term,
2618 GEN_INT (get_index_scale (ad)), new_reg, 1);
2619 return new_reg;
2622 /* Return true if we can add a displacement to address AD, even if that
2623 makes the address invalid. The fix-up code requires any new address
2624 to be the sum of the BASE_TERM, INDEX and DISP_TERM fields. */
2625 static bool
2626 can_add_disp_p (struct address_info *ad)
2628 return (!ad->autoinc_p
2629 && ad->segment == NULL
2630 && ad->base == ad->base_term
2631 && ad->disp == ad->disp_term);
/* Make equiv substitution in address AD.  Return true if a substitution
   was made.  */
static bool
equiv_address_substitution (struct address_info *ad)
{
  rtx base_reg, new_base_reg, index_reg, new_index_reg, *base_term, *index_term;
  HOST_WIDE_INT disp, scale;
  bool change_p;

  /* Look up the (eliminated) equivalences of the base and index
     registers, looking through any wrapping SUBREG first.  */
  base_term = strip_subreg (ad->base_term);
  if (base_term == NULL)
    base_reg = new_base_reg = NULL_RTX;
  else
    {
      base_reg = *base_term;
      new_base_reg = get_equiv_with_elimination (base_reg, curr_insn);
    }
  index_term = strip_subreg (ad->index_term);
  if (index_term == NULL)
    index_reg = new_index_reg = NULL_RTX;
  else
    {
      index_reg = *index_term;
      new_index_reg = get_equiv_with_elimination (index_reg, curr_insn);
    }
  /* Nothing to substitute if neither term has an equivalence.  */
  if (base_reg == new_base_reg && index_reg == new_index_reg)
    return false;
  disp = 0;
  change_p = false;
  if (lra_dump_file != NULL)
    {
      fprintf (lra_dump_file, "Changing address in insn %d ",
	       INSN_UID (curr_insn));
      dump_value_slim (lra_dump_file, *ad->outer, 1);
    }
  if (base_reg != new_base_reg)
    {
      if (REG_P (new_base_reg))
	{
	  *base_term = new_base_reg;
	  change_p = true;
	}
      else if (GET_CODE (new_base_reg) == PLUS
	       && REG_P (XEXP (new_base_reg, 0))
	       && CONST_INT_P (XEXP (new_base_reg, 1))
	       && can_add_disp_p (ad))
	{
	  /* Equivalence is reg + const: use the reg as the new base
	     and fold the constant into DISP for later.  */
	  disp += INTVAL (XEXP (new_base_reg, 1));
	  *base_term = XEXP (new_base_reg, 0);
	  change_p = true;
	}
      /* Keep a duplicated base term (e.g. in {PRE,POST}_MODIFY) in
	 sync with the one we just rewrote.  */
      if (ad->base_term2 != NULL)
	*ad->base_term2 = *ad->base_term;
    }
  if (index_reg != new_index_reg)
    {
      if (REG_P (new_index_reg))
	{
	  *index_term = new_index_reg;
	  change_p = true;
	}
      else if (GET_CODE (new_index_reg) == PLUS
	       && REG_P (XEXP (new_index_reg, 0))
	       && CONST_INT_P (XEXP (new_index_reg, 1))
	       && can_add_disp_p (ad)
	       /* A zero scale would make the folded displacement
		  meaningless, so require it to be nonzero.  */
	       && (scale = get_index_scale (ad)))
	{
	  /* The index is multiplied by SCALE, so the equivalence's
	     constant must be scaled before folding into DISP.  */
	  disp += INTVAL (XEXP (new_index_reg, 1)) * scale;
	  *index_term = XEXP (new_index_reg, 0);
	  change_p = true;
	}
    }
  /* Apply any displacement accumulated from the substitutions.  */
  if (disp != 0)
    {
      if (ad->disp != NULL)
	*ad->disp = plus_constant (GET_MODE (*ad->inner), *ad->disp, disp);
      else
	{
	  /* There was no displacement term; adding one changes the
	     address shape, so re-decompose it.  */
	  *ad->inner = plus_constant (GET_MODE (*ad->inner), *ad->inner, disp);
	  update_address (ad);
	}
      change_p = true;
    }
  if (lra_dump_file != NULL)
    {
      if (! change_p)
	fprintf (lra_dump_file, " -- no change\n");
      else
	{
	  fprintf (lra_dump_file, " on equiv ");
	  dump_value_slim (lra_dump_file, *ad->outer, 1);
	  fprintf (lra_dump_file, "\n");
	}
    }
  return change_p;
}
/* Major function to make reloads for an address in operand NOP.
   The supported cases are:

   1) an address that existed before LRA started, at which point it
   must have been valid.  These addresses are subject to elimination
   and may have become invalid due to the elimination offset being out
   of range.

   2) an address created by forcing a constant to memory
   (force_const_to_mem).  The initial form of these addresses might
   not be valid, and it is this function's job to make them valid.

   3) a frame address formed from a register and a (possibly zero)
   constant offset.  As above, these addresses might not be valid and
   this function must make them so.

   Add reloads to the lists *BEFORE and *AFTER.  We might need to add
   reloads to *AFTER because of inc/dec, {pre, post} modify in the
   address.  Return true for any RTL change.

   The function is a helper function which does not produce all
   transformations which can be necessary.  It does just basic steps.
   To do all necessary transformations use function
   process_address.  */
static bool
process_address_1 (int nop, rtx *before, rtx *after)
{
  struct address_info ad;
  rtx new_reg;
  rtx op = *curr_id->operand_loc[nop];
  const char *constraint = curr_static_id->operand[nop].constraint;
  enum constraint_num cn = lookup_constraint (constraint);
  bool change_p;

  /* Decompose the operand into address parts; bail out if it is not
     an address at all.  */
  if (insn_extra_address_constraint (cn))
    decompose_lea_address (&ad, curr_id->operand_loc[nop]);
  else if (MEM_P (op))
    decompose_mem_address (&ad, op);
  else if (GET_CODE (op) == SUBREG
	   && MEM_P (SUBREG_REG (op)))
    decompose_mem_address (&ad, SUBREG_REG (op));
  else
    return false;
  change_p = equiv_address_substitution (&ad);
  if (ad.base_term != NULL
      && (process_addr_reg
	  (ad.base_term, before,
	   /* For auto-inc/dec, the modified base must also be stored
	      back after the insn -- unless the base register dies
	      here, in which case no after-reload is needed.  */
	   (ad.autoinc_p
	    && !(REG_P (*ad.base_term)
		 && find_regno_note (curr_insn, REG_DEAD,
				     REGNO (*ad.base_term)) != NULL_RTX)
	    ? after : NULL),
	   base_reg_class (ad.mode, ad.as, ad.base_outer_code,
			   get_index_code (&ad)))))
    {
      change_p = true;
      /* Keep a duplicated base term in sync.  */
      if (ad.base_term2 != NULL)
	*ad.base_term2 = *ad.base_term;
    }
  if (ad.index_term != NULL
      && process_addr_reg (ad.index_term, before, NULL, INDEX_REG_CLASS))
    change_p = true;

  /* Target hooks sometimes don't treat extra-constraint addresses as
     legitimate address_operands, so handle them specially.  */
  if (insn_extra_address_constraint (cn)
      && satisfies_address_constraint_p (&ad, cn))
    return change_p;

  /* There are four cases where the shape of *AD.INNER may now be invalid:

     1) the original address was valid, but either elimination or
     equiv_address_substitution was applied and that made
     the address invalid.

     2) the address is an invalid symbolic address created by
     force_const_to_mem.

     3) the address is a frame address with an invalid offset.

     4) the address is a frame address with an invalid base.

     All these cases involve a non-autoinc address, so there is no
     point revalidating other types.  */
  if (ad.autoinc_p || valid_address_p (&ad))
    return change_p;

  /* Any index existed before LRA started, so we can assume that the
     presence and shape of the index is valid.  */
  push_to_sequence (*before);
  lra_assert (ad.disp == ad.disp_term);
  if (ad.base == NULL)
    {
      if (ad.index == NULL)
	{
	  int code = -1;
	  enum reg_class cl = base_reg_class (ad.mode, ad.as,
					      SCRATCH, SCRATCH);
	  rtx addr = *ad.inner;

	  new_reg = lra_create_new_reg (Pmode, NULL_RTX, cl, "addr");
#ifdef HAVE_lo_sum
	  {
	    rtx insn;
	    rtx last = get_last_insn ();

	    /* addr => lo_sum (new_base, addr), case (2) above.  */
	    insn = emit_insn (gen_rtx_SET
			      (VOIDmode, new_reg,
			       gen_rtx_HIGH (Pmode, copy_rtx (addr))));
	    code = recog_memoized (insn);
	    if (code >= 0)
	      {
		*ad.inner = gen_rtx_LO_SUM (Pmode, new_reg, addr);
		if (! valid_address_p (ad.mode, *ad.outer, ad.as))
		  {
		    /* Try to put lo_sum into register.  */
		    insn = emit_insn (gen_rtx_SET
				      (VOIDmode, new_reg,
				       gen_rtx_LO_SUM (Pmode, new_reg, addr)));
		    code = recog_memoized (insn);
		    if (code >= 0)
		      {
			*ad.inner = new_reg;
			if (! valid_address_p (ad.mode, *ad.outer, ad.as))
			  {
			    /* Neither shape worked; restore the
			       original address and fall through to a
			       plain move below.  */
			    *ad.inner = addr;
			    code = -1;
			  }
		      }
		  }
	      }
	    if (code < 0)
	      delete_insns_since (last);
	  }
#endif
	  if (code < 0)
	    {
	      /* addr => new_base, case (2) above.  */
	      lra_emit_move (new_reg, addr);
	      *ad.inner = new_reg;
	    }
	}
      else
	{
	  /* index * scale + disp => new base + index * scale,
	     case (1) above.  */
	  enum reg_class cl = base_reg_class (ad.mode, ad.as, PLUS,
					      GET_CODE (*ad.index));

	  lra_assert (INDEX_REG_CLASS != NO_REGS);
	  new_reg = lra_create_new_reg (Pmode, NULL_RTX, cl, "disp");
	  lra_emit_move (new_reg, *ad.disp);
	  *ad.inner = simplify_gen_binary (PLUS, GET_MODE (new_reg),
					   new_reg, *ad.index);
	}
    }
  else if (ad.index == NULL)
    {
      int regno;
      enum reg_class cl;
      rtx set, insns, last_insn;
      /* Try to reload base into register only if the base is invalid
         for the address but with valid offset, case (4) above.  */
      start_sequence ();
      new_reg = base_to_reg (&ad);

      /* base + disp => new base, cases (1) and (3) above.  */
      /* Another option would be to reload the displacement into an
	 index register.  However, postreload has code to optimize
	 address reloads that have the same base and different
	 displacements, so reloading into an index register would
	 not necessarily be a win.  */
      if (new_reg == NULL_RTX)
        new_reg = base_plus_disp_to_reg (&ad);
      insns = get_insns ();
      last_insn = get_last_insn ();
      /* If we generated at least two insns, try last insn source as
	 an address.  If we succeed, we generate one less insn.  */
      if (last_insn != insns && (set = single_set (last_insn)) != NULL_RTX
	  && GET_CODE (SET_SRC (set)) == PLUS
	  && REG_P (XEXP (SET_SRC (set), 0))
	  && CONSTANT_P (XEXP (SET_SRC (set), 1)))
	{
	  *ad.inner = SET_SRC (set);
	  if (valid_address_p (ad.mode, *ad.outer, ad.as))
	    {
	      /* The reg + const form is itself a valid address, so use
		 it directly and drop the last move.  */
	      *ad.base_term = XEXP (SET_SRC (set), 0);
	      *ad.disp_term = XEXP (SET_SRC (set), 1);
	      cl = base_reg_class (ad.mode, ad.as, ad.base_outer_code,
				   get_index_code (&ad));
	      regno = REGNO (*ad.base_term);
	      if (regno >= FIRST_PSEUDO_REGISTER
		  && cl != lra_get_allocno_class (regno))
		lra_change_class (regno, cl, "      Change to", true);
	      new_reg = SET_SRC (set);
	      delete_insns_since (PREV_INSN (last_insn));
	    }
	}
      end_sequence ();
      emit_insn (insns);
      *ad.inner = new_reg;
    }
  else if (ad.disp_term != NULL)
    {
      /* base + scale * index + disp => new base + scale * index,
	 case (1) above.  */
      new_reg = base_plus_disp_to_reg (&ad);
      *ad.inner = simplify_gen_binary (PLUS, GET_MODE (new_reg),
				       new_reg, *ad.index);
    }
  else if (get_index_scale (&ad) == 1)
    {
      /* The last transformation to one reg will be made in
	 curr_insn_transform function.  */
      end_sequence ();
      return false;
    }
  else
    {
      /* base + scale * index => base + new_reg,
	 case (1) above.
	 Index part of address may become invalid.  For example, we
	 changed pseudo on the equivalent memory and a subreg of the
	 pseudo onto the memory of different mode for which the scale is
	 prohibited.  */
      new_reg = index_part_to_reg (&ad);
      *ad.inner = simplify_gen_binary (PLUS, GET_MODE (new_reg),
				       *ad.base_term, new_reg);
    }
  /* Prepend the emitted reload insns to *BEFORE.  */
  *before = get_insns ();
  end_sequence ();
  return true;
}
2967 /* Do address reloads until it is necessary. Use process_address_1 as
2968 a helper function. Return true for any RTL changes. */
2969 static bool
2970 process_address (int nop, rtx *before, rtx *after)
2972 bool res = false;
2974 while (process_address_1 (nop, before, after))
2975 res = true;
2976 return res;
/* Emit insns to reload VALUE into a new register.  VALUE is an
   auto-increment or auto-decrement RTX whose operand is a register or
   memory location; so reloading involves incrementing that location.
   IN is either identical to VALUE, or some cheaper place to reload
   value being incremented/decremented from.

   INC_AMOUNT is the number to increment or decrement by (always
   positive and ignored for POST_MODIFY/PRE_MODIFY).

   NEW_RCLASS is the register class for any new pseudo created to hold
   the result.

   Return pseudo containing the result.  */
static rtx
emit_inc (enum reg_class new_rclass, rtx in, rtx value, int inc_amount)
{
  /* REG or MEM to be copied and incremented.  */
  rtx incloc = XEXP (value, 0);
  /* Nonzero if increment after copying.  */
  int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC
	      || GET_CODE (value) == POST_MODIFY);
  rtx last;
  rtx inc;
  rtx add_insn;
  int code;
  rtx real_in = in == value ? incloc : in;
  rtx result;
  bool plus_p = true;

  if (GET_CODE (value) == PRE_MODIFY || GET_CODE (value) == POST_MODIFY)
    {
      /* For {PRE,POST}_MODIFY the increment is explicit in the RTX:
	 (plus/minus incloc inc).  */
      lra_assert (GET_CODE (XEXP (value, 1)) == PLUS
		  || GET_CODE (XEXP (value, 1)) == MINUS);
      lra_assert (rtx_equal_p (XEXP (XEXP (value, 1), 0), XEXP (value, 0)));
      plus_p = GET_CODE (XEXP (value, 1)) == PLUS;
      inc = XEXP (XEXP (value, 1), 1);
    }
  else
    {
      /* For {PRE,POST}_{INC,DEC} the amount is implicit; negate it
	 for the decrement forms.  */
      if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
	inc_amount = -inc_amount;

      inc = GEN_INT (inc_amount);
    }

  /* A pre-increment of a REG can be done in place; anything else
     needs a fresh pseudo to hold the result.  */
  if (! post && REG_P (incloc))
    result = incloc;
  else
    result = lra_create_new_reg (GET_MODE (value), value, new_rclass,
				 "INC/DEC result");

  if (real_in != result)
    {
      /* First copy the location to the result register.  */
      lra_assert (REG_P (result));
      emit_insn (gen_move_insn (result, real_in));
    }

  /* We suppose that there are insns to add/sub with the constant
     increment permitted in {PRE/POST}_{DEC/INC/MODIFY}.  At least the
     old reload worked with this assumption.  If the assumption
     becomes wrong, we should use approach in function
     base_plus_disp_to_reg.  */
  if (in == value)
    {
      /* See if we can directly increment INCLOC.  */
      last = get_last_insn ();
      add_insn = emit_insn (plus_p
			    ? gen_add2_insn (incloc, inc)
			    : gen_sub2_insn (incloc, inc));

      code = recog_memoized (add_insn);
      if (code >= 0)
	{
	  if (! post && result != incloc)
	    emit_insn (gen_move_insn (result, incloc));
	  return result;
	}
      /* The direct add/sub did not recognize -- undo it.  */
      delete_insns_since (last);
    }

  /* If couldn't do the increment directly, must increment in RESULT.
     The way we do this depends on whether this is pre- or
     post-increment.  For pre-increment, copy INCLOC to the reload
     register, increment it there, then save back.  */
  if (! post)
    {
      if (real_in != result)
	emit_insn (gen_move_insn (result, real_in));
      if (plus_p)
	emit_insn (gen_add2_insn (result, inc));
      else
	emit_insn (gen_sub2_insn (result, inc));
      if (result != incloc)
	emit_insn (gen_move_insn (incloc, result));
    }
  else
    {
      /* Post-increment.

	 Because this might be a jump insn or a compare, and because
	 RESULT may not be available after the insn in an input
	 reload, we must do the incrementing before the insn being
	 reloaded for.

	 We have already copied IN to RESULT.  Increment the copy in
	 RESULT, save that back, then decrement RESULT so it has
	 the original value.  */
      if (plus_p)
	emit_insn (gen_add2_insn (result, inc));
      else
	emit_insn (gen_sub2_insn (result, inc));
      emit_insn (gen_move_insn (incloc, result));
      /* Restore non-modified value for the result.  We prefer this
	 way because it does not require an additional hard
	 register.  */
      if (plus_p)
	{
	  if (CONST_INT_P (inc))
	    emit_insn (gen_add2_insn (result,
				      gen_int_mode (-INTVAL (inc),
						    GET_MODE (result))));
	  else
	    emit_insn (gen_sub2_insn (result, inc));
	}
      else
	emit_insn (gen_add2_insn (result, inc));
    }
  return result;
}
3107 /* Return true if the current move insn does not need processing as we
3108 already know that it satisfies its constraints. */
3109 static bool
3110 simple_move_p (void)
3112 rtx dest, src;
3113 enum reg_class dclass, sclass;
3115 lra_assert (curr_insn_set != NULL_RTX);
3116 dest = SET_DEST (curr_insn_set);
3117 src = SET_SRC (curr_insn_set);
3118 return ((dclass = get_op_class (dest)) != NO_REGS
3119 && (sclass = get_op_class (src)) != NO_REGS
3120 /* The backend guarantees that register moves of cost 2
3121 never need reloads. */
3122 && targetm.register_move_cost (GET_MODE (src), dclass, sclass) == 2);
3125 /* Swap operands NOP and NOP + 1. */
3126 static inline void
3127 swap_operands (int nop)
3129 enum machine_mode mode = curr_operand_mode[nop];
3130 curr_operand_mode[nop] = curr_operand_mode[nop + 1];
3131 curr_operand_mode[nop + 1] = mode;
3132 rtx x = *curr_id->operand_loc[nop];
3133 *curr_id->operand_loc[nop] = *curr_id->operand_loc[nop + 1];
3134 *curr_id->operand_loc[nop + 1] = x;
3135 /* Swap the duplicates too. */
3136 lra_update_dup (curr_id, nop);
3137 lra_update_dup (curr_id, nop + 1);
/* Main entry point of the constraint code: search the body of the
   current insn to choose the best alternative.  It is mimicking insn
   alternative cost calculation model of former reload pass.  That is
   because machine descriptions were written to use this model.  This
   model can be changed in future.  Make commutative operand exchange
   if it is chosen.

   Return true if some RTL changes happened during function call.  */
static bool
curr_insn_transform (void)
{
  int i, j, k;
  int n_operands;
  int n_alternatives;
  int commutative;
  signed char goal_alt_matched[MAX_RECOG_OPERANDS][MAX_RECOG_OPERANDS];
  signed char match_inputs[MAX_RECOG_OPERANDS + 1];
  rtx before, after;
  bool alt_p = false;
  /* Flag that the insn has been changed through a transformation.  */
  bool change_p;
  bool sec_mem_p;
#ifdef SECONDARY_MEMORY_NEEDED
  bool use_sec_mem_p;
#endif
  int max_regno_before;
  int reused_alternative_num;

  curr_insn_set = single_set (curr_insn);
  /* Trivially-satisfied moves need no work at all.  */
  if (curr_insn_set != NULL_RTX && simple_move_p ())
    return false;

  no_input_reloads_p = no_output_reloads_p = false;
  goal_alt_number = -1;
  change_p = sec_mem_p = false;
  /* JUMP_INSNs and CALL_INSNs are not allowed to have any output
     reloads; neither are insns that SET cc0.  Insns that use CC0 are
     not allowed to have any input reloads.  */
  if (JUMP_P (curr_insn) || CALL_P (curr_insn))
    no_output_reloads_p = true;

#ifdef HAVE_cc0
  if (reg_referenced_p (cc0_rtx, PATTERN (curr_insn)))
    no_input_reloads_p = true;
  if (reg_set_p (cc0_rtx, PATTERN (curr_insn)))
    no_output_reloads_p = true;
#endif

  n_operands = curr_static_id->n_operands;
  n_alternatives = curr_static_id->n_alternatives;

  /* Just return "no reloads" if insn has no operands with
     constraints.  */
  if (n_operands == 0 || n_alternatives == 0)
    return false;

  max_regno_before = max_reg_num ();

  for (i = 0; i < n_operands; i++)
    {
      goal_alt_matched[i][0] = -1;
      goal_alt_matches[i] = -1;
    }

  commutative = curr_static_id->commutative;

  /* Now see what we need for pseudos that didn't get hard regs or got
     the wrong kind of hard reg.  For this, we must consider all the
     operands together against the register constraints.  */

  best_losers = best_overall = INT_MAX;
  best_reload_sum = 0;

  curr_swapped = false;
  goal_alt_swapped = false;

  /* Make equivalence substitution and memory subreg elimination
     before address processing because an address legitimacy can
     depend on memory mode.  */
  for (i = 0; i < n_operands; i++)
    {
      rtx op = *curr_id->operand_loc[i];
      rtx subst, old = op;
      bool op_change_p = false;

      if (GET_CODE (old) == SUBREG)
	old = SUBREG_REG (old);
      subst = get_equiv_with_elimination (old, curr_insn);
      if (subst != old)
	{
	  subst = copy_rtx (subst);
	  lra_assert (REG_P (old));
	  if (GET_CODE (op) == SUBREG)
	    SUBREG_REG (op) = subst;
	  else
	    *curr_id->operand_loc[i] = subst;
	  if (lra_dump_file != NULL)
	    {
	      fprintf (lra_dump_file,
		       "Changing pseudo %d in operand %i of insn %u on equiv ",
		       REGNO (old), i, INSN_UID (curr_insn));
	      dump_value_slim (lra_dump_file, subst, 1);
	      fprintf (lra_dump_file, "\n");
	    }
	  op_change_p = change_p = true;
	}
      if (simplify_operand_subreg (i, GET_MODE (old)) || op_change_p)
	{
	  change_p = true;
	  lra_update_dup (curr_id, i);
	}
    }

  /* Reload address registers and displacements.  We do it before
     finding an alternative because of memory constraints.  */
  before = after = NULL_RTX;
  for (i = 0; i < n_operands; i++)
    if (! curr_static_id->operand[i].is_operator
	&& process_address (i, &before, &after))
      {
	change_p = true;
	lra_update_dup (curr_id, i);
      }

  if (change_p)
    /* If we've changed the instruction then any alternative that
       we chose previously may no longer be valid.  */
    lra_set_used_insn_alternative (curr_insn, -1);

  if (curr_insn_set != NULL_RTX
      && check_and_process_move (&change_p, &sec_mem_p))
    return change_p;

 try_swapped:

  reused_alternative_num = curr_id->used_insn_alternative;
  if (lra_dump_file != NULL && reused_alternative_num >= 0)
    fprintf (lra_dump_file, "Reusing alternative %d for insn #%u\n",
	     reused_alternative_num, INSN_UID (curr_insn));

  if (process_alt_operands (reused_alternative_num))
    alt_p = true;

  /* If insn is commutative (it's safe to exchange a certain pair of
     operands) then we need to try each alternative twice, the second
     time matching those two operands as if we had exchanged them.  To
     do this, really exchange them in operands.

     If we have just tried the alternatives the second time, return
     operands to normal and drop through.  */

  if (reused_alternative_num < 0 && commutative >= 0)
    {
      curr_swapped = !curr_swapped;
      if (curr_swapped)
	{
	  swap_operands (commutative);
	  goto try_swapped;
	}
      else
	swap_operands (commutative);
    }

  if (! alt_p && ! sec_mem_p)
    {
      /* No alternative works with reloads??  */
      if (INSN_CODE (curr_insn) >= 0)
	fatal_insn ("unable to generate reloads for:", curr_insn);
      error_for_asm (curr_insn,
		     "inconsistent operand constraints in an %<asm%>");
      /* Avoid further trouble with this insn.  */
      PATTERN (curr_insn) = gen_rtx_USE (VOIDmode, const0_rtx);
      lra_invalidate_insn_data (curr_insn);
      return true;
    }

  /* If the best alternative is with operands 1 and 2 swapped, swap
     them.  Update the operand numbers of any reloads already
     pushed.  */

  if (goal_alt_swapped)
    {
      if (lra_dump_file != NULL)
	fprintf (lra_dump_file, " Commutative operand exchange in insn %u\n",
		 INSN_UID (curr_insn));

      /* Swap the duplicates too.  */
      swap_operands (commutative);
      change_p = true;
    }

#ifdef SECONDARY_MEMORY_NEEDED
  /* Some target macros SECONDARY_MEMORY_NEEDED (e.g. x86) are defined
     too conservatively.  So we use the secondary memory only if there
     is no alternative without reloads.  */
  use_sec_mem_p = false;
  if (! alt_p)
    use_sec_mem_p = true;
  else if (sec_mem_p)
    {
      for (i = 0; i < n_operands; i++)
	if (! goal_alt_win[i] && ! goal_alt_match_win[i])
	  break;
      use_sec_mem_p = i < n_operands;
    }

  if (use_sec_mem_p)
    {
      rtx new_reg, src, dest, rld;
      enum machine_mode sec_mode, rld_mode;

      lra_assert (sec_mem_p);
      lra_assert (curr_static_id->operand[0].type == OP_OUT
		  && curr_static_id->operand[1].type == OP_IN);
      dest = *curr_id->operand_loc[0];
      src = *curr_id->operand_loc[1];
      /* Reload through the narrower of the two operands.  */
      rld = (GET_MODE_SIZE (GET_MODE (dest)) <= GET_MODE_SIZE (GET_MODE (src))
	     ? dest : src);
      rld_mode = GET_MODE (rld);
#ifdef SECONDARY_MEMORY_NEEDED_MODE
      sec_mode = SECONDARY_MEMORY_NEEDED_MODE (rld_mode);
#else
      sec_mode = rld_mode;
#endif
      new_reg = lra_create_new_reg (sec_mode, NULL_RTX,
				    NO_REGS, "secondary");
      /* If the mode is changed, it should be wider.  */
      lra_assert (GET_MODE_SIZE (sec_mode) >= GET_MODE_SIZE (rld_mode));
      if (sec_mode != rld_mode)
	{
	  /* If the target says specifically to use another mode for
	     secondary memory moves we can not reuse the original
	     insn.  */
	  after = emit_spill_move (false, new_reg, dest);
	  lra_process_new_insns (curr_insn, NULL_RTX, after,
				 "Inserting the sec. move");
	  /* We may have non null BEFORE here (e.g. after address
	     processing).  */
	  push_to_sequence (before);
	  before = emit_spill_move (true, new_reg, src);
	  emit_insn (before);
	  before = get_insns ();
	  end_sequence ();
	  lra_process_new_insns (curr_insn, before, NULL_RTX, "Changing on");
	  lra_set_insn_deleted (curr_insn);
	}
      else if (dest == rld)
	{
	  *curr_id->operand_loc[0] = new_reg;
	  after = emit_spill_move (false, new_reg, dest);
	  lra_process_new_insns (curr_insn, NULL_RTX, after,
				 "Inserting the sec. move");
	}
      else
	{
	  *curr_id->operand_loc[1] = new_reg;
	  /* See comments above.  */
	  push_to_sequence (before);
	  before = emit_spill_move (true, new_reg, src);
	  emit_insn (before);
	  before = get_insns ();
	  end_sequence ();
	  lra_process_new_insns (curr_insn, before, NULL_RTX,
				 "Inserting the sec. move");
	}
      lra_update_insn_regno_info (curr_insn);
      return true;
    }
#endif

  lra_assert (goal_alt_number >= 0);
  lra_set_used_insn_alternative (curr_insn, goal_alt_number);

  if (lra_dump_file != NULL)
    {
      const char *p;

      fprintf (lra_dump_file, " Choosing alt %d in insn %u:",
	       goal_alt_number, INSN_UID (curr_insn));
      for (i = 0; i < n_operands; i++)
	{
	  p = (curr_static_id->operand_alternative
	       [goal_alt_number * n_operands + i].constraint);
	  if (*p == '\0')
	    continue;
	  fprintf (lra_dump_file, " (%d) ", i);
	  for (; *p != '\0' && *p != ',' && *p != '#'; p++)
	    fputc (*p, lra_dump_file);
	}
      if (INSN_CODE (curr_insn) >= 0
	  && (p = get_insn_name (INSN_CODE (curr_insn))) != NULL)
	fprintf (lra_dump_file, " {%s}", p);
      if (curr_id->sp_offset != 0)
	fprintf (lra_dump_file, " (sp_off=%" HOST_WIDE_INT_PRINT "d)",
		 curr_id->sp_offset);
      fprintf (lra_dump_file, "\n");
    }

  /* Right now, for any pair of operands I and J that are required to
     match, with J < I, goal_alt_matches[I] is J.  Add I to
     goal_alt_matched[J].  */

  for (i = 0; i < n_operands; i++)
    if ((j = goal_alt_matches[i]) >= 0)
      {
	/* Find the end of the -1-terminated list before appending.  */
	for (k = 0; goal_alt_matched[j][k] >= 0; k++)
	  ;
	/* We allow matching one output operand and several input
	   operands.  */
	lra_assert (k == 0
		    || (curr_static_id->operand[j].type == OP_OUT
			&& curr_static_id->operand[i].type == OP_IN
			&& (curr_static_id->operand
			    [goal_alt_matched[j][0]].type == OP_IN)));
	goal_alt_matched[j][k] = i;
	goal_alt_matched[j][k + 1] = -1;
      }

  for (i = 0; i < n_operands; i++)
    goal_alt_win[i] |= goal_alt_match_win[i];

  /* Any constants that aren't allowed and can't be reloaded into
     registers are here changed into memory references.  */
  for (i = 0; i < n_operands; i++)
    if (goal_alt_win[i])
      {
	int regno;
	enum reg_class new_class;
	rtx reg = *curr_id->operand_loc[i];

	if (GET_CODE (reg) == SUBREG)
	  reg = SUBREG_REG (reg);

	if (REG_P (reg) && (regno = REGNO (reg)) >= FIRST_PSEUDO_REGISTER)
	  {
	    bool ok_p = in_class_p (reg, goal_alt[i], &new_class);

	    if (new_class != NO_REGS && get_reg_class (regno) != new_class)
	      {
		lra_assert (ok_p);
		lra_change_class (regno, new_class, " Change to", true);
	      }
	  }
      }
    else
      {
	const char *constraint;
	char c;
	rtx op = *curr_id->operand_loc[i];
	rtx subreg = NULL_RTX;
	enum machine_mode mode = curr_operand_mode[i];

	if (GET_CODE (op) == SUBREG)
	  {
	    subreg = op;
	    op = SUBREG_REG (op);
	    mode = GET_MODE (op);
	  }

	if (CONST_POOL_OK_P (mode, op)
	    && ((targetm.preferred_reload_class
		 (op, (enum reg_class) goal_alt[i]) == NO_REGS)
		|| no_input_reloads_p))
	  {
	    rtx tem = force_const_mem (mode, op);

	    change_p = true;
	    if (subreg != NULL_RTX)
	      tem = gen_rtx_SUBREG (mode, tem, SUBREG_BYTE (subreg));

	    *curr_id->operand_loc[i] = tem;
	    lra_update_dup (curr_id, i);
	    process_address (i, &before, &after);

	    /* If the alternative accepts constant pool refs directly
	       there will be no reload needed at all.  */
	    if (subreg != NULL_RTX)
	      continue;
	    /* Skip alternatives before the one requested.  */
	    constraint = (curr_static_id->operand_alternative
			  [goal_alt_number * n_operands + i].constraint);
	    for (;
		 (c = *constraint) && c != ',' && c != '#';
		 constraint += CONSTRAINT_LEN (c, constraint))
	      {
		enum constraint_num cn = lookup_constraint (constraint);
		if (insn_extra_memory_constraint (cn)
		    && satisfies_memory_constraint_p (tem, cn))
		  break;
	      }
	    if (c == '\0' || c == ',' || c == '#')
	      continue;

	    goal_alt_win[i] = true;
	  }
      }

  /* Generate the actual reload insns for each operand that needs
     them.  */
  for (i = 0; i < n_operands; i++)
    {
      int regno;
      bool optional_p = false;
      rtx old, new_reg;
      rtx op = *curr_id->operand_loc[i];

      if (goal_alt_win[i])
	{
	  if (goal_alt[i] == NO_REGS
	      && REG_P (op)
	      /* When we assign NO_REGS it means that we will not
		 assign a hard register to the scratch pseudo by
		 assignment pass and the scratch pseudo will be
		 spilled.  Spilled scratch pseudos are transformed
		 back to scratches at the LRA end.  */
	      && lra_former_scratch_operand_p (curr_insn, i))
	    {
	      int regno = REGNO (op);
	      lra_change_class (regno, NO_REGS, " Change to", true);
	      if (lra_get_regno_hard_regno (regno) >= 0)
		/* We don't have to mark all insn affected by the
		   spilled pseudo as there is only one such insn, the
		   current one.  */
		reg_renumber[regno] = -1;
	    }
	  /* We can do an optional reload.  If the pseudo got a hard
	     reg, we might improve the code through inheritance.  If
	     it does not get a hard register we coalesce memory/memory
	     moves later.  Ignore move insns to avoid cycling.  */
	  if (! lra_simple_p
	      && lra_undo_inheritance_iter < LRA_MAX_INHERITANCE_PASSES
	      && goal_alt[i] != NO_REGS && REG_P (op)
	      && (regno = REGNO (op)) >= FIRST_PSEUDO_REGISTER
	      && regno < new_regno_start
	      && ! lra_former_scratch_p (regno)
	      && reg_renumber[regno] < 0
	      && (curr_insn_set == NULL_RTX
		  || !((REG_P (SET_SRC (curr_insn_set))
			|| MEM_P (SET_SRC (curr_insn_set))
			|| GET_CODE (SET_SRC (curr_insn_set)) == SUBREG)
		       && (REG_P (SET_DEST (curr_insn_set))
			   || MEM_P (SET_DEST (curr_insn_set))
			   || GET_CODE (SET_DEST (curr_insn_set)) == SUBREG))))
	    optional_p = true;
	  else
	    continue;
	}

      /* Operands that match previous ones have already been handled.  */
      if (goal_alt_matches[i] >= 0)
	continue;

      /* We should not have an operand with a non-offsettable address
	 appearing where an offsettable address will do.  It also may
	 be a case when the address should be special in other words
	 not a general one (e.g. it needs no index reg).  */
      if (goal_alt_matched[i][0] == -1 && goal_alt_offmemok[i] && MEM_P (op))
	{
	  enum reg_class rclass;
	  rtx *loc = &XEXP (op, 0);
	  enum rtx_code code = GET_CODE (*loc);

	  push_to_sequence (before);
	  rclass = base_reg_class (GET_MODE (op), MEM_ADDR_SPACE (op),
				   MEM, SCRATCH);
	  if (GET_RTX_CLASS (code) == RTX_AUTOINC)
	    new_reg = emit_inc (rclass, *loc, *loc,
				/* This value does not matter for MODIFY.  */
				GET_MODE_SIZE (GET_MODE (op)));
	  else if (get_reload_reg (OP_IN, Pmode, *loc, rclass, FALSE,
				   "offsetable address", &new_reg))
	    lra_emit_move (new_reg, *loc);
	  before = get_insns ();
	  end_sequence ();
	  *loc = new_reg;
	  lra_update_dup (curr_id, i);
	}
      else if (goal_alt_matched[i][0] == -1)
	{
	  enum machine_mode mode;
	  rtx reg, *loc;
	  int hard_regno, byte;
	  enum op_type type = curr_static_id->operand[i].type;

	  loc = curr_id->operand_loc[i];
	  mode = curr_operand_mode[i];
	  if (GET_CODE (*loc) == SUBREG)
	    {
	      reg = SUBREG_REG (*loc);
	      byte = SUBREG_BYTE (*loc);
	      if (REG_P (reg)
		  /* Strict_low_part requires reload the register not
		     the sub-register.  */
		  && (curr_static_id->operand[i].strict_low
		      || (GET_MODE_SIZE (mode)
			  <= GET_MODE_SIZE (GET_MODE (reg))
			  && (hard_regno
			      = get_try_hard_regno (REGNO (reg))) >= 0
			  && (simplify_subreg_regno
			      (hard_regno,
			       GET_MODE (reg), byte, mode) < 0)
			  && (goal_alt[i] == NO_REGS
			      || (simplify_subreg_regno
				  (ira_class_hard_regs[goal_alt[i]][0],
				   GET_MODE (reg), byte, mode) >= 0)))))
		{
		  /* Reload the whole inner register instead of the
		     subreg.  */
		  loc = &SUBREG_REG (*loc);
		  mode = GET_MODE (*loc);
		}
	    }
	  old = *loc;
	  if (get_reload_reg (type, mode, old, goal_alt[i],
			      loc != curr_id->operand_loc[i], "", &new_reg)
	      && type != OP_OUT)
	    {
	      /* Input (or inout) reload: copy the old value in before
		 the insn.  */
	      push_to_sequence (before);
	      lra_emit_move (new_reg, old);
	      before = get_insns ();
	      end_sequence ();
	    }
	  *loc = new_reg;
	  if (type != OP_IN
	      && find_reg_note (curr_insn, REG_UNUSED, old) == NULL_RTX)
	    {
	      /* Output (or inout) reload: copy the result back out
		 after the insn.  */
	      start_sequence ();
	      lra_emit_move (type == OP_INOUT ? copy_rtx (old) : old, new_reg);
	      emit_insn (after);
	      after = get_insns ();
	      end_sequence ();
	      *loc = new_reg;
	    }
	  for (j = 0; j < goal_alt_dont_inherit_ops_num; j++)
	    if (goal_alt_dont_inherit_ops[j] == i)
	      {
		lra_set_regno_unique_value (REGNO (new_reg));
		break;
	      }
	  lra_update_dup (curr_id, i);
	}
      else if (curr_static_id->operand[i].type == OP_IN
	       && (curr_static_id->operand[goal_alt_matched[i][0]].type
		   == OP_OUT))
	{
	  /* generate reloads for input and matched outputs.  */
	  match_inputs[0] = i;
	  match_inputs[1] = -1;
	  match_reload (goal_alt_matched[i][0], match_inputs,
			goal_alt[i], &before, &after);
	}
      else if (curr_static_id->operand[i].type == OP_OUT
	       && (curr_static_id->operand[goal_alt_matched[i][0]].type
		   == OP_IN))
	/* Generate reloads for output and matched inputs.  */
	match_reload (i, goal_alt_matched[i], goal_alt[i], &before, &after);
      else if (curr_static_id->operand[i].type == OP_IN
	       && (curr_static_id->operand[goal_alt_matched[i][0]].type
		   == OP_IN))
	{
	  /* Generate reloads for matched inputs.  */
	  match_inputs[0] = i;
	  for (j = 0; (k = goal_alt_matched[i][j]) >= 0; j++)
	    match_inputs[j + 1] = k;
	  match_inputs[j + 1] = -1;
	  match_reload (-1, match_inputs, goal_alt[i], &before, &after);
	}
      else
	/* We must generate code in any case when function
	   process_alt_operands decides that it is possible.  */
	gcc_unreachable ();
      if (optional_p)
	{
	  lra_assert (REG_P (op));
	  regno = REGNO (op);
	  op = *curr_id->operand_loc[i]; /* Substitution.  */
	  if (GET_CODE (op) == SUBREG)
	    op = SUBREG_REG (op);
	  gcc_assert (REG_P (op) && (int) REGNO (op) >= new_regno_start);
	  bitmap_set_bit (&lra_optional_reload_pseudos, REGNO (op));
	  lra_reg_info[REGNO (op)].restore_regno = regno;
	  if (lra_dump_file != NULL)
	    fprintf (lra_dump_file,
		     " Making reload reg %d for reg %d optional\n",
		     REGNO (op), regno);
	}
    }
  if (before != NULL_RTX || after != NULL_RTX
      || max_regno_before != max_reg_num ())
    change_p = true;
  if (change_p)
    {
      lra_update_operator_dups (curr_id);
      /* Something changes -- process the insn.  */
      lra_update_insn_regno_info (curr_insn);
    }
  lra_process_new_insns (curr_insn, before, after, "Inserting insn reload");
  return change_p;
}
3736 /* Return true if X is in LIST. */
3737 static bool
3738 in_list_p (rtx x, rtx list)
3740 for (; list != NULL_RTX; list = XEXP (list, 1))
3741 if (XEXP (list, 0) == x)
3742 return true;
3743 return false;
/* Return true if X contains an allocatable hard register (if
   HARD_REG_P) or a (spilled if SPILLED_P) pseudo.  The rtx is walked
   recursively; SPILLED_P is only meaningful when HARD_REG_P is
   false.  */
static bool
contains_reg_p (rtx x, bool hard_reg_p, bool spilled_p)
{
  int i, j;
  const char *fmt;
  enum rtx_code code;

  code = GET_CODE (x);
  if (REG_P (x))
    {
      int regno = REGNO (x);
      HARD_REG_SET alloc_regs;

      if (hard_reg_p)
	{
	  /* A pseudo counts through the hard register currently
	     assigned to it (if any).  */
	  if (regno >= FIRST_PSEUDO_REGISTER)
	    regno = lra_get_regno_hard_regno (regno);
	  if (regno < 0)
	    return false;
	  COMPL_HARD_REG_SET (alloc_regs, lra_no_alloc_regs);
	  return overlaps_hard_reg_set_p (alloc_regs, GET_MODE (x), regno);
	}
      else
	{
	  if (regno < FIRST_PSEUDO_REGISTER)
	    return false;
	  if (! spilled_p)
	    return true;
	  /* Spilled means the pseudo has no hard register.  */
	  return lra_get_regno_hard_regno (regno) < 0;
	}
    }
  /* Recurse into all sub-expressions.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  if (contains_reg_p (XEXP (x, i), hard_reg_p, spilled_p))
	    return true;
	}
      else if (fmt[i] == 'E')
	{
	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    if (contains_reg_p (XVECEXP (x, i, j), hard_reg_p, spilled_p))
	      return true;
	}
    }
  return false;
}
/* Process all regs in location *LOC and change them on equivalent
   substitution.  Return true if any change was done.  */
static bool
loc_equivalence_change_p (rtx *loc)
{
  rtx subst, reg, x = *loc;
  bool result = false;
  enum rtx_code code = GET_CODE (x);
  const char *fmt;
  int i, j;

  if (code == SUBREG)
    {
      reg = SUBREG_REG (x);
      if ((subst = get_equiv_with_elimination (reg, curr_insn)) != reg
	  && GET_MODE (subst) == VOIDmode)
	{
	  /* We cannot reload debug location.  Simplify subreg here
	     while we know the inner mode.  */
	  *loc = simplify_gen_subreg (GET_MODE (x), subst,
				      GET_MODE (reg), SUBREG_BYTE (x));
	  return true;
	}
    }
  if (code == REG && (subst = get_equiv_with_elimination (x, curr_insn)) != x)
    {
      *loc = subst;
      return true;
    }

  /* Scan all the operand sub-expressions.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	result = loc_equivalence_change_p (&XEXP (x, i)) || result;
      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  result
	    = loc_equivalence_change_p (&XVECEXP (x, i, j)) || result;
    }
  return result;
}
3841 /* Similar to loc_equivalence_change_p, but for use as
3842 simplify_replace_fn_rtx callback. DATA is insn for which the
3843 elimination is done. If it null we don't do the elimination. */
3844 static rtx
3845 loc_equivalence_callback (rtx loc, const_rtx, void *data)
3847 if (!REG_P (loc))
3848 return NULL_RTX;
3850 rtx subst = (data == NULL
3851 ? get_equiv (loc) : get_equiv_with_elimination (loc, (rtx) data));
3852 if (subst != loc)
3853 return subst;
3855 return NULL_RTX;
/* Maximum number of generated reload insns per insn.  It is for
   preventing this pass cycling in a bug case.  */
#define MAX_RELOAD_INSNS_NUMBER LRA_MAX_INSN_RELOADS

/* The current iteration number of this LRA pass.  */
int lra_constraint_iter;

/* The current iteration number of this LRA pass after the last spill
   pass.  */
int lra_constraint_iter_after_spill;

/* True if we substituted an equiv which needs checking register
   allocation correctness because the equivalent value contains
   allocatable hard registers, or when we restore a multi-register
   pseudo.  */
bool lra_risky_transformations_p;
3875 /* Return true if REGNO is referenced in more than one block. */
3876 static bool
3877 multi_block_pseudo_p (int regno)
3879 basic_block bb = NULL;
3880 unsigned int uid;
3881 bitmap_iterator bi;
3883 if (regno < FIRST_PSEUDO_REGISTER)
3884 return false;
3886 EXECUTE_IF_SET_IN_BITMAP (&lra_reg_info[regno].insn_bitmap, 0, uid, bi)
3887 if (bb == NULL)
3888 bb = BLOCK_FOR_INSN (lra_insn_recog_data[uid]->insn);
3889 else if (BLOCK_FOR_INSN (lra_insn_recog_data[uid]->insn) != bb)
3890 return true;
3891 return false;
3894 /* Return true if LIST contains a deleted insn. */
3895 static bool
3896 contains_deleted_insn_p (rtx list)
3898 for (; list != NULL_RTX; list = XEXP (list, 1))
3899 if (NOTE_P (XEXP (list, 0))
3900 && NOTE_KIND (XEXP (list, 0)) == NOTE_INSN_DELETED)
3901 return true;
3902 return false;
3905 /* Return true if X contains a pseudo dying in INSN. */
3906 static bool
3907 dead_pseudo_p (rtx x, rtx insn)
3909 int i, j;
3910 const char *fmt;
3911 enum rtx_code code;
3913 if (REG_P (x))
3914 return (insn != NULL_RTX
3915 && find_regno_note (insn, REG_DEAD, REGNO (x)) != NULL_RTX);
3916 code = GET_CODE (x);
3917 fmt = GET_RTX_FORMAT (code);
3918 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3920 if (fmt[i] == 'e')
3922 if (dead_pseudo_p (XEXP (x, i), insn))
3923 return true;
3925 else if (fmt[i] == 'E')
3927 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3928 if (dead_pseudo_p (XVECEXP (x, i, j), insn))
3929 return true;
3932 return false;
3935 /* Return true if INSN contains a dying pseudo in INSN right hand
3936 side. */
3937 static bool
3938 insn_rhs_dead_pseudo_p (rtx insn)
3940 rtx set = single_set (insn);
3942 gcc_assert (set != NULL);
3943 return dead_pseudo_p (SET_SRC (set), insn);
3946 /* Return true if any init insn of REGNO contains a dying pseudo in
3947 insn right hand side. */
3948 static bool
3949 init_insn_rhs_dead_pseudo_p (int regno)
3951 rtx insns = ira_reg_equiv[regno].init_insns;
3953 if (insns == NULL)
3954 return false;
3955 if (INSN_P (insns))
3956 return insn_rhs_dead_pseudo_p (insns);
3957 for (; insns != NULL_RTX; insns = XEXP (insns, 1))
3958 if (insn_rhs_dead_pseudo_p (XEXP (insns, 0)))
3959 return true;
3960 return false;
3963 /* Return TRUE if REGNO has a reverse equivalence. The equivalence is
3964 reverse only if we have one init insn with given REGNO as a
3965 source. */
3966 static bool
3967 reverse_equiv_p (int regno)
3969 rtx insns, set;
3971 if ((insns = ira_reg_equiv[regno].init_insns) == NULL_RTX)
3972 return false;
3973 if (! INSN_P (XEXP (insns, 0))
3974 || XEXP (insns, 1) != NULL_RTX)
3975 return false;
3976 if ((set = single_set (XEXP (insns, 0))) == NULL_RTX)
3977 return false;
3978 return REG_P (SET_SRC (set)) && (int) REGNO (SET_SRC (set)) == regno;
3981 /* Return TRUE if REGNO was reloaded in an equivalence init insn. We
3982 call this function only for non-reverse equivalence. */
3983 static bool
3984 contains_reloaded_insn_p (int regno)
3986 rtx set;
3987 rtx list = ira_reg_equiv[regno].init_insns;
3989 for (; list != NULL_RTX; list = XEXP (list, 1))
3990 if ((set = single_set (XEXP (list, 0))) == NULL_RTX
3991 || ! REG_P (SET_DEST (set))
3992 || (int) REGNO (SET_DEST (set)) != regno)
3993 return true;
3994 return false;
3997 /* Entry function of LRA constraint pass. Return true if the
3998 constraint pass did change the code. */
3999 bool
4000 lra_constraints (bool first_p)
4002 bool changed_p;
4003 int i, hard_regno, new_insns_num;
4004 unsigned int min_len, new_min_len, uid;
4005 rtx set, x, reg, dest_reg;
4006 basic_block last_bb;
4007 bitmap_head equiv_insn_bitmap;
4008 bitmap_iterator bi;
4010 lra_constraint_iter++;
4011 if (lra_dump_file != NULL)
4012 fprintf (lra_dump_file, "\n********** Local #%d: **********\n\n",
4013 lra_constraint_iter);
4014 lra_constraint_iter_after_spill++;
/* Guard against LRA cycling: a bounded number of constraint passes
   is permitted between spill sub-passes. */
4015 if (lra_constraint_iter_after_spill > LRA_MAX_CONSTRAINT_ITERATION_NUMBER)
4016 internal_error
4017 ("Maximum number of LRA constraint passes is achieved (%d)\n",
4018 LRA_MAX_CONSTRAINT_ITERATION_NUMBER);
4019 changed_p = false;
4020 lra_risky_transformations_p = false;
4021 new_insn_uid_start = get_max_uid ();
4022 new_regno_start = first_p ? lra_constraint_new_regno_start : max_reg_num ();
4023 /* Mark used hard regs for target stack size calculations. */
4024 for (i = FIRST_PSEUDO_REGISTER; i < new_regno_start; i++)
4025 if (lra_reg_info[i].nrefs != 0
4026 && (hard_regno = lra_get_regno_hard_regno (i)) >= 0)
4028 int j, nregs;
4030 nregs = hard_regno_nregs[hard_regno][lra_reg_info[i].biggest_mode];
4031 for (j = 0; j < nregs; j++)
4032 df_set_regs_ever_live (hard_regno + j, true);
4034 /* Do elimination before the equivalence processing as we can spill
4035 some pseudos during elimination. */
4036 lra_eliminate (false, first_p);
4037 bitmap_initialize (&equiv_insn_bitmap, &reg_obstack);
/* Decide which pseudo equivalences are still usable after earlier
   transformations and collect (in EQUIV_INSN_BITMAP) the insns that
   mention pseudos with an equivalence. */
4038 for (i = FIRST_PSEUDO_REGISTER; i < new_regno_start; i++)
4039 if (lra_reg_info[i].nrefs != 0)
4041 ira_reg_equiv[i].profitable_p = true;
4042 reg = regno_reg_rtx[i];
4043 if (lra_get_regno_hard_regno (i) < 0 && (x = get_equiv (reg)) != reg)
4045 bool pseudo_p = contains_reg_p (x, false, false);
4047 /* After RTL transformation, we can not guarantee that
4048 pseudo in the substitution was not reloaded which might
4049 make equivalence invalid. For example, in reverse
4050 equiv of p0
4052 p0 <- ...
4054 equiv_mem <- p0
4056 the memory address register was reloaded before the 2nd
4057 insn. */
4058 if ((! first_p && pseudo_p)
4059 /* We don't use DF for compilation speed sake. So it
4060 is problematic to update live info when we use an
4061 equivalence containing pseudos in more than one
4062 BB. */
4063 || (pseudo_p && multi_block_pseudo_p (i))
4064 /* If an init insn was deleted for some reason, cancel
4065 the equiv. We could update the equiv insns after
4066 transformations including an equiv insn deletion
4067 but it is not worthy as such cases are extremely
4068 rare. */
4069 || contains_deleted_insn_p (ira_reg_equiv[i].init_insns)
4070 /* If it is not a reverse equivalence, we check that a
4071 pseudo in rhs of the init insn is not dying in the
4072 insn. Otherwise, the live info at the beginning of
4073 the corresponding BB might be wrong after we
4074 removed the insn. When the equiv can be a
4075 constant, the right hand side of the init insn can
4076 be a pseudo. */
4077 || (! reverse_equiv_p (i)
4078 && (init_insn_rhs_dead_pseudo_p (i)
4079 /* If we reloaded the pseudo in an equivalence
4080 init insn, we can not remove the equiv init
4081 insns and the init insns might write into
4082 const memory in this case. */
4083 || contains_reloaded_insn_p (i)))
4084 /* Prevent access beyond equivalent memory for
4085 paradoxical subregs. */
4086 || (MEM_P (x)
4087 && (GET_MODE_SIZE (lra_reg_info[i].biggest_mode)
4088 > GET_MODE_SIZE (GET_MODE (x)))))
4089 ira_reg_equiv[i].defined_p = false;
4090 if (contains_reg_p (x, false, true))
4091 ira_reg_equiv[i].profitable_p = false;
4092 if (get_equiv (reg) != reg)
4093 bitmap_ior_into (&equiv_insn_bitmap, &lra_reg_info[i].insn_bitmap);
4096 for (i = FIRST_PSEUDO_REGISTER; i < new_regno_start; i++)
4097 update_equiv (i);
4098 /* We should add all insns containing pseudos which should be
4099 substituted by their equivalences. */
4100 EXECUTE_IF_SET_IN_BITMAP (&equiv_insn_bitmap, 0, uid, bi)
4101 lra_push_insn_by_uid (uid);
4102 min_len = lra_insn_stack_length ();
4103 new_insns_num = 0;
4104 last_bb = NULL;
4105 changed_p = false;
/* Main driver: pop insns from the LRA insn stack and process each
   one until the stack is empty. NEW_INSNS_NUM counts reload insns
   generated for the current original insn; it is reset whenever the
   stack shrinks below its previous minimum (i.e. when we move on to
   an older insn), and bounds runaway reload generation. */
4106 while ((new_min_len = lra_insn_stack_length ()) != 0)
4108 curr_insn = lra_pop_insn ();
4109 --new_min_len;
4110 curr_bb = BLOCK_FOR_INSN (curr_insn);
4111 if (curr_bb != last_bb)
4113 last_bb = curr_bb;
4114 bb_reload_num = lra_curr_reload_num;
4116 if (min_len > new_min_len)
4118 min_len = new_min_len;
4119 new_insns_num = 0;
4121 if (new_insns_num > MAX_RELOAD_INSNS_NUMBER)
4122 internal_error
4123 ("Max. number of generated reload insns per insn is achieved (%d)\n",
4124 MAX_RELOAD_INSNS_NUMBER);
4125 new_insns_num++;
4126 if (DEBUG_INSN_P (curr_insn))
4128 /* We need to check equivalence in debug insn and change
4129 pseudo to the equivalent value if necessary. */
4130 curr_id = lra_get_insn_recog_data (curr_insn);
4131 if (bitmap_bit_p (&equiv_insn_bitmap, INSN_UID (curr_insn)))
4133 rtx old = *curr_id->operand_loc[0];
4134 *curr_id->operand_loc[0]
4135 = simplify_replace_fn_rtx (old, NULL_RTX,
4136 loc_equivalence_callback, curr_insn);
4137 if (old != *curr_id->operand_loc[0])
4139 lra_update_insn_regno_info (curr_insn);
4140 changed_p = true;
4144 else if (INSN_P (curr_insn))
4146 if ((set = single_set (curr_insn)) != NULL_RTX)
4148 dest_reg = SET_DEST (set);
4149 /* The equivalence pseudo could be set up as SUBREG in a
4150 case when it is a call restore insn in a mode
4151 different from the pseudo mode. */
4152 if (GET_CODE (dest_reg) == SUBREG)
4153 dest_reg = SUBREG_REG (dest_reg);
4154 if ((REG_P (dest_reg)
4155 && (x = get_equiv (dest_reg)) != dest_reg
4156 /* Remove insns which set up a pseudo whose value
4157 can not be changed. Such insns might be not in
4158 init_insns because we don't update equiv data
4159 during insn transformations.
4161 As an example, let suppose that a pseudo got
4162 hard register and on the 1st pass was not
4163 changed to equivalent constant. We generate an
4164 additional insn setting up the pseudo because of
4165 secondary memory movement. Then the pseudo is
4166 spilled and we use the equiv constant. In this
4167 case we should remove the additional insn and
4168 this insn is not init_insns list. */
4169 && (! MEM_P (x) || MEM_READONLY_P (x)
4170 /* Check that this is actually an insn setting
4171 up the equivalence. */
4172 || in_list_p (curr_insn,
4173 ira_reg_equiv
4174 [REGNO (dest_reg)].init_insns)))
4175 || (((x = get_equiv (SET_SRC (set))) != SET_SRC (set))
4176 && in_list_p (curr_insn,
4177 ira_reg_equiv
4178 [REGNO (SET_SRC (set))].init_insns)))
4180 /* This is equiv init insn of pseudo which did not get a
4181 hard register -- remove the insn. */
4182 if (lra_dump_file != NULL)
4184 fprintf (lra_dump_file,
4185 " Removing equiv init insn %i (freq=%d)\n",
4186 INSN_UID (curr_insn),
4187 REG_FREQ_FROM_BB (BLOCK_FOR_INSN (curr_insn)));
4188 dump_insn_slim (lra_dump_file, curr_insn);
4190 if (contains_reg_p (x, true, false))
4191 lra_risky_transformations_p = true;
4192 lra_set_insn_deleted (curr_insn);
4193 continue;
4196 curr_id = lra_get_insn_recog_data (curr_insn);
4197 curr_static_id = curr_id->insn_static_data;
4198 init_curr_insn_input_reloads ();
4199 init_curr_operand_mode ();
4200 if (curr_insn_transform ())
4201 changed_p = true;
4202 /* Check non-transformed insns too for equiv change as USE
4203 or CLOBBER don't need reloads but can contain pseudos
4204 being changed on their equivalences. */
4205 else if (bitmap_bit_p (&equiv_insn_bitmap, INSN_UID (curr_insn))
4206 && loc_equivalence_change_p (&PATTERN (curr_insn)))
4208 lra_update_insn_regno_info (curr_insn);
4209 changed_p = true;
4213 bitmap_clear (&equiv_insn_bitmap);
4214 /* If we used a new hard regno, changed_p should be true because the
4215 hard reg is assigned to a new pseudo. */
4216 #ifdef ENABLE_CHECKING
4217 if (! changed_p)
4219 for (i = FIRST_PSEUDO_REGISTER; i < new_regno_start; i++)
4220 if (lra_reg_info[i].nrefs != 0
4221 && (hard_regno = lra_get_regno_hard_regno (i)) >= 0)
4223 int j, nregs = hard_regno_nregs[hard_regno][PSEUDO_REGNO_MODE (i)];
4225 for (j = 0; j < nregs; j++)
4226 lra_assert (df_regs_ever_live_p (hard_regno + j));
4229 #endif
4230 return changed_p;
4233 /* Initiate the LRA constraint pass. It is done once per
4234 function. */
4235 void
4236 lra_constraints_init (void)
4240 /* Finalize the LRA constraint pass. It is done once per
4241 function. */
4242 void
4243 lra_constraints_finish (void)
4249 /* This page contains code to do inheritance/split
4250 transformations. */
4252 /* Number of reloads passed so far in current EBB. */
4253 static int reloads_num;
4255 /* Number of calls passed so far in current EBB. */
4256 static int calls_num;
4258 /* Current reload pseudo check for validity of elements in
4259 USAGE_INSNS. */
4260 static int curr_usage_insns_check;
4262 /* Info about last usage of registers in EBB to do inheritance/split
4263 transformation. Inheritance transformation is done from a spilled
4264 pseudo and split transformations from a hard register or a pseudo
4265 assigned to a hard register. */
4266 struct usage_insns
4268 /* If the value is equal to CURR_USAGE_INSNS_CHECK, then the member
4269 value INSNS is valid. The insns is chain of optional debug insns
4270 and a finishing non-debug insn using the corresponding reg. The
4271 value is also used to mark the registers which are set up in the
4272 current insn. The negated insn uid is used for this. */
4273 int check;
4274 /* Value of global reloads_num at the last insn in INSNS. */
4275 int reloads_num;
4276 /* Value of global calls_num at the last insn in INSNS. */
4277 int calls_num;
4278 /* It can be true only for splitting. And it means that the restore
4279 insn should be put after insn given by the following member. */
4280 bool after_p;
4281 /* Next insns in the current EBB which use the original reg and the
4282 original reg value is not changed between the current insn and
4283 the next insns. In other words, e.g. for inheritance, if we need
4284 to use the original reg value again in the next insns we can try
4285 to use the value in a hard register from a reload insn of the
4286 current insn. */
4287 rtx insns;
4290 /* Map: regno -> corresponding pseudo usage insns. */
4291 static struct usage_insns *usage_insns;
4293 static void
4294 setup_next_usage_insn (int regno, rtx insn, int reloads_num, bool after_p)
4296 usage_insns[regno].check = curr_usage_insns_check;
4297 usage_insns[regno].insns = insn;
4298 usage_insns[regno].reloads_num = reloads_num;
4299 usage_insns[regno].calls_num = calls_num;
4300 usage_insns[regno].after_p = after_p;
4303 /* The function is used to form list REGNO usages which consists of
4304 optional debug insns finished by a non-debug insn using REGNO.
4305 RELOADS_NUM is current number of reload insns processed so far. */
4306 static void
4307 add_next_usage_insn (int regno, rtx insn, int reloads_num)
4309 rtx next_usage_insns;
/* A debug insn can only be prepended to an already-valid, non-empty
   usage chain; otherwise it would separate the chain from its
   finishing non-debug insn. */
4311 if (usage_insns[regno].check == curr_usage_insns_check
4312 && (next_usage_insns = usage_insns[regno].insns) != NULL_RTX
4313 && DEBUG_INSN_P (insn))
4315 /* Check that we did not add the debug insn yet. */
4316 if (next_usage_insns != insn
4317 && (GET_CODE (next_usage_insns) != INSN_LIST
4318 || XEXP (next_usage_insns, 0) != insn))
4319 usage_insns[regno].insns = gen_rtx_INSN_LIST (VOIDmode, insn,
4320 next_usage_insns);
/* A non-debug insn starts a fresh chain. */
4322 else if (NONDEBUG_INSN_P (insn))
4323 setup_next_usage_insn (regno, insn, reloads_num, false);
/* A debug insn with no valid chain to attach to: invalidate the
   entry so later code does not build a chain lacking a real use. */
4324 else
4325 usage_insns[regno].check = 0;
4328 /* Replace all references to register OLD_REGNO in *LOC with pseudo
4329 register NEW_REG. Return true if any change was made. */
4330 static bool
4331 substitute_pseudo (rtx *loc, int old_regno, rtx new_reg)
4333 rtx x = *loc;
4334 bool result = false;
4335 enum rtx_code code;
4336 const char *fmt;
4337 int i, j;
4339 if (x == NULL_RTX)
4340 return false;
4342 code = GET_CODE (x);
4343 if (code == REG && (int) REGNO (x) == old_regno)
4345 enum machine_mode mode = GET_MODE (*loc);
4346 enum machine_mode inner_mode = GET_MODE (new_reg);
/* If the reference's mode differs from NEW_REG's mode, wrap NEW_REG
   in a subreg so the replacement keeps the original mode. */
4348 if (mode != inner_mode)
/* Use a raw zero-offset SUBREG for paradoxical cases or non-scalar-int
   inner modes; otherwise take the low part, which accounts for
   endianness when computing the subreg offset. */
4350 if (GET_MODE_SIZE (mode) >= GET_MODE_SIZE (inner_mode)
4351 || ! SCALAR_INT_MODE_P (inner_mode))
4352 new_reg = gen_rtx_SUBREG (mode, new_reg, 0);
4353 else
4354 new_reg = gen_lowpart_SUBREG (mode, new_reg);
4356 *loc = new_reg;
4357 return true;
4360 /* Scan all the operand sub-expressions. */
4361 fmt = GET_RTX_FORMAT (code);
4362 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4364 if (fmt[i] == 'e')
4366 if (substitute_pseudo (&XEXP (x, i), old_regno, new_reg))
4367 result = true;
4369 else if (fmt[i] == 'E')
4371 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4372 if (substitute_pseudo (&XVECEXP (x, i, j), old_regno, new_reg))
4373 result = true;
4376 return result;
4379 /* Return first non-debug insn in list USAGE_INSNS. */
4380 static rtx
4381 skip_usage_debug_insns (rtx usage_insns)
4383 rtx insn;
4385 /* Skip debug insns. */
4386 for (insn = usage_insns;
4387 insn != NULL_RTX && GET_CODE (insn) == INSN_LIST;
4388 insn = XEXP (insn, 1))
4390 return insn;
4393 /* Return true if we need secondary memory moves for insn in
4394 USAGE_INSNS after inserting inherited pseudo of class INHER_CL
4395 into the insn. */
4396 static bool
4397 check_secondary_memory_needed_p (enum reg_class inher_cl ATTRIBUTE_UNUSED,
4398 rtx usage_insns ATTRIBUTE_UNUSED)
4400 #ifndef SECONDARY_MEMORY_NEEDED
4401 return false;
4402 #else
4403 rtx insn, set, dest;
4404 enum reg_class cl;
4406 if (inher_cl == ALL_REGS
4407 || (insn = skip_usage_debug_insns (usage_insns)) == NULL_RTX)
4408 return false;
4409 lra_assert (INSN_P (insn));
4410 if ((set = single_set (insn)) == NULL_RTX || ! REG_P (SET_DEST (set)))
4411 return false;
4412 dest = SET_DEST (set);
/* NOTE(review): this check is redundant -- REG_P (SET_DEST (set)) was
   already verified just above and DEST is the same rtx. */
4413 if (! REG_P (dest))
4414 return false;
4415 lra_assert (inher_cl != NO_REGS);
4416 cl = get_reg_class (REGNO (dest));
4417 return (cl != NO_REGS && cl != ALL_REGS
4418 && SECONDARY_MEMORY_NEEDED (inher_cl, cl, GET_MODE (dest)));
4419 #endif
4422 /* Registers involved in inheritance/split in the current EBB
4423 (inheritance/split pseudos and original registers). */
4424 static bitmap_head check_only_regs;
4426 /* Do inheritance transformations for insn INSN, which defines (if
4427 DEF_P) or uses ORIGINAL_REGNO. NEXT_USAGE_INSNS specifies which
4428 instruction in the EBB next uses ORIGINAL_REGNO; it has the same
4429 form as the "insns" field of usage_insns. Return true if we
4430 succeed in such transformation.
4432 The transformations look like:
4434 p <- ... i <- ...
4435 ... p <- i (new insn)
4436 ... =>
4437 <- ... p ... <- ... i ...
4439 ... i <- p (new insn)
4440 <- ... p ... <- ... i ...
4441 ... =>
4442 <- ... p ... <- ... i ...
4443 where p is a spilled original pseudo and i is a new inheritance pseudo.
4446 The inheritance pseudo has the smallest class of two classes CL and
4447 class of ORIGINAL REGNO. */
4448 static bool
4449 inherit_reload_reg (bool def_p, int original_regno,
4450 enum reg_class cl, rtx insn, rtx next_usage_insns)
/* Inheritance only pays off when optimizing for speed; when optimizing
   for size the extra move insns are not worth it. */
4452 if (optimize_function_for_size_p (cfun))
4453 return false;
4455 enum reg_class rclass = lra_get_allocno_class (original_regno);
4456 rtx original_reg = regno_reg_rtx[original_regno];
4457 rtx new_reg, new_insns, usage_insn;
4459 lra_assert (! usage_insns[original_regno].after_p);
4460 if (lra_dump_file != NULL)
4461 fprintf (lra_dump_file,
4462 " <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<\n");
4463 if (! ira_reg_classes_intersect_p[cl][rclass])
4465 if (lra_dump_file != NULL)
4467 fprintf (lra_dump_file,
4468 " Rejecting inheritance for %d "
4469 "because of disjoint classes %s and %s\n",
4470 original_regno, reg_class_names[cl],
4471 reg_class_names[rclass]);
4472 fprintf (lra_dump_file,
4473 " >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>\n");
4475 return false;
4477 if ((ira_class_subset_p[cl][rclass] && cl != rclass)
4478 /* We don't use a subset of two classes because it can be
4479 NO_REGS. This transformation is still profitable in most
4480 cases even if the classes are not intersected as register
4481 move is probably cheaper than a memory load. */
4482 || ira_class_hard_regs_num[cl] < ira_class_hard_regs_num[rclass])
4484 if (lra_dump_file != NULL)
4485 fprintf (lra_dump_file, " Use smallest class of %s and %s\n",
4486 reg_class_names[cl], reg_class_names[rclass]);
4488 rclass = cl;
4490 if (check_secondary_memory_needed_p (rclass, next_usage_insns))
4492 /* Reject inheritance resulting in secondary memory moves.
4493 Otherwise, there is a danger in LRA cycling. Also such
4494 transformation will be unprofitable. */
4495 if (lra_dump_file != NULL)
4497 rtx insn = skip_usage_debug_insns (next_usage_insns);
4498 rtx set = single_set (insn);
4500 lra_assert (set != NULL_RTX);
4502 rtx dest = SET_DEST (set);
4504 lra_assert (REG_P (dest));
4505 fprintf (lra_dump_file,
4506 " Rejecting inheritance for insn %d(%s)<-%d(%s) "
4507 "as secondary mem is needed\n",
4508 REGNO (dest), reg_class_names[get_reg_class (REGNO (dest))],
4509 original_regno, reg_class_names[rclass]);
4510 fprintf (lra_dump_file,
4511 " >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>\n");
4513 return false;
4515 new_reg = lra_create_new_reg (GET_MODE (original_reg), original_reg,
4516 rclass, "inheritance");
/* Emit the connecting move (p <- i for a def, i <- p for a use) and
   reject the transformation if it does not fit in a single insn. */
4517 start_sequence ();
4518 if (def_p)
4519 lra_emit_move (original_reg, new_reg);
4520 else
4521 lra_emit_move (new_reg, original_reg);
4522 new_insns = get_insns ();
4523 end_sequence ();
4524 if (NEXT_INSN (new_insns) != NULL_RTX)
4526 if (lra_dump_file != NULL)
4528 fprintf (lra_dump_file,
4529 " Rejecting inheritance %d->%d "
4530 "as it results in 2 or more insns:\n",
4531 original_regno, REGNO (new_reg));
4532 dump_rtl_slim (lra_dump_file, new_insns, NULL_RTX, -1, 0);
4533 fprintf (lra_dump_file,
4534 " >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>\n");
4536 return false;
4538 substitute_pseudo (&insn, original_regno, new_reg);
4539 lra_update_insn_regno_info (insn);
4540 if (! def_p)
4541 /* We now have a new usage insn for original regno. */
4542 setup_next_usage_insn (original_regno, new_insns, reloads_num, false);
4543 if (lra_dump_file != NULL)
4544 fprintf (lra_dump_file, " Original reg change %d->%d (bb%d):\n",
4545 original_regno, REGNO (new_reg), BLOCK_FOR_INSN (insn)->index);
4546 lra_reg_info[REGNO (new_reg)].restore_regno = original_regno;
4547 bitmap_set_bit (&check_only_regs, REGNO (new_reg));
4548 bitmap_set_bit (&check_only_regs, original_regno);
4549 bitmap_set_bit (&lra_inheritance_pseudos, REGNO (new_reg));
4550 if (def_p)
4551 lra_process_new_insns (insn, NULL_RTX, new_insns,
4552 "Add original<-inheritance");
4553 else
4554 lra_process_new_insns (insn, new_insns, NULL_RTX,
4555 "Add inheritance<-original");
/* Rewrite the rest of the usage chain (debug insns plus the finishing
   non-debug insn) to refer to the inheritance pseudo. */
4556 while (next_usage_insns != NULL_RTX)
4558 if (GET_CODE (next_usage_insns) != INSN_LIST)
4560 usage_insn = next_usage_insns;
4561 lra_assert (NONDEBUG_INSN_P (usage_insn));
4562 next_usage_insns = NULL;
4564 else
4566 usage_insn = XEXP (next_usage_insns, 0);
4567 lra_assert (DEBUG_INSN_P (usage_insn));
4568 next_usage_insns = XEXP (next_usage_insns, 1);
4570 substitute_pseudo (&usage_insn, original_regno, new_reg);
4571 lra_update_insn_regno_info (usage_insn);
4572 if (lra_dump_file != NULL)
4574 fprintf (lra_dump_file,
4575 " Inheritance reuse change %d->%d (bb%d):\n",
4576 original_regno, REGNO (new_reg),
4577 BLOCK_FOR_INSN (usage_insn)->index);
4578 dump_insn_slim (lra_dump_file, usage_insn);
4581 if (lra_dump_file != NULL)
4582 fprintf (lra_dump_file,
4583 " >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>\n");
4584 return true;
4587 /* Return true if we need a caller save/restore for pseudo REGNO which
4588 was assigned to a hard register. */
4589 static inline bool
4590 need_for_call_save_p (int regno)
4592 lra_assert (regno >= FIRST_PSEUDO_REGISTER && reg_renumber[regno] >= 0);
/* A save/restore is needed only if a call occurred since the last
   recorded usage of REGNO and its hard register can be (fully or
   partially) clobbered by calls. With -fuse-caller-save, the set of
   registers actually clobbered by the called functions is used
   instead of the generic call_used_reg_set when it is known. */
4593 return (usage_insns[regno].calls_num < calls_num
4594 && (overlaps_hard_reg_set_p
4595 ((flag_use_caller_save &&
4596 ! hard_reg_set_empty_p (lra_reg_info[regno].actual_call_used_reg_set))
4597 ? lra_reg_info[regno].actual_call_used_reg_set
4598 : call_used_reg_set,
4599 PSEUDO_REGNO_MODE (regno), reg_renumber[regno])
4600 || HARD_REGNO_CALL_PART_CLOBBERED (reg_renumber[regno],
4601 PSEUDO_REGNO_MODE (regno))));
4604 /* Global registers occurring in the current EBB. */
4605 static bitmap_head ebb_global_regs;
4607 /* Return true if we need a split for hard register REGNO or pseudo
4608 REGNO which was assigned to a hard register.
4609 POTENTIAL_RELOAD_HARD_REGS contains hard registers which might be
4610 used for reloads since the EBB end. It is an approximation of the
4611 used hard registers in the split range. The exact value would
4612 require expensive calculations. If we were aggressive with
4613 splitting because of the approximation, the split pseudo will save
4614 the same hard register assignment and will be removed in the undo
4615 pass. We still need the approximation because too aggressive
4616 splitting would result in too inaccurate cost calculation in the
4617 assignment pass because of too many generated moves which will be
4618 probably removed in the undo pass. */
4619 static inline bool
4620 need_for_split_p (HARD_REG_SET potential_reload_hard_regs, int regno)
4622 int hard_regno = regno < FIRST_PSEUDO_REGISTER ? regno : reg_renumber[regno];
4624 lra_assert (hard_regno >= 0);
4625 return ((TEST_HARD_REG_BIT (potential_reload_hard_regs, hard_regno)
4626 /* Don't split eliminable hard registers, otherwise we can
4627 split hard registers like hard frame pointer, which
4628 lives on BB start/end according to DF-infrastructure,
4629 when there is a pseudo assigned to the register and
4630 living in the same BB. */
4631 && (regno >= FIRST_PSEUDO_REGISTER
4632 || ! TEST_HARD_REG_BIT (eliminable_regset, hard_regno))
4633 && ! TEST_HARD_REG_BIT (lra_no_alloc_regs, hard_regno)
4634 /* Don't split call clobbered hard regs living through
4635 calls, otherwise we might have a check problem in the
4636 assign sub-pass as in the most cases (exception is a
4637 situation when lra_risky_transformations_p value is
4638 true) the assign pass assumes that all pseudos living
4639 through calls are assigned to call saved hard regs. */
4640 && (regno >= FIRST_PSEUDO_REGISTER
4641 || ! TEST_HARD_REG_BIT (call_used_reg_set, regno)
4642 || usage_insns[regno].calls_num == calls_num)
4643 /* We need at least 2 reloads to make pseudo splitting
4644 profitable. We should provide hard regno splitting in
4645 any case to solve 1st insn scheduling problem when
4646 moving hard register definition up might result in
4647 impossibility to find hard register for reload pseudo of
4648 small register class. */
4649 && (usage_insns[regno].reloads_num
4650 + (regno < FIRST_PSEUDO_REGISTER ? 0 : 3) < reloads_num)
4651 && (regno < FIRST_PSEUDO_REGISTER
4652 /* For short living pseudos, spilling + inheritance can
4653 be considered a substitution for splitting.
4654 Therefore we do not split local pseudos. It
4655 decreases also aggressiveness of splitting. The
4656 minimal number of references is chosen taking into
4657 account that for 2 references splitting has no sense
4658 as we can just spill the pseudo. */
4659 || (regno >= FIRST_PSEUDO_REGISTER
4660 && lra_reg_info[regno].nrefs > 3
4661 && bitmap_bit_p (&ebb_global_regs, regno))))
4662 || (regno >= FIRST_PSEUDO_REGISTER && need_for_call_save_p (regno)));
4665 /* Return class for the split pseudo created from original pseudo with
4666 ALLOCNO_CLASS and MODE which got a hard register HARD_REGNO. We
4667 choose subclass of ALLOCNO_CLASS which contains HARD_REGNO and
4668 results in no secondary memory movements. */
4669 static enum reg_class
4670 choose_split_class (enum reg_class allocno_class,
4671 int hard_regno ATTRIBUTE_UNUSED,
4672 enum machine_mode mode ATTRIBUTE_UNUSED)
4674 #ifndef SECONDARY_MEMORY_NEEDED
4675 return allocno_class;
4676 #else
4677 int i;
4678 enum reg_class cl, best_cl = NO_REGS;
4679 enum reg_class hard_reg_class ATTRIBUTE_UNUSED
4680 = REGNO_REG_CLASS (hard_regno);
/* The allocno class itself is acceptable when no secondary memory is
   needed for moves within it and it contains HARD_REGNO. */
4682 if (! SECONDARY_MEMORY_NEEDED (allocno_class, allocno_class, mode)
4683 && TEST_HARD_REG_BIT (reg_class_contents[allocno_class], hard_regno))
4684 return allocno_class;
/* Otherwise pick the largest subclass (by hard register count) that
   contains HARD_REGNO and needs no secondary memory in either move
   direction relative to HARD_REGNO's class. NO_REGS is returned if
   no subclass qualifies, which makes the caller reject the split. */
4685 for (i = 0;
4686 (cl = reg_class_subclasses[allocno_class][i]) != LIM_REG_CLASSES;
4687 i++)
4688 if (! SECONDARY_MEMORY_NEEDED (cl, hard_reg_class, mode)
4689 && ! SECONDARY_MEMORY_NEEDED (hard_reg_class, cl, mode)
4690 && TEST_HARD_REG_BIT (reg_class_contents[cl], hard_regno)
4691 && (best_cl == NO_REGS
4692 || ira_class_hard_regs_num[best_cl] < ira_class_hard_regs_num[cl]))
4693 best_cl = cl;
4694 return best_cl;
4695 #endif
4698 /* Do split transformations for insn INSN, which defines or uses
4699 ORIGINAL_REGNO. NEXT_USAGE_INSNS specifies which instruction in
4700 the EBB next uses ORIGINAL_REGNO; it has the same form as the
4701 "insns" field of usage_insns.
4703 The transformations look like:
4705 p <- ... p <- ...
4706 ... s <- p (new insn -- save)
4707 ... =>
4708 ... p <- s (new insn -- restore)
4709 <- ... p ... <- ... p ...
4711 <- ... p ... <- ... p ...
4712 ... s <- p (new insn -- save)
4713 ... =>
4714 ... p <- s (new insn -- restore)
4715 <- ... p ... <- ... p ...
4717 where p is an original pseudo got a hard register or a hard
4718 register and s is a new split pseudo. The save is put before INSN
4719 if BEFORE_P is true. Return true if we succeed in such
4720 transformation. */
4721 static bool
4722 split_reg (bool before_p, int original_regno, rtx insn, rtx next_usage_insns)
4724 enum reg_class rclass;
4725 rtx original_reg;
4726 int hard_regno, nregs;
4727 rtx new_reg, save, restore, usage_insn;
4728 bool after_p;
4729 bool call_save_p;
4731 if (original_regno < FIRST_PSEUDO_REGISTER)
4733 rclass = ira_allocno_class_translate[REGNO_REG_CLASS (original_regno)];
4734 hard_regno = original_regno;
4735 call_save_p = false;
4736 nregs = 1;
4738 else
4740 hard_regno = reg_renumber[original_regno];
4741 nregs = hard_regno_nregs[hard_regno][PSEUDO_REGNO_MODE (original_regno)];
4742 rclass = lra_get_allocno_class (original_regno);
/* NOTE(review): this assignment is redundant -- ORIGINAL_REG is
   unconditionally assigned the same value right after this
   if/else. Harmless, but one of the two could be removed. */
4743 original_reg = regno_reg_rtx[original_regno];
4744 call_save_p = need_for_call_save_p (original_regno);
4746 original_reg = regno_reg_rtx[original_regno];
4747 lra_assert (hard_regno >= 0);
4748 if (lra_dump_file != NULL)
4749 fprintf (lra_dump_file,
4750 " ((((((((((((((((((((((((((((((((((((((((((((((((\n");
4751 if (call_save_p)
/* For a caller-save split, use the mode the target prefers for
   saving HARD_REGNO around calls (it may differ from the pseudo
   mode), and do not pin the save pseudo to a register class. */
4753 enum machine_mode mode = GET_MODE (original_reg);
4755 mode = HARD_REGNO_CALLER_SAVE_MODE (hard_regno,
4756 hard_regno_nregs[hard_regno][mode],
4757 mode);
4758 new_reg = lra_create_new_reg (mode, NULL_RTX, NO_REGS, "save");
4760 else
4762 rclass = choose_split_class (rclass, hard_regno,
4763 GET_MODE (original_reg));
4764 if (rclass == NO_REGS)
4766 if (lra_dump_file != NULL)
4768 fprintf (lra_dump_file,
4769 " Rejecting split of %d(%s): "
4770 "no good reg class for %d(%s)\n",
4771 original_regno,
4772 reg_class_names[lra_get_allocno_class (original_regno)],
4773 hard_regno,
4774 reg_class_names[REGNO_REG_CLASS (hard_regno)]);
4775 fprintf
4776 (lra_dump_file,
4777 " ))))))))))))))))))))))))))))))))))))))))))))))))\n");
4779 return false;
4781 new_reg = lra_create_new_reg (GET_MODE (original_reg), original_reg,
4782 rclass, "split");
4783 reg_renumber[REGNO (new_reg)] = hard_regno;
/* Both the save and the restore must each fit in a single insn;
   otherwise the split is rejected. */
4785 save = emit_spill_move (true, new_reg, original_reg);
4786 if (NEXT_INSN (save) != NULL_RTX)
4788 lra_assert (! call_save_p);
4789 if (lra_dump_file != NULL)
4791 fprintf
4792 (lra_dump_file,
4793 " Rejecting split %d->%d resulting in > 2 %s save insns:\n",
4794 original_regno, REGNO (new_reg), call_save_p ? "call" : "");
4795 dump_rtl_slim (lra_dump_file, save, NULL_RTX, -1, 0);
4796 fprintf (lra_dump_file,
4797 " ))))))))))))))))))))))))))))))))))))))))))))))))\n");
4799 return false;
4801 restore = emit_spill_move (false, new_reg, original_reg);
4802 if (NEXT_INSN (restore) != NULL_RTX)
4804 lra_assert (! call_save_p);
4805 if (lra_dump_file != NULL)
4807 fprintf (lra_dump_file,
4808 " Rejecting split %d->%d "
4809 "resulting in > 2 %s restore insns:\n",
4810 original_regno, REGNO (new_reg), call_save_p ? "call" : "");
4811 dump_rtl_slim (lra_dump_file, restore, NULL_RTX, -1, 0);
4812 fprintf (lra_dump_file,
4813 " ))))))))))))))))))))))))))))))))))))))))))))))))\n");
4815 return false;
4817 after_p = usage_insns[original_regno].after_p;
4818 lra_reg_info[REGNO (new_reg)].restore_regno = original_regno;
4819 bitmap_set_bit (&check_only_regs, REGNO (new_reg));
4820 bitmap_set_bit (&check_only_regs, original_regno);
4821 bitmap_set_bit (&lra_split_regs, REGNO (new_reg));
/* Rewrite the debug insns in the usage chain to use the split pseudo;
   the loop exits with USAGE_INSN set to the finishing non-debug insn
   (or note) before which/after which the restore is emitted. */
4822 for (;;)
4824 if (GET_CODE (next_usage_insns) != INSN_LIST)
4826 usage_insn = next_usage_insns;
4827 break;
4829 usage_insn = XEXP (next_usage_insns, 0);
4830 lra_assert (DEBUG_INSN_P (usage_insn));
4831 next_usage_insns = XEXP (next_usage_insns, 1);
4832 substitute_pseudo (&usage_insn, original_regno, new_reg);
4833 lra_update_insn_regno_info (usage_insn);
4834 if (lra_dump_file != NULL)
4836 fprintf (lra_dump_file, " Split reuse change %d->%d:\n",
4837 original_regno, REGNO (new_reg));
4838 dump_insn_slim (lra_dump_file, usage_insn);
4841 lra_assert (NOTE_P (usage_insn) || NONDEBUG_INSN_P (usage_insn));
4842 lra_assert (usage_insn != insn || (after_p && before_p));
4843 lra_process_new_insns (usage_insn, after_p ? NULL_RTX : restore,
4844 after_p ? restore : NULL_RTX,
4845 call_save_p
4846 ? "Add reg<-save" : "Add reg<-split");
4847 lra_process_new_insns (insn, before_p ? save : NULL_RTX,
4848 before_p ? NULL_RTX : save,
4849 call_save_p
4850 ? "Add save<-reg" : "Add split<-reg");
4851 if (nregs > 1)
4852 /* If we are trying to split multi-register. We should check
4853 conflicts on the next assignment sub-pass. IRA can allocate on
4854 sub-register levels, LRA do this on pseudos level right now and
4855 this discrepancy may create allocation conflicts after
4856 splitting. */
4857 lra_risky_transformations_p = true;
4858 if (lra_dump_file != NULL)
4859 fprintf (lra_dump_file,
4860 " ))))))))))))))))))))))))))))))))))))))))))))))))\n");
4861 return true;
4864 /* Recognize that we need a split transformation for insn INSN, which
4865 defines or uses REGNO in its insn biggest MODE (we use it only if
4866 REGNO is a hard register). POTENTIAL_RELOAD_HARD_REGS contains
4867 hard registers which might be used for reloads since the EBB end.
4868 Put the save before INSN if BEFORE_P is true. MAX_UID is maximal
4869 uid before starting INSN processing. Return true if we succeed in
4870 such transformation. */
4871 static bool
4872 split_if_necessary (int regno, enum machine_mode mode,
4873 HARD_REG_SET potential_reload_hard_regs,
4874 bool before_p, rtx insn, int max_uid)
4876 bool res = false;
4877 int i, nregs = 1;
4878 rtx next_usage_insns;
/* A hard register may span several consecutive registers in MODE;
   try to split each of them independently. */
4880 if (regno < FIRST_PSEUDO_REGISTER)
4881 nregs = hard_regno_nregs[regno][mode];
4882 for (i = 0; i < nregs; i++)
4883 if (usage_insns[regno + i].check == curr_usage_insns_check
4884 && (next_usage_insns = usage_insns[regno + i].insns) != NULL_RTX
4885 /* To avoid processing the register twice or more. */
4886 && ((GET_CODE (next_usage_insns) != INSN_LIST
4887 && INSN_UID (next_usage_insns) < max_uid)
4888 || (GET_CODE (next_usage_insns) == INSN_LIST
4889 && (INSN_UID (XEXP (next_usage_insns, 0)) < max_uid)))
4890 && need_for_split_p (potential_reload_hard_regs, regno + i)
4891 && split_reg (before_p, regno + i, insn, next_usage_insns))
4892 res = true;
4893 return res;
4896 /* Check only registers living at the current program point in the
4897 current EBB. */
4898 static bitmap_head live_regs;
4900 /* Update live info in EBB given by its HEAD and TAIL insns after
4901 inheritance/split transformation. The function removes dead moves
4902 too. */
4903 static void
4904 update_ebb_live_info (rtx head, rtx tail)
4906 unsigned int j;
4907 int i, regno;
4908 bool live_p;
4909 rtx prev_insn, set;
4910 bool remove_p;
4911 basic_block last_bb, prev_bb, curr_bb;
4912 bitmap_iterator bi;
4913 struct lra_insn_reg *reg;
4914 edge e;
4915 edge_iterator ei;
4917 last_bb = BLOCK_FOR_INSN (tail);
4918 prev_bb = NULL;
/* Walk the EBB backwards, maintaining in LIVE_REGS the registers from
   CHECK_ONLY_REGS that are live at the current point. */
4919 for (curr_insn = tail;
4920 curr_insn != PREV_INSN (head);
4921 curr_insn = prev_insn)
4923 prev_insn = PREV_INSN (curr_insn);
4924 /* We need to process empty blocks too. They contain
4925 NOTE_INSN_BASIC_BLOCK referring for the basic block. */
4926 if (NOTE_P (curr_insn) && NOTE_KIND (curr_insn) != NOTE_INSN_BASIC_BLOCK)
4927 continue;
4928 curr_bb = BLOCK_FOR_INSN (curr_insn);
4929 if (curr_bb != prev_bb)
4931 if (prev_bb != NULL)
4933 /* Update df_get_live_in (prev_bb): */
4934 EXECUTE_IF_SET_IN_BITMAP (&check_only_regs, 0, j, bi)
4935 if (bitmap_bit_p (&live_regs, j))
4936 bitmap_set_bit (df_get_live_in (prev_bb), j);
4937 else
4938 bitmap_clear_bit (df_get_live_in (prev_bb), j);
4940 if (curr_bb != last_bb)
4942 /* Update df_get_live_out (curr_bb): */
4943 EXECUTE_IF_SET_IN_BITMAP (&check_only_regs, 0, j, bi)
4945 live_p = bitmap_bit_p (&live_regs, j);
4946 if (! live_p)
4947 FOR_EACH_EDGE (e, ei, curr_bb->succs)
4948 if (bitmap_bit_p (df_get_live_in (e->dest), j))
4950 live_p = true;
4951 break;
4953 if (live_p)
4954 bitmap_set_bit (df_get_live_out (curr_bb), j);
4955 else
4956 bitmap_clear_bit (df_get_live_out (curr_bb), j);
4959 prev_bb = curr_bb;
4960 bitmap_and (&live_regs, &check_only_regs, df_get_live_out (curr_bb));
4962 if (! NONDEBUG_INSN_P (curr_insn))
4963 continue;
4964 curr_id = lra_get_insn_recog_data (curr_insn);
4965 curr_static_id = curr_id->insn_static_data;
4966 remove_p = false;
/* A single set of a tracked pseudo that is dead at this point is a
   dead store candidate. */
4967 if ((set = single_set (curr_insn)) != NULL_RTX && REG_P (SET_DEST (set))
4968 && (regno = REGNO (SET_DEST (set))) >= FIRST_PSEUDO_REGISTER
4969 && bitmap_bit_p (&check_only_regs, regno)
4970 && ! bitmap_bit_p (&live_regs, regno))
4971 remove_p = true;
4972 /* See which defined values die here. */
4973 for (reg = curr_id->regs; reg != NULL; reg = reg->next)
4974 if (reg->type == OP_OUT && ! reg->subreg_p)
4975 bitmap_clear_bit (&live_regs, reg->regno);
4976 for (reg = curr_static_id->hard_regs; reg != NULL; reg = reg->next)
4977 if (reg->type == OP_OUT && ! reg->subreg_p)
4978 bitmap_clear_bit (&live_regs, reg->regno);
4979 /* Mark each used value as live. */
4980 for (reg = curr_id->regs; reg != NULL; reg = reg->next)
4981 if (reg->type != OP_OUT
4982 && bitmap_bit_p (&check_only_regs, reg->regno))
4983 bitmap_set_bit (&live_regs, reg->regno);
4984 for (reg = curr_static_id->hard_regs; reg != NULL; reg = reg->next)
4985 if (reg->type != OP_OUT
4986 && bitmap_bit_p (&check_only_regs, reg->regno))
4987 bitmap_set_bit (&live_regs, reg->regno);
4988 if (curr_id->arg_hard_regs != NULL)
4989 /* Make argument hard registers live. */
4990 for (i = 0; (regno = curr_id->arg_hard_regs[i]) >= 0; i++)
4991 if (bitmap_bit_p (&check_only_regs, regno))
4992 bitmap_set_bit (&live_regs, regno);
4993 /* It is quite important to remove dead move insns because it
4994 means removing dead store. We don't need to process them for
4995 constraints. */
4996 if (remove_p)
4998 if (lra_dump_file != NULL)
5000 fprintf (lra_dump_file, " Removing dead insn:\n ");
5001 dump_insn_slim (lra_dump_file, curr_insn);
5003 lra_set_insn_deleted (curr_insn);
5008 /* The structure describes info to do an inheritance for the current
5009 insn. We need to collect such info first before doing the
5010 transformations because the transformations change the insn
5011 internal representation. */
5012 struct to_inherit
5014 /* Original regno. */
5015 int regno;
/* Chain of subsequent usage insns (see usage_insns) which can inherit
   the original reg value. */
5016 /* Subsequent insns which can inherit original reg value. */
5017 rtx insns;
5020 /* Array containing all info for doing inheritance from the current
5021 insn. Bounded by LRA_MAX_INSN_RELOADS entries. */
5022 static struct to_inherit to_inherit[LRA_MAX_INSN_RELOADS];
5024 /* Number of elements currently filled in the previous array. */
5025 static int to_inherit_num;
5027 /* Add inheritance info REGNO and INSNS. Their meaning is described in
5028 structure to_inherit. */
5029 static void
5030 add_to_inherit (int regno, rtx insns)
5032 int i;
5034 for (i = 0; i < to_inherit_num; i++)
5035 if (to_inherit[i].regno == regno)
5036 return;
5037 lra_assert (to_inherit_num < LRA_MAX_INSN_RELOADS);
5038 to_inherit[to_inherit_num].regno = regno;
5039 to_inherit[to_inherit_num++].insns = insns;
5042 /* Return the last non-debug insn in basic block BB, or the block begin
5043 note if none. */
5044 static rtx
5045 get_last_insertion_point (basic_block bb)
5047 rtx insn;
5049 FOR_BB_INSNS_REVERSE (bb, insn)
5050 if (NONDEBUG_INSN_P (insn) || NOTE_INSN_BASIC_BLOCK_P (insn))
5051 return insn;
5052 gcc_unreachable ();
5055 /* Set up RES by registers living on edges FROM except the edge (FROM,
5056 TO) or by registers set up in a jump insn in BB FROM. */
5057 static void
5058 get_live_on_other_edges (basic_block from, basic_block to, bitmap res)
5060 rtx last;
5061 struct lra_insn_reg *reg;
5062 edge e;
5063 edge_iterator ei;
5065 lra_assert (to != NULL);
5066 bitmap_clear (res);
5067 FOR_EACH_EDGE (e, ei, from->succs)
5068 if (e->dest != to)
5069 bitmap_ior_into (res, df_get_live_in (e->dest));
5070 last = get_last_insertion_point (from);
5071 if (! JUMP_P (last))
5072 return;
5073 curr_id = lra_get_insn_recog_data (last);
5074 for (reg = curr_id->regs; reg != NULL; reg = reg->next)
5075 if (reg->type != OP_IN)
5076 bitmap_set_bit (res, reg->regno);
5079 /* Used as temporary results of some bitmap calculations. */
5080 static bitmap_head temp_bitmap;
5082 /* We split for reloads of small class of hard regs. The following
5083 defines how many hard regs the class should have to be qualified as
5084 small. The code is mostly oriented to x86/x86-64 architecture
5085 where some insns need to use only specific register or pair of
5086 registers and these registers can live in RTL explicitly, e.g. for
5087 parameter passing. */
5088 static const int max_small_class_regs_num = 2;
5090 /* Do inheritance/split transformations in EBB starting with HEAD and
5091 finishing on TAIL. We process EBB insns in the reverse order.
5092 Return true if we did any inheritance/split transformation in the
5093 EBB.
5095 We should avoid excessive splitting which results in worse code
5096 because of inaccurate cost calculations for spilling new split
5097 pseudos in such case. To achieve this we do splitting only if
5098 register pressure is high in given basic block and there are reload
5099 pseudos requiring hard registers. We could do more register
5100 pressure calculations at any given program point to avoid necessary
5101 splitting even more but it is too expensive and the current approach
5102 works well enough. */
5103 static bool
5104 inherit_in_ebb (rtx head, rtx tail)
5106 int i, src_regno, dst_regno, nregs;
5107 bool change_p, succ_p, update_reloads_num_p;
5108 rtx prev_insn, next_usage_insns, set, last_insn;
5109 enum reg_class cl;
5110 struct lra_insn_reg *reg;
5111 basic_block last_processed_bb, curr_bb = NULL;
5112 HARD_REG_SET potential_reload_hard_regs, live_hard_regs;
5113 bitmap to_process;
5114 unsigned int j;
5115 bitmap_iterator bi;
5116 bool head_p, after_p;
5118 change_p = false;
5119 curr_usage_insns_check++;
5120 reloads_num = calls_num = 0;
5121 bitmap_clear (&check_only_regs)/* regs whose live info we maintain */;
5122 last_processed_bb = NULL;
5123 CLEAR_HARD_REG_SET (potential_reload_hard_regs);
/* Eliminable and non-allocatable hard regs are always considered live.  */
5124 COPY_HARD_REG_SET (live_hard_regs, eliminable_regset);
5125 IOR_HARD_REG_SET (live_hard_regs, lra_no_alloc_regs);
5126 /* We don't process new insns generated in the loop. */
5127 for (curr_insn = tail; curr_insn != PREV_INSN (head); curr_insn = prev_insn)
5129 prev_insn = PREV_INSN (curr_insn);
5130 if (BLOCK_FOR_INSN (curr_insn) != NULL)
5131 curr_bb = BLOCK_FOR_INSN (curr_insn);
5132 if (last_processed_bb != curr_bb)
5134 /* We are at the end of BB. Add qualified living
5135 pseudos for potential splitting. */
5136 to_process = df_get_live_out (curr_bb);
5137 if (last_processed_bb != NULL)
5139 /* We are somewhere in the middle of EBB. */
5140 get_live_on_other_edges (curr_bb, last_processed_bb,
5141 &temp_bitmap);
5142 to_process = &temp_bitmap;
5144 last_processed_bb = curr_bb;
5145 last_insn = get_last_insertion_point (curr_bb);
5146 after_p = (! JUMP_P (last_insn)
5147 && (! CALL_P (last_insn)
5148 || (find_reg_note (last_insn,
5149 REG_NORETURN, NULL_RTX) == NULL_RTX
5150 && ! SIBLING_CALL_P (last_insn))));
5151 CLEAR_HARD_REG_SET (potential_reload_hard_regs);
5152 EXECUTE_IF_SET_IN_BITMAP (to_process, 0, j, bi)
5154 if ((int) j >= lra_constraint_new_regno_start)
5155 break;
5156 if (j < FIRST_PSEUDO_REGISTER || reg_renumber[j] >= 0)
5158 if (j < FIRST_PSEUDO_REGISTER)
5159 SET_HARD_REG_BIT (live_hard_regs, j);
5160 else
5161 add_to_hard_reg_set (&live_hard_regs,
5162 PSEUDO_REGNO_MODE (j),
5163 reg_renumber[j]);
5164 setup_next_usage_insn (j, last_insn, reloads_num, after_p);
5168 src_regno = dst_regno = -1;
5169 if (NONDEBUG_INSN_P (curr_insn)
5170 && (set = single_set (curr_insn)) != NULL_RTX
5171 && REG_P (SET_DEST (set)) && REG_P (SET_SRC (set)))
5173 src_regno = REGNO (SET_SRC (set));
5174 dst_regno = REGNO (SET_DEST (set));
5176 update_reloads_num_p = true;
5177 if (src_regno < lra_constraint_new_regno_start
5178 && src_regno >= FIRST_PSEUDO_REGISTER
5179 && reg_renumber[src_regno] < 0
5180 && dst_regno >= lra_constraint_new_regno_start
5181 && (cl = lra_get_allocno_class (dst_regno)) != NO_REGS)
5183 /* 'reload_pseudo <- original_pseudo'. */
5184 if (ira_class_hard_regs_num[cl] <= max_small_class_regs_num)
5185 reloads_num++;
5186 update_reloads_num_p = false;
5187 succ_p = false;
5188 if (usage_insns[src_regno].check == curr_usage_insns_check
5189 && (next_usage_insns = usage_insns[src_regno].insns) != NULL_RTX)
5190 succ_p = inherit_reload_reg (false, src_regno, cl,
5191 curr_insn, next_usage_insns);
5192 if (succ_p)
5193 change_p = true;
5194 else
5195 setup_next_usage_insn (src_regno, curr_insn, reloads_num, false);
5196 if (hard_reg_set_subset_p (reg_class_contents[cl], live_hard_regs))
5197 IOR_HARD_REG_SET (potential_reload_hard_regs,
5198 reg_class_contents[cl]);
5200 else if (src_regno >= lra_constraint_new_regno_start
5201 && dst_regno < lra_constraint_new_regno_start
5202 && dst_regno >= FIRST_PSEUDO_REGISTER
5203 && reg_renumber[dst_regno] < 0
5204 && (cl = lra_get_allocno_class (src_regno)) != NO_REGS
5205 && usage_insns[dst_regno].check == curr_usage_insns_check
5206 && (next_usage_insns
5207 = usage_insns[dst_regno].insns) != NULL_RTX)
5209 if (ira_class_hard_regs_num[cl] <= max_small_class_regs_num)
5210 reloads_num++;
5211 update_reloads_num_p = false;
5212 /* 'original_pseudo <- reload_pseudo'. */
5213 if (! JUMP_P (curr_insn)
5214 && inherit_reload_reg (true, dst_regno, cl,
5215 curr_insn, next_usage_insns))
5216 change_p = true;
5217 /* Invalidate. */
5218 usage_insns[dst_regno].check = 0;
5219 if (hard_reg_set_subset_p (reg_class_contents[cl], live_hard_regs))
5220 IOR_HARD_REG_SET (potential_reload_hard_regs,
5221 reg_class_contents[cl]);
5223 else if (INSN_P (curr_insn))
5225 int iter;
5226 int max_uid = get_max_uid ();
5228 curr_id = lra_get_insn_recog_data (curr_insn);
5229 curr_static_id = curr_id->insn_static_data;
5230 to_inherit_num = 0;
5231 /* Process insn definitions. */
5232 for (iter = 0; iter < 2; iter++)
5233 for (reg = iter == 0 ? curr_id->regs : curr_static_id->hard_regs;
5234 reg != NULL;
5235 reg = reg->next)
5236 if (reg->type != OP_IN
5237 && (dst_regno = reg->regno) < lra_constraint_new_regno_start)
5239 if (dst_regno >= FIRST_PSEUDO_REGISTER && reg->type == OP_OUT
5240 && reg_renumber[dst_regno] < 0 && ! reg->subreg_p
5241 && usage_insns[dst_regno].check == curr_usage_insns_check
5242 && (next_usage_insns
5243 = usage_insns[dst_regno].insns) != NULL_RTX)
5245 struct lra_insn_reg *r;
5247 for (r = curr_id->regs; r != NULL; r = r->next)
5248 if (r->type != OP_OUT && r->regno == dst_regno)
5249 break;
5250 /* Don't do inheritance if the pseudo is also
5251 used in the insn. */
5252 if (r == NULL)
5253 /* We can not do inheritance right now
5254 because the current insn reg info (chain
5255 regs) can change after that. */
5256 add_to_inherit (dst_regno, next_usage_insns);
5258 /* We can not process one reg twice here because of
5259 usage_insns invalidation. */
5260 if ((dst_regno < FIRST_PSEUDO_REGISTER
5261 || reg_renumber[dst_regno] >= 0)
5262 && ! reg->subreg_p && reg->type != OP_IN)
5264 HARD_REG_SET s;
5266 if (split_if_necessary (dst_regno, reg->biggest_mode,
5267 potential_reload_hard_regs,
5268 false, curr_insn, max_uid))
5269 change_p = true;
5270 CLEAR_HARD_REG_SET (s);
5271 if (dst_regno < FIRST_PSEUDO_REGISTER)
5272 add_to_hard_reg_set (&s, reg->biggest_mode, dst_regno);
5273 else
5274 add_to_hard_reg_set (&s, PSEUDO_REGNO_MODE (dst_regno),
5275 reg_renumber[dst_regno]);
5276 AND_COMPL_HARD_REG_SET (live_hard_regs, s);
5278 /* We should invalidate potential inheritance or
5279 splitting for the current insn usages to the next
5280 usage insns (see code below) as the output pseudo
5281 prevents this. */
5282 if ((dst_regno >= FIRST_PSEUDO_REGISTER
5283 && reg_renumber[dst_regno] < 0)
5284 || (reg->type == OP_OUT && ! reg->subreg_p
5285 && (dst_regno < FIRST_PSEUDO_REGISTER
5286 || reg_renumber[dst_regno] >= 0)))
5288 /* Invalidate and mark definitions. */
5289 if (dst_regno >= FIRST_PSEUDO_REGISTER)
5290 usage_insns[dst_regno].check = -(int) INSN_UID (curr_insn);
5291 else
5293 nregs = hard_regno_nregs[dst_regno][reg->biggest_mode];
5294 for (i = 0; i < nregs; i++)
5295 usage_insns[dst_regno + i].check
5296 = -(int) INSN_UID (curr_insn);
5300 if (! JUMP_P (curr_insn))
5301 for (i = 0; i < to_inherit_num; i++)
5302 if (inherit_reload_reg (true, to_inherit[i].regno,
5303 ALL_REGS, curr_insn,
5304 to_inherit[i].insns))
5305 change_p = true;
/* A call: count it and, when a REG_RETURNED note is present, try to
   reuse the call result instead of saving/restoring the pseudo
   around the call.  */
5306 if (CALL_P (curr_insn))
5308 rtx cheap, pat, dest, restore;
5309 int regno, hard_regno;
5311 calls_num++;
5312 if ((cheap = find_reg_note (curr_insn,
5313 REG_RETURNED, NULL_RTX)) != NULL_RTX
5314 && ((cheap = XEXP (cheap, 0)), true)
5315 && (regno = REGNO (cheap)) >= FIRST_PSEUDO_REGISTER
5316 && (hard_regno = reg_renumber[regno]) >= 0
5317 /* If there are pending saves/restores, the
5318 optimization is not worth it. */
5319 && usage_insns[regno].calls_num == calls_num - 1
5320 && TEST_HARD_REG_BIT (call_used_reg_set, hard_regno))
5322 /* Restore the pseudo from the call result as
5323 REG_RETURNED note says that the pseudo value is
5324 in the call result and the pseudo is an argument
5325 of the call. */
5326 pat = PATTERN (curr_insn);
5327 if (GET_CODE (pat) == PARALLEL)
5328 pat = XVECEXP (pat, 0, 0);
5329 dest = SET_DEST (pat);
5330 start_sequence ();
5331 emit_move_insn (cheap, copy_rtx (dest));
5332 restore = get_insns ();
5333 end_sequence ();
5334 lra_process_new_insns (curr_insn, NULL, restore,
5335 "Inserting call parameter restore");
5336 /* We don't need to save/restore of the pseudo from
5337 this call. */
5338 usage_insns[regno].calls_num = calls_num;
5339 bitmap_set_bit (&check_only_regs, regno);
5342 to_inherit_num = 0;
5343 /* Process insn usages. */
5344 for (iter = 0; iter < 2; iter++)
5345 for (reg = iter == 0 ? curr_id->regs : curr_static_id->hard_regs;
5346 reg != NULL;
5347 reg = reg->next)
5348 if ((reg->type != OP_OUT
5349 || (reg->type == OP_OUT && reg->subreg_p))
5350 && (src_regno = reg->regno) < lra_constraint_new_regno_start)
5352 if (src_regno >= FIRST_PSEUDO_REGISTER
5353 && reg_renumber[src_regno] < 0 && reg->type == OP_IN)
5355 if (usage_insns[src_regno].check == curr_usage_insns_check
5356 && (next_usage_insns
5357 = usage_insns[src_regno].insns) != NULL_RTX
5358 && NONDEBUG_INSN_P (curr_insn))
5359 add_to_inherit (src_regno, next_usage_insns);
5360 else if (usage_insns[src_regno].check
5361 != -(int) INSN_UID (curr_insn))
5362 /* Add usages but only if the reg is not set up
5363 in the same insn. */
5364 add_next_usage_insn (src_regno, curr_insn, reloads_num);
5366 else if (src_regno < FIRST_PSEUDO_REGISTER
5367 || reg_renumber[src_regno] >= 0)
5369 bool before_p;
5370 rtx use_insn = curr_insn;
5372 before_p = (JUMP_P (curr_insn)
5373 || (CALL_P (curr_insn) && reg->type == OP_IN));
5374 if (NONDEBUG_INSN_P (curr_insn)
5375 && (! JUMP_P (curr_insn) || reg->type == OP_IN)
5376 && split_if_necessary (src_regno, reg->biggest_mode,
5377 potential_reload_hard_regs,
5378 before_p, curr_insn, max_uid))
5380 if (reg->subreg_p)
5381 lra_risky_transformations_p = true;
5382 change_p = true;
5383 /* Invalidate. */
5384 usage_insns[src_regno].check = 0;
5385 if (before_p)
5386 use_insn = PREV_INSN (curr_insn);
5388 if (NONDEBUG_INSN_P (curr_insn))
5390 if (src_regno < FIRST_PSEUDO_REGISTER)
5391 add_to_hard_reg_set (&live_hard_regs,
5392 reg->biggest_mode, src_regno);
5393 else
5394 add_to_hard_reg_set (&live_hard_regs,
5395 PSEUDO_REGNO_MODE (src_regno),
5396 reg_renumber[src_regno]);
5398 add_next_usage_insn (src_regno, use_insn, reloads_num);
5401 /* Process call args. */
5402 if (curr_id->arg_hard_regs != NULL)
5403 for (i = 0; (src_regno = curr_id->arg_hard_regs[i]) >= 0; i++)
5404 if (src_regno < FIRST_PSEUDO_REGISTER)
5406 SET_HARD_REG_BIT (live_hard_regs, src_regno);
5407 add_next_usage_insn (src_regno, curr_insn, reloads_num);
5409 for (i = 0; i < to_inherit_num; i++)
5411 src_regno = to_inherit[i].regno;
5412 if (inherit_reload_reg (false, src_regno, ALL_REGS,
5413 curr_insn, to_inherit[i].insns))
5414 change_p = true;
5415 else
5416 setup_next_usage_insn (src_regno, curr_insn, reloads_num, false);
/* Account for a remaining single-set involving a reload pseudo so the
   splitting heuristics see the extra register pressure.  */
5419 if (update_reloads_num_p
5420 && NONDEBUG_INSN_P (curr_insn)
5421 && (set = single_set (curr_insn)) != NULL_RTX)
5423 int regno = -1;
5424 if ((REG_P (SET_DEST (set))
5425 && (regno = REGNO (SET_DEST (set))) >= lra_constraint_new_regno_start
5426 && reg_renumber[regno] < 0
5427 && (cl = lra_get_allocno_class (regno)) != NO_REGS)
5428 || (REG_P (SET_SRC (set))
5429 && (regno = REGNO (SET_SRC (set))) >= lra_constraint_new_regno_start
5430 && reg_renumber[regno] < 0
5431 && (cl = lra_get_allocno_class (regno)) != NO_REGS))
5433 if (ira_class_hard_regs_num[cl] <= max_small_class_regs_num)
5434 reloads_num++;
5435 if (hard_reg_set_subset_p (reg_class_contents[cl], live_hard_regs))
5436 IOR_HARD_REG_SET (potential_reload_hard_regs,
5437 reg_class_contents[cl]);
5440 /* We reached the start of the current basic block. */
5441 if (prev_insn == NULL_RTX || prev_insn == PREV_INSN (head)
5442 || BLOCK_FOR_INSN (prev_insn) != curr_bb)
5444 /* We reached the beginning of the current block -- do
5445 rest of splitting in the current BB. */
5446 to_process = df_get_live_in (curr_bb);
5447 if (BLOCK_FOR_INSN (head) != curr_bb)
5449 /* We are somewhere in the middle of EBB. */
5450 get_live_on_other_edges (EDGE_PRED (curr_bb, 0)->src,
5451 curr_bb, &temp_bitmap);
5452 to_process = &temp_bitmap;
5454 head_p = true;
5455 EXECUTE_IF_SET_IN_BITMAP (to_process, 0, j, bi)
5457 if ((int) j >= lra_constraint_new_regno_start)
5458 break;
5459 if (((int) j < FIRST_PSEUDO_REGISTER || reg_renumber[j] >= 0)
5460 && usage_insns[j].check == curr_usage_insns_check
5461 && (next_usage_insns = usage_insns[j].insns) != NULL_RTX)
5463 if (need_for_split_p (potential_reload_hard_regs, j))
5465 if (lra_dump_file != NULL && head_p)
5467 fprintf (lra_dump_file,
5468 " ----------------------------------\n");
5469 head_p = false;
5471 if (split_reg (false, j, bb_note (curr_bb),
5472 next_usage_insns))
5473 change_p = true;
5475 usage_insns[j].check = 0;
5480 return change_p;
5483 /* This value affects EBB forming. If probability of edge from EBB to
5484 a BB is not greater than the following value, we don't add the BB
5485 to EBB. */
5486 #define EBB_PROBABILITY_CUTOFF ((REG_BR_PROB_BASE * 50) / 100)
5488 /* Current number of inheritance/split iteration. */
5489 int lra_inheritance_iter;
5491 /* Entry function for inheritance/split pass. */
5492 void
5493 lra_inheritance (void)
5495 int i;
5496 basic_block bb, start_bb;
5497 edge e;
5499 lra_inheritance_iter++;
5500 if (lra_inheritance_iter > LRA_MAX_INHERITANCE_PASSES)
5501 return;
5502 timevar_push (TV_LRA_INHERITANCE);
5503 if (lra_dump_file != NULL)
5504 fprintf (lra_dump_file, "\n********** Inheritance #%d: **********\n\n",
5505 lra_inheritance_iter);
5506 curr_usage_insns_check = 0;
/* Usage info is tracked only for regnos below
   lra_constraint_new_regno_start.  */
5507 usage_insns = XNEWVEC (struct usage_insns, lra_constraint_new_regno_start);
5508 for (i = 0; i < lra_constraint_new_regno_start; i++)
5509 usage_insns[i].check = 0;
5510 bitmap_initialize (&check_only_regs, &reg_obstack);
5511 bitmap_initialize (&live_regs, &reg_obstack);
5512 bitmap_initialize (&temp_bitmap, &reg_obstack);
5513 bitmap_initialize (&ebb_global_regs, &reg_obstack);
5514 FOR_EACH_BB_FN (bb, cfun)
5516 start_bb = bb;
5517 if (lra_dump_file != NULL)
5518 fprintf (lra_dump_file, "EBB");
5519 /* Form an EBB starting with BB: follow fall-through edges whose
5520 probability exceeds EBB_PROBABILITY_CUTOFF and whose target has
5521 no label. */
5520 bitmap_clear (&ebb_global_regs);
5521 bitmap_ior_into (&ebb_global_regs, df_get_live_in (bb));
5522 for (;;)
5524 if (lra_dump_file != NULL)
5525 fprintf (lra_dump_file, " %d", bb->index);
5526 if (bb->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
5527 || LABEL_P (BB_HEAD (bb->next_bb)))
5528 break;
5529 e = find_fallthru_edge (bb->succs);
5530 if (! e)
5531 break;
5532 if (e->probability <= EBB_PROBABILITY_CUTOFF)
5533 break;
5534 bb = bb->next_bb;
5536 bitmap_ior_into (&ebb_global_regs, df_get_live_out (bb));
5537 if (lra_dump_file != NULL)
5538 fprintf (lra_dump_file, "\n");
5539 if (inherit_in_ebb (BB_HEAD (start_bb), BB_END (bb)))
5540 /* Remember that the EBB head and tail can change in
5541 inherit_in_ebb. */
5542 update_ebb_live_info (BB_HEAD (start_bb), BB_END (bb));
5544 bitmap_clear (&ebb_global_regs);
5545 bitmap_clear (&temp_bitmap);
5546 bitmap_clear (&live_regs);
5547 bitmap_clear (&check_only_regs);
5548 free (usage_insns);
5550 timevar_pop (TV_LRA_INHERITANCE);
5555 /* This page contains code to undo failed inheritance/split
5556 transformations. */
5558 /* Current number of undo inheritance/split iterations; bounded by
5559 LRA_MAX_INHERITANCE_PASSES. */
5559 int lra_undo_inheritance_iter;
5561 /* Fix BB live info LIVE after removing pseudos created on pass doing
5562 inheritance/split which are REMOVED_PSEUDOS. */
5563 static void
5564 fix_bb_live_info (bitmap live, bitmap removed_pseudos)
5566 unsigned int regno;
5567 bitmap_iterator bi;
5569 EXECUTE_IF_SET_IN_BITMAP (removed_pseudos, 0, regno, bi)
5570 if (bitmap_clear_bit (live, regno))
5571 bitmap_set_bit (live, lra_reg_info[regno].restore_regno);
5574 /* Return regno of the (subreg of) REG. Otherwise, return a negative
5575 number. */
5576 static int
5577 get_regno (rtx reg)
5579 if (GET_CODE (reg) == SUBREG)
5580 reg = SUBREG_REG (reg);
5581 if (REG_P (reg))
5582 return REGNO (reg);
5583 return -1;
5586 /* Remove inheritance/split pseudos which are in REMOVE_PSEUDOS and
5587 return true if we did any change. The undo transformations for
5588 inheritance look like
5589 i <- i2
5590 p <- i => p <- i2
5591 or removing
5592 p <- i, i <- p, and i <- i3
5593 where p is original pseudo from which inheritance pseudo i was
5594 created, i and i3 are removed inheritance pseudos, i2 is another
5595 not removed inheritance pseudo. All split pseudos or other
5596 occurrences of removed inheritance pseudos are changed on the
5597 corresponding original pseudos.
5599 The function also schedules insns changed and created during
5600 inheritance/split pass for processing by the subsequent constraint
5601 pass. */
5602 static bool
5603 remove_inheritance_pseudos (bitmap remove_pseudos)
5605 basic_block bb;
5606 int regno, sregno, prev_sregno, dregno, restore_regno;
5607 rtx set, prev_set, prev_insn;
5608 bool change_p, done_p;
5610 change_p = ! bitmap_empty_p (remove_pseudos);
5611 /* We can not finish the function right away if CHANGE_P is true
5612 because we need to mark insns affected by previous
5613 inheritance/split pass for processing by the subsequent
5614 constraint pass. */
5615 FOR_EACH_BB_FN (bb, cfun)
5617 fix_bb_live_info (df_get_live_in (bb), remove_pseudos);
5618 fix_bb_live_info (df_get_live_out (bb), remove_pseudos);
5619 FOR_BB_INSNS_REVERSE (bb, curr_insn)
5621 if (! INSN_P (curr_insn))
5622 continue;
5623 done_p = false;
5624 sregno = dregno = -1;
5625 if (change_p && NONDEBUG_INSN_P (curr_insn)
5626 && (set = single_set (curr_insn)) != NULL_RTX)
5628 dregno = get_regno (SET_DEST (set));
5629 sregno = get_regno (SET_SRC (set));
5632 if (sregno >= 0 && dregno >= 0)
5634 if ((bitmap_bit_p (remove_pseudos, sregno)
5635 && (lra_reg_info[sregno].restore_regno == dregno
5636 || (bitmap_bit_p (remove_pseudos, dregno)
5637 && (lra_reg_info[sregno].restore_regno
5638 == lra_reg_info[dregno].restore_regno))))
5639 || (bitmap_bit_p (remove_pseudos, dregno)
5640 && lra_reg_info[dregno].restore_regno == sregno))
5641 /* One of the following cases:
5642 original <- removed inheritance pseudo
5643 removed inherit pseudo <- another removed inherit pseudo
5644 removed inherit pseudo <- original pseudo
5646 removed_split_pseudo <- original_reg
5647 original_reg <- removed_split_pseudo */
5649 if (lra_dump_file != NULL)
5651 fprintf (lra_dump_file, "	    Removing %s:\n",
5652 bitmap_bit_p (&lra_split_regs, sregno)
5653 || bitmap_bit_p (&lra_split_regs, dregno)
5654 ? "split" : "inheritance");
5655 dump_insn_slim (lra_dump_file, curr_insn);
5657 lra_set_insn_deleted (curr_insn);
5658 done_p = true;
5660 else if (bitmap_bit_p (remove_pseudos, sregno)
5661 && bitmap_bit_p (&lra_inheritance_pseudos, sregno))
5663 /* Search the following pattern:
5664 inherit_or_split_pseudo1 <- inherit_or_split_pseudo2
5665 original_pseudo <- inherit_or_split_pseudo1
5666 where the 2nd insn is the current insn and
5667 inherit_or_split_pseudo2 is not removed. If it is found,
5668 change the current insn onto:
5669 original_pseudo <- inherit_or_split_pseudo2. */
5670 for (prev_insn = PREV_INSN (curr_insn);
5671 prev_insn != NULL_RTX && ! NONDEBUG_INSN_P (prev_insn);
5672 prev_insn = PREV_INSN (prev_insn))
5674 if (prev_insn != NULL_RTX && BLOCK_FOR_INSN (prev_insn) == bb
5675 && (prev_set = single_set (prev_insn)) != NULL_RTX
5676 /* There should be no subregs in insn we are
5677 searching because only the original reg might
5678 be in subreg when we changed the mode of
5679 load/store for splitting. */
5680 && REG_P (SET_DEST (prev_set))
5681 && REG_P (SET_SRC (prev_set))
5682 && (int) REGNO (SET_DEST (prev_set)) == sregno
5683 && ((prev_sregno = REGNO (SET_SRC (prev_set)))
5684 >= FIRST_PSEUDO_REGISTER)
5685 /* As we consider chain of inheritance or
5686 splitting described in above comment we should
5687 check that sregno and prev_sregno were
5688 inheritance/split pseudos created from the
5689 same original regno. */
5690 && (lra_reg_info[sregno].restore_regno
5691 == lra_reg_info[prev_sregno].restore_regno)
5692 && ! bitmap_bit_p (remove_pseudos, prev_sregno))
5694 lra_assert (GET_MODE (SET_SRC (prev_set))
5695 == GET_MODE (regno_reg_rtx[sregno]));
5696 if (GET_CODE (SET_SRC (set)) == SUBREG)
5697 SUBREG_REG (SET_SRC (set)) = SET_SRC (prev_set);
5698 else
5699 SET_SRC (set) = SET_SRC (prev_set);
5700 /* As we are finishing with processing the insn
5701 here, check the destination too as it might be an
5702 inheritance pseudo for another pseudo. */
5703 if (bitmap_bit_p (remove_pseudos, dregno)
5704 && bitmap_bit_p (&lra_inheritance_pseudos, dregno)
5705 && (restore_regno
5706 = lra_reg_info[dregno].restore_regno) >= 0)
5708 if (GET_CODE (SET_DEST (set)) == SUBREG)
5709 SUBREG_REG (SET_DEST (set))
5710 = regno_reg_rtx[restore_regno];
5711 else
5712 SET_DEST (set) = regno_reg_rtx[restore_regno];
5714 lra_push_insn_and_update_insn_regno_info (curr_insn);
5715 lra_set_used_insn_alternative_by_uid
5716 (INSN_UID (curr_insn), -1);
5717 done_p = true;
5718 if (lra_dump_file != NULL)
5720 fprintf (lra_dump_file, "    Change reload insn:\n");
5721 dump_insn_slim (lra_dump_file, curr_insn);
5726 if (! done_p)
5728 struct lra_insn_reg *reg;
5729 bool restored_regs_p = false;
5730 bool kept_regs_p = false;
5732 curr_id = lra_get_insn_recog_data (curr_insn);
/* Substitute every remaining removed pseudo occurrence in the insn by
   its original pseudo. */
5733 for (reg = curr_id->regs; reg != NULL; reg = reg->next)
5735 regno = reg->regno;
5736 restore_regno = lra_reg_info[regno].restore_regno;
5737 if (restore_regno >= 0)
5739 if (change_p && bitmap_bit_p (remove_pseudos, regno))
5741 substitute_pseudo (&curr_insn, regno,
5742 regno_reg_rtx[restore_regno]);
5743 restored_regs_p = true;
5745 else
5746 kept_regs_p = true;
5749 if (NONDEBUG_INSN_P (curr_insn) && kept_regs_p)
5751 /* The instruction has changed since the previous
5752 constraints pass. */
5753 lra_push_insn_and_update_insn_regno_info (curr_insn);
5754 lra_set_used_insn_alternative_by_uid
5755 (INSN_UID (curr_insn), -1);
5757 else if (restored_regs_p)
5758 /* The instruction has been restored to the form that
5759 it had during the previous constraints pass. */
5760 lra_update_insn_regno_info (curr_insn);
5761 if (restored_regs_p && lra_dump_file != NULL)
5763 fprintf (lra_dump_file, "   Insn after restoring regs:\n");
5764 dump_insn_slim (lra_dump_file, curr_insn);
5769 return change_p;
5772 /* If an optional reload pseudo failed to get a hard register or was
5773 not inherited, it is better to remove optional reloads. We do this
5774 transformation after undoing inheritance to figure out necessity to
5775 remove optional reloads easier. Return true if we do any
5776 change. */
5777 static bool
5778 undo_optional_reloads (void)
5780 bool change_p, keep_p;
5781 unsigned int regno, uid;
5782 bitmap_iterator bi, bi2;
5783 rtx insn, set, src, dest;
5784 bitmap_head removed_optional_reload_pseudos, insn_bitmap;
5786 bitmap_initialize (&removed_optional_reload_pseudos, &reg_obstack);
5787 bitmap_copy (&removed_optional_reload_pseudos, &lra_optional_reload_pseudos);
/* First decide which optional reload pseudos must be kept. */
5788 EXECUTE_IF_SET_IN_BITMAP (&lra_optional_reload_pseudos, 0, regno, bi)
5790 keep_p = false;
5791 /* Keep optional reloads from previous subpasses. */
5792 if (lra_reg_info[regno].restore_regno < 0
5793 /* If the original pseudo changed its allocation, just
5794 removing the optional pseudo is dangerous as the original
5795 pseudo will have longer live range. */
5796 || reg_renumber[lra_reg_info[regno].restore_regno] >= 0)
5797 keep_p = true;
5798 else if (reg_renumber[regno] >= 0)
5799 EXECUTE_IF_SET_IN_BITMAP (&lra_reg_info[regno].insn_bitmap, 0, uid, bi2)
5801 insn = lra_insn_recog_data[uid]->insn;
5802 if ((set = single_set (insn)) == NULL_RTX)
5803 continue;
5804 src = SET_SRC (set);
5805 dest = SET_DEST (set);
5806 if (! REG_P (src) || ! REG_P (dest))
5807 continue;
5808 if (REGNO (dest) == regno
5809 /* Ignore insn for optional reloads itself. */
5810 && lra_reg_info[regno].restore_regno != (int) REGNO (src)
5811 /* Check only inheritance on last inheritance pass. */
5812 && (int) REGNO (src) >= new_regno_start
5813 /* Check that the optional reload was inherited. */
5814 && bitmap_bit_p (&lra_inheritance_pseudos, REGNO (src)))
5816 keep_p = true;
5817 break;
5820 if (keep_p)
5822 bitmap_clear_bit (&removed_optional_reload_pseudos, regno);
5823 if (lra_dump_file != NULL)
5824 fprintf (lra_dump_file, "Keep optional reload reg %d\n", regno);
5827 change_p = ! bitmap_empty_p (&removed_optional_reload_pseudos);
5828 bitmap_initialize (&insn_bitmap, &reg_obstack);
/* Now remove the rest: delete the optional-reload moves and restore
   the original pseudo in all other insns referencing the pseudo. */
5829 EXECUTE_IF_SET_IN_BITMAP (&removed_optional_reload_pseudos, 0, regno, bi)
5831 if (lra_dump_file != NULL)
5832 fprintf (lra_dump_file, "Remove optional reload reg %d\n", regno);
5833 bitmap_copy (&insn_bitmap, &lra_reg_info[regno].insn_bitmap);
5834 EXECUTE_IF_SET_IN_BITMAP (&insn_bitmap, 0, uid, bi2)
5836 insn = lra_insn_recog_data[uid]->insn;
5837 if ((set = single_set (insn)) != NULL_RTX)
5839 src = SET_SRC (set);
5840 dest = SET_DEST (set);
5841 if (REG_P (src) && REG_P (dest)
5842 && ((REGNO (src) == regno
5843 && (lra_reg_info[regno].restore_regno
5844 == (int) REGNO (dest)))
5845 || (REGNO (dest) == regno
5846 && (lra_reg_info[regno].restore_regno
5847 == (int) REGNO (src)))))
5849 if (lra_dump_file != NULL)
5851 fprintf (lra_dump_file, "  Deleting move %u\n",
5852 INSN_UID (insn));
5853 dump_insn_slim (lra_dump_file, insn);
5855 lra_set_insn_deleted (insn);
5856 continue;
5858 /* We should not worry about generation memory-memory
5859 moves here as if the corresponding inheritance did
5860 not work (inheritance pseudo did not get a hard reg),
5861 we remove the inheritance pseudo and the optional
5862 reload. */
5864 substitute_pseudo (&insn, regno,
5865 regno_reg_rtx[lra_reg_info[regno].restore_regno]);
5866 lra_update_insn_regno_info (insn);
5867 if (lra_dump_file != NULL)
5869 fprintf (lra_dump_file,
5870 "  Restoring original insn:\n");
5871 dump_insn_slim (lra_dump_file, insn);
5875 /* Clear restore_regnos. */
5876 EXECUTE_IF_SET_IN_BITMAP (&lra_optional_reload_pseudos, 0, regno, bi)
5877 lra_reg_info[regno].restore_regno = -1;
5878 bitmap_clear (&insn_bitmap);
5879 bitmap_clear (&removed_optional_reload_pseudos);
5880 return change_p;
5883 /* Entry function for undoing inheritance/split transformation. Return true
5884 if we did any RTL change in this pass. */
5885 bool
5886 lra_undo_inheritance (void)
5888 unsigned int regno;
5889 int restore_regno, hard_regno;
5890 int n_all_inherit, n_inherit, n_all_split, n_split;
5891 bitmap_head remove_pseudos;
5892 bitmap_iterator bi;
5893 bool change_p;
5895 lra_undo_inheritance_iter++;
5896 if (lra_undo_inheritance_iter > LRA_MAX_INHERITANCE_PASSES)
5897 return false;
5898 if (lra_dump_file != NULL)
5899 fprintf (lra_dump_file,
5900 "\n********** Undoing inheritance #%d: **********\n\n",
5901 lra_undo_inheritance_iter);
5902 bitmap_initialize (&remove_pseudos, &reg_obstack);
/* Collect inheritance pseudos which did not get a hard register (and
   whose original pseudo also remained unallocated). */
5903 n_inherit = n_all_inherit = 0;
5904 EXECUTE_IF_SET_IN_BITMAP (&lra_inheritance_pseudos, 0, regno, bi)
5905 if (lra_reg_info[regno].restore_regno >= 0)
5907 n_all_inherit++;
5908 if (reg_renumber[regno] < 0
5909 /* If the original pseudo changed its allocation, just
5910 removing inheritance is dangerous as for changing
5911 allocation we used shorter live-ranges. */
5912 && reg_renumber[lra_reg_info[regno].restore_regno] < 0)
5913 bitmap_set_bit (&remove_pseudos, regno)
5914 else
5915 n_inherit++;
5917 if (lra_dump_file != NULL && n_all_inherit != 0)
5918 fprintf (lra_dump_file, "Inherit %d out of %d (%.2f%%)\n",
5919 n_inherit, n_all_inherit,
5920 (double) n_inherit / n_all_inherit * 100);
/* Collect split pseudos which ended up in the same register as the
   original (the split gained nothing). */
5921 n_split = n_all_split = 0;
5922 EXECUTE_IF_SET_IN_BITMAP (&lra_split_regs, 0, regno, bi)
5923 if ((restore_regno = lra_reg_info[regno].restore_regno) >= 0)
5925 n_all_split++;
5926 hard_regno = (restore_regno >= FIRST_PSEUDO_REGISTER
5927 ? reg_renumber[restore_regno] : restore_regno);
5928 if (hard_regno < 0 || reg_renumber[regno] == hard_regno)
5929 bitmap_set_bit (&remove_pseudos, regno);
5930 else
5932 n_split++;
5933 if (lra_dump_file != NULL)
5934 fprintf (lra_dump_file, "	 Keep split r%d (orig=r%d)\n",
5935 regno, restore_regno);
5938 if (lra_dump_file != NULL && n_all_split != 0)
5939 fprintf (lra_dump_file, "Split %d out of %d (%.2f%%)\n",
5940 n_split, n_all_split,
5941 (double) n_split / n_all_split * 100);
5942 change_p = remove_inheritance_pseudos (&remove_pseudos);
5943 bitmap_clear (&remove_pseudos);
5944 /* Clear restore_regnos. */
5945 EXECUTE_IF_SET_IN_BITMAP (&lra_inheritance_pseudos, 0, regno, bi)
5946 lra_reg_info[regno].restore_regno = -1;
5947 EXECUTE_IF_SET_IN_BITMAP (&lra_split_regs, 0, regno, bi)
5948 lra_reg_info[regno].restore_regno = -1;
5949 change_p = undo_optional_reloads () || change_p;
5950 return change_p;