gcc/ira-lives.c

/* IRA processing allocno lives to build allocno live ranges.
   Copyright (C) 2006, 2007, 2008
   Free Software Foundation, Inc.
   Contributed by Vladimir Makarov <vmakarov@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "regs.h"
#include "rtl.h"
#include "tm_p.h"
#include "target.h"
#include "flags.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-config.h"
#include "recog.h"
#include "toplev.h"
#include "params.h"
#include "df.h"
#include "sparseset.h"
#include "ira-int.h"

/* The code in this file is similar to the code in global.c, but it
   works on the allocno basis and creates live ranges instead of
   pseudo-register conflicts.  */

/* Program points are enumerated by numbers from range
   0..IRA_MAX_POINT-1.  There are approximately two times more program
   points than insns.  Program points are places in the program where
   liveness info can be changed.  In the most general case (there are
   more complicated cases too) some program points correspond to places
   where an input operand dies and others correspond to places where
   output operands are born.  */
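/* For example, an ordinary insn usually gives rise to two program
   points: one associated with the death of its input operands and one
   associated with the birth of its output operands.  */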
int ira_max_point;

/* Arrays of size IRA_MAX_POINT mapping a program point to the allocno
   live ranges with the given start/finish point.  */
allocno_live_range_t *ira_start_point_ranges, *ira_finish_point_ranges;

/* Number of the current program point.  */
static int curr_point;

/* Point where register pressure excess started or -1 if there is no
   register pressure excess.  Excess pressure for a register class at
   some point means that there are more allocnos of the given register
   class living at the point than the number of hard registers of the
   class available for allocation.  It is defined only for cover
   classes.  */
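/* For example, if three allocnos of a cover class each needing one
   hard register are live at a point where only two hard registers of
   that class are available for allocation, there is excess pressure
   for the class at that point.  */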
static int high_pressure_start_point[N_REG_CLASSES];

/* Allocnos live at current point in the scan.  */
static sparseset allocnos_live;

/* Set of hard regs (except eliminable ones) currently live.  */
static HARD_REG_SET hard_regs_live;

/* The loop tree node corresponding to the current basic block.  */
static ira_loop_tree_node_t curr_bb_node;

/* The number of the last processed call.  */
static int last_call_num;
/* The number of last call at which given allocno was saved.  */
static int *allocno_saved_at_call;

/* Process the birth of register REGNO.  Update living hard regs and
   conflict hard regs for living allocnos, or start a new live range
   for the allocno corresponding to REGNO if necessary.  */
static void
make_regno_born (int regno)
{
  unsigned int i;
  ira_allocno_t a;
  allocno_live_range_t p;

  if (regno < FIRST_PSEUDO_REGISTER)
    {
      SET_HARD_REG_BIT (hard_regs_live, regno);
      EXECUTE_IF_SET_IN_SPARSESET (allocnos_live, i)
        {
          SET_HARD_REG_BIT (ALLOCNO_CONFLICT_HARD_REGS (ira_allocnos[i]),
                            regno);
          SET_HARD_REG_BIT (ALLOCNO_TOTAL_CONFLICT_HARD_REGS (ira_allocnos[i]),
                            regno);
        }
      return;
    }
  a = ira_curr_regno_allocno_map[regno];
  if (a == NULL)
    return;
  /* Start a new live range only if the last range does not end at the
     current or the previous program point; otherwise the existing
     range will simply be extended when REGNO dies.  */
  if ((p = ALLOCNO_LIVE_RANGES (a)) == NULL
      || (p->finish != curr_point && p->finish + 1 != curr_point))
    ALLOCNO_LIVE_RANGES (a)
      = ira_create_allocno_live_range (a, curr_point, -1,
                                       ALLOCNO_LIVE_RANGES (a));
}

/* Update ALLOCNO_EXCESS_PRESSURE_POINTS_NUM for allocno A.  */
static void
update_allocno_pressure_excess_length (ira_allocno_t a)
{
  int start;
  enum reg_class cover_class;
  allocno_live_range_t p;

  cover_class = ALLOCNO_COVER_CLASS (a);
  if (high_pressure_start_point[cover_class] < 0)
    return;
  p = ALLOCNO_LIVE_RANGES (a);
  ira_assert (p != NULL);
  start = (high_pressure_start_point[cover_class] > p->start
           ? high_pressure_start_point[cover_class] : p->start);
  ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (a) += curr_point - start + 1;
}

/* Process the death of register REGNO.  This updates hard_regs_live
   or finishes the current live range for the allocno corresponding to
   REGNO.  */
static void
make_regno_dead (int regno)
{
  ira_allocno_t a;
  allocno_live_range_t p;

  if (regno < FIRST_PSEUDO_REGISTER)
    {
      CLEAR_HARD_REG_BIT (hard_regs_live, regno);
      return;
    }
  a = ira_curr_regno_allocno_map[regno];
  if (a == NULL)
    return;
  p = ALLOCNO_LIVE_RANGES (a);
  ira_assert (p != NULL);
  p->finish = curr_point;
  update_allocno_pressure_excess_length (a);
}

/* The current register pressures for each cover class for the current
   basic block.  */
static int curr_reg_pressure[N_REG_CLASSES];

/* Mark allocno A as currently living and update current register
   pressure, maximal register pressure for the current BB, start point
   of the register pressure excess, and conflicting hard registers of
   A.  */
static void
set_allocno_live (ira_allocno_t a)
{
  int nregs;
  enum reg_class cover_class;

  /* Invalidate because it is referenced.  */
  allocno_saved_at_call[ALLOCNO_NUM (a)] = 0;
  if (sparseset_bit_p (allocnos_live, ALLOCNO_NUM (a)))
    return;
  sparseset_set_bit (allocnos_live, ALLOCNO_NUM (a));
  IOR_HARD_REG_SET (ALLOCNO_CONFLICT_HARD_REGS (a), hard_regs_live);
  IOR_HARD_REG_SET (ALLOCNO_TOTAL_CONFLICT_HARD_REGS (a), hard_regs_live);
  cover_class = ALLOCNO_COVER_CLASS (a);
  nregs = ira_reg_class_nregs[cover_class][ALLOCNO_MODE (a)];
  curr_reg_pressure[cover_class] += nregs;
  if (high_pressure_start_point[cover_class] < 0
      && (curr_reg_pressure[cover_class]
          > ira_available_class_regs[cover_class]))
    high_pressure_start_point[cover_class] = curr_point;
  if (curr_bb_node->reg_pressure[cover_class]
      < curr_reg_pressure[cover_class])
    curr_bb_node->reg_pressure[cover_class] = curr_reg_pressure[cover_class];
}

/* Mark allocno A as currently not living and update current register
   pressure, start point of the register pressure excess, and register
   pressure excess length for living allocnos.  */
static void
clear_allocno_live (ira_allocno_t a)
{
  unsigned int i;
  enum reg_class cover_class;

  /* Invalidate because it is referenced.  */
  allocno_saved_at_call[ALLOCNO_NUM (a)] = 0;
  if (sparseset_bit_p (allocnos_live, ALLOCNO_NUM (a)))
    {
      cover_class = ALLOCNO_COVER_CLASS (a);
      curr_reg_pressure[cover_class]
        -= ira_reg_class_nregs[cover_class][ALLOCNO_MODE (a)];
      ira_assert (curr_reg_pressure[cover_class] >= 0);
      if (high_pressure_start_point[cover_class] >= 0
          && (curr_reg_pressure[cover_class]
              <= ira_available_class_regs[cover_class]))
        {
          EXECUTE_IF_SET_IN_SPARSESET (allocnos_live, i)
            {
              update_allocno_pressure_excess_length (ira_allocnos[i]);
            }
          high_pressure_start_point[cover_class] = -1;
        }
    }
  sparseset_clear_bit (allocnos_live, ALLOCNO_NUM (a));
}

/* Mark the register REG as live.  Store a 1 in hard_regs_live or
   allocnos_live for this register or the corresponding allocno,
   record how many consecutive hardware registers it actually
   needs.  */
static void
mark_reg_live (rtx reg)
{
  int regno;

  gcc_assert (REG_P (reg));
  regno = REGNO (reg);

  if (regno >= FIRST_PSEUDO_REGISTER)
    {
      ira_allocno_t a = ira_curr_regno_allocno_map[regno];

      if (a != NULL)
        {
          if (sparseset_bit_p (allocnos_live, ALLOCNO_NUM (a)))
            {
              /* Invalidate because it is referenced.  */
              allocno_saved_at_call[ALLOCNO_NUM (a)] = 0;
              return;
            }
          set_allocno_live (a);
        }
      make_regno_born (regno);
    }
  else if (! TEST_HARD_REG_BIT (ira_no_alloc_regs, regno))
    {
      int last = regno + hard_regno_nregs[regno][GET_MODE (reg)];
      enum reg_class cover_class;

      while (regno < last)
        {
          if (! TEST_HARD_REG_BIT (hard_regs_live, regno)
              && ! TEST_HARD_REG_BIT (eliminable_regset, regno))
            {
              cover_class = ira_hard_regno_cover_class[regno];
              if (cover_class != NO_REGS)
                {
                  curr_reg_pressure[cover_class]++;
                  if (high_pressure_start_point[cover_class] < 0
                      && (curr_reg_pressure[cover_class]
                          > ira_available_class_regs[cover_class]))
                    high_pressure_start_point[cover_class] = curr_point;
                }
              make_regno_born (regno);
              if (cover_class != NO_REGS
                  && (curr_bb_node->reg_pressure[cover_class]
                      < curr_reg_pressure[cover_class]))
                curr_bb_node->reg_pressure[cover_class]
                  = curr_reg_pressure[cover_class];
            }
          regno++;
        }
    }
}

/* Mark the register referenced by use or def REF as live.  */
static void
mark_ref_live (df_ref ref)
{
  rtx reg;

  reg = DF_REF_REG (ref);
  if (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);
  mark_reg_live (reg);
}

/* Mark the register REG as dead.  Store a 0 in hard_regs_live or
   allocnos_live for the register.  */
static void
mark_reg_dead (rtx reg)
{
  int regno;

  gcc_assert (REG_P (reg));
  regno = REGNO (reg);

  if (regno >= FIRST_PSEUDO_REGISTER)
    {
      ira_allocno_t a = ira_curr_regno_allocno_map[regno];

      if (a != NULL)
        {
          if (! sparseset_bit_p (allocnos_live, ALLOCNO_NUM (a)))
            {
              /* Invalidate because it is referenced.  */
              allocno_saved_at_call[ALLOCNO_NUM (a)] = 0;
              return;
            }
          clear_allocno_live (a);
        }
      make_regno_dead (regno);
    }
  else if (! TEST_HARD_REG_BIT (ira_no_alloc_regs, regno))
    {
      unsigned int i;
      int last = regno + hard_regno_nregs[regno][GET_MODE (reg)];
      enum reg_class cover_class;

      while (regno < last)
        {
          if (TEST_HARD_REG_BIT (hard_regs_live, regno))
            {
              cover_class = ira_hard_regno_cover_class[regno];
              if (cover_class != NO_REGS)
                {
                  curr_reg_pressure[cover_class]--;
                  if (high_pressure_start_point[cover_class] >= 0
                      && (curr_reg_pressure[cover_class]
                          <= ira_available_class_regs[cover_class]))
                    {
                      EXECUTE_IF_SET_IN_SPARSESET (allocnos_live, i)
                        {
                          update_allocno_pressure_excess_length
                            (ira_allocnos[i]);
                        }
                      high_pressure_start_point[cover_class] = -1;
                    }
                  ira_assert (curr_reg_pressure[cover_class] >= 0);
                }
              make_regno_dead (regno);
            }
          regno++;
        }
    }
}

/* Mark the register referenced by definition DEF as dead, if the
   definition is a total one.  */
static void
mark_ref_dead (df_ref def)
{
  rtx reg;

  if (DF_REF_FLAGS_IS_SET (def, DF_REF_PARTIAL)
      || DF_REF_FLAGS_IS_SET (def, DF_REF_CONDITIONAL))
    return;

  reg = DF_REF_REG (def);
  if (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);
  mark_reg_dead (reg);
}

/* Make pseudo REG conflict with pseudo DREG, if the class of the
   first pseudo intersects class CL.  Advance the current program
   point before making the conflict if ADVANCE_P.  Return TRUE if we
   will need to advance the current program point.  */
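/* The conflict is created by briefly making both pseudos live at the
   same program point and then dead again, which makes their live
   ranges overlap at that point.  */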
static bool
make_pseudo_conflict (rtx reg, enum reg_class cl, rtx dreg, bool advance_p)
{
  ira_allocno_t a;

  if (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);

  if (! REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
    return advance_p;

  a = ira_curr_regno_allocno_map[REGNO (reg)];
  if (! reg_classes_intersect_p (cl, ALLOCNO_COVER_CLASS (a)))
    return advance_p;

  if (advance_p)
    curr_point++;

  mark_reg_live (reg);
  mark_reg_live (dreg);
  mark_reg_dead (reg);
  mark_reg_dead (dreg);

  return false;
}

/* Check for and, if necessary, make conflicts for pseudo DREG of
   class DEF_CL of the current insn with input operand USE of class
   USE_CL.  Advance the current program point before making the
   conflict if ADVANCE_P.  Return TRUE if we will need to advance the
   current program point.  */
static bool
check_and_make_def_use_conflict (rtx dreg, enum reg_class def_cl,
                                 int use, enum reg_class use_cl,
                                 bool advance_p)
{
  if (! reg_classes_intersect_p (def_cl, use_cl))
    return advance_p;

  advance_p = make_pseudo_conflict (recog_data.operand[use],
                                    use_cl, dreg, advance_p);
  /* Reload may end up swapping commutative operands, so you
     have to take both orderings into account.  The
     constraints for the two operands can be completely
     different.  (Indeed, if the constraints for the two
     operands are the same for all alternatives, there's no
     point marking them as commutative.)  */
  if (use < recog_data.n_operands - 1
      && recog_data.constraints[use][0] == '%')
    advance_p
      = make_pseudo_conflict (recog_data.operand[use + 1],
                              use_cl, dreg, advance_p);
  if (use >= 1
      && recog_data.constraints[use - 1][0] == '%')
    advance_p
      = make_pseudo_conflict (recog_data.operand[use - 1],
                              use_cl, dreg, advance_p);
  return advance_p;
}

/* Check for and, if necessary, make conflicts for definition DEF of
   class DEF_CL of the current insn with input operands.  Process only
   constraints of alternative ALT.  */
static void
check_and_make_def_conflict (int alt, int def, enum reg_class def_cl)
{
  int use, use_match;
  ira_allocno_t a;
  enum reg_class use_cl, acl;
  bool advance_p;
  rtx dreg = recog_data.operand[def];

  if (def_cl == NO_REGS)
    return;

  if (GET_CODE (dreg) == SUBREG)
    dreg = SUBREG_REG (dreg);

  if (! REG_P (dreg) || REGNO (dreg) < FIRST_PSEUDO_REGISTER)
    return;

  a = ira_curr_regno_allocno_map[REGNO (dreg)];
  acl = ALLOCNO_COVER_CLASS (a);
  if (! reg_classes_intersect_p (acl, def_cl))
    return;

  advance_p = true;

  for (use = 0; use < recog_data.n_operands; use++)
    {
      if (use == def || recog_data.operand_type[use] == OP_OUT)
        continue;

      if (recog_op_alt[use][alt].anything_ok)
        use_cl = ALL_REGS;
      else
        use_cl = recog_op_alt[use][alt].cl;

      advance_p = check_and_make_def_use_conflict (dreg, def_cl, use,
                                                   use_cl, advance_p);

      if ((use_match = recog_op_alt[use][alt].matches) >= 0)
        {
          if (use_match == def)
            continue;

          if (recog_op_alt[use_match][alt].anything_ok)
            use_cl = ALL_REGS;
          else
            use_cl = recog_op_alt[use_match][alt].cl;
          advance_p = check_and_make_def_use_conflict (dreg, def_cl, use,
                                                       use_cl, advance_p);
        }
    }
}

/* Make conflicts of early clobber pseudo registers of the current
   insn with its inputs.  Avoid introducing unnecessary conflicts by
   checking classes of the constraints and pseudos because otherwise
   significant code degradation is possible for some targets.  */
static void
make_early_clobber_and_input_conflicts (void)
{
  int alt;
  int def, def_match;
  enum reg_class def_cl;

  for (alt = 0; alt < recog_data.n_alternatives; alt++)
    for (def = 0; def < recog_data.n_operands; def++)
      {
        def_cl = NO_REGS;
        if (recog_op_alt[def][alt].earlyclobber)
          {
            if (recog_op_alt[def][alt].anything_ok)
              def_cl = ALL_REGS;
            else
              def_cl = recog_op_alt[def][alt].cl;
            check_and_make_def_conflict (alt, def, def_cl);
          }

        if ((def_match = recog_op_alt[def][alt].matches) >= 0
            && (recog_op_alt[def_match][alt].earlyclobber
                || recog_op_alt[def][alt].earlyclobber))
          {
            if (recog_op_alt[def_match][alt].anything_ok)
              def_cl = ALL_REGS;
            else
              def_cl = recog_op_alt[def_match][alt].cl;
            check_and_make_def_conflict (alt, def, def_cl);
          }
      }
}

/* Mark early clobber hard registers of the current INSN as live (if
   LIVE_P) or dead.  Return true if there are such registers.  */
static bool
mark_hard_reg_early_clobbers (rtx insn, bool live_p)
{
  df_ref *def_rec;
  bool set_p = false;

  for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
    if (DF_REF_FLAGS_IS_SET (*def_rec, DF_REF_MUST_CLOBBER))
      {
        rtx dreg = DF_REF_REG (*def_rec);

        if (GET_CODE (dreg) == SUBREG)
          dreg = SUBREG_REG (dreg);
        if (! REG_P (dreg) || REGNO (dreg) >= FIRST_PSEUDO_REGISTER)
          continue;

        /* Hard register clobbers are believed to be early clobber
           because there is no way to say that non-operand hard
           register clobbers are not early ones.  */
        if (live_p)
          mark_ref_live (*def_rec);
        else
          mark_ref_dead (*def_rec);
        set_p = true;
      }

  return set_p;
}

/* Check that CONSTRAINTS permits the use of only one hard register.
   If so, return the class of that hard register.  Otherwise return
   NO_REGS.  */
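/* For example, a register constraint letter that maps to a class with
   a single allocatable hard register makes the operand usable with
   exactly one hard register, and that class is returned.  */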
static enum reg_class
single_reg_class (const char *constraints, rtx op, rtx equiv_const)
{
  int ignore_p;
  enum reg_class cl, next_cl;
  int c;

  cl = NO_REGS;
  for (ignore_p = false;
       (c = *constraints);
       constraints += CONSTRAINT_LEN (c, constraints))
    if (c == '#')
      ignore_p = true;
    else if (c == ',')
      ignore_p = false;
    else if (! ignore_p)
      switch (c)
        {
        case ' ':
        case '\t':
        case '=':
        case '+':
        case '*':
        case '&':
        case '%':
        case '!':
        case '?':
          break;
        case 'i':
          if (CONSTANT_P (op)
              || (equiv_const != NULL_RTX && CONSTANT_P (equiv_const)))
            return NO_REGS;
          break;

        case 'n':
          if (GET_CODE (op) == CONST_INT
              || (GET_CODE (op) == CONST_DOUBLE && GET_MODE (op) == VOIDmode)
              || (equiv_const != NULL_RTX
                  && (GET_CODE (equiv_const) == CONST_INT
                      || (GET_CODE (equiv_const) == CONST_DOUBLE
                          && GET_MODE (equiv_const) == VOIDmode))))
            return NO_REGS;
          break;

        case 's':
          if ((CONSTANT_P (op) && GET_CODE (op) != CONST_INT
               && (GET_CODE (op) != CONST_DOUBLE || GET_MODE (op) != VOIDmode))
              || (equiv_const != NULL_RTX
                  && CONSTANT_P (equiv_const)
                  && GET_CODE (equiv_const) != CONST_INT
                  && (GET_CODE (equiv_const) != CONST_DOUBLE
                      || GET_MODE (equiv_const) != VOIDmode)))
            return NO_REGS;
          break;

        case 'I':
        case 'J':
        case 'K':
        case 'L':
        case 'M':
        case 'N':
        case 'O':
        case 'P':
          if ((GET_CODE (op) == CONST_INT
               && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), c, constraints))
              || (equiv_const != NULL_RTX
                  && GET_CODE (equiv_const) == CONST_INT
                  && CONST_OK_FOR_CONSTRAINT_P (INTVAL (equiv_const),
                                                c, constraints)))
            return NO_REGS;
          break;

        case 'E':
        case 'F':
          if (GET_CODE (op) == CONST_DOUBLE
              || (GET_CODE (op) == CONST_VECTOR
                  && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT)
              || (equiv_const != NULL_RTX
                  && (GET_CODE (equiv_const) == CONST_DOUBLE
                      || (GET_CODE (equiv_const) == CONST_VECTOR
                          && (GET_MODE_CLASS (GET_MODE (equiv_const))
                              == MODE_VECTOR_FLOAT)))))
            return NO_REGS;
          break;

        case 'G':
        case 'H':
          if ((GET_CODE (op) == CONST_DOUBLE
               && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, c, constraints))
              || (equiv_const != NULL_RTX
                  && GET_CODE (equiv_const) == CONST_DOUBLE
                  && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (equiv_const,
                                                       c, constraints)))
            return NO_REGS;
          /* ??? what about memory */
        case 'r':
        case 'a': case 'b': case 'c': case 'd': case 'e': case 'f':
        case 'h': case 'j': case 'k': case 'l':
        case 'q': case 't': case 'u':
        case 'v': case 'w': case 'x': case 'y': case 'z':
        case 'A': case 'B': case 'C': case 'D':
        case 'Q': case 'R': case 'S': case 'T': case 'U':
        case 'W': case 'Y': case 'Z':
          next_cl = (c == 'r'
                     ? GENERAL_REGS
                     : REG_CLASS_FROM_CONSTRAINT (c, constraints));
          if ((cl != NO_REGS && next_cl != cl)
              || ira_available_class_regs[next_cl] > 1)
            return NO_REGS;
          cl = next_cl;
          break;

        case '0': case '1': case '2': case '3': case '4':
        case '5': case '6': case '7': case '8': case '9':
          next_cl
            = single_reg_class (recog_data.constraints[c - '0'],
                                recog_data.operand[c - '0'], NULL_RTX);
          if ((cl != NO_REGS && next_cl != cl) || next_cl == NO_REGS
              || ira_available_class_regs[next_cl] > 1)
            return NO_REGS;
          cl = next_cl;
          break;

        default:
          return NO_REGS;
        }
  return cl;
}

/* The function checks that operand OP_NUM of the current insn can use
   only one hard register.  If it is so, the function returns the
   class of the hard register.  Otherwise it returns NO_REGS.  */
static enum reg_class
single_reg_operand_class (int op_num)
{
  if (op_num < 0 || recog_data.n_alternatives == 0)
    return NO_REGS;
  return single_reg_class (recog_data.constraints[op_num],
                           recog_data.operand[op_num], NULL_RTX);
}

/* Process the input operands, if IN_P, or the output operands
   otherwise, of the current insn with FREQ to find allocnos which can
   use only one hard register, and make other currently living
   allocnos conflict with that hard register.  */
static void
process_single_reg_class_operands (bool in_p, int freq)
{
  int i, regno, cost;
  unsigned int px;
  enum reg_class cl, cover_class;
  rtx operand;
  ira_allocno_t operand_a, a;

  for (i = 0; i < recog_data.n_operands; i++)
    {
      operand = recog_data.operand[i];
      if (in_p && recog_data.operand_type[i] != OP_IN
          && recog_data.operand_type[i] != OP_INOUT)
        continue;
      if (! in_p && recog_data.operand_type[i] != OP_OUT
          && recog_data.operand_type[i] != OP_INOUT)
        continue;
      cl = single_reg_operand_class (i);
      if (cl == NO_REGS)
        continue;

      operand_a = NULL;

      if (GET_CODE (operand) == SUBREG)
        operand = SUBREG_REG (operand);

      if (REG_P (operand)
          && (regno = REGNO (operand)) >= FIRST_PSEUDO_REGISTER)
        {
          enum machine_mode mode;
          enum reg_class cover_class;

          operand_a = ira_curr_regno_allocno_map[regno];
          mode = ALLOCNO_MODE (operand_a);
          cover_class = ALLOCNO_COVER_CLASS (operand_a);
          if (ira_class_subset_p[cl][cover_class]
              && ira_class_hard_regs_num[cl] != 0
              && (ira_class_hard_reg_index[cover_class]
                  [ira_class_hard_regs[cl][0]]) >= 0
              && reg_class_size[cl] <= (unsigned) CLASS_MAX_NREGS (cl, mode))
            {
              /* ??? FREQ */
              cost = freq * (in_p
                             ? ira_register_move_cost[mode][cover_class][cl]
                             : ira_register_move_cost[mode][cl][cover_class]);
              ira_allocate_and_set_costs
                (&ALLOCNO_CONFLICT_HARD_REG_COSTS (operand_a), cover_class, 0);
              ALLOCNO_CONFLICT_HARD_REG_COSTS (operand_a)
                [ira_class_hard_reg_index
                 [cover_class][ira_class_hard_regs[cl][0]]]
                -= cost;
            }
        }

      EXECUTE_IF_SET_IN_SPARSESET (allocnos_live, px)
        {
          a = ira_allocnos[px];
          cover_class = ALLOCNO_COVER_CLASS (a);
          if (a != operand_a)
            {
              /* We could increase costs of A instead of making it
                 conflicting with the hard register.  But it works
                 worse because it will be spilled in reload anyway.  */
              IOR_HARD_REG_SET (ALLOCNO_CONFLICT_HARD_REGS (a),
                                reg_class_contents[cl]);
              IOR_HARD_REG_SET (ALLOCNO_TOTAL_CONFLICT_HARD_REGS (a),
                                reg_class_contents[cl]);
            }
        }
    }
}

/* Process insns of the basic block given by its LOOP_TREE_NODE to
   update allocno live ranges, allocno hard register conflicts,
   intersected calls, and register pressure info for allocnos for the
   basic block and regions containing the basic block.  */
static void
process_bb_node_lives (ira_loop_tree_node_t loop_tree_node)
{
  int i, freq;
  unsigned int j;
  basic_block bb;
  rtx insn;
  bitmap_iterator bi;
  bitmap reg_live_out;
  unsigned int px;
  bool set_p;

  bb = loop_tree_node->bb;
  if (bb != NULL)
    {
      for (i = 0; i < ira_reg_class_cover_size; i++)
        {
          curr_reg_pressure[ira_reg_class_cover[i]] = 0;
          high_pressure_start_point[ira_reg_class_cover[i]] = -1;
        }
      curr_bb_node = loop_tree_node;
      reg_live_out = DF_LR_OUT (bb);
      sparseset_clear (allocnos_live);
      REG_SET_TO_HARD_REG_SET (hard_regs_live, reg_live_out);
      AND_COMPL_HARD_REG_SET (hard_regs_live, eliminable_regset);
      AND_COMPL_HARD_REG_SET (hard_regs_live, ira_no_alloc_regs);
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
        if (TEST_HARD_REG_BIT (hard_regs_live, i))
          {
            enum reg_class cover_class;

            cover_class = ira_hard_regno_cover_class[i];
            if (cover_class == NO_REGS)
              continue;
            curr_reg_pressure[cover_class]++;
            if (curr_bb_node->reg_pressure[cover_class]
                < curr_reg_pressure[cover_class])
              curr_bb_node->reg_pressure[cover_class]
                = curr_reg_pressure[cover_class];
            ira_assert (curr_reg_pressure[cover_class]
                        <= ira_available_class_regs[cover_class]);
          }
      EXECUTE_IF_SET_IN_BITMAP (reg_live_out, FIRST_PSEUDO_REGISTER, j, bi)
        {
          ira_allocno_t a = ira_curr_regno_allocno_map[j];

          if (a == NULL)
            continue;
          ira_assert (! sparseset_bit_p (allocnos_live, ALLOCNO_NUM (a)));
          set_allocno_live (a);
          make_regno_born (j);
        }

      freq = REG_FREQ_FROM_BB (bb);
      if (freq == 0)
        freq = 1;

      /* Invalidate all allocno_saved_at_call entries.  */
      last_call_num++;

      /* Scan the code of this basic block, noting which allocnos and
         hard regs are born or die.

         Note that this loop treats uninitialized values as live until
         the beginning of the block.  For example, if an instruction
         uses (reg:DI foo), and only (subreg:SI (reg:DI foo) 0) is ever
         set, FOO will remain live until the beginning of the block.
         Likewise if FOO is not set at all.  This is unnecessarily
         pessimistic, but it probably doesn't matter much in practice.  */
      FOR_BB_INSNS_REVERSE (bb, insn)
        {
          df_ref *def_rec, *use_rec;
          bool call_p;

          if (! INSN_P (insn))
            continue;

          if (internal_flag_ira_verbose > 2 && ira_dump_file != NULL)
            fprintf (ira_dump_file, "   Insn %u(l%d): point = %d\n",
                     INSN_UID (insn), loop_tree_node->parent->loop->num,
                     curr_point);

          /* Mark each defined value as live.  We need to do this for
             unused values because they still conflict with quantities
             that are live at the time of the definition.

             Ignore DF_REF_MAY_CLOBBERs on a call instruction.  Such
             references represent the effect of the called function
             on a call-clobbered register.  Marking the register as
             live would stop us from allocating it to a call-crossing
             allocno.  */
          call_p = CALL_P (insn);
          for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
            if (!call_p || !DF_REF_FLAGS_IS_SET (*def_rec, DF_REF_MAY_CLOBBER))
              mark_ref_live (*def_rec);

          /* If INSN has multiple outputs, then any value used in one
             of the outputs conflicts with the other outputs.  Model this
             by making the used value live during the output phase.

             It is unsafe to use !single_set here since it will ignore
             an unused output.  Just because an output is unused does
             not mean the compiler can assume the side effect will not
             occur.  Consider if ALLOCNO appears in the address of an
             output and we reload the output.  If we allocate ALLOCNO
             to the same hard register as an unused output we could
             set the hard register before the output reload insn.  */
          if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
            for (use_rec = DF_INSN_USES (insn); *use_rec; use_rec++)
              {
                int i;
                rtx reg;

                reg = DF_REF_REG (*use_rec);
                for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
                  {
                    rtx set;

                    set = XVECEXP (PATTERN (insn), 0, i);
                    if (GET_CODE (set) == SET
                        && reg_overlap_mentioned_p (reg, SET_DEST (set)))
                      {
                        /* After the previous loop, this is a no-op if
                           REG is contained within SET_DEST (SET).  */
                        mark_ref_live (*use_rec);
                        break;
                      }
                  }
              }

          extract_insn (insn);
          preprocess_constraints ();
          process_single_reg_class_operands (false, freq);

          /* See which defined values die here.  */
          for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
            if (!call_p || !DF_REF_FLAGS_IS_SET (*def_rec, DF_REF_MAY_CLOBBER))
              mark_ref_dead (*def_rec);

          if (call_p)
            {
              last_call_num++;
              /* The current set of live allocnos are live across the call.  */
              EXECUTE_IF_SET_IN_SPARSESET (allocnos_live, i)
                {
                  ira_allocno_t a = ira_allocnos[i];

                  if (allocno_saved_at_call[i] != last_call_num)
                    /* Here we are mimicking caller-save.c behaviour
                       which does not save hard register at a call if
                       it was saved on previous call in the same basic
                       block and the hard register was not mentioned
                       between the two calls.  */
                    ALLOCNO_CALL_FREQ (a) += freq;
                  /* Mark it as saved at the next call.  */
                  allocno_saved_at_call[i] = last_call_num + 1;
                  ALLOCNO_CALLS_CROSSED_NUM (a)++;
                  /* Don't allocate allocnos that cross setjmps or any
                     call, if this function receives a nonlocal
                     goto.  */
                  if (cfun->has_nonlocal_label
                      || find_reg_note (insn, REG_SETJMP,
                                        NULL_RTX) != NULL_RTX)
                    {
                      SET_HARD_REG_SET (ALLOCNO_CONFLICT_HARD_REGS (a));
                      SET_HARD_REG_SET (ALLOCNO_TOTAL_CONFLICT_HARD_REGS (a));
                    }
                }
            }

          make_early_clobber_and_input_conflicts ();

          curr_point++;

          /* Mark each used value as live.  */
          for (use_rec = DF_INSN_USES (insn); *use_rec; use_rec++)
            mark_ref_live (*use_rec);

          process_single_reg_class_operands (true, freq);

          set_p = mark_hard_reg_early_clobbers (insn, true);

          if (set_p)
            {
              mark_hard_reg_early_clobbers (insn, false);

              /* Mark each hard reg as live again.  For example, a
                 hard register can be in clobber and in an insn
                 input.  */
              for (use_rec = DF_INSN_USES (insn); *use_rec; use_rec++)
                {
                  rtx ureg = DF_REF_REG (*use_rec);

                  if (GET_CODE (ureg) == SUBREG)
                    ureg = SUBREG_REG (ureg);
                  if (! REG_P (ureg) || REGNO (ureg) >= FIRST_PSEUDO_REGISTER)
                    continue;

                  mark_ref_live (*use_rec);
                }
            }

          curr_point++;
        }

#ifdef EH_RETURN_DATA_REGNO
      if (bb_has_eh_pred (bb))
        for (j = 0; ; ++j)
          {
            unsigned int regno = EH_RETURN_DATA_REGNO (j);
            if (regno == INVALID_REGNUM)
              break;
            make_regno_born (regno);
          }
#endif

      /* Allocnos can't go in stack regs at the start of a basic block
         that is reached by an abnormal edge.  Likewise for call
         clobbered regs, because caller-save, fixup_abnormal_edges and
         possibly the table driven EH machinery are not quite ready to
         handle such allocnos live across such edges.  */
      if (bb_has_abnormal_pred (bb))
        {
#ifdef STACK_REGS
          EXECUTE_IF_SET_IN_SPARSESET (allocnos_live, px)
            {
              ALLOCNO_NO_STACK_REG_P (ira_allocnos[px]) = true;
              ALLOCNO_TOTAL_NO_STACK_REG_P (ira_allocnos[px]) = true;
            }
          for (px = FIRST_STACK_REG; px <= LAST_STACK_REG; px++)
            make_regno_born (px);
#endif
          /* No need to record conflicts for call clobbered regs if we
             have nonlocal labels around, as we don't ever try to
             allocate such regs in this case.  */
          if (!cfun->has_nonlocal_label)
            for (px = 0; px < FIRST_PSEUDO_REGISTER; px++)
              if (call_used_regs[px])
                make_regno_born (px);
        }

      EXECUTE_IF_SET_IN_SPARSESET (allocnos_live, i)
        {
          make_regno_dead (ALLOCNO_REGNO (ira_allocnos[i]));
        }

      curr_point++;
    }

  /* Propagate register pressure to upper loop tree nodes: */
  if (loop_tree_node != ira_loop_tree_root)
    for (i = 0; i < ira_reg_class_cover_size; i++)
      {
        enum reg_class cover_class;

        cover_class = ira_reg_class_cover[i];
        if (loop_tree_node->reg_pressure[cover_class]
            > loop_tree_node->parent->reg_pressure[cover_class])
          loop_tree_node->parent->reg_pressure[cover_class]
            = loop_tree_node->reg_pressure[cover_class];
      }
}

/* Create and set up IRA_START_POINT_RANGES and
   IRA_FINISH_POINT_RANGES.  */
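/* Each live range is threaded onto two chains: the chain of ranges
   starting at its start point (through the start_next field) and the
   chain of ranges finishing at its finish point (through the
   finish_next field).  */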
static void
create_start_finish_chains (void)
{
  ira_allocno_t a;
  ira_allocno_iterator ai;
  allocno_live_range_t r;

  ira_start_point_ranges
    = (allocno_live_range_t *) ira_allocate (ira_max_point
                                             * sizeof (allocno_live_range_t));
  memset (ira_start_point_ranges, 0,
          ira_max_point * sizeof (allocno_live_range_t));
  ira_finish_point_ranges
    = (allocno_live_range_t *) ira_allocate (ira_max_point
                                             * sizeof (allocno_live_range_t));
  memset (ira_finish_point_ranges, 0,
          ira_max_point * sizeof (allocno_live_range_t));
  FOR_EACH_ALLOCNO (a, ai)
    {
      for (r = ALLOCNO_LIVE_RANGES (a); r != NULL; r = r->next)
        {
          r->start_next = ira_start_point_ranges[r->start];
          ira_start_point_ranges[r->start] = r;
          r->finish_next = ira_finish_point_ranges[r->finish];
          ira_finish_point_ranges[r->finish] = r;
        }
    }
}

/* Rebuild IRA_START_POINT_RANGES and IRA_FINISH_POINT_RANGES after
   new live ranges and program points were added as a result of new
   insn generation.  */
void
ira_rebuild_start_finish_chains (void)
{
  ira_free (ira_finish_point_ranges);
  ira_free (ira_start_point_ranges);
  create_start_finish_chains ();
}

/* Compress allocno live ranges by removing program points where
   nothing happens.  */
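/* A program point is kept only if some live range starts or finishes
   there; the remaining points are renumbered consecutively and all
   live range endpoints are remapped to the new numbering.  */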
static void
remove_some_program_points_and_update_live_ranges (void)
{
  unsigned i;
  int n;
  int *map;
  ira_allocno_t a;
  ira_allocno_iterator ai;
  allocno_live_range_t r;
  bitmap born_or_died;
  bitmap_iterator bi;

  born_or_died = ira_allocate_bitmap ();
  FOR_EACH_ALLOCNO (a, ai)
    {
      for (r = ALLOCNO_LIVE_RANGES (a); r != NULL; r = r->next)
        {
          ira_assert (r->start <= r->finish);
          bitmap_set_bit (born_or_died, r->start);
          bitmap_set_bit (born_or_died, r->finish);
        }
    }
  map = (int *) ira_allocate (sizeof (int) * ira_max_point);
  n = 0;
  EXECUTE_IF_SET_IN_BITMAP (born_or_died, 0, i, bi)
    {
      map[i] = n++;
    }
  ira_free_bitmap (born_or_died);
  if (internal_flag_ira_verbose > 1 && ira_dump_file != NULL)
    fprintf (ira_dump_file, "Compressing live ranges: from %d to %d - %d%%\n",
             ira_max_point, n, 100 * n / ira_max_point);
  ira_max_point = n;
  FOR_EACH_ALLOCNO (a, ai)
    {
      for (r = ALLOCNO_LIVE_RANGES (a); r != NULL; r = r->next)
        {
          r->start = map[r->start];
          r->finish = map[r->finish];
        }
    }
  ira_free (map);
}

/* Print live ranges R to file F.  */
void
ira_print_live_range_list (FILE *f, allocno_live_range_t r)
{
  for (; r != NULL; r = r->next)
    fprintf (f, " [%d..%d]", r->start, r->finish);
  fprintf (f, "\n");
}

/* Print live ranges R to stderr.  */
void
ira_debug_live_range_list (allocno_live_range_t r)
{
  ira_print_live_range_list (stderr, r);
}

/* Print live ranges of allocno A to file F.  */
static void
print_allocno_live_ranges (FILE *f, ira_allocno_t a)
{
  fprintf (f, " a%d(r%d):", ALLOCNO_NUM (a), ALLOCNO_REGNO (a));
  ira_print_live_range_list (f, ALLOCNO_LIVE_RANGES (a));
}

/* Print live ranges of allocno A to stderr.  */
void
ira_debug_allocno_live_ranges (ira_allocno_t a)
{
  print_allocno_live_ranges (stderr, a);
}

/* Print live ranges of all allocnos to file F.  */
static void
print_live_ranges (FILE *f)
{
  ira_allocno_t a;
  ira_allocno_iterator ai;

  FOR_EACH_ALLOCNO (a, ai)
    print_allocno_live_ranges (f, a);
}

/* Print live ranges of all allocnos to stderr.  */
void
ira_debug_live_ranges (void)
{
  print_live_ranges (stderr);
}

/* The main entry function.  It creates live ranges, sets up
   CONFLICT_HARD_REGS and TOTAL_CONFLICT_HARD_REGS for allocnos, and
   calculates register pressure info.  */
void
ira_create_allocno_live_ranges (void)
{
  allocnos_live = sparseset_alloc (ira_allocnos_num);
  curr_point = 0;
  last_call_num = 0;
  allocno_saved_at_call
    = (int *) ira_allocate (ira_allocnos_num * sizeof (int));
  memset (allocno_saved_at_call, 0, ira_allocnos_num * sizeof (int));
  ira_traverse_loop_tree (true, ira_loop_tree_root, NULL,
                          process_bb_node_lives);
  ira_max_point = curr_point;
  create_start_finish_chains ();
  if (internal_flag_ira_verbose > 2 && ira_dump_file != NULL)
    print_live_ranges (ira_dump_file);
  /* Clean up.  */
  ira_free (allocno_saved_at_call);
  sparseset_free (allocnos_live);
}

/* Compress allocno live ranges.  */
void
ira_compress_allocno_live_ranges (void)
{
  remove_some_program_points_and_update_live_ranges ();
  ira_rebuild_start_finish_chains ();
  if (internal_flag_ira_verbose > 2 && ira_dump_file != NULL)
    {
      fprintf (ira_dump_file, "Ranges after the compression:\n");
      print_live_ranges (ira_dump_file);
    }
}

/* Free arrays IRA_START_POINT_RANGES and IRA_FINISH_POINT_RANGES.  */
void
ira_finish_allocno_live_ranges (void)
{
  ira_free (ira_finish_point_ranges);
  ira_free (ira_start_point_ranges);
}