2008-07-03 Vladimir Makarov <vmakarov@redhat.com>
gcc/ira-lives.c
/* IRA processing allocno lives to build allocno live ranges.
   Copyright (C) 2006, 2007, 2008
   Free Software Foundation, Inc.
   Contributed by Vladimir Makarov <vmakarov@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "regs.h"
#include "rtl.h"
#include "tm_p.h"
#include "target.h"
#include "flags.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-config.h"
#include "recog.h"
#include "toplev.h"
#include "params.h"
#include "df.h"
#include "sparseset.h"
#include "ira-int.h"
/* The code in this file is similar to the code in global.c, but it
   works on the allocno basis and creates live ranges instead of
   pseudo-register conflicts.  */

/* Program points are enumerated by numbers from the range
   0..IRA_MAX_POINT-1.  There are approximately two times more program
   points than insns.  Program points are places in the program where
   liveness info can be changed.  In the most general case (there are
   more complicated cases too), some program points correspond to
   places where an input operand dies and others correspond to places
   where output operands are born.  */
int ira_max_point;
/* Arrays of size IRA_MAX_POINT mapping a program point to the allocno
   live ranges with given start/finish point.  */
allocno_live_range_t *ira_start_point_ranges, *ira_finish_point_ranges;

/* Number of the current program point.  */
static int curr_point;
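
/* For example, for an insn "r3 = r1 op r2" scanned at points P and
   P + 1, the ranges of r1 and r2 can be finished at P (if they die in
   the insn) while the range of r3 starts at P + 1; this is why there
   are roughly two program points per insn (see
   process_bb_node_lives).  */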
/* Point where the register pressure excess started, or -1 if there is
   no register pressure excess.  Excess pressure for a register class
   at some point means that more allocnos of the given register class
   live at that point than the number of hard registers of the class
   available for allocation.  It is defined only for cover
   classes.  */
static int high_pressure_start_point[N_REG_CLASSES];

/* Allocnos live at the current point in the scan.  */
static sparseset allocnos_live;

/* Set of hard regs (except eliminable ones) currently live.  */
static HARD_REG_SET hard_regs_live;

/* The loop tree node corresponding to the current basic block.  */
static ira_loop_tree_node_t curr_bb_node;
/* Process the birth of register REGNO.  Update the living hard regs
   and the conflict hard regs of the living allocnos, or start a new
   live range for the allocno corresponding to REGNO if it is
   necessary.  */
static void
make_regno_born (int regno)
{
  unsigned int i;
  ira_allocno_t a;
  allocno_live_range_t p;

  if (regno < FIRST_PSEUDO_REGISTER)
    {
      SET_HARD_REG_BIT (hard_regs_live, regno);
      EXECUTE_IF_SET_IN_SPARSESET (allocnos_live, i)
        {
          SET_HARD_REG_BIT (ALLOCNO_CONFLICT_HARD_REGS (ira_allocnos[i]), regno);
          SET_HARD_REG_BIT (IRA_ALLOCNO_TOTAL_CONFLICT_HARD_REGS (ira_allocnos[i]),
                            regno);
        }
      return;
    }
  a = ira_curr_regno_allocno_map[regno];
  if (a == NULL)
    return;
  if ((p = ALLOCNO_LIVE_RANGES (a)) == NULL
      || (p->finish != curr_point && p->finish + 1 != curr_point))
    ALLOCNO_LIVE_RANGES (a)
      = ira_create_allocno_live_range (a, curr_point, -1,
                                       ALLOCNO_LIVE_RANGES (a));
}
/* Update ALLOCNO_EXCESS_PRESSURE_POINTS_NUM for allocno A.  */
static void
update_allocno_pressure_excess_length (ira_allocno_t a)
{
  int start;
  enum reg_class cover_class;
  allocno_live_range_t p;

  cover_class = ALLOCNO_COVER_CLASS (a);
  if (high_pressure_start_point[cover_class] < 0)
    return;
  p = ALLOCNO_LIVE_RANGES (a);
  ira_assert (p != NULL);
  start = (high_pressure_start_point[cover_class] > p->start
           ? high_pressure_start_point[cover_class] : p->start);
  ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (a) += curr_point - start + 1;
}
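
/* As a result, ALLOCNO_EXCESS_PRESSURE_POINTS_NUM accumulates the
   number of program points at which A was live while the pressure
   for its cover class exceeded the number of available hard
   registers.  */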
/* Process the death of register REGNO.  This updates hard_regs_live
   or finishes the current live range for the allocno corresponding to
   REGNO.  */
static void
make_regno_dead (int regno)
{
  ira_allocno_t a;
  allocno_live_range_t p;

  if (regno < FIRST_PSEUDO_REGISTER)
    {
      CLEAR_HARD_REG_BIT (hard_regs_live, regno);
      return;
    }
  a = ira_curr_regno_allocno_map[regno];
  if (a == NULL)
    return;
  p = ALLOCNO_LIVE_RANGES (a);
  ira_assert (p != NULL);
  p->finish = curr_point;
  update_allocno_pressure_excess_length (a);
}
/* Process the birth and, right after that, the death of register
   REGNO.  */
static void
make_regno_born_and_dead (int regno)
{
  make_regno_born (regno);
  make_regno_dead (regno);
}
/* The current register pressures for each cover class for the current
   basic block.  */
static int curr_reg_pressure[N_REG_CLASSES];

/* Mark allocno A as currently living and update current register
   pressure, maximal register pressure for the current BB, start point
   of the register pressure excess, and conflicting hard registers of
   A.  */
static void
set_allocno_live (ira_allocno_t a)
{
  int nregs;
  enum reg_class cover_class;

  if (sparseset_bit_p (allocnos_live, ALLOCNO_NUM (a)))
    return;
  sparseset_set_bit (allocnos_live, ALLOCNO_NUM (a));
  IOR_HARD_REG_SET (ALLOCNO_CONFLICT_HARD_REGS (a), hard_regs_live);
  IOR_HARD_REG_SET (IRA_ALLOCNO_TOTAL_CONFLICT_HARD_REGS (a), hard_regs_live);
  cover_class = ALLOCNO_COVER_CLASS (a);
  nregs = ira_reg_class_nregs[cover_class][ALLOCNO_MODE (a)];
  curr_reg_pressure[cover_class] += nregs;
  if (high_pressure_start_point[cover_class] < 0
      && (curr_reg_pressure[cover_class]
          > ira_available_class_regs[cover_class]))
    high_pressure_start_point[cover_class] = curr_point;
  if (curr_bb_node->reg_pressure[cover_class]
      < curr_reg_pressure[cover_class])
    curr_bb_node->reg_pressure[cover_class] = curr_reg_pressure[cover_class];
}
/* Mark allocno A as currently not living and update current register
   pressure, start point of the register pressure excess, and register
   pressure excess length for living allocnos.  */
static void
clear_allocno_live (ira_allocno_t a)
{
  unsigned int i;
  enum reg_class cover_class;

  if (sparseset_bit_p (allocnos_live, ALLOCNO_NUM (a)))
    {
      cover_class = ALLOCNO_COVER_CLASS (a);
      curr_reg_pressure[cover_class]
        -= ira_reg_class_nregs[cover_class][ALLOCNO_MODE (a)];
      ira_assert (curr_reg_pressure[cover_class] >= 0);
      if (high_pressure_start_point[cover_class] >= 0
          && (curr_reg_pressure[cover_class]
              <= ira_available_class_regs[cover_class]))
        {
          EXECUTE_IF_SET_IN_SPARSESET (allocnos_live, i)
            {
              update_allocno_pressure_excess_length (ira_allocnos[i]);
            }
          high_pressure_start_point[cover_class] = -1;
        }
    }
  sparseset_clear_bit (allocnos_live, ALLOCNO_NUM (a));
}
/* Record all regs that are set in any one insn.  Communication from
   mark_reg_{store,clobber}.  */
static VEC(rtx, heap) *regs_set;
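
/* The vector is filled by the note_stores callbacks while scanning a
   single insn and is emptied at the end of processing that insn (see
   process_bb_node_lives), where regs with a REG_UNUSED note are made
   dead again.  */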
/* Handle the case where REG is set by the insn being scanned, during
   the scan to build live ranges and calculate reg pressure info.
   Store a 1 in hard_regs_live or allocnos_live for this register or
   the corresponding allocno, and record how many consecutive hardware
   registers it actually needs.

   Note that even if REG does not remain alive after this insn, we
   must mark it here as live, to ensure a conflict between REG and any
   other reg allocnos set in this insn that really do live.  This is
   because those other allocnos could be considered after this.

   REG might actually be something other than a register; if so, we do
   nothing.

   SETTER is 0 if this register was modified by an auto-increment
   (i.e., a REG_INC note was found for it).  */
static void
mark_reg_store (rtx reg, const_rtx setter ATTRIBUTE_UNUSED,
                void *data ATTRIBUTE_UNUSED)
{
  int regno;

  if (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);

  if (! REG_P (reg))
    return;

  VEC_safe_push (rtx, heap, regs_set, reg);

  regno = REGNO (reg);

  if (regno >= FIRST_PSEUDO_REGISTER)
    {
      ira_allocno_t a = ira_curr_regno_allocno_map[regno];

      if (a != NULL)
        {
          if (sparseset_bit_p (allocnos_live, ALLOCNO_NUM (a)))
            return;
          set_allocno_live (a);
        }
      make_regno_born (regno);
    }
  else if (! TEST_HARD_REG_BIT (ira_no_alloc_regs, regno))
    {
      int last = regno + hard_regno_nregs[regno][GET_MODE (reg)];
      enum reg_class cover_class;

      while (regno < last)
        {
          if (! TEST_HARD_REG_BIT (hard_regs_live, regno)
              && ! TEST_HARD_REG_BIT (eliminable_regset, regno))
            {
              cover_class = ira_class_translate[REGNO_REG_CLASS (regno)];
              if (cover_class != NO_REGS)
                {
                  curr_reg_pressure[cover_class]++;
                  if (high_pressure_start_point[cover_class] < 0
                      && (curr_reg_pressure[cover_class]
                          > ira_available_class_regs[cover_class]))
                    high_pressure_start_point[cover_class] = curr_point;
                }
              make_regno_born (regno);
              if (cover_class != NO_REGS
                  && (curr_bb_node->reg_pressure[cover_class]
                      < curr_reg_pressure[cover_class]))
                curr_bb_node->reg_pressure[cover_class]
                  = curr_reg_pressure[cover_class];
            }
          regno++;
        }
    }
}
/* Like mark_reg_store except notice just CLOBBERs; ignore SETs.  */
static void
mark_reg_clobber (rtx reg, const_rtx setter, void *data)
{
  if (GET_CODE (setter) == CLOBBER)
    mark_reg_store (reg, setter, data);
}
/* Record that register REG, if it is a hard register, has conflicts
   with all the allocnos currently live; if it is a pseudo, record
   that the corresponding allocno lives at just the current program
   point.  Do not mark REG (or the allocno) itself as live.  */
static void
mark_reg_conflicts (rtx reg)
{
  int regno;

  if (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);

  if (! REG_P (reg))
    return;

  regno = REGNO (reg);

  if (regno >= FIRST_PSEUDO_REGISTER)
    make_regno_born_and_dead (regno);
  else if (! TEST_HARD_REG_BIT (ira_no_alloc_regs, regno))
    {
      int last = regno + hard_regno_nregs[regno][GET_MODE (reg)];

      while (regno < last)
        {
          make_regno_born_and_dead (regno);
          regno++;
        }
    }
}
/* Mark REG (or the corresponding allocno) as being dead (following
   the insn being scanned now).  Store a 0 in hard_regs_live or
   allocnos_live for the register.  */
static void
mark_reg_death (rtx reg)
{
  unsigned int i;
  int regno = REGNO (reg);

  if (regno >= FIRST_PSEUDO_REGISTER)
    {
      ira_allocno_t a = ira_curr_regno_allocno_map[regno];

      if (a != NULL)
        {
          if (! sparseset_bit_p (allocnos_live, ALLOCNO_NUM (a)))
            return;
          clear_allocno_live (a);
        }
      make_regno_dead (regno);
    }
  else if (! TEST_HARD_REG_BIT (ira_no_alloc_regs, regno))
    {
      int last = regno + hard_regno_nregs[regno][GET_MODE (reg)];
      enum reg_class cover_class;

      while (regno < last)
        {
          if (TEST_HARD_REG_BIT (hard_regs_live, regno))
            {
              cover_class = ira_class_translate[REGNO_REG_CLASS (regno)];
              if (cover_class != NO_REGS)
                {
                  curr_reg_pressure[cover_class]--;
                  if (high_pressure_start_point[cover_class] >= 0
                      && (curr_reg_pressure[cover_class]
                          <= ira_available_class_regs[cover_class]))
                    {
                      EXECUTE_IF_SET_IN_SPARSESET (allocnos_live, i)
                        {
                          update_allocno_pressure_excess_length (ira_allocnos[i]);
                        }
                      high_pressure_start_point[cover_class] = -1;
                    }
                  ira_assert (curr_reg_pressure[cover_class] >= 0);
                }
              make_regno_dead (regno);
            }
          regno++;
        }
    }
}
/* Check whether CONSTRAINTS permits the use of only one hard
   register.  If so, return the class of that hard register.
   Otherwise return NO_REGS.  */
static enum reg_class
single_reg_class (const char *constraints, rtx op, rtx equiv_const)
{
  int ignore_p;
  enum reg_class cl, next_cl;
  int c;

  cl = NO_REGS;
  for (ignore_p = false;
       (c = *constraints);
       constraints += CONSTRAINT_LEN (c, constraints))
    if (c == '#')
      ignore_p = true;
    else if (c == ',')
      ignore_p = false;
    else if (! ignore_p)
      switch (c)
        {
        case ' ':
        case '\t':
        case '=':
        case '+':
        case '*':
        case '&':
        case '%':
        case '!':
        case '?':
          break;
        case 'i':
          if (CONSTANT_P (op)
              || (equiv_const != NULL_RTX && CONSTANT_P (equiv_const)))
            return NO_REGS;
          break;

        case 'n':
          if (GET_CODE (op) == CONST_INT
              || (GET_CODE (op) == CONST_DOUBLE && GET_MODE (op) == VOIDmode)
              || (equiv_const != NULL_RTX
                  && (GET_CODE (equiv_const) == CONST_INT
                      || (GET_CODE (equiv_const) == CONST_DOUBLE
                          && GET_MODE (equiv_const) == VOIDmode))))
            return NO_REGS;
          break;

        case 's':
          if ((CONSTANT_P (op) && GET_CODE (op) != CONST_INT
               && (GET_CODE (op) != CONST_DOUBLE || GET_MODE (op) != VOIDmode))
              || (equiv_const != NULL_RTX
                  && CONSTANT_P (equiv_const)
                  && GET_CODE (equiv_const) != CONST_INT
                  && (GET_CODE (equiv_const) != CONST_DOUBLE
                      || GET_MODE (equiv_const) != VOIDmode)))
            return NO_REGS;
          break;

        case 'I':
        case 'J':
        case 'K':
        case 'L':
        case 'M':
        case 'N':
        case 'O':
        case 'P':
          if ((GET_CODE (op) == CONST_INT
               && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), c, constraints))
              || (equiv_const != NULL_RTX
                  && GET_CODE (equiv_const) == CONST_INT
                  && CONST_OK_FOR_CONSTRAINT_P (INTVAL (equiv_const),
                                                c, constraints)))
            return NO_REGS;
          break;

        case 'E':
        case 'F':
          if (GET_CODE (op) == CONST_DOUBLE
              || (GET_CODE (op) == CONST_VECTOR
                  && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT)
              || (equiv_const != NULL_RTX
                  && (GET_CODE (equiv_const) == CONST_DOUBLE
                      || (GET_CODE (equiv_const) == CONST_VECTOR
                          && (GET_MODE_CLASS (GET_MODE (equiv_const))
                              == MODE_VECTOR_FLOAT)))))
            return NO_REGS;
          break;

        case 'G':
        case 'H':
          if ((GET_CODE (op) == CONST_DOUBLE
               && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, c, constraints))
              || (equiv_const != NULL_RTX
                  && GET_CODE (equiv_const) == CONST_DOUBLE
                  && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (equiv_const,
                                                       c, constraints)))
            return NO_REGS;
          /* ??? what about memory */
        case 'r':
        case 'a': case 'b': case 'c': case 'd': case 'e': case 'f':
        case 'h': case 'j': case 'k': case 'l':
        case 'q': case 't': case 'u':
        case 'v': case 'w': case 'x': case 'y': case 'z':
        case 'A': case 'B': case 'C': case 'D':
        case 'Q': case 'R': case 'S': case 'T': case 'U':
        case 'W': case 'Y': case 'Z':
          next_cl = (c == 'r'
                     ? GENERAL_REGS
                     : REG_CLASS_FROM_CONSTRAINT (c, constraints));
          if ((cl != NO_REGS && next_cl != cl)
              || ira_available_class_regs[next_cl] > 1)
            return NO_REGS;
          cl = next_cl;
          break;

        case '0': case '1': case '2': case '3': case '4':
        case '5': case '6': case '7': case '8': case '9':
          next_cl
            = single_reg_class (recog_data.constraints[c - '0'],
                                recog_data.operand[c - '0'], NULL_RTX);
          if ((cl != NO_REGS && next_cl != cl) || next_cl == NO_REGS
              || ira_available_class_regs[next_cl] > 1)
            return NO_REGS;
          cl = next_cl;
          break;

        default:
          return NO_REGS;
        }
  return cl;
}
/* Check whether operand OP_NUM of the current insn can use only one
   hard register.  If so, return the class of that hard register.
   Otherwise return NO_REGS.  */
static enum reg_class
single_reg_operand_class (int op_num)
{
  if (op_num < 0 || recog_data.n_alternatives == 0)
    return NO_REGS;
  return single_reg_class (recog_data.constraints[op_num],
                           recog_data.operand[op_num], NULL_RTX);
}
/* Process the input operands (if IN_P) or the output operands
   (otherwise) of the current insn, which has frequency FREQ, to find
   allocnos that can use only one hard register, and make the other
   currently living allocnos conflict with that hard register.  */
static void
process_single_reg_class_operands (bool in_p, int freq)
{
  int i, regno, cost;
  unsigned int px;
  enum reg_class cl, cover_class;
  rtx operand;
  ira_allocno_t operand_a, a;

  for (i = 0; i < recog_data.n_operands; i++)
    {
      operand = recog_data.operand[i];
      if (in_p && recog_data.operand_type[i] != OP_IN
          && recog_data.operand_type[i] != OP_INOUT)
        continue;
      if (! in_p && recog_data.operand_type[i] != OP_OUT
          && recog_data.operand_type[i] != OP_INOUT)
        continue;
      cl = single_reg_operand_class (i);
      if (cl == NO_REGS)
        continue;

      operand_a = NULL;

      if (GET_CODE (operand) == SUBREG)
        operand = SUBREG_REG (operand);

      if (REG_P (operand)
          && (regno = REGNO (operand)) >= FIRST_PSEUDO_REGISTER)
        {
          enum machine_mode mode;
          enum reg_class cover_class;

          operand_a = ira_curr_regno_allocno_map[regno];
          mode = ALLOCNO_MODE (operand_a);
          cover_class = ALLOCNO_COVER_CLASS (operand_a);
          if (ira_class_subset_p[cl][cover_class]
              && ira_class_hard_regs_num[cl] != 0
              && (ira_class_hard_reg_index[cover_class]
                  [ira_class_hard_regs[cl][0]]) >= 0
              && reg_class_size[cl] <= (unsigned) CLASS_MAX_NREGS (cl, mode))
            {
              /* ??? FREQ */
              cost = freq * (in_p
                             ? ira_register_move_cost[mode][cover_class][cl]
                             : ira_register_move_cost[mode][cl][cover_class]);
              ira_allocate_and_set_costs
                (&ALLOCNO_CONFLICT_HARD_REG_COSTS (operand_a), cover_class, 0);
              ALLOCNO_CONFLICT_HARD_REG_COSTS (operand_a)
                [ira_class_hard_reg_index
                 [cover_class][ira_class_hard_regs[cl][0]]]
                -= cost;
            }
        }

      EXECUTE_IF_SET_IN_SPARSESET (allocnos_live, px)
        {
          a = ira_allocnos[px];
          cover_class = ALLOCNO_COVER_CLASS (a);
          if (a != operand_a)
            {
              /* We could increase the costs of A instead of making it
                 conflict with the hard register.  But that works
                 worse because A will be spilled by reload anyway.  */
              IOR_HARD_REG_SET (ALLOCNO_CONFLICT_HARD_REGS (a),
                                reg_class_contents[cl]);
              IOR_HARD_REG_SET (IRA_ALLOCNO_TOTAL_CONFLICT_HARD_REGS (a),
                                reg_class_contents[cl]);
            }
        }
    }
}
/* Process insns of the basic block given by its LOOP_TREE_NODE to
   update allocno live ranges, allocno hard register conflicts,
   intersected calls, and register pressure info for allocnos, for the
   basic block and for the regions containing the basic block.  */
static void
process_bb_node_lives (ira_loop_tree_node_t loop_tree_node)
{
  int i, index;
  unsigned int j;
  basic_block bb;
  rtx insn;
  edge e;
  edge_iterator ei;
  bitmap_iterator bi;
  bitmap reg_live_in;
  unsigned int px;

  bb = loop_tree_node->bb;
  if (bb != NULL)
    {
      for (i = 0; i < ira_reg_class_cover_size; i++)
        {
          curr_reg_pressure[ira_reg_class_cover[i]] = 0;
          high_pressure_start_point[ira_reg_class_cover[i]] = -1;
        }
      curr_bb_node = loop_tree_node;
      reg_live_in = DF_LR_IN (bb);
      sparseset_clear (allocnos_live);
      REG_SET_TO_HARD_REG_SET (hard_regs_live, reg_live_in);
      AND_COMPL_HARD_REG_SET (hard_regs_live, eliminable_regset);
      AND_COMPL_HARD_REG_SET (hard_regs_live, ira_no_alloc_regs);
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
        if (TEST_HARD_REG_BIT (hard_regs_live, i))
          {
            enum reg_class cover_class;

            cover_class = REGNO_REG_CLASS (i);
            if (cover_class == NO_REGS)
              continue;
            cover_class = ira_class_translate[cover_class];
            curr_reg_pressure[cover_class]++;
            if (curr_bb_node->reg_pressure[cover_class]
                < curr_reg_pressure[cover_class])
              curr_bb_node->reg_pressure[cover_class]
                = curr_reg_pressure[cover_class];
            ira_assert (curr_reg_pressure[cover_class]
                        <= ira_available_class_regs[cover_class]);
          }
      EXECUTE_IF_SET_IN_BITMAP (reg_live_in, FIRST_PSEUDO_REGISTER, j, bi)
        {
          ira_allocno_t a = ira_curr_regno_allocno_map[j];

          if (a == NULL)
            continue;
          ira_assert (! sparseset_bit_p (allocnos_live, ALLOCNO_NUM (a)));
          set_allocno_live (a);
          make_regno_born (j);
        }
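
      /* The EH return data registers are implicitly set at the start
         of a landing pad; make them born and dead here so that they
         conflict with everything live at the start of the block
         without staying live themselves.  */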
#ifdef EH_RETURN_DATA_REGNO
      if (bb_has_eh_pred (bb))
        {
          for (j = 0; ; ++j)
            {
              unsigned int regno = EH_RETURN_DATA_REGNO (j);

              if (regno == INVALID_REGNUM)
                break;
              make_regno_born_and_dead (regno);
            }
        }
#endif
      /* Allocnos can't go in stack regs at the start of a basic block
         that is reached by an abnormal edge.  Likewise for call
         clobbered regs, because caller-save, fixup_abnormal_edges and
         possibly the table driven EH machinery are not quite ready to
         handle such allocnos live across such edges.  */
      FOR_EACH_EDGE (e, ei, bb->preds)
        if (e->flags & EDGE_ABNORMAL)
          break;

      if (e != NULL)
        {
#ifdef STACK_REGS
          EXECUTE_IF_SET_IN_SPARSESET (allocnos_live, px)
            {
              ALLOCNO_NO_STACK_REG_P (ira_allocnos[px]) = true;
              IRA_ALLOCNO_TOTAL_NO_STACK_REG_P (ira_allocnos[px]) = true;
            }
          for (px = FIRST_STACK_REG; px <= LAST_STACK_REG; px++)
            make_regno_born_and_dead (px);
#endif
          /* No need to record conflicts for call clobbered regs if we
             have nonlocal labels around, as we don't ever try to
             allocate such regs in this case.  */
          if (!cfun->has_nonlocal_label)
            for (px = 0; px < FIRST_PSEUDO_REGISTER; px++)
              if (call_used_regs[px])
                make_regno_born_and_dead (px);
        }
      /* Scan the code of this basic block, noting which allocnos and
         hard regs are born or die.  */
      FOR_BB_INSNS (bb, insn)
        {
          rtx link;
          int freq;

          if (! INSN_P (insn))
            continue;

          freq = REG_FREQ_FROM_BB (BLOCK_FOR_INSN (insn));
          if (freq == 0)
            freq = 1;

          if (internal_flag_ira_verbose > 2 && ira_dump_file != NULL)
            fprintf (ira_dump_file, "   Insn %u(l%d): point = %d\n",
                     INSN_UID (insn), loop_tree_node->parent->loop->num,
                     curr_point);

          /* Check regs_set is an empty set.  */
          gcc_assert (VEC_empty (rtx, regs_set));

          /* Mark any allocnos clobbered by INSN as live, so they
             conflict with the inputs.  */
          note_stores (PATTERN (insn), mark_reg_clobber, NULL);

          extract_insn (insn);
          process_single_reg_class_operands (true, freq);

          /* Mark any allocnos dead after INSN as dead now.  */
          for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
            if (REG_NOTE_KIND (link) == REG_DEAD)
              mark_reg_death (XEXP (link, 0));
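
          /* The first of the two program points allocated to this
             insn: input operands die here; registers set by the insn
             are born at the next point.  */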
          curr_point++;
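
          /* Allocnos still live at this point cross the call: count
             the call for each of them and remember which calls they
             cross.  */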
          if (CALL_P (insn))
            {
              EXECUTE_IF_SET_IN_SPARSESET (allocnos_live, i)
                {
                  ira_allocno_t a = ira_allocnos[i];

                  ALLOCNO_CALL_FREQ (a) += freq;
                  index = ira_add_regno_call (ALLOCNO_REGNO (a), insn);
                  if (ALLOCNO_CALLS_CROSSED_START (a) < 0)
                    ALLOCNO_CALLS_CROSSED_START (a) = index;
                  ALLOCNO_CALLS_CROSSED_NUM (a)++;
                  /* Don't allocate allocnos that cross calls, if this
                     function receives a nonlocal goto.  */
                  if (cfun->has_nonlocal_label)
                    {
                      SET_HARD_REG_SET (ALLOCNO_CONFLICT_HARD_REGS (a));
                      SET_HARD_REG_SET (IRA_ALLOCNO_TOTAL_CONFLICT_HARD_REGS (a));
                    }
                }
            }
          /* Mark any allocnos set in INSN as live.  Clobbers are
             processed again, so they will conflict with the reg
             allocnos that are set.  */
          note_stores (PATTERN (insn), mark_reg_store, NULL);

#ifdef AUTO_INC_DEC
          for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
            if (REG_NOTE_KIND (link) == REG_INC)
              mark_reg_store (XEXP (link, 0), NULL_RTX, NULL);
#endif
          /* If INSN has multiple outputs, then any allocno that dies
             here and is used inside of an output must conflict with
             the other outputs.

             It is unsafe to use !single_set here since it will ignore
             an unused output.  Just because an output is unused does
             not mean the compiler can assume the side effect will not
             occur.  Consider if ALLOCNO appears in the address of an
             output and we reload the output.  If we allocate ALLOCNO
             to the same hard register as an unused output we could
             set the hard register before the output reload insn.  */
          if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
            for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
              if (REG_NOTE_KIND (link) == REG_DEAD)
                {
                  int i;
                  int used_in_output = 0;
                  rtx reg = XEXP (link, 0);

                  for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
                    {
                      rtx set = XVECEXP (PATTERN (insn), 0, i);

                      if (GET_CODE (set) == SET
                          && ! REG_P (SET_DEST (set))
                          && ! rtx_equal_p (reg, SET_DEST (set))
                          && reg_overlap_mentioned_p (reg, SET_DEST (set)))
                        used_in_output = 1;
                    }
                  if (used_in_output)
                    mark_reg_conflicts (reg);
                }
          process_single_reg_class_operands (false, freq);

          /* Mark as dead any allocnos set in INSN and then never
             used.  */
          while (! VEC_empty (rtx, regs_set))
            {
              rtx reg = VEC_pop (rtx, regs_set);
              rtx note = find_regno_note (insn, REG_UNUSED, REGNO (reg));

              if (note)
                mark_reg_death (XEXP (note, 0));
            }
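
          /* The second program point allocated to this insn: the
             registers set by the insn were born here.  */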
          curr_point++;
        }
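
      /* Finish the ranges of everything still live at the end of the
         block; pseudos that are also live into a later block are made
         live again from that block's DF_LR_IN set.  */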
      EXECUTE_IF_SET_IN_SPARSESET (allocnos_live, i)
        {
          make_regno_dead (ALLOCNO_REGNO (ira_allocnos[i]));
        }
      curr_point++;
    }
  /* Propagate register pressure to upper loop tree nodes: */
  if (loop_tree_node != ira_loop_tree_root)
    for (i = 0; i < ira_reg_class_cover_size; i++)
      {
        enum reg_class cover_class;

        cover_class = ira_reg_class_cover[i];
        if (loop_tree_node->reg_pressure[cover_class]
            > loop_tree_node->parent->reg_pressure[cover_class])
          loop_tree_node->parent->reg_pressure[cover_class]
            = loop_tree_node->reg_pressure[cover_class];
      }
}
/* Create and set up IRA_START_POINT_RANGES and
   IRA_FINISH_POINT_RANGES.  */
static void
create_start_finish_chains (void)
{
  ira_allocno_t a;
  ira_allocno_iterator ai;
  allocno_live_range_t r;

  ira_start_point_ranges
    = ira_allocate (ira_max_point * sizeof (allocno_live_range_t));
  memset (ira_start_point_ranges, 0,
          ira_max_point * sizeof (allocno_live_range_t));
  ira_finish_point_ranges
    = ira_allocate (ira_max_point * sizeof (allocno_live_range_t));
  memset (ira_finish_point_ranges, 0,
          ira_max_point * sizeof (allocno_live_range_t));
  FOR_EACH_ALLOCNO (a, ai)
    {
      for (r = ALLOCNO_LIVE_RANGES (a); r != NULL; r = r->next)
        {
          r->start_next = ira_start_point_ranges[r->start];
          ira_start_point_ranges[r->start] = r;
          r->finish_next = ira_finish_point_ranges[r->finish];
          ira_finish_point_ranges[r->finish] = r;
        }
    }
}
/* Rebuild IRA_START_POINT_RANGES and IRA_FINISH_POINT_RANGES after
   new live ranges and program points were added as a result of new
   insn generation.  */
void
ira_rebuild_start_finish_chains (void)
{
  ira_free (ira_finish_point_ranges);
  ira_free (ira_start_point_ranges);
  create_start_finish_chains ();
}
/* Print live ranges R to file F.  */
void
ira_print_live_range_list (FILE *f, allocno_live_range_t r)
{
  for (; r != NULL; r = r->next)
    fprintf (f, " [%d..%d]", r->start, r->finish);
  fprintf (f, "\n");
}

/* Print live ranges R to stderr.  */
void
ira_debug_live_range_list (allocno_live_range_t r)
{
  ira_print_live_range_list (stderr, r);
}

/* Print live ranges of allocno A to file F.  */
static void
print_allocno_live_ranges (FILE *f, ira_allocno_t a)
{
  fprintf (f, " a%d(r%d):", ALLOCNO_NUM (a), ALLOCNO_REGNO (a));
  ira_print_live_range_list (f, ALLOCNO_LIVE_RANGES (a));
}

/* Print live ranges of allocno A to stderr.  */
void
ira_debug_allocno_live_ranges (ira_allocno_t a)
{
  print_allocno_live_ranges (stderr, a);
}

/* Print live ranges of all allocnos to file F.  */
static void
print_live_ranges (FILE *f)
{
  ira_allocno_t a;
  ira_allocno_iterator ai;

  FOR_EACH_ALLOCNO (a, ai)
    print_allocno_live_ranges (f, a);
}

/* Print live ranges of all allocnos to stderr.  */
void
ira_debug_live_ranges (void)
{
  print_live_ranges (stderr);
}
/* Propagate new info about allocno A (see comments about accumulated
   info in the allocno definition) to the corresponding allocno on the
   upper loop tree level.  So allocnos on upper levels accumulate
   information about the corresponding allocnos in nested regions.
   The new info is the allocno info finally calculated in this
   file.  */
static void
propagate_new_allocno_info (ira_allocno_t a)
{
  int regno;
  ira_allocno_t parent_a;
  ira_loop_tree_node_t parent;

  regno = ALLOCNO_REGNO (a);
  if ((parent = ALLOCNO_LOOP_TREE_NODE (a)->parent) != NULL
      && (parent_a = parent->regno_allocno_map[regno]) != NULL)
    {
      ALLOCNO_CALL_FREQ (parent_a) += ALLOCNO_CALL_FREQ (a);
#ifdef STACK_REGS
      if (IRA_ALLOCNO_TOTAL_NO_STACK_REG_P (a))
        IRA_ALLOCNO_TOTAL_NO_STACK_REG_P (parent_a) = true;
#endif
      IOR_HARD_REG_SET (IRA_ALLOCNO_TOTAL_CONFLICT_HARD_REGS (parent_a),
                        IRA_ALLOCNO_TOTAL_CONFLICT_HARD_REGS (a));
      if (ALLOCNO_CALLS_CROSSED_START (parent_a) < 0
          || (ALLOCNO_CALLS_CROSSED_START (a) >= 0
              && (ALLOCNO_CALLS_CROSSED_START (parent_a)
                  > ALLOCNO_CALLS_CROSSED_START (a))))
        ALLOCNO_CALLS_CROSSED_START (parent_a)
          = ALLOCNO_CALLS_CROSSED_START (a);
      ALLOCNO_CALLS_CROSSED_NUM (parent_a) += ALLOCNO_CALLS_CROSSED_NUM (a);
      ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (parent_a)
        += ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (a);
    }
}
/* Propagate new info about allocnos to the corresponding allocnos on
   upper loop tree level.  */
static void
propagate_new_info (void)
{
  int i;
  ira_allocno_t a;

  for (i = max_reg_num () - 1; i >= FIRST_PSEUDO_REGISTER; i--)
    for (a = ira_regno_allocno_map[i];
         a != NULL;
         a = ALLOCNO_NEXT_REGNO_ALLOCNO (a))
      propagate_new_allocno_info (a);
}
/* The main entry function.  It creates live ranges, sets up
   CONFLICT_HARD_REGS and TOTAL_CONFLICT_HARD_REGS for allocnos, and
   calculates register pressure info.  */
void
ira_create_allocno_live_ranges (void)
{
  allocnos_live = sparseset_alloc (ira_allocnos_num);
  /* Make a vector that mark_reg_{store,clobber} will store in.  */
  if (!regs_set)
    regs_set = VEC_alloc (rtx, heap, 10);
  curr_point = 0;
  ira_traverse_loop_tree (true, ira_loop_tree_root, NULL,
                          process_bb_node_lives);
  ira_max_point = curr_point;
  create_start_finish_chains ();
  if (internal_flag_ira_verbose > 2 && ira_dump_file != NULL)
    print_live_ranges (ira_dump_file);
  propagate_new_info ();
  /* Clean up.  */
  sparseset_free (allocnos_live);
}
/* Free arrays IRA_START_POINT_RANGES and IRA_FINISH_POINT_RANGES.  */
void
ira_finish_allocno_live_ranges (void)
{
  ira_free (ira_finish_point_ranges);
  ira_free (ira_start_point_ranges);
}