/* IRA processing allocno lives.
   Copyright (C) 2006, 2007, 2008
   Free Software Foundation, Inc.
   Contributed by Vladimir Makarov <vmakarov@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "regs.h"
#include "rtl.h"
#include "tm_p.h"
#include "target.h"
#include "flags.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-config.h"
#include "recog.h"
#include "toplev.h"
#include "params.h"
#include "df.h"
#include "ira-int.h"

/* This file contains code analogous to that in global.c, but here it
   works on the allocno basis.  */

static void set_allocno_live (allocno_t);
static void clear_allocno_live (allocno_t);
static void mark_reg_store (rtx, const_rtx, void *);
static void mark_reg_clobber (rtx, const_rtx, void *);
static void mark_reg_conflicts (rtx);
static void mark_reg_death (rtx);
static enum reg_class single_reg_class (const char *, rtx op, rtx);
static enum reg_class single_reg_operand_class (int);
static void process_single_reg_class_operands (int, int);
static void process_bb_node_lives (loop_tree_node_t);

/* Program points are enumerated by numbers in the range
   0..MAX_POINT-1.  */
int max_point;

/* Arrays of size MAX_POINT mapping a program point to the allocno
   live ranges with the given start/finish point.  */
allocno_live_range_t *start_point_ranges, *finish_point_ranges;
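
/* Ranges with a common start (finish) point are additionally chained
   through their start_next (finish_next) fields by
   create_start_finish_chains below.  */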

/* Number of the current program point.  */
static int curr_point;

/* Number of ints required to hold allocnos_num bits.  */
int allocno_set_words;

/* Set, clear or test bit number I in `allocnos_live',
   a bit vector indexed by allocno number.  */
#define SET_ALLOCNO_LIVE(I) SET_ALLOCNO_SET_BIT (allocnos_live, I)
#define CLEAR_ALLOCNO_LIVE(I) CLEAR_ALLOCNO_SET_BIT (allocnos_live, I)
#define TEST_ALLOCNO_LIVE(I) TEST_ALLOCNO_SET_BIT (allocnos_live, I)

/* Bit mask of allocnos live at the current point in the scan.  */
static INT_TYPE *allocnos_live;

/* The same set of live allocnos represented as a bitmap.  */
static bitmap allocnos_live_bitmap;

/* Set of hard regs (except eliminable ones) currently live (during
   the scan of all insns).  */
static HARD_REG_SET hard_regs_live;

/* Loop tree node corresponding to the current basic block.  */
static loop_tree_node_t curr_bb_node;

/* The function processes the birth of register REGNO.  It updates
   the living hard regs and the conflict hard regs of living allocnos,
   or starts a new live range for the allocno corresponding to REGNO.  */
static void
make_regno_born (int regno)
{
  int i;
  allocno_t a;
  allocno_live_range_t p;

  if (regno < FIRST_PSEUDO_REGISTER)
    {
      SET_HARD_REG_BIT (hard_regs_live, regno);
      EXECUTE_IF_SET_IN_ALLOCNO_SET (allocnos_live, i,
        {
          SET_HARD_REG_BIT (ALLOCNO_CONFLICT_HARD_REGS (allocnos [i]), regno);
          SET_HARD_REG_BIT (ALLOCNO_TOTAL_CONFLICT_HARD_REGS (allocnos [i]),
                            regno);
        });
      return;
    }
  a = ira_curr_regno_allocno_map [regno];
  if (a == NULL)
    return;
  if ((p = ALLOCNO_LIVE_RANGES (a)) == NULL
      || (p->finish != curr_point && p->finish + 1 != curr_point))
    ALLOCNO_LIVE_RANGES (a)
      = create_allocno_live_range (a, curr_point, -1, ALLOCNO_LIVE_RANGES (a));
}

/* The function processes the death of register REGNO.  It updates
   the live hard regs or finishes the current live range for the
   allocno corresponding to REGNO.  */
static void
make_regno_dead (int regno)
{
  allocno_t a;
  allocno_live_range_t p;

  if (regno < FIRST_PSEUDO_REGISTER)
    {
      CLEAR_HARD_REG_BIT (hard_regs_live, regno);
      return;
    }
  a = ira_curr_regno_allocno_map [regno];
  if (a == NULL)
    return;
  p = ALLOCNO_LIVE_RANGES (a);
  ira_assert (p != NULL);
  p->finish = curr_point;
}

/* The function processes the birth and, immediately afterwards, the
   death of register REGNO.  */
static void
make_regno_born_and_died (int regno)
{
  make_regno_born (regno);
  make_regno_dead (regno);
}

/* The current register pressure for each cover class in the current
   basic block.  */
static int curr_reg_pressure [N_REG_CLASSES];

/* The function marks allocno A as currently living.  */
static void
set_allocno_live (allocno_t a)
{
  enum reg_class cover_class;

  if (TEST_ALLOCNO_LIVE (ALLOCNO_NUM (a)))
    return;
  SET_ALLOCNO_LIVE (ALLOCNO_NUM (a));
  IOR_HARD_REG_SET (ALLOCNO_CONFLICT_HARD_REGS (a), hard_regs_live);
  IOR_HARD_REG_SET (ALLOCNO_TOTAL_CONFLICT_HARD_REGS (a), hard_regs_live);
  bitmap_set_bit (allocnos_live_bitmap, ALLOCNO_NUM (a));
  cover_class = ALLOCNO_COVER_CLASS (a);
  curr_reg_pressure [cover_class]
    += reg_class_nregs [cover_class] [ALLOCNO_MODE (a)];
  if (curr_bb_node->reg_pressure [cover_class]
      < curr_reg_pressure [cover_class])
    curr_bb_node->reg_pressure [cover_class] = curr_reg_pressure [cover_class];
}

/* The function marks allocno A as currently not living.  */
static void
clear_allocno_live (allocno_t a)
{
  enum reg_class cover_class;

  if (bitmap_bit_p (allocnos_live_bitmap, ALLOCNO_NUM (a)))
    {
      cover_class = ALLOCNO_COVER_CLASS (a);
      curr_reg_pressure [cover_class]
        -= reg_class_nregs [cover_class] [ALLOCNO_MODE (a)];
      ira_assert (curr_reg_pressure [cover_class] >= 0);
    }
  CLEAR_ALLOCNO_LIVE (ALLOCNO_NUM (a));
  bitmap_clear_bit (allocnos_live_bitmap, ALLOCNO_NUM (a));
}

/* Record all regs that are set in any one insn.  Communication from
   mark_reg_{store,clobber}.  */
static VEC(rtx, heap) *regs_set;

/* Handle the case where REG is set by the insn being scanned, during
   the scan to accumulate conflicts.  Store a 1 in hard_regs_live or
   allocnos_live for this register or the corresponding allocno,
   record how many consecutive hardware registers it actually needs,
   and record a conflict with all other reg allocnos already live.

   Note that even if REG does not remain alive after this insn, we
   must mark it here as live, to ensure a conflict between REG and any
   other reg allocnos set in this insn that really do live.  This is
   because those other allocnos could be considered after this.

   REG might actually be something other than a register; if so, we do
   nothing.

   SETTER is 0 if this register was modified by an auto-increment
   (i.e., a REG_INC note was found for it).  */
static void
mark_reg_store (rtx reg, const_rtx setter ATTRIBUTE_UNUSED,
                void *data ATTRIBUTE_UNUSED)
{
  int regno;

  if (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);

  if (! REG_P (reg))
    return;

  VEC_safe_push (rtx, heap, regs_set, reg);

  regno = REGNO (reg);

  if (regno >= FIRST_PSEUDO_REGISTER)
    {
      allocno_t a = ira_curr_regno_allocno_map [regno];

      if (a != NULL)
        {
          if (bitmap_bit_p (allocnos_live_bitmap, ALLOCNO_NUM (a)))
            return;
          set_allocno_live (a);
        }
      make_regno_born (regno);
    }
  else if (! TEST_HARD_REG_BIT (no_alloc_regs, regno))
    {
      int last = regno + hard_regno_nregs [regno] [GET_MODE (reg)];
      enum reg_class cover_class;

      while (regno < last)
        {
          if (! TEST_HARD_REG_BIT (hard_regs_live, regno)
              && ! TEST_HARD_REG_BIT (eliminable_regset, regno))
            {
              cover_class = class_translate [REGNO_REG_CLASS (regno)];
              curr_reg_pressure [cover_class]++;
              make_regno_born (regno);
              if (curr_bb_node->reg_pressure [cover_class]
                  < curr_reg_pressure [cover_class])
                curr_bb_node->reg_pressure [cover_class]
                  = curr_reg_pressure [cover_class];
            }
          regno++;
        }
    }
}

/* Like mark_reg_store except notice just CLOBBERs; ignore SETs.  */
static void
mark_reg_clobber (rtx reg, const_rtx setter, void *data)
{
  if (GET_CODE (setter) == CLOBBER)
    mark_reg_store (reg, setter, data);
}

/* Record that REG (or the corresponding allocno) has conflicts with
   all the allocnos currently live.  Do not mark REG (or the allocno)
   itself as live.  */
static void
mark_reg_conflicts (rtx reg)
{
  int regno;

  if (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);

  if (! REG_P (reg))
    return;

  regno = REGNO (reg);

  if (regno >= FIRST_PSEUDO_REGISTER)
    make_regno_born_and_died (regno);
  else if (! TEST_HARD_REG_BIT (no_alloc_regs, regno))
    {
      int last = regno + hard_regno_nregs [regno] [GET_MODE (reg)];

      while (regno < last)
        {
          make_regno_born_and_died (regno);
          regno++;
        }
    }
}

/* Mark REG (or the corresponding allocno) as being dead (following
   the insn being scanned now).  Store a 0 in hard_regs_live or
   allocnos_live for this register.  */
static void
mark_reg_death (rtx reg)
{
  int regno = REGNO (reg);

  if (regno >= FIRST_PSEUDO_REGISTER)
    {
      allocno_t a = ira_curr_regno_allocno_map [regno];

      if (a != NULL)
        {
          if (! bitmap_bit_p (allocnos_live_bitmap, ALLOCNO_NUM (a)))
            return;
          clear_allocno_live (a);
        }
      make_regno_dead (regno);
    }
  else if (! TEST_HARD_REG_BIT (no_alloc_regs, regno))
    {
      int last = regno + hard_regno_nregs [regno] [GET_MODE (reg)];
      enum reg_class cover_class;

      while (regno < last)
        {
          if (TEST_HARD_REG_BIT (hard_regs_live, regno))
            {
              cover_class = class_translate [REGNO_REG_CLASS (regno)];
              curr_reg_pressure [cover_class]--;
              ira_assert (curr_reg_pressure [cover_class] >= 0);
              make_regno_dead (regno);
            }
          regno++;
        }
    }
}

/* The function checks that CONSTRAINTS permit the use of only one
   hard register.  If so, it returns the class of that hard register.
   Otherwise it returns NO_REGS.

   EQUIV_CONST ??? */
static enum reg_class
single_reg_class (const char *constraints, rtx op, rtx equiv_const)
{
  int ignore_p;
  enum reg_class cl, next_cl;
  int c;

  cl = NO_REGS;
  for (ignore_p = FALSE;
       (c = *constraints);
       constraints += CONSTRAINT_LEN (c, constraints))
    if (c == '#')
      ignore_p = TRUE;
    else if (c == ',')
      ignore_p = FALSE;
    else if (! ignore_p)
      switch (c)
        {
        case ' ':
        case '\t':
        case '=':
        case '+':
        case '*':
        case '&':
        case '%':
        case '!':
        case '?':
          break;
        case 'i':
          if (CONSTANT_P (op)
              || (equiv_const != NULL_RTX && CONSTANT_P (equiv_const)))
            return NO_REGS;
          break;

        case 'n':
          if (GET_CODE (op) == CONST_INT
              || (GET_CODE (op) == CONST_DOUBLE && GET_MODE (op) == VOIDmode)
              || (equiv_const != NULL_RTX
                  && (GET_CODE (equiv_const) == CONST_INT
                      || (GET_CODE (equiv_const) == CONST_DOUBLE
                          && GET_MODE (equiv_const) == VOIDmode))))
            return NO_REGS;
          break;

        case 's':
          if ((CONSTANT_P (op) && GET_CODE (op) != CONST_INT
               && (GET_CODE (op) != CONST_DOUBLE || GET_MODE (op) != VOIDmode))
              || (equiv_const != NULL_RTX
                  && CONSTANT_P (equiv_const)
                  && GET_CODE (equiv_const) != CONST_INT
                  && (GET_CODE (equiv_const) != CONST_DOUBLE
                      || GET_MODE (equiv_const) != VOIDmode)))
            return NO_REGS;
          break;

        case 'I':
        case 'J':
        case 'K':
        case 'L':
        case 'M':
        case 'N':
        case 'O':
        case 'P':
          if ((GET_CODE (op) == CONST_INT
               && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), c, constraints))
              || (equiv_const != NULL_RTX
                  && GET_CODE (equiv_const) == CONST_INT
                  && CONST_OK_FOR_CONSTRAINT_P (INTVAL (equiv_const),
                                                c, constraints)))
            return NO_REGS;
          break;

        case 'E':
        case 'F':
          if (GET_CODE (op) == CONST_DOUBLE
              || (GET_CODE (op) == CONST_VECTOR
                  && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT)
              || (equiv_const != NULL_RTX
                  && (GET_CODE (equiv_const) == CONST_DOUBLE
                      || (GET_CODE (equiv_const) == CONST_VECTOR
                          && (GET_MODE_CLASS (GET_MODE (equiv_const))
                              == MODE_VECTOR_FLOAT)))))
            return NO_REGS;
          break;

        case 'G':
        case 'H':
          if ((GET_CODE (op) == CONST_DOUBLE
               && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, c, constraints))
              || (equiv_const != NULL_RTX
                  && GET_CODE (equiv_const) == CONST_DOUBLE
                  && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (equiv_const,
                                                       c, constraints)))
            return NO_REGS;
          /* ??? what about memory */
        case 'r':
        case 'a': case 'b': case 'c': case 'd': case 'e': case 'f':
        case 'h': case 'j': case 'k': case 'l':
        case 'q': case 't': case 'u':
        case 'v': case 'w': case 'x': case 'y': case 'z':
        case 'A': case 'B': case 'C': case 'D':
        case 'Q': case 'R': case 'S': case 'T': case 'U':
        case 'W': case 'Y': case 'Z':
          next_cl = (c == 'r'
                     ? GENERAL_REGS
                     : REG_CLASS_FROM_CONSTRAINT (c, constraints));
          if ((cl != NO_REGS && next_cl != cl)
              || available_class_regs [next_cl] > 1)
            return NO_REGS;
          cl = next_cl;
          break;
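
        /* A digit is a matching constraint: this operand must match
           operand C - '0', so check whether that operand's constraints
           also allow only a single hard register.  */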
        case '0': case '1': case '2': case '3': case '4':
        case '5': case '6': case '7': case '8': case '9':
          next_cl
            = single_reg_class (recog_data.constraints [c - '0'],
                                recog_data.operand [c - '0'], NULL_RTX);
          if ((cl != NO_REGS && next_cl != cl) || next_cl == NO_REGS
              || available_class_regs [next_cl] > 1)
            return NO_REGS;
          cl = next_cl;
          break;

        default:
          return NO_REGS;
        }
  return cl;
}

/* The function checks that operand OP_NUM of the current insn can
   use only one hard register.  If so, it returns the class of that
   hard register.  Otherwise it returns NO_REGS.  */
static enum reg_class
single_reg_operand_class (int op_num)
{
  if (op_num < 0 || recog_data.n_alternatives == 0)
    return NO_REGS;
  return single_reg_class (recog_data.constraints [op_num],
                           recog_data.operand [op_num], NULL_RTX);
}

/* The function processes input (if IN_P) or output operands of an
   insn with frequency FREQ to find an allocno that can use only one
   hard register, and makes the other currently living allocnos
   conflict with that hard register.  */
static void
process_single_reg_class_operands (int in_p, int freq)
{
  int i, regno, px, cost;
  enum reg_class cl, cover_class;
  rtx operand;
  allocno_t operand_a, a;

  for (i = 0; i < recog_data.n_operands; i++)
    {
      operand = recog_data.operand [i];
      if (in_p && recog_data.operand_type [i] != OP_IN
          && recog_data.operand_type [i] != OP_INOUT)
        continue;
      if (! in_p && recog_data.operand_type [i] != OP_OUT
          && recog_data.operand_type [i] != OP_INOUT)
        continue;
      cl = single_reg_operand_class (i);
      if (cl == NO_REGS)
        continue;

      operand_a = NULL;

      if (GET_CODE (operand) == SUBREG)
        operand = SUBREG_REG (operand);

      if (REG_P (operand)
          && (regno = REGNO (operand)) >= FIRST_PSEUDO_REGISTER)
        {
          enum machine_mode mode;
          enum reg_class cover_class;

          operand_a = ira_curr_regno_allocno_map [regno];
          mode = ALLOCNO_MODE (operand_a);
          cover_class = ALLOCNO_COVER_CLASS (operand_a);
          if (class_subset_p [cl] [cover_class]
              && class_hard_regs_num [cl] != 0
              && class_hard_reg_index [cover_class] [class_hard_regs
                                                     [cl] [0]] >= 0
              && (reg_class_size [cl]
                  <= (unsigned) CLASS_MAX_NREGS (cl, mode)))
            {
              /* ??? FREQ */
              cost = freq * (in_p
                             ? register_move_cost [mode] [cover_class] [cl]
                             : register_move_cost [mode] [cl] [cover_class]);
              allocate_and_set_costs
                (&ALLOCNO_CONFLICT_HARD_REG_COSTS (operand_a),
                 class_hard_regs_num [cover_class], 0);
              ALLOCNO_CONFLICT_HARD_REG_COSTS (operand_a)
                [class_hard_reg_index [cover_class] [class_hard_regs [cl] [0]]]
                -= cost;
            }
        }

      EXECUTE_IF_SET_IN_ALLOCNO_SET (allocnos_live, px,
        {
          a = allocnos [px];
          cover_class = ALLOCNO_COVER_CLASS (a);
          if (a != operand_a)
            {
              /* We could increase costs of A instead of making it
                 conflict with the hard register.  But it works worse
                 because it will be spilled in reload anyway.  */
              IOR_HARD_REG_SET (ALLOCNO_CONFLICT_HARD_REGS (a),
                                reg_class_contents [cl]);
              IOR_HARD_REG_SET (ALLOCNO_TOTAL_CONFLICT_HARD_REGS (a),
                                reg_class_contents [cl]);
            }
        });
    }
}

/* The function processes insns of the basic block given by its
   LOOP_TREE_NODE to update the allocno conflict table.  */
static void
process_bb_node_lives (loop_tree_node_t loop_tree_node)
{
  int i, index;
  unsigned int j;
  basic_block bb;
  rtx insn;
  edge e;
  edge_iterator ei;
  bitmap_iterator bi;
  bitmap reg_live_in;
  int px = 0;

  bb = loop_tree_node->bb;
  if (bb != NULL)
    {
      for (i = 0; i < reg_class_cover_size; i++)
        curr_reg_pressure [reg_class_cover [i]] = 0;
      curr_bb_node = loop_tree_node;
      reg_live_in = DF_LR_IN (bb);
      memset (allocnos_live, 0, allocno_set_words * sizeof (INT_TYPE));
      REG_SET_TO_HARD_REG_SET (hard_regs_live, reg_live_in);
      AND_COMPL_HARD_REG_SET (hard_regs_live, eliminable_regset);
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
        if (TEST_HARD_REG_BIT (hard_regs_live, i))
          {
            enum reg_class cover_class;

            cover_class = class_translate [REGNO_REG_CLASS (i)];
            curr_reg_pressure [cover_class]++;
            if (curr_bb_node->reg_pressure [cover_class]
                < curr_reg_pressure [cover_class])
              curr_bb_node->reg_pressure [cover_class]
                = curr_reg_pressure [cover_class];
          }
      bitmap_clear (allocnos_live_bitmap);
      EXECUTE_IF_SET_IN_BITMAP (reg_live_in, FIRST_PSEUDO_REGISTER, j, bi)
        {
          allocno_t a = ira_curr_regno_allocno_map [j];

          if (a == NULL)
            continue;
          ira_assert (! bitmap_bit_p (allocnos_live_bitmap, ALLOCNO_NUM (a)));
          set_allocno_live (a);
          make_regno_born (j);
        }

#ifdef EH_RETURN_DATA_REGNO
      if (bb_has_eh_pred (bb))
        {
          for (j = 0; ; ++j)
            {
              unsigned int regno = EH_RETURN_DATA_REGNO (j);

              if (regno == INVALID_REGNUM)
                break;
              make_regno_born_and_died (regno);
            }
        }
#endif

      /* Reg allocnos can't go in stack regs at the start of a basic
         block that is reached by an abnormal edge.  Likewise for call
         clobbered regs, because caller-save, fixup_abnormal_edges and
         possibly the table driven EH machinery are not quite ready to
         handle such reg allocnos live across such edges.  */
      FOR_EACH_EDGE (e, ei, bb->preds)
        if (e->flags & EDGE_ABNORMAL)
          break;

      if (e != NULL)
        {
#ifdef STACK_REGS
          EXECUTE_IF_SET_IN_ALLOCNO_SET (allocnos_live, px,
            {
              ALLOCNO_NO_STACK_REG_P (allocnos [px]) = TRUE;
              ALLOCNO_TOTAL_NO_STACK_REG_P (allocnos [px]) = TRUE;
            });
          for (px = FIRST_STACK_REG; px <= LAST_STACK_REG; px++)
            make_regno_born_and_died (px);
#endif
          /* No need to record conflicts for call clobbered regs if we
             have nonlocal labels around, as we don't ever try to
             allocate such regs in this case.  */
          if (! current_function_has_nonlocal_label)
            for (px = 0; px < FIRST_PSEUDO_REGISTER; px++)
              if (call_used_regs [px])
                make_regno_born_and_died (px);
        }

      /* Scan the code of this basic block, noting which allocnos and
         hard regs are born or die.  When one is born, record a
         conflict with all others currently live.  */
      FOR_BB_INSNS (bb, insn)
        {
          rtx link;
          int freq;

          if (! INSN_P (insn))
            continue;

          freq = REG_FREQ_FROM_BB (BLOCK_FOR_INSN (insn));
          if (freq == 0)
            freq = 1;

          if (internal_flag_ira_verbose > 2 && ira_dump_file != NULL)
            fprintf (ira_dump_file, " Insn %u(l%d): point = %d\n",
                     INSN_UID (insn), loop_tree_node->father->loop->num,
                     curr_point);

          /* Check regs_set is an empty set.  */
          gcc_assert (VEC_empty (rtx, regs_set));

          /* Mark any allocnos clobbered by INSN as live, so they
             conflict with the inputs.  */
          note_stores (PATTERN (insn), mark_reg_clobber, NULL);

          extract_insn (insn);
          process_single_reg_class_operands (TRUE, freq);

          /* Mark any allocnos dead after INSN as dead now.  */
          for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
            if (REG_NOTE_KIND (link) == REG_DEAD)
              mark_reg_death (XEXP (link, 0));

          curr_point++;
          if (CALL_P (insn))
            {
              HARD_REG_SET clobbered_regs;

              get_call_invalidated_used_regs (insn, &clobbered_regs, FALSE);
              IOR_HARD_REG_SET (cfun->emit->call_used_regs, clobbered_regs);
              EXECUTE_IF_SET_IN_ALLOCNO_SET (allocnos_live, i,
                {
                  allocno_t a = allocnos [i];

                  ALLOCNO_CALL_FREQ (a) += freq;
                  index = add_regno_call (ALLOCNO_REGNO (a), insn);
                  if (ALLOCNO_CALLS_CROSSED_START (a) < 0)
                    ALLOCNO_CALLS_CROSSED_START (a) = index;
                  ALLOCNO_CALLS_CROSSED_NUM (a)++;
                  /* Don't allocate allocnos that cross calls, if this
                     function receives a nonlocal goto.  */
                  if (current_function_has_nonlocal_label)
                    {
                      SET_HARD_REG_SET (ALLOCNO_CONFLICT_HARD_REGS (a));
                      SET_HARD_REG_SET (ALLOCNO_TOTAL_CONFLICT_HARD_REGS (a));
                    }
                });
            }

          /* Mark any allocnos set in INSN as live, and mark them as
             conflicting with all other live reg allocnos.  Clobbers are
             processed again, so they conflict with the reg allocnos that
             are set.  */
          note_stores (PATTERN (insn), mark_reg_store, NULL);

#ifdef AUTO_INC_DEC
          for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
            if (REG_NOTE_KIND (link) == REG_INC)
              mark_reg_store (XEXP (link, 0), NULL_RTX, NULL);
#endif

          /* If INSN has multiple outputs, then any allocno that dies
             here and is used inside of an output must conflict with
             the other outputs.

             It is unsafe to use !single_set here since it will ignore
             an unused output.  Just because an output is unused does
             not mean the compiler can assume the side effect will not
             occur.  Consider if ALLOCNO appears in the address of an
             output and we reload the output.  If we allocate ALLOCNO
             to the same hard register as an unused output we could
             set the hard register before the output reload insn.  */
          if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
            for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
              if (REG_NOTE_KIND (link) == REG_DEAD)
                {
                  int i;
                  int used_in_output = 0;
                  rtx reg = XEXP (link, 0);

                  for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
                    {
                      rtx set = XVECEXP (PATTERN (insn), 0, i);

                      if (GET_CODE (set) == SET
                          && ! REG_P (SET_DEST (set))
                          && ! rtx_equal_p (reg, SET_DEST (set))
                          && reg_overlap_mentioned_p (reg, SET_DEST (set)))
                        used_in_output = 1;
                    }
                  if (used_in_output)
                    mark_reg_conflicts (reg);
                }

          process_single_reg_class_operands (FALSE, freq);

          /* Mark any allocnos set in INSN and then never used.  */
          while (! VEC_empty (rtx, regs_set))
            {
              rtx reg = VEC_pop (rtx, regs_set);
              rtx note = find_regno_note (insn, REG_UNUSED, REGNO (reg));

              if (note)
                mark_reg_death (XEXP (note, 0));
            }
          curr_point++;
        }
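      /* Finish the live ranges of the allocnos that are still live at
         the end of the basic block.  */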
      EXECUTE_IF_SET_IN_ALLOCNO_SET (allocnos_live, i,
        {
          make_regno_dead (ALLOCNO_REGNO (allocnos [i]));
        });
      curr_point++;
    }
  /* Propagate register pressure: */
  if (loop_tree_node != ira_loop_tree_root)
    for (i = 0; i < reg_class_cover_size; i++)
      {
        enum reg_class cover_class;

        cover_class = reg_class_cover [i];
        if (loop_tree_node->reg_pressure [cover_class]
            > loop_tree_node->father->reg_pressure [cover_class])
          loop_tree_node->father->reg_pressure [cover_class]
            = loop_tree_node->reg_pressure [cover_class];
      }
}
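
/* The function builds START_POINT_RANGES and FINISH_POINT_RANGES:
   for each program point, a chain of the allocno live ranges that
   start and finish at that point.  */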
static void
create_start_finish_chains (void)
{
  int i;
  allocno_t a;
  allocno_live_range_t r;

  start_point_ranges
    = ira_allocate (max_point * sizeof (allocno_live_range_t));
  memset (start_point_ranges, 0, max_point * sizeof (allocno_live_range_t));
  finish_point_ranges
    = ira_allocate (max_point * sizeof (allocno_live_range_t));
  memset (finish_point_ranges, 0, max_point * sizeof (allocno_live_range_t));
  for (i = 0; i < allocnos_num; i++)
    {
      a = allocnos [i];
      for (r = ALLOCNO_LIVE_RANGES (a); r != NULL; r = r->next)
        {
          r->start_next = start_point_ranges [r->start];
          start_point_ranges [r->start] = r;
          r->finish_next = finish_point_ranges [r->finish];
          finish_point_ranges [r->finish] = r;
        }
    }
}
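
/* The function frees the old chains and rebuilds them from the
   current allocno live ranges.  */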
void
rebuild_start_finish_chains (void)
{
  ira_free (finish_point_ranges);
  ira_free (start_point_ranges);
  create_start_finish_chains ();
}
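
/* The function prints live range list R to file F.  */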
void
print_live_range_list (FILE *f, allocno_live_range_t r)
{
  for (; r != NULL; r = r->next)
    fprintf (f, " [%d..%d]", r->start, r->finish);
  fprintf (f, "\n");
}

void
debug_live_range_list (allocno_live_range_t r)
{
  print_live_range_list (stderr, r);
}
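
/* The function prints the live ranges of allocno A to file F.  */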
static void
print_allocno_live_ranges (FILE *f, allocno_t a)
{
  fprintf (f, " a%d(r%d):", ALLOCNO_NUM (a), ALLOCNO_REGNO (a));
  print_live_range_list (f, ALLOCNO_LIVE_RANGES (a));
}

void
debug_allocno_live_ranges (allocno_t a)
{
  print_allocno_live_ranges (stderr, a);
}
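
/* The function prints the live ranges of all allocnos to file F.  */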
static void
print_live_ranges (FILE *f)
{
  int i;

  for (i = 0; i < allocnos_num; i++)
    print_allocno_live_ranges (f, allocnos [i]);
}

void
debug_live_ranges (void)
{
  print_live_ranges (stderr);
}

/* The function creates live ranges and sets up CONFLICT_HARD_REGS
   and TOTAL_CONFLICT_HARD_REGS for the allocnos.  It also modifies
   hard reg costs for allocnos whose hard register is actually fixed
   in an insn.  */
void
create_allocno_live_ranges (void)
{
  allocno_set_words = (allocnos_num + INT_BITS - 1) / INT_BITS;
  allocnos_live = ira_allocate (sizeof (INT_TYPE) * allocno_set_words);
  allocnos_live_bitmap = ira_allocate_bitmap ();
  /* Make a vector that mark_reg_{store,clobber} will store in.  */
  if (!regs_set)
    regs_set = VEC_alloc (rtx, heap, 10);
  curr_point = 0;
  traverse_loop_tree (FALSE, ira_loop_tree_root, NULL, process_bb_node_lives);
  max_point = curr_point;
  create_start_finish_chains ();
  if (internal_flag_ira_verbose > 2 && ira_dump_file != NULL)
    print_live_ranges (ira_dump_file);
  /* Clean up.  */
  ira_free_bitmap (allocnos_live_bitmap);
  ira_free (allocnos_live);
}
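
/* The function frees the range chains built by
   create_start_finish_chains.  */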
void
finish_allocno_live_ranges (void)
{
  ira_free (finish_point_ranges);
  ira_free (start_point_ranges);
}