/* IRA processing allocno lives.
   Copyright (C) 2006, 2007
   Free Software Foundation, Inc.
   Contributed by Vladimir Makarov <vmakarov@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "regs.h"
#include "rtl.h"
#include "tm_p.h"
#include "target.h"
#include "flags.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-config.h"
#include "recog.h"
#include "toplev.h"
#include "params.h"
#include "df.h"
#include "ira-int.h"
/* This file contains code analogous to that in global.c, but it
   works on the allocno basis.  */

static void set_allocno_live (allocno_t);
static void clear_allocno_live (allocno_t);
static void mark_reg_store (rtx, const_rtx, void *);
static void mark_reg_clobber (rtx, const_rtx, void *);
static void mark_reg_conflicts (rtx);
static void mark_reg_death (rtx);
static enum reg_class single_reg_class (const char *, rtx op, rtx);
static enum reg_class single_reg_operand_class (int);
static void process_single_reg_class_operands (int);
static void process_bb_node_lives (loop_tree_node_t);
/* Program points are enumerated by numbers in the range
   0..MAX_POINT-1.  */
int max_point;

/* Arrays of size MAX_POINT mapping a program point to the allocno
   live ranges with given start/finish point.  */
allocno_live_range_t *start_point_ranges, *finish_point_ranges;

/* Number of the current program point.  */
static int curr_point;

/* Number of ints required to hold allocnos_num bits.  */
int allocno_set_words;

/* Set, clear or test bit number I in `allocnos_live',
   a bit vector indexed by allocno number.  */
#define SET_ALLOCNO_LIVE(I) SET_ALLOCNO_SET_BIT (allocnos_live, I)
#define CLEAR_ALLOCNO_LIVE(I) CLEAR_ALLOCNO_SET_BIT (allocnos_live, I)
#define TEST_ALLOCNO_LIVE(I) TEST_ALLOCNO_SET_BIT (allocnos_live, I)

/* Bit mask for allocnos live at current point in the scan.  */
static INT_TYPE *allocnos_live;

/* The same set as a bitmap.  */
static bitmap allocnos_live_bitmap;

/* Set of hard regs (except eliminable ones) currently live (during
   scan of all insns).  */
static HARD_REG_SET hard_regs_live;

/* Loop tree node corresponding to the current basic block.  */
static loop_tree_node_t curr_bb_node;
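
/* Overview of the scan: process_bb_node_lives walks each basic block
   of the loop tree and advances CURR_POINT twice per insn (once after
   the insn's uses and deaths have been processed, once after its
   definitions), plus once more at the end of the block.  The births
   and deaths recorded at these points form the allocno live ranges,
   while ALLOCNOS_LIVE and HARD_REGS_LIVE accumulate the conflict
   information.  */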
/* The function processes the birth of register REGNO.  It updates
   living hard regs and conflict hard regs for living allocnos, or
   starts a new live range for the allocno corresponding to REGNO.  */
static void
make_regno_born (int regno)
{
  int i;
  allocno_t a;
  allocno_live_range_t p;

  if (regno < FIRST_PSEUDO_REGISTER)
    {
      SET_HARD_REG_BIT (hard_regs_live, regno);
      EXECUTE_IF_SET_IN_ALLOCNO_SET (allocnos_live, i,
        {
          SET_HARD_REG_BIT (ALLOCNO_CONFLICT_HARD_REGS (allocnos [i]), regno);
          SET_HARD_REG_BIT (ALLOCNO_TOTAL_CONFLICT_HARD_REGS (allocnos [i]),
                            regno);
        });
      return;
    }
  a = ira_curr_regno_allocno_map [regno];
  if (a == NULL)
    return;
  if ((p = ALLOCNO_LIVE_RANGES (a)) == NULL
      || (p->finish != curr_point && p->finish + 1 != curr_point))
    ALLOCNO_LIVE_RANGES (a)
      = create_allocno_live_range (a, curr_point, -1, ALLOCNO_LIVE_RANGES (a));
}
/* The function processes the death of register REGNO.  It updates
   live hard regs or finishes the current live range for the allocno
   corresponding to REGNO.  */
static void
make_regno_dead (int regno)
{
  allocno_t a;
  allocno_live_range_t p;

  if (regno < FIRST_PSEUDO_REGISTER)
    {
      CLEAR_HARD_REG_BIT (hard_regs_live, regno);
      return;
    }
  a = ira_curr_regno_allocno_map [regno];
  if (a == NULL)
    return;
  p = ALLOCNO_LIVE_RANGES (a);
  ira_assert (p != NULL);
  p->finish = curr_point;
}
/* The function processes the birth and, immediately after it, the
   death of register REGNO.  */
static void
make_regno_born_and_died (int regno)
{
  make_regno_born (regno);
  make_regno_dead (regno);
}

/* The current register pressure (by cover class) for the current
   basic block.  */
static int curr_reg_pressure [N_REG_CLASSES];
/* The function marks allocno A as currently living.  */
static void
set_allocno_live (allocno_t a)
{
  enum reg_class cover_class;

  if (TEST_ALLOCNO_LIVE (ALLOCNO_NUM (a)))
    return;
  SET_ALLOCNO_LIVE (ALLOCNO_NUM (a));
  IOR_HARD_REG_SET (ALLOCNO_CONFLICT_HARD_REGS (a), hard_regs_live);
  IOR_HARD_REG_SET (ALLOCNO_TOTAL_CONFLICT_HARD_REGS (a), hard_regs_live);
  bitmap_set_bit (allocnos_live_bitmap, ALLOCNO_NUM (a));
  cover_class = ALLOCNO_COVER_CLASS (a);
  curr_reg_pressure [cover_class]
    += reg_class_nregs [cover_class] [ALLOCNO_MODE (a)];
  if (curr_bb_node->reg_pressure [cover_class]
      < curr_reg_pressure [cover_class])
    curr_bb_node->reg_pressure [cover_class] = curr_reg_pressure [cover_class];
}
/* The function marks allocno A as currently not living.  */
static void
clear_allocno_live (allocno_t a)
{
  enum reg_class cover_class;

  if (bitmap_bit_p (allocnos_live_bitmap, ALLOCNO_NUM (a)))
    {
      cover_class = ALLOCNO_COVER_CLASS (a);
      curr_reg_pressure [cover_class]
        -= reg_class_nregs [cover_class] [ALLOCNO_MODE (a)];
      ira_assert (curr_reg_pressure [cover_class] >= 0);
    }
  CLEAR_ALLOCNO_LIVE (ALLOCNO_NUM (a));
  bitmap_clear_bit (allocnos_live_bitmap, ALLOCNO_NUM (a));
}
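
/* The two functions above also maintain CURR_REG_PRESSURE: for each
   cover class it counts the hard registers (reg_class_nregs) needed
   by the currently living allocnos, and the maximum seen in a basic
   block is stored in that block's reg_pressure field.  */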
/* Record all regs that are set in any one insn.  Communication from
   mark_reg_{store,clobber}.  */
static VEC(rtx, heap) *regs_set;

/* Handle the case where REG is set by the insn being scanned, during
   the scan to accumulate conflicts.  Store a 1 in hard_regs_live or
   allocnos_live for this register or the corresponding allocno,
   record how many consecutive hardware registers it actually needs,
   and record a conflict with all other reg allocnos already live.

   Note that even if REG does not remain alive after this insn, we
   must mark it here as live, to ensure a conflict between REG and any
   other reg allocnos set in this insn that really do live.  This is
   because those other allocnos could be considered after this.

   REG might actually be something other than a register; if so, we do
   nothing.

   SETTER is 0 if this register was modified by an auto-increment
   (i.e., a REG_INC note was found for it).  */
static void
mark_reg_store (rtx reg, const_rtx setter ATTRIBUTE_UNUSED,
                void *data ATTRIBUTE_UNUSED)
{
  int regno;

  if (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);

  if (! REG_P (reg))
    return;

  VEC_safe_push (rtx, heap, regs_set, reg);

  regno = REGNO (reg);

  if (regno >= FIRST_PSEUDO_REGISTER)
    {
      allocno_t a = ira_curr_regno_allocno_map [regno];

      ira_assert (a != NULL || REG_N_REFS (regno) == 0);
      if (a != NULL)
        {
          if (bitmap_bit_p (allocnos_live_bitmap, ALLOCNO_NUM (a)))
            return;
          set_allocno_live (a);
        }
      make_regno_born (regno);
    }
  else if (! TEST_HARD_REG_BIT (no_alloc_regs, regno))
    {
      int last = regno + hard_regno_nregs [regno] [GET_MODE (reg)];
      enum reg_class cover_class;

      while (regno < last)
        {
          if (! TEST_HARD_REG_BIT (hard_regs_live, regno)
              && ! TEST_HARD_REG_BIT (eliminable_regset, regno))
            {
              cover_class = class_translate [REGNO_REG_CLASS (regno)];
              curr_reg_pressure [cover_class]++;
              make_regno_born (regno);
              if (curr_bb_node->reg_pressure [cover_class]
                  < curr_reg_pressure [cover_class])
                curr_bb_node->reg_pressure [cover_class]
                  = curr_reg_pressure [cover_class];
            }
          regno++;
        }
    }
}
/* Like mark_reg_store except notice just CLOBBERs; ignore SETs.  */
static void
mark_reg_clobber (rtx reg, const_rtx setter, void *data)
{
  if (GET_CODE (setter) == CLOBBER)
    mark_reg_store (reg, setter, data);
}
/* Record that REG (or the corresponding allocno) has conflicts with
   all the allocnos currently live.  Do not mark REG (or the allocno)
   itself as live.  */
static void
mark_reg_conflicts (rtx reg)
{
  int regno;

  if (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);

  if (! REG_P (reg))
    return;

  regno = REGNO (reg);

  if (regno >= FIRST_PSEUDO_REGISTER)
    make_regno_born_and_died (regno);
  else if (! TEST_HARD_REG_BIT (no_alloc_regs, regno))
    {
      int last = regno + hard_regno_nregs [regno] [GET_MODE (reg)];

      while (regno < last)
        {
          make_regno_born_and_died (regno);
          regno++;
        }
    }
}
/* Mark REG (or the corresponding allocno) as being dead (following
   the insn being scanned now).  Store a 0 in hard_regs_live or
   allocnos_live for this register.  */
static void
mark_reg_death (rtx reg)
{
  int regno = REGNO (reg);

  if (regno >= FIRST_PSEUDO_REGISTER)
    {
      allocno_t a = ira_curr_regno_allocno_map [regno];

      ira_assert (a != NULL || REG_N_REFS (regno) == 0);
      if (a != NULL)
        {
          if (! bitmap_bit_p (allocnos_live_bitmap, ALLOCNO_NUM (a)))
            return;
          clear_allocno_live (a);
        }
      make_regno_dead (regno);
    }
  else if (! TEST_HARD_REG_BIT (no_alloc_regs, regno))
    {
      int last = regno + hard_regno_nregs [regno] [GET_MODE (reg)];
      enum reg_class cover_class;

      while (regno < last)
        {
          if (TEST_HARD_REG_BIT (hard_regs_live, regno))
            {
              cover_class = class_translate [REGNO_REG_CLASS (regno)];
              curr_reg_pressure [cover_class]--;
              ira_assert (curr_reg_pressure [cover_class] >= 0);
              make_regno_dead (regno);
            }
          regno++;
        }
    }
}
/* The function checks that CONSTRAINTS permits the use of only one
   hard register.  If so, the function returns the class of that hard
   register.  Otherwise it returns NO_REGS.

   EQUIV_CONST, if nonnull, is a constant equivalent to OP and is
   checked against the constraints as well.  */
static enum reg_class
single_reg_class (const char *constraints, rtx op, rtx equiv_const)
{
  int ignore_p;
  enum reg_class cl, next_cl;
  int c;

  cl = NO_REGS;
  for (ignore_p = FALSE;
       (c = *constraints);
       constraints += CONSTRAINT_LEN (c, constraints))
    if (c == '#')
      ignore_p = TRUE;
    else if (c == ',')
      ignore_p = FALSE;
    else if (! ignore_p)
      switch (c)
        {
        case ' ':
        case '\t':
        case '=':
        case '+':
        case '*':
        case '&':
        case '%':
        case '!':
        case '?':
          break;
        case 'i':
          if (CONSTANT_P (op)
              || (equiv_const != NULL_RTX && CONSTANT_P (equiv_const)))
            return NO_REGS;
          break;

        case 'n':
          if (GET_CODE (op) == CONST_INT
              || (GET_CODE (op) == CONST_DOUBLE && GET_MODE (op) == VOIDmode)
              || (equiv_const != NULL_RTX
                  && (GET_CODE (equiv_const) == CONST_INT
                      || (GET_CODE (equiv_const) == CONST_DOUBLE
                          && GET_MODE (equiv_const) == VOIDmode))))
            return NO_REGS;
          break;

        case 's':
          if ((CONSTANT_P (op) && GET_CODE (op) != CONST_INT
               && (GET_CODE (op) != CONST_DOUBLE || GET_MODE (op) != VOIDmode))
              || (equiv_const != NULL_RTX
                  && CONSTANT_P (equiv_const)
                  && GET_CODE (equiv_const) != CONST_INT
                  && (GET_CODE (equiv_const) != CONST_DOUBLE
                      || GET_MODE (equiv_const) != VOIDmode)))
            return NO_REGS;
          break;

        case 'I':
        case 'J':
        case 'K':
        case 'L':
        case 'M':
        case 'N':
        case 'O':
        case 'P':
          if ((GET_CODE (op) == CONST_INT
               && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), c, constraints))
              || (equiv_const != NULL_RTX
                  && GET_CODE (equiv_const) == CONST_INT
                  && CONST_OK_FOR_CONSTRAINT_P (INTVAL (equiv_const),
                                                c, constraints)))
            return NO_REGS;
          break;

        case 'E':
        case 'F':
          if (GET_CODE (op) == CONST_DOUBLE
              || (GET_CODE (op) == CONST_VECTOR
                  && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT)
              || (equiv_const != NULL_RTX
                  && (GET_CODE (equiv_const) == CONST_DOUBLE
                      || (GET_CODE (equiv_const) == CONST_VECTOR
                          && (GET_MODE_CLASS (GET_MODE (equiv_const))
                              == MODE_VECTOR_FLOAT)))))
            return NO_REGS;
          break;

        case 'G':
        case 'H':
          if ((GET_CODE (op) == CONST_DOUBLE
               && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, c, constraints))
              || (equiv_const != NULL_RTX
                  && GET_CODE (equiv_const) == CONST_DOUBLE
                  && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (equiv_const,
                                                       c, constraints)))
            return NO_REGS;
          /* ??? what about memory */
        case 'r':
        case 'a': case 'b': case 'c': case 'd': case 'e': case 'f':
        case 'h': case 'j': case 'k': case 'l':
        case 'q': case 't': case 'u':
        case 'v': case 'w': case 'x': case 'y': case 'z':
        case 'A': case 'B': case 'C': case 'D':
        case 'Q': case 'R': case 'S': case 'T': case 'U':
        case 'W': case 'Y': case 'Z':
          next_cl = (c == 'r'
                     ? GENERAL_REGS
                     : REG_CLASS_FROM_CONSTRAINT (c, constraints));
          if ((cl != NO_REGS && next_cl != cl)
              || available_class_regs [next_cl] > 1)
            return NO_REGS;
          cl = next_cl;
          break;

        case '0': case '1': case '2': case '3': case '4':
        case '5': case '6': case '7': case '8': case '9':
          next_cl
            = single_reg_class (recog_data.constraints [c - '0'],
                                recog_data.operand [c - '0'], NULL_RTX);
          if ((cl != NO_REGS && next_cl != cl) || next_cl == NO_REGS
              || available_class_regs [next_cl] > 1)
            return NO_REGS;
          cl = next_cl;
          break;

        default:
          return NO_REGS;
        }
  return cl;
}
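
/* In single_reg_class above, constraint characters between '#' and
   the next ',' are ignored, constraints satisfied by a constant
   operand (or its known constant equivalent) yield NO_REGS, register
   constraints must all agree on a class with no more than one
   available hard register, and a matching digit recurses into the
   constraints of the matched operand.  */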
/* The function checks that operand OP_NUM of the current insn can use
   only one hard register.  If so, the function returns the class of
   that hard register.  Otherwise it returns NO_REGS.  */
static enum reg_class
single_reg_operand_class (int op_num)
{
  if (op_num < 0 || recog_data.n_alternatives == 0)
    return NO_REGS;
  return single_reg_class (recog_data.constraints [op_num],
                           recog_data.operand [op_num], NULL_RTX);
}
/* The function processes input (if IN_P) or output operands of the
   current insn to find allocnos which can use only one hard register,
   and makes the other currently living allocnos conflict with that
   hard register.  */
static void
process_single_reg_class_operands (int in_p)
{
  int i, regno, px, cost;
  enum reg_class cl, cover_class;
  rtx operand;
  allocno_t operand_a, a;

  for (i = 0; i < recog_data.n_operands; i++)
    {
      operand = recog_data.operand [i];
      if (in_p && recog_data.operand_type [i] != OP_IN
          && recog_data.operand_type [i] != OP_INOUT)
        continue;
      if (! in_p && recog_data.operand_type [i] != OP_OUT
          && recog_data.operand_type [i] != OP_INOUT)
        continue;
      cl = single_reg_operand_class (i);
      if (cl == NO_REGS)
        continue;

      operand_a = NULL;

      if (GET_CODE (operand) == SUBREG)
        operand = SUBREG_REG (operand);

      if (REG_P (operand)
          && (regno = REGNO (operand)) >= FIRST_PSEUDO_REGISTER)
        {
          enum machine_mode mode;
          enum reg_class cover_class;

          operand_a = ira_curr_regno_allocno_map [regno];
          mode = ALLOCNO_MODE (operand_a);
          cover_class = ALLOCNO_COVER_CLASS (operand_a);
          if (class_subset_p [cl] [cover_class]
              && (reg_class_size [cl]
                  <= (unsigned) CLASS_MAX_NREGS (cl, mode)))
            {
              cost = (in_p
                      ? register_move_cost [mode] [cover_class] [cl]
                      : register_move_cost [mode] [cl] [cover_class]);
              ALLOCNO_CONFLICT_HARD_REG_COSTS (operand_a)
                [class_hard_reg_index [cover_class] [class_hard_regs [cl] [0]]]
                -= cost;
            }
        }

      EXECUTE_IF_SET_IN_ALLOCNO_SET (allocnos_live, px,
        {
          a = allocnos [px];
          cover_class = ALLOCNO_COVER_CLASS (a);
          if (a != operand_a)
            {
              /* We could increase costs of A instead of making it
                 conflict with the hard register.  But that works worse
                 because A would be spilled in reload anyway.  */
              IOR_HARD_REG_SET (ALLOCNO_CONFLICT_HARD_REGS (a),
                                reg_class_contents [cl]);
              IOR_HARD_REG_SET (ALLOCNO_TOTAL_CONFLICT_HARD_REGS (a),
                                reg_class_contents [cl]);
            }
        });
    }
}
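
/* For an operand that must live in a single hard register, the
   function above also decreases the operand allocno's cost of the
   first hard register of CL by the register move cost, making that
   register preferable during coloring, while all other live allocnos
   are made to conflict with every register of CL so they do not
   occupy it.  */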
/* The function processes insns of the basic block given by its
   LOOP_TREE_NODE to update the allocno conflict table.  */
static void
process_bb_node_lives (loop_tree_node_t loop_tree_node)
{
  int i, index;
  unsigned int j;
  basic_block bb;
  rtx insn;
  edge e;
  edge_iterator ei;
  bitmap_iterator bi;
  bitmap reg_live_in;
  int px = 0;

  bb = loop_tree_node->bb;
  if (bb != NULL)
    {
      for (i = 0; i < reg_class_cover_size; i++)
        curr_reg_pressure [reg_class_cover [i]] = 0;
      curr_bb_node = loop_tree_node;
      reg_live_in = DF_LR_IN (bb);
      memset (allocnos_live, 0, allocno_set_words * sizeof (INT_TYPE));
      REG_SET_TO_HARD_REG_SET (hard_regs_live, reg_live_in);
      AND_COMPL_HARD_REG_SET (hard_regs_live, eliminable_regset);
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
        if (TEST_HARD_REG_BIT (hard_regs_live, i))
          {
            enum reg_class cover_class;

            cover_class = class_translate [REGNO_REG_CLASS (i)];
            curr_reg_pressure [cover_class]++;
            if (curr_bb_node->reg_pressure [cover_class]
                < curr_reg_pressure [cover_class])
              curr_bb_node->reg_pressure [cover_class]
                = curr_reg_pressure [cover_class];
          }
      bitmap_clear (allocnos_live_bitmap);
      EXECUTE_IF_SET_IN_BITMAP (reg_live_in, FIRST_PSEUDO_REGISTER, j, bi)
        {
          allocno_t a = ira_curr_regno_allocno_map [j];

          ira_assert (a != NULL || REG_N_REFS (j) == 0);
          if (a == NULL)
            continue;
          ira_assert (! bitmap_bit_p (allocnos_live_bitmap, ALLOCNO_NUM (a)));
          set_allocno_live (a);
          make_regno_born (j);
        }

#ifdef EH_RETURN_DATA_REGNO
      if (bb_has_eh_pred (bb))
        {
          for (j = 0; ; ++j)
            {
              unsigned int regno = EH_RETURN_DATA_REGNO (j);

              if (regno == INVALID_REGNUM)
                break;
              make_regno_born_and_died (regno);
            }
        }
#endif

      /* Reg allocnos can't go in stack regs at the start of a basic block
         that is reached by an abnormal edge.  Likewise for call
         clobbered regs, because caller-save, fixup_abnormal_edges and
         possibly the table driven EH machinery are not quite ready to
         handle such reg allocnos live across such edges.  */
      FOR_EACH_EDGE (e, ei, bb->preds)
        if (e->flags & EDGE_ABNORMAL)
          break;

      if (e != NULL)
        {
#ifdef STACK_REGS
          EXECUTE_IF_SET_IN_ALLOCNO_SET (allocnos_live, px,
            {
              ALLOCNO_NO_STACK_REG_P (allocnos [px]) = TRUE;
              ALLOCNO_TOTAL_NO_STACK_REG_P (allocnos [px]) = TRUE;
            });
          for (px = FIRST_STACK_REG; px <= LAST_STACK_REG; px++)
            make_regno_born_and_died (px);
#endif
          /* No need to record conflicts for call clobbered regs if we
             have nonlocal labels around, as we don't ever try to
             allocate such regs in this case.  */
          if (! current_function_has_nonlocal_label)
            for (px = 0; px < FIRST_PSEUDO_REGISTER; px++)
              if (call_used_regs [px])
                make_regno_born_and_died (px);
        }

      /* Scan the code of this basic block, noting which allocnos and
         hard regs are born or die.  When one is born, record a
         conflict with all others currently live.  */
      FOR_BB_INSNS (bb, insn)
        {
          rtx link;

          if (! INSN_P (insn))
            continue;

          if (internal_flag_ira_verbose > 2 && ira_dump_file != NULL)
            fprintf (ira_dump_file, " Insn %u(l%d): point = %d\n",
                     INSN_UID (insn), loop_tree_node->father->loop->num,
                     curr_point);

          /* Check regs_set is an empty set.  */
          gcc_assert (VEC_empty (rtx, regs_set));

          /* Mark any allocnos clobbered by INSN as live, so they
             conflict with the inputs.  */
          note_stores (PATTERN (insn), mark_reg_clobber, NULL);

          extract_insn (insn);
          process_single_reg_class_operands (TRUE);

          /* Mark any allocnos dead after INSN as dead now.  */
          for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
            if (REG_NOTE_KIND (link) == REG_DEAD)
              mark_reg_death (XEXP (link, 0));

          curr_point++;
          if (CALL_P (insn))
            {
              HARD_REG_SET clobbered_regs;

              get_call_invalidated_used_regs (insn, &clobbered_regs, FALSE);
              IOR_HARD_REG_SET (cfun->emit->call_used_regs, clobbered_regs);
              EXECUTE_IF_SET_IN_ALLOCNO_SET (allocnos_live, i,
                {
                  int freq;
                  allocno_t a = allocnos [i];

                  freq = REG_FREQ_FROM_BB (BLOCK_FOR_INSN (insn));
                  if (freq == 0)
                    freq = 1;
                  ALLOCNO_CALL_FREQ (a) += freq;
                  index = add_regno_call (ALLOCNO_REGNO (a), insn);
                  if (ALLOCNO_CALLS_CROSSED_START (a) < 0)
                    ALLOCNO_CALLS_CROSSED_START (a) = index;
                  ALLOCNO_CALLS_CROSSED_NUM (a)++;
                  /* Don't allocate allocnos that cross calls, if this
                     function receives a nonlocal goto.  */
                  if (current_function_has_nonlocal_label)
                    {
                      SET_HARD_REG_SET (ALLOCNO_CONFLICT_HARD_REGS (a));
                      SET_HARD_REG_SET (ALLOCNO_TOTAL_CONFLICT_HARD_REGS (a));
                    }
                });
            }

          /* Mark any allocnos set in INSN as live, and mark them as
             conflicting with all other live reg allocnos.  Clobbers are
             processed again, so they conflict with the reg allocnos that
             are set.  */
          note_stores (PATTERN (insn), mark_reg_store, NULL);

#ifdef AUTO_INC_DEC
          for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
            if (REG_NOTE_KIND (link) == REG_INC)
              mark_reg_store (XEXP (link, 0), NULL_RTX, NULL);
#endif

          /* If INSN has multiple outputs, then any allocno that dies
             here and is used inside of an output must conflict with
             the other outputs.

             It is unsafe to use !single_set here since it will ignore
             an unused output.  Just because an output is unused does
             not mean the compiler can assume the side effect will not
             occur.  Consider if ALLOCNO appears in the address of an
             output and we reload the output.  If we allocate ALLOCNO
             to the same hard register as an unused output we could
             set the hard register before the output reload insn.  */
          if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
            for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
              if (REG_NOTE_KIND (link) == REG_DEAD)
                {
                  int i;
                  int used_in_output = 0;
                  rtx reg = XEXP (link, 0);

                  for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
                    {
                      rtx set = XVECEXP (PATTERN (insn), 0, i);

                      if (GET_CODE (set) == SET
                          && ! REG_P (SET_DEST (set))
                          && ! rtx_equal_p (reg, SET_DEST (set))
                          && reg_overlap_mentioned_p (reg, SET_DEST (set)))
                        used_in_output = 1;
                    }
                  if (used_in_output)
                    mark_reg_conflicts (reg);
                }

          process_single_reg_class_operands (FALSE);

          /* Mark any allocnos set in INSN and then never used.  */
          while (! VEC_empty (rtx, regs_set))
            {
              rtx reg = VEC_pop (rtx, regs_set);
              rtx note = find_regno_note (insn, REG_UNUSED, REGNO (reg));

              if (note)
                mark_reg_death (XEXP (note, 0));
            }
          curr_point++;
        }
      EXECUTE_IF_SET_IN_ALLOCNO_SET (allocnos_live, i,
        {
          make_regno_dead (ALLOCNO_REGNO (allocnos [i]));
        });
      curr_point++;
    }
  /* Propagate register pressure: */
  if (loop_tree_node != ira_loop_tree_root)
    for (i = 0; i < reg_class_cover_size; i++)
      {
        enum reg_class cover_class;

        cover_class = reg_class_cover [i];
        if (loop_tree_node->reg_pressure [cover_class]
            > loop_tree_node->father->reg_pressure [cover_class])
          loop_tree_node->father->reg_pressure [cover_class]
            = loop_tree_node->reg_pressure [cover_class];
      }
}
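
/* The function builds START_POINT_RANGES and FINISH_POINT_RANGES: for
   every program point they hold the chains (through start_next and
   finish_next) of live ranges starting and finishing at that
   point.  */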
static void
create_start_finish_chains (void)
{
  int i;
  allocno_t a;
  allocno_live_range_t r;

  start_point_ranges
    = ira_allocate (max_point * sizeof (allocno_live_range_t));
  memset (start_point_ranges, 0, max_point * sizeof (allocno_live_range_t));
  finish_point_ranges
    = ira_allocate (max_point * sizeof (allocno_live_range_t));
  memset (finish_point_ranges, 0, max_point * sizeof (allocno_live_range_t));
  for (i = 0; i < allocnos_num; i++)
    {
      a = allocnos [i];
      for (r = ALLOCNO_LIVE_RANGES (a); r != NULL; r = r->next)
        {
          r->start_next = start_point_ranges [r->start];
          start_point_ranges [r->start] = r;
          r->finish_next = finish_point_ranges [r->finish];
          finish_point_ranges [r->finish] = r;
        }
    }
}
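
/* The function frees the old start/finish chains and rebuilds them
   from the current allocno live ranges.  */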
void
rebuild_start_finish_chains (void)
{
  ira_free (finish_point_ranges);
  ira_free (start_point_ranges);
  create_start_finish_chains ();
}
void
print_live_range_list (FILE *f, allocno_live_range_t r)
{
  for (; r != NULL; r = r->next)
    fprintf (f, " [%d..%d]", r->start, r->finish);
  fprintf (f, "\n");
}

void
debug_live_range_list (allocno_live_range_t r)
{
  print_live_range_list (stderr, r);
}

static void
print_allocno_live_ranges (FILE *f, allocno_t a)
{
  fprintf (f, " a%d(r%d):", ALLOCNO_NUM (a), ALLOCNO_REGNO (a));
  print_live_range_list (f, ALLOCNO_LIVE_RANGES (a));
}

void
debug_allocno_live_ranges (allocno_t a)
{
  print_allocno_live_ranges (stderr, a);
}

static void
print_live_ranges (FILE *f)
{
  int i;

  for (i = 0; i < allocnos_num; i++)
    print_allocno_live_ranges (f, allocnos [i]);
}

void
debug_live_ranges (void)
{
  print_live_ranges (stderr);
}
/* The function creates live ranges and sets up CONFLICT_HARD_REGS and
   TOTAL_CONFLICT_HARD_REGS for allocnos.  It also modifies hard reg
   costs for allocnos whose hard register is actually fixed in an
   insn.  */
void
create_allocno_live_ranges (void)
{
  allocno_set_words = (allocnos_num + INT_BITS - 1) / INT_BITS;
  allocnos_live = ira_allocate (sizeof (INT_TYPE) * allocno_set_words);
  allocnos_live_bitmap = ira_allocate_bitmap ();
  /* Make a vector that mark_reg_{store,clobber} will store in.  */
  if (!regs_set)
    regs_set = VEC_alloc (rtx, heap, 10);
  curr_point = 0;
  traverse_loop_tree (FALSE, ira_loop_tree_root, NULL, process_bb_node_lives);
  max_point = curr_point;
  create_start_finish_chains ();
  if (internal_flag_ira_verbose > 2 && ira_dump_file != NULL)
    print_live_ranges (ira_dump_file);
  /* Clean up.  */
  ira_free_bitmap (allocnos_live_bitmap);
  ira_free (allocnos_live);
}
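
/* The function frees the arrays created by
   create_start_finish_chains.  */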
void
finish_allocno_live_ranges (void)
{
  ira_free (finish_point_ranges);
  ira_free (start_point_ranges);
}