/* IRA processing allocno lives.
   Copyright (C) 2006, 2007
   Free Software Foundation, Inc.
   Contributed by Vladimir Makarov <vmakarov@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "regs.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-config.h"
#include "recog.h"
#include "toplev.h"
#include "df.h"
#include "ira-int.h"
/* This file contains code analogous to that in global.c, but here the
   code works on the allocno basis.  */
static void set_allocno_live (allocno_t);
static void clear_allocno_live (allocno_t);
static void mark_reg_store (rtx, const_rtx, void *);
static void mark_reg_clobber (rtx, const_rtx, void *);
static void mark_reg_conflicts (rtx);
static void mark_reg_death (rtx);
static enum reg_class single_reg_class (const char *, rtx op, rtx);
static enum reg_class single_reg_operand_class (int);
static void process_single_reg_class_operands (int);
static void process_bb_node_lives (loop_tree_node_t);
/* Program points are enumerated by numbers from the range
   0..MAX_POINT-1.  */

/* Arrays of size MAX_POINT mapping a program point to the allocno
   live ranges with given start/finish point.  */
allocno_live_range_t *start_point_ranges, *finish_point_ranges;
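
/* A sketch of the representation, for illustration: every allocno keeps a
   NULL-terminated chain of ranges in ALLOCNO_LIVE_RANGES, each covering the
   program points START..FINISH; the two arrays above additionally thread
   together all ranges that share a start point (via START_NEXT) and all
   ranges that share a finish point (via FINISH_NEXT).  */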
/* Number of the current program point.  */
static int curr_point;

/* Number of ints required to hold allocnos_num bits.  */
int allocno_set_words;
/* Set, clear or test bit number I in `allocnos_live',
   a bit vector indexed by allocno number.  */
#define SET_ALLOCNO_LIVE(I) SET_ALLOCNO_SET_BIT (allocnos_live, I)
#define CLEAR_ALLOCNO_LIVE(I) CLEAR_ALLOCNO_SET_BIT (allocnos_live, I)
#define TEST_ALLOCNO_LIVE(I) TEST_ALLOCNO_SET_BIT (allocnos_live, I)
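
/* For illustration, the usual pattern is

     if (! TEST_ALLOCNO_LIVE (ALLOCNO_NUM (a)))
       SET_ALLOCNO_LIVE (ALLOCNO_NUM (a));

   i.e. the argument I is always an allocno number, never a register
   number.  */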
/* Bit mask for allocnos live at the current point in the scan.  */
static INT_TYPE *allocnos_live;
/* The same as the previous one but as a bitmap.  */
static bitmap allocnos_live_bitmap;

/* Set of hard regs (except eliminable ones) currently live (during
   the scan of all insns).  */
static HARD_REG_SET hard_regs_live;

/* Loop tree node corresponding to the current basic block.  */
static loop_tree_node_t curr_bb_node;
/* The function processes the birth of register REGNO.  It updates the
   living hard regs and the conflict hard regs of living allocnos, or
   starts a new live range for the allocno corresponding to REGNO.  */
static void
make_regno_born (int regno)
{
  int i;
  allocno_t a;
  allocno_live_range_t p;

  if (regno < FIRST_PSEUDO_REGISTER)
    {
      SET_HARD_REG_BIT (hard_regs_live, regno);
      EXECUTE_IF_SET_IN_ALLOCNO_SET (allocnos_live, i,
        {
          SET_HARD_REG_BIT (ALLOCNO_CONFLICT_HARD_REGS (allocnos[i]), regno);
          SET_HARD_REG_BIT (ALLOCNO_TOTAL_CONFLICT_HARD_REGS (allocnos[i]),
                            regno);
        });
      return;
    }
  a = ira_curr_regno_allocno_map[regno];
  if (a == NULL)
    return;
  if ((p = ALLOCNO_LIVE_RANGES (a)) == NULL
      || (p->finish != curr_point && p->finish + 1 != curr_point))
    ALLOCNO_LIVE_RANGES (a)
      = create_allocno_live_range (a, curr_point, -1, ALLOCNO_LIVE_RANGES (a));
}
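
/* An interpretation of the check on P->FINISH above, for illustration: if
   the most recent range of the allocno already ends at CURR_POINT or at
   CURR_POINT - 1, no new range element is chained; the next death simply
   pushes that range's finish forward, so adjacent program points coalesce
   into a single live range.  */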
/* The function processes the death of register REGNO.  It updates the
   live hard regs or finishes the current live range of the allocno
   corresponding to REGNO.  */
static void
make_regno_dead (int regno)
{
  allocno_t a;
  allocno_live_range_t p;

  if (regno < FIRST_PSEUDO_REGISTER)
    {
      CLEAR_HARD_REG_BIT (hard_regs_live, regno);
      return;
    }
  a = ira_curr_regno_allocno_map[regno];
  if (a == NULL)
    return;
  p = ALLOCNO_LIVE_RANGES (a);
  ira_assert (p != NULL);
  p->finish = curr_point;
}
/* The function processes the birth and, immediately after it, the
   death of register REGNO.  */
static void
make_regno_born_and_died (int regno)
{
  make_regno_born (regno);
  make_regno_dead (regno);
}
/* The current register pressure (per cover class) for the current
   basic block.  */
static int curr_reg_pressure[N_REG_CLASSES];
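
/* Roughly speaking, the pressure of a cover class is the number of hard
   registers of that class needed by the currently living allocnos plus the
   living non-eliminable hard registers of that class; set_allocno_live,
   clear_allocno_live and the hard register birth/death code below keep it
   up to date, and the maximum seen in a block is recorded in
   curr_bb_node->reg_pressure.  */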
/* The function marks allocno A as currently living.  */
static void
set_allocno_live (allocno_t a)
{
  enum reg_class cover_class;

  if (TEST_ALLOCNO_LIVE (ALLOCNO_NUM (a)))
    return;
  SET_ALLOCNO_LIVE (ALLOCNO_NUM (a));
  IOR_HARD_REG_SET (ALLOCNO_CONFLICT_HARD_REGS (a), hard_regs_live);
  IOR_HARD_REG_SET (ALLOCNO_TOTAL_CONFLICT_HARD_REGS (a), hard_regs_live);
  bitmap_set_bit (allocnos_live_bitmap, ALLOCNO_NUM (a));
  cover_class = ALLOCNO_COVER_CLASS (a);
  curr_reg_pressure[cover_class]
    += reg_class_nregs[cover_class][ALLOCNO_MODE (a)];
  if (curr_bb_node->reg_pressure[cover_class]
      < curr_reg_pressure[cover_class])
    curr_bb_node->reg_pressure[cover_class] = curr_reg_pressure[cover_class];
}
/* The function marks allocno A as currently not living.  */
static void
clear_allocno_live (allocno_t a)
{
  enum reg_class cover_class;

  if (bitmap_bit_p (allocnos_live_bitmap, ALLOCNO_NUM (a)))
    {
      cover_class = ALLOCNO_COVER_CLASS (a);
      curr_reg_pressure[cover_class]
        -= reg_class_nregs[cover_class][ALLOCNO_MODE (a)];
      ira_assert (curr_reg_pressure[cover_class] >= 0);
    }
  CLEAR_ALLOCNO_LIVE (ALLOCNO_NUM (a));
  bitmap_clear_bit (allocnos_live_bitmap, ALLOCNO_NUM (a));
}
/* Record all regs that are set in any one insn.  Communication from
   mark_reg_{store,clobber}.  */
static VEC(rtx, heap) *regs_set;
/* Handle the case where REG is set by the insn being scanned, during
   the scan to accumulate conflicts.  Store a 1 in hard_regs_live or
   allocnos_live for this register or the corresponding allocno,
   record how many consecutive hardware registers it actually needs,
   and record a conflict with all other reg allocnos already live.

   Note that even if REG does not remain alive after this insn, we
   must mark it here as live, to ensure a conflict between REG and any
   other reg allocnos set in this insn that really do live.  This is
   because those other allocnos could be considered after this.

   REG might actually be something other than a register; if so, we do
   nothing.

   SETTER is 0 if this register was modified by an auto-increment
   (i.e., a REG_INC note was found for it).  */
static void
mark_reg_store (rtx reg, const_rtx setter ATTRIBUTE_UNUSED,
                void *data ATTRIBUTE_UNUSED)
{
  int regno;

  if (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);

  if (! REG_P (reg))
    return;

  VEC_safe_push (rtx, heap, regs_set, reg);

  regno = REGNO (reg);

  if (regno >= FIRST_PSEUDO_REGISTER)
    {
      allocno_t a = ira_curr_regno_allocno_map[regno];

      ira_assert (a != NULL || REG_N_REFS (regno) == 0);
      if (a == NULL)
        return;
      if (bitmap_bit_p (allocnos_live_bitmap, ALLOCNO_NUM (a)))
        return;
      set_allocno_live (a);
      make_regno_born (regno);
    }
  else if (! TEST_HARD_REG_BIT (no_alloc_regs, regno))
    {
      int last = regno + hard_regno_nregs[regno][GET_MODE (reg)];
      enum reg_class cover_class;

      /* Process each hard register occupied by REG.  */
      while (regno < last)
        {
          if (! TEST_HARD_REG_BIT (hard_regs_live, regno)
              && ! TEST_HARD_REG_BIT (eliminable_regset, regno))
            {
              cover_class = class_translate[REGNO_REG_CLASS (regno)];
              curr_reg_pressure[cover_class]++;
              make_regno_born (regno);
              if (curr_bb_node->reg_pressure[cover_class]
                  < curr_reg_pressure[cover_class])
                curr_bb_node->reg_pressure[cover_class]
                  = curr_reg_pressure[cover_class];
            }
          regno++;
        }
    }
}
/* Like mark_reg_store except notice just CLOBBERs; ignore SETs.  */
static void
mark_reg_clobber (rtx reg, const_rtx setter, void *data)
{
  if (GET_CODE (setter) == CLOBBER)
    mark_reg_store (reg, setter, data);
}
/* Record that REG (or the corresponding allocno) has conflicts with
   all the allocnos currently live.  Do not mark REG (or the allocno)
   itself as live.  */
static void
mark_reg_conflicts (rtx reg)
{
  int regno;

  if (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);

  if (! REG_P (reg))
    return;

  regno = REGNO (reg);

  if (regno >= FIRST_PSEUDO_REGISTER)
    make_regno_born_and_died (regno);
  else if (! TEST_HARD_REG_BIT (no_alloc_regs, regno))
    {
      int last = regno + hard_regno_nregs[regno][GET_MODE (reg)];

      while (regno < last)
        {
          make_regno_born_and_died (regno);
          regno++;
        }
    }
}
/* Mark REG (or the corresponding allocno) as being dead (following
   the insn being scanned now).  Store a 0 in hard_regs_live or
   allocnos_live for this register.  */
static void
mark_reg_death (rtx reg)
{
  int regno = REGNO (reg);

  if (regno >= FIRST_PSEUDO_REGISTER)
    {
      allocno_t a = ira_curr_regno_allocno_map[regno];

      ira_assert (a != NULL || REG_N_REFS (regno) == 0);
      if (a == NULL)
        return;
      if (! bitmap_bit_p (allocnos_live_bitmap, ALLOCNO_NUM (a)))
        return;
      clear_allocno_live (a);
      make_regno_dead (regno);
    }
  else if (! TEST_HARD_REG_BIT (no_alloc_regs, regno))
    {
      int last = regno + hard_regno_nregs[regno][GET_MODE (reg)];
      enum reg_class cover_class;

      while (regno < last)
        {
          if (TEST_HARD_REG_BIT (hard_regs_live, regno))
            {
              cover_class = class_translate[REGNO_REG_CLASS (regno)];
              curr_reg_pressure[cover_class]--;
              ira_assert (curr_reg_pressure[cover_class] >= 0);
              make_regno_dead (regno);
            }
          regno++;
        }
    }
}
/* The function checks that CONSTRAINTS permits to use only one hard
   register.  If it is so, the function returns the class of the hard
   register.  Otherwise it returns NO_REGS.  */
static enum reg_class
single_reg_class (const char *constraints, rtx op, rtx equiv_const)
{
  int c;
  enum reg_class cl, next_cl;
  int ignore_p;

  cl = NO_REGS;
  for (ignore_p = FALSE;
       (c = *constraints) != '\0';
       constraints += CONSTRAINT_LEN (c, constraints))
    if (c == '#')
      ignore_p = TRUE;
    else if (c == ',')
      ignore_p = FALSE;
    else if (! ignore_p)
      switch (c)
        {
        case ' ': case '\t': case '=': case '+': case '*': case '&':
        case '%': case '!': case '?':
          break;

        case 'i':
          if (CONSTANT_P (op)
              || (equiv_const != NULL_RTX && CONSTANT_P (equiv_const)))
            return NO_REGS;
          break;

        case 'n':
          if (GET_CODE (op) == CONST_INT
              || (GET_CODE (op) == CONST_DOUBLE && GET_MODE (op) == VOIDmode)
              || (equiv_const != NULL_RTX
                  && (GET_CODE (equiv_const) == CONST_INT
                      || (GET_CODE (equiv_const) == CONST_DOUBLE
                          && GET_MODE (equiv_const) == VOIDmode))))
            return NO_REGS;
          break;

        case 's':
          if ((CONSTANT_P (op) && GET_CODE (op) != CONST_INT
               && (GET_CODE (op) != CONST_DOUBLE || GET_MODE (op) != VOIDmode))
              || (equiv_const != NULL_RTX
                  && CONSTANT_P (equiv_const)
                  && GET_CODE (equiv_const) != CONST_INT
                  && (GET_CODE (equiv_const) != CONST_DOUBLE
                      || GET_MODE (equiv_const) != VOIDmode)))
            return NO_REGS;
          break;

        case 'I': case 'J': case 'K': case 'L':
        case 'M': case 'N': case 'O': case 'P':
          if ((GET_CODE (op) == CONST_INT
               && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), c, constraints))
              || (equiv_const != NULL_RTX
                  && GET_CODE (equiv_const) == CONST_INT
                  && CONST_OK_FOR_CONSTRAINT_P (INTVAL (equiv_const),
                                                c, constraints)))
            return NO_REGS;
          break;

        case 'E': case 'F':
          if (GET_CODE (op) == CONST_DOUBLE
              || (GET_CODE (op) == CONST_VECTOR
                  && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT)
              || (equiv_const != NULL_RTX
                  && (GET_CODE (equiv_const) == CONST_DOUBLE
                      || (GET_CODE (equiv_const) == CONST_VECTOR
                          && (GET_MODE_CLASS (GET_MODE (equiv_const))
                              == MODE_VECTOR_FLOAT)))))
            return NO_REGS;
          break;

        case 'G': case 'H':
          if ((GET_CODE (op) == CONST_DOUBLE
               && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, c, constraints))
              || (equiv_const != NULL_RTX
                  && GET_CODE (equiv_const) == CONST_DOUBLE
                  && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (equiv_const,
                                                       c, constraints)))
            return NO_REGS;
          break;

          /* ??? what about memory */
        case 'r':
        case 'a': case 'b': case 'c': case 'd': case 'e': case 'f':
        case 'h': case 'j': case 'k': case 'l':
        case 'q': case 't': case 'u':
        case 'v': case 'w': case 'x': case 'y': case 'z':
        case 'A': case 'B': case 'C': case 'D':
        case 'Q': case 'R': case 'S': case 'T': case 'U':
        case 'W': case 'Y': case 'Z':
          next_cl = (c == 'r'
                     ? GENERAL_REGS
                     : REG_CLASS_FROM_CONSTRAINT (c, constraints));
          if ((cl != NO_REGS && next_cl != cl)
              || available_class_regs[next_cl] > 1)
            return NO_REGS;
          cl = next_cl;
          break;

        case '0': case '1': case '2': case '3': case '4':
        case '5': case '6': case '7': case '8': case '9':
          next_cl
            = single_reg_class (recog_data.constraints[c - '0'],
                                recog_data.operand[c - '0'], NULL_RTX);
          if ((cl != NO_REGS && next_cl != cl) || next_cl == NO_REGS
              || available_class_regs[next_cl] > 1)
            return NO_REGS;
          cl = next_cl;
          break;

        default:
          return NO_REGS;
        }
  return cl;
}
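
/* A hypothetical example, for illustration only: on an x86-like target
   whose class AREG contains a single allocatable register, a constraint
   string such as "=a" would make single_reg_class return AREG, while "=r"
   would return NO_REGS because available_class_regs[GENERAL_REGS] is
   greater than one.  */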
/* The function checks that operand OP_NUM of the current insn can use
   only one hard register.  If it is so, the function returns the
   class of the hard register.  Otherwise it returns NO_REGS.  */
static enum reg_class
single_reg_operand_class (int op_num)
{
  if (op_num < 0 || recog_data.n_alternatives == 0)
    return NO_REGS;
  return single_reg_class (recog_data.constraints[op_num],
                           recog_data.operand[op_num], NULL_RTX);
}
/* The function processes input (if IN_P) or output operands of the
   current insn to find an allocno that can use only one hard register,
   and makes the other currently living allocnos conflict with that
   hard register.  */
static void
process_single_reg_class_operands (int in_p)
{
  int i, regno, px, cost;
  enum reg_class cl, cover_class;
  rtx operand;
  allocno_t operand_a, a;

  for (i = 0; i < recog_data.n_operands; i++)
    {
      operand = recog_data.operand[i];
      if (in_p && recog_data.operand_type[i] != OP_IN
          && recog_data.operand_type[i] != OP_INOUT)
        continue;
      if (! in_p && recog_data.operand_type[i] != OP_OUT
          && recog_data.operand_type[i] != OP_INOUT)
        continue;
      cl = single_reg_operand_class (i);
      if (cl == NO_REGS)
        continue;

      operand_a = NULL;

      if (GET_CODE (operand) == SUBREG)
        operand = SUBREG_REG (operand);

      if (REG_P (operand)
          && (regno = REGNO (operand)) >= FIRST_PSEUDO_REGISTER)
        {
          enum machine_mode mode;
          enum reg_class cover_class;

          operand_a = ira_curr_regno_allocno_map[regno];
          mode = ALLOCNO_MODE (operand_a);
          cover_class = ALLOCNO_COVER_CLASS (operand_a);
          if (class_subset_p[cl][cover_class]
              && (reg_class_size[cl]
                  <= (unsigned) CLASS_MAX_NREGS (cl, mode)))
            {
              cost
                = (in_p
                   ? register_move_cost[mode][cover_class][cl]
                   : register_move_cost[mode][cl][cover_class]);
              ALLOCNO_CONFLICT_HARD_REG_COSTS (operand_a)
                [class_hard_reg_index[cover_class][class_hard_regs[cl][0]]]
                -= cost;
            }
        }

      EXECUTE_IF_SET_IN_ALLOCNO_SET (allocnos_live, px,
        {
          a = allocnos[px];
          cover_class = ALLOCNO_COVER_CLASS (a);
          if (a != operand_a)
            {
              /* We could increase costs of A instead of making it
                 conflicting with the hard register.  But it works worse
                 because it will be spilled in reload anyway.  */
              IOR_HARD_REG_SET (ALLOCNO_CONFLICT_HARD_REGS (a),
                                reg_class_contents[cl]);
              IOR_HARD_REG_SET (ALLOCNO_TOTAL_CONFLICT_HARD_REGS (a),
                                reg_class_contents[cl]);
            }
        });
    }
}
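
/* In other words: when an insn operand is tied to a class with a single
   allocatable hard register, the loop above adds the whole class to the
   conflict sets of every other live allocno, keeping the register free for
   the operand rather than merely making it more expensive for the rest.  */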
/* The function processes insns of the basic block given by its
   LOOP_TREE_NODE to update the allocno live ranges and the allocno
   conflict table.  */
static void
process_bb_node_lives (loop_tree_node_t loop_tree_node)
{
  int i, freq, index, px;
  unsigned int j;
  basic_block bb;
  rtx insn, link;
  edge e;
  edge_iterator ei;
  bitmap_iterator bi;
  bitmap reg_live_in;

  bb = loop_tree_node->bb;
  for (i = 0; i < reg_class_cover_size; i++)
    curr_reg_pressure[reg_class_cover[i]] = 0;
  curr_bb_node = loop_tree_node;
  reg_live_in = DF_LR_IN (bb);
  memset (allocnos_live, 0, allocno_set_words * sizeof (INT_TYPE));
  REG_SET_TO_HARD_REG_SET (hard_regs_live, reg_live_in);
  AND_COMPL_HARD_REG_SET (hard_regs_live, eliminable_regset);
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (TEST_HARD_REG_BIT (hard_regs_live, i))
      {
        enum reg_class cover_class;

        cover_class = class_translate[REGNO_REG_CLASS (i)];
        curr_reg_pressure[cover_class]++;
        if (curr_bb_node->reg_pressure[cover_class]
            < curr_reg_pressure[cover_class])
          curr_bb_node->reg_pressure[cover_class]
            = curr_reg_pressure[cover_class];
      }
  bitmap_clear (allocnos_live_bitmap);
  EXECUTE_IF_SET_IN_BITMAP (reg_live_in, FIRST_PSEUDO_REGISTER, j, bi)
    {
      allocno_t a = ira_curr_regno_allocno_map[j];

      ira_assert (a != NULL || REG_N_REFS (j) == 0);
      if (a == NULL)
        continue;
      ira_assert (! bitmap_bit_p (allocnos_live_bitmap, ALLOCNO_NUM (a)));
      set_allocno_live (a);
      make_regno_born (j);
    }

#ifdef EH_RETURN_DATA_REGNO
  if (bb_has_eh_pred (bb))
    for (j = 0; ; ++j)
      {
        unsigned int regno = EH_RETURN_DATA_REGNO (j);

        if (regno == INVALID_REGNUM)
          break;
        make_regno_born_and_died (regno);
      }
#endif

  /* Reg allocnos can't go in stack regs at the start of a basic block
     that is reached by an abnormal edge.  Likewise for call clobbered
     regs, because caller-save, fixup_abnormal_edges and possibly the
     table driven EH machinery are not quite ready to handle such reg
     allocnos live across such edges.  */
  FOR_EACH_EDGE (e, ei, bb->preds)
    if (e->flags & EDGE_ABNORMAL)
      break;

  if (e != NULL)
    {
#ifdef STACK_REGS
      EXECUTE_IF_SET_IN_ALLOCNO_SET (allocnos_live, px,
        {
          ALLOCNO_NO_STACK_REG_P (allocnos[px]) = TRUE;
          ALLOCNO_TOTAL_NO_STACK_REG_P (allocnos[px]) = TRUE;
        });
      for (px = FIRST_STACK_REG; px <= LAST_STACK_REG; px++)
        make_regno_born_and_died (px);
#endif
      /* No need to record conflicts for call clobbered regs if we
         have nonlocal labels around, as we don't ever try to
         allocate such regs in this case.  */
      if (! current_function_has_nonlocal_label)
        for (px = 0; px < FIRST_PSEUDO_REGISTER; px++)
          if (call_used_regs[px])
            make_regno_born_and_died (px);
    }

  /* Scan the code of this basic block, noting which allocnos and
     hard regs are born or die.  When one is born, record a
     conflict with all others currently live.  */
  FOR_BB_INSNS (bb, insn)
    {
      if (! INSN_P (insn))
        continue;

      if (internal_flag_ira_verbose > 2 && ira_dump_file != NULL)
        fprintf (ira_dump_file, "   Insn %u(l%d): point = %d\n",
                 INSN_UID (insn), loop_tree_node->father->loop->num,
                 curr_point);

      /* Check regs_set is an empty set.  */
      gcc_assert (VEC_empty (rtx, regs_set));

      /* Mark any allocnos clobbered by INSN as live, so they
         conflict with the inputs.  */
      note_stores (PATTERN (insn), mark_reg_clobber, NULL);

      extract_insn (insn);
      process_single_reg_class_operands (TRUE);

      /* Mark any allocnos dead after INSN as dead now.  */
      for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
        if (REG_NOTE_KIND (link) == REG_DEAD)
          mark_reg_death (XEXP (link, 0));

      if (CALL_P (insn))
        {
          HARD_REG_SET clobbered_regs;

          get_call_invalidated_used_regs (insn, &clobbered_regs, FALSE);
          IOR_HARD_REG_SET (cfun->emit->call_used_regs, clobbered_regs);
          EXECUTE_IF_SET_IN_ALLOCNO_SET (allocnos_live, i,
            {
              allocno_t a = allocnos[i];

              freq = REG_FREQ_FROM_BB (BLOCK_FOR_INSN (insn));
              if (freq == 0)
                freq = 1;
              ALLOCNO_CALL_FREQ (a) += freq;
              index = add_regno_call (ALLOCNO_REGNO (a), insn);
              if (ALLOCNO_CALLS_CROSSED_START (a) < 0)
                ALLOCNO_CALLS_CROSSED_START (a) = index;
              ALLOCNO_CALLS_CROSSED_NUM (a)++;
              /* Don't allocate allocnos that cross calls, if this
                 function receives a nonlocal goto.  */
              if (current_function_has_nonlocal_label)
                {
                  SET_HARD_REG_SET (ALLOCNO_CONFLICT_HARD_REGS (a));
                  SET_HARD_REG_SET (ALLOCNO_TOTAL_CONFLICT_HARD_REGS (a));
                }
            });
        }

      /* Mark any allocnos set in INSN as live, and mark them as
         conflicting with all other live reg allocnos.  Clobbers are
         processed again, so they conflict with the reg allocnos that
         are set.  */
      note_stores (PATTERN (insn), mark_reg_store, NULL);

      for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
        if (REG_NOTE_KIND (link) == REG_INC)
          mark_reg_store (XEXP (link, 0), NULL_RTX, NULL);

      /* If INSN has multiple outputs, then any allocno that dies
         here and is used inside of an output must conflict with
         the other outputs.

         It is unsafe to use !single_set here since it will ignore
         an unused output.  Just because an output is unused does
         not mean the compiler can assume the side effect will not
         occur.  Consider if ALLOCNO appears in the address of an
         output and we reload the output.  If we allocate ALLOCNO
         to the same hard register as an unused output we could
         set the hard register before the output reload insn.  */
      if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
        for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
          if (REG_NOTE_KIND (link) == REG_DEAD)
            {
              int used_in_output = 0;
              rtx reg = XEXP (link, 0);

              for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
                {
                  rtx set = XVECEXP (PATTERN (insn), 0, i);

                  if (GET_CODE (set) == SET
                      && ! REG_P (SET_DEST (set))
                      && ! rtx_equal_p (reg, SET_DEST (set))
                      && reg_overlap_mentioned_p (reg, SET_DEST (set)))
                    used_in_output = 1;
                }
              if (used_in_output)
                mark_reg_conflicts (reg);
            }

      process_single_reg_class_operands (FALSE);

      /* Mark any allocnos set in INSN and then never used.  */
      while (! VEC_empty (rtx, regs_set))
        {
          rtx reg = VEC_pop (rtx, regs_set);
          rtx note = find_regno_note (insn, REG_UNUSED, REGNO (reg));

          if (note != NULL_RTX)
            mark_reg_death (XEXP (note, 0));
        }

      curr_point++;
    }

  /* Finish the live ranges of everything still live at the end of
     the basic block.  */
  EXECUTE_IF_SET_IN_ALLOCNO_SET (allocnos_live, i,
    {
      make_regno_dead (ALLOCNO_REGNO (allocnos[i]));
    });
  curr_point++;

  /* Propagate register pressure: */
  if (loop_tree_node != ira_loop_tree_root)
    for (i = 0; i < reg_class_cover_size; i++)
      {
        enum reg_class cover_class;

        cover_class = reg_class_cover[i];
        if (loop_tree_node->reg_pressure[cover_class]
            > loop_tree_node->father->reg_pressure[cover_class])
          loop_tree_node->father->reg_pressure[cover_class]
            = loop_tree_node->reg_pressure[cover_class];
      }
}
/* The function builds the chains of live ranges that share a start
   point (START_POINT_RANGES) and a finish point (FINISH_POINT_RANGES).  */
static void
create_start_finish_chains (void)
{
  int i;
  allocno_t a;
  allocno_live_range_t r;

  start_point_ranges
    = ira_allocate (max_point * sizeof (allocno_live_range_t));
  memset (start_point_ranges, 0, max_point * sizeof (allocno_live_range_t));
  finish_point_ranges
    = ira_allocate (max_point * sizeof (allocno_live_range_t));
  memset (finish_point_ranges, 0, max_point * sizeof (allocno_live_range_t));
  for (i = 0; i < allocnos_num; i++)
    {
      a = allocnos[i];
      for (r = ALLOCNO_LIVE_RANGES (a); r != NULL; r = r->next)
        {
          r->start_next = start_point_ranges[r->start];
          start_point_ranges[r->start] = r;
          r->finish_next = finish_point_ranges[r->finish];
          finish_point_ranges[r->finish] = r;
        }
    }
}
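
/* For illustration, a consumer of these chains can sweep the program
   points in order and visit, at each point, exactly the ranges that start
   or finish there, e.g.

     allocno_live_range_t r;

     for (r = start_point_ranges[point]; r != NULL; r = r->start_next)
       ...;

   (a sketch only).  */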
/* The function frees and rebuilds the start/finish point chains.  */
void
rebuild_start_finish_chains (void)
{
  ira_free (finish_point_ranges);
  ira_free (start_point_ranges);
  create_start_finish_chains ();
}
/* The function prints live range list R to file F.  */
void
print_live_range_list (FILE *f, allocno_live_range_t r)
{
  for (; r != NULL; r = r->next)
    fprintf (f, " [%d..%d]", r->start, r->finish);
  fprintf (f, "\n");
}

/* The function prints live range list R to stderr.  */
void
debug_live_range_list (allocno_live_range_t r)
{
  print_live_range_list (stderr, r);
}

/* The function prints the live ranges of allocno A to file F.  */
void
print_allocno_live_ranges (FILE *f, allocno_t a)
{
  fprintf (f, " a%d(r%d):", ALLOCNO_NUM (a), ALLOCNO_REGNO (a));
  print_live_range_list (f, ALLOCNO_LIVE_RANGES (a));
}

/* The function prints the live ranges of allocno A to stderr.  */
void
debug_allocno_live_ranges (allocno_t a)
{
  print_allocno_live_ranges (stderr, a);
}

/* The function prints the live ranges of all allocnos to file F.  */
void
print_live_ranges (FILE *f)
{
  int i;

  for (i = 0; i < allocnos_num; i++)
    print_allocno_live_ranges (f, allocnos[i]);
}

/* The function prints the live ranges of all allocnos to stderr.  */
void
debug_live_ranges (void)
{
  print_live_ranges (stderr);
}
/* The function creates live ranges and sets up CONFLICT_HARD_REGS and
   TOTAL_CONFLICT_HARD_REGS for allocnos.  It also modifies hard reg
   costs for allocnos whose hard register is actually fixed in an
   insn.  */
void
create_allocno_live_ranges (void)
{
  allocno_set_words = (allocnos_num + INT_BITS - 1) / INT_BITS;
  allocnos_live = ira_allocate (sizeof (INT_TYPE) * allocno_set_words);
  allocnos_live_bitmap = ira_allocate_bitmap ();
  /* Make a vector that mark_reg_{store,clobber} will store in.  */
  regs_set = VEC_alloc (rtx, heap, 10);
  curr_point = 0;
  traverse_loop_tree (FALSE, ira_loop_tree_root, NULL, process_bb_node_lives);
  max_point = curr_point;
  create_start_finish_chains ();
  if (internal_flag_ira_verbose > 2 && ira_dump_file != NULL)
    print_live_ranges (ira_dump_file);
  VEC_free (rtx, heap, regs_set);
  ira_free_bitmap (allocnos_live_bitmap);
  ira_free (allocnos_live);
}
/* The function frees the arrays mapping program points to the live
   ranges starting or finishing at them.  */
void
finish_allocno_live_ranges (void)
{
  ira_free (finish_point_ranges);
  ira_free (start_point_ranges);
}