/* IRA processing allocno lives to build allocno live ranges.
   Copyright (C) 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.
   Contributed by Vladimir Makarov <vmakarov@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "regs.h"
#include "rtl.h"
#include "tm_p.h"
#include "target.h"
#include "flags.h"
#include "except.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-config.h"
#include "recog.h"
#include "diagnostic-core.h"
#include "params.h"
#include "df.h"
#include "sbitmap.h"
#include "sparseset.h"
#include "ira-int.h"
/* The code in this file is similar to that in global.c, but it works
   on the allocno basis and creates live ranges instead of
   pseudo-register conflicts.  */
/* Program points are enumerated by numbers from range
   0..IRA_MAX_POINT-1.  There are approximately two times more program
   points than insns.  Program points are places in the program where
   liveness info can be changed.  In the most general case (there are
   more complicated cases too), some program points correspond to
   places where an input operand dies and others correspond to places
   where output operands are born.  */
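
/* Illustrative note: the backward scan in process_bb_node_lives below
   advances the current program point twice for every insn it
   processes, once after the insn's output operands have been handled
   and once after its input operands, which is why there are roughly
   two program points per insn.  */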
/* Arrays of size IRA_MAX_POINT mapping a program point to the allocno
   live ranges with given start/finish point.  */
live_range_t *ira_start_point_ranges, *ira_finish_point_ranges;
/* Number of the current program point.  */
static int curr_point;
/* Point where register pressure excess started or -1 if there is no
   register pressure excess.  Excess pressure for a register class at
   some point means that there are more allocnos of the given register
   class living at the point than the number of hard registers of the
   class available for allocation.  It is defined only for register
   pressure classes.  */
static int high_pressure_start_point[N_REG_CLASSES];
/* Objects live at current point in the scan.  */
static sparseset objects_live;
/* A temporary bitmap used in functions that wish to avoid visiting an
   allocno multiple times.  */
static sparseset allocnos_processed;
/* Set of hard regs (except eliminable ones) currently live.  */
static HARD_REG_SET hard_regs_live;
/* The loop tree node corresponding to the current basic block.  */
static ira_loop_tree_node_t curr_bb_node;
/* The number of the last processed call.  */
static int last_call_num;
/* The number of the last call at which a given allocno was saved.  */
static int *allocno_saved_at_call;
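
/* Explanatory note: allocno_saved_at_call implements the caller-save.c
   mimicry used in process_bb_node_lives below.  If an allocno is live
   across two consecutive calls in a basic block and is not referenced
   between them, only the first call is charged to ALLOCNO_CALL_FREQ,
   since caller-save would not need to save the corresponding hard
   register again for the second call.  */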
/* Record the birth of hard register REGNO, updating hard_regs_live and
   hard reg conflict information for living allocnos.  */
static void
make_hard_regno_born (int regno)
{
  unsigned int i;

  SET_HARD_REG_BIT (hard_regs_live, regno);
  EXECUTE_IF_SET_IN_SPARSESET (objects_live, i)
    {
      ira_object_t obj = ira_object_id_map[i];

      SET_HARD_REG_BIT (OBJECT_CONFLICT_HARD_REGS (obj), regno);
      SET_HARD_REG_BIT (OBJECT_TOTAL_CONFLICT_HARD_REGS (obj), regno);
    }
}
/* Process the death of hard register REGNO.  This updates
   hard_regs_live.  */
static void
make_hard_regno_dead (int regno)
{
  CLEAR_HARD_REG_BIT (hard_regs_live, regno);
}
/* Record the birth of object OBJ.  Set a bit for it in objects_live,
   start a new live range for it if necessary and update hard register
   conflicts.  */
static void
make_object_born (ira_object_t obj)
{
  live_range_t lr = OBJECT_LIVE_RANGES (obj);

  sparseset_set_bit (objects_live, OBJECT_CONFLICT_ID (obj));
  IOR_HARD_REG_SET (OBJECT_CONFLICT_HARD_REGS (obj), hard_regs_live);
  IOR_HARD_REG_SET (OBJECT_TOTAL_CONFLICT_HARD_REGS (obj), hard_regs_live);

  if (lr == NULL
      || (lr->finish != curr_point && lr->finish + 1 != curr_point))
    ira_add_live_range_to_object (obj, curr_point, -1);
}
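
/* Note on the test in make_object_born above: a new live range is
   started only when the object's most recent range ends neither at the
   current point nor at the point just before it; otherwise the object
   is considered continuously live and its existing range will simply
   be extended when the object next dies.  */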
/* Update ALLOCNO_EXCESS_PRESSURE_POINTS_NUM for the allocno
   associated with object OBJ.  */
static void
update_allocno_pressure_excess_length (ira_object_t obj)
{
  ira_allocno_t a = OBJECT_ALLOCNO (obj);
  int start, i;
  enum reg_class aclass, pclass, cl;
  live_range_t p;

  aclass = ALLOCNO_CLASS (a);
  pclass = ira_pressure_class_translate[aclass];
  for (i = 0;
       (cl = ira_reg_class_super_classes[pclass][i]) != LIM_REG_CLASSES;
       i++)
    {
      if (! ira_reg_pressure_class_p[cl])
        continue;
      if (high_pressure_start_point[cl] < 0)
        continue;
      p = OBJECT_LIVE_RANGES (obj);
      ira_assert (p != NULL);
      start = (high_pressure_start_point[cl] > p->start
               ? high_pressure_start_point[cl] : p->start);
      ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (a) += curr_point - start + 1;
    }
}
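
/* Worked example for update_allocno_pressure_excess_length: if the
   pressure for class CL first exceeded the number of available hard
   registers at point 10, the object's current range started at point 8
   and the object dies at point 14, then start = max (10, 8) = 10 and
   ALLOCNO_EXCESS_PRESSURE_POINTS_NUM grows by 14 - 10 + 1 = 5 points.  */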
/* Process the death of object OBJ.  This finishes the current live
   range for it and updates the pressure excess length of the
   corresponding allocno.  */
static void
make_object_dead (ira_object_t obj)
{
  live_range_t lr;

  sparseset_clear_bit (objects_live, OBJECT_CONFLICT_ID (obj));
  lr = OBJECT_LIVE_RANGES (obj);
  ira_assert (lr != NULL);
  lr->finish = curr_point;
  update_allocno_pressure_excess_length (obj);
}
/* The current register pressures for each pressure class for the
   current basic block.  */
static int curr_reg_pressure[N_REG_CLASSES];
/* Record that register pressure for PCLASS increased by N registers.
   Update the current register pressure, the maximal register pressure
   for the current BB and the start point of the register pressure
   excess.  */
static void
inc_register_pressure (enum reg_class pclass, int n)
{
  int i;
  enum reg_class cl;

  for (i = 0;
       (cl = ira_reg_class_super_classes[pclass][i]) != LIM_REG_CLASSES;
       i++)
    {
      if (! ira_reg_pressure_class_p[cl])
        continue;
      curr_reg_pressure[cl] += n;
      if (high_pressure_start_point[cl] < 0
          && (curr_reg_pressure[cl] > ira_class_hard_regs_num[cl]))
        high_pressure_start_point[cl] = curr_point;
      if (curr_bb_node->reg_pressure[cl] < curr_reg_pressure[cl])
        curr_bb_node->reg_pressure[cl] = curr_reg_pressure[cl];
    }
}
/* Record that register pressure for PCLASS has decreased by NREGS
   registers; update the current register pressure, the start point of
   the register pressure excess, and the register pressure excess
   length for living allocnos.  */
static void
dec_register_pressure (enum reg_class pclass, int nregs)
{
  int i;
  unsigned int j;
  enum reg_class cl;
  bool set_p = false;

  for (i = 0;
       (cl = ira_reg_class_super_classes[pclass][i]) != LIM_REG_CLASSES;
       i++)
    {
      if (! ira_reg_pressure_class_p[cl])
        continue;
      curr_reg_pressure[cl] -= nregs;
      ira_assert (curr_reg_pressure[cl] >= 0);
      if (high_pressure_start_point[cl] >= 0
          && curr_reg_pressure[cl] <= ira_class_hard_regs_num[cl])
        set_p = true;
    }
  if (set_p)
    {
      /* The pressure for some class dropped back to or below the
         number of available hard registers: close the excess region.  */
      EXECUTE_IF_SET_IN_SPARSESET (objects_live, j)
        update_allocno_pressure_excess_length (ira_object_id_map[j]);
      for (i = 0;
           (cl = ira_reg_class_super_classes[pclass][i]) != LIM_REG_CLASSES;
           i++)
        {
          if (! ira_reg_pressure_class_p[cl])
            continue;
          if (high_pressure_start_point[cl] >= 0
              && curr_reg_pressure[cl] <= ira_class_hard_regs_num[cl])
            high_pressure_start_point[cl] = -1;
        }
    }
}
/* Determine from the objects_live bitmap whether REGNO is currently
   live, and occupies only one object.  Return false if we have no
   information.  */
static bool
pseudo_regno_single_word_and_live_p (int regno)
{
  ira_allocno_t a = ira_curr_regno_allocno_map[regno];
  ira_object_t obj;

  if (a == NULL)
    return false;
  if (ALLOCNO_NUM_OBJECTS (a) > 1)
    return false;

  obj = ALLOCNO_OBJECT (a, 0);

  return sparseset_bit_p (objects_live, OBJECT_CONFLICT_ID (obj));
}
/* Mark the pseudo register REGNO as live.  Update all information
   about live ranges and register pressure.  */
static void
mark_pseudo_regno_live (int regno)
{
  ira_allocno_t a = ira_curr_regno_allocno_map[regno];
  enum reg_class pclass;
  int i, n, nregs;

  if (a == NULL)
    return;

  /* Invalidate because it is referenced.  */
  allocno_saved_at_call[ALLOCNO_NUM (a)] = 0;

  n = ALLOCNO_NUM_OBJECTS (a);
  pclass = ira_pressure_class_translate[ALLOCNO_CLASS (a)];
  nregs = ira_reg_class_max_nregs[ALLOCNO_CLASS (a)][ALLOCNO_MODE (a)];
  if (n > 1)
    {
      /* We track every subobject separately.  */
      gcc_assert (nregs == n);
      nregs = 1;
    }

  for (i = 0; i < n; i++)
    {
      ira_object_t obj = ALLOCNO_OBJECT (a, i);

      if (sparseset_bit_p (objects_live, OBJECT_CONFLICT_ID (obj)))
        continue;

      inc_register_pressure (pclass, nregs);
      make_object_born (obj);
    }
}
/* Like mark_pseudo_regno_live, but try to only mark one subword of
   the pseudo as live.  SUBWORD indicates which; a value of 0
   indicates the low part.  */
static void
mark_pseudo_regno_subword_live (int regno, int subword)
{
  ira_allocno_t a = ira_curr_regno_allocno_map[regno];
  int n;
  enum reg_class pclass;
  ira_object_t obj;

  if (a == NULL)
    return;

  /* Invalidate because it is referenced.  */
  allocno_saved_at_call[ALLOCNO_NUM (a)] = 0;

  n = ALLOCNO_NUM_OBJECTS (a);
  if (n == 1)
    {
      mark_pseudo_regno_live (regno);
      return;
    }

  pclass = ira_pressure_class_translate[ALLOCNO_CLASS (a)];
  gcc_assert
    (n == ira_reg_class_max_nregs[ALLOCNO_CLASS (a)][ALLOCNO_MODE (a)]);
  obj = ALLOCNO_OBJECT (a, subword);

  if (sparseset_bit_p (objects_live, OBJECT_CONFLICT_ID (obj)))
    return;

  inc_register_pressure (pclass, 1);
  make_object_born (obj);
}
/* Mark the register REG as live.  Store a 1 in hard_regs_live for
   this register, record how many consecutive hardware registers it
   actually needs.  */
static void
mark_hard_reg_live (rtx reg)
{
  int regno = REGNO (reg);

  if (! TEST_HARD_REG_BIT (ira_no_alloc_regs, regno))
    {
      int last = regno + hard_regno_nregs[regno][GET_MODE (reg)];
      enum reg_class aclass, pclass;

      while (regno < last)
        {
          if (! TEST_HARD_REG_BIT (hard_regs_live, regno)
              && ! TEST_HARD_REG_BIT (eliminable_regset, regno))
            {
              aclass = ira_hard_regno_allocno_class[regno];
              pclass = ira_pressure_class_translate[aclass];
              inc_register_pressure (pclass, 1);
              make_hard_regno_born (regno);
            }
          regno++;
        }
    }
}
/* Mark a pseudo, or one of its subwords, as live.  REGNO is the
   pseudo's register number; ORIG_REG is the access in the insn, which
   may be a subreg.  */
static void
mark_pseudo_reg_live (rtx orig_reg, unsigned regno)
{
  if (df_read_modify_subreg_p (orig_reg))
    mark_pseudo_regno_subword_live (regno,
                                    subreg_lowpart_p (orig_reg) ? 0 : 1);
  else
    mark_pseudo_regno_live (regno);
}
/* Mark the register referenced by use or def REF as live.  */
static void
mark_ref_live (df_ref ref)
{
  rtx reg = DF_REF_REG (ref);
  rtx orig_reg = reg;

  if (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);

  if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
    mark_pseudo_reg_live (orig_reg, REGNO (reg));
  else
    mark_hard_reg_live (reg);
}
/* Mark the pseudo register REGNO as dead.  Update all information
   about live ranges and register pressure.  */
static void
mark_pseudo_regno_dead (int regno)
{
  ira_allocno_t a = ira_curr_regno_allocno_map[regno];
  enum reg_class cl;
  int i, n, nregs;

  if (a == NULL)
    return;

  /* Invalidate because it is referenced.  */
  allocno_saved_at_call[ALLOCNO_NUM (a)] = 0;

  n = ALLOCNO_NUM_OBJECTS (a);
  cl = ira_pressure_class_translate[ALLOCNO_CLASS (a)];
  nregs = ira_reg_class_max_nregs[ALLOCNO_CLASS (a)][ALLOCNO_MODE (a)];
  if (n > 1)
    {
      /* We track every subobject separately.  */
      gcc_assert (nregs == n);
      nregs = 1;
    }

  for (i = 0; i < n; i++)
    {
      ira_object_t obj = ALLOCNO_OBJECT (a, i);

      if (!sparseset_bit_p (objects_live, OBJECT_CONFLICT_ID (obj)))
        continue;

      dec_register_pressure (cl, nregs);
      make_object_dead (obj);
    }
}
/* Like mark_pseudo_regno_dead, but called when we know that only part
   of the register dies.  SUBWORD indicates which; a value of 0
   indicates the low part.  */
static void
mark_pseudo_regno_subword_dead (int regno, int subword)
{
  ira_allocno_t a = ira_curr_regno_allocno_map[regno];
  int n;
  enum reg_class cl;
  ira_object_t obj;

  if (a == NULL)
    return;

  /* Invalidate because it is referenced.  */
  allocno_saved_at_call[ALLOCNO_NUM (a)] = 0;

  n = ALLOCNO_NUM_OBJECTS (a);
  if (n == 1)
    /* The allocno as a whole doesn't die in this case.  */
    return;

  cl = ira_pressure_class_translate[ALLOCNO_CLASS (a)];
  gcc_assert
    (n == ira_reg_class_max_nregs[ALLOCNO_CLASS (a)][ALLOCNO_MODE (a)]);

  obj = ALLOCNO_OBJECT (a, subword);
  if (!sparseset_bit_p (objects_live, OBJECT_CONFLICT_ID (obj)))
    return;

  dec_register_pressure (cl, 1);
  make_object_dead (obj);
}
/* Mark the hard register REG as dead.  Store a 0 in hard_regs_live
   for the register.  */
static void
mark_hard_reg_dead (rtx reg)
{
  int regno = REGNO (reg);

  if (! TEST_HARD_REG_BIT (ira_no_alloc_regs, regno))
    {
      int last = regno + hard_regno_nregs[regno][GET_MODE (reg)];
      enum reg_class aclass, pclass;

      while (regno < last)
        {
          if (TEST_HARD_REG_BIT (hard_regs_live, regno))
            {
              aclass = ira_hard_regno_allocno_class[regno];
              pclass = ira_pressure_class_translate[aclass];
              dec_register_pressure (pclass, 1);
              make_hard_regno_dead (regno);
            }
          regno++;
        }
    }
}
/* Mark a pseudo, or one of its subwords, as dead.  REGNO is the
   pseudo's register number; ORIG_REG is the access in the insn, which
   may be a subreg.  */
static void
mark_pseudo_reg_dead (rtx orig_reg, unsigned regno)
{
  if (df_read_modify_subreg_p (orig_reg))
    mark_pseudo_regno_subword_dead (regno,
                                    subreg_lowpart_p (orig_reg) ? 0 : 1);
  else
    mark_pseudo_regno_dead (regno);
}
/* Mark the register referenced by definition DEF as dead, if the
   definition is a total one.  */
static void
mark_ref_dead (df_ref def)
{
  rtx reg = DF_REF_REG (def);
  rtx orig_reg = reg;

  if (DF_REF_FLAGS_IS_SET (def, DF_REF_CONDITIONAL))
    return;

  if (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);

  if (DF_REF_FLAGS_IS_SET (def, DF_REF_PARTIAL)
      && (GET_CODE (orig_reg) != SUBREG
          || REGNO (reg) < FIRST_PSEUDO_REGISTER
          || !df_read_modify_subreg_p (orig_reg)))
    return;

  if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
    mark_pseudo_reg_dead (orig_reg, REGNO (reg));
  else
    mark_hard_reg_dead (reg);
}
/* If REG is a pseudo or a subreg of it, and the class of its allocno
   intersects CL, make a conflict with pseudo DREG.  ORIG_DREG is the
   rtx actually accessed; it may be identical to DREG or a subreg of
   it.  Advance the current program point before making the conflict
   if ADVANCE_P.  Return TRUE if we will need to advance the current
   program point.  */
static bool
make_pseudo_conflict (rtx reg, enum reg_class cl, rtx dreg, rtx orig_dreg,
                      bool advance_p)
{
  rtx orig_reg = reg;
  ira_allocno_t a;

  if (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);

  if (! REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
    return advance_p;

  a = ira_curr_regno_allocno_map[REGNO (reg)];
  if (! reg_classes_intersect_p (cl, ALLOCNO_CLASS (a)))
    return advance_p;

  if (advance_p)
    curr_point++;

  mark_pseudo_reg_live (orig_reg, REGNO (reg));
  mark_pseudo_reg_live (orig_dreg, REGNO (dreg));
  mark_pseudo_reg_dead (orig_reg, REGNO (reg));
  mark_pseudo_reg_dead (orig_dreg, REGNO (dreg));

  return true;
}
/* Check for and, if necessary, make conflicts for pseudo DREG of
   class DEF_CL of the current insn with input operand USE of class
   USE_CL.  ORIG_DREG is the rtx actually accessed; it may be
   identical to DREG or a subreg of it.  Advance the current program
   point before making the conflict if ADVANCE_P.  Return TRUE if we
   will need to advance the current program point.  */
static bool
check_and_make_def_use_conflict (rtx dreg, rtx orig_dreg,
                                 enum reg_class def_cl, int use,
                                 enum reg_class use_cl, bool advance_p)
{
  if (! reg_classes_intersect_p (def_cl, use_cl))
    return advance_p;

  advance_p = make_pseudo_conflict (recog_data.operand[use],
                                    use_cl, dreg, orig_dreg, advance_p);

  /* Reload may end up swapping commutative operands, so you
     have to take both orderings into account.  The
     constraints for the two operands can be completely
     different.  (Indeed, if the constraints for the two
     operands are the same for all alternatives, there's no
     point marking them as commutative.)  */
  if (use < recog_data.n_operands - 1
      && recog_data.constraints[use][0] == '%')
    advance_p
      = make_pseudo_conflict (recog_data.operand[use + 1],
                              use_cl, dreg, orig_dreg, advance_p);
  if (use >= 1
      && recog_data.constraints[use - 1][0] == '%')
    advance_p
      = make_pseudo_conflict (recog_data.operand[use - 1],
                              use_cl, dreg, orig_dreg, advance_p);
  return advance_p;
}
/* Check for and, if necessary, make conflicts for definition DEF of
   class DEF_CL of the current insn with input operands.  Process only
   constraints of alternative ALT.  */
static void
check_and_make_def_conflict (int alt, int def, enum reg_class def_cl)
{
  int use, use_match;
  ira_allocno_t a;
  enum reg_class use_cl, acl;
  bool advance_p;
  rtx dreg = recog_data.operand[def];
  rtx orig_dreg = dreg;

  if (def_cl == NO_REGS)
    return;

  if (GET_CODE (dreg) == SUBREG)
    dreg = SUBREG_REG (dreg);

  if (! REG_P (dreg) || REGNO (dreg) < FIRST_PSEUDO_REGISTER)
    return;

  a = ira_curr_regno_allocno_map[REGNO (dreg)];
  acl = ALLOCNO_CLASS (a);
  if (! reg_classes_intersect_p (acl, def_cl))
    return;

  advance_p = true;

  for (use = 0; use < recog_data.n_operands; use++)
    {
      int alt1;

      if (use == def || recog_data.operand_type[use] == OP_OUT)
        continue;

      if (recog_op_alt[use][alt].anything_ok)
        use_cl = ALL_REGS;
      else
        use_cl = recog_op_alt[use][alt].cl;

      /* If there's any alternative that allows USE to match DEF, do not
         record a conflict.  If that causes us to create an invalid
         instruction due to the earlyclobber, reload must fix it up.  */
      for (alt1 = 0; alt1 < recog_data.n_alternatives; alt1++)
        if (recog_op_alt[use][alt1].matches == def
            || (use < recog_data.n_operands - 1
                && recog_data.constraints[use][0] == '%'
                && recog_op_alt[use + 1][alt1].matches == def)
            || (use >= 1
                && recog_data.constraints[use - 1][0] == '%'
                && recog_op_alt[use - 1][alt1].matches == def))
          break;
      if (alt1 < recog_data.n_alternatives)
        continue;

      advance_p = check_and_make_def_use_conflict (dreg, orig_dreg, def_cl,
                                                   use, use_cl, advance_p);

      if ((use_match = recog_op_alt[use][alt].matches) >= 0)
        {
          if (use_match == def)
            continue;

          if (recog_op_alt[use_match][alt].anything_ok)
            use_cl = ALL_REGS;
          else
            use_cl = recog_op_alt[use_match][alt].cl;
          advance_p = check_and_make_def_use_conflict (dreg, orig_dreg, def_cl,
                                                       use, use_cl, advance_p);
        }
    }
}
/* Make conflicts of early clobber pseudo registers of the current
   insn with its inputs.  Avoid introducing unnecessary conflicts by
   checking classes of the constraints and pseudos because otherwise
   significant code degradation is possible for some targets.  */
static void
make_early_clobber_and_input_conflicts (void)
{
  int alt;
  int def, def_match;
  enum reg_class def_cl;

  for (alt = 0; alt < recog_data.n_alternatives; alt++)
    for (def = 0; def < recog_data.n_operands; def++)
      {
        def_cl = NO_REGS;
        if (recog_op_alt[def][alt].earlyclobber)
          {
            if (recog_op_alt[def][alt].anything_ok)
              def_cl = ALL_REGS;
            else
              def_cl = recog_op_alt[def][alt].cl;
            check_and_make_def_conflict (alt, def, def_cl);
          }
        if ((def_match = recog_op_alt[def][alt].matches) >= 0
            && (recog_op_alt[def_match][alt].earlyclobber
                || recog_op_alt[def][alt].earlyclobber))
          {
            if (recog_op_alt[def_match][alt].anything_ok)
              def_cl = ALL_REGS;
            else
              def_cl = recog_op_alt[def_match][alt].cl;
            check_and_make_def_conflict (alt, def, def_cl);
          }
      }
}
/* Mark early clobber hard registers of the current INSN as live (if
   LIVE_P) or dead.  Return true if there are such registers.  */
static bool
mark_hard_reg_early_clobbers (rtx insn, bool live_p)
{
  df_ref *def_rec;
  bool set_p = false;

  for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
    if (DF_REF_FLAGS_IS_SET (*def_rec, DF_REF_MUST_CLOBBER))
      {
        rtx dreg = DF_REF_REG (*def_rec);

        if (GET_CODE (dreg) == SUBREG)
          dreg = SUBREG_REG (dreg);
        if (! REG_P (dreg) || REGNO (dreg) >= FIRST_PSEUDO_REGISTER)
          continue;

        /* Hard register clobbers are believed to be early clobber
           because there is no way to say that non-operand hard
           register clobbers are not early ones.  */
        if (live_p)
          mark_ref_live (*def_rec);
        else
          mark_ref_dead (*def_rec);
        set_p = true;
      }

  return set_p;
}
/* Check whether CONSTRAINTS permits the use of only one hard
   register.  If so, return the class of that hard register.
   Otherwise return NO_REGS.  OP is the operand the constraints apply
   to and EQUIV_CONST is its equivalent constant, if any.  */
static enum reg_class
single_reg_class (const char *constraints, rtx op, rtx equiv_const)
{
  int curr_alt, c;
  bool ignore_p;
  enum reg_class cl, next_cl;

  cl = NO_REGS;
  for (ignore_p = false, curr_alt = 0;
       (c = *constraints);
       constraints += CONSTRAINT_LEN (c, constraints))
    if (c == '#' || !recog_data.alternative_enabled_p[curr_alt])
      ignore_p = true;
    else if (c == ',')
      {
        curr_alt++;
        ignore_p = false;
      }
    else if (! ignore_p)
      switch (c)
        {
        case ' ': case '\t': case '=': case '+': case '*': case '&':
        case '%': case '!': case '?':
          break;

        case 'i':
          if (CONSTANT_P (op)
              || (equiv_const != NULL_RTX && CONSTANT_P (equiv_const)))
            return NO_REGS;
          break;

        case 'n':
          if (CONST_INT_P (op)
              || CONST_DOUBLE_AS_INT_P (op)
              || (equiv_const != NULL_RTX
                  && (CONST_INT_P (equiv_const)
                      || CONST_DOUBLE_AS_INT_P (equiv_const))))
            return NO_REGS;
          break;

        case 's':
          if ((CONSTANT_P (op)
               && !CONST_INT_P (op)
               && !CONST_DOUBLE_AS_INT_P (op))
              || (equiv_const != NULL_RTX
                  && CONSTANT_P (equiv_const)
                  && !CONST_INT_P (equiv_const)
                  && !CONST_DOUBLE_AS_INT_P (equiv_const)))
            return NO_REGS;
          break;

        case 'I': case 'J': case 'K': case 'L':
        case 'M': case 'N': case 'O': case 'P':
          if ((CONST_INT_P (op)
               && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), c, constraints))
              || (equiv_const != NULL_RTX
                  && CONST_INT_P (equiv_const)
                  && CONST_OK_FOR_CONSTRAINT_P (INTVAL (equiv_const),
                                                c, constraints)))
            return NO_REGS;
          break;

        case 'E': case 'F':
          if (CONST_DOUBLE_AS_FLOAT_P (op)
              || (GET_CODE (op) == CONST_VECTOR
                  && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT)
              || (equiv_const != NULL_RTX
                  && (CONST_DOUBLE_AS_FLOAT_P (equiv_const)
                      || (GET_CODE (equiv_const) == CONST_VECTOR
                          && (GET_MODE_CLASS (GET_MODE (equiv_const))
                              == MODE_VECTOR_FLOAT)))))
            return NO_REGS;
          break;

        case 'G': case 'H':
          if ((CONST_DOUBLE_AS_FLOAT_P (op)
               && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, c, constraints))
              || (equiv_const != NULL_RTX
                  && CONST_DOUBLE_AS_FLOAT_P (equiv_const)
                  && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (equiv_const,
                                                       c, constraints)))
            return NO_REGS;
          /* ??? what about memory */
        case 'r':
        case 'a': case 'b': case 'c': case 'd': case 'e': case 'f':
        case 'h': case 'j': case 'k': case 'l':
        case 'q': case 't': case 'u':
        case 'v': case 'w': case 'x': case 'y': case 'z':
        case 'A': case 'B': case 'C': case 'D':
        case 'Q': case 'R': case 'S': case 'T': case 'U':
        case 'W': case 'Y': case 'Z':
          next_cl = (c == 'r'
                     ? GENERAL_REGS
                     : REG_CLASS_FROM_CONSTRAINT (c, constraints));
          if ((cl != NO_REGS && next_cl != cl)
              || (ira_class_hard_regs_num[next_cl]
                  > ira_reg_class_max_nregs[next_cl][GET_MODE (op)]))
            return NO_REGS;
          cl = next_cl;
          break;

        case '0': case '1': case '2': case '3': case '4':
        case '5': case '6': case '7': case '8': case '9':
          next_cl
            = single_reg_class (recog_data.constraints[c - '0'],
                                recog_data.operand[c - '0'], NULL_RTX);
          if ((cl != NO_REGS && next_cl != cl)
              || next_cl == NO_REGS
              || (ira_class_hard_regs_num[next_cl]
                  > ira_reg_class_max_nregs[next_cl][GET_MODE (op)]))
            return NO_REGS;
          cl = next_cl;
          break;

        default:
          return NO_REGS;
        }
  return cl;
}
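
/* Illustrative, target-specific example for single_reg_class above: on
   x86 the constraint letter "c" maps to CREG, a class containing only
   the CX register, so an operand constrained to "c" makes the function
   return CREG; process_single_reg_class_operands below then makes
   every other live object conflict with that single hard register.  */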
/* Check whether operand OP_NUM of the current insn can use only one
   hard register.  If so, return the class of that hard register.
   Otherwise return NO_REGS.  */
static enum reg_class
single_reg_operand_class (int op_num)
{
  if (op_num < 0 || recog_data.n_alternatives == 0)
    return NO_REGS;
  return single_reg_class (recog_data.constraints[op_num],
                           recog_data.operand[op_num], NULL_RTX);
}
/* Set up hard register set *SET to the hard registers which might be
   used by insn reloads because the constraints are too strict.  */
void
ira_implicitly_set_insn_hard_regs (HARD_REG_SET *set)
{
  int i, curr_alt, c, regno = 0;
  bool ignore_p;
  enum reg_class cl;
  rtx op;
  enum machine_mode mode;

  CLEAR_HARD_REG_SET (*set);
  for (i = 0; i < recog_data.n_operands; i++)
    {
      op = recog_data.operand[i];

      if (GET_CODE (op) == SUBREG)
        op = SUBREG_REG (op);

      if (GET_CODE (op) == SCRATCH
          || (REG_P (op) && (regno = REGNO (op)) >= FIRST_PSEUDO_REGISTER))
        {
          const char *p = recog_data.constraints[i];

          mode = (GET_CODE (op) == SCRATCH
                  ? GET_MODE (op) : PSEUDO_REGNO_MODE (regno));
          cl = NO_REGS;
          for (ignore_p = false, curr_alt = 0;
               (c = *p);
               p += CONSTRAINT_LEN (c, p))
            if (c == '#' || !recog_data.alternative_enabled_p[curr_alt])
              ignore_p = true;
            else if (c == ',')
              {
                curr_alt++;
                ignore_p = false;
              }
            else if (! ignore_p)
              switch (c)
                {
                case 'r':
                case 'a': case 'b': case 'c': case 'd': case 'e': case 'f':
                case 'h': case 'j': case 'k': case 'l':
                case 'q': case 't': case 'u':
                case 'v': case 'w': case 'x': case 'y': case 'z':
                case 'A': case 'B': case 'C': case 'D':
                case 'Q': case 'R': case 'S': case 'T': case 'U':
                case 'W': case 'Y': case 'Z':
                  cl = (c == 'r'
                        ? GENERAL_REGS
                        : REG_CLASS_FROM_CONSTRAINT (c, p));
                  if (cl != NO_REGS
                      /* There is no register pressure problem if all of the
                         regs in this class are fixed.  */
                      && ira_class_hard_regs_num[cl] != 0
                      && (ira_class_hard_regs_num[cl]
                          <= ira_reg_class_max_nregs[cl][mode]))
                    IOR_HARD_REG_SET (*set, reg_class_contents[cl]);
                  break;
                }
        }
    }
}
/* Process the input operands (if IN_P) or the output operands
   (otherwise) of the current insn with frequency FREQ: find operands
   whose allocno can use only one hard register and make all other
   currently living allocnos conflict with that hard register.  */
static void
process_single_reg_class_operands (bool in_p, int freq)
{
  int i, regno;
  unsigned int px;
  enum reg_class cl;
  rtx operand;
  ira_allocno_t operand_a, a;

  for (i = 0; i < recog_data.n_operands; i++)
    {
      operand = recog_data.operand[i];
      if (in_p && recog_data.operand_type[i] != OP_IN
          && recog_data.operand_type[i] != OP_INOUT)
        continue;
      if (! in_p && recog_data.operand_type[i] != OP_OUT
          && recog_data.operand_type[i] != OP_INOUT)
        continue;
      cl = single_reg_operand_class (i);
      if (cl == NO_REGS)
        continue;

      operand_a = NULL;

      if (GET_CODE (operand) == SUBREG)
        operand = SUBREG_REG (operand);

      if (REG_P (operand)
          && (regno = REGNO (operand)) >= FIRST_PSEUDO_REGISTER)
        {
          enum reg_class aclass;

          operand_a = ira_curr_regno_allocno_map[regno];
          aclass = ALLOCNO_CLASS (operand_a);
          if (ira_class_subset_p[cl][aclass]
              && ira_class_hard_regs_num[cl] != 0)
            {
              /* View the desired allocation of OPERAND as:

                     (REG:YMODE YREGNO),

                 a simplification of:

                     (subreg:YMODE (reg:XMODE XREGNO) OFFSET).  */
              enum machine_mode ymode, xmode;
              int xregno, yregno;
              HOST_WIDE_INT offset;

              xmode = recog_data.operand_mode[i];
              xregno = ira_class_hard_regs[cl][0];
              ymode = ALLOCNO_MODE (operand_a);
              offset = subreg_lowpart_offset (ymode, xmode);
              yregno = simplify_subreg_regno (xregno, xmode, offset, ymode);
              if (yregno >= 0
                  && ira_class_hard_reg_index[aclass][yregno] >= 0)
                {
                  int cost;

                  ira_allocate_and_set_costs
                    (&ALLOCNO_CONFLICT_HARD_REG_COSTS (operand_a),
                     aclass, 0);
                  ira_init_register_move_cost_if_necessary (xmode);
                  cost = freq * (in_p
                                 ? ira_register_move_cost[xmode][aclass][cl]
                                 : ira_register_move_cost[xmode][cl][aclass]);
                  ALLOCNO_CONFLICT_HARD_REG_COSTS (operand_a)
                    [ira_class_hard_reg_index[aclass][yregno]] -= cost;
                }
            }
        }

      EXECUTE_IF_SET_IN_SPARSESET (objects_live, px)
        {
          ira_object_t obj = ira_object_id_map[px];
          a = OBJECT_ALLOCNO (obj);
          if (a != operand_a)
            {
              /* We could increase costs of A instead of making it
                 conflict with the hard register.  But it works worse
                 because it will be spilled in reload anyway.  */
              IOR_HARD_REG_SET (OBJECT_CONFLICT_HARD_REGS (obj),
                                reg_class_contents[cl]);
              IOR_HARD_REG_SET (OBJECT_TOTAL_CONFLICT_HARD_REGS (obj),
                                reg_class_contents[cl]);
            }
        }
    }
}
/* Return true when one of the predecessor edges of BB is marked with
   EDGE_ABNORMAL_CALL or EDGE_EH.  */
bool
bb_has_abnormal_call_pred (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
        return true;
    }
  return false;
}
/* Look through the CALL_INSN_FUNCTION_USAGE of a call insn INSN, and see if
   we find a SET rtx that we can use to deduce that a register can be cheaply
   caller-saved.  Return such a register, or NULL_RTX if none is found.  */
static rtx
find_call_crossed_cheap_reg (rtx insn)
{
  rtx cheap_reg = NULL_RTX;
  rtx exp = CALL_INSN_FUNCTION_USAGE (insn);

  while (exp != NULL)
    {
      rtx x = XEXP (exp, 0);
      if (GET_CODE (x) == SET)
        {
          exp = x;
          break;
        }
      exp = XEXP (exp, 1);
    }
  if (exp != NULL)
    {
      basic_block bb = BLOCK_FOR_INSN (insn);
      rtx reg = SET_SRC (exp);
      rtx prev = PREV_INSN (insn);
      while (prev && !(INSN_P (prev)
                       && BLOCK_FOR_INSN (prev) != bb))
        {
          if (NONDEBUG_INSN_P (prev))
            {
              rtx set = single_set (prev);

              if (set && rtx_equal_p (SET_DEST (set), reg))
                {
                  rtx src = SET_SRC (set);
                  if (!REG_P (src) || HARD_REGISTER_P (src)
                      || !pseudo_regno_single_word_and_live_p (REGNO (src)))
                    break;
                  if (!modified_between_p (src, prev, insn))
                    cheap_reg = src;
                  break;
                }
              if (set && rtx_equal_p (SET_SRC (set), reg))
                {
                  rtx dest = SET_DEST (set);
                  if (!REG_P (dest) || HARD_REGISTER_P (dest)
                      || !pseudo_regno_single_word_and_live_p (REGNO (dest)))
                    break;
                  if (!modified_between_p (dest, prev, insn))
                    cheap_reg = dest;
                  break;
                }

              if (reg_overlap_mentioned_p (reg, PATTERN (prev)))
                break;
            }
          prev = PREV_INSN (prev);
        }
    }
  return cheap_reg;
}
/* Process insns of the basic block given by its LOOP_TREE_NODE to
   update allocno live ranges, allocno hard register conflicts,
   intersected calls, and register pressure info for allocnos for the
   basic block and for the regions containing the basic block.  */
static void
process_bb_node_lives (ira_loop_tree_node_t loop_tree_node)
{
  int i, freq;
  unsigned int j;
  basic_block bb;
  rtx insn;
  bitmap_iterator bi;
  bitmap reg_live_out;
  unsigned int px;
  bool set_p;

  bb = loop_tree_node->bb;
  if (bb != NULL)
    {
      for (i = 0; i < ira_pressure_classes_num; i++)
        {
          curr_reg_pressure[ira_pressure_classes[i]] = 0;
          high_pressure_start_point[ira_pressure_classes[i]] = -1;
        }
      curr_bb_node = loop_tree_node;
      reg_live_out = DF_LR_OUT (bb);
      sparseset_clear (objects_live);
      REG_SET_TO_HARD_REG_SET (hard_regs_live, reg_live_out);
      AND_COMPL_HARD_REG_SET (hard_regs_live, eliminable_regset);
      AND_COMPL_HARD_REG_SET (hard_regs_live, ira_no_alloc_regs);
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
        if (TEST_HARD_REG_BIT (hard_regs_live, i))
          {
            enum reg_class aclass, pclass, cl;

            aclass = ira_allocno_class_translate[REGNO_REG_CLASS (i)];
            pclass = ira_pressure_class_translate[aclass];
            for (j = 0;
                 (cl = ira_reg_class_super_classes[pclass][j])
                   != LIM_REG_CLASSES;
                 j++)
              {
                if (! ira_reg_pressure_class_p[cl])
                  continue;
                curr_reg_pressure[cl]++;
                if (curr_bb_node->reg_pressure[cl] < curr_reg_pressure[cl])
                  curr_bb_node->reg_pressure[cl] = curr_reg_pressure[cl];
                ira_assert (curr_reg_pressure[cl]
                            <= ira_class_hard_regs_num[cl]);
              }
          }
      EXECUTE_IF_SET_IN_BITMAP (reg_live_out, FIRST_PSEUDO_REGISTER, j, bi)
        mark_pseudo_regno_live (j);

      freq = REG_FREQ_FROM_BB (bb);
      if (freq == 0)
        freq = 1;

      /* Invalidate all allocno_saved_at_call entries.  */
      last_call_num++;

      /* Scan the code of this basic block, noting which allocnos and
         hard regs are born or die.

         Note that this loop treats uninitialized values as live until
         the beginning of the block.  For example, if an instruction
         uses (reg:DI foo), and only (subreg:SI (reg:DI foo) 0) is ever
         set, FOO will remain live until the beginning of the block.
         Likewise if FOO is not set at all.  This is unnecessarily
         pessimistic, but it probably doesn't matter much in practice.  */
      FOR_BB_INSNS_REVERSE (bb, insn)
        {
          df_ref *def_rec, *use_rec;
          bool call_p;

          if (!NONDEBUG_INSN_P (insn))
            continue;

          if (internal_flag_ira_verbose > 2 && ira_dump_file != NULL)
            fprintf (ira_dump_file, "   Insn %u(l%d): point = %d\n",
                     INSN_UID (insn), loop_tree_node->parent->loop_num,
                     curr_point);

          /* Mark each defined value as live.  We need to do this for
             unused values because they still conflict with quantities
             that are live at the time of the definition.

             Ignore DF_REF_MAY_CLOBBERs on a call instruction.  Such
             references represent the effect of the called function
             on a call-clobbered register.  Marking the register as
             live would stop us from allocating it to a call-crossing
             allocno.  */
          call_p = CALL_P (insn);
          for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
            if (!call_p || !DF_REF_FLAGS_IS_SET (*def_rec, DF_REF_MAY_CLOBBER))
              mark_ref_live (*def_rec);

          /* If INSN has multiple outputs, then any value used in one
             of the outputs conflicts with the other outputs.  Model this
             by making the used value live during the output phase.

             It is unsafe to use !single_set here since it will ignore
             an unused output.  Just because an output is unused does
             not mean the compiler can assume the side effect will not
             occur.  Consider if ALLOCNO appears in the address of an
             output and we reload the output.  If we allocate ALLOCNO
             to the same hard register as an unused output we could
             set the hard register before the output reload insn.  */
          if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
            for (use_rec = DF_INSN_USES (insn); *use_rec; use_rec++)
              {
                int i;
                rtx reg;

                reg = DF_REF_REG (*use_rec);
                for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
                  {
                    rtx set;

                    set = XVECEXP (PATTERN (insn), 0, i);
                    if (GET_CODE (set) == SET
                        && reg_overlap_mentioned_p (reg, SET_DEST (set)))
                      {
                        /* After the previous loop, this is a no-op if
                           REG is contained within SET_DEST (SET).  */
                        mark_ref_live (*use_rec);
                        break;
                      }
                  }
              }

          extract_insn (insn);
          preprocess_constraints ();
          process_single_reg_class_operands (false, freq);

          /* See which defined values die here.  */
          for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
            if (!call_p || !DF_REF_FLAGS_IS_SET (*def_rec, DF_REF_MAY_CLOBBER))
              mark_ref_dead (*def_rec);

          if (call_p)
            {
              /* Try to find a SET in the CALL_INSN_FUNCTION_USAGE, and from
                 there, try to find a pseudo that is live across the call but
                 can be cheaply reconstructed from the return value.  */
              rtx cheap_reg = find_call_crossed_cheap_reg (insn);
              if (cheap_reg != NULL_RTX)
                add_reg_note (insn, REG_RETURNED, cheap_reg);

              last_call_num++;
              sparseset_clear (allocnos_processed);
              /* The current set of live allocnos are live across the call.  */
              EXECUTE_IF_SET_IN_SPARSESET (objects_live, i)
                {
                  ira_object_t obj = ira_object_id_map[i];
                  ira_allocno_t a = OBJECT_ALLOCNO (obj);
                  int num = ALLOCNO_NUM (a);

                  /* Don't allocate allocnos that cross setjmps or any
                     call, if this function receives a nonlocal
                     goto.  */
                  if (cfun->has_nonlocal_label
                      || find_reg_note (insn, REG_SETJMP,
                                        NULL_RTX) != NULL_RTX)
                    {
                      SET_HARD_REG_SET (OBJECT_CONFLICT_HARD_REGS (obj));
                      SET_HARD_REG_SET (OBJECT_TOTAL_CONFLICT_HARD_REGS (obj));
                    }
                  if (can_throw_internal (insn))
                    {
                      IOR_HARD_REG_SET (OBJECT_CONFLICT_HARD_REGS (obj),
                                        call_used_reg_set);
                      IOR_HARD_REG_SET (OBJECT_TOTAL_CONFLICT_HARD_REGS (obj),
                                        call_used_reg_set);
                    }

                  if (sparseset_bit_p (allocnos_processed, num))
                    continue;
                  sparseset_set_bit (allocnos_processed, num);

                  if (allocno_saved_at_call[num] != last_call_num)
                    /* Here we are mimicking caller-save.c behaviour
                       which does not save hard register at a call if
                       it was saved on previous call in the same basic
                       block and the hard register was not mentioned
                       between the two calls.  */
                    ALLOCNO_CALL_FREQ (a) += freq;
                  /* Mark it as saved at the next call.  */
                  allocno_saved_at_call[num] = last_call_num + 1;
                  ALLOCNO_CALLS_CROSSED_NUM (a)++;
                  if (cheap_reg != NULL_RTX
                      && ALLOCNO_REGNO (a) == (int) REGNO (cheap_reg))
                    ALLOCNO_CHEAP_CALLS_CROSSED_NUM (a)++;
                }
            }

          make_early_clobber_and_input_conflicts ();

          curr_point++;

          /* Mark each used value as live.  */
          for (use_rec = DF_INSN_USES (insn); *use_rec; use_rec++)
            mark_ref_live (*use_rec);

          process_single_reg_class_operands (true, freq);

          set_p = mark_hard_reg_early_clobbers (insn, true);

          if (set_p)
            {
              mark_hard_reg_early_clobbers (insn, false);

              /* Mark each hard reg as live again.  For example, a
                 hard register can be in a clobber and in an insn
                 input.  */
              for (use_rec = DF_INSN_USES (insn); *use_rec; use_rec++)
                {
                  rtx ureg = DF_REF_REG (*use_rec);

                  if (GET_CODE (ureg) == SUBREG)
                    ureg = SUBREG_REG (ureg);
                  if (! REG_P (ureg) || REGNO (ureg) >= FIRST_PSEUDO_REGISTER)
                    continue;

                  mark_ref_live (*use_rec);
                }
            }

          curr_point++;
        }

#ifdef EH_RETURN_DATA_REGNO
      if (bb_has_eh_pred (bb))
        for (j = 0; ; ++j)
          {
            unsigned int regno = EH_RETURN_DATA_REGNO (j);
            if (regno == INVALID_REGNUM)
              break;
            make_hard_regno_born (regno);
          }
#endif

      /* Allocnos can't go in stack regs at the start of a basic block
         that is reached by an abnormal edge.  Likewise for call
         clobbered regs, because caller-save, fixup_abnormal_edges and
         possibly the table driven EH machinery are not quite ready to
         handle such allocnos live across such edges.  */
      if (bb_has_abnormal_pred (bb))
        {
#ifdef STACK_REGS
          EXECUTE_IF_SET_IN_SPARSESET (objects_live, px)
            {
              ira_allocno_t a = OBJECT_ALLOCNO (ira_object_id_map[px]);

              ALLOCNO_NO_STACK_REG_P (a) = true;
              ALLOCNO_TOTAL_NO_STACK_REG_P (a) = true;
            }
          for (px = FIRST_STACK_REG; px <= LAST_STACK_REG; px++)
            make_hard_regno_born (px);
#endif
          /* No need to record conflicts for call clobbered regs if we
             have nonlocal labels around, as we don't ever try to
             allocate such regs in this case.  */
          if (!cfun->has_nonlocal_label && bb_has_abnormal_call_pred (bb))
            for (px = 0; px < FIRST_PSEUDO_REGISTER; px++)
              if (call_used_regs[px])
                make_hard_regno_born (px);
        }

      EXECUTE_IF_SET_IN_SPARSESET (objects_live, i)
        make_object_dead (ira_object_id_map[i]);

      curr_point++;
    }
  /* Propagate register pressure to upper loop tree nodes: */
  if (loop_tree_node != ira_loop_tree_root)
    for (i = 0; i < ira_pressure_classes_num; i++)
      {
        enum reg_class pclass;

        pclass = ira_pressure_classes[i];
        if (loop_tree_node->reg_pressure[pclass]
            > loop_tree_node->parent->reg_pressure[pclass])
          loop_tree_node->parent->reg_pressure[pclass]
            = loop_tree_node->reg_pressure[pclass];
      }
}
/* Create and set up IRA_START_POINT_RANGES and
   IRA_FINISH_POINT_RANGES.  */
static void
create_start_finish_chains (void)
{
  ira_object_t obj;
  ira_object_iterator oi;
  live_range_t r;

  ira_start_point_ranges
    = (live_range_t *) ira_allocate (ira_max_point * sizeof (live_range_t));
  memset (ira_start_point_ranges, 0, ira_max_point * sizeof (live_range_t));
  ira_finish_point_ranges
    = (live_range_t *) ira_allocate (ira_max_point * sizeof (live_range_t));
  memset (ira_finish_point_ranges, 0, ira_max_point * sizeof (live_range_t));
  FOR_EACH_OBJECT (obj, oi)
    for (r = OBJECT_LIVE_RANGES (obj); r != NULL; r = r->next)
      {
        r->start_next = ira_start_point_ranges[r->start];
        ira_start_point_ranges[r->start] = r;
        r->finish_next = ira_finish_point_ranges[r->finish];
        ira_finish_point_ranges[r->finish] = r;
      }
}
/* Rebuild IRA_START_POINT_RANGES and IRA_FINISH_POINT_RANGES after
   new live ranges and program points were added as a result of new
   insn generation.  */
void
ira_rebuild_start_finish_chains (void)
{
  ira_free (ira_finish_point_ranges);
  ira_free (ira_start_point_ranges);
  create_start_finish_chains ();
}
/* Compress allocno live ranges by removing program points where
   nothing happens.  */
static void
remove_some_program_points_and_update_live_ranges (void)
{
  unsigned i;
  int n;
  int *map;
  ira_object_t obj;
  ira_object_iterator oi;
  live_range_t r;
  sbitmap born_or_dead, born, dead;
  sbitmap_iterator sbi;
  bool born_p, dead_p, prev_born_p, prev_dead_p;

  born = sbitmap_alloc (ira_max_point);
  dead = sbitmap_alloc (ira_max_point);
  sbitmap_zero (born);
  sbitmap_zero (dead);
  FOR_EACH_OBJECT (obj, oi)
    for (r = OBJECT_LIVE_RANGES (obj); r != NULL; r = r->next)
      {
        ira_assert (r->start <= r->finish);
        SET_BIT (born, r->start);
        SET_BIT (dead, r->finish);
      }

  born_or_dead = sbitmap_alloc (ira_max_point);
  sbitmap_a_or_b (born_or_dead, born, dead);
  map = (int *) ira_allocate (sizeof (int) * ira_max_point);
  n = -1;
  prev_born_p = prev_dead_p = false;
  EXECUTE_IF_SET_IN_SBITMAP (born_or_dead, 0, i, sbi)
    {
      born_p = TEST_BIT (born, i);
      dead_p = TEST_BIT (dead, i);
      if ((prev_born_p && ! prev_dead_p && born_p && ! dead_p)
          || (prev_dead_p && ! prev_born_p && dead_p && ! born_p))
        map[i] = n;
      else
        map[i] = ++n;
      prev_born_p = born_p;
      prev_dead_p = dead_p;
    }
  sbitmap_free (born_or_dead);
  sbitmap_free (born);
  sbitmap_free (dead);
  n++;
  if (internal_flag_ira_verbose > 1 && ira_dump_file != NULL)
    fprintf (ira_dump_file, "Compressing live ranges: from %d to %d - %d%%\n",
             ira_max_point, n, 100 * n / ira_max_point);
  ira_max_point = n;

  FOR_EACH_OBJECT (obj, oi)
    for (r = OBJECT_LIVE_RANGES (obj); r != NULL; r = r->next)
      {
        r->start = map[r->start];
        r->finish = map[r->finish];
      }

  ira_free (map);
}
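
/* Example of the effect of the compression above: program points at
   which only births happen (or only deaths happen) and that are
   adjacent among the born-or-dead points are mapped to the same new
   point, so a run of birth-only points such as 7, 8 and 9 collapses
   into a single compressed point.  */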
/* Print live ranges R to file F.  */
void
ira_print_live_range_list (FILE *f, live_range_t r)
{
  for (; r != NULL; r = r->next)
    fprintf (f, " [%d..%d]", r->start, r->finish);
  fprintf (f, "\n");
}
/* Print live ranges R to stderr.  */
void
ira_debug_live_range_list (live_range_t r)
{
  ira_print_live_range_list (stderr, r);
}
/* Print live ranges of object OBJ to file F.  */
static void
print_object_live_ranges (FILE *f, ira_object_t obj)
{
  ira_print_live_range_list (f, OBJECT_LIVE_RANGES (obj));
}
/* Print live ranges of allocno A to file F.  */
static void
print_allocno_live_ranges (FILE *f, ira_allocno_t a)
{
  int n = ALLOCNO_NUM_OBJECTS (a);
  int i;

  for (i = 0; i < n; i++)
    {
      fprintf (f, " a%d(r%d", ALLOCNO_NUM (a), ALLOCNO_REGNO (a));
      if (n > 1)
        fprintf (f, " [%d]", i);
      fprintf (f, "):");
      print_object_live_ranges (f, ALLOCNO_OBJECT (a, i));
    }
}
/* Print live ranges of allocno A to stderr.  */
void
ira_debug_allocno_live_ranges (ira_allocno_t a)
{
  print_allocno_live_ranges (stderr, a);
}
/* Print live ranges of all allocnos to file F.  */
static void
print_live_ranges (FILE *f)
{
  ira_allocno_t a;
  ira_allocno_iterator ai;

  FOR_EACH_ALLOCNO (a, ai)
    print_allocno_live_ranges (f, a);
}
/* Print live ranges of all allocnos to stderr.  */
void
ira_debug_live_ranges (void)
{
  print_live_ranges (stderr);
}
/* The main entry function: create live ranges, set up
   CONFLICT_HARD_REGS and TOTAL_CONFLICT_HARD_REGS for objects, and
   calculate register pressure info.  */
void
ira_create_allocno_live_ranges (void)
{
  objects_live = sparseset_alloc (ira_objects_num);
  allocnos_processed = sparseset_alloc (ira_allocnos_num);
  curr_point = 0;
  last_call_num = 0;
  allocno_saved_at_call
    = (int *) ira_allocate (ira_allocnos_num * sizeof (int));
  memset (allocno_saved_at_call, 0, ira_allocnos_num * sizeof (int));
  ira_traverse_loop_tree (true, ira_loop_tree_root, NULL,
                          process_bb_node_lives);
  ira_max_point = curr_point;
  create_start_finish_chains ();
  if (internal_flag_ira_verbose > 2 && ira_dump_file != NULL)
    print_live_ranges (ira_dump_file);
  /* Clean up.  */
  ira_free (allocno_saved_at_call);
  sparseset_free (objects_live);
  sparseset_free (allocnos_processed);
}
/* Compress allocno live ranges.  */
void
ira_compress_allocno_live_ranges (void)
{
  remove_some_program_points_and_update_live_ranges ();
  ira_rebuild_start_finish_chains ();
  if (internal_flag_ira_verbose > 2 && ira_dump_file != NULL)
    {
      fprintf (ira_dump_file, "Ranges after the compression:\n");
      print_live_ranges (ira_dump_file);
    }
}
/* Free arrays IRA_START_POINT_RANGES and IRA_FINISH_POINT_RANGES.  */
void
ira_finish_allocno_live_ranges (void)
{
  ira_free (ira_finish_point_ranges);
  ira_free (ira_start_point_ranges);
}