1 /* IRA allocation based on graph coloring.
2 Contributed by Vladimir Makarov.
3 Copyright (C) 2006 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
24 #include "coretypes.h"
34 #include "hard-reg-set.h"
35 #include "basic-block.h"
/* NOTE(review): this file was mangled by text extraction -- the original
   source line numbers are fused into the text and many lines (braces,
   return-type lines, whole statements) are missing.  Code is kept
   verbatim below; only comments were added/repaired.  */
/* Forward declarations of the coloring machinery defined later in this
   file.  NOTE(review): `update_pseudo_avaialable_regs' is misspelled in
   the original; it must stay in sync with its definition elsewhere --
   verify against upstream before renaming.  */
43 /* We use optimistic colouring with optional biased colouring. */
45 static void update_copy_costs (pseudo_t
, int);
46 static int assign_hard_reg (pseudo_t
, int);
48 static void add_pseudo_to_bucket (pseudo_t
, pseudo_t
*);
49 static void add_pseudo_to_ordered_bucket (pseudo_t
, pseudo_t
*);
50 static void delete_pseudo_from_bucket (pseudo_t
, pseudo_t
*);
51 static void push_pseudo_to_stack (pseudo_t
);
52 static void remove_pseudo_from_bucket_and_push (pseudo_t
, int);
53 static void push_only_colorable (void);
54 static void push_pseudo_to_spill (pseudo_t
);
55 static int loop_edge_freq (struct ira_loop_tree_node
*, int, int);
56 static int calculate_pseudo_spill_cost (pseudo_t
);
57 static void push_pseudos_to_stack (void);
58 static void pop_pseudos_from_stack (void);
59 static void put_pseudo_into_bucket (pseudo_t
);
60 static void color_pseudos (void);
62 static void print_loop_title (struct ira_loop_tree_node
*);
63 static void color_pass (struct ira_loop_tree_node
*);
64 static int pseudo_priority_compare_func (const void *, const void *);
65 static void priority_coloring (void);
66 static void do_coloring (void);
68 static void move_spill_restore (void);
70 static void update_pseudo_avaialable_regs (void);
72 /* Bitmap of pseudos which should be colored. */
73 static bitmap coloring_pseudo_bitmap
;
75 /* Bitmap of pseudos which should be taken into account during
76 coloring. In general case it contains pseudos from
77 coloring_pseudo_bitmap plus other already colored conflicting
79 static bitmap consideration_pseudo_bitmap
;
83 /* This page contains function to choose hard register for pseudos. */
85 /* Array whose element value is TRUE if the corresponding hard
86 register already allocated for a pseudo. */
87 static int allocated_hardreg_p
[FIRST_PSEUDO_REGISTER
];
/* NOTE(review): update_copy_costs walks the copy list of PSEUDO and
   adjusts the current hard-register costs of every pseudo connected to
   it by a copy, weighting a register-move cost by the copy frequency.
   Several original lines are missing here (the `static void' line, the
   declaration of `class', the tail of the cost expression at original
   lines 128-130, braces and the final cost update) -- reconstruct from
   upstream before editing.  */
89 /* The function updates costs (decrease if DECR_P) of the pseudos
90 connected by copies with PSEUDO. */
92 update_copy_costs (pseudo_t pseudo
, int decr_p
)
94 int i
, hard_regno
, cost
;
95 enum machine_mode mode
;
97 pseudo_t another_pseudo
;
98 struct pseudo_copy
*cp
, *next_cp
;
100 if (PSEUDO_COVER_CLASS (pseudo
) == NO_REGS
)
102 hard_regno
= PSEUDO_HARD_REGNO (pseudo
);
103 ira_assert (hard_regno
>= 0 && PSEUDO_COVER_CLASS (pseudo
) != NO_REGS
);
104 i
= class_hard_reg_index
[PSEUDO_COVER_CLASS (pseudo
)] [hard_regno
];
106 class = REGNO_REG_CLASS (hard_regno
);
107 mode
= PSEUDO_MODE (pseudo
);
108 for (cp
= PSEUDO_COPIES (pseudo
); cp
!= NULL
; cp
= next_cp
)
110 if (cp
->first
== pseudo
)
112 next_cp
= cp
->next_first_pseudo_copy
;
113 another_pseudo
= cp
->second
;
115 else if (cp
->second
== pseudo
)
117 next_cp
= cp
->next_second_pseudo_copy
;
118 another_pseudo
= cp
->first
;
122 if (PSEUDO_COVER_CLASS (pseudo
) != PSEUDO_COVER_CLASS (another_pseudo
))
124 cost
= (cp
->second
== pseudo
125 ? register_move_cost
[mode
] [class]
126 [PSEUDO_COVER_CLASS (another_pseudo
)]
127 : register_move_cost
[mode
] [PSEUDO_COVER_CLASS (another_pseudo
)]
131 PSEUDO_CURR_HARD_REG_COSTS (another_pseudo
) [i
] += cp
->freq
* cost
;
132 PSEUDO_CURR_CONFLICT_HARD_REG_COSTS (another_pseudo
) [i
]
/* NOTE(review): assign_hard_reg is the core hard-register chooser: it
   accumulates per-register costs from the pseudo itself, subtracts the
   conflict costs of assigned conflicting pseudos, optionally builds a
   biased-coloring register set, adds copy-partner costs, then scans the
   cover class picking the cheapest non-conflicting, non-prohibited hard
   register (penalizing callee-saved registers that would need a
   save/restore).  Returns nonzero iff a hard register was assigned.
   Many original lines are missing (the `fail:'-style exit after
   GO_IF_HARD_REG_SUBSET, conflict-set IOR lines 178-179, the
   min_cost update around 262-264, braces) -- do not restructure
   without the upstream source.  */
137 /* Function choosing a hard register for PSEUDO. If RETRY_P is
138 nonzero, it means that the function called from
139 `retry_ira_color'. */
141 assign_hard_reg (pseudo_t pseudo
, int retry_p
)
143 HARD_REG_SET conflicting_regs
, biased_regs
;
144 int i
, j
, hard_regno
, best_hard_regno
, class_size
;
145 int cost
, mem_cost
, min_cost
, full_cost
, min_full_cost
, add_cost
, *costs
;
147 enum reg_class cover_class
, class;
148 enum machine_mode mode
;
149 pseudo_t conflict_pseudo
, conflict_pseudo2
;
150 pseudo_t
*pseudo_vec
, *pseudo_vec2
;
151 pseudo_t another_pseudo
;
152 struct pseudo_copy
*cp
, *next_cp
;
153 static int full_costs
[FIRST_PSEUDO_REGISTER
];
155 ira_assert (! PSEUDO_ASSIGNED_P (pseudo
));
156 cover_class
= PSEUDO_COVER_CLASS (pseudo
);
157 class_size
= class_hard_regs_num
[cover_class
];
158 CLEAR_HARD_REG_SET (biased_regs
);
159 memset (full_costs
, 0, sizeof (int) * class_size
);
160 mem_cost
= PSEUDO_MEMORY_COST (pseudo
);
161 mode
= PSEUDO_MODE (pseudo
);
162 costs
= PSEUDO_CURR_HARD_REG_COSTS (pseudo
);
163 memcpy (full_costs
, costs
, sizeof (int) * class_size
);
164 COPY_HARD_REG_SET (conflicting_regs
, PSEUDO_CONFLICT_HARD_REGS (pseudo
));
165 pseudo_vec
= PSEUDO_CONFLICT_PSEUDO_VEC (pseudo
);
166 for (i
= 0; (conflict_pseudo
= pseudo_vec
[i
]) != NULL
; i
++)
167 /* Reload can give another class so we need to check all
170 || (cover_class
== PSEUDO_COVER_CLASS (conflict_pseudo
)
171 && bitmap_bit_p (consideration_pseudo_bitmap
,
172 PSEUDO_NUM (conflict_pseudo
))))
174 if (PSEUDO_ASSIGNED_P (conflict_pseudo
))
176 if ((hard_regno
= PSEUDO_HARD_REGNO (conflict_pseudo
)) >= 0)
180 [hard_regno
] [PSEUDO_MODE (conflict_pseudo
)]);
186 = PSEUDO_CURR_CONFLICT_HARD_REG_COSTS (conflict_pseudo
);
187 if (conflict_costs
!= NULL
)
188 for (j
= class_size
- 1; j
>= 0; j
--)
189 full_costs
[j
] -= conflict_costs
[j
];
191 if (retry_p
|| ! flag_ira_biased_coloring
)
193 pseudo_vec2
= PSEUDO_CONFLICT_PSEUDO_VEC (conflict_pseudo
);
194 for (j
= 0; (conflict_pseudo2
= pseudo_vec2
[j
]) != NULL
; j
++)
195 if (cover_class
== PSEUDO_COVER_CLASS (conflict_pseudo2
)
196 && PSEUDO_ASSIGNED_P (conflict_pseudo2
)
197 && (hard_regno
= PSEUDO_HARD_REGNO (conflict_pseudo2
)) >= 0
199 || bitmap_bit_p (consideration_pseudo_bitmap
,
200 PSEUDO_NUM (conflict_pseudo2
))))
201 IOR_HARD_REG_SET (biased_regs
,
203 [hard_regno
] [PSEUDO_MODE (conflict_pseudo2
)]);
205 for (cp
= PSEUDO_COPIES (pseudo
); cp
!= NULL
; cp
= next_cp
)
207 if (cp
->first
== pseudo
)
209 next_cp
= cp
->next_first_pseudo_copy
;
210 another_pseudo
= cp
->second
;
212 else if (cp
->second
== pseudo
)
214 next_cp
= cp
->next_second_pseudo_copy
;
215 another_pseudo
= cp
->first
;
219 if (PSEUDO_ASSIGNED_P (another_pseudo
))
221 conflict_costs
= PSEUDO_CURR_CONFLICT_HARD_REG_COSTS (another_pseudo
);
222 if (conflict_costs
!= NULL
)
223 for (j
= class_size
- 1; j
>= 0; j
--)
224 full_costs
[j
] += conflict_costs
[j
];
226 IOR_HARD_REG_SET (conflicting_regs
, no_alloc_regs
);
227 IOR_COMPL_HARD_REG_SET (conflicting_regs
, reg_class_contents
[cover_class
]);
228 best_hard_regno
= -1;
229 GO_IF_HARD_REG_SUBSET (reg_class_contents
[cover_class
], conflicting_regs
,
231 min_cost
= min_full_cost
= INT_MAX
;
232 /* We don't care about giving callee saved registers to pseudos no
233 living through calls because call used register are allocated
234 first (it is usual practice to put them first in
236 for (i
= 0; i
< class_size
; i
++)
238 hard_regno
= class_hard_regs
[cover_class
] [i
];
240 if (PSEUDO_NO_STACK_REG_P (pseudo
)
241 && FIRST_STACK_REG
<= hard_regno
&& hard_regno
<= LAST_STACK_REG
)
244 if (TEST_HARD_REG_BIT (prohibited_class_mode_regs
245 [cover_class
] [mode
], hard_regno
))
247 if (hard_reg_not_in_set_p (hard_regno
, mode
, conflicting_regs
))
250 full_cost
= full_costs
[i
];
251 if (! allocated_hardreg_p
[hard_regno
]
252 && hard_reg_not_in_set_p (hard_regno
, mode
, call_used_reg_set
))
253 /* We need to save/restore the register in
254 epilogue/prologue. Therefore we increase the cost. */
256 /* ??? If only part is call clobbered. */
257 class = REGNO_REG_CLASS (hard_regno
);
258 add_cost
= (memory_move_cost
[mode
] [class] [0]
259 + memory_move_cost
[mode
] [class] [1] - 1);
261 full_cost
+= add_cost
;
265 if (min_full_cost
> full_cost
)
267 min_full_cost
= full_cost
;
268 best_hard_regno
= hard_regno
;
269 ira_assert (hard_regno
>= 0);
271 else if (min_full_cost
== full_cost
&& flag_ira_biased_coloring
!= 0
272 && TEST_HARD_REG_BIT (biased_regs
, hard_regno
)
273 && ! TEST_HARD_REG_BIT (biased_regs
, best_hard_regno
))
274 best_hard_regno
= hard_regno
;
277 if (min_cost
> mem_cost
)
278 best_hard_regno
= -1;
280 PSEUDO_HARD_REGNO (pseudo
) = best_hard_regno
;
281 PSEUDO_ASSIGNED_P (pseudo
) = TRUE
;
282 if (best_hard_regno
>= 0)
284 allocated_hardreg_p
[best_hard_regno
] = TRUE
;
285 update_copy_costs (pseudo
, TRUE
);
287 return best_hard_regno
>= 0;
292 /* This page contains allocator based on Chaitin algorithm. */
294 /* Bucket of pseudos which can be colored currently without spilling. */
295 static pseudo_t colorable_pseudo_bucket
;
/* NOTE(review): the continuation line of the next comment (original
   line 298, presumably "spilling.") is missing from this extraction. */
297 /* Bucket of pseudos pseudo might be not colored currently without
299 static pseudo_t uncolorable_pseudo_bucket
;
301 /* Add PSEUDO to *BUCKET_PTR bucket. PSEUDO should be not in a bucket
304 add_pseudo_to_bucket (pseudo_t pseudo
, pseudo_t
*bucket_ptr
)
306 pseudo_t first_pseudo
;
308 first_pseudo
= *bucket_ptr
;
309 PSEUDO_NEXT_BUCKET_PSEUDO (pseudo
) = first_pseudo
;
310 PSEUDO_PREV_BUCKET_PSEUDO (pseudo
) = NULL
;
311 if (first_pseudo
!= NULL
)
312 PSEUDO_PREV_BUCKET_PSEUDO (first_pseudo
) = pseudo
;
313 *bucket_ptr
= pseudo
;
316 /* Add PSEUDO to *BUCKET_PTR bucket maintaining the order according
317 their frequency. PSEUDO should be not in a bucket before the
320 add_pseudo_to_ordered_bucket (pseudo_t pseudo
, pseudo_t
*bucket_ptr
)
322 pseudo_t before
, after
;
323 enum reg_class cover_class
;
326 freq
= PSEUDO_FREQ (pseudo
);
327 cover_class
= PSEUDO_COVER_CLASS (pseudo
);
328 nregs
= reg_class_nregs
[cover_class
] [PSEUDO_MODE (pseudo
)];
329 for (before
= *bucket_ptr
, after
= NULL
;
331 after
= before
, before
= PSEUDO_NEXT_BUCKET_PSEUDO (before
))
332 if (PSEUDO_FREQ (before
) > freq
)
334 PSEUDO_NEXT_BUCKET_PSEUDO (pseudo
) = before
;
335 PSEUDO_PREV_BUCKET_PSEUDO (pseudo
) = after
;
337 *bucket_ptr
= pseudo
;
339 PSEUDO_NEXT_BUCKET_PSEUDO (after
) = pseudo
;
341 PSEUDO_PREV_BUCKET_PSEUDO (before
) = pseudo
;
344 /* Delete PSEUDO from *BUCKET_PTR bucket. It should be there before
347 delete_pseudo_from_bucket (pseudo_t pseudo
, pseudo_t
*bucket_ptr
)
349 pseudo_t prev_pseudo
, next_pseudo
;
351 prev_pseudo
= PSEUDO_PREV_BUCKET_PSEUDO (pseudo
);
352 next_pseudo
= PSEUDO_NEXT_BUCKET_PSEUDO (pseudo
);
353 if (prev_pseudo
!= NULL
)
354 PSEUDO_NEXT_BUCKET_PSEUDO (prev_pseudo
) = next_pseudo
;
357 ira_assert (*bucket_ptr
== pseudo
);
358 *bucket_ptr
= next_pseudo
;
360 if (next_pseudo
!= NULL
)
361 PSEUDO_PREV_BUCKET_PSEUDO (next_pseudo
) = prev_pseudo
;
/* NOTE(review): pushing a pseudo onto the coloring stack lowers the
   "left conflicts" count of each not-yet-pushed conflicting pseudo of
   the same cover class; when a conflicting pseudo's remaining conflicts
   fit into its available registers it is moved from the uncolorable to
   the colorable bucket.  The computation of `conflict_size' (original
   lines 393-396) and several braces are missing from this extraction --
   consult upstream before restructuring.  */
364 /* Varray representing the stack of pseudos used during coloring. */
365 static varray_type pseudo_stack_varray
;
367 /* The function puts PSEUDO onto the coloring stack without removing
368 it from the bucket. Such action can result in moving conflicting
369 pseudos from the uncolorable bucket to the colorable one. */
371 push_pseudo_to_stack (pseudo_t pseudo
)
373 int i
, conflicts_num
, conflict_size
, size
;
374 pseudo_t conflict_pseudo
;
375 pseudo_t
*pseudo_vec
;
376 enum reg_class cover_class
;
378 PSEUDO_IN_GRAPH_P (pseudo
) = FALSE
;
379 VARRAY_PUSH_GENERIC_PTR (pseudo_stack_varray
, pseudo
);
380 cover_class
= PSEUDO_COVER_CLASS (pseudo
);
381 if (cover_class
== NO_REGS
)
383 size
= reg_class_nregs
[cover_class
] [PSEUDO_MODE (pseudo
)];
384 pseudo_vec
= PSEUDO_CONFLICT_PSEUDO_VEC (pseudo
);
385 for (i
= 0; (conflict_pseudo
= pseudo_vec
[i
]) != NULL
; i
++)
386 if (cover_class
== PSEUDO_COVER_CLASS (conflict_pseudo
)
387 && bitmap_bit_p (coloring_pseudo_bitmap
, PSEUDO_NUM (conflict_pseudo
)))
389 if (PSEUDO_IN_GRAPH_P (conflict_pseudo
)
390 && ! PSEUDO_ASSIGNED_P (conflict_pseudo
))
392 conflicts_num
= PSEUDO_LEFT_CONFLICTS_NUM (conflict_pseudo
);
395 [cover_class
] [PSEUDO_MODE (conflict_pseudo
)]);
397 (PSEUDO_LEFT_CONFLICTS_NUM (conflict_pseudo
) >= size
);
398 PSEUDO_LEFT_CONFLICTS_NUM (conflict_pseudo
) -= size
;
399 if (conflicts_num
+ conflict_size
400 <= PSEUDO_AVAILABLE_REGS_NUM (conflict_pseudo
))
402 conflicts_num
= PSEUDO_LEFT_CONFLICTS_NUM (conflict_pseudo
);
403 if (conflicts_num
+ conflict_size
404 <= PSEUDO_AVAILABLE_REGS_NUM (conflict_pseudo
))
406 delete_pseudo_from_bucket
407 (conflict_pseudo
, &uncolorable_pseudo_bucket
);
408 add_pseudo_to_ordered_bucket (conflict_pseudo
,
409 &colorable_pseudo_bucket
);
/* NOTE(review): removes PSEUDO from its bucket (colorable or
   uncolorable per COLORABLE_P), optionally dumps a trace line, asserts
   the bucket invariant, and pushes the pseudo onto the coloring stack.
   An `if (! colorable_p)' guard before the MAY_BE_SPILLED assignment
   (original line 442) and the middle of the assert (line 438) are
   missing from this extraction -- verify against upstream.  */
415 /* The function puts PSEUDO onto the coloring stack and removes it
416 from the bucket. The pseudo is in the colorable bucket if
417 COLORABLE_P is nonzero. */
419 remove_pseudo_from_bucket_and_push (pseudo_t pseudo
, int colorable_p
)
421 enum reg_class cover_class
;
422 pseudo_t
*bucket_ptr
;
424 bucket_ptr
= (colorable_p
425 ? &colorable_pseudo_bucket
: &uncolorable_pseudo_bucket
);
426 delete_pseudo_from_bucket (pseudo
, bucket_ptr
);
427 if (ira_dump_file
!= NULL
)
429 fprintf (ira_dump_file
, " Pushing");
430 print_expanded_pseudo (pseudo
);
431 fprintf (ira_dump_file
, "%s\n", colorable_p
? "" : "(potential spill)");
433 cover_class
= PSEUDO_COVER_CLASS (pseudo
);
434 ira_assert ((colorable_p
435 && (PSEUDO_LEFT_CONFLICTS_NUM (pseudo
)
436 + reg_class_nregs
[cover_class
] [PSEUDO_MODE (pseudo
)]
437 <= PSEUDO_AVAILABLE_REGS_NUM (pseudo
)))
439 && (PSEUDO_LEFT_CONFLICTS_NUM (pseudo
)
440 + reg_class_nregs
[cover_class
] [PSEUDO_MODE (pseudo
)]
441 > PSEUDO_AVAILABLE_REGS_NUM (pseudo
))));
443 PSEUDO_MAY_BE_SPILLED_P (pseudo
) = TRUE
;
444 push_pseudo_to_stack (pseudo
);
447 /* The function puts all pseudos from colorable bucket onto the
450 push_only_colorable (void)
452 for (;colorable_pseudo_bucket
!= NULL
;)
453 remove_pseudo_from_bucket_and_push (colorable_pseudo_bucket
, TRUE
);
456 /* The function puts PSEUDO chosen for potential spilling onto the
459 push_pseudo_to_spill (pseudo_t pseudo
)
461 delete_pseudo_from_bucket (pseudo
, &uncolorable_pseudo_bucket
);
462 PSEUDO_MAY_BE_SPILLED_P (pseudo
) = TRUE
;
463 if (ira_dump_file
!= NULL
)
464 fprintf (ira_dump_file
, " Pushing p%d(%d) (potential spill)\n",
465 PSEUDO_NUM (pseudo
), PSEUDO_REGNO (pseudo
));
466 push_pseudo_to_stack (pseudo
);
/* NOTE(review): sums EDGE_FREQUENCY over the loop's entry edges (header
   preds excluding the latch) or its exit edges, optionally restricted
   to edges where REGNO is live across the edge, and converts the sum
   with REG_FREQ_FROM_EDGE_FREQ.  The declarations of `freq', `e', `ei',
   `i', the `if (exit_p)'/else selection between the two scans, and the
   `regno < 0 ||' guards (original lines 481-483, 486, 491-493, 496)
   are missing from this extraction.  */
469 /* The function returns frequency of exit edges (if EXIT_P) or enter
470 from/to the loop given by its LOOP_NODE. */
472 loop_edge_freq (struct ira_loop_tree_node
*loop_node
, int regno
, int exit_p
)
477 VEC (edge
, heap
) *edges
;
479 ira_assert (loop_node
->loop
!= NULL
480 && (regno
< 0 || regno
>= FIRST_PSEUDO_REGISTER
));
484 FOR_EACH_EDGE (e
, ei
, loop_node
->loop
->header
->preds
)
485 if (e
->src
!= loop_node
->loop
->latch
487 || (bitmap_bit_p (DF_UPWARD_LIVE_OUT (build_df
, e
->src
), regno
)
488 && bitmap_bit_p (DF_UPWARD_LIVE_IN (build_df
, e
->dest
),
490 freq
+= EDGE_FREQUENCY (e
);
494 edges
= get_loop_exit_edges (loop_node
->loop
);
495 for (i
= 0; VEC_iterate (edge
, edges
, i
, e
); i
++)
497 || (bitmap_bit_p (DF_UPWARD_LIVE_OUT (build_df
, e
->src
), regno
)
498 && bitmap_bit_p (DF_UPWARD_LIVE_IN (build_df
, e
->dest
),
500 freq
+= EDGE_FREQUENCY (e
);
501 VEC_free (edge
, heap
, edges
);
504 return REG_FREQ_FROM_EDGE_FREQ (freq
);
/* NOTE(review): computes the cost of keeping pseudo P in memory:
   starts from memory cost minus cover-class cost, then for each
   immediate subloop adjusts by the load/store traffic on the subloop
   boundary edges depending on whether the subloop's corresponding
   pseudo got a hard register.  The declarations `int regno, cost;',
   the final `return cost;', and the guards around the subloop cases
   (original lines 511-512, 520-521, 528, 531, 537, 544-546) are
   missing from this extraction.  */
507 /* The function calculates and returns cost of putting pseudo P into
510 calculate_pseudo_spill_cost (pseudo_t p
)
513 enum machine_mode mode
;
514 enum reg_class
class;
515 pseudo_t subloop_pseudo
;
516 struct ira_loop_tree_node
*subloop_node
;
518 regno
= PSEUDO_REGNO (p
);
519 cost
= PSEUDO_MEMORY_COST (p
) - PSEUDO_COVER_CLASS_COST (p
);
522 mode
= PSEUDO_MODE (p
);
523 class = PSEUDO_COVER_CLASS (p
);
524 for (subloop_node
= PSEUDO_LOOP_TREE_NODE (p
)->inner
;
525 subloop_node
!= NULL
;
526 subloop_node
= subloop_node
->next
)
527 if (subloop_node
->bb
== NULL
)
529 subloop_pseudo
= subloop_node
->regno_pseudo_map
[regno
];
530 if (subloop_pseudo
== NULL
)
532 if (PSEUDO_HARD_REGNO (subloop_pseudo
) < 0)
533 cost
-= (memory_move_cost
[mode
] [class] [0]
534 * loop_edge_freq (subloop_node
, regno
, TRUE
)
535 + memory_move_cost
[mode
] [class] [1]
536 * loop_edge_freq (subloop_node
, regno
, FALSE
));
538 /* ??? move register to register. How often do we have
539 different hard registers. */
540 cost
+= 2 * (memory_move_cost
[mode
] [class] [1]
541 * loop_edge_freq (subloop_node
, regno
, FALSE
)
542 + memory_move_cost
[mode
] [class] [0]
543 * loop_edge_freq (subloop_node
, regno
, TRUE
));
/* NOTE(review): push_pseudos_to_stack alternates pushing everything
   from the colorable bucket with choosing one potential-spill victim
   from the uncolorable bucket.  Victims are chosen per cover class by
   the lowest spill priority (spill cost divided by conflicts * nregs).
   Several lines are missing from this extraction: the enclosing loop
   over the uncolorable bucket (around original lines 604-605), the
   i_pseudo_pri assignment head (line 637), the i/j advance and brace
   lines, and the loop-exit handling (lines 645-652).  Do not
   restructure without the upstream source.  */
548 /* All pseudos sorted according to their priorities. */
549 static pseudo_t
*sorted_pseudos
;
551 /* Push pseudos on the coloring stack. The order of pseudos in the
552 stack defines the order for the subsequent coloring. */
554 push_pseudos_to_stack (void)
557 double pseudo_pri
, i_pseudo_pri
;
558 pseudo_t pseudo
, i_pseudo
;
559 pseudo_t
*pseudo_vec
;
560 enum reg_class cover_class
;
561 int num
, cover_class_pseudos_num
[N_REG_CLASSES
];
562 pseudo_t
*cover_class_pseudos
[N_REG_CLASSES
];
565 for (i
= 0; i
< reg_class_cover_size
; i
++)
567 cover_class
= reg_class_cover
[i
];
568 cover_class_pseudos_num
[cover_class
] = 0;
569 cover_class_pseudos
[cover_class
] = NULL
;
571 /* Calculate uncolorable pseudos of each cover class. */
572 for (pseudo
= uncolorable_pseudo_bucket
;
574 pseudo
= PSEUDO_NEXT_BUCKET_PSEUDO (pseudo
))
575 if ((cover_class
= PSEUDO_COVER_CLASS (pseudo
)) != NO_REGS
)
577 cover_class_pseudos_num
[cover_class
]++;
578 PSEUDO_TEMP (pseudo
) = INT_MAX
;
580 /* Define place where to put uncolorable pseudos of the same cover
582 for (num
= i
= 0; i
< reg_class_cover_size
; i
++)
584 cover_class
= reg_class_cover
[i
];
585 if (cover_class_pseudos_num
[cover_class
] != 0)
587 cover_class_pseudos
[cover_class
] = sorted_pseudos
+ num
;
588 num
+= cover_class_pseudos_num
[cover_class
];
589 cover_class_pseudos_num
[cover_class
] = 0;
592 ira_assert (num
<= pseudos_num
);
593 /* Put uncolorable pseudos of the same cover class together. */
594 for (pseudo
= uncolorable_pseudo_bucket
;
596 pseudo
= PSEUDO_NEXT_BUCKET_PSEUDO (pseudo
))
597 if ((cover_class
= PSEUDO_COVER_CLASS (pseudo
)) != NO_REGS
)
599 [cover_class
] [cover_class_pseudos_num
[cover_class
]++] = pseudo
;
602 push_only_colorable ();
603 pseudo
= uncolorable_pseudo_bucket
;
606 cover_class
= PSEUDO_COVER_CLASS (pseudo
);
607 if (cover_class
== NO_REGS
)
609 push_pseudo_to_spill (pseudo
);
612 /* Potential spilling. */
613 ira_assert (reg_class_nregs
[cover_class
] [PSEUDO_MODE (pseudo
)] > 0);
614 num
= cover_class_pseudos_num
[cover_class
];
615 ira_assert (num
> 0);
616 pseudo_vec
= cover_class_pseudos
[cover_class
];
619 /* Sort uncolorable pseudo to find the one with the lowest spill
621 for (i
= 0, j
= num
- 1; i
<= j
;)
623 i_pseudo
= pseudo_vec
[i
];
624 if (! PSEUDO_IN_GRAPH_P (i_pseudo
)
625 && PSEUDO_IN_GRAPH_P (pseudo_vec
[j
]))
627 i_pseudo
= pseudo_vec
[j
];
628 pseudo_vec
[j
] = pseudo_vec
[i
];
629 pseudo_vec
[i
] = i_pseudo
;
631 if (PSEUDO_IN_GRAPH_P (i_pseudo
))
634 if (PSEUDO_TEMP (i_pseudo
) == INT_MAX
)
635 PSEUDO_TEMP (i_pseudo
)
636 = calculate_pseudo_spill_cost (i_pseudo
);
638 = ((double) PSEUDO_TEMP (i_pseudo
)
639 / (PSEUDO_LEFT_CONFLICTS_NUM (i_pseudo
)
640 * reg_class_nregs
[PSEUDO_COVER_CLASS (i_pseudo
)]
641 [PSEUDO_MODE (i_pseudo
)] + 1));
642 if (pseudo
== NULL
|| pseudo_pri
> i_pseudo_pri
643 || (pseudo_pri
== i_pseudo_pri
644 && (PSEUDO_NUM (pseudo
) > PSEUDO_NUM (i_pseudo
))))
647 pseudo_pri
= i_pseudo_pri
;
650 if (! PSEUDO_IN_GRAPH_P (pseudo_vec
[j
]))
653 ira_assert (pseudo
!= NULL
&& j
>= 0);
654 cover_class_pseudos_num
[cover_class
] = j
+ 1;
655 ira_assert (PSEUDO_IN_GRAPH_P (pseudo
)
656 && PSEUDO_COVER_CLASS (pseudo
) == cover_class
657 && (PSEUDO_LEFT_CONFLICTS_NUM (pseudo
)
658 + reg_class_nregs
[cover_class
] [PSEUDO_MODE (pseudo
)]
659 > PSEUDO_AVAILABLE_REGS_NUM (pseudo
)));
660 remove_pseudo_from_bucket_and_push (pseudo
, FALSE
);
/* NOTE(review): pops pseudos off the coloring stack in LIFO order and
   tries assign_hard_reg on each; NO_REGS pseudos go straight to memory.
   The declaration of `pseudo' (original line 668) and several brace
   lines are missing from this extraction.  */
664 /* Assign hard registers to pseudos on the coloring stack. */
666 pop_pseudos_from_stack (void)
669 enum reg_class cover_class
;
671 for (;VARRAY_ACTIVE_SIZE (pseudo_stack_varray
) != 0;)
673 pseudo
= VARRAY_TOP_GENERIC_PTR (pseudo_stack_varray
);
674 VARRAY_POP (pseudo_stack_varray
);
675 cover_class
= PSEUDO_COVER_CLASS (pseudo
);
676 if (ira_dump_file
!= NULL
)
678 fprintf (ira_dump_file
, " Popping");
679 print_expanded_pseudo (pseudo
);
680 fprintf (ira_dump_file
, " -- ");
682 if (cover_class
== NO_REGS
)
684 PSEUDO_HARD_REGNO (pseudo
) = -1;
685 if (ira_dump_file
!= NULL
)
686 fprintf (ira_dump_file
, "assign memory\n");
688 else if (assign_hard_reg (pseudo
, FALSE
))
690 if (ira_dump_file
!= NULL
)
691 fprintf (ira_dump_file
, "assign reg %d\n",
692 PSEUDO_HARD_REGNO (pseudo
));
696 if (ira_dump_file
!= NULL
)
697 fprintf (ira_dump_file
, "spill\n");
699 PSEUDO_IN_GRAPH_P (pseudo
) = TRUE
;
/* NOTE(review): computes the "left conflicts" size of PSEUDO -- the
   number of hard registers already unusable (conflicting hard regs
   inside the cover class) plus the register needs of conflicting
   pseudos -- then files PSEUDO into the colorable bucket if it is
   trivially colorable, otherwise into the uncolorable bucket.  The
   `skip:' label targeted by GO_IF_HARD_REG_EQUAL, the full expression
   initializing `last' (original line 754), the hard_regno increment
   and several braces are missing from this extraction.  */
703 /* The function put PSEUDO in a bucket corresponding to its number and
704 size of its conflicting pseudos and hard registers. */
706 put_pseudo_into_bucket (pseudo_t pseudo
)
708 int i
, hard_regno
, conflict_pseudos_size
, hard_regs_num
;
709 pseudo_t conflict_pseudo
;
710 pseudo_t
*pseudo_vec
;
711 enum reg_class cover_class
;
712 HARD_REG_SET temp_set
;
714 cover_class
= PSEUDO_COVER_CLASS (pseudo
);
715 hard_regs_num
= class_hard_regs_num
[cover_class
];
716 if (hard_regs_num
!= 0)
718 memcpy (PSEUDO_CURR_HARD_REG_COSTS (pseudo
),
719 PSEUDO_HARD_REG_COSTS (pseudo
), sizeof (int) * hard_regs_num
);
720 memcpy (PSEUDO_CURR_CONFLICT_HARD_REG_COSTS (pseudo
),
721 PSEUDO_CONFLICT_HARD_REG_COSTS (pseudo
),
722 sizeof (int) * hard_regs_num
);
724 COPY_HARD_REG_SET (temp_set
, PSEUDO_CONFLICT_HARD_REGS (pseudo
));
725 AND_HARD_REG_SET (temp_set
, reg_class_contents
[cover_class
]);
726 AND_COMPL_HARD_REG_SET (temp_set
, no_alloc_regs
);
727 conflict_pseudos_size
= 0;
728 GO_IF_HARD_REG_EQUAL (temp_set
, zero_hard_reg_set
, skip
);
729 for (i
= 0; i
< (int) hard_regs_num
; i
++)
731 hard_regno
= class_hard_regs
[cover_class
] [i
];
732 if (TEST_HARD_REG_BIT (temp_set
, hard_regno
))
734 conflict_pseudos_size
++;
735 CLEAR_HARD_REG_BIT (temp_set
, hard_regno
);
736 GO_IF_HARD_REG_EQUAL (temp_set
, zero_hard_reg_set
, skip
);
740 pseudo_vec
= PSEUDO_CONFLICT_PSEUDO_VEC (pseudo
);
741 CLEAR_HARD_REG_SET (temp_set
);
742 if (cover_class
!= NO_REGS
)
743 for (i
= 0; (conflict_pseudo
= pseudo_vec
[i
]) != NULL
; i
++)
744 if (cover_class
== PSEUDO_COVER_CLASS (conflict_pseudo
)
745 && bitmap_bit_p (consideration_pseudo_bitmap
,
746 PSEUDO_NUM (conflict_pseudo
)))
748 if (! PSEUDO_ASSIGNED_P (conflict_pseudo
))
749 conflict_pseudos_size
750 += reg_class_nregs
[cover_class
] [PSEUDO_MODE (conflict_pseudo
)];
751 else if ((hard_regno
= PSEUDO_HARD_REGNO (conflict_pseudo
)) >= 0)
753 int last
= (hard_regno
755 [hard_regno
] [PSEUDO_MODE (conflict_pseudo
)]);
757 while (hard_regno
< last
)
759 if (! TEST_HARD_REG_BIT (temp_set
, hard_regno
))
761 conflict_pseudos_size
++;
762 SET_HARD_REG_BIT (temp_set
, hard_regno
);
768 PSEUDO_IN_GRAPH_P (pseudo
) = TRUE
;
769 PSEUDO_LEFT_CONFLICTS_NUM (pseudo
) = conflict_pseudos_size
;
770 if (conflict_pseudos_size
771 + reg_class_nregs
[cover_class
] [PSEUDO_MODE (pseudo
)]
772 <= PSEUDO_AVAILABLE_REGS_NUM (pseudo
))
773 add_pseudo_to_ordered_bucket (pseudo
, &colorable_pseudo_bucket
);
775 add_pseudo_to_bucket (pseudo
, &uncolorable_pseudo_bucket
);
/* NOTE(review): the header of color_pseudos (its signature, opening
   brace and the declarations of `i' and `bi', original lines 781-786)
   is missing from this extraction -- only the body remains.  It empties
   both buckets, files every pseudo from COLORING_PSEUDO_BITMAP into a
   bucket, then runs the push/pop phases of Chaitin-Briggs coloring.  */
778 /* Function implements Chaitin-Briggs coloring for pseudos in
779 COLORING_PSEUDO_BITMAP taking into account pseudos in
780 CONSIDERATION_PSEUDO_BITMAP. */
787 /* Put the pseudos into the corresponding buckets. */
788 colorable_pseudo_bucket
= NULL
;
789 uncolorable_pseudo_bucket
= NULL
;
790 EXECUTE_IF_SET_IN_BITMAP (coloring_pseudo_bitmap
, 0, i
, bi
)
791 put_pseudo_into_bucket (pseudos
[i
]);
792 push_pseudos_to_stack ();
793 pop_pseudos_from_stack ();
/* NOTE(review): dump helper -- prints the loop number, father, header
   basic block and depth, then the mentioned pseudos, modified regnos
   and border pseudos of LOOP_TREE_NODE to ira_dump_file.  The
   declarations of `j' and `bi' and the opening brace (original lines
   802-805) are missing from this extraction.  */
798 /* The function outputs information about the loop given by its
801 print_loop_title (struct ira_loop_tree_node
*loop_tree_node
)
806 ira_assert (loop_tree_node
->loop
!= NULL
);
807 fprintf (ira_dump_file
,
808 "\n Loop %d (father %d, header bb%d, depth %d)\n ref:",
809 loop_tree_node
->loop
->num
,
810 (loop_tree_node
->father
== NULL
811 ? -1 : loop_tree_node
->father
->loop
->num
),
812 loop_tree_node
->loop
->header
->index
,
813 loop_tree_node
->loop
->depth
);
814 EXECUTE_IF_SET_IN_BITMAP (loop_tree_node
->mentioned_pseudos
, 0, j
, bi
)
815 fprintf (ira_dump_file
, " %dr%d", j
, PSEUDO_REGNO (pseudos
[j
]));
816 fprintf (ira_dump_file
, "\n modified regnos:");
817 EXECUTE_IF_SET_IN_BITMAP (loop_tree_node
->modified_regnos
, 0, j
, bi
)
818 fprintf (ira_dump_file
, " %d", j
);
819 fprintf (ira_dump_file
, "\n border:");
820 EXECUTE_IF_SET_IN_BITMAP (loop_tree_node
->border_pseudos
, 0, j
, bi
)
821 fprintf (ira_dump_file
, " %dr%d", j
, PSEUDO_REGNO (pseudos
[j
]));
822 fprintf (ira_dump_file
, "\n");
/* NOTE(review): per-region coloring pass, run over the loop tree.  It
   builds the coloring/consideration bitmaps from the region's mentioned
   and border pseudos, colors them, then propagates the results into
   each subloop: either fixing the subloop pseudo to the same hard
   register, or adjusting its memory/hard-register/conflict costs by the
   spill/restore traffic on the subloop boundary.  Many guard lines are
   missing from this extraction (the `j'/`bi'/`bitmap_iterator' decls,
   the hard_regno < 0 guard before the index computation around lines
   868-869, the flag_ira_propagation-style branch heads at 884, 886,
   889, 891-894, 898-900, 905, 913-916, 921-924) -- do not restructure
   without the upstream source.  */
825 /* The function implements Chaitin-Briggs coloring for pseudos inside
826 loop (in extreme case it can be all function) given by the
827 corresponding LOOP_TREE_NODE. */
829 color_pass (struct ira_loop_tree_node
*loop_tree_node
)
831 int regno
, hard_regno
, index
= -1;
832 int cost
, exit_freq
, enter_freq
;
835 enum machine_mode mode
;
836 enum reg_class
class;
837 pseudo_t p
, subloop_pseudo
;
838 struct ira_loop_tree_node
*subloop_node
;
840 if (loop_tree_node
->loop
== NULL
)
842 if (ira_dump_file
!= NULL
)
843 print_loop_title (loop_tree_node
);
845 bitmap_copy (coloring_pseudo_bitmap
, loop_tree_node
->mentioned_pseudos
);
846 bitmap_ior_into (coloring_pseudo_bitmap
, loop_tree_node
->border_pseudos
);
847 bitmap_copy (consideration_pseudo_bitmap
, coloring_pseudo_bitmap
);
848 EXECUTE_IF_SET_IN_BITMAP (consideration_pseudo_bitmap
, 0, j
, bi
)
851 if (! PSEUDO_ASSIGNED_P (p
))
853 bitmap_clear_bit (coloring_pseudo_bitmap
, PSEUDO_NUM (p
));
855 /* Color all mentioned including transparent. */
857 /* Update costs for subloops. */
858 for (subloop_node
= loop_tree_node
->inner
;
859 subloop_node
!= NULL
;
860 subloop_node
= subloop_node
->next
)
861 if (subloop_node
->bb
== NULL
)
862 EXECUTE_IF_SET_IN_BITMAP (consideration_pseudo_bitmap
, 0, j
, bi
)
865 mode
= PSEUDO_MODE (p
);
866 class = PSEUDO_COVER_CLASS (p
);
867 hard_regno
= PSEUDO_HARD_REGNO (p
);
870 index
= class_hard_reg_index
[class] [hard_regno
];
871 ira_assert (index
>= 0);
873 regno
= PSEUDO_REGNO (p
);
874 /* ??? conflict costs */
877 subloop_pseudo
= subloop_node
->regno_pseudo_map
[regno
];
878 if (subloop_pseudo
== NULL
)
880 exit_freq
= loop_edge_freq (subloop_node
, regno
, TRUE
);
881 enter_freq
= loop_edge_freq (subloop_node
, regno
, FALSE
);
882 if (reg_equiv_invariant_p
[regno
]
883 || reg_equiv_const
[regno
] != NULL_RTX
)
885 if (! PSEUDO_ASSIGNED_P (subloop_pseudo
))
887 PSEUDO_HARD_REGNO (subloop_pseudo
) = hard_regno
;
888 PSEUDO_ASSIGNED_P (subloop_pseudo
) = TRUE
;
890 update_copy_costs (subloop_pseudo
, TRUE
);
893 else if (hard_regno
< 0)
895 PSEUDO_MEMORY_COST (subloop_pseudo
)
896 -= ((memory_move_cost
[mode
] [class] [1] * enter_freq
)
897 + (memory_move_cost
[mode
] [class] [0] * exit_freq
));
901 cost
= (register_move_cost
[mode
] [class] [class]
902 * (exit_freq
+ enter_freq
));
903 PSEUDO_HARD_REG_COSTS (subloop_pseudo
) [index
] -= cost
;
904 PSEUDO_CONFLICT_HARD_REG_COSTS (subloop_pseudo
) [index
]
906 PSEUDO_MEMORY_COST (subloop_pseudo
)
907 += (memory_move_cost
[mode
] [class] [0] * enter_freq
908 + memory_move_cost
[mode
] [class] [1] * exit_freq
);
909 if (PSEUDO_COVER_CLASS_COST (subloop_pseudo
)
910 > PSEUDO_HARD_REG_COSTS (subloop_pseudo
) [index
])
911 PSEUDO_COVER_CLASS_COST (subloop_pseudo
)
912 = PSEUDO_HARD_REG_COSTS (subloop_pseudo
) [index
];
917 exit_freq
= loop_edge_freq (subloop_node
, -1, TRUE
);
918 enter_freq
= loop_edge_freq (subloop_node
, -1, FALSE
);
919 cost
= (register_move_cost
[mode
] [class] [class]
920 * (exit_freq
+ enter_freq
));
/* NOTE(review): qsort comparator ordering pseudos by a global.c-style
   priority: log2(refs)/live-length scaled by REG_FREQ_MAX and the
   pseudo's size.  The declarations of `pri1'/`pri2', the `+ 1' terms,
   and the primary comparison returning their difference (original
   lines 931-932, 934, 938, 941-943) are missing from this extraction;
   only the tie-break by pseudo number survives.  */
925 /* The function is used to sort pseudos according to their priorities
926 which are calculated analogous to ones in file `global.c'. */
928 pseudo_priority_compare_func (const void *v1p
, const void *v2p
)
930 pseudo_t p1
= *(const pseudo_t
*) v1p
, p2
= *(const pseudo_t
*) v2p
;
933 pri1
= (((double) (floor_log2 (REG_N_REFS (PSEUDO_REGNO (p1
)))
935 / REG_LIVE_LENGTH (PSEUDO_REGNO (p1
)))
936 * (10000 / REG_FREQ_MAX
) * PSEUDO_REGNO_SIZE (PSEUDO_REGNO (p1
)));
937 pri2
= (((double) (floor_log2 (REG_N_REFS (PSEUDO_REGNO (p2
)))
939 / REG_LIVE_LENGTH (PSEUDO_REGNO (p2
)))
940 * (10000 / REG_FREQ_MAX
) * PSEUDO_REGNO_SIZE (PSEUDO_REGNO (p2
)));
944 /* If regs are equally good, sort by pseudos, so that the results of
945 qsort leave nothing to chance. */
946 return PSEUDO_NUM (p1
) - PSEUDO_NUM (p2
);
/* NOTE(review): Chow/Hennessy-style priority coloring: every pseudo is
   marked for coloring, its current cost arrays are seeded from the
   static ones, the pseudos are sorted by priority, and assign_hard_reg
   is tried on each in order.  The declaration of `p', the loop braces,
   and the `continue' for empty cover classes (original lines 952, 954,
   958, 960, 963) are missing from this extraction.  */
949 /* The function implements Chow's priority-based coloring. */
951 priority_coloring (void)
953 int i
, hard_regs_num
;
956 memcpy (sorted_pseudos
, pseudos
, pseudos_num
* sizeof (pseudo_t
));
957 for (i
= 0; i
< pseudos_num
; i
++)
959 bitmap_set_bit (coloring_pseudo_bitmap
, i
);
961 hard_regs_num
= class_hard_regs_num
[PSEUDO_COVER_CLASS (p
)];
962 if (hard_regs_num
== 0)
964 memcpy (PSEUDO_CURR_HARD_REG_COSTS (p
),
965 PSEUDO_HARD_REG_COSTS (p
), sizeof (int) * hard_regs_num
);
966 memcpy (PSEUDO_CURR_CONFLICT_HARD_REG_COSTS (p
),
967 PSEUDO_CONFLICT_HARD_REG_COSTS (p
),
968 sizeof (int) * hard_regs_num
);
970 bitmap_copy (consideration_pseudo_bitmap
, coloring_pseudo_bitmap
);
971 qsort (sorted_pseudos
, pseudos_num
, sizeof (pseudo_t
),
972 pseudo_priority_compare_func
);
973 for (i
= 0; i
< pseudos_num
; i
++)
975 p
= sorted_pseudos
[i
];
976 if (ira_dump_file
!= NULL
)
978 fprintf (ira_dump_file
, " ");
979 print_expanded_pseudo (p
);
980 fprintf (ira_dump_file
, " -- ");
982 if (assign_hard_reg (p
, FALSE
))
984 if (ira_dump_file
!= NULL
)
985 fprintf (ira_dump_file
, "assign reg %d\n",
986 PSEUDO_HARD_REGNO (p
));
990 if (ira_dump_file
!= NULL
)
991 fprintf (ira_dump_file
, "spill\n");
993 PSEUDO_IN_GRAPH_P (p
) = TRUE
;
/* NOTE(review): entry point of the coloring phase.  It allocates the
   two working bitmaps, dispatches to priority_coloring for
   IRA_ALGORITHM_PRIORITY or otherwise walks the loop tree running
   color_pass per region, prints the final disposition, and frees the
   bitmaps.  The signature/brace lines (original lines 999-1002), the
   `else' around the loop-tree branch, and the closing brace are
   missing from this extraction.  */
997 /* The function initializes common data for coloring and calls
998 functions to do Chaitin-Briggs, regional, and Chow's priority-based
1003 coloring_pseudo_bitmap
= ira_allocate_bitmap ();
1004 consideration_pseudo_bitmap
= ira_allocate_bitmap ();
1006 if (flag_ira_algorithm
== IRA_ALGORITHM_PRIORITY
)
1007 priority_coloring ();
1010 if (ira_dump_file
!= NULL
)
1011 fprintf (ira_dump_file
, "\n**** Pseudos coloring:\n\n");
1013 traverse_loop_tree (ira_loop_tree_root
, color_pass
, NULL
);
1016 if (ira_dump_file
!= NULL
)
1017 print_disposition (ira_dump_file
);
1019 ira_free_bitmap (consideration_pseudo_bitmap
);
1020 ira_free_bitmap (coloring_pseudo_bitmap
);
/* NOTE(review): iterates until no change, looking for pseudos spilled
   in an inner region whose father-region pseudo got a hard register;
   if keeping the value in memory across the father region is cheaper
   than the spill/restore traffic on the region boundary, the father
   pseudo is demoted to memory (spill/restore code moves outward to
   less frequent points).  The iteration driver (`changed_p' loop
   around lines 1038-1040, 1086-1095), the spill-candidate guards at
   1044-1047/1050/1053/1055, and the final profitability test around
   1081-1084 are missing from this extraction.
   NOTE(review): line 1068 assigns grandfather_pseudo from
   father->regno_pseudo_map -- `grandfather->regno_pseudo_map' looks
   intended; verify against the upstream source.  */
1025 /* The functions moves future spill/restore code to less frequent
1026 points (if it is profitable) by reassigning some pseudos to memory
1027 which means make longer live-range where the corresponding
1028 pseudo-registers will be in memory. */
1030 move_spill_restore (void)
1032 int i
, cost
, changed_p
, regno
, hard_regno
, index
;
1033 enum machine_mode mode
;
1034 enum reg_class
class;
1035 pseudo_t p
, father_pseudo
, grandfather_pseudo
;
1036 struct ira_loop_tree_node
*father
, *grandfather
;
1041 if (ira_dump_file
!= NULL
)
1042 fprintf (ira_dump_file
, "New iteration of spill/restore move\n");
1043 for (i
= 0; i
< pseudos_num
; i
++)
1046 regno
= PSEUDO_REGNO (p
);
1048 || PSEUDO_HARD_REGNO (p
) >= 0
1049 || (father
= PSEUDO_LOOP_TREE_NODE (p
)->father
) == NULL
)
1051 father_pseudo
= father
->regno_pseudo_map
[regno
];
1052 if (father_pseudo
== NULL
)
1054 if ((hard_regno
= PSEUDO_HARD_REGNO (father_pseudo
)) < 0)
1056 mode
= PSEUDO_MODE (p
);
1057 class = PSEUDO_COVER_CLASS (father_pseudo
);
1058 index
= class_hard_reg_index
[class] [hard_regno
];
1059 ira_assert (index
>= 0);
1060 cost
= (PSEUDO_ORIGINAL_MEMORY_COST (father_pseudo
)
1061 - PSEUDO_HARD_REG_COSTS (p
) [index
]);
1062 cost
-= (memory_move_cost
[mode
] [class] [0]
1063 * loop_edge_freq (PSEUDO_LOOP_TREE_NODE (p
), regno
, FALSE
)
1064 + memory_move_cost
[mode
] [class] [1]
1065 * loop_edge_freq (PSEUDO_LOOP_TREE_NODE (p
), regno
, TRUE
));
1066 if ((grandfather
= father
->father
) != NULL
)
1068 grandfather_pseudo
= father
->regno_pseudo_map
[regno
];
1069 if (grandfather_pseudo
!= NULL
)
1071 if (PSEUDO_HARD_REGNO (grandfather_pseudo
) < 0)
1072 cost
-= (memory_move_cost
[mode
] [class] [1]
1073 * loop_edge_freq (father
, regno
, FALSE
)
1074 + memory_move_cost
[mode
] [class] [0]
1075 * loop_edge_freq (father
, regno
, TRUE
));
1077 cost
+= (memory_move_cost
[mode
] [class] [0]
1078 * loop_edge_freq (father
, regno
, FALSE
)
1079 + memory_move_cost
[mode
] [class] [1]
1080 * loop_edge_freq (father
, regno
, TRUE
));
1085 PSEUDO_HARD_REGNO (father_pseudo
) = -1;
1096 /* The function called from the reload to mark changes in the
1097 allocation of REGNO made by the reload. */
/* NOTE(review): the return type and opening brace (original lines
   1098, 1100) and several structural lines (braces, an "else", and an
   early "return" near original line 1108) are missing from this
   extraction -- restore them from the original file.  */
1099 mark_allocation_change (int regno
)
1101 pseudo_t p
= regno_pseudo_map
[regno
];
1102 int old_hard_regno
, hard_regno
, cost
;
1103 enum reg_class cover_class
= PSEUDO_COVER_CLASS (p
);
1105 ira_assert (p
!= NULL
);
1106 hard_regno
= reg_renumber
[regno
];
/* Nothing to do when reload kept IRA's assignment.  */
1107 if ((old_hard_regno
= PSEUDO_HARD_REGNO (p
)) == hard_regno
)
/* COST accumulates the change of allocation cost: first the negated
   cost of the old location (memory or a hard register) ...  */
1109 if (old_hard_regno
< 0)
1110 cost
= -PSEUDO_MEMORY_COST (p
);
1113 ira_assert (class_hard_reg_index
[cover_class
] [old_hard_regno
] >= 0);
1114 cost
= -(PSEUDO_HARD_REG_COSTS (p
)
1115 [class_hard_reg_index
[cover_class
] [old_hard_regno
]]);
1116 update_copy_costs (p
, FALSE
);
1118 overall_cost
-= cost
;
/* Record reload's decision in the pseudo itself.  */
1119 PSEUDO_HARD_REGNO (p
) = hard_regno
;
/* ... then add the cost of the new location: memory, a register of
   the cover class, or (final "else", original line 1130 presumably
   "cost = 0;" -- missing here) a register of another class.  */
1121 cost
+= PSEUDO_MEMORY_COST (p
);
1122 else if (class_hard_reg_index
[cover_class
] [hard_regno
] >= 0)
1124 cost
+= (PSEUDO_HARD_REG_COSTS (p
)
1125 [class_hard_reg_index
[cover_class
] [hard_regno
]]);
1126 update_copy_costs (p
, TRUE
);
1129 /* Reload changes class of the pseudo. */
1131 overall_cost
+= cost
;
1134 /* The function is analog of function `retry_global_alloc'. It is
1135 called by reload to try to put spilled pseudo-register REGNO into a
1136 hard register which is not in FORBIDDEN_REGS. */
/* NOTE(review): the return type, opening brace and the declaration of
   P (original lines 1137, 1139-1140, 1143) are missing from this
   extraction -- restore them from the original file.  */
1138 retry_ira_color (int regno
, HARD_REG_SET forbidden_regs
)
1141 int hard_regno
, hard_regs_num
;
1142 enum reg_class cover_class
;
1144 p
= regno_pseudo_map
[regno
];
/* Keep the cost bookkeeping consistent with REGNO currently being
   spilled (reg_renumber[regno] < 0 is asserted below).  */
1145 mark_allocation_change (regno
);
1146 ira_assert (reg_renumber
[regno
] < 0);
1147 if (ira_dump_file
!= NULL
)
1148 fprintf (ira_dump_file
, "spill %d(p%d), cost=%d", regno
, PSEUDO_NUM (p
),
1149 PSEUDO_MEMORY_COST (p
) - PSEUDO_COVER_CLASS_COST (p
));
/* Registers forbidden by reload are treated as conflicts so the
   assignment below cannot pick them.  */
1150 IOR_HARD_REG_SET (PSEUDO_CONFLICT_HARD_REGS (p
), forbidden_regs
);
1151 PSEUDO_ASSIGNED_P (p
) = FALSE
;
1152 cover_class
= PSEUDO_COVER_CLASS (p
);
1153 hard_regs_num
= class_hard_regs_num
[cover_class
];
/* Refresh the working cost vectors from the original ones before
   re-running the assignment.  */
1154 if (hard_regs_num
!= 0)
1156 memcpy (PSEUDO_CURR_HARD_REG_COSTS (p
), PSEUDO_HARD_REG_COSTS (p
),
1157 sizeof (int) * hard_regs_num
);
1158 memcpy (PSEUDO_CURR_CONFLICT_HARD_REG_COSTS (p
),
1159 PSEUDO_CONFLICT_HARD_REG_COSTS (p
),
1160 sizeof (int) * hard_regs_num
);
1162 assign_hard_reg (p
, TRUE
);
1163 hard_regno
= PSEUDO_HARD_REGNO (p
);
1164 reg_renumber
[regno
] = hard_regno
;
1165 if (hard_regno
>= 0)
1167 ira_assert (class_hard_reg_index
[cover_class
] [hard_regno
] >= 0);
/* A successful assignment removes the memory cost and adds the
   register cost to the overall allocation cost.  */
1168 overall_cost
-= (PSEUDO_MEMORY_COST (p
)
1169 - (PSEUDO_HARD_REG_COSTS (p
)
1170 [class_hard_reg_index
1171 [cover_class
] [hard_regno
]]));
/* NOTE(review): the second argument of the set test (original lines
   1174-1175, presumably the call-clobbered register set) is missing
   here -- verify against the original file.  */
1172 if (PSEUDO_CALLS_CROSSED_NUM (p
) != 0
1173 && ! hard_reg_not_in_set_p (hard_regno
, PSEUDO_MODE (p
),
1176 ira_assert (flag_caller_saves
);
1177 caller_save_needed
= 1;
1181 /* If we found a register, modify the RTL for the register to show
1182 the hard register, and mark that register live. */
1183 if (reg_renumber
[regno
] >= 0)
1185 if (ira_dump_file
!= NULL
)
1186 fprintf (ira_dump_file
, ": reassign to %d", reg_renumber
[regno
]);
1187 REGNO (regno_reg_rtx
[regno
]) = reg_renumber
[regno
];
1188 mark_home_live (regno
);
1190 if (ira_dump_file
!= NULL
)
1191 fprintf (ira_dump_file
, "\n");
1196 /* The function called by the reload returns already allocated stack
1197 slot (if any) for REGNO with given INHERENT_SIZE and
/* NOTE(review): the tail of the comment above and the function's
   return type, opening brace and remaining declarations (N, I, BI,
   the rtx result -- original lines 1198-1199, 1202-1206, 1208) are
   missing from this extraction.  */
1200 reuse_stack_slot (int regno
, unsigned int inherent_size
,
1201 unsigned int total_size
)
1207 struct spilled_reg_stack_slot
*slot
;
1209 ira_assert (flag_ira
&& inherent_size
== PSEUDO_REGNO_BYTES (regno
)
1210 && inherent_size
<= total_size
)
;
/* Search direction depends on the frame layout: with
   -fomit-frame-pointer start at the most recently created slot and
   walk down, otherwise walk up from the first slot.  */
1212 if (flag_omit_frame_pointer
)
1213 n
= spilled_reg_stack_slots_num
- 1;
/* NOTE(review): the "else" initialization of N and the search-loop
   header (original lines 1214-1217, 1219-1221) are missing here.  */
1218 if (flag_omit_frame_pointer
)
1222 slot
= &spilled_reg_stack_slots
[n
--];
1224 else if (n
>= spilled_reg_stack_slots_num
)
1227 slot
= &spilled_reg_stack_slots
[n
++];
/* A slot is reusable only if it is big enough ...  */
1228 if (slot
->width
< total_size
1229 || GET_MODE_SIZE (GET_MODE (slot
->mem
)) < inherent_size
)
/* ... and no pseudo already spilled into it conflicts with REGNO.  */
1232 EXECUTE_IF_SET_IN_BITMAP (&slot
->spilled_regs
,
1233 FIRST_PSEUDO_REGISTER
, i
, bi
)
1235 if (pseudo_reg_conflict_p (regno
, i
))
/* Record REGNO as a new occupant of the chosen slot.  */
1239 SET_REGNO_REG_SET (&slot
->spilled_regs
, regno
);
/* NOTE(review): original lines 1240-1248 (presumably fetching the
   slot's MEM as the return value and the ira_dump_file guard for the
   dump output below) are missing here.  */
1249 fprintf (ira_dump_file
, " Assigning %d slot of", regno
);
1250 EXECUTE_IF_SET_IN_BITMAP (&slot
->spilled_regs
,
1251 FIRST_PSEUDO_REGISTER
, i
, bi
)
1253 if ((unsigned) regno
!= i
)
1254 fprintf (ira_dump_file
, " %d", i
);
1256 fprintf (ira_dump_file
, "\n");
1262 /* The function called by the reload when a new stack slot X with
1263 TOTAL_SIZE was allocated for REGNO. */
/* NOTE(review): the return type and opening brace (original lines
   1264, 1266) are missing from this extraction.  */
1265 mark_new_stack_slot (rtx x
, int regno
, unsigned int total_size
)
1267 struct spilled_reg_stack_slot
*slot
;
1269 ira_assert (flag_ira
&& PSEUDO_REGNO_BYTES (regno
) <= total_size
);
/* Register the new slot and record REGNO as its first occupant.  */
1270 slot
= &spilled_reg_stack_slots
[spilled_reg_stack_slots_num
++];
1271 INIT_REG_SET (&slot
->spilled_regs
);
1272 SET_REGNO_REG_SET (&slot
->spilled_regs
, regno
);
/* NOTE(review): original line 1273 (presumably storing X into
   slot->mem) and the ira_dump_file guard before the fprintf below
   are missing here -- verify against the original file.  */
1274 slot
->width
= total_size
;
1276 fprintf (ira_dump_file
, " Assigning %d a new slot\n", regno
);
1281 /* The function returns (through CALL_CLOBBERED_REGS) hard registers
1282 changed by all function calls in REGNO live range. */
/* NOTE(review): the return type, opening brace and the declarations
   of P and I (original lines 1283, 1286-1288) are missing from this
   extraction.  */
1284 collect_pseudo_call_clobbered_regs (int regno
,
1285 HARD_REG_SET (*call_clobbered_regs
))
1289 HARD_REG_SET clobbered_regs
;
1290 rtx call
, *pseudo_calls
;
1292 p
= regno_pseudo_map
[regno
];
/* Start from the empty set and union in the clobber set of every
   call crossed by the pseudo's live range.  */
1293 CLEAR_HARD_REG_SET (*call_clobbered_regs
);
1294 pseudo_calls
= PSEUDO_CALLS_CROSSED (p
);
1295 for (i
= PSEUDO_CALLS_CROSSED_NUM (p
) - 1; i
>= 0; i
--)
1297 call
= pseudo_calls
[i
];
1298 get_call_invalidated_used_regs (call
, &clobbered_regs
, FALSE
);
1299 IOR_HARD_REG_SET (*call_clobbered_regs
, clobbered_regs
);
1305 /* Update available hard registers for each pseudo. */
/* NOTE(review): the return type, opening brace and the declarations
   of I, J, N, COST and P (original lines 1306, 1308-1312), the
   "p = pseudos [i]" assignment near line 1315, a "continue" after the
   NO_REGS test, and the "n++" body of the conflict test (line 1324)
   are missing from this extraction.  Note also that the function name
   itself is misspelled ("avaialable") in the original source.  */
1307 update_pseudo_avaialable_regs (void)
1310 enum reg_class cover_class
;
1313 for (i
= 0; i
< pseudos_num
; i
++)
1316 cover_class
= PSEUDO_COVER_CLASS (p
);
/* Start from the total number of allocatable registers in the cover
   class ...  */
1317 PSEUDO_AVAILABLE_REGS_NUM (p
) = available_class_regs
[cover_class
];
1318 if (cover_class
== NO_REGS
)
1320 cost
= PSEUDO_MEMORY_COST (p
);
/* ... and count in N how many of them conflict with the pseudo.  */
1321 for (n
= 0, j
= class_hard_regs_num
[cover_class
] - 1; j
>= 0; j
--)
1322 if (TEST_HARD_REG_BIT (PSEUDO_CONFLICT_HARD_REGS (p
),
1323 class_hard_regs
[cover_class
] [j
]))
1325 if (n
> 0 && ira_dump_file
!= NULL
)
1326 fprintf (ira_dump_file
, "reg %d of %s has %d regs less\n",
1327 PSEUDO_REGNO (p
), reg_class_names
[cover_class
], n
);
1328 PSEUDO_AVAILABLE_REGS_NUM (p
) -= n
;
1332 /* Entry function doing color-based register allocation. */
/* NOTE(review): the function header (original lines 1333-1335,
   presumably "void ira_color (void)" and its opening brace), the call
   that actually performs the coloring (original line 1341, presumably
   "do_coloring ();" -- line numbers jump 1340 -> 1342 here), and the
   closing brace are missing from this extraction.  */
1336 update_pseudo_avaialable_regs ();
/* Set up the work structures used by the coloring pass.  */
1337 sorted_pseudos
= ira_allocate (sizeof (pseudo_t
) * pseudos_num
);
1338 VARRAY_GENERIC_PTR_NOGC_INIT (pseudo_stack_varray
, pseudos_num
,
1339 "stack of pseudos");
1340 memset (allocated_hardreg_p
, 0, sizeof (allocated_hardreg_p
));
/* Tear the work structures down again ...  */
1342 VARRAY_FREE (pseudo_stack_varray
);
1343 ira_free (sorted_pseudos
);
/* ... and finally try to move spill/restore code to colder points.  */
1344 move_spill_restore ();