/* IRA allocation based on graph coloring.
   Copyright (C) 2006, 2007, 2008
   Free Software Foundation, Inc.
   Contributed by Vladimir Makarov <vmakarov@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tm_p.h"
#include "target.h"
#include "varray.h"
#include "regs.h"
#include "flags.h"
#include "sbitmap.h"
#include "bitmap.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "expr.h"
#include "toplev.h"
#include "reload.h"
#include "params.h"
#include "df.h"
#include "ira-int.h"
/* We use optimistic coloring.  */
static void initiate_cost_update (void);
static void finish_cost_update (void);
static void update_copy_costs_1 (allocno_t, int, int, int);
static void update_copy_costs (allocno_t, int);
static int assign_hard_reg (allocno_t, int);

static void add_allocno_to_bucket (allocno_t, allocno_t *);
static void add_allocno_to_ordered_bucket (allocno_t, allocno_t *);
static void delete_allocno_from_bucket (allocno_t, allocno_t *);
static void push_allocno_to_stack (allocno_t);
static void remove_allocno_from_bucket_and_push (allocno_t, int);
static void push_only_colorable (void);
static void push_allocno_to_spill (allocno_t);
static int calculate_allocno_spill_cost (allocno_t);
static void push_allocnos_to_stack (void);
static void pop_allocnos_from_stack (void);
static void setup_allocno_available_regs_num (allocno_t);
static void setup_allocno_left_conflicts_num (allocno_t);
static void put_allocno_into_bucket (allocno_t);
static int copy_freq_compare_func (const void *, const void *);
static void merge_allocnos (allocno_t, allocno_t);
static int coalesced_allocno_conflict_p (allocno_t, allocno_t);
static void coalesce_allocnos (int);
static void color_allocnos (void);

static void print_loop_title (loop_tree_node_t);
static void color_pass (loop_tree_node_t);
static int allocno_priority_compare_func (const void *, const void *);
static void start_allocno_priorities (allocno_t *, int);
static void do_coloring (void);

static void move_spill_restore (void);

static int coalesced_pseudo_reg_compare (const void *, const void *);

static void setup_curr_costs (allocno_t);
static int pseudo_reg_compare (const void *, const void *);

static int calculate_spill_cost (int *, rtx, rtx, rtx,
                                 int *, int *, int *, int *);
/* Bitmap of allocnos which should be colored.  */
static bitmap coloring_allocno_bitmap;

/* Bitmap of allocnos which should be taken into account during
   coloring.  In the general case it contains allocnos from
   coloring_allocno_bitmap plus other already colored conflicting
   allocnos.  */
static bitmap consideration_allocno_bitmap;

/* TRUE if we coalesced some allocnos.  In other words, if we have
   loops formed by members first_coalesced_allocno and
   next_coalesced_allocno containing more than one allocno.  */
static int allocno_coalesced_p;

/* Bitmap used to prevent a repeated allocno processing because of
   coalescing.  */
static bitmap processed_coalesced_allocno_bitmap;

/* All allocnos sorted according to their priorities.  */
static allocno_t *sorted_allocnos;
/* This page contains functions to choose a hard register for
   allocnos.  */

/* Array whose element value is TRUE if the corresponding hard
   register was already allocated for an allocno.  */
static int allocated_hardreg_p [FIRST_PSEUDO_REGISTER];

/* Array used to check already processed allocnos during the current
   update_copy_costs call.  */
static int *allocno_update_cost_check;

/* The current value of update_copy_cost call count.  */
static int update_cost_check;
/* Allocate and initialize data necessary for update_copy_costs.  */
static void
initiate_cost_update (void)
{
  allocno_update_cost_check = ira_allocate (allocnos_num * sizeof (int));
  memset (allocno_update_cost_check, 0, allocnos_num * sizeof (int));
  update_cost_check = 0;
}

/* Deallocate data used by update_copy_costs.  */
static void
finish_cost_update (void)
{
  ira_free (allocno_update_cost_check);
}
/* The function updates costs (decrease if DECR_P) of the allocnos
   connected by copies with ALLOCNO.  */
static void
update_copy_costs_1 (allocno_t allocno, int hard_regno,
                     int decr_p, int divisor)
{
  int i, cost, update_cost, hard_regs_num;
  enum machine_mode mode;
  enum reg_class class;
  allocno_t another_allocno;
  copy_t cp, next_cp;

  if (ALLOCNO_COVER_CLASS (allocno) == NO_REGS)
    return;
  if (allocno_update_cost_check [ALLOCNO_NUM (allocno)] == update_cost_check)
    return;
  allocno_update_cost_check [ALLOCNO_NUM (allocno)] = update_cost_check;
  ira_assert (hard_regno >= 0);
  i = class_hard_reg_index [ALLOCNO_COVER_CLASS (allocno)] [hard_regno];
  ira_assert (i >= 0);
  class = REGNO_REG_CLASS (hard_regno);
  mode = ALLOCNO_MODE (allocno);
  for (cp = ALLOCNO_COPIES (allocno); cp != NULL; cp = next_cp)
    {
      if (cp->first == allocno)
        {
          next_cp = cp->next_first_allocno_copy;
          another_allocno = cp->second;
        }
      else if (cp->second == allocno)
        {
          next_cp = cp->next_second_allocno_copy;
          another_allocno = cp->first;
        }
      else
        gcc_unreachable ();
      if (ALLOCNO_COVER_CLASS (allocno)
          != ALLOCNO_COVER_CLASS (another_allocno)
          || ALLOCNO_ASSIGNED_P (another_allocno))
        continue;
      hard_regs_num = class_hard_regs_num [ALLOCNO_COVER_CLASS (allocno)];
      cost = (cp->second == allocno
              ? register_move_cost [mode] [class]
                                   [ALLOCNO_COVER_CLASS (another_allocno)]
              : register_move_cost [mode]
                                   [ALLOCNO_COVER_CLASS (another_allocno)] [class]);
      if (decr_p)
        cost = -cost;
      allocate_and_set_or_copy_costs
        (&ALLOCNO_UPDATED_HARD_REG_COSTS (another_allocno), hard_regs_num,
         ALLOCNO_COVER_CLASS_COST (another_allocno),
         ALLOCNO_HARD_REG_COSTS (another_allocno));
      allocate_and_set_or_copy_costs
        (&ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (another_allocno),
         hard_regs_num, 0,
         ALLOCNO_CONFLICT_HARD_REG_COSTS (another_allocno));
      update_cost = cp->freq * cost / divisor;
      ALLOCNO_UPDATED_HARD_REG_COSTS (another_allocno) [i] += update_cost;
      ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (another_allocno) [i]
        += update_cost;
      if (update_cost != 0)
        update_copy_costs_1 (another_allocno, hard_regno,
                             decr_p, divisor * 4);
    }
}
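/* Entry function for the cost propagation.  Each call bumps
   update_cost_check, so every allocno reachable through copies is
   processed at most once per call; update_copy_costs_1 then spreads
   the change with a geometrically decaying weight (DIVISOR grows by a
   factor of 4 per copy-chain step, giving freq * cost,
   freq * cost / 4, freq * cost / 16, ...).  */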
static void
update_copy_costs (allocno_t allocno, int decr_p)
{
  update_cost_check++;
  update_copy_costs_1 (allocno, ALLOCNO_HARD_REGNO (allocno), decr_p, 1);
}
/* The function is used to sort allocnos according to the profit of
   using a hard register instead of memory for them.  */
static int
allocno_cost_compare_func (const void *v1p, const void *v2p)
{
  allocno_t p1 = *(const allocno_t *) v1p, p2 = *(const allocno_t *) v2p;
  int c1, c2;

  c1 = ALLOCNO_UPDATED_MEMORY_COST (p1) - ALLOCNO_COVER_CLASS_COST (p1);
  c2 = ALLOCNO_UPDATED_MEMORY_COST (p2) - ALLOCNO_COVER_CLASS_COST (p2);
  if (c1 - c2)
    return c1 - c2;

  /* If regs are equally good, sort by allocnos, so that the results of
     qsort leave nothing to chance.  */
  return ALLOCNO_NUM (p1) - ALLOCNO_NUM (p2);
}
/* Print all allocnos coalesced with ALLOCNO.  */
static void
print_coalesced_allocno (allocno_t allocno)
{
  allocno_t a;

  for (a = ALLOCNO_NEXT_COALESCED_ALLOCNO (allocno);;
       a = ALLOCNO_NEXT_COALESCED_ALLOCNO (a))
    {
      print_expanded_allocno (a);
      if (a == allocno)
        break;
      fprintf (ira_dump_file, "+");
    }
}
/* Varray representing the stack of allocnos used during coloring.  */
static varray_type allocno_stack_varray;
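/* Note for the reader: assign_hard_reg works with two cost vectors.
   COSTS reflects only the preferences of ALLOCNO's own coalesced set,
   while FULL_COSTS additionally subtracts the preferences of
   conflicting allocnos and adds those of copy-connected ones.  The
   hard register is chosen by the minimal full cost, but the decision
   to spill compares the pure minimal cost against the memory cost.  */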
/* Function choosing a hard register for ALLOCNO.  If RETRY_P is
   nonzero, it means that the function is called from
   `reassign_pseudos'.  */
static int
assign_hard_reg (allocno_t allocno, int retry_p)
{
  HARD_REG_SET conflicting_regs;
  int i, j, hard_regno, best_hard_regno, class_size;
  int cost, mem_cost, min_cost, full_cost, min_full_cost, add_cost;
  int *a_costs;
  int *conflict_costs;
  enum reg_class cover_class, class;
  enum machine_mode mode;
  allocno_t a, conflict_allocno;
  allocno_t *allocno_vec;
  allocno_t another_allocno;
  copy_t cp, next_cp;
  static int costs [FIRST_PSEUDO_REGISTER], full_costs [FIRST_PSEUDO_REGISTER];
#ifdef STACK_REGS
  int no_stack_reg_p;
#endif

  ira_assert (! ALLOCNO_ASSIGNED_P (allocno));
  cover_class = ALLOCNO_COVER_CLASS (allocno);
  class_size = class_hard_regs_num [cover_class];
  mode = ALLOCNO_MODE (allocno);
  COPY_HARD_REG_SET (conflicting_regs, no_alloc_regs);
  IOR_HARD_REG_SET (conflicting_regs,
                    prohibited_class_mode_regs [cover_class] [mode]);
  IOR_COMPL_HARD_REG_SET (conflicting_regs, reg_class_contents [cover_class]);
  best_hard_regno = -1;
  memset (full_costs, 0, sizeof (int) * class_size);
  mem_cost = 0;
  if (allocno_coalesced_p)
    bitmap_clear (processed_coalesced_allocno_bitmap);
  memset (costs, 0, sizeof (int) * class_size);
  memset (full_costs, 0, sizeof (int) * class_size);
#ifdef STACK_REGS
  no_stack_reg_p = FALSE;
#endif
  for (a = ALLOCNO_NEXT_COALESCED_ALLOCNO (allocno);;
       a = ALLOCNO_NEXT_COALESCED_ALLOCNO (a))
    {
      mem_cost += ALLOCNO_UPDATED_MEMORY_COST (a);
      allocno_vec = ALLOCNO_CONFLICT_ALLOCNO_VEC (a);
      IOR_HARD_REG_SET (conflicting_regs,
                        ALLOCNO_TOTAL_CONFLICT_HARD_REGS (a));
      allocate_and_copy_costs (&ALLOCNO_UPDATED_HARD_REG_COSTS (a),
                               class_size, ALLOCNO_HARD_REG_COSTS (a));
      a_costs = ALLOCNO_UPDATED_HARD_REG_COSTS (a);
#ifdef STACK_REGS
      no_stack_reg_p = no_stack_reg_p || ALLOCNO_TOTAL_NO_STACK_REG_P (a);
#endif
      for (cost = ALLOCNO_COVER_CLASS_COST (a), i = 0; i < class_size; i++)
        if (a_costs != NULL)
          {
            costs [i] += a_costs [i];
            full_costs [i] += a_costs [i];
          }
        else
          {
            costs [i] += cost;
            full_costs [i] += cost;
          }
      for (i = 0; (conflict_allocno = allocno_vec [i]) != NULL; i++)
        /* Reload can give another class so we need to check all
           allocnos.  */
        if (retry_p || bitmap_bit_p (consideration_allocno_bitmap,
                                     ALLOCNO_NUM (conflict_allocno)))
          {
            ira_assert (cover_class == ALLOCNO_COVER_CLASS (conflict_allocno));
            if (allocno_coalesced_p)
              {
                if (bitmap_bit_p (processed_coalesced_allocno_bitmap,
                                  ALLOCNO_NUM (conflict_allocno)))
                  continue;
                bitmap_set_bit (processed_coalesced_allocno_bitmap,
                                ALLOCNO_NUM (conflict_allocno));
              }
            if (ALLOCNO_ASSIGNED_P (conflict_allocno))
              {
                if ((hard_regno = ALLOCNO_HARD_REGNO (conflict_allocno)) >= 0)
                  {
                    IOR_HARD_REG_SET
                      (conflicting_regs,
                       reg_mode_hard_regset
                       [hard_regno] [ALLOCNO_MODE (conflict_allocno)]);
                    if (hard_reg_set_subset_p (reg_class_contents
                                               [cover_class],
                                               conflicting_regs))
                      goto fail;
                  }
                continue;
              }
            else if (! ALLOCNO_MAY_BE_SPILLED_P (conflict_allocno))
              {
                allocate_and_copy_costs
                  (&ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (conflict_allocno),
                   class_size,
                   ALLOCNO_CONFLICT_HARD_REG_COSTS (conflict_allocno));
                conflict_costs
                  = ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (conflict_allocno);
                if (conflict_costs != NULL)
                  for (j = class_size - 1; j >= 0; j--)
                    full_costs [j] -= conflict_costs [j];
              }
          }
      if (a == allocno)
        break;
    }
  for (a = ALLOCNO_NEXT_COALESCED_ALLOCNO (allocno);;
       a = ALLOCNO_NEXT_COALESCED_ALLOCNO (a))
    {
      for (cp = ALLOCNO_COPIES (a); cp != NULL; cp = next_cp)
        {
          if (cp->first == a)
            {
              next_cp = cp->next_first_allocno_copy;
              another_allocno = cp->second;
            }
          else if (cp->second == a)
            {
              next_cp = cp->next_second_allocno_copy;
              another_allocno = cp->first;
            }
          else
            gcc_unreachable ();
          if (cover_class != ALLOCNO_COVER_CLASS (another_allocno)
              || ALLOCNO_ASSIGNED_P (another_allocno))
            continue;
          allocate_and_copy_costs
            (&ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (another_allocno),
             class_size, ALLOCNO_CONFLICT_HARD_REG_COSTS (another_allocno));
          conflict_costs
            = ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (another_allocno);
          if (conflict_costs != NULL
              && ! ALLOCNO_MAY_BE_SPILLED_P (another_allocno))
            for (j = class_size - 1; j >= 0; j--)
              full_costs [j] += conflict_costs [j];
        }
      if (a == allocno)
        break;
    }
  min_cost = min_full_cost = INT_MAX;
  /* We don't care about giving callee-saved registers to allocnos not
     living through calls because call-used registers are allocated
     first (it is usual practice to put them first in
     REG_ALLOC_ORDER).  */
  for (i = 0; i < class_size; i++)
    {
      hard_regno = class_hard_regs [cover_class] [i];
#ifdef STACK_REGS
      if (no_stack_reg_p
          && FIRST_STACK_REG <= hard_regno && hard_regno <= LAST_STACK_REG)
        continue;
#endif
      if (! hard_reg_not_in_set_p (hard_regno, mode, conflicting_regs))
        continue;
      cost = costs [i];
      full_cost = full_costs [i];
      if (! allocated_hardreg_p [hard_regno]
          && hard_reg_not_in_set_p (hard_regno, mode, call_used_reg_set))
        /* We need to save/restore the register in the
           epilogue/prologue.  Therefore we increase the cost.  */
        {
          /* ??? If only part is call clobbered.  */
          class = REGNO_REG_CLASS (hard_regno);
          add_cost = (memory_move_cost [mode] [class] [0]
                      + memory_move_cost [mode] [class] [1] - 1);
          cost += add_cost;
          full_cost += add_cost;
        }
      if (min_cost > cost)
        min_cost = cost;
      if (min_full_cost > full_cost)
        {
          min_full_cost = full_cost;
          best_hard_regno = hard_regno;
          ira_assert (hard_regno >= 0);
        }
    }
  if (min_cost > mem_cost)
    best_hard_regno = -1;
 fail:
  if (best_hard_regno < 0
      && ALLOCNO_NEXT_COALESCED_ALLOCNO (allocno) != allocno)
    {
      for (j = 0, a = ALLOCNO_NEXT_COALESCED_ALLOCNO (allocno);;
           a = ALLOCNO_NEXT_COALESCED_ALLOCNO (a))
        {
          sorted_allocnos [j++] = a;
          if (a == allocno)
            break;
        }
      qsort (sorted_allocnos, j, sizeof (allocno_t),
             allocno_cost_compare_func);
      for (i = 0; i < j; i++)
        {
          a = sorted_allocnos [i];
          ALLOCNO_FIRST_COALESCED_ALLOCNO (a) = a;
          ALLOCNO_NEXT_COALESCED_ALLOCNO (a) = a;
          VARRAY_PUSH_GENERIC_PTR (allocno_stack_varray, a);
          if (internal_flag_ira_verbose > 3 && ira_dump_file != NULL)
            {
              fprintf (ira_dump_file, " Pushing");
              print_coalesced_allocno (a);
              fprintf (ira_dump_file, "\n");
            }
        }
      return FALSE;
    }
  if (best_hard_regno >= 0)
    allocated_hardreg_p [best_hard_regno] = TRUE;
  for (a = ALLOCNO_NEXT_COALESCED_ALLOCNO (allocno);;
       a = ALLOCNO_NEXT_COALESCED_ALLOCNO (a))
    {
      ALLOCNO_HARD_REGNO (a) = best_hard_regno;
      ALLOCNO_ASSIGNED_P (a) = TRUE;
      if (best_hard_regno >= 0)
        update_copy_costs (a, TRUE);
      if (a == allocno)
        break;
    }
  return best_hard_regno >= 0;
}
/* This page contains the allocator based on the Chaitin-Briggs
   algorithm.  */

/* Bucket of allocnos that can be colored currently without
   spilling.  */
static allocno_t colorable_allocno_bucket;

/* Bucket of allocnos that might not be colored currently without
   spilling.  */
static allocno_t uncolorable_allocno_bucket;
/* Add ALLOCNO to *BUCKET_PTR bucket.  ALLOCNO must not be in a bucket
   before the call.  */
static void
add_allocno_to_bucket (allocno_t allocno, allocno_t *bucket_ptr)
{
  allocno_t first_allocno;

  first_allocno = *bucket_ptr;
  ALLOCNO_NEXT_BUCKET_ALLOCNO (allocno) = first_allocno;
  ALLOCNO_PREV_BUCKET_ALLOCNO (allocno) = NULL;
  if (first_allocno != NULL)
    ALLOCNO_PREV_BUCKET_ALLOCNO (first_allocno) = allocno;
  *bucket_ptr = allocno;
}
/* The function returns the best class and frequency for allocnos
   coalesced with ALLOCNO.  */
static void
get_coalesced_allocnos_best_class_and_freq (allocno_t allocno,
                                            enum reg_class *best_class,
                                            int *freq)
{
  allocno_t a;

  *freq = 0;
  *best_class = ALL_REGS;
  for (a = ALLOCNO_NEXT_COALESCED_ALLOCNO (allocno);;
       a = ALLOCNO_NEXT_COALESCED_ALLOCNO (a))
    {
      *freq += ALLOCNO_FREQ (a);
      *best_class
        = reg_class_intersect [ALLOCNO_BEST_CLASS (a)] [*best_class];
      if (a == allocno)
        break;
    }
}
/* Add ALLOCNO to *BUCKET_PTR bucket, maintaining the order according
   to frequency.  ALLOCNO must not be in a bucket before the call.  */
static void
add_allocno_to_ordered_bucket (allocno_t allocno, allocno_t *bucket_ptr)
{
  allocno_t before, after;
  enum reg_class cover_class, best_class, best_class_before;
  int freq, freq_before, nregs;

  cover_class = ALLOCNO_COVER_CLASS (allocno);
  nregs = reg_class_nregs [cover_class] [ALLOCNO_MODE (allocno)];
  get_coalesced_allocnos_best_class_and_freq (allocno, &best_class, &freq);
  for (before = *bucket_ptr, after = NULL;
       before != NULL;
       after = before, before = ALLOCNO_NEXT_BUCKET_ALLOCNO (before))
    {
      if (ALLOCNO_COVER_CLASS (before) < cover_class)
        continue;
      if (ALLOCNO_COVER_CLASS (before) > cover_class)
        break;
      get_coalesced_allocnos_best_class_and_freq
        (before, &best_class_before, &freq_before);
      if (strict_class_subset_p [best_class_before] [best_class])
        break;
      else if (strict_class_subset_p [best_class] [best_class_before])
        ;
      else if (freq_before > freq)
        break;
    }
  ALLOCNO_NEXT_BUCKET_ALLOCNO (allocno) = before;
  ALLOCNO_PREV_BUCKET_ALLOCNO (allocno) = after;
  if (after == NULL)
    *bucket_ptr = allocno;
  else
    ALLOCNO_NEXT_BUCKET_ALLOCNO (after) = allocno;
  if (before != NULL)
    ALLOCNO_PREV_BUCKET_ALLOCNO (before) = allocno;
}
/* Delete ALLOCNO from *BUCKET_PTR bucket.  It should be there before
   the call.  */
static void
delete_allocno_from_bucket (allocno_t allocno, allocno_t *bucket_ptr)
{
  allocno_t prev_allocno, next_allocno;

  prev_allocno = ALLOCNO_PREV_BUCKET_ALLOCNO (allocno);
  next_allocno = ALLOCNO_NEXT_BUCKET_ALLOCNO (allocno);
  if (prev_allocno != NULL)
    ALLOCNO_NEXT_BUCKET_ALLOCNO (prev_allocno) = next_allocno;
  else
    {
      ira_assert (*bucket_ptr == allocno);
      *bucket_ptr = next_allocno;
    }
  if (next_allocno != NULL)
    ALLOCNO_PREV_BUCKET_ALLOCNO (next_allocno) = prev_allocno;
}
/* The function puts ALLOCNO onto the coloring stack without removing
   it from the bucket.  Such action can result in moving conflicting
   allocnos from the uncolorable bucket to the colorable one.  */
static void
push_allocno_to_stack (allocno_t allocno)
{
  int i, conflicts_num, conflict_size, size;
  allocno_t a, conflict_allocno;
  allocno_t *allocno_vec;
  enum reg_class cover_class;

  ALLOCNO_IN_GRAPH_P (allocno) = FALSE;
  VARRAY_PUSH_GENERIC_PTR (allocno_stack_varray, allocno);
  cover_class = ALLOCNO_COVER_CLASS (allocno);
  if (cover_class == NO_REGS)
    return;
  size = reg_class_nregs [cover_class] [ALLOCNO_MODE (allocno)];
  if (allocno_coalesced_p)
    bitmap_clear (processed_coalesced_allocno_bitmap);
  for (a = ALLOCNO_NEXT_COALESCED_ALLOCNO (allocno);;
       a = ALLOCNO_NEXT_COALESCED_ALLOCNO (a))
    {
      allocno_vec = ALLOCNO_CONFLICT_ALLOCNO_VEC (a);
      for (i = 0; (conflict_allocno = allocno_vec [i]) != NULL; i++)
        if (bitmap_bit_p (coloring_allocno_bitmap,
                          ALLOCNO_NUM (conflict_allocno)))
          {
            ira_assert (cover_class == ALLOCNO_COVER_CLASS (conflict_allocno));
            if (allocno_coalesced_p)
              {
                if (bitmap_bit_p (processed_coalesced_allocno_bitmap,
                                  ALLOCNO_NUM (conflict_allocno)))
                  continue;
                bitmap_set_bit (processed_coalesced_allocno_bitmap,
                                ALLOCNO_NUM (conflict_allocno));
              }
            if (ALLOCNO_IN_GRAPH_P (conflict_allocno)
                && ! ALLOCNO_ASSIGNED_P (conflict_allocno))
              {
                conflicts_num = ALLOCNO_LEFT_CONFLICTS_NUM (conflict_allocno);
                conflict_size
                  = (reg_class_nregs
                     [cover_class] [ALLOCNO_MODE (conflict_allocno)]);
                ira_assert
                  (ALLOCNO_LEFT_CONFLICTS_NUM (conflict_allocno) >= size);
                ALLOCNO_LEFT_CONFLICTS_NUM (conflict_allocno) -= size;
                if (conflicts_num + conflict_size
                    <= ALLOCNO_AVAILABLE_REGS_NUM (conflict_allocno))
                  continue;
                conflicts_num = ALLOCNO_LEFT_CONFLICTS_NUM (conflict_allocno);
                if (conflicts_num + conflict_size
                    <= ALLOCNO_AVAILABLE_REGS_NUM (conflict_allocno))
                  {
                    delete_allocno_from_bucket
                      (conflict_allocno, &uncolorable_allocno_bucket);
                    add_allocno_to_ordered_bucket (conflict_allocno,
                                                   &colorable_allocno_bucket);
                  }
              }
          }
      if (a == allocno)
        break;
    }
}
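/* An allocno is "trivially colorable" in the Chaitin-Briggs sense
   when ALLOCNO_LEFT_CONFLICTS_NUM plus its own register need does not
   exceed ALLOCNO_AVAILABLE_REGS_NUM: no matter how its remaining
   neighbors are colored, a hard register is still left over for it.
   The decrement above is what moves neighbors across this threshold
   from the uncolorable bucket to the colorable one.  */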
/* The function puts ALLOCNO onto the coloring stack and removes it
   from the bucket.  The allocno is in the colorable bucket if
   COLORABLE_P is nonzero.  */
static void
remove_allocno_from_bucket_and_push (allocno_t allocno, int colorable_p)
{
  enum reg_class cover_class;
  allocno_t *bucket_ptr;

  bucket_ptr = (colorable_p
                ? &colorable_allocno_bucket : &uncolorable_allocno_bucket);
  delete_allocno_from_bucket (allocno, bucket_ptr);
  if (internal_flag_ira_verbose > 3 && ira_dump_file != NULL)
    {
      fprintf (ira_dump_file, " Pushing");
      print_coalesced_allocno (allocno);
      fprintf (ira_dump_file, "%s\n", colorable_p ? "" : "(potential spill)");
    }
  cover_class = ALLOCNO_COVER_CLASS (allocno);
  ira_assert ((colorable_p
               && (ALLOCNO_LEFT_CONFLICTS_NUM (allocno)
                   + reg_class_nregs [cover_class] [ALLOCNO_MODE (allocno)]
                   <= ALLOCNO_AVAILABLE_REGS_NUM (allocno)))
              || (! colorable_p
                  && (ALLOCNO_LEFT_CONFLICTS_NUM (allocno)
                      + reg_class_nregs [cover_class] [ALLOCNO_MODE (allocno)]
                      > ALLOCNO_AVAILABLE_REGS_NUM (allocno))));
  if (! colorable_p)
    ALLOCNO_MAY_BE_SPILLED_P (allocno) = TRUE;
  push_allocno_to_stack (allocno);
}
/* The function puts all allocnos from the colorable bucket onto the
   coloring stack.  */
static void
push_only_colorable (void)
{
  /* ??? sort here instead of putting it into ordered bucket.  */
  for (;colorable_allocno_bucket != NULL;)
    remove_allocno_from_bucket_and_push (colorable_allocno_bucket, TRUE);
}
/* The function puts ALLOCNO chosen for potential spilling onto the
   coloring stack.  */
static void
push_allocno_to_spill (allocno_t allocno)
{
  delete_allocno_from_bucket (allocno, &uncolorable_allocno_bucket);
  ALLOCNO_MAY_BE_SPILLED_P (allocno) = TRUE;
  if (internal_flag_ira_verbose > 3 && ira_dump_file != NULL)
    fprintf (ira_dump_file, " Pushing p%d(%d) (potential spill)\n",
             ALLOCNO_NUM (allocno), ALLOCNO_REGNO (allocno));
  push_allocno_to_stack (allocno);
}
/* The function returns the frequency of exit edges (if EXIT_P) or
   entry edges of the loop given by its LOOP_NODE.  */
static int
loop_edge_freq (loop_tree_node_t loop_node, int regno, int exit_p)
{
  int freq, i;
  edge_iterator ei;
  edge e;
  VEC (edge, heap) *edges;

  ira_assert (loop_node->loop != NULL
              && (regno < 0 || regno >= FIRST_PSEUDO_REGISTER));
  freq = 0;
  if (! exit_p)
    {
      FOR_EACH_EDGE (e, ei, loop_node->loop->header->preds)
        if (e->src != loop_node->loop->latch
            && (regno < 0
                || (bitmap_bit_p (DF_LR_OUT (e->src), regno)
                    && bitmap_bit_p (DF_LR_IN (e->dest), regno))))
          freq += EDGE_FREQUENCY (e);
    }
  else
    {
      edges = get_loop_exit_edges (loop_node->loop);
      for (i = 0; VEC_iterate (edge, edges, i, e); i++)
        if (regno < 0
            || (bitmap_bit_p (DF_LR_OUT (e->src), regno)
                && bitmap_bit_p (DF_LR_IN (e->dest), regno)))
          freq += EDGE_FREQUENCY (e);
      VEC_free (edge, heap, edges);
    }

  return REG_FREQ_FROM_EDGE_FREQ (freq);
}
/* The function calculates and returns the cost of putting allocno A
   into memory.  */
static int
calculate_allocno_spill_cost (allocno_t a)
{
  int regno, cost;
  enum machine_mode mode;
  enum reg_class class;
  allocno_t father_allocno;
  loop_tree_node_t father_node, loop_node;

  regno = ALLOCNO_REGNO (a);
  cost = ALLOCNO_UPDATED_MEMORY_COST (a) - ALLOCNO_COVER_CLASS_COST (a);
  if (ALLOCNO_CAP (a) != NULL)
    return cost;
  loop_node = ALLOCNO_LOOP_TREE_NODE (a);
  if ((father_node = loop_node->father) == NULL)
    return cost;
  if ((father_allocno = father_node->regno_allocno_map [regno]) == NULL)
    return cost;
  mode = ALLOCNO_MODE (a);
  class = ALLOCNO_COVER_CLASS (a);
  if (ALLOCNO_HARD_REGNO (father_allocno) < 0)
    cost -= (memory_move_cost [mode] [class] [0]
             * loop_edge_freq (loop_node, regno, TRUE)
             + memory_move_cost [mode] [class] [1]
               * loop_edge_freq (loop_node, regno, FALSE));
  else
    cost += ((memory_move_cost [mode] [class] [1]
              * loop_edge_freq (loop_node, regno, TRUE)
              + memory_move_cost [mode] [class] [0]
                * loop_edge_freq (loop_node, regno, FALSE))
             - (register_move_cost [mode] [class] [class]
                * (loop_edge_freq (loop_node, regno, FALSE)
                   + loop_edge_freq (loop_node, regno, TRUE))));
  return cost;
}
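/* In other words: if the allocno in the parent loop node lives in
   memory too, spilling A removes the loads/stores on the loop border,
   so their cost is subtracted; if the parent allocno sits in a hard
   register, spilling A adds memory moves on loop entry/exit but saves
   the register-to-register moves that would otherwise be generated
   there.  */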
/* Push allocnos on the coloring stack.  The order of allocnos in the
   stack defines the order for the subsequent coloring.  */
static void
push_allocnos_to_stack (void)
{
  int i, j;
  double allocno_pri, i_allocno_pri;
  allocno_t allocno, i_allocno;
  allocno_t *allocno_vec;
  enum reg_class cover_class;
  int num, cover_class_allocnos_num [N_REG_CLASSES];
  allocno_t *cover_class_allocnos [N_REG_CLASSES];

  /* Initialize.  */
  for (i = 0; i < reg_class_cover_size; i++)
    {
      cover_class = reg_class_cover [i];
      cover_class_allocnos_num [cover_class] = 0;
      cover_class_allocnos [cover_class] = NULL;
    }
  /* Count the uncolorable allocnos of each cover class.  */
  for (allocno = uncolorable_allocno_bucket;
       allocno != NULL;
       allocno = ALLOCNO_NEXT_BUCKET_ALLOCNO (allocno))
    if ((cover_class = ALLOCNO_COVER_CLASS (allocno)) != NO_REGS)
      {
        cover_class_allocnos_num [cover_class]++;
        ALLOCNO_TEMP (allocno) = INT_MAX;
      }
  /* Define the place where to put uncolorable allocnos of the same
     cover class.  */
  for (num = i = 0; i < reg_class_cover_size; i++)
    {
      cover_class = reg_class_cover [i];
      if (cover_class_allocnos_num [cover_class] != 0)
        {
          cover_class_allocnos [cover_class] = sorted_allocnos + num;
          num += cover_class_allocnos_num [cover_class];
          cover_class_allocnos_num [cover_class] = 0;
        }
    }
  ira_assert (num <= allocnos_num);
  /* Put uncolorable allocnos of the same cover class together.  */
  for (allocno = uncolorable_allocno_bucket;
       allocno != NULL;
       allocno = ALLOCNO_NEXT_BUCKET_ALLOCNO (allocno))
    if ((cover_class = ALLOCNO_COVER_CLASS (allocno)) != NO_REGS)
      cover_class_allocnos
        [cover_class] [cover_class_allocnos_num [cover_class]++] = allocno;
  for (;;)
    {
      push_only_colorable ();
      allocno = uncolorable_allocno_bucket;
      if (allocno == NULL)
        break;
      cover_class = ALLOCNO_COVER_CLASS (allocno);
      if (cover_class == NO_REGS)
        {
          push_allocno_to_spill (allocno);
          continue;
        }
      /* Potential spilling.  */
      ira_assert (reg_class_nregs [cover_class] [ALLOCNO_MODE (allocno)] > 0);
      num = cover_class_allocnos_num [cover_class];
      ira_assert (num > 0);
      allocno_vec = cover_class_allocnos [cover_class];
      allocno = NULL;
      allocno_pri = 0;
      /* Scan the uncolorable allocnos to find the one with the lowest
         spill priority.  */
      for (i = 0, j = num - 1; i <= j;)
        {
          i_allocno = allocno_vec [i];
          if (! ALLOCNO_IN_GRAPH_P (i_allocno)
              && ALLOCNO_IN_GRAPH_P (allocno_vec [j]))
            {
              i_allocno = allocno_vec [j];
              allocno_vec [j] = allocno_vec [i];
              allocno_vec [i] = i_allocno;
            }
          if (ALLOCNO_IN_GRAPH_P (i_allocno))
            {
              i++;
              if (ALLOCNO_TEMP (i_allocno) == INT_MAX)
                {
                  allocno_t a;
                  int cost = 0;

                  for (a = ALLOCNO_NEXT_COALESCED_ALLOCNO (i_allocno);;
                       a = ALLOCNO_NEXT_COALESCED_ALLOCNO (a))
                    {
                      cost += calculate_allocno_spill_cost (a);
                      if (a == i_allocno)
                        break;
                    }
                  /* ??? Remove cost of copies between the coalesced
                     allocnos.  */
                  ALLOCNO_TEMP (i_allocno) = cost;
                }
              i_allocno_pri
                = ((double) ALLOCNO_TEMP (i_allocno)
                   / (ALLOCNO_LEFT_CONFLICTS_NUM (i_allocno)
                      * reg_class_nregs [ALLOCNO_COVER_CLASS (i_allocno)]
                                        [ALLOCNO_MODE (i_allocno)] + 1));
              if (allocno == NULL || allocno_pri > i_allocno_pri
                  || (allocno_pri == i_allocno_pri
                      && (ALLOCNO_NUM (allocno) > ALLOCNO_NUM (i_allocno))))
                {
                  allocno = i_allocno;
                  allocno_pri = i_allocno_pri;
                }
            }
          if (! ALLOCNO_IN_GRAPH_P (allocno_vec [j]))
            j--;
        }
      ira_assert (allocno != NULL && j >= 0);
      cover_class_allocnos_num [cover_class] = j + 1;
      ira_assert (ALLOCNO_IN_GRAPH_P (allocno)
                  && ALLOCNO_COVER_CLASS (allocno) == cover_class
                  && (ALLOCNO_LEFT_CONFLICTS_NUM (allocno)
                      + reg_class_nregs [cover_class] [ALLOCNO_MODE (allocno)]
                      > ALLOCNO_AVAILABLE_REGS_NUM (allocno)));
      remove_allocno_from_bucket_and_push (allocno, FALSE);
    }
}
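/* The potential-spill choice above is the classic Chaitin-Briggs
   heuristic: the victim is the allocno minimizing
   spill-cost / (conflicts-left * register-need), i.e. the cheapest
   allocno whose removal unblocks the most neighbors.  */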
/* Assign hard registers to allocnos on the coloring stack.  */
static void
pop_allocnos_from_stack (void)
{
  allocno_t allocno;
  enum reg_class cover_class;

  for (;VARRAY_ACTIVE_SIZE (allocno_stack_varray) != 0;)
    {
      allocno = VARRAY_TOP_GENERIC_PTR (allocno_stack_varray);
      VARRAY_POP (allocno_stack_varray);
      cover_class = ALLOCNO_COVER_CLASS (allocno);
      if (internal_flag_ira_verbose > 3 && ira_dump_file != NULL)
        {
          fprintf (ira_dump_file, " Popping");
          print_coalesced_allocno (allocno);
          fprintf (ira_dump_file, " -- ");
        }
      if (cover_class == NO_REGS)
        {
          ALLOCNO_HARD_REGNO (allocno) = -1;
          ALLOCNO_ASSIGNED_P (allocno) = TRUE;
          if (internal_flag_ira_verbose > 3 && ira_dump_file != NULL)
            fprintf (ira_dump_file, "assign memory\n");
        }
      else if (assign_hard_reg (allocno, FALSE))
        {
          if (internal_flag_ira_verbose > 3 && ira_dump_file != NULL)
            fprintf (ira_dump_file, "assign reg %d\n",
                     ALLOCNO_HARD_REGNO (allocno));
        }
      else if (ALLOCNO_ASSIGNED_P (allocno))
        {
          if (internal_flag_ira_verbose > 3 && ira_dump_file != NULL)
            fprintf (ira_dump_file, "spill\n");
        }
      ALLOCNO_IN_GRAPH_P (allocno) = TRUE;
    }
}
/* Set up the number of available hard registers for ALLOCNO.  */
static void
setup_allocno_available_regs_num (allocno_t allocno)
{
  int i, n;
  enum reg_class cover_class;
  allocno_t a;
  HARD_REG_SET temp_set;

  cover_class = ALLOCNO_COVER_CLASS (allocno);
  ALLOCNO_AVAILABLE_REGS_NUM (allocno) = available_class_regs [cover_class];
  if (cover_class == NO_REGS)
    return;
  CLEAR_HARD_REG_SET (temp_set);
  ira_assert (ALLOCNO_FIRST_COALESCED_ALLOCNO (allocno) == allocno);
  for (a = ALLOCNO_NEXT_COALESCED_ALLOCNO (allocno);;
       a = ALLOCNO_NEXT_COALESCED_ALLOCNO (a))
    {
      IOR_HARD_REG_SET (temp_set, ALLOCNO_TOTAL_CONFLICT_HARD_REGS (a));
      if (a == allocno)
        break;
    }
  for (n = 0, i = class_hard_regs_num [cover_class] - 1; i >= 0; i--)
    if (TEST_HARD_REG_BIT (temp_set, class_hard_regs [cover_class] [i]))
      n++;
  if (internal_flag_ira_verbose > 2 && n > 0 && ira_dump_file != NULL)
    fprintf (ira_dump_file, " Reg %d of %s has %d regs less\n",
             ALLOCNO_REGNO (allocno), reg_class_names [cover_class], n);
  ALLOCNO_AVAILABLE_REGS_NUM (allocno) -= n;
}
/* Set up ALLOCNO_LEFT_CONFLICTS_NUM for ALLOCNO.  */
static void
setup_allocno_left_conflicts_num (allocno_t allocno)
{
  int i, hard_regs_num, hard_regno, conflict_allocnos_size;
  allocno_t a, conflict_allocno;
  allocno_t *allocno_vec;
  enum reg_class cover_class;
  HARD_REG_SET temp_set;

  cover_class = ALLOCNO_COVER_CLASS (allocno);
  hard_regs_num = class_hard_regs_num [cover_class];
  CLEAR_HARD_REG_SET (temp_set);
  ira_assert (ALLOCNO_FIRST_COALESCED_ALLOCNO (allocno) == allocno);
  for (a = ALLOCNO_NEXT_COALESCED_ALLOCNO (allocno);;
       a = ALLOCNO_NEXT_COALESCED_ALLOCNO (a))
    {
      IOR_HARD_REG_SET (temp_set, ALLOCNO_TOTAL_CONFLICT_HARD_REGS (a));
      if (a == allocno)
        break;
    }
  AND_HARD_REG_SET (temp_set, reg_class_contents [cover_class]);
  AND_COMPL_HARD_REG_SET (temp_set, no_alloc_regs);
  conflict_allocnos_size = 0;
  if (! hard_reg_set_equal_p (temp_set, zero_hard_reg_set))
    for (i = 0; i < (int) hard_regs_num; i++)
      {
        hard_regno = class_hard_regs [cover_class] [i];
        if (TEST_HARD_REG_BIT (temp_set, hard_regno))
          {
            conflict_allocnos_size++;
            CLEAR_HARD_REG_BIT (temp_set, hard_regno);
            if (hard_reg_set_equal_p (temp_set, zero_hard_reg_set))
              break;
          }
      }
  CLEAR_HARD_REG_SET (temp_set);
  if (allocno_coalesced_p)
    bitmap_clear (processed_coalesced_allocno_bitmap);
  if (cover_class != NO_REGS)
    for (a = ALLOCNO_NEXT_COALESCED_ALLOCNO (allocno);;
         a = ALLOCNO_NEXT_COALESCED_ALLOCNO (a))
      {
        allocno_vec = ALLOCNO_CONFLICT_ALLOCNO_VEC (a);
        for (i = 0; (conflict_allocno = allocno_vec [i]) != NULL; i++)
          if (bitmap_bit_p (consideration_allocno_bitmap,
                            ALLOCNO_NUM (conflict_allocno)))
            {
              ira_assert (cover_class
                          == ALLOCNO_COVER_CLASS (conflict_allocno));
              if (allocno_coalesced_p)
                {
                  if (bitmap_bit_p (processed_coalesced_allocno_bitmap,
                                    ALLOCNO_NUM (conflict_allocno)))
                    continue;
                  bitmap_set_bit (processed_coalesced_allocno_bitmap,
                                  ALLOCNO_NUM (conflict_allocno));
                }
              if (! ALLOCNO_ASSIGNED_P (conflict_allocno))
                conflict_allocnos_size
                  += (reg_class_nregs
                      [cover_class] [ALLOCNO_MODE (conflict_allocno)]);
              else if ((hard_regno = ALLOCNO_HARD_REGNO (conflict_allocno))
                       >= 0)
                {
                  int last = (hard_regno
                              + hard_regno_nregs
                                [hard_regno] [ALLOCNO_MODE (conflict_allocno)]);

                  while (hard_regno < last)
                    {
                      if (! TEST_HARD_REG_BIT (temp_set, hard_regno))
                        {
                          conflict_allocnos_size++;
                          SET_HARD_REG_BIT (temp_set, hard_regno);
                        }
                      hard_regno++;
                    }
                }
            }
        if (a == allocno)
          break;
      }
  ALLOCNO_LEFT_CONFLICTS_NUM (allocno) = conflict_allocnos_size;
}
/* The function puts ALLOCNO in a bucket corresponding to the number
   and size of its conflicting allocnos and hard registers.  */
static void
put_allocno_into_bucket (allocno_t allocno)
{
  int hard_regs_num;
  enum reg_class cover_class;

  cover_class = ALLOCNO_COVER_CLASS (allocno);
  hard_regs_num = class_hard_regs_num [cover_class];
  if (ALLOCNO_FIRST_COALESCED_ALLOCNO (allocno) != allocno)
    return;
  ALLOCNO_IN_GRAPH_P (allocno) = TRUE;
  setup_allocno_left_conflicts_num (allocno);
  setup_allocno_available_regs_num (allocno);
  if (ALLOCNO_LEFT_CONFLICTS_NUM (allocno)
      + reg_class_nregs [cover_class] [ALLOCNO_MODE (allocno)]
      <= ALLOCNO_AVAILABLE_REGS_NUM (allocno))
    add_allocno_to_ordered_bucket (allocno, &colorable_allocno_bucket);
  else
    add_allocno_to_bucket (allocno, &uncolorable_allocno_bucket);
}
/* The function is used to sort copies according to their
   frequencies.  */
static int
copy_freq_compare_func (const void *v1p, const void *v2p)
{
  copy_t cp1 = *(const copy_t *) v1p, cp2 = *(const copy_t *) v2p;
  int pri1, pri2;

  pri1 = cp1->freq;
  pri2 = cp2->freq;
  if (pri2 - pri1)
    return pri2 - pri1;

  /* If frequencies are equal, sort by copies, so that the results of
     qsort leave nothing to chance.  */
  return cp1->num - cp2->num;
}
/* The function merges two sets of coalesced allocnos given by
   allocnos A1 and A2.  */
static void
merge_allocnos (allocno_t a1, allocno_t a2)
{
  allocno_t a, first, last, next;

  first = ALLOCNO_FIRST_COALESCED_ALLOCNO (a1);
  if (first == ALLOCNO_FIRST_COALESCED_ALLOCNO (a2))
    return;
  for (last = a2, a = ALLOCNO_NEXT_COALESCED_ALLOCNO (a2);;
       a = ALLOCNO_NEXT_COALESCED_ALLOCNO (a))
    {
      ALLOCNO_FIRST_COALESCED_ALLOCNO (a) = first;
      if (a == a2)
        break;
      last = a;
    }
  next = ALLOCNO_NEXT_COALESCED_ALLOCNO (first);
  ALLOCNO_NEXT_COALESCED_ALLOCNO (first) = a2;
  ALLOCNO_NEXT_COALESCED_ALLOCNO (last) = next;
}
/* The function returns non-zero if there are conflicting allocnos
   from the two sets of coalesced allocnos given by allocnos A1 and
   A2.  */
static int
coalesced_allocno_conflict_p (allocno_t a1, allocno_t a2)
{
  allocno_t a, conflict_allocno, *allocno_vec;
  int i;

  if (allocno_coalesced_p)
    {
      bitmap_clear (processed_coalesced_allocno_bitmap);
      for (a = ALLOCNO_NEXT_COALESCED_ALLOCNO (a1);;
           a = ALLOCNO_NEXT_COALESCED_ALLOCNO (a))
        {
          bitmap_set_bit (processed_coalesced_allocno_bitmap, ALLOCNO_NUM (a));
          if (a == a1)
            break;
        }
    }
  for (a = ALLOCNO_NEXT_COALESCED_ALLOCNO (a2);;
       a = ALLOCNO_NEXT_COALESCED_ALLOCNO (a))
    {
      allocno_vec = ALLOCNO_CONFLICT_ALLOCNO_VEC (a);
      for (i = 0; (conflict_allocno = allocno_vec [i]) != NULL; i++)
        if (conflict_allocno == a1
            || (allocno_coalesced_p
                && bitmap_bit_p (processed_coalesced_allocno_bitmap,
                                 ALLOCNO_NUM (conflict_allocno))))
          return TRUE;
      if (a == a2)
        break;
    }
  return FALSE;
}
/* The major function for aggressive coalescing.  */
static void
coalesce_allocnos (int reload_p)
{
  allocno_t a;
  copy_t cp, next_cp, *sorted_copies;
  enum reg_class cover_class;
  enum machine_mode mode;
  unsigned int j;
  int i, n, cp_num;
  bitmap_iterator bi;

  sorted_copies = ira_allocate (copies_num * sizeof (copy_t));
  cp_num = 0;
  /* Collect copies.  We cannot use copies for this because some
     copies are actually removed.  */
  EXECUTE_IF_SET_IN_BITMAP (coloring_allocno_bitmap, 0, j, bi)
    {
      a = allocnos [j];
      if ((! reload_p && ALLOCNO_ASSIGNED_P (a))
          || (reload_p
              && (! ALLOCNO_ASSIGNED_P (a) || ALLOCNO_HARD_REGNO (a) >= 0)))
        continue;
      cover_class = ALLOCNO_COVER_CLASS (a);
      mode = ALLOCNO_MODE (a);
      for (cp = ALLOCNO_COPIES (a); cp != NULL; cp = next_cp)
        {
          if (cp->first == a)
            {
              next_cp = cp->next_first_allocno_copy;
              if ((reload_p
                   || (ALLOCNO_COVER_CLASS (cp->second) == cover_class
                       && ALLOCNO_MODE (cp->second) == mode))
                  && (reload_p || cp->insn != NULL)
                  && ((! reload_p && ! ALLOCNO_ASSIGNED_P (cp->second))
                      || (reload_p
                          && ALLOCNO_ASSIGNED_P (cp->second)
                          && ALLOCNO_HARD_REGNO (cp->second) < 0)))
                sorted_copies [cp_num++] = cp;
            }
          else if (cp->second == a)
            next_cp = cp->next_second_allocno_copy;
          else
            gcc_unreachable ();
        }
    }
  qsort (sorted_copies, cp_num, sizeof (copy_t), copy_freq_compare_func);
  for (; cp_num != 0;)
    {
      for (i = 0; i < cp_num; i++)
        {
          cp = sorted_copies [i];
          if (! coalesced_allocno_conflict_p (cp->first, cp->second))
            {
              allocno_coalesced_p = TRUE;
              if (internal_flag_ira_verbose > 3 && ira_dump_file != NULL)
                fprintf (ira_dump_file,
                         " Coalescing copy %d:a%dr%d-a%dr%d (freq=%d)\n",
                         cp->num,
                         ALLOCNO_NUM (cp->first), ALLOCNO_REGNO (cp->first),
                         ALLOCNO_NUM (cp->second), ALLOCNO_REGNO (cp->second),
                         cp->freq);
              merge_allocnos (cp->first, cp->second);
              i++;
              break;
            }
        }
      for (n = 0; i < cp_num; i++)
        {
          cp = sorted_copies [i];
          if (ALLOCNO_FIRST_COALESCED_ALLOCNO (cp->first)
              != ALLOCNO_FIRST_COALESCED_ALLOCNO (cp->second))
            sorted_copies [n++] = cp;
        }
      cp_num = n;
    }
  ira_free (sorted_copies);
}
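/* The coalescing above is greedy: copies are visited in decreasing
   frequency order, each copy whose two sides do not (transitively)
   conflict merges their coalesced sets, and copies whose sides ended
   up in the same set are filtered out before the next round.  */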
/* Function implementing Chaitin-Briggs coloring for allocnos in
   COLORING_ALLOCNO_BITMAP taking into account allocnos in
   CONSIDERATION_ALLOCNO_BITMAP.  */
static void
color_allocnos (void)
{
  unsigned int i;
  bitmap_iterator bi;
  allocno_t a;

  allocno_coalesced_p = FALSE;
  processed_coalesced_allocno_bitmap = ira_allocate_bitmap ();
  if (flag_ira_coalesce)
    coalesce_allocnos (FALSE);
  /* Put the allocnos into the corresponding buckets.  */
  colorable_allocno_bucket = NULL;
  uncolorable_allocno_bucket = NULL;
  EXECUTE_IF_SET_IN_BITMAP (coloring_allocno_bitmap, 0, i, bi)
    {
      a = allocnos [i];
      if (ALLOCNO_COVER_CLASS (a) == NO_REGS)
        {
          ALLOCNO_HARD_REGNO (a) = -1;
          ALLOCNO_ASSIGNED_P (a) = TRUE;
          if (internal_flag_ira_verbose > 3 && ira_dump_file != NULL)
            {
              fprintf (ira_dump_file, " Spill");
              print_coalesced_allocno (a);
              fprintf (ira_dump_file, "\n");
            }
          continue;
        }
      put_allocno_into_bucket (a);
    }
  push_allocnos_to_stack ();
  pop_allocnos_from_stack ();
  if (flag_ira_coalesce)
    /* We don't need coalesced allocnos for reassign_pseudos.  */
    EXECUTE_IF_SET_IN_BITMAP (coloring_allocno_bitmap, 0, i, bi)
      {
        a = allocnos [i];
        ALLOCNO_FIRST_COALESCED_ALLOCNO (a) = a;
        ALLOCNO_NEXT_COALESCED_ALLOCNO (a) = a;
      }
  ira_free_bitmap (processed_coalesced_allocno_bitmap);
  allocno_coalesced_p = FALSE;
}
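/* So one color_allocnos call is the usual Chaitin-Briggs pipeline:
   optional aggressive coalescing, simplification
   (push_allocnos_to_stack), then selection
   (pop_allocnos_from_stack).  */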
/* The function outputs information about the loop given by its
   LOOP_TREE_NODE.  */
static void
print_loop_title (loop_tree_node_t loop_tree_node)
{
  unsigned int j;
  bitmap_iterator bi;

  ira_assert (loop_tree_node->loop != NULL);
  fprintf (ira_dump_file,
           "\n  Loop %d (father %d, header bb%d, depth %d)\n    ref:",
           loop_tree_node->loop->num,
           (loop_tree_node->father == NULL
            ? -1 : loop_tree_node->father->loop->num),
           loop_tree_node->loop->header->index,
           loop_depth (loop_tree_node->loop));
  EXECUTE_IF_SET_IN_BITMAP (loop_tree_node->mentioned_allocnos, 0, j, bi)
    fprintf (ira_dump_file, " %dr%d", j, ALLOCNO_REGNO (allocnos [j]));
  fprintf (ira_dump_file, "\n    modified regnos:");
  EXECUTE_IF_SET_IN_BITMAP (loop_tree_node->modified_regnos, 0, j, bi)
    fprintf (ira_dump_file, " %d", j);
  fprintf (ira_dump_file, "\n    border:");
  EXECUTE_IF_SET_IN_BITMAP (loop_tree_node->border_allocnos, 0, j, bi)
    fprintf (ira_dump_file, " %dr%d", j, ALLOCNO_REGNO (allocnos [j]));
  fprintf (ira_dump_file, "\n    Pressure:");
  for (j = 0; (int) j < reg_class_cover_size; j++)
    {
      enum reg_class cover_class;

      cover_class = reg_class_cover [j];
      if (loop_tree_node->reg_pressure [cover_class] == 0)
        continue;
      fprintf (ira_dump_file, " %s=%d", reg_class_names [cover_class],
               loop_tree_node->reg_pressure [cover_class]);
    }
  fprintf (ira_dump_file, "\n");
}
/* The function implements Chaitin-Briggs coloring for allocnos inside
   the loop (in the extreme case it can be the whole function) given
   by the corresponding LOOP_TREE_NODE.  */
static void
color_pass (loop_tree_node_t loop_tree_node)
{
  int regno, hard_regno, hard_regs_num, index = -1;
  int cost, exit_freq, enter_freq;
  unsigned int j;
  bitmap_iterator bi;
  enum machine_mode mode;
  enum reg_class class;
  allocno_t a, subloop_allocno;
  loop_tree_node_t subloop_node;

  if (loop_tree_node->loop == NULL)
    return;
  if (internal_flag_ira_verbose > 1 && ira_dump_file != NULL)
    print_loop_title (loop_tree_node);

  bitmap_copy (coloring_allocno_bitmap, loop_tree_node->mentioned_allocnos);
  bitmap_ior_into (coloring_allocno_bitmap, loop_tree_node->border_allocnos);
  bitmap_copy (consideration_allocno_bitmap, coloring_allocno_bitmap);
  EXECUTE_IF_SET_IN_BITMAP (consideration_allocno_bitmap, 0, j, bi)
    {
      a = allocnos [j];
      if (! ALLOCNO_ASSIGNED_P (a))
        continue;
      bitmap_clear_bit (coloring_allocno_bitmap, ALLOCNO_NUM (a));
    }
  /* Color all mentioned allocnos including transparent ones.  */
  color_allocnos ();
  /* Update costs for subloops.  */
  for (subloop_node = loop_tree_node->inner;
       subloop_node != NULL;
       subloop_node = subloop_node->next)
    if (subloop_node->bb == NULL)
      EXECUTE_IF_SET_IN_BITMAP (consideration_allocno_bitmap, 0, j, bi)
        {
          a = allocnos [j];
          mode = ALLOCNO_MODE (a);
          class = ALLOCNO_COVER_CLASS (a);
          hard_regno = ALLOCNO_HARD_REGNO (a);
          if (hard_regno >= 0)
            {
              index = class_hard_reg_index [class] [hard_regno];
              ira_assert (index >= 0);
            }
          regno = ALLOCNO_REGNO (a);
          /* ??? conflict costs */
          if (ALLOCNO_CAP_MEMBER (a) == NULL)
            {
              subloop_allocno = subloop_node->regno_allocno_map [regno];
              if (subloop_allocno == NULL)
                continue;
              if ((flag_ira_algorithm == IRA_ALGORITHM_MIXED
                   && (loop_tree_node->reg_pressure [class]
                       <= available_class_regs [class]))
                  || (hard_regno < 0
                      && ! bitmap_bit_p (subloop_node->mentioned_allocnos,
                                         ALLOCNO_NUM (subloop_allocno))))
                {
                  if (! ALLOCNO_ASSIGNED_P (subloop_allocno))
                    {
                      ALLOCNO_HARD_REGNO (subloop_allocno) = hard_regno;
                      ALLOCNO_ASSIGNED_P (subloop_allocno) = TRUE;
                      if (hard_regno >= 0)
                        update_copy_costs (subloop_allocno, TRUE);
                    }
                  continue;
                }
              exit_freq = loop_edge_freq (subloop_node, regno, TRUE);
              enter_freq = loop_edge_freq (subloop_node, regno, FALSE);
              if (reg_equiv_invariant_p [regno]
                  || reg_equiv_const [regno] != NULL_RTX)
                {
                  if (! ALLOCNO_ASSIGNED_P (subloop_allocno))
                    {
                      ALLOCNO_HARD_REGNO (subloop_allocno) = hard_regno;
                      ALLOCNO_ASSIGNED_P (subloop_allocno) = TRUE;
                      if (hard_regno >= 0)
                        update_copy_costs (subloop_allocno, TRUE);
                    }
                }
              else if (hard_regno < 0)
                {
                  ALLOCNO_UPDATED_MEMORY_COST (subloop_allocno)
                    -= ((memory_move_cost [mode] [class] [1] * enter_freq)
                        + (memory_move_cost [mode] [class] [0] * exit_freq));
                }
              else
                {
                  hard_regs_num
                    = class_hard_regs_num [ALLOCNO_COVER_CLASS
                                           (subloop_allocno)];
                  allocate_and_set_costs
                    (&ALLOCNO_HARD_REG_COSTS (subloop_allocno), hard_regs_num,
                     ALLOCNO_COVER_CLASS_COST (subloop_allocno));
                  allocate_and_set_costs
                    (&ALLOCNO_CONFLICT_HARD_REG_COSTS (subloop_allocno),
                     hard_regs_num, 0);
                  cost = (register_move_cost [mode] [class] [class]
                          * (exit_freq + enter_freq));
                  ALLOCNO_HARD_REG_COSTS (subloop_allocno) [index] -= cost;
                  ALLOCNO_CONFLICT_HARD_REG_COSTS (subloop_allocno) [index]
                    -= cost;
                  ALLOCNO_UPDATED_MEMORY_COST (subloop_allocno)
                    += (memory_move_cost [mode] [class] [0] * enter_freq
                        + memory_move_cost [mode] [class] [1] * exit_freq);
                  if (ALLOCNO_COVER_CLASS_COST (subloop_allocno)
                      > ALLOCNO_HARD_REG_COSTS (subloop_allocno) [index])
                    ALLOCNO_COVER_CLASS_COST (subloop_allocno)
                      = ALLOCNO_HARD_REG_COSTS (subloop_allocno) [index];
                }
            }
          else
            {
              subloop_allocno = ALLOCNO_CAP_MEMBER (a);
              if (ALLOCNO_LOOP_TREE_NODE (subloop_allocno) != subloop_node)
                continue;
              if ((flag_ira_algorithm == IRA_ALGORITHM_MIXED
                   && loop_tree_node->reg_pressure [class]
                      <= available_class_regs [class])
                  || (hard_regno < 0
                      && ! bitmap_bit_p (subloop_node->mentioned_allocnos,
                                         ALLOCNO_NUM (subloop_allocno))))
                {
                  if (! ALLOCNO_ASSIGNED_P (subloop_allocno))
                    {
                      ALLOCNO_HARD_REGNO (subloop_allocno) = hard_regno;
                      ALLOCNO_ASSIGNED_P (subloop_allocno) = TRUE;
                      if (hard_regno >= 0)
                        update_copy_costs (subloop_allocno, TRUE);
                    }
                }
              else if (flag_ira_propagate_cost && hard_regno >= 0)
                {
                  exit_freq = loop_edge_freq (subloop_node, -1, TRUE);
                  enter_freq = loop_edge_freq (subloop_node, -1, FALSE);
                  cost = (register_move_cost [mode] [class] [class]
                          * (exit_freq + enter_freq));
                  hard_regs_num
                    = class_hard_regs_num [ALLOCNO_COVER_CLASS
                                           (subloop_allocno)];
                  allocate_and_set_costs
                    (&ALLOCNO_HARD_REG_COSTS (subloop_allocno), hard_regs_num,
                     ALLOCNO_COVER_CLASS_COST (subloop_allocno));
                  allocate_and_set_costs
                    (&ALLOCNO_CONFLICT_HARD_REG_COSTS (subloop_allocno),
                     hard_regs_num, 0);
                  ALLOCNO_HARD_REG_COSTS (subloop_allocno) [index] -= cost;
                  ALLOCNO_CONFLICT_HARD_REG_COSTS (subloop_allocno) [index]
                    -= cost;
                  ALLOCNO_UPDATED_MEMORY_COST (subloop_allocno)
                    += (memory_move_cost [mode] [class] [0] * enter_freq
                        + memory_move_cost [mode] [class] [1] * exit_freq);
                  if (ALLOCNO_COVER_CLASS_COST (subloop_allocno)
                      > ALLOCNO_HARD_REG_COSTS (subloop_allocno) [index])
                    ALLOCNO_COVER_CLASS_COST (subloop_allocno)
                      = ALLOCNO_HARD_REG_COSTS (subloop_allocno) [index];
                }
            }
        }
}
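/* Taken together, the rules above implement the regional scheme: a
   subloop allocno inherits the parent's assignment outright when the
   register pressure permits (or when a spilled pseudo is not
   mentioned in the subloop), and otherwise only has its cost vectors
   biased, so the subloop pass itself decides between keeping the hard
   register (paying the border moves) and living in memory.  */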
/* Map: allocno number -> allocno priority.  */
static int *allocno_priorities;

/* Set up priorities in array ALLOCNO_PRIORITIES for N allocnos in
   array CONSIDERATION_ALLOCNOS.  */
static void
start_allocno_priorities (allocno_t *consideration_allocnos, int n)
{
  int i, length;
  allocno_t a;
  allocno_live_range_t r;

  for (i = 0; i < n; i++)
    {
      a = consideration_allocnos [i];
      for (length = 0, r = ALLOCNO_LIVE_RANGES (a); r != NULL; r = r->next)
        length += r->finish - r->start + 1;
      if (length == 0)
        {
          allocno_priorities [ALLOCNO_NUM (a)] = 0;
          continue;
        }
      ira_assert (length > 0 && ALLOCNO_NREFS (a) > 0);
      allocno_priorities [ALLOCNO_NUM (a)]
        = (((double) (floor_log2 (ALLOCNO_NREFS (a)) * ALLOCNO_FREQ (a))
            / length)
           * (10000 / REG_FREQ_MAX) * PSEUDO_REGNO_SIZE (ALLOCNO_REGNO (a)));
    }
}
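/* That is, as in global.c,

     priority = floor(log2 (nrefs)) * freq / live-range-length
                * (10000 / REG_FREQ_MAX) * PSEUDO_REGNO_SIZE (regno)

   so frequently referenced, hot, short-lived, and wide pseudos get
   the highest priorities.  */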
/* The function is used to sort allocnos according to their
   priorities, which are calculated analogously to the ones in file
   `global.c'.  */
static int
allocno_priority_compare_func (const void *v1p, const void *v2p)
{
  allocno_t a1 = *(const allocno_t *) v1p, a2 = *(const allocno_t *) v2p;
  int pri1, pri2;

  pri1 = allocno_priorities [ALLOCNO_NUM (a1)];
  pri2 = allocno_priorities [ALLOCNO_NUM (a2)];
  if (pri2 - pri1)
    return pri2 - pri1;

  /* If regs are equally good, sort by allocnos, so that the results of
     qsort leave nothing to chance.  */
  return ALLOCNO_NUM (a1) - ALLOCNO_NUM (a2);
}
/* The function initializes common data for coloring and calls
   functions to do Chaitin-Briggs, regional, and Chow's priority-based
   coloring.  */
static void
do_coloring (void)
{
  coloring_allocno_bitmap = ira_allocate_bitmap ();

  if (internal_flag_ira_verbose > 0 && ira_dump_file != NULL)
    fprintf (ira_dump_file, "\n**** Allocnos coloring:\n\n");

  traverse_loop_tree (FALSE, ira_loop_tree_root, color_pass, NULL);

  if (internal_flag_ira_verbose > 1 && ira_dump_file != NULL)
    print_disposition (ira_dump_file);

  ira_free_bitmap (coloring_allocno_bitmap);
}
/* The function moves future spill/restore code to less frequent
   points (if it is profitable) by reassigning some allocnos to
   memory, which means making the live ranges where the corresponding
   pseudo-registers are in memory longer.  */
static void
move_spill_restore (void)
{
  int i, cost, changed_p, regno, hard_regno, hard_regno2, index;
  int enter_freq, exit_freq;
  enum machine_mode mode;
  enum reg_class class;
  allocno_t a, father_allocno, subloop_allocno;
  loop_tree_node_t father, loop_node, subloop_node;

  for (;;)
    {
      changed_p = FALSE;
      if (internal_flag_ira_verbose > 0 && ira_dump_file != NULL)
        fprintf (ira_dump_file, "New iteration of spill/restore move\n");
      for (i = 0; i < allocnos_num; i++)
        {
          a = allocnos [i];
          regno = ALLOCNO_REGNO (a);
          loop_node = ALLOCNO_LOOP_TREE_NODE (a);
          if (ALLOCNO_CAP_MEMBER (a) != NULL
              || (hard_regno = ALLOCNO_HARD_REGNO (a)) < 0
              || loop_node->inner == NULL)
            continue;
          mode = ALLOCNO_MODE (a);
          class = ALLOCNO_COVER_CLASS (a);
          index = class_hard_reg_index [class] [hard_regno];
          ira_assert (index >= 0);
          cost = (ALLOCNO_MEMORY_COST (a)
                  - (ALLOCNO_HARD_REG_COSTS (a) == NULL
                     ? ALLOCNO_COVER_CLASS_COST (a)
                     : ALLOCNO_HARD_REG_COSTS (a) [index]));
          for (subloop_node = loop_node->inner;
               subloop_node != NULL;
               subloop_node = subloop_node->next)
            {
              if (subloop_node->bb != NULL)
                continue;
              subloop_allocno = subloop_node->regno_allocno_map [regno];
              if (subloop_allocno == NULL)
                continue;
              cost -= (ALLOCNO_MEMORY_COST (subloop_allocno)
                       - (ALLOCNO_HARD_REG_COSTS (subloop_allocno) == NULL
                          ? ALLOCNO_COVER_CLASS_COST (subloop_allocno)
                          : ALLOCNO_HARD_REG_COSTS (subloop_allocno) [index]));
              exit_freq = loop_edge_freq (subloop_node, regno, TRUE);
              enter_freq = loop_edge_freq (subloop_node, regno, FALSE);
              if ((hard_regno2 = ALLOCNO_HARD_REGNO (subloop_allocno)) < 0)
                cost -= (memory_move_cost [mode] [class] [0] * exit_freq
                         + memory_move_cost [mode] [class] [1] * enter_freq);
              else
                {
                  cost += (memory_move_cost [mode] [class] [0] * exit_freq
                           + memory_move_cost [mode] [class] [1] * enter_freq);
                  if (hard_regno2 != hard_regno)
                    cost -= (register_move_cost [mode] [class] [class]
                             * (exit_freq + enter_freq));
                }
            }
          if ((father = loop_node->father) != NULL
              && (father_allocno = father->regno_allocno_map [regno]) != NULL)
            {
              exit_freq = loop_edge_freq (loop_node, regno, TRUE);
              enter_freq = loop_edge_freq (loop_node, regno, FALSE);
              if ((hard_regno2 = ALLOCNO_HARD_REGNO (father_allocno)) < 0)
                cost -= (memory_move_cost [mode] [class] [0] * exit_freq
                         + memory_move_cost [mode] [class] [1] * enter_freq);
              else
                {
                  cost += (memory_move_cost [mode] [class] [1] * exit_freq
                           + memory_move_cost [mode] [class] [0] * enter_freq);
                  if (hard_regno2 != hard_regno)
                    cost -= (register_move_cost [mode] [class] [class]
                             * (exit_freq + enter_freq));
                }
            }
          if (cost < 0)
            {
              ALLOCNO_HARD_REGNO (a) = -1;
              if (internal_flag_ira_verbose > 3 && ira_dump_file != NULL)
                {
                  fprintf
                    (ira_dump_file,
                     " Moving spill/restore for a%dr%d up from loop %d",
                     ALLOCNO_NUM (a), regno, loop_node->loop->num);
                  fprintf (ira_dump_file, " - profit %d\n", -cost);
                }
              changed_p = TRUE;
            }
        }
      if (! changed_p)
        break;
    }
}
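/* The outer loop above iterates to a fixed point because moving one
   allocno into memory can in turn make moving spill/restore code for
   a related allocno in the loop tree profitable.  */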
/* Usage frequency and order number of the coalesced allocno set to
   which the given pseudo-register belongs.  */
static int *regno_coalesced_allocno_freq;
static int *regno_coalesced_allocno_num;

/* The function is used to sort pseudos according to the frequencies
   of the coalesced allocnos they belong to (putting the most
   frequently used ones first when the frame pointer is needed, or
   last otherwise), and according to the coalesced allocno numbers.  */
static int
coalesced_pseudo_reg_compare (const void *v1p, const void *v2p)
{
  int regno1 = *(int *) v1p;
  int regno2 = *(int *) v2p;
  int diff;

  if ((diff = (regno_coalesced_allocno_freq [regno2]
               - regno_coalesced_allocno_freq [regno1])) != 0)
    return frame_pointer_needed ? diff : -diff;
  if ((diff = (regno_coalesced_allocno_num [regno1]
               - regno_coalesced_allocno_num [regno2])) != 0)
    return frame_pointer_needed ? diff : -diff;
  return frame_pointer_needed ? regno1 - regno2 : regno2 - regno1;
}
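/* The direction flip on FRAME_POINTER_NEEDED is presumably about slot
   addressing: which end of the frame gets the small displacements
   depends on whether the slots are addressed off the frame pointer or
   the stack pointer, and the hottest pseudos should get the
   cheapest-to-address slots.  */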
1685 /* The function sorts pseudo-register numbers in array PSEUDO_REGNOS
1686 of length N for subsequent assigning stack slots to them in the
1687 reload. */
1688 void
1689 sort_regnos_for_alter_reg (int *pseudo_regnos, int n)
1691 int max_regno = max_reg_num ();
1692 int i, regno, num, assign_p, freq;
1693 allocno_t allocno, a;
1695 coloring_allocno_bitmap = ira_allocate_bitmap ();
1696 for (i = 0; i < n; i++)
1698 regno = pseudo_regnos [i];
1699 allocno = regno_allocno_map [regno];
1700 if (allocno != NULL)
1701 bitmap_set_bit (coloring_allocno_bitmap,
1702 ALLOCNO_NUM (allocno));
1704 allocno_coalesced_p = FALSE;
1705 processed_coalesced_allocno_bitmap = ira_allocate_bitmap ();
1706 coalesce_allocnos (TRUE);
1707 ira_free_bitmap (processed_coalesced_allocno_bitmap);
1708 ira_free_bitmap (coloring_allocno_bitmap);
1709 allocno_coalesced_p = FALSE;
1710 regno_coalesced_allocno_freq = ira_allocate (max_regno * sizeof (int));
1711 regno_coalesced_allocno_num = ira_allocate (max_regno * sizeof (int));
1712 memset (regno_coalesced_allocno_num, 0, max_regno * sizeof (int));
1713 for (num = i = 0; i < n; i++)
1715 regno = pseudo_regnos [i];
1716 allocno = regno_allocno_map [regno];
1717 if (allocno == NULL)
1719 regno_coalesced_allocno_freq [regno] = 0;
1720 regno_coalesced_allocno_num [regno] = ++num;
1721 continue;
1723 assign_p = regno_coalesced_allocno_num [regno] == 0;
1724 if (assign_p)
1725 num++;
1726 for (freq = 0, a = ALLOCNO_NEXT_COALESCED_ALLOCNO (allocno);;
1727 a = ALLOCNO_NEXT_COALESCED_ALLOCNO (a))
1729 if (assign_p)
1730 regno_coalesced_allocno_num [ALLOCNO_REGNO (a)] = num;
1731 freq += ALLOCNO_FREQ (a);
1732 if (a == allocno)
1733 break;
1735 regno_coalesced_allocno_freq [regno] = freq;
1737 /* Uncoalesce allocnos which is necessary for (re)assigning during
1738 the reload. */
1739 for (i = 0; i < allocnos_num; i++)
1740 ALLOCNO_NEXT_COALESCED_ALLOCNO (allocnos [i]) = allocnos [i];
1741 qsort (pseudo_regnos, n, sizeof (int), coalesced_pseudo_reg_compare);
1742 ira_free (regno_coalesced_allocno_num);
1743 ira_free (regno_coalesced_allocno_freq);
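/* The frequency pass above walks each circular list linked through
   ALLOCNO_NEXT_COALESCED_ALLOCNO, visiting the starting allocno
   last.  The same traversal pattern over a toy ring type
   (hypothetical, for illustration only):  */
#if 0
struct example_ring { int freq; struct example_ring *next; };

static int
example_ring_freq_sum (struct example_ring *start)
{
  struct example_ring *p;
  int freq = 0;

  for (p = start->next;; p = p->next)
    {
      freq += p->freq;
      if (p == start)
        break;
    }
  return freq;
}
#endif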
1748 /* Set up current hard reg costs and current conflict hard reg costs
1749 for allocno A. */
1750 static void
1751 setup_curr_costs (allocno_t a)
1753 int i, hard_regno, cost, hard_regs_num;
1754 enum machine_mode mode;
1755 enum reg_class cover_class, class;
1756 allocno_t another_a;
1757 copy_t cp, next_cp;
1759 ira_assert (! ALLOCNO_ASSIGNED_P (a));
1760 cover_class = ALLOCNO_COVER_CLASS (a);
1761 if (cover_class == NO_REGS)
1762 return;
1763 hard_regs_num = class_hard_regs_num [cover_class];
1764 if (hard_regs_num == 0)
1765 return;
1766 mode = ALLOCNO_MODE (a);
1767 for (cp = ALLOCNO_COPIES (a); cp != NULL; cp = next_cp)
1769 if (cp->first == a)
1771 next_cp = cp->next_first_allocno_copy;
1772 another_a = cp->second;
1774 else if (cp->second == a)
1776 next_cp = cp->next_second_allocno_copy;
1777 another_a = cp->first;
1779 else
1780 gcc_unreachable ();
1781 if (cover_class != ALLOCNO_COVER_CLASS (another_a)
1782 || ! ALLOCNO_ASSIGNED_P (another_a)
1783 || (hard_regno = ALLOCNO_HARD_REGNO (another_a)) < 0)
1784 continue;
1785 class = REGNO_REG_CLASS (hard_regno);
1786 i = class_hard_reg_index [cover_class] [hard_regno];
1787 ira_assert (i >= 0);
1788 cost = (cp->first == a
1789 ? register_move_cost [mode] [class] [cover_class]
1790 : register_move_cost [mode] [cover_class] [class]);
1791 allocate_and_set_or_copy_costs
1792 (&ALLOCNO_UPDATED_HARD_REG_COSTS (a),
1793 hard_regs_num, ALLOCNO_COVER_CLASS_COST (a),
1794 ALLOCNO_HARD_REG_COSTS (a));
1795 allocate_and_set_or_copy_costs
1796 (&ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a),
1797 hard_regs_num, 0, ALLOCNO_CONFLICT_HARD_REG_COSTS (a));
1798 ALLOCNO_UPDATED_HARD_REG_COSTS (a) [i] -= cp->freq * cost;
1799 ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a) [i] -= cp->freq * cost;
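/* The loop above biases A's updated costs toward the hard registers
   of already-colored copy partners: each copy subtracts its
   frequency times the move cost from the partner register's entry.
   A stripped-down sketch with hypothetical types, illustration
   only:  */
#if 0
struct example_copy
{
  int partner_assigned_p;  /* Partner already has a hard register.  */
  int partner_index;       /* Class index of that register.  */
  int freq, move_cost;
};

static void
example_bias_costs (int *costs, const struct example_copy *copies, int n)
{
  int k;

  for (k = 0; k < n; k++)
    if (copies[k].partner_assigned_p)
      costs[copies[k].partner_index] -= copies[k].freq * copies[k].move_cost;
}
#endif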
1803 /* Try to assign hard registers to the unassigned allocnos and to
1804    the allocnos conflicting with them or with allocnos whose regno
1805    >= START_REGNO.  If NO_CALL_CROSS_P, a hard register is tried
1806    only for allocnos which do not live across calls.  */
1807 void
1808 reassign_conflict_allocnos (int start_regno, int no_call_cross_p)
1810 int i, j, allocnos_to_color_num;
1811 allocno_t a, conflict_a, *allocno_vec;
1812 enum reg_class cover_class;
1813 bitmap allocnos_to_color;
1815 allocnos_to_color = ira_allocate_bitmap ();
1816 allocnos_to_color_num = 0;
1817 for (i = 0; i < allocnos_num; i++)
1819 a = allocnos [i];
1820 if (! ALLOCNO_ASSIGNED_P (a)
1821 && ! bitmap_bit_p (allocnos_to_color, ALLOCNO_NUM (a)))
1823 if (ALLOCNO_COVER_CLASS (a) != NO_REGS
1824 && (! no_call_cross_p || ALLOCNO_CALLS_CROSSED_NUM (a) == 0))
1825 sorted_allocnos [allocnos_to_color_num++] = a;
1826 else
1828 ALLOCNO_ASSIGNED_P (a) = TRUE;
1829 ALLOCNO_HARD_REGNO (a) = -1;
1831 bitmap_set_bit (allocnos_to_color, ALLOCNO_NUM (a));
1833 if (ALLOCNO_REGNO (a) < start_regno
1834 || (cover_class = ALLOCNO_COVER_CLASS (a)) == NO_REGS)
1835 continue;
1836 allocno_vec = ALLOCNO_CONFLICT_ALLOCNO_VEC (a);
1837 for (j = 0; (conflict_a = allocno_vec [j]) != NULL; j++)
1839 ira_assert (cover_class == ALLOCNO_COVER_CLASS (conflict_a));
1840 if ((no_call_cross_p && ALLOCNO_CALLS_CROSSED_NUM (conflict_a) != 0)
1841 || bitmap_bit_p (allocnos_to_color, ALLOCNO_NUM (conflict_a)))
1842 continue;
1843 bitmap_set_bit (allocnos_to_color, ALLOCNO_NUM (conflict_a));
1844 sorted_allocnos [allocnos_to_color_num++] = conflict_a;
1847 ira_free_bitmap (allocnos_to_color);
1848 if (allocnos_to_color_num > 1)
1850 start_allocno_priorities (sorted_allocnos, allocnos_to_color_num);
1851 qsort (sorted_allocnos, allocnos_to_color_num, sizeof (allocno_t),
1852 allocno_priority_compare_func);
1854 for (i = 0; i < allocnos_to_color_num; i++)
1856 a = sorted_allocnos [i];
1857 ALLOCNO_ASSIGNED_P (a) = FALSE;
1858 setup_curr_costs (a);
1860 for (i = 0; i < allocnos_to_color_num; i++)
1862 a = sorted_allocnos [i];
1863 if (assign_hard_reg (a, TRUE))
1865 if (internal_flag_ira_verbose > 3 && ira_dump_file != NULL)
1866 fprintf
1867 (ira_dump_file,
1868 " Secondary allocation: assign hard reg %d to reg %d\n",
1869 ALLOCNO_HARD_REGNO (a), ALLOCNO_REGNO (a));
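/* The collection phase above is a de-duplicated worklist build: a
   bitmap records membership so each allocno is queued at most once,
   then the worklist is priority-sorted and recolored.  A sketch of
   the pattern with a char[] standing in for the bitmap
   (hypothetical, illustration only):  */
#if 0
static int
example_build_worklist (int *worklist, char *seen,
                        const int *candidates, int n)
{
  int i, m = 0;

  for (i = 0; i < n; i++)
    if (! seen[candidates[i]])
      {
        seen[candidates[i]] = 1;  /* Mark before queuing: no duplicates.  */
        worklist[m++] = candidates[i];
      }
  /* The caller then qsorts WORKLIST by priority and assigns.  */
  return m;
}
#endif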
1876 /* The function is called from reload to mark changes in the
1877    allocation of REGNO made by reload.  */
1878 void
1879 mark_allocation_change (int regno)
1881 allocno_t a = regno_allocno_map [regno];
1882 int old_hard_regno, hard_regno, cost;
1883 enum reg_class cover_class = ALLOCNO_COVER_CLASS (a);
1885 ira_assert (a != NULL);
1886 hard_regno = reg_renumber [regno];
1887 if ((old_hard_regno = ALLOCNO_HARD_REGNO (a)) == hard_regno)
1888 return;
1889 if (old_hard_regno < 0)
1890 cost = -ALLOCNO_MEMORY_COST (a);
1891 else
1893 ira_assert (class_hard_reg_index [cover_class] [old_hard_regno] >= 0);
1894 cost = -(ALLOCNO_HARD_REG_COSTS (a) == NULL
1895 ? ALLOCNO_COVER_CLASS_COST (a)
1896 : ALLOCNO_HARD_REG_COSTS (a)
1897 [class_hard_reg_index [cover_class] [old_hard_regno]]);
1898 update_copy_costs (a, FALSE);
1900 overall_cost -= cost;
1901 ALLOCNO_HARD_REGNO (a) = hard_regno;
1902 if (hard_regno < 0)
1903 cost += ALLOCNO_MEMORY_COST (a);
1904 else if (class_hard_reg_index [cover_class] [hard_regno] >= 0)
1906 cost += (ALLOCNO_HARD_REG_COSTS (a) == NULL
1907 ? ALLOCNO_COVER_CLASS_COST (a)
1908 : ALLOCNO_HARD_REG_COSTS (a)
1909 [class_hard_reg_index [cover_class] [hard_regno]]);
1910 update_copy_costs (a, TRUE);
1912 else
1913 /* Reload changes the class of the allocno.  */
1914 cost = 0;
1915 overall_cost += cost;
1918 /* This function is called when reload deletes a memory-memory move.  */
1919 void
1920 mark_memory_move_deletion (int dst_regno, int src_regno)
1922 allocno_t dst = regno_allocno_map [dst_regno];
1923 allocno_t src = regno_allocno_map [src_regno];
1925 ira_assert (dst != NULL && src != NULL
1926 && ALLOCNO_HARD_REGNO (dst) < 0
1927 && ALLOCNO_HARD_REGNO (src) < 0);
1928 ALLOCNO_DONT_REASSIGN_P (dst) = TRUE;
1929 ALLOCNO_DONT_REASSIGN_P (src) = TRUE;
1932 /* The function tries to assign a hard register (excluding those in
1933    FORBIDDEN_REGS) to allocno A and returns TRUE in the case of
1934    success.  */
1935 static int
1936 allocno_reload_assign (allocno_t a, HARD_REG_SET forbidden_regs)
1938 int hard_regno;
1939 enum reg_class cover_class;
1940 int regno = ALLOCNO_REGNO (a);
1942 IOR_HARD_REG_SET (ALLOCNO_TOTAL_CONFLICT_HARD_REGS (a), forbidden_regs);
1943 if (! flag_caller_saves && ALLOCNO_CALLS_CROSSED_NUM (a) != 0)
1944 IOR_HARD_REG_SET (ALLOCNO_TOTAL_CONFLICT_HARD_REGS (a), call_used_reg_set);
1945 ALLOCNO_ASSIGNED_P (a) = FALSE;
1946 cover_class = ALLOCNO_COVER_CLASS (a);
1947 setup_curr_costs (a);
1948 assign_hard_reg (a, TRUE);
1949 hard_regno = ALLOCNO_HARD_REGNO (a);
1950 reg_renumber [regno] = hard_regno;
1951 if (hard_regno >= 0)
1953 ira_assert (class_hard_reg_index [cover_class] [hard_regno] >= 0);
1954 overall_cost -= (ALLOCNO_MEMORY_COST (a)
1955 - (ALLOCNO_HARD_REG_COSTS (a) == NULL
1956 ? ALLOCNO_COVER_CLASS_COST (a)
1957 : ALLOCNO_HARD_REG_COSTS (a)
1958 [class_hard_reg_index
1959 [cover_class] [hard_regno]]));
1960 if (ALLOCNO_CALLS_CROSSED_NUM (a) != 0
1961 && ! hard_reg_not_in_set_p (hard_regno, ALLOCNO_MODE (a),
1962 call_used_reg_set))
1964 ira_assert (flag_caller_saves);
1965 caller_save_needed = 1;
1969 /* If we found a register, modify the RTL for the register to show
1970 the hard register, and mark that register live. */
1971 if (reg_renumber[regno] >= 0)
1973 if (internal_flag_ira_verbose > 3 && ira_dump_file != NULL)
1974 fprintf (ira_dump_file, ": reassign to %d", reg_renumber[regno]);
1975 SET_REGNO (regno_reg_rtx[regno], reg_renumber[regno]);
1976 mark_home_live (regno);
1979 if (internal_flag_ira_verbose > 3 && ira_dump_file != NULL)
1980 fprintf (ira_dump_file, "\n");
1982 return reg_renumber[regno] >= 0;
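/* Sketch of the caller-saves constraint checked above: a pseudo
   that crosses calls may end up in a call-clobbered register only
   when -fcaller-saves is in effect to wrap the calls with
   save/restore code.  Hypothetical predicate, illustration only:  */
#if 0
static int
example_call_crossing_reg_ok_p (int crosses_calls_p,
                                int reg_call_clobbered_p,
                                int caller_saves_enabled_p)
{
  return (! crosses_calls_p
          || ! reg_call_clobbered_p
          || caller_saves_enabled_p);
}
#endif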
1985 /* The function is used to sort pseudos according to their usage
1986    frequencies (putting the most frequently used ones first).  */
1987 static int
1988 pseudo_reg_compare (const void *v1p, const void *v2p)
1990 int regno1 = *(int *) v1p;
1991 int regno2 = *(int *) v2p;
1992 int diff;
1994 if ((diff = REG_FREQ (regno2) - REG_FREQ (regno1)) != 0)
1995 return diff;
1996 return regno1 - regno2;
1999 /* The function tries to allocate hard registers to
2000    SPILLED_PSEUDO_REGS (there are NUM of them) or to spilled pseudos
2001    conflicting with pseudos in SPILLED_PSEUDO_REGS.  It returns TRUE
2002    and updates SPILLED if the allocation has changed.  The function
2003    doesn't use BAD_SPILL_REGS and the corresponding hard registers in
2004    PSEUDO_FORBIDDEN_REGS and PSEUDO_PREVIOUS_REGS.  */
2005 int
2006 reassign_pseudos (int *spilled_pseudo_regs, int num,
2007 HARD_REG_SET bad_spill_regs,
2008 HARD_REG_SET *pseudo_forbidden_regs,
2009 HARD_REG_SET *pseudo_previous_regs, bitmap spilled)
2011 int i, j, m, n, regno, changed_p;
2012 allocno_t a, conflict_a, *allocno_vec;
2013 HARD_REG_SET forbidden_regs;
2015 if (num > 1)
2016 qsort (spilled_pseudo_regs, num, sizeof (int), pseudo_reg_compare);
2017 changed_p = FALSE;
2018 for (m = i = 0; i < num; i++)
2020 regno = spilled_pseudo_regs [i];
2021 COPY_HARD_REG_SET (forbidden_regs, bad_spill_regs);
2022 IOR_HARD_REG_SET (forbidden_regs, pseudo_forbidden_regs [regno]);
2023 IOR_HARD_REG_SET (forbidden_regs, pseudo_previous_regs [regno]);
2024 gcc_assert (reg_renumber [regno] < 0);
2025 a = regno_allocno_map [regno];
2026 mark_allocation_change (regno);
2027 ira_assert (reg_renumber [regno] < 0);
2028 if (internal_flag_ira_verbose > 3 && ira_dump_file != NULL)
2029 fprintf (ira_dump_file,
2030 " Spill %d(a%d), cost=%d", regno, ALLOCNO_NUM (a),
2031 ALLOCNO_MEMORY_COST (a)
2032 - ALLOCNO_COVER_CLASS_COST (a));
2033 allocno_reload_assign (a, forbidden_regs);
2034 if (reg_renumber [regno] >= 0)
2036 CLEAR_REGNO_REG_SET (spilled, regno);
2037 changed_p = TRUE;
2039 else
2040 spilled_pseudo_regs [m++] = regno;
2042 if (m == 0)
2043 return changed_p;
2044 if (internal_flag_ira_verbose > 3 && ira_dump_file != NULL)
2046 fprintf (ira_dump_file, " Spilled regs");
2047 for (i = 0; i < m; i++)
2048 fprintf (ira_dump_file, " %d", spilled_pseudo_regs [i]);
2049 fprintf (ira_dump_file, "\n");
2051 for (i = n = 0; i < m; i++)
2053 regno = spilled_pseudo_regs [i];
2054 a = regno_allocno_map [regno];
2055 allocno_vec = ALLOCNO_CONFLICT_ALLOCNO_VEC (a);
2056 for (j = 0; (conflict_a = allocno_vec [j]) != NULL; j++)
2057 if (ALLOCNO_HARD_REGNO (conflict_a) < 0
2058 && ! ALLOCNO_DONT_REASSIGN_P (conflict_a)
2059 && ! bitmap_bit_p (consideration_allocno_bitmap,
2060 ALLOCNO_NUM (conflict_a)))
2062 sorted_allocnos [n++] = conflict_a;
2063 bitmap_set_bit (consideration_allocno_bitmap,
2064 ALLOCNO_NUM (conflict_a));
2067 if (n != 0)
2069 start_allocno_priorities (sorted_allocnos, n);
2070 qsort (sorted_allocnos, n, sizeof (allocno_t),
2071 allocno_priority_compare_func);
2072 for (i = 0; i < n; i++)
2074 a = sorted_allocnos [i];
2075 regno = ALLOCNO_REGNO (a);
2076 COPY_HARD_REG_SET (forbidden_regs, bad_spill_regs);
2077 IOR_HARD_REG_SET (forbidden_regs, pseudo_forbidden_regs [regno]);
2078 IOR_HARD_REG_SET (forbidden_regs, pseudo_previous_regs [regno]);
2079 if (internal_flag_ira_verbose > 3 && ira_dump_file != NULL)
2080 fprintf (ira_dump_file,
2081 " Try assign %d(a%d), cost=%d",
2082 regno, ALLOCNO_NUM (a),
2083 ALLOCNO_MEMORY_COST (a)
2084 - ALLOCNO_COVER_CLASS_COST (a));
2085 if (allocno_reload_assign (a, forbidden_regs))
2087 changed_p = TRUE;
2088 bitmap_clear_bit (spilled, regno);
2092 return changed_p;
2097 /* The function, called by reload, returns the already allocated
2098    stack slot (if any) for REGNO with the given INHERENT_SIZE and
2099    TOTAL_SIZE.  */
2100 rtx
2101 reuse_stack_slot (int regno, unsigned int inherent_size,
2102 unsigned int total_size)
2104 unsigned int i;
2105 int n;
2106 int freq, best_freq = -1;
2107 struct spilled_reg_stack_slot *best_slot = NULL;
2108 allocno_t another_allocno, allocno = regno_allocno_map [regno];
2109 copy_t cp, next_cp;
2110 rtx x;
2111 bitmap_iterator bi;
2112 struct spilled_reg_stack_slot *slot = NULL;
2114 ira_assert (flag_ira && inherent_size == PSEUDO_REGNO_BYTES (regno)
2115 && inherent_size <= total_size);
2116 if (! flag_ira_share_spill_slots)
2117 return NULL_RTX;
2118 x = NULL_RTX;
2119 if (flag_omit_frame_pointer)
2120 n = spilled_reg_stack_slots_num - 1;
2121 else
2122 n = 0;
2123 if (x == NULL_RTX)
2125 for (;;)
2127 if (flag_omit_frame_pointer)
2129 if (n < 0)
2130 break;
2131 slot = &spilled_reg_stack_slots [n--];
2133 else if (n >= spilled_reg_stack_slots_num)
2134 break;
2135 else
2136 slot = &spilled_reg_stack_slots [n++];
2137 if (slot->width < total_size
2138 || GET_MODE_SIZE (GET_MODE (slot->mem)) < inherent_size)
2139 continue;
2141 EXECUTE_IF_SET_IN_BITMAP (&slot->spilled_regs,
2142 FIRST_PSEUDO_REGISTER, i, bi)
2144 if (pseudo_live_ranges_intersect_p (regno, i))
2145 goto cont;
2147 for (freq = 0, cp = ALLOCNO_COPIES (allocno);
2148 cp != NULL;
2149 cp = next_cp)
2151 if (cp->first == allocno)
2153 next_cp = cp->next_first_allocno_copy;
2154 another_allocno = cp->second;
2156 else if (cp->second == allocno)
2158 next_cp = cp->next_second_allocno_copy;
2159 another_allocno = cp->first;
2161 else
2162 gcc_unreachable ();
2163 if (bitmap_bit_p (&slot->spilled_regs,
2164 ALLOCNO_REGNO (another_allocno)))
2165 freq += cp->freq;
2167 if (freq > best_freq)
2169 best_freq = freq;
2170 best_slot = slot;
2172 cont:
2175 if (best_freq >= 0)
2177 SET_REGNO_REG_SET (&best_slot->spilled_regs, regno);
2178 x = best_slot->mem;
2181 if (x)
2183 if (internal_flag_ira_verbose > 3 && ira_dump_file)
2185 fprintf (ira_dump_file, " Assigning %d slot of", regno);
2186 EXECUTE_IF_SET_IN_BITMAP (&slot->spilled_regs,
2187 FIRST_PSEUDO_REGISTER, i, bi)
2189 if ((unsigned) regno != i)
2190 fprintf (ira_dump_file, " %d", i);
2192 fprintf (ira_dump_file, "\n");
2195 return x;
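/* The reuse policy above in miniature: a slot qualifies only if it
   is wide enough and none of its occupants' live ranges intersect
   the new register's; among qualifying slots, the one tied to the
   register by the highest copy frequency wins, so values connected
   by copies share a slot and the copies become no-ops.  Toy types,
   hypothetical names, illustration only:  */
#if 0
struct example_slot
{
  unsigned width;
  int copy_freq;        /* Copy frequency with the new register.  */
  int occupant_live_p;  /* Some occupant is live simultaneously.  */
};

static struct example_slot *
example_pick_slot (struct example_slot *slots, int n, unsigned size)
{
  struct example_slot *best = NULL;
  int i, best_freq = -1;

  for (i = 0; i < n; i++)
    {
      if (slots[i].width < size || slots[i].occupant_live_p)
        continue;
      if (slots[i].copy_freq > best_freq)
        {
          best_freq = slots[i].copy_freq;
          best = &slots[i];
        }
    }
  return best;
}
#endif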
2198 /* The function is called by reload when a new stack slot X with
2199    TOTAL_SIZE is allocated for REGNO.  */
2200 void
2201 mark_new_stack_slot (rtx x, int regno, unsigned int total_size)
2203 struct spilled_reg_stack_slot *slot;
2205 ira_assert (flag_ira && PSEUDO_REGNO_BYTES (regno) <= total_size);
2206 slot = &spilled_reg_stack_slots [spilled_reg_stack_slots_num++];
2207 INIT_REG_SET (&slot->spilled_regs);
2208 SET_REGNO_REG_SET (&slot->spilled_regs, regno);
2209 slot->mem = x;
2210 slot->width = total_size;
2211 if (internal_flag_ira_verbose > 3 && ira_dump_file)
2212 fprintf (ira_dump_file, " Assigning %d a new slot\n", regno);
2216 /* Return the spill cost for the pseudo-registers whose numbers are
2217    in array REGNOS (the end marker is a negative number) for a reload
2218    with the given IN and OUT for INSN.  Also return the number of
2219    program points (through EXCESS_PRESSURE_LIVE_LENGTH) where the
2220    pseudo-registers live and the register pressure is high, the
2221    number of references of the pseudo-registers (through NREFS), the
2222    number of call-used hard registers occupied by the pseudo-registers
2223    (through CALL_USED_COUNT), and the first hard regno occupied by
2224    the pseudo-registers (through FIRST_HARD_REGNO).  */
2225 static int
2226 calculate_spill_cost (int *regnos, rtx in, rtx out, rtx insn,
2227 int *excess_pressure_live_length,
2228 int *nrefs, int *call_used_count, int *first_hard_regno)
2230 int i, cost, regno, hard_regno, j, count, saved_cost, nregs, in_p, out_p;
2231 int length;
2232 allocno_t a;
2234 *nrefs = 0;
2235 for (length = count = cost = i = 0;; i++)
2237 regno = regnos [i];
2238 if (regno < 0)
2239 break;
2240 *nrefs += REG_N_REFS (regno);
2241 hard_regno = reg_renumber [regno];
2242 ira_assert (hard_regno >= 0);
2243 a = regno_allocno_map [regno];
2244 length += ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (a);
2245 cost += ALLOCNO_MEMORY_COST (a) - ALLOCNO_COVER_CLASS_COST (a);
2246 nregs = hard_regno_nregs [hard_regno] [ALLOCNO_MODE (a)];
2247 for (j = 0; j < nregs; j++)
2248 if (! TEST_HARD_REG_BIT (call_used_reg_set, hard_regno + j))
2249 break;
2250 if (j == nregs)
2251 count++;
2252 in_p = in && REG_P (in) && (int) REGNO (in) == hard_regno;
2253 out_p = out && REG_P (out) && (int) REGNO (out) == hard_regno;
2254 if ((in_p || out_p)
2255 && find_regno_note (insn, REG_DEAD, hard_regno) != NULL_RTX)
2257 saved_cost = 0;
2258 if (in_p)
2259 saved_cost += memory_move_cost
2260 [ALLOCNO_MODE (a)] [ALLOCNO_COVER_CLASS (a)] [1];
2261 if (out_p)
2262 saved_cost
2263 += memory_move_cost
2264 [ALLOCNO_MODE (a)] [ALLOCNO_COVER_CLASS (a)] [0];
2265 cost -= REG_FREQ_FROM_BB (BLOCK_FOR_INSN (insn)) * saved_cost;
2268 *excess_pressure_live_length = length;
2269 *call_used_count = count;
2270 hard_regno = -1;
2271 if (regnos [0] >= 0)
2273 hard_regno = reg_renumber [regnos [0]];
2275 *first_hard_regno = hard_regno;
2276 return cost;
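/* The main loop above iterates over a regno array closed by a
   negative sentinel, testing the sentinel before using the value.
   The bare pattern, self-contained (hypothetical names,
   illustration only):  */
#if 0
static int
example_sum_until_sentinel (const int *regnos, const int *cost_of)
{
  int i, cost = 0;

  for (i = 0;; i++)
    {
      if (regnos[i] < 0)  /* Negative regno ends the array.  */
        break;
      cost += cost_of[regnos[i]];
    }
  return cost;
}
#endif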
2279 /* Return TRUE if spilling the pseudo-registers whose numbers are in
2280    array REGNOS is better than spilling those with numbers in
2281    OTHER_REGNOS for a reload with the given IN and OUT for INSN.  */
2282 int
2283 better_spill_reload_regno_p (int *regnos, int *other_regnos,
2284 rtx in, rtx out, rtx insn)
2286 int cost, other_cost;
2287 int length, other_length;
2288 int nrefs, other_nrefs;
2289 int call_used_count, other_call_used_count;
2290 int hard_regno, other_hard_regno;
2292 cost = calculate_spill_cost (regnos, in, out, insn,
2293 &length, &nrefs, &call_used_count, &hard_regno);
2294 other_cost = calculate_spill_cost (other_regnos, in, out, insn,
2295 &other_length, &other_nrefs,
2296 &other_call_used_count,
2297 &other_hard_regno);
2298 if (cost != other_cost)
2299 return cost < other_cost;
2300 if (length != other_length)
2301 return length > other_length;
2302 #ifdef REG_ALLOC_ORDER
2303 if (hard_regno >= 0 && other_hard_regno >= 0)
2304 return (inv_reg_alloc_order [hard_regno]
2305 < inv_reg_alloc_order [other_hard_regno]);
2306 #else
2307 if (call_used_count != other_call_used_count)
2308 return call_used_count > other_call_used_count;
2309 #endif
2310 return FALSE;
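/* The predicate above is a lexicographic cascade: the most
   important criterion is compared first and the next is consulted
   only on a tie, with FALSE (keep the incumbent) as the final
   fallback.  The bare shape of the pattern (hypothetical names,
   illustration only):  */
#if 0
static int
example_better_p (int cost, int other_cost, int length, int other_length)
{
  if (cost != other_cost)
    return cost < other_cost;     /* Cheaper spill wins.  */
  if (length != other_length)
    return length > other_length; /* Then longer high-pressure life.  */
  return 0;                       /* Tie: prefer the incumbent.  */
}
#endif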
2315 /* The function returns (through CALL_CLOBBERED_REGS) the hard
2316    registers changed by all function calls in REGNO's live range.  */
2317 void
2318 collect_pseudo_call_clobbered_regs (int regno,
2319 HARD_REG_SET (*call_clobbered_regs))
2321 int i;
2322 allocno_t a;
2323 HARD_REG_SET clobbered_regs;
2324 rtx call, *allocno_calls;
2326 a = regno_allocno_map [regno];
2327 CLEAR_HARD_REG_SET (*call_clobbered_regs);
2328 allocno_calls = (VEC_address (rtx, regno_calls [regno])
2329 + ALLOCNO_CALLS_CROSSED_START (a));
2330 for (i = ALLOCNO_CALLS_CROSSED_NUM (a) - 1; i >= 0; i--)
2332 call = allocno_calls [i];
2333 get_call_invalidated_used_regs (call, &clobbered_regs, FALSE);
2334 IOR_HARD_REG_SET (*call_clobbered_regs, clobbered_regs);
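/* The accumulation above starts from the empty set and ORs in the
   clobber set of every call in the live range.  The same fold with
   plain unsigned masks standing in for HARD_REG_SET (hypothetical,
   illustration only):  */
#if 0
static unsigned
example_union_clobbers (const unsigned *call_masks, int ncalls)
{
  unsigned all = 0;

  while (--ncalls >= 0)
    all |= call_masks[ncalls];
  return all;
}
#endif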
2340 /* Allocate and initialize data necessary for assign_hard_reg. */
2341 void
2342 initiate_ira_assign (void)
2344 sorted_allocnos = ira_allocate (sizeof (allocno_t) * allocnos_num);
2345 consideration_allocno_bitmap = ira_allocate_bitmap ();
2346 initiate_cost_update ();
2347 allocno_priorities = ira_allocate (sizeof (int) * allocnos_num);
2350 /* Deallocate data used by assign_hard_reg. */
2351 void
2352 finish_ira_assign (void)
2354 ira_free (sorted_allocnos);
2355 ira_free_bitmap (consideration_allocno_bitmap);
2356 finish_cost_update ();
2357 ira_free (allocno_priorities);
2362 /* Entry function doing color-based register allocation. */
2363 void
2364 ira_color (void)
2366 VARRAY_GENERIC_PTR_NOGC_INIT (allocno_stack_varray, allocnos_num,
2367 "stack of allocnos");
2368 memset (allocated_hardreg_p, 0, sizeof (allocated_hardreg_p));
2369 initiate_ira_assign ();
2370 do_coloring ();
2371 finish_ira_assign ();
2372 VARRAY_FREE (allocno_stack_varray);
2373 move_spill_restore ();