e <brendan@zen.org>
[official-gcc.git] / gcc / ra.c
blob34ac436209cf60e41d97d4c2d5284afdfeb6457f
1 /* Graph coloring register allocator
2 Copyright (C) 2001, 2002 Free Software Foundation, Inc.
3 Contributed by Michael Matz <matz@suse.de>
4 and Daniel Berlin <dan@cgsoftware.com>.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under the
9 terms of the GNU General Public License as published by the Free Software
10 Foundation; either version 2, or (at your option) any later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
14 FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
15 details.
17 You should have received a copy of the GNU General Public License along
18 with GCC; see the file COPYING. If not, write to the Free Software
19 Foundation, 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. */
21 #include "config.h"
22 #include "system.h"
23 #include "rtl.h"
24 #include "tm_p.h"
25 #include "insn-config.h"
26 #include "recog.h"
27 #include "reload.h"
28 #include "integrate.h"
29 #include "function.h"
30 #include "regs.h"
31 #include "obstack.h"
32 #include "hard-reg-set.h"
33 #include "basic-block.h"
34 #include "df.h"
35 #include "expr.h"
36 #include "output.h"
37 #include "toplev.h"
38 #include "flags.h"
39 #include "ra.h"
41 #define obstack_chunk_alloc xmalloc
42 #define obstack_chunk_free free
/* This is the toplevel file of a graph coloring register allocator.
   It is able to act like a George & Appel allocator, i.e. with iterative
   coalescing plus spill coalescing/propagation.
   And it can act as a traditional Briggs allocator, although with
   optimistic coalescing.  Additionally it has a custom pass, which
   tries to reduce the overall cost of the colored graph.

   We support two modes of spilling: spill-everywhere, which is extremely
   fast, and interference region spilling, which reduces spill code to a
   large extent, but is slower.

   Helpful documents:

   Briggs, P., Cooper, K. D., and Torczon, L. 1994.  Improvements to graph
   coloring register allocation.  ACM Trans. Program. Lang. Syst. 16, 3 (May),
   428-455.

   Bergner, P., Dahl, P., Engebretsen, D., and O'Keefe, M. 1997.  Spill code
   minimization via interference region spilling.  In Proc. ACM SIGPLAN '97
   Conf. on Prog. Language Design and Implementation.  ACM, 287-295.

   George, L., Appel, A.W. 1996.  Iterated register coalescing.
   ACM Trans. Program. Lang. Syst. 18, 3 (May), 300-324.  */
70 /* This file contains the main entry point (reg_alloc), some helper routines
71 used by more than one file of the register allocator, and the toplevel
72 driver procedure (one_pass). */
/* Things one might do at some point:

   * Lattice based rematerialization
   * create definitions of ever-live regs at the beginning of
     the insn chain
   * insert loads as soon, stores as late as possible
   * insert spill insns as outward as possible (either looptree, or LCM)
   * reuse stack-slots
   * delete coalesced insns.  Partly done.  The rest can only go, when we get
     rid of reload.
   * don't destroy coalescing information completely when spilling
   * use the constraints from asms  */
88 static struct obstack ra_obstack;
89 static void create_insn_info PARAMS ((struct df *));
90 static void free_insn_info PARAMS ((void));
91 static void alloc_mem PARAMS ((struct df *));
92 static void free_mem PARAMS ((struct df *));
93 static void free_all_mem PARAMS ((struct df *df));
94 static int one_pass PARAMS ((struct df *, int));
95 static void check_df PARAMS ((struct df *));
96 static void init_ra PARAMS ((void));
98 void reg_alloc PARAMS ((void));
100 /* These global variables are "internal" to the register allocator.
101 They are all documented at their declarations in ra.h. */
103 /* Somewhen we want to get rid of one of those sbitmaps.
104 (for now I need the sup_igraph to note if there is any conflict between
105 parts of webs at all. I can't use igraph for this, as there only the real
106 conflicts are noted.) This is only used to prevent coalescing two
107 conflicting webs, were only parts of them are in conflict. */
108 sbitmap igraph;
109 sbitmap sup_igraph;
111 /* Note the insns not inserted by the allocator, where we detected any
112 deaths of pseudos. It is used to detect closeness of defs and uses.
113 In the first pass this is empty (we could initialize it from REG_DEAD
114 notes), in the other passes it is left from the pass before. */
115 sbitmap insns_with_deaths;
116 int death_insns_max_uid;
118 struct web_part *web_parts;
120 unsigned int num_webs;
121 unsigned int num_subwebs;
122 unsigned int num_allwebs;
123 struct web **id2web;
124 struct web *hardreg2web[FIRST_PSEUDO_REGISTER];
125 struct web **def2web;
126 struct web **use2web;
127 struct move_list *wl_moves;
128 int ra_max_regno;
129 short *ra_reg_renumber;
130 struct df *df;
131 bitmap *live_at_end;
132 int ra_pass;
133 unsigned int max_normal_pseudo;
134 int an_unusable_color;
136 /* The different lists on which a web can be (based on the type). */
137 struct dlist *web_lists[(int) LAST_NODE_TYPE];
139 unsigned int last_def_id;
140 unsigned int last_use_id;
141 unsigned int last_num_webs;
142 int last_max_uid;
143 sbitmap last_check_uses;
144 unsigned int remember_conflicts;
146 int orig_max_uid;
148 HARD_REG_SET never_use_colors;
149 HARD_REG_SET usable_regs[N_REG_CLASSES];
150 unsigned int num_free_regs[N_REG_CLASSES];
151 HARD_REG_SET hardregs_for_mode[NUM_MACHINE_MODES];
152 unsigned char byte2bitcount[256];
154 unsigned int debug_new_regalloc = -1;
155 int flag_ra_biased = 0;
156 int flag_ra_improved_spilling = 0;
157 int flag_ra_ir_spilling = 0;
158 int flag_ra_optimistic_coalescing = 0;
159 int flag_ra_break_aliases = 0;
160 int flag_ra_merge_spill_costs = 0;
161 int flag_ra_spill_every_use = 0;
162 int flag_ra_dump_notes = 0;
164 /* Fast allocation of small objects, which live until the allocator
165 is done. Allocate an object of SIZE bytes. */
167 void *
168 ra_alloc (size)
169 size_t size;
171 return obstack_alloc (&ra_obstack, size);
174 /* Like ra_alloc(), but clear the returned memory. */
176 void *
177 ra_calloc (size)
178 size_t size;
180 void *p = obstack_alloc (&ra_obstack, size);
181 memset (p, 0, size);
182 return p;
185 /* Returns the number of hardregs in HARD_REG_SET RS. */
188 hard_regs_count (rs)
189 HARD_REG_SET rs;
191 int count = 0;
192 #ifdef HARD_REG_SET
193 while (rs)
195 unsigned char byte = rs & 0xFF;
196 rs >>= 8;
197 /* Avoid memory access, if nothing is set. */
198 if (byte)
199 count += byte2bitcount[byte];
201 #else
202 unsigned int ofs;
203 for (ofs = 0; ofs < HARD_REG_SET_LONGS; ofs++)
205 HARD_REG_ELT_TYPE elt = rs[ofs];
206 while (elt)
208 unsigned char byte = elt & 0xFF;
209 elt >>= 8;
210 if (byte)
211 count += byte2bitcount[byte];
214 #endif
215 return count;
218 /* Basically like emit_move_insn (i.e. validifies constants and such),
219 but also handle MODE_CC moves (but then the operands must already
220 be basically valid. */
223 ra_emit_move_insn (x, y)
224 rtx x, y;
226 enum machine_mode mode = GET_MODE (x);
227 if (GET_MODE_CLASS (mode) == MODE_CC)
228 return emit_insn (gen_move_insn (x, y));
229 else
230 return emit_move_insn (x, y);
/* Per-insn def/use cache built by create_insn_info(): insn_df is
   indexed by INSN_UID (valid up to insn_df_max_uid), and
   refs_for_insn_df is the single backing array that the defs/uses
   pointers in insn_df point into.  */
int insn_df_max_uid;
struct ra_insn_info *insn_df;
static struct ref **refs_for_insn_df;
237 /* Create the insn_df structure for each insn to have fast access to
238 all valid defs and uses in an insn. */
240 static void
241 create_insn_info (df)
242 struct df *df;
244 rtx insn;
245 struct ref **act_refs;
246 insn_df_max_uid = get_max_uid ();
247 insn_df = xcalloc (insn_df_max_uid, sizeof (insn_df[0]));
248 refs_for_insn_df = xcalloc (df->def_id + df->use_id, sizeof (struct ref *));
249 act_refs = refs_for_insn_df;
250 /* We create those things backwards to mimic the order in which
251 the insns are visited in rewrite_program2() and live_in(). */
252 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
254 int uid = INSN_UID (insn);
255 unsigned int n;
256 struct df_link *link;
257 if (!INSN_P (insn))
258 continue;
259 for (n = 0, link = DF_INSN_DEFS (df, insn); link; link = link->next)
260 if (link->ref
261 && (DF_REF_REGNO (link->ref) >= FIRST_PSEUDO_REGISTER
262 || !TEST_HARD_REG_BIT (never_use_colors,
263 DF_REF_REGNO (link->ref))))
265 if (n == 0)
266 insn_df[uid].defs = act_refs;
267 insn_df[uid].defs[n++] = link->ref;
269 act_refs += n;
270 insn_df[uid].num_defs = n;
271 for (n = 0, link = DF_INSN_USES (df, insn); link; link = link->next)
272 if (link->ref
273 && (DF_REF_REGNO (link->ref) >= FIRST_PSEUDO_REGISTER
274 || !TEST_HARD_REG_BIT (never_use_colors,
275 DF_REF_REGNO (link->ref))))
277 if (n == 0)
278 insn_df[uid].uses = act_refs;
279 insn_df[uid].uses[n++] = link->ref;
281 act_refs += n;
282 insn_df[uid].num_uses = n;
284 if (refs_for_insn_df + (df->def_id + df->use_id) < act_refs)
285 abort ();
288 /* Free the insn_df structures. */
290 static void
291 free_insn_info ()
293 free (refs_for_insn_df);
294 refs_for_insn_df = NULL;
295 free (insn_df);
296 insn_df = NULL;
297 insn_df_max_uid = 0;
300 /* Search WEB for a subweb, which represents REG. REG needs to
301 be a SUBREG, and the inner reg of it needs to be the one which is
302 represented by WEB. Returns the matching subweb or NULL. */
304 struct web *
305 find_subweb (web, reg)
306 struct web *web;
307 rtx reg;
309 struct web *w;
310 if (GET_CODE (reg) != SUBREG)
311 abort ();
312 for (w = web->subreg_next; w; w = w->subreg_next)
313 if (GET_MODE (w->orig_x) == GET_MODE (reg)
314 && SUBREG_BYTE (w->orig_x) == SUBREG_BYTE (reg))
315 return w;
316 return NULL;
319 /* Similar to find_subweb(), but matches according to SIZE_WORD,
320 a collection of the needed size and offset (in bytes). */
322 struct web *
323 find_subweb_2 (web, size_word)
324 struct web *web;
325 unsigned int size_word;
327 struct web *w = web;
328 if (size_word == GET_MODE_SIZE (GET_MODE (web->orig_x)))
329 /* size_word == size means BYTE_BEGIN(size_word) == 0. */
330 return web;
331 for (w = web->subreg_next; w; w = w->subreg_next)
333 unsigned int bl = rtx_to_bits (w->orig_x);
334 if (size_word == bl)
335 return w;
337 return NULL;
340 /* Returns the superweb for SUBWEB. */
342 struct web *
343 find_web_for_subweb_1 (subweb)
344 struct web *subweb;
346 while (subweb->parent_web)
347 subweb = subweb->parent_web;
348 return subweb;
351 /* Determine if two hard register sets intersect.
352 Return 1 if they do. */
355 hard_regs_intersect_p (a, b)
356 HARD_REG_SET *a, *b;
358 HARD_REG_SET c;
359 COPY_HARD_REG_SET (c, *a);
360 AND_HARD_REG_SET (c, *b);
361 GO_IF_HARD_REG_SUBSET (c, reg_class_contents[(int) NO_REGS], lose);
362 return 1;
363 lose:
364 return 0;
367 /* Allocate and initialize the memory necessary for one pass of the
368 register allocator. */
370 static void
371 alloc_mem (df)
372 struct df *df;
374 int i;
375 ra_build_realloc (df);
376 if (!live_at_end)
378 live_at_end = (bitmap *) xmalloc ((last_basic_block + 2)
379 * sizeof (bitmap));
380 for (i = 0; i < last_basic_block + 2; i++)
381 live_at_end[i] = BITMAP_XMALLOC ();
382 live_at_end += 2;
384 create_insn_info (df);
387 /* Free the memory which isn't necessary for the next pass. */
389 static void
390 free_mem (df)
391 struct df *df ATTRIBUTE_UNUSED;
393 free_insn_info ();
394 ra_build_free ();
397 /* Free all memory allocated for the register allocator. Used, when
398 it's done. */
400 static void
401 free_all_mem (df)
402 struct df *df;
404 unsigned int i;
405 live_at_end -= 2;
406 for (i = 0; i < (unsigned)last_basic_block + 2; i++)
407 BITMAP_XFREE (live_at_end[i]);
408 free (live_at_end);
410 ra_colorize_free_all ();
411 ra_build_free_all (df);
412 obstack_free (&ra_obstack, NULL);
415 static long ticks_build;
416 static long ticks_rebuild;
418 /* Perform one pass of allocation. Returns nonzero, if some spill code
419 was added, i.e. if the allocator needs to rerun. */
421 static int
422 one_pass (df, rebuild)
423 struct df *df;
424 int rebuild;
426 long ticks = clock ();
427 int something_spilled;
428 remember_conflicts = 0;
430 /* Build the complete interference graph, or if this is not the first
431 pass, rebuild it incrementally. */
432 build_i_graph (df);
434 /* From now on, if we create new conflicts, we need to remember the
435 initial list of conflicts per web. */
436 remember_conflicts = 1;
437 if (!rebuild)
438 dump_igraph_machine ();
440 /* Colorize the I-graph. This results in either a list of
441 spilled_webs, in which case we need to run the spill phase, and
442 rerun the allocator, or that list is empty, meaning we are done. */
443 ra_colorize_graph (df);
445 last_max_uid = get_max_uid ();
446 /* actual_spill() might change WEBS(SPILLED) and even empty it,
447 so we need to remember it's state. */
448 something_spilled = !!WEBS(SPILLED);
450 /* Add spill code if necessary. */
451 if (something_spilled)
452 actual_spill ();
454 ticks = clock () - ticks;
455 if (rebuild)
456 ticks_rebuild += ticks;
457 else
458 ticks_build += ticks;
459 return something_spilled;
462 /* Initialize various arrays for the register allocator. */
464 static void
465 init_ra ()
467 int i;
468 HARD_REG_SET rs;
469 #ifdef ELIMINABLE_REGS
470 static struct {int from, to; } eliminables[] = ELIMINABLE_REGS;
471 unsigned int j;
472 #endif
473 int need_fp
474 = (! flag_omit_frame_pointer
475 #ifdef EXIT_IGNORE_STACK
476 || (current_function_calls_alloca && EXIT_IGNORE_STACK)
477 #endif
478 || FRAME_POINTER_REQUIRED);
480 ra_colorize_init ();
482 /* We can't ever use any of the fixed regs. */
483 COPY_HARD_REG_SET (never_use_colors, fixed_reg_set);
485 /* Additionally don't even try to use hardregs, which we already
486 know are not eliminable. This includes also either the
487 hard framepointer or all regs which are eliminable into the
488 stack pointer, if need_fp is set. */
489 #ifdef ELIMINABLE_REGS
490 for (j = 0; j < ARRAY_SIZE (eliminables); j++)
492 if (! CAN_ELIMINATE (eliminables[j].from, eliminables[j].to)
493 || (eliminables[j].to == STACK_POINTER_REGNUM && need_fp))
494 for (i = HARD_REGNO_NREGS (eliminables[j].from, Pmode); i--;)
495 SET_HARD_REG_BIT (never_use_colors, eliminables[j].from + i);
497 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
498 if (need_fp)
499 for (i = HARD_REGNO_NREGS (HARD_FRAME_POINTER_REGNUM, Pmode); i--;)
500 SET_HARD_REG_BIT (never_use_colors, HARD_FRAME_POINTER_REGNUM + i);
501 #endif
503 #else
504 if (need_fp)
505 for (i = HARD_REGNO_NREGS (FRAME_POINTER_REGNUM, Pmode); i--;)
506 SET_HARD_REG_BIT (never_use_colors, FRAME_POINTER_REGNUM + i);
507 #endif
509 /* Stack and argument pointer are also rather useless to us. */
510 for (i = HARD_REGNO_NREGS (STACK_POINTER_REGNUM, Pmode); i--;)
511 SET_HARD_REG_BIT (never_use_colors, STACK_POINTER_REGNUM + i);
513 for (i = HARD_REGNO_NREGS (ARG_POINTER_REGNUM, Pmode); i--;)
514 SET_HARD_REG_BIT (never_use_colors, ARG_POINTER_REGNUM + i);
516 for (i = 0; i < 256; i++)
518 unsigned char byte = ((unsigned) i) & 0xFF;
519 unsigned char count = 0;
520 while (byte)
522 if (byte & 1)
523 count++;
524 byte >>= 1;
526 byte2bitcount[i] = count;
529 for (i = 0; i < N_REG_CLASSES; i++)
531 int size;
532 COPY_HARD_REG_SET (rs, reg_class_contents[i]);
533 AND_COMPL_HARD_REG_SET (rs, never_use_colors);
534 size = hard_regs_count (rs);
535 num_free_regs[i] = size;
536 COPY_HARD_REG_SET (usable_regs[i], rs);
539 /* Setup hardregs_for_mode[].
540 We are not interested only in the beginning of a multi-reg, but in
541 all the hardregs involved. Maybe HARD_REGNO_MODE_OK() only ok's
542 for beginnings. */
543 for (i = 0; i < NUM_MACHINE_MODES; i++)
545 int reg, size;
546 CLEAR_HARD_REG_SET (rs);
547 for (reg = 0; reg < FIRST_PSEUDO_REGISTER; reg++)
548 if (HARD_REGNO_MODE_OK (reg, i)
549 /* Ignore VOIDmode and similar things. */
550 && (size = HARD_REGNO_NREGS (reg, i)) != 0
551 && (reg + size) <= FIRST_PSEUDO_REGISTER)
553 while (size--)
554 SET_HARD_REG_BIT (rs, reg + size);
556 COPY_HARD_REG_SET (hardregs_for_mode[i], rs);
559 for (an_unusable_color = 0; an_unusable_color < FIRST_PSEUDO_REGISTER;
560 an_unusable_color++)
561 if (TEST_HARD_REG_BIT (never_use_colors, an_unusable_color))
562 break;
563 if (an_unusable_color == FIRST_PSEUDO_REGISTER)
564 abort ();
566 orig_max_uid = get_max_uid ();
567 compute_bb_for_insn ();
568 ra_reg_renumber = NULL;
569 insns_with_deaths = sbitmap_alloc (orig_max_uid);
570 death_insns_max_uid = orig_max_uid;
571 sbitmap_ones (insns_with_deaths);
572 gcc_obstack_init (&ra_obstack);
575 /* Check the consistency of DF. This aborts if it violates some
576 invariances we expect. */
578 static void
579 check_df (df)
580 struct df *df;
582 struct df_link *link;
583 rtx insn;
584 int regno;
585 unsigned int ui;
586 bitmap b = BITMAP_XMALLOC ();
587 bitmap empty_defs = BITMAP_XMALLOC ();
588 bitmap empty_uses = BITMAP_XMALLOC ();
590 /* Collect all the IDs of NULL references in the ID->REF arrays,
591 as df.c leaves them when updating the df structure. */
592 for (ui = 0; ui < df->def_id; ui++)
593 if (!df->defs[ui])
594 bitmap_set_bit (empty_defs, ui);
595 for (ui = 0; ui < df->use_id; ui++)
596 if (!df->uses[ui])
597 bitmap_set_bit (empty_uses, ui);
599 /* For each insn we check if the chain of references contain each
600 ref only once, doesn't contain NULL refs, or refs whose ID is invalid
601 (it df->refs[id] element is NULL). */
602 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
603 if (INSN_P (insn))
605 bitmap_clear (b);
606 for (link = DF_INSN_DEFS (df, insn); link; link = link->next)
607 if (!link->ref || bitmap_bit_p (empty_defs, DF_REF_ID (link->ref))
608 || bitmap_bit_p (b, DF_REF_ID (link->ref)))
609 abort ();
610 else
611 bitmap_set_bit (b, DF_REF_ID (link->ref));
613 bitmap_clear (b);
614 for (link = DF_INSN_USES (df, insn); link; link = link->next)
615 if (!link->ref || bitmap_bit_p (empty_uses, DF_REF_ID (link->ref))
616 || bitmap_bit_p (b, DF_REF_ID (link->ref)))
617 abort ();
618 else
619 bitmap_set_bit (b, DF_REF_ID (link->ref));
622 /* Now the same for the chains per register number. */
623 for (regno = 0; regno < max_reg_num (); regno++)
625 bitmap_clear (b);
626 for (link = df->regs[regno].defs; link; link = link->next)
627 if (!link->ref || bitmap_bit_p (empty_defs, DF_REF_ID (link->ref))
628 || bitmap_bit_p (b, DF_REF_ID (link->ref)))
629 abort ();
630 else
631 bitmap_set_bit (b, DF_REF_ID (link->ref));
633 bitmap_clear (b);
634 for (link = df->regs[regno].uses; link; link = link->next)
635 if (!link->ref || bitmap_bit_p (empty_uses, DF_REF_ID (link->ref))
636 || bitmap_bit_p (b, DF_REF_ID (link->ref)))
637 abort ();
638 else
639 bitmap_set_bit (b, DF_REF_ID (link->ref));
642 BITMAP_XFREE (empty_uses);
643 BITMAP_XFREE (empty_defs);
644 BITMAP_XFREE (b);
647 /* Main register allocator entry point. */
649 void
650 reg_alloc ()
652 int changed;
653 FILE *ra_dump_file = rtl_dump_file;
654 rtx last = get_last_insn ();
656 if (! INSN_P (last))
657 last = prev_real_insn (last);
658 /* If this is an empty function we shouldn't do all the following,
659 but instead just setup what's necessary, and return. */
661 /* We currently rely on the existance of the return value USE as
662 one of the last insns. Add it if it's not there anymore. */
663 if (last)
665 edge e;
666 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
668 basic_block bb = e->src;
669 last = bb->end;
670 if (!INSN_P (last) || GET_CODE (PATTERN (last)) != USE)
672 rtx insns;
673 start_sequence ();
674 use_return_register ();
675 insns = get_insns ();
676 end_sequence ();
677 emit_insn_after (insns, last);
682 /* Setup debugging levels. */
683 switch (0)
685 /* Some usefull presets of the debug level, I often use. */
686 case 0: debug_new_regalloc = DUMP_EVER; break;
687 case 1: debug_new_regalloc = DUMP_COSTS; break;
688 case 2: debug_new_regalloc = DUMP_IGRAPH_M; break;
689 case 3: debug_new_regalloc = DUMP_COLORIZE + DUMP_COSTS; break;
690 case 4: debug_new_regalloc = DUMP_COLORIZE + DUMP_COSTS + DUMP_WEBS;
691 break;
692 case 5: debug_new_regalloc = DUMP_FINAL_RTL + DUMP_COSTS +
693 DUMP_CONSTRAINTS;
694 break;
695 case 6: debug_new_regalloc = DUMP_VALIDIFY; break;
697 if (!rtl_dump_file)
698 debug_new_regalloc = 0;
700 /* Run regclass first, so we know the preferred and alternate classes
701 for each pseudo. Deactivate emitting of debug info, if it's not
702 explicitely requested. */
703 if ((debug_new_regalloc & DUMP_REGCLASS) == 0)
704 rtl_dump_file = NULL;
705 regclass (get_insns (), max_reg_num (), rtl_dump_file);
706 rtl_dump_file = ra_dump_file;
708 /* We don't use those NOTEs, and as we anyway change all registers,
709 they only make problems later. */
710 count_or_remove_death_notes (NULL, 1);
712 /* Initialize the different global arrays and regsets. */
713 init_ra ();
715 /* And some global variables. */
716 ra_pass = 0;
717 no_new_pseudos = 0;
718 max_normal_pseudo = (unsigned) max_reg_num ();
719 ra_rewrite_init ();
720 last_def_id = 0;
721 last_use_id = 0;
722 last_num_webs = 0;
723 last_max_uid = 0;
724 last_check_uses = NULL;
725 live_at_end = NULL;
726 WEBS(INITIAL) = NULL;
727 WEBS(FREE) = NULL;
728 memset (hardreg2web, 0, sizeof (hardreg2web));
729 ticks_build = ticks_rebuild = 0;
731 /* The default is to use optimistic coalescing with interference
732 region spilling, without biased coloring. */
733 flag_ra_biased = 0;
734 flag_ra_spill_every_use = 0;
735 flag_ra_improved_spilling = 1;
736 flag_ra_ir_spilling = 1;
737 flag_ra_break_aliases = 0;
738 flag_ra_optimistic_coalescing = 1;
739 flag_ra_merge_spill_costs = 1;
740 if (flag_ra_optimistic_coalescing)
741 flag_ra_break_aliases = 1;
742 flag_ra_dump_notes = 0;
744 /* Allocate the global df structure. */
745 df = df_init ();
747 /* This is the main loop, calling one_pass as long as there are still
748 some spilled webs. */
751 ra_debug_msg (DUMP_NEARLY_EVER, "RegAlloc Pass %d\n\n", ra_pass);
752 if (ra_pass++ > 40)
753 internal_error ("Didn't find a coloring.\n");
755 /* First collect all the register refs and put them into
756 chains per insn, and per regno. In later passes only update
757 that info from the new and modified insns. */
758 df_analyse (df, (ra_pass == 1) ? 0 : (bitmap) -1,
759 DF_HARD_REGS | DF_RD_CHAIN | DF_RU_CHAIN);
761 if ((debug_new_regalloc & DUMP_DF) != 0)
763 rtx insn;
764 df_dump (df, DF_HARD_REGS, rtl_dump_file);
765 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
766 if (INSN_P (insn))
767 df_insn_debug_regno (df, insn, rtl_dump_file);
769 check_df (df);
771 /* Now allocate the memory needed for this pass, or (if it's not the
772 first pass), reallocate only additional memory. */
773 alloc_mem (df);
775 /* Build and colorize the interference graph, and possibly emit
776 spill insns. This also might delete certain move insns. */
777 changed = one_pass (df, ra_pass > 1);
779 /* If that produced no changes, the graph was colorizable. */
780 if (!changed)
782 /* Change the insns to refer to the new pseudos (one per web). */
783 emit_colors (df);
784 /* Already setup a preliminary reg_renumber[] array, but don't
785 free our own version. reg_renumber[] will again be destroyed
786 later. We right now need it in dump_constraints() for
787 constrain_operands(1) whose subproc sometimes reference
788 it (because we are checking strictly, i.e. as if
789 after reload). */
790 setup_renumber (0);
791 /* Delete some more of the coalesced moves. */
792 delete_moves ();
793 dump_constraints ();
795 else
797 /* If there were changes, this means spill code was added,
798 therefore repeat some things, including some initialization
799 of global data structures. */
800 if ((debug_new_regalloc & DUMP_REGCLASS) == 0)
801 rtl_dump_file = NULL;
802 /* We have new pseudos (the stackwebs). */
803 allocate_reg_info (max_reg_num (), FALSE, FALSE);
804 /* And new insns. */
805 compute_bb_for_insn ();
806 /* Some of them might be dead. */
807 delete_trivially_dead_insns (get_insns (), max_reg_num ());
808 /* Those new pseudos need to have their REFS count set. */
809 reg_scan_update (get_insns (), NULL, max_regno);
810 max_regno = max_reg_num ();
811 /* And they need usefull classes too. */
812 regclass (get_insns (), max_reg_num (), rtl_dump_file);
813 rtl_dump_file = ra_dump_file;
815 /* Remember the number of defs and uses, so we can distinguish
816 new from old refs in the next pass. */
817 last_def_id = df->def_id;
818 last_use_id = df->use_id;
821 /* Output the graph, and possibly the current insn sequence. */
822 dump_ra (df);
823 if (changed && (debug_new_regalloc & DUMP_RTL) != 0)
825 ra_print_rtl_with_bb (rtl_dump_file, get_insns ());
826 fflush (rtl_dump_file);
829 /* Reset the web lists. */
830 reset_lists ();
831 free_mem (df);
833 while (changed);
835 /* We are done with allocation, free all memory and output some
836 debug info. */
837 free_all_mem (df);
838 df_finish (df);
839 if ((debug_new_regalloc & DUMP_RESULTS) == 0)
840 dump_cost (DUMP_COSTS);
841 ra_debug_msg (DUMP_COSTS, "ticks for build-phase: %ld\n", ticks_build);
842 ra_debug_msg (DUMP_COSTS, "ticks for rebuild-phase: %ld\n", ticks_rebuild);
843 if ((debug_new_regalloc & (DUMP_FINAL_RTL | DUMP_RTL)) != 0)
844 ra_print_rtl_with_bb (rtl_dump_file, get_insns ());
846 /* We might have new pseudos, so allocate the info arrays for them. */
847 if ((debug_new_regalloc & DUMP_SM) == 0)
848 rtl_dump_file = NULL;
849 no_new_pseudos = 0;
850 allocate_reg_info (max_reg_num (), FALSE, FALSE);
851 no_new_pseudos = 1;
852 rtl_dump_file = ra_dump_file;
854 /* Some spill insns could've been inserted after trapping calls, i.e.
855 at the end of a basic block, which really ends at that call.
856 Fixup that breakages by adjusting basic block boundaries. */
857 fixup_abnormal_edges ();
859 /* Cleanup the flow graph. */
860 if ((debug_new_regalloc & DUMP_LAST_FLOW) == 0)
861 rtl_dump_file = NULL;
862 life_analysis (get_insns (), rtl_dump_file,
863 PROP_DEATH_NOTES | PROP_LOG_LINKS | PROP_REG_INFO);
864 cleanup_cfg (CLEANUP_EXPENSIVE);
865 recompute_reg_usage (get_insns (), TRUE);
866 if (rtl_dump_file)
867 dump_flow_info (rtl_dump_file);
868 rtl_dump_file = ra_dump_file;
870 /* update_equiv_regs() can't be called after register allocation.
871 It might delete some pseudos, and insert other insns setting
872 up those pseudos in different places. This of course screws up
873 the allocation because that may destroy a hardreg for another
874 pseudo.
875 XXX we probably should do something like that on our own. I.e.
876 creating REG_EQUIV notes. */
877 /*update_equiv_regs ();*/
879 /* Setup the reg_renumber[] array for reload. */
880 setup_renumber (1);
881 sbitmap_free (insns_with_deaths);
883 /* Remove REG_DEAD notes which are incorrectly set. See the docu
884 of that function. */
885 remove_suspicious_death_notes ();
887 if ((debug_new_regalloc & DUMP_LAST_RTL) != 0)
888 ra_print_rtl_with_bb (rtl_dump_file, get_insns ());
889 dump_static_insn_cost (rtl_dump_file,
890 "after allocation/spilling, before reload", NULL);
892 /* Allocate the reg_equiv_memory_loc array for reload. */
893 reg_equiv_memory_loc = (rtx *) xcalloc (max_regno, sizeof (rtx));
894 /* And possibly initialize it. */
895 allocate_initial_values (reg_equiv_memory_loc);
896 /* And one last regclass pass just before reload. */
897 regclass (get_insns (), max_reg_num (), rtl_dump_file);
901 vim:cinoptions={.5s,g0,p5,t0,(0,^-0.5s,n-0.5s:tw=78:cindent:sw=4: