fix spaces vs. tabs for scripts.
[official-gcc.git] / gcc / ra.c
bloba821623ba529e8a56fab0935d58001a6fd90d448
1 /* Graph coloring register allocator
2 Copyright (C) 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
3 Contributed by Michael Matz <matz@suse.de>
4 and Daniel Berlin <dan@cgsoftware.com>.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under the
9 terms of the GNU General Public License as published by the Free Software
10 Foundation; either version 2, or (at your option) any later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
14 FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
15 details.
17 You should have received a copy of the GNU General Public License along
18 with GCC; see the file COPYING. If not, write to the Free Software
19 Foundation, 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "rtl.h"
26 #include "tm_p.h"
27 #include "insn-config.h"
28 #include "recog.h"
29 #include "reload.h"
30 #include "integrate.h"
31 #include "function.h"
32 #include "regs.h"
33 #include "obstack.h"
34 #include "hard-reg-set.h"
35 #include "basic-block.h"
36 #include "df.h"
37 #include "expr.h"
38 #include "output.h"
39 #include "toplev.h"
40 #include "flags.h"
41 #include "ra.h"
43 /* This is the toplevel file of a graph coloring register allocator.
44 It is able to act like a George & Appel allocator, i.e. with iterative
45 coalescing plus spill coalescing/propagation.
46 And it can act as a traditional Briggs allocator, although with
47 optimistic coalescing. Additionally it has a custom pass, which
48 tries to reduce the overall cost of the colored graph.
50 We support two modes of spilling: spill-everywhere, which is extremely
51 fast, and interference region spilling, which reduces spill code to a
52 large extent, but is slower.
54 Helpful documents:
56 Briggs, P., Cooper, K. D., and Torczon, L. 1994. Improvements to graph
57 coloring register allocation. ACM Trans. Program. Lang. Syst. 16, 3 (May),
58 428-455.
60 Bergner, P., Dahl, P., Engebretsen, D., and O'Keefe, M. 1997. Spill code
61 minimization via interference region spilling. In Proc. ACM SIGPLAN '97
62 Conf. on Prog. Language Design and Implementation. ACM, 287-295.
64 George, L., Appel, A.W. 1996. Iterated register coalescing.
65 ACM Trans. Program. Lang. Syst. 18, 3 (May), 300-324.
69 /* This file contains the main entry point (reg_alloc), some helper routines
70 used by more than one file of the register allocator, and the toplevel
71 driver procedure (one_pass). */
73 /* Things, one might do somewhen:
75 * Lattice based rematerialization
76 * create definitions of ever-life regs at the beginning of
77 the insn chain
78 * insert loads as soon, stores as late as possible
79 * insert spill insns as outward as possible (either looptree, or LCM)
80 * reuse stack-slots
81 * delete coalesced insns. Partly done. The rest can only go, when we get
82 rid of reload.
83 * don't destroy coalescing information completely when spilling
84 * use the constraints from asms
87 static int first_hard_reg (HARD_REG_SET);
88 static struct obstack ra_obstack;
89 static void create_insn_info (struct df *);
90 static void free_insn_info (void);
91 static void alloc_mem (struct df *);
92 static void free_mem (struct df *);
93 static void free_all_mem (struct df *df);
94 static int one_pass (struct df *, int);
95 static void check_df (struct df *);
96 static void init_ra (void);
98 void reg_alloc (void);
100 /* These global variables are "internal" to the register allocator.
101 They are all documented at their declarations in ra.h. */
103 /* Somewhen we want to get rid of one of those sbitmaps.
104 (for now I need the sup_igraph to note if there is any conflict between
105 parts of webs at all. I can't use igraph for this, as there only the real
106 conflicts are noted.) This is only used to prevent coalescing two
107 conflicting webs, were only parts of them are in conflict. */
108 sbitmap igraph;
109 sbitmap sup_igraph;
111 /* Note the insns not inserted by the allocator, where we detected any
112 deaths of pseudos. It is used to detect closeness of defs and uses.
113 In the first pass this is empty (we could initialize it from REG_DEAD
114 notes), in the other passes it is left from the pass before. */
115 sbitmap insns_with_deaths;
116 int death_insns_max_uid;
118 struct web_part *web_parts;
120 unsigned int num_webs;
121 unsigned int num_subwebs;
122 unsigned int num_allwebs;
123 struct web **id2web;
124 struct web *hardreg2web[FIRST_PSEUDO_REGISTER];
125 struct web **def2web;
126 struct web **use2web;
127 struct move_list *wl_moves;
128 int ra_max_regno;
129 short *ra_reg_renumber;
130 struct df *df;
131 bitmap *live_at_end;
132 int ra_pass;
133 unsigned int max_normal_pseudo;
134 int an_unusable_color;
136 /* The different lists on which a web can be (based on the type). */
137 struct dlist *web_lists[(int) LAST_NODE_TYPE];
139 unsigned int last_def_id;
140 unsigned int last_use_id;
141 unsigned int last_num_webs;
142 int last_max_uid;
143 sbitmap last_check_uses;
144 unsigned int remember_conflicts;
146 int orig_max_uid;
148 HARD_REG_SET never_use_colors;
149 HARD_REG_SET usable_regs[N_REG_CLASSES];
150 unsigned int num_free_regs[N_REG_CLASSES];
151 int single_reg_in_regclass[N_REG_CLASSES];
152 HARD_REG_SET hardregs_for_mode[NUM_MACHINE_MODES];
153 HARD_REG_SET invalid_mode_change_regs;
154 unsigned char byte2bitcount[256];
156 unsigned int debug_new_regalloc = -1;
157 int flag_ra_biased = 0;
158 int flag_ra_improved_spilling = 0;
159 int flag_ra_ir_spilling = 0;
160 int flag_ra_optimistic_coalescing = 0;
161 int flag_ra_break_aliases = 0;
162 int flag_ra_merge_spill_costs = 0;
163 int flag_ra_spill_every_use = 0;
164 int flag_ra_dump_notes = 0;
166 /* Fast allocation of small objects, which live until the allocator
167 is done. Allocate an object of SIZE bytes. */
169 void *
170 ra_alloc (size_t size)
172 return obstack_alloc (&ra_obstack, size);
175 /* Like ra_alloc(), but clear the returned memory. */
177 void *
178 ra_calloc (size_t size)
180 void *p = obstack_alloc (&ra_obstack, size);
181 memset (p, 0, size);
182 return p;
185 /* Returns the number of hardregs in HARD_REG_SET RS. */
188 hard_regs_count (HARD_REG_SET rs)
190 int count = 0;
191 #ifdef HARD_REG_SET
192 while (rs)
194 unsigned char byte = rs & 0xFF;
195 rs >>= 8;
196 /* Avoid memory access, if nothing is set. */
197 if (byte)
198 count += byte2bitcount[byte];
200 #else
201 unsigned int ofs;
202 for (ofs = 0; ofs < HARD_REG_SET_LONGS; ofs++)
204 HARD_REG_ELT_TYPE elt = rs[ofs];
205 while (elt)
207 unsigned char byte = elt & 0xFF;
208 elt >>= 8;
209 if (byte)
210 count += byte2bitcount[byte];
213 #endif
214 return count;
217 /* Returns the first hardreg in HARD_REG_SET RS. Assumes there is at
218 least one reg in the set. */
220 static int
221 first_hard_reg (HARD_REG_SET rs)
223 int c;
225 for (c = 0; c < FIRST_PSEUDO_REGISTER; c++)
226 if (TEST_HARD_REG_BIT (rs, c))
227 break;
228 gcc_assert (c < FIRST_PSEUDO_REGISTER);
229 return c;
232 /* Basically like emit_move_insn (i.e. validifies constants and such),
233 but also handle MODE_CC moves (but then the operands must already
234 be basically valid. */
237 ra_emit_move_insn (rtx x, rtx y)
239 enum machine_mode mode = GET_MODE (x);
240 if (GET_MODE_CLASS (mode) == MODE_CC)
241 return emit_insn (gen_move_insn (x, y));
242 else
243 return emit_move_insn (x, y);
/* Per-insn def/use information, and the shared backing array for the
   ref pointers it slices into.  */
int insn_df_max_uid;
struct ra_insn_info *insn_df;
static struct ref **refs_for_insn_df;
250 /* Create the insn_df structure for each insn to have fast access to
251 all valid defs and uses in an insn. */
253 static void
254 create_insn_info (struct df *df)
256 rtx insn;
257 struct ref **act_refs;
258 insn_df_max_uid = get_max_uid ();
259 insn_df = xcalloc (insn_df_max_uid, sizeof (insn_df[0]));
260 refs_for_insn_df = xcalloc (df->def_id + df->use_id, sizeof (struct ref *));
261 act_refs = refs_for_insn_df;
262 /* We create those things backwards to mimic the order in which
263 the insns are visited in rewrite_program2() and live_in(). */
264 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
266 int uid = INSN_UID (insn);
267 unsigned int n;
268 struct df_link *link;
269 if (!INSN_P (insn))
270 continue;
271 for (n = 0, link = DF_INSN_DEFS (df, insn); link; link = link->next)
272 if (link->ref
273 && (DF_REF_REGNO (link->ref) >= FIRST_PSEUDO_REGISTER
274 || !TEST_HARD_REG_BIT (never_use_colors,
275 DF_REF_REGNO (link->ref))))
277 if (n == 0)
278 insn_df[uid].defs = act_refs;
279 insn_df[uid].defs[n++] = link->ref;
281 act_refs += n;
282 insn_df[uid].num_defs = n;
283 for (n = 0, link = DF_INSN_USES (df, insn); link; link = link->next)
284 if (link->ref
285 && (DF_REF_REGNO (link->ref) >= FIRST_PSEUDO_REGISTER
286 || !TEST_HARD_REG_BIT (never_use_colors,
287 DF_REF_REGNO (link->ref))))
289 if (n == 0)
290 insn_df[uid].uses = act_refs;
291 insn_df[uid].uses[n++] = link->ref;
293 act_refs += n;
294 insn_df[uid].num_uses = n;
296 gcc_assert (refs_for_insn_df + (df->def_id + df->use_id) >= act_refs);
299 /* Free the insn_df structures. */
301 static void
302 free_insn_info (void)
304 free (refs_for_insn_df);
305 refs_for_insn_df = NULL;
306 free (insn_df);
307 insn_df = NULL;
308 insn_df_max_uid = 0;
311 /* Search WEB for a subweb, which represents REG. REG needs to
312 be a SUBREG, and the inner reg of it needs to be the one which is
313 represented by WEB. Returns the matching subweb or NULL. */
315 struct web *
316 find_subweb (struct web *web, rtx reg)
318 struct web *w;
319 gcc_assert (GET_CODE (reg) == SUBREG);
320 for (w = web->subreg_next; w; w = w->subreg_next)
321 if (GET_MODE (w->orig_x) == GET_MODE (reg)
322 && SUBREG_BYTE (w->orig_x) == SUBREG_BYTE (reg))
323 return w;
324 return NULL;
327 /* Similar to find_subweb(), but matches according to SIZE_WORD,
328 a collection of the needed size and offset (in bytes). */
330 struct web *
331 find_subweb_2 (struct web *web, unsigned int size_word)
333 struct web *w = web;
334 if (size_word == GET_MODE_SIZE (GET_MODE (web->orig_x)))
335 /* size_word == size means BYTE_BEGIN(size_word) == 0. */
336 return web;
337 for (w = web->subreg_next; w; w = w->subreg_next)
339 unsigned int bl = rtx_to_bits (w->orig_x);
340 if (size_word == bl)
341 return w;
343 return NULL;
346 /* Returns the superweb for SUBWEB. */
348 struct web *
349 find_web_for_subweb_1 (struct web *subweb)
351 while (subweb->parent_web)
352 subweb = subweb->parent_web;
353 return subweb;
356 /* Determine if two hard register sets intersect.
357 Return 1 if they do. */
360 hard_regs_intersect_p (HARD_REG_SET *a, HARD_REG_SET *b)
362 HARD_REG_SET c;
363 COPY_HARD_REG_SET (c, *a);
364 AND_HARD_REG_SET (c, *b);
365 GO_IF_HARD_REG_SUBSET (c, reg_class_contents[(int) NO_REGS], lose);
366 return 1;
367 lose:
368 return 0;
371 /* Allocate and initialize the memory necessary for one pass of the
372 register allocator. */
374 static void
375 alloc_mem (struct df *df)
377 int i;
378 ra_build_realloc (df);
379 if (!live_at_end)
381 live_at_end = xmalloc ((last_basic_block + 2) * sizeof (bitmap));
382 for (i = 0; i < last_basic_block + 2; i++)
383 live_at_end[i] = BITMAP_XMALLOC ();
384 live_at_end += 2;
386 create_insn_info (df);
389 /* Free the memory which isn't necessary for the next pass. */
391 static void
392 free_mem (struct df *df ATTRIBUTE_UNUSED)
394 free_insn_info ();
395 ra_build_free ();
398 /* Free all memory allocated for the register allocator. Used, when
399 it's done. */
401 static void
402 free_all_mem (struct df *df)
404 unsigned int i;
405 live_at_end -= 2;
406 for (i = 0; i < (unsigned)last_basic_block + 2; i++)
407 BITMAP_XFREE (live_at_end[i]);
408 free (live_at_end);
410 ra_colorize_free_all ();
411 ra_build_free_all (df);
412 obstack_free (&ra_obstack, NULL);
415 static long ticks_build;
416 static long ticks_rebuild;
418 /* Perform one pass of allocation. Returns nonzero, if some spill code
419 was added, i.e. if the allocator needs to rerun. */
421 static int
422 one_pass (struct df *df, int rebuild)
424 long ticks = clock ();
425 int something_spilled;
426 remember_conflicts = 0;
428 /* Build the complete interference graph, or if this is not the first
429 pass, rebuild it incrementally. */
430 build_i_graph (df);
432 /* From now on, if we create new conflicts, we need to remember the
433 initial list of conflicts per web. */
434 remember_conflicts = 1;
435 if (!rebuild)
436 dump_igraph_machine ();
438 /* Colorize the I-graph. This results in either a list of
439 spilled_webs, in which case we need to run the spill phase, and
440 rerun the allocator, or that list is empty, meaning we are done. */
441 ra_colorize_graph (df);
443 last_max_uid = get_max_uid ();
444 /* actual_spill() might change WEBS(SPILLED) and even empty it,
445 so we need to remember it's state. */
446 something_spilled = !!WEBS(SPILLED);
448 /* Add spill code if necessary. */
449 if (something_spilled)
450 actual_spill ();
452 ticks = clock () - ticks;
453 if (rebuild)
454 ticks_rebuild += ticks;
455 else
456 ticks_build += ticks;
457 return something_spilled;
460 /* Initialize various arrays for the register allocator. */
462 static void
463 init_ra (void)
465 int i;
466 HARD_REG_SET rs;
467 #ifdef ELIMINABLE_REGS
468 static const struct {const int from, to; } eliminables[] = ELIMINABLE_REGS;
469 unsigned int j;
470 #endif
471 int need_fp
472 = (! flag_omit_frame_pointer
473 || (current_function_calls_alloca && EXIT_IGNORE_STACK)
474 || FRAME_POINTER_REQUIRED);
476 ra_colorize_init ();
478 /* We can't ever use any of the fixed regs. */
479 COPY_HARD_REG_SET (never_use_colors, fixed_reg_set);
481 /* Additionally don't even try to use hardregs, which we already
482 know are not eliminable. This includes also either the
483 hard framepointer or all regs which are eliminable into the
484 stack pointer, if need_fp is set. */
485 #ifdef ELIMINABLE_REGS
486 for (j = 0; j < ARRAY_SIZE (eliminables); j++)
488 if (! CAN_ELIMINATE (eliminables[j].from, eliminables[j].to)
489 || (eliminables[j].to == STACK_POINTER_REGNUM && need_fp))
490 for (i = hard_regno_nregs[eliminables[j].from][Pmode]; i--;)
491 SET_HARD_REG_BIT (never_use_colors, eliminables[j].from + i);
493 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
494 if (need_fp)
495 for (i = hard_regno_nregs[HARD_FRAME_POINTER_REGNUM][Pmode]; i--;)
496 SET_HARD_REG_BIT (never_use_colors, HARD_FRAME_POINTER_REGNUM + i);
497 #endif
499 #else
500 if (need_fp)
501 for (i = hard_regno_nregs[FRAME_POINTER_REGNUM][Pmode]; i--;)
502 SET_HARD_REG_BIT (never_use_colors, FRAME_POINTER_REGNUM + i);
503 #endif
505 /* Stack and argument pointer are also rather useless to us. */
506 for (i = hard_regno_nregs[STACK_POINTER_REGNUM][Pmode]; i--;)
507 SET_HARD_REG_BIT (never_use_colors, STACK_POINTER_REGNUM + i);
509 for (i = hard_regno_nregs[ARG_POINTER_REGNUM][Pmode]; i--;)
510 SET_HARD_REG_BIT (never_use_colors, ARG_POINTER_REGNUM + i);
512 for (i = 0; i < 256; i++)
514 unsigned char byte = ((unsigned) i) & 0xFF;
515 unsigned char count = 0;
516 while (byte)
518 if (byte & 1)
519 count++;
520 byte >>= 1;
522 byte2bitcount[i] = count;
525 for (i = 0; i < N_REG_CLASSES; i++)
527 int size;
528 COPY_HARD_REG_SET (rs, reg_class_contents[i]);
529 AND_COMPL_HARD_REG_SET (rs, never_use_colors);
530 size = hard_regs_count (rs);
531 num_free_regs[i] = size;
532 COPY_HARD_REG_SET (usable_regs[i], rs);
533 if (size == 1)
534 single_reg_in_regclass[i] = first_hard_reg (rs);
535 else
536 single_reg_in_regclass[i] = -1;
539 /* Setup hardregs_for_mode[].
540 We are not interested only in the beginning of a multi-reg, but in
541 all the hardregs involved. Maybe HARD_REGNO_MODE_OK() only ok's
542 for beginnings. */
543 for (i = 0; i < NUM_MACHINE_MODES; i++)
545 int reg, size;
546 CLEAR_HARD_REG_SET (rs);
547 for (reg = 0; reg < FIRST_PSEUDO_REGISTER; reg++)
548 if (HARD_REGNO_MODE_OK (reg, i)
549 /* Ignore VOIDmode and similar things. */
550 && (size = hard_regno_nregs[reg][i]) != 0
551 && (reg + size) <= FIRST_PSEUDO_REGISTER)
553 while (size--)
554 SET_HARD_REG_BIT (rs, reg + size);
556 COPY_HARD_REG_SET (hardregs_for_mode[i], rs);
559 CLEAR_HARD_REG_SET (invalid_mode_change_regs);
560 #ifdef CANNOT_CHANGE_MODE_CLASS
561 if (0)
562 for (i = 0; i < NUM_MACHINE_MODES; i++)
564 enum machine_mode from = (enum machine_mode) i;
565 enum machine_mode to;
566 for (to = VOIDmode; to < MAX_MACHINE_MODE; ++to)
568 int r;
569 for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
570 if (REG_CANNOT_CHANGE_MODE_P (from, to, r))
571 SET_HARD_REG_BIT (invalid_mode_change_regs, r);
574 #endif
576 for (an_unusable_color = 0; an_unusable_color < FIRST_PSEUDO_REGISTER;
577 an_unusable_color++)
578 if (TEST_HARD_REG_BIT (never_use_colors, an_unusable_color))
579 break;
580 gcc_assert (an_unusable_color != FIRST_PSEUDO_REGISTER);
582 orig_max_uid = get_max_uid ();
583 compute_bb_for_insn ();
584 ra_reg_renumber = NULL;
585 insns_with_deaths = sbitmap_alloc (orig_max_uid);
586 death_insns_max_uid = orig_max_uid;
587 sbitmap_ones (insns_with_deaths);
588 gcc_obstack_init (&ra_obstack);
591 /* Check the consistency of DF. This asserts if it violates some
592 invariances we expect. */
594 static void
595 check_df (struct df *df)
597 struct df_link *link;
598 rtx insn;
599 int regno;
600 unsigned int ui;
601 bitmap b = BITMAP_XMALLOC ();
602 bitmap empty_defs = BITMAP_XMALLOC ();
603 bitmap empty_uses = BITMAP_XMALLOC ();
605 /* Collect all the IDs of NULL references in the ID->REF arrays,
606 as df.c leaves them when updating the df structure. */
607 for (ui = 0; ui < df->def_id; ui++)
608 if (!df->defs[ui])
609 bitmap_set_bit (empty_defs, ui);
610 for (ui = 0; ui < df->use_id; ui++)
611 if (!df->uses[ui])
612 bitmap_set_bit (empty_uses, ui);
614 /* For each insn we check if the chain of references contain each
615 ref only once, doesn't contain NULL refs, or refs whose ID is invalid
616 (it df->refs[id] element is NULL). */
617 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
618 if (INSN_P (insn))
620 bitmap_clear (b);
621 for (link = DF_INSN_DEFS (df, insn); link; link = link->next)
623 gcc_assert (link->ref);
624 gcc_assert (!bitmap_bit_p (empty_defs, DF_REF_ID (link->ref)));
625 gcc_assert (!bitmap_bit_p (b, DF_REF_ID (link->ref)));
626 bitmap_set_bit (b, DF_REF_ID (link->ref));
629 bitmap_clear (b);
630 for (link = DF_INSN_USES (df, insn); link; link = link->next)
632 gcc_assert (link->ref);
633 gcc_assert (!bitmap_bit_p (empty_uses, DF_REF_ID (link->ref)));
634 gcc_assert (!bitmap_bit_p (b, DF_REF_ID (link->ref)));
635 bitmap_set_bit (b, DF_REF_ID (link->ref));
639 /* Now the same for the chains per register number. */
640 for (regno = 0; regno < max_reg_num (); regno++)
642 bitmap_clear (b);
643 for (link = df->regs[regno].defs; link; link = link->next)
645 gcc_assert (link->ref);
646 gcc_assert (!bitmap_bit_p (empty_defs, DF_REF_ID (link->ref)));
647 gcc_assert (!bitmap_bit_p (b, DF_REF_ID (link->ref)));
648 bitmap_set_bit (b, DF_REF_ID (link->ref));
651 bitmap_clear (b);
652 for (link = df->regs[regno].uses; link; link = link->next)
654 gcc_assert (link->ref);
655 gcc_assert (!bitmap_bit_p (empty_uses, DF_REF_ID (link->ref)));
656 gcc_assert (!bitmap_bit_p (b, DF_REF_ID (link->ref)));
657 bitmap_set_bit (b, DF_REF_ID (link->ref));
661 BITMAP_XFREE (empty_uses);
662 BITMAP_XFREE (empty_defs);
663 BITMAP_XFREE (b);
666 /* Main register allocator entry point. */
668 void
669 reg_alloc (void)
671 int changed;
672 FILE *ra_dump_file = dump_file;
673 rtx last = get_last_insn ();
675 if (! INSN_P (last))
676 last = prev_real_insn (last);
677 /* If this is an empty function we shouldn't do all the following,
678 but instead just setup what's necessary, and return. */
680 /* We currently rely on the existence of the return value USE as
681 one of the last insns. Add it if it's not there anymore. */
682 if (last)
684 edge e;
685 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
687 basic_block bb = e->src;
688 last = BB_END (bb);
689 if (!INSN_P (last) || GET_CODE (PATTERN (last)) != USE)
691 rtx insns;
692 start_sequence ();
693 use_return_register ();
694 insns = get_insns ();
695 end_sequence ();
696 emit_insn_after (insns, last);
701 /* Setup debugging levels. */
702 switch (0)
704 /* Some useful presets of the debug level, I often use. */
705 case 0: debug_new_regalloc = DUMP_EVER; break;
706 case 1: debug_new_regalloc = DUMP_COSTS; break;
707 case 2: debug_new_regalloc = DUMP_IGRAPH_M; break;
708 case 3: debug_new_regalloc = DUMP_COLORIZE + DUMP_COSTS; break;
709 case 4: debug_new_regalloc = DUMP_COLORIZE + DUMP_COSTS + DUMP_WEBS;
710 break;
711 case 5: debug_new_regalloc = DUMP_FINAL_RTL + DUMP_COSTS +
712 DUMP_CONSTRAINTS;
713 break;
714 case 6: debug_new_regalloc = DUMP_VALIDIFY; break;
716 if (!dump_file)
717 debug_new_regalloc = 0;
719 /* Run regclass first, so we know the preferred and alternate classes
720 for each pseudo. Deactivate emitting of debug info, if it's not
721 explicitly requested. */
722 if ((debug_new_regalloc & DUMP_REGCLASS) == 0)
723 dump_file = NULL;
724 regclass (get_insns (), max_reg_num (), dump_file);
725 dump_file = ra_dump_file;
727 /* We don't use those NOTEs, and as we anyway change all registers,
728 they only make problems later. */
729 count_or_remove_death_notes (NULL, 1);
731 /* Initialize the different global arrays and regsets. */
732 init_ra ();
734 /* And some global variables. */
735 ra_pass = 0;
736 no_new_pseudos = 0;
737 max_normal_pseudo = (unsigned) max_reg_num ();
738 ra_rewrite_init ();
739 last_def_id = 0;
740 last_use_id = 0;
741 last_num_webs = 0;
742 last_max_uid = 0;
743 last_check_uses = NULL;
744 live_at_end = NULL;
745 WEBS(INITIAL) = NULL;
746 WEBS(FREE) = NULL;
747 memset (hardreg2web, 0, sizeof (hardreg2web));
748 ticks_build = ticks_rebuild = 0;
750 /* The default is to use optimistic coalescing with interference
751 region spilling, without biased coloring. */
752 flag_ra_biased = 0;
753 flag_ra_spill_every_use = 0;
754 flag_ra_improved_spilling = 1;
755 flag_ra_ir_spilling = 1;
756 flag_ra_break_aliases = 0;
757 flag_ra_optimistic_coalescing = 1;
758 flag_ra_merge_spill_costs = 1;
759 if (flag_ra_optimistic_coalescing)
760 flag_ra_break_aliases = 1;
761 flag_ra_dump_notes = 0;
763 /* Allocate the global df structure. */
764 df = df_init ();
766 /* This is the main loop, calling one_pass as long as there are still
767 some spilled webs. */
770 ra_debug_msg (DUMP_NEARLY_EVER, "RegAlloc Pass %d\n\n", ra_pass);
771 if (ra_pass++ > 40)
772 internal_error ("Didn't find a coloring.\n");
774 /* First collect all the register refs and put them into
775 chains per insn, and per regno. In later passes only update
776 that info from the new and modified insns. */
777 df_analyze (df, (ra_pass == 1) ? 0 : (bitmap) -1,
778 DF_HARD_REGS | DF_RD_CHAIN | DF_RU_CHAIN | DF_FOR_REGALLOC);
780 if ((debug_new_regalloc & DUMP_DF) != 0)
782 rtx insn;
783 df_dump (df, DF_HARD_REGS, dump_file);
784 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
785 if (INSN_P (insn))
786 df_insn_debug_regno (df, insn, dump_file);
788 check_df (df);
790 /* Now allocate the memory needed for this pass, or (if it's not the
791 first pass), reallocate only additional memory. */
792 alloc_mem (df);
794 /* Build and colorize the interference graph, and possibly emit
795 spill insns. This also might delete certain move insns. */
796 changed = one_pass (df, ra_pass > 1);
798 /* If that produced no changes, the graph was colorizable. */
799 if (!changed)
801 /* Change the insns to refer to the new pseudos (one per web). */
802 emit_colors (df);
803 /* Already setup a preliminary reg_renumber[] array, but don't
804 free our own version. reg_renumber[] will again be destroyed
805 later. We right now need it in dump_constraints() for
806 constrain_operands(1) whose subproc sometimes reference
807 it (because we are checking strictly, i.e. as if
808 after reload). */
809 setup_renumber (0);
810 /* Delete some more of the coalesced moves. */
811 delete_moves ();
812 dump_constraints ();
814 else
816 /* If there were changes, this means spill code was added,
817 therefore repeat some things, including some initialization
818 of global data structures. */
819 if ((debug_new_regalloc & DUMP_REGCLASS) == 0)
820 dump_file = NULL;
821 /* We have new pseudos (the stackwebs). */
822 allocate_reg_info (max_reg_num (), FALSE, FALSE);
823 /* And new insns. */
824 compute_bb_for_insn ();
825 /* Some of them might be dead. */
826 delete_trivially_dead_insns (get_insns (), max_reg_num ());
827 /* Those new pseudos need to have their REFS count set. */
828 reg_scan_update (get_insns (), NULL, max_regno);
829 max_regno = max_reg_num ();
830 /* And they need useful classes too. */
831 regclass (get_insns (), max_reg_num (), dump_file);
832 dump_file = ra_dump_file;
834 /* Remember the number of defs and uses, so we can distinguish
835 new from old refs in the next pass. */
836 last_def_id = df->def_id;
837 last_use_id = df->use_id;
840 /* Output the graph, and possibly the current insn sequence. */
841 dump_ra (df);
842 if (changed && (debug_new_regalloc & DUMP_RTL) != 0)
844 ra_print_rtl_with_bb (dump_file, get_insns ());
845 fflush (dump_file);
848 /* Reset the web lists. */
849 reset_lists ();
850 free_mem (df);
852 while (changed);
854 /* We are done with allocation, free all memory and output some
855 debug info. */
856 free_all_mem (df);
857 df_finish (df);
858 if ((debug_new_regalloc & DUMP_RESULTS) == 0)
859 dump_cost (DUMP_COSTS);
860 ra_debug_msg (DUMP_COSTS, "ticks for build-phase: %ld\n", ticks_build);
861 ra_debug_msg (DUMP_COSTS, "ticks for rebuild-phase: %ld\n", ticks_rebuild);
862 if ((debug_new_regalloc & (DUMP_FINAL_RTL | DUMP_RTL)) != 0)
863 ra_print_rtl_with_bb (dump_file, get_insns ());
865 /* We might have new pseudos, so allocate the info arrays for them. */
866 if ((debug_new_regalloc & DUMP_SM) == 0)
867 dump_file = NULL;
868 no_new_pseudos = 0;
869 allocate_reg_info (max_reg_num (), FALSE, FALSE);
870 no_new_pseudos = 1;
871 dump_file = ra_dump_file;
873 /* Some spill insns could've been inserted after trapping calls, i.e.
874 at the end of a basic block, which really ends at that call.
875 Fixup that breakages by adjusting basic block boundaries. */
876 fixup_abnormal_edges ();
878 /* Cleanup the flow graph. */
879 if ((debug_new_regalloc & DUMP_LAST_FLOW) == 0)
880 dump_file = NULL;
881 life_analysis (dump_file,
882 PROP_DEATH_NOTES | PROP_LOG_LINKS | PROP_REG_INFO);
883 cleanup_cfg (CLEANUP_EXPENSIVE);
884 recompute_reg_usage (get_insns (), TRUE);
885 if (dump_file)
886 dump_flow_info (dump_file);
887 dump_file = ra_dump_file;
889 /* update_equiv_regs() can't be called after register allocation.
890 It might delete some pseudos, and insert other insns setting
891 up those pseudos in different places. This of course screws up
892 the allocation because that may destroy a hardreg for another
893 pseudo.
894 XXX we probably should do something like that on our own. I.e.
895 creating REG_EQUIV notes. */
896 /*update_equiv_regs ();*/
898 /* Setup the reg_renumber[] array for reload. */
899 setup_renumber (1);
900 sbitmap_free (insns_with_deaths);
902 /* Remove REG_DEAD notes which are incorrectly set. See the docu
903 of that function. */
904 remove_suspicious_death_notes ();
906 if ((debug_new_regalloc & DUMP_LAST_RTL) != 0)
907 ra_print_rtl_with_bb (dump_file, get_insns ());
908 dump_static_insn_cost (dump_file,
909 "after allocation/spilling, before reload", NULL);
911 /* Allocate the reg_equiv_memory_loc array for reload. */
912 VARRAY_GROW (reg_equiv_memory_loc_varray, max_regno);
913 reg_equiv_memory_loc = &VARRAY_RTX (reg_equiv_memory_loc_varray, 0);
914 /* And possibly initialize it. */
915 allocate_initial_values (reg_equiv_memory_loc);
916 /* And one last regclass pass just before reload. */
917 regclass (get_insns (), max_reg_num (), dump_file);
921 vim:cinoptions={.5s,g0,p5,t0,(0,^-0.5s,n-0.5s:tw=78:cindent:sw=4: