/* Data flow analysis for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
/* This file contains the data flow analysis pass of the compiler.  It
   computes data flow information which tells combine_instructions
   which insns to consider combining and controls register allocation.

   Additional data flow information that is too bulky to record is
   generated during the analysis, and is used at that time to create
   autoincrement and autodecrement addressing.

   The first step is dividing the function into basic blocks.
   find_basic_blocks does this.  Then life_analysis determines
   where each register is live and where it is dead.

   ** find_basic_blocks **

   find_basic_blocks divides the current function's rtl into basic
   blocks and constructs the CFG.  The blocks are recorded in the
   basic_block_info array; the CFG exists in the edge structures
   referenced by the blocks.

   find_basic_blocks also finds any unreachable loops and deletes them.

   ** life_analysis **

   life_analysis is called immediately after find_basic_blocks.
   It uses the basic block information to determine where each
   hard or pseudo register is live.

   ** live-register info **

   The information about where each register is live is in two parts:
   the REG_NOTES of insns, and the vector basic_block->global_live_at_start.

   basic_block->global_live_at_start has an element for each basic
   block, and the element is a bit-vector with a bit for each hard or
   pseudo register.  The bit is 1 if the register is live at the
   beginning of the basic block.

   Two types of elements can be added to an insn's REG_NOTES.
   A REG_DEAD note is added to an insn's REG_NOTES for any register
   that meets both of two conditions:  The value in the register is not
   needed in subsequent insns and the insn does not replace the value in
   the register (in the case of multi-word hard registers, the value in
   each register must be replaced by the insn to avoid a REG_DEAD note).

   In the vast majority of cases, an object in a REG_DEAD note will be
   used somewhere in the insn.  The (rare) exception to this is if an
   insn uses a multi-word hard register and only some of the registers are
   needed in subsequent insns.  In that case, REG_DEAD notes will be
   provided for those hard registers that are not subsequently needed.
   Partial REG_DEAD notes of this type do not occur when an insn sets
   only some of the hard registers used in such a multi-word operand;
   omitting REG_DEAD notes for objects stored in an insn is optional and
   the desire to do so does not justify the complexity of the partial
   REG_DEAD notes.

   REG_UNUSED notes are added for each register that is set by the insn
   but is unused subsequently (if every register set by the insn is unused
   and the insn does not reference memory or have some other side-effect,
   the insn is deleted instead).  If only part of a multi-word hard
   register is used in a subsequent insn, REG_UNUSED notes are made for
   the parts that will not be used.

   To determine which registers are live after any insn, one can
   start from the beginning of the basic block and scan insns, noting
   which registers are set by each insn and which die there.

   ** Other actions of life_analysis **

   life_analysis sets up the LOG_LINKS fields of insns because the
   information needed to do so is readily available.

   life_analysis deletes insns whose only effect is to store a value
   that is never used.

   life_analysis notices cases where a reference to a register as
   a memory address can be combined with a preceding or following
   incrementation or decrementation of the register.  The separate
   instruction to increment or decrement is deleted and the address
   is changed to a POST_INC or similar rtx.

   Each time an incrementing or decrementing address is created,
   a REG_INC element is added to the insn's REG_NOTES list.

   life_analysis fills in certain vectors containing information about
   register usage: REG_N_REFS, REG_N_DEATHS, REG_N_SETS, REG_LIVE_LENGTH,
   REG_N_CALLS_CROSSED and REG_BASIC_BLOCK.

   life_analysis sets current_function_sp_is_unchanging if the function
   doesn't modify the stack pointer.  */
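
/* To illustrate the structures described above: the sketch below (kept
   under #if 0; it is not part of this pass) shows how a hypothetical
   client could consume the liveness data once life_analysis has run.
   The two helper names are invented for the example; REGNO_REG_SET_P
   and find_regno_note are the real accessors from basic-block.h and
   rtl.h, both already included by this file.  */
#if 0
/* Nonzero if register REGNO is live at the start of basic block BLOCK.  */
static int
regno_live_at_start_p (block, regno)
     int block;
     int regno;
{
  return REGNO_REG_SET_P (BASIC_BLOCK (block)->global_live_at_start, regno);
}

/* Nonzero if INSN has a REG_DEAD note for register REGNO, i.e. the
   value in REGNO dies here and is not needed by subsequent insns.  */
static int
regno_dies_here_p (insn, regno)
     rtx insn;
     int regno;
{
  return find_regno_note (insn, REG_DEAD, regno) != NULL_RTX;
}
#endif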
/* TODO:

   Split out from life_analysis:
	- local property discovery (bb->local_live, bb->local_set)
	- global property computation
	- log links creation
	- pre/post modify transformation
*/
#include "config.h"
#include "system.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "basic-block.h"
#include "insn-config.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "output.h"
#include "function.h"
#include "except.h"
#include "toplev.h"
#include "recog.h"
#include "insn-flags.h"
#include "expr.h"

#include "obstack.h"

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free
/* EXIT_IGNORE_STACK should be nonzero if, when returning from a function,
   the stack pointer does not matter.  The value is tested only in
   functions that have frame pointers.
   No definition is equivalent to always zero.  */
#ifndef EXIT_IGNORE_STACK
#define EXIT_IGNORE_STACK 0
#endif

#ifndef HAVE_epilogue
#define HAVE_epilogue 0
#endif
#ifndef HAVE_prologue
#define HAVE_prologue 0
#endif
#ifndef HAVE_sibcall_epilogue
#define HAVE_sibcall_epilogue 0
#endif
/* The contents of the current function definition are allocated
   in this obstack, and all are freed at the end of the function.
   For top-level functions, this is temporary_obstack.
   Separate obstacks are made for nested functions.  */

extern struct obstack *function_obstack;

/* Number of basic blocks in the current function.  */

int n_basic_blocks;

/* Number of edges in the current function.  */

int n_edges;

/* The basic block array.  */

varray_type basic_block_info;
/* The special entry and exit blocks.  */

struct basic_block_def entry_exit_blocks[2]
= {{NULL,			/* head */
    NULL,			/* end */
    NULL,			/* pred */
    NULL,			/* succ */
    NULL,			/* local_set */
    NULL,			/* global_live_at_start */
    NULL,			/* global_live_at_end */
    NULL,			/* aux */
    ENTRY_BLOCK,		/* index */
    0,				/* loop_depth */
    -1, -1			/* eh_beg, eh_end */
   },
   {NULL,			/* head */
    NULL,			/* end */
    NULL,			/* pred */
    NULL,			/* succ */
    NULL,			/* local_set */
    NULL,			/* global_live_at_start */
    NULL,			/* global_live_at_end */
    NULL,			/* aux */
    EXIT_BLOCK,			/* index */
    0,				/* loop_depth */
    -1, -1			/* eh_beg, eh_end */
   }};
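
/* The two elements above are normally addressed through the
   ENTRY_BLOCK_PTR and EXIT_BLOCK_PTR macros from basic-block.h, and
   their edge lists are walked just like those of any numbered block.
   A minimal sketch (illustrative only, kept under #if 0; the helper
   name is invented):  */
#if 0
static int
count_exit_predecessors ()
{
  edge e;
  int n = 0;

  for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
    n++;
  return n;
}
#endif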
/* Nonzero if the second flow pass has completed.  */
int flow2_completed;

/* Maximum register number used in this function, plus one.  */

int max_regno;

/* Indexed by n, giving various register information */

varray_type reg_n_info;

/* Size of the reg_n_info table.  */

unsigned int reg_n_max;

/* Element N is the next insn that uses (hard or pseudo) register number N
   within the current basic block; or zero, if there is no such insn.
   This is valid only during the final backward scan in propagate_block.  */

static rtx *reg_next_use;

/* Size of a regset for the current function,
   in (1) bytes and (2) elements.  */

int regset_bytes;
int regset_size;

/* Regset of regs live when calls to `setjmp'-like functions happen.  */
/* ??? Does this exist only for the setjmp-clobbered warning message?  */

regset regs_live_at_setjmp;

/* List made of EXPR_LIST rtx's which gives pairs of pseudo registers
   that have to go in the same hard reg.
   The first two regs in the list are a pair, and the next two
   are another pair, etc.  */
rtx regs_may_share;

/* Depth within loops of basic block being scanned for lifetime analysis,
   plus one.  This is the weight attached to references to registers.  */

static int loop_depth;

/* During propagate_block, this is non-zero if the value of CC0 is live.  */

static int cc0_live;

/* During propagate_block, this contains a list of all the MEMs we are
   tracking for dead store elimination.  */

static rtx mem_set_list;

/* Set of registers that may be eliminable.  These are handled specially
   in updating regs_ever_live.  */

static HARD_REG_SET elim_reg_set;

/* The basic block structure for every insn, indexed by uid.  */

varray_type basic_block_for_insn;

/* The labels mentioned in non-jump rtl.  Valid during find_basic_blocks.  */
/* ??? Should probably be using LABEL_NUSES instead.  It would take a
   bit of surgery to be able to use or co-opt the routines in jump.  */

static rtx label_value_list;
/* Forward declarations */
static int count_basic_blocks		PARAMS ((rtx));
static rtx find_basic_blocks_1		PARAMS ((rtx));
static void clear_edges			PARAMS ((void));
static void make_edges			PARAMS ((rtx));
static void make_label_edge		PARAMS ((sbitmap *, basic_block,
						 rtx, int));
static void make_eh_edge		PARAMS ((sbitmap *, eh_nesting_info *,
						 basic_block, rtx, int));
static void mark_critical_edges		PARAMS ((void));
static void move_stray_eh_region_notes	PARAMS ((void));
static void record_active_eh_regions	PARAMS ((rtx));

static void commit_one_edge_insertion	PARAMS ((edge));

static void delete_unreachable_blocks	PARAMS ((void));
static void delete_eh_regions		PARAMS ((void));
static int can_delete_note_p		PARAMS ((rtx));
static int delete_block			PARAMS ((basic_block));
static void expunge_block		PARAMS ((basic_block));
static int can_delete_label_p		PARAMS ((rtx));
static int merge_blocks_move_predecessor_nojumps PARAMS ((basic_block,
							  basic_block));
static int merge_blocks_move_successor_nojumps PARAMS ((basic_block,
							basic_block));
static void merge_blocks_nomove		PARAMS ((basic_block, basic_block));
static int merge_blocks			PARAMS ((edge, basic_block,
						 basic_block));
static void try_merge_blocks		PARAMS ((void));
static void tidy_fallthru_edge		PARAMS ((edge, basic_block,
						 basic_block));
static void tidy_fallthru_edges		PARAMS ((void));
static int verify_wide_reg_1		PARAMS ((rtx *, void *));
static void verify_wide_reg		PARAMS ((int, rtx, rtx));
static void verify_local_live_at_start	PARAMS ((regset, basic_block));
static int set_noop_p			PARAMS ((rtx));
static int noop_move_p			PARAMS ((rtx));
static void delete_noop_moves		PARAMS ((rtx));
static void notice_stack_pointer_modification_1 PARAMS ((rtx, rtx, void *));
static void notice_stack_pointer_modification PARAMS ((rtx));
static void mark_reg			PARAMS ((rtx, void *));
static void mark_regs_live_at_end	PARAMS ((regset));
static void calculate_global_regs_live	PARAMS ((sbitmap, sbitmap, int));
static void propagate_block		PARAMS ((basic_block, regset,
						 regset, int));
static int insn_dead_p			PARAMS ((rtx, regset, int, rtx));
static int libcall_dead_p		PARAMS ((rtx, regset, rtx, rtx));
static void mark_set_regs		PARAMS ((regset, regset, rtx,
						 rtx, regset, int));
static void mark_set_1			PARAMS ((regset, regset, rtx,
						 rtx, regset, int));
#ifdef AUTO_INC_DEC
static void find_auto_inc		PARAMS ((regset, rtx, rtx));
static int try_pre_increment_1		PARAMS ((rtx));
static int try_pre_increment		PARAMS ((rtx, rtx, HOST_WIDE_INT));
#endif
static void mark_used_regs		PARAMS ((regset, regset, rtx, int, rtx));
void dump_flow_info			PARAMS ((FILE *));
void debug_flow_info			PARAMS ((void));
static void dump_edge_info		PARAMS ((FILE *, edge, int));

static void count_reg_sets_1		PARAMS ((rtx));
static void count_reg_sets		PARAMS ((rtx));
static void count_reg_references	PARAMS ((rtx));
static void invalidate_mems_from_autoinc PARAMS ((rtx));
static void remove_fake_successors	PARAMS ((basic_block));
static void flow_nodes_print		PARAMS ((const char *, const sbitmap,
						 FILE *));
static void flow_exits_print		PARAMS ((const char *, const edge *,
						 int, FILE *));
static void flow_loops_cfg_dump		PARAMS ((const struct loops *, FILE *));
static int flow_loop_nested_p		PARAMS ((struct loop *, struct loop *));
static int flow_loop_exits_find		PARAMS ((const sbitmap, edge **));
static int flow_loop_nodes_find		PARAMS ((basic_block, basic_block,
						 sbitmap));
static int flow_depth_first_order_compute PARAMS ((int *));
static basic_block flow_loop_pre_header_find PARAMS ((basic_block,
						      const sbitmap *));
static void flow_loop_tree_node_add	PARAMS ((struct loop *, struct loop *));
static void flow_loops_tree_build	PARAMS ((struct loops *));
static int flow_loop_level_compute	PARAMS ((struct loop *, int));
static int flow_loops_level_compute	PARAMS ((struct loops *));

/* This function is always defined so it can be called from the
   debugger, and it is declared extern so we don't get warnings about
   it being unused.  */
void verify_flow_info			PARAMS ((void));
int flow_loop_outside_edge_p		PARAMS ((const struct loop *, edge));
void clear_log_links			PARAMS ((rtx));
/* Find basic blocks of the current function.
   F is the first insn of the function and NREGS the number of register
   numbers in use.  */

void
find_basic_blocks (f, nregs, file)
     rtx f;
     int nregs ATTRIBUTE_UNUSED;
     FILE *file ATTRIBUTE_UNUSED;
{
  int max_uid;

  /* Flush out existing data.  */
  if (basic_block_info != NULL)
    {
      int i;

      clear_edges ();

      /* Clear bb->aux on all extant basic blocks.  We'll use this as a
	 tag for reuse during create_basic_block, just in case some pass
	 copies around basic block notes improperly.  */
      for (i = 0; i < n_basic_blocks; ++i)
	BASIC_BLOCK (i)->aux = NULL;

      VARRAY_FREE (basic_block_info);
    }

  n_basic_blocks = count_basic_blocks (f);

  /* Size the basic block table.  The actual structures will be allocated
     by find_basic_blocks_1, since we want to keep the structure pointers
     stable across calls to find_basic_blocks.  */
  /* ??? This whole issue would be much simpler if we called find_basic_blocks
     exactly once, and thereafter we don't have a single long chain of
     instructions at all until close to the end of compilation when we
     actually lay them out.  */

  VARRAY_BB_INIT (basic_block_info, n_basic_blocks, "basic_block_info");

  label_value_list = find_basic_blocks_1 (f);

  /* Record the block to which an insn belongs.  */
  /* ??? This should be done another way, by which (perhaps) a label is
     tagged directly with the basic block that it starts.  It is used for
     more than that currently, but IMO that is the only valid use.  */

  max_uid = get_max_uid ();
#ifdef AUTO_INC_DEC
  /* Leave space for insns life_analysis makes in some cases for auto-inc.
     These cases are rare, so we don't need too much space.  */
  max_uid += max_uid / 10;
#endif

  compute_bb_for_insn (max_uid);

  /* Discover the edges of our cfg.  */
  record_active_eh_regions (f);
  make_edges (label_value_list);

  /* Do very simple cleanup now, for the benefit of code that runs between
     here and cleanup_cfg, e.g. thread_prologue_and_epilogue_insns.  */
  tidy_fallthru_edges ();

  mark_critical_edges ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
}
/* Count the basic blocks of the function.  */

static int
count_basic_blocks (f)
     rtx f;
{
  register rtx insn;
  register RTX_CODE prev_code;
  register int count = 0;
  int eh_region = 0;
  int call_had_abnormal_edge = 0;
  rtx prev_call = NULL_RTX;

  prev_code = JUMP_INSN;
  for (insn = f; insn; insn = NEXT_INSN (insn))
    {
      register RTX_CODE code = GET_CODE (insn);

      if (code == CODE_LABEL
	  || (GET_RTX_CLASS (code) == 'i'
	      && (prev_code == JUMP_INSN
		  || prev_code == BARRIER
		  || (prev_code == CALL_INSN && call_had_abnormal_edge))))
	count++;

      /* Record whether this call created an edge.  */
      if (code == CALL_INSN)
	{
	  rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
	  int region = (note ? INTVAL (XEXP (note, 0)) : 1);
	  prev_call = insn;
	  call_had_abnormal_edge = 0;

	  /* If there is an EH region or rethrow, we have an edge.  */
	  if ((eh_region && region > 0)
	      || find_reg_note (insn, REG_EH_RETHROW, NULL_RTX))
	    call_had_abnormal_edge = 1;
	  else
	    {
	      /* If there is a nonlocal goto label and the specified
		 region number isn't -1, we have an edge.  (0 means
		 no throw, but might have a nonlocal goto).  */
	      if (nonlocal_goto_handler_labels && region >= 0)
		call_had_abnormal_edge = 1;
	    }
	}
      else if (code != NOTE)
	prev_call = NULL_RTX;

      if (code != NOTE)
	prev_code = code;
      else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
	++eh_region;
      else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
	--eh_region;
    }

  /* The rest of the compiler works a bit smoother when we don't have to
     check for the edge case of do-nothing functions with no basic blocks.  */
  if (count == 0)
    {
      emit_insn (gen_rtx_USE (VOIDmode, const0_rtx));
      count = 1;
    }

  return count;
}
/* Find all basic blocks of the function whose first insn is F.

   Collect and return a list of labels whose addresses are taken.  This
   will be used in make_edges for use with computed gotos.  */

static rtx
find_basic_blocks_1 (f)
     rtx f;
{
  register rtx insn, next;
  int call_has_abnormal_edge = 0;
  int i = 0;
  rtx bb_note = NULL_RTX;
  rtx eh_list = NULL_RTX;
  rtx label_value_list = NULL_RTX;
  rtx head = NULL_RTX;
  rtx end = NULL_RTX;

  /* We process the instructions in a slightly different way than we did
     previously.  This is so that we see a NOTE_BASIC_BLOCK after we have
     closed out the previous block, so that it gets attached at the proper
     place.  Since this form should be equivalent to the previous,
     count_basic_blocks continues to use the old form as a check.  */

  for (insn = f; insn; insn = next)
    {
      enum rtx_code code = GET_CODE (insn);

      next = NEXT_INSN (insn);

      if (code == CALL_INSN)
	{
	  /* Record whether this call created an edge.  */
	  rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
	  int region = (note ? INTVAL (XEXP (note, 0)) : 1);
	  call_has_abnormal_edge = 0;

	  /* If there is an EH region or rethrow, we have an edge.  */
	  if ((eh_list && region > 0)
	      || find_reg_note (insn, REG_EH_RETHROW, NULL_RTX))
	    call_has_abnormal_edge = 1;
	  else
	    {
	      /* If there is a nonlocal goto label and the specified
		 region number isn't -1, we have an edge.  (0 means
		 no throw, but might have a nonlocal goto).  */
	      if (nonlocal_goto_handler_labels && region >= 0)
		call_has_abnormal_edge = 1;
	    }
	}

      switch (code)
	{
	case NOTE:
	  {
	    int kind = NOTE_LINE_NUMBER (insn);

	    /* Keep a LIFO list of the currently active exception notes.  */
	    if (kind == NOTE_INSN_EH_REGION_BEG)
	      eh_list = alloc_INSN_LIST (insn, eh_list);
	    else if (kind == NOTE_INSN_EH_REGION_END)
	      {
		rtx t = eh_list;
		eh_list = XEXP (eh_list, 1);
		free_INSN_LIST_node (t);
	      }

	    /* Look for basic block notes with which to keep the
	       basic_block_info pointers stable.  Unthread the note now;
	       we'll put it back at the right place in create_basic_block.
	       Or not at all if we've already found a note in this block.  */
	    else if (kind == NOTE_INSN_BASIC_BLOCK)
	      {
		if (bb_note == NULL_RTX)
		  bb_note = insn;
		next = flow_delete_insn (insn);
	      }

	    break;
	  }

	case CODE_LABEL:
	  /* A basic block starts at a label.  If we've closed one off due
	     to a barrier or some such, no need to do it again.  */
	  if (head != NULL_RTX)
	    {
	      /* While we now have edge lists with which other portions of
		 the compiler might determine a call ending a basic block
		 does not imply an abnormal edge, it will be a bit before
		 everything can be updated.  So continue to emit a noop at
		 the end of such a block.  */
	      if (GET_CODE (end) == CALL_INSN
		  && ! SIBLING_CALL_P (end))
		{
		  rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
		  end = emit_insn_after (nop, end);
		}

	      create_basic_block (i++, head, end, bb_note);
	      bb_note = NULL_RTX;
	    }
	  head = end = insn;
	  break;

	case JUMP_INSN:
	  /* A basic block ends at a jump.  */
	  if (head == NULL_RTX)
	    head = insn;
	  else
	    {
	      /* ??? Make a special check for table jumps.  The way this
		 happens is truly and amazingly gross.  We are about to
		 create a basic block that contains just a code label and
		 an addr*vec jump insn.  Worse, an addr_diff_vec creates
		 its own natural loop.

		 Prevent this bit of brain damage, pasting things together
		 correctly in make_edges.

		 The correct solution involves emitting the table directly
		 on the tablejump instruction as a note, or JUMP_LABEL.  */

	      if (GET_CODE (PATTERN (insn)) == ADDR_VEC
		  || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
		{
		  head = end = NULL;
		  n_basic_blocks--;
		  break;
		}
	    }
	  end = insn;
	  goto new_bb_inclusive;

	case BARRIER:
	  /* A basic block ends at a barrier.  It may be that an unconditional
	     jump already closed the basic block -- no need to do it again.  */
	  if (head == NULL_RTX)
	    break;

	  /* While we now have edge lists with which other portions of the
	     compiler might determine a call ending a basic block does not
	     imply an abnormal edge, it will be a bit before everything can
	     be updated.  So continue to emit a noop at the end of such a
	     block.  */
	  if (GET_CODE (end) == CALL_INSN
	      && ! SIBLING_CALL_P (end))
	    {
	      rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
	      end = emit_insn_after (nop, end);
	    }
	  goto new_bb_exclusive;

	case CALL_INSN:
	  /* A basic block ends at a call that can either throw or
	     do a non-local goto.  */
	  if (call_has_abnormal_edge)
	    {
	    new_bb_inclusive:
	      if (head == NULL_RTX)
		head = insn;
	      end = insn;

	    new_bb_exclusive:
	      create_basic_block (i++, head, end, bb_note);
	      head = end = NULL_RTX;
	      bb_note = NULL_RTX;
	      break;
	    }
	  /* FALLTHRU */

	default:
	  if (GET_RTX_CLASS (code) == 'i')
	    {
	      if (head == NULL_RTX)
		head = insn;
	      end = insn;
	    }
	  break;
	}

      if (GET_RTX_CLASS (code) == 'i')
	{
	  rtx note;

	  /* Make a list of all labels referred to other than by jumps
	     (which just don't have the REG_LABEL notes).

	     Make a special exception for labels followed by an ADDR*VEC,
	     as this would be a part of the tablejump setup code.

	     Make a special exception for the eh_return_stub_label, which
	     we know isn't part of any otherwise visible control flow.  */

	  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	    if (REG_NOTE_KIND (note) == REG_LABEL)
	      {
		rtx lab = XEXP (note, 0), next;

		if (lab == eh_return_stub_label)
		  ;
		else if ((next = next_nonnote_insn (lab)) != NULL
			 && GET_CODE (next) == JUMP_INSN
			 && (GET_CODE (PATTERN (next)) == ADDR_VEC
			     || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
		  ;
		else
		  label_value_list
		    = alloc_EXPR_LIST (0, XEXP (note, 0), label_value_list);
	      }
	}
    }

  if (head != NULL_RTX)
    create_basic_block (i++, head, end, bb_note);

  if (i != n_basic_blocks)
    abort ();

  return label_value_list;
}
/* Tidy the CFG by deleting unreachable code and whatnot.  */

void
cleanup_cfg (f)
     rtx f;
{
  delete_unreachable_blocks ();
  move_stray_eh_region_notes ();
  record_active_eh_regions (f);
  try_merge_blocks ();
  mark_critical_edges ();

  /* Kill the data we won't maintain.  */
  label_value_list = NULL_RTX;
}
/* Create a new basic block consisting of the instructions between
   HEAD and END inclusive.  Reuses the note and basic block struct
   in BB_NOTE, if any.  */

void
create_basic_block (index, head, end, bb_note)
     int index;
     rtx head, end, bb_note;
{
  basic_block bb;

  if (bb_note
      && ! RTX_INTEGRATED_P (bb_note)
      && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
      && bb->aux == NULL)
    {
      /* If we found an existing note, thread it back onto the chain.  */

      if (GET_CODE (head) == CODE_LABEL)
	add_insn_after (bb_note, head);
      else
	{
	  add_insn_before (bb_note, head);
	  head = bb_note;
	}
    }
  else
    {
      /* Otherwise we must create a note and a basic block structure.
	 Since we allow basic block structs in rtl, give the struct
	 the same lifetime by allocating it off the function obstack
	 rather than using malloc.  */

      bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
      memset (bb, 0, sizeof (*bb));

      if (GET_CODE (head) == CODE_LABEL)
	bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
      else
	{
	  bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
	  head = bb_note;
	}
      NOTE_BASIC_BLOCK (bb_note) = bb;
    }

  /* Always include the bb note in the block.  */
  if (NEXT_INSN (end) == bb_note)
    end = bb_note;

  bb->head = head;
  bb->end = end;
  bb->index = index;
  BASIC_BLOCK (index) = bb;

  /* Tag the block so that we know it has been used when considering
     other basic block notes.  */
  bb->aux = bb;
}
/* Records the basic block struct in BB_FOR_INSN, for every instruction
   indexed by INSN_UID.  MAX is the size of the array.  */

void
compute_bb_for_insn (max)
     int max;
{
  int i;

  if (basic_block_for_insn)
    VARRAY_FREE (basic_block_for_insn);
  VARRAY_BB_INIT (basic_block_for_insn, max, "basic_block_for_insn");

  for (i = 0; i < n_basic_blocks; ++i)
    {
      basic_block bb = BASIC_BLOCK (i);
      rtx insn, end;

      end = bb->end;
      insn = bb->head;
      while (1)
	{
	  int uid = INSN_UID (insn);
	  if (uid < max)
	    VARRAY_BB (basic_block_for_insn, uid) = bb;
	  if (insn == end)
	    break;
	  insn = NEXT_INSN (insn);
	}
    }
}
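
/* Clients read the mapping recorded above through the BLOCK_FOR_INSN
   macro from basic-block.h.  A minimal sketch (illustrative only, kept
   under #if 0; the helper name is invented):  */
#if 0
/* Nonzero if insns A and B were placed in the same basic block.  */
static int
insns_in_same_block_p (a, b)
     rtx a, b;
{
  return BLOCK_FOR_INSN (a) == BLOCK_FOR_INSN (b);
}
#endif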
/* Free the memory associated with the edge structures.  */

static void
clear_edges ()
{
  int i;
  edge n, e;

  for (i = 0; i < n_basic_blocks; ++i)
    {
      basic_block bb = BASIC_BLOCK (i);

      for (e = bb->succ; e; e = n)
	{
	  n = e->succ_next;
	  free (e);
	}

      bb->succ = 0;
      bb->pred = 0;
    }

  for (e = ENTRY_BLOCK_PTR->succ; e; e = n)
    {
      n = e->succ_next;
      free (e);
    }

  ENTRY_BLOCK_PTR->succ = 0;
  EXIT_BLOCK_PTR->pred = 0;

  n_edges = 0;
}
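
/* The succ/pred chains freed above are singly-linked lists hanging off
   each block.  The usual way to visit every edge exactly once -- the
   pattern several routines below rely on -- is to walk the successor
   list of the entry block and of each numbered block.  A sketch
   (illustrative only, kept under #if 0; the visitor is hypothetical):  */
#if 0
static void
visit_all_edges (visit)
     void (*visit) PARAMS ((edge));
{
  int i;
  edge e;

  /* Edges out of the entry block are not reached by scanning the
     numbered blocks, so handle them first.  */
  for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
    (*visit) (e);

  /* Every remaining edge, including those into the exit block, appears
     on exactly one numbered block's successor list.  */
  for (i = 0; i < n_basic_blocks; ++i)
    for (e = BASIC_BLOCK (i)->succ; e; e = e->succ_next)
      (*visit) (e);
}
#endif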
/* Identify the edges between basic blocks.

   NONLOCAL_LABEL_LIST is a list of non-local labels in the function.  Blocks
   that are otherwise unreachable may be reachable with a non-local goto.

   BB_EH_END is an array indexed by basic block number in which we record
   the list of exception regions active at the end of the basic block.  */

static void
make_edges (label_value_list)
     rtx label_value_list;
{
  int i;
  eh_nesting_info *eh_nest_info = init_eh_nesting_info ();
  sbitmap *edge_cache = NULL;

  /* Assume no computed jump; revise as we create edges.  */
  current_function_has_computed_jump = 0;

  /* Heavy use of computed goto in machine-generated code can lead to
     nearly fully-connected CFGs.  In that case we spend a significant
     amount of time searching the edge lists for duplicates.  */
  if (forced_labels || label_value_list)
    {
      edge_cache = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
      sbitmap_vector_zero (edge_cache, n_basic_blocks);
    }

  /* By nature of the way these get numbered, block 0 is always the entry.  */
  make_edge (edge_cache, ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU);

  for (i = 0; i < n_basic_blocks; ++i)
    {
      basic_block bb = BASIC_BLOCK (i);
      rtx insn, x;
      enum rtx_code code;
      int force_fallthru = 0;

      /* Examine the last instruction of the block, and discover the
	 ways we can leave the block.  */

      insn = bb->end;
      code = GET_CODE (insn);

      /* A branch.  */
      if (code == JUMP_INSN)
	{
	  rtx tmp;

	  /* ??? Recognize a tablejump and do the right thing.  */
	  if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
	      && (tmp = NEXT_INSN (tmp)) != NULL_RTX
	      && GET_CODE (tmp) == JUMP_INSN
	      && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
		  || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
	    {
	      rtvec vec;
	      int j;

	      if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
		vec = XVEC (PATTERN (tmp), 0);
	      else
		vec = XVEC (PATTERN (tmp), 1);

	      for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
		make_label_edge (edge_cache, bb,
				 XEXP (RTVEC_ELT (vec, j), 0), 0);

	      /* Some targets (eg, ARM) emit a conditional jump that also
		 contains the out-of-range target.  Scan for these and
		 add an edge if necessary.  */
	      if ((tmp = single_set (insn)) != NULL
		  && SET_DEST (tmp) == pc_rtx
		  && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
		  && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
		make_label_edge (edge_cache, bb,
				 XEXP (XEXP (SET_SRC (tmp), 2), 0), 0);

#ifdef CASE_DROPS_THROUGH
	      /* Silly VAXen.  The ADDR_VEC is going to be in the way of
		 us naturally detecting fallthru into the next block.  */
	      force_fallthru = 1;
#endif
	    }

	  /* If this is a computed jump, then mark it as reaching
	     everything on the label_value_list and forced_labels list.  */
	  else if (computed_jump_p (insn))
	    {
	      current_function_has_computed_jump = 1;

	      for (x = label_value_list; x; x = XEXP (x, 1))
		make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);

	      for (x = forced_labels; x; x = XEXP (x, 1))
		make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
	    }
	  /* A return insn creates an edge out to the exit block.  */
	  else if (returnjump_p (insn))
	    make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);

	  /* Otherwise, we have a plain conditional or unconditional jump.  */
	  else
	    {
	      if (! JUMP_LABEL (insn))
		abort ();
	      make_label_edge (edge_cache, bb, JUMP_LABEL (insn), 0);
	    }
	}

      /* If this is a sibling call insn, then this is in effect a
	 combined call and return, and so we need an edge to the
	 exit block.  No need to worry about EH edges, since we
	 wouldn't have created the sibling call in the first place.  */

      if (code == CALL_INSN && SIBLING_CALL_P (insn))
	make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);
      else
	{
	  /* If this is a CALL_INSN, then mark it as reaching the active EH
	     handler for this CALL_INSN.  If we're handling asynchronous
	     exceptions then any insn can reach any of the active handlers.

	     Also mark the CALL_INSN as reaching any nonlocal goto handler.  */

	  if (code == CALL_INSN || asynchronous_exceptions)
	    {
	      /* Add any appropriate EH edges.  We do this unconditionally
		 since there may be a REG_EH_REGION or REG_EH_RETHROW note
		 on the call, and this needn't be within an EH region.  */
	      make_eh_edge (edge_cache, eh_nest_info, bb, insn, bb->eh_end);

	      /* If we have asynchronous exceptions, do the same for *all*
		 exception regions active in the block.  */
	      if (asynchronous_exceptions
		  && bb->eh_beg != bb->eh_end)
		{
		  if (bb->eh_beg >= 0)
		    make_eh_edge (edge_cache, eh_nest_info, bb,
				  NULL_RTX, bb->eh_beg);

		  for (x = bb->head; x != bb->end; x = NEXT_INSN (x))
		    if (GET_CODE (x) == NOTE
			&& (NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_BEG
			    || NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_END))
		      {
			int region = NOTE_EH_HANDLER (x);
			make_eh_edge (edge_cache, eh_nest_info, bb,
				      NULL_RTX, region);
		      }
		}
	    }

	  if (code == CALL_INSN && nonlocal_goto_handler_labels)
	    {
	      /* ??? This could be made smarter: in some cases it's possible
		 to tell that certain calls will not do a nonlocal goto.

		 For example, if the nested functions that do the nonlocal
		 gotos do not have their addresses taken, then only calls to
		 those functions or to other nested functions that use them
		 could possibly do nonlocal gotos.  */
	      /* We do know that a REG_EH_REGION note with a value less
		 than 0 is guaranteed not to perform a non-local goto.  */
	      rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
	      if (!note || INTVAL (XEXP (note, 0)) >= 0)
		for (x = nonlocal_goto_handler_labels; x; x = XEXP (x, 1))
		  make_label_edge (edge_cache, bb, XEXP (x, 0),
				   EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
	    }
	}

      /* We know something about the structure of the function __throw in
	 libgcc2.c.  It is the only function that ever contains eh_stub
	 labels.  It modifies its return address so that the last block
	 returns to one of the eh_stub labels within it.  So we have to
	 make additional edges in the flow graph.  */
      if (i + 1 == n_basic_blocks && eh_return_stub_label != 0)
	make_label_edge (edge_cache, bb, eh_return_stub_label, EDGE_EH);

      /* Find out if we can drop through to the next block.  */
      insn = next_nonnote_insn (insn);
      if (!insn || (i + 1 == n_basic_blocks && force_fallthru))
	make_edge (edge_cache, bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
      else if (i + 1 < n_basic_blocks)
	{
	  rtx tmp = BLOCK_HEAD (i + 1);
	  if (GET_CODE (tmp) == NOTE)
	    tmp = next_nonnote_insn (tmp);
	  if (force_fallthru || insn == tmp)
	    make_edge (edge_cache, bb, BASIC_BLOCK (i + 1), EDGE_FALLTHRU);
	}
    }

  free_eh_nesting_info (eh_nest_info);
  if (edge_cache)
    sbitmap_vector_free (edge_cache);
}
/* Create an edge between two basic blocks.  FLAGS are auxiliary information
   about the edge that is accumulated between calls.  */

void
make_edge (edge_cache, src, dst, flags)
     sbitmap *edge_cache;
     basic_block src, dst;
     int flags;
{
  int use_edge_cache;
  edge e;

  /* Don't bother with edge cache for ENTRY or EXIT; there aren't that
     many edges to them, and we didn't allocate memory for it.  */
  use_edge_cache = (edge_cache
		    && src != ENTRY_BLOCK_PTR
		    && dst != EXIT_BLOCK_PTR);

  /* Make sure we don't add duplicate edges.  */
  if (! use_edge_cache || TEST_BIT (edge_cache[src->index], dst->index))
    for (e = src->succ; e; e = e->succ_next)
      if (e->dest == dst)
	{
	  e->flags |= flags;
	  return;
	}

  e = (edge) xcalloc (1, sizeof (*e));
  n_edges++;

  e->succ_next = src->succ;
  e->pred_next = dst->pred;
  e->src = src;
  e->dest = dst;
  e->flags = flags;

  src->succ = e;
  dst->pred = e;

  if (use_edge_cache)
    SET_BIT (edge_cache[src->index], dst->index);
}
/* Create an edge from a basic block to a label.  */

static void
make_label_edge (edge_cache, src, label, flags)
     sbitmap *edge_cache;
     basic_block src;
     rtx label;
     int flags;
{
  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If the label was never emitted, this insn is junk, but avoid a
     crash trying to refer to BLOCK_FOR_INSN (label).  This can happen
     as a result of a syntax error and a diagnostic has already been
     printed.  */

  if (INSN_UID (label) == 0)
    return;

  make_edge (edge_cache, src, BLOCK_FOR_INSN (label), flags);
}
/* Create the edges generated by INSN in REGION.  */

static void
make_eh_edge (edge_cache, eh_nest_info, src, insn, region)
     sbitmap *edge_cache;
     eh_nesting_info *eh_nest_info;
     basic_block src;
     rtx insn;
     int region;
{
  handler_info **handler_list;
  int num, is_call;

  is_call = (insn && GET_CODE (insn) == CALL_INSN ? EDGE_ABNORMAL_CALL : 0);
  num = reachable_handlers (region, eh_nest_info, insn, &handler_list);
  while (--num >= 0)
    {
      make_label_edge (edge_cache, src, handler_list[num]->handler_label,
		       EDGE_ABNORMAL | EDGE_EH | is_call);
    }
}
/* EH_REGION notes appearing between basic blocks are ambiguous, and even
   dangerous if we intend to move basic blocks around.  Move such notes
   into the following block.  */

static void
move_stray_eh_region_notes ()
{
  int i;
  basic_block b1, b2;

  if (n_basic_blocks < 2)
    return;

  b2 = BASIC_BLOCK (n_basic_blocks - 1);
  for (i = n_basic_blocks - 2; i >= 0; --i, b2 = b1)
    {
      rtx insn, next, list = NULL_RTX;

      b1 = BASIC_BLOCK (i);
      for (insn = NEXT_INSN (b1->end); insn != b2->head; insn = next)
	{
	  next = NEXT_INSN (insn);
	  if (GET_CODE (insn) == NOTE
	      && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG
		  || NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
	    {
	      /* Unlink from the insn chain.  */
	      NEXT_INSN (PREV_INSN (insn)) = next;
	      PREV_INSN (next) = PREV_INSN (insn);

	      /* Queue it.  */
	      NEXT_INSN (insn) = list;
	      list = insn;
	    }
	}

      if (list == NULL_RTX)
	continue;

      /* Find where to insert these things.  */
      insn = b2->head;
      if (GET_CODE (insn) == CODE_LABEL)
	insn = NEXT_INSN (insn);

      while (list)
	{
	  next = NEXT_INSN (list);
	  add_insn_after (list, insn);
	  list = next;
	}
    }
}
/* Recompute eh_beg/eh_end for each basic block.  */

static void
record_active_eh_regions (f)
     rtx f;
{
  rtx insn, eh_list = NULL_RTX;
  int i = 0;
  basic_block bb = BASIC_BLOCK (0);

  for (insn = f; insn; insn = NEXT_INSN (insn))
    {
      if (bb->head == insn)
	bb->eh_beg = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);

      if (GET_CODE (insn) == NOTE)
	{
	  int kind = NOTE_LINE_NUMBER (insn);
	  if (kind == NOTE_INSN_EH_REGION_BEG)
	    eh_list = alloc_INSN_LIST (insn, eh_list);
	  else if (kind == NOTE_INSN_EH_REGION_END)
	    {
	      rtx t = XEXP (eh_list, 1);
	      free_INSN_LIST_node (eh_list);
	      eh_list = t;
	    }
	}

      if (bb->end == insn)
	{
	  bb->eh_end = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);
	  i += 1;
	  if (i == n_basic_blocks)
	    break;
	  bb = BASIC_BLOCK (i);
	}
    }
}
/* Identify critical edges and set the bits appropriately.  */

static void
mark_critical_edges ()
{
  int i, n = n_basic_blocks;
  basic_block bb;

  /* We begin with the entry block.  This is not terribly important now,
     but could be if a front end (Fortran) implemented alternate entry
     points.  */
  bb = ENTRY_BLOCK_PTR;
  i = -1;

  while (1)
    {
      edge e;

      /* (1) Critical edges must have a source with multiple successors.  */
      if (bb->succ && bb->succ->succ_next)
	{
	  for (e = bb->succ; e; e = e->succ_next)
	    {
	      /* (2) Critical edges must have a destination with multiple
		 predecessors.  Note that we know there is at least one
		 predecessor -- the edge we followed to get here.  */
	      if (e->dest->pred->pred_next)
		e->flags |= EDGE_CRITICAL;
	      else
		e->flags &= ~EDGE_CRITICAL;
	    }
	}
      else
	{
	  for (e = bb->succ; e; e = e->succ_next)
	    e->flags &= ~EDGE_CRITICAL;
	}

      if (++i >= n)
	break;
      bb = BASIC_BLOCK (i);
    }
}
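
/* Restated as a predicate: an edge is critical exactly when its source
   has multiple successors and its destination multiple predecessors.
   Such an edge can carry no inserted code until split_edge gives it a
   block of its own.  A sketch (illustrative only, kept under #if 0;
   the predicate name is invented):  */
#if 0
static int
edge_critical_p (e)
     edge e;
{
  return (e->src->succ != NULL && e->src->succ->succ_next != NULL
	  && e->dest->pred != NULL && e->dest->pred->pred_next != NULL);
}
#endif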
/* Split a (typically critical) edge.  Return the new block.
   Abort on abnormal edges.

   ??? The code generally expects to be called on critical edges.
   The case of a block ending in an unconditional jump to a
   block with multiple predecessors is not handled optimally.  */

basic_block
split_edge (edge_in)
     edge edge_in;
{
  basic_block old_pred, bb, old_succ;
  edge edge_out;
  rtx bb_note;
  int i, j;

  /* Abnormal edges cannot be split.  */
  if ((edge_in->flags & EDGE_ABNORMAL) != 0)
    abort ();

  old_pred = edge_in->src;
  old_succ = edge_in->dest;

  /* Remove the existing edge from the destination's pred list.  */
  {
    edge *pp;
    for (pp = &old_succ->pred; *pp != edge_in; pp = &(*pp)->pred_next)
      continue;
    *pp = edge_in->pred_next;
    edge_in->pred_next = NULL;
  }

  /* Create the new structures.  */
  bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
  edge_out = (edge) xcalloc (1, sizeof (*edge_out));
  n_edges++;

  memset (bb, 0, sizeof (*bb));
  bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
  bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);

  /* ??? This info is likely going to be out of date very soon.  */
  if (old_succ->global_live_at_start)
    {
      COPY_REG_SET (bb->global_live_at_start, old_succ->global_live_at_start);
      COPY_REG_SET (bb->global_live_at_end, old_succ->global_live_at_start);
    }
  else
    {
      CLEAR_REG_SET (bb->global_live_at_start);
      CLEAR_REG_SET (bb->global_live_at_end);
    }

  /* Wire them up.  */
  bb->pred = edge_in;
  bb->succ = edge_out;

  edge_in->dest = bb;
  edge_in->flags &= ~EDGE_CRITICAL;

  edge_out->pred_next = old_succ->pred;
  edge_out->succ_next = NULL;
  edge_out->src = bb;
  edge_out->dest = old_succ;
  edge_out->flags = EDGE_FALLTHRU;
  edge_out->probability = REG_BR_PROB_BASE;

  old_succ->pred = edge_out;

  /* Tricky case -- if there existed a fallthru into the successor
     (and we're not it) we must add a new unconditional jump around
     the new block we're actually interested in.

     Further, if that edge is critical, this means a second new basic
     block must be created to hold it.  In order to simplify correct
     insn placement, do this before we touch the existing basic block
     ordering for the block we were really wanting.  */
  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
    {
      edge e;
      for (e = edge_out->pred_next; e; e = e->pred_next)
	if (e->flags & EDGE_FALLTHRU)
	  break;

      if (e)
	{
	  basic_block jump_block;
	  rtx pos;

	  if ((e->flags & EDGE_CRITICAL) == 0
	      && e->src != ENTRY_BLOCK_PTR)
	    {
	      /* Non critical -- we can simply add a jump to the end
		 of the existing predecessor.  */
	      jump_block = e->src;
	    }
	  else
	    {
	      /* We need a new block to hold the jump.  The simplest
		 way to do the bulk of the work here is to recursively
		 call ourselves.  */
	      jump_block = split_edge (e);
	      e = jump_block->succ;
	    }

	  /* Now add the jump insn ...  */
	  pos = emit_jump_insn_after (gen_jump (old_succ->head),
				      jump_block->end);
	  jump_block->end = pos;
	  if (basic_block_for_insn)
	    set_block_for_insn (pos, jump_block);
	  emit_barrier_after (pos);

	  /* ... let jump know that label is in use, ...  */
	  JUMP_LABEL (pos) = old_succ->head;
	  ++LABEL_NUSES (old_succ->head);

	  /* ... and clear fallthru on the outgoing edge.  */
	  e->flags &= ~EDGE_FALLTHRU;

	  /* Continue splitting the interesting edge.  */
	}
    }

  /* Place the new block just in front of the successor.  */
  VARRAY_GROW (basic_block_info, ++n_basic_blocks);
  if (old_succ == EXIT_BLOCK_PTR)
    j = n_basic_blocks - 1;
  else
    j = old_succ->index;
  for (i = n_basic_blocks - 1; i > j; --i)
    {
      basic_block tmp = BASIC_BLOCK (i - 1);
      BASIC_BLOCK (i) = tmp;
      tmp->index = i;
    }
  BASIC_BLOCK (i) = bb;
  bb->index = i;
  /* Create the basic block note.

     Where we place the note can have a noticeable impact on the generated
     code.  Consider this cfg:

		E
		|
		0
		|
	     +->1-->2--->E
	     |  |
	     +--+

     If we need to insert an insn on the edge from block 0 to block 1,
     we want to ensure the instructions we insert are outside of any
     loop notes that physically sit between block 0 and block 1.  Otherwise
     we confuse the loop optimizer into thinking the loop is a phony.  */
  if (old_succ != EXIT_BLOCK_PTR
      && PREV_INSN (old_succ->head)
      && GET_CODE (PREV_INSN (old_succ->head)) == NOTE
      && NOTE_LINE_NUMBER (PREV_INSN (old_succ->head)) == NOTE_INSN_LOOP_BEG)
    bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK,
				PREV_INSN (old_succ->head));
  else if (old_succ != EXIT_BLOCK_PTR)
    bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, old_succ->head);
  else
    bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
  NOTE_BASIC_BLOCK (bb_note) = bb;
  bb->head = bb->end = bb_note;
  /* Not quite simple -- for non-fallthru edges, we must adjust the
     predecessor's jump instruction to target our new block.  */
  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
    {
      rtx tmp, insn = old_pred->end;
      rtx old_label = old_succ->head;
      rtx new_label = gen_label_rtx ();

      if (GET_CODE (insn) != JUMP_INSN)
	abort ();

      /* ??? Recognize a tablejump and adjust all matching cases.  */
      if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
	  && (tmp = NEXT_INSN (tmp)) != NULL_RTX
	  && GET_CODE (tmp) == JUMP_INSN
	  && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
	      || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
	{
	  rtvec vec;
	  int j;

	  if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
	    vec = XVEC (PATTERN (tmp), 0);
	  else
	    vec = XVEC (PATTERN (tmp), 1);

	  for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
	    if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
	      {
		RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (VOIDmode, new_label);
		--LABEL_NUSES (old_label);
		++LABEL_NUSES (new_label);
	      }

	  /* Handle casesi dispatch insns */
	  if ((tmp = single_set (insn)) != NULL
	      && SET_DEST (tmp) == pc_rtx
	      && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
	      && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
	      && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
	    {
	      XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (VOIDmode,
							   new_label);
	      --LABEL_NUSES (old_label);
	      ++LABEL_NUSES (new_label);
	    }
	}
      else
	{
	  /* This would have indicated an abnormal edge.  */
	  if (computed_jump_p (insn))
	    abort ();

	  /* A return instruction can't be redirected.  */
	  if (returnjump_p (insn))
	    abort ();

	  /* If the insn doesn't go where we think, we're confused.  */
	  if (JUMP_LABEL (insn) != old_label)
	    abort ();

	  redirect_jump (insn, new_label);
	}

      emit_label_before (new_label, bb_note);
      bb->head = new_label;
    }

  return bb;
}
/* Queue instructions for insertion on an edge between two basic blocks.
   The new instructions and basic blocks (if any) will not appear in the
   CFG until commit_edge_insertions is called.  */

void
insert_insn_on_edge (pattern, e)
     rtx pattern;
     edge e;
{
  /* We cannot insert instructions on an abnormal critical edge.
     It will be easier to find the culprit if we die now.  */
  if ((e->flags & (EDGE_ABNORMAL|EDGE_CRITICAL))
      == (EDGE_ABNORMAL|EDGE_CRITICAL))
    abort ();

  if (e->insns == NULL_RTX)
    start_sequence ();
  else
    push_to_sequence (e->insns);

  emit_insn (pattern);

  e->insns = get_insns ();
  end_sequence ();
}
/* Update the CFG for the instructions queued on edge E.  */

static void
commit_one_edge_insertion (e)
     edge e;
{
  rtx before = NULL_RTX, after = NULL_RTX, insns, tmp;
  basic_block bb;

  /* Pull the insns off the edge now since the edge might go away.  */
  insns = e->insns;
  e->insns = NULL_RTX;

  /* Figure out where to put these things.  If the destination has
     one predecessor, insert there.  Except for the exit block.  */
  if (e->dest->pred->pred_next == NULL
      && e->dest != EXIT_BLOCK_PTR)
    {
      bb = e->dest;

      /* Get the location correct wrt a code label, and "nice" wrt
	 a basic block note, and before everything else.  */
      tmp = bb->head;
      if (GET_CODE (tmp) == CODE_LABEL)
	tmp = NEXT_INSN (tmp);
      if (GET_CODE (tmp) == NOTE
	  && NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BASIC_BLOCK)
	tmp = NEXT_INSN (tmp);
      if (tmp == bb->head)
	before = tmp;
      else
	after = PREV_INSN (tmp);
    }

  /* If the source has one successor and the edge is not abnormal,
     insert there.  Except for the entry block.  */
  else if ((e->flags & EDGE_ABNORMAL) == 0
	   && e->src->succ->succ_next == NULL
	   && e->src != ENTRY_BLOCK_PTR)
    {
      bb = e->src;
      /* It is possible to have a non-simple jump here.  Consider a target
	 where some forms of unconditional jumps clobber a register.  This
	 happens on the fr30 for example.

	 We know this block has a single successor, so we can just emit
	 the queued insns before the jump.  */
      if (GET_CODE (bb->end) == JUMP_INSN)
	{
	  before = bb->end;
	}
      else
	{
	  /* We'd better be fallthru, or we've lost track of what's what.  */
	  if ((e->flags & EDGE_FALLTHRU) == 0)
	    abort ();

	  after = bb->end;
	}
    }

  /* Otherwise we must split the edge.  */
  else
    {
      bb = split_edge (e);
      after = bb->end;
    }

  /* Now that we've found the spot, do the insertion.  */

  /* Set the new block number for these insns, if structure is allocated.  */
  if (basic_block_for_insn)
    {
      rtx i;
      for (i = insns; i != NULL_RTX; i = NEXT_INSN (i))
	set_block_for_insn (i, bb);
    }

  if (before)
    {
      emit_insns_before (insns, before);
      if (before == bb->head)
	bb->head = insns;
    }
  else
    {
      rtx last = emit_insns_after (insns, after);
      if (after == bb->end)
	{
	  bb->end = last;

	  if (GET_CODE (last) == JUMP_INSN)
	    {
	      if (returnjump_p (last))
		{
		  /* ??? Remove all outgoing edges from BB and add one
		     for EXIT.  This is not currently a problem because
		     this only happens for the (single) epilogue, which
		     already has a fallthru edge to EXIT.  */

		  e = bb->succ;
		  if (e->dest != EXIT_BLOCK_PTR
		      || e->succ_next != NULL
		      || (e->flags & EDGE_FALLTHRU) == 0)
		    abort ();
		  e->flags &= ~EDGE_FALLTHRU;

		  emit_barrier_after (last);
		}
	      else
		abort ();
	    }
	}
    }
}
/* Update the CFG for all queued instructions.  */

void
commit_edge_insertions ()
{
  int i;
  basic_block bb;

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  i = -1;
  bb = ENTRY_BLOCK_PTR;
  while (1)
    {
      edge e, next;

      for (e = bb->succ; e; e = next)
	{
	  next = e->succ_next;
	  if (e->insns)
	    commit_one_edge_insertion (e);
	}

      if (++i >= n_basic_blocks)
	break;
      bb = BASIC_BLOCK (i);
    }
}
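
/* insert_insn_on_edge and commit_edge_insertions together form a
   two-phase protocol for adding code on edges.  A minimal, hypothetical
   use (illustrative only, kept under #if 0): the queued pattern here
   merely clears REG, standing in for whatever a real pass would emit.  */
#if 0
static void
clear_reg_on_edge (e, reg)
     edge e;
     rtx reg;
{
  /* Phase one: queue an insn on the edge.  Nothing is emitted yet, so
     the insn chain and CFG stay consistent while further insns are
     queued on this or other edges.  */
  insert_insn_on_edge (gen_rtx_SET (VOIDmode, reg, const0_rtx), e);

  /* Phase two, done once after all queuing: materialize the insns,
     splitting critical edges into new blocks where required.  */
  commit_edge_insertions ();
}
#endif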
/* Delete all unreachable basic blocks.  */

static void
delete_unreachable_blocks ()
{
  basic_block *worklist, *tos;
  int deleted_handler;
  edge e;
  int i, n;

  n = n_basic_blocks;
  tos = worklist = (basic_block *) xmalloc (sizeof (basic_block) * n);

  /* Use basic_block->aux as a marker.  Clear them all.  */

  for (i = 0; i < n; ++i)
    BASIC_BLOCK (i)->aux = NULL;

  /* Add our starting points to the worklist.  Almost always there will
     be only one.  It isn't inconceivable that we might one day directly
     support Fortran alternate entry points.  */

  for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
    {
      *tos++ = e->dest;

      /* Mark the block with a handy non-null value.  */
      e->dest->aux = e;
    }

  /* Iterate: find everything reachable from what we've already seen.  */

  while (tos != worklist)
    {
      basic_block b = *--tos;

      for (e = b->succ; e; e = e->succ_next)
	if (!e->dest->aux)
	  {
	    *tos++ = e->dest;
	    e->dest->aux = e;
	  }
    }

  /* Delete all unreachable basic blocks.  Count down so that we don't
     interfere with the block renumbering that happens in delete_block.  */

  deleted_handler = 0;

  for (i = n - 1; i >= 0; --i)
    {
      basic_block b = BASIC_BLOCK (i);

      if (b->aux != NULL)
	/* This block was found.  Tidy up the mark.  */
	b->aux = NULL;
      else
	deleted_handler |= delete_block (b);
    }

  tidy_fallthru_edges ();

  /* If we deleted an exception handler, we may have EH region begin/end
     blocks to remove as well.  */
  if (deleted_handler)
    delete_eh_regions ();

  free (worklist);
}
/* Find EH regions for which there is no longer a handler, and delete them.  */

static void
delete_eh_regions ()
{
  rtx insn;

  update_rethrow_references ();

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == NOTE)
      {
	if ((NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
	    || (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
	  {
	    int num = NOTE_EH_HANDLER (insn);
	    /* A NULL handler indicates a region is no longer needed,
	       as long as its rethrow label isn't used.  */
	    if (get_first_handler (num) == NULL && ! rethrow_used (num))
	      {
		NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
		NOTE_SOURCE_FILE (insn) = 0;
	      }
	  }
      }
}
/* Return true if NOTE is not one of the ones that must be kept paired,
   so that we may simply delete them.  */

static int
can_delete_note_p (note)
     rtx note;
{
  return (NOTE_LINE_NUMBER (note) == NOTE_INSN_DELETED
	  || NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK);
}
/* Unlink a chain of insns between START and FINISH, leaving notes
   that must be paired.  */

void
flow_delete_insn_chain (start, finish)
     rtx start, finish;
{
  /* Unchain the insns one by one.  It would be quicker to delete all
     of these with a single unchaining, rather than one at a time, but
     we need to keep the NOTE's.  */

  rtx next;

  while (1)
    {
      next = NEXT_INSN (start);
      if (GET_CODE (start) == NOTE && !can_delete_note_p (start))
	;
      else if (GET_CODE (start) == CODE_LABEL && !can_delete_label_p (start))
	;
      else
	next = flow_delete_insn (start);

      if (start == finish)
	break;
      start = next;
    }
}
/* Delete the insns in a (non-live) block.  We physically delete every
   non-deleted-note insn, and update the flow graph appropriately.

   Return nonzero if we deleted an exception handler.  */

/* ??? Preserving all such notes strikes me as wrong.  It would be nice
   to post-process the stream to remove empty blocks, loops, ranges, etc.  */

static int
delete_block (b)
     basic_block b;
{
  int deleted_handler = 0;
  rtx insn, end, tmp;

  /* If the head of this block is a CODE_LABEL, then it might be the
     label for an exception handler which can't be reached.

     We need to remove the label from the exception_handler_label list
     and remove the associated NOTE_INSN_EH_REGION_BEG and
     NOTE_INSN_EH_REGION_END notes.  */

  insn = b->head;

  never_reached_warning (insn);

  if (GET_CODE (insn) == CODE_LABEL)
    {
      rtx x, *prev = &exception_handler_labels;

      for (x = exception_handler_labels; x; x = XEXP (x, 1))
	{
	  if (XEXP (x, 0) == insn)
	    {
	      /* Found a match, splice this label out of the EH label list.  */
	      *prev = XEXP (x, 1);
	      XEXP (x, 1) = NULL_RTX;
	      XEXP (x, 0) = NULL_RTX;

	      /* Remove the handler from all regions */
	      remove_handler (insn);
	      deleted_handler = 1;
	      break;
	    }
	  prev = &XEXP (x, 1);
	}

      /* This label may be referenced by code solely for its value, or
	 referenced by static data, or something.  We have determined
	 that it is not reachable, but cannot delete the label itself.
	 Save code space and continue to delete the balance of the block,
	 along with properly updating the cfg.  */
      if (!can_delete_label_p (insn))
	{
	  /* If we've only got one of these, skip the whole deleting
	     insns thing.  */
	  if (insn == b->end)
	    goto no_delete_insns;
	  insn = NEXT_INSN (insn);
	}
    }

  /* Include any jump table following the basic block.  */
  end = b->end;
  if (GET_CODE (end) == JUMP_INSN
      && (tmp = JUMP_LABEL (end)) != NULL_RTX
      && (tmp = NEXT_INSN (tmp)) != NULL_RTX
      && GET_CODE (tmp) == JUMP_INSN
      && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
	  || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
    end = tmp;

  /* Include any barrier that may follow the basic block.  */
  tmp = next_nonnote_insn (b->end);
  if (tmp && GET_CODE (tmp) == BARRIER)
    end = tmp;

  /* Selectively delete the entire chain.  */
  flow_delete_insn_chain (insn, end);

 no_delete_insns:

  /* Remove the edges into and out of this block.  Note that there may
     indeed be edges in, if we are removing an unreachable loop.  */
  {
    edge e, next, *q;

    for (e = b->pred; e; e = next)
      {
	for (q = &e->src->succ; *q != e; q = &(*q)->succ_next)
	  continue;
	*q = e->succ_next;
	next = e->pred_next;
	n_edges--;
	free (e);
      }
    for (e = b->succ; e; e = next)
      {
	for (q = &e->dest->pred; *q != e; q = &(*q)->pred_next)
	  continue;
	*q = e->pred_next;
	next = e->succ_next;
	n_edges--;
	free (e);
      }

    b->pred = NULL;
    b->succ = NULL;
  }

  /* Remove the basic block from the array, and compact behind it.  */
  expunge_block (b);

  return deleted_handler;
}
/* Remove block B from the basic block array and compact behind it.  */

static void
expunge_block (b)
     basic_block b;
{
  int i, n = n_basic_blocks;

  for (i = b->index; i + 1 < n; ++i)
    {
      basic_block x = BASIC_BLOCK (i + 1);
      BASIC_BLOCK (i) = x;
      x->index = i;
    }

  basic_block_info->num_elements--;
  n_basic_blocks--;
}
1978 /* Delete INSN by patching it out. Return the next insn. */
1980 rtx
1981 flow_delete_insn (insn)
1982 rtx insn;
1984 rtx prev = PREV_INSN (insn);
1985 rtx next = NEXT_INSN (insn);
1986 rtx note;
1988 PREV_INSN (insn) = NULL_RTX;
1989 NEXT_INSN (insn) = NULL_RTX;
1991 if (prev)
1992 NEXT_INSN (prev) = next;
1993 if (next)
1994 PREV_INSN (next) = prev;
1995 else
1996 set_last_insn (prev);
1998 if (GET_CODE (insn) == CODE_LABEL)
1999 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
2001 /* If deleting a jump, decrement the use count of the label. Deleting
2002 the label itself should happen in the normal course of block merging. */
2003 if (GET_CODE (insn) == JUMP_INSN && JUMP_LABEL (insn))
2004 LABEL_NUSES (JUMP_LABEL (insn))--;
2006 /* Also if deleting an insn that references a label. */
2007 else if ((note = find_reg_note (insn, REG_LABEL, NULL_RTX)) != NULL_RTX)
2008 LABEL_NUSES (XEXP (note, 0))--;
2010 return next;
2013 /* True if a given label can be deleted. */
2015 static int
2016 can_delete_label_p (label)
2017 rtx label;
2019 rtx x;
2021 if (LABEL_PRESERVE_P (label))
2022 return 0;
2024 for (x = forced_labels; x ; x = XEXP (x, 1))
2025 if (label == XEXP (x, 0))
2026 return 0;
2027 for (x = label_value_list; x ; x = XEXP (x, 1))
2028 if (label == XEXP (x, 0))
2029 return 0;
2030 for (x = exception_handler_labels; x ; x = XEXP (x, 1))
2031 if (label == XEXP (x, 0))
2032 return 0;
2034 /* User declared labels must be preserved. */
2035 if (LABEL_NAME (label) != 0)
2036 return 0;
2038 return 1;
2041 /* Blocks A and B are to be merged into a single block. A has no incoming
2042 fallthru edge, so it can be moved before B without adding or modifying
2043 any jumps (aside from the jump from A to B). */
2045 static int
2046 merge_blocks_move_predecessor_nojumps (a, b)
2047 basic_block a, b;
2049 rtx start, end, barrier;
2050 int index;
2052 start = a->head;
2053 end = a->end;
2055 /* We want to delete the BARRIER after the end of the insns we are
2056 going to move. If we don't find a BARRIER, then do nothing. This
2057 can happen in some cases if we have labels we can not delete.
2059 Similarly, do nothing if we can not delete the label at the start
2060 of the target block. */
2061 barrier = next_nonnote_insn (end);
2062 if (GET_CODE (barrier) != BARRIER
2063 || (GET_CODE (b->head) == CODE_LABEL
2064 && ! can_delete_label_p (b->head)))
2065 return 0;
2066 else
2067 flow_delete_insn (barrier);
2069 /* Move block and loop notes out of the chain so that we do not
2070 disturb their order.
2072 ??? A better solution would be to squeeze out all the non-nested notes
2073 and adjust the block trees appropriately. Even better would be to have
2074 a tighter connection between block trees and rtl so that this is not
2075 necessary. */
2076 start = squeeze_notes (start, end);
2078 /* Scramble the insn chain. */
2079 if (end != PREV_INSN (b->head))
2080 reorder_insns (start, end, PREV_INSN (b->head));
2082 if (rtl_dump_file)
2084 fprintf (rtl_dump_file, "Moved block %d before %d and merged.\n",
2085 a->index, b->index);
2088 /* Swap the records for the two blocks around. Although we are deleting B,
2089 A is now where B was and we want to compact the BB array from where
2090 A used to be. */
2091 BASIC_BLOCK (a->index) = b;
2092 BASIC_BLOCK (b->index) = a;
2093 index = a->index;
2094 a->index = b->index;
2095 b->index = index;
2097 /* Now blocks A and B are contiguous. Merge them. */
2098 merge_blocks_nomove (a, b);
2100 return 1;
2103 /* Blocks A and B are to be merged into a single block. B has no outgoing
2104 fallthru edge, so it can be moved after A without adding or modifying
2105 any jumps (aside from the jump from A to B). */
2107 static int
2108 merge_blocks_move_successor_nojumps (a, b)
2109 basic_block a, b;
2111 rtx start, end, barrier;
2113 start = b->head;
2114 end = b->end;
2116 /* We want to delete the BARRIER after the end of the insns we are
2117 going to move. If we don't find a BARRIER, then do nothing. This
2118 can happen in some cases if we have labels we can not delete.
2120 Similarly, do nothing if we can not delete the label at the start
2121 of the target block. */
2122 barrier = next_nonnote_insn (end);
2123 if (GET_CODE (barrier) != BARRIER
2124 || (GET_CODE (b->head) == CODE_LABEL
2125 && ! can_delete_label_p (b->head)))
2126 return 0;
2127 else
2128 flow_delete_insn (barrier);
2130 /* Move block and loop notes out of the chain so that we do not
2131 disturb their order.
2133 ??? A better solution would be to squeeze out all the non-nested notes
2134 and adjust the block trees appropriately. Even better would be to have
2135 a tighter connection between block trees and rtl so that this is not
2136 necessary. */
2137 start = squeeze_notes (start, end);
2139 /* Scramble the insn chain. */
2140 reorder_insns (start, end, a->end);
2142 /* Now blocks A and B are contiguous. Merge them. */
2143 merge_blocks_nomove (a, b);
2145 if (rtl_dump_file)
2147 fprintf (rtl_dump_file, "Moved block %d after %d and merged.\n",
2148 b->index, a->index);
2151 return 1;
2154 /* Blocks A and B are to be merged into a single block. The insns
2155 are already contiguous, hence `nomove'. */
2157 static void
2158 merge_blocks_nomove (a, b)
2159 basic_block a, b;
2161 edge e;
2162 rtx b_head, b_end, a_end;
2163 int b_empty = 0;
2165 /* If there was a CODE_LABEL beginning B, delete it. */
2166 b_head = b->head;
2167 b_end = b->end;
2168 if (GET_CODE (b_head) == CODE_LABEL)
2170 /* Detect basic blocks with nothing but a label. This can happen
2171 in particular at the end of a function. */
2172 if (b_head == b_end)
2173 b_empty = 1;
2174 b_head = flow_delete_insn (b_head);
2177 /* Delete the basic block note. */
2178 if (GET_CODE (b_head) == NOTE
2179 && NOTE_LINE_NUMBER (b_head) == NOTE_INSN_BASIC_BLOCK)
2181 if (b_head == b_end)
2182 b_empty = 1;
2183 b_head = flow_delete_insn (b_head);
2186 /* If there was a jump out of A, delete it. */
2187 a_end = a->end;
2188 if (GET_CODE (a_end) == JUMP_INSN)
2190 rtx prev;
2192 prev = prev_nonnote_insn (a_end);
2193 if (!prev)
2194 prev = a->head;
2196 #ifdef HAVE_cc0
2197 /* If this was a conditional jump, we need to also delete
2198 the insn that set cc0. */
2200 if (prev && sets_cc0_p (prev))
2202 rtx tmp = prev;
2203 prev = prev_nonnote_insn (prev);
2204 if (!prev)
2205 prev = a->head;
2206 flow_delete_insn (tmp);
2208 #endif
2210 /* Note that a->head != a->end, since we should have at least a
2211 bb note plus the jump, so prev != a_end. */
2212 flow_delete_insn (a_end);
2213 a_end = prev;
2216 /* By definition, there should only be one successor of A, and that is
2217 B. Free that edge struct. */
2218 n_edges--;
2219 free (a->succ);
2221 /* Adjust the edges out of B for the new owner. */
2222 for (e = b->succ; e ; e = e->succ_next)
2223 e->src = a;
2224 a->succ = b->succ;
2226 /* Reassociate the insns of B with A. */
2227 if (!b_empty)
2229 BLOCK_FOR_INSN (b_head) = a;
2230 while (b_head != b_end)
2232 b_head = NEXT_INSN (b_head);
2233 BLOCK_FOR_INSN (b_head) = a;
2235 a_end = b_head;
2237 a->end = a_end;
2239 /* Compact the basic block array. */
2240 expunge_block (b);
2243 /* Attempt to merge basic blocks that are potentially non-adjacent.
2244 Return true iff the attempt succeeded. */
2246 static int
2247 merge_blocks (e, b, c)
2248 edge e;
2249 basic_block b, c;
2251 /* If B has a fallthru edge to C, no need to move anything. */
2252 if (e->flags & EDGE_FALLTHRU)
2254 /* If a label still appears somewhere and we cannot delete the label,
2255 then we cannot merge the blocks. The edge was tidied already. */
2257 rtx insn, stop = NEXT_INSN (c->head);
2258 for (insn = NEXT_INSN (b->end); insn != stop; insn = NEXT_INSN (insn))
2259 if (GET_CODE (insn) == CODE_LABEL && !can_delete_label_p (insn))
2260 return 0;
2262 merge_blocks_nomove (b, c);
2264 if (rtl_dump_file)
2266 fprintf (rtl_dump_file, "Merged %d and %d without moving.\n",
2267 b->index, c->index);
2270 return 1;
2272 else
2274 edge tmp_edge;
2275 basic_block d;
2276 int c_has_outgoing_fallthru;
2277 int b_has_incoming_fallthru;
2279 /* We must make sure to not munge nesting of exception regions,
2280 lexical blocks, and loop notes.
2282 The first is taken care of by requiring that the active eh
2283 region at the end of one block always matches the active eh
2284 region at the beginning of the next block.
2286 The latter two are taken care of by squeezing out all the notes. */
2288 /* ??? A throw/catch edge (or any abnormal edge) should be rarely
2289 executed and we may want to treat blocks which have two out
2290 edges, one normal, one abnormal as only having one edge for
2291 block merging purposes. */
2293 for (tmp_edge = c->succ; tmp_edge ; tmp_edge = tmp_edge->succ_next)
2294 if (tmp_edge->flags & EDGE_FALLTHRU)
2295 break;
2296 c_has_outgoing_fallthru = (tmp_edge != NULL);
2298 for (tmp_edge = b->pred; tmp_edge ; tmp_edge = tmp_edge->pred_next)
2299 if (tmp_edge->flags & EDGE_FALLTHRU)
2300 break;
2301 b_has_incoming_fallthru = (tmp_edge != NULL);
2303 /* If B does not have an incoming fallthru, and the exception regions
2304 match, then it can be moved immediately before C without introducing
2305 or modifying jumps.
2307 C can not be the first block, so we do not have to worry about
2308 accessing a non-existent block. */
2309 d = BASIC_BLOCK (c->index - 1);
2310 if (! b_has_incoming_fallthru
2311 && d->eh_end == b->eh_beg
2312 && b->eh_end == c->eh_beg)
2313 return merge_blocks_move_predecessor_nojumps (b, c);
2315 /* Otherwise, we're going to try to move C after B. Make sure the
2316 exception regions match.
2318 If B is the last basic block, then we must not try to access the
2319 block structure for block B + 1. Luckily in that case we do not
2320 need to worry about matching exception regions. */
2321 d = (b->index + 1 < n_basic_blocks ? BASIC_BLOCK (b->index + 1) : NULL);
2322 if (b->eh_end == c->eh_beg
2323 && (d == NULL || c->eh_end == d->eh_beg))
2325 /* If C does not have an outgoing fallthru, then it can be moved
2326 immediately after B without introducing or modifying jumps. */
2327 if (! c_has_outgoing_fallthru)
2328 return merge_blocks_move_successor_nojumps (b, c);
2330 /* Otherwise, we'll need to insert an extra jump, and possibly
2331 a new block to contain it. */
2332 /* ??? Not implemented yet. */
2335 return 0;
2339 /* Top level driver for merge_blocks. */
2341 static void
2342 try_merge_blocks ()
2344 int i;
2346 /* Attempt to merge blocks as made possible by edge removal. If a block
2347 has only one successor, and the successor has only one predecessor,
2348 they may be combined. */
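/* For instance, given a chain B -> C -> D in which each edge is the sole
   out-edge of its source and the sole in-edge of its destination, the
   while loop below first folds C into B and then, on the next iteration,
   folds D into the combined block, because b->succ is re-read after
   every successful merge. */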
2350 for (i = 0; i < n_basic_blocks; )
2352 basic_block c, b = BASIC_BLOCK (i);
2353 edge s;
2355 /* A loop because chains of blocks might be combinable. */
2356 while ((s = b->succ) != NULL
2357 && s->succ_next == NULL
2358 && (s->flags & EDGE_EH) == 0
2359 && (c = s->dest) != EXIT_BLOCK_PTR
2360 && c->pred->pred_next == NULL
2361 /* If the jump insn has side effects, we can't kill the edge. */
2362 && (GET_CODE (b->end) != JUMP_INSN
2363 || onlyjump_p (b->end))
2364 && merge_blocks (s, b, c))
2365 continue;
2367 /* Don't get confused by the index shift caused by deleting blocks. */
2368 i = b->index + 1;
2372 /* The given edge should potentially be a fallthru edge. If that is in
2373 fact true, delete the jump and barriers that are in the way. */
2375 static void
2376 tidy_fallthru_edge (e, b, c)
2377 edge e;
2378 basic_block b, c;
2380 rtx q;
2382 /* ??? In a late-running flow pass, other folks may have deleted basic
2383 blocks by nopping out blocks, leaving multiple BARRIERs between here
2384 and the target label. They ought to be chastised and fixed.
2386 We can also wind up with a sequence of undeletable labels between
2387 one block and the next.
2389 So search through a sequence of barriers, labels, and notes for
2390 the head of block C and assert that we really do fall through. */
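/* Schematically, in the simplest case (a sketch; the exact insns vary):

       B: ... (jump_insn -> L)            B: ...
          (barrier)                 =>
       C: (code_label L) ...              C: (code_label L) ...

   The jump and barrier are unlinked -- or, if the jump was the only insn
   in B, it is turned into a NOTE_INSN_DELETED so the block does not
   become empty -- and E is marked EDGE_FALLTHRU. */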
2392 if (next_real_insn (b->end) != next_real_insn (PREV_INSN (c->head)))
2393 return;
2395 /* Remove what will soon cease being the jump insn from the source block.
2396 If block B consisted only of this single jump, turn it into a deleted
2397 note. */
2398 q = b->end;
2399 if (GET_CODE (q) == JUMP_INSN)
2401 #ifdef HAVE_cc0
2402 /* If this was a conditional jump, we need to also delete
2403 the insn that set cc0. */
2404 if (! simplejump_p (q) && condjump_p (q) && sets_cc0_p (PREV_INSN (q)))
2405 q = PREV_INSN (q);
2406 #endif
2408 if (b->head == q)
2410 PUT_CODE (q, NOTE);
2411 NOTE_LINE_NUMBER (q) = NOTE_INSN_DELETED;
2412 NOTE_SOURCE_FILE (q) = 0;
2414 else
2415 b->end = q = PREV_INSN (q);
2418 /* Selectively unlink the sequence. */
2419 if (q != PREV_INSN (c->head))
2420 flow_delete_insn_chain (NEXT_INSN (q), PREV_INSN (c->head));
2422 e->flags |= EDGE_FALLTHRU;
2425 /* Fix up edges that now fall through, or rather should now fall through
2426 but previously required a jump around now deleted blocks. Simplify
2427 the search by only examining blocks numerically adjacent, since this
2428 is how find_basic_blocks created them. */
2430 static void
2431 tidy_fallthru_edges ()
2433 int i;
2435 for (i = 1; i < n_basic_blocks; ++i)
2437 basic_block b = BASIC_BLOCK (i - 1);
2438 basic_block c = BASIC_BLOCK (i);
2439 edge s;
2441 /* We care about simple conditional or unconditional jumps with
2442 a single successor.
2444 If we had a conditional branch to the next instruction when
2445 find_basic_blocks was called, then there will only be one
2446 out edge for the block which ended with the conditional
2447 branch (since we do not create duplicate edges).
2449 Furthermore, the edge will be marked as a fallthru because we
2450 merge the flags for the duplicate edges. So we do not want to
2451 check that the edge is not a FALLTHRU edge. */
2452 if ((s = b->succ) != NULL
2453 && s->succ_next == NULL
2454 && s->dest == c
2455 /* If the jump insn has side effects, we can't tidy the edge. */
2456 && (GET_CODE (b->end) != JUMP_INSN
2457 || onlyjump_p (b->end)))
2458 tidy_fallthru_edge (s, b, c);
2462 /* Discover and record the loop depth at the head of each basic block. */
2464 void
2465 calculate_loop_depth (dump)
2466 FILE *dump;
2468 struct loops loops;
2470 /* The loop infrastructure does the real job for us. */
2471 flow_loops_find (&loops);
2473 if (dump)
2474 flow_loops_dump (&loops, dump, 0);
2476 flow_loops_free (&loops);
2479 /* Perform data flow analysis.
2480 F is the first insn of the function and NREGS the number of register numbers
2481 in use. */
2483 void
2484 life_analysis (f, nregs, file, remove_dead_code)
2485 rtx f;
2486 int nregs;
2487 FILE *file;
2488 int remove_dead_code;
2490 #ifdef ELIMINABLE_REGS
2491 register int i;
2492 static struct {int from, to; } eliminables[] = ELIMINABLE_REGS;
2493 #endif
2494 int flags;
2495 sbitmap all_blocks;
2497 /* Record which registers will be eliminated. We use this in
2498 mark_used_regs. */
2500 CLEAR_HARD_REG_SET (elim_reg_set);
2502 #ifdef ELIMINABLE_REGS
2503 for (i = 0; i < (int) (sizeof eliminables / sizeof eliminables[0]); i++)
2504 SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
2505 #else
2506 SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
2507 #endif
2509 /* We want alias analysis information for local dead store elimination. */
2510 init_alias_analysis ();
2512 if (! optimize)
2513 flags = PROP_DEATH_NOTES | PROP_REG_INFO;
2514 else
2516 flags = PROP_FINAL;
2517 if (! remove_dead_code)
2518 flags &= ~(PROP_SCAN_DEAD_CODE | PROP_KILL_DEAD_CODE);
2521 /* The post-reload life analysis must have (on a global basis) the same
2522 registers live as were computed by reload itself. Otherwise
2523 elimination offsets and such may be incorrect.
2525 Reload will mark some registers as live even though they do not
2526 appear in the rtl. */
2527 if (reload_completed)
2528 flags &= ~PROP_REG_INFO;
2530 max_regno = nregs;
2532 /* Always remove no-op moves. Do this before other processing so
2533 that we don't have to keep re-scanning them. */
2534 delete_noop_moves (f);
2536 /* Some targets can emit simpler epilogues if they know that sp was
2537 not ever modified during the function. After reload, of course,
2538 we've already emitted the epilogue so there's no sense searching. */
2539 if (! reload_completed)
2540 notice_stack_pointer_modification (f);
2542 /* Allocate and zero out data structures that will record the
2543 data from lifetime analysis. */
2544 allocate_reg_life_data ();
2545 allocate_bb_life_data ();
2546 reg_next_use = (rtx *) xcalloc (nregs, sizeof (rtx));
2547 all_blocks = sbitmap_alloc (n_basic_blocks);
2548 sbitmap_ones (all_blocks);
2550 /* Find the set of registers live on function exit. */
2551 mark_regs_live_at_end (EXIT_BLOCK_PTR->global_live_at_start);
2553 /* "Update" life info from zero. It'd be nice to begin the
2554 relaxation with just the exit and noreturn blocks, but that set
2555 is not immediately handy. */
2557 if (flags & PROP_REG_INFO)
2558 memset (regs_ever_live, 0, sizeof (regs_ever_live));
2559 update_life_info (all_blocks, UPDATE_LIFE_GLOBAL, flags);
2561 /* Clean up. */
2562 sbitmap_free (all_blocks);
2563 free (reg_next_use);
2564 reg_next_use = NULL;
2565 end_alias_analysis ();
2567 if (file)
2568 dump_flow_info (file);
2570 free_basic_block_vars (1);
2573 /* A subroutine of verify_wide_reg, called through for_each_rtx.
2574 Search for REGNO. If found, abort if it is not wider than word_mode. */
2576 static int
2577 verify_wide_reg_1 (px, pregno)
2578 rtx *px;
2579 void *pregno;
2581 rtx x = *px;
2582 int regno = *(int *) pregno;
2584 if (GET_CODE (x) == REG && REGNO (x) == regno)
2586 if (GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD)
2587 abort ();
2588 return 1;
2590 return 0;
2593 /* A subroutine of verify_local_live_at_start. Search through insns
2594 between HEAD and END looking for register REGNO. */
2596 static void
2597 verify_wide_reg (regno, head, end)
2598 int regno;
2599 rtx head, end;
2601 while (1)
2603 if (GET_RTX_CLASS (GET_CODE (head)) == 'i'
2604 && for_each_rtx (&PATTERN (head), verify_wide_reg_1, &regno))
2605 return;
2606 if (head == end)
2607 break;
2608 head = NEXT_INSN (head);
2611 /* We didn't find the register at all. Something's way screwy. */
2612 abort ();
2615 /* A subroutine of update_life_info. Verify that there are no untoward
2616 changes in live_at_start during a local update. */
2618 static void
2619 verify_local_live_at_start (new_live_at_start, bb)
2620 regset new_live_at_start;
2621 basic_block bb;
2623 if (reload_completed)
2625 /* After reload, there are no pseudos, nor subregs of multi-word
2626 registers. The regsets should exactly match. */
2627 if (! REG_SET_EQUAL_P (new_live_at_start, bb->global_live_at_start))
2628 abort ();
2630 else
2632 int i;
2634 /* Find the set of changed registers. */
2635 XOR_REG_SET (new_live_at_start, bb->global_live_at_start);
2637 EXECUTE_IF_SET_IN_REG_SET (new_live_at_start, 0, i,
2639 /* No registers should die. */
2640 if (REGNO_REG_SET_P (bb->global_live_at_start, i))
2641 abort ();
2642 /* Verify that the now-live register is wider than word_mode. */
2643 verify_wide_reg (i, bb->head, bb->end);
2648 /* Updates life information starting with the basic blocks set in BLOCKS.
2650 If LOCAL_ONLY, such as after splitting or peepholeing, we are only
2651 expecting local modifications to basic blocks. If we find extra
2652 registers live at the beginning of a block, then we either killed
2653 useful data, or we have a broken split that wants data not provided.
2654 If we find registers removed from live_at_start, that means we have
2655 a broken peephole that is killing a register it shouldn't.
2657 ??? This is not true in one situation -- when a pre-reload splitter
2658 generates subregs of a multi-word pseudo, current life analysis will
2659 lose the kill. So we _can_ have a pseudo go live. How irritating.
2661 BLOCK_FOR_INSN is assumed to be correct.
2663 PROP_FLAGS should not contain PROP_LOG_LINKS unless the caller sets
2664 up reg_next_use. Including PROP_REG_INFO does not properly refresh
2665 regs_ever_live unless the caller resets it to zero. */
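/* A typical local use, assuming the caller has just modified insns
   within one known block (a sketch only, not taken from this file):

       sbitmap blocks = sbitmap_alloc (n_basic_blocks);
       sbitmap_zero (blocks);
       SET_BIT (blocks, bb->index);
       update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
       sbitmap_free (blocks);

   A global recomputation instead passes UPDATE_LIFE_GLOBAL, or
   UPDATE_LIFE_GLOBAL_RM_NOTES to discard stale death notes first, as
   life_analysis itself does. */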
2667 void
2668 update_life_info (blocks, extent, prop_flags)
2669 sbitmap blocks;
2670 enum update_life_extent extent;
2671 int prop_flags;
2673 regset tmp;
2674 regset_head tmp_head;
2675 int i;
2677 tmp = INITIALIZE_REG_SET (tmp_head);
2679 /* For a global update, we go through the relaxation process again. */
2680 if (extent != UPDATE_LIFE_LOCAL)
2682 calculate_global_regs_live (blocks, blocks,
2683 prop_flags & PROP_SCAN_DEAD_CODE);
2685 /* If asked, remove notes from the blocks we'll update. */
2686 if (extent == UPDATE_LIFE_GLOBAL_RM_NOTES)
2687 count_or_remove_death_notes (blocks, 1);
2690 EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
2692 basic_block bb = BASIC_BLOCK (i);
2694 COPY_REG_SET (tmp, bb->global_live_at_end);
2695 propagate_block (bb, tmp, (regset) NULL, prop_flags);
2697 if (extent == UPDATE_LIFE_LOCAL)
2698 verify_local_live_at_start (tmp, bb);
2701 FREE_REG_SET (tmp);
2703 if (prop_flags & PROP_REG_INFO)
2705 /* The only pseudos that are live at the beginning of the function
2706 are those that were not set anywhere in the function. local-alloc
2707 doesn't know how to handle these correctly, so mark them as not
2708 local to any one basic block. */
2709 EXECUTE_IF_SET_IN_REG_SET (ENTRY_BLOCK_PTR->global_live_at_end,
2710 FIRST_PSEUDO_REGISTER, i,
2711 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
2713 /* We have a problem with any pseudoreg that lives across the setjmp.
2714 ANSI says that if a user variable does not change in value between
2715 the setjmp and the longjmp, then the longjmp preserves it. This
2716 includes longjmp from a place where the pseudo appears dead.
2717 (In principle, the value still exists if it is in scope.)
2718 If the pseudo goes in a hard reg, some other value may occupy
2719 that hard reg where this pseudo is dead, thus clobbering the pseudo.
2720 Conclusion: such a pseudo must not go in a hard reg. */
2721 EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
2722 FIRST_PSEUDO_REGISTER, i,
2724 if (regno_reg_rtx[i] != 0)
2726 REG_LIVE_LENGTH (i) = -1;
2727 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
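/* A sketch of the situation being guarded against:

       jmp_buf env;
       int v = 1;                  -- v lives in a pseudo
       if (setjmp (env))
         return v;                 -- ANSI requires v == 1 here
       ...stretch where v appears dead...
       longjmp (env, 1);

   If v's pseudo were packed into a hard reg, the seemingly dead stretch
   could clobber that reg; the markings above arrange that such pseudos
   never go in a hard reg. */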
2733 /* Free the variables allocated by find_basic_blocks.
2735 KEEP_HEAD_END_P is non-zero if basic_block_info is not to be freed. */
2737 void
2738 free_basic_block_vars (keep_head_end_p)
2739 int keep_head_end_p;
2741 if (basic_block_for_insn)
2743 VARRAY_FREE (basic_block_for_insn);
2744 basic_block_for_insn = NULL;
2747 if (! keep_head_end_p)
2749 clear_edges ();
2750 VARRAY_FREE (basic_block_info);
2751 n_basic_blocks = 0;
2753 ENTRY_BLOCK_PTR->aux = NULL;
2754 ENTRY_BLOCK_PTR->global_live_at_end = NULL;
2755 EXIT_BLOCK_PTR->aux = NULL;
2756 EXIT_BLOCK_PTR->global_live_at_start = NULL;
2760 /* Return nonzero if the destination of SET equals the source. */
2761 static int
2762 set_noop_p (set)
2763 rtx set;
2765 rtx src = SET_SRC (set);
2766 rtx dst = SET_DEST (set);
2768 if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
2770 if (SUBREG_WORD (src) != SUBREG_WORD (dst))
2771 return 0;
2772 src = SUBREG_REG (src);
2773 dst = SUBREG_REG (dst);
2776 return (GET_CODE (src) == REG && GET_CODE (dst) == REG
2777 && REGNO (src) == REGNO (dst));
2780 /* Return nonzero if an insn consists only of SETs, each of which only sets a
2781 value to itself. */
2782 static int
2783 noop_move_p (insn)
2784 rtx insn;
2786 rtx pat = PATTERN (insn);
2788 /* Insns carrying these notes are useful later on. */
2789 if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
2790 return 0;
2792 if (GET_CODE (pat) == SET && set_noop_p (pat))
2793 return 1;
2795 if (GET_CODE (pat) == PARALLEL)
2797 int i;
2798 /* If nothing but SETs of registers to themselves,
2799 this insn can also be deleted. */
2800 for (i = 0; i < XVECLEN (pat, 0); i++)
2802 rtx tem = XVECEXP (pat, 0, i);
2804 if (GET_CODE (tem) == USE
2805 || GET_CODE (tem) == CLOBBER)
2806 continue;
2808 if (GET_CODE (tem) != SET || ! set_noop_p (tem))
2809 return 0;
2812 return 1;
2814 return 0;
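/* Examples of what the two predicates above accept (rtl sketches with
   made-up register numbers):

       (set (reg:SI 3) (reg:SI 3))                      -- a no-op

       (parallel [(set (reg:SI 3) (reg:SI 3))
                  (clobber (reg:SI 4))])                -- also a no-op

   whereas a SUBREG pair only matches when the words agree, so
   (set (subreg:SI (reg:DI 5) 0) (subreg:SI (reg:DI 5) 1)) is not
   considered a no-op. */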
2817 /* Delete any insns that copy a register to itself. */
2819 static void
2820 delete_noop_moves (f)
2821 rtx f;
2823 rtx insn;
2824 for (insn = f; insn; insn = NEXT_INSN (insn))
2826 if (GET_CODE (insn) == INSN && noop_move_p (insn))
2828 PUT_CODE (insn, NOTE);
2829 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2830 NOTE_SOURCE_FILE (insn) = 0;
2835 /* Determine if the stack pointer is constant over the life of the function.
2836 Only useful before prologues have been emitted. */
2838 static void
2839 notice_stack_pointer_modification_1 (x, pat, data)
2840 rtx x;
2841 rtx pat ATTRIBUTE_UNUSED;
2842 void *data ATTRIBUTE_UNUSED;
2844 if (x == stack_pointer_rtx
2845 /* The stack pointer is only modified indirectly as the result
2846 of a push until later in flow. See the comments in rtl.texi
2847 regarding Embedded Side-Effects on Addresses. */
2848 || (GET_CODE (x) == MEM
2849 && (GET_CODE (XEXP (x, 0)) == PRE_DEC
2850 || GET_CODE (XEXP (x, 0)) == PRE_INC
2851 || GET_CODE (XEXP (x, 0)) == POST_DEC
2852 || GET_CODE (XEXP (x, 0)) == POST_INC)
2853 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx))
2854 current_function_sp_is_unchanging = 0;
2857 static void
2858 notice_stack_pointer_modification (f)
2859 rtx f;
2861 rtx insn;
2863 /* Assume that the stack pointer is unchanging if alloca hasn't
2864 been used. */
2865 current_function_sp_is_unchanging = !current_function_calls_alloca;
2866 if (! current_function_sp_is_unchanging)
2867 return;
2869 for (insn = f; insn; insn = NEXT_INSN (insn))
2871 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2873 /* Check if insn modifies the stack pointer. */
2874 note_stores (PATTERN (insn), notice_stack_pointer_modification_1,
2875 NULL);
2876 if (! current_function_sp_is_unchanging)
2877 return;
2882 /* Mark a register in SET. Hard registers in large modes get all
2883 of their component registers set as well. */
2884 static void
2885 mark_reg (reg, xset)
2886 rtx reg;
2887 void *xset;
2889 regset set = (regset) xset;
2890 int regno = REGNO (reg);
2892 if (GET_MODE (reg) == BLKmode)
2893 abort ();
2895 SET_REGNO_REG_SET (set, regno);
2896 if (regno < FIRST_PSEUDO_REGISTER)
2898 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
2899 while (--n > 0)
2900 SET_REGNO_REG_SET (set, regno + n);
2904 /* Mark those regs which are needed at the end of the function as live
2905 at the end of the last basic block. */
2906 static void
2907 mark_regs_live_at_end (set)
2908 regset set;
2910 int i;
2912 /* If exiting needs the right stack value, consider the stack pointer
2913 live at the end of the function. */
2914 if ((HAVE_epilogue && reload_completed)
2915 || ! EXIT_IGNORE_STACK
2916 || (! FRAME_POINTER_REQUIRED
2917 && ! current_function_calls_alloca
2918 && flag_omit_frame_pointer)
2919 || current_function_sp_is_unchanging)
2921 SET_REGNO_REG_SET (set, STACK_POINTER_REGNUM);
2924 /* Mark the frame pointer if needed at the end of the function. If
2925 we end up eliminating it, it will be removed from the live list
2926 of each basic block by reload. */
2928 if (! reload_completed || frame_pointer_needed)
2930 SET_REGNO_REG_SET (set, FRAME_POINTER_REGNUM);
2931 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2932 /* If they are different, also mark the hard frame pointer as live. */
2933 SET_REGNO_REG_SET (set, HARD_FRAME_POINTER_REGNUM);
2934 #endif
2937 #ifdef PIC_OFFSET_TABLE_REGNUM
2938 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
2939 /* Many architectures have a GP register even without flag_pic.
2940 Assume the pic register is not in use, or will be handled by
2941 other means, if it is not fixed. */
2942 if (fixed_regs[PIC_OFFSET_TABLE_REGNUM])
2943 SET_REGNO_REG_SET (set, PIC_OFFSET_TABLE_REGNUM);
2944 #endif
2945 #endif
2947 /* Mark all global registers, and all registers used by the epilogue
2948 as being live at the end of the function since they may be
2949 referenced by our caller. */
2950 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2951 if (global_regs[i]
2952 #ifdef EPILOGUE_USES
2953 || EPILOGUE_USES (i)
2954 #endif
2956 SET_REGNO_REG_SET (set, i);
2958 /* Mark all call-saved registers that we actually used. */
2959 if (HAVE_epilogue && reload_completed)
2961 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2962 if (! call_used_regs[i] && regs_ever_live[i])
2963 SET_REGNO_REG_SET (set, i);
2966 /* Mark function return value. */
2967 diddle_return_value (mark_reg, set);
2970 /* Propagate global life info around the graph of basic blocks. Begin
2971 considering blocks with their corresponding bit set in BLOCKS_IN.
2972 BLOCKS_OUT is set for every block that was changed. */
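/* In dataflow terms, this is the standard backward liveness fixed point:
   for every block BB,

       live_at_end (BB)   = union over successors S of live_at_start (S)
       live_at_start (BB) = use (BB) | (live_at_end (BB) & ~def (BB))

   iterated over a worklist until nothing changes; propagate_block
   applies the second equation insn by insn. */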
2974 static void
2975 calculate_global_regs_live (blocks_in, blocks_out, flags)
2976 sbitmap blocks_in, blocks_out;
2977 int flags;
2979 basic_block *queue, *qhead, *qtail, *qend;
2980 regset tmp, new_live_at_end;
2981 regset_head tmp_head;
2982 regset_head new_live_at_end_head;
2983 int i;
2985 tmp = INITIALIZE_REG_SET (tmp_head);
2986 new_live_at_end = INITIALIZE_REG_SET (new_live_at_end_head);
2988 /* Create a worklist. Allocate an extra slot for ENTRY_BLOCK, and one
2989 because the `head == tail' style test for an empty queue doesn't
2990 work with a full queue. */
2991 queue = (basic_block *) xmalloc ((n_basic_blocks + 2) * sizeof (*queue));
2992 qtail = queue;
2993 qhead = qend = queue + n_basic_blocks + 2;
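/* The queue is used as a ring buffer: blocks are popped at QHEAD, pushed
   at QTAIL, and either pointer wraps from QEND back to QUEUE. Since at
   most n_basic_blocks + 1 blocks (counting ENTRY_BLOCK) are enqueued at
   once, the two spare slots guarantee that QHEAD == QTAIL can only mean
   "empty", never "full". */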
2995 /* Clear out the garbage that might be hanging out in bb->aux. */
2996 for (i = n_basic_blocks - 1; i >= 0; --i)
2997 BASIC_BLOCK (i)->aux = NULL;
2999 /* Queue the blocks set in the initial mask. Do this in reverse block
3000 number order so that the first round is more likely to do useful
3001 work. We use AUX non-null to flag that the block is queued. */
3002 EXECUTE_IF_SET_IN_SBITMAP (blocks_in, 0, i,
3004 basic_block bb = BASIC_BLOCK (i);
3005 *--qhead = bb;
3006 bb->aux = bb;
3009 sbitmap_zero (blocks_out);
3011 while (qhead != qtail)
3013 int rescan, changed;
3014 basic_block bb;
3015 edge e;
3017 bb = *qhead++;
3018 if (qhead == qend)
3019 qhead = queue;
3020 bb->aux = NULL;
3022 /* Begin by propagating live_at_start from the successor blocks. */
3023 CLEAR_REG_SET (new_live_at_end);
3024 for (e = bb->succ; e ; e = e->succ_next)
3026 basic_block sb = e->dest;
3027 IOR_REG_SET (new_live_at_end, sb->global_live_at_start);
3030 if (bb == ENTRY_BLOCK_PTR)
3032 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3033 continue;
3036 /* On our first pass through this block, we'll go ahead and continue.
3037 Recognize first pass by local_set NULL. On subsequent passes, we
3038 get to skip out early if live_at_end wouldn't have changed. */
3040 if (bb->local_set == NULL)
3042 bb->local_set = OBSTACK_ALLOC_REG_SET (function_obstack);
3043 rescan = 1;
3045 else
3047 /* If any bits were removed from live_at_end, we'll have to
3048 rescan the block. This wouldn't be necessary if we had
3049 precalculated local_live, however with PROP_SCAN_DEAD_CODE
3050 local_live is really dependent on live_at_end. */
3051 CLEAR_REG_SET (tmp);
3052 rescan = bitmap_operation (tmp, bb->global_live_at_end,
3053 new_live_at_end, BITMAP_AND_COMPL);
3055 if (! rescan)
3057 /* Find the set of changed bits. Take this opportunity
3058 to exit early if that set turns out to be empty. */
3059 CLEAR_REG_SET (tmp);
3060 changed = bitmap_operation (tmp, bb->global_live_at_end,
3061 new_live_at_end, BITMAP_XOR);
3062 if (! changed)
3063 continue;
3065 /* If any of the changed bits overlap with local_set,
3066 we'll have to rescan the block. Detect overlap by
3067 the AND with ~local_set turning off bits. */
3068 rescan = bitmap_operation (tmp, tmp, bb->local_set,
3069 BITMAP_AND_COMPL);
3073 /* Let our caller know that BB changed enough to require its
3074 death notes updated. */
3075 SET_BIT (blocks_out, bb->index);
3077 if (! rescan)
3079 /* Add to live_at_start the set of all registers in
3080 new_live_at_end that aren't in the old live_at_end. */
3082 bitmap_operation (tmp, new_live_at_end, bb->global_live_at_end,
3083 BITMAP_AND_COMPL);
3084 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3086 changed = bitmap_operation (bb->global_live_at_start,
3087 bb->global_live_at_start,
3088 tmp, BITMAP_IOR);
3089 if (! changed)
3090 continue;
3092 else
3094 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3096 /* Rescan the block insn by insn to turn (a copy of) live_at_end
3097 into live_at_start. */
3098 propagate_block (bb, new_live_at_end, bb->local_set, flags);
3100 /* If live_at_start didn't change, no need to go farther. */
3101 if (REG_SET_EQUAL_P (bb->global_live_at_start, new_live_at_end))
3102 continue;
3104 COPY_REG_SET (bb->global_live_at_start, new_live_at_end);
3107 /* Queue all predecessors of BB so that we may re-examine
3108 their live_at_end. */
3109 for (e = bb->pred; e ; e = e->pred_next)
3111 basic_block pb = e->src;
3112 if (pb->aux == NULL)
3114 *qtail++ = pb;
3115 if (qtail == qend)
3116 qtail = queue;
3117 pb->aux = pb;
3122 FREE_REG_SET (tmp);
3123 FREE_REG_SET (new_live_at_end);
3125 EXECUTE_IF_SET_IN_SBITMAP (blocks_out, 0, i,
3127 basic_block bb = BASIC_BLOCK (i);
3128 FREE_REG_SET (bb->local_set);
3131 free (queue);
3134 /* Subroutines of life analysis. */
3136 /* Allocate the permanent data structures that represent the results
3137 of life analysis. Not static since used also for stupid life analysis. */
3139 void
3140 allocate_bb_life_data ()
3142 register int i;
3144 for (i = 0; i < n_basic_blocks; i++)
3146 basic_block bb = BASIC_BLOCK (i);
3148 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
3149 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);
3152 ENTRY_BLOCK_PTR->global_live_at_end
3153 = OBSTACK_ALLOC_REG_SET (function_obstack);
3154 EXIT_BLOCK_PTR->global_live_at_start
3155 = OBSTACK_ALLOC_REG_SET (function_obstack);
3157 regs_live_at_setjmp = OBSTACK_ALLOC_REG_SET (function_obstack);
3160 void
3161 allocate_reg_life_data ()
3163 int i;
3165 /* Recalculate the register space, in case it has grown. Old style
3166 vector oriented regsets would set regset_{size,bytes} here also. */
3167 allocate_reg_info (max_regno, FALSE, FALSE);
3169 /* Reset all the data we'll collect in propagate_block and its
3170 subroutines. */
3171 for (i = 0; i < max_regno; i++)
3173 REG_N_SETS (i) = 0;
3174 REG_N_REFS (i) = 0;
3175 REG_N_DEATHS (i) = 0;
3176 REG_N_CALLS_CROSSED (i) = 0;
3177 REG_LIVE_LENGTH (i) = 0;
3178 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
3182 /* Compute the registers live at the beginning of basic block BB
3183 from those live at the end.
3185 When called, OLD contains those live at the end.
3186 On return, it contains those live at the beginning.
3187 The insns scanned run from BB's head to its end.
3189 FLAGS selects the operations to perform. On the final pass, which
3190 is not for computing the life info (since that has already been
3191 done) but for acting on it, we delete dead stores, set up the
3192 logical links and dead-variables lists of instructions, and merge
3193 instructions for autoincrement and autodecrement addresses.
3195 SIGNIFICANT, if nonzero, points to a regset in which we store a 1
3196 for each register that is set within the block -- i.e. the block's
3197 local_set, used by calculate_global_regs_live to decide when a
3199 block must be rescanned. */
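/* Equivalently: scanning from bb->end back to bb->head, each insn I
   updates the running set by

       OLD = (OLD - DEAD (I)) | LIVE (I)

   where DEAD (I) holds the regs I sets and LIVE (I) the regs it uses;
   the "Update OLD" step at the bottom of the main loop below is exactly
   this computation. */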
3201 static void
3202 propagate_block (bb, old, significant, flags)
3203 basic_block bb;
3204 regset old;
3205 regset significant;
3206 int flags;
3208 register rtx insn;
3209 rtx prev;
3210 regset live;
3211 regset_head live_head;
3212 regset dead;
3213 regset_head dead_head;
3215 /* Find the loop depth for this block. Ignore loop level changes in the
3216 middle of the basic block -- for register allocation purposes, the
3217 important uses will be in the blocks wholly contained within the loop,
3218 not in the loop pre-header or post-trailer. */
3219 loop_depth = bb->loop_depth;
3221 dead = INITIALIZE_REG_SET (dead_head);
3222 live = INITIALIZE_REG_SET (live_head);
3224 cc0_live = 0;
3226 if (flags & PROP_REG_INFO)
3228 register int i;
3230 /* Process the regs live at the end of the block.
3231 Mark them as not local to any one basic block. */
3232 EXECUTE_IF_SET_IN_REG_SET (old, 0, i,
3234 REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL;
3238 /* Scan the block an insn at a time from end to beginning. */
3240 for (insn = bb->end; ; insn = prev)
3242 prev = PREV_INSN (insn);
3244 if (GET_CODE (insn) == NOTE)
3246 /* If this is a call to `setjmp' et al,
3247 warn if any non-volatile datum is live. */
3249 if ((flags & PROP_REG_INFO)
3250 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
3251 IOR_REG_SET (regs_live_at_setjmp, old);
3254 /* Update the life-status of regs for this insn.
3255 First DEAD gets which regs are set in this insn,
3256 then LIVE gets which regs are used in this insn.
3257 Then the regs live before the insn
3258 are those live after, with DEAD regs turned off,
3259 and then LIVE regs turned on. */
3261 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
3263 register int i;
3264 rtx note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
3265 int insn_is_dead = 0;
3266 int libcall_is_dead = 0;
3268 if (flags & PROP_SCAN_DEAD_CODE)
3270 insn_is_dead = insn_dead_p (PATTERN (insn), old, 0,
3271 REG_NOTES (insn));
3272 libcall_is_dead = (insn_is_dead && note != 0
3273 && libcall_dead_p (PATTERN (insn), old,
3274 note, insn));
3277 /* We almost certainly don't want to delete prologue or epilogue
3278 instructions. Warn about probable compiler lossage. */
3279 if (insn_is_dead
3280 && reload_completed
3281 && (((HAVE_epilogue || HAVE_prologue)
3282 && prologue_epilogue_contains (insn))
3283 || (HAVE_sibcall_epilogue
3284 && sibcall_epilogue_contains (insn))))
3286 if (flags & PROP_KILL_DEAD_CODE)
3288 warning ("ICE: would have deleted prologue/epilogue insn");
3289 if (!inhibit_warnings)
3290 debug_rtx (insn);
3292 libcall_is_dead = insn_is_dead = 0;
3295 /* If an instruction consists of just dead store(s) on final pass,
3296 "delete" it by turning it into a NOTE of type NOTE_INSN_DELETED.
3297 We could really delete it with delete_insn, but that
3298 can cause trouble for first or last insn in a basic block. */
3299 if ((flags & PROP_KILL_DEAD_CODE) && insn_is_dead)
3301 rtx inote;
3302 /* If the insn referred to a label, note that the label is
3303 now less used. */
3304 for (inote = REG_NOTES (insn); inote; inote = XEXP (inote, 1))
3306 if (REG_NOTE_KIND (inote) == REG_LABEL)
3308 rtx label = XEXP (inote, 0);
3309 rtx next;
3310 LABEL_NUSES (label)--;
3312 /* If this label was attached to an ADDR_VEC, it's
3313 safe to delete the ADDR_VEC. In fact, it's pretty
3314 much mandatory to delete it, because the ADDR_VEC may
3315 be referencing labels that no longer exist. */
3316 if (LABEL_NUSES (label) == 0
3317 && (next = next_nonnote_insn (label)) != NULL
3318 && GET_CODE (next) == JUMP_INSN
3319 && (GET_CODE (PATTERN (next)) == ADDR_VEC
3320 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
3322 rtx pat = PATTERN (next);
3323 int diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
3324 int len = XVECLEN (pat, diff_vec_p);
3325 int i;
3326 for (i = 0; i < len; i++)
3327 LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0))--;
3328 PUT_CODE (next, NOTE);
3329 NOTE_LINE_NUMBER (next) = NOTE_INSN_DELETED;
3330 NOTE_SOURCE_FILE (next) = 0;
3335 PUT_CODE (insn, NOTE);
3336 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
3337 NOTE_SOURCE_FILE (insn) = 0;
3339 /* CC0 is now known to be dead. Either this insn used it,
3340 in which case it doesn't anymore, or clobbered it,
3341 so the next insn can't use it. */
3342 cc0_live = 0;
3344 /* If this insn is copying the return value from a library call,
3345 delete the entire library call. */
3346 if (libcall_is_dead)
3348 rtx first = XEXP (note, 0);
3349 rtx p = insn;
3350 while (INSN_DELETED_P (first))
3351 first = NEXT_INSN (first);
3352 while (p != first)
3354 p = PREV_INSN (p);
3355 PUT_CODE (p, NOTE);
3356 NOTE_LINE_NUMBER (p) = NOTE_INSN_DELETED;
3357 NOTE_SOURCE_FILE (p) = 0;
3360 goto flushed;
3363 CLEAR_REG_SET (dead);
3364 CLEAR_REG_SET (live);
3366 /* See if this is an increment or decrement that can be
3367 merged into a following memory address. */
3368 #ifdef AUTO_INC_DEC
3370 register rtx x = single_set (insn);
3372 /* Does this instruction increment or decrement a register? */
3373 if (!reload_completed
3374 && (flags & PROP_AUTOINC)
3375 && x != 0
3376 && GET_CODE (SET_DEST (x)) == REG
3377 && (GET_CODE (SET_SRC (x)) == PLUS
3378 || GET_CODE (SET_SRC (x)) == MINUS)
3379 && XEXP (SET_SRC (x), 0) == SET_DEST (x)
3380 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
3381 /* Ok, look for a following memory ref we can combine with.
3382 If one is found, change the memory ref to a PRE_INC
3383 or PRE_DEC, cancel this insn, and return 1.
3384 Return 0 if nothing has been done. */
3385 && try_pre_increment_1 (insn))
3386 goto flushed;
3388 #endif /* AUTO_INC_DEC */
3390 /* If this is not the final pass, and this insn is copying the
3391 value of a library call and it's dead, don't scan the
3392 insns that perform the library call, so that the call's
3393 arguments are not marked live. */
3394 if (libcall_is_dead)
3396 /* Mark the dest reg as `significant'. */
3397 mark_set_regs (old, dead, PATTERN (insn), NULL_RTX,
3398 significant, flags);
3400 insn = XEXP (note, 0);
3401 prev = PREV_INSN (insn);
3403 else if (GET_CODE (PATTERN (insn)) == SET
3404 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
3405 && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
3406 && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx
3407 && GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == CONST_INT)
3408 /* We have an insn to pop a constant amount off the stack.
3409 (Such insns use PLUS regardless of the direction of the stack,
3410 and any insn to adjust the stack by a constant is always a pop.)
3411 These insns, if not dead stores, have no effect on life. */
3413 else
3415 /* Any regs live at the time of a call instruction
3416 must not go in a register clobbered by calls.
3417 Find all regs now live and record this for them. */
3419 if (GET_CODE (insn) == CALL_INSN
3420 && (flags & PROP_REG_INFO))
3421 EXECUTE_IF_SET_IN_REG_SET (old, 0, i,
3423 REG_N_CALLS_CROSSED (i)++;
3426 /* LIVE gets the regs used in INSN;
3427 DEAD gets those set by it. Dead insns don't make anything
3428 live. */
3430 mark_set_regs (old, dead, PATTERN (insn),
3431 insn, significant, flags);
3433 /* If an insn doesn't use CC0, it becomes dead since we
3434 assume that every insn clobbers it. So show it dead here;
3435 mark_used_regs will set it live if it is referenced. */
3436 cc0_live = 0;
3438 if (! insn_is_dead)
3439 mark_used_regs (old, live, PATTERN (insn), flags, insn);
3441 /* Sometimes we may have inserted something before INSN (such as
3442 a move) when we make an auto-inc. So ensure we will scan
3443 those insns. */
3444 #ifdef AUTO_INC_DEC
3445 prev = PREV_INSN (insn);
3446 #endif
3448 if (! insn_is_dead && GET_CODE (insn) == CALL_INSN)
3450 register int i;
3452 rtx note;
3454 for (note = CALL_INSN_FUNCTION_USAGE (insn);
3455 note;
3456 note = XEXP (note, 1))
3457 if (GET_CODE (XEXP (note, 0)) == USE)
3458 mark_used_regs (old, live, XEXP (XEXP (note, 0), 0),
3459 flags, insn);
3461 /* Each call clobbers all call-clobbered regs that are not
3462 global or fixed. Note that the function-value reg is a
3463 call-clobbered reg, and mark_set_regs has already had
3464 a chance to handle it. */
3466 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3467 if (call_used_regs[i] && ! global_regs[i]
3468 && ! fixed_regs[i])
3470 SET_REGNO_REG_SET (dead, i);
3471 if (significant)
3472 SET_REGNO_REG_SET (significant, i);
3475 /* The stack ptr is used (honorarily) by a CALL insn. */
3476 SET_REGNO_REG_SET (live, STACK_POINTER_REGNUM);
3478 /* Calls may also reference any of the global registers,
3479 so they are made live. */
3480 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3481 if (global_regs[i])
3482 mark_used_regs (old, live,
3483 gen_rtx_REG (reg_raw_mode[i], i),
3484 flags, insn);
3486 /* Calls also clobber memory. */
3487 free_EXPR_LIST_list (&mem_set_list);
3490 /* Update OLD for the registers used or set. */
3491 AND_COMPL_REG_SET (old, dead);
3492 IOR_REG_SET (old, live);
3496 /* On the final pass, update counts of how many insns each
3497 reg is live in. */
3498 if (flags & PROP_REG_INFO)
3499 EXECUTE_IF_SET_IN_REG_SET (old, 0, i, { REG_LIVE_LENGTH (i)++; });
3501 flushed:
3502 if (insn == bb->head)
3503 break;
3506 FREE_REG_SET (dead);
3507 FREE_REG_SET (live);
3508 free_EXPR_LIST_list (&mem_set_list);
3511 /* Return 1 if X (the body of an insn, or part of it) is just dead stores
3512 (SET expressions whose destinations are registers dead after the insn).
3513 NEEDED is the regset that says which regs are alive after the insn.
3515 Unless CALL_OK is non-zero, an insn is needed if it contains a CALL.
3517 If X is the entire body of an insn, NOTES contains the reg notes
3518 pertaining to the insn. */
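/* For example (rtl sketches with made-up register numbers):

       (set (reg 117) (plus (reg 118) (reg 119)))

   is dead when reg 117 is absent from NEEDED (and is not the stack
   pointer, frame pointer, or a global register), while

       (set (mem (reg 120)) (reg 121))

   is dead only when an identical store to the same location appears
   later in the block, i.e. is already on mem_set_list as we walk the
   block backward. */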
3520 static int
3521 insn_dead_p (x, needed, call_ok, notes)
3522 rtx x;
3523 regset needed;
3524 int call_ok;
3525 rtx notes ATTRIBUTE_UNUSED;
3527 enum rtx_code code = GET_CODE (x);
3529 #ifdef AUTO_INC_DEC
3530 /* If flow is invoked after reload, we must take existing AUTO_INC
3531 expressions into account. */
3532 if (reload_completed)
3534 for ( ; notes; notes = XEXP (notes, 1))
3536 if (REG_NOTE_KIND (notes) == REG_INC)
3538 int regno = REGNO (XEXP (notes, 0));
3540 /* Don't delete insns to set global regs. */
3541 if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
3542 || REGNO_REG_SET_P (needed, regno))
3543 return 0;
3547 #endif
3549 /* If setting something that's a reg or part of one,
3550 see if that register's altered value will be live. */
3552 if (code == SET)
3554 rtx r = SET_DEST (x);
3556 #ifdef HAVE_cc0
3557 if (GET_CODE (r) == CC0)
3558 return ! cc0_live;
3559 #endif
3561 /* A SET that is a subroutine call cannot be dead. */
3562 if (GET_CODE (SET_SRC (x)) == CALL)
3564 if (! call_ok)
3565 return 0;
3568 /* Don't eliminate loads from volatile memory or volatile asms. */
3569 else if (volatile_refs_p (SET_SRC (x)))
3570 return 0;
3572 if (GET_CODE (r) == MEM)
3574 rtx temp;
3576 if (MEM_VOLATILE_P (r))
3577 return 0;
3579 /* Walk the set of memory locations we are currently tracking
3580 and see if one is an identical match to this memory location.
3581 If so, this memory write is dead (remember, we're walking
3582 backwards from the end of the block to the start). */
3583 temp = mem_set_list;
3584 while (temp)
3586 if (rtx_equal_p (XEXP (temp, 0), r))
3587 return 1;
3588 temp = XEXP (temp, 1);
3591 else
3593 while (GET_CODE (r) == SUBREG
3594 || GET_CODE (r) == STRICT_LOW_PART
3595 || GET_CODE (r) == ZERO_EXTRACT)
3596 r = XEXP (r, 0);
3598 if (GET_CODE (r) == REG)
3600 int regno = REGNO (r);
3602 /* Obvious. */
3603 if (REGNO_REG_SET_P (needed, regno))
3604 return 0;
3606 /* If this is a hard register, verify that subsequent
3607 words are not needed. */
3608 if (regno < FIRST_PSEUDO_REGISTER)
3610 int n = HARD_REGNO_NREGS (regno, GET_MODE (r));
3612 while (--n > 0)
3613 if (REGNO_REG_SET_P (needed, regno+n))
3614 return 0;
3617 /* Don't delete insns to set global regs. */
3618 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
3619 return 0;
3621 /* Make sure insns to set the stack pointer aren't deleted. */
3622 if (regno == STACK_POINTER_REGNUM)
3623 return 0;
3625 /* Make sure insns to set the frame pointer aren't deleted. */
3626 if (regno == FRAME_POINTER_REGNUM
3627 && (! reload_completed || frame_pointer_needed))
3628 return 0;
3629 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3630 if (regno == HARD_FRAME_POINTER_REGNUM
3631 && (! reload_completed || frame_pointer_needed))
3632 return 0;
3633 #endif
3635 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3636 /* Make sure insns to set arg pointer are never deleted
3637 (if the arg pointer isn't fixed, there will be a USE
3638 for it, so we can treat it normally). */
3639 if (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
3640 return 0;
3641 #endif
3643 /* Otherwise, the set is dead. */
3644 return 1;
3649 /* If performing several activities, insn is dead if each activity
3650 is individually dead. Also, CLOBBERs and USEs can be ignored; a
3651 CLOBBER or USE that's inside a PARALLEL doesn't make the insn
3652 worth keeping. */
3653 else if (code == PARALLEL)
3655 int i = XVECLEN (x, 0);
3657 for (i--; i >= 0; i--)
3658 if (GET_CODE (XVECEXP (x, 0, i)) != CLOBBER
3659 && GET_CODE (XVECEXP (x, 0, i)) != USE
3660 && ! insn_dead_p (XVECEXP (x, 0, i), needed, call_ok, NULL_RTX))
3661 return 0;
3663 return 1;
3666 /* A CLOBBER of a pseudo-register that is dead serves no purpose. That
3667 is not necessarily true for hard registers. */
3668 else if (code == CLOBBER && GET_CODE (XEXP (x, 0)) == REG
3669 && REGNO (XEXP (x, 0)) >= FIRST_PSEUDO_REGISTER
3670 && ! REGNO_REG_SET_P (needed, REGNO (XEXP (x, 0))))
3671 return 1;
3673 /* We do not check other CLOBBER or USE here. An insn consisting of just
3674 a CLOBBER or just a USE should not be deleted. */
3675 return 0;
3678 /* If X is the pattern of the last insn in a libcall, and assuming X is dead,
3679 return 1 if the entire library call is dead.
3680 This is true if X copies a register (hard or pseudo)
3681 and if the hard return reg of the call insn is dead.
3682 (The caller should have tested the destination of X already for death.)
3684 If this insn doesn't just copy a register, then we don't
3685 have an ordinary libcall. In that case, cse could not have
3686 managed to substitute the source for the dest later on,
3687 so we can assume the libcall is dead.
3689 NEEDED is the bit vector of pseudoregs live before this insn.
3690 NOTE is the REG_RETVAL note of the insn. INSN is the insn itself. */
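/* The shape being recognized, roughly (register numbers made up):

       (insn ...)                          <- start, with a REG_LIBCALL note
       (call_insn ...)                     <- sets the hard return reg
       (insn (set (reg 122) (reg 0)) ...)  <- INSN, carrying the REG_RETVAL
                                              note pointing back at the start

   Given that the copy into reg 122 is dead, the SET whose source is the
   CALL is then tested with insn_dead_p to see whether the hard return
   reg is dead as well. */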
3692 static int
3693 libcall_dead_p (x, needed, note, insn)
3694 rtx x;
3695 regset needed;
3696 rtx note;
3697 rtx insn;
3699 register RTX_CODE code = GET_CODE (x);
3701 if (code == SET)
3703 register rtx r = SET_SRC (x);
3704 if (GET_CODE (r) == REG)
3706 rtx call = XEXP (note, 0);
3707 rtx call_pat;
3708 register int i;
3710 /* Find the call insn. */
3711 while (call != insn && GET_CODE (call) != CALL_INSN)
3712 call = NEXT_INSN (call);
3714 /* If there is none, do nothing special,
3715 since ordinary death handling can understand these insns. */
3716 if (call == insn)
3717 return 0;
3719 /* See if the hard reg holding the value is dead.
3720 If this is a PARALLEL, find the call within it. */
3721 call_pat = PATTERN (call);
3722 if (GET_CODE (call_pat) == PARALLEL)
3724 for (i = XVECLEN (call_pat, 0) - 1; i >= 0; i--)
3725 if (GET_CODE (XVECEXP (call_pat, 0, i)) == SET
3726 && GET_CODE (SET_SRC (XVECEXP (call_pat, 0, i))) == CALL)
3727 break;
3729 /* This may be a library call that is returning a value
3730 via invisible pointer. Do nothing special, since
3731 ordinary death handling can understand these insns. */
3732 if (i < 0)
3733 return 0;
3735 call_pat = XVECEXP (call_pat, 0, i);
3738 return insn_dead_p (call_pat, needed, 1, REG_NOTES (call));
3741 return 1;
3744 /* Return 1 if register REGNO was used before it was set, i.e. if it is
3745 live at function entry. Don't count global register variables, variables
3746 in registers that can be used for function arg passing, or variables in
3747 fixed hard registers. */
3749 int
3750 regno_uninitialized (regno)
3751 int regno;
3753 if (n_basic_blocks == 0
3754 || (regno < FIRST_PSEUDO_REGISTER
3755 && (global_regs[regno]
3756 || fixed_regs[regno]
3757 || FUNCTION_ARG_REGNO_P (regno))))
3758 return 0;
3760 return REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno);
3763 /* 1 if register REGNO was alive at a place where `setjmp' was called
3764 and was set more than once or is an argument.
3765 Such regs may be clobbered by `longjmp'. */
3767 int
3768 regno_clobbered_at_setjmp (regno)
3769 int regno;
3771 if (n_basic_blocks == 0)
3772 return 0;
3774 return ((REG_N_SETS (regno) > 1
3775 || REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno))
3776 && REGNO_REG_SET_P (regs_live_at_setjmp, regno));
3779 /* INSN references memory, possibly using autoincrement addressing modes.
3780 Find any entries on the mem_set_list that need to be invalidated due
3781 to an address change. */
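/* E.g. if INSN carries a REG_INC note for (reg 3), a tracked entry such
   as (mem (plus (reg 3) (const_int 4))) mentions reg 3 and is spliced
   out below, since the auto-increment changed the address that entry
   named. */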
3782 static void
3783 invalidate_mems_from_autoinc (insn)
3784 rtx insn;
3786 rtx note;
3787 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
3789 if (REG_NOTE_KIND (note) == REG_INC)
3791 rtx temp = mem_set_list;
3792 rtx prev = NULL_RTX;
3793 rtx next;
3795 while (temp)
3797 next = XEXP (temp, 1);
3798 if (reg_overlap_mentioned_p (XEXP (note, 0), XEXP (temp, 0)))
3800 /* Splice temp out of list. */
3801 if (prev)
3802 XEXP (prev, 1) = next;
3803 else
3804 mem_set_list = next;
3805 free_EXPR_LIST_node (temp);
3807 else
3808 prev = temp;
3809 temp = next;
3815 /* Process the registers that are set within X. Their bits are set to
3816 1 in the regset DEAD, because they are dead prior to this insn.
3818 If INSN is nonzero, it is the insn being processed.
3820 FLAGS is the set of operations to perform. */
3822 static void
3823 mark_set_regs (needed, dead, x, insn, significant, flags)
3824 regset needed;
3825 regset dead;
3826 rtx x;
3827 rtx insn;
3828 regset significant;
3829 int flags;
3831 register RTX_CODE code = GET_CODE (x);
3833 if (code == SET || code == CLOBBER)
3834 mark_set_1 (needed, dead, x, insn, significant, flags);
3835 else if (code == PARALLEL)
3837 register int i;
3838 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
3840 code = GET_CODE (XVECEXP (x, 0, i));
3841 if (code == SET || code == CLOBBER)
3842 mark_set_1 (needed, dead, XVECEXP (x, 0, i), insn,
3843 significant, flags);
3848 /* Process a single SET rtx, X. */
3850 static void
3851 mark_set_1 (needed, dead, x, insn, significant, flags)
3852 regset needed;
3853 regset dead;
3854 rtx x;
3855 rtx insn;
3856 regset significant;
3857 int flags;
3859 register int regno = -1;
3860 register rtx reg = SET_DEST (x);
3862 /* Some targets place small structures in registers for
3863 return values of functions. We have to detect this
3864 case specially here to get correct flow information. */
3865 if (GET_CODE (reg) == PARALLEL
3866 && GET_MODE (reg) == BLKmode)
3868 register int i;
3870 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
3871 mark_set_1 (needed, dead, XVECEXP (reg, 0, i), insn,
3872 significant, flags);
3873 return;
3876 /* Modifying just one hardware register of a multi-reg value
3877 or just a byte field of a register
3878 does not mean the value from before this insn is now dead.
3879 But it does mean liveness of that register at the end of the block
3880 is significant.
3882 Within mark_set_1, however, we treat it as if the register is
3883 indeed modified. mark_used_regs will, however, also treat this
3884 register as being used. Thus, we treat these insns as setting a
3885 new value for the register as a function of its old value. This
3886 causes LOG_LINKS to be made appropriately, and this will help combine. */
3888 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
3889 || GET_CODE (reg) == SIGN_EXTRACT
3890 || GET_CODE (reg) == STRICT_LOW_PART)
3891 reg = XEXP (reg, 0);
3893 /* If this set is a MEM, then it kills any aliased writes.
3894 If this set is a REG, then it kills any MEMs which use the reg. */
3895 if (flags & PROP_SCAN_DEAD_CODE)
3897 if (GET_CODE (reg) == MEM
3898 || GET_CODE (reg) == REG)
3900 rtx temp = mem_set_list;
3901 rtx prev = NULL_RTX;
3902 rtx next;
3904 while (temp)
3906 next = XEXP (temp, 1);
3907 if ((GET_CODE (reg) == MEM
3908 && output_dependence (XEXP (temp, 0), reg))
3909 || (GET_CODE (reg) == REG
3910 && reg_overlap_mentioned_p (reg, XEXP (temp, 0))))
3912 /* Splice this entry out of the list. */
3913 if (prev)
3914 XEXP (prev, 1) = next;
3915 else
3916 mem_set_list = next;
3917 free_EXPR_LIST_node (temp);
3919 else
3920 prev = temp;
3921 temp = next;
3925 /* If the memory reference had embedded side effects (autoincrement
3926 address modes), then we may need to kill some entries on the
3927 memory set list. */
3928 if (insn && GET_CODE (reg) == MEM)
3929 invalidate_mems_from_autoinc (insn);
3931 if (GET_CODE (reg) == MEM && ! side_effects_p (reg)
3932 /* We do not know the size of a BLKmode store, so we do not track
3933 them for redundant store elimination. */
3934 && GET_MODE (reg) != BLKmode
3935 /* There are no REG_INC notes for SP, so we can't assume we'll see
3936 everything that invalidates it. To be safe, don't eliminate any
3937 stores through SP; none of them should be redundant anyway. */
3938 && ! reg_mentioned_p (stack_pointer_rtx, reg))
3939 mem_set_list = alloc_EXPR_LIST (0, reg, mem_set_list);
3942 if (GET_CODE (reg) == REG
3943 && (regno = REGNO (reg),
3944 ! (regno == FRAME_POINTER_REGNUM
3945 && (! reload_completed || frame_pointer_needed)))
3946 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3947 && ! (regno == HARD_FRAME_POINTER_REGNUM
3948 && (! reload_completed || frame_pointer_needed))
3949 #endif
3950 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3951 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
3952 #endif
3953 && ! (regno < FIRST_PSEUDO_REGISTER && global_regs[regno]))
3954 /* && regno != STACK_POINTER_REGNUM) -- let's try without this. */
3956 int some_needed = REGNO_REG_SET_P (needed, regno);
3957 int some_not_needed = ! some_needed;
3959 /* Mark it as a significant register for this basic block. */
3960 if (significant)
3961 SET_REGNO_REG_SET (significant, regno);
3963 /* Mark it as dead before this insn. */
3964 SET_REGNO_REG_SET (dead, regno);
3966 /* A hard reg in a wide mode may really be multiple registers.
3967 If so, mark all of them just like the first. */
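/* Illustrative example (not from the original sources): on a typical
   32-bit target a DImode value living in hard reg 0 also occupies
   hard reg 1, so HARD_REGNO_NREGS would return 2 and both regs get
   marked below.  */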
3968 if (regno < FIRST_PSEUDO_REGISTER)
3970 int n;
3972 /* Nothing below is needed for the stack pointer; get out asap.
3973 E.g., log links aren't needed, since combine won't use them. */
3974 if (regno == STACK_POINTER_REGNUM)
3975 return;
3977 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
3978 while (--n > 0)
3980 int regno_n = regno + n;
3981 int needed_regno = REGNO_REG_SET_P (needed, regno_n);
3982 if (significant)
3983 SET_REGNO_REG_SET (significant, regno_n);
3985 SET_REGNO_REG_SET (dead, regno_n);
3986 some_needed |= needed_regno;
3987 some_not_needed |= ! needed_regno;
3991 /* Additional data to record if this is the final pass. */
3992 if (flags & (PROP_LOG_LINKS | PROP_REG_INFO
3993 | PROP_DEATH_NOTES | PROP_AUTOINC))
3995 register rtx y;
3996 register int blocknum = BLOCK_NUM (insn);
3998 y = NULL_RTX;
3999 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4000 y = reg_next_use[regno];
4002 /* If this is a hard reg, record this function uses the reg. */
4004 if (regno < FIRST_PSEUDO_REGISTER)
4006 register int i;
4007 int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (reg));
4009 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4010 for (i = regno; i < endregno; i++)
4012 /* The next use is no longer "next", since a store
4013 intervenes. */
4014 reg_next_use[i] = 0;
4017 if (flags & PROP_REG_INFO)
4018 for (i = regno; i < endregno; i++)
4020 regs_ever_live[i] = 1;
4021 REG_N_SETS (i)++;
4024 else
4026 /* The next use is no longer "next", since a store
4027 intervenes. */
4028 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4029 reg_next_use[regno] = 0;
4031 /* Keep track of which basic blocks each reg appears in. */
4033 if (flags & PROP_REG_INFO)
4035 if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
4036 REG_BASIC_BLOCK (regno) = blocknum;
4037 else if (REG_BASIC_BLOCK (regno) != blocknum)
4038 REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
4040 /* Count (weighted) references, stores, etc. This counts a
4041 register twice if it is modified, but that is correct. */
4042 REG_N_SETS (regno)++;
4043 REG_N_REFS (regno) += loop_depth + 1;
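/* Illustrative arithmetic (not from the original sources): a set at
   loop nesting depth 0 adds 1 to REG_N_REFS, one at depth 2 adds 3,
   so registers used in deeper loops look more attractive to the
   register allocator.  */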
4045 /* The insns where a reg is live are normally counted
4046 elsewhere, but we want the count to include the insn
4047 where the reg is set, and the normal counting mechanism
4048 would not count it. */
4049 REG_LIVE_LENGTH (regno)++;
4053 if (! some_not_needed)
4055 if (flags & PROP_LOG_LINKS)
4057 /* Make a logical link from the next following insn
4058 that uses this register, back to this insn.
4059 The following insns have already been processed.
4061 We don't build a LOG_LINK for hard registers contained
4062 in ASM_OPERANDs. If these registers get replaced,
4063 we might wind up changing the semantics of the insn,
4064 even if reload can make what appear to be valid
4065 assignments later. */
4066 if (y && (BLOCK_NUM (y) == blocknum)
4067 && (regno >= FIRST_PSEUDO_REGISTER
4068 || asm_noperands (PATTERN (y)) < 0))
4069 LOG_LINKS (y) = alloc_INSN_LIST (insn, LOG_LINKS (y));
4072 else if (! some_needed)
4074 if (flags & PROP_REG_INFO)
4075 REG_N_DEATHS (REGNO (reg))++;
4077 if (flags & PROP_DEATH_NOTES)
4079 /* Note that dead stores have already been deleted
4080 when possible. If we get here, we have found a
4081 dead store that cannot be eliminated (because the
4082 same insn does something useful). Indicate this
4083 by marking the reg being set as dying here. */
4084 REG_NOTES (insn)
4085 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
4088 else
4090 if (flags & PROP_DEATH_NOTES)
4092 /* This is a case where we have a multi-word hard register
4093 and some, but not all, of the words of the register are
4094 needed in subsequent insns. Write REG_UNUSED notes
4095 for those parts that were not needed. This case should
4096 be rare. */
4098 int i;
4100 for (i = HARD_REGNO_NREGS (regno, GET_MODE (reg)) - 1;
4101 i >= 0; i--)
4102 if (!REGNO_REG_SET_P (needed, regno + i))
4103 REG_NOTES (insn)
4104 = (alloc_EXPR_LIST
4105 (REG_UNUSED,
4106 gen_rtx_REG (reg_raw_mode[regno + i], regno + i),
4107 REG_NOTES (insn)));
4112 else if (GET_CODE (reg) == REG)
4114 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4115 reg_next_use[regno] = 0;
4118 /* If this is the last pass and this is a SCRATCH, show it will be dying
4119 here and count it. */
4120 else if (GET_CODE (reg) == SCRATCH)
4122 if (flags & PROP_DEATH_NOTES)
4123 REG_NOTES (insn)
4124 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
4128 #ifdef AUTO_INC_DEC
4130 /* X is a MEM found in INSN. See if we can convert it into an auto-increment
4131 reference. */
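/* An illustrative sketch of the transformation (not from the original
   sources).  Given, in the same basic block:
     insn:  ... (mem:SI (reg:SI 100)) ...
     incr:  (set (reg:SI 100) (plus:SI (reg:SI 100) (const_int 4)))
   the MEM can become (mem:SI (post_inc:SI (reg:SI 100))) and the
   increment insn is then deleted or reduced to a copy.  */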
4133 static void
4134 find_auto_inc (needed, x, insn)
4135 regset needed;
4136 rtx x;
4137 rtx insn;
4139 rtx addr = XEXP (x, 0);
4140 HOST_WIDE_INT offset = 0;
4141 rtx set;
4143 /* Here we detect use of an index register which might be good for
4144 postincrement, postdecrement, preincrement, or predecrement. */
4146 if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
4147 offset = INTVAL (XEXP (addr, 1)), addr = XEXP (addr, 0);
4149 if (GET_CODE (addr) == REG)
4151 register rtx y;
4152 register int size = GET_MODE_SIZE (GET_MODE (x));
4153 rtx use;
4154 rtx incr;
4155 int regno = REGNO (addr);
4157 /* Is the next use an increment that might make auto-increment? */
4158 if ((incr = reg_next_use[regno]) != 0
4159 && (set = single_set (incr)) != 0
4160 && GET_CODE (set) == SET
4161 && BLOCK_NUM (incr) == BLOCK_NUM (insn)
4162 /* Can't add side effects to jumps; if reg is spilled and
4163 reloaded, there's no way to store back the altered value. */
4164 && GET_CODE (insn) != JUMP_INSN
4165 && (y = SET_SRC (set), GET_CODE (y) == PLUS)
4166 && XEXP (y, 0) == addr
4167 && GET_CODE (XEXP (y, 1)) == CONST_INT
4168 && ((HAVE_POST_INCREMENT
4169 && (INTVAL (XEXP (y, 1)) == size && offset == 0))
4170 || (HAVE_POST_DECREMENT
4171 && (INTVAL (XEXP (y, 1)) == - size && offset == 0))
4172 || (HAVE_PRE_INCREMENT
4173 && (INTVAL (XEXP (y, 1)) == size && offset == size))
4174 || (HAVE_PRE_DECREMENT
4175 && (INTVAL (XEXP (y, 1)) == - size && offset == - size)))
4176 /* Make sure this reg appears only once in this insn. */
4177 && (use = find_use_as_address (PATTERN (insn), addr, offset),
4178 use != 0 && use != (rtx) 1))
4180 rtx q = SET_DEST (set);
4181 enum rtx_code inc_code = (INTVAL (XEXP (y, 1)) == size
4182 ? (offset ? PRE_INC : POST_INC)
4183 : (offset ? PRE_DEC : POST_DEC));
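/* Summary of the cases accepted by the test above (an illustrative
   recap, not from the original sources), where delta is the constant
   added by INCR:
     delta == size,  offset == 0      =>  POST_INC  (*p++)
     delta == -size, offset == 0      =>  POST_DEC  (*p--)
     delta == size,  offset == size   =>  PRE_INC   (*++p)
     delta == -size, offset == -size  =>  PRE_DEC   (*--p)  */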
4185 if (dead_or_set_p (incr, addr))
4187 /* This is the simple case. Try to make the auto-inc. If
4188 we can't, we are done. Otherwise, we will do any
4189 needed updates below. */
4190 if (! validate_change (insn, &XEXP (x, 0),
4191 gen_rtx_fmt_e (inc_code, Pmode, addr),
4192 0))
4193 return;
4195 else if (GET_CODE (q) == REG
4196 /* PREV_INSN used here to check the semi-open interval
4197 [insn,incr). */
4198 && ! reg_used_between_p (q, PREV_INSN (insn), incr)
4199 /* We must also check for sets of q as q may be
4200 a call clobbered hard register and there may
4201 be a call between PREV_INSN (insn) and incr. */
4202 && ! reg_set_between_p (q, PREV_INSN (insn), incr))
4204 /* We have *p followed sometime later by q = p+size.
4205 Both p and q must be live afterward,
4206 and q is not used between INSN and its assignment.
4207 Change it to q = p, ...*q..., q = q+size.
4208 Then fall into the usual case. */
4209 rtx insns, temp;
4210 basic_block bb;
4212 start_sequence ();
4213 emit_move_insn (q, addr);
4214 insns = get_insns ();
4215 end_sequence ();
4217 bb = BLOCK_FOR_INSN (insn);
4218 for (temp = insns; temp; temp = NEXT_INSN (temp))
4219 set_block_for_insn (temp, bb);
4221 /* If we can't make the auto-inc, or can't make the
4222 replacement into Y, exit. There's no point in making
4223 the change below if we can't do the auto-inc and doing
4224 so is not correct in the pre-inc case. */
4226 validate_change (insn, &XEXP (x, 0),
4227 gen_rtx_fmt_e (inc_code, Pmode, q),
4228 1);
4229 validate_change (incr, &XEXP (y, 0), q, 1);
4230 if (! apply_change_group ())
4231 return;
4233 /* We now know we'll be doing this change, so emit the
4234 new insn(s) and do the updates. */
4235 emit_insns_before (insns, insn);
4237 if (BLOCK_FOR_INSN (insn)->head == insn)
4238 BLOCK_FOR_INSN (insn)->head = insns;
4240 /* INCR will become a NOTE and INSN won't contain a
4241 use of ADDR. If a use of ADDR was just placed in
4242 the insn before INSN, make that the next use.
4243 Otherwise, invalidate it. */
4244 if (GET_CODE (PREV_INSN (insn)) == INSN
4245 && GET_CODE (PATTERN (PREV_INSN (insn))) == SET
4246 && SET_SRC (PATTERN (PREV_INSN (insn))) == addr)
4247 reg_next_use[regno] = PREV_INSN (insn);
4248 else
4249 reg_next_use[regno] = 0;
4251 addr = q;
4252 regno = REGNO (q);
4254 /* REGNO is now used in INCR which is below INSN, but
4255 it previously wasn't live here. If we don't mark
4256 it as needed, we'll put a REG_DEAD note for it
4257 on this insn, which is incorrect. */
4258 SET_REGNO_REG_SET (needed, regno);
4260 /* If there are any calls between INSN and INCR, show
4261 that REGNO now crosses them. */
4262 for (temp = insn; temp != incr; temp = NEXT_INSN (temp))
4263 if (GET_CODE (temp) == CALL_INSN)
4264 REG_N_CALLS_CROSSED (regno)++;
4266 else
4267 return;
4269 /* If we haven't returned, it means we were able to make the
4270 auto-inc, so update the status. First, record that this insn
4271 has an implicit side effect. */
4273 REG_NOTES (insn)
4274 = alloc_EXPR_LIST (REG_INC, addr, REG_NOTES (insn));
4276 /* Modify the old increment-insn to simply copy
4277 the already-incremented value of our register. */
4278 if (! validate_change (incr, &SET_SRC (set), addr, 0))
4279 abort ();
4281 /* If that makes it a no-op (copying the register into itself) delete
4282 it so it won't appear to be a "use" and a "set" of this
4283 register. */
4284 if (SET_DEST (set) == addr)
4286 PUT_CODE (incr, NOTE);
4287 NOTE_LINE_NUMBER (incr) = NOTE_INSN_DELETED;
4288 NOTE_SOURCE_FILE (incr) = 0;
4291 if (regno >= FIRST_PSEUDO_REGISTER)
4293 /* Count an extra reference to the reg. When a reg is
4294 incremented, spilling it is worse, so we want to make
4295 that less likely. */
4296 REG_N_REFS (regno) += loop_depth + 1;
4298 /* Count the increment as a setting of the register,
4299 even though it isn't a SET in rtl. */
4300 REG_N_SETS (regno)++;
4305 #endif /* AUTO_INC_DEC */
4307 /* Scan expression X and store a 1-bit in LIVE for each reg it uses.
4308 This is done assuming the registers needed from X
4309 are those that have 1-bits in NEEDED.
4311 FLAGS is the set of enabled operations.
4313 INSN is the containing instruction. If INSN is dead, this function is not
4314 called. */
4316 static void
4317 mark_used_regs (needed, live, x, flags, insn)
4318 regset needed;
4319 regset live;
4320 rtx x;
4321 int flags;
4322 rtx insn;
4324 register RTX_CODE code;
4325 register int regno;
4326 int i;
4328 retry:
4329 code = GET_CODE (x);
4330 switch (code)
4332 case LABEL_REF:
4333 case SYMBOL_REF:
4334 case CONST_INT:
4335 case CONST:
4336 case CONST_DOUBLE:
4337 case PC:
4338 case ADDR_VEC:
4339 case ADDR_DIFF_VEC:
4340 return;
4342 #ifdef HAVE_cc0
4343 case CC0:
4344 cc0_live = 1;
4345 return;
4346 #endif
4348 case CLOBBER:
4349 /* If we are clobbering a MEM, mark any registers inside the address
4350 as being used. */
4351 if (GET_CODE (XEXP (x, 0)) == MEM)
4352 mark_used_regs (needed, live, XEXP (XEXP (x, 0), 0), flags, insn);
4353 return;
4355 case MEM:
4356 /* Don't bother watching stores to mems if this is not the
4357 final pass. We'll not be deleting dead stores this round. */
4358 if (flags & PROP_SCAN_DEAD_CODE)
4360 /* Invalidate the data for the last MEM stored, but only if MEM is
4361 something that can be stored into. */
4362 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
4363 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
4364 ; /* needn't clear the memory set list */
4365 else
4367 rtx temp = mem_set_list;
4368 rtx prev = NULL_RTX;
4369 rtx next;
4371 while (temp)
4373 next = XEXP (temp, 1);
4374 if (anti_dependence (XEXP (temp, 0), x))
4376 /* Splice temp out of the list. */
4377 if (prev)
4378 XEXP (prev, 1) = next;
4379 else
4380 mem_set_list = next;
4381 free_EXPR_LIST_node (temp);
4383 else
4384 prev = temp;
4385 temp = next;
4389 /* If the memory reference had embedded side effects (autoincrement
4390 address modes), then we may need to kill some entries on the
4391 memory set list. */
4392 if (insn)
4393 invalidate_mems_from_autoinc (insn);
4396 #ifdef AUTO_INC_DEC
4397 if (flags & PROP_AUTOINC)
4398 find_auto_inc (needed, x, insn);
4399 #endif
4400 break;
4402 case SUBREG:
4403 if (GET_CODE (SUBREG_REG (x)) == REG
4404 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER
4405 && (GET_MODE_SIZE (GET_MODE (x))
4406 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
4407 REG_CHANGES_SIZE (REGNO (SUBREG_REG (x))) = 1;
4409 /* While we're here, optimize this case. */
4410 x = SUBREG_REG (x);
4412 /* In case the SUBREG is not of a register, don't optimize. */
4413 if (GET_CODE (x) != REG)
4415 mark_used_regs (needed, live, x, flags, insn);
4416 return;
4419 /* ... fall through ... */
4421 case REG:
4422 /* We see a register being used rather than set
4423 => mark it as needed. */
4425 regno = REGNO (x);
4427 int some_needed = REGNO_REG_SET_P (needed, regno);
4428 int some_not_needed = ! some_needed;
4430 SET_REGNO_REG_SET (live, regno);
4432 /* A hard reg in a wide mode may really be multiple registers.
4433 If so, mark all of them just like the first. */
4434 if (regno < FIRST_PSEUDO_REGISTER)
4436 int n;
4438 /* For stack ptr or fixed arg pointer,
4439 nothing below can be necessary, so waste no more time. */
4440 if (regno == STACK_POINTER_REGNUM
4441 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4442 || (regno == HARD_FRAME_POINTER_REGNUM
4443 && (! reload_completed || frame_pointer_needed))
4444 #endif
4445 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4446 || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
4447 #endif
4448 || (regno == FRAME_POINTER_REGNUM
4449 && (! reload_completed || frame_pointer_needed)))
4451 /* If this is a register we are going to try to eliminate,
4452 don't mark it live here. If we are successful in
4453 eliminating it, it need not be live unless it is used for
4454 pseudos, in which case it will have been set live when
4455 it was allocated to the pseudos. If the register will not
4456 be eliminated, reload will set it live at that point. */
4458 if ((flags & PROP_REG_INFO)
4459 && ! TEST_HARD_REG_BIT (elim_reg_set, regno))
4460 regs_ever_live[regno] = 1;
4461 return;
4463 /* No death notes for global register variables;
4464 their values are live after this function exits. */
4465 if (global_regs[regno])
4467 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4468 reg_next_use[regno] = insn;
4469 return;
4472 n = HARD_REGNO_NREGS (regno, GET_MODE (x));
4473 while (--n > 0)
4475 int regno_n = regno + n;
4476 int needed_regno = REGNO_REG_SET_P (needed, regno_n);
4478 SET_REGNO_REG_SET (live, regno_n);
4479 some_needed |= needed_regno;
4480 some_not_needed |= ! needed_regno;
4484 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4486 /* Record where each reg is used, so when the reg
4487 is set we know the next insn that uses it. */
4489 reg_next_use[regno] = insn;
4491 if (flags & PROP_REG_INFO)
4493 if (regno < FIRST_PSEUDO_REGISTER)
4495 /* If a hard reg is being used,
4496 record that this function does use it. */
4498 i = HARD_REGNO_NREGS (regno, GET_MODE (x));
4499 if (i == 0)
4500 i = 1;
4501 do
4502 regs_ever_live[regno + --i] = 1;
4503 while (i > 0);
4505 else
4507 /* Keep track of which basic block each reg appears in. */
4509 register int blocknum = BLOCK_NUM (insn);
4511 if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
4512 REG_BASIC_BLOCK (regno) = blocknum;
4513 else if (REG_BASIC_BLOCK (regno) != blocknum)
4514 REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
4516 /* Count (weighted) number of uses of each reg. */
4518 REG_N_REFS (regno) += loop_depth + 1;
4522 /* Record and count the insns in which a reg dies.
4523 If it is used in this insn and was dead below the insn
4524 then it dies in this insn. If it was set in this insn,
4525 we do not make a REG_DEAD note; likewise if we already
4526 made such a note. */
4528 if (flags & PROP_DEATH_NOTES)
4530 if (some_not_needed
4531 && ! dead_or_set_p (insn, x)
4532 #if 0
4533 && (regno >= FIRST_PSEUDO_REGISTER || ! fixed_regs[regno])
4534 #endif
4537 /* Check for the case where the register dying partially
4538 overlaps the register set by this insn. */
4539 if (regno < FIRST_PSEUDO_REGISTER
4540 && HARD_REGNO_NREGS (regno, GET_MODE (x)) > 1)
4542 int n = HARD_REGNO_NREGS (regno, GET_MODE (x));
4543 while (--n >= 0)
4544 some_needed |= dead_or_set_regno_p (insn, regno + n);
4547 /* If none of the words in X is needed, make a REG_DEAD
4548 note. Otherwise, we must make partial REG_DEAD notes. */
4549 if (! some_needed)
4551 REG_NOTES (insn)
4552 = alloc_EXPR_LIST (REG_DEAD, x, REG_NOTES (insn));
4553 REG_N_DEATHS (regno)++;
4555 else
4557 int i;
4559 /* Don't make a REG_DEAD note for a part of a register
4560 that is set in the insn. */
4562 for (i = HARD_REGNO_NREGS (regno, GET_MODE (x)) - 1;
4563 i >= 0; i--)
4564 if (!REGNO_REG_SET_P (needed, regno + i)
4565 && ! dead_or_set_regno_p (insn, regno + i))
4566 REG_NOTES (insn)
4567 = (alloc_EXPR_LIST
4568 (REG_DEAD, gen_rtx_REG (reg_raw_mode[regno + i],
4569 regno + i),
4570 REG_NOTES (insn)));
4575 return;
4577 case SET:
4579 register rtx testreg = SET_DEST (x);
4580 int mark_dest = 0;
4582 /* If storing into MEM, don't show it as being used. But do
4583 show the address as being used. */
4584 if (GET_CODE (testreg) == MEM)
4586 #ifdef AUTO_INC_DEC
4587 if (flags & PROP_AUTOINC)
4588 find_auto_inc (needed, testreg, insn);
4589 #endif
4590 mark_used_regs (needed, live, XEXP (testreg, 0), flags, insn);
4591 mark_used_regs (needed, live, SET_SRC (x), flags, insn);
4592 return;
4595 /* Storing in STRICT_LOW_PART is like storing in a reg
4596 in that this SET might be dead, so ignore it in TESTREG,
4597 but in some other ways it is like using the reg.
4599 Storing in a SUBREG or a bit field is like storing the entire
4600 register in that if the register's value is not used
4601 then this SET is not needed. */
4602 while (GET_CODE (testreg) == STRICT_LOW_PART
4603 || GET_CODE (testreg) == ZERO_EXTRACT
4604 || GET_CODE (testreg) == SIGN_EXTRACT
4605 || GET_CODE (testreg) == SUBREG)
4607 if (GET_CODE (testreg) == SUBREG
4608 && GET_CODE (SUBREG_REG (testreg)) == REG
4609 && REGNO (SUBREG_REG (testreg)) >= FIRST_PSEUDO_REGISTER
4610 && (GET_MODE_SIZE (GET_MODE (testreg))
4611 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (testreg)))))
4612 REG_CHANGES_SIZE (REGNO (SUBREG_REG (testreg))) = 1;
4614 /* Modifying a single register in an alternate mode
4615 does not use any of the old value. But these other
4616 ways of storing in a register do use the old value. */
4617 if (GET_CODE (testreg) == SUBREG
4618 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
4620 else
4621 mark_dest = 1;
4623 testreg = XEXP (testreg, 0);
4626 /* If this is a store into a register,
4627 recursively scan the value being stored. */
4629 if ((GET_CODE (testreg) == PARALLEL
4630 && GET_MODE (testreg) == BLKmode)
4631 || (GET_CODE (testreg) == REG
4632 && (regno = REGNO (testreg), ! (regno == FRAME_POINTER_REGNUM
4633 && (! reload_completed || frame_pointer_needed)))
4634 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4635 && ! (regno == HARD_FRAME_POINTER_REGNUM
4636 && (! reload_completed || frame_pointer_needed))
4637 #endif
4638 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4639 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
4640 #endif
4642 /* We used to exclude global_regs here, but that seems wrong.
4643 Storing in them is like storing in mem. */
4645 mark_used_regs (needed, live, SET_SRC (x), flags, insn);
4646 if (mark_dest)
4647 mark_used_regs (needed, live, SET_DEST (x), flags, insn);
4648 return;
4651 break;
4653 case ASM_OPERANDS:
4654 case UNSPEC_VOLATILE:
4655 case TRAP_IF:
4656 case ASM_INPUT:
4658 /* Traditional and volatile asm instructions must be considered to use
4659 and clobber all hard registers, all pseudo-registers and all of
4660 memory. So must TRAP_IF and UNSPEC_VOLATILE operations.
4662 Consider for instance a volatile asm that changes the fpu rounding
4663 mode. An insn should not be moved across this even if it only uses
4664 pseudo-regs because it might give an incorrectly rounded result.
4666 ?!? Unfortunately, marking all hard registers as live causes massive
4667 problems for the register allocator and marking all pseudos as live
4668 creates mountains of uninitialized variable warnings.
4670 So for now, just clear the memory set list and mark any regs
4671 we can find in ASM_OPERANDS as used. */
4672 if (code != ASM_OPERANDS || MEM_VOLATILE_P (x))
4673 free_EXPR_LIST_list (&mem_set_list);
4675 /* For all ASM_OPERANDS, we must traverse the vector of input operands.
4676 We cannot just fall through here, since then we would be confused
4677 by the ASM_INPUT rtx inside ASM_OPERANDS, which does not indicate
4678 a traditional asm, unlike its normal usage. */
4679 if (code == ASM_OPERANDS)
4681 int j;
4683 for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
4684 mark_used_regs (needed, live, ASM_OPERANDS_INPUT (x, j),
4685 flags, insn);
4687 break;
4691 default:
4692 break;
4695 /* Recursively scan the operands of this expression. */
4698 register const char *fmt = GET_RTX_FORMAT (code);
4699 register int i;
4701 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4703 if (fmt[i] == 'e')
4705 /* Tail recursive case: save a function call level. */
4706 if (i == 0)
4708 x = XEXP (x, 0);
4709 goto retry;
4711 mark_used_regs (needed, live, XEXP (x, i), flags, insn);
4713 else if (fmt[i] == 'E')
4715 register int j;
4716 for (j = 0; j < XVECLEN (x, i); j++)
4717 mark_used_regs (needed, live, XVECEXP (x, i, j), flags, insn);
4723 #ifdef AUTO_INC_DEC
4725 static int
4726 try_pre_increment_1 (insn)
4727 rtx insn;
4729 /* Find the next use of this reg. If in same basic block,
4730 make it do pre-increment or pre-decrement if appropriate. */
4731 rtx x = single_set (insn);
4732 HOST_WIDE_INT amount = ((GET_CODE (SET_SRC (x)) == PLUS ? 1 : -1)
4733 * INTVAL (XEXP (SET_SRC (x), 1)));
4734 int regno = REGNO (SET_DEST (x));
4735 rtx y = reg_next_use[regno];
4736 if (y != 0
4737 && BLOCK_NUM (y) == BLOCK_NUM (insn)
4738 /* Don't do this if the reg dies, or gets set in y; a standard addressing
4739 mode would be better. */
4740 && ! dead_or_set_p (y, SET_DEST (x))
4741 && try_pre_increment (y, SET_DEST (x), amount))
4743 /* We have found a suitable auto-increment
4744 and already changed insn Y to do it.
4745 So flush this increment-instruction. */
4746 PUT_CODE (insn, NOTE);
4747 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
4748 NOTE_SOURCE_FILE (insn) = 0;
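/* Note (illustrative, not from the original sources): rewriting the
   insn into a NOTE_INSN_DELETED note is how this pass deletes an insn
   in place, without having to relink the insn chain.  */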
4749 /* Count a reference to this reg for the increment
4750 insn we are deleting. When a reg is incremented,
4751 spilling it is worse, so we want to make that
4752 less likely. */
4753 if (regno >= FIRST_PSEUDO_REGISTER)
4755 REG_N_REFS (regno) += loop_depth + 1;
4756 REG_N_SETS (regno)++;
4758 return 1;
4760 return 0;
4763 /* Try to change INSN so that it does pre-increment or pre-decrement
4764 addressing on register REG in order to add AMOUNT to REG.
4765 AMOUNT is negative for pre-decrement.
4766 Returns 1 if the change could be made.
4767 This checks everything about the validity of the result of modifying INSN. */
4769 static int
4770 try_pre_increment (insn, reg, amount)
4771 rtx insn, reg;
4772 HOST_WIDE_INT amount;
4774 register rtx use;
4776 /* Nonzero if we can try to make a pre-increment or pre-decrement.
4777 For example, addl $4,r1; movl (r1),... can become movl +(r1),... */
4778 int pre_ok = 0;
4779 /* Nonzero if we can try to make a post-increment or post-decrement.
4780 For example, addl $4,r1; movl -4(r1),... can become movl (r1)+,...
4781 It is possible for both PRE_OK and POST_OK to be nonzero if the machine
4782 supports both pre-inc and post-inc, or both pre-dec and post-dec. */
4783 int post_ok = 0;
4785 /* Nonzero if the opportunity actually requires post-inc or post-dec. */
4786 int do_post = 0;
4788 /* From the sign of the increment, see which possibilities are conceivable
4789 on this target machine. */
4790 if (HAVE_PRE_INCREMENT && amount > 0)
4791 pre_ok = 1;
4792 if (HAVE_POST_INCREMENT && amount > 0)
4793 post_ok = 1;
4795 if (HAVE_PRE_DECREMENT && amount < 0)
4796 pre_ok = 1;
4797 if (HAVE_POST_DECREMENT && amount < 0)
4798 post_ok = 1;
4800 if (! (pre_ok || post_ok))
4801 return 0;
4803 /* It is not safe to add a side effect to a jump insn
4804 because if the incremented register is spilled and must be reloaded
4805 there would be no way to store the incremented value back in memory. */
4807 if (GET_CODE (insn) == JUMP_INSN)
4808 return 0;
4810 use = 0;
4811 if (pre_ok)
4812 use = find_use_as_address (PATTERN (insn), reg, 0);
4813 if (post_ok && (use == 0 || use == (rtx) 1))
4815 use = find_use_as_address (PATTERN (insn), reg, -amount);
4816 do_post = 1;
4819 if (use == 0 || use == (rtx) 1)
4820 return 0;
4822 if (GET_MODE_SIZE (GET_MODE (use)) != (amount > 0 ? amount : - amount))
4823 return 0;
4825 /* See if this combination of instruction and addressing mode exists. */
4826 if (! validate_change (insn, &XEXP (use, 0),
4827 gen_rtx_fmt_e (amount > 0
4828 ? (do_post ? POST_INC : PRE_INC)
4829 : (do_post ? POST_DEC : PRE_DEC),
4830 Pmode, reg), 0))
4831 return 0;
4833 /* Record that this insn now has an implicit side effect on X. */
4834 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, reg, REG_NOTES (insn));
4835 return 1;
4838 #endif /* AUTO_INC_DEC */
4840 /* Find the place in the rtx X where REG is used as a memory address.
4841 Return the MEM rtx that so uses it.
4842 If PLUSCONST is nonzero, search instead for a memory address equivalent to
4843 (plus REG (const_int PLUSCONST)).
4845 If such an address does not appear, return 0.
4846 If REG appears more than once, or is used other than in such an address,
4847 return (rtx)1. */
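/* Illustrative example (not from the original sources): in
     (set (reg 101) (mem:SI (plus:SI (reg:SI 100) (const_int 4))))
   a call with REG = (reg 100) and PLUSCONST = 4 returns the MEM,
   while PLUSCONST = 0 returns (rtx) 1, since the register is then
   used other than as a bare address.  */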
4849 rtx
4850 find_use_as_address (x, reg, plusconst)
4851 register rtx x;
4852 rtx reg;
4853 HOST_WIDE_INT plusconst;
4855 enum rtx_code code = GET_CODE (x);
4856 const char *fmt = GET_RTX_FORMAT (code);
4857 register int i;
4858 register rtx value = 0;
4859 register rtx tem;
4861 if (code == MEM && XEXP (x, 0) == reg && plusconst == 0)
4862 return x;
4864 if (code == MEM && GET_CODE (XEXP (x, 0)) == PLUS
4865 && XEXP (XEXP (x, 0), 0) == reg
4866 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4867 && INTVAL (XEXP (XEXP (x, 0), 1)) == plusconst)
4868 return x;
4870 if (code == SIGN_EXTRACT || code == ZERO_EXTRACT)
4872 /* If REG occurs inside a MEM used in a bit-field reference,
4873 that is unacceptable. */
4874 if (find_use_as_address (XEXP (x, 0), reg, 0) != 0)
4875 return (rtx) (HOST_WIDE_INT) 1;
4878 if (x == reg)
4879 return (rtx) (HOST_WIDE_INT) 1;
4881 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4883 if (fmt[i] == 'e')
4885 tem = find_use_as_address (XEXP (x, i), reg, plusconst);
4886 if (value == 0)
4887 value = tem;
4888 else if (tem != 0)
4889 return (rtx) (HOST_WIDE_INT) 1;
4891 else if (fmt[i] == 'E')
4893 register int j;
4894 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4896 tem = find_use_as_address (XVECEXP (x, i, j), reg, plusconst);
4897 if (value == 0)
4898 value = tem;
4899 else if (tem != 0)
4900 return (rtx) (HOST_WIDE_INT) 1;
4905 return value;
4908 /* Write information about registers and basic blocks into FILE.
4909 This is part of making a debugging dump. */
4911 void
4912 dump_regset (r, outf)
4913 regset r;
4914 FILE *outf;
4916 int i;
4917 if (r == NULL)
4919 fputs (" (nil)", outf);
4920 return;
4923 EXECUTE_IF_SET_IN_REG_SET (r, 0, i,
4925 fprintf (outf, " %d", i);
4926 if (i < FIRST_PSEUDO_REGISTER)
4927 fprintf (outf, " [%s]",
4928 reg_names[i]);
4932 void
4933 debug_regset (r)
4934 regset r;
4936 dump_regset (r, stderr);
4937 putc ('\n', stderr);
4940 void
4941 dump_flow_info (file)
4942 FILE *file;
4944 register int i;
4945 static const char * const reg_class_names[] = REG_CLASS_NAMES;
4947 fprintf (file, "%d registers.\n", max_regno);
4948 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
4949 if (REG_N_REFS (i))
4951 enum reg_class class, altclass;
4952 fprintf (file, "\nRegister %d used %d times across %d insns",
4953 i, REG_N_REFS (i), REG_LIVE_LENGTH (i));
4954 if (REG_BASIC_BLOCK (i) >= 0)
4955 fprintf (file, " in block %d", REG_BASIC_BLOCK (i));
4956 if (REG_N_SETS (i))
4957 fprintf (file, "; set %d time%s", REG_N_SETS (i),
4958 (REG_N_SETS (i) == 1) ? "" : "s");
4959 if (REG_USERVAR_P (regno_reg_rtx[i]))
4960 fprintf (file, "; user var");
4961 if (REG_N_DEATHS (i) != 1)
4962 fprintf (file, "; dies in %d places", REG_N_DEATHS (i));
4963 if (REG_N_CALLS_CROSSED (i) == 1)
4964 fprintf (file, "; crosses 1 call");
4965 else if (REG_N_CALLS_CROSSED (i))
4966 fprintf (file, "; crosses %d calls", REG_N_CALLS_CROSSED (i));
4967 if (PSEUDO_REGNO_BYTES (i) != UNITS_PER_WORD)
4968 fprintf (file, "; %d bytes", PSEUDO_REGNO_BYTES (i));
4969 class = reg_preferred_class (i);
4970 altclass = reg_alternate_class (i);
4971 if (class != GENERAL_REGS || altclass != ALL_REGS)
4973 if (altclass == ALL_REGS || class == ALL_REGS)
4974 fprintf (file, "; pref %s", reg_class_names[(int) class]);
4975 else if (altclass == NO_REGS)
4976 fprintf (file, "; %s or none", reg_class_names[(int) class]);
4977 else
4978 fprintf (file, "; pref %s, else %s",
4979 reg_class_names[(int) class],
4980 reg_class_names[(int) altclass]);
4982 if (REGNO_POINTER_FLAG (i))
4983 fprintf (file, "; pointer");
4984 fprintf (file, ".\n");
4987 fprintf (file, "\n%d basic blocks, %d edges.\n", n_basic_blocks, n_edges);
4988 for (i = 0; i < n_basic_blocks; i++)
4990 register basic_block bb = BASIC_BLOCK (i);
4991 register edge e;
4993 fprintf (file, "\nBasic block %d: first insn %d, last %d, loop_depth %d.\n",
4994 i, INSN_UID (bb->head), INSN_UID (bb->end), bb->loop_depth);
4996 fprintf (file, "Predecessors: ");
4997 for (e = bb->pred; e ; e = e->pred_next)
4998 dump_edge_info (file, e, 0);
5000 fprintf (file, "\nSuccessors: ");
5001 for (e = bb->succ; e ; e = e->succ_next)
5002 dump_edge_info (file, e, 1);
5004 fprintf (file, "\nRegisters live at start:");
5005 dump_regset (bb->global_live_at_start, file);
5007 fprintf (file, "\nRegisters live at end:");
5008 dump_regset (bb->global_live_at_end, file);
5010 putc('\n', file);
5013 putc('\n', file);
5016 void
5017 debug_flow_info ()
5019 dump_flow_info (stderr);
5022 static void
5023 dump_edge_info (file, e, do_succ)
5024 FILE *file;
5025 edge e;
5026 int do_succ;
5028 basic_block side = (do_succ ? e->dest : e->src);
5030 if (side == ENTRY_BLOCK_PTR)
5031 fputs (" ENTRY", file);
5032 else if (side == EXIT_BLOCK_PTR)
5033 fputs (" EXIT", file);
5034 else
5035 fprintf (file, " %d", side->index);
5037 if (e->flags)
5039 static const char * const bitnames[] = {
5040 "fallthru", "crit", "ab", "abcall", "eh", "fake"
5042 int comma = 0;
5043 int i, flags = e->flags;
5045 fputc (' ', file);
5046 fputc ('(', file);
5047 for (i = 0; flags; i++)
5048 if (flags & (1 << i))
5050 flags &= ~(1 << i);
5052 if (comma)
5053 fputc (',', file);
5054 if (i < (int)(sizeof (bitnames) / sizeof (*bitnames)))
5055 fputs (bitnames[i], file);
5056 else
5057 fprintf (file, "%d", i);
5058 comma = 1;
5060 fputc (')', file);
5065 /* Print out one basic block with live information at start and end. */
5066 void
5067 dump_bb (bb, outf)
5068 basic_block bb;
5069 FILE *outf;
5071 rtx insn;
5072 rtx last;
5073 edge e;
5075 fprintf (outf, ";; Basic block %d, loop depth %d",
5076 bb->index, bb->loop_depth - 1);
5077 if (bb->eh_beg != -1 || bb->eh_end != -1)
5078 fprintf (outf, ", eh regions %d/%d", bb->eh_beg, bb->eh_end);
5079 putc ('\n', outf);
5081 fputs (";; Predecessors: ", outf);
5082 for (e = bb->pred; e ; e = e->pred_next)
5083 dump_edge_info (outf, e, 0);
5084 putc ('\n', outf);
5086 fputs (";; Registers live at start:", outf);
5087 dump_regset (bb->global_live_at_start, outf);
5088 putc ('\n', outf);
5090 for (insn = bb->head, last = NEXT_INSN (bb->end);
5091 insn != last;
5092 insn = NEXT_INSN (insn))
5093 print_rtl_single (outf, insn);
5095 fputs (";; Registers live at end:", outf);
5096 dump_regset (bb->global_live_at_end, outf);
5097 putc ('\n', outf);
5099 fputs (";; Successors: ", outf);
5100 for (e = bb->succ; e; e = e->succ_next)
5101 dump_edge_info (outf, e, 1);
5102 putc ('\n', outf);
5105 void
5106 debug_bb (bb)
5107 basic_block bb;
5109 dump_bb (bb, stderr);
5112 void
5113 debug_bb_n (n)
5114 int n;
5116 dump_bb (BASIC_BLOCK(n), stderr);
5119 /* Like print_rtl, but also print out live information for the start of each
5120 basic block. */
5122 void
5123 print_rtl_with_bb (outf, rtx_first)
5124 FILE *outf;
5125 rtx rtx_first;
5127 register rtx tmp_rtx;
5129 if (rtx_first == 0)
5130 fprintf (outf, "(nil)\n");
5131 else
5133 int i;
5134 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
5135 int max_uid = get_max_uid ();
5136 basic_block *start = (basic_block *)
5137 xcalloc (max_uid, sizeof (basic_block));
5138 basic_block *end = (basic_block *)
5139 xcalloc (max_uid, sizeof (basic_block));
5140 enum bb_state *in_bb_p = (enum bb_state *)
5141 xcalloc (max_uid, sizeof (enum bb_state));
5143 for (i = n_basic_blocks - 1; i >= 0; i--)
5145 basic_block bb = BASIC_BLOCK (i);
5146 rtx x;
5148 start[INSN_UID (bb->head)] = bb;
5149 end[INSN_UID (bb->end)] = bb;
5150 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
5152 enum bb_state state = IN_MULTIPLE_BB;
5153 if (in_bb_p[INSN_UID(x)] == NOT_IN_BB)
5154 state = IN_ONE_BB;
5155 in_bb_p[INSN_UID(x)] = state;
5157 if (x == bb->end)
5158 break;
5162 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
5164 int did_output;
5165 basic_block bb;
5167 if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
5169 fprintf (outf, ";; Start of basic block %d, registers live:",
5170 bb->index);
5171 dump_regset (bb->global_live_at_start, outf);
5172 putc ('\n', outf);
5175 if (in_bb_p[INSN_UID(tmp_rtx)] == NOT_IN_BB
5176 && GET_CODE (tmp_rtx) != NOTE
5177 && GET_CODE (tmp_rtx) != BARRIER)
5178 fprintf (outf, ";; Insn is not within a basic block\n");
5179 else if (in_bb_p[INSN_UID(tmp_rtx)] == IN_MULTIPLE_BB)
5180 fprintf (outf, ";; Insn is in multiple basic blocks\n");
5182 did_output = print_rtl_single (outf, tmp_rtx);
5184 if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
5185 fprintf (outf, ";; End of basic block %d\n", bb->index);
5187 if (did_output)
5188 putc ('\n', outf);
5191 free (start);
5192 free (end);
5193 free (in_bb_p);
5196 if (current_function_epilogue_delay_list != 0)
5198 fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
5199 for (tmp_rtx = current_function_epilogue_delay_list; tmp_rtx != 0;
5200 tmp_rtx = XEXP (tmp_rtx, 1))
5201 print_rtl_single (outf, XEXP (tmp_rtx, 0));
5205 /* Compute dominator relationships using new flow graph structures. */
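/* Illustrative summary (not from the original sources): this is the
   standard iterative dataflow computation, roughly
     dom (b) = { b } union (intersection over preds p of dom (p)),
   started from the optimistic "everything dominates everything" and
   iterated with a worklist to a maximal fixed point.  Post-dominators
   are the same computation with the edges reversed.  */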
5206 void
5207 compute_flow_dominators (dominators, post_dominators)
5208 sbitmap *dominators;
5209 sbitmap *post_dominators;
5211 int bb;
5212 sbitmap *temp_bitmap;
5213 edge e;
5214 basic_block *worklist, *tos;
5216 /* Allocate a worklist array/queue. Entries are only added to the
5217 list if they were not already on the list. So the size is
5218 bounded by the number of basic blocks. */
5219 tos = worklist = (basic_block *) xmalloc (sizeof (basic_block)
5220 * n_basic_blocks);
5222 temp_bitmap = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
5223 sbitmap_vector_zero (temp_bitmap, n_basic_blocks);
5225 if (dominators)
5227 /* The optimistic setting of dominators requires us to put every
5228 block on the work list initially. */
5229 for (bb = 0; bb < n_basic_blocks; bb++)
5231 *tos++ = BASIC_BLOCK (bb);
5232 BASIC_BLOCK (bb)->aux = BASIC_BLOCK (bb);
5235 /* We want a maximal solution, so initially assume everything dominates
5236 everything else. */
5237 sbitmap_vector_ones (dominators, n_basic_blocks);
5239 /* Mark successors of the entry block so we can identify them below. */
5240 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
5241 e->dest->aux = ENTRY_BLOCK_PTR;
5243 /* Iterate until the worklist is empty. */
5244 while (tos != worklist)
5246 /* Take the first entry off the worklist. */
5247 basic_block b = *--tos;
5248 bb = b->index;
5250 /* Compute the intersection of the dominators of all the
5251 predecessor blocks.
5253 If one of the predecessor blocks is the ENTRY block, then the
5254 intersection of the dominators of the predecessor blocks is
5255 defined as the null set. We can identify such blocks by the
5256 special value in the AUX field in the block structure. */
5257 if (b->aux == ENTRY_BLOCK_PTR)
5259 /* Do not clear the aux field for blocks which are
5260 successors of the ENTRY block. That way we never
5261 add them to the worklist again.
5263 The intersection of dominators of the preds of this block is
5264 defined as the null set. */
5265 sbitmap_zero (temp_bitmap[bb]);
5267 else
5269 /* Clear the aux field of this block so it can be added to
5270 the worklist again if necessary. */
5271 b->aux = NULL;
5272 sbitmap_intersection_of_preds (temp_bitmap[bb], dominators, bb);
5275 /* Make sure each block always dominates itself. */
5276 SET_BIT (temp_bitmap[bb], bb);
5278 /* If the out state of this block changed, then we need to
5279 add the successors of this block to the worklist if they
5280 are not already on the worklist. */
5281 if (sbitmap_a_and_b (dominators[bb], dominators[bb], temp_bitmap[bb]))
5283 for (e = b->succ; e; e = e->succ_next)
5285 if (!e->dest->aux && e->dest != EXIT_BLOCK_PTR)
5287 *tos++ = e->dest;
5288 e->dest->aux = e;
5295 if (post_dominators)
5297 /* The optimistic setting of dominators requires us to put every
5298 block on the work list initially. */
5299 for (bb = 0; bb < n_basic_blocks; bb++)
5301 *tos++ = BASIC_BLOCK (bb);
5302 BASIC_BLOCK (bb)->aux = BASIC_BLOCK (bb);
5305 /* We want a maximal solution, so initially assume everything post
5306 dominates everything else. */
5307 sbitmap_vector_ones (post_dominators, n_basic_blocks);
5309 /* Mark predecessors of the exit block so we can identify them below. */
5310 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
5311 e->src->aux = EXIT_BLOCK_PTR;
5313 /* Iterate until the worklist is empty. */
5314 while (tos != worklist)
5316 /* Take the first entry off the worklist. */
5317 basic_block b = *--tos;
5318 bb = b->index;
5320 /* Compute the intersection of the post dominators of all the
5321 successor blocks.
5323 If one of the successor blocks is the EXIT block, then the
5324 intersection of the dominators of the successor blocks is
5325 defined as the null set. We can identify such blocks by the
5326 special value in the AUX field in the block structure. */
5327 if (b->aux == EXIT_BLOCK_PTR)
5329 /* Do not clear the aux field for blocks which are
5330 predecessors of the EXIT block. That way we never
5331 add them to the worklist again.
5333 The intersection of dominators of the succs of this block is
5334 defined as the null set. */
5335 sbitmap_zero (temp_bitmap[bb]);
5337 else
5339 /* Clear the aux field of this block so it can be added to
5340 the worklist again if necessary. */
5341 b->aux = NULL;
5342 sbitmap_intersection_of_succs (temp_bitmap[bb],
5343 post_dominators, bb);
5346 /* Make sure each block always post dominates itself. */
5347 SET_BIT (temp_bitmap[bb], bb);
5349 /* If the out state of this block changed, then we need to
5350 add the predecessors of this block to the worklist if they
5351 are not already on the worklist. */
5352 if (sbitmap_a_and_b (post_dominators[bb],
5353 post_dominators[bb],
5354 temp_bitmap[bb]))
5356 for (e = b->pred; e; e = e->pred_next)
5358 if (!e->src->aux && e->src != ENTRY_BLOCK_PTR)
5360 *tos++ = e->src;
5361 e->src->aux = e;
5367 free (temp_bitmap);
5370 /* Given DOMINATORS, compute the immediate dominators into IDOM. */
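/* Illustrative example (not from the original sources): suppose
   dom (b2) = { b0, b1, b2 }.  Then tmp (b2) = { b0, b1 }; b1's own
   strict dominators are tmp (b1) = { b0 }, so subtracting them leaves
   { b1 }, and idom (b2) = b1.  */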
5372 void
5373 compute_immediate_dominators (idom, dominators)
5374 int *idom;
5375 sbitmap *dominators;
5377 sbitmap *tmp;
5378 int b;
5380 tmp = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
5382 /* Begin with tmp(n) = dom(n) - { n }. */
5383 for (b = n_basic_blocks; --b >= 0; )
5385 sbitmap_copy (tmp[b], dominators[b]);
5386 RESET_BIT (tmp[b], b);
5389 /* Subtract out all of our dominators' dominators. */
5390 for (b = n_basic_blocks; --b >= 0; )
5392 sbitmap tmp_b = tmp[b];
5393 int s;
5395 for (s = n_basic_blocks; --s >= 0; )
5396 if (TEST_BIT (tmp_b, s))
5397 sbitmap_difference (tmp_b, tmp_b, tmp[s]);
5400 /* Find the one bit set in the bitmap and put it in the output array. */
5401 for (b = n_basic_blocks; --b >= 0; )
5403 int t;
5404 EXECUTE_IF_SET_IN_SBITMAP (tmp[b], 0, t, { idom[b] = t; });
5407 sbitmap_vector_free (tmp);
5410 /* Count for a single SET rtx, X. */
5412 static void
5413 count_reg_sets_1 (x)
5414 rtx x;
5416 register int regno;
5417 register rtx reg = SET_DEST (x);
5419 /* Find the register that's set/clobbered. */
5420 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
5421 || GET_CODE (reg) == SIGN_EXTRACT
5422 || GET_CODE (reg) == STRICT_LOW_PART)
5423 reg = XEXP (reg, 0);
5425 if (GET_CODE (reg) == PARALLEL
5426 && GET_MODE (reg) == BLKmode)
5428 register int i;
5429 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
5430 count_reg_sets_1 (XVECEXP (reg, 0, i));
5431 return;
5434 if (GET_CODE (reg) == REG)
5436 regno = REGNO (reg);
5437 if (regno >= FIRST_PSEUDO_REGISTER)
5439 /* Count (weighted) references, stores, etc. This counts a
5440 register twice if it is modified, but that is correct. */
5441 REG_N_SETS (regno)++;
5442 REG_N_REFS (regno) += loop_depth + 1;
5447 /* Increment REG_N_SETS for each SET or CLOBBER found in X; also increment
5448 REG_N_REFS by the current loop depth for each SET or CLOBBER found. */
5450 static void
5451 count_reg_sets (x)
5452 rtx x;
5454 register RTX_CODE code = GET_CODE (x);
5456 if (code == SET || code == CLOBBER)
5457 count_reg_sets_1 (x);
5458 else if (code == PARALLEL)
5460 register int i;
5461 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
5463 code = GET_CODE (XVECEXP (x, 0, i));
5464 if (code == SET || code == CLOBBER)
5465 count_reg_sets_1 (XVECEXP (x, 0, i));
5470 /* Increment REG_N_REFS by the current loop depth for each register reference
5471 found in X. */
5473 static void
5474 count_reg_references (x)
5475 rtx x;
5477 register RTX_CODE code;
5479 retry:
5480 code = GET_CODE (x);
5481 switch (code)
5483 case LABEL_REF:
5484 case SYMBOL_REF:
5485 case CONST_INT:
5486 case CONST:
5487 case CONST_DOUBLE:
5488 case PC:
5489 case ADDR_VEC:
5490 case ADDR_DIFF_VEC:
5491 case ASM_INPUT:
5492 return;
5494 #ifdef HAVE_cc0
5495 case CC0:
5496 return;
5497 #endif
5499 case CLOBBER:
5500 /* If we are clobbering a MEM, mark any registers inside the address
5501 as being used. */
5502 if (GET_CODE (XEXP (x, 0)) == MEM)
5503 count_reg_references (XEXP (XEXP (x, 0), 0));
5504 return;
5506 case SUBREG:
5507 /* While we're here, optimize this case. */
5508 x = SUBREG_REG (x);
5510 /* In case the SUBREG is not of a register, don't optimize. */
5511 if (GET_CODE (x) != REG)
5513 count_reg_references (x);
5514 return;
5517 /* ... fall through ... */
5519 case REG:
5520 if (REGNO (x) >= FIRST_PSEUDO_REGISTER)
5521 REG_N_REFS (REGNO (x)) += loop_depth + 1;
5522 return;
5524 case SET:
5526 register rtx testreg = SET_DEST (x);
5527 int mark_dest = 0;
5529 /* If storing into MEM, don't show it as being used. But do
5530 show the address as being used. */
5531 if (GET_CODE (testreg) == MEM)
5533 count_reg_references (XEXP (testreg, 0));
5534 count_reg_references (SET_SRC (x));
5535 return;
5538 /* Storing in STRICT_LOW_PART is like storing in a reg
5539 in that this SET might be dead, so ignore it in TESTREG,
5540 but in some other ways it is like using the reg.
5542 Storing in a SUBREG or a bit field is like storing the entire
5543 register in that if the register's value is not used
5544 then this SET is not needed. */
5545 while (GET_CODE (testreg) == STRICT_LOW_PART
5546 || GET_CODE (testreg) == ZERO_EXTRACT
5547 || GET_CODE (testreg) == SIGN_EXTRACT
5548 || GET_CODE (testreg) == SUBREG)
5550 /* Modifying a single register in an alternate mode
5551 does not use any of the old value. But these other
5552 ways of storing in a register do use the old value. */
5553 if (GET_CODE (testreg) == SUBREG
5554 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
5556 else
5557 mark_dest = 1;
5559 testreg = XEXP (testreg, 0);
5562 /* If this is a store into a register,
5563 recursively scan the value being stored. */
5565 if ((GET_CODE (testreg) == PARALLEL
5566 && GET_MODE (testreg) == BLKmode)
5567 || GET_CODE (testreg) == REG)
5569 count_reg_references (SET_SRC (x));
5570 if (mark_dest)
5571 count_reg_references (SET_DEST (x));
5572 return;
5575 break;
5577 default:
5578 break;
5581 /* Recursively scan the operands of this expression. */
5584 register const char *fmt = GET_RTX_FORMAT (code);
5585 register int i;
5587 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5589 if (fmt[i] == 'e')
5591 /* Tail recursive case: save a function call level. */
5592 if (i == 0)
5594 x = XEXP (x, 0);
5595 goto retry;
5597 count_reg_references (XEXP (x, i));
5599 else if (fmt[i] == 'E')
5601 register int j;
5602 for (j = 0; j < XVECLEN (x, i); j++)
5603 count_reg_references (XVECEXP (x, i, j));
5609 /* Recompute register set/reference counts immediately prior to register
5610 allocation.
5612 This avoids problems with set/reference counts changing to/from values
5613 which have special meanings to the register allocators.
5615 Additionally, the reference counts are the primary component used by the
5616 register allocators to prioritize pseudos for allocation to hard regs.
5617 More accurate reference counts generally lead to better register allocation.
5619 F is the first insn to be scanned.
5621 LOOP_STEP denotes how much loop_depth should be incremented per
5622 loop nesting level in order to increase the ref count more for
5623 references in a loop.
5625 It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and
5626 possibly other information which is used by the register allocators. */
5628 void
5629 recompute_reg_usage (f, loop_step)
5630 rtx f ATTRIBUTE_UNUSED;
5631 int loop_step ATTRIBUTE_UNUSED;
5633 rtx insn;
5634 int i, max_reg;
5635 int index;
5637 /* Clear out the old data. */
5638 max_reg = max_reg_num ();
5639 for (i = FIRST_PSEUDO_REGISTER; i < max_reg; i++)
5641 REG_N_SETS (i) = 0;
5642 REG_N_REFS (i) = 0;
5645 /* Scan each insn in the chain and count how many times each register is
5646 set/used. */
5647 for (index = 0; index < n_basic_blocks; index++)
5649 basic_block bb = BASIC_BLOCK (index);
5650 loop_depth = bb->loop_depth;
5651 for (insn = bb->head; insn; insn = NEXT_INSN (insn))
5653 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
5655 rtx links;
5657 /* This call will increment REG_N_SETS for each SET or CLOBBER
5658 of a register in INSN. It will also increment REG_N_REFS
5659 by the loop depth for each set of a register in INSN. */
5660 count_reg_sets (PATTERN (insn));
5662 /* count_reg_sets does not detect autoincrement address modes, so
5663 detect them here by looking at the notes attached to INSN. */
5664 for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
5666 if (REG_NOTE_KIND (links) == REG_INC)
5667 /* Count (weighted) references, stores, etc. This counts a
5668 register twice if it is modified, but that is correct. */
5669 REG_N_SETS (REGNO (XEXP (links, 0)))++;
5672 /* This call will increment REG_N_REFS by the current loop depth for
5673 each reference to a register in INSN. */
5674 count_reg_references (PATTERN (insn));
5676 /* count_reg_references will not include counts for arguments to
5677 function calls, so detect them here by examining the
5678 CALL_INSN_FUNCTION_USAGE data. */
5679 if (GET_CODE (insn) == CALL_INSN)
5681 rtx note;
5683 for (note = CALL_INSN_FUNCTION_USAGE (insn);
5684 note;
5685 note = XEXP (note, 1))
5686 if (GET_CODE (XEXP (note, 0)) == USE)
5687 count_reg_references (XEXP (XEXP (note, 0), 0));
5690 if (insn == bb->end)
5691 break;
5696 /* Optionally removes all the REG_DEAD and REG_UNUSED notes from a set of
5697 blocks. If BLOCKS is NULL, assume the universal set. Returns a count
5698 of the number of registers that died. */
5700 int
5701 count_or_remove_death_notes (blocks, kill)
5702 sbitmap blocks;
5703 int kill;
5705 int i, count = 0;
5707 for (i = n_basic_blocks - 1; i >= 0; --i)
5709 basic_block bb;
5710 rtx insn;
5712 if (blocks && ! TEST_BIT (blocks, i))
5713 continue;
5715 bb = BASIC_BLOCK (i);
5717 for (insn = bb->head; ; insn = NEXT_INSN (insn))
5719 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
5721 rtx *pprev = &REG_NOTES (insn);
5722 rtx link = *pprev;
5724 while (link)
5726 switch (REG_NOTE_KIND (link))
5728 case REG_DEAD:
5729 if (GET_CODE (XEXP (link, 0)) == REG)
5731 rtx reg = XEXP (link, 0);
5732 int n;
5734 if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
5735 n = 1;
5736 else
5737 n = HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg));
5738 count += n;
5740 /* FALLTHRU */
5742 case REG_UNUSED:
5743 if (kill)
5745 rtx next = XEXP (link, 1);
5746 free_EXPR_LIST_node (link);
5747 *pprev = link = next;
5748 break;
5750 /* FALLTHRU */
5752 default:
5753 pprev = &XEXP (link, 1);
5754 link = *pprev;
5755 break;
5760 if (insn == bb->end)
5761 break;
5765 return count;
5768 /* Record INSN's block as BB. */
5770 void
5771 set_block_for_insn (insn, bb)
5772 rtx insn;
5773 basic_block bb;
5775 size_t uid = INSN_UID (insn);
5776 if (uid >= basic_block_for_insn->num_elements)
5778 int new_size;
5780 /* Add one-eighth the size so we don't keep calling xrealloc. */
5781 new_size = uid + (uid + 7) / 8;
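/* Illustrative arithmetic (not from the original sources): a uid of
   800 grows the array to 800 + 807/8 == 900 entries, i.e. roughly
   uid * 9/8.  */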
5783 VARRAY_GROW (basic_block_for_insn, new_size);
5785 VARRAY_BB (basic_block_for_insn, uid) = bb;
5788 /* Record INSN's block number as BB. */
5789 /* ??? This has got to go. */
5791 void
5792 set_block_num (insn, bb)
5793 rtx insn;
5794 int bb;
5796 set_block_for_insn (insn, BASIC_BLOCK (bb));
5799 /* Verify the CFG consistency. This function checks some CFG invariants and
5800 aborts when something is wrong. The hope is that this function will help
5801 to convert many optimization passes to preserve CFG consistency.
5803 Currently it does the following checks:
5805 - test head/end pointers
5806 - overlapping of basic blocks
5807 - edge list correctness
5808 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
5809 - tails of basic blocks (ensure that the boundary is necessary)
5810 - scans the body of each basic block for JUMP_INSN, CODE_LABEL
5811 and NOTE_INSN_BASIC_BLOCK
5812 - check that all insns are in basic blocks
5813 (except the switch handling code, barriers and notes)
5814 - check that all returns are followed by barriers
5816 In the future it can be extended to check a lot of other stuff as well
5817 (reachability of basic blocks, life information, etc.). */
5819 void
5820 verify_flow_info ()
5822 const int max_uid = get_max_uid ();
5823 const rtx rtx_first = get_insns ();
5824 basic_block *bb_info;
5825 rtx x;
5826 int i, err = 0;
5828 bb_info = (basic_block *) xcalloc (max_uid, sizeof (basic_block));
5830 /* First pass check head/end pointers and set bb_info array used by
5831 later passes. */
5832 for (i = n_basic_blocks - 1; i >= 0; i--)
5834 basic_block bb = BASIC_BLOCK (i);
5836 /* Check the head pointer and make sure that it is pointing into
5837 the insn list. */
5838 for (x = rtx_first; x != NULL_RTX; x = NEXT_INSN (x))
5839 if (x == bb->head)
5840 break;
5841 if (!x)
5843 error ("Head insn %d for block %d not found in the insn stream.",
5844 INSN_UID (bb->head), bb->index);
5845 err = 1;
5848 /* Check the end pointer and make sure that it is pointing into
5849 the insn list. */
5850 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
5852 if (bb_info[INSN_UID (x)] != NULL)
5854 error ("Insn %d is in multiple basic blocks (%d and %d)",
5855 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
5856 err = 1;
5858 bb_info[INSN_UID (x)] = bb;
5860 if (x == bb->end)
5861 break;
5863 if (!x)
5865 error ("End insn %d for block %d not found in the insn stream.",
5866 INSN_UID (bb->end), bb->index);
5867 err = 1;
5871 /* Now check the basic blocks (boundaries etc.) */
5872 for (i = n_basic_blocks - 1; i >= 0; i--)
5874 basic_block bb = BASIC_BLOCK (i);
5875 /* Check correctness of edge lists. */
5876 edge e;
5878 e = bb->succ;
5879 while (e)
5881 if (e->src != bb)
5883 fprintf (stderr, "verify_flow_info: Basic block %d succ edge is corrupted\n",
5884 bb->index);
5885 fprintf (stderr, "Predecessor: ");
5886 dump_edge_info (stderr, e, 0);
5887 fprintf (stderr, "\nSuccessor: ");
5888 dump_edge_info (stderr, e, 1);
5889 fflush (stderr);
5890 err = 1;
5892 if (e->dest != EXIT_BLOCK_PTR)
5894 edge e2 = e->dest->pred;
5895 while (e2 && e2 != e)
5896 e2 = e2->pred_next;
5897 if (!e2)
5899 error ("Basic block %i edge lists are corrupted", bb->index);
5900 err = 1;
5903 e = e->succ_next;
5906 e = bb->pred;
5907 while (e)
5909 if (e->dest != bb)
5911 error ("Basic block %d pred edge is corrupted", bb->index);
5912 fputs ("Predecessor: ", stderr);
5913 dump_edge_info (stderr, e, 0);
5914 fputs ("\nSuccessor: ", stderr);
5915 dump_edge_info (stderr, e, 1);
5916 fputc ('\n', stderr);
5917 err = 1;
5919 if (e->src != ENTRY_BLOCK_PTR)
5921 edge e2 = e->src->succ;
5922 while (e2 && e2 != e)
5923 e2 = e2->succ_next;
5924 if (!e2)
5926 error ("Basic block %i edge lists are corrupted", bb->index);
5927 err = 1;
5930 e = e->pred_next;
5933 /* OK; pointers are correct. Now check the header of the basic
5934 block. It ought to contain an optional CODE_LABEL followed
5935 by a NOTE_BASIC_BLOCK note. */
5936 x = bb->head;
5937 if (GET_CODE (x) == CODE_LABEL)
5939 if (bb->end == x)
5941 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
5942 bb->index);
5943 err = 1;
5945 x = NEXT_INSN (x);
5947 if (GET_CODE (x) != NOTE
5948 || NOTE_LINE_NUMBER (x) != NOTE_INSN_BASIC_BLOCK
5949 || NOTE_BASIC_BLOCK (x) != bb)
5951 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d\n",
5952 bb->index);
5953 err = 1;
5956 if (bb->end == x)
5958 /* Do checks for empty blocks here. */
5960 else
5962 x = NEXT_INSN (x);
5963 while (x)
5965 if (GET_CODE (x) == NOTE
5966 && NOTE_LINE_NUMBER (x) == NOTE_INSN_BASIC_BLOCK)
5968 error ("NOTE_INSN_BASIC_BLOCK %d in the middle of basic block %d",
5969 INSN_UID (x), bb->index);
5970 err = 1;
5973 if (x == bb->end)
5974 break;
5976 if (GET_CODE (x) == JUMP_INSN
5977 || GET_CODE (x) == CODE_LABEL
5978 || GET_CODE (x) == BARRIER)
5980 error ("In basic block %d:", bb->index);
5981 fatal_insn ("Flow control insn inside a basic block", x);
5984 x = NEXT_INSN (x);
5989 x = rtx_first;
5990 while (x)
5992 if (!bb_info[INSN_UID (x)])
5994 switch (GET_CODE (x))
5996 case BARRIER:
5997 case NOTE:
5998 break;
6000 case CODE_LABEL:
6001 /* An addr_vec is placed outside any basic block. */
6002 if (NEXT_INSN (x)
6003 && GET_CODE (NEXT_INSN (x)) == JUMP_INSN
6004 && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
6005 || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC))
6007 x = NEXT_INSN (x);
6010 /* But in any case, non-deletable labels can appear anywhere. */
6011 break;
6013 default:
6014 fatal_insn ("Insn outside basic block", x);
6018 if (GET_RTX_CLASS (GET_CODE (x)) == 'i'
6019 && GET_CODE (x) == JUMP_INSN
6020 && returnjump_p (x) && ! condjump_p (x)
6021 && ! (NEXT_INSN (x) && GET_CODE (NEXT_INSN (x)) == BARRIER))
6022 fatal_insn ("Return not followed by barrier", x);
6024 x = NEXT_INSN (x);
6027 if (err)
6028 abort ();
6030 /* Clean up. */
6031 free (bb_info);
6034 /* Functions to access an edge list with a vector representation.
6035 Enough data is kept such that given an index number, the
6036 pred and succ that edge represents can be determined, or
6037 given a pred and a succ, its index number can be returned.
6038 This allows algorithms which consume a lot of memory to
6039 represent the normally full matrix of edge (pred,succ) with a
6040 single indexed vector, edge (EDGE_INDEX (pred, succ)), with no
6041 wasted space in the client code due to sparse flow graphs. */
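/* For illustration, a hedged sketch of how a client might use this
   API; BB_FROM and BB_TO stand for basic blocks the client already
   has and are not defined here:

     struct edge_list *elist = create_edge_list ();
     int ix = EDGE_INDEX (elist, bb_from, bb_to);

     if (ix != EDGE_INDEX_NO_EDGE)
       {
         basic_block p = INDEX_EDGE_PRED_BB (elist, ix);
         basic_block s = INDEX_EDGE_SUCC_BB (elist, ix);
         ...
       }
     free_edge_list (elist);  */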
6043 /* This function initializes the edge list.  The entire
6044 flowgraph is processed, all edges are assigned a number,
6045 and the data structure is filled in. */
6046 struct edge_list *
6047 create_edge_list ()
6049 struct edge_list *elist;
6050 edge e;
6051 int num_edges;
6052 int x;
6053 int block_count;
6055 block_count = n_basic_blocks + 2; /* Include the entry and exit blocks. */
6057 num_edges = 0;
6059 /* Determine the number of edges in the flow graph by counting successor
6060 edges on each basic block. */
6061 for (x = 0; x < n_basic_blocks; x++)
6063 basic_block bb = BASIC_BLOCK (x);
6065 for (e = bb->succ; e; e = e->succ_next)
6066 num_edges++;
6068 /* Don't forget successors of the entry block. */
6069 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6070 num_edges++;
6072 elist = (struct edge_list *) xmalloc (sizeof (struct edge_list));
6073 elist->num_blocks = block_count;
6074 elist->num_edges = num_edges;
6075 elist->index_to_edge = (edge *) xmalloc (sizeof (edge) * num_edges);
6077 num_edges = 0;
6079 /* Follow successors of the entry block, and register these edges. */
6080 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6082 elist->index_to_edge[num_edges] = e;
6083 num_edges++;
6086 for (x = 0; x < n_basic_blocks; x++)
6088 basic_block bb = BASIC_BLOCK (x);
6090 /* Follow all successors of blocks, and register these edges. */
6091 for (e = bb->succ; e; e = e->succ_next)
6093 elist->index_to_edge[num_edges] = e;
6094 num_edges++;
6097 return elist;
6100 /* This function frees the memory associated with an edge list. */
6101 void
6102 free_edge_list (elist)
6103 struct edge_list *elist;
6105 if (elist)
6107 free (elist->index_to_edge);
6108 free (elist);
6112 /* This function provides debug output showing an edge list. */
6113 void
6114 print_edge_list (f, elist)
6115 FILE *f;
6116 struct edge_list *elist;
6118 int x;
6119 fprintf (f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
6120 elist->num_blocks - 2, elist->num_edges);
6122 for (x = 0; x < elist->num_edges; x++)
6124 fprintf (f, " %-4d - edge(", x);
6125 if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR)
6126 fprintf (f, "entry,");
6127 else
6128 fprintf (f, "%d,", INDEX_EDGE_PRED_BB (elist, x)->index);
6130 if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR)
6131 fprintf (f, "exit)\n");
6132 else
6133 fprintf (f, "%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
6137 /* This function provides an internal consistency check of an edge list,
6138 verifying that all edges are present, and that there are no
6139 extra edges. */
6140 void
6141 verify_edge_list (f, elist)
6142 FILE *f;
6143 struct edge_list *elist;
6145 int x, pred, succ, index;
6146 edge e;
6148 for (x = 0; x < n_basic_blocks; x++)
6150 basic_block bb = BASIC_BLOCK (x);
6152 for (e = bb->succ; e; e = e->succ_next)
6154 pred = e->src->index;
6155 succ = e->dest->index;
6156 index = EDGE_INDEX (elist, e->src, e->dest);
6157 if (index == EDGE_INDEX_NO_EDGE)
6159 fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
6160 continue;
6162 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
6163 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
6164 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
6165 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
6166 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
6167 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
6170 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6172 pred = e->src->index;
6173 succ = e->dest->index;
6174 index = EDGE_INDEX (elist, e->src, e->dest);
6175 if (index == EDGE_INDEX_NO_EDGE)
6177 fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
6178 continue;
6180 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
6181 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
6182 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
6183 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
6184 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
6185 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
6187 /* We've verified that all the edges are in the list; now let's make sure
6188 there are no spurious edges in the list. */
6190 for (pred = 0 ; pred < n_basic_blocks; pred++)
6191 for (succ = 0 ; succ < n_basic_blocks; succ++)
6193 basic_block p = BASIC_BLOCK (pred);
6194 basic_block s = BASIC_BLOCK (succ);
6196 int found_edge = 0;
6198 for (e = p->succ; e; e = e->succ_next)
6199 if (e->dest == s)
6201 found_edge = 1;
6202 break;
6204 for (e = s->pred; e; e = e->pred_next)
6205 if (e->src == p)
6207 found_edge = 1;
6208 break;
6210 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
6211 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6212 fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
6213 pred, succ);
6214 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
6215 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6216 fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
6217 pred, succ, EDGE_INDEX (elist, BASIC_BLOCK (pred),
6218 BASIC_BLOCK (succ)));
6220 for (succ = 0 ; succ < n_basic_blocks; succ++)
6222 basic_block p = ENTRY_BLOCK_PTR;
6223 basic_block s = BASIC_BLOCK (succ);
6225 int found_edge = 0;
6227 for (e = p->succ; e; e = e->succ_next)
6228 if (e->dest == s)
6230 found_edge = 1;
6231 break;
6233 for (e = s->pred; e; e = e->pred_next)
6234 if (e->src == p)
6236 found_edge = 1;
6237 break;
6239 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
6240 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6241 fprintf (f, "*** Edge (entry, %d) appears to not have an index\n",
6242 succ);
6243 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
6244 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6245 fprintf (f, "*** Edge (entry, %d) has index %d, but no edge exists\n",
6246 succ, EDGE_INDEX (elist, ENTRY_BLOCK_PTR,
6247 BASIC_BLOCK (succ)));
6249 for (pred = 0 ; pred < n_basic_blocks; pred++)
6251 basic_block p = BASIC_BLOCK (pred);
6252 basic_block s = EXIT_BLOCK_PTR;
6254 int found_edge = 0;
6256 for (e = p->succ; e; e = e->succ_next)
6257 if (e->dest == s)
6259 found_edge = 1;
6260 break;
6262 for (e = s->pred; e; e = e->pred_next)
6263 if (e->src == p)
6265 found_edge = 1;
6266 break;
6268 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
6269 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6270 fprintf (f, "*** Edge (%d, exit) appears to not have an index\n",
6271 pred);
6272 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
6273 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6274 fprintf (f, "*** Edge (%d, exit) has index %d, but no edge exists\n",
6275 pred, EDGE_INDEX (elist, BASIC_BLOCK (pred),
6276 EXIT_BLOCK_PTR));
6280 /* This routine will determine what, if any, edge there is between
6281 a specified predecessor and successor. */
6283 int
6284 find_edge_index (edge_list, pred, succ)
6285 struct edge_list *edge_list;
6286 basic_block pred, succ;
6288 int x;
6289 for (x = 0; x < NUM_EDGES (edge_list); x++)
6291 if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
6292 && INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
6293 return x;
6295 return (EDGE_INDEX_NO_EDGE);
6298 /* This function will remove an edge from the flow graph. */
6299 void
6300 remove_edge (e)
6301 edge e;
6303 edge last_pred = NULL;
6304 edge last_succ = NULL;
6305 edge tmp;
6306 basic_block src, dest;
6307 src = e->src;
6308 dest = e->dest;
6309 for (tmp = src->succ; tmp && tmp != e; tmp = tmp->succ_next)
6310 last_succ = tmp;
6312 if (!tmp)
6313 abort ();
6314 if (last_succ)
6315 last_succ->succ_next = e->succ_next;
6316 else
6317 src->succ = e->succ_next;
6319 for (tmp = dest->pred; tmp && tmp != e; tmp = tmp->pred_next)
6320 last_pred = tmp;
6322 if (!tmp)
6323 abort ();
6324 if (last_pred)
6325 last_pred->pred_next = e->pred_next;
6326 else
6327 dest->pred = e->pred_next;
6329 n_edges--;
6330 free (e);
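/* Because remove_edge frees E, a caller walking a successor list
   must fetch the next pointer before removing anything.  A hedged
   sketch of the pattern (SOME_CONDITION is an assumed predicate):

     edge e, next;

     for (e = bb->succ; e; e = next)
       {
         next = e->succ_next;
         if (some_condition (e))
           remove_edge (e);
       }

   remove_fake_successors below uses this idiom.  */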
6333 /* This routine will remove any fake successor edges for a basic block.
6334 When the edge is removed, it is also removed from whatever predecessor
6335 list it is in. */
6336 static void
6337 remove_fake_successors (bb)
6338 basic_block bb;
6340 edge e;
6341 for (e = bb->succ; e ; )
6343 edge tmp = e;
6344 e = e->succ_next;
6345 if ((tmp->flags & EDGE_FAKE) == EDGE_FAKE)
6346 remove_edge (tmp);
6350 /* This routine will remove all fake edges from the flow graph. If
6351 we remove all fake successors, it will automatically remove all
6352 fake predecessors. */
6353 void
6354 remove_fake_edges ()
6356 int x;
6358 for (x = 0; x < n_basic_blocks; x++)
6359 remove_fake_successors (BASIC_BLOCK (x));
6361 /* We've handled all successors except the entry block's. */
6362 remove_fake_successors (ENTRY_BLOCK_PTR);
6365 /* This function adds a fake edge between any block which has no
6366 successors and the exit block.  Some data flow equations require these
6367 edges to exist. */
6368 void
6369 add_noreturn_fake_exit_edges ()
6371 int x;
6373 for (x = 0; x < n_basic_blocks; x++)
6374 if (BASIC_BLOCK (x)->succ == NULL)
6375 make_edge (NULL, BASIC_BLOCK (x), EXIT_BLOCK_PTR, EDGE_FAKE);
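/* A hedged sketch of the intended life cycle of the fake edges; the
   dataflow computation in the middle is assumed and not shown:

     add_noreturn_fake_exit_edges ();
     ... solve dataflow equations that require every block to
         reach the exit block ...
     remove_fake_edges ();  */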
6378 /* Dump the list of basic blocks in the bitmap NODES. */
6379 static void
6380 flow_nodes_print (str, nodes, file)
6381 const char *str;
6382 const sbitmap nodes;
6383 FILE *file;
6385 int node;
6387 fprintf (file, "%s { ", str);
6388 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {fprintf (file, "%d ", node);});
6389 fputs ("}\n", file);
6393 /* Dump the list of exiting edges in the array EDGES. */
6394 static void
6395 flow_exits_print (str, edges, num_edges, file)
6396 const char *str;
6397 const edge *edges;
6398 int num_edges;
6399 FILE *file;
6401 int i;
6403 fprintf (file, "%s { ", str);
6404 for (i = 0; i < num_edges; i++)
6405 fprintf (file, "%d->%d ", edges[i]->src->index, edges[i]->dest->index);
6406 fputs ("}\n", file);
6410 /* Dump loop-related CFG information. */
6411 static void
6412 flow_loops_cfg_dump (loops, file)
6413 const struct loops *loops;
6414 FILE *file;
6416 int i;
6418 if (! loops->num || ! file || ! loops->cfg.dom)
6419 return;
6421 for (i = 0; i < n_basic_blocks; i++)
6423 edge succ;
6425 fprintf (file, ";; %d succs { ", i);
6426 for (succ = BASIC_BLOCK (i)->succ; succ; succ = succ->succ_next)
6427 fprintf (file, "%d ", succ->dest->index);
6428 flow_nodes_print ("} dom", loops->cfg.dom[i], file);
6432 /* Dump the DFS node order. */
6433 if (loops->cfg.dfs_order)
6435 fputs (";; DFS order: ", file);
6436 for (i = 0; i < n_basic_blocks; i++)
6437 fprintf (file, "%d ", loops->cfg.dfs_order[i]);
6438 fputs ("\n", file);
6443 /* Return non-zero if the nodes of LOOP are a subset of OUTER. */
6444 static int
6445 flow_loop_nested_p (outer, loop)
6446 struct loop *outer;
6447 struct loop *loop;
6449 return sbitmap_a_subset_b_p (loop->nodes, outer->nodes);
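/* For example (node numbers are illustrative only), with
   LOOP->nodes = {2, 3} and OUTER->nodes = {1, 2, 3, 4} the subset
   test succeeds, so LOOP is considered nested within OUTER.  */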
6453 /* Dump the loop information specified by LOOPS to the stream FILE. */
6454 void
6455 flow_loops_dump (loops, file, verbose)
6456 const struct loops *loops;
6457 FILE *file;
6458 int verbose;
6460 int i;
6461 int num_loops;
6463 num_loops = loops->num;
6464 if (! num_loops || ! file)
6465 return;
6467 fprintf (file, ";; %d loops found, %d levels\n",
6468 num_loops, loops->levels);
6470 for (i = 0; i < num_loops; i++)
6472 struct loop *loop = &loops->array[i];
6474 fprintf (file, ";; loop %d (%d to %d):\n;; header %d, latch %d, pre-header %d, depth %d, level %d, outer %ld\n",
6475 i, INSN_UID (loop->header->head), INSN_UID (loop->latch->end),
6476 loop->header->index, loop->latch->index,
6477 loop->pre_header ? loop->pre_header->index : -1,
6478 loop->depth, loop->level,
6479 (long) (loop->outer ? (loop->outer - loops->array) : -1));
6480 fprintf (file, ";; %d", loop->num_nodes);
6481 flow_nodes_print (" nodes", loop->nodes, file);
6482 fprintf (file, ";; %d", loop->num_exits);
6483 flow_exits_print (" exits", loop->exits, loop->num_exits, file);
6485 if (loop->shared)
6487 int j;
6489 for (j = 0; j < i; j++)
6491 struct loop *oloop = &loops->array[j];
6493 if (loop->header == oloop->header)
6495 int disjoint;
6496 int smaller;
6498 smaller = loop->num_nodes < oloop->num_nodes;
6500 /* If the union of LOOP and OLOOP is different from
6501 the larger of LOOP and OLOOP then LOOP and OLOOP
6502 must be disjoint. */
6503 disjoint = ! flow_loop_nested_p (smaller ? loop : oloop,
6504 smaller ? oloop : loop);
6505 fprintf (file, ";; loop header %d shared by loops %d, %d %s\n",
6506 loop->header->index, i, j,
6507 disjoint ? "disjoint" : "nested");
6512 if (verbose)
6514 /* Print diagnostics to compare our concept of a loop with
6515 what the loop notes say. */
6516 if (GET_CODE (PREV_INSN (loop->first->head)) != NOTE
6517 || NOTE_LINE_NUMBER (PREV_INSN (loop->first->head))
6518 != NOTE_INSN_LOOP_BEG)
6519 fprintf (file, ";; No NOTE_INSN_LOOP_BEG at %d\n",
6520 INSN_UID (PREV_INSN (loop->first->head)));
6521 if (GET_CODE (NEXT_INSN (loop->last->end)) != NOTE
6522 || NOTE_LINE_NUMBER (NEXT_INSN (loop->last->end))
6523 != NOTE_INSN_LOOP_END)
6524 fprintf (file, ";; No NOTE_INSN_LOOP_END at %d\n",
6525 INSN_UID (NEXT_INSN (loop->last->end)));
6529 if (verbose)
6530 flow_loops_cfg_dump (loops, file);
6534 /* Free all the memory allocated for LOOPS. */
6535 void
6536 flow_loops_free (loops)
6537 struct loops *loops;
6539 if (loops->array)
6541 int i;
6543 if (! loops->num)
6544 abort ();
6546 /* Free the loop descriptors. */
6547 for (i = 0; i < loops->num; i++)
6549 struct loop *loop = &loops->array[i];
6551 if (loop->nodes)
6552 sbitmap_free (loop->nodes);
6553 if (loop->exits)
6554 free (loop->exits);
6556 free (loops->array);
6557 loops->array = NULL;
6559 if (loops->cfg.dom)
6560 sbitmap_vector_free (loops->cfg.dom);
6561 if (loops->cfg.dfs_order)
6562 free (loops->cfg.dfs_order);
6564 sbitmap_free (loops->shared_headers);
6569 /* Find the exits from the loop using the bitmap of loop nodes NODES
6570 and store them in the EXITS array.  Return the number of exits from
6571 the loop. */
6572 static int
6573 flow_loop_exits_find (nodes, exits)
6574 const sbitmap nodes;
6575 edge **exits;
6577 edge e;
6578 int node;
6579 int num_exits;
6581 *exits = NULL;
6583 /* Check all nodes within the loop to see if there are any
6584 successors not in the loop. Note that a node may have multiple
6585 exiting edges. */
6586 num_exits = 0;
6587 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
6588 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
6590 basic_block dest = e->dest;
6592 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
6593 num_exits++;
6597 if (! num_exits)
6598 return 0;
6600 *exits = (edge *) xmalloc (num_exits * sizeof (edge));
6602 /* Store all exiting edges into an array. */
6603 num_exits = 0;
6604 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
6605 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
6607 basic_block dest = e->dest;
6609 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
6610 (*exits)[num_exits++] = e;
6614 return num_exits;
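/* Note the two-pass idiom above: the first EXECUTE_IF_SET_IN_SBITMAP
   walk only counts the exiting edges so that exactly NUM_EXITS slots
   can be allocated, and the second walk fills them in.  */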
6618 /* Find the nodes contained within the loop with header HEADER and
6619 latch LATCH and store in NODES. Return the number of nodes within
6620 the loop. */
6621 static int
6622 flow_loop_nodes_find (header, latch, nodes)
6623 basic_block header;
6624 basic_block latch;
6625 sbitmap nodes;
6627 basic_block *stack;
6628 int sp;
6629 int num_nodes = 0;
6631 stack = (basic_block *) xmalloc (n_basic_blocks * sizeof (basic_block));
6632 sp = 0;
6634 /* Start with only the loop header in the set of loop nodes. */
6635 sbitmap_zero (nodes);
6636 SET_BIT (nodes, header->index);
6637 num_nodes++;
6638 header->loop_depth++;
6640 /* Push the loop latch on to the stack. */
6641 if (! TEST_BIT (nodes, latch->index))
6643 SET_BIT (nodes, latch->index);
6644 latch->loop_depth++;
6645 num_nodes++;
6646 stack[sp++] = latch;
6649 while (sp)
6651 basic_block node;
6652 edge e;
6654 node = stack[--sp];
6655 for (e = node->pred; e; e = e->pred_next)
6657 basic_block ancestor = e->src;
6659 /* If this ancestor has not already been marked as part of the
6660 loop, add it to the set of loop nodes and push it onto the stack. */
6661 if (ancestor != ENTRY_BLOCK_PTR
6662 && ! TEST_BIT (nodes, ancestor->index))
6664 SET_BIT (nodes, ancestor->index);
6665 ancestor->loop_depth++;
6666 num_nodes++;
6667 stack[sp++] = ancestor;
6671 free (stack);
6672 return num_nodes;
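/* An illustrative example (an assumed three-block loop, not from
   this file): given

     header -> a,  a -> latch,  latch -> header  (back edge)

   the walk starts at the latch and follows predecessor edges until
   it reaches the already-marked header, so HEADER, A and LATCH all
   end up in NODES with their loop_depth incremented.  */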
6676 /* Compute the depth first search order of the CFG and store it in
6677 the array DFS_ORDER, using a local bitmap to mark the nodes already
6678 visited. Returns the number of nodes visited. */
6679 static int
6680 flow_depth_first_order_compute (dfs_order)
6681 int *dfs_order;
6683 edge e;
6684 edge *stack;
6685 int sp;
6686 int dfsnum = 0;
6687 sbitmap visited;
6689 /* Allocate a stack for backtracking up the CFG. */
6690 stack = (edge *) xmalloc (n_basic_blocks * sizeof (edge));
6691 sp = 0;
6693 /* Allocate bitmap to track nodes that have been visited. */
6694 visited = sbitmap_alloc (n_basic_blocks);
6696 /* None of the nodes in the CFG have been visited yet. */
6697 sbitmap_zero (visited);
6699 /* Start with the first successor edge from the entry block. */
6700 e = ENTRY_BLOCK_PTR->succ;
6701 while (e)
6703 basic_block src = e->src;
6704 basic_block dest = e->dest;
6706 /* Mark that we have visited this node. */
6707 if (src != ENTRY_BLOCK_PTR)
6708 SET_BIT (visited, src->index);
6710 /* If this node has not been visited before, push the current
6711 edge on to the stack and proceed with the first successor
6712 edge of this node. */
6713 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index)
6714 && dest->succ)
6716 stack[sp++] = e;
6717 e = dest->succ;
6719 else
6721 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index)
6722 && ! dest->succ)
6724 /* DEST has no successors (for example, a non-returning
6725 function is called) so do not push the current edge
6726 but carry on with its next successor. */
6727 dfs_order[dest->index] = n_basic_blocks - ++dfsnum;
6728 SET_BIT (visited, dest->index);
6731 while (! e->succ_next && src != ENTRY_BLOCK_PTR)
6733 dfs_order[src->index] = n_basic_blocks - ++dfsnum;
6735 /* Pop edge off stack. */
6736 e = stack[--sp];
6737 src = e->src;
6739 e = e->succ_next;
6742 free (stack);
6743 sbitmap_free (visited);
6745 /* The number of nodes visited should not be greater than
6746 n_basic_blocks. */
6747 if (dfsnum > n_basic_blocks)
6748 abort ();
6750 /* If not all nodes were visited, the CFG contains unreachable blocks. */
6751 if (dfsnum < n_basic_blocks)
6752 abort ();
6753 return dfsnum;
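/* A worked example on an assumed diamond CFG with edges
   0->1, 0->2, 1->3, 2->3 and 3->exit, visiting successors in that
   order: the blocks complete in the order 3, 1, 2, 0, so the code
   above assigns

     dfs_order[3] = 3, dfs_order[1] = 2,
     dfs_order[2] = 1, dfs_order[0] = 0;

   i.e. DFS_ORDER maps each block index to its position in a reverse
   completion ordering, with the entry-most block receiving 0.  */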
6757 /* Return the block for the pre-header of the loop with header
6758 HEADER where DOM specifies the dominator information. Return NULL if
6759 there is no pre-header. */
6760 static basic_block
6761 flow_loop_pre_header_find (header, dom)
6762 basic_block header;
6763 const sbitmap *dom;
6765 basic_block pre_header;
6766 edge e;
6768 /* If block P is a predecessor of the header and is the only
6769 predecessor that the header does not dominate, then it is the pre-header. */
6770 pre_header = NULL;
6771 for (e = header->pred; e; e = e->pred_next)
6773 basic_block node = e->src;
6775 if (node != ENTRY_BLOCK_PTR
6776 && ! TEST_BIT (dom[node->index], header->index))
6778 if (pre_header == NULL)
6779 pre_header = node;
6780 else
6782 /* There are multiple edges into the header from outside
6783 the loop so there is no pre-header block. */
6784 pre_header = NULL;
6785 break;
6789 return pre_header;
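/* For instance (an assumed fragment): if the header's predecessors
   are PRE and LATCH, and PRE is the only one the header does not
   dominate, PRE is returned as the pre-header; had there been a
   second such predecessor, the loop above would have reset
   PRE_HEADER to NULL and stopped.  */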
6793 /* Add LOOP to the loop hierarchy tree where PREVLOOP was the loop
6794 previously added. The insertion algorithm assumes that the loops
6795 are added in the order found by a depth first search of the CFG. */
6796 static void
6797 flow_loop_tree_node_add (prevloop, loop)
6798 struct loop *prevloop;
6799 struct loop *loop;
6802 if (flow_loop_nested_p (prevloop, loop))
6804 prevloop->inner = loop;
6805 loop->outer = prevloop;
6806 return;
6809 while (prevloop->outer)
6811 if (flow_loop_nested_p (prevloop->outer, loop))
6813 prevloop->next = loop;
6814 loop->outer = prevloop->outer;
6815 return;
6817 prevloop = prevloop->outer;
6820 prevloop->next = loop;
6821 loop->outer = NULL;
6825 /* Build the loop hierarchy tree for LOOPS. */
6826 static void
6827 flow_loops_tree_build (loops)
6828 struct loops *loops;
6830 int i;
6831 int num_loops;
6833 num_loops = loops->num;
6834 if (! num_loops)
6835 return;
6837 /* Root the loop hierarchy tree with the first loop found.
6838 Since we used a depth first search this should be the
6839 outermost loop. */
6840 loops->tree = &loops->array[0];
6841 loops->tree->outer = loops->tree->inner = loops->tree->next = NULL;
6843 /* Add the remaining loops to the tree. */
6844 for (i = 1; i < num_loops; i++)
6845 flow_loop_tree_node_add (&loops->array[i - 1], &loops->array[i]);
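/* A hedged sketch of consuming the resulting tree; WALK_LOOPS is an
   assumed helper, not defined in this file.  Siblings are chained
   through `next' and children hang off `inner':

     static void
     walk_loops (loop)
          struct loop *loop;
     {
       for (; loop; loop = loop->next)
         {
           ... visit LOOP here ...
           walk_loops (loop->inner);
         }
     }

   starting from loops->tree.  */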
6849 /* Helper function to compute loop nesting depth and enclosed loop level
6850 for the natural loop specified by LOOP at the loop depth DEPTH.
6851 Returns the loop level. */
6852 static int
6853 flow_loop_level_compute (loop, depth)
6854 struct loop *loop;
6855 int depth;
6857 struct loop *inner;
6858 int level = 1;
6860 if (! loop)
6861 return 0;
6863 /* Traverse loop tree assigning depth and computing level as the
6864 maximum level of all the inner loops of this loop. The loop
6865 level is equivalent to the height of the loop in the loop tree
6866 and corresponds to the number of enclosed loop levels (including
6867 itself). */
6868 for (inner = loop->inner; inner; inner = inner->next)
6870 int ilevel;
6872 ilevel = flow_loop_level_compute (inner, depth + 1) + 1;
6874 if (ilevel > level)
6875 level = ilevel;
6877 loop->level = level;
6878 loop->depth = depth;
6879 return level;
6883 /* Compute the loop nesting depth and enclosed loop level for the loop
6884 hierarchy tree specified by LOOPS. Return the maximum enclosed loop
6885 level. */
6887 static int
6888 flow_loops_level_compute (loops)
6889 struct loops *loops;
6891 struct loop *loop;
6892 int level;
6893 int levels = 0;
6895 /* Traverse all the outer level loops. */
6896 for (loop = loops->tree; loop; loop = loop->next)
6898 level = flow_loop_level_compute (loop, 1);
6899 if (level > levels)
6900 levels = level;
6902 return levels;
6906 /* Find all the natural loops in the function, save them in the LOOPS
6907 structure, and recalculate the loop_depth information in the basic
6908 block structures. Return the number of natural loops found. */
6910 int
6911 flow_loops_find (loops)
6912 struct loops *loops;
6914 int i;
6915 int b;
6916 int num_loops;
6917 edge e;
6918 sbitmap headers;
6919 sbitmap *dom;
6920 int *dfs_order;
6922 loops->num = 0;
6923 loops->array = NULL;
6924 loops->tree = NULL;
6925 dfs_order = NULL;
6927 /* Taking care of this degenerate case makes the rest of
6928 this code simpler. */
6929 if (n_basic_blocks == 0)
6930 return 0;
6932 /* Compute the dominators. */
6933 dom = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
6934 compute_flow_dominators (dom, NULL);
6936 /* Count the number of loop edges (back edges). This should be the
6937 same as the number of natural loops. Also clear each block's
6938 loop_depth; as we work through a loop nest we call
6939 flow_loop_nodes_find, which increments loop_depth for the nodes
6940 within the current loop, which happens to enclose any inner loops. */
6942 num_loops = 0;
6943 for (b = 0; b < n_basic_blocks; b++)
6945 BASIC_BLOCK (b)->loop_depth = 0;
6946 for (e = BASIC_BLOCK (b)->pred; e; e = e->pred_next)
6948 basic_block latch = e->src;
6950 /* Look for back edges where a predecessor is dominated
6951 by this block. A natural loop has a single entry
6952 node (header) that dominates all the nodes in the
6953 loop. It also has a single back edge to the header
6954 from a latch node. Note that multiple natural loops
6955 may share the same header. */
6956 if (latch != ENTRY_BLOCK_PTR && TEST_BIT (dom[latch->index], b))
6957 num_loops++;
6961 if (num_loops)
6963 /* Compute depth first search order of the CFG so that outer
6964 natural loops will be found before inner natural loops. */
6965 dfs_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
6966 flow_depth_first_order_compute (dfs_order);
6968 /* Allocate loop structures. */
6969 loops->array
6970 = (struct loop *) xcalloc (num_loops, sizeof (struct loop));
6972 headers = sbitmap_alloc (n_basic_blocks);
6973 sbitmap_zero (headers);
6975 loops->shared_headers = sbitmap_alloc (n_basic_blocks);
6976 sbitmap_zero (loops->shared_headers);
6978 /* Find and record information about all the natural loops
6979 in the CFG. */
6980 num_loops = 0;
6981 for (b = 0; b < n_basic_blocks; b++)
6983 basic_block header;
6985 /* Search the nodes of the CFG in DFS order so that we find
6986 outer loops first. */
6987 header = BASIC_BLOCK (dfs_order[b]);
6989 /* Look for all the possible latch blocks for this header. */
6990 for (e = header->pred; e; e = e->pred_next)
6992 basic_block latch = e->src;
6994 /* Look for back edges where a predecessor is dominated
6995 by this block. A natural loop has a single entry
6996 node (header) that dominates all the nodes in the
6997 loop. It also has a single back edge to the header
6998 from a latch node. Note that multiple natural loops
6999 may share the same header. */
7000 if (latch != ENTRY_BLOCK_PTR
7001 && TEST_BIT (dom[latch->index], header->index))
7003 struct loop *loop;
7005 loop = loops->array + num_loops;
7007 loop->header = header;
7008 loop->latch = latch;
7010 /* Keep track of blocks that are loop headers so
7011 that we can tell which loops should be merged. */
7012 if (TEST_BIT (headers, header->index))
7013 SET_BIT (loops->shared_headers, header->index);
7014 SET_BIT (headers, header->index);
7016 /* Find nodes contained within the loop. */
7017 loop->nodes = sbitmap_alloc (n_basic_blocks);
7018 loop->num_nodes
7019 = flow_loop_nodes_find (header, latch, loop->nodes);
7021 /* Compute first and last blocks within the loop.
7022 These are often the same as the loop header and
7023 loop latch respectively, but this is not always
7024 the case. */
7025 loop->first
7026 = BASIC_BLOCK (sbitmap_first_set_bit (loop->nodes));
7027 loop->last
7028 = BASIC_BLOCK (sbitmap_last_set_bit (loop->nodes));
7030 /* Find edges which exit the loop. Note that a node
7031 may have several exit edges. */
7032 loop->num_exits
7033 = flow_loop_exits_find (loop->nodes, &loop->exits);
7035 /* Look to see if the loop has a pre-header node. */
7036 loop->pre_header
7037 = flow_loop_pre_header_find (header, dom);
7039 num_loops++;
7044 /* Natural loops with shared headers may either be disjoint or
7045 nested. Disjoint loops with shared headers cannot be inner
7046 loops and should be merged. For now just mark loops that share
7047 headers. */
7048 for (i = 0; i < num_loops; i++)
7049 if (TEST_BIT (loops->shared_headers, loops->array[i].header->index))
7050 loops->array[i].shared = 1;
7052 sbitmap_free (headers);
7055 loops->num = num_loops;
7057 /* Save CFG-derived information to avoid recomputing it. */
7058 loops->cfg.dom = dom;
7059 loops->cfg.dfs_order = dfs_order;
7061 /* Build the loop hierarchy tree. */
7062 flow_loops_tree_build (loops);
7064 /* Assign the loop nesting depth and enclosed loop level for each
7065 loop. */
7066 loops->levels = flow_loops_level_compute (loops);
7068 return num_loops;
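/* A hedged sketch of a typical caller; DUMP_FILE is an assumed
   stream, not defined here:

     struct loops loops;

     if (flow_loops_find (&loops) > 0)
       {
         flow_loops_dump (&loops, dump_file, 1);
         flow_loops_free (&loops);
       }
*/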
7072 /* Return non-zero if edge E enters header of LOOP from outside of LOOP. */
7073 int
7074 flow_loop_outside_edge_p (loop, e)
7075 const struct loop *loop;
7076 edge e;
7078 if (e->dest != loop->header)
7079 abort ();
7080 return (e->src == ENTRY_BLOCK_PTR)
7081 || ! TEST_BIT (loop->nodes, e->src->index);
7085 /* Clear LOG_LINKS fields of insns in a chain. */
7086 void
7087 clear_log_links (insns)
7088 rtx insns;
7090 rtx i;
7091 for (i = insns; i; i = NEXT_INSN (i))
7092 if (GET_RTX_CLASS (GET_CODE (i)) == 'i')
7093 LOG_LINKS (i) = 0;