/* Data flow analysis for GNU compiler.
   Copyright (C) 1987, 88, 92-98, 1999 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
/* This file contains the data flow analysis pass of the compiler.  It
   computes data flow information which tells combine_instructions
   which insns to consider combining and controls register allocation.

   Additional data flow information that is too bulky to record is
   generated during the analysis, and is used at that time to create
   autoincrement and autodecrement addressing.

   The first step is dividing the function into basic blocks.
   find_basic_blocks does this.  Then life_analysis determines
   where each register is live and where it is dead.

   ** find_basic_blocks **

   find_basic_blocks divides the current function's rtl into basic
   blocks and constructs the CFG.  The blocks are recorded in the
   basic_block_info array; the CFG exists in the edge structures
   referenced by the blocks.

   find_basic_blocks also finds any unreachable loops and deletes them.

   ** life_analysis **

   life_analysis is called immediately after find_basic_blocks.
   It uses the basic block information to determine where each
   hard or pseudo register is live.

   ** live-register info **

   The information about where each register is live is in two parts:
   the REG_NOTES of insns, and the vector basic_block->global_live_at_start.

   basic_block->global_live_at_start has an element for each basic
   block, and the element is a bit-vector with a bit for each hard or
   pseudo register.  The bit is 1 if the register is live at the
   beginning of the basic block.
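
   For example, whether register number REGNO is live on entry to block BB
   can be tested with the regset accessors from basic-block.h (an
   illustrative sketch, not code taken from this file):

	if (REGNO_REG_SET_P (bb->global_live_at_start, regno))
	  ...the register is live at the start of the block...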
   Two types of elements can be added to an insn's REG_NOTES.
   A REG_DEAD note is added to an insn's REG_NOTES for any register
   that meets both of two conditions:  The value in the register is not
   needed in subsequent insns and the insn does not replace the value in
   the register (in the case of multi-word hard registers, the value in
   each register must be replaced by the insn to avoid a REG_DEAD note).

   In the vast majority of cases, an object in a REG_DEAD note will be
   used somewhere in the insn.  The (rare) exception to this is if an
   insn uses a multi-word hard register and only some of the registers are
   needed in subsequent insns.  In that case, REG_DEAD notes will be
   provided for those hard registers that are not subsequently needed.
   Partial REG_DEAD notes of this type do not occur when an insn sets
   only some of the hard registers used in such a multi-word operand;
   omitting REG_DEAD notes for objects stored in an insn is optional and
   the desire to do so does not justify the complexity of the partial
   REG_DEAD notes.

   REG_UNUSED notes are added for each register that is set by the insn
   but is unused subsequently (if every register set by the insn is unused
   and the insn does not reference memory or have some other side-effect,
   the insn is deleted instead).  If only part of a multi-word hard
   register is used in a subsequent insn, REG_UNUSED notes are made for
   the parts that will not be used.
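
   As an illustration (register numbers invented), the REG_NOTES list of
   an insn carrying both kinds of note prints as a chain of EXPR_LISTs
   whose mode slot encodes the note kind:

	(expr_list:REG_DEAD (reg:SI 57)
	   (expr_list:REG_UNUSED (reg:SI 58)
	      (nil)))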
   To determine which registers are live after any insn, one can
   start from the beginning of the basic block and scan insns, noting
   which registers are set by each insn and which die there.

   ** Other actions of life_analysis **

   life_analysis sets up the LOG_LINKS fields of insns because the
   information needed to do so is readily available.

   life_analysis deletes insns whose only effect is to store a value
   that is never used.

   life_analysis notices cases where a reference to a register as
   a memory address can be combined with a preceding or following
   incrementation or decrementation of the register.  The separate
   instruction to increment or decrement is deleted and the address
   is changed to a POST_INC or similar rtx.
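
   For instance (a sketch only; the exact insns depend on the target),
   a store through pseudo 100 followed by a separate increment

	(set (mem:SI (reg:SI 100)) (reg:SI 101))
	(set (reg:SI 100) (plus:SI (reg:SI 100) (const_int 4)))

   can be collapsed into a single insn using an auto-increment address:

	(set (mem:SI (post_inc:SI (reg:SI 100))) (reg:SI 101))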
   Each time an incrementing or decrementing address is created,
   a REG_INC element is added to the insn's REG_NOTES list.

   life_analysis fills in certain vectors containing information about
   register usage: REG_N_REFS, REG_N_DEATHS, REG_N_SETS, REG_LIVE_LENGTH,
   REG_N_CALLS_CROSSED and REG_BASIC_BLOCK.

   life_analysis sets current_function_sp_is_unchanging if the function
   doesn't modify the stack pointer.  */

/* TODO:

   Split out from life_analysis:
	- local property discovery (bb->local_live, bb->local_set)
	- global property computation
	- log links creation
	- pre/post modify transformation
*/
#include "config.h"
#include "system.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "basic-block.h"
#include "insn-config.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "output.h"
#include "function.h"
#include "except.h"
#include "toplev.h"
#include "recog.h"
#include "insn-flags.h"

#include "obstack.h"

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free

/* EXIT_IGNORE_STACK should be nonzero if, when returning from a function,
   the stack pointer does not matter.  The value is tested only in
   functions that have frame pointers.
   No definition is equivalent to always zero.  */
#ifndef EXIT_IGNORE_STACK
#define EXIT_IGNORE_STACK 0
#endif

#ifndef HAVE_epilogue
#define HAVE_epilogue 0
#endif

#ifndef HAVE_prologue
#define HAVE_prologue 0
#endif
/* The contents of the current function definition are allocated
   in this obstack, and all are freed at the end of the function.
   For top-level functions, this is temporary_obstack.
   Separate obstacks are made for nested functions.  */

extern struct obstack *function_obstack;

/* Number of basic blocks in the current function.  */

int n_basic_blocks;

/* Number of edges in the current function.  */

int n_edges;

/* The basic block array.  */

varray_type basic_block_info;

/* The special entry and exit blocks.  */

struct basic_block_def entry_exit_blocks[2]
= {{NULL,			/* head */
    NULL,			/* end */
    NULL,			/* pred */
    NULL,			/* succ */
    NULL,			/* local_set */
    NULL,			/* global_live_at_start */
    NULL,			/* global_live_at_end */
    NULL,			/* aux */
    ENTRY_BLOCK,		/* index */
    0,				/* loop_depth */
    -1, -1			/* eh_beg, eh_end */
   },
   {
    NULL,			/* head */
    NULL,			/* end */
    NULL,			/* pred */
    NULL,			/* succ */
    NULL,			/* local_set */
    NULL,			/* global_live_at_start */
    NULL,			/* global_live_at_end */
    NULL,			/* aux */
    EXIT_BLOCK,			/* index */
    0,				/* loop_depth */
    -1, -1			/* eh_beg, eh_end */
   }
};
/* Nonzero if the second flow pass has completed.  */
int flow2_completed;

/* Maximum register number used in this function, plus one.  */

int max_regno;

/* Indexed by n, giving various register information */

varray_type reg_n_info;

/* Size of the reg_n_info table.  */

unsigned int reg_n_max;

/* Element N is the next insn that uses (hard or pseudo) register number N
   within the current basic block; or zero, if there is no such insn.
   This is valid only during the final backward scan in propagate_block.  */

static rtx *reg_next_use;

/* Size of a regset for the current function,
   in (1) bytes and (2) elements.  */

int regset_bytes;
int regset_size;

/* Regset of regs live when calls to `setjmp'-like functions happen.  */
/* ??? Does this exist only for the setjmp-clobbered warning message?  */

regset regs_live_at_setjmp;

/* List made of EXPR_LIST rtx's which gives pairs of pseudo registers
   that have to go in the same hard reg.
   The first two regs in the list are a pair, and the next two
   are another pair, etc.  */
rtx regs_may_share;

/* Depth within loops of basic block being scanned for lifetime analysis,
   plus one.  This is the weight attached to references to registers.  */

static int loop_depth;

/* During propagate_block, this is non-zero if the value of CC0 is live.  */

static int cc0_live;

/* During propagate_block, this contains a list of all the MEMs we are
   tracking for dead store elimination.  */

static rtx mem_set_list;

/* Set of registers that may be eliminable.  These are handled specially
   in updating regs_ever_live.  */

static HARD_REG_SET elim_reg_set;

/* The basic block structure for every insn, indexed by uid.  */

varray_type basic_block_for_insn;

/* The labels mentioned in non-jump rtl.  Valid during find_basic_blocks.  */
/* ??? Should probably be using LABEL_NUSES instead.  It would take a
   bit of surgery to be able to use or co-opt the routines in jump.  */

static rtx label_value_list;

/* INSN_VOLATILE (insn) is 1 if the insn refers to anything volatile.  */
#define INSN_VOLATILE(INSN) bitmap_bit_p (uid_volatile, INSN_UID (INSN))
#define SET_INSN_VOLATILE(INSN) bitmap_set_bit (uid_volatile, INSN_UID (INSN))
static bitmap uid_volatile;
/* Forward declarations */
static int count_basic_blocks		PROTO((rtx));
static rtx find_basic_blocks_1		PROTO((rtx));
static void create_basic_block		PROTO((int, rtx, rtx, rtx));
static void clear_edges			PROTO((void));
static void make_edges			PROTO((rtx));
static void make_edge			PROTO((sbitmap *, basic_block,
					       basic_block, int));
static void make_label_edge		PROTO((sbitmap *, basic_block,
					       rtx, int));
static void make_eh_edge		PROTO((sbitmap *, eh_nesting_info *,
					       basic_block, rtx, int));
static void mark_critical_edges		PROTO((void));
static void move_stray_eh_region_notes	PROTO((void));
static void record_active_eh_regions	PROTO((rtx));

static void commit_one_edge_insertion	PROTO((edge));

static void delete_unreachable_blocks	PROTO((void));
static void delete_eh_regions		PROTO((void));
static int can_delete_note_p		PROTO((rtx));
static int delete_block			PROTO((basic_block));
static void expunge_block		PROTO((basic_block));
static rtx flow_delete_insn		PROTO((rtx));
static int can_delete_label_p		PROTO((rtx));
static int merge_blocks_move_predecessor_nojumps PROTO((basic_block,
							basic_block));
static int merge_blocks_move_successor_nojumps PROTO((basic_block,
						      basic_block));
static void merge_blocks_nomove		PROTO((basic_block, basic_block));
static int merge_blocks			PROTO((edge,basic_block,basic_block));
static void try_merge_blocks		PROTO((void));
static void tidy_fallthru_edge		PROTO((edge,basic_block,basic_block));

static int verify_wide_reg_1		PROTO((rtx *, void *));
static void verify_wide_reg		PROTO((int, rtx, rtx));
static void verify_local_live_at_start	PROTO((regset, basic_block));
static int set_noop_p			PROTO((rtx));
static int noop_move_p			PROTO((rtx));
static void notice_stack_pointer_modification PROTO ((rtx, rtx, void *));
static void record_volatile_insns	PROTO((rtx));
static void mark_reg			PROTO((regset, rtx));
static void mark_regs_live_at_end	PROTO((regset));
static void life_analysis_1		PROTO((rtx, int, int));
static void calculate_global_regs_live	PROTO((sbitmap, sbitmap, int));
static void propagate_block		PROTO((regset, rtx, rtx,
					       regset, int, int));
static int insn_dead_p			PROTO((rtx, regset, int, rtx));
static int libcall_dead_p		PROTO((rtx, regset, rtx, rtx));
static void mark_set_regs		PROTO((regset, regset, rtx,
					       rtx, regset, int));
static void mark_set_1			PROTO((regset, regset, rtx,
					       rtx, regset, int));
#ifdef AUTO_INC_DEC
static void find_auto_inc		PROTO((regset, rtx, rtx));
static int try_pre_increment_1		PROTO((rtx));
static int try_pre_increment		PROTO((rtx, rtx, HOST_WIDE_INT));
#endif
static void mark_used_regs		PROTO((regset, regset, rtx, int, rtx));
void dump_flow_info			PROTO((FILE *));
void debug_flow_info			PROTO((void));
static void dump_edge_info		PROTO((FILE *, edge, int));

static void count_reg_sets_1		PROTO ((rtx));
static void count_reg_sets		PROTO ((rtx));
static void count_reg_references	PROTO ((rtx));
static void invalidate_mems_from_autoinc PROTO ((rtx));
static void remove_edge			PROTO ((edge));
static void remove_fake_successors	PROTO ((basic_block));
static void flow_nodes_print	PROTO ((const char *, const sbitmap, FILE *));
static void flow_exits_print PROTO ((const char *, const edge *, int, FILE *));
static void flow_loops_cfg_dump		PROTO ((const struct loops *, FILE *));
static int flow_loop_nested_p		PROTO ((struct loop *, struct loop *));
static int flow_loop_exits_find		PROTO ((const sbitmap, edge **));
static int flow_loop_nodes_find	PROTO ((basic_block, basic_block, sbitmap));
static int flow_depth_first_order_compute	PROTO ((int *));
static basic_block flow_loop_pre_header_find	PROTO ((basic_block, const sbitmap *));
static void flow_loop_tree_node_add	PROTO ((struct loop *, struct loop *));
static void flow_loops_tree_build	PROTO ((struct loops *));
static int flow_loop_level_compute	PROTO ((struct loop *, int));
static int flow_loops_level_compute	PROTO ((struct loops *));

/* This function is always defined so it can be called from the
   debugger, and it is declared extern so we don't get warnings about
   it being unused.  */
void verify_flow_info			PROTO ((void));
int flow_loop_outside_edge_p		PROTO ((const struct loop *, edge));
/* Find basic blocks of the current function.
   F is the first insn of the function and NREGS the number of register
   numbers in use.  */

void
find_basic_blocks (f, nregs, file, do_cleanup)
     rtx f;
     int nregs ATTRIBUTE_UNUSED;
     FILE *file ATTRIBUTE_UNUSED;
     int do_cleanup;
{
  int max_uid;

  /* Flush out existing data.  */
  if (basic_block_info != NULL)
    {
      int i;

      clear_edges ();

      /* Clear bb->aux on all extant basic blocks.  We'll use this as a
	 tag for reuse during create_basic_block, just in case some pass
	 copies around basic block notes improperly.  */
      for (i = 0; i < n_basic_blocks; ++i)
	BASIC_BLOCK (i)->aux = NULL;

      VARRAY_FREE (basic_block_info);
    }

  n_basic_blocks = count_basic_blocks (f);

  /* Size the basic block table.  The actual structures will be allocated
     by find_basic_blocks_1, since we want to keep the structure pointers
     stable across calls to find_basic_blocks.  */
  /* ??? This whole issue would be much simpler if we called find_basic_blocks
     exactly once, and thereafter we don't have a single long chain of
     instructions at all until close to the end of compilation when we
     actually lay them out.  */

  VARRAY_BB_INIT (basic_block_info, n_basic_blocks, "basic_block_info");

  label_value_list = find_basic_blocks_1 (f);

  /* Record the block to which an insn belongs.  */
  /* ??? This should be done another way, by which (perhaps) a label is
     tagged directly with the basic block that it starts.  It is used for
     more than that currently, but IMO that is the only valid use.  */

  max_uid = get_max_uid ();
#ifdef AUTO_INC_DEC
  /* Leave space for insns life_analysis makes in some cases for auto-inc.
     These cases are rare, so we don't need too much space.  */
  max_uid += max_uid / 10;
#endif

  compute_bb_for_insn (max_uid);

  /* Discover the edges of our cfg.  */

  record_active_eh_regions (f);
  make_edges (label_value_list);

  /* Delete unreachable blocks, then merge blocks when possible.  */

  if (do_cleanup)
    {
      delete_unreachable_blocks ();
      move_stray_eh_region_notes ();
      record_active_eh_regions (f);
      try_merge_blocks ();
    }

  /* Mark critical edges.  */

  mark_critical_edges ();

  /* Kill the data we won't maintain.  */
  label_value_list = NULL_RTX;

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
}
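
/* An illustrative calling sequence (actual callers live elsewhere in
   the compiler; the arguments here are only a sketch):

	find_basic_blocks (get_insns (), max_reg_num (), rtl_dump_file, 1);

   after which n_basic_blocks, BASIC_BLOCK (i) and the edge lists are
   valid until the insn chain is next rearranged.  */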
/* Count the basic blocks of the function.  */

static int
count_basic_blocks (f)
     rtx f;
{
  register rtx insn;
  register RTX_CODE prev_code;
  register int count = 0;
  int eh_region = 0;
  int call_had_abnormal_edge = 0;
  rtx prev_call = NULL_RTX;

  prev_code = JUMP_INSN;
  for (insn = f; insn; insn = NEXT_INSN (insn))
    {
      register RTX_CODE code = GET_CODE (insn);

      if (code == CODE_LABEL
	  || (GET_RTX_CLASS (code) == 'i'
	      && (prev_code == JUMP_INSN
		  || prev_code == BARRIER
		  || (prev_code == CALL_INSN && call_had_abnormal_edge))))
	{
	  count++;
	}

      /* Record whether this call created an edge.  */
      if (code == CALL_INSN)
	{
	  rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
	  int region = (note ? XWINT (XEXP (note, 0), 0) : 1);
	  prev_call = insn;
	  call_had_abnormal_edge = 0;

	  /* If there is a specified EH region, we have an edge.  */
	  if (eh_region && region > 0)
	    call_had_abnormal_edge = 1;
	  else
	    {
	      /* If there is a nonlocal goto label and the specified
		 region number isn't -1, we have an edge.  (0 means
		 no throw, but might have a nonlocal goto).  */
	      if (nonlocal_goto_handler_labels && region >= 0)
		call_had_abnormal_edge = 1;
	    }
	}
      else if (code != NOTE)
	prev_call = NULL_RTX;

      if (code != NOTE)
	prev_code = code;
      else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
	++eh_region;
      else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
	--eh_region;
    }

  /* The rest of the compiler works a bit more smoothly when we don't have to
     check for the edge case of do-nothing functions with no basic blocks.  */
  if (count == 0)
    {
      emit_insn (gen_rtx_USE (VOIDmode, const0_rtx));
      count = 1;
    }

  return count;
}
/* Find all basic blocks of the function whose first insn is F.

   Collect and return a list of labels whose addresses are taken.  This
   will be used in make_edges for use with computed gotos.  */

static rtx
find_basic_blocks_1 (f)
     rtx f;
{
  register rtx insn, next;
  int call_has_abnormal_edge = 0;
  int i = 0;
  rtx bb_note = NULL_RTX;
  rtx eh_list = NULL_RTX;
  rtx label_value_list = NULL_RTX;
  rtx head = NULL_RTX;
  rtx end = NULL_RTX;

  /* We process the instructions in a slightly different way than we did
     previously.  This is so that we see a NOTE_BASIC_BLOCK after we have
     closed out the previous block, so that it gets attached at the proper
     place.  Since this form should be equivalent to the previous,
     count_basic_blocks continues to use the old form as a check.  */

  for (insn = f; insn; insn = next)
    {
      enum rtx_code code = GET_CODE (insn);

      next = NEXT_INSN (insn);

      if (code == CALL_INSN)
	{
	  /* Record whether this call created an edge.  */
	  rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
	  int region = (note ? XWINT (XEXP (note, 0), 0) : 1);
	  call_has_abnormal_edge = 0;

	  /* If there is an EH region, we have an edge.  */
	  if (eh_list && region > 0)
	    call_has_abnormal_edge = 1;
	  else
	    {
	      /* If there is a nonlocal goto label and the specified
		 region number isn't -1, we have an edge.  (0 means
		 no throw, but might have a nonlocal goto).  */
	      if (nonlocal_goto_handler_labels && region >= 0)
		call_has_abnormal_edge = 1;
	    }
	}

      switch (code)
	{
	case NOTE:
	  {
	    int kind = NOTE_LINE_NUMBER (insn);

	    /* Keep a LIFO list of the currently active exception notes.  */
	    if (kind == NOTE_INSN_EH_REGION_BEG)
	      eh_list = alloc_INSN_LIST (insn, eh_list);
	    else if (kind == NOTE_INSN_EH_REGION_END)
	      {
		rtx t = eh_list;
		eh_list = XEXP (eh_list, 1);
		free_INSN_LIST_node (t);
	      }

	    /* Look for basic block notes with which to keep the
	       basic_block_info pointers stable.  Unthread the note now;
	       we'll put it back at the right place in create_basic_block.
	       Or not at all if we've already found a note in this block.  */
	    else if (kind == NOTE_INSN_BASIC_BLOCK)
	      {
		if (bb_note == NULL_RTX)
		  bb_note = insn;
		next = flow_delete_insn (insn);
	      }

	    break;
	  }

	case CODE_LABEL:
	  /* A basic block starts at a label.  If we've closed one off due
	     to a barrier or some such, no need to do it again.  */
	  if (head != NULL_RTX)
	    {
	      /* While we now have edge lists with which other portions of
		 the compiler might determine a call ending a basic block
		 does not imply an abnormal edge, it will be a bit before
		 everything can be updated.  So continue to emit a noop at
		 the end of such a block.  */
	      if (GET_CODE (end) == CALL_INSN)
		{
		  rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
		  end = emit_insn_after (nop, end);
		}

	      create_basic_block (i++, head, end, bb_note);
	      bb_note = NULL_RTX;
	    }
	  head = end = insn;
	  break;

	case JUMP_INSN:
	  /* A basic block ends at a jump.  */
	  if (head == NULL_RTX)
	    head = insn;
	  else
	    {
	      /* ??? Make a special check for table jumps.  The way this
		 happens is truly and amazingly gross.  We are about to
		 create a basic block that contains just a code label and
		 an addr*vec jump insn.  Worse, an addr_diff_vec creates
		 its own natural loop.

		 Prevent this bit of brain damage, pasting things together
		 correctly in make_edges.

		 The correct solution involves emitting the table directly
		 on the tablejump instruction as a note, or JUMP_LABEL.  */

	      if (GET_CODE (PATTERN (insn)) == ADDR_VEC
		  || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
		{
		  head = end = NULL;
		  n_basic_blocks--;
		  break;
		}
	    }
	  end = insn;
	  goto new_bb_inclusive;

	case BARRIER:
	  /* A basic block ends at a barrier.  It may be that an unconditional
	     jump already closed the basic block -- no need to do it again.  */
	  if (head == NULL_RTX)
	    break;

	  /* While we now have edge lists with which other portions of the
	     compiler might determine a call ending a basic block does not
	     imply an abnormal edge, it will be a bit before everything can
	     be updated.  So continue to emit a noop at the end of such a
	     block.  */
	  if (GET_CODE (end) == CALL_INSN)
	    {
	      rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
	      end = emit_insn_after (nop, end);
	    }
	  goto new_bb_exclusive;

	case CALL_INSN:
	  /* A basic block ends at a call that can either throw or
	     do a non-local goto.  */
	  if (call_has_abnormal_edge)
	    {
	    new_bb_inclusive:
	      if (head == NULL_RTX)
		head = insn;
	      end = insn;

	    new_bb_exclusive:
	      create_basic_block (i++, head, end, bb_note);
	      head = end = NULL_RTX;
	      bb_note = NULL_RTX;
	      break;
	    }
	  /* FALLTHRU */

	default:
	  if (GET_RTX_CLASS (code) == 'i')
	    {
	      if (head == NULL_RTX)
		head = insn;
	      end = insn;
	    }
	  break;
	}

      if (GET_RTX_CLASS (code) == 'i')
	{
	  rtx note;

	  /* Make a list of all labels referred to other than by jumps
	     (which just don't have the REG_LABEL notes).

	     Make a special exception for labels followed by an ADDR*VEC,
	     as this would be a part of the tablejump setup code.

	     Make a special exception for the eh_return_stub_label, which
	     we know isn't part of any otherwise visible control flow.  */

	  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	    if (REG_NOTE_KIND (note) == REG_LABEL)
	      {
		rtx lab = XEXP (note, 0), next;

		if (lab == eh_return_stub_label)
		  ;
		else if ((next = next_nonnote_insn (lab)) != NULL
			 && GET_CODE (next) == JUMP_INSN
			 && (GET_CODE (PATTERN (next)) == ADDR_VEC
			     || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
		  ;
		else
		  label_value_list
		    = alloc_EXPR_LIST (0, XEXP (note, 0), label_value_list);
	      }
	}
    }

  if (head != NULL_RTX)
    create_basic_block (i++, head, end, bb_note);

  if (i != n_basic_blocks)
    abort ();

  return label_value_list;
}
/* Create a new basic block consisting of the instructions between
   HEAD and END inclusive.  Reuses the note and basic block struct
   in BB_NOTE, if any.  */

static void
create_basic_block (index, head, end, bb_note)
     int index;
     rtx head, end, bb_note;
{
  basic_block bb;

  if (bb_note
      && ! RTX_INTEGRATED_P (bb_note)
      && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
      && bb->aux == NULL)
    {
      /* If we found an existing note, thread it back onto the chain.  */
      if (GET_CODE (head) == CODE_LABEL)
	add_insn_after (bb_note, head);
      else
	{
	  add_insn_before (bb_note, head);
	  head = bb_note;
	}
    }
  else
    {
      /* Otherwise we must create a note and a basic block structure.
	 Since we allow basic block structs in rtl, give the struct
	 the same lifetime by allocating it off the function obstack
	 rather than using malloc.  */

      bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
      memset (bb, 0, sizeof (*bb));

      if (GET_CODE (head) == CODE_LABEL)
	bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
      else
	{
	  bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
	  head = bb_note;
	}
      NOTE_BASIC_BLOCK (bb_note) = bb;
    }

  /* Always include the bb note in the block.  */
  if (NEXT_INSN (end) == bb_note)
    end = bb_note;

  bb->head = head;
  bb->end = end;
  bb->index = index;
  BASIC_BLOCK (index) = bb;

  /* Tag the block so that we know it has been used when considering
     other basic block notes.  */
  bb->aux = bb;
}
/* Records the basic block struct in BB_FOR_INSN, for every instruction
   indexed by INSN_UID.  MAX is the size of the array.  */

void
compute_bb_for_insn (max)
     int max;
{
  int i;

  if (basic_block_for_insn)
    VARRAY_FREE (basic_block_for_insn);
  VARRAY_BB_INIT (basic_block_for_insn, max, "basic_block_for_insn");

  for (i = 0; i < n_basic_blocks; ++i)
    {
      basic_block bb = BASIC_BLOCK (i);
      rtx insn, end;

      end = bb->end;
      insn = bb->head;
      while (1)
	{
	  int uid = INSN_UID (insn);
	  if (uid < max)
	    VARRAY_BB (basic_block_for_insn, uid) = bb;
	  if (insn == end)
	    break;
	  insn = NEXT_INSN (insn);
	}
    }
}
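
/* After compute_bb_for_insn has run, the block owning any insn can be
   found in constant time via the BLOCK_FOR_INSN accessor from
   basic-block.h, e.g. (illustrative):

	basic_block bb = BLOCK_FOR_INSN (insn);  */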
/* Free the memory associated with the edge structures.  */

static void
clear_edges ()
{
  int i;
  edge n, e;

  for (i = 0; i < n_basic_blocks; ++i)
    {
      basic_block bb = BASIC_BLOCK (i);

      for (e = bb->succ; e ; e = n)
	{
	  n = e->succ_next;
	  free (e);
	}

      bb->succ = 0;
      bb->pred = 0;
    }

  for (e = ENTRY_BLOCK_PTR->succ; e ; e = n)
    {
      n = e->succ_next;
      free (e);
    }

  ENTRY_BLOCK_PTR->succ = 0;
  EXIT_BLOCK_PTR->pred = 0;

  n_edges = 0;
}
/* Identify the edges between basic blocks.

   NONLOCAL_LABEL_LIST is a list of non-local labels in the function.  Blocks
   that are otherwise unreachable may be reachable with a non-local goto.

   BB_EH_END is an array indexed by basic block number in which we record
   the list of exception regions active at the end of the basic block.  */

static void
make_edges (label_value_list)
     rtx label_value_list;
{
  int i;
  eh_nesting_info *eh_nest_info = init_eh_nesting_info ();
  sbitmap *edge_cache = NULL;

  /* Assume no computed jump; revise as we create edges.  */
  current_function_has_computed_jump = 0;

  /* Heavy use of computed goto in machine-generated code can lead to
     nearly fully-connected CFGs.  In that case we spend a significant
     amount of time searching the edge lists for duplicates.  */
  if (forced_labels || label_value_list)
    {
      edge_cache = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
      sbitmap_vector_zero (edge_cache, n_basic_blocks);
    }

  /* By nature of the way these get numbered, block 0 is always the entry.  */
  make_edge (edge_cache, ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU);

  for (i = 0; i < n_basic_blocks; ++i)
    {
      basic_block bb = BASIC_BLOCK (i);
      rtx insn, x;
      enum rtx_code code;
      int force_fallthru = 0;

      /* Examine the last instruction of the block, and discover the
	 ways we can leave the block.  */

      insn = bb->end;
      code = GET_CODE (insn);

      /* A branch.  */
      if (code == JUMP_INSN)
	{
	  rtx tmp;

	  /* ??? Recognize a tablejump and do the right thing.  */
	  if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
	      && (tmp = NEXT_INSN (tmp)) != NULL_RTX
	      && GET_CODE (tmp) == JUMP_INSN
	      && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
		  || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
	    {
	      rtvec vec;
	      int j;

	      if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
		vec = XVEC (PATTERN (tmp), 0);
	      else
		vec = XVEC (PATTERN (tmp), 1);

	      for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
		make_label_edge (edge_cache, bb,
				 XEXP (RTVEC_ELT (vec, j), 0), 0);

	      /* Some targets (e.g., ARM) emit a conditional jump that also
		 contains the out-of-range target.  Scan for these and
		 add an edge if necessary.  */
	      if ((tmp = single_set (insn)) != NULL
		  && SET_DEST (tmp) == pc_rtx
		  && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
		  && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
		make_label_edge (edge_cache, bb,
				 XEXP (XEXP (SET_SRC (tmp), 2), 0), 0);

#ifdef CASE_DROPS_THROUGH
	      /* Silly VAXen.  The ADDR_VEC is going to be in the way of
		 us naturally detecting fallthru into the next block.  */
	      force_fallthru = 1;
#endif
	    }

	  /* If this is a computed jump, then mark it as reaching
	     everything on the label_value_list and forced_labels list.  */
	  else if (computed_jump_p (insn))
	    {
	      current_function_has_computed_jump = 1;

	      for (x = label_value_list; x; x = XEXP (x, 1))
		make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);

	      for (x = forced_labels; x; x = XEXP (x, 1))
		make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
	    }
	  /* A return insn creates an edge to the exit block.  */
	  else if (returnjump_p (insn))
	    make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);

	  /* Otherwise, we have a plain conditional or unconditional jump.  */
	  else
	    {
	      if (! JUMP_LABEL (insn))
		abort ();
	      make_label_edge (edge_cache, bb, JUMP_LABEL (insn), 0);
	    }
	}

      /* If this is a CALL_INSN, then mark it as reaching the active EH
	 handler for this CALL_INSN.  If we're handling asynchronous
	 exceptions then any insn can reach any of the active handlers.

	 Also mark the CALL_INSN as reaching any nonlocal goto handler.  */

      if (code == CALL_INSN || asynchronous_exceptions)
	{
	  /* If there's an EH region active at the end of a block,
	     add the appropriate edges.  */
	  if (bb->eh_end >= 0)
	    make_eh_edge (edge_cache, eh_nest_info, bb, insn, bb->eh_end);

	  /* If we have asynchronous exceptions, do the same for *all*
	     exception regions active in the block.  */
	  if (asynchronous_exceptions
	      && bb->eh_beg != bb->eh_end)
	    {
	      if (bb->eh_beg >= 0)
		make_eh_edge (edge_cache, eh_nest_info, bb,
			      NULL_RTX, bb->eh_beg);

	      for (x = bb->head; x != bb->end; x = NEXT_INSN (x))
		if (GET_CODE (x) == NOTE
		    && (NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_BEG
			|| NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_END))
		  {
		    int region = NOTE_EH_HANDLER (x);
		    make_eh_edge (edge_cache, eh_nest_info, bb,
				  NULL_RTX, region);
		  }
	    }

	  if (code == CALL_INSN && nonlocal_goto_handler_labels)
	    {
	      /* ??? This could be made smarter: in some cases it's possible
		 to tell that certain calls will not do a nonlocal goto.

		 For example, if the nested functions that do the nonlocal
		 gotos do not have their addresses taken, then only calls to
		 those functions or to other nested functions that use them
		 could possibly do nonlocal gotos.  */
	      /* We do know that a REG_EH_REGION note with a value less
		 than 0 is guaranteed not to perform a non-local goto.  */
	      rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
	      if (!note || XINT (XEXP (note, 0), 0) >= 0)
		for (x = nonlocal_goto_handler_labels; x ; x = XEXP (x, 1))
		  make_label_edge (edge_cache, bb, XEXP (x, 0),
				   EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
	    }
	}

      /* We know something about the structure of the function __throw in
	 libgcc2.c.  It is the only function that ever contains eh_stub
	 labels.  It modifies its return address so that the last block
	 returns to one of the eh_stub labels within it.  So we have to
	 make additional edges in the flow graph.  */
      if (i + 1 == n_basic_blocks && eh_return_stub_label != 0)
	make_label_edge (edge_cache, bb, eh_return_stub_label, EDGE_EH);

      /* Find out if we can drop through to the next block.  */
      insn = next_nonnote_insn (insn);
      if (!insn || (i + 1 == n_basic_blocks && force_fallthru))
	make_edge (edge_cache, bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
      else if (i + 1 < n_basic_blocks)
	{
	  rtx tmp = BLOCK_HEAD (i + 1);
	  if (GET_CODE (tmp) == NOTE)
	    tmp = next_nonnote_insn (tmp);
	  if (force_fallthru || insn == tmp)
	    make_edge (edge_cache, bb, BASIC_BLOCK (i + 1), EDGE_FALLTHRU);
	}
    }

  free_eh_nesting_info (eh_nest_info);
  if (edge_cache)
    sbitmap_vector_free (edge_cache);
}
/* Create an edge between two basic blocks.  FLAGS are auxiliary information
   about the edge that is accumulated between calls.  */

static void
make_edge (edge_cache, src, dst, flags)
     sbitmap *edge_cache;
     basic_block src, dst;
     int flags;
{
  int use_edge_cache;
  edge e;

  /* Don't bother with edge cache for ENTRY or EXIT; there aren't that
     many edges to them, and we didn't allocate memory for it.  */
  use_edge_cache = (edge_cache
		    && src != ENTRY_BLOCK_PTR
		    && dst != EXIT_BLOCK_PTR);

  /* Make sure we don't add duplicate edges.  */
  if (! use_edge_cache || TEST_BIT (edge_cache[src->index], dst->index))
    for (e = src->succ; e ; e = e->succ_next)
      if (e->dest == dst)
	{
	  e->flags |= flags;
	  return;
	}

  e = (edge) xcalloc (1, sizeof (*e));
  n_edges++;

  e->succ_next = src->succ;
  e->pred_next = dst->pred;
  e->src = src;
  e->dest = dst;
  e->flags = flags;

  src->succ = e;
  dst->pred = e;

  if (use_edge_cache)
    SET_BIT (edge_cache[src->index], dst->index);
}
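
/* Note that because make_edge pushes new edges onto the front of the
   succ/pred chains, walking a chain visits edges in reverse creation
   order.  The usual idiom (illustrative) is:

	for (e = bb->succ; e ; e = e->succ_next)
	  if (e->dest == target)
	    break;

   The sbitmap cache above only short-circuits the duplicate search;
   the chains remain the authoritative representation of the CFG.  */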
/* Create an edge from a basic block to a label.  */

static void
make_label_edge (edge_cache, src, label, flags)
     sbitmap *edge_cache;
     basic_block src;
     rtx label;
     int flags;
{
  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If the label was never emitted, this insn is junk, but avoid a
     crash trying to refer to BLOCK_FOR_INSN (label).  This can happen
     as a result of a syntax error and a diagnostic has already been
     printed.  */

  if (INSN_UID (label) == 0)
    return;

  make_edge (edge_cache, src, BLOCK_FOR_INSN (label), flags);
}
/* Create the edges generated by INSN in REGION.  */

static void
make_eh_edge (edge_cache, eh_nest_info, src, insn, region)
     sbitmap *edge_cache;
     eh_nesting_info *eh_nest_info;
     basic_block src;
     rtx insn;
     int region;
{
  handler_info **handler_list;
  int num, is_call;

  is_call = (insn && GET_CODE (insn) == CALL_INSN ? EDGE_ABNORMAL_CALL : 0);
  num = reachable_handlers (region, eh_nest_info, insn, &handler_list);
  while (--num >= 0)
    {
      make_label_edge (edge_cache, src, handler_list[num]->handler_label,
		       EDGE_ABNORMAL | EDGE_EH | is_call);
    }
}
/* EH_REGION notes appearing between basic blocks are ambiguous, and even
   dangerous if we intend to move basic blocks around.  Move such notes
   into the following block.  */

static void
move_stray_eh_region_notes ()
{
  int i;
  basic_block b1, b2;

  if (n_basic_blocks < 2)
    return;

  b2 = BASIC_BLOCK (n_basic_blocks - 1);
  for (i = n_basic_blocks - 2; i >= 0; --i, b2 = b1)
    {
      rtx insn, next, list = NULL_RTX;

      b1 = BASIC_BLOCK (i);
      for (insn = NEXT_INSN (b1->end); insn != b2->head; insn = next)
	{
	  next = NEXT_INSN (insn);
	  if (GET_CODE (insn) == NOTE
	      && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG
		  || NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
	    {
	      /* Unlink from the insn chain.  */
	      NEXT_INSN (PREV_INSN (insn)) = next;
	      PREV_INSN (next) = PREV_INSN (insn);

	      /* Queue it.  */
	      NEXT_INSN (insn) = list;
	      list = insn;
	    }
	}

      if (list == NULL_RTX)
	continue;

      /* Find where to insert these things.  */
      insn = b2->head;
      if (GET_CODE (insn) == CODE_LABEL)
	insn = NEXT_INSN (insn);

      while (list)
	{
	  next = NEXT_INSN (list);
	  add_insn_after (list, insn);
	  list = next;
	}
    }
}
/* Recompute eh_beg/eh_end for each basic block.  */

static void
record_active_eh_regions (f)
     rtx f;
{
  rtx insn, eh_list = NULL_RTX;
  int i = 0;
  basic_block bb = BASIC_BLOCK (0);

  for (insn = f; insn ; insn = NEXT_INSN (insn))
    {
      if (bb->head == insn)
	bb->eh_beg = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);

      if (GET_CODE (insn) == NOTE)
	{
	  int kind = NOTE_LINE_NUMBER (insn);
	  if (kind == NOTE_INSN_EH_REGION_BEG)
	    eh_list = alloc_INSN_LIST (insn, eh_list);
	  else if (kind == NOTE_INSN_EH_REGION_END)
	    {
	      rtx t = XEXP (eh_list, 1);
	      free_INSN_LIST_node (eh_list);
	      eh_list = t;
	    }
	}

      if (bb->end == insn)
	{
	  bb->eh_end = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);
	  i += 1;
	  if (i == n_basic_blocks)
	    break;
	  bb = BASIC_BLOCK (i);
	}
    }
}
/* Identify critical edges and set the bits appropriately.  */

static void
mark_critical_edges ()
{
  int i, n = n_basic_blocks;
  basic_block bb;

  /* We begin with the entry block.  This is not terribly important now,
     but could be if a front end (Fortran) implemented alternate entry
     points.  */
  bb = ENTRY_BLOCK_PTR;
  i = -1;

  while (1)
    {
      edge e;

      /* (1) Critical edges must have a source with multiple successors.  */
      if (bb->succ && bb->succ->succ_next)
	{
	  for (e = bb->succ; e ; e = e->succ_next)
	    {
	      /* (2) Critical edges must have a destination with multiple
		 predecessors.  Note that we know there is at least one
		 predecessor -- the edge we followed to get here.  */
	      if (e->dest->pred->pred_next)
		e->flags |= EDGE_CRITICAL;
	      else
		e->flags &= ~EDGE_CRITICAL;
	    }
	}
      else
	{
	  for (e = bb->succ; e ; e = e->succ_next)
	    e->flags &= ~EDGE_CRITICAL;
	}

      if (++i >= n)
	break;
      bb = BASIC_BLOCK (i);
    }
}
/* Split a (typically critical) edge.  Return the new block.
   Abort on abnormal edges.

   ??? The code generally expects to be called on critical edges.
   The case of a block ending in an unconditional jump to a
   block with multiple predecessors is not handled optimally.  */

basic_block
split_edge (edge_in)
     edge edge_in;
{
  basic_block old_pred, bb, old_succ;
  edge edge_out;
  rtx bb_note;
  int i, j;

  /* Abnormal edges cannot be split.  */
  if ((edge_in->flags & EDGE_ABNORMAL) != 0)
    abort ();

  old_pred = edge_in->src;
  old_succ = edge_in->dest;

  /* Remove the existing edge from the destination's pred list.  */
  {
    edge *pp;
    for (pp = &old_succ->pred; *pp != edge_in; pp = &(*pp)->pred_next)
      continue;
    *pp = edge_in->pred_next;
    edge_in->pred_next = NULL;
  }

  /* Create the new structures.  */
  bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
  edge_out = (edge) xcalloc (1, sizeof (*edge_out));
  n_edges++;

  memset (bb, 0, sizeof (*bb));
  bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
  bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);

  /* ??? This info is likely going to be out of date very soon.  */
  if (old_succ->global_live_at_start)
    {
      COPY_REG_SET (bb->global_live_at_start, old_succ->global_live_at_start);
      COPY_REG_SET (bb->global_live_at_end, old_succ->global_live_at_start);
    }
  else
    {
      CLEAR_REG_SET (bb->global_live_at_start);
      CLEAR_REG_SET (bb->global_live_at_end);
    }

  /* Wire them up.  */
  bb->pred = edge_in;
  bb->succ = edge_out;

  edge_in->dest = bb;
  edge_in->flags &= ~EDGE_CRITICAL;

  edge_out->pred_next = old_succ->pred;
  edge_out->succ_next = NULL;
  edge_out->src = bb;
  edge_out->dest = old_succ;
  edge_out->flags = EDGE_FALLTHRU;
  edge_out->probability = REG_BR_PROB_BASE;

  old_succ->pred = edge_out;

  /* Tricky case -- if there existed a fallthru into the successor
     (and we're not it) we must add a new unconditional jump around
     the new block we're actually interested in.

     Further, if that edge is critical, this means a second new basic
     block must be created to hold it.  In order to simplify correct
     insn placement, do this before we touch the existing basic block
     ordering for the block we were really wanting.  */
  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
    {
      edge e;
      for (e = edge_out->pred_next; e ; e = e->pred_next)
	if (e->flags & EDGE_FALLTHRU)
	  break;

      if (e)
	{
	  basic_block jump_block;
	  rtx pos;

	  if ((e->flags & EDGE_CRITICAL) == 0)
	    {
	      /* Non critical -- we can simply add a jump to the end
		 of the existing predecessor.  */
	      jump_block = e->src;
	    }
	  else
	    {
	      /* We need a new block to hold the jump.  The simplest
		 way to do the bulk of the work here is to recursively
		 call ourselves.  */
	      jump_block = split_edge (e);
	      e = jump_block->succ;
	    }

	  /* Now add the jump insn ... */
	  pos = emit_jump_insn_after (gen_jump (old_succ->head),
				      jump_block->end);
	  jump_block->end = pos;
	  emit_barrier_after (pos);

	  /* ... let jump know that label is in use, ... */
	  JUMP_LABEL (pos) = old_succ->head;
	  ++LABEL_NUSES (old_succ->head);

	  /* ... and clear fallthru on the outgoing edge.  */
	  e->flags &= ~EDGE_FALLTHRU;

	  /* Continue splitting the interesting edge.  */
	}
    }

  /* Place the new block just in front of the successor.  */
  VARRAY_GROW (basic_block_info, ++n_basic_blocks);
  if (old_succ == EXIT_BLOCK_PTR)
    j = n_basic_blocks - 1;
  else
    j = old_succ->index;
  for (i = n_basic_blocks - 1; i > j; --i)
    {
      basic_block tmp = BASIC_BLOCK (i - 1);
      BASIC_BLOCK (i) = tmp;
      tmp->index = i;
    }
  BASIC_BLOCK (i) = bb;
  bb->index = i;
  /* Create the basic block note.

     Where we place the note can have a noticeable impact on the generated
     code.  Consider this cfg:

		0
		|
		+->1-->2--->E
		|  |
		+--+

     If we need to insert an insn on the edge from block 0 to block 1,
     we want to ensure the instructions we insert are outside of any
     loop notes that physically sit between block 0 and block 1.  Otherwise
     we confuse the loop optimizer into thinking the loop is a phony.  */
  if (old_succ != EXIT_BLOCK_PTR
      && PREV_INSN (old_succ->head)
      && GET_CODE (PREV_INSN (old_succ->head)) == NOTE
      && NOTE_LINE_NUMBER (PREV_INSN (old_succ->head)) == NOTE_INSN_LOOP_BEG)
    bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK,
				PREV_INSN (old_succ->head));
  else if (old_succ != EXIT_BLOCK_PTR)
    bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, old_succ->head);
  else
    bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
  NOTE_BASIC_BLOCK (bb_note) = bb;
  bb->head = bb->end = bb_note;
  /* Not quite simple -- for non-fallthru edges, we must adjust the
     predecessor's jump instruction to target our new block.  */
  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
    {
      rtx tmp, insn = old_pred->end;
      rtx old_label = old_succ->head;
      rtx new_label = gen_label_rtx ();

      if (GET_CODE (insn) != JUMP_INSN)
	abort ();

      /* ??? Recognize a tablejump and adjust all matching cases.  */
      if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
	  && (tmp = NEXT_INSN (tmp)) != NULL_RTX
	  && GET_CODE (tmp) == JUMP_INSN
	  && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
	      || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
	{
	  rtvec vec;
	  int j;

	  if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
	    vec = XVEC (PATTERN (tmp), 0);
	  else
	    vec = XVEC (PATTERN (tmp), 1);

	  for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
	    if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
	      {
		RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (VOIDmode, new_label);
		--LABEL_NUSES (old_label);
		++LABEL_NUSES (new_label);
	      }

	  /* Handle casesi dispatch insns */
	  if ((tmp = single_set (insn)) != NULL
	      && SET_DEST (tmp) == pc_rtx
	      && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
	      && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
	      && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
	    {
	      XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (VOIDmode,
							   new_label);
	      --LABEL_NUSES (old_label);
	      ++LABEL_NUSES (new_label);
	    }
	}
      else
	{
	  /* This would have indicated an abnormal edge.  */
	  if (computed_jump_p (insn))
	    abort ();

	  /* A return instruction can't be redirected.  */
	  if (returnjump_p (insn))
	    abort ();

	  /* If the insn doesn't go where we think, we're confused.  */
	  if (JUMP_LABEL (insn) != old_label)
	    abort ();

	  redirect_jump (insn, new_label);
	}

      emit_label_before (new_label, bb_note);
      bb->head = new_label;
    }

  return bb;
}
/* Queue instructions for insertion on an edge between two basic blocks.
   The new instructions and basic blocks (if any) will not appear in the
   CFG until commit_edge_insertions is called.  */

void
insert_insn_on_edge (pattern, e)
     rtx pattern;
     edge e;
{
  /* We cannot insert instructions on an abnormal critical edge.
     It will be easier to find the culprit if we die now.  */
  if ((e->flags & (EDGE_ABNORMAL|EDGE_CRITICAL))
      == (EDGE_ABNORMAL|EDGE_CRITICAL))
    abort ();

  if (e->insns == NULL_RTX)
    start_sequence ();
  else
    push_to_sequence (e->insns);

  emit_insn (pattern);

  e->insns = get_insns ();
  end_sequence ();
}
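
/* The intended usage pattern (an illustrative sketch; the registers and
   the emitted pattern are invented) is to queue insns on any number of
   edges and then flush them all at once:

	insert_insn_on_edge (gen_move_insn (dest, src), e);
	...
	commit_edge_insertions ();

   Any edge splitting that is needed to find a safe insertion point
   happens inside commit_one_edge_insertion.  */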
/* Update the CFG for the instructions queued on edge E.  */

static void
commit_one_edge_insertion (e)
     edge e;
{
  rtx before = NULL_RTX, after = NULL_RTX, tmp;
  basic_block bb;

  /* Figure out where to put these things.  If the destination has
     one predecessor, insert there.  Except for the exit block.  */
  if (e->dest->pred->pred_next == NULL
      && e->dest != EXIT_BLOCK_PTR)
    {
      bb = e->dest;

      /* Get the location correct wrt a code label, and "nice" wrt
	 a basic block note, and before everything else.  */
      tmp = bb->head;
      if (GET_CODE (tmp) == CODE_LABEL)
	tmp = NEXT_INSN (tmp);
      if (GET_CODE (tmp) == NOTE
	  && NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BASIC_BLOCK)
	tmp = NEXT_INSN (tmp);
      if (tmp == bb->head)
	before = tmp;
      else
	after = PREV_INSN (tmp);
    }

  /* If the source has one successor and the edge is not abnormal,
     insert there.  Except for the entry block.  */
  else if ((e->flags & EDGE_ABNORMAL) == 0
	   && e->src->succ->succ_next == NULL
	   && e->src != ENTRY_BLOCK_PTR)
    {
      bb = e->src;
      if (GET_CODE (bb->end) == JUMP_INSN)
	{
	  /* ??? Is it possible to wind up with non-simple jumps?  Perhaps
	     a jump with delay slots already filled?  */
	  if (! simplejump_p (bb->end))
	    abort ();

	  before = bb->end;
	}
      else
	{
	  /* We'd better be fallthru, or we've lost track of what's what.  */
	  if ((e->flags & EDGE_FALLTHRU) == 0)
	    abort ();

	  after = bb->end;
	}
    }

  /* Otherwise we must split the edge.  */
  else
    {
      bb = split_edge (e);
      after = bb->end;
    }

  /* Now that we've found the spot, do the insertion.  */
  tmp = e->insns;
  e->insns = NULL_RTX;

  /* Set the new block number for these insns, if structure is allocated.  */
  if (basic_block_for_insn)
    {
      rtx i;
      for (i = tmp; i != NULL_RTX; i = NEXT_INSN (i))
	set_block_for_insn (i, bb);
    }

  if (before)
    {
      emit_insns_before (tmp, before);
      if (before == bb->head)
	bb->head = tmp;
    }
  else
    {
      tmp = emit_insns_after (tmp, after);
      if (after == bb->end)
	bb->end = tmp;
    }
}
/* Update the CFG for all queued instructions.  */

void
commit_edge_insertions ()
{
  int i;
  basic_block bb;

  i = -1;
  bb = ENTRY_BLOCK_PTR;
  while (1)
    {
      edge e, next;

      for (e = bb->succ; e ; e = next)
	{
	  next = e->succ_next;
	  if (e->insns)
	    commit_one_edge_insertion (e);
	}

      if (++i >= n_basic_blocks)
	break;
      bb = BASIC_BLOCK (i);
    }
}
/* Delete all unreachable basic blocks.  */

static void
delete_unreachable_blocks ()
{
  basic_block *worklist, *tos;
  int deleted_handler;
  edge e;
  int i, n;

  n = n_basic_blocks;
  tos = worklist = (basic_block *) xmalloc (sizeof (basic_block) * n);

  /* Use basic_block->aux as a marker.  Clear them all.  */

  for (i = 0; i < n; ++i)
    BASIC_BLOCK (i)->aux = NULL;

  /* Add our starting points to the worklist.  Almost always there will
     be only one.  It isn't inconceivable that we might one day directly
     support Fortran alternate entry points.  */

  for (e = ENTRY_BLOCK_PTR->succ; e ; e = e->succ_next)
    {
      *tos++ = e->dest;

      /* Mark the block with a handy non-null value.  */
      e->dest->aux = e;
    }

  /* Iterate: find everything reachable from what we've already seen.  */

  while (tos != worklist)
    {
      basic_block b = *--tos;

      for (e = b->succ; e ; e = e->succ_next)
	if (!e->dest->aux)
	  {
	    *tos++ = e->dest;
	    e->dest->aux = e;
	  }
    }

  /* Delete all unreachable basic blocks.  Count down so that we don't
     interfere with the block renumbering that happens in delete_block.  */

  deleted_handler = 0;

  for (i = n - 1; i >= 0; --i)
    {
      basic_block b = BASIC_BLOCK (i);

      if (b->aux != NULL)
	/* This block was found.  Tidy up the mark.  */
	b->aux = NULL;
      else
	deleted_handler |= delete_block (b);
    }

  /* Fix up edges that now fall through, or rather should now fall through
     but previously required a jump around now deleted blocks.  Simplify
     the search by only examining blocks numerically adjacent, since this
     is how find_basic_blocks created them.  */

  for (i = 1; i < n_basic_blocks; ++i)
    {
      basic_block b = BASIC_BLOCK (i - 1);
      basic_block c = BASIC_BLOCK (i);
      edge s;

      /* We care about simple conditional or unconditional jumps with
	 a single successor.

	 If we had a conditional branch to the next instruction when
	 find_basic_blocks was called, then there will only be one
	 out edge for the block which ended with the conditional
	 branch (since we do not create duplicate edges).

	 Furthermore, the edge will be marked as a fallthru because we
	 merge the flags for the duplicate edges.  So we do not want to
	 check that the edge is not a FALLTHRU edge.  */
      if ((s = b->succ) != NULL
	  && s->succ_next == NULL
	  && s->dest == c
	  /* If the jump insn has side effects, we can't tidy the edge.  */
	  && (GET_CODE (b->end) != JUMP_INSN
	      || onlyjump_p (b->end)))
	tidy_fallthru_edge (s, b, c);
    }

  /* If we deleted an exception handler, we may have EH region begin/end
     blocks to remove as well.  */
  if (deleted_handler)
    delete_eh_regions ();

  free (worklist);
}
/* Find EH regions for which there is no longer a handler, and delete them.  */

static void
delete_eh_regions ()
{
  rtx insn;

  update_rethrow_references ();

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == NOTE)
      {
	if ((NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG) ||
	    (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
	  {
	    int num = NOTE_EH_HANDLER (insn);
	    /* A NULL handler indicates a region is no longer needed,
	       as long as it isn't the target of a rethrow.  */
	    if (get_first_handler (num) == NULL && ! rethrow_used (num))
	      {
		NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
		NOTE_SOURCE_FILE (insn) = 0;
	      }
	  }
      }
}
/* Return true if NOTE is not one of the ones that must be kept paired,
   so that we may simply delete them.  */

static int
can_delete_note_p (note)
     rtx note;
{
  return (NOTE_LINE_NUMBER (note) == NOTE_INSN_DELETED
	  || NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK);
}
/* Unlink a chain of insns between START and FINISH, leaving notes
   that must be paired.  */

void
flow_delete_insn_chain (start, finish)
     rtx start, finish;
{
  /* Unchain the insns one by one.  It would be quicker to delete all
     of these with a single unchaining, rather than one at a time, but
     we need to keep the NOTE's.  */

  rtx next;

  while (1)
    {
      next = NEXT_INSN (start);
      if (GET_CODE (start) == NOTE && !can_delete_note_p (start))
	;
      else if (GET_CODE (start) == CODE_LABEL && !can_delete_label_p (start))
	;
      else
	next = flow_delete_insn (start);

      if (start == finish)
	break;
      start = next;
    }
}
/* Delete the insns in a (non-live) block.  We physically delete every
   non-deleted-note insn, and update the flow graph appropriately.

   Return nonzero if we deleted an exception handler.  */

/* ??? Preserving all such notes strikes me as wrong.  It would be nice
   to post-process the stream to remove empty blocks, loops, ranges, etc.  */

static int
delete_block (b)
     basic_block b;
{
  int deleted_handler = 0;
  rtx insn, end;

  /* If the head of this block is a CODE_LABEL, then it might be the
     label for an exception handler which can't be reached.

     We need to remove the label from the exception_handler_label list
     and remove the associated NOTE_INSN_EH_REGION_BEG and
     NOTE_INSN_EH_REGION_END notes.  */

  insn = b->head;

  never_reached_warning (insn);

  if (GET_CODE (insn) == CODE_LABEL)
    {
      rtx x, *prev = &exception_handler_labels;

      for (x = exception_handler_labels; x; x = XEXP (x, 1))
	{
	  if (XEXP (x, 0) == insn)
	    {
	      /* Found a match, splice this label out of the EH label list.  */
	      *prev = XEXP (x, 1);
	      XEXP (x, 1) = NULL_RTX;
	      XEXP (x, 0) = NULL_RTX;

	      /* Remove the handler from all regions */
	      remove_handler (insn);
	      deleted_handler = 1;
	      break;
	    }
	  prev = &XEXP (x, 1);
	}

      /* This label may be referenced by code solely for its value, or
	 referenced by static data, or something.  We have determined
	 that it is not reachable, but cannot delete the label itself.
	 Save code space and continue to delete the balance of the block,
	 along with properly updating the cfg.  */
      if (!can_delete_label_p (insn))
	{
	  /* If we've only got one of these, skip the whole deleting
	     insns thing.  */
	  if (insn == b->end)
	    goto no_delete_insns;
	  insn = NEXT_INSN (insn);
	}
    }

  /* Selectively unlink the insn chain.  Include any BARRIER that may
     follow the basic block.  */
  end = next_nonnote_insn (b->end);
  if (!end || GET_CODE (end) != BARRIER)
    end = b->end;
  flow_delete_insn_chain (insn, end);

 no_delete_insns:

  /* Remove the edges into and out of this block.  Note that there may
     indeed be edges in, if we are removing an unreachable loop.  */
  {
    edge e, next, *q;

    for (e = b->pred; e ; e = next)
      {
	for (q = &e->src->succ; *q != e; q = &(*q)->succ_next)
	  continue;
	*q = e->succ_next;
	next = e->pred_next;
	n_edges--;
	free (e);
      }
    for (e = b->succ; e ; e = next)
      {
	for (q = &e->dest->pred; *q != e; q = &(*q)->pred_next)
	  continue;
	*q = e->pred_next;
	next = e->succ_next;
	n_edges--;
	free (e);
      }

    b->pred = NULL;
    b->succ = NULL;
  }

  /* Remove the basic block from the array, and compact behind it.  */
  expunge_block (b);

  return deleted_handler;
}
/* Remove block B from the basic block array and compact behind it.  */

static void
expunge_block (b)
     basic_block b;
{
  int i, n = n_basic_blocks;

  for (i = b->index; i + 1 < n; ++i)
    {
      basic_block x = BASIC_BLOCK (i + 1);
      BASIC_BLOCK (i) = x;
      x->index = i;
    }

  basic_block_info->num_elements--;
  n_basic_blocks--;
}
/* Delete INSN by patching it out.  Return the next insn.  */

static rtx
flow_delete_insn (insn)
     rtx insn;
{
  rtx prev = PREV_INSN (insn);
  rtx next = NEXT_INSN (insn);

  PREV_INSN (insn) = NULL_RTX;
  NEXT_INSN (insn) = NULL_RTX;

  if (prev)
    NEXT_INSN (prev) = next;
  if (next)
    PREV_INSN (next) = prev;
  else
    set_last_insn (prev);

  if (GET_CODE (insn) == CODE_LABEL)
    remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);

  /* If deleting a jump, decrement the use count of the label.  Deleting
     the label itself should happen in the normal course of block merging.  */
  if (GET_CODE (insn) == JUMP_INSN && JUMP_LABEL (insn))
    LABEL_NUSES (JUMP_LABEL (insn))--;

  return next;
}
1982 /* True if a given label can be deleted. */
1984 static int
1985 can_delete_label_p (label)
1986 rtx label;
1988 rtx x;
1990 if (LABEL_PRESERVE_P (label))
1991 return 0;
1993 for (x = forced_labels; x ; x = XEXP (x, 1))
1994 if (label == XEXP (x, 0))
1995 return 0;
1996 for (x = label_value_list; x ; x = XEXP (x, 1))
1997 if (label == XEXP (x, 0))
1998 return 0;
1999 for (x = exception_handler_labels; x ; x = XEXP (x, 1))
2000 if (label == XEXP (x, 0))
2001 return 0;
2003 /* User declared labels must be preserved. */
2004 if (LABEL_NAME (label) != 0)
2005 return 0;
2007 return 1;
2010 /* Blocks A and B are to be merged into a single block. A has no incoming
2011 fallthru edge, so it can be moved before B without adding or modifying
2012 any jumps (aside from the jump from A to B). */
2014 static int
2015 merge_blocks_move_predecessor_nojumps (a, b)
2016 basic_block a, b;
2018 rtx start, end, barrier;
2019 int index;
2021 start = a->head;
2022 end = a->end;
2024 /* We want to delete the BARRIER after the end of the insns we are
2025 going to move. If we don't find a BARRIER, then do nothing. This
2026 can happen in some cases if we have labels we can not delete.
2028 Similarly, do nothing if we can not delete the label at the start
2029 of the target block. */
2030 barrier = next_nonnote_insn (end);
2031 if (GET_CODE (barrier) != BARRIER
2032 || (GET_CODE (b->head) == CODE_LABEL
2033 && ! can_delete_label_p (b->head)))
2034 return 0;
2035 else
2036 flow_delete_insn (barrier);
2038 /* Move block and loop notes out of the chain so that we do not
2039 disturb their order.
2041 ??? A better solution would be to squeeze out all the non-nested notes
2042 and adjust the block trees appropriately. Even better would be to have
2043 a tighter connection between block trees and rtl so that this is not
2044 necessary. */
2045 start = squeeze_notes (start, end);
2047 /* Scramble the insn chain. */
2048 if (end != PREV_INSN (b->head))
2049 reorder_insns (start, end, PREV_INSN (b->head));
2051 if (rtl_dump_file)
2053 fprintf (rtl_dump_file, "Moved block %d before %d and merged.\n",
2054 a->index, b->index);
2057 /* Swap the records for the two blocks around. Although we are deleting B,
2058 A is now where B was and we want to compact the BB array from where
2059 A used to be. */
2060 BASIC_BLOCK(a->index) = b;
2061 BASIC_BLOCK(b->index) = a;
2062 index = a->index;
2063 a->index = b->index;
2064 b->index = index;
2066 /* Now blocks A and B are contiguous. Merge them. */
2067 merge_blocks_nomove (a, b);
2069 return 1;
2072 /* Blocks A and B are to be merged into a single block. B has no outgoing
2073 fallthru edge, so it can be moved after A without adding or modifying
2074 any jumps (aside from the jump from A to B). */
2076 static int
2077 merge_blocks_move_successor_nojumps (a, b)
2078 basic_block a, b;
2080 rtx start, end, barrier;
2082 start = b->head;
2083 end = b->end;
2085 /* We want to delete the BARRIER after the end of the insns we are
2086 going to move. If we don't find a BARRIER, then do nothing. This
2087 can happen in some cases if we have labels we can not delete.
2089 Similarly, do nothing if we can not delete the label at the start
2090 of the target block. */
2091 barrier = next_nonnote_insn (end);
2092 if (GET_CODE (barrier) != BARRIER
2093 || (GET_CODE (b->head) == CODE_LABEL
2094 && ! can_delete_label_p (b->head)))
2095 return 0;
2096 else
2097 flow_delete_insn (barrier);
2099 /* Move block and loop notes out of the chain so that we do not
2100 disturb their order.
2102 ??? A better solution would be to squeeze out all the non-nested notes
2103 and adjust the block trees appropriately. Even better would be to have
2104 a tighter connection between block trees and rtl so that this is not
2105 necessary. */
2106 start = squeeze_notes (start, end);
2108 /* Scramble the insn chain. */
2109 reorder_insns (start, end, a->end);
2111 /* Now blocks A and B are contiguous. Merge them. */
2112 merge_blocks_nomove (a, b);
2114 if (rtl_dump_file)
2116 fprintf (rtl_dump_file, "Moved block %d after %d and merged.\n",
2117 b->index, a->index);
2120 return 1;
2123 /* Blocks A and B are to be merged into a single block. The insns
2124 are already contiguous, hence `nomove'. */
2126 static void
2127 merge_blocks_nomove (a, b)
2128 basic_block a, b;
2130 edge e;
2131 rtx b_head, b_end, a_end;
2132 int b_empty = 0;
2134 /* If there was a CODE_LABEL beginning B, delete it. */
2135 b_head = b->head;
2136 b_end = b->end;
2137 if (GET_CODE (b_head) == CODE_LABEL)
2139 /* Detect basic blocks with nothing but a label. This can happen
2140 in particular at the end of a function. */
2141 if (b_head == b_end)
2142 b_empty = 1;
2143 b_head = flow_delete_insn (b_head);
2146 /* Delete the basic block note. */
2147 if (GET_CODE (b_head) == NOTE
2148 && NOTE_LINE_NUMBER (b_head) == NOTE_INSN_BASIC_BLOCK)
2150 if (b_head == b_end)
2151 b_empty = 1;
2152 b_head = flow_delete_insn (b_head);
2155 /* If there was a jump out of A, delete it. */
2156 a_end = a->end;
2157 if (GET_CODE (a_end) == JUMP_INSN)
2159 rtx prev;
2161 prev = prev_nonnote_insn (a_end);
2162 if (!prev)
2163 prev = a->head;
2165 #ifdef HAVE_cc0
2166 /* If this was a conditional jump, we need to also delete
2167 the insn that set cc0. */
2169 if (prev && sets_cc0_p (prev))
2171 rtx tmp = prev;
2172 prev = prev_nonnote_insn (prev);
2173 if (!prev)
2174 prev = a->head;
2175 flow_delete_insn (tmp);
2177 #endif
2179 /* Note that a->head != a->end, since we should have at least a
2180 bb note plus the jump, so prev != insn. */
2181 flow_delete_insn (a_end);
2182 a_end = prev;
2185 /* By definition, there should only be one successor of A, and that is
2186 B. Free that edge struct. */
2187 n_edges--;
2188 free (a->succ);
2190 /* Adjust the edges out of B for the new owner. */
2191 for (e = b->succ; e ; e = e->succ_next)
2192 e->src = a;
2193 a->succ = b->succ;
2195 /* Reassociate the insns of B with A. */
2196 if (!b_empty)
2198 BLOCK_FOR_INSN (b_head) = a;
2199 while (b_head != b_end)
2201 b_head = NEXT_INSN (b_head);
2202 BLOCK_FOR_INSN (b_head) = a;
2204 a_end = b_head;
2206 a->end = a_end;
2208 /* Compact the basic block array. */
2209 expunge_block (b);
2212 /* Attempt to merge basic blocks that are potentially non-adjacent.
2213 Return true iff the attempt succeeded. */
2215 static int
2216 merge_blocks (e, b, c)
2217 edge e;
2218 basic_block b, c;
2220 /* If B has a fallthru edge to C, no need to move anything. */
2221 if (e->flags & EDGE_FALLTHRU)
2223 /* If a label still appears somewhere and we cannot delete the label,
2224 then we cannot merge the blocks. The edge was tidied already. */
2226 rtx insn, stop = NEXT_INSN (c->head);
2227 for (insn = NEXT_INSN (b->end); insn != stop; insn = NEXT_INSN (insn))
2228 if (GET_CODE (insn) == CODE_LABEL && !can_delete_label_p (insn))
2229 return 0;
2231 merge_blocks_nomove (b, c);
2233 if (rtl_dump_file)
2235 fprintf (rtl_dump_file, "Merged %d and %d without moving.\n",
2236 b->index, c->index);
2239 return 1;
2241 else
2243 edge tmp_edge;
2244 basic_block d;
2245 int c_has_outgoing_fallthru;
2246 int b_has_incoming_fallthru;
2248 /* We must make sure to not munge nesting of exception regions,
2249 lexical blocks, and loop notes.
2251 The first is taken care of by requiring that the active eh
2252 region at the end of one block always matches the active eh
2253 region at the beginning of the next block.
2255 The latter two are taken care of by squeezing out all the notes. */
2257 /* ??? A throw/catch edge (or any abnormal edge) should be rarely
2258 executed and we may want to treat blocks which have two out
2259 edges, one normal, one abnormal, as only having one edge for
2260 block merging purposes. */
2262 for (tmp_edge = c->succ; tmp_edge ; tmp_edge = tmp_edge->succ_next)
2263 if (tmp_edge->flags & EDGE_FALLTHRU)
2264 break;
2265 c_has_outgoing_fallthru = (tmp_edge != NULL);
2267 for (tmp_edge = b->pred; tmp_edge ; tmp_edge = tmp_edge->pred_next)
2268 if (tmp_edge->flags & EDGE_FALLTHRU)
2269 break;
2270 b_has_incoming_fallthru = (tmp_edge != NULL);
2272 /* If B does not have an incoming fallthru, and the exception regions
2273 match, then it can be moved immediately before C without introducing
2274 or modifying jumps.
2276 C can not be the first block, so we do not have to worry about
2277 accessing a non-existent block. */
2278 d = BASIC_BLOCK (c->index - 1);
2279 if (! b_has_incoming_fallthru
2280 && d->eh_end == b->eh_beg
2281 && b->eh_end == c->eh_beg)
2282 return merge_blocks_move_predecessor_nojumps (b, c);
2284 /* Otherwise, we're going to try to move C after B. Make sure the
2285 exception regions match.
2287 If B is the last basic block, then we must not try to access the
2288 block structure for block B + 1. Luckily in that case we do not
2289 need to worry about matching exception regions. */
2290 d = (b->index + 1 < n_basic_blocks ? BASIC_BLOCK (b->index + 1) : NULL);
2291 if (b->eh_end == c->eh_beg
2292 && (d == NULL || c->eh_end == d->eh_beg))
2294 /* If C does not have an outgoing fallthru, then it can be moved
2295 immediately after B without introducing or modifying jumps. */
2296 if (! c_has_outgoing_fallthru)
2297 return merge_blocks_move_successor_nojumps (b, c);
2299 /* Otherwise, we'll need to insert an extra jump, and possibly
2300 a new block to contain it. */
2301 /* ??? Not implemented yet. */
2304 return 0;
2308 /* Top level driver for merge_blocks. */
2310 static void
2311 try_merge_blocks ()
2313 int i;
2315 /* Attempt to merge blocks as made possible by edge removal. If a block
2316 has only one successor, and the successor has only one predecessor,
2317 they may be combined. */
2319 for (i = 0; i < n_basic_blocks; )
2321 basic_block c, b = BASIC_BLOCK (i);
2322 edge s;
2324 /* A loop because chains of blocks might be combinable. */
2325 while ((s = b->succ) != NULL
2326 && s->succ_next == NULL
2327 && (s->flags & EDGE_EH) == 0
2328 && (c = s->dest) != EXIT_BLOCK_PTR
2329 && c->pred->pred_next == NULL
2330 /* If the jump insn has side effects, we can't kill the edge. */
2331 && (GET_CODE (b->end) != JUMP_INSN
2332 || onlyjump_p (b->end))
2333 && merge_blocks (s, b, c))
2334 continue;
2336 /* Don't get confused by the index shift caused by deleting blocks. */
2337 i = b->index + 1;
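/* For example, given a chain B -> C -> D in which each link is the sole
   successor/predecessor edge, the inner while loop above first folds C
   into B and then folds D into the combined block, before the outer
   loop advances past the merged result.  */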
2341 /* The given edge should potentially be a fallthru edge. If that is in
2342 fact true, delete the unconditional jump and barriers that are in
2343 the way. */
2345 static void
2346 tidy_fallthru_edge (e, b, c)
2347 edge e;
2348 basic_block b, c;
2350 rtx q;
2352 /* ??? In a late-running flow pass, other folks may have deleted basic
2353 blocks by nopping out blocks, leaving multiple BARRIERs between here
2354 and the target label. They ought to be chastised and fixed.
2356 We can also wind up with a sequence of undeletable labels between
2357 one block and the next.
2359 So search through a sequence of barriers, labels, and notes for
2360 the head of block C and assert that we really do fall through. */
2362 if (next_real_insn (b->end) != next_real_insn (PREV_INSN (c->head)))
2363 return;
2365 /* Remove what will soon cease being the jump insn from the source block.
2366 If block B consisted only of this single jump, turn it into a deleted
2367 note. */
2368 q = b->end;
2369 if (GET_CODE (q) == JUMP_INSN)
2371 #ifdef HAVE_cc0
2372 /* If this was a conditional jump, we need to also delete
2373 the insn that set cc0. */
2374 if (! simplejump_p (q) && condjump_p (q) && sets_cc0_p (PREV_INSN (q)))
2375 q = PREV_INSN (q);
2376 #endif
2378 if (b->head == q)
2380 PUT_CODE (q, NOTE);
2381 NOTE_LINE_NUMBER (q) = NOTE_INSN_DELETED;
2382 NOTE_SOURCE_FILE (q) = 0;
2384 else
2385 b->end = q = PREV_INSN (q);
2388 /* Selectively unlink the sequence. */
2389 if (q != PREV_INSN (c->head))
2390 flow_delete_insn_chain (NEXT_INSN (q), PREV_INSN (c->head));
2392 e->flags |= EDGE_FALLTHRU;
2395 /* Discover and record the loop depth at the head of each basic block. */
2397 void
2398 calculate_loop_depth (dump)
2399 FILE *dump;
2401 struct loops loops;
2403 /* The loop infrastructure does the real job for us. */
2404 flow_loops_find (&loops);
2406 if (dump)
2407 flow_loops_dump (&loops, dump, 0);
2409 flow_loops_free (&loops);
2412 /* Perform data flow analysis.
2413 F is the first insn of the function and NREGS the number of register numbers
2414 in use. */
2416 void
2417 life_analysis (f, nregs, file, remove_dead_code)
2418 rtx f;
2419 int nregs;
2420 FILE *file;
2421 int remove_dead_code;
2423 #ifdef ELIMINABLE_REGS
2424 register size_t i;
2425 static struct {int from, to; } eliminables[] = ELIMINABLE_REGS;
2426 #endif
2427 int flags;
2429 /* Record which registers will be eliminated. We use this in
2430 mark_used_regs. */
2432 CLEAR_HARD_REG_SET (elim_reg_set);
2434 #ifdef ELIMINABLE_REGS
2435 for (i = 0; i < sizeof eliminables / sizeof eliminables[0]; i++)
2436 SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
2437 #else
2438 SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
2439 #endif
2441 /* Allocate a bitmap to be filled in by record_volatile_insns. */
2442 uid_volatile = BITMAP_XMALLOC ();
2444 /* We want alias analysis information for local dead store elimination. */
2445 init_alias_analysis ();
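/* PROP_FINAL asks propagate_block for every analysis and transformation;
   masking off PROP_SCAN_DEAD_CODE and PROP_KILL_DEAD_CODE below leaves
   dead insns intact while still computing the life info.  */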
2447 flags = PROP_FINAL;
2448 if (! remove_dead_code)
2449 flags &= ~(PROP_SCAN_DEAD_CODE | PROP_KILL_DEAD_CODE);
2450 life_analysis_1 (f, nregs, flags);
2452 if (! reload_completed)
2453 mark_constant_function ();
2455 end_alias_analysis ();
2457 if (file)
2458 dump_flow_info (file);
2460 BITMAP_XFREE (uid_volatile);
2461 free_basic_block_vars (1);
2464 /* A subroutine of verify_wide_reg, called through for_each_rtx.
2465 Search for REGNO. If found, abort if it is not wider than word_mode. */
2467 static int
2468 verify_wide_reg_1 (px, pregno)
2469 rtx *px;
2470 void *pregno;
2472 rtx x = *px;
2473 int regno = *(int *) pregno;
2475 if (GET_CODE (x) == REG && REGNO (x) == regno)
2477 if (GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD)
2478 abort ();
2479 return 1;
2481 return 0;
2484 /* A subroutine of verify_local_live_at_start. Search through insns
2485 between HEAD and END looking for register REGNO. */
2487 static void
2488 verify_wide_reg (regno, head, end)
2489 int regno;
2490 rtx head, end;
2492 while (1)
2494 if (GET_RTX_CLASS (GET_CODE (head)) == 'i'
2495 && for_each_rtx (&PATTERN (head), verify_wide_reg_1, &regno))
2496 return;
2497 if (head == end)
2498 break;
2499 head = NEXT_INSN (head);
2502 /* We didn't find the register at all. Something's way screwy. */
2503 abort ();
2506 /* A subroutine of update_life_info. Verify that there are no untoward
2507 changes in live_at_start during a local update. */
2509 static void
2510 verify_local_live_at_start (new_live_at_start, bb)
2511 regset new_live_at_start;
2512 basic_block bb;
2514 if (reload_completed)
2516 /* After reload, there are no pseudos, nor subregs of multi-word
2517 registers. The regsets should exactly match. */
2518 if (! REG_SET_EQUAL_P (new_live_at_start, bb->global_live_at_start))
2519 abort ();
2521 else
2523 int i;
2525 /* Find the set of changed registers. */
2526 XOR_REG_SET (new_live_at_start, bb->global_live_at_start);
2528 EXECUTE_IF_SET_IN_REG_SET (new_live_at_start, 0, i,
2530 /* No registers should die. */
2531 if (REGNO_REG_SET_P (bb->global_live_at_start, i))
2532 abort ();
2533 /* Verify that the now-live register is wider than word_mode. */
2534 verify_wide_reg (i, bb->head, bb->end);
2539 /* Updates death notes starting with the basic blocks set in BLOCKS.
2541 If LOCAL_ONLY, such as after splitting or peepholeing, we are only
2542 expecting local modifications to basic blocks. If we find extra
2543 registers live at the beginning of a block, then we either killed
2544 useful data, or we have a broken split that wants data not provided.
2545 If we find registers removed from live_at_start, that means we have
2546 a broken peephole that is killing a register it shouldn't.
2548 ??? This is not true in one situation -- when a pre-reload splitter
2549 generates subregs of a multi-word pseudo, current life analysis will
2550 lose the kill. So we _can_ have a pseudo go live. How irritating.
2552 BLOCK_FOR_INSN is assumed to be correct.
2554 ??? PROP_FLAGS should not contain PROP_LOG_LINKS. Need to set up
2555 reg_next_use for that. Including PROP_REG_INFO does not refresh
2556 regs_ever_live unless the caller resets it to zero. */
2558 void
2559 update_life_info (blocks, extent, prop_flags)
2560 sbitmap blocks;
2561 enum update_life_extent extent;
2562 int prop_flags;
2564 regset tmp;
2565 int i;
2567 tmp = ALLOCA_REG_SET ();
2569 /* For a global update, we go through the relaxation process again. */
2570 if (extent != UPDATE_LIFE_LOCAL)
2572 calculate_global_regs_live (blocks, blocks,
2573 prop_flags & PROP_SCAN_DEAD_CODE);
2575 /* If asked, remove notes from the blocks we'll update. */
2576 if (extent == UPDATE_LIFE_GLOBAL_RM_NOTES)
2577 count_or_remove_death_notes (blocks, 1);
2580 EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
2582 basic_block bb = BASIC_BLOCK (i);
2584 COPY_REG_SET (tmp, bb->global_live_at_end);
2585 propagate_block (tmp, bb->head, bb->end, (regset) NULL, i,
2586 prop_flags);
2588 if (extent == UPDATE_LIFE_LOCAL)
2589 verify_local_live_at_start (tmp, bb);
2592 FREE_REG_SET (tmp);
2595 /* Free the variables allocated by find_basic_blocks.
2597 KEEP_HEAD_END_P is non-zero if basic_block_info is not to be freed. */
2599 void
2600 free_basic_block_vars (keep_head_end_p)
2601 int keep_head_end_p;
2603 if (basic_block_for_insn)
2605 VARRAY_FREE (basic_block_for_insn);
2606 basic_block_for_insn = NULL;
2609 if (! keep_head_end_p)
2611 clear_edges ();
2612 VARRAY_FREE (basic_block_info);
2613 n_basic_blocks = 0;
2615 ENTRY_BLOCK_PTR->aux = NULL;
2616 ENTRY_BLOCK_PTR->global_live_at_end = NULL;
2617 EXIT_BLOCK_PTR->aux = NULL;
2618 EXIT_BLOCK_PTR->global_live_at_start = NULL;
2622 /* Return nonzero if the destination of SET equals the source. */
2623 static int
2624 set_noop_p (set)
2625 rtx set;
2627 rtx src = SET_SRC (set);
2628 rtx dst = SET_DEST (set);
2629 if (GET_CODE (src) == REG && GET_CODE (dst) == REG
2630 && REGNO (src) == REGNO (dst))
2631 return 1;
2632 if (GET_CODE (src) != SUBREG || GET_CODE (dst) != SUBREG
2633 || SUBREG_WORD (src) != SUBREG_WORD (dst))
2634 return 0;
2635 src = SUBREG_REG (src);
2636 dst = SUBREG_REG (dst);
2637 if (GET_CODE (src) == REG && GET_CODE (dst) == REG
2638 && REGNO (src) == REGNO (dst))
2639 return 1;
2640 return 0;
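/* Thus set_noop_p holds, e.g., for (set (reg:SI 3) (reg:SI 3)) and for
   (set (subreg:SI (reg:DI 4) 0) (subreg:SI (reg:DI 4) 0)), but not for
   subregs selecting different words of the same register.  */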
2643 /* Return nonzero if an insn consists only of SETs, each of which only sets a
2644 value to itself. */
2645 static int
2646 noop_move_p (insn)
2647 rtx insn;
2649 rtx pat = PATTERN (insn);
2651 /* Insns carrying these notes are useful later on. */
2652 if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
2653 return 0;
2655 if (GET_CODE (pat) == SET && set_noop_p (pat))
2656 return 1;
2658 if (GET_CODE (pat) == PARALLEL)
2660 int i;
2661 /* If nothing but SETs of registers to themselves,
2662 this insn can also be deleted. */
2663 for (i = 0; i < XVECLEN (pat, 0); i++)
2665 rtx tem = XVECEXP (pat, 0, i);
2667 if (GET_CODE (tem) == USE
2668 || GET_CODE (tem) == CLOBBER)
2669 continue;
2671 if (GET_CODE (tem) != SET || ! set_noop_p (tem))
2672 return 0;
2675 return 1;
2677 return 0;
2680 static void
2681 notice_stack_pointer_modification (x, pat, data)
2682 rtx x;
2683 rtx pat ATTRIBUTE_UNUSED;
2684 void *data ATTRIBUTE_UNUSED;
2686 if (x == stack_pointer_rtx
2687 /* The stack pointer is only modified indirectly as the result
2688 of a push until later in flow. See the comments in rtl.texi
2689 regarding Embedded Side-Effects on Addresses. */
2690 || (GET_CODE (x) == MEM
2691 && (GET_CODE (XEXP (x, 0)) == PRE_DEC
2692 || GET_CODE (XEXP (x, 0)) == PRE_INC
2693 || GET_CODE (XEXP (x, 0)) == POST_DEC
2694 || GET_CODE (XEXP (x, 0)) == POST_INC)
2695 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx))
2696 current_function_sp_is_unchanging = 0;
2699 /* Record which insns refer to any volatile memory
2700 or for any reason can't be deleted just because they are dead stores.
2701 Also, delete any insns that copy a register to itself.
2702 And see if the stack pointer is modified. */
2703 static void
2704 record_volatile_insns (f)
2705 rtx f;
2707 rtx insn;
2708 for (insn = f; insn; insn = NEXT_INSN (insn))
2710 enum rtx_code code1 = GET_CODE (insn);
2711 if (code1 == CALL_INSN)
2712 SET_INSN_VOLATILE (insn);
2713 else if (code1 == INSN || code1 == JUMP_INSN)
2715 if (GET_CODE (PATTERN (insn)) != USE
2716 && volatile_refs_p (PATTERN (insn)))
2717 SET_INSN_VOLATILE (insn);
2719 /* A SET that makes space on the stack cannot be dead.
2720 (Such SETs occur only for allocating variable-size data,
2721 so they will always have a PLUS or MINUS according to the
2722 direction of stack growth.)
2723 Even if this function never uses this stack pointer value,
2724 signal handlers do! */
2725 else if (code1 == INSN && GET_CODE (PATTERN (insn)) == SET
2726 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
2727 #ifdef STACK_GROWS_DOWNWARD
2728 && GET_CODE (SET_SRC (PATTERN (insn))) == MINUS
2729 #else
2730 && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
2731 #endif
2732 && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx)
2733 SET_INSN_VOLATILE (insn);
2735 /* Delete (in effect) any obvious no-op moves. */
2736 else if (noop_move_p (insn))
2738 PUT_CODE (insn, NOTE);
2739 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2740 NOTE_SOURCE_FILE (insn) = 0;
2744 /* Check if insn modifies the stack pointer. */
2745 if ( current_function_sp_is_unchanging
2746 && GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2747 note_stores (PATTERN (insn),
2748 notice_stack_pointer_modification,
2749 NULL);
2753 /* Mark a register in SET. Hard registers in large modes get all
2754 of their component registers set as well. */
2755 static void
2756 mark_reg (set, reg)
2757 regset set;
2758 rtx reg;
2760 int regno = REGNO (reg);
2762 SET_REGNO_REG_SET (set, regno);
2763 if (regno < FIRST_PSEUDO_REGISTER)
2765 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
2766 while (--n > 0)
2767 SET_REGNO_REG_SET (set, regno + n);
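/* E.g., on a 32-bit target a DImode value in hard reg 10 occupies regs
   10 and 11; mark_reg sets both bits so that liveness is tracked per
   component word.  */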
2771 /* Mark those regs which are needed at the end of the function as live
2772 at the end of the last basic block. */
2773 static void
2774 mark_regs_live_at_end (set)
2775 regset set;
2777 tree type;
2778 int i;
2780 /* If exiting needs the right stack value, consider the stack pointer
2781 live at the end of the function. */
2782 if ((HAVE_epilogue && reload_completed)
2783 || ! EXIT_IGNORE_STACK
2784 || (! FRAME_POINTER_REQUIRED
2785 && ! current_function_calls_alloca
2786 && flag_omit_frame_pointer)
2787 || current_function_sp_is_unchanging)
2789 SET_REGNO_REG_SET (set, STACK_POINTER_REGNUM);
2792 /* Mark the frame pointer if needed at the end of the function. If
2793 we end up eliminating it, it will be removed from the live list
2794 of each basic block by reload. */
2796 if (! reload_completed || frame_pointer_needed)
2798 SET_REGNO_REG_SET (set, FRAME_POINTER_REGNUM);
2799 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2800 /* If they are different, also mark the hard frame pointer as live. */
2801 SET_REGNO_REG_SET (set, HARD_FRAME_POINTER_REGNUM);
2802 #endif
2805 #ifdef PIC_OFFSET_TABLE_REGNUM
2806 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
2807 /* Many architectures have a GP register even without flag_pic.
2808 Assume the pic register is not in use, or will be handled by
2809 other means, if it is not fixed. */
2810 if (fixed_regs[PIC_OFFSET_TABLE_REGNUM])
2811 SET_REGNO_REG_SET (set, PIC_OFFSET_TABLE_REGNUM);
2812 #endif
2813 #endif
2815 /* Mark all global registers, and all registers used by the epilogue
2816 as being live at the end of the function since they may be
2817 referenced by our caller. */
2818 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2819 if (global_regs[i]
2820 #ifdef EPILOGUE_USES
2821 || EPILOGUE_USES (i)
2822 #endif
2824 SET_REGNO_REG_SET (set, i);
2826 /* Mark all call-saved registers that we actually used. */
2827 if (HAVE_epilogue && reload_completed)
2829 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2830 if (! call_used_regs[i] && regs_ever_live[i])
2831 SET_REGNO_REG_SET (set, i);
2834 /* Mark function return value. */
2835 /* ??? Only do this after reload. Consider a non-void function that
2836 omits a return statement. Across that edge we'll have the return
2837 register live, and no set for it. Thus the return register will
2838 be live back through the CFG to the entry, and thus we die. A
2839 possible solution is to emit a clobber at exits without returns. */
2841 type = TREE_TYPE (DECL_RESULT (current_function_decl));
2842 if (reload_completed
2843 && type != void_type_node)
2845 rtx outgoing;
2847 if (current_function_returns_struct
2848 || current_function_returns_pcc_struct)
2849 type = build_pointer_type (type);
2851 #ifdef FUNCTION_OUTGOING_VALUE
2852 outgoing = FUNCTION_OUTGOING_VALUE (type, current_function_decl);
2853 #else
2854 outgoing = FUNCTION_VALUE (type, current_function_decl);
2855 #endif
2857 if (GET_CODE (outgoing) == REG)
2858 mark_reg (set, outgoing);
2859 else if (GET_CODE (outgoing) == PARALLEL)
2861 int len = XVECLEN (outgoing, 0);
2863 /* Check for a NULL entry, used to indicate that the parameter
2864 goes on the stack and in registers. */
2865 i = (XEXP (XVECEXP (outgoing, 0, 0), 0) ? 0 : 1);
2867 for ( ; i < len; ++i)
2869 rtx r = XVECEXP (outgoing, 0, i);
2870 if (GET_CODE (r) == REG)
2871 mark_reg (set, r);
2874 else
2875 abort ();
2879 /* Determine which registers are live at the start of each
2880 basic block of the function whose first insn is F.
2881 NREGS is the number of registers used in F.
2882 We allocate the regsets basic_block->global_live_at_start and
2883 basic_block->global_live_at_end for each block, and fill them
2884 with the data. */
2886 static void
2887 life_analysis_1 (f, nregs, flags)
2888 rtx f;
2889 int nregs;
2890 int flags;
2892 char save_regs_ever_live[FIRST_PSEUDO_REGISTER];
2893 register int i;
2895 max_regno = nregs;
2897 /* Allocate and zero out many data structures
2898 that will record the data from lifetime analysis. */
2900 allocate_reg_life_data ();
2901 allocate_bb_life_data ();
2903 reg_next_use = (rtx *) xcalloc (nregs, sizeof (rtx));
2905 /* Assume that the stack pointer is unchanging if alloca hasn't been used.
2906 This will be cleared by record_volatile_insns if it encounters an insn
2907 which modifies the stack pointer. */
2908 current_function_sp_is_unchanging = !current_function_calls_alloca;
2909 record_volatile_insns (f);
2911 /* Find the set of registers live on function exit. Do this before
2912 zeroing regs_ever_live, as we use that data post-reload. */
2913 mark_regs_live_at_end (EXIT_BLOCK_PTR->global_live_at_start);
2915 /* The post-reload life analysis must have (on a global basis) the same
2916 registers live as reload itself computed; otherwise, elimination
2917 offsets and such may be incorrect.
2919 Reload will mark some registers as live even though they do not
2920 appear in the rtl.
2921 if (reload_completed)
2922 memcpy (save_regs_ever_live, regs_ever_live, sizeof (regs_ever_live));
2923 memset (regs_ever_live, 0, sizeof regs_ever_live);
2925 /* Compute register life at block boundaries. It'd be nice to
2926 begin with just the exit and noreturn blocks, but that set
2927 is not immediately handy. */
2929 sbitmap blocks;
2930 blocks = sbitmap_alloc (n_basic_blocks);
2931 sbitmap_ones (blocks);
2932 calculate_global_regs_live (blocks, blocks, flags & PROP_SCAN_DEAD_CODE);
2933 sbitmap_free (blocks);
2936 /* The only pseudos that are live at the beginning of the function are
2937 those that were not set anywhere in the function. local-alloc doesn't
2938 know how to handle these correctly, so mark them as not local to any
2939 one basic block. */
2941 EXECUTE_IF_SET_IN_REG_SET (ENTRY_BLOCK_PTR->global_live_at_end,
2942 FIRST_PSEUDO_REGISTER, i,
2943 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
2945 /* Now the life information is accurate. Make one more pass over each
2946 basic block to delete dead stores, create autoincrement addressing
2947 and record how many times each register is used, is set, or dies. */
2949 regset tmp;
2950 tmp = ALLOCA_REG_SET ();
2952 for (i = n_basic_blocks - 1; i >= 0; --i)
2954 basic_block bb = BASIC_BLOCK (i);
2956 COPY_REG_SET (tmp, bb->global_live_at_end);
2957 propagate_block (tmp, bb->head, bb->end, (regset) NULL, i, flags);
2960 FREE_REG_SET (tmp);
2963 /* We have a problem with any pseudoreg that lives across the setjmp.
2964 ANSI says that if a user variable does not change in value between
2965 the setjmp and the longjmp, then the longjmp preserves it. This
2966 includes longjmp from a place where the pseudo appears dead.
2967 (In principle, the value still exists if it is in scope.)
2968 If the pseudo goes in a hard reg, some other value may occupy
2969 that hard reg where this pseudo is dead, thus clobbering the pseudo.
2970 Conclusion: such a pseudo must not go in a hard reg. */
2971 EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
2972 FIRST_PSEUDO_REGISTER, i,
2974 if (regno_reg_rtx[i] != 0)
2976 REG_LIVE_LENGTH (i) = -1;
2977 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
2981 /* Restore regs_ever_live that was provided by reload. */
2982 if (reload_completed)
2983 memcpy (regs_ever_live, save_regs_ever_live, sizeof (regs_ever_live));
2985 /* Clean up. */
2986 free (reg_next_use);
2987 reg_next_use = NULL;
2990 /* Propagate global life info around the graph of basic blocks. Begin
2991 considering blocks with their corresponding bit set in BLOCKS_IN.
2992 BLOCKS_OUT is set for every block that was changed. */
2994 static void
2995 calculate_global_regs_live (blocks_in, blocks_out, flags)
2996 sbitmap blocks_in, blocks_out;
2997 int flags;
2999 basic_block *queue, *qhead, *qtail, *qend;
3000 regset tmp, new_live_at_end;
3001 int i;
3003 tmp = ALLOCA_REG_SET ();
3004 new_live_at_end = ALLOCA_REG_SET ();
3006 /* Create a worklist. Allocate an extra slot for ENTRY_BLOCK, and one more
3007 because the `head == tail' style test for an empty queue doesn't
3008 work with a full queue. */
3009 queue = (basic_block *) xmalloc ((n_basic_blocks + 2) * sizeof (*queue));
3010 qtail = queue;
3011 qhead = qend = queue + n_basic_blocks + 2;
3013 /* Clear out the garbage that might be hanging out in bb->aux. */
3014 for (i = n_basic_blocks - 1; i >= 0; --i)
3015 BASIC_BLOCK (i)->aux = NULL;
3017 /* Queue the blocks set in the initial mask. Do this in reverse block
3018 number order so that the first round is more likely to do useful
3019 work. We use AUX non-null to flag that the block is queued. */
3020 EXECUTE_IF_SET_IN_SBITMAP (blocks_in, 0, i,
3022 basic_block bb = BASIC_BLOCK (i);
3023 *--qhead = bb;
3024 bb->aux = bb;
3027 sbitmap_zero (blocks_out);
3029 while (qhead != qtail)
3031 int rescan, changed;
3032 basic_block bb;
3033 edge e;
3035 bb = *qhead++;
3036 if (qhead == qend)
3037 qhead = queue;
3038 bb->aux = NULL;
3040 /* Begin by propagating live_at_start from the successor blocks. */
3041 CLEAR_REG_SET (new_live_at_end);
3042 for (e = bb->succ; e ; e = e->succ_next)
3044 basic_block sb = e->dest;
3045 IOR_REG_SET (new_live_at_end, sb->global_live_at_start);
3048 if (bb == ENTRY_BLOCK_PTR)
3050 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3051 continue;
3054 /* On our first pass through this block, we'll go ahead and continue.
3055 Recognize the first pass by local_set being NULL. On subsequent passes, we
3056 get to skip out early if live_at_end wouldn't have changed. */
3058 if (bb->local_set == NULL)
3060 bb->local_set = OBSTACK_ALLOC_REG_SET (function_obstack);
3061 rescan = 1;
3063 else
3065 /* If any bits were removed from live_at_end, we'll have to
3066 rescan the block. This wouldn't be necessary if we had
3067 precalculated local_live, however with PROP_SCAN_DEAD_CODE
3068 local_live is really dependent on live_at_end. */
3069 CLEAR_REG_SET (tmp);
3070 rescan = bitmap_operation (tmp, bb->global_live_at_end,
3071 new_live_at_end, BITMAP_AND_COMPL);
3073 if (! rescan)
3075 /* Find the set of changed bits. Take this opportunity
3076 to notice when this set is empty, and exit early if so. */
3077 CLEAR_REG_SET (tmp);
3078 changed = bitmap_operation (tmp, bb->global_live_at_end,
3079 new_live_at_end, BITMAP_XOR);
3080 if (! changed)
3081 continue;
3083 /* If any of the changed bits overlap with local_set,
3084 we'll have to rescan the block. Detect overlap by
3085 the AND with ~local_set turning off bits. */
3086 rescan = bitmap_operation (tmp, tmp, bb->local_set,
3087 BITMAP_AND_COMPL);
3091 /* Let our caller know that BB changed enough to require its
3092 death notes updated. */
3093 SET_BIT (blocks_out, bb->index);
3095 if (! rescan)
3097 /* Add to live_at_start the set of all registers in
3098 new_live_at_end that aren't in the old live_at_end. */
3100 bitmap_operation (tmp, new_live_at_end, bb->global_live_at_end,
3101 BITMAP_AND_COMPL);
3102 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3104 changed = bitmap_operation (bb->global_live_at_start,
3105 bb->global_live_at_start,
3106 tmp, BITMAP_IOR);
3107 if (! changed)
3108 continue;
3110 else
3112 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3114 /* Rescan the block insn by insn to turn (a copy of) live_at_end
3115 into live_at_start. */
3116 propagate_block (new_live_at_end, bb->head, bb->end,
3117 bb->local_set, bb->index, flags);
3119 /* If live_at_start didn't change, no need to go farther. */
3120 if (REG_SET_EQUAL_P (bb->global_live_at_start, new_live_at_end))
3121 continue;
3123 COPY_REG_SET (bb->global_live_at_start, new_live_at_end);
3126 /* Queue all predecessors of BB so that we may re-examine
3127 their live_at_end. */
3128 for (e = bb->pred; e ; e = e->pred_next)
3130 basic_block pb = e->src;
3131 if (pb->aux == NULL)
3133 *qtail++ = pb;
3134 if (qtail == qend)
3135 qtail = queue;
3136 pb->aux = pb;
3141 FREE_REG_SET (tmp);
3142 FREE_REG_SET (new_live_at_end);
3144 EXECUTE_IF_SET_IN_SBITMAP (blocks_out, 0, i,
3146 basic_block bb = BASIC_BLOCK (i);
3147 FREE_REG_SET (bb->local_set);
3150 free (queue);
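/* The fixed-size circular worklist above can distinguish empty from full
   only because spare slots were allocated: with the `qhead == qtail'
   test meaning empty, a queue holding items in every slot would look
   empty too.  A minimal sketch of one such circular queue (hypothetical
   stand-alone code, not used by this file):  */
#if 0
#include <stdlib.h>

typedef struct block *blk;

static blk *queue, *qhead, *qtail, *qend;

static void
init_worklist (int n)
{
  /* Up to N items may be queued at once, so allocate N + 1 slots; then
     qhead and qtail coincide only when the queue is empty.  */
  queue = (blk *) malloc ((n + 1) * sizeof (blk));
  qhead = qtail = queue;
  qend = queue + n + 1;
}

static void
enqueue (blk b)
{
  *qtail++ = b;
  if (qtail == qend)
    qtail = queue;		/* wrap around */
}

static blk
dequeue (void)
{
  blk b = *qhead++;
  if (qhead == qend)
    qhead = queue;		/* wrap around */
  return b;
}
#endif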
3153 /* Subroutines of life analysis. */
3155 /* Allocate the permanent data structures that represent the results
3156 of life analysis. Not static since used also for stupid life analysis. */
3158 void
3159 allocate_bb_life_data ()
3161 register int i;
3163 for (i = 0; i < n_basic_blocks; i++)
3165 basic_block bb = BASIC_BLOCK (i);
3167 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
3168 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);
3171 ENTRY_BLOCK_PTR->global_live_at_end
3172 = OBSTACK_ALLOC_REG_SET (function_obstack);
3173 EXIT_BLOCK_PTR->global_live_at_start
3174 = OBSTACK_ALLOC_REG_SET (function_obstack);
3176 regs_live_at_setjmp = OBSTACK_ALLOC_REG_SET (function_obstack);
3179 void
3180 allocate_reg_life_data ()
3182 int i;
3184 /* Recalculate the register space, in case it has grown. Old style
3185 vector oriented regsets would set regset_{size,bytes} here also. */
3186 allocate_reg_info (max_regno, FALSE, FALSE);
3188 /* Reset all the data we'll collect in propagate_block and its
3189 subroutines. */
3190 for (i = 0; i < max_regno; i++)
3192 REG_N_SETS (i) = 0;
3193 REG_N_REFS (i) = 0;
3194 REG_N_DEATHS (i) = 0;
3195 REG_N_CALLS_CROSSED (i) = 0;
3196 REG_LIVE_LENGTH (i) = 0;
3197 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
3201 /* Compute the registers live at the beginning of a basic block
3202 from those live at the end.
3204 When called, OLD contains those live at the end.
3205 On return, it contains those live at the beginning.
3206 FIRST and LAST are the first and last insns of the basic block.
3208 FLAGS is the set of PROP_* bits that select the final-pass actions,
3209 which are not for computing the life info (since that has already
3210 been done) but for acting on it: deleting dead stores, setting up
3211 the logical links and dead-variables lists of instructions, and
3212 merging instructions for autoincrement and autodecrement addresses.
3214 SIGNIFICANT is nonzero only on the first pass over each basic block.
3215 If it is nonzero, it points to a regset in which we store
3216 a 1 for each register that is set within the block.
3218 BNUM is the number of the basic block. */
3220 static void
3221 propagate_block (old, first, last, significant, bnum, flags)
3222 register regset old;
3223 rtx first;
3224 rtx last;
3225 regset significant;
3226 int bnum;
3227 int flags;
3229 register rtx insn;
3230 rtx prev;
3231 regset live;
3232 regset dead;
3234 /* Find the loop depth for this block. Ignore loop level changes in the
3235 middle of the basic block -- for register allocation purposes, the
3236 important uses will be in the blocks wholly contained within the loop,
3237 not in the loop pre-header or post-trailer. */
3238 loop_depth = BASIC_BLOCK (bnum)->loop_depth;
3240 dead = ALLOCA_REG_SET ();
3241 live = ALLOCA_REG_SET ();
3243 cc0_live = 0;
3245 if (flags & PROP_REG_INFO)
3247 register int i;
3249 /* Process the regs live at the end of the block.
3250 Mark them as not local to any one basic block. */
3251 EXECUTE_IF_SET_IN_REG_SET (old, 0, i,
3253 REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL;
3257 /* Scan the block an insn at a time from end to beginning. */
3259 for (insn = last; ; insn = prev)
3261 prev = PREV_INSN (insn);
3263 if (GET_CODE (insn) == NOTE)
3265 /* If this is a call to `setjmp' et al,
3266 warn if any non-volatile datum is live. */
3268 if ((flags & PROP_REG_INFO)
3269 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
3270 IOR_REG_SET (regs_live_at_setjmp, old);
3273 /* Update the life-status of regs for this insn.
3274 First DEAD gets which regs are set in this insn
3275 then LIVE gets which regs are used in this insn.
3276 Then the regs live before the insn
3277 are those live after, with DEAD regs turned off,
3278 and then LIVE regs turned on. */
3280 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
3282 register int i;
3283 rtx note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
3284 int insn_is_dead = 0;
3285 int libcall_is_dead = 0;
3287 if (flags & PROP_SCAN_DEAD_CODE)
3289 insn_is_dead = (insn_dead_p (PATTERN (insn), old, 0, REG_NOTES (insn))
3290 /* Don't delete something that refers to volatile storage! */
3291 && ! INSN_VOLATILE (insn));
3292 libcall_is_dead = (insn_is_dead && note != 0
3293 && libcall_dead_p (PATTERN (insn), old, note, insn));
3296 /* We almost certainly don't want to delete prologue or epilogue
3297 instructions. Warn about probable compiler lossage. */
3298 if ((flags & PROP_KILL_DEAD_CODE)
3299 && insn_is_dead
3300 && reload_completed
3301 && (HAVE_epilogue || HAVE_prologue)
3302 && prologue_epilogue_contains (insn))
3304 warning ("ICE: would have deleted prologue/epilogue insn");
3305 debug_rtx (insn);
3306 libcall_is_dead = insn_is_dead = 0;
3309 /* If an instruction consists of just dead store(s) on final pass,
3310 "delete" it by turning it into a NOTE of type NOTE_INSN_DELETED.
3311 We could really delete it with delete_insn, but that
3312 can cause trouble for first or last insn in a basic block. */
3313 if ((flags & PROP_KILL_DEAD_CODE) && insn_is_dead)
3315 rtx inote;
3316 /* If the insn referred to a label, note that the label is
3317 now less used. */
3318 for (inote = REG_NOTES (insn); inote; inote = XEXP (inote, 1))
3320 if (REG_NOTE_KIND (inote) == REG_LABEL)
3322 rtx label = XEXP (inote, 0);
3323 rtx next;
3324 LABEL_NUSES (label)--;
3326 /* If this label was attached to an ADDR_VEC, it's
3327 safe to delete the ADDR_VEC. In fact, it's pretty much
3328 mandatory to delete it, because the ADDR_VEC may
3329 be referencing labels that no longer exist. */
3330 if (LABEL_NUSES (label) == 0
3331 && (next = next_nonnote_insn (label)) != NULL
3332 && GET_CODE (next) == JUMP_INSN
3333 && (GET_CODE (PATTERN (next)) == ADDR_VEC
3334 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
3336 rtx pat = PATTERN (next);
3337 int diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
3338 int len = XVECLEN (pat, diff_vec_p);
3339 int i;
3340 for (i = 0; i < len; i++)
3341 LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0))--;
3342 PUT_CODE (next, NOTE);
3343 NOTE_LINE_NUMBER (next) = NOTE_INSN_DELETED;
3344 NOTE_SOURCE_FILE (next) = 0;
3349 PUT_CODE (insn, NOTE);
3350 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
3351 NOTE_SOURCE_FILE (insn) = 0;
3353 /* CC0 is now known to be dead. Either this insn used it,
3354 in which case it doesn't anymore, or clobbered it,
3355 so the next insn can't use it. */
3356 cc0_live = 0;
3358 /* If this insn is copying the return value from a library call,
3359 delete the entire library call. */
3360 if (libcall_is_dead)
3362 rtx first = XEXP (note, 0);
3363 rtx p = insn;
3364 while (INSN_DELETED_P (first))
3365 first = NEXT_INSN (first);
3366 while (p != first)
3368 p = PREV_INSN (p);
3369 PUT_CODE (p, NOTE);
3370 NOTE_LINE_NUMBER (p) = NOTE_INSN_DELETED;
3371 NOTE_SOURCE_FILE (p) = 0;
3374 goto flushed;
3377 CLEAR_REG_SET (dead);
3378 CLEAR_REG_SET (live);
3380 /* See if this is an increment or decrement that can be
3381 merged into a following memory address. */
3382 #ifdef AUTO_INC_DEC
3384 register rtx x = single_set (insn);
3386 /* Does this instruction increment or decrement a register? */
3387 if (!reload_completed
3388 && (flags & PROP_AUTOINC)
3389 && x != 0
3390 && GET_CODE (SET_DEST (x)) == REG
3391 && (GET_CODE (SET_SRC (x)) == PLUS
3392 || GET_CODE (SET_SRC (x)) == MINUS)
3393 && XEXP (SET_SRC (x), 0) == SET_DEST (x)
3394 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
3395 /* Ok, look for a following memory ref we can combine with.
3396 If one is found, change the memory ref to a PRE_INC
3397 or PRE_DEC, cancel this insn, and return 1.
3398 Return 0 if nothing has been done. */
3399 && try_pre_increment_1 (insn))
3400 goto flushed;
3402 #endif /* AUTO_INC_DEC */
3404 /* If this is not the final pass, and this insn is copying the
3405 value of a library call and it's dead, don't scan the
3406 insns that perform the library call, so that the call's
3407 arguments are not marked live. */
3408 if (libcall_is_dead)
3410 /* Mark the dest reg as `significant'. */
3411 mark_set_regs (old, dead, PATTERN (insn), NULL_RTX,
3412 significant, flags);
3414 insn = XEXP (note, 0);
3415 prev = PREV_INSN (insn);
3417 else if (GET_CODE (PATTERN (insn)) == SET
3418 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
3419 && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
3420 && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx
3421 && GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == CONST_INT)
3422 /* We have an insn to pop a constant amount off the stack.
3423 (Such insns use PLUS regardless of the direction of the stack,
3424 and any insn to adjust the stack by a constant is always a pop.)
3425 These insns, if not dead stores, have no effect on life. */
3427 else
3429 /* Any regs live at the time of a call instruction
3430 must not go in a register clobbered by calls.
3431 Find all regs now live and record this for them. */
3433 if (GET_CODE (insn) == CALL_INSN
3434 && (flags & PROP_REG_INFO))
3435 EXECUTE_IF_SET_IN_REG_SET (old, 0, i,
3437 REG_N_CALLS_CROSSED (i)++;
3440 /* LIVE gets the regs used in INSN;
3441 DEAD gets those set by it. Dead insns don't make anything
3442 live. */
3444 mark_set_regs (old, dead, PATTERN (insn),
3445 insn, significant, flags);
3447 /* If an insn doesn't use CC0, it becomes dead since we
3448 assume that every insn clobbers it. So show it dead here;
3449 mark_used_regs will set it live if it is referenced. */
3450 cc0_live = 0;
3452 if (! insn_is_dead)
3453 mark_used_regs (old, live, PATTERN (insn), flags, insn);
3455 /* Sometimes we may have inserted something before INSN (such as
3456 a move) when we make an auto-inc. So ensure we will scan
3457 those insns. */
3458 #ifdef AUTO_INC_DEC
3459 prev = PREV_INSN (insn);
3460 #endif
3462 if (! insn_is_dead && GET_CODE (insn) == CALL_INSN)
3464 register int i;
3466 rtx note;
3468 for (note = CALL_INSN_FUNCTION_USAGE (insn);
3469 note;
3470 note = XEXP (note, 1))
3471 if (GET_CODE (XEXP (note, 0)) == USE)
3472 mark_used_regs (old, live, XEXP (XEXP (note, 0), 0),
3473 flags, insn);
3475 /* Each call clobbers all call-clobbered regs that are not
3476 global or fixed. Note that the function-value reg is a
3477 call-clobbered reg, and mark_set_regs has already had
3478 a chance to handle it. */
3480 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3481 if (call_used_regs[i] && ! global_regs[i]
3482 && ! fixed_regs[i])
3484 SET_REGNO_REG_SET (dead, i);
3485 if (significant)
3486 SET_REGNO_REG_SET (significant, i);
3489 /* The stack ptr is used (honorarily) by a CALL insn. */
3490 SET_REGNO_REG_SET (live, STACK_POINTER_REGNUM);
3492 /* Calls may also reference any of the global registers,
3493 so they are made live. */
3494 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3495 if (global_regs[i])
3496 mark_used_regs (old, live,
3497 gen_rtx_REG (reg_raw_mode[i], i),
3498 flags, insn);
3500 /* Calls also clobber memory. */
3501 free_EXPR_LIST_list (&mem_set_list);
3504 /* Update OLD for the registers used or set. */
3505 AND_COMPL_REG_SET (old, dead);
3506 IOR_REG_SET (old, live);
3510 /* On the final pass, update the count of insns at which each reg
3511 is live. */
3512 if (flags & PROP_REG_INFO)
3513 EXECUTE_IF_SET_IN_REG_SET (old, 0, i,
3514 { REG_LIVE_LENGTH (i)++; });
3516 flushed: ;
3517 if (insn == first)
3518 break;
3521 FREE_REG_SET (dead);
3522 FREE_REG_SET (live);
3523 free_EXPR_LIST_list (&mem_set_list);
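/* The core per-insn update applied above is the classic backward
   liveness transfer function: live-before = (live-after minus DEAD)
   plus LIVE.  A one-word sketch with plain bit masks (hypothetical,
   for illustration only):  */
#if 0
static unsigned long
live_before (unsigned long live_after,
	     unsigned long dead,	/* regs set by the insn */
	     unsigned long live)	/* regs used by the insn */
{
  /* Mirrors AND_COMPL_REG_SET (old, dead); IOR_REG_SET (old, live).  */
  return (live_after & ~dead) | live;
}
#endif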
3526 /* Return 1 if X (the body of an insn, or part of it) is just dead stores
3527 (SET expressions whose destinations are registers dead after the insn).
3528 NEEDED is the regset that says which regs are alive after the insn.
3530 Unless CALL_OK is non-zero, an insn is needed if it contains a CALL.
3532 If X is the entire body of an insn, NOTES contains the reg notes
3533 pertaining to the insn. */
3535 static int
3536 insn_dead_p (x, needed, call_ok, notes)
3537 rtx x;
3538 regset needed;
3539 int call_ok;
3540 rtx notes ATTRIBUTE_UNUSED;
3542 enum rtx_code code = GET_CODE (x);
3544 #ifdef AUTO_INC_DEC
3545 /* If flow is invoked after reload, we must take existing AUTO_INC
3546 expressions into account. */
3547 if (reload_completed)
3549 for ( ; notes; notes = XEXP (notes, 1))
3551 if (REG_NOTE_KIND (notes) == REG_INC)
3553 int regno = REGNO (XEXP (notes, 0));
3555 /* Don't delete insns to set global regs. */
3556 if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
3557 || REGNO_REG_SET_P (needed, regno))
3558 return 0;
3562 #endif
3564 /* If setting something that's a reg or part of one,
3565 see if that register's altered value will be live. */
3567 if (code == SET)
3569 rtx r = SET_DEST (x);
3571 /* A SET that is a subroutine call cannot be dead. */
3572 if (! call_ok && GET_CODE (SET_SRC (x)) == CALL)
3573 return 0;
3575 #ifdef HAVE_cc0
3576 if (GET_CODE (r) == CC0)
3577 return ! cc0_live;
3578 #endif
3580 if (GET_CODE (r) == MEM && ! MEM_VOLATILE_P (r))
3582 rtx temp;
3583 /* Walk the set of memory locations we are currently tracking
3584 and see if one is an identical match to this memory location.
3585 If so, this memory write is dead (remember, we're walking
3586 backwards from the end of the block to the start). */
3587 temp = mem_set_list;
3588 while (temp)
3590 if (rtx_equal_p (XEXP (temp, 0), r))
3591 return 1;
3592 temp = XEXP (temp, 1);
3596 while (GET_CODE (r) == SUBREG || GET_CODE (r) == STRICT_LOW_PART
3597 || GET_CODE (r) == ZERO_EXTRACT)
3598 r = XEXP (r, 0);
3600 if (GET_CODE (r) == REG)
3602 int regno = REGNO (r);
3604 /* Don't delete insns to set global regs. */
3605 if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
3606 /* Make sure insns to set frame pointer aren't deleted. */
3607 || (regno == FRAME_POINTER_REGNUM
3608 && (! reload_completed || frame_pointer_needed))
3609 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3610 || (regno == HARD_FRAME_POINTER_REGNUM
3611 && (! reload_completed || frame_pointer_needed))
3612 #endif
3613 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3614 /* Make sure insns to set arg pointer are never deleted
3615 (if the arg pointer isn't fixed, there will be a USE for
3616 it, so we can treat it normally). */
3617 || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
3618 #endif
3619 || REGNO_REG_SET_P (needed, regno))
3620 return 0;
3622 /* If this is a hard register, verify that subsequent words are
3623 not needed. */
3624 if (regno < FIRST_PSEUDO_REGISTER)
3626 int n = HARD_REGNO_NREGS (regno, GET_MODE (r));
3628 while (--n > 0)
3629 if (REGNO_REG_SET_P (needed, regno+n))
3630 return 0;
3633 return 1;
3637 /* If performing several activities,
3638 insn is dead if each activity is individually dead.
3639 Also, CLOBBERs and USEs can be ignored; a CLOBBER or USE
3640 that's inside a PARALLEL doesn't make the insn worth keeping. */
3641 else if (code == PARALLEL)
3643 int i = XVECLEN (x, 0);
3645 for (i--; i >= 0; i--)
3646 if (GET_CODE (XVECEXP (x, 0, i)) != CLOBBER
3647 && GET_CODE (XVECEXP (x, 0, i)) != USE
3648 && ! insn_dead_p (XVECEXP (x, 0, i), needed, call_ok, NULL_RTX))
3649 return 0;
3651 return 1;
3654 /* A CLOBBER of a pseudo-register that is dead serves no purpose. That
3655 is not necessarily true for hard registers. */
3656 else if (code == CLOBBER && GET_CODE (XEXP (x, 0)) == REG
3657 && REGNO (XEXP (x, 0)) >= FIRST_PSEUDO_REGISTER
3658 && ! REGNO_REG_SET_P (needed, REGNO (XEXP (x, 0))))
3659 return 1;
3661 /* We do not check other CLOBBER or USE here. An insn consisting of just
3662 a CLOBBER or just a USE should not be deleted. */
3663 return 0;
3666 /* If X is the pattern of the last insn in a libcall, and assuming X is dead,
3667 return 1 if the entire library call is dead.
3668 This is true if X copies a register (hard or pseudo)
3669 and if the hard return reg of the call insn is dead.
3670 (The caller should have tested the destination of X already for death.)
3672 If this insn doesn't just copy a register, then we don't
3673 have an ordinary libcall. In that case, cse could not have
3674 managed to substitute the source for the dest later on,
3675 so we can assume the libcall is dead.
3677 NEEDED is the bit vector of pseudoregs live before this insn.
3678 NOTE is the REG_RETVAL note of the insn. INSN is the insn itself. */
3680 static int
3681 libcall_dead_p (x, needed, note, insn)
3682 rtx x;
3683 regset needed;
3684 rtx note;
3685 rtx insn;
3687 register RTX_CODE code = GET_CODE (x);
3689 if (code == SET)
3691 register rtx r = SET_SRC (x);
3692 if (GET_CODE (r) == REG)
3694 rtx call = XEXP (note, 0);
3695 rtx call_pat;
3696 register int i;
3698 /* Find the call insn. */
3699 while (call != insn && GET_CODE (call) != CALL_INSN)
3700 call = NEXT_INSN (call);
3702 /* If there is none, do nothing special,
3703 since ordinary death handling can understand these insns. */
3704 if (call == insn)
3705 return 0;
3707 /* See if the hard reg holding the value is dead.
3708 If this is a PARALLEL, find the call within it. */
3709 call_pat = PATTERN (call);
3710 if (GET_CODE (call_pat) == PARALLEL)
3712 for (i = XVECLEN (call_pat, 0) - 1; i >= 0; i--)
3713 if (GET_CODE (XVECEXP (call_pat, 0, i)) == SET
3714 && GET_CODE (SET_SRC (XVECEXP (call_pat, 0, i))) == CALL)
3715 break;
3717 /* This may be a library call that is returning a value
3718 via invisible pointer. Do nothing special, since
3719 ordinary death handling can understand these insns. */
3720 if (i < 0)
3721 return 0;
3723 call_pat = XVECEXP (call_pat, 0, i);
3726 return insn_dead_p (call_pat, needed, 1, REG_NOTES (call));
3729 return 1;
3732 /* Return 1 if register REGNO was used before it was set, i.e. if it is
3733 live at function entry. Don't count global register variables, variables
3734 in registers that can be used for function arg passing, or variables in
3735 fixed hard registers. */
3737 int
3738 regno_uninitialized (regno)
3739 int regno;
3741 if (n_basic_blocks == 0
3742 || (regno < FIRST_PSEUDO_REGISTER
3743 && (global_regs[regno]
3744 || fixed_regs[regno]
3745 || FUNCTION_ARG_REGNO_P (regno))))
3746 return 0;
3748 return REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno);
3751 /* 1 if register REGNO was alive at a place where `setjmp' was called
3752 and was set more than once or is an argument.
3753 Such regs may be clobbered by `longjmp'. */
3755 int
3756 regno_clobbered_at_setjmp (regno)
3757 int regno;
3759 if (n_basic_blocks == 0)
3760 return 0;
3762 return ((REG_N_SETS (regno) > 1
3763 || REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno))
3764 && REGNO_REG_SET_P (regs_live_at_setjmp, regno));
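/* E.g., a pseudo that is live across a setjmp and also set more than
   once may end up in a hard reg that longjmp does not restore; this
   predicate supports warning about such variables.  */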
3767 /* INSN references memory, possibly using autoincrement addressing modes.
3768 Find any entries on the mem_set_list that need to be invalidated due
3769 to an address change. */
3770 static void
3771 invalidate_mems_from_autoinc (insn)
3772 rtx insn;
3774 rtx note;
3775 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
3777 if (REG_NOTE_KIND (note) == REG_INC)
3779 rtx temp = mem_set_list;
3780 rtx prev = NULL_RTX;
3781 rtx next;
3783 while (temp)
3785 next = XEXP (temp, 1);
3786 if (reg_overlap_mentioned_p (XEXP (note, 0), XEXP (temp, 0)))
3788 /* Splice temp out of list. */
3789 if (prev)
3790 XEXP (prev, 1) = next;
3791 else
3792 mem_set_list = next;
3793 free_EXPR_LIST_node (temp);
3795 else
3796 prev = temp;
3797 temp = next;
3803 /* Process the registers that are set within X. Their bits are set to
3804 1 in the regset DEAD, because they are dead prior to this insn.
3806 If INSN is nonzero, it is the insn being processed.
3808 FLAGS is the set of operations to perform. */
3810 static void
3811 mark_set_regs (needed, dead, x, insn, significant, flags)
3812 regset needed;
3813 regset dead;
3814 rtx x;
3815 rtx insn;
3816 regset significant;
3817 int flags;
3819 register RTX_CODE code = GET_CODE (x);
3821 if (code == SET || code == CLOBBER)
3822 mark_set_1 (needed, dead, x, insn, significant, flags);
3823 else if (code == PARALLEL)
3825 register int i;
3826 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
3828 code = GET_CODE (XVECEXP (x, 0, i));
3829 if (code == SET || code == CLOBBER)
3830 mark_set_1 (needed, dead, XVECEXP (x, 0, i), insn,
3831 significant, flags);
3836 /* Process a single SET rtx, X. */
3838 static void
3839 mark_set_1 (needed, dead, x, insn, significant, flags)
3840 regset needed;
3841 regset dead;
3842 rtx x;
3843 rtx insn;
3844 regset significant;
3845 int flags;
3847 register int regno = -1;
3848 register rtx reg = SET_DEST (x);
3850 /* Some targets place small structures in registers for
3851 return values of functions. We have to detect this
3852 case specially here to get correct flow information. */
3853 if (GET_CODE (reg) == PARALLEL
3854 && GET_MODE (reg) == BLKmode)
3856 register int i;
3858 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
3859 mark_set_1 (needed, dead, XVECEXP (reg, 0, i), insn,
3860 significant, flags);
3861 return;
3864 /* Modifying just one hardware register of a multi-reg value
3865 or just a byte field of a register
3866 does not mean the value from before this insn is now dead.
3867 But it does mean liveness of that register at the end of the block
3868 is significant.
3870 Within mark_set_1, however, we treat it as if the register is
3871 indeed modified. mark_used_regs will, however, also treat this
3872 register as being used. Thus, we treat these insns as setting a
3873 new value for the register as a function of its old value. This
3874 causes LOG_LINKS to be made appropriately and this will help combine. */
3876 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
3877 || GET_CODE (reg) == SIGN_EXTRACT
3878 || GET_CODE (reg) == STRICT_LOW_PART)
3879 reg = XEXP (reg, 0);
3881 /* If this set is a MEM, then it kills any aliased writes.
3882 If this set is a REG, then it kills any MEMs which use the reg. */
3883 if (flags & PROP_SCAN_DEAD_CODE)
3885 if (GET_CODE (reg) == MEM
3886 || GET_CODE (reg) == REG)
3888 rtx temp = mem_set_list;
3889 rtx prev = NULL_RTX;
3890 rtx next;
3892 while (temp)
3894 next = XEXP (temp, 1);
3895 if ((GET_CODE (reg) == MEM
3896 && output_dependence (XEXP (temp, 0), reg))
3897 || (GET_CODE (reg) == REG
3898 && reg_overlap_mentioned_p (reg, XEXP (temp, 0))))
3900 /* Splice this entry out of the list. */
3901 if (prev)
3902 XEXP (prev, 1) = next;
3903 else
3904 mem_set_list = next;
3905 free_EXPR_LIST_node (temp);
3907 else
3908 prev = temp;
3909 temp = next;
3913 /* If the memory reference had embedded side effects (autoincrement
3914 address modes), then we may need to kill some entries on the
3915 memory set list. */
3916 if (insn && GET_CODE (reg) == MEM)
3917 invalidate_mems_from_autoinc (insn);
3919 if (GET_CODE (reg) == MEM && ! side_effects_p (reg)
3920 /* We do not know the size of a BLKmode store, so we do not track
3921 them for redundant store elimination. */
3922 && GET_MODE (reg) != BLKmode
3923 /* There are no REG_INC notes for SP, so we can't assume we'll see
3924 everything that invalidates it. To be safe, don't eliminate any
3925	 stores through SP; none of them should be redundant anyway.  */
3926 && ! reg_mentioned_p (stack_pointer_rtx, reg))
3927 mem_set_list = alloc_EXPR_LIST (0, reg, mem_set_list);
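      /* Illustrative note: with this entry recorded, an earlier store to
         the same location, e.g. another (set (mem:SI (reg:SI 100)) ...)
         seen later in this backward scan with no intervening load or
         aliasing write, can be recognized as a redundant (dead) store.  */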
3930 if (GET_CODE (reg) == REG
3931 && (regno = REGNO (reg),
3932 ! (regno == FRAME_POINTER_REGNUM
3933 && (! reload_completed || frame_pointer_needed)))
3934 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3935 && ! (regno == HARD_FRAME_POINTER_REGNUM
3936 && (! reload_completed || frame_pointer_needed))
3937 #endif
3938 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3939 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
3940 #endif
3941 && ! (regno < FIRST_PSEUDO_REGISTER && global_regs[regno]))
3942 /* && regno != STACK_POINTER_REGNUM) -- let's try without this. */
3944 int some_needed = REGNO_REG_SET_P (needed, regno);
3945 int some_not_needed = ! some_needed;
3947 /* Mark it as a significant register for this basic block. */
3948 if (significant)
3949 SET_REGNO_REG_SET (significant, regno);
3951 /* Mark it as dead before this insn. */
3952 SET_REGNO_REG_SET (dead, regno);
3954 /* A hard reg in a wide mode may really be multiple registers.
3955 If so, mark all of them just like the first. */
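      /* E.g., on a typical 32-bit target a DImode value in hard register
         R occupies R and R+1, so HARD_REGNO_NREGS is 2 and both registers
         are marked below (illustrative example).  */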
3956 if (regno < FIRST_PSEUDO_REGISTER)
3958 int n;
3960 /* Nothing below is needed for the stack pointer; get out asap.
3961 Eg, log links aren't needed, since combine won't use them. */
3962 if (regno == STACK_POINTER_REGNUM)
3963 return;
3965 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
3966 while (--n > 0)
3968 int regno_n = regno + n;
3969 int needed_regno = REGNO_REG_SET_P (needed, regno_n);
3970 if (significant)
3971 SET_REGNO_REG_SET (significant, regno_n);
3973 SET_REGNO_REG_SET (dead, regno_n);
3974 some_needed |= needed_regno;
3975 some_not_needed |= ! needed_regno;
3979 /* Additional data to record if this is the final pass. */
3980 if (flags & (PROP_LOG_LINKS | PROP_REG_INFO
3981 | PROP_DEATH_NOTES | PROP_AUTOINC))
3983 register rtx y;
3984 register int blocknum = BLOCK_NUM (insn);
3986 y = NULL_RTX;
3987 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
3988 y = reg_next_use[regno];
3990	  /* If this is a hard reg, record that this function uses the reg.  */
3992 if (regno < FIRST_PSEUDO_REGISTER)
3994 register int i;
3995 int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (reg));
3997 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
3998 for (i = regno; i < endregno; i++)
4000 /* The next use is no longer "next", since a store
4001 intervenes. */
4002 reg_next_use[i] = 0;
4005 if (flags & PROP_REG_INFO)
4006 for (i = regno; i < endregno; i++)
4008 regs_ever_live[i] = 1;
4009 REG_N_SETS (i)++;
4012 else
4014 /* The next use is no longer "next", since a store
4015 intervenes. */
4016 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4017 reg_next_use[regno] = 0;
4019 /* Keep track of which basic blocks each reg appears in. */
4021 if (flags & PROP_REG_INFO)
4023 if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
4024 REG_BASIC_BLOCK (regno) = blocknum;
4025 else if (REG_BASIC_BLOCK (regno) != blocknum)
4026 REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
4028 /* Count (weighted) references, stores, etc. This counts a
4029 register twice if it is modified, but that is correct. */
4030 REG_N_SETS (regno)++;
4031 REG_N_REFS (regno) += loop_depth;
4033 /* The insns where a reg is live are normally counted
4034 elsewhere, but we want the count to include the insn
4035 where the reg is set, and the normal counting mechanism
4036 would not count it. */
4037 REG_LIVE_LENGTH (regno)++;
4041 if (! some_not_needed)
4043 if (flags & PROP_LOG_LINKS)
4045 /* Make a logical link from the next following insn
4046 that uses this register, back to this insn.
4047 The following insns have already been processed.
4049		 We don't build a LOG_LINK for hard registers contained
4050 in ASM_OPERANDs. If these registers get replaced,
4051 we might wind up changing the semantics of the insn,
4052 even if reload can make what appear to be valid
4053 assignments later. */
4054 if (y && (BLOCK_NUM (y) == blocknum)
4055 && (regno >= FIRST_PSEUDO_REGISTER
4056 || asm_noperands (PATTERN (y)) < 0))
4057 LOG_LINKS (y) = alloc_INSN_LIST (insn, LOG_LINKS (y));
4060 else if (! some_needed)
4062 if (flags & PROP_REG_INFO)
4063 REG_N_DEATHS (REGNO (reg))++;
4065 if (flags & PROP_DEATH_NOTES)
4067 /* Note that dead stores have already been deleted
4068 when possible. If we get here, we have found a
4069 dead store that cannot be eliminated (because the
4070 same insn does something useful). Indicate this
4071 by marking the reg being set as dying here. */
4072 REG_NOTES (insn)
4073 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
4076 else
4078 if (flags & PROP_DEATH_NOTES)
4080 /* This is a case where we have a multi-word hard register
4081 and some, but not all, of the words of the register are
4082 needed in subsequent insns. Write REG_UNUSED notes
4083 for those parts that were not needed. This case should
4084 be rare. */
4086 int i;
4088 for (i = HARD_REGNO_NREGS (regno, GET_MODE (reg)) - 1;
4089 i >= 0; i--)
4090 if (!REGNO_REG_SET_P (needed, regno + i))
4091 REG_NOTES (insn)
4092 = (alloc_EXPR_LIST
4093 (REG_UNUSED,
4094 gen_rtx_REG (reg_raw_mode[regno + i], regno + i),
4095 REG_NOTES (insn)));
4100 else if (GET_CODE (reg) == REG)
4102 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4103 reg_next_use[regno] = 0;
4106 /* If this is the last pass and this is a SCRATCH, show it will be dying
4107 here and count it. */
4108 else if (GET_CODE (reg) == SCRATCH)
4110 if (flags & PROP_DEATH_NOTES)
4111 REG_NOTES (insn)
4112 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
4116 #ifdef AUTO_INC_DEC
4118 /* X is a MEM found in INSN. See if we can convert it into an auto-increment
4119 reference. */
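/* For instance (illustrative RTL, assuming Pmode is SImode and the target
   has HAVE_POST_INCREMENT): a use ... (mem:SI (reg:SI 100)) ... followed
   later in the same basic block by
      (set (reg:SI 100) (plus:SI (reg:SI 100) (const_int 4)))
   may be rewritten as (mem:SI (post_inc:SI (reg:SI 100))), with the
   separate add turned into a deleted note, provided the safety checks
   below succeed.  */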
4121 static void
4122 find_auto_inc (needed, x, insn)
4123 regset needed;
4124 rtx x;
4125 rtx insn;
4127 rtx addr = XEXP (x, 0);
4128 HOST_WIDE_INT offset = 0;
4129 rtx set;
4131 /* Here we detect use of an index register which might be good for
4132 postincrement, postdecrement, preincrement, or predecrement. */
4134 if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
4135 offset = INTVAL (XEXP (addr, 1)), addr = XEXP (addr, 0);
4137 if (GET_CODE (addr) == REG)
4139 register rtx y;
4140 register int size = GET_MODE_SIZE (GET_MODE (x));
4141 rtx use;
4142 rtx incr;
4143 int regno = REGNO (addr);
4145 /* Is the next use an increment that might make auto-increment? */
4146 if ((incr = reg_next_use[regno]) != 0
4147 && (set = single_set (incr)) != 0
4148 && GET_CODE (set) == SET
4149 && BLOCK_NUM (incr) == BLOCK_NUM (insn)
4150 /* Can't add side effects to jumps; if reg is spilled and
4151 reloaded, there's no way to store back the altered value. */
4152 && GET_CODE (insn) != JUMP_INSN
4153 && (y = SET_SRC (set), GET_CODE (y) == PLUS)
4154 && XEXP (y, 0) == addr
4155 && GET_CODE (XEXP (y, 1)) == CONST_INT
4156 && ((HAVE_POST_INCREMENT
4157 && (INTVAL (XEXP (y, 1)) == size && offset == 0))
4158 || (HAVE_POST_DECREMENT
4159 && (INTVAL (XEXP (y, 1)) == - size && offset == 0))
4160 || (HAVE_PRE_INCREMENT
4161 && (INTVAL (XEXP (y, 1)) == size && offset == size))
4162 || (HAVE_PRE_DECREMENT
4163 && (INTVAL (XEXP (y, 1)) == - size && offset == - size)))
4164 /* Make sure this reg appears only once in this insn. */
4165 && (use = find_use_as_address (PATTERN (insn), addr, offset),
4166 use != 0 && use != (rtx) 1))
4168 rtx q = SET_DEST (set);
4169 enum rtx_code inc_code = (INTVAL (XEXP (y, 1)) == size
4170 ? (offset ? PRE_INC : POST_INC)
4171 : (offset ? PRE_DEC : POST_DEC));
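      /* Summarizing the checks above: increment == size and offset == 0
         => POST_INC; increment == size and offset == size => PRE_INC;
         increment == -size and offset == 0 => POST_DEC; increment == -size
         and offset == -size => PRE_DEC.  */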
4173 if (dead_or_set_p (incr, addr))
4175 /* This is the simple case. Try to make the auto-inc. If
4176 we can't, we are done. Otherwise, we will do any
4177 needed updates below. */
4178 if (! validate_change (insn, &XEXP (x, 0),
4179 gen_rtx_fmt_e (inc_code, Pmode, addr),
4180				     0))
4181		return;
4183 else if (GET_CODE (q) == REG
4184 /* PREV_INSN used here to check the semi-open interval
4185 [insn,incr). */
4186 && ! reg_used_between_p (q, PREV_INSN (insn), incr)
4187 /* We must also check for sets of q as q may be
4188 a call clobbered hard register and there may
4189 be a call between PREV_INSN (insn) and incr. */
4190 && ! reg_set_between_p (q, PREV_INSN (insn), incr))
4192 /* We have *p followed sometime later by q = p+size.
4193 Both p and q must be live afterward,
4194 and q is not used between INSN and its assignment.
4195 Change it to q = p, ...*q..., q = q+size.
4196 Then fall into the usual case. */
4197 rtx insns, temp;
4198 basic_block bb;
4200 start_sequence ();
4201 emit_move_insn (q, addr);
4202 insns = get_insns ();
4203 end_sequence ();
4205 bb = BLOCK_FOR_INSN (insn);
4206 for (temp = insns; temp; temp = NEXT_INSN (temp))
4207 set_block_for_insn (temp, bb);
4209 /* If we can't make the auto-inc, or can't make the
4210 replacement into Y, exit. There's no point in making
4211 the change below if we can't do the auto-inc and doing
4212 so is not correct in the pre-inc case. */
4214 validate_change (insn, &XEXP (x, 0),
4215 gen_rtx_fmt_e (inc_code, Pmode, q),
4216			       1);
4217	      validate_change (incr, &XEXP (y, 0), q, 1);
4218 if (! apply_change_group ())
4219 return;
4221 /* We now know we'll be doing this change, so emit the
4222 new insn(s) and do the updates. */
4223 emit_insns_before (insns, insn);
4225 if (BLOCK_FOR_INSN (insn)->head == insn)
4226 BLOCK_FOR_INSN (insn)->head = insns;
4228 /* INCR will become a NOTE and INSN won't contain a
4229 use of ADDR. If a use of ADDR was just placed in
4230 the insn before INSN, make that the next use.
4231 Otherwise, invalidate it. */
4232 if (GET_CODE (PREV_INSN (insn)) == INSN
4233 && GET_CODE (PATTERN (PREV_INSN (insn))) == SET
4234 && SET_SRC (PATTERN (PREV_INSN (insn))) == addr)
4235 reg_next_use[regno] = PREV_INSN (insn);
4236 else
4237 reg_next_use[regno] = 0;
4239 addr = q;
4240 regno = REGNO (q);
4242 /* REGNO is now used in INCR which is below INSN, but
4243 it previously wasn't live here. If we don't mark
4244 it as needed, we'll put a REG_DEAD note for it
4245 on this insn, which is incorrect. */
4246 SET_REGNO_REG_SET (needed, regno);
4248 /* If there are any calls between INSN and INCR, show
4249 that REGNO now crosses them. */
4250 for (temp = insn; temp != incr; temp = NEXT_INSN (temp))
4251 if (GET_CODE (temp) == CALL_INSN)
4252 REG_N_CALLS_CROSSED (regno)++;
4254 else
4255 return;
4257 /* If we haven't returned, it means we were able to make the
4258 auto-inc, so update the status. First, record that this insn
4259 has an implicit side effect. */
4261 REG_NOTES (insn)
4262 = alloc_EXPR_LIST (REG_INC, addr, REG_NOTES (insn));
4264 /* Modify the old increment-insn to simply copy
4265 the already-incremented value of our register. */
4266 if (! validate_change (incr, &SET_SRC (set), addr, 0))
4267 abort ();
4269 /* If that makes it a no-op (copying the register into itself) delete
4270 it so it won't appear to be a "use" and a "set" of this
4271 register. */
4272 if (SET_DEST (set) == addr)
4274 PUT_CODE (incr, NOTE);
4275 NOTE_LINE_NUMBER (incr) = NOTE_INSN_DELETED;
4276 NOTE_SOURCE_FILE (incr) = 0;
4279 if (regno >= FIRST_PSEUDO_REGISTER)
4281 /* Count an extra reference to the reg. When a reg is
4282 incremented, spilling it is worse, so we want to make
4283 that less likely. */
4284 REG_N_REFS (regno) += loop_depth;
4286 /* Count the increment as a setting of the register,
4287 even though it isn't a SET in rtl. */
4288 REG_N_SETS (regno)++;
4293 #endif /* AUTO_INC_DEC */
4295 /* Scan expression X and store a 1-bit in LIVE for each reg it uses.
4296 This is done assuming the registers needed from X
4297 are those that have 1-bits in NEEDED.
4299 FLAGS is the set of enabled operations.
4301 INSN is the containing instruction. If INSN is dead, this function is not
4302 called. */
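/* E.g., for X = (set (reg 100) (plus (reg 101) (reg 102))) this sets the
   LIVE bits for registers 101 and 102; the destination register 100 is
   handled by the SET case below and is essentially not marked as used
   unless only part of it is stored (illustrative pseudo register
   numbers).  */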
4304 static void
4305 mark_used_regs (needed, live, x, flags, insn)
4306 regset needed;
4307 regset live;
4308 rtx x;
4309 int flags;
4310 rtx insn;
4312 register RTX_CODE code;
4313 register int regno;
4314 int i;
4316 retry:
4317 code = GET_CODE (x);
4318 switch (code)
4320 case LABEL_REF:
4321 case SYMBOL_REF:
4322 case CONST_INT:
4323 case CONST:
4324 case CONST_DOUBLE:
4325 case PC:
4326 case ADDR_VEC:
4327 case ADDR_DIFF_VEC:
4328 return;
4330 #ifdef HAVE_cc0
4331 case CC0:
4332 cc0_live = 1;
4333 return;
4334 #endif
4336 case CLOBBER:
4337 /* If we are clobbering a MEM, mark any registers inside the address
4338 as being used. */
4339 if (GET_CODE (XEXP (x, 0)) == MEM)
4340 mark_used_regs (needed, live, XEXP (XEXP (x, 0), 0), flags, insn);
4341 return;
4343 case MEM:
4344 /* Don't bother watching stores to mems if this is not the
4345 final pass. We'll not be deleting dead stores this round. */
4346 if (flags & PROP_SCAN_DEAD_CODE)
4348 /* Invalidate the data for the last MEM stored, but only if MEM is
4349 something that can be stored into. */
4350 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
4351 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
4352 ; /* needn't clear the memory set list */
4353 else
4355 rtx temp = mem_set_list;
4356 rtx prev = NULL_RTX;
4357 rtx next;
4359 while (temp)
4361 next = XEXP (temp, 1);
4362 if (anti_dependence (XEXP (temp, 0), x))
4364 /* Splice temp out of the list. */
4365 if (prev)
4366 XEXP (prev, 1) = next;
4367 else
4368 mem_set_list = next;
4369 free_EXPR_LIST_node (temp);
4371 else
4372 prev = temp;
4373 temp = next;
4377 /* If the memory reference had embedded side effects (autoincrement
4378	     address modes), then we may need to kill some entries on the
4379 memory set list. */
4380 if (insn)
4381 invalidate_mems_from_autoinc (insn);
4384 #ifdef AUTO_INC_DEC
4385 if (flags & PROP_AUTOINC)
4386 find_auto_inc (needed, x, insn);
4387 #endif
4388 break;
4390 case SUBREG:
4391 if (GET_CODE (SUBREG_REG (x)) == REG
4392 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER
4393 && (GET_MODE_SIZE (GET_MODE (x))
4394 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
4395 REG_CHANGES_SIZE (REGNO (SUBREG_REG (x))) = 1;
4397 /* While we're here, optimize this case. */
4398 x = SUBREG_REG (x);
4400 /* In case the SUBREG is not of a register, don't optimize */
4401 if (GET_CODE (x) != REG)
4403 mark_used_regs (needed, live, x, flags, insn);
4404 return;
4407 /* ... fall through ... */
4409 case REG:
4410      /* A register is used here (other than being set)
4411	 => mark it as needed.  */
4413 regno = REGNO (x);
4415 int some_needed = REGNO_REG_SET_P (needed, regno);
4416 int some_not_needed = ! some_needed;
4418 SET_REGNO_REG_SET (live, regno);
4420 /* A hard reg in a wide mode may really be multiple registers.
4421 If so, mark all of them just like the first. */
4422 if (regno < FIRST_PSEUDO_REGISTER)
4424 int n;
4426 /* For stack ptr or fixed arg pointer,
4427 nothing below can be necessary, so waste no more time. */
4428 if (regno == STACK_POINTER_REGNUM
4429 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4430 || (regno == HARD_FRAME_POINTER_REGNUM
4431 && (! reload_completed || frame_pointer_needed))
4432 #endif
4433 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4434 || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
4435 #endif
4436 || (regno == FRAME_POINTER_REGNUM
4437 && (! reload_completed || frame_pointer_needed)))
4439 /* If this is a register we are going to try to eliminate,
4440 don't mark it live here. If we are successful in
4441 eliminating it, it need not be live unless it is used for
4442 pseudos, in which case it will have been set live when
4443 it was allocated to the pseudos. If the register will not
4444 be eliminated, reload will set it live at that point. */
4446 if (! TEST_HARD_REG_BIT (elim_reg_set, regno))
4447 regs_ever_live[regno] = 1;
4448 return;
4450 /* No death notes for global register variables;
4451 their values are live after this function exits. */
4452 if (global_regs[regno])
4454 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4455 reg_next_use[regno] = insn;
4456 return;
4459 n = HARD_REGNO_NREGS (regno, GET_MODE (x));
4460 while (--n > 0)
4462 int regno_n = regno + n;
4463 int needed_regno = REGNO_REG_SET_P (needed, regno_n);
4465 SET_REGNO_REG_SET (live, regno_n);
4466 some_needed |= needed_regno;
4467 some_not_needed |= ! needed_regno;
4471 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4473 /* Record where each reg is used, so when the reg
4474 is set we know the next insn that uses it. */
4476 reg_next_use[regno] = insn;
4478 if (flags & PROP_REG_INFO)
4480 if (regno < FIRST_PSEUDO_REGISTER)
4482 /* If a hard reg is being used,
4483 record that this function does use it. */
4485 i = HARD_REGNO_NREGS (regno, GET_MODE (x));
4486 if (i == 0)
4487 i = 1;
4488	      do
4489		regs_ever_live[regno + --i] = 1;
4490 while (i > 0);
4492 else
4494 /* Keep track of which basic block each reg appears in. */
4496 register int blocknum = BLOCK_NUM (insn);
4498 if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
4499 REG_BASIC_BLOCK (regno) = blocknum;
4500 else if (REG_BASIC_BLOCK (regno) != blocknum)
4501 REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
4503 /* Count (weighted) number of uses of each reg. */
4505 REG_N_REFS (regno) += loop_depth;
4509 /* Record and count the insns in which a reg dies.
4510 If it is used in this insn and was dead below the insn
4511 then it dies in this insn. If it was set in this insn,
4512 we do not make a REG_DEAD note; likewise if we already
4513 made such a note. */
4515 if (flags & PROP_DEATH_NOTES)
4517 if (some_not_needed
4518 && ! dead_or_set_p (insn, x)
4519 #if 0
4520 && (regno >= FIRST_PSEUDO_REGISTER || ! fixed_regs[regno])
4521 #endif
4524 /* Check for the case where the register dying partially
4525 overlaps the register set by this insn. */
4526 if (regno < FIRST_PSEUDO_REGISTER
4527 && HARD_REGNO_NREGS (regno, GET_MODE (x)) > 1)
4529 int n = HARD_REGNO_NREGS (regno, GET_MODE (x));
4530 while (--n >= 0)
4531 some_needed |= dead_or_set_regno_p (insn, regno + n);
4534 /* If none of the words in X is needed, make a REG_DEAD
4535 note. Otherwise, we must make partial REG_DEAD notes. */
4536 if (! some_needed)
4538 REG_NOTES (insn)
4539 = alloc_EXPR_LIST (REG_DEAD, x, REG_NOTES (insn));
4540 REG_N_DEATHS (regno)++;
4542 else
4544 int i;
4546 /* Don't make a REG_DEAD note for a part of a register
4547 that is set in the insn. */
4549 for (i = HARD_REGNO_NREGS (regno, GET_MODE (x)) - 1;
4550 i >= 0; i--)
4551 if (!REGNO_REG_SET_P (needed, regno + i)
4552 && ! dead_or_set_regno_p (insn, regno + i))
4553 REG_NOTES (insn)
4554 = (alloc_EXPR_LIST
4555 (REG_DEAD, gen_rtx_REG (reg_raw_mode[regno + i],
4556 regno + i),
4557 REG_NOTES (insn)));
4562 return;
4564 case SET:
4566 register rtx testreg = SET_DEST (x);
4567 int mark_dest = 0;
4569 /* If storing into MEM, don't show it as being used. But do
4570 show the address as being used. */
4571 if (GET_CODE (testreg) == MEM)
4573 #ifdef AUTO_INC_DEC
4574 if (flags & PROP_AUTOINC)
4575 find_auto_inc (needed, testreg, insn);
4576 #endif
4577 mark_used_regs (needed, live, XEXP (testreg, 0), flags, insn);
4578 mark_used_regs (needed, live, SET_SRC (x), flags, insn);
4579 return;
4582 /* Storing in STRICT_LOW_PART is like storing in a reg
4583	 in that this SET might be dead, so ignore it in TESTREG,
4584 but in some other ways it is like using the reg.
4586 Storing in a SUBREG or a bit field is like storing the entire
4587 register in that if the register's value is not used
4588 then this SET is not needed. */
4589 while (GET_CODE (testreg) == STRICT_LOW_PART
4590 || GET_CODE (testreg) == ZERO_EXTRACT
4591 || GET_CODE (testreg) == SIGN_EXTRACT
4592 || GET_CODE (testreg) == SUBREG)
4594 if (GET_CODE (testreg) == SUBREG
4595 && GET_CODE (SUBREG_REG (testreg)) == REG
4596 && REGNO (SUBREG_REG (testreg)) >= FIRST_PSEUDO_REGISTER
4597 && (GET_MODE_SIZE (GET_MODE (testreg))
4598 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (testreg)))))
4599 REG_CHANGES_SIZE (REGNO (SUBREG_REG (testreg))) = 1;
4601 /* Modifying a single register in an alternate mode
4602 does not use any of the old value. But these other
4603 ways of storing in a register do use the old value. */
4604 if (GET_CODE (testreg) == SUBREG
4605 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
4607 else
4608 mark_dest = 1;
4610 testreg = XEXP (testreg, 0);
4613 /* If this is a store into a register,
4614 recursively scan the value being stored. */
4616 if ((GET_CODE (testreg) == PARALLEL
4617 && GET_MODE (testreg) == BLKmode)
4618 || (GET_CODE (testreg) == REG
4619 && (regno = REGNO (testreg), ! (regno == FRAME_POINTER_REGNUM
4620 && (! reload_completed || frame_pointer_needed)))
4621 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4622 && ! (regno == HARD_FRAME_POINTER_REGNUM
4623 && (! reload_completed || frame_pointer_needed))
4624 #endif
4625 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4626 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
4627 #endif
4629 /* We used to exclude global_regs here, but that seems wrong.
4630 Storing in them is like storing in mem. */
4632 mark_used_regs (needed, live, SET_SRC (x), flags, insn);
4633 if (mark_dest)
4634 mark_used_regs (needed, live, SET_DEST (x), flags, insn);
4635 return;
4638 break;
4640 case RETURN:
4641 /* ??? This info should have been gotten from mark_regs_live_at_end,
4642 as applied to the EXIT block, and propagated along the edge that
4643 connects this block to the EXIT. */
4644 break;
4646 case ASM_OPERANDS:
4647 case UNSPEC_VOLATILE:
4648 case TRAP_IF:
4649 case ASM_INPUT:
4651 /* Traditional and volatile asm instructions must be considered to use
4652 and clobber all hard registers, all pseudo-registers and all of
4653 memory. So must TRAP_IF and UNSPEC_VOLATILE operations.
4655 Consider for instance a volatile asm that changes the fpu rounding
4656 mode. An insn should not be moved across this even if it only uses
4657 pseudo-regs because it might give an incorrectly rounded result.
4659 ?!? Unfortunately, marking all hard registers as live causes massive
4660 problems for the register allocator and marking all pseudos as live
4661 creates mountains of uninitialized variable warnings.
4663 So for now, just clear the memory set list and mark any regs
4664 we can find in ASM_OPERANDS as used. */
4665 if (code != ASM_OPERANDS || MEM_VOLATILE_P (x))
4666 free_EXPR_LIST_list (&mem_set_list);
4668 /* For all ASM_OPERANDS, we must traverse the vector of input operands.
4669	 We cannot just fall through here since then we would be confused
4670	 by the ASM_INPUT rtx inside ASM_OPERANDS, which does not indicate
4671	 a traditional asm, unlike its normal usage.  */
4672 if (code == ASM_OPERANDS)
4674 int j;
4676 for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
4677 mark_used_regs (needed, live, ASM_OPERANDS_INPUT (x, j),
4678 flags, insn);
4680 break;
4684 default:
4685 break;
4688 /* Recursively scan the operands of this expression. */
4691 register const char *fmt = GET_RTX_FORMAT (code);
4692 register int i;
4694 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4696 if (fmt[i] == 'e')
4698 /* Tail recursive case: save a function call level. */
4699 if (i == 0)
4701 x = XEXP (x, 0);
4702 goto retry;
4704 mark_used_regs (needed, live, XEXP (x, i), flags, insn);
4706 else if (fmt[i] == 'E')
4708 register int j;
4709 for (j = 0; j < XVECLEN (x, i); j++)
4710 mark_used_regs (needed, live, XVECEXP (x, i, j), flags, insn);
4716 #ifdef AUTO_INC_DEC
4718 static int
4719 try_pre_increment_1 (insn)
4720 rtx insn;
4722 /* Find the next use of this reg. If in same basic block,
4723 make it do pre-increment or pre-decrement if appropriate. */
4724 rtx x = single_set (insn);
4725 HOST_WIDE_INT amount = ((GET_CODE (SET_SRC (x)) == PLUS ? 1 : -1)
4726 * INTVAL (XEXP (SET_SRC (x), 1)));
4727 int regno = REGNO (SET_DEST (x));
4728 rtx y = reg_next_use[regno];
4729 if (y != 0
4730 && BLOCK_NUM (y) == BLOCK_NUM (insn)
4731 /* Don't do this if the reg dies, or gets set in y; a standard addressing
4732 mode would be better. */
4733 && ! dead_or_set_p (y, SET_DEST (x))
4734 && try_pre_increment (y, SET_DEST (x), amount))
4736 /* We have found a suitable auto-increment
4737 and already changed insn Y to do it.
4738 So flush this increment-instruction. */
4739 PUT_CODE (insn, NOTE);
4740 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
4741 NOTE_SOURCE_FILE (insn) = 0;
4742 /* Count a reference to this reg for the increment
4743	 insn we are deleting.  When a reg is incremented,
4744 spilling it is worse, so we want to make that
4745 less likely. */
4746 if (regno >= FIRST_PSEUDO_REGISTER)
4748 REG_N_REFS (regno) += loop_depth;
4749 REG_N_SETS (regno)++;
4751 return 1;
4753 return 0;
4756 /* Try to change INSN so that it does pre-increment or pre-decrement
4757 addressing on register REG in order to add AMOUNT to REG.
4758 AMOUNT is negative for pre-decrement.
4759 Returns 1 if the change could be made.
4760    This checks everything about the validity of the result of modifying INSN.  */
4762 static int
4763 try_pre_increment (insn, reg, amount)
4764 rtx insn, reg;
4765 HOST_WIDE_INT amount;
4767 register rtx use;
4769 /* Nonzero if we can try to make a pre-increment or pre-decrement.
4770 For example, addl $4,r1; movl (r1),... can become movl +(r1),... */
4771 int pre_ok = 0;
4772 /* Nonzero if we can try to make a post-increment or post-decrement.
4773 For example, addl $4,r1; movl -4(r1),... can become movl (r1)+,...
4774 It is possible for both PRE_OK and POST_OK to be nonzero if the machine
4775 supports both pre-inc and post-inc, or both pre-dec and post-dec. */
4776 int post_ok = 0;
4778 /* Nonzero if the opportunity actually requires post-inc or post-dec. */
4779 int do_post = 0;
4781 /* From the sign of increment, see which possibilities are conceivable
4782 on this target machine. */
4783 if (HAVE_PRE_INCREMENT && amount > 0)
4784 pre_ok = 1;
4785 if (HAVE_POST_INCREMENT && amount > 0)
4786 post_ok = 1;
4788 if (HAVE_PRE_DECREMENT && amount < 0)
4789 pre_ok = 1;
4790 if (HAVE_POST_DECREMENT && amount < 0)
4791 post_ok = 1;
4793 if (! (pre_ok || post_ok))
4794 return 0;
4796 /* It is not safe to add a side effect to a jump insn
4797 because if the incremented register is spilled and must be reloaded
4798 there would be no way to store the incremented value back in memory. */
4800 if (GET_CODE (insn) == JUMP_INSN)
4801 return 0;
4803 use = 0;
4804 if (pre_ok)
4805 use = find_use_as_address (PATTERN (insn), reg, 0);
4806 if (post_ok && (use == 0 || use == (rtx) 1))
4808 use = find_use_as_address (PATTERN (insn), reg, -amount);
4809 do_post = 1;
4812 if (use == 0 || use == (rtx) 1)
4813 return 0;
4815 if (GET_MODE_SIZE (GET_MODE (use)) != (amount > 0 ? amount : - amount))
4816 return 0;
4818 /* See if this combination of instruction and addressing mode exists. */
4819 if (! validate_change (insn, &XEXP (use, 0),
4820 gen_rtx_fmt_e (amount > 0
4821 ? (do_post ? POST_INC : PRE_INC)
4822 : (do_post ? POST_DEC : PRE_DEC),
4823 Pmode, reg), 0))
4824 return 0;
4826  /* Record that this insn now has an implicit side effect on REG.  */
4827 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, reg, REG_NOTES (insn));
4828 return 1;
4831 #endif /* AUTO_INC_DEC */
4833 /* Find the place in the rtx X where REG is used as a memory address.
4834 Return the MEM rtx that so uses it.
4835 If PLUSCONST is nonzero, search instead for a memory address equivalent to
4836 (plus REG (const_int PLUSCONST)).
4838 If such an address does not appear, return 0.
4839 If REG appears more than once, or is used other than in such an address,
4840 return (rtx)1. */
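/* E.g. (illustrative): for X = (set (reg 1) (mem:SI (plus (reg 2)
   (const_int 4)))), calling find_use_as_address (X, reg 2, 4) returns
   the MEM, whereas find_use_as_address (X, reg 2, 0) returns (rtx) 1,
   since register 2 then appears other than in a matching address.  */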
4842 rtx
4843 find_use_as_address (x, reg, plusconst)
4844 register rtx x;
4845 rtx reg;
4846 HOST_WIDE_INT plusconst;
4848 enum rtx_code code = GET_CODE (x);
4849 const char *fmt = GET_RTX_FORMAT (code);
4850 register int i;
4851 register rtx value = 0;
4852 register rtx tem;
4854 if (code == MEM && XEXP (x, 0) == reg && plusconst == 0)
4855 return x;
4857 if (code == MEM && GET_CODE (XEXP (x, 0)) == PLUS
4858 && XEXP (XEXP (x, 0), 0) == reg
4859 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4860 && INTVAL (XEXP (XEXP (x, 0), 1)) == plusconst)
4861 return x;
4863 if (code == SIGN_EXTRACT || code == ZERO_EXTRACT)
4865 /* If REG occurs inside a MEM used in a bit-field reference,
4866 that is unacceptable. */
4867 if (find_use_as_address (XEXP (x, 0), reg, 0) != 0)
4868 return (rtx) (HOST_WIDE_INT) 1;
4871 if (x == reg)
4872 return (rtx) (HOST_WIDE_INT) 1;
4874 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4876 if (fmt[i] == 'e')
4878 tem = find_use_as_address (XEXP (x, i), reg, plusconst);
4879 if (value == 0)
4880 value = tem;
4881 else if (tem != 0)
4882 return (rtx) (HOST_WIDE_INT) 1;
4884 else if (fmt[i] == 'E')
4886 register int j;
4887 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4889 tem = find_use_as_address (XVECEXP (x, i, j), reg, plusconst);
4890 if (value == 0)
4891 value = tem;
4892 else if (tem != 0)
4893 return (rtx) (HOST_WIDE_INT) 1;
4898 return value;
4901 /* Write information about registers and basic blocks into FILE.
4902 This is part of making a debugging dump. */
4904 void
4905 dump_flow_info (file)
4906 FILE *file;
4908 register int i;
4909 static const char * const reg_class_names[] = REG_CLASS_NAMES;
4911 fprintf (file, "%d registers.\n", max_regno);
4912 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
4913 if (REG_N_REFS (i))
4915 enum reg_class class, altclass;
4916 fprintf (file, "\nRegister %d used %d times across %d insns",
4917 i, REG_N_REFS (i), REG_LIVE_LENGTH (i));
4918 if (REG_BASIC_BLOCK (i) >= 0)
4919 fprintf (file, " in block %d", REG_BASIC_BLOCK (i));
4920 if (REG_N_SETS (i))
4921 fprintf (file, "; set %d time%s", REG_N_SETS (i),
4922 (REG_N_SETS (i) == 1) ? "" : "s");
4923 if (REG_USERVAR_P (regno_reg_rtx[i]))
4924 fprintf (file, "; user var");
4925 if (REG_N_DEATHS (i) != 1)
4926 fprintf (file, "; dies in %d places", REG_N_DEATHS (i));
4927 if (REG_N_CALLS_CROSSED (i) == 1)
4928 fprintf (file, "; crosses 1 call");
4929 else if (REG_N_CALLS_CROSSED (i))
4930 fprintf (file, "; crosses %d calls", REG_N_CALLS_CROSSED (i));
4931 if (PSEUDO_REGNO_BYTES (i) != UNITS_PER_WORD)
4932 fprintf (file, "; %d bytes", PSEUDO_REGNO_BYTES (i));
4933 class = reg_preferred_class (i);
4934 altclass = reg_alternate_class (i);
4935 if (class != GENERAL_REGS || altclass != ALL_REGS)
4937 if (altclass == ALL_REGS || class == ALL_REGS)
4938 fprintf (file, "; pref %s", reg_class_names[(int) class]);
4939 else if (altclass == NO_REGS)
4940 fprintf (file, "; %s or none", reg_class_names[(int) class]);
4941 else
4942 fprintf (file, "; pref %s, else %s",
4943 reg_class_names[(int) class],
4944 reg_class_names[(int) altclass]);
4946 if (REGNO_POINTER_FLAG (i))
4947 fprintf (file, "; pointer");
4948 fprintf (file, ".\n");
4951 fprintf (file, "\n%d basic blocks, %d edges.\n", n_basic_blocks, n_edges);
4952 for (i = 0; i < n_basic_blocks; i++)
4954 register basic_block bb = BASIC_BLOCK (i);
4955 register int regno;
4956 register edge e;
4958 fprintf (file, "\nBasic block %d: first insn %d, last %d, loop_depth %d.\n",
4959 i, INSN_UID (bb->head), INSN_UID (bb->end), bb->loop_depth);
4961 fprintf (file, "Predecessors: ");
4962 for (e = bb->pred; e ; e = e->pred_next)
4963 dump_edge_info (file, e, 0);
4965 fprintf (file, "\nSuccessors: ");
4966 for (e = bb->succ; e ; e = e->succ_next)
4967 dump_edge_info (file, e, 1);
4969 fprintf (file, "\nRegisters live at start:");
4970 if (bb->global_live_at_start)
4972 for (regno = 0; regno < max_regno; regno++)
4973 if (REGNO_REG_SET_P (bb->global_live_at_start, regno))
4974 fprintf (file, " %d", regno);
4976 else
4977 fprintf (file, " n/a");
4979 fprintf (file, "\nRegisters live at end:");
4980 if (bb->global_live_at_end)
4982 for (regno = 0; regno < max_regno; regno++)
4983 if (REGNO_REG_SET_P (bb->global_live_at_end, regno))
4984 fprintf (file, " %d", regno);
4986 else
4987 fprintf (file, " n/a");
4989 putc('\n', file);
4992 putc('\n', file);
4995 void
4996 debug_flow_info ()
4998 dump_flow_info (stderr);
5001 static void
5002 dump_edge_info (file, e, do_succ)
5003 FILE *file;
5004 edge e;
5005 int do_succ;
5007 basic_block side = (do_succ ? e->dest : e->src);
5009 if (side == ENTRY_BLOCK_PTR)
5010 fputs (" ENTRY", file);
5011 else if (side == EXIT_BLOCK_PTR)
5012 fputs (" EXIT", file);
5013 else
5014 fprintf (file, " %d", side->index);
5016 if (e->flags)
5018 static const char * const bitnames[] = {
5019 "fallthru", "crit", "ab", "abcall", "eh", "fake"
5021 int comma = 0;
5022 int i, flags = e->flags;
5024 fputc (' ', file);
5025 fputc ('(', file);
5026 for (i = 0; flags; i++)
5027 if (flags & (1 << i))
5029 flags &= ~(1 << i);
5031 if (comma)
5032 fputc (',', file);
5033 if (i < (int)(sizeof (bitnames) / sizeof (*bitnames)))
5034 fputs (bitnames[i], file);
5035 else
5036 fprintf (file, "%d", i);
5037 comma = 1;
5039 fputc (')', file);
5044 /* Like print_rtl, but also print out live information for the start of each
5045 basic block. */
5047 void
5048 print_rtl_with_bb (outf, rtx_first)
5049 FILE *outf;
5050 rtx rtx_first;
5052 register rtx tmp_rtx;
5054 if (rtx_first == 0)
5055 fprintf (outf, "(nil)\n");
5056 else
5058 int i;
5059 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
5060 int max_uid = get_max_uid ();
5061 basic_block *start = (basic_block *)
5062 xcalloc (max_uid, sizeof (basic_block));
5063 basic_block *end = (basic_block *)
5064 xcalloc (max_uid, sizeof (basic_block));
5065 enum bb_state *in_bb_p = (enum bb_state *)
5066 xcalloc (max_uid, sizeof (enum bb_state));
5068 for (i = n_basic_blocks - 1; i >= 0; i--)
5070 basic_block bb = BASIC_BLOCK (i);
5071 rtx x;
5073 start[INSN_UID (bb->head)] = bb;
5074 end[INSN_UID (bb->end)] = bb;
5075 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
5077 enum bb_state state = IN_MULTIPLE_BB;
5078 if (in_bb_p[INSN_UID(x)] == NOT_IN_BB)
5079 state = IN_ONE_BB;
5080 in_bb_p[INSN_UID(x)] = state;
5082 if (x == bb->end)
5083 break;
5087 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
5089 int did_output;
5090 basic_block bb;
5092 if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
5094 fprintf (outf, ";; Start of basic block %d, registers live:",
5095 bb->index);
5097 EXECUTE_IF_SET_IN_REG_SET (bb->global_live_at_start, 0, i,
5099 fprintf (outf, " %d", i);
5100 if (i < FIRST_PSEUDO_REGISTER)
5101 fprintf (outf, " [%s]",
5102 reg_names[i]);
5104 putc ('\n', outf);
5107 if (in_bb_p[INSN_UID(tmp_rtx)] == NOT_IN_BB
5108 && GET_CODE (tmp_rtx) != NOTE
5109 && GET_CODE (tmp_rtx) != BARRIER
5110 && ! obey_regdecls)
5111 fprintf (outf, ";; Insn is not within a basic block\n");
5112 else if (in_bb_p[INSN_UID(tmp_rtx)] == IN_MULTIPLE_BB)
5113 fprintf (outf, ";; Insn is in multiple basic blocks\n");
5115 did_output = print_rtl_single (outf, tmp_rtx);
5117 if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
5118 fprintf (outf, ";; End of basic block %d\n", bb->index);
5120 if (did_output)
5121 putc ('\n', outf);
5124 free (start);
5125 free (end);
5126 free (in_bb_p);
5129 if (current_function_epilogue_delay_list != 0)
5131 fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
5132 for (tmp_rtx = current_function_epilogue_delay_list; tmp_rtx != 0;
5133 tmp_rtx = XEXP (tmp_rtx, 1))
5134 print_rtl_single (outf, XEXP (tmp_rtx, 0));
5138 /* Compute dominator relationships using new flow graph structures. */
5139 void
5140 compute_flow_dominators (dominators, post_dominators)
5141 sbitmap *dominators;
5142 sbitmap *post_dominators;
5144 int bb;
5145 sbitmap *temp_bitmap;
5146 edge e;
5147 basic_block *worklist, *tos;
5149 /* Allocate a worklist array/queue. Entries are only added to the
5150 list if they were not already on the list. So the size is
5151 bounded by the number of basic blocks. */
5152 tos = worklist = (basic_block *) xmalloc (sizeof (basic_block)
5153 * n_basic_blocks);
5155 temp_bitmap = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
5156 sbitmap_vector_zero (temp_bitmap, n_basic_blocks);
5158 if (dominators)
5160 /* The optimistic setting of dominators requires us to put every
5161 block on the work list initially. */
5162 for (bb = 0; bb < n_basic_blocks; bb++)
5164 *tos++ = BASIC_BLOCK (bb);
5165 BASIC_BLOCK (bb)->aux = BASIC_BLOCK (bb);
5168 /* We want a maximal solution, so initially assume everything dominates
5169 everything else. */
5170 sbitmap_vector_ones (dominators, n_basic_blocks);
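      /* The iteration below solves the usual dataflow equation
            Dom (b) = { b } U (intersection over preds p of Dom (p)),
         with the intersection taken to be the null set when a
         predecessor is the ENTRY block.  */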
5172 /* Mark successors of the entry block so we can identify them below. */
5173 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
5174 e->dest->aux = ENTRY_BLOCK_PTR;
5176 /* Iterate until the worklist is empty. */
5177 while (tos != worklist)
5179 /* Take the first entry off the worklist. */
5180 basic_block b = *--tos;
5181 bb = b->index;
5183 /* Compute the intersection of the dominators of all the
5184 predecessor blocks.
5186 If one of the predecessor blocks is the ENTRY block, then the
5187 intersection of the dominators of the predecessor blocks is
5188 defined as the null set. We can identify such blocks by the
5189 special value in the AUX field in the block structure. */
5190 if (b->aux == ENTRY_BLOCK_PTR)
5192 /* Do not clear the aux field for blocks which are
5193	     successors of the ENTRY block.  That way we never
5194 add them to the worklist again.
5196	     The intersection of the dominators of the preds of this block is
5197 defined as the null set. */
5198 sbitmap_zero (temp_bitmap[bb]);
5200 else
5202 /* Clear the aux field of this block so it can be added to
5203 the worklist again if necessary. */
5204 b->aux = NULL;
5205 sbitmap_intersection_of_preds (temp_bitmap[bb], dominators, bb);
5208 /* Make sure each block always dominates itself. */
5209 SET_BIT (temp_bitmap[bb], bb);
5211 /* If the out state of this block changed, then we need to
5212 add the successors of this block to the worklist if they
5213 are not already on the worklist. */
5214 if (sbitmap_a_and_b (dominators[bb], dominators[bb], temp_bitmap[bb]))
5216 for (e = b->succ; e; e = e->succ_next)
5218 if (!e->dest->aux && e->dest != EXIT_BLOCK_PTR)
5220 *tos++ = e->dest;
5221 e->dest->aux = e;
5228 if (post_dominators)
5230 /* The optimistic setting of dominators requires us to put every
5231 block on the work list initially. */
5232 for (bb = 0; bb < n_basic_blocks; bb++)
5234 *tos++ = BASIC_BLOCK (bb);
5235 BASIC_BLOCK (bb)->aux = BASIC_BLOCK (bb);
5238 /* We want a maximal solution, so initially assume everything post
5239 dominates everything else. */
5240 sbitmap_vector_ones (post_dominators, n_basic_blocks);
5242 /* Mark predecessors of the exit block so we can identify them below. */
5243 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
5244 e->src->aux = EXIT_BLOCK_PTR;
5246 /* Iterate until the worklist is empty. */
5247 while (tos != worklist)
5249 /* Take the first entry off the worklist. */
5250 basic_block b = *--tos;
5251 bb = b->index;
5253 /* Compute the intersection of the post dominators of all the
5254 successor blocks.
5256 If one of the successor blocks is the EXIT block, then the
5257	     intersection of the post dominators of the successor blocks is
5258 defined as the null set. We can identify such blocks by the
5259 special value in the AUX field in the block structure. */
5260 if (b->aux == EXIT_BLOCK_PTR)
5262 /* Do not clear the aux field for blocks which are
5263	     predecessors of the EXIT block.  That way we never
5264 add them to the worklist again.
5266	     The intersection of the post dominators of the succs of this block is
5267 defined as the null set. */
5268 sbitmap_zero (temp_bitmap[bb]);
5270 else
5272 /* Clear the aux field of this block so it can be added to
5273 the worklist again if necessary. */
5274 b->aux = NULL;
5275 sbitmap_intersection_of_succs (temp_bitmap[bb],
5276 post_dominators, bb);
5279 /* Make sure each block always post dominates itself. */
5280 SET_BIT (temp_bitmap[bb], bb);
5282 /* If the out state of this block changed, then we need to
5283	     add the predecessors of this block to the worklist if they
5284 are not already on the worklist. */
5285 if (sbitmap_a_and_b (post_dominators[bb],
5286 post_dominators[bb],
5287 temp_bitmap[bb]))
5289 for (e = b->pred; e; e = e->pred_next)
5291 if (!e->src->aux && e->src != ENTRY_BLOCK_PTR)
5293 *tos++ = e->src;
5294 e->src->aux = e;
5300 free (temp_bitmap);
5303 /* Given DOMINATORS, compute the immediate dominators into IDOM. */
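/* Illustrative example: if Dom (5) = { 0, 2, 5 } and Dom (2) = { 0, 2 },
   then tmp[5] starts as { 0, 2 }; subtracting tmp[2] = { 0 } leaves { 2 },
   so idom[5] = 2.  */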
5305 void
5306 compute_immediate_dominators (idom, dominators)
5307 int *idom;
5308 sbitmap *dominators;
5310 sbitmap *tmp;
5311 int b;
5313 tmp = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
5315 /* Begin with tmp(n) = dom(n) - { n }. */
5316 for (b = n_basic_blocks; --b >= 0; )
5318 sbitmap_copy (tmp[b], dominators[b]);
5319 RESET_BIT (tmp[b], b);
5322   /* Subtract out all of our dominators' dominators.  */
5323 for (b = n_basic_blocks; --b >= 0; )
5325 sbitmap tmp_b = tmp[b];
5326 int s;
5328 for (s = n_basic_blocks; --s >= 0; )
5329 if (TEST_BIT (tmp_b, s))
5330 sbitmap_difference (tmp_b, tmp_b, tmp[s]);
5333 /* Find the one bit set in the bitmap and put it in the output array. */
5334 for (b = n_basic_blocks; --b >= 0; )
5336 int t;
5337 EXECUTE_IF_SET_IN_SBITMAP (tmp[b], 0, t, { idom[b] = t; });
5340 sbitmap_vector_free (tmp);
5343 /* Count for a single SET rtx, X. */
5345 static void
5346 count_reg_sets_1 (x)
5347 rtx x;
5349 register int regno;
5350 register rtx reg = SET_DEST (x);
5352 /* Find the register that's set/clobbered. */
5353 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
5354 || GET_CODE (reg) == SIGN_EXTRACT
5355 || GET_CODE (reg) == STRICT_LOW_PART)
5356 reg = XEXP (reg, 0);
5358 if (GET_CODE (reg) == PARALLEL
5359 && GET_MODE (reg) == BLKmode)
5361 register int i;
5362 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
5363 count_reg_sets_1 (XVECEXP (reg, 0, i));
5364 return;
5367 if (GET_CODE (reg) == REG)
5369 regno = REGNO (reg);
5370 if (regno >= FIRST_PSEUDO_REGISTER)
5372 /* Count (weighted) references, stores, etc. This counts a
5373 register twice if it is modified, but that is correct. */
5374 REG_N_SETS (regno)++;
5376 REG_N_REFS (regno) += loop_depth;
5381 /* Increment REG_N_SETS for each SET or CLOBBER found in X; also increment
5382 REG_N_REFS by the current loop depth for each SET or CLOBBER found. */
5384 static void
5385 count_reg_sets (x)
5386 rtx x;
5388 register RTX_CODE code = GET_CODE (x);
5390 if (code == SET || code == CLOBBER)
5391 count_reg_sets_1 (x);
5392 else if (code == PARALLEL)
5394 register int i;
5395 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
5397 code = GET_CODE (XVECEXP (x, 0, i));
5398 if (code == SET || code == CLOBBER)
5399 count_reg_sets_1 (XVECEXP (x, 0, i));
5404 /* Increment REG_N_REFS by the current loop depth for each register reference
5405 found in X. */
5407 static void
5408 count_reg_references (x)
5409 rtx x;
5411 register RTX_CODE code;
5413 retry:
5414 code = GET_CODE (x);
5415 switch (code)
5417 case LABEL_REF:
5418 case SYMBOL_REF:
5419 case CONST_INT:
5420 case CONST:
5421 case CONST_DOUBLE:
5422 case PC:
5423 case ADDR_VEC:
5424 case ADDR_DIFF_VEC:
5425 case ASM_INPUT:
5426 return;
5428 #ifdef HAVE_cc0
5429 case CC0:
5430 return;
5431 #endif
5433 case CLOBBER:
5434 /* If we are clobbering a MEM, mark any registers inside the address
5435 as being used. */
5436 if (GET_CODE (XEXP (x, 0)) == MEM)
5437 count_reg_references (XEXP (XEXP (x, 0), 0));
5438 return;
5440 case SUBREG:
5441 /* While we're here, optimize this case. */
5442 x = SUBREG_REG (x);
5444 /* In case the SUBREG is not of a register, don't optimize */
5445 if (GET_CODE (x) != REG)
5447 count_reg_references (x);
5448 return;
5451 /* ... fall through ... */
5453 case REG:
5454 if (REGNO (x) >= FIRST_PSEUDO_REGISTER)
5455 REG_N_REFS (REGNO (x)) += loop_depth;
5456 return;
5458 case SET:
5460 register rtx testreg = SET_DEST (x);
5461 int mark_dest = 0;
5463 /* If storing into MEM, don't show it as being used. But do
5464 show the address as being used. */
5465 if (GET_CODE (testreg) == MEM)
5467 count_reg_references (XEXP (testreg, 0));
5468 count_reg_references (SET_SRC (x));
5469 return;
5472 /* Storing in STRICT_LOW_PART is like storing in a reg
5473	 in that this SET might be dead, so ignore it in TESTREG,
5474 but in some other ways it is like using the reg.
5476 Storing in a SUBREG or a bit field is like storing the entire
5477 register in that if the register's value is not used
5478 then this SET is not needed. */
5479 while (GET_CODE (testreg) == STRICT_LOW_PART
5480 || GET_CODE (testreg) == ZERO_EXTRACT
5481 || GET_CODE (testreg) == SIGN_EXTRACT
5482 || GET_CODE (testreg) == SUBREG)
5484 /* Modifying a single register in an alternate mode
5485 does not use any of the old value. But these other
5486 ways of storing in a register do use the old value. */
5487 if (GET_CODE (testreg) == SUBREG
5488 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
5490 else
5491 mark_dest = 1;
5493 testreg = XEXP (testreg, 0);
5496 /* If this is a store into a register,
5497 recursively scan the value being stored. */
5499 if ((GET_CODE (testreg) == PARALLEL
5500 && GET_MODE (testreg) == BLKmode)
5501 || GET_CODE (testreg) == REG)
5503 count_reg_references (SET_SRC (x));
5504 if (mark_dest)
5505 count_reg_references (SET_DEST (x));
5506 return;
5509 break;
5511 default:
5512 break;
5515 /* Recursively scan the operands of this expression. */
5518 register const char *fmt = GET_RTX_FORMAT (code);
5519 register int i;
5521 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5523 if (fmt[i] == 'e')
5525 /* Tail recursive case: save a function call level. */
5526 if (i == 0)
5528 x = XEXP (x, 0);
5529 goto retry;
5531 count_reg_references (XEXP (x, i));
5533 else if (fmt[i] == 'E')
5535 register int j;
5536 for (j = 0; j < XVECLEN (x, i); j++)
5537 count_reg_references (XVECEXP (x, i, j));
5543 /* Recompute register set/reference counts immediately prior to register
5544 allocation.
5546 This avoids problems with set/reference counts changing to/from values
5547 which have special meanings to the register allocators.
5549 Additionally, the reference counts are the primary component used by the
5550 register allocators to prioritize pseudos for allocation to hard regs.
5551 More accurate reference counts generally lead to better register allocation.
5553 F is the first insn to be scanned.
5554 LOOP_STEP denotes how much loop_depth should be incremented per
5555 loop nesting level in order to increase the ref count more for references
5556 in a loop.
5558 It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and
5559 possibly other information which is used by the register allocators. */
5561 void
5562 recompute_reg_usage (f, loop_step)
5563 rtx f ATTRIBUTE_UNUSED;
5564 int loop_step ATTRIBUTE_UNUSED;
5566 rtx insn;
5567 int i, max_reg;
5568 int index;
5570 /* Clear out the old data. */
5571 max_reg = max_reg_num ();
5572 for (i = FIRST_PSEUDO_REGISTER; i < max_reg; i++)
5574 REG_N_SETS (i) = 0;
5575 REG_N_REFS (i) = 0;
5578 /* Scan each insn in the chain and count how many times each register is
5579 set/used. */
5580 loop_depth = 1;
5581 for (index = 0; index < n_basic_blocks; index++)
5583 basic_block bb = BASIC_BLOCK (index);
5584 loop_depth = bb->loop_depth;
5585 for (insn = bb->head; insn; insn = NEXT_INSN (insn))
5587 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
5589 rtx links;
5591 /* This call will increment REG_N_SETS for each SET or CLOBBER
5592 of a register in INSN. It will also increment REG_N_REFS
5593 by the loop depth for each set of a register in INSN. */
5594 count_reg_sets (PATTERN (insn));
5596 /* count_reg_sets does not detect autoincrement address modes, so
5597 detect them here by looking at the notes attached to INSN. */
5598 for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
5600 if (REG_NOTE_KIND (links) == REG_INC)
5601 /* Count (weighted) references, stores, etc. This counts a
5602 register twice if it is modified, but that is correct. */
5603 REG_N_SETS (REGNO (XEXP (links, 0)))++;
5606 /* This call will increment REG_N_REFS by the current loop depth for
5607 each reference to a register in INSN. */
5608 count_reg_references (PATTERN (insn));
5610 /* count_reg_references will not include counts for arguments to
5611 function calls, so detect them here by examining the
5612 CALL_INSN_FUNCTION_USAGE data. */
5613 if (GET_CODE (insn) == CALL_INSN)
5615 rtx note;
5617 for (note = CALL_INSN_FUNCTION_USAGE (insn);
5618 note;
5619 note = XEXP (note, 1))
5620 if (GET_CODE (XEXP (note, 0)) == USE)
5621 count_reg_references (XEXP (XEXP (note, 0), 0));
5624 if (insn == bb->end)
5625 break;
5630 /* Optionally removes all the REG_DEAD and REG_UNUSED notes from a set of
5631 blocks. If BLOCKS is NULL, assume the universal set. Returns a count
5632 of the number of registers that died. */
5634 int
5635 count_or_remove_death_notes (blocks, kill)
5636 sbitmap blocks;
5637 int kill;
5639 int i, count = 0;
5641 for (i = n_basic_blocks - 1; i >= 0; --i)
5643 basic_block bb;
5644 rtx insn;
5646 if (blocks && ! TEST_BIT (blocks, i))
5647 continue;
5649 bb = BASIC_BLOCK (i);
5651 for (insn = bb->head; ; insn = NEXT_INSN (insn))
5653 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
5655 rtx *pprev = &REG_NOTES (insn);
5656 rtx link = *pprev;
5658 while (link)
5660 switch (REG_NOTE_KIND (link))
5662 case REG_DEAD:
5663 if (GET_CODE (XEXP (link, 0)) == REG)
5665 rtx reg = XEXP (link, 0);
5666 int n;
5668 if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
5669 n = 1;
5670 else
5671 n = HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg));
5672 count += n;
5674 /* FALLTHRU */
5676 case REG_UNUSED:
5677 if (kill)
5679 rtx next = XEXP (link, 1);
5680 free_EXPR_LIST_node (link);
5681 *pprev = link = next;
5682 break;
5684 /* FALLTHRU */
5686 default:
5687 pprev = &XEXP (link, 1);
5688 link = *pprev;
5689 break;
5694 if (insn == bb->end)
5695 break;
5699 return count;
5702 /* Record INSN's block as BB. */
5704 void
5705 set_block_for_insn (insn, bb)
5706 rtx insn;
5707 basic_block bb;
5709 size_t uid = INSN_UID (insn);
5710 if (uid >= basic_block_for_insn->num_elements)
5712 int new_size;
5714 /* Add one-eighth the size so we don't keep calling xrealloc. */
5715 new_size = uid + (uid + 7) / 8;
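      /* E.g., uid 80 yields new_size = 80 + 87/8 = 90, i.e. about a
	 12% overshoot.  */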
5717 VARRAY_GROW (basic_block_for_insn, new_size);
5719 VARRAY_BB (basic_block_for_insn, uid) = bb;
5722 /* Record INSN's block number as BB. */
5723 /* ??? This has got to go. */
5725 void
5726 set_block_num (insn, bb)
5727 rtx insn;
5728 int bb;
5730 set_block_for_insn (insn, BASIC_BLOCK (bb));
5733 /* Verify the CFG consistency.  This function checks some CFG invariants and
5734    aborts when something is wrong.  Hopefully this function will help to
5735    convert many optimization passes to preserve CFG consistency.
5737    Currently it does the following checks:
5739 - test head/end pointers
5740 - overlapping of basic blocks
5741    - edge list correctness
5742 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
5743    - tails of basic blocks (ensure that the boundary is necessary)
5744 - scans body of the basic block for JUMP_INSN, CODE_LABEL
5745 and NOTE_INSN_BASIC_BLOCK
5746 - check that all insns are in the basic blocks
5747 (except the switch handling code, barriers and notes)
5749    In the future it can be extended to check a lot of other stuff as well
5750 (reachability of basic blocks, life information, etc. etc.). */
5752 void
5753 verify_flow_info ()
5755 const int max_uid = get_max_uid ();
5756 const rtx rtx_first = get_insns ();
5757 basic_block *bb_info;
5758 rtx x;
5759 int i, err = 0;
5761 bb_info = (basic_block *) xcalloc (max_uid, sizeof (basic_block));
5763   /* The first pass checks head/end pointers and sets the bb_info array used by
5764 later passes. */
5765 for (i = n_basic_blocks - 1; i >= 0; i--)
5767 basic_block bb = BASIC_BLOCK (i);
5769 /* Check the head pointer and make sure that it is pointing into
5770	 the insn list.  */
5771 for (x = rtx_first; x != NULL_RTX; x = NEXT_INSN (x))
5772 if (x == bb->head)
5773 break;
5774 if (!x)
5776 error ("Head insn %d for block %d not found in the insn stream.",
5777 INSN_UID (bb->head), bb->index);
5778 err = 1;
5781 /* Check the end pointer and make sure that it is pointing into
5782	 the insn list.  */
5783 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
5785 if (bb_info[INSN_UID (x)] != NULL)
5787 error ("Insn %d is in multiple basic blocks (%d and %d)",
5788 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
5789 err = 1;
5791 bb_info[INSN_UID (x)] = bb;
5793 if (x == bb->end)
5794 break;
5796 if (!x)
5798 error ("End insn %d for block %d not found in the insn stream.",
5799 INSN_UID (bb->end), bb->index);
5800 err = 1;
5804 /* Now check the basic blocks (boundaries etc.) */
5805 for (i = n_basic_blocks - 1; i >= 0; i--)
5807 basic_block bb = BASIC_BLOCK (i);
5808      /* Check correctness of edge lists.  */
5809 edge e;
5811 e = bb->succ;
5812 while (e)
5814 if (e->src != bb)
5816 fprintf (stderr, "verify_flow_info: Basic block %d succ edge is corrupted\n",
5817 bb->index);
5818 fprintf (stderr, "Predecessor: ");
5819 dump_edge_info (stderr, e, 0);
5820 fprintf (stderr, "\nSuccessor: ");
5821 dump_edge_info (stderr, e, 1);
5822 fflush (stderr);
5823 err = 1;
5825 if (e->dest != EXIT_BLOCK_PTR)
5827 edge e2 = e->dest->pred;
5828 while (e2 && e2 != e)
5829 e2 = e2->pred_next;
5830 if (!e2)
5832 error ("Basic block %i edge lists are corrupted", bb->index);
5833 err = 1;
5836 e = e->succ_next;
5839 e = bb->pred;
5840 while (e)
5842 if (e->dest != bb)
5844 error ("Basic block %d pred edge is corrupted", bb->index);
5845 fputs ("Predecessor: ", stderr);
5846 dump_edge_info (stderr, e, 0);
5847 fputs ("\nSuccessor: ", stderr);
5848 dump_edge_info (stderr, e, 1);
5849 fputc ('\n', stderr);
5850 err = 1;
5852 if (e->src != ENTRY_BLOCK_PTR)
5854 edge e2 = e->src->succ;
5855 while (e2 && e2 != e)
5856 e2 = e2->succ_next;
5857 if (!e2)
5859 error ("Basic block %i edge lists are corrupted", bb->index);
5860 err = 1;
5863 e = e->pred_next;
5866      /* OK, pointers are correct.  Now check the header of the basic
5867         block.  It ought to contain an optional CODE_LABEL followed
5868	 by NOTE_BASIC_BLOCK.  */
5869 x = bb->head;
5870 if (GET_CODE (x) == CODE_LABEL)
5872 if (bb->end == x)
5874 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
5875 bb->index);
5876 err = 1;
5878 x = NEXT_INSN (x);
5880 if (GET_CODE (x) != NOTE
5881 || NOTE_LINE_NUMBER (x) != NOTE_INSN_BASIC_BLOCK
5882 || NOTE_BASIC_BLOCK (x) != bb)
5884 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d\n",
5885 bb->index);
5886 err = 1;
5889 if (bb->end == x)
5891 /* Do checks for empty blocks here */
5893 else
5895 x = NEXT_INSN (x);
5896 while (x)
5898 if (GET_CODE (x) == NOTE
5899 && NOTE_LINE_NUMBER (x) == NOTE_INSN_BASIC_BLOCK)
5901 error ("NOTE_INSN_BASIC_BLOCK %d in the middle of basic block %d",
5902 INSN_UID (x), bb->index);
5903 err = 1;
5906 if (x == bb->end)
5907 break;
5909 if (GET_CODE (x) == JUMP_INSN
5910 || GET_CODE (x) == CODE_LABEL
5911 || GET_CODE (x) == BARRIER)
5913 error ("In basic block %d:", bb->index);
5914 fatal_insn ("Flow control insn inside a basic block", x);
5917 x = NEXT_INSN (x);
5922 x = rtx_first;
5923 while (x)
5925 if (!bb_info[INSN_UID (x)])
5927 switch (GET_CODE (x))
5929 case BARRIER:
5930 case NOTE:
5931 break;
5933 case CODE_LABEL:
5934	    /* An addr_vec is placed outside any basic block.  */
5935 if (NEXT_INSN (x)
5936 && GET_CODE (NEXT_INSN (x)) == JUMP_INSN
5937 && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
5938 || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC))
5940 x = NEXT_INSN (x);
5943 /* But in any case, non-deletable labels can appear anywhere. */
5944 break;
5946 default:
5947 fatal_insn ("Insn outside basic block", x);
5951 x = NEXT_INSN (x);
5954 if (err)
5955 abort ();
5957 /* Clean up. */
5958 free (bb_info);
5961 /* Functions to access an edge list with a vector representation.
5962 Enough data is kept such that given an index number, the
5963    pred and succ that edge represents can be determined, or
5964    given a pred and a succ, its index number can be returned.
5965    This allows algorithms which consume a lot of memory to
5966 represent the normally full matrix of edge (pred,succ) with a
5967 single indexed vector, edge (EDGE_INDEX (pred, succ)), with no
5968 wasted space in the client code due to sparse flow graphs. */
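/* E.g. (illustrative sizes), a flow graph of 100 basic blocks with 150
   edges needs only a 150-element vector here, whereas a full (pred,succ)
   matrix would need 102 * 102 entries once the entry and exit blocks
   are counted.  */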
5970 /* This function initializes the edge list.  Basically the entire
5971    flowgraph is processed, and all edges are assigned a number,
5972    and the data structure is filled in.  */
5973 struct edge_list *
5974 create_edge_list ()
5976 struct edge_list *elist;
5977 edge e;
5978 int num_edges;
5979 int x;
5980 int block_count;
5982 block_count = n_basic_blocks + 2; /* Include the entry and exit blocks. */
5984 num_edges = 0;
5986 /* Determine the number of edges in the flow graph by counting successor
5987 edges on each basic block. */
5988 for (x = 0; x < n_basic_blocks; x++)
5990 basic_block bb = BASIC_BLOCK (x);
5992 for (e = bb->succ; e; e = e->succ_next)
5993 num_edges++;
5995 /* Don't forget successors of the entry block. */
5996 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
5997 num_edges++;
5999 elist = (struct edge_list *) xmalloc (sizeof (struct edge_list));
6000 elist->num_blocks = block_count;
6001 elist->num_edges = num_edges;
6002 elist->index_to_edge = (edge *) xmalloc (sizeof (edge) * num_edges);
6004 num_edges = 0;
6006 /* Follow successors of the entry block, and register these edges. */
6007 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6009 elist->index_to_edge[num_edges] = e;
6010 num_edges++;
6013 for (x = 0; x < n_basic_blocks; x++)
6015 basic_block bb = BASIC_BLOCK (x);
6017 /* Follow all successors of blocks, and register these edges. */
6018 for (e = bb->succ; e; e = e->succ_next)
6020 elist->index_to_edge[num_edges] = e;
6021 num_edges++;
6024 return elist;
6027 /* This function frees the memory associated with an edge list. */
6028 void
6029 free_edge_list (elist)
6030 struct edge_list *elist;
6032 if (elist)
6034 free (elist->index_to_edge);
6035 free (elist);
6039 /* This function provides debug output showing an edge list. */
6040 void
6041 print_edge_list (f, elist)
6042 FILE *f;
6043 struct edge_list *elist;
6045 int x;
6046 fprintf (f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
6047 elist->num_blocks - 2, elist->num_edges);
6049 for (x = 0; x < elist->num_edges; x++)
6051 fprintf (f, " %-4d - edge(", x);
6052 if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR)
6053 fprintf (f, "entry,");
6054 else
6055 fprintf (f, "%d,", INDEX_EDGE_PRED_BB (elist, x)->index);
6057 if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR)
6058 fprintf (f, "exit)\n");
6059 else
6060 fprintf (f, "%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
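/* Typical debugging use (a sketch; F is any open dump stream):

     struct edge_list *elist = create_edge_list ();
     print_edge_list (f, elist);
     verify_edge_list (f, elist);
     free_edge_list (elist);  */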
6064 /* This function provides an internal consistency check of an edge list,
6065 verifying that all edges are present, and that there are no
6066 extra edges. */
6067 void
6068 verify_edge_list (f, elist)
6069 FILE *f;
6070 struct edge_list *elist;
6072 int x, pred, succ, index;
6073 edge e;
6075 for (x = 0; x < n_basic_blocks; x++)
6077 basic_block bb = BASIC_BLOCK (x);
6079 for (e = bb->succ; e; e = e->succ_next)
6081 pred = e->src->index;
6082 succ = e->dest->index;
6083 index = EDGE_INDEX (elist, e->src, e->dest);
6084 if (index == EDGE_INDEX_NO_EDGE)
6086 fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
6087 continue;
6089 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
6090 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
6091 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
6092 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
6093 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
6094 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
6097 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6099 pred = e->src->index;
6100 succ = e->dest->index;
6101 index = EDGE_INDEX (elist, e->src, e->dest);
6102 if (index == EDGE_INDEX_NO_EDGE)
6104 fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
6105 continue;
6107 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
6108 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
6109 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
6110 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
6111 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
6112 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
6114 /* We've verified that all the edges are in the list; now let's make sure
6115 there are no spurious edges in the list. */
6117 for (pred = 0 ; pred < n_basic_blocks; pred++)
6118 for (succ = 0 ; succ < n_basic_blocks; succ++)
6120 basic_block p = BASIC_BLOCK (pred);
6121 basic_block s = BASIC_BLOCK (succ);
6123 int found_edge = 0;
6125 for (e = p->succ; e; e = e->succ_next)
6126 if (e->dest == s)
6128 found_edge = 1;
6129 break;
6131 for (e = s->pred; e; e = e->pred_next)
6132 if (e->src == p)
6134 found_edge = 1;
6135 break;
6137 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
6138 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6139 fprintf (f, "*** Edge (%d, %d) appears not to have an index\n",
6140 pred, succ);
6141 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
6142 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6143 fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
6144 pred, succ, EDGE_INDEX (elist, BASIC_BLOCK (pred),
6145 BASIC_BLOCK (succ)));
6147 for (succ = 0 ; succ < n_basic_blocks; succ++)
6149 basic_block p = ENTRY_BLOCK_PTR;
6150 basic_block s = BASIC_BLOCK (succ);
6152 int found_edge = 0;
6154 for (e = p->succ; e; e = e->succ_next)
6155 if (e->dest == s)
6157 found_edge = 1;
6158 break;
6160 for (e = s->pred; e; e = e->pred_next)
6161 if (e->src == p)
6163 found_edge = 1;
6164 break;
6166 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
6167 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6168 fprintf (f, "*** Edge (entry, %d) appears not to have an index\n",
6169 succ);
6170 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
6171 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6172 fprintf (f, "*** Edge (entry, %d) has index %d, but no edge exists\n",
6173 succ, EDGE_INDEX (elist, ENTRY_BLOCK_PTR,
6174 BASIC_BLOCK (succ)));
6176 for (pred = 0 ; pred < n_basic_blocks; pred++)
6178 basic_block p = BASIC_BLOCK (pred);
6179 basic_block s = EXIT_BLOCK_PTR;
6181 int found_edge = 0;
6183 for (e = p->succ; e; e = e->succ_next)
6184 if (e->dest == s)
6186 found_edge = 1;
6187 break;
6189 for (e = s->pred; e; e = e->pred_next)
6190 if (e->src == p)
6192 found_edge = 1;
6193 break;
6195 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
6196 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6197 fprintf (f, "*** Edge (%d, exit) appears not to have an index\n",
6198 pred);
6199 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
6200 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6201 fprintf (f, "*** Edge (%d, exit) has index %d, but no edge exists\n",
6202 pred, EDGE_INDEX (elist, BASIC_BLOCK (pred),
6203 EXIT_BLOCK_PTR));
6207 /* This routine will determine which edge, if any, lies between a given
6208 predecessor and successor, returning EDGE_INDEX_NO_EDGE if none. */
6210 int
6211 find_edge_index (edge_list, pred, succ)
6212 struct edge_list *edge_list;
6213 basic_block pred, succ;
6215 int x;
6216 for (x = 0; x < NUM_EDGES (edge_list); x++)
6218 if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
6219 && INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
6220 return x;
6222 return (EDGE_INDEX_NO_EDGE);
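/* Note that this lookup is a linear scan over every edge, so a client
   making many queries typically hoists the result, e.g. (a sketch):

     int ix = EDGE_INDEX (edge_list, bb, EXIT_BLOCK_PTR);
     if (ix != EDGE_INDEX_NO_EDGE)
       ... reuse ix as this edge's slot ...  */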
6225 /* This function will remove an edge from the flow graph. */
6226 static void
6227 remove_edge (e)
6228 edge e;
6230 edge last_pred = NULL;
6231 edge last_succ = NULL;
6232 edge tmp;
6233 basic_block src, dest;
6234 src = e->src;
6235 dest = e->dest;
6236 for (tmp = src->succ; tmp && tmp != e; tmp = tmp->succ_next)
6237 last_succ = tmp;
6239 if (!tmp)
6240 abort ();
6241 if (last_succ)
6242 last_succ->succ_next = e->succ_next;
6243 else
6244 src->succ = e->succ_next;
6246 for (tmp = dest->pred; tmp && tmp != e; tmp = tmp->pred_next)
6247 last_pred = tmp;
6249 if (!tmp)
6250 abort ();
6251 if (last_pred)
6252 last_pred->pred_next = e->pred_next;
6253 else
6254 dest->pred = e->pred_next;
6256 n_edges--;
6257 free (e);
6260 /* This routine will remove any fake successor edges for a basic block.
6261 When the edge is removed, it is also removed from whatever predecessor
6262 list it is in. */
6263 static void
6264 remove_fake_successors (bb)
6265 basic_block bb;
6267 edge e;
6268 for (e = bb->succ; e ; )
6270 edge tmp = e;
6271 e = e->succ_next;
6272 if ((tmp->flags & EDGE_FAKE) == EDGE_FAKE)
6273 remove_edge (tmp);
6277 /* This routine will remove all fake edges from the flow graph. If
6278 we remove all fake successors, it will automatically remove all
6279 fake predecessors. */
6280 void
6281 remove_fake_edges ()
6283 int x;
6285 for (x = 0; x < n_basic_blocks; x++)
6286 remove_fake_successors (BASIC_BLOCK (x));
6288 /* We've handled all successors except the entry block's. */
6289 remove_fake_successors (ENTRY_BLOCK_PTR);
6292 /* This function will add a fake edge between any block which has no
6293 successors, and the exit block. Some data flow equations require these
6294 edges to exist. */
6295 void
6296 add_noreturn_fake_exit_edges ()
6298 int x;
6300 for (x = 0; x < n_basic_blocks; x++)
6301 if (BASIC_BLOCK (x)->succ == NULL)
6302 make_edge (NULL, BASIC_BLOCK (x), EXIT_BLOCK_PTR, EDGE_FAKE);
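/* The intended pattern (a sketch of client usage): add the fake edges,
   solve the dataflow problem, then remove them so the CFG is left
   unchanged:

     add_noreturn_fake_exit_edges ();
     ... solve equations that need every block to reach the exit ...
     remove_fake_edges ();  */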
6305 /* Dump the list of basic blocks in the bitmap NODES. */
6306 static void
6307 flow_nodes_print (str, nodes, file)
6308 const char *str;
6309 const sbitmap nodes;
6310 FILE *file;
6312 int node;
6314 fprintf (file, "%s { ", str);
6315 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {fprintf (file, "%d ", node);});
6316 fputs ("}\n", file);
6320 /* Dump the list of exiting edges in the array EDGES. */
6321 static void
6322 flow_exits_print (str, edges, num_edges, file)
6323 const char *str;
6324 const edge *edges;
6325 int num_edges;
6326 FILE *file;
6328 int i;
6330 fprintf (file, "%s { ", str);
6331 for (i = 0; i < num_edges; i++)
6332 fprintf (file, "%d->%d ", edges[i]->src->index, edges[i]->dest->index);
6333 fputs ("}\n", file);
6337 /* Dump loop related CFG information. */
6338 static void
6339 flow_loops_cfg_dump (loops, file)
6340 const struct loops *loops;
6341 FILE *file;
6343 int i;
6345 if (! loops->num || ! file || ! loops->cfg.dom)
6346 return;
6348 for (i = 0; i < n_basic_blocks; i++)
6350 edge succ;
6352 fprintf (file, ";; %d succs { ", i);
6353 for (succ = BASIC_BLOCK (i)->succ; succ; succ = succ->succ_next)
6354 fprintf (file, "%d ", succ->dest->index);
6355 flow_nodes_print ("} dom", loops->cfg.dom[i], file);
6359 /* Dump the DFS node order. */
6360 if (loops->cfg.dfs_order)
6362 fputs (";; DFS order: ", file);
6363 for (i = 0; i < n_basic_blocks; i++)
6364 fprintf (file, "%d ", loops->cfg.dfs_order[i]);
6365 fputs ("\n", file);
6370 /* Return non-zero if the nodes of LOOP are a subset of OUTER. */
6371 static int
6372 flow_loop_nested_p (outer, loop)
6373 struct loop *outer;
6374 struct loop *loop;
6376 return sbitmap_a_subset_b_p (loop->nodes, outer->nodes);
6380 /* Dump the loop information specified by LOOPS to the stream FILE. */
6381 void
6382 flow_loops_dump (loops, file, verbose)
6383 const struct loops *loops;
6384 FILE *file;
6385 int verbose;
6387 int i;
6388 int num_loops;
6390 num_loops = loops->num;
6391 if (! num_loops || ! file)
6392 return;
6394 fprintf (file, ";; %d loops found\n", num_loops);
6396 for (i = 0; i < num_loops; i++)
6398 struct loop *loop = &loops->array[i];
6400 fprintf (file, ";; loop %d (%d to %d):\n;; header %d, latch %d, pre-header %d, depth %d, level %d, outer %ld\n",
6401 i, INSN_UID (loop->header->head), INSN_UID (loop->latch->end),
6402 loop->header->index, loop->latch->index,
6403 loop->pre_header ? loop->pre_header->index : -1,
6404 loop->depth, loop->level,
6405 (long) (loop->outer ? (loop->outer - loops->array) : -1));
6406 fprintf (file, ";; %d", loop->num_nodes);
6407 flow_nodes_print (" nodes", loop->nodes, file);
6408 fprintf (file, ";; %d", loop->num_exits);
6409 flow_exits_print (" exits", loop->exits, loop->num_exits, file);
6411 if (loop->shared)
6413 int j;
6415 for (j = 0; j < i; j++)
6417 struct loop *oloop = &loops->array[j];
6419 if (loop->header == oloop->header)
6421 int disjoint;
6422 int smaller;
6424 smaller = loop->num_nodes < oloop->num_nodes;
6426 /* If the union of LOOP and OLOOP is different from
6427 the larger of LOOP and OLOOP then LOOP and OLOOP
6428 must be disjoint. */
6429 disjoint = ! flow_loop_nested_p (smaller ? loop : oloop,
6430 smaller ? oloop : loop);
6431 fprintf (file, ";; loop header %d shared by loops %d, %d %s\n",
6432 loop->header->index, i, j,
6433 disjoint ? "disjoint" : "nested");
6438 if (verbose)
6440 /* Print diagnostics to compare our concept of a loop with
6441 what the loop notes say. */
6442 if (GET_CODE (PREV_INSN (loop->header->head)) != NOTE
6443 || NOTE_LINE_NUMBER (PREV_INSN (loop->header->head))
6444 != NOTE_INSN_LOOP_BEG)
6445 fprintf (file, ";; No NOTE_INSN_LOOP_BEG at %d\n",
6446 INSN_UID (PREV_INSN (loop->header->head)));
6447 if (GET_CODE (NEXT_INSN (loop->latch->end)) != NOTE
6448 || NOTE_LINE_NUMBER (NEXT_INSN (loop->latch->end))
6449 != NOTE_INSN_LOOP_END)
6450 fprintf (file, ";; No NOTE_INSN_LOOP_END at %d\n",
6451 INSN_UID (NEXT_INSN (loop->latch->end)));
6455 if (verbose)
6456 flow_loops_cfg_dump (loops, file);
6460 /* Free all the memory allocated for LOOPS. */
6461 void
6462 flow_loops_free (loops)
6463 struct loops *loops;
6465 if (loops->array)
6467 int i;
6469 if (! loops->num)
6470 abort ();
6472 /* Free the loop descriptors. */
6473 for (i = 0; i < loops->num; i++)
6475 struct loop *loop = &loops->array[i];
6477 if (loop->nodes)
6478 sbitmap_free (loop->nodes);
6479 if (loop->exits)
6480 free (loop->exits);
6482 free (loops->array);
6483 loops->array = NULL;
6485 if (loops->cfg.dom)
6486 sbitmap_vector_free (loops->cfg.dom);
6487 if (loops->cfg.dfs_order)
6488 free (loops->cfg.dfs_order);
6490 sbitmap_free (loops->shared_headers);
6495 /* Find the exits from the loop using the bitmap of loop nodes NODES
6496 and store them in the array EXITS. Return the number of exits from
6497 the loop. */
6498 static int
6499 flow_loop_exits_find (nodes, exits)
6500 const sbitmap nodes;
6501 edge **exits;
6503 edge e;
6504 int node;
6505 int num_exits;
6507 *exits = NULL;
6509 /* Check all nodes within the loop to see if there are any
6510 successors not in the loop. Note that a node may have multiple
6511 exiting edges. */
6512 num_exits = 0;
6513 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
6514 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
6516 basic_block dest = e->dest;
6518 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
6519 num_exits++;
6523 if (! num_exits)
6524 return 0;
6526 *exits = (edge *) xmalloc (num_exits * sizeof (edge));
6528 /* Store all exiting edges into an array. */
6529 num_exits = 0;
6530 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
6531 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
6533 basic_block dest = e->dest;
6535 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
6536 (*exits)[num_exits++] = e;
6540 return num_exits;
6544 /* Find the nodes contained within the loop with header HEADER and
6545 latch LATCH and store in NODES. Return the number of nodes within
6546 the loop. */
6547 static int
6548 flow_loop_nodes_find (header, latch, nodes)
6549 basic_block header;
6550 basic_block latch;
6551 sbitmap nodes;
6553 basic_block *stack;
6554 int sp;
6555 int num_nodes = 0;
6557 stack = (basic_block *) xmalloc (n_basic_blocks * sizeof (basic_block));
6558 sp = 0;
6560 /* Start with only the loop header in the set of loop nodes. */
6561 sbitmap_zero (nodes);
6562 SET_BIT (nodes, header->index);
6563 num_nodes++;
6564 header->loop_depth++;
6566 /* Push the loop latch on to the stack. */
6567 if (! TEST_BIT (nodes, latch->index))
6569 SET_BIT (nodes, latch->index);
6570 latch->loop_depth++;
6571 num_nodes++;
6572 stack[sp++] = latch;
6575 while (sp)
6577 basic_block node;
6578 edge e;
6580 node = stack[--sp];
6581 for (e = node->pred; e; e = e->pred_next)
6583 basic_block ancestor = e->src;
6585 /* If this ancestor has not yet been marked as part of the loop,
6586 add it to the set of loop nodes and push it onto the stack. */
6587 if (ancestor != ENTRY_BLOCK_PTR
6588 && ! TEST_BIT (nodes, ancestor->index))
6590 SET_BIT (nodes, ancestor->index);
6591 ancestor->loop_depth++;
6592 num_nodes++;
6593 stack[sp++] = ancestor;
6597 free (stack);
6598 return num_nodes;
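/* As a worked example (hypothetical CFG): for a loop with header H,
   latch L and body H -> A -> L -> H, the backward walk starts at L,
   pulls in A (a predecessor of L not yet in the set), stops at H
   (already marked), and returns num_nodes == 3 with {H, A, L} set
   in NODES.  */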
6602 /* Compute the depth first search order of the CFG and store it in
6603 the array DFS_ORDER. Returns the number of nodes visited. */
6605 static int
6606 flow_depth_first_order_compute (dfs_order)
6607 int *dfs_order;
6609 edge e;
6610 edge *stack;
6611 int sp;
6612 int dfsnum = 0;
6613 sbitmap visited;
6615 /* Allocate stack for back-tracking up CFG. */
6616 stack = (edge *) xmalloc (n_basic_blocks * sizeof (edge));
6617 sp = 0;
6619 /* Allocate bitmap to track nodes that have been visited. */
6620 visited = sbitmap_alloc (n_basic_blocks);
6622 /* None of the nodes in the CFG have been visited yet. */
6623 sbitmap_zero (visited);
6625 /* Start with the first successor edge from the entry block. */
6626 e = ENTRY_BLOCK_PTR->succ;
6627 while (e)
6629 basic_block src = e->src;
6630 basic_block dest = e->dest;
6632 /* Mark that we have visited this node. */
6633 if (src != ENTRY_BLOCK_PTR)
6634 SET_BIT (visited, src->index);
6636 /* If this node has not been visited before, push the current
6637 edge on to the stack and proceed with the first successor
6638 edge of this node. */
6639 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index)
6640 && dest->succ)
6642 stack[sp++] = e;
6643 e = dest->succ;
6645 else
6647 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index)
6648 && ! dest->succ)
6650 /* DEST has no successors (for example, a non-returning
6651 function is called) so do not push the current edge
6652 but carry on with its next successor. */
6653 dfs_order[dest->index] = n_basic_blocks - ++dfsnum;
6654 SET_BIT (visited, dest->index);
6657 while (! e->succ_next && src != ENTRY_BLOCK_PTR)
6659 dfs_order[src->index] = n_basic_blocks - ++dfsnum;
6661 /* Pop edge off stack. */
6662 e = stack[--sp];
6663 src = e->src;
6665 e = e->succ_next;
6668 free (stack);
6669 sbitmap_free (visited);
6671 /* The number of nodes visited should not be greater than
6672 n_basic_blocks. */
6673 if (dfsnum > n_basic_blocks)
6674 abort ();
6676 /* Any nodes left unvisited would be unreachable blocks, which should not exist at this point. */
6677 if (dfsnum < n_basic_blocks)
6678 abort ();
6679 return dfsnum;
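/* Note that DFS_ORDER is indexed by block number and records when the
   search finished with each block, counted down from n_basic_blocks:
   blocks that complete last, such as outermost loop headers, receive
   the smallest numbers. flow_loops_find relies on this to encounter
   outer loops before the loops nested within them.  */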
6683 /* Return the block for the pre-header of the loop with header
6684 HEADER where DOM specifies the dominator information. Return NULL if
6685 there is no pre-header. */
6686 static basic_block
6687 flow_loop_pre_header_find (header, dom)
6688 basic_block header;
6689 const sbitmap *dom;
6691 basic_block pre_header;
6692 edge e;
6694 /* If block P is the only predecessor of the header that the header
6695 does not dominate (the only entry from outside the loop), then P is the pre-header. */
6696 pre_header = NULL;
6697 for (e = header->pred; e; e = e->pred_next)
6699 basic_block node = e->src;
6701 if (node != ENTRY_BLOCK_PTR
6702 && ! TEST_BIT (dom[node->index], header->index))
6704 if (pre_header == NULL)
6705 pre_header = node;
6706 else
6708 /* There are multiple edges into the header from outside
6709 the loop so there is no pre-header block. */
6710 pre_header = NULL;
6711 break;
6715 return pre_header;
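/* For illustration (hypothetical CFG): if blocks P -> H and L -> H are
   the only edges into header H, and H dominates L (a back edge) but
   not P, then P is the unique outside predecessor and is returned as
   the pre-header; with a second outside predecessor the function
   returns NULL instead.  */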
6719 /* Add LOOP to the loop hierarchy tree so that it is a sibling or a
6720 descendant of ROOT. */
6721 static void
6722 flow_loop_tree_node_add (root, loop)
6723 struct loop *root;
6724 struct loop *loop;
6726 struct loop *outer;
6728 if (! loop)
6729 return;
6731 for (outer = root; outer; outer = outer->next)
6733 if (flow_loop_nested_p (outer, loop))
6735 if (outer->inner)
6737 /* Add LOOP as a sibling or descendant of OUTER->INNER. */
6738 flow_loop_tree_node_add (outer->inner, loop);
6740 else
6742 /* Add LOOP as child of OUTER. */
6743 outer->inner = loop;
6744 loop->outer = outer;
6745 loop->next = NULL;
6747 return;
6750 /* Add LOOP as a sibling of ROOT. */
6751 loop->next = root->next;
6752 root->next = loop;
6753 loop->outer = root->outer;
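/* For example (hypothetical nest): if flow_loops_tree_build (below)
   roots the tree at loop A and then adds B { inside A }, C { inside B }
   and a disjoint loop D, the result is B as A->inner, C as B->inner,
   and D chained as a sibling of A through the `next' field.  */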
6757 /* Build the loop hierarchy tree for LOOPS. */
6758 static void
6759 flow_loops_tree_build (loops)
6760 struct loops *loops;
6762 int i;
6763 int num_loops;
6765 num_loops = loops->num;
6766 if (! num_loops)
6767 return;
6769 /* Root the loop hierarchy tree with the first loop found.
6770 Since we used a depth first search this should be the
6771 outermost loop. */
6772 loops->tree = &loops->array[0];
6773 loops->tree->outer = loops->tree->inner = loops->tree->next = NULL;
6775 /* Add the remaining loops to the tree. */
6776 for (i = 1; i < num_loops; i++)
6777 flow_loop_tree_node_add (loops->tree, &loops->array[i]);
6781 /* Helper function to compute loop nesting depth and enclosed loop level
6782 for the natural loop specified by LOOP at the loop depth DEPTH.
6783 Returns the loop level. */
6784 static int
6785 flow_loop_level_compute (loop, depth)
6786 struct loop *loop;
6787 int depth;
6789 struct loop *inner;
6790 int level = 0;
6792 if (! loop)
6793 return 0;
6795 /* Traverse loop tree assigning depth and computing level as the
6796 maximum level of all the inner loops of this loop. The loop
6797 level is equivalent to the height of the loop in the loop tree
6798 and corresponds to the number of enclosed loop levels. */
6799 for (inner = loop->inner; inner; inner = inner->next)
6801 int ilevel;
6803 ilevel = flow_loop_level_compute (inner, depth + 1) + 1;
6805 if (ilevel > level)
6806 level = ilevel;
6808 loop->level = level;
6809 loop->depth = depth;
6810 return level;
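/* Worked example (hypothetical nest A { B { C } }): the recursion
   assigns depths A=0, B=1, C=2 from the DEPTH argument, and levels
   C=0, B=1, A=2, since a loop's level is one more than the maximum
   level of its inner loops.  */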
6814 /* Compute the loop nesting depth and enclosed loop level for the loop
6815 hierarchy tree specified by LOOPS. Return the maximum enclosed loop
6816 level. */
6818 static int
6819 flow_loops_level_compute (loops)
6820 struct loops *loops;
6822 return flow_loop_level_compute (loops->tree, 0);
6826 /* Find all the natural loops in the function and save in LOOPS structure
6827 and recalculate loop_depth information in basic block structures.
6828 Return the number of natural loops found. */
6830 int
6831 flow_loops_find (loops)
6832 struct loops *loops;
6834 int i;
6835 int b;
6836 int num_loops;
6837 edge e;
6838 sbitmap headers;
6839 sbitmap *dom;
6840 int *dfs_order;
6842 loops->num = 0;
6843 loops->array = NULL;
6844 loops->tree = NULL;
6845 dfs_order = NULL;
6847 /* Taking care of this degenerate case makes the rest of
6848 this code simpler. */
6849 if (n_basic_blocks == 0)
6850 return 0;
6852 /* Compute the dominators. */
6853 dom = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
6854 compute_flow_dominators (dom, NULL);
6856 /* Count the number of loop edges (back edges). This should be the
6857 same as the number of natural loops. Also reset each block's
6858 loop_depth; as we work through a loop nest we call
6859 flow_loop_nodes_find, which increments loop_depth for the nodes
6860 within the current loop, which happens to enclose inner loops. */
6862 num_loops = 0;
6863 for (b = 0; b < n_basic_blocks; b++)
6865 BASIC_BLOCK (b)->loop_depth = 1;
6866 for (e = BASIC_BLOCK (b)->pred; e; e = e->pred_next)
6868 basic_block latch = e->src;
6870 /* Look for back edges where a predecessor is dominated
6871 by this block. A natural loop has a single entry
6872 node (header) that dominates all the nodes in the
6873 loop. It also has a single back edge to the header
6874 from a latch node. Note that multiple natural loops
6875 may share the same header. */
6876 if (latch != ENTRY_BLOCK_PTR && TEST_BIT (dom[latch->index], b))
6877 num_loops++;
6881 if (num_loops)
6883 /* Compute depth first search order of the CFG so that outer
6884 natural loops will be found before inner natural loops. */
6885 dfs_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
6886 flow_depth_first_order_compute (dfs_order);
6888 /* Allocate loop structures. */
6889 loops->array
6890 = (struct loop *) xcalloc (num_loops, sizeof (struct loop));
6892 headers = sbitmap_alloc (n_basic_blocks);
6893 sbitmap_zero (headers);
6895 loops->shared_headers = sbitmap_alloc (n_basic_blocks);
6896 sbitmap_zero (loops->shared_headers);
6898 /* Find and record information about all the natural loops
6899 in the CFG. */
6900 num_loops = 0;
6901 for (b = 0; b < n_basic_blocks; b++)
6903 basic_block header;
6905 /* Search the nodes of the CFG in DFS order so that we can find
6906 outer loops first. */
6907 header = BASIC_BLOCK (dfs_order[b]);
6909 /* Look for all the possible latch blocks for this header. */
6910 for (e = header->pred; e; e = e->pred_next)
6912 basic_block latch = e->src;
6914 /* Look for back edges where a predecessor is dominated
6915 by this block. A natural loop has a single entry
6916 node (header) that dominates all the nodes in the
6917 loop. It also has a single back edge to the header
6918 from a latch node. Note that multiple natural loops
6919 may share the same header. */
6920 if (latch != ENTRY_BLOCK_PTR
6921 && TEST_BIT (dom[latch->index], header->index))
6923 struct loop *loop;
6925 loop = loops->array + num_loops;
6927 loop->header = header;
6928 loop->latch = latch;
6930 /* Keep track of blocks that are loop headers so
6931 that we can tell which loops should be merged. */
6932 if (TEST_BIT (headers, header->index))
6933 SET_BIT (loops->shared_headers, header->index);
6934 SET_BIT (headers, header->index);
6936 /* Find nodes contained within the loop. */
6937 loop->nodes = sbitmap_alloc (n_basic_blocks);
6938 loop->num_nodes
6939 = flow_loop_nodes_find (header, latch, loop->nodes);
6941 /* Find edges which exit the loop. Note that a node
6942 may have several exit edges. */
6943 loop->num_exits
6944 = flow_loop_exits_find (loop->nodes, &loop->exits);
6946 /* Look to see if the loop has a pre-header node. */
6947 loop->pre_header
6948 = flow_loop_pre_header_find (header, dom);
6950 num_loops++;
6955 /* Natural loops with shared headers may either be disjoint or
6956 nested. Disjoint loops with shared headers cannot be inner
6957 loops and should be merged. For now just mark loops that share
6958 headers. */
6959 for (i = 0; i < num_loops; i++)
6960 if (TEST_BIT (loops->shared_headers, loops->array[i].header->index))
6961 loops->array[i].shared = 1;
6963 sbitmap_free (headers);
6966 loops->num = num_loops;
6968 /* Save CFG derived information to avoid recomputing it. */
6969 loops->cfg.dom = dom;
6970 loops->cfg.dfs_order = dfs_order;
6972 /* Build the loop hierarchy tree. */
6973 flow_loops_tree_build (loops);
6975 /* Assign the loop nesting depth and enclosed loop level for each
6976 loop. */
6977 flow_loops_level_compute (loops);
6979 return num_loops;
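/* A minimal sketch of how a pass might drive this analysis
   (hypothetical client code; a real loop optimizer would transform
   the loops in the middle):

     struct loops loops;

     if (flow_loops_find (&loops))
       {
	 flow_loops_dump (&loops, stderr, 1);
	 ... work with loops.array and loops.tree ...
	 flow_loops_free (&loops);
       }
*/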
6983 /* Return non-zero if edge E enters header of LOOP from outside of LOOP. */
6984 int
6985 flow_loop_outside_edge_p (loop, e)
6986 const struct loop *loop;
6987 edge e;
6989 if (e->dest != loop->header)
6990 abort ();
6991 return (e->src == ENTRY_BLOCK_PTR)
6992 || ! TEST_BIT (loop->nodes, e->src->index);