1 /* Data flow analysis for GNU compiler.
2 Copyright (C) 1987, 88, 92-98, 1999 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
22 /* This file contains the data flow analysis pass of the compiler. It
23 computes data flow information which tells combine_instructions
24 which insns to consider combining and controls register allocation.
26 Additional data flow information that is too bulky to record is
27 generated during the analysis, and is used at that time to create
28 autoincrement and autodecrement addressing.
30 The first step is dividing the function into basic blocks.
31 find_basic_blocks does this. Then life_analysis determines
32 where each register is live and where it is dead.
34 ** find_basic_blocks **
36 find_basic_blocks divides the current function's rtl into basic
37 blocks and constructs the CFG. The blocks are recorded in the
38 basic_block_info array; the CFG exists in the edge structures
39 referenced by the blocks.
41 find_basic_blocks also finds any unreachable loops and deletes them.
43 ** life_analysis **
45 life_analysis is called immediately after find_basic_blocks.
46 It uses the basic block information to determine where each
47 hard or pseudo register is live.
49 ** live-register info **
51 The information about where each register is live is in two parts:
52 the REG_NOTES of insns, and the vector basic_block->global_live_at_start.
54 basic_block->global_live_at_start has an element for each basic
55 block, and the element is a bit-vector with a bit for each hard or
56 pseudo register. The bit is 1 if the register is live at the
57 beginning of the basic block.
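
   As a hedged illustration (not code from this file) of querying that
   vector, where BB and REGNO are assumed to be in scope and
   REGNO_REG_SET_P is the regset membership test from basic-block.h:

	live_on_entry = REGNO_REG_SET_P (bb->global_live_at_start, regno);

   live_on_entry is an illustrative flag, nonzero if the register is
   live at the start of the block.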
59 Two types of elements can be added to an insn's REG_NOTES.
60 A REG_DEAD note is added to an insn's REG_NOTES for any register
61 that meets both of two conditions: The value in the register is not
62 needed in subsequent insns and the insn does not replace the value in
63 the register (in the case of multi-word hard registers, the value in
64 each register must be replaced by the insn to avoid a REG_DEAD note).
66 In the vast majority of cases, an object in a REG_DEAD note will be
67 used somewhere in the insn. The (rare) exception to this is if an
68 insn uses a multi-word hard register and only some of the registers are
69 needed in subsequent insns. In that case, REG_DEAD notes will be
70 provided for those hard registers that are not subsequently needed.
71 Partial REG_DEAD notes of this type do not occur when an insn sets
72 only some of the hard registers used in such a multi-word operand;
73 omitting REG_DEAD notes for objects stored in an insn is optional and
74 the desire to do so does not justify the complexity of the partial
75 REG_DEAD notes.
77 REG_UNUSED notes are added for each register that is set by the insn
78 but is unused subsequently (if every register set by the insn is unused
79 and the insn does not reference memory or have some other side-effect,
80 the insn is deleted instead). If only part of a multi-word hard
81 register is used in a subsequent insn, REG_UNUSED notes are made for
82 the parts that will not be used.
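
   As a hedged illustration of how a later pass might test these notes
   (INSN and REGNO are assumed to be in scope; the two flags are
   illustrative):

	if (find_regno_note (insn, REG_DEAD, regno))
	  reg_dies_in_insn = 1;
	if (find_regno_note (insn, REG_UNUSED, regno))
	  set_never_used = 1;

   find_regno_note returns the matching note, or NULL_RTX if none.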
84 To determine which registers are live after any insn, one can
85 start from the beginning of the basic block and scan insns, noting
86 which registers are set by each insn and which die there.
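
   A hedged sketch of that scan, assuming a regset LIVE and two
   illustrative helpers, note_deaths and note_sets, which would clear
   registers carrying REG_DEAD notes and set registers written by the
   insn, respectively:

	COPY_REG_SET (live, bb->global_live_at_start);
	for (insn = bb->head; ; insn = NEXT_INSN (insn))
	  {
	    if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	      {
		note_deaths (live, insn);
		note_sets (live, insn);
	      }
	    if (insn == bb->end)
	      break;
	  }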
88 ** Other actions of life_analysis **
90 life_analysis sets up the LOG_LINKS fields of insns because the
91 information needed to do so is readily available.
93 life_analysis deletes insns whose only effect is to store a value
94 that is never used.
96 life_analysis notices cases where a reference to a register as
97 a memory address can be combined with a preceding or following
98 incrementation or decrementation of the register. The separate
99 instruction to increment or decrement is deleted and the address
100 is changed to a POST_INC or similar rtx.
102 Each time an incrementing or decrementing address is created,
103 a REG_INC element is added to the insn's REG_NOTES list.
105 life_analysis fills in certain vectors containing information about
106 register usage: reg_n_refs, reg_n_deaths, reg_n_sets, reg_live_length,
107 reg_n_calls_crossed and reg_basic_block.
109 life_analysis sets current_function_sp_is_unchanging if the function
110 doesn't modify the stack pointer. */
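
/* A hedged illustration of reading the register-usage vectors
   mentioned above through the accessor macros from regs.h (REGNO is
   assumed to be in scope; never_mentioned is an illustrative flag):

	never_mentioned = (REG_N_REFS (regno) == 0
			   && REG_N_SETS (regno) == 0);
*/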
112 /* TODO:
114 Split out from life_analysis:
115 - local property discovery (bb->local_live, bb->local_set)
116 - global property computation
117 - log links creation
118 - pre/post modify transformation
119 */
121 #include "config.h"
122 #include "system.h"
123 #include "tree.h"
124 #include "rtl.h"
125 #include "tm_p.h"
126 #include "basic-block.h"
127 #include "insn-config.h"
128 #include "regs.h"
129 #include "hard-reg-set.h"
130 #include "flags.h"
131 #include "output.h"
132 #include "function.h"
133 #include "except.h"
134 #include "toplev.h"
135 #include "recog.h"
136 #include "insn-flags.h"
138 #include "obstack.h"
140 #define obstack_chunk_alloc xmalloc
141 #define obstack_chunk_free free
144 /* EXIT_IGNORE_STACK should be nonzero if, when returning from a function,
145 the stack pointer does not matter. The value is tested only in
146 functions that have frame pointers.
147 No definition is equivalent to always zero. */
148 #ifndef EXIT_IGNORE_STACK
149 #define EXIT_IGNORE_STACK 0
150 #endif
152 #ifndef HAVE_epilogue
153 #define HAVE_epilogue 0
154 #endif
156 #ifndef HAVE_prologue
157 #define HAVE_prologue 0
158 #endif
160 /* The contents of the current function definition are allocated
161 in this obstack, and all are freed at the end of the function.
162 For top-level functions, this is temporary_obstack.
163 Separate obstacks are made for nested functions. */
165 extern struct obstack *function_obstack;
167 /* Number of basic blocks in the current function. */
169 int n_basic_blocks;
171 /* Number of edges in the current function. */
173 int n_edges;
175 /* The basic block array. */
177 varray_type basic_block_info;
179 /* The special entry and exit blocks. */
181 struct basic_block_def entry_exit_blocks[2] =
182 {
183   {
184 NULL, /* head */
185 NULL, /* end */
186 NULL, /* pred */
187 NULL, /* succ */
188 NULL, /* local_set */
189 NULL, /* global_live_at_start */
190 NULL, /* global_live_at_end */
191 NULL, /* aux */
192 ENTRY_BLOCK, /* index */
193 0, /* loop_depth */
194 -1, -1 /* eh_beg, eh_end */
195 },
196 {
197 NULL, /* head */
198 NULL, /* end */
199 NULL, /* pred */
200 NULL, /* succ */
201 NULL, /* local_set */
202 NULL, /* global_live_at_start */
203 NULL, /* global_live_at_end */
204 NULL, /* aux */
205 EXIT_BLOCK, /* index */
206 0, /* loop_depth */
207 -1, -1 /* eh_beg, eh_end */
208 }
209 };
211 /* Nonzero if the second flow pass has completed. */
212 int flow2_completed;
214 /* Maximum register number used in this function, plus one. */
216 int max_regno;
218 /* Indexed by n, giving various register information */
220 varray_type reg_n_info;
222 /* Size of the reg_n_info table. */
224 unsigned int reg_n_max;
226 /* Element N is the next insn that uses (hard or pseudo) register number N
227 within the current basic block; or zero, if there is no such insn.
228 This is valid only during the final backward scan in propagate_block. */
230 static rtx *reg_next_use;
232 /* Size of a regset for the current function,
233 in (1) bytes and (2) elements. */
235 int regset_bytes;
236 int regset_size;
238 /* Regset of regs live when calls to `setjmp'-like functions happen. */
239 /* ??? Does this exist only for the setjmp-clobbered warning message? */
241 regset regs_live_at_setjmp;
243 /* List made of EXPR_LIST rtx's which gives pairs of pseudo registers
244 that have to go in the same hard reg.
245 The first two regs in the list are a pair, and the next two
246 are another pair, etc. */
247 rtx regs_may_share;
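
/* A hedged sketch of walking the list above two nodes at a time.
   The list is an EXPR_LIST chain, so XEXP (x, 0) is a register and
   XEXP (x, 1) links to the next node; FIRST and SECOND are each
   pair of registers that should share a hard reg.  Illustrative
   only, not used by this file.  */
#if 0
  {
    rtx x, first, second;
    for (x = regs_may_share; x; x = XEXP (XEXP (x, 1), 1))
      {
	first = XEXP (x, 0);
	second = XEXP (XEXP (x, 1), 0);
      }
  }
#endif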
249 /* Depth within loops of basic block being scanned for lifetime analysis,
250 plus one. This is the weight attached to references to registers. */
252 static int loop_depth;
254 /* During propagate_block, this is non-zero if the value of CC0 is live. */
256 static int cc0_live;
258 /* During propagate_block, this contains a list of all the MEMs we are
259 tracking for dead store elimination. */
261 static rtx mem_set_list;
263 /* Set of registers that may be eliminable. These are handled specially
264 in updating regs_ever_live. */
266 static HARD_REG_SET elim_reg_set;
268 /* The basic block structure for every insn, indexed by uid. */
270 varray_type basic_block_for_insn;
272 /* The labels mentioned in non-jump rtl. Valid during find_basic_blocks. */
273 /* ??? Should probably be using LABEL_NUSES instead. It would take a
274 bit of surgery to be able to use or co-opt the routines in jump. */
276 static rtx label_value_list;
278 /* INSN_VOLATILE (insn) is 1 if the insn refers to anything volatile. */
280 #define INSN_VOLATILE(INSN) bitmap_bit_p (uid_volatile, INSN_UID (INSN))
281 #define SET_INSN_VOLATILE(INSN) bitmap_set_bit (uid_volatile, INSN_UID (INSN))
282 static bitmap uid_volatile;
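
/* A hedged usage sketch of the accessors above, assuming INSN is in
   scope, uid_volatile has already been allocated, and handle_volatile
   is an illustrative placeholder:

	SET_INSN_VOLATILE (insn);
	if (INSN_VOLATILE (insn))
	  handle_volatile (insn);
*/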
284 /* Forward declarations */
285 static int count_basic_blocks PROTO((rtx));
286 static rtx find_basic_blocks_1 PROTO((rtx));
287 static void create_basic_block PROTO((int, rtx, rtx, rtx));
288 static void clear_edges PROTO((void));
289 static void make_edges PROTO((rtx));
290 static void make_edge PROTO((sbitmap *, basic_block,
291 basic_block, int));
292 static void make_label_edge PROTO((sbitmap *, basic_block,
293 rtx, int));
294 static void make_eh_edge PROTO((sbitmap *, eh_nesting_info *,
295 basic_block, rtx, int));
296 static void mark_critical_edges PROTO((void));
297 static void move_stray_eh_region_notes PROTO((void));
298 static void record_active_eh_regions PROTO((rtx));
300 static void commit_one_edge_insertion PROTO((edge));
302 static void delete_unreachable_blocks PROTO((void));
303 static void delete_eh_regions PROTO((void));
304 static int can_delete_note_p PROTO((rtx));
305 static int delete_block PROTO((basic_block));
306 static void expunge_block PROTO((basic_block));
307 static rtx flow_delete_insn PROTO((rtx));
308 static int can_delete_label_p PROTO((rtx));
309 static int merge_blocks_move_predecessor_nojumps PROTO((basic_block,
310 basic_block));
311 static int merge_blocks_move_successor_nojumps PROTO((basic_block,
312 basic_block));
313 static void merge_blocks_nomove PROTO((basic_block, basic_block));
314 static int merge_blocks PROTO((edge,basic_block,basic_block));
315 static void try_merge_blocks PROTO((void));
316 static void tidy_fallthru_edge PROTO((edge,basic_block,basic_block));
317 static void calculate_loop_depth PROTO((rtx));
319 static int verify_wide_reg_1 PROTO((rtx *, void *));
320 static void verify_wide_reg PROTO((int, rtx, rtx));
321 static void verify_local_live_at_start PROTO((regset, basic_block));
322 static int set_noop_p PROTO((rtx));
323 static int noop_move_p PROTO((rtx));
324 static void notice_stack_pointer_modification PROTO ((rtx, rtx, void *));
325 static void record_volatile_insns PROTO((rtx));
326 static void mark_reg PROTO((regset, rtx));
327 static void mark_regs_live_at_end PROTO((regset));
328 static void life_analysis_1 PROTO((rtx, int, int));
329 static void calculate_global_regs_live PROTO((sbitmap, sbitmap, int));
330 static void propagate_block PROTO((regset, rtx, rtx,
331 regset, int, int));
332 static int insn_dead_p PROTO((rtx, regset, int, rtx));
333 static int libcall_dead_p PROTO((rtx, regset, rtx, rtx));
334 static void mark_set_regs PROTO((regset, regset, rtx,
335 rtx, regset, int));
336 static void mark_set_1 PROTO((regset, regset, rtx,
337 rtx, regset, int));
338 #ifdef AUTO_INC_DEC
339 static void find_auto_inc PROTO((regset, rtx, rtx));
340 static int try_pre_increment_1 PROTO((rtx));
341 static int try_pre_increment PROTO((rtx, rtx, HOST_WIDE_INT));
342 #endif
343 static void mark_used_regs PROTO((regset, regset, rtx, int, rtx));
344 void dump_flow_info PROTO((FILE *));
345 void debug_flow_info PROTO((void));
346 static void dump_edge_info PROTO((FILE *, edge, int));
348 static void count_reg_sets_1 PROTO ((rtx));
349 static void count_reg_sets PROTO ((rtx));
350 static void count_reg_references PROTO ((rtx));
351 static void invalidate_mems_from_autoinc PROTO ((rtx));
352 static void remove_edge PROTO ((edge));
353 static void remove_fake_successors PROTO ((basic_block));
355 /* This function is always defined so it can be called from the
356 debugger, and it is declared extern so we don't get warnings about
357 it being unused. */
358 void verify_flow_info PROTO ((void));
361 /* Find basic blocks of the current function.
362 F is the first insn of the function and NREGS the number of register
363 numbers in use. */
365 void
366 find_basic_blocks (f, nregs, file, do_cleanup)
367 rtx f;
368 int nregs ATTRIBUTE_UNUSED;
369 FILE *file ATTRIBUTE_UNUSED;
370 int do_cleanup;
372 int max_uid;
374 /* Flush out existing data. */
375 if (basic_block_info != NULL)
377 int i;
379 clear_edges ();
381 /* Clear bb->aux on all extant basic blocks. We'll use this as a
382 tag for reuse during create_basic_block, just in case some pass
383 copies around basic block notes improperly. */
384 for (i = 0; i < n_basic_blocks; ++i)
385 BASIC_BLOCK (i)->aux = NULL;
387 VARRAY_FREE (basic_block_info);
390 n_basic_blocks = count_basic_blocks (f);
392 /* Size the basic block table. The actual structures will be allocated
393 by find_basic_blocks_1, since we want to keep the structure pointers
394 stable across calls to find_basic_blocks. */
395 /* ??? This whole issue would be much simpler if we called find_basic_blocks
396 exactly once, and thereafter we don't have a single long chain of
397 instructions at all until close to the end of compilation when we
398 actually lay them out. */
400 VARRAY_BB_INIT (basic_block_info, n_basic_blocks, "basic_block_info");
402 label_value_list = find_basic_blocks_1 (f);
404 /* Record the block to which an insn belongs. */
405 /* ??? This should be done another way, by which (perhaps) a label is
406 tagged directly with the basic block that it starts. It is used for
407 more than that currently, but IMO that is the only valid use. */
409 max_uid = get_max_uid ();
410 #ifdef AUTO_INC_DEC
411 /* Leave space for insns life_analysis makes in some cases for auto-inc.
412 These cases are rare, so we don't need too much space. */
413 max_uid += max_uid / 10;
414 #endif
416 compute_bb_for_insn (max_uid);
418 /* Discover the edges of our cfg. */
420 record_active_eh_regions (f);
421 make_edges (label_value_list);
423 /* Delete unreachable blocks, then merge blocks when possible. */
425 if (do_cleanup)
427 delete_unreachable_blocks ();
428 move_stray_eh_region_notes ();
429 record_active_eh_regions (f);
430 try_merge_blocks ();
433 /* Mark critical edges. */
435 mark_critical_edges ();
437 /* Discover the loop depth at the start of each basic block to aid
438 register allocation. */
439 calculate_loop_depth (f);
441 /* Kill the data we won't maintain. */
442 label_value_list = NULL_RTX;
444 #ifdef ENABLE_CHECKING
445 verify_flow_info ();
446 #endif
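
/* A hedged sketch of a typical invocation from a pass driver; the
   argument values are illustrative rather than copied from any
   actual caller:

	find_basic_blocks (get_insns (), max_reg_num (), rtl_dump_file, 1);
*/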
449 /* Count the basic blocks of the function. */
451 static int
452 count_basic_blocks (f)
453 rtx f;
455 register rtx insn;
456 register RTX_CODE prev_code;
457 register int count = 0;
458 int eh_region = 0;
459 int call_had_abnormal_edge = 0;
460 rtx prev_call = NULL_RTX;
462 prev_code = JUMP_INSN;
463 for (insn = f; insn; insn = NEXT_INSN (insn))
465 register RTX_CODE code = GET_CODE (insn);
467 if (code == CODE_LABEL
468 || (GET_RTX_CLASS (code) == 'i'
469 && (prev_code == JUMP_INSN
470 || prev_code == BARRIER
471 || (prev_code == CALL_INSN && call_had_abnormal_edge))))
473 count++;
476 /* Record whether this call created an edge. */
477 if (code == CALL_INSN)
479 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
480 int region = (note ? XWINT (XEXP (note, 0), 0) : 1);
481 prev_call = insn;
482 call_had_abnormal_edge = 0;
484 /* If there is a specified EH region, we have an edge. */
485 if (eh_region && region > 0)
486 call_had_abnormal_edge = 1;
487 else
489 /* If there is a nonlocal goto label and the specified
490 region number isn't -1, we have an edge. (0 means
491 no throw, but might have a nonlocal goto). */
492 if (nonlocal_goto_handler_labels && region >= 0)
493 call_had_abnormal_edge = 1;
496 else if (code != NOTE)
497 prev_call = NULL_RTX;
499 if (code != NOTE)
500 prev_code = code;
501 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
502 ++eh_region;
503 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
504 --eh_region;
508 /* The rest of the compiler works a bit more smoothly when we don't have to
509 check for the edge case of do-nothing functions with no basic blocks. */
510 if (count == 0)
512 emit_insn (gen_rtx_USE (VOIDmode, const0_rtx));
513 count = 1;
516 return count;
519 /* Find all basic blocks of the function whose first insn is F.
521 Collect and return a list of labels whose addresses are taken. This
522 will be used in make_edges for use with computed gotos. */
524 static rtx
525 find_basic_blocks_1 (f)
526 rtx f;
528 register rtx insn, next;
529 int call_has_abnormal_edge = 0;
530 int i = 0;
531 rtx bb_note = NULL_RTX;
532 rtx eh_list = NULL_RTX;
533 rtx label_value_list = NULL_RTX;
534 rtx head = NULL_RTX;
535 rtx end = NULL_RTX;
537 /* We process the instructions in a slightly different way than we did
538 previously. This is so that we see a NOTE_BASIC_BLOCK after we have
539 closed out the previous block, so that it gets attached at the proper
540 place. Since this form should be equivalent to the previous,
541 count_basic_blocks continues to use the old form as a check. */
543 for (insn = f; insn; insn = next)
545 enum rtx_code code = GET_CODE (insn);
547 next = NEXT_INSN (insn);
549 if (code == CALL_INSN)
551 /* Record whether this call created an edge. */
552 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
553 int region = (note ? XWINT (XEXP (note, 0), 0) : 1);
554 call_has_abnormal_edge = 0;
556 /* If there is an EH region, we have an edge. */
557 if (eh_list && region > 0)
558 call_has_abnormal_edge = 1;
559 else
561 /* If there is a nonlocal goto label and the specified
562 region number isn't -1, we have an edge. (0 means
563 no throw, but might have a nonlocal goto). */
564 if (nonlocal_goto_handler_labels && region >= 0)
565 call_has_abnormal_edge = 1;
569 switch (code)
571 case NOTE:
573 int kind = NOTE_LINE_NUMBER (insn);
575 /* Keep a LIFO list of the currently active exception notes. */
576 if (kind == NOTE_INSN_EH_REGION_BEG)
577 eh_list = alloc_INSN_LIST (insn, eh_list);
578 else if (kind == NOTE_INSN_EH_REGION_END)
580 rtx t = eh_list;
581 eh_list = XEXP (eh_list, 1);
582 free_INSN_LIST_node (t);
585 /* Look for basic block notes with which to keep the
586 basic_block_info pointers stable. Unthread the note now;
587 we'll put it back at the right place in create_basic_block.
588 Or not at all if we've already found a note in this block. */
589 else if (kind == NOTE_INSN_BASIC_BLOCK)
591 if (bb_note == NULL_RTX)
592 bb_note = insn;
593 next = flow_delete_insn (insn);
596 break;
599 case CODE_LABEL:
600 /* A basic block starts at a label. If we've closed one off due
601 to a barrier or some such, no need to do it again. */
602 if (head != NULL_RTX)
604 /* While we now have edge lists with which other portions of
605 the compiler might determine a call ending a basic block
606 does not imply an abnormal edge, it will be a bit before
607 everything can be updated. So continue to emit a noop at
608 the end of such a block. */
609 if (GET_CODE (end) == CALL_INSN)
611 rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
612 end = emit_insn_after (nop, end);
615 create_basic_block (i++, head, end, bb_note);
616 bb_note = NULL_RTX;
618 head = end = insn;
619 break;
621 case JUMP_INSN:
622 /* A basic block ends at a jump. */
623 if (head == NULL_RTX)
624 head = insn;
625 else
627 /* ??? Make a special check for table jumps. The way this
628 happens is truly and amazingly gross. We are about to
629 create a basic block that contains just a code label and
630 an addr*vec jump insn. Worse, an addr_diff_vec creates
631 its own natural loop.
633 Prevent this bit of brain damage, pasting things together
634 correctly in make_edges.
636 The correct solution involves emitting the table directly
637 on the tablejump instruction as a note, or JUMP_LABEL. */
639 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
640 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
642 head = end = NULL;
643 n_basic_blocks--;
644 break;
647 end = insn;
648 goto new_bb_inclusive;
650 case BARRIER:
651 /* A basic block ends at a barrier. It may be that an unconditional
652 jump already closed the basic block -- no need to do it again. */
653 if (head == NULL_RTX)
654 break;
656 /* While we now have edge lists with which other portions of the
657 compiler might determine a call ending a basic block does not
658 imply an abnormal edge, it will be a bit before everything can
659 be updated. So continue to emit a noop at the end of such a
660 block. */
661 if (GET_CODE (end) == CALL_INSN)
663 rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
664 end = emit_insn_after (nop, end);
666 goto new_bb_exclusive;
668 case CALL_INSN:
669 /* A basic block ends at a call that can either throw or
670 do a non-local goto. */
671 if (call_has_abnormal_edge)
673 new_bb_inclusive:
674 if (head == NULL_RTX)
675 head = insn;
676 end = insn;
678 new_bb_exclusive:
679 create_basic_block (i++, head, end, bb_note);
680 head = end = NULL_RTX;
681 bb_note = NULL_RTX;
682 break;
684 /* FALLTHRU */
686 default:
687 if (GET_RTX_CLASS (code) == 'i')
689 if (head == NULL_RTX)
690 head = insn;
691 end = insn;
693 break;
696 if (GET_RTX_CLASS (code) == 'i')
698 rtx note;
700 /* Make a list of all labels referred to other than by jumps
701 (which just don't have the REG_LABEL notes).
703 Make a special exception for labels followed by an ADDR*VEC,
704 as this would be a part of the tablejump setup code.
706 Make a special exception for the eh_return_stub_label, which
707 we know isn't part of any otherwise visible control flow. */
709 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
710 if (REG_NOTE_KIND (note) == REG_LABEL)
712 rtx lab = XEXP (note, 0), next;
714 if (lab == eh_return_stub_label)
716 else if ((next = next_nonnote_insn (lab)) != NULL
717 && GET_CODE (next) == JUMP_INSN
718 && (GET_CODE (PATTERN (next)) == ADDR_VEC
719 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
721 else
722 label_value_list
723 = alloc_EXPR_LIST (0, XEXP (note, 0), label_value_list);
728 if (head != NULL_RTX)
729 create_basic_block (i++, head, end, bb_note);
731 if (i != n_basic_blocks)
732 abort ();
734 return label_value_list;
737 /* Create a new basic block consisting of the instructions between
738 HEAD and END inclusive. Reuses the note and basic block struct
739 in BB_NOTE, if any. */
741 static void
742 create_basic_block (index, head, end, bb_note)
743 int index;
744 rtx head, end, bb_note;
746 basic_block bb;
748 if (bb_note
749 && ! RTX_INTEGRATED_P (bb_note)
750 && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
751 && bb->aux == NULL)
753 /* If we found an existing note, thread it back onto the chain. */
755 if (GET_CODE (head) == CODE_LABEL)
756 add_insn_after (bb_note, head);
757 else
759 add_insn_before (bb_note, head);
760 head = bb_note;
763 else
765 /* Otherwise we must create a note and a basic block structure.
766 Since we allow basic block structs in rtl, give the struct
767 the same lifetime by allocating it off the function obstack
768 rather than using malloc. */
770 bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
771 memset (bb, 0, sizeof (*bb));
773 if (GET_CODE (head) == CODE_LABEL)
774 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
775 else
777 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
778 head = bb_note;
780 NOTE_BASIC_BLOCK (bb_note) = bb;
783 /* Always include the bb note in the block. */
784 if (NEXT_INSN (end) == bb_note)
785 end = bb_note;
787 bb->head = head;
788 bb->end = end;
789 bb->index = index;
790 BASIC_BLOCK (index) = bb;
792 /* Tag the block so that we know it has been used when considering
793 other basic block notes. */
794 bb->aux = bb;
797 /* Records the basic block struct in BB_FOR_INSN, for every instruction
798 indexed by INSN_UID. MAX is the size of the array. */
800 void
801 compute_bb_for_insn (max)
802 int max;
804 int i;
806 if (basic_block_for_insn)
807 VARRAY_FREE (basic_block_for_insn);
808 VARRAY_BB_INIT (basic_block_for_insn, max, "basic_block_for_insn");
810 for (i = 0; i < n_basic_blocks; ++i)
812 basic_block bb = BASIC_BLOCK (i);
813 rtx insn, end;
815 end = bb->end;
816 insn = bb->head;
817 while (1)
819 int uid = INSN_UID (insn);
820 if (uid < max)
821 VARRAY_BB (basic_block_for_insn, uid) = bb;
822 if (insn == end)
823 break;
824 insn = NEXT_INSN (insn);
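
/* A hedged usage sketch; BLOCK_FOR_INSN is the lookup macro over this
   array (see basic-block.h), and INSN is assumed to be in scope:

	compute_bb_for_insn (get_max_uid ());
	bb = BLOCK_FOR_INSN (insn);
*/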
829 /* Free the memory associated with the edge structures. */
831 static void
832 clear_edges ()
834 int i;
835 edge n, e;
837 for (i = 0; i < n_basic_blocks; ++i)
839 basic_block bb = BASIC_BLOCK (i);
841 for (e = bb->succ; e ; e = n)
843 n = e->succ_next;
844 free (e);
847 bb->succ = 0;
848 bb->pred = 0;
851 for (e = ENTRY_BLOCK_PTR->succ; e ; e = n)
853 n = e->succ_next;
854 free (e);
857 ENTRY_BLOCK_PTR->succ = 0;
858 EXIT_BLOCK_PTR->pred = 0;
860 n_edges = 0;
863 /* Identify the edges between basic blocks.
865 LABEL_VALUE_LIST is a list of labels whose addresses are taken, i.e.
866 the possible targets of computed gotos. Blocks that are otherwise
868 unreachable may still be reached through such a label or by a
869 non-local goto. Active EH regions are read from bb->eh_beg/eh_end. */
871 static void
872 make_edges (label_value_list)
873 rtx label_value_list;
875 int i;
876 eh_nesting_info *eh_nest_info = init_eh_nesting_info ();
877 sbitmap *edge_cache = NULL;
879 /* Assume no computed jump; revise as we create edges. */
880 current_function_has_computed_jump = 0;
882 /* Heavy use of computed goto in machine-generated code can lead to
883 nearly fully-connected CFGs. In that case we spend a significant
884 amount of time searching the edge lists for duplicates. */
885 if (forced_labels || label_value_list)
887 edge_cache = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
888 sbitmap_vector_zero (edge_cache, n_basic_blocks);
891 /* By nature of the way these get numbered, block 0 is always the entry. */
892 make_edge (edge_cache, ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU);
894 for (i = 0; i < n_basic_blocks; ++i)
896 basic_block bb = BASIC_BLOCK (i);
897 rtx insn, x;
898 enum rtx_code code;
899 int force_fallthru = 0;
901 /* Examine the last instruction of the block, and discover the
902 ways we can leave the block. */
904 insn = bb->end;
905 code = GET_CODE (insn);
907 /* A branch. */
908 if (code == JUMP_INSN)
910 rtx tmp;
912 /* ??? Recognize a tablejump and do the right thing. */
913 if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
914 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
915 && GET_CODE (tmp) == JUMP_INSN
916 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
917 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
919 rtvec vec;
920 int j;
922 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
923 vec = XVEC (PATTERN (tmp), 0);
924 else
925 vec = XVEC (PATTERN (tmp), 1);
927 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
928 make_label_edge (edge_cache, bb,
929 XEXP (RTVEC_ELT (vec, j), 0), 0);
931 /* Some targets (eg, ARM) emit a conditional jump that also
932 contains the out-of-range target. Scan for these and
933 add an edge if necessary. */
934 if ((tmp = single_set (insn)) != NULL
935 && SET_DEST (tmp) == pc_rtx
936 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
937 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
938 make_label_edge (edge_cache, bb,
939 XEXP (XEXP (SET_SRC (tmp), 2), 0), 0);
941 #ifdef CASE_DROPS_THROUGH
942 /* Silly VAXen. The ADDR_VEC is going to be in the way of
943 us naturally detecting fallthru into the next block. */
944 force_fallthru = 1;
945 #endif
948 /* If this is a computed jump, then mark it as reaching
949 everything on the label_value_list and forced_labels list. */
950 else if (computed_jump_p (insn))
952 current_function_has_computed_jump = 1;
954 for (x = label_value_list; x; x = XEXP (x, 1))
955 make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
957 for (x = forced_labels; x; x = XEXP (x, 1))
958 make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
961 /* Return insns create an edge to the exit block. */
962 else if (returnjump_p (insn))
963 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);
965 /* Otherwise, we have a plain conditional or unconditional jump. */
966 else
968 if (! JUMP_LABEL (insn))
969 abort ();
970 make_label_edge (edge_cache, bb, JUMP_LABEL (insn), 0);
974 /* If this is a CALL_INSN, then mark it as reaching the active EH
975 handler for this CALL_INSN. If we're handling asynchronous
976 exceptions then any insn can reach any of the active handlers.
978 Also mark the CALL_INSN as reaching any nonlocal goto handler. */
980 if (code == CALL_INSN || asynchronous_exceptions)
982 /* If there's an EH region active at the end of a block,
983 add the appropriate edges. */
984 if (bb->eh_end >= 0)
985 make_eh_edge (edge_cache, eh_nest_info, bb, insn, bb->eh_end);
987 /* If we have asynchronous exceptions, do the same for *all*
988 exception regions active in the block. */
989 if (asynchronous_exceptions
990 && bb->eh_beg != bb->eh_end)
992 if (bb->eh_beg >= 0)
993 make_eh_edge (edge_cache, eh_nest_info, bb,
994 NULL_RTX, bb->eh_beg);
996 for (x = bb->head; x != bb->end; x = NEXT_INSN (x))
997 if (GET_CODE (x) == NOTE
998 && (NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_BEG
999 || NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_END))
1001 int region = NOTE_EH_HANDLER (x);
1002 make_eh_edge (edge_cache, eh_nest_info, bb,
1003 NULL_RTX, region);
1007 if (code == CALL_INSN && nonlocal_goto_handler_labels)
1009 /* ??? This could be made smarter: in some cases it's possible
1010 to tell that certain calls will not do a nonlocal goto.
1012 For example, if the nested functions that do the nonlocal
1013 gotos do not have their addresses taken, then only calls to
1014 those functions or to other nested functions that use them
1015 could possibly do nonlocal gotos. */
1016 /* We do know that a REG_EH_REGION note with a value less
1017 than 0 is guaranteed not to perform a non-local goto. */
1018 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1019 if (!note || XINT (XEXP (note, 0), 0) >= 0)
1020 for (x = nonlocal_goto_handler_labels; x ; x = XEXP (x, 1))
1021 make_label_edge (edge_cache, bb, XEXP (x, 0),
1022 EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
1026 /* We know something about the structure of the function __throw in
1027 libgcc2.c. It is the only function that ever contains eh_stub
1028 labels. It modifies its return address so that the last block
1029 returns to one of the eh_stub labels within it. So we have to
1030 make additional edges in the flow graph. */
1031 if (i + 1 == n_basic_blocks && eh_return_stub_label != 0)
1032 make_label_edge (edge_cache, bb, eh_return_stub_label, EDGE_EH);
1034 /* Find out if we can drop through to the next block. */
1035 insn = next_nonnote_insn (insn);
1036 if (!insn || (i + 1 == n_basic_blocks && force_fallthru))
1037 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
1038 else if (i + 1 < n_basic_blocks)
1040 rtx tmp = BLOCK_HEAD (i + 1);
1041 if (GET_CODE (tmp) == NOTE)
1042 tmp = next_nonnote_insn (tmp);
1043 if (force_fallthru || insn == tmp)
1044 make_edge (edge_cache, bb, BASIC_BLOCK (i + 1), EDGE_FALLTHRU);
1048 free_eh_nesting_info (eh_nest_info);
1049 if (edge_cache)
1050 sbitmap_vector_free (edge_cache);
1053 /* Create an edge between two basic blocks. FLAGS are auxiliary information
1054 about the edge that is accumulated between calls. */
1056 static void
1057 make_edge (edge_cache, src, dst, flags)
1058 sbitmap *edge_cache;
1059 basic_block src, dst;
1060 int flags;
1062 int use_edge_cache;
1063 edge e;
1065 /* Don't bother with edge cache for ENTRY or EXIT; there aren't that
1066 many edges to them, and we didn't allocate memory for it. */
1067 use_edge_cache = (edge_cache
1068 && src != ENTRY_BLOCK_PTR
1069 && dst != EXIT_BLOCK_PTR);
1071 /* Make sure we don't add duplicate edges. */
1072 if (! use_edge_cache || TEST_BIT (edge_cache[src->index], dst->index))
1073 for (e = src->succ; e ; e = e->succ_next)
1074 if (e->dest == dst)
1076 e->flags |= flags;
1077 return;
1080 e = (edge) xcalloc (1, sizeof (*e));
1081 n_edges++;
1083 e->succ_next = src->succ;
1084 e->pred_next = dst->pred;
1085 e->src = src;
1086 e->dest = dst;
1087 e->flags = flags;
1089 src->succ = e;
1090 dst->pred = e;
1092 if (use_edge_cache)
1093 SET_BIT (edge_cache[src->index], dst->index);
1096 /* Create an edge from a basic block to a label. */
1098 static void
1099 make_label_edge (edge_cache, src, label, flags)
1100 sbitmap *edge_cache;
1101 basic_block src;
1102 rtx label;
1103 int flags;
1105 if (GET_CODE (label) != CODE_LABEL)
1106 abort ();
1108 /* If the label was never emitted, this insn is junk, but avoid a
1109 crash trying to refer to BLOCK_FOR_INSN (label). This can happen
1110 as a result of a syntax error and a diagnostic has already been
1111 printed. */
1113 if (INSN_UID (label) == 0)
1114 return;
1116 make_edge (edge_cache, src, BLOCK_FOR_INSN (label), flags);
1119 /* Create the edges generated by INSN in REGION. */
1121 static void
1122 make_eh_edge (edge_cache, eh_nest_info, src, insn, region)
1123 sbitmap *edge_cache;
1124 eh_nesting_info *eh_nest_info;
1125 basic_block src;
1126 rtx insn;
1127 int region;
1129 handler_info **handler_list;
1130 int num, is_call;
1132 is_call = (insn && GET_CODE (insn) == CALL_INSN ? EDGE_ABNORMAL_CALL : 0);
1133 num = reachable_handlers (region, eh_nest_info, insn, &handler_list);
1134 while (--num >= 0)
1136 make_label_edge (edge_cache, src, handler_list[num]->handler_label,
1137 EDGE_ABNORMAL | EDGE_EH | is_call);
1141 /* EH_REGION notes appearing between basic blocks are ambiguous, and even
1142 dangerous if we intend to move basic blocks around. Move such notes
1143 into the following block. */
1145 static void
1146 move_stray_eh_region_notes ()
1148 int i;
1149 basic_block b1, b2;
1151 if (n_basic_blocks < 2)
1152 return;
1154 b2 = BASIC_BLOCK (n_basic_blocks - 1);
1155 for (i = n_basic_blocks - 2; i >= 0; --i, b2 = b1)
1157 rtx insn, next, list = NULL_RTX;
1159 b1 = BASIC_BLOCK (i);
1160 for (insn = NEXT_INSN (b1->end); insn != b2->head; insn = next)
1162 next = NEXT_INSN (insn);
1163 if (GET_CODE (insn) == NOTE
1164 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG
1165 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
1167 /* Unlink from the insn chain. */
1168 NEXT_INSN (PREV_INSN (insn)) = next;
1169 PREV_INSN (next) = PREV_INSN (insn);
1171 /* Queue it. */
1172 NEXT_INSN (insn) = list;
1173 list = insn;
1177 if (list == NULL_RTX)
1178 continue;
1180 /* Find where to insert these things. */
1181 insn = b2->head;
1182 if (GET_CODE (insn) == CODE_LABEL)
1183 insn = NEXT_INSN (insn);
1185 while (list)
1187 next = NEXT_INSN (list);
1188 add_insn_after (list, insn);
1189 list = next;
1194 /* Recompute eh_beg/eh_end for each basic block. */
1196 static void
1197 record_active_eh_regions (f)
1198 rtx f;
1200 rtx insn, eh_list = NULL_RTX;
1201 int i = 0;
1202 basic_block bb = BASIC_BLOCK (0);
1204 for (insn = f; insn ; insn = NEXT_INSN (insn))
1206 if (bb->head == insn)
1207 bb->eh_beg = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);
1209 if (GET_CODE (insn) == NOTE)
1211 int kind = NOTE_LINE_NUMBER (insn);
1212 if (kind == NOTE_INSN_EH_REGION_BEG)
1213 eh_list = alloc_INSN_LIST (insn, eh_list);
1214 else if (kind == NOTE_INSN_EH_REGION_END)
1216 rtx t = XEXP (eh_list, 1);
1217 free_INSN_LIST_node (eh_list);
1218 eh_list = t;
1222 if (bb->end == insn)
1224 bb->eh_end = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);
1225 i += 1;
1226 if (i == n_basic_blocks)
1227 break;
1228 bb = BASIC_BLOCK (i);
1233 /* Identify critical edges and set the bits appropriately. */
1235 static void
1236 mark_critical_edges ()
1238 int i, n = n_basic_blocks;
1239 basic_block bb;
1241 /* We begin with the entry block. This is not terribly important now,
1242 but could be if a front end (Fortran) implemented alternate entry
1243 points. */
1244 bb = ENTRY_BLOCK_PTR;
1245 i = -1;
1247 while (1)
1249 edge e;
1251 /* (1) Critical edges must have a source with multiple successors. */
1252 if (bb->succ && bb->succ->succ_next)
1254 for (e = bb->succ; e ; e = e->succ_next)
1256 /* (2) Critical edges must have a destination with multiple
1257 predecessors. Note that we know there is at least one
1258 predecessor -- the edge we followed to get here. */
1259 if (e->dest->pred->pred_next)
1260 e->flags |= EDGE_CRITICAL;
1261 else
1262 e->flags &= ~EDGE_CRITICAL;
1265 else
1267 for (e = bb->succ; e ; e = e->succ_next)
1268 e->flags &= ~EDGE_CRITICAL;
1271 if (++i >= n)
1272 break;
1273 bb = BASIC_BLOCK (i);
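
/* A hedged restatement of the two tests above, applied to a single
   edge E (illustrative only):

	int is_critical = (e->src->succ->succ_next != NULL
			   && e->dest->pred->pred_next != NULL);
*/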
1277 /* Split a (typically critical) edge. Return the new block.
1278 Abort on abnormal edges.
1280 ??? The code generally expects to be called on critical edges.
1281 The case of a block ending in an unconditional jump to a
1282 block with multiple predecessors is not handled optimally. */
1284 basic_block
1285 split_edge (edge_in)
1286 edge edge_in;
1288 basic_block old_pred, bb, old_succ;
1289 edge edge_out;
1290 rtx bb_note;
1291 int i, j;
1293 /* Abnormal edges cannot be split. */
1294 if ((edge_in->flags & EDGE_ABNORMAL) != 0)
1295 abort ();
1297 old_pred = edge_in->src;
1298 old_succ = edge_in->dest;
1300 /* Remove the existing edge from the destination's pred list. */
1302 edge *pp;
1303 for (pp = &old_succ->pred; *pp != edge_in; pp = &(*pp)->pred_next)
1304 continue;
1305 *pp = edge_in->pred_next;
1306 edge_in->pred_next = NULL;
1309 /* Create the new structures. */
1310 bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
1311 edge_out = (edge) xcalloc (1, sizeof (*edge_out));
1312 n_edges++;
1314 memset (bb, 0, sizeof (*bb));
1315 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
1316 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);
1318 /* ??? This info is likely going to be out of date very soon. */
1319 if (old_succ->global_live_at_start)
1321 COPY_REG_SET (bb->global_live_at_start, old_succ->global_live_at_start);
1322 COPY_REG_SET (bb->global_live_at_end, old_succ->global_live_at_start);
1324 else
1326 CLEAR_REG_SET (bb->global_live_at_start);
1327 CLEAR_REG_SET (bb->global_live_at_end);
1330 /* Wire them up. */
1331 bb->pred = edge_in;
1332 bb->succ = edge_out;
1334 edge_in->dest = bb;
1335 edge_in->flags &= ~EDGE_CRITICAL;
1337 edge_out->pred_next = old_succ->pred;
1338 edge_out->succ_next = NULL;
1339 edge_out->src = bb;
1340 edge_out->dest = old_succ;
1341 edge_out->flags = EDGE_FALLTHRU;
1342 edge_out->probability = REG_BR_PROB_BASE;
1344 old_succ->pred = edge_out;
1346 /* Tricky case -- if there existed a fallthru into the successor
1347 (and we're not it) we must add a new unconditional jump around
1348 the new block we're actually interested in.
1350 Further, if that edge is critical, this means a second new basic
1351 block must be created to hold it. In order to simplify correct
1352 insn placement, do this before we touch the existing basic block
1353 ordering for the block we were really wanting. */
1354 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1356 edge e;
1357 for (e = edge_out->pred_next; e ; e = e->pred_next)
1358 if (e->flags & EDGE_FALLTHRU)
1359 break;
1361 if (e)
1363 basic_block jump_block;
1364 rtx pos;
1366 if ((e->flags & EDGE_CRITICAL) == 0)
1368 /* Non critical -- we can simply add a jump to the end
1369 of the existing predecessor. */
1370 jump_block = e->src;
1372 else
1374 /* We need a new block to hold the jump. The simplest
1375 way to do the bulk of the work here is to recursively
1376 call ourselves. */
1377 jump_block = split_edge (e);
1378 e = jump_block->succ;
1381 /* Now add the jump insn ... */
1382 pos = emit_jump_insn_after (gen_jump (old_succ->head),
1383 jump_block->end);
1384 jump_block->end = pos;
1385 emit_barrier_after (pos);
1387 /* ... let jump know that label is in use, ... */
1388 JUMP_LABEL (pos) = old_succ->head;
1389 ++LABEL_NUSES (old_succ->head);
1391 /* ... and clear fallthru on the outgoing edge. */
1392 e->flags &= ~EDGE_FALLTHRU;
1394 /* Continue splitting the interesting edge. */
1398 /* Place the new block just in front of the successor. */
1399 VARRAY_GROW (basic_block_info, ++n_basic_blocks);
1400 if (old_succ == EXIT_BLOCK_PTR)
1401 j = n_basic_blocks - 1;
1402 else
1403 j = old_succ->index;
1404 for (i = n_basic_blocks - 1; i > j; --i)
1406 basic_block tmp = BASIC_BLOCK (i - 1);
1407 BASIC_BLOCK (i) = tmp;
1408 tmp->index = i;
1410 BASIC_BLOCK (i) = bb;
1411 bb->index = i;
1413 /* Create the basic block note.
1415 Where we place the note can have a noticeable impact on the generated
1416 code. Consider this cfg:

		E
		|
		0
		|
	       +->1-->2--->E
	       |  |
	       +--+

1427 If we need to insert an insn on the edge from block 0 to block 1,
1428 we want to ensure the instructions we insert are outside of any
1429 loop notes that physically sit between block 0 and block 1. Otherwise
1430 we confuse the loop optimizer into thinking the loop is a phony. */
1431 if (old_succ != EXIT_BLOCK_PTR
1432 && PREV_INSN (old_succ->head)
1433 && GET_CODE (PREV_INSN (old_succ->head)) == NOTE
1434 && NOTE_LINE_NUMBER (PREV_INSN (old_succ->head)) == NOTE_INSN_LOOP_BEG)
1435 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK,
1436 PREV_INSN (old_succ->head));
1437 else if (old_succ != EXIT_BLOCK_PTR)
1438 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, old_succ->head);
1439 else
1440 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
1441 NOTE_BASIC_BLOCK (bb_note) = bb;
1442 bb->head = bb->end = bb_note;
1444 /* Not quite simple -- for non-fallthru edges, we must adjust the
1445 predecessor's jump instruction to target our new block. */
1446 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1448 rtx tmp, insn = old_pred->end;
1449 rtx old_label = old_succ->head;
1450 rtx new_label = gen_label_rtx ();
1452 if (GET_CODE (insn) != JUMP_INSN)
1453 abort ();
1455 /* ??? Recognize a tablejump and adjust all matching cases. */
1456 if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
1457 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
1458 && GET_CODE (tmp) == JUMP_INSN
1459 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
1460 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
1462 rtvec vec;
1463 int j;
1465 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
1466 vec = XVEC (PATTERN (tmp), 0);
1467 else
1468 vec = XVEC (PATTERN (tmp), 1);
1470 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
1471 if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
1473 RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (VOIDmode, new_label);
1474 --LABEL_NUSES (old_label);
1475 ++LABEL_NUSES (new_label);
1478 /* Handle casesi dispatch insns */
1479 if ((tmp = single_set (insn)) != NULL
1480 && SET_DEST (tmp) == pc_rtx
1481 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
1482 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
1483 && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
1485 XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (VOIDmode,
1486 new_label);
1487 --LABEL_NUSES (old_label);
1488 ++LABEL_NUSES (new_label);
1491 else
1493 /* This would have indicated an abnormal edge. */
1494 if (computed_jump_p (insn))
1495 abort ();
1497 /* A return instruction can't be redirected. */
1498 if (returnjump_p (insn))
1499 abort ();
1501 /* If the insn doesn't go where we think, we're confused. */
1502 if (JUMP_LABEL (insn) != old_label)
1503 abort ();
1505 redirect_jump (insn, new_label);
1508 emit_label_before (new_label, bb_note);
1509 bb->head = new_label;
1512 return bb;
1515 /* Queue instructions for insertion on an edge between two basic blocks.
1516 The new instructions and basic blocks (if any) will not appear in the
1517 CFG until commit_edge_insertions is called. */
1519 void
1520 insert_insn_on_edge (pattern, e)
1521 rtx pattern;
1522 edge e;
1524 /* We cannot insert instructions on an abnormal critical edge.
1525 It will be easier to find the culprit if we die now. */
1526 if ((e->flags & (EDGE_ABNORMAL|EDGE_CRITICAL))
1527 == (EDGE_ABNORMAL|EDGE_CRITICAL))
1528 abort ();
1530 if (e->insns == NULL_RTX)
1531 start_sequence ();
1532 else
1533 push_to_sequence (e->insns);
1535 emit_insn (pattern);
1537 e->insns = get_insns ();
1538 end_sequence ();
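
/* A hedged sketch of the intended usage pattern; DEST, SRC and E are
   illustrative:

	insert_insn_on_edge (gen_rtx_SET (VOIDmode, dest, src), e);
	...
	commit_edge_insertions ();

   Any number of insns may be queued on any number of edges before a
   single commit_edge_insertions call materializes them all.  */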
1541 /* Update the CFG for the instructions queued on edge E. */
1543 static void
1544 commit_one_edge_insertion (e)
1545 edge e;
1547 rtx before = NULL_RTX, after = NULL_RTX, tmp;
1548 basic_block bb;
1550 /* Figure out where to put these things. If the destination has
1551 one predecessor, insert there. Except for the exit block. */
1552 if (e->dest->pred->pred_next == NULL
1553 && e->dest != EXIT_BLOCK_PTR)
1555 bb = e->dest;
1557 /* Get the location correct wrt a code label, and "nice" wrt
1558 a basic block note, and before everything else. */
1559 tmp = bb->head;
1560 if (GET_CODE (tmp) == CODE_LABEL)
1561 tmp = NEXT_INSN (tmp);
1562 if (GET_CODE (tmp) == NOTE
1563 && NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BASIC_BLOCK)
1564 tmp = NEXT_INSN (tmp);
1565 if (tmp == bb->head)
1566 before = tmp;
1567 else
1568 after = PREV_INSN (tmp);
1571 /* If the source has one successor and the edge is not abnormal,
1572 insert there. Except for the entry block. */
1573 else if ((e->flags & EDGE_ABNORMAL) == 0
1574 && e->src->succ->succ_next == NULL
1575 && e->src != ENTRY_BLOCK_PTR)
1577 bb = e->src;
1578 if (GET_CODE (bb->end) == JUMP_INSN)
1580 /* ??? Is it possible to wind up with non-simple jumps? Perhaps
1581 a jump with delay slots already filled? */
1582 if (! simplejump_p (bb->end))
1583 abort ();
1585 before = bb->end;
1587 else
1589 /* We'd better be fallthru, or we've lost track of what's what. */
1590 if ((e->flags & EDGE_FALLTHRU) == 0)
1591 abort ();
1593 after = bb->end;
1597 /* Otherwise we must split the edge. */
1598 else
1600 bb = split_edge (e);
1601 after = bb->end;
1604 /* Now that we've found the spot, do the insertion. */
1605 tmp = e->insns;
1606 e->insns = NULL_RTX;
1608 /* Set the new block number for these insns, if structure is allocated. */
1609 if (basic_block_for_insn)
1611 rtx i;
1612 for (i = tmp; i != NULL_RTX; i = NEXT_INSN (i))
1613 set_block_for_insn (i, bb);
1616 if (before)
1618 emit_insns_before (tmp, before);
1619 if (before == bb->head)
1620 bb->head = tmp;
1622 else
1624 tmp = emit_insns_after (tmp, after);
1625 if (after == bb->end)
1626 bb->end = tmp;
1630 /* Update the CFG for all queued instructions. */
1632 void
1633 commit_edge_insertions ()
1635 int i;
1636 basic_block bb;
1638 i = -1;
1639 bb = ENTRY_BLOCK_PTR;
1640 while (1)
1642 edge e, next;
1644 for (e = bb->succ; e ; e = next)
1646 next = e->succ_next;
1647 if (e->insns)
1648 commit_one_edge_insertion (e);
1651 if (++i >= n_basic_blocks)
1652 break;
1653 bb = BASIC_BLOCK (i);
1657 /* Delete all unreachable basic blocks. */
1659 static void
1660 delete_unreachable_blocks ()
1662 basic_block *worklist, *tos;
1663 int deleted_handler;
1664 edge e;
1665 int i, n;
1667 n = n_basic_blocks;
1668 tos = worklist = (basic_block *) xmalloc (sizeof (basic_block) * n);
1670 /* Use basic_block->aux as a marker. Clear them all. */
1672 for (i = 0; i < n; ++i)
1673 BASIC_BLOCK (i)->aux = NULL;
1675 /* Add our starting points to the worklist. Almost always there will
1676 be only one. It isn't inconceivable that we might one day directly
1677 support Fortran alternate entry points. */
1679 for (e = ENTRY_BLOCK_PTR->succ; e ; e = e->succ_next)
1681 *tos++ = e->dest;
1683 /* Mark the block with a handy non-null value. */
1684 e->dest->aux = e;
1687 /* Iterate: find everything reachable from what we've already seen. */
1689 while (tos != worklist)
1691 basic_block b = *--tos;
1693 for (e = b->succ; e ; e = e->succ_next)
1694 if (!e->dest->aux)
1696 *tos++ = e->dest;
1697 e->dest->aux = e;
1701 /* Delete all unreachable basic blocks. Count down so that we don't
1702 interfere with the block renumbering that happens in delete_block. */
1704 deleted_handler = 0;
1706 for (i = n - 1; i >= 0; --i)
1708 basic_block b = BASIC_BLOCK (i);
1710 if (b->aux != NULL)
1711 /* This block was found. Tidy up the mark. */
1712 b->aux = NULL;
1713 else
1714 deleted_handler |= delete_block (b);
1717 /* Fix up edges that now fall through, or rather should now fall through
1718 but previously required a jump around now deleted blocks. Simplify
1719 the search by only examining blocks numerically adjacent, since this
1720 is how find_basic_blocks created them. */
1722 for (i = 1; i < n_basic_blocks; ++i)
1724 basic_block b = BASIC_BLOCK (i - 1);
1725 basic_block c = BASIC_BLOCK (i);
1726 edge s;
1728 /* We care about simple conditional or unconditional jumps with
1729 a single successor.
1731 If we had a conditional branch to the next instruction when
1732 find_basic_blocks was called, then there will only be one
1733 out edge for the block which ended with the conditional
1734 branch (since we do not create duplicate edges).
1736 Furthermore, the edge will be marked as a fallthru because we
1737 merge the flags for the duplicate edges. So we do not want to
1738 check that the edge is not a FALLTHRU edge. */
1739 if ((s = b->succ) != NULL
1740 && s->succ_next == NULL
1741 && s->dest == c
1742 /* If the jump insn has side effects, we can't tidy the edge. */
1743 && (GET_CODE (b->end) != JUMP_INSN
1744 || onlyjump_p (b->end)))
1745 tidy_fallthru_edge (s, b, c);
1748 /* If we deleted an exception handler, we may have EH region begin/end
1749 blocks to remove as well. */
1750 if (deleted_handler)
1751 delete_eh_regions ();
1753 free (worklist);
1756 /* Find EH regions for which there is no longer a handler, and delete them. */
1758 static void
1759 delete_eh_regions ()
1761 rtx insn;
1763 update_rethrow_references ();
1765 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1766 if (GET_CODE (insn) == NOTE)
1768 if ((NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG) ||
1769 (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
1771 int num = NOTE_EH_HANDLER (insn);
1772 /* A NULL handler indicates a region is no longer needed,
1773 as long as it isn't the target of a rethrow. */
1774 if (get_first_handler (num) == NULL && ! rethrow_used (num))
1776 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1777 NOTE_SOURCE_FILE (insn) = 0;
1783 /* Return true if NOTE is not one of the ones that must be kept paired,
1784 so that we may simply delete them. */
1786 static int
1787 can_delete_note_p (note)
1788 rtx note;
1790 return (NOTE_LINE_NUMBER (note) == NOTE_INSN_DELETED
1791 || NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK);
1794 /* Unlink a chain of insns between START and FINISH, leaving notes
1795 that must be paired. */
1797 void
1798 flow_delete_insn_chain (start, finish)
1799 rtx start, finish;
1801 /* Unchain the insns one by one. It would be quicker to delete all
1802 of these with a single unchaining, rather than one at a time, but
1803 we need to keep the NOTEs. */
1805 rtx next;
1807 while (1)
1809 next = NEXT_INSN (start);
1810 if (GET_CODE (start) == NOTE && !can_delete_note_p (start))
1812 else if (GET_CODE (start) == CODE_LABEL && !can_delete_label_p (start))
1814 else
1815 next = flow_delete_insn (start);
1817 if (start == finish)
1818 break;
1819 start = next;
1823 /* Delete the insns in a (non-live) block. We physically delete every
1824 non-deleted-note insn, and update the flow graph appropriately.
1826 Return nonzero if we deleted an exception handler. */
1828 /* ??? Preserving all such notes strikes me as wrong. It would be nice
1829 to post-process the stream to remove empty blocks, loops, ranges, etc. */
1831 static int
1832 delete_block (b)
1833 basic_block b;
1835 int deleted_handler = 0;
1836 rtx insn, end;
1838 /* If the head of this block is a CODE_LABEL, then it might be the
1839 label for an exception handler which can't be reached.
1841 We need to remove the label from the exception_handler_label list
1842 and remove the associated NOTE_INSN_EH_REGION_BEG and
1843 NOTE_INSN_EH_REGION_END notes. */
1845 insn = b->head;
1847 never_reached_warning (insn);
1849 if (GET_CODE (insn) == CODE_LABEL)
1851 rtx x, *prev = &exception_handler_labels;
1853 for (x = exception_handler_labels; x; x = XEXP (x, 1))
1855 if (XEXP (x, 0) == insn)
1857 /* Found a match, splice this label out of the EH label list. */
1858 *prev = XEXP (x, 1);
1859 XEXP (x, 1) = NULL_RTX;
1860 XEXP (x, 0) = NULL_RTX;
1862 /* Remove the handler from all regions */
1863 remove_handler (insn);
1864 deleted_handler = 1;
1865 break;
1867 prev = &XEXP (x, 1);
1870 /* This label may be referenced by code solely for its value, or
1871 referenced by static data, or something. We have determined
1872 that it is not reachable, but cannot delete the label itself.
1873 Save code space and continue to delete the balance of the block,
1874 along with properly updating the cfg. */
1875 if (!can_delete_label_p (insn))
1877 /* If we've only got one of these, skip the whole deleting
1878 insns thing. */
1879 if (insn == b->end)
1880 goto no_delete_insns;
1881 insn = NEXT_INSN (insn);
1885 /* Selectively unlink the insn chain. Include any BARRIER that may
1886 follow the basic block. */
1887 end = next_nonnote_insn (b->end);
1888 if (!end || GET_CODE (end) != BARRIER)
1889 end = b->end;
1890 flow_delete_insn_chain (insn, end);
1892 no_delete_insns:
1894 /* Remove the edges into and out of this block. Note that there may
1895 indeed be edges in, if we are removing an unreachable loop. */
1897 edge e, next, *q;
1899 for (e = b->pred; e ; e = next)
1901 for (q = &e->src->succ; *q != e; q = &(*q)->succ_next)
1902 continue;
1903 *q = e->succ_next;
1904 next = e->pred_next;
1905 n_edges--;
1906 free (e);
1908 for (e = b->succ; e ; e = next)
1910 for (q = &e->dest->pred; *q != e; q = &(*q)->pred_next)
1911 continue;
1912 *q = e->pred_next;
1913 next = e->succ_next;
1914 n_edges--;
1915 free (e);
1918 b->pred = NULL;
1919 b->succ = NULL;
1922 /* Remove the basic block from the array, and compact behind it. */
1923 expunge_block (b);
1925 return deleted_handler;
1928 /* Remove block B from the basic block array and compact behind it. */
1930 static void
1931 expunge_block (b)
1932 basic_block b;
1934 int i, n = n_basic_blocks;
1936 for (i = b->index; i + 1 < n; ++i)
1938 basic_block x = BASIC_BLOCK (i + 1);
1939 BASIC_BLOCK (i) = x;
1940 x->index = i;
1943 basic_block_info->num_elements--;
1944 n_basic_blocks--;
1947 /* Delete INSN by patching it out. Return the next insn. */
1949 static rtx
1950 flow_delete_insn (insn)
1951 rtx insn;
1953 rtx prev = PREV_INSN (insn);
1954 rtx next = NEXT_INSN (insn);
1956 PREV_INSN (insn) = NULL_RTX;
1957 NEXT_INSN (insn) = NULL_RTX;
1959 if (prev)
1960 NEXT_INSN (prev) = next;
1961 if (next)
1962 PREV_INSN (next) = prev;
1963 else
1964 set_last_insn (prev);
1966 if (GET_CODE (insn) == CODE_LABEL)
1967 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
1969 /* If deleting a jump, decrement the use count of the label. Deleting
1970 the label itself should happen in the normal course of block merging. */
1971 if (GET_CODE (insn) == JUMP_INSN && JUMP_LABEL (insn))
1972 LABEL_NUSES (JUMP_LABEL (insn))--;
1974 return next;
1977 /* True if a given label can be deleted. */
1979 static int
1980 can_delete_label_p (label)
1981 rtx label;
1983 rtx x;
1985 if (LABEL_PRESERVE_P (label))
1986 return 0;
1988 for (x = forced_labels; x ; x = XEXP (x, 1))
1989 if (label == XEXP (x, 0))
1990 return 0;
1991 for (x = label_value_list; x ; x = XEXP (x, 1))
1992 if (label == XEXP (x, 0))
1993 return 0;
1994 for (x = exception_handler_labels; x ; x = XEXP (x, 1))
1995 if (label == XEXP (x, 0))
1996 return 0;
1998 /* User declared labels must be preserved. */
1999 if (LABEL_NAME (label) != 0)
2000 return 0;
2002 return 1;
2005 /* Blocks A and B are to be merged into a single block. A has no incoming
2006 fallthru edge, so it can be moved before B without adding or modifying
2007 any jumps (aside from the jump from A to B). */
2009 static int
2010 merge_blocks_move_predecessor_nojumps (a, b)
2011 basic_block a, b;
2013 rtx start, end, barrier;
2014 int index;
2016 start = a->head;
2017 end = a->end;
2019 /* We want to delete the BARRIER after the end of the insns we are
2020 going to move. If we don't find a BARRIER, then do nothing. This
2021 can happen in some cases if we have labels we can not delete.
2023 Similarly, do nothing if we can not delete the label at the start
2024 of the target block. */
2025 barrier = next_nonnote_insn (end);
2026 if (GET_CODE (barrier) != BARRIER
2027 || (GET_CODE (b->head) == CODE_LABEL
2028 && ! can_delete_label_p (b->head)))
2029 return 0;
2030 else
2031 flow_delete_insn (barrier);
2033 /* Move block and loop notes out of the chain so that we do not
2034 disturb their order.
2036 ??? A better solution would be to squeeze out all the non-nested notes
2037 and adjust the block trees appropriately. Even better would be to have
2038 a tighter connection between block trees and rtl so that this is not
2039 necessary. */
2040 start = squeeze_notes (start, end);
2042 /* Scramble the insn chain. */
2043 if (end != PREV_INSN (b->head))
2044 reorder_insns (start, end, PREV_INSN (b->head));
2046 if (rtl_dump_file)
2048 fprintf (rtl_dump_file, "Moved block %d before %d and merged.\n",
2049 a->index, b->index);
2052 /* Swap the records for the two blocks around. Although we are deleting B,
2053 A is now where B was and we want to compact the BB array from where
2054 A used to be. */
2055 BASIC_BLOCK (a->index) = b;
2056 BASIC_BLOCK (b->index) = a;
2057 index = a->index;
2058 a->index = b->index;
2059 b->index = index;
2061 /* Now blocks A and B are contiguous. Merge them. */
2062 merge_blocks_nomove (a, b);
2064 return 1;
2067 /* Blocks A and B are to be merged into a single block. B has no outgoing
2068 fallthru edge, so it can be moved after A without adding or modifying
2069 any jumps (aside from the jump from A to B). */
2071 static int
2072 merge_blocks_move_successor_nojumps (a, b)
2073 basic_block a, b;
2075 rtx start, end, barrier;
2077 start = b->head;
2078 end = b->end;
2080 /* We want to delete the BARRIER after the end of the insns we are
2081 going to move. If we don't find a BARRIER, then do nothing. This
2082 can happen in some cases if we have labels we can not delete.
2084 Similarly, do nothing if we can not delete the label at the start
2085 of the target block. */
2086 barrier = next_nonnote_insn (end);
2087 if (GET_CODE (barrier) != BARRIER
2088 || (GET_CODE (b->head) == CODE_LABEL
2089 && ! can_delete_label_p (b->head)))
2090 return 0;
2091 else
2092 flow_delete_insn (barrier);
2094 /* Move block and loop notes out of the chain so that we do not
2095 disturb their order.
2097 ??? A better solution would be to squeeze out all the non-nested notes
2098 and adjust the block trees appropriately. Even better would be to have
2099 a tighter connection between block trees and rtl so that this is not
2100 necessary. */
2101 start = squeeze_notes (start, end);
2103 /* Scramble the insn chain. */
2104 reorder_insns (start, end, a->end);
2106 /* Now blocks A and B are contiguous. Merge them. */
2107 merge_blocks_nomove (a, b);
2109 if (rtl_dump_file)
2111 fprintf (rtl_dump_file, "Moved block %d after %d and merged.\n",
2112 b->index, a->index);
2115 return 1;
2118 /* Blocks A and B are to be merged into a single block. The insns
2119 are already contiguous, hence `nomove'. */
2121 static void
2122 merge_blocks_nomove (a, b)
2123 basic_block a, b;
2125 edge e;
2126 rtx b_head, b_end, a_end;
2127 int b_empty = 0;
2129 /* If there was a CODE_LABEL beginning B, delete it. */
2130 b_head = b->head;
2131 b_end = b->end;
2132 if (GET_CODE (b_head) == CODE_LABEL)
2134 /* Detect basic blocks with nothing but a label. This can happen
2135 in particular at the end of a function. */
2136 if (b_head == b_end)
2137 b_empty = 1;
2138 b_head = flow_delete_insn (b_head);
2141 /* Delete the basic block note. */
2142 if (GET_CODE (b_head) == NOTE
2143 && NOTE_LINE_NUMBER (b_head) == NOTE_INSN_BASIC_BLOCK)
2145 if (b_head == b_end)
2146 b_empty = 1;
2147 b_head = flow_delete_insn (b_head);
2150 /* If there was a jump out of A, delete it. */
2151 a_end = a->end;
2152 if (GET_CODE (a_end) == JUMP_INSN)
2154 rtx prev;
2156 prev = prev_nonnote_insn (a_end);
2157 if (!prev)
2158 prev = a->head;
2160 #ifdef HAVE_cc0
2161 /* If this was a conditional jump, we need to also delete
2162 the insn that set cc0. */
2164 if (prev && sets_cc0_p (prev))
2166 rtx tmp = prev;
2167 prev = prev_nonnote_insn (prev);
2168 if (!prev)
2169 prev = a->head;
2170 flow_delete_insn (tmp);
2172 #endif
2174 /* Note that a->head != a->end, since we should have at least a
2175 bb note plus the jump, so prev != a_end. */
2176 flow_delete_insn (a_end);
2177 a_end = prev;
2180 /* By definition, there should only be one successor of A, and that is
2181 B. Free that edge struct. */
2182 n_edges--;
2183 free (a->succ);
2185 /* Adjust the edges out of B for the new owner. */
2186 for (e = b->succ; e ; e = e->succ_next)
2187 e->src = a;
2188 a->succ = b->succ;
2190 /* Reassociate the insns of B with A. */
2191 if (!b_empty)
2193 BLOCK_FOR_INSN (b_head) = a;
2194 while (b_head != b_end)
2196 b_head = NEXT_INSN (b_head);
2197 BLOCK_FOR_INSN (b_head) = a;
2199 a_end = b_head;
2201 a->end = a_end;
2203 /* Compact the basic block array. */
2204 expunge_block (b);
2207 /* Attempt to merge basic blocks that are potentially non-adjacent.
2208 Return true iff the attempt succeeded. */
2210 static int
2211 merge_blocks (e, b, c)
2212 edge e;
2213 basic_block b, c;
2215 /* If B has a fallthru edge to C, no need to move anything. */
2216 if (e->flags & EDGE_FALLTHRU)
2218 /* If a label still appears somewhere and we cannot delete the label,
2219 then we cannot merge the blocks. The edge was tidied already. */
2221 rtx insn, stop = NEXT_INSN (c->head);
2222 for (insn = NEXT_INSN (b->end); insn != stop; insn = NEXT_INSN (insn))
2223 if (GET_CODE (insn) == CODE_LABEL && !can_delete_label_p (insn))
2224 return 0;
2226 merge_blocks_nomove (b, c);
2228 if (rtl_dump_file)
2230 fprintf (rtl_dump_file, "Merged %d and %d without moving.\n",
2231 b->index, c->index);
2234 return 1;
2236 else
2238 edge tmp_edge;
2239 basic_block d;
2240 int c_has_outgoing_fallthru;
2241 int b_has_incoming_fallthru;
2243 /* We must make sure to not munge nesting of exception regions,
2244 lexical blocks, and loop notes.
2246 The first is taken care of by requiring that the active eh
2247 region at the end of one block always matches the active eh
2248 region at the beginning of the next block.
2250 The latter two are taken care of by squeezing out all the notes. */
2252 /* ??? A throw/catch edge (or any abnormal edge) should be rarely
2253 executed and we may want to treat blocks which have two out
2254 edges, one normal, one abnormal, as only having one edge for
2255 block merging purposes. */
2257 for (tmp_edge = c->succ; tmp_edge ; tmp_edge = tmp_edge->succ_next)
2258 if (tmp_edge->flags & EDGE_FALLTHRU)
2259 break;
2260 c_has_outgoing_fallthru = (tmp_edge != NULL);
2262 for (tmp_edge = b->pred; tmp_edge ; tmp_edge = tmp_edge->pred_next)
2263 if (tmp_edge->flags & EDGE_FALLTHRU)
2264 break;
2265 b_has_incoming_fallthru = (tmp_edge != NULL);
2267 /* If B does not have an incoming fallthru, and the exception regions
2268 match, then it can be moved immediately before C without introducing
2269 or modifying jumps.
2271 C can not be the first block, so we do not have to worry about
2272 accessing a non-existent block. */
2273 d = BASIC_BLOCK (c->index - 1);
2274 if (! b_has_incoming_fallthru
2275 && d->eh_end == b->eh_beg
2276 && b->eh_end == c->eh_beg)
2277 return merge_blocks_move_predecessor_nojumps (b, c);
2279 /* Otherwise, we're going to try to move C after B. Make sure the
2280 exception regions match.
2282 If B is the last basic block, then we must not try to access the
2283 block structure for block B + 1. Luckily in that case we do not
2284 need to worry about matching exception regions. */
2285 d = (b->index + 1 < n_basic_blocks ? BASIC_BLOCK (b->index + 1) : NULL);
2286 if (b->eh_end == c->eh_beg
2287 && (d == NULL || c->eh_end == d->eh_beg))
2289 /* If C does not have an outgoing fallthru, then it can be moved
2290 immediately after B without introducing or modifying jumps. */
2291 if (! c_has_outgoing_fallthru)
2292 return merge_blocks_move_successor_nojumps (b, c);
2294 /* Otherwise, we'll need to insert an extra jump, and possibly
2295 a new block to contain it. */
2296 /* ??? Not implemented yet. */
2299 return 0;
2303 /* Top level driver for merge_blocks. */
2305 static void
2306 try_merge_blocks ()
2308 int i;
2310 /* Attempt to merge blocks as made possible by edge removal. If a block
2311 has only one successor, and the successor has only one predecessor,
2312 they may be combined. */
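/* For example (illustrative only): given a chain B -> C -> D in which
   each edge is the sole successor of its source and the sole
   predecessor of its destination, the WHILE loop below folds C into B
   and then D into the merged block before I is advanced.  */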
2314 for (i = 0; i < n_basic_blocks; )
2316 basic_block c, b = BASIC_BLOCK (i);
2317 edge s;
2319 /* A loop because chains of blocks might be combinable. */
2320 while ((s = b->succ) != NULL
2321 && s->succ_next == NULL
2322 && (s->flags & EDGE_EH) == 0
2323 && (c = s->dest) != EXIT_BLOCK_PTR
2324 && c->pred->pred_next == NULL
2325 /* If the jump insn has side effects, we can't kill the edge. */
2326 && (GET_CODE (b->end) != JUMP_INSN
2327 || onlyjump_p (b->end))
2328 && merge_blocks (s, b, c))
2329 continue;
2331 /* Don't get confused by the index shift caused by deleting blocks. */
2332 i = b->index + 1;
2336 /* The given edge should potentially be a fallthru edge. If that is in
2337 fact true, delete the unconditional jump and barriers that are in
2338 the way. */
2340 static void
2341 tidy_fallthru_edge (e, b, c)
2342 edge e;
2343 basic_block b, c;
2345 rtx q;
2347 /* ??? In a late-running flow pass, other folks may have deleted basic
2348 blocks by nopping out blocks, leaving multiple BARRIERs between here
2349 and the target label. They ought to be chastised and fixed.
2351 We can also wind up with a sequence of undeletable labels between
2352 one block and the next.
2354 So search through a sequence of barriers, labels, and notes for
2355 the head of block C and assert that we really do fall through. */
2357 if (next_real_insn (b->end) != next_real_insn (PREV_INSN (c->head)))
2358 return;
2360 /* Remove what will soon cease being the jump insn from the source block.
2361 If block B consisted only of this single jump, turn it into a deleted
2362 note. */
2363 q = b->end;
2364 if (GET_CODE (q) == JUMP_INSN)
2366 #ifdef HAVE_cc0
2367 /* If this was a conditional jump, we need to also delete
2368 the insn that set cc0. */
2369 if (! simplejump_p (q) && condjump_p (q) && sets_cc0_p (PREV_INSN (q)))
2370 q = PREV_INSN (q);
2371 #endif
2373 if (b->head == q)
2375 PUT_CODE (q, NOTE);
2376 NOTE_LINE_NUMBER (q) = NOTE_INSN_DELETED;
2377 NOTE_SOURCE_FILE (q) = 0;
2379 else
2380 b->end = q = PREV_INSN (q);
2383 /* Selectively unlink the sequence. */
2384 if (q != PREV_INSN (c->head))
2385 flow_delete_insn_chain (NEXT_INSN (q), PREV_INSN (c->head));
2387 e->flags |= EDGE_FALLTHRU;
2390 /* Discover and record the loop depth at the head of each basic block. */
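/* A sketch of the bookkeeping below (depth starts at 1, so reaching 0
   means the loop notes are unbalanced):

	NOTE_INSN_LOOP_BEG	depth 1 -> 2
	  NOTE_INSN_LOOP_BEG	depth 2 -> 3
	  NOTE_INSN_LOOP_END	depth 3 -> 2
	NOTE_INSN_LOOP_END	depth 2 -> 1

   A block whose head falls between the two BEG notes records
   loop_depth 2.  */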
2392 static void
2393 calculate_loop_depth (insns)
2394 rtx insns;
2396 basic_block bb;
2397 rtx insn;
2398 int i = 0, depth = 1;
2400 bb = BASIC_BLOCK (i);
2401 for (insn = insns; insn ; insn = NEXT_INSN (insn))
2403 if (insn == bb->head)
2405 bb->loop_depth = depth;
2406 if (++i >= n_basic_blocks)
2407 break;
2408 bb = BASIC_BLOCK (i);
2411 if (GET_CODE (insn) == NOTE)
2413 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
2414 depth++;
2415 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END)
2416 depth--;
2418 /* If we have LOOP_DEPTH == 0, there has been a bookkeeping error. */
2419 if (depth == 0)
2420 abort ();
2425 /* Perform data flow analysis.
2426 F is the first insn of the function and NREGS the number of register numbers
2427 in use. */
2429 void
2430 life_analysis (f, nregs, file, remove_dead_code)
2431 rtx f;
2432 int nregs;
2433 FILE *file;
2434 int remove_dead_code;
2436 #ifdef ELIMINABLE_REGS
2437 register size_t i;
2438 static struct {int from, to; } eliminables[] = ELIMINABLE_REGS;
2439 #endif
2440 int flags;
2442 /* Record which registers will be eliminated. We use this in
2443 mark_used_regs. */
2445 CLEAR_HARD_REG_SET (elim_reg_set);
2447 #ifdef ELIMINABLE_REGS
2448 for (i = 0; i < sizeof eliminables / sizeof eliminables[0]; i++)
2449 SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
2450 #else
2451 SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
2452 #endif
2454 /* Allocate a bitmap to be filled in by record_volatile_insns. */
2455 uid_volatile = BITMAP_XMALLOC ();
2457 /* We want alias analysis information for local dead store elimination. */
2458 init_alias_analysis ();
2460 flags = PROP_FINAL;
2461 if (! remove_dead_code)
2462 flags &= ~(PROP_SCAN_DEAD_CODE | PROP_KILL_DEAD_CODE);
2463 life_analysis_1 (f, nregs, flags);
2465 if (! reload_completed)
2466 mark_constant_function ();
2468 end_alias_analysis ();
2470 if (file)
2471 dump_flow_info (file);
2473 BITMAP_XFREE (uid_volatile);
2474 free_basic_block_vars (1);
2477 /* A subroutine of verify_wide_reg, called through for_each_rtx.
2478 Search for REGNO. If found, abort if it is not wider than word_mode. */
2480 static int
2481 verify_wide_reg_1 (px, pregno)
2482 rtx *px;
2483 void *pregno;
2485 rtx x = *px;
2486 int regno = *(int *) pregno;
2488 if (GET_CODE (x) == REG && REGNO (x) == regno)
2490 if (GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD)
2491 abort ();
2492 return 1;
2494 return 0;
2497 /* A subroutine of verify_local_live_at_start. Search through insns
2498 between HEAD and END looking for register REGNO. */
2500 static void
2501 verify_wide_reg (regno, head, end)
2502 int regno;
2503 rtx head, end;
2505 while (1)
2507 if (GET_RTX_CLASS (GET_CODE (head)) == 'i'
2508 && for_each_rtx (&PATTERN (head), verify_wide_reg_1, &regno))
2509 return;
2510 if (head == end)
2511 break;
2512 head = NEXT_INSN (head);
2515 /* We didn't find the register at all. Something's way screwy. */
2516 abort ();
2519 /* A subroutine of update_life_info. Verify that there are no untoward
2520 changes in live_at_start during a local update. */
2522 static void
2523 verify_local_live_at_start (new_live_at_start, bb)
2524 regset new_live_at_start;
2525 basic_block bb;
2527 if (reload_completed)
2529 /* After reload, there are no pseudos, nor subregs of multi-word
2530 registers. The regsets should exactly match. */
2531 if (! REG_SET_EQUAL_P (new_live_at_start, bb->global_live_at_start))
2532 abort ();
2534 else
2536 int i;
2538 /* Find the set of changed registers. */
2539 XOR_REG_SET (new_live_at_start, bb->global_live_at_start);
2541 EXECUTE_IF_SET_IN_REG_SET (new_live_at_start, 0, i,
2543 /* No registers should die. */
2544 if (REGNO_REG_SET_P (bb->global_live_at_start, i))
2545 abort ();
2546 /* Verify that the now-live register is wider than word_mode. */
2547 verify_wide_reg (i, bb->head, bb->end);
2552 /* Updates death notes starting with the basic blocks set in BLOCKS.
2554 If EXTENT is UPDATE_LIFE_LOCAL, such as after splitting or peepholing,
2554 we are only
2555 expecting local modifications to basic blocks. If we find extra
2556 registers live at the beginning of a block, then we either killed
2557 useful data, or we have a broken split that wants data not provided.
2558 If we find registers removed from live_at_start, that means we have
2559 a broken peephole that is killing a register it shouldn't.
2561 ??? This is not true in one situation -- when a pre-reload splitter
2562 generates subregs of a multi-word pseudo, current life analysis will
2563 lose the kill. So we _can_ have a pseudo go live. How irritating.
2565 BLOCK_FOR_INSN is assumed to be correct.
2567 ??? PROP_FLAGS should not contain PROP_LOG_LINKS. Need to set up
2568 reg_next_use for that. Including PROP_REG_INFO does not refresh
2569 regs_ever_live unless the caller resets it to zero. */
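/* A hypothetical caller, after making local changes to the insns of a
   single block (MODIFIED_BB is an invented name for illustration):

	sbitmap blocks = sbitmap_alloc (n_basic_blocks);
	sbitmap_zero (blocks);
	SET_BIT (blocks, modified_bb->index);
	update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
	sbitmap_free (blocks);  */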
2571 void
2572 update_life_info (blocks, extent, prop_flags)
2573 sbitmap blocks;
2574 enum update_life_extent extent;
2575 int prop_flags;
2577 regset tmp;
2578 int i;
2580 tmp = ALLOCA_REG_SET ();
2582 /* For a global update, we go through the relaxation process again. */
2583 if (extent != UPDATE_LIFE_LOCAL)
2585 calculate_global_regs_live (blocks, blocks,
2586 prop_flags & PROP_SCAN_DEAD_CODE);
2588 /* If asked, remove notes from the blocks we'll update. */
2589 if (extent == UPDATE_LIFE_GLOBAL_RM_NOTES)
2590 count_or_remove_death_notes (blocks, 1);
2593 EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
2595 basic_block bb = BASIC_BLOCK (i);
2597 COPY_REG_SET (tmp, bb->global_live_at_end);
2598 propagate_block (tmp, bb->head, bb->end, (regset) NULL, i,
2599 prop_flags);
2601 if (extent == UPDATE_LIFE_LOCAL)
2602 verify_local_live_at_start (tmp, bb);
2605 FREE_REG_SET (tmp);
2608 /* Free the variables allocated by find_basic_blocks.
2610 KEEP_HEAD_END_P is non-zero if basic_block_info is not to be freed. */
2612 void
2613 free_basic_block_vars (keep_head_end_p)
2614 int keep_head_end_p;
2616 if (basic_block_for_insn)
2618 VARRAY_FREE (basic_block_for_insn);
2619 basic_block_for_insn = NULL;
2622 if (! keep_head_end_p)
2624 clear_edges ();
2625 VARRAY_FREE (basic_block_info);
2626 n_basic_blocks = 0;
2628 ENTRY_BLOCK_PTR->aux = NULL;
2629 ENTRY_BLOCK_PTR->global_live_at_end = NULL;
2630 EXIT_BLOCK_PTR->aux = NULL;
2631 EXIT_BLOCK_PTR->global_live_at_start = NULL;
2635 /* Return nonzero if the destination of SET equals the source. */
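/* For instance (illustrative RTL; the modes are immaterial here),
   (set (reg:SI 3) (reg:SI 3)) is a no-op, as is
   (set (subreg:SI (reg:DI 100) 0) (subreg:SI (reg:DI 100) 0));
   a SUBREG pair with different SUBREG_WORDs is not.  */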
2636 static int
2637 set_noop_p (set)
2638 rtx set;
2640 rtx src = SET_SRC (set);
2641 rtx dst = SET_DEST (set);
2642 if (GET_CODE (src) == REG && GET_CODE (dst) == REG
2643 && REGNO (src) == REGNO (dst))
2644 return 1;
2645 if (GET_CODE (src) != SUBREG || GET_CODE (dst) != SUBREG
2646 || SUBREG_WORD (src) != SUBREG_WORD (dst))
2647 return 0;
2648 src = SUBREG_REG (src);
2649 dst = SUBREG_REG (dst);
2650 if (GET_CODE (src) == REG && GET_CODE (dst) == REG
2651 && REGNO (src) == REGNO (dst))
2652 return 1;
2653 return 0;
2656 /* Return nonzero if an insn consists only of SETs, each of which only sets a
2657 value to itself. */
2658 static int
2659 noop_move_p (insn)
2660 rtx insn;
2662 rtx pat = PATTERN (insn);
2664 /* Insns carrying these notes are useful later on. */
2665 if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
2666 return 0;
2668 if (GET_CODE (pat) == SET && set_noop_p (pat))
2669 return 1;
2671 if (GET_CODE (pat) == PARALLEL)
2673 int i;
2674 /* If nothing but SETs of registers to themselves,
2675 this insn can also be deleted. */
2676 for (i = 0; i < XVECLEN (pat, 0); i++)
2678 rtx tem = XVECEXP (pat, 0, i);
2680 if (GET_CODE (tem) == USE
2681 || GET_CODE (tem) == CLOBBER)
2682 continue;
2684 if (GET_CODE (tem) != SET || ! set_noop_p (tem))
2685 return 0;
2688 return 1;
2690 return 0;
2693 static void
2694 notice_stack_pointer_modification (x, pat, data)
2695 rtx x;
2696 rtx pat ATTRIBUTE_UNUSED;
2697 void *data ATTRIBUTE_UNUSED;
2699 if (x == stack_pointer_rtx
2700 /* The stack pointer is only modified indirectly as the result
2701 of a push until later in flow. See the comments in rtl.texi
2702 regarding Embedded Side-Effects on Addresses. */
2703 || (GET_CODE (x) == MEM
2704 && (GET_CODE (XEXP (x, 0)) == PRE_DEC
2705 || GET_CODE (XEXP (x, 0)) == PRE_INC
2706 || GET_CODE (XEXP (x, 0)) == POST_DEC
2707 || GET_CODE (XEXP (x, 0)) == POST_INC)
2708 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx))
2709 current_function_sp_is_unchanging = 0;
2712 /* Record which insns refer to any volatile memory
2713 or for any reason can't be deleted just because they are dead stores.
2714 Also, delete any insns that copy a register to itself.
2715 And see if the stack pointer is modified. */
2716 static void
2717 record_volatile_insns (f)
2718 rtx f;
2720 rtx insn;
2721 for (insn = f; insn; insn = NEXT_INSN (insn))
2723 enum rtx_code code1 = GET_CODE (insn);
2724 if (code1 == CALL_INSN)
2725 SET_INSN_VOLATILE (insn);
2726 else if (code1 == INSN || code1 == JUMP_INSN)
2728 if (GET_CODE (PATTERN (insn)) != USE
2729 && volatile_refs_p (PATTERN (insn)))
2730 SET_INSN_VOLATILE (insn);
2732 /* A SET that makes space on the stack cannot be dead.
2733 (Such SETs occur only for allocating variable-size data,
2734 so they will always have a PLUS or MINUS according to the
2735 direction of stack growth.)
2736 Even if this function never uses this stack pointer value,
2737 signal handlers do! */
2738 else if (code1 == INSN && GET_CODE (PATTERN (insn)) == SET
2739 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
2740 #ifdef STACK_GROWS_DOWNWARD
2741 && GET_CODE (SET_SRC (PATTERN (insn))) == MINUS
2742 #else
2743 && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
2744 #endif
2745 && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx)
2746 SET_INSN_VOLATILE (insn);
2748 /* Delete (in effect) any obvious no-op moves. */
2749 else if (noop_move_p (insn))
2751 PUT_CODE (insn, NOTE);
2752 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2753 NOTE_SOURCE_FILE (insn) = 0;
2757 /* Check if insn modifies the stack pointer. */
2758 if (current_function_sp_is_unchanging
2759 && GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2760 note_stores (PATTERN (insn),
2761 notice_stack_pointer_modification,
2762 NULL);
2766 /* Mark a register in SET. Hard registers in large modes get all
2767 of their component registers set as well. */
2768 static void
2769 mark_reg (set, reg)
2770 regset set;
2771 rtx reg;
2773 int regno = REGNO (reg);
2775 SET_REGNO_REG_SET (set, regno);
2776 if (regno < FIRST_PSEUDO_REGISTER)
2778 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
2779 while (--n > 0)
2780 SET_REGNO_REG_SET (set, regno + n);
2784 /* Mark those regs which are needed at the end of the function as live
2785 at the end of the last basic block. */
2786 static void
2787 mark_regs_live_at_end (set)
2788 regset set;
2790 tree type;
2791 int i;
2793 /* If exiting needs the right stack value, consider the stack pointer
2794 live at the end of the function. */
2795 if ((HAVE_epilogue && reload_completed)
2796 || ! EXIT_IGNORE_STACK
2797 || (! FRAME_POINTER_REQUIRED
2798 && ! current_function_calls_alloca
2799 && flag_omit_frame_pointer)
2800 || current_function_sp_is_unchanging)
2802 SET_REGNO_REG_SET (set, STACK_POINTER_REGNUM);
2805 /* Mark the frame pointer if needed at the end of the function. If
2806 we end up eliminating it, it will be removed from the live list
2807 of each basic block by reload. */
2809 if (! reload_completed || frame_pointer_needed)
2811 SET_REGNO_REG_SET (set, FRAME_POINTER_REGNUM);
2812 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2813 /* If they are different, also mark the hard frame pointer as live. */
2814 SET_REGNO_REG_SET (set, HARD_FRAME_POINTER_REGNUM);
2815 #endif
2818 #ifdef PIC_OFFSET_TABLE_REGNUM
2819 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
2820 /* Many architectures have a GP register even without flag_pic.
2821 Assume the pic register is not in use, or will be handled by
2822 other means, if it is not fixed. */
2823 if (fixed_regs[PIC_OFFSET_TABLE_REGNUM])
2824 SET_REGNO_REG_SET (set, PIC_OFFSET_TABLE_REGNUM);
2825 #endif
2826 #endif
2828 /* Mark all global registers, and all registers used by the epilogue
2829 as being live at the end of the function since they may be
2830 referenced by our caller. */
2831 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2832 if (global_regs[i]
2833 #ifdef EPILOGUE_USES
2834 || EPILOGUE_USES (i)
2835 #endif
2837 SET_REGNO_REG_SET (set, i);
2839 /* Mark all call-saved registers that we actually used. */
2840 if (HAVE_epilogue && reload_completed)
2842 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2843 if (! call_used_regs[i] && regs_ever_live[i])
2844 SET_REGNO_REG_SET (set, i);
2847 /* Mark function return value. */
2848 /* ??? Only do this after reload. Consider a non-void function that
2849 omits a return statement. Across that edge we'll have the return
2850 register live, and no set for it. Thus the return register will
2851 be live back through the CFG to the entry, and thus we die. A
2852 possible solution is to emit a clobber at exits without returns. */
2854 type = TREE_TYPE (DECL_RESULT (current_function_decl));
2855 if (reload_completed
2856 && type != void_type_node)
2858 rtx outgoing;
2860 if (current_function_returns_struct
2861 || current_function_returns_pcc_struct)
2862 type = build_pointer_type (type);
2864 #ifdef FUNCTION_OUTGOING_VALUE
2865 outgoing = FUNCTION_OUTGOING_VALUE (type, current_function_decl);
2866 #else
2867 outgoing = FUNCTION_VALUE (type, current_function_decl);
2868 #endif
2870 if (GET_CODE (outgoing) == REG)
2871 mark_reg (set, outgoing);
2872 else if (GET_CODE (outgoing) == PARALLEL)
2874 int len = XVECLEN (outgoing, 0);
2876 /* Check for a NULL entry, used to indicate that the parameter
2877 goes on the stack and in registers. */
2878 i = (XEXP (XVECEXP (outgoing, 0, 0), 0) ? 0 : 1);
2880 for ( ; i < len; ++i)
2882 rtx r = XVECEXP (outgoing, 0, i);
2883 if (GET_CODE (r) == REG)
2884 mark_reg (set, r);
2887 else
2888 abort ();
2892 /* Determine which registers are live at the start of each
2893 basic block of the function whose first insn is F.
2894 NREGS is the number of registers used in F.
2895 We allocate the vector basic_block_live_at_start
2896 and the regsets that it points to, and fill them with the data.
2897 regset_size and regset_bytes are also set here. */
2899 static void
2900 life_analysis_1 (f, nregs, flags)
2901 rtx f;
2902 int nregs;
2903 int flags;
2905 char save_regs_ever_live[FIRST_PSEUDO_REGISTER];
2906 register int i;
2908 max_regno = nregs;
2910 /* Allocate and zero out many data structures
2911 that will record the data from lifetime analysis. */
2913 allocate_reg_life_data ();
2914 allocate_bb_life_data ();
2916 reg_next_use = (rtx *) xcalloc (nregs, sizeof (rtx));
2918 /* Assume that the stack pointer is unchanging if alloca hasn't been used.
2919 This will be cleared by record_volatile_insns if it encounters an insn
2920 which modifies the stack pointer. */
2921 current_function_sp_is_unchanging = !current_function_calls_alloca;
2922 record_volatile_insns (f);
2924 /* Find the set of registers live on function exit. Do this before
2925 zeroing regs_ever_live, as we use that data post-reload. */
2926 mark_regs_live_at_end (EXIT_BLOCK_PTR->global_live_at_start);
2928 /* The post-reload life analysis must have (on a global basis) the
2929 same registers live as were computed by reload itself; otherwise
2930 the register elimination offsets and such may be incorrect.
2932 Reload will mark some registers as live even though they do not
2933 appear in the rtl. */
2934 if (reload_completed)
2935 memcpy (save_regs_ever_live, regs_ever_live, sizeof (regs_ever_live));
2936 memset (regs_ever_live, 0, sizeof regs_ever_live);
2938 /* Compute register life at block boundaries. It'd be nice to
2939 begin with just the exit and noreturn blocks, but that set
2940 is not immediately handy. */
2942 sbitmap blocks;
2943 blocks = sbitmap_alloc (n_basic_blocks);
2944 sbitmap_ones (blocks);
2945 calculate_global_regs_live (blocks, blocks, flags & PROP_SCAN_DEAD_CODE);
2946 sbitmap_free (blocks);
2949 /* The only pseudos that are live at the beginning of the function are
2950 those that were not set anywhere in the function. local-alloc doesn't
2951 know how to handle these correctly, so mark them as not local to any
2952 one basic block. */
2954 EXECUTE_IF_SET_IN_REG_SET (ENTRY_BLOCK_PTR->global_live_at_end,
2955 FIRST_PSEUDO_REGISTER, i,
2956 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
2958 /* Now the life information is accurate. Make one more pass over each
2959 basic block to delete dead stores, create autoincrement addressing
2960 and record how many times each register is used, is set, or dies. */
2962 regset tmp;
2963 tmp = ALLOCA_REG_SET ();
2965 for (i = n_basic_blocks - 1; i >= 0; --i)
2967 basic_block bb = BASIC_BLOCK (i);
2969 COPY_REG_SET (tmp, bb->global_live_at_end);
2970 propagate_block (tmp, bb->head, bb->end, (regset) NULL, i, flags);
2973 FREE_REG_SET (tmp);
2976 /* We have a problem with any pseudoreg that lives across the setjmp.
2977 ANSI says that if a user variable does not change in value between
2978 the setjmp and the longjmp, then the longjmp preserves it. This
2979 includes longjmp from a place where the pseudo appears dead.
2980 (In principle, the value still exists if it is in scope.)
2981 If the pseudo goes in a hard reg, some other value may occupy
2982 that hard reg where this pseudo is dead, thus clobbering the pseudo.
2983 Conclusion: such a pseudo must not go in a hard reg. */
2984 EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
2985 FIRST_PSEUDO_REGISTER, i,
2987 if (regno_reg_rtx[i] != 0)
2989 REG_LIVE_LENGTH (i) = -1;
2990 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
2994 /* Restore regs_ever_live that was provided by reload. */
2995 if (reload_completed)
2996 memcpy (regs_ever_live, save_regs_ever_live, sizeof (regs_ever_live));
2998 /* Clean up. */
2999 free (reg_next_use);
3000 reg_next_use = NULL;
3003 /* Propagate global life info around the graph of basic blocks. Begin
3004 considering blocks with their corresponding bit set in BLOCKS_IN.
3005 BLOCKS_OUT is set for every block that was changed. */
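/* For reference, this is the standard backward liveness system,
   iterated to a fixed point:

	live_at_end (bb)   = union of live_at_start (s) over successors s
	live_at_start (bb) = use (bb) | (live_at_end (bb) & ~def (bb))

   propagate_block implements the per-block transfer function; the
   worklist holds the blocks whose live_at_end may still change.  */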
3007 static void
3008 calculate_global_regs_live (blocks_in, blocks_out, flags)
3009 sbitmap blocks_in, blocks_out;
3010 int flags;
3012 basic_block *queue, *qhead, *qtail, *qend;
3013 regset tmp, new_live_at_end;
3014 int i;
3016 tmp = ALLOCA_REG_SET ();
3017 new_live_at_end = ALLOCA_REG_SET ();
3019 /* Create a worklist. Allocate an extra slot for ENTRY_BLOCK, and one
3020 because the `head == tail' style test for an empty queue doesn't
3021 work with a full queue. */
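/* (With a circular buffer of capacity N holding N entries, head would
   equal tail exactly as it does when the buffer is empty; the spare
   slot removes that ambiguity.)  */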
3022 queue = (basic_block *) xmalloc ((n_basic_blocks + 2) * sizeof (*queue));
3023 qtail = queue;
3024 qhead = qend = queue + n_basic_blocks + 2;
3026 /* Clear out the garbage that might be hanging out in bb->aux. */
3027 for (i = n_basic_blocks - 1; i >= 0; --i)
3028 BASIC_BLOCK (i)->aux = NULL;
3030 /* Queue the blocks set in the initial mask. Do this in reverse block
3031 number order so that the first round is more likely to do useful
3032 work. We use AUX non-null to flag that the block is queued. */
3033 EXECUTE_IF_SET_IN_SBITMAP (blocks_in, 0, i,
3035 basic_block bb = BASIC_BLOCK (i);
3036 *--qhead = bb;
3037 bb->aux = bb;
3040 sbitmap_zero (blocks_out);
3042 while (qhead != qtail)
3044 int rescan, changed;
3045 basic_block bb;
3046 edge e;
3048 bb = *qhead++;
3049 if (qhead == qend)
3050 qhead = queue;
3051 bb->aux = NULL;
3053 /* Begin by propagating live_at_start from the successor blocks. */
3054 CLEAR_REG_SET (new_live_at_end);
3055 for (e = bb->succ; e ; e = e->succ_next)
3057 basic_block sb = e->dest;
3058 IOR_REG_SET (new_live_at_end, sb->global_live_at_start);
3061 if (bb == ENTRY_BLOCK_PTR)
3063 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3064 continue;
3067 /* On our first pass through this block, we'll go ahead and rescan.
3068 Recognize the first pass by local_set being NULL. On subsequent
3069 passes, we get to skip out early if live_at_end wouldn't have changed. */
3071 if (bb->local_set == NULL)
3073 bb->local_set = OBSTACK_ALLOC_REG_SET (function_obstack);
3074 rescan = 1;
3076 else
3078 /* If any bits were removed from live_at_end, we'll have to
3079 rescan the block. This wouldn't be necessary if we had
3080 precalculated local_live; however, with PROP_SCAN_DEAD_CODE,
3081 local_live really depends on live_at_end. */
3082 CLEAR_REG_SET (tmp);
3083 rescan = bitmap_operation (tmp, bb->global_live_at_end,
3084 new_live_at_end, BITMAP_AND_COMPL);
3086 if (! rescan)
3088 /* Find the set of changed bits. Take this opportunity
3089 to notice whether this set is empty and bail out early if so. */
3090 CLEAR_REG_SET (tmp);
3091 changed = bitmap_operation (tmp, bb->global_live_at_end,
3092 new_live_at_end, BITMAP_XOR);
3093 if (! changed)
3094 continue;
3096 /* If any of the changed bits overlap with local_set,
3097 we'll have to rescan the block. Detect overlap by
3098 the AND with ~local_set turning off bits. */
3099 rescan = bitmap_operation (tmp, tmp, bb->local_set,
3100 BITMAP_AND_COMPL);
3104 /* Let our caller know that BB changed enough to require its
3105 death notes updated. */
3106 SET_BIT (blocks_out, bb->index);
3108 if (! rescan)
3110 /* Add to live_at_start the set of all registers in
3111 new_live_at_end that aren't in the old live_at_end. */
3113 bitmap_operation (tmp, new_live_at_end, bb->global_live_at_end,
3114 BITMAP_AND_COMPL);
3115 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3117 changed = bitmap_operation (bb->global_live_at_start,
3118 bb->global_live_at_start,
3119 tmp, BITMAP_IOR);
3120 if (! changed)
3121 continue;
3123 else
3125 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3127 /* Rescan the block insn by insn to turn (a copy of) live_at_end
3128 into live_at_start. */
3129 propagate_block (new_live_at_end, bb->head, bb->end,
3130 bb->local_set, bb->index, flags);
3132 /* If live_at_start didn't change, no need to go farther. */
3133 if (REG_SET_EQUAL_P (bb->global_live_at_start, new_live_at_end))
3134 continue;
3136 COPY_REG_SET (bb->global_live_at_start, new_live_at_end);
3139 /* Queue all predecessors of BB so that we may re-examine
3140 their live_at_end. */
3141 for (e = bb->pred; e ; e = e->pred_next)
3143 basic_block pb = e->src;
3144 if (pb->aux == NULL)
3146 *qtail++ = pb;
3147 if (qtail == qend)
3148 qtail = queue;
3149 pb->aux = pb;
3154 FREE_REG_SET (tmp);
3155 FREE_REG_SET (new_live_at_end);
3157 EXECUTE_IF_SET_IN_SBITMAP (blocks_out, 0, i,
3159 basic_block bb = BASIC_BLOCK (i);
3160 FREE_REG_SET (bb->local_set);
3163 free (queue);
3166 /* Subroutines of life analysis. */
3168 /* Allocate the permanent data structures that represent the results
3169 of life analysis. Not static since used also for stupid life analysis. */
3171 void
3172 allocate_bb_life_data ()
3174 register int i;
3176 for (i = 0; i < n_basic_blocks; i++)
3178 basic_block bb = BASIC_BLOCK (i);
3180 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
3181 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);
3184 ENTRY_BLOCK_PTR->global_live_at_end
3185 = OBSTACK_ALLOC_REG_SET (function_obstack);
3186 EXIT_BLOCK_PTR->global_live_at_start
3187 = OBSTACK_ALLOC_REG_SET (function_obstack);
3189 regs_live_at_setjmp = OBSTACK_ALLOC_REG_SET (function_obstack);
3192 void
3193 allocate_reg_life_data ()
3195 int i;
3197 /* Recalculate the register space, in case it has grown. Old style
3198 vector oriented regsets would set regset_{size,bytes} here also. */
3199 allocate_reg_info (max_regno, FALSE, FALSE);
3201 /* Reset all the data we'll collect in propagate_block and its
3202 subroutines. */
3203 for (i = 0; i < max_regno; i++)
3205 REG_N_SETS (i) = 0;
3206 REG_N_REFS (i) = 0;
3207 REG_N_DEATHS (i) = 0;
3208 REG_N_CALLS_CROSSED (i) = 0;
3209 REG_LIVE_LENGTH (i) = 0;
3210 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
3214 /* Compute the registers live at the beginning of a basic block
3215 from those live at the end.
3217 When called, OLD contains those live at the end.
3218 On return, it contains those live at the beginning.
3219 FIRST and LAST are the first and last insns of the basic block.
3221 FINAL is nonzero if we are doing the final pass which is not
3222 for computing the life info (since that has already been done)
3223 but for acting on it. On this pass, we delete dead stores,
3224 set up the logical links and dead-variables lists of instructions,
3225 and merge instructions for autoincrement and autodecrement addresses.
3227 SIGNIFICANT is nonzero only on the first pass over each basic block.
3228 If it is nonzero, it points to a regset in which we store
3229 a 1 for each register that is set within the block.
3231 BNUM is the number of the basic block. */
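/* Schematically, for each insn scanned backwards the update is

	OLD = (OLD & ~DEAD) | LIVE

   where DEAD is computed by mark_set_regs and LIVE by mark_used_regs;
   see the AND_COMPL_REG_SET / IOR_REG_SET pair near the bottom of the
   main loop.  */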
3233 static void
3234 propagate_block (old, first, last, significant, bnum, flags)
3235 register regset old;
3236 rtx first;
3237 rtx last;
3238 regset significant;
3239 int bnum;
3240 int flags;
3242 register rtx insn;
3243 rtx prev;
3244 regset live;
3245 regset dead;
3247 /* Find the loop depth for this block. Ignore loop level changes in the
3248 middle of the basic block -- for register allocation purposes, the
3249 important uses will be in the blocks wholly contained within the loop,
3250 not in the loop pre-header or post-trailer. */
3251 loop_depth = BASIC_BLOCK (bnum)->loop_depth;
3253 dead = ALLOCA_REG_SET ();
3254 live = ALLOCA_REG_SET ();
3256 cc0_live = 0;
3258 if (flags & PROP_REG_INFO)
3260 register int i;
3262 /* Process the regs live at the end of the block.
3263 Mark them as not local to any one basic block. */
3264 EXECUTE_IF_SET_IN_REG_SET (old, 0, i,
3266 REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL;
3270 /* Scan the block an insn at a time from end to beginning. */
3272 for (insn = last; ; insn = prev)
3274 prev = PREV_INSN (insn);
3276 if (GET_CODE (insn) == NOTE)
3278 /* If this is a call to `setjmp' et al,
3279 warn if any non-volatile datum is live. */
3281 if ((flags & PROP_REG_INFO)
3282 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
3283 IOR_REG_SET (regs_live_at_setjmp, old);
3286 /* Update the life-status of regs for this insn.
3287 First DEAD gets which regs are set in this insn
3288 then LIVE gets which regs are used in this insn.
3289 Then the regs live before the insn
3290 are those live after, with DEAD regs turned off,
3291 and then LIVE regs turned on. */
3293 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
3295 register int i;
3296 rtx note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
3297 int insn_is_dead = 0;
3298 int libcall_is_dead = 0;
3300 if (flags & PROP_SCAN_DEAD_CODE)
3302 insn_is_dead = (insn_dead_p (PATTERN (insn), old, 0, REG_NOTES (insn))
3303 /* Don't delete something that refers to volatile storage! */
3304 && ! INSN_VOLATILE (insn));
3305 libcall_is_dead = (insn_is_dead && note != 0
3306 && libcall_dead_p (PATTERN (insn), old, note, insn));
3309 /* We almost certainly don't want to delete prologue or epilogue
3310 instructions. Warn about probable compiler lossage. */
3311 if ((flags & PROP_KILL_DEAD_CODE)
3312 && insn_is_dead
3313 && reload_completed
3314 && (HAVE_epilogue || HAVE_prologue)
3315 && prologue_epilogue_contains (insn))
3317 warning ("ICE: would have deleted prologue/epilogue insn");
3318 debug_rtx (insn);
3319 libcall_is_dead = insn_is_dead = 0;
3322 /* If an instruction consists of just dead store(s) on final pass,
3323 "delete" it by turning it into a NOTE of type NOTE_INSN_DELETED.
3324 We could really delete it with delete_insn, but that
3325 can cause trouble for first or last insn in a basic block. */
3326 if ((flags & PROP_KILL_DEAD_CODE) && insn_is_dead)
3328 rtx inote;
3329 /* If the insn referred to a label, note that the label is
3330 now less used. */
3331 for (inote = REG_NOTES (insn); inote; inote = XEXP (inote, 1))
3333 if (REG_NOTE_KIND (inote) == REG_LABEL)
3335 rtx label = XEXP (inote, 0);
3336 rtx next;
3337 LABEL_NUSES (label)--;
3339 /* If this label was attached to an ADDR_VEC, it's
3340 safe to delete the ADDR_VEC. In fact, it's pretty much
3341 mandatory to delete it, because the ADDR_VEC may
3342 be referencing labels that no longer exist. */
3343 if (LABEL_NUSES (label) == 0
3344 && (next = next_nonnote_insn (label)) != NULL
3345 && GET_CODE (next) == JUMP_INSN
3346 && (GET_CODE (PATTERN (next)) == ADDR_VEC
3347 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
3349 rtx pat = PATTERN (next);
3350 int diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
3351 int len = XVECLEN (pat, diff_vec_p);
3352 int i;
3353 for (i = 0; i < len; i++)
3354 LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0))--;
3355 PUT_CODE (next, NOTE);
3356 NOTE_LINE_NUMBER (next) = NOTE_INSN_DELETED;
3357 NOTE_SOURCE_FILE (next) = 0;
3362 PUT_CODE (insn, NOTE);
3363 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
3364 NOTE_SOURCE_FILE (insn) = 0;
3366 /* CC0 is now known to be dead. Either this insn used it,
3367 in which case it doesn't anymore, or clobbered it,
3368 so the next insn can't use it. */
3369 cc0_live = 0;
3371 /* If this insn is copying the return value from a library call,
3372 delete the entire library call. */
3373 if (libcall_is_dead)
3375 rtx first = XEXP (note, 0);
3376 rtx p = insn;
3377 while (INSN_DELETED_P (first))
3378 first = NEXT_INSN (first);
3379 while (p != first)
3381 p = PREV_INSN (p);
3382 PUT_CODE (p, NOTE);
3383 NOTE_LINE_NUMBER (p) = NOTE_INSN_DELETED;
3384 NOTE_SOURCE_FILE (p) = 0;
3387 goto flushed;
3390 CLEAR_REG_SET (dead);
3391 CLEAR_REG_SET (live);
3393 /* See if this is an increment or decrement that can be
3394 merged into a following memory address. */
3395 #ifdef AUTO_INC_DEC
3397 register rtx x = single_set (insn);
3399 /* Does this instruction increment or decrement a register? */
3400 if (!reload_completed
3401 && (flags & PROP_AUTOINC)
3402 && x != 0
3403 && GET_CODE (SET_DEST (x)) == REG
3404 && (GET_CODE (SET_SRC (x)) == PLUS
3405 || GET_CODE (SET_SRC (x)) == MINUS)
3406 && XEXP (SET_SRC (x), 0) == SET_DEST (x)
3407 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
3408 /* Ok, look for a following memory ref we can combine with.
3409 If one is found, change the memory ref to a PRE_INC
3410 or PRE_DEC, cancel this insn, and return 1.
3411 Return 0 if nothing has been done. */
3412 && try_pre_increment_1 (insn))
3413 goto flushed;
3415 #endif /* AUTO_INC_DEC */
3417 /* If this is not the final pass, and this insn is copying the
3418 value of a library call and it's dead, don't scan the
3419 insns that perform the library call, so that the call's
3420 arguments are not marked live. */
3421 if (libcall_is_dead)
3423 /* Mark the dest reg as `significant'. */
3424 mark_set_regs (old, dead, PATTERN (insn), NULL_RTX,
3425 significant, flags);
3427 insn = XEXP (note, 0);
3428 prev = PREV_INSN (insn);
3430 else if (GET_CODE (PATTERN (insn)) == SET
3431 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
3432 && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
3433 && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx
3434 && GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == CONST_INT)
3435 /* We have an insn to pop a constant amount off the stack.
3436 (Such insns use PLUS regardless of the direction of the stack,
3437 and any insn to adjust the stack by a constant is always a pop.)
3438 These insns, if not dead stores, have no effect on life. */
3440 else
3442 /* Any regs live at the time of a call instruction
3443 must not go in a register clobbered by calls.
3444 Find all regs now live and record this for them. */
3446 if (GET_CODE (insn) == CALL_INSN
3447 && (flags & PROP_REG_INFO))
3448 EXECUTE_IF_SET_IN_REG_SET (old, 0, i,
3450 REG_N_CALLS_CROSSED (i)++;
3453 /* LIVE gets the regs used in INSN;
3454 DEAD gets those set by it. Dead insns don't make anything
3455 live. */
3457 mark_set_regs (old, dead, PATTERN (insn),
3458 insn, significant, flags);
3460 /* If an insn doesn't use CC0, it becomes dead since we
3461 assume that every insn clobbers it. So show it dead here;
3462 mark_used_regs will set it live if it is referenced. */
3463 cc0_live = 0;
3465 if (! insn_is_dead)
3466 mark_used_regs (old, live, PATTERN (insn), flags, insn);
3468 /* Sometimes we may have inserted something before INSN (such as
3469 a move) when we make an auto-inc. So ensure we will scan
3470 those insns. */
3471 #ifdef AUTO_INC_DEC
3472 prev = PREV_INSN (insn);
3473 #endif
3475 if (! insn_is_dead && GET_CODE (insn) == CALL_INSN)
3477 register int i;
3479 rtx note;
3481 for (note = CALL_INSN_FUNCTION_USAGE (insn);
3482 note;
3483 note = XEXP (note, 1))
3484 if (GET_CODE (XEXP (note, 0)) == USE)
3485 mark_used_regs (old, live, XEXP (XEXP (note, 0), 0),
3486 flags, insn);
3488 /* Each call clobbers all call-clobbered regs that are not
3489 global or fixed. Note that the function-value reg is a
3490 call-clobbered reg, and mark_set_regs has already had
3491 a chance to handle it. */
3493 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3494 if (call_used_regs[i] && ! global_regs[i]
3495 && ! fixed_regs[i])
3497 SET_REGNO_REG_SET (dead, i);
3498 if (significant)
3499 SET_REGNO_REG_SET (significant, i);
3502 /* The stack ptr is used (honorarily) by a CALL insn. */
3503 SET_REGNO_REG_SET (live, STACK_POINTER_REGNUM);
3505 /* Calls may also reference any of the global registers,
3506 so they are made live. */
3507 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3508 if (global_regs[i])
3509 mark_used_regs (old, live,
3510 gen_rtx_REG (reg_raw_mode[i], i),
3511 flags, insn);
3513 /* Calls also clobber memory. */
3514 free_EXPR_LIST_list (&mem_set_list);
3517 /* Update OLD for the registers used or set. */
3518 AND_COMPL_REG_SET (old, dead);
3519 IOR_REG_SET (old, live);
3523 /* On final pass, update counts of how many insns each reg is live
3524 at. */
3525 if (flags & PROP_REG_INFO)
3526 EXECUTE_IF_SET_IN_REG_SET (old, 0, i,
3527 { REG_LIVE_LENGTH (i)++; });
3529 flushed: ;
3530 if (insn == first)
3531 break;
3534 FREE_REG_SET (dead);
3535 FREE_REG_SET (live);
3536 free_EXPR_LIST_list (&mem_set_list);
3539 /* Return 1 if X (the body of an insn, or part of it) is just dead stores
3540 (SET expressions whose destinations are registers dead after the insn).
3541 NEEDED is the regset that says which regs are alive after the insn.
3543 Unless CALL_OK is non-zero, an insn is needed if it contains a CALL.
3545 If X is the entire body of an insn, NOTES contains the reg notes
3546 pertaining to the insn. */
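/* For instance (illustrative RTL), (set (reg 117) (const_int 0)) is
   dead when pseudo 117 is not set in NEEDED; likewise a store to a
   memory location that mem_set_list shows is overwritten later in the
   block.  */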
3548 static int
3549 insn_dead_p (x, needed, call_ok, notes)
3550 rtx x;
3551 regset needed;
3552 int call_ok;
3553 rtx notes ATTRIBUTE_UNUSED;
3555 enum rtx_code code = GET_CODE (x);
3557 #ifdef AUTO_INC_DEC
3558 /* If flow is invoked after reload, we must take existing AUTO_INC
3559 expressions into account. */
3560 if (reload_completed)
3562 for ( ; notes; notes = XEXP (notes, 1))
3564 if (REG_NOTE_KIND (notes) == REG_INC)
3566 int regno = REGNO (XEXP (notes, 0));
3568 /* Don't delete insns to set global regs. */
3569 if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
3570 || REGNO_REG_SET_P (needed, regno))
3571 return 0;
3575 #endif
3577 /* If setting something that's a reg or part of one,
3578 see if that register's altered value will be live. */
3580 if (code == SET)
3582 rtx r = SET_DEST (x);
3584 /* A SET that is a subroutine call cannot be dead. */
3585 if (! call_ok && GET_CODE (SET_SRC (x)) == CALL)
3586 return 0;
3588 #ifdef HAVE_cc0
3589 if (GET_CODE (r) == CC0)
3590 return ! cc0_live;
3591 #endif
3593 if (GET_CODE (r) == MEM && ! MEM_VOLATILE_P (r))
3595 rtx temp;
3596 /* Walk the set of memory locations we are currently tracking
3597 and see if one is an identical match to this memory location.
3598 If so, this memory write is dead (remember, we're walking
3599 backwards from the end of the block to the start). */
3600 temp = mem_set_list;
3601 while (temp)
3603 if (rtx_equal_p (XEXP (temp, 0), r))
3604 return 1;
3605 temp = XEXP (temp, 1);
3609 while (GET_CODE (r) == SUBREG || GET_CODE (r) == STRICT_LOW_PART
3610 || GET_CODE (r) == ZERO_EXTRACT)
3611 r = XEXP (r, 0);
3613 if (GET_CODE (r) == REG)
3615 int regno = REGNO (r);
3617 /* Don't delete insns to set global regs. */
3618 if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
3619 /* Make sure insns to set frame pointer aren't deleted. */
3620 || (regno == FRAME_POINTER_REGNUM
3621 && (! reload_completed || frame_pointer_needed))
3622 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3623 || (regno == HARD_FRAME_POINTER_REGNUM
3624 && (! reload_completed || frame_pointer_needed))
3625 #endif
3626 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3627 /* Make sure insns to set arg pointer are never deleted
3628 (if the arg pointer isn't fixed, there will be a USE for
3629 it, so we can treat it normally). */
3630 || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
3631 #endif
3632 || REGNO_REG_SET_P (needed, regno))
3633 return 0;
3635 /* If this is a hard register, verify that subsequent words are
3636 not needed. */
3637 if (regno < FIRST_PSEUDO_REGISTER)
3639 int n = HARD_REGNO_NREGS (regno, GET_MODE (r));
3641 while (--n > 0)
3642 if (REGNO_REG_SET_P (needed, regno+n))
3643 return 0;
3646 return 1;
3650 /* If performing several activities,
3651 insn is dead if each activity is individually dead.
3652 Also, CLOBBERs and USEs can be ignored; a CLOBBER or USE
3653 that's inside a PARALLEL doesn't make the insn worth keeping. */
3654 else if (code == PARALLEL)
3656 int i = XVECLEN (x, 0);
3658 for (i--; i >= 0; i--)
3659 if (GET_CODE (XVECEXP (x, 0, i)) != CLOBBER
3660 && GET_CODE (XVECEXP (x, 0, i)) != USE
3661 && ! insn_dead_p (XVECEXP (x, 0, i), needed, call_ok, NULL_RTX))
3662 return 0;
3664 return 1;
3667 /* A CLOBBER of a pseudo-register that is dead serves no purpose. That
3668 is not necessarily true for hard registers. */
3669 else if (code == CLOBBER && GET_CODE (XEXP (x, 0)) == REG
3670 && REGNO (XEXP (x, 0)) >= FIRST_PSEUDO_REGISTER
3671 && ! REGNO_REG_SET_P (needed, REGNO (XEXP (x, 0))))
3672 return 1;
3674 /* We do not check other CLOBBER or USE here. An insn consisting of just
3675 a CLOBBER or just a USE should not be deleted. */
3676 return 0;
3679 /* If X is the pattern of the last insn in a libcall, and assuming X is dead,
3680 return 1 if the entire library call is dead.
3681 This is true if X copies a register (hard or pseudo)
3682 and if the hard return reg of the call insn is dead.
3683 (The caller should have tested the destination of X already for death.)
3685 If this insn doesn't just copy a register, then we don't
3686 have an ordinary libcall. In that case, cse could not have
3687 managed to substitute the source for the dest later on,
3688 so we can assume the libcall is dead.
3690 NEEDED is the bit vector of pseudoregs live before this insn.
3691 NOTE is the REG_RETVAL note of the insn. INSN is the insn itself. */
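/* As an illustrative sketch (insn and register numbers invented), a
   libcall sequence looks like

	(insn 10 ... REG_LIBCALL -> 14)		; set up arguments
	(call_insn 12 ...)			; the call itself
	(insn 14 (set (reg 60) (reg 0)) ... REG_RETVAL -> 10)

   If the destination of insn 14 is dead and the hard return reg is
   dead after the call, the entire sequence may be deleted.  */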
3693 static int
3694 libcall_dead_p (x, needed, note, insn)
3695 rtx x;
3696 regset needed;
3697 rtx note;
3698 rtx insn;
3700 register RTX_CODE code = GET_CODE (x);
3702 if (code == SET)
3704 register rtx r = SET_SRC (x);
3705 if (GET_CODE (r) == REG)
3707 rtx call = XEXP (note, 0);
3708 rtx call_pat;
3709 register int i;
3711 /* Find the call insn. */
3712 while (call != insn && GET_CODE (call) != CALL_INSN)
3713 call = NEXT_INSN (call);
3715 /* If there is none, do nothing special,
3716 since ordinary death handling can understand these insns. */
3717 if (call == insn)
3718 return 0;
3720 /* See if the hard reg holding the value is dead.
3721 If this is a PARALLEL, find the call within it. */
3722 call_pat = PATTERN (call);
3723 if (GET_CODE (call_pat) == PARALLEL)
3725 for (i = XVECLEN (call_pat, 0) - 1; i >= 0; i--)
3726 if (GET_CODE (XVECEXP (call_pat, 0, i)) == SET
3727 && GET_CODE (SET_SRC (XVECEXP (call_pat, 0, i))) == CALL)
3728 break;
3730 /* This may be a library call that is returning a value
3731 via invisible pointer. Do nothing special, since
3732 ordinary death handling can understand these insns. */
3733 if (i < 0)
3734 return 0;
3736 call_pat = XVECEXP (call_pat, 0, i);
3739 return insn_dead_p (call_pat, needed, 1, REG_NOTES (call));
3742 return 1;
3745 /* Return 1 if register REGNO was used before it was set, i.e. if it is
3746 live at function entry. Don't count global register variables, variables
3747 in registers that can be used for function arg passing, or variables in
3748 fixed hard registers. */
3750 int
3751 regno_uninitialized (regno)
3752 int regno;
3754 if (n_basic_blocks == 0
3755 || (regno < FIRST_PSEUDO_REGISTER
3756 && (global_regs[regno]
3757 || fixed_regs[regno]
3758 || FUNCTION_ARG_REGNO_P (regno))))
3759 return 0;
3761 return REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno);
3764 /* 1 if register REGNO was alive at a place where `setjmp' was called
3765 and was set more than once or is an argument.
3766 Such regs may be clobbered by `longjmp'. */
3768 int
3769 regno_clobbered_at_setjmp (regno)
3770 int regno;
3772 if (n_basic_blocks == 0)
3773 return 0;
3775 return ((REG_N_SETS (regno) > 1
3776 || REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno))
3777 && REGNO_REG_SET_P (regs_live_at_setjmp, regno));
3780 /* INSN references memory, possibly using autoincrement addressing modes.
3781 Find any entries on the mem_set_list that need to be invalidated due
3782 to an address change. */
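/* For example (illustrative): if INSN carries a REG_INC note for
   (reg 3), a tracked entry such as (mem:SI (plus (reg 3) (const_int 8)))
   no longer describes the same address and must be spliced out.  */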
3783 static void
3784 invalidate_mems_from_autoinc (insn)
3785 rtx insn;
3787 rtx note;
3788 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
3790 if (REG_NOTE_KIND (note) == REG_INC)
3792 rtx temp = mem_set_list;
3793 rtx prev = NULL_RTX;
3794 rtx next;
3796 while (temp)
3798 next = XEXP (temp, 1);
3799 if (reg_overlap_mentioned_p (XEXP (note, 0), XEXP (temp, 0)))
3801 /* Splice temp out of list. */
3802 if (prev)
3803 XEXP (prev, 1) = next;
3804 else
3805 mem_set_list = next;
3806 free_EXPR_LIST_node (temp);
3808 else
3809 prev = temp;
3810 temp = next;
3816 /* Process the registers that are set within X. Their bits are set to
3817 1 in the regset DEAD, because they are dead prior to this insn.
3819 If INSN is nonzero, it is the insn being processed.
3821 FLAGS is the set of operations to perform. */
3823 static void
3824 mark_set_regs (needed, dead, x, insn, significant, flags)
3825 regset needed;
3826 regset dead;
3827 rtx x;
3828 rtx insn;
3829 regset significant;
3830 int flags;
3832 register RTX_CODE code = GET_CODE (x);
3834 if (code == SET || code == CLOBBER)
3835 mark_set_1 (needed, dead, x, insn, significant, flags);
3836 else if (code == PARALLEL)
3838 register int i;
3839 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
3841 code = GET_CODE (XVECEXP (x, 0, i));
3842 if (code == SET || code == CLOBBER)
3843 mark_set_1 (needed, dead, XVECEXP (x, 0, i), insn,
3844 significant, flags);
3849 /* Process a single SET rtx, X. */
3851 static void
3852 mark_set_1 (needed, dead, x, insn, significant, flags)
3853 regset needed;
3854 regset dead;
3855 rtx x;
3856 rtx insn;
3857 regset significant;
3858 int flags;
3860 register int regno = -1;
3861 register rtx reg = SET_DEST (x);
3863 /* Some targets place small structures in registers for
3864 return values of functions. We have to detect this
3865 case specially here to get correct flow information. */
3866 if (GET_CODE (reg) == PARALLEL
3867 && GET_MODE (reg) == BLKmode)
3869 register int i;
3871 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
3872 mark_set_1 (needed, dead, XVECEXP (reg, 0, i), insn,
3873 significant, flags);
3874 return;
3877 /* Modifying just one hardware register of a multi-reg value
3878 or just a byte field of a register
3879 does not mean the value from before this insn is now dead.
3880 But it does mean liveness of that register at the end of the block
3881 is significant.
3883 Within mark_set_1, however, we treat it as if the register is
3884 indeed modified. mark_used_regs will, however, also treat this
3885 register as being used. Thus, we treat these insns as setting a
3886 new value for the register as a function of its old value. This
3887 causes LOG_LINKS to be made appropriately and this will help combine.
3889 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
3890 || GET_CODE (reg) == SIGN_EXTRACT
3891 || GET_CODE (reg) == STRICT_LOW_PART)
3892 reg = XEXP (reg, 0);
3894 /* If this set is a MEM, then it kills any aliased writes.
3895 If this set is a REG, then it kills any MEMs which use the reg. */
3896 if (flags & PROP_SCAN_DEAD_CODE)
3898 if (GET_CODE (reg) == MEM
3899 || GET_CODE (reg) == REG)
3901 rtx temp = mem_set_list;
3902 rtx prev = NULL_RTX;
3903 rtx next;
3905 while (temp)
3907 next = XEXP (temp, 1);
3908 if ((GET_CODE (reg) == MEM
3909 && output_dependence (XEXP (temp, 0), reg))
3910 || (GET_CODE (reg) == REG
3911 && reg_overlap_mentioned_p (reg, XEXP (temp, 0))))
3913 /* Splice this entry out of the list. */
3914 if (prev)
3915 XEXP (prev, 1) = next;
3916 else
3917 mem_set_list = next;
3918 free_EXPR_LIST_node (temp);
3920 else
3921 prev = temp;
3922 temp = next;
3926 /* If the memory reference had embedded side effects (autoincrement
3927 address modes), then we may need to kill some entries on the
3928 memory set list. */
3929 if (insn && GET_CODE (reg) == MEM)
3930 invalidate_mems_from_autoinc (insn);
3932 if (GET_CODE (reg) == MEM && ! side_effects_p (reg)
3933 /* We do not know the size of a BLKmode store, so we do not track
3934 them for redundant store elimination. */
3935 && GET_MODE (reg) != BLKmode
3936 /* There are no REG_INC notes for SP, so we can't assume we'll see
3937 everything that invalidates it. To be safe, don't eliminate any
3938 stores through SP; none of them should be redundant anyway. */
3939 && ! reg_mentioned_p (stack_pointer_rtx, reg))
3940 mem_set_list = alloc_EXPR_LIST (0, reg, mem_set_list);
3943 if (GET_CODE (reg) == REG
3944 && (regno = REGNO (reg),
3945 ! (regno == FRAME_POINTER_REGNUM
3946 && (! reload_completed || frame_pointer_needed)))
3947 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3948 && ! (regno == HARD_FRAME_POINTER_REGNUM
3949 && (! reload_completed || frame_pointer_needed))
3950 #endif
3951 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3952 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
3953 #endif
3954 && ! (regno < FIRST_PSEUDO_REGISTER && global_regs[regno]))
3955 /* && regno != STACK_POINTER_REGNUM) -- let's try without this. */
3957 int some_needed = REGNO_REG_SET_P (needed, regno);
3958 int some_not_needed = ! some_needed;
3960 /* Mark it as a significant register for this basic block. */
3961 if (significant)
3962 SET_REGNO_REG_SET (significant, regno);
3964 /* Mark it as dead before this insn. */
3965 SET_REGNO_REG_SET (dead, regno);
3967 /* A hard reg in a wide mode may really be multiple registers.
3968 If so, mark all of them just like the first. */
3969 if (regno < FIRST_PSEUDO_REGISTER)
3971 int n;
3973 /* Nothing below is needed for the stack pointer; get out asap.
3974 Eg, log links aren't needed, since combine won't use them. */
3975 if (regno == STACK_POINTER_REGNUM)
3976 return;
3978 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
3979 while (--n > 0)
3981 int regno_n = regno + n;
3982 int needed_regno = REGNO_REG_SET_P (needed, regno_n);
3983 if (significant)
3984 SET_REGNO_REG_SET (significant, regno_n);
3986 SET_REGNO_REG_SET (dead, regno_n);
3987 some_needed |= needed_regno;
3988 some_not_needed |= ! needed_regno;
3992 /* Additional data to record if this is the final pass. */
3993 if (flags & (PROP_LOG_LINKS | PROP_REG_INFO
3994 | PROP_DEATH_NOTES | PROP_AUTOINC))
3996 register rtx y;
3997 register int blocknum = BLOCK_NUM (insn);
3999 y = NULL_RTX;
4000 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4001 y = reg_next_use[regno];
4003 /* If this is a hard reg, record that this function uses the reg. */
4005 if (regno < FIRST_PSEUDO_REGISTER)
4007 register int i;
4008 int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (reg));
4010 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4011 for (i = regno; i < endregno; i++)
4013 /* The next use is no longer "next", since a store
4014 intervenes. */
4015 reg_next_use[i] = 0;
4018 if (flags & PROP_REG_INFO)
4019 for (i = regno; i < endregno; i++)
4021 regs_ever_live[i] = 1;
4022 REG_N_SETS (i)++;
4025 else
4027 /* The next use is no longer "next", since a store
4028 intervenes. */
4029 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4030 reg_next_use[regno] = 0;
4032 /* Keep track of which basic blocks each reg appears in. */
4034 if (flags & PROP_REG_INFO)
4036 if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
4037 REG_BASIC_BLOCK (regno) = blocknum;
4038 else if (REG_BASIC_BLOCK (regno) != blocknum)
4039 REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
4041 /* Count (weighted) references, stores, etc. This counts a
4042 register twice if it is modified, but that is correct. */
4043 REG_N_SETS (regno)++;
4044 REG_N_REFS (regno) += loop_depth;
4046 /* The insns where a reg is live are normally counted
4047 elsewhere, but we want the count to include the insn
4048 where the reg is set, and the normal counting mechanism
4049 would not count it. */
4050 REG_LIVE_LENGTH (regno)++;
4054 if (! some_not_needed)
4056 if (flags & PROP_LOG_LINKS)
4058 /* Make a logical link from the next following insn
4059 that uses this register, back to this insn.
4060 The following insns have already been processed.
4062 We don't build a LOG_LINK for hard registers contained
4063 in ASM_OPERANDs. If these registers get replaced,
4064 we might wind up changing the semantics of the insn,
4065 even if reload can make what appear to be valid
4066 assignments later. */
4067 if (y && (BLOCK_NUM (y) == blocknum)
4068 && (regno >= FIRST_PSEUDO_REGISTER
4069 || asm_noperands (PATTERN (y)) < 0))
4070 LOG_LINKS (y) = alloc_INSN_LIST (insn, LOG_LINKS (y));
4073 else if (! some_needed)
4075 if (flags & PROP_REG_INFO)
4076 REG_N_DEATHS (REGNO (reg))++;
4078 if (flags & PROP_DEATH_NOTES)
4080 /* Note that dead stores have already been deleted
4081 when possible. If we get here, we have found a
4082 dead store that cannot be eliminated (because the
4083 same insn does something useful). Indicate this
4084 by marking the reg being set as dying here. */
4085 REG_NOTES (insn)
4086 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
4089 else
4091 if (flags & PROP_DEATH_NOTES)
4093 /* This is a case where we have a multi-word hard register
4094 and some, but not all, of the words of the register are
4095 needed in subsequent insns. Write REG_UNUSED notes
4096 for those parts that were not needed. This case should
4097 be rare. */
4099 int i;
4101 for (i = HARD_REGNO_NREGS (regno, GET_MODE (reg)) - 1;
4102 i >= 0; i--)
4103 if (!REGNO_REG_SET_P (needed, regno + i))
4104 REG_NOTES (insn)
4105 = (alloc_EXPR_LIST
4106 (REG_UNUSED,
4107 gen_rtx_REG (reg_raw_mode[regno + i], regno + i),
4108 REG_NOTES (insn)));
4113 else if (GET_CODE (reg) == REG)
4115 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4116 reg_next_use[regno] = 0;
4119 /* If this is the last pass and this is a SCRATCH, show it will be dying
4120 here and count it. */
4121 else if (GET_CODE (reg) == SCRATCH)
4123 if (flags & PROP_DEATH_NOTES)
4124 REG_NOTES (insn)
4125 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
4129 #ifdef AUTO_INC_DEC
4131 /* X is a MEM found in INSN. See if we can convert it into an auto-increment
4132 reference. */
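/* An illustrative sketch of the simple case handled below, assuming
   a 4-byte SImode access, Pmode == SImode and HAVE_POST_INCREMENT:

       ... (mem:SI (reg:SI 66)) ...                            ;; INSN
       (set (reg:SI 66) (plus:SI (reg:SI 66) (const_int 4)))   ;; INCR

   becomes

       ... (mem:SI (post_inc:SI (reg:SI 66))) ...

   with INCR rewritten into a copy of the already-incremented register,
   and deleted once it degenerates into a register-to-itself no-op.  */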
4134 static void
4135 find_auto_inc (needed, x, insn)
4136 regset needed;
4137 rtx x;
4138 rtx insn;
4140 rtx addr = XEXP (x, 0);
4141 HOST_WIDE_INT offset = 0;
4142 rtx set;
4144 /* Here we detect use of an index register which might be good for
4145 postincrement, postdecrement, preincrement, or predecrement. */
4147 if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
4148 offset = INTVAL (XEXP (addr, 1)), addr = XEXP (addr, 0);
4150 if (GET_CODE (addr) == REG)
4152 register rtx y;
4153 register int size = GET_MODE_SIZE (GET_MODE (x));
4154 rtx use;
4155 rtx incr;
4156 int regno = REGNO (addr);
4158 /* Is the next use an increment that might make auto-increment? */
4159 if ((incr = reg_next_use[regno]) != 0
4160 && (set = single_set (incr)) != 0
4161 && GET_CODE (set) == SET
4162 && BLOCK_NUM (incr) == BLOCK_NUM (insn)
4163 /* Can't add side effects to jumps; if reg is spilled and
4164 reloaded, there's no way to store back the altered value. */
4165 && GET_CODE (insn) != JUMP_INSN
4166 && (y = SET_SRC (set), GET_CODE (y) == PLUS)
4167 && XEXP (y, 0) == addr
4168 && GET_CODE (XEXP (y, 1)) == CONST_INT
4169 && ((HAVE_POST_INCREMENT
4170 && (INTVAL (XEXP (y, 1)) == size && offset == 0))
4171 || (HAVE_POST_DECREMENT
4172 && (INTVAL (XEXP (y, 1)) == - size && offset == 0))
4173 || (HAVE_PRE_INCREMENT
4174 && (INTVAL (XEXP (y, 1)) == size && offset == size))
4175 || (HAVE_PRE_DECREMENT
4176 && (INTVAL (XEXP (y, 1)) == - size && offset == - size)))
4177 /* Make sure this reg appears only once in this insn. */
4178 && (use = find_use_as_address (PATTERN (insn), addr, offset),
4179 use != 0 && use != (rtx) 1))
4181 rtx q = SET_DEST (set);
4182 enum rtx_code inc_code = (INTVAL (XEXP (y, 1)) == size
4183 ? (offset ? PRE_INC : POST_INC)
4184 : (offset ? PRE_DEC : POST_DEC));
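/* Summarizing the choice just made (INTVAL is the increment amount
   from the condition above):

       INTVAL == size,   offset == 0      =>  POST_INC
       INTVAL == -size,  offset == 0      =>  POST_DEC
       INTVAL == size,   offset == size   =>  PRE_INC
       INTVAL == -size,  offset == -size  =>  PRE_DEC  */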
4186 if (dead_or_set_p (incr, addr))
4188 /* This is the simple case. Try to make the auto-inc. If
4189 we can't, we are done. Otherwise, we will do any
4190 needed updates below. */
4191 if (! validate_change (insn, &XEXP (x, 0),
4192 gen_rtx_fmt_e (inc_code, Pmode, addr),
4193 0))
4194 return;
4196 else if (GET_CODE (q) == REG
4197 /* PREV_INSN used here to check the semi-open interval
4198 [insn,incr). */
4199 && ! reg_used_between_p (q, PREV_INSN (insn), incr)
4200 /* We must also check for sets of q as q may be
4201 a call clobbered hard register and there may
4202 be a call between PREV_INSN (insn) and incr. */
4203 && ! reg_set_between_p (q, PREV_INSN (insn), incr))
4205 /* We have *p followed sometime later by q = p+size.
4206 Both p and q must be live afterward,
4207 and q is not used between INSN and its assignment.
4208 Change it to q = p, ...*q..., q = q+size.
4209 Then fall into the usual case. */
4210 rtx insns, temp;
4211 basic_block bb;
4213 start_sequence ();
4214 emit_move_insn (q, addr);
4215 insns = get_insns ();
4216 end_sequence ();
4218 bb = BLOCK_FOR_INSN (insn);
4219 for (temp = insns; temp; temp = NEXT_INSN (temp))
4220 set_block_for_insn (temp, bb);
4222 /* If we can't make the auto-inc, or can't make the
4223 replacement into Y, exit. There's no point in making
4224 the change below if we can't do the auto-inc and doing
4225 so is not correct in the pre-inc case. */
4227 validate_change (insn, &XEXP (x, 0),
4228 gen_rtx_fmt_e (inc_code, Pmode, q),
4229 1);
4230 validate_change (incr, &XEXP (y, 0), q, 1);
4231 if (! apply_change_group ())
4232 return;
4234 /* We now know we'll be doing this change, so emit the
4235 new insn(s) and do the updates. */
4236 emit_insns_before (insns, insn);
4238 if (BLOCK_FOR_INSN (insn)->head == insn)
4239 BLOCK_FOR_INSN (insn)->head = insns;
4241 /* INCR will become a NOTE and INSN won't contain a
4242 use of ADDR. If a use of ADDR was just placed in
4243 the insn before INSN, make that the next use.
4244 Otherwise, invalidate it. */
4245 if (GET_CODE (PREV_INSN (insn)) == INSN
4246 && GET_CODE (PATTERN (PREV_INSN (insn))) == SET
4247 && SET_SRC (PATTERN (PREV_INSN (insn))) == addr)
4248 reg_next_use[regno] = PREV_INSN (insn);
4249 else
4250 reg_next_use[regno] = 0;
4252 addr = q;
4253 regno = REGNO (q);
4255 /* REGNO is now used in INCR which is below INSN, but
4256 it previously wasn't live here. If we don't mark
4257 it as needed, we'll put a REG_DEAD note for it
4258 on this insn, which is incorrect. */
4259 SET_REGNO_REG_SET (needed, regno);
4261 /* If there are any calls between INSN and INCR, show
4262 that REGNO now crosses them. */
4263 for (temp = insn; temp != incr; temp = NEXT_INSN (temp))
4264 if (GET_CODE (temp) == CALL_INSN)
4265 REG_N_CALLS_CROSSED (regno)++;
4267 else
4268 return;
4270 /* If we haven't returned, it means we were able to make the
4271 auto-inc, so update the status. First, record that this insn
4272 has an implicit side effect. */
4274 REG_NOTES (insn)
4275 = alloc_EXPR_LIST (REG_INC, addr, REG_NOTES (insn));
4277 /* Modify the old increment-insn to simply copy
4278 the already-incremented value of our register. */
4279 if (! validate_change (incr, &SET_SRC (set), addr, 0))
4280 abort ();
4282 /* If that makes it a no-op (copying the register into itself) delete
4283 it so it won't appear to be a "use" and a "set" of this
4284 register. */
4285 if (SET_DEST (set) == addr)
4287 PUT_CODE (incr, NOTE);
4288 NOTE_LINE_NUMBER (incr) = NOTE_INSN_DELETED;
4289 NOTE_SOURCE_FILE (incr) = 0;
4292 if (regno >= FIRST_PSEUDO_REGISTER)
4294 /* Count an extra reference to the reg. When a reg is
4295 incremented, spilling it is worse, so we want to make
4296 that less likely. */
4297 REG_N_REFS (regno) += loop_depth;
4299 /* Count the increment as a setting of the register,
4300 even though it isn't a SET in rtl. */
4301 REG_N_SETS (regno)++;
4306 #endif /* AUTO_INC_DEC */
4308 /* Scan expression X and store a 1-bit in LIVE for each reg it uses.
4309 This is done assuming the registers needed from X
4310 are those that have 1-bits in NEEDED.
4312 FLAGS is the set of enabled operations.
4314 INSN is the containing instruction. If INSN is dead, this function is not
4315 called. */
4317 static void
4318 mark_used_regs (needed, live, x, flags, insn)
4319 regset needed;
4320 regset live;
4321 rtx x;
4322 int flags;
4323 rtx insn;
4325 register RTX_CODE code;
4326 register int regno;
4327 int i;
4329 retry:
4330 code = GET_CODE (x);
4331 switch (code)
4333 case LABEL_REF:
4334 case SYMBOL_REF:
4335 case CONST_INT:
4336 case CONST:
4337 case CONST_DOUBLE:
4338 case PC:
4339 case ADDR_VEC:
4340 case ADDR_DIFF_VEC:
4341 return;
4343 #ifdef HAVE_cc0
4344 case CC0:
4345 cc0_live = 1;
4346 return;
4347 #endif
4349 case CLOBBER:
4350 /* If we are clobbering a MEM, mark any registers inside the address
4351 as being used. */
4352 if (GET_CODE (XEXP (x, 0)) == MEM)
4353 mark_used_regs (needed, live, XEXP (XEXP (x, 0), 0), flags, insn);
4354 return;
4356 case MEM:
4357 /* Don't bother watching stores to mems if this is not the
4358 final pass. We'll not be deleting dead stores this round. */
4359 if (flags & PROP_SCAN_DEAD_CODE)
4361 /* Invalidate the data for the last MEM stored, but only if MEM is
4362 something that can be stored into. */
4363 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
4364 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
4365 ; /* needn't clear the memory set list */
4366 else
4368 rtx temp = mem_set_list;
4369 rtx prev = NULL_RTX;
4370 rtx next;
4372 while (temp)
4374 next = XEXP (temp, 1);
4375 if (anti_dependence (XEXP (temp, 0), x))
4377 /* Splice temp out of the list. */
4378 if (prev)
4379 XEXP (prev, 1) = next;
4380 else
4381 mem_set_list = next;
4382 free_EXPR_LIST_node (temp);
4384 else
4385 prev = temp;
4386 temp = next;
4390 /* If the memory reference had embedded side effects (autoincrement
4391 address modes), then we may need to kill some entries on the
4392 memory set list. */
4393 if (insn)
4394 invalidate_mems_from_autoinc (insn);
4397 #ifdef AUTO_INC_DEC
4398 if (flags & PROP_AUTOINC)
4399 find_auto_inc (needed, x, insn);
4400 #endif
4401 break;
4403 case SUBREG:
4404 if (GET_CODE (SUBREG_REG (x)) == REG
4405 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER
4406 && (GET_MODE_SIZE (GET_MODE (x))
4407 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
4408 REG_CHANGES_SIZE (REGNO (SUBREG_REG (x))) = 1;
4410 /* While we're here, optimize this case. */
4411 x = SUBREG_REG (x);
4413 /* If the SUBREG is not of a register, don't optimize. */
4414 if (GET_CODE (x) != REG)
4416 mark_used_regs (needed, live, x, flags, insn);
4417 return;
4420 /* ... fall through ... */
4422 case REG:
4423 /* A register is being used here, not set;
4424 mark it as needed. */
4426 regno = REGNO (x);
4428 int some_needed = REGNO_REG_SET_P (needed, regno);
4429 int some_not_needed = ! some_needed;
4431 SET_REGNO_REG_SET (live, regno);
4433 /* A hard reg in a wide mode may really be multiple registers.
4434 If so, mark all of them just like the first. */
4435 if (regno < FIRST_PSEUDO_REGISTER)
4437 int n;
4439 /* For stack ptr or fixed arg pointer,
4440 nothing below can be necessary, so waste no more time. */
4441 if (regno == STACK_POINTER_REGNUM
4442 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4443 || (regno == HARD_FRAME_POINTER_REGNUM
4444 && (! reload_completed || frame_pointer_needed))
4445 #endif
4446 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4447 || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
4448 #endif
4449 || (regno == FRAME_POINTER_REGNUM
4450 && (! reload_completed || frame_pointer_needed)))
4452 /* If this is a register we are going to try to eliminate,
4453 don't mark it live here. If we are successful in
4454 eliminating it, it need not be live unless it is used for
4455 pseudos, in which case it will have been set live when
4456 it was allocated to the pseudos. If the register will not
4457 be eliminated, reload will set it live at that point. */
4459 if (! TEST_HARD_REG_BIT (elim_reg_set, regno))
4460 regs_ever_live[regno] = 1;
4461 return;
4463 /* No death notes for global register variables;
4464 their values are live after this function exits. */
4465 if (global_regs[regno])
4467 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4468 reg_next_use[regno] = insn;
4469 return;
4472 n = HARD_REGNO_NREGS (regno, GET_MODE (x));
4473 while (--n > 0)
4475 int regno_n = regno + n;
4476 int needed_regno = REGNO_REG_SET_P (needed, regno_n);
4478 SET_REGNO_REG_SET (live, regno_n);
4479 some_needed |= needed_regno;
4480 some_not_needed |= ! needed_regno;
4484 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4486 /* Record where each reg is used, so when the reg
4487 is set we know the next insn that uses it. */
4489 reg_next_use[regno] = insn;
4491 if (flags & PROP_REG_INFO)
4493 if (regno < FIRST_PSEUDO_REGISTER)
4495 /* If a hard reg is being used,
4496 record that this function does use it. */
4498 i = HARD_REGNO_NREGS (regno, GET_MODE (x));
4499 if (i == 0)
4500 i = 1;
4502 regs_ever_live[regno + --i] = 1;
4503 while (i > 0);
4505 else
4507 /* Keep track of which basic block each reg appears in. */
4509 register int blocknum = BLOCK_NUM (insn);
4511 if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
4512 REG_BASIC_BLOCK (regno) = blocknum;
4513 else if (REG_BASIC_BLOCK (regno) != blocknum)
4514 REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
4516 /* Count (weighted) number of uses of each reg. */
4518 REG_N_REFS (regno) += loop_depth;
4522 /* Record and count the insns in which a reg dies.
4523 If it is used in this insn and was dead below the insn
4524 then it dies in this insn. If it was set in this insn,
4525 we do not make a REG_DEAD note; likewise if we already
4526 made such a note. */
4528 if (flags & PROP_DEATH_NOTES)
4530 if (some_not_needed
4531 && ! dead_or_set_p (insn, x)
4532 #if 0
4533 && (regno >= FIRST_PSEUDO_REGISTER || ! fixed_regs[regno])
4534 #endif
4537 /* Check for the case where the register dying partially
4538 overlaps the register set by this insn. */
4539 if (regno < FIRST_PSEUDO_REGISTER
4540 && HARD_REGNO_NREGS (regno, GET_MODE (x)) > 1)
4542 int n = HARD_REGNO_NREGS (regno, GET_MODE (x));
4543 while (--n >= 0)
4544 some_needed |= dead_or_set_regno_p (insn, regno + n);
4547 /* If none of the words in X is needed, make a REG_DEAD
4548 note. Otherwise, we must make partial REG_DEAD notes. */
4549 if (! some_needed)
4551 REG_NOTES (insn)
4552 = alloc_EXPR_LIST (REG_DEAD, x, REG_NOTES (insn));
4553 REG_N_DEATHS (regno)++;
4555 else
4557 int i;
4559 /* Don't make a REG_DEAD note for a part of a register
4560 that is set in the insn. */
4562 for (i = HARD_REGNO_NREGS (regno, GET_MODE (x)) - 1;
4563 i >= 0; i--)
4564 if (!REGNO_REG_SET_P (needed, regno + i)
4565 && ! dead_or_set_regno_p (insn, regno + i))
4566 REG_NOTES (insn)
4567 = (alloc_EXPR_LIST
4568 (REG_DEAD, gen_rtx_REG (reg_raw_mode[regno + i],
4569 regno + i),
4570 REG_NOTES (insn)));
4575 return;
4577 case SET:
4579 register rtx testreg = SET_DEST (x);
4580 int mark_dest = 0;
4582 /* If storing into MEM, don't show it as being used. But do
4583 show the address as being used. */
4584 if (GET_CODE (testreg) == MEM)
4586 #ifdef AUTO_INC_DEC
4587 if (flags & PROP_AUTOINC)
4588 find_auto_inc (needed, testreg, insn);
4589 #endif
4590 mark_used_regs (needed, live, XEXP (testreg, 0), flags, insn);
4591 mark_used_regs (needed, live, SET_SRC (x), flags, insn);
4592 return;
4595 /* Storing in STRICT_LOW_PART is like storing in a reg
4596 in that this SET might be dead, so ignore it in TESTREG,
4597 but in some other ways it is like using the reg.
4599 Storing in a SUBREG or a bit field is like storing the entire
4600 register in that if the register's value is not used
4601 then this SET is not needed. */
4602 while (GET_CODE (testreg) == STRICT_LOW_PART
4603 || GET_CODE (testreg) == ZERO_EXTRACT
4604 || GET_CODE (testreg) == SIGN_EXTRACT
4605 || GET_CODE (testreg) == SUBREG)
4607 if (GET_CODE (testreg) == SUBREG
4608 && GET_CODE (SUBREG_REG (testreg)) == REG
4609 && REGNO (SUBREG_REG (testreg)) >= FIRST_PSEUDO_REGISTER
4610 && (GET_MODE_SIZE (GET_MODE (testreg))
4611 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (testreg)))))
4612 REG_CHANGES_SIZE (REGNO (SUBREG_REG (testreg))) = 1;
4614 /* Modifying a single register in an alternate mode
4615 does not use any of the old value. But these other
4616 ways of storing in a register do use the old value. */
4617 if (GET_CODE (testreg) == SUBREG
4618 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
4620 else
4621 mark_dest = 1;
4623 testreg = XEXP (testreg, 0);
4626 /* If this is a store into a register,
4627 recursively scan the value being stored. */
4629 if ((GET_CODE (testreg) == PARALLEL
4630 && GET_MODE (testreg) == BLKmode)
4631 || (GET_CODE (testreg) == REG
4632 && (regno = REGNO (testreg), ! (regno == FRAME_POINTER_REGNUM
4633 && (! reload_completed || frame_pointer_needed)))
4634 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4635 && ! (regno == HARD_FRAME_POINTER_REGNUM
4636 && (! reload_completed || frame_pointer_needed))
4637 #endif
4638 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4639 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
4640 #endif
4642 /* We used to exclude global_regs here, but that seems wrong.
4643 Storing in them is like storing in mem. */
4645 mark_used_regs (needed, live, SET_SRC (x), flags, insn);
4646 if (mark_dest)
4647 mark_used_regs (needed, live, SET_DEST (x), flags, insn);
4648 return;
4651 break;
4653 case RETURN:
4654 /* ??? This info should have been gotten from mark_regs_live_at_end,
4655 as applied to the EXIT block, and propagated along the edge that
4656 connects this block to the EXIT. */
4657 break;
4659 case ASM_OPERANDS:
4660 case UNSPEC_VOLATILE:
4661 case TRAP_IF:
4662 case ASM_INPUT:
4664 /* Traditional and volatile asm instructions must be considered to use
4665 and clobber all hard registers, all pseudo-registers and all of
4666 memory. So must TRAP_IF and UNSPEC_VOLATILE operations.
4668 Consider for instance a volatile asm that changes the fpu rounding
4669 mode. An insn should not be moved across this even if it only uses
4670 pseudo-regs because it might give an incorrectly rounded result.
4672 ?!? Unfortunately, marking all hard registers as live causes massive
4673 problems for the register allocator and marking all pseudos as live
4674 creates mountains of uninitialized variable warnings.
4676 So for now, just clear the memory set list and mark any regs
4677 we can find in ASM_OPERANDS as used. */
4678 if (code != ASM_OPERANDS || MEM_VOLATILE_P (x))
4679 free_EXPR_LIST_list (&mem_set_list);
4681 /* For all ASM_OPERANDS, we must traverse the vector of input operands.
4682 We cannot just fall through here since then we would be confused
4683 by the ASM_INPUT rtx inside ASM_OPERANDS, which does not indicate
4684 a traditional asm, unlike its normal usage. */
4685 if (code == ASM_OPERANDS)
4687 int j;
4689 for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
4690 mark_used_regs (needed, live, ASM_OPERANDS_INPUT (x, j),
4691 flags, insn);
4693 break;
4697 default:
4698 break;
4701 /* Recursively scan the operands of this expression. */
4704 register const char *fmt = GET_RTX_FORMAT (code);
4705 register int i;
4707 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4709 if (fmt[i] == 'e')
4711 /* Tail recursive case: save a function call level. */
4712 if (i == 0)
4714 x = XEXP (x, 0);
4715 goto retry;
4717 mark_used_regs (needed, live, XEXP (x, i), flags, insn);
4719 else if (fmt[i] == 'E')
4721 register int j;
4722 for (j = 0; j < XVECLEN (x, i); j++)
4723 mark_used_regs (needed, live, XVECEXP (x, i, j), flags, insn);
4729 #ifdef AUTO_INC_DEC
4731 static int
4732 try_pre_increment_1 (insn)
4733 rtx insn;
4735 /* Find the next use of this reg. If in same basic block,
4736 make it do pre-increment or pre-decrement if appropriate. */
4737 rtx x = single_set (insn);
4738 HOST_WIDE_INT amount = ((GET_CODE (SET_SRC (x)) == PLUS ? 1 : -1)
4739 * INTVAL (XEXP (SET_SRC (x), 1)));
4740 int regno = REGNO (SET_DEST (x));
4741 rtx y = reg_next_use[regno];
4742 if (y != 0
4743 && BLOCK_NUM (y) == BLOCK_NUM (insn)
4744 /* Don't do this if the reg dies, or gets set in y; a standard addressing
4745 mode would be better. */
4746 && ! dead_or_set_p (y, SET_DEST (x))
4747 && try_pre_increment (y, SET_DEST (x), amount))
4749 /* We have found a suitable auto-increment
4750 and already changed insn Y to do it.
4751 So flush this increment-instruction. */
4752 PUT_CODE (insn, NOTE);
4753 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
4754 NOTE_SOURCE_FILE (insn) = 0;
4755 /* Count a reference to this reg for the increment
4756 insn we are deleting. When a reg is incremented,
4757 spilling it is worse, so we want to make that
4758 less likely. */
4759 if (regno >= FIRST_PSEUDO_REGISTER)
4761 REG_N_REFS (regno) += loop_depth;
4762 REG_N_SETS (regno)++;
4764 return 1;
4766 return 0;
4769 /* Try to change INSN so that it does pre-increment or pre-decrement
4770 addressing on register REG in order to add AMOUNT to REG.
4771 AMOUNT is negative for pre-decrement.
4772 Returns 1 if the change could be made.
4773 This fully checks the validity of the result of modifying INSN. */
4775 static int
4776 try_pre_increment (insn, reg, amount)
4777 rtx insn, reg;
4778 HOST_WIDE_INT amount;
4780 register rtx use;
4782 /* Nonzero if we can try to make a pre-increment or pre-decrement.
4783 For example, addl $4,r1; movl (r1),... can become movl +(r1),... */
4784 int pre_ok = 0;
4785 /* Nonzero if we can try to make a post-increment or post-decrement.
4786 For example, addl $4,r1; movl -4(r1),... can become movl (r1)+,...
4787 It is possible for both PRE_OK and POST_OK to be nonzero if the machine
4788 supports both pre-inc and post-inc, or both pre-dec and post-dec. */
4789 int post_ok = 0;
4791 /* Nonzero if the opportunity actually requires post-inc or post-dec. */
4792 int do_post = 0;
4794 /* From the sign of the increment, see which possibilities are conceivable
4795 on this target machine. */
4796 if (HAVE_PRE_INCREMENT && amount > 0)
4797 pre_ok = 1;
4798 if (HAVE_POST_INCREMENT && amount > 0)
4799 post_ok = 1;
4801 if (HAVE_PRE_DECREMENT && amount < 0)
4802 pre_ok = 1;
4803 if (HAVE_POST_DECREMENT && amount < 0)
4804 post_ok = 1;
4806 if (! (pre_ok || post_ok))
4807 return 0;
4809 /* It is not safe to add a side effect to a jump insn
4810 because if the incremented register is spilled and must be reloaded
4811 there would be no way to store the incremented value back in memory. */
4813 if (GET_CODE (insn) == JUMP_INSN)
4814 return 0;
4816 use = 0;
4817 if (pre_ok)
4818 use = find_use_as_address (PATTERN (insn), reg, 0);
4819 if (post_ok && (use == 0 || use == (rtx) 1))
4821 use = find_use_as_address (PATTERN (insn), reg, -amount);
4822 do_post = 1;
4825 if (use == 0 || use == (rtx) 1)
4826 return 0;
4828 if (GET_MODE_SIZE (GET_MODE (use)) != (amount > 0 ? amount : - amount))
4829 return 0;
4831 /* See if this combination of instruction and addressing mode exists. */
4832 if (! validate_change (insn, &XEXP (use, 0),
4833 gen_rtx_fmt_e (amount > 0
4834 ? (do_post ? POST_INC : PRE_INC)
4835 : (do_post ? POST_DEC : PRE_DEC),
4836 Pmode, reg), 0))
4837 return 0;
4839 /* Record that this insn now has an implicit side effect on X. */
4840 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, reg, REG_NOTES (insn));
4841 return 1;
4844 #endif /* AUTO_INC_DEC */
4846 /* Find the place in the rtx X where REG is used as a memory address.
4847 Return the MEM rtx that so uses it.
4848 If PLUSCONST is nonzero, search instead for a memory address equivalent to
4849 (plus REG (const_int PLUSCONST)).
4851 If such an address does not appear, return 0.
4852 If REG appears more than once, or is used other than in such an address,
4853 return (rtx)1. */
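/* For example, with PLUSCONST == 4, a pattern containing

       (mem:SI (plus:SI (reg:SI 66) (const_int 4)))

   and no other occurrence of reg 66 makes this return that MEM;
   a second occurrence of reg 66 would make it return (rtx) 1.  */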
4855 rtx
4856 find_use_as_address (x, reg, plusconst)
4857 register rtx x;
4858 rtx reg;
4859 HOST_WIDE_INT plusconst;
4861 enum rtx_code code = GET_CODE (x);
4862 const char *fmt = GET_RTX_FORMAT (code);
4863 register int i;
4864 register rtx value = 0;
4865 register rtx tem;
4867 if (code == MEM && XEXP (x, 0) == reg && plusconst == 0)
4868 return x;
4870 if (code == MEM && GET_CODE (XEXP (x, 0)) == PLUS
4871 && XEXP (XEXP (x, 0), 0) == reg
4872 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4873 && INTVAL (XEXP (XEXP (x, 0), 1)) == plusconst)
4874 return x;
4876 if (code == SIGN_EXTRACT || code == ZERO_EXTRACT)
4878 /* If REG occurs inside a MEM used in a bit-field reference,
4879 that is unacceptable. */
4880 if (find_use_as_address (XEXP (x, 0), reg, 0) != 0)
4881 return (rtx) (HOST_WIDE_INT) 1;
4884 if (x == reg)
4885 return (rtx) (HOST_WIDE_INT) 1;
4887 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4889 if (fmt[i] == 'e')
4891 tem = find_use_as_address (XEXP (x, i), reg, plusconst);
4892 if (value == 0)
4893 value = tem;
4894 else if (tem != 0)
4895 return (rtx) (HOST_WIDE_INT) 1;
4897 if (fmt[i] == 'E')
4899 register int j;
4900 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4902 tem = find_use_as_address (XVECEXP (x, i, j), reg, plusconst);
4903 if (value == 0)
4904 value = tem;
4905 else if (tem != 0)
4906 return (rtx) (HOST_WIDE_INT) 1;
4911 return value;
4914 /* Write information about registers and basic blocks into FILE.
4915 This is part of making a debugging dump. */
4917 void
4918 dump_flow_info (file)
4919 FILE *file;
4921 register int i;
4922 static const char * const reg_class_names[] = REG_CLASS_NAMES;
4924 fprintf (file, "%d registers.\n", max_regno);
4925 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
4926 if (REG_N_REFS (i))
4928 enum reg_class class, altclass;
4929 fprintf (file, "\nRegister %d used %d times across %d insns",
4930 i, REG_N_REFS (i), REG_LIVE_LENGTH (i));
4931 if (REG_BASIC_BLOCK (i) >= 0)
4932 fprintf (file, " in block %d", REG_BASIC_BLOCK (i));
4933 if (REG_N_SETS (i))
4934 fprintf (file, "; set %d time%s", REG_N_SETS (i),
4935 (REG_N_SETS (i) == 1) ? "" : "s");
4936 if (REG_USERVAR_P (regno_reg_rtx[i]))
4937 fprintf (file, "; user var");
4938 if (REG_N_DEATHS (i) != 1)
4939 fprintf (file, "; dies in %d places", REG_N_DEATHS (i));
4940 if (REG_N_CALLS_CROSSED (i) == 1)
4941 fprintf (file, "; crosses 1 call");
4942 else if (REG_N_CALLS_CROSSED (i))
4943 fprintf (file, "; crosses %d calls", REG_N_CALLS_CROSSED (i));
4944 if (PSEUDO_REGNO_BYTES (i) != UNITS_PER_WORD)
4945 fprintf (file, "; %d bytes", PSEUDO_REGNO_BYTES (i));
4946 class = reg_preferred_class (i);
4947 altclass = reg_alternate_class (i);
4948 if (class != GENERAL_REGS || altclass != ALL_REGS)
4950 if (altclass == ALL_REGS || class == ALL_REGS)
4951 fprintf (file, "; pref %s", reg_class_names[(int) class]);
4952 else if (altclass == NO_REGS)
4953 fprintf (file, "; %s or none", reg_class_names[(int) class]);
4954 else
4955 fprintf (file, "; pref %s, else %s",
4956 reg_class_names[(int) class],
4957 reg_class_names[(int) altclass]);
4959 if (REGNO_POINTER_FLAG (i))
4960 fprintf (file, "; pointer");
4961 fprintf (file, ".\n");
4964 fprintf (file, "\n%d basic blocks, %d edges.\n", n_basic_blocks, n_edges);
4965 for (i = 0; i < n_basic_blocks; i++)
4967 register basic_block bb = BASIC_BLOCK (i);
4968 register int regno;
4969 register edge e;
4971 fprintf (file, "\nBasic block %d: first insn %d, last %d.\n",
4972 i, INSN_UID (bb->head), INSN_UID (bb->end));
4974 fprintf (file, "Predecessors: ");
4975 for (e = bb->pred; e ; e = e->pred_next)
4976 dump_edge_info (file, e, 0);
4978 fprintf (file, "\nSuccessors: ");
4979 for (e = bb->succ; e ; e = e->succ_next)
4980 dump_edge_info (file, e, 1);
4982 fprintf (file, "\nRegisters live at start:");
4983 if (bb->global_live_at_start)
4985 for (regno = 0; regno < max_regno; regno++)
4986 if (REGNO_REG_SET_P (bb->global_live_at_start, regno))
4987 fprintf (file, " %d", regno);
4989 else
4990 fprintf (file, " n/a");
4992 fprintf (file, "\nRegisters live at end:");
4993 if (bb->global_live_at_end)
4995 for (regno = 0; regno < max_regno; regno++)
4996 if (REGNO_REG_SET_P (bb->global_live_at_end, regno))
4997 fprintf (file, " %d", regno);
4999 else
5000 fprintf (file, " n/a");
5002 putc('\n', file);
5005 putc('\n', file);
5008 void
5009 debug_flow_info ()
5011 dump_flow_info (stderr);
5014 static void
5015 dump_edge_info (file, e, do_succ)
5016 FILE *file;
5017 edge e;
5018 int do_succ;
5020 basic_block side = (do_succ ? e->dest : e->src);
5022 if (side == ENTRY_BLOCK_PTR)
5023 fputs (" ENTRY", file);
5024 else if (side == EXIT_BLOCK_PTR)
5025 fputs (" EXIT", file);
5026 else
5027 fprintf (file, " %d", side->index);
5029 if (e->flags)
5031 static const char * const bitnames[] = {
5032 "fallthru", "crit", "ab", "abcall", "eh", "fake"
5034 int comma = 0;
5035 int i, flags = e->flags;
5037 fputc (' ', file);
5038 fputc ('(', file);
5039 for (i = 0; flags; i++)
5040 if (flags & (1 << i))
5042 flags &= ~(1 << i);
5044 if (comma)
5045 fputc (',', file);
5046 if (i < (int)(sizeof (bitnames) / sizeof (*bitnames)))
5047 fputs (bitnames[i], file);
5048 else
5049 fprintf (file, "%d", i);
5050 comma = 1;
5052 fputc (')', file);
5057 /* Like print_rtl, but also print out live information for the start of each
5058 basic block. */
5060 void
5061 print_rtl_with_bb (outf, rtx_first)
5062 FILE *outf;
5063 rtx rtx_first;
5065 register rtx tmp_rtx;
5067 if (rtx_first == 0)
5068 fprintf (outf, "(nil)\n");
5069 else
5071 int i;
5072 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
5073 int max_uid = get_max_uid ();
5074 basic_block *start = (basic_block *)
5075 xcalloc (max_uid, sizeof (basic_block));
5076 basic_block *end = (basic_block *)
5077 xcalloc (max_uid, sizeof (basic_block));
5078 enum bb_state *in_bb_p = (enum bb_state *)
5079 xcalloc (max_uid, sizeof (enum bb_state));
5081 for (i = n_basic_blocks - 1; i >= 0; i--)
5083 basic_block bb = BASIC_BLOCK (i);
5084 rtx x;
5086 start[INSN_UID (bb->head)] = bb;
5087 end[INSN_UID (bb->end)] = bb;
5088 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
5090 enum bb_state state = IN_MULTIPLE_BB;
5091 if (in_bb_p[INSN_UID(x)] == NOT_IN_BB)
5092 state = IN_ONE_BB;
5093 in_bb_p[INSN_UID(x)] = state;
5095 if (x == bb->end)
5096 break;
5100 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
5102 int did_output;
5103 basic_block bb;
5105 if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
5107 fprintf (outf, ";; Start of basic block %d, registers live:",
5108 bb->index);
5110 EXECUTE_IF_SET_IN_REG_SET (bb->global_live_at_start, 0, i,
5112 fprintf (outf, " %d", i);
5113 if (i < FIRST_PSEUDO_REGISTER)
5114 fprintf (outf, " [%s]",
5115 reg_names[i]);
5117 putc ('\n', outf);
5120 if (in_bb_p[INSN_UID(tmp_rtx)] == NOT_IN_BB
5121 && GET_CODE (tmp_rtx) != NOTE
5122 && GET_CODE (tmp_rtx) != BARRIER
5123 && ! obey_regdecls)
5124 fprintf (outf, ";; Insn is not within a basic block\n");
5125 else if (in_bb_p[INSN_UID(tmp_rtx)] == IN_MULTIPLE_BB)
5126 fprintf (outf, ";; Insn is in multiple basic blocks\n");
5128 did_output = print_rtl_single (outf, tmp_rtx);
5130 if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
5131 fprintf (outf, ";; End of basic block %d\n", bb->index);
5133 if (did_output)
5134 putc ('\n', outf);
5137 free (start);
5138 free (end);
5139 free (in_bb_p);
5142 if (current_function_epilogue_delay_list != 0)
5144 fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
5145 for (tmp_rtx = current_function_epilogue_delay_list; tmp_rtx != 0;
5146 tmp_rtx = XEXP (tmp_rtx, 1))
5147 print_rtl_single (outf, XEXP (tmp_rtx, 0));
5151 /* Compute dominator relationships using new flow graph structures. */
5152 void
5153 compute_flow_dominators (dominators, post_dominators)
5154 sbitmap *dominators;
5155 sbitmap *post_dominators;
5157 int bb;
5158 sbitmap *temp_bitmap;
5159 edge e;
5160 basic_block *worklist, *tos;
5162 /* Allocate a worklist array/queue. Entries are only added to the
5163 list if they were not already on the list. So the size is
5164 bounded by the number of basic blocks. */
5165 tos = worklist = (basic_block *) xmalloc (sizeof (basic_block)
5166 * n_basic_blocks);
5168 temp_bitmap = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
5169 sbitmap_vector_zero (temp_bitmap, n_basic_blocks);
5171 if (dominators)
5173 /* The optimistic setting of dominators requires us to put every
5174 block on the work list initially. */
5175 for (bb = 0; bb < n_basic_blocks; bb++)
5177 *tos++ = BASIC_BLOCK (bb);
5178 BASIC_BLOCK (bb)->aux = BASIC_BLOCK (bb);
5181 /* We want a maximal solution, so initially assume everything dominates
5182 everything else. */
5183 sbitmap_vector_ones (dominators, n_basic_blocks);
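/* The loop below computes the maximal fixed point of the usual
   dominator dataflow equation

       Dom(b) = {b} union ( intersection over preds p of b: Dom(p) )

   where the intersection over a predecessor set that contains the
   ENTRY block is defined to be the empty set.  */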
5185 /* Mark successors of the entry block so we can identify them below. */
5186 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
5187 e->dest->aux = ENTRY_BLOCK_PTR;
5189 /* Iterate until the worklist is empty. */
5190 while (tos != worklist)
5192 /* Take the first entry off the worklist. */
5193 basic_block b = *--tos;
5194 bb = b->index;
5196 /* Compute the intersection of the dominators of all the
5197 predecessor blocks.
5199 If one of the predecessor blocks is the ENTRY block, then the
5200 intersection of the dominators of the predecessor blocks is
5201 defined as the null set. We can identify such blocks by the
5202 special value in the AUX field in the block structure. */
5203 if (b->aux == ENTRY_BLOCK_PTR)
5205 /* Do not clear the aux field for blocks which are
5206 successors of the ENTRY block. That way we never
5207 add them to the worklist again.
5209 The intersection of the dominators of the preds of this
5210 block is defined as the null set. */
5211 sbitmap_zero (temp_bitmap[bb]);
5213 else
5215 /* Clear the aux field of this block so it can be added to
5216 the worklist again if necessary. */
5217 b->aux = NULL;
5218 sbitmap_intersection_of_preds (temp_bitmap[bb], dominators, bb);
5221 /* Make sure each block always dominates itself. */
5222 SET_BIT (temp_bitmap[bb], bb);
5224 /* If the out state of this block changed, then we need to
5225 add the successors of this block to the worklist if they
5226 are not already on the worklist. */
5227 if (sbitmap_a_and_b (dominators[bb], dominators[bb], temp_bitmap[bb]))
5229 for (e = b->succ; e; e = e->succ_next)
5231 if (!e->dest->aux && e->dest != EXIT_BLOCK_PTR)
5233 *tos++ = e->dest;
5234 e->dest->aux = e;
5241 if (post_dominators)
5243 /* The optimistic setting of dominators requires us to put every
5244 block on the work list initially. */
5245 for (bb = 0; bb < n_basic_blocks; bb++)
5247 *tos++ = BASIC_BLOCK (bb);
5248 BASIC_BLOCK (bb)->aux = BASIC_BLOCK (bb);
5251 /* We want a maximal solution, so initially assume everything post
5252 dominates everything else. */
5253 sbitmap_vector_ones (post_dominators, n_basic_blocks);
5255 /* Mark predecessors of the exit block so we can identify them below. */
5256 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
5257 e->src->aux = EXIT_BLOCK_PTR;
5259 /* Iterate until the worklist is empty. */
5260 while (tos != worklist)
5262 /* Take the first entry off the worklist. */
5263 basic_block b = *--tos;
5264 bb = b->index;
5266 /* Compute the intersection of the post dominators of all the
5267 successor blocks.
5269 If one of the successor blocks is the EXIT block, then the
5270 intersection of the dominators of the successor blocks is
5271 defined as the null set. We can identify such blocks by the
5272 special value in the AUX field in the block structure. */
5273 if (b->aux == EXIT_BLOCK_PTR)
5275 /* Do not clear the aux field for blocks which are
5276 predecessors of the EXIT block. That way we never
5277 add them to the worklist again.
5279 The intersection of the dominators of the succs of this
5280 block is defined as the null set. */
5281 sbitmap_zero (temp_bitmap[bb]);
5283 else
5285 /* Clear the aux field of this block so it can be added to
5286 the worklist again if necessary. */
5287 b->aux = NULL;
5288 sbitmap_intersection_of_succs (temp_bitmap[bb],
5289 post_dominators, bb);
5292 /* Make sure each block always post dominates itself. */
5293 SET_BIT (temp_bitmap[bb], bb);
5295 /* If the out state of this block changed, then we need to
5296 add the predecessors of this block to the worklist if they
5297 are not already on the worklist. */
5298 if (sbitmap_a_and_b (post_dominators[bb],
5299 post_dominators[bb],
5300 temp_bitmap[bb]))
5302 for (e = b->pred; e; e = e->pred_next)
5304 if (!e->src->aux && e->src != ENTRY_BLOCK_PTR)
5306 *tos++ = e->src;
5307 e->src->aux = e;
5313 free (temp_bitmap);
5316 /* Given DOMINATORS, compute the immediate dominators into IDOM. */
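/* A small worked example of the algorithm below: for a straight-line
   CFG 0 -> 1 -> 2, dominators[2] = {0,1,2}, so tmp[2] starts as
   {0,1}; subtracting tmp[1] = {0} leaves {1}, hence idom[2] == 1.  */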
5318 void
5319 compute_immediate_dominators (idom, dominators)
5320 int *idom;
5321 sbitmap *dominators;
5323 sbitmap *tmp;
5324 int b;
5326 tmp = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
5328 /* Begin with tmp(n) = dom(n) - { n }. */
5329 for (b = n_basic_blocks; --b >= 0; )
5331 sbitmap_copy (tmp[b], dominators[b]);
5332 RESET_BIT (tmp[b], b);
5335 /* Subtract out all of our dominator's dominators. */
5336 for (b = n_basic_blocks; --b >= 0; )
5338 sbitmap tmp_b = tmp[b];
5339 int s;
5341 for (s = n_basic_blocks; --s >= 0; )
5342 if (TEST_BIT (tmp_b, s))
5343 sbitmap_difference (tmp_b, tmp_b, tmp[s]);
5346 /* Find the one bit set in the bitmap and put it in the output array. */
5347 for (b = n_basic_blocks; --b >= 0; )
5349 int t;
5350 EXECUTE_IF_SET_IN_SBITMAP (tmp[b], 0, t, { idom[b] = t; });
5353 sbitmap_vector_free (tmp);
5356 /* Update register set/reference counts for a single SET or CLOBBER rtx, X. */
5358 static void
5359 count_reg_sets_1 (x)
5360 rtx x;
5362 register int regno;
5363 register rtx reg = SET_DEST (x);
5365 /* Find the register that's set/clobbered. */
5366 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
5367 || GET_CODE (reg) == SIGN_EXTRACT
5368 || GET_CODE (reg) == STRICT_LOW_PART)
5369 reg = XEXP (reg, 0);
5371 if (GET_CODE (reg) == PARALLEL
5372 && GET_MODE (reg) == BLKmode)
5374 register int i;
5375 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
5376 count_reg_sets_1 (XVECEXP (reg, 0, i));
5377 return;
5380 if (GET_CODE (reg) == REG)
5382 regno = REGNO (reg);
5383 if (regno >= FIRST_PSEUDO_REGISTER)
5385 /* Count (weighted) references, stores, etc. This counts a
5386 register twice if it is modified, but that is correct. */
5387 REG_N_SETS (regno)++;
5389 REG_N_REFS (regno) += loop_depth;
5394 /* Increment REG_N_SETS for each SET or CLOBBER found in X; also increment
5395 REG_N_REFS by the current loop depth for each SET or CLOBBER found. */
5397 static void
5398 count_reg_sets (x)
5399 rtx x;
5401 register RTX_CODE code = GET_CODE (x);
5403 if (code == SET || code == CLOBBER)
5404 count_reg_sets_1 (x);
5405 else if (code == PARALLEL)
5407 register int i;
5408 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
5410 code = GET_CODE (XVECEXP (x, 0, i));
5411 if (code == SET || code == CLOBBER)
5412 count_reg_sets_1 (XVECEXP (x, 0, i));
5417 /* Increment REG_N_REFS by the current loop depth for each register
5418 reference found in X. */
5420 static void
5421 count_reg_references (x)
5422 rtx x;
5424 register RTX_CODE code;
5426 retry:
5427 code = GET_CODE (x);
5428 switch (code)
5430 case LABEL_REF:
5431 case SYMBOL_REF:
5432 case CONST_INT:
5433 case CONST:
5434 case CONST_DOUBLE:
5435 case PC:
5436 case ADDR_VEC:
5437 case ADDR_DIFF_VEC:
5438 case ASM_INPUT:
5439 return;
5441 #ifdef HAVE_cc0
5442 case CC0:
5443 return;
5444 #endif
5446 case CLOBBER:
5447 /* If we are clobbering a MEM, mark any registers inside the address
5448 as being used. */
5449 if (GET_CODE (XEXP (x, 0)) == MEM)
5450 count_reg_references (XEXP (XEXP (x, 0), 0));
5451 return;
5453 case SUBREG:
5454 /* While we're here, optimize this case. */
5455 x = SUBREG_REG (x);
5457 /* If the SUBREG is not of a register, don't optimize. */
5458 if (GET_CODE (x) != REG)
5460 count_reg_references (x);
5461 return;
5464 /* ... fall through ... */
5466 case REG:
5467 if (REGNO (x) >= FIRST_PSEUDO_REGISTER)
5468 REG_N_REFS (REGNO (x)) += loop_depth;
5469 return;
5471 case SET:
5473 register rtx testreg = SET_DEST (x);
5474 int mark_dest = 0;
5476 /* If storing into MEM, don't show it as being used. But do
5477 show the address as being used. */
5478 if (GET_CODE (testreg) == MEM)
5480 count_reg_references (XEXP (testreg, 0));
5481 count_reg_references (SET_SRC (x));
5482 return;
5485 /* Storing in STRICT_LOW_PART is like storing in a reg
5486 in that this SET might be dead, so ignore it in TESTREG,
5487 but in some other ways it is like using the reg.
5489 Storing in a SUBREG or a bit field is like storing the entire
5490 register in that if the register's value is not used
5491 then this SET is not needed. */
5492 while (GET_CODE (testreg) == STRICT_LOW_PART
5493 || GET_CODE (testreg) == ZERO_EXTRACT
5494 || GET_CODE (testreg) == SIGN_EXTRACT
5495 || GET_CODE (testreg) == SUBREG)
5497 /* Modifying a single register in an alternate mode
5498 does not use any of the old value. But these other
5499 ways of storing in a register do use the old value. */
5500 if (GET_CODE (testreg) == SUBREG
5501 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
5503 else
5504 mark_dest = 1;
5506 testreg = XEXP (testreg, 0);
5509 /* If this is a store into a register,
5510 recursively scan the value being stored. */
5512 if ((GET_CODE (testreg) == PARALLEL
5513 && GET_MODE (testreg) == BLKmode)
5514 || GET_CODE (testreg) == REG)
5516 count_reg_references (SET_SRC (x));
5517 if (mark_dest)
5518 count_reg_references (SET_DEST (x));
5519 return;
5522 break;
5524 default:
5525 break;
5528 /* Recursively scan the operands of this expression. */
5531 register const char *fmt = GET_RTX_FORMAT (code);
5532 register int i;
5534 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5536 if (fmt[i] == 'e')
5538 /* Tail recursive case: save a function call level. */
5539 if (i == 0)
5541 x = XEXP (x, 0);
5542 goto retry;
5544 count_reg_references (XEXP (x, i));
5546 else if (fmt[i] == 'E')
5548 register int j;
5549 for (j = 0; j < XVECLEN (x, i); j++)
5550 count_reg_references (XVECEXP (x, i, j));
5556 /* Recompute register set/reference counts immediately prior to register
5557 allocation.
5559 This avoids problems with set/reference counts changing to/from values
5560 which have special meanings to the register allocators.
5562 Additionally, the reference counts are the primary component used by the
5563 register allocators to prioritize pseudos for allocation to hard regs.
5564 More accurate reference counts generally lead to better register allocation.
5566 F is the first insn to be scanned.
5567 LOOP_STEP denotes how much loop_depth should be incremented per
5568 loop nesting level in order to increase the ref count more for references
5569 in a loop.
5571 It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and
5572 possibly other information which is used by the register allocators. */
5574 void
5575 recompute_reg_usage (f, loop_step)
5576 rtx f;
5577 int loop_step;
5579 rtx insn;
5580 int i, max_reg;
5581 int index;
5583 /* Clear out the old data. */
5584 max_reg = max_reg_num ();
5585 for (i = FIRST_PSEUDO_REGISTER; i < max_reg; i++)
5587 REG_N_SETS (i) = 0;
5588 REG_N_REFS (i) = 0;
5591 /* Scan each insn in the chain and count how many times each register is
5592 set/used. */
5593 loop_depth = 1;
5594 for (index = 0; index < n_basic_blocks; index++)
5596 basic_block bb = BASIC_BLOCK (index);
5597 loop_depth = bb->loop_depth;
5598 for (insn = bb->head; insn; insn = NEXT_INSN (insn))
5600 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
5602 rtx links;
5604 /* This call will increment REG_N_SETS for each SET or CLOBBER
5605 of a register in INSN. It will also increment REG_N_REFS
5606 by the loop depth for each set of a register in INSN. */
5607 count_reg_sets (PATTERN (insn));
5609 /* count_reg_sets does not detect autoincrement address modes, so
5610 detect them here by looking at the notes attached to INSN. */
5611 for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
5613 if (REG_NOTE_KIND (links) == REG_INC)
5614 /* Count (weighted) references, stores, etc. This counts a
5615 register twice if it is modified, but that is correct. */
5616 REG_N_SETS (REGNO (XEXP (links, 0)))++;
5619 /* This call will increment REG_N_REFS by the current loop depth for
5620 each reference to a register in INSN. */
5621 count_reg_references (PATTERN (insn));
5623 /* count_reg_references will not include counts for arguments to
5624 function calls, so detect them here by examining the
5625 CALL_INSN_FUNCTION_USAGE data. */
5626 if (GET_CODE (insn) == CALL_INSN)
5628 rtx note;
5630 for (note = CALL_INSN_FUNCTION_USAGE (insn);
5631 note;
5632 note = XEXP (note, 1))
5633 if (GET_CODE (XEXP (note, 0)) == USE)
5634 count_reg_references (XEXP (XEXP (note, 0), 0));
5637 if (insn == bb->end)
5638 break;
5643 /* Optionally removes all the REG_DEAD and REG_UNUSED notes from a set of
5644 blocks. If BLOCKS is NULL, assume the universal set. Returns a count
5645 of the number of registers that died. */
5647 int
5648 count_or_remove_death_notes (blocks, kill)
5649 sbitmap blocks;
5650 int kill;
5652 int i, count = 0;
5654 for (i = n_basic_blocks - 1; i >= 0; --i)
5656 basic_block bb;
5657 rtx insn;
5659 if (blocks && ! TEST_BIT (blocks, i))
5660 continue;
5662 bb = BASIC_BLOCK (i);
5664 for (insn = bb->head; ; insn = NEXT_INSN (insn))
5666 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
5668 rtx *pprev = &REG_NOTES (insn);
5669 rtx link = *pprev;
5671 while (link)
5673 switch (REG_NOTE_KIND (link))
5675 case REG_DEAD:
5676 if (GET_CODE (XEXP (link, 0)) == REG)
5678 rtx reg = XEXP (link, 0);
5679 int n;
5681 if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
5682 n = 1;
5683 else
5684 n = HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg));
5685 count += n;
5687 /* FALLTHRU */
5689 case REG_UNUSED:
5690 if (kill)
5692 rtx next = XEXP (link, 1);
5693 free_EXPR_LIST_node (link);
5694 *pprev = link = next;
5695 break;
5697 /* FALLTHRU */
5699 default:
5700 pprev = &XEXP (link, 1);
5701 link = *pprev;
5702 break;
5707 if (insn == bb->end)
5708 break;
5712 return count;
5715 /* Record INSN's block as BB. */
5717 void
5718 set_block_for_insn (insn, bb)
5719 rtx insn;
5720 basic_block bb;
5722 size_t uid = INSN_UID (insn);
5723 if (uid >= basic_block_for_insn->num_elements)
5725 int new_size;
5727 /* Add one-eighth the size so we don't keep calling xrealloc. */
5728 new_size = uid + (uid + 7) / 8;
5730 VARRAY_GROW (basic_block_for_insn, new_size);
5732 VARRAY_BB (basic_block_for_insn, uid) = bb;
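/* Illustrative numbers for the growth policy above: a uid of 1000
   arriving when the varray holds fewer elements grows it to
   1000 + 1007/8 == 1125 entries, so repeated insertions grow the
   array geometrically rather than one element at a time.  */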
5735 /* Record INSN's block number as BB. */
5736 /* ??? This has got to go. */
5738 void
5739 set_block_num (insn, bb)
5740 rtx insn;
5741 int bb;
5743 set_block_for_insn (insn, BASIC_BLOCK (bb));
5746 /* Verify the CFG consistency. This function checks some CFG invariants
5747 and aborts when something is wrong. The hope is that it will help to
5748 convert many optimization passes to preserve a consistent CFG.
5750 Currently it does the following checks:
5752 - test head/end pointers
5753 - overlapping of basic blocks
5754 - edge list correctness
5755 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
5756 - tails of basic blocks (ensure that the boundary is necessary)
5757 - scans body of the basic block for JUMP_INSN, CODE_LABEL
5758 and NOTE_INSN_BASIC_BLOCK
5759 - check that all insns are in the basic blocks
5760 (except the switch handling code, barriers and notes)
5762 In the future it can be extended to check a lot of other stuff as well
5763 (reachability of basic blocks, life information, etc. etc.). */
5765 void
5766 verify_flow_info ()
5768 const int max_uid = get_max_uid ();
5769 const rtx rtx_first = get_insns ();
5770 basic_block *bb_info;
5771 rtx x;
5772 int i, err = 0;
5774 bb_info = (basic_block *) xcalloc (max_uid, sizeof (basic_block));
5776 /* First pass check head/end pointers and set bb_info array used by
5777 later passes. */
5778 for (i = n_basic_blocks - 1; i >= 0; i--)
5780 basic_block bb = BASIC_BLOCK (i);
5782 /* Check the head pointer and make sure that it is pointing into the
5783 insn list. */
5784 for (x = rtx_first; x != NULL_RTX; x = NEXT_INSN (x))
5785 if (x == bb->head)
5786 break;
5787 if (!x)
5789 error ("Head insn %d for block %d not found in the insn stream.",
5790 INSN_UID (bb->head), bb->index);
5791 err = 1;
5794 /* Check the end pointer and make sure that it is pointing into the
5795 insn list. */
5796 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
5798 if (bb_info[INSN_UID (x)] != NULL)
5800 error ("Insn %d is in multiple basic blocks (%d and %d)",
5801 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
5802 err = 1;
5804 bb_info[INSN_UID (x)] = bb;
5806 if (x == bb->end)
5807 break;
5809 if (!x)
5811 error ("End insn %d for block %d not found in the insn stream.",
5812 INSN_UID (bb->end), bb->index);
5813 err = 1;
5817 /* Now check the basic blocks (boundaries etc.) */
5818 for (i = n_basic_blocks - 1; i >= 0; i--)
5820 basic_block bb = BASIC_BLOCK (i);
5821 /* Check correctness of edge lists. */
5822 edge e;
5824 e = bb->succ;
5825 while (e)
5827 if (e->src != bb)
5829 fprintf (stderr, "verify_flow_info: Basic block %d succ edge is corrupted\n",
5830 bb->index);
5831 fprintf (stderr, "Predecessor: ");
5832 dump_edge_info (stderr, e, 0);
5833 fprintf (stderr, "\nSuccessor: ");
5834 dump_edge_info (stderr, e, 1);
5835 fflush (stderr);
5836 err = 1;
5838 if (e->dest != EXIT_BLOCK_PTR)
5840 edge e2 = e->dest->pred;
5841 while (e2 && e2 != e)
5842 e2 = e2->pred_next;
5843 if (!e2)
5845 error ("Basic block %i edge lists are corrupted", bb->index);
5846 err = 1;
5849 e = e->succ_next;
5852 e = bb->pred;
5853 while (e)
5855 if (e->dest != bb)
5857 error ("Basic block %d pred edge is corrupted", bb->index);
5858 fputs ("Predecessor: ", stderr);
5859 dump_edge_info (stderr, e, 0);
5860 fputs ("\nSuccessor: ", stderr);
5861 dump_edge_info (stderr, e, 1);
5862 fputc ('\n', stderr);
5863 err = 1;
5865 if (e->src != ENTRY_BLOCK_PTR)
5867 edge e2 = e->src->succ;
5868 while (e2 && e2 != e)
5869 e2 = e2->succ_next;
5870 if (!e2)
5872 error ("Basic block %i edge lists are corrupted", bb->index);
5873 err = 1;
5876 e = e->pred_next;
5879 /* OK, the pointers are correct. Now check the header of the basic
5880 block. It ought to contain an optional CODE_LABEL followed
5881 by NOTE_BASIC_BLOCK. */
5882 x = bb->head;
5883 if (GET_CODE (x) == CODE_LABEL)
5885 if (bb->end == x)
5887 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
5888 bb->index);
5889 err = 1;
5891 x = NEXT_INSN (x);
5893 if (GET_CODE (x) != NOTE
5894 || NOTE_LINE_NUMBER (x) != NOTE_INSN_BASIC_BLOCK
5895 || NOTE_BASIC_BLOCK (x) != bb)
5897 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d\n",
5898 bb->index);
5899 err = 1;
5902 if (bb->end == x)
5904 /* Do checks for empty blocks here */
5906 else
5908 x = NEXT_INSN (x);
5909 while (x)
5911 if (GET_CODE (x) == NOTE
5912 && NOTE_LINE_NUMBER (x) == NOTE_INSN_BASIC_BLOCK)
5914 error ("NOTE_INSN_BASIC_BLOCK %d in the middle of basic block %d",
5915 INSN_UID (x), bb->index);
5916 err = 1;
5919 if (x == bb->end)
5920 break;
5922 if (GET_CODE (x) == JUMP_INSN
5923 || GET_CODE (x) == CODE_LABEL
5924 || GET_CODE (x) == BARRIER)
5926 error ("In basic block %d:", bb->index);
5927 fatal_insn ("Flow control insn inside a basic block", x);
5930 x = NEXT_INSN (x);
5935 x = rtx_first;
5936 while (x)
5938 if (!bb_info[INSN_UID (x)])
5940 switch (GET_CODE (x))
5942 case BARRIER:
5943 case NOTE:
5944 break;
5946 case CODE_LABEL:
5947 /* An addr_vec is placed outside any basic block. */
5948 if (NEXT_INSN (x)
5949 && GET_CODE (NEXT_INSN (x)) == JUMP_INSN
5950 && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
5951 || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC))
5953 x = NEXT_INSN (x);
5956 /* But in any case, non-deletable labels can appear anywhere. */
5957 break;
5959 default:
5960 fatal_insn ("Insn outside basic block", x);
5964 x = NEXT_INSN (x);
5967 if (err)
5968 abort ();
5970 /* Clean up. */
5971 free (bb_info);
5974 /* Functions to access an edge list with a vector representation.
5975 Enough data is kept such that given an index number, the
5976 pred and succ that edge represents can be determined, or
5977 given a pred and a succ, its index number can be returned.
5978 This allows algorithms which consume a lot of memory to
5979 represent the normally full matrix of edge (pred,succ) with a
5980 single indexed vector, edge (EDGE_INDEX (pred, succ)), with no
5981 wasted space in the client code due to sparse flow graphs. */
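/* Sketch of the resulting layout, as built by create_edge_list below,
   for a hypothetical two-block function with straight-line control
   flow:

       index 0:  ENTRY -> 0
       index 1:  0 -> 1
       index 2:  1 -> EXIT

   so EDGE_INDEX (elist, BASIC_BLOCK (0), BASIC_BLOCK (1)) == 1.  */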
5983 /* This function initializes the edge list. Basically the entire
5984 flowgraph is processed, and all edges are assigned a number,
5985 and the data structure is filled in. */
struct edge_list *
create_edge_list ()
{
  struct edge_list *elist;
  edge e;
  int num_edges;
  int x;
  int block_count;

  block_count = n_basic_blocks + 2;	/* Include the entry and exit blocks.  */

  num_edges = 0;

  /* Determine the number of edges in the flow graph by counting successor
     edges on each basic block.  */
  for (x = 0; x < n_basic_blocks; x++)
    {
      basic_block bb = BASIC_BLOCK (x);

      for (e = bb->succ; e; e = e->succ_next)
	num_edges++;
    }
  /* Don't forget successors of the entry block.  */
  for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
    num_edges++;

  elist = (struct edge_list *) xmalloc (sizeof (struct edge_list));
  elist->num_blocks = block_count;
  elist->num_edges = num_edges;
  elist->index_to_edge = (edge *) xmalloc (sizeof (edge) * num_edges);

  num_edges = 0;

  /* Follow successors of the entry block, and register these edges.  */
  for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
    {
      elist->index_to_edge[num_edges] = e;
      num_edges++;
    }

  for (x = 0; x < n_basic_blocks; x++)
    {
      basic_block bb = BASIC_BLOCK (x);

      /* Follow all successors of blocks, and register these edges.  */
      for (e = bb->succ; e; e = e->succ_next)
	{
	  elist->index_to_edge[num_edges] = e;
	  num_edges++;
	}
    }
  return elist;
}
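
/* A minimal usage sketch (illustration only, not part of this file's
   pass): a client can replace a full (pred, succ) matrix with one
   vector indexed by edge number.  The helper name count_entry_edges
   and the use of a plain char array are assumptions of this sketch.  */
#if 0
static int
count_entry_edges ()
{
  struct edge_list *elist = create_edge_list ();
  char *edge_flags = (char *) xcalloc (NUM_EDGES (elist), sizeof (char));
  int i, count = 0;

  /* Flag every edge whose predecessor is the entry block, then count
     the flagged entries.  */
  for (i = 0; i < NUM_EDGES (elist); i++)
    if (INDEX_EDGE_PRED_BB (elist, i) == ENTRY_BLOCK_PTR)
      edge_flags[i] = 1;

  for (i = 0; i < NUM_EDGES (elist); i++)
    count += edge_flags[i];

  free (edge_flags);
  free_edge_list (elist);
  return count;
}
#endif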
/* This function frees the memory associated with an edge list.  */
void
free_edge_list (elist)
     struct edge_list *elist;
{
  if (elist)
    {
      free (elist->index_to_edge);
      free (elist);
    }
}
/* This function provides debug output showing an edge list.  */
void
print_edge_list (f, elist)
     FILE *f;
     struct edge_list *elist;
{
  int x;
  fprintf (f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
	   elist->num_blocks - 2, elist->num_edges);

  for (x = 0; x < elist->num_edges; x++)
    {
      fprintf (f, " %-4d - edge(", x);
      if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR)
	fprintf (f, "entry,");
      else
	fprintf (f, "%d,", INDEX_EDGE_PRED_BB (elist, x)->index);

      if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR)
	fprintf (f, "exit)\n");
      else
	fprintf (f, "%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
    }
}
/* This function provides an internal consistency check of an edge list,
   verifying that all edges are present, and that there are no
   extra edges.  */
void
verify_edge_list (f, elist)
     FILE *f;
     struct edge_list *elist;
{
  int x, pred, succ, index;
  edge e;

  for (x = 0; x < n_basic_blocks; x++)
    {
      basic_block bb = BASIC_BLOCK (x);

      for (e = bb->succ; e; e = e->succ_next)
	{
	  pred = e->src->index;
	  succ = e->dest->index;
	  index = EDGE_INDEX (elist, e->src, e->dest);
	  if (index == EDGE_INDEX_NO_EDGE)
	    {
	      fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
	      continue;
	    }
	  if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
	    fprintf (f, "*p* Pred for index %d should be %d not %d\n",
		     index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
	  if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
	    fprintf (f, "*p* Succ for index %d should be %d not %d\n",
		     index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
	}
    }
  for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
    {
      pred = e->src->index;
      succ = e->dest->index;
      index = EDGE_INDEX (elist, e->src, e->dest);
      if (index == EDGE_INDEX_NO_EDGE)
	{
	  fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
	  continue;
	}
      if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
	fprintf (f, "*p* Pred for index %d should be %d not %d\n",
		 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
      if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
	fprintf (f, "*p* Succ for index %d should be %d not %d\n",
		 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
    }
  /* We've verified that all the edges are in the list, now let's make sure
     there are no spurious edges in the list.  */

  for (pred = 0; pred < n_basic_blocks; pred++)
    for (succ = 0; succ < n_basic_blocks; succ++)
      {
	basic_block p = BASIC_BLOCK (pred);
	basic_block s = BASIC_BLOCK (succ);

	int found_edge = 0;

	for (e = p->succ; e; e = e->succ_next)
	  if (e->dest == s)
	    {
	      found_edge = 1;
	      break;
	    }
	for (e = s->pred; e; e = e->pred_next)
	  if (e->src == p)
	    {
	      found_edge = 1;
	      break;
	    }
	if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
	    == EDGE_INDEX_NO_EDGE && found_edge != 0)
	  fprintf (f, "*** Edge (%d, %d) appears not to have an index\n",
		   pred, succ);
	if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
	    != EDGE_INDEX_NO_EDGE && found_edge == 0)
	  fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
		   pred, succ, EDGE_INDEX (elist, BASIC_BLOCK (pred),
					   BASIC_BLOCK (succ)));
      }
  for (succ = 0; succ < n_basic_blocks; succ++)
    {
      basic_block p = ENTRY_BLOCK_PTR;
      basic_block s = BASIC_BLOCK (succ);

      int found_edge = 0;

      for (e = p->succ; e; e = e->succ_next)
	if (e->dest == s)
	  {
	    found_edge = 1;
	    break;
	  }
      for (e = s->pred; e; e = e->pred_next)
	if (e->src == p)
	  {
	    found_edge = 1;
	    break;
	  }
      if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
	  == EDGE_INDEX_NO_EDGE && found_edge != 0)
	fprintf (f, "*** Edge (entry, %d) appears not to have an index\n",
		 succ);
      if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
	  != EDGE_INDEX_NO_EDGE && found_edge == 0)
	fprintf (f, "*** Edge (entry, %d) has index %d, but no edge exists\n",
		 succ, EDGE_INDEX (elist, ENTRY_BLOCK_PTR,
				   BASIC_BLOCK (succ)));
    }
  for (pred = 0; pred < n_basic_blocks; pred++)
    {
      basic_block p = BASIC_BLOCK (pred);
      basic_block s = EXIT_BLOCK_PTR;

      int found_edge = 0;

      for (e = p->succ; e; e = e->succ_next)
	if (e->dest == s)
	  {
	    found_edge = 1;
	    break;
	  }
      for (e = s->pred; e; e = e->pred_next)
	if (e->src == p)
	  {
	    found_edge = 1;
	    break;
	  }
      if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
	  == EDGE_INDEX_NO_EDGE && found_edge != 0)
	fprintf (f, "*** Edge (%d, exit) appears not to have an index\n",
		 pred);
      if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
	  != EDGE_INDEX_NO_EDGE && found_edge == 0)
	fprintf (f, "*** Edge (%d, exit) has index %d, but no edge exists\n",
		 pred, EDGE_INDEX (elist, BASIC_BLOCK (pred),
				   EXIT_BLOCK_PTR));
    }
}
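
/* A minimal debugging sketch, assuming the usual RTL dump stream is
   available as rtl_dump_file: build an edge list, check it for
   consistency, and print it before use.  */
#if 0
  {
    struct edge_list *elist = create_edge_list ();

    if (rtl_dump_file)
      {
	verify_edge_list (rtl_dump_file, elist);
	print_edge_list (rtl_dump_file, elist);
      }
    free_edge_list (elist);
  }
#endif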
/* This routine will determine what, if any, edge there is between
   a specified predecessor and successor.  */

int
find_edge_index (edge_list, pred, succ)
     struct edge_list *edge_list;
     basic_block pred, succ;
{
  int x;
  for (x = 0; x < NUM_EDGES (edge_list); x++)
    {
      if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
	  && INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
	return x;
    }
  return (EDGE_INDEX_NO_EDGE);
}
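
/* Illustrative sketch: basic-block.h is assumed to wrap this routine
   in the EDGE_INDEX macro, so a client can test whether two blocks
   are connected without walking the successor lists itself.  The
   helper name blocks_connected_p is hypothetical.  */
#if 0
static int
blocks_connected_p (elist, a, b)
     struct edge_list *elist;
     basic_block a, b;
{
  return EDGE_INDEX (elist, a, b) != EDGE_INDEX_NO_EDGE;
}
#endif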
/* This function will remove an edge from the flow graph.  */
static void
remove_edge (e)
     edge e;
{
  edge last_pred = NULL;
  edge last_succ = NULL;
  edge tmp;
  basic_block src, dest;
  src = e->src;
  dest = e->dest;
  for (tmp = src->succ; tmp && tmp != e; tmp = tmp->succ_next)
    last_succ = tmp;

  if (!tmp)
    abort ();
  if (last_succ)
    last_succ->succ_next = e->succ_next;
  else
    src->succ = e->succ_next;

  for (tmp = dest->pred; tmp && tmp != e; tmp = tmp->pred_next)
    last_pred = tmp;

  if (!tmp)
    abort ();
  if (last_pred)
    last_pred->pred_next = e->pred_next;
  else
    dest->pred = e->pred_next;

  n_edges--;
  free (e);
}
/* This routine will remove any fake successor edges for a basic block.
   When the edge is removed, it is also removed from whatever predecessor
   list it is in.  */
static void
remove_fake_successors (bb)
     basic_block bb;
{
  edge e;
  for (e = bb->succ; e;)
    {
      edge tmp = e;
      e = e->succ_next;
      if ((tmp->flags & EDGE_FAKE) == EDGE_FAKE)
	remove_edge (tmp);
    }
}
/* This routine will remove all fake edges from the flow graph.  If
   we remove all fake successors, it will automatically remove all
   fake predecessors.  */
void
remove_fake_edges ()
{
  int x;

  for (x = 0; x < n_basic_blocks; x++)
    remove_fake_successors (BASIC_BLOCK (x));

  /* We've handled all successors except the entry block's.  */
  remove_fake_successors (ENTRY_BLOCK_PTR);
}
/* This function will add a fake edge between any block which has no
   successors and the exit block.  Some data flow equations require
   these edges to exist.  */
void
add_noreturn_fake_exit_edges ()
{
  int x;

  for (x = 0; x < n_basic_blocks; x++)
    if (BASIC_BLOCK (x)->succ == NULL)
      make_edge (NULL, BASIC_BLOCK (x), EXIT_BLOCK_PTR, EDGE_FAKE);
}
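
/* Sketch of the intended calling protocol (illustration only): a
   dataflow client that requires every block to reach the exit adds
   the fake edges, solves its equations, then removes them again.
   The function compute_my_dataflow is hypothetical.  */
#if 0
  add_noreturn_fake_exit_edges ();
  compute_my_dataflow ();
  remove_fake_edges ();
#endif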
/* Dump the list of basic blocks in the bitmap NODES.  */
static void
flow_nodes_print (str, nodes, file)
     const char *str;
     const sbitmap nodes;
     FILE *file;
{
  int node;

  fprintf (file, "%s { ", str);
  EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {fprintf (file, "%d ", node);});
  fputs ("}\n", file);
}
/* Dump the list of exiting edges in the array EDGES.  */
static void
flow_exits_print (str, edges, num_edges, file)
     const char *str;
     const edge *edges;
     int num_edges;
     FILE *file;
{
  int i;

  fprintf (file, "%s { ", str);
  for (i = 0; i < num_edges; i++)
    fprintf (file, "%d->%d ", edges[i]->src->index, edges[i]->dest->index);
  fputs ("}\n", file);
}
/* Dump loop related CFG information.  */
static void
flow_loops_cfg_dump (loops, file)
     const struct loops *loops;
     FILE *file;
{
  int i;

  if (! loops->num || ! file || ! loops->cfg.dom)
    return;

  for (i = 0; i < n_basic_blocks; i++)
    {
      edge succ;

      fprintf (file, ";; %d succs { ", i);
      for (succ = BASIC_BLOCK (i)->succ; succ; succ = succ->succ_next)
	fprintf (file, "%d ", succ->dest->index);
      flow_nodes_print ("} dom", loops->cfg.dom[i], file);
    }

  /* Dump the DFS node order.  */
  if (loops->cfg.dfs_order)
    {
      fputs (";; DFS order: ", file);
      for (i = 0; i < n_basic_blocks; i++)
	fprintf (file, "%d ", loops->cfg.dfs_order[i]);
      fputs ("\n", file);
    }
}
/* Return non-zero if the nodes of LOOP are a subset of OUTER.  */
static int
flow_loop_nested_p (outer, loop)
     struct loop *outer;
     struct loop *loop;
{
  return sbitmap_a_subset_b_p (loop->nodes, outer->nodes);
}
/* Dump the loop information specified by LOOPS to the stream FILE.  */
void
flow_loops_dump (loops, file, verbose)
     const struct loops *loops;
     FILE *file;
     int verbose;
{
  int i;
  int num_loops;

  num_loops = loops->num;
  if (! num_loops || ! file)
    return;

  fprintf (file, ";; %d loops found\n", num_loops);

  for (i = 0; i < num_loops; i++)
    {
      struct loop *loop = &loops->array[i];

      fprintf (file, ";; loop %d (%d to %d):\n;; header %d, latch %d, pre-header %d, depth %d, level %d, outer %ld\n",
	       i, INSN_UID (loop->header->head), INSN_UID (loop->latch->end),
	       loop->header->index, loop->latch->index,
	       loop->pre_header ? loop->pre_header->index : -1,
	       loop->depth, loop->level,
	       (long) (loop->outer ? (loop->outer - loops->array) : -1));
      fprintf (file, ";; %d", loop->num_nodes);
      flow_nodes_print (" nodes", loop->nodes, file);
      fprintf (file, ";; %d", loop->num_exits);
      flow_exits_print (" exits", loop->exits, loop->num_exits, file);

      if (loop->shared)
	{
	  int j;

	  for (j = 0; j < i; j++)
	    {
	      struct loop *oloop = &loops->array[j];

	      if (loop->header == oloop->header)
		{
		  int disjoint;
		  int smaller;

		  smaller = loop->num_nodes < oloop->num_nodes;

		  /* If the union of LOOP and OLOOP is different than
		     the larger of LOOP and OLOOP then LOOP and OLOOP
		     must be disjoint.  */
		  disjoint = ! flow_loop_nested_p (smaller ? loop : oloop,
						   smaller ? oloop : loop);
		  fprintf (file, ";; loop header %d shared by loops %d, %d %s\n",
			   loop->header->index, i, j,
			   disjoint ? "disjoint" : "nested");
		}
	    }
	}

      if (verbose)
	{
	  /* Print diagnostics to compare our concept of a loop with
	     what the loop notes say.  */
	  if (GET_CODE (PREV_INSN (loop->header->head)) != NOTE
	      || NOTE_LINE_NUMBER (PREV_INSN (loop->header->head))
	      != NOTE_INSN_LOOP_BEG)
	    fprintf (file, ";; No NOTE_INSN_LOOP_BEG at %d\n",
		     INSN_UID (PREV_INSN (loop->header->head)));
	  if (GET_CODE (NEXT_INSN (loop->latch->end)) != NOTE
	      || NOTE_LINE_NUMBER (NEXT_INSN (loop->latch->end))
	      != NOTE_INSN_LOOP_END)
	    fprintf (file, ";; No NOTE_INSN_LOOP_END at %d\n",
		     INSN_UID (NEXT_INSN (loop->latch->end)));
	}
    }

  if (verbose)
    flow_loops_cfg_dump (loops, file);
}
/* Free all the memory allocated for LOOPS.  */
void
flow_loops_free (loops)
     struct loops *loops;
{
  if (loops->array)
    {
      int i;

      if (! loops->num)
	abort ();

      /* Free the loop descriptors.  */
      for (i = 0; i < loops->num; i++)
	{
	  struct loop *loop = &loops->array[i];

	  if (loop->nodes)
	    sbitmap_free (loop->nodes);
	  if (loop->exits)
	    free (loop->exits);
	}
      free (loops->array);
      loops->array = NULL;

      if (loops->cfg.dom)
	sbitmap_vector_free (loops->cfg.dom);
      if (loops->cfg.dfs_order)
	free (loops->cfg.dfs_order);

      sbitmap_free (loops->shared_headers);
    }
}
/* Find the exits from the loop using the bitmap of loop nodes NODES
   and store them in the EXITS array.  Return the number of exits from
   the loop.  */
static int
flow_loop_exits_find (nodes, exits)
     const sbitmap nodes;
     edge **exits;
{
  edge e;
  int node;
  int num_exits;

  *exits = NULL;

  /* Check all nodes within the loop to see if there are any
     successors not in the loop.  Note that a node may have multiple
     exiting edges.  */
  num_exits = 0;
  EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
    for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
      {
	basic_block dest = e->dest;

	if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
	  num_exits++;
      }
  });

  if (! num_exits)
    return 0;

  *exits = (edge *) xmalloc (num_exits * sizeof (edge));

  /* Store all exiting edges into an array.  */
  num_exits = 0;
  EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
    for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
      {
	basic_block dest = e->dest;

	if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
	  (*exits)[num_exits++] = e;
      }
  });

  return num_exits;
}
/* Find the nodes contained within the loop with header HEADER and
   latch LATCH and store in NODES.  Return the number of nodes within
   the loop.  */
static int
flow_loop_nodes_find (header, latch, nodes)
     basic_block header;
     basic_block latch;
     sbitmap nodes;
{
  basic_block *stack;
  int sp;
  int num_nodes = 0;

  stack = (basic_block *) xmalloc (n_basic_blocks * sizeof (basic_block));
  sp = 0;

  /* Start with only the loop header in the set of loop nodes.  */
  sbitmap_zero (nodes);
  SET_BIT (nodes, header->index);
  num_nodes++;

  /* Push the loop latch on to the stack.  */
  if (! TEST_BIT (nodes, latch->index))
    {
      SET_BIT (nodes, latch->index);
      num_nodes++;
      stack[sp++] = latch;
    }

  while (sp)
    {
      basic_block node;
      edge e;

      node = stack[--sp];
      for (e = node->pred; e; e = e->pred_next)
	{
	  basic_block ancestor = e->src;

	  /* If this ancestor has not been marked as part of the loop,
	     add it to the set of loop nodes and push it onto the stack.  */
	  if (ancestor != ENTRY_BLOCK_PTR
	      && ! TEST_BIT (nodes, ancestor->index))
	    {
	      SET_BIT (nodes, ancestor->index);
	      num_nodes++;
	      stack[sp++] = ancestor;
	    }
	}
    }
  free (stack);
  return num_nodes;
}
/* Compute the depth first search order and store it in the array
   DFS_ORDER, using a local bitmap to mark visited nodes.  Return the
   number of nodes visited.  */
static int
flow_depth_first_order_compute (dfs_order)
     int *dfs_order;
{
  edge e;
  edge *stack;
  int sp;
  int dfsnum = 0;
  sbitmap visited;

  /* Allocate stack for back-tracking up the CFG.  */
  stack = (edge *) xmalloc (n_basic_blocks * sizeof (edge));
  sp = 0;

  /* Allocate bitmap to track nodes that have been visited.  */
  visited = sbitmap_alloc (n_basic_blocks);

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (visited);

  /* Start with the first successor edge from the entry block.  */
  e = ENTRY_BLOCK_PTR->succ;
  while (e)
    {
      basic_block src = e->src;
      basic_block dest = e->dest;

      /* Mark that we have visited this node.  */
      if (src != ENTRY_BLOCK_PTR)
	SET_BIT (visited, src->index);

      /* If this node has not been visited before, push the current
	 edge on to the stack and proceed with the first successor
	 edge of this node.  */
      if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index)
	  && dest->succ)
	{
	  stack[sp++] = e;
	  e = dest->succ;
	}
      else
	{
	  if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index)
	      && ! dest->succ)
	    {
	      /* DEST has no successors (for example, a non-returning
		 function is called) so do not push the current edge
		 but carry on with its next successor.  */
	      dfs_order[dest->index] = n_basic_blocks - ++dfsnum;
	      SET_BIT (visited, dest->index);
	    }

	  while (! e->succ_next && src != ENTRY_BLOCK_PTR)
	    {
	      dfs_order[src->index] = n_basic_blocks - ++dfsnum;

	      /* Pop edge off stack.  */
	      e = stack[--sp];
	      src = e->src;
	    }
	  e = e->succ_next;
	}
    }
  free (stack);
  sbitmap_free (visited);

  /* The number of nodes visited should not be greater than
     n_basic_blocks.  */
  if (dfsnum > n_basic_blocks)
    abort ();

  /* There are some nodes left in the CFG that are unreachable.  */
  if (dfsnum < n_basic_blocks)
    abort ();
  return dfsnum;
}
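
/* Illustrative sketch: DFS_ORDER is indexed by basic block number and
   holds n_basic_blocks minus the block's DFS completion number, so
   smaller values come earlier in the resulting order.  A client might
   dump the numbering like this (stderr is used only for illustration).  */
#if 0
  {
    int *dfs_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
    int i;

    flow_depth_first_order_compute (dfs_order);
    for (i = 0; i < n_basic_blocks; i++)
      fprintf (stderr, "block %d has DFS number %d\n", i, dfs_order[i]);
    free (dfs_order);
  }
#endif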
/* Return the block for the pre-header of the loop with header
   HEADER where DOM specifies the dominator information.  Return NULL if
   there is no pre-header.  */
static basic_block
flow_loop_pre_header_find (header, dom)
     basic_block header;
     const sbitmap *dom;
{
  basic_block pre_header;
  edge e;

  /* If block P is a predecessor of the header and is the only block
     that the header does not dominate, then it is the pre-header.  */
  pre_header = NULL;
  for (e = header->pred; e; e = e->pred_next)
    {
      basic_block node = e->src;

      if (node != ENTRY_BLOCK_PTR
	  && ! TEST_BIT (dom[node->index], header->index))
	{
	  if (pre_header == NULL)
	    pre_header = node;
	  else
	    {
	      /* There are multiple edges into the header from outside
		 the loop so there is no pre-header block.  */
	      pre_header = NULL;
	      break;
	    }
	}
    }
  return pre_header;
}
/* Add LOOP to the loop hierarchy tree so that it is a sibling or a
   descendant of ROOT.  */
static void
flow_loop_tree_node_add (root, loop)
     struct loop *root;
     struct loop *loop;
{
  struct loop *outer;

  if (! loop)
    return;

  for (outer = root; outer; outer = outer->next)
    {
      if (flow_loop_nested_p (outer, loop))
	{
	  if (outer->inner)
	    {
	      /* Add LOOP as a sibling or descendant of OUTER->INNER.  */
	      flow_loop_tree_node_add (outer->inner, loop);
	    }
	  else
	    {
	      /* Add LOOP as a child of OUTER.  */
	      outer->inner = loop;
	      loop->outer = outer;
	      loop->next = NULL;
	    }
	  return;
	}
    }
  /* Add LOOP as a sibling of ROOT.  */
  loop->next = root->next;
  root->next = loop;
  loop->outer = root->outer;
}
/* Build the loop hierarchy tree for LOOPS.  */
static void
flow_loops_tree_build (loops)
     struct loops *loops;
{
  int i;
  int num_loops;

  num_loops = loops->num;
  if (! num_loops)
    return;

  /* Root the loop hierarchy tree with the first loop found.
     Since we used a depth first search this should be the
     outermost loop.  */
  loops->tree = &loops->array[0];
  loops->tree->outer = loops->tree->inner = loops->tree->next = NULL;

  /* Add the remaining loops to the tree.  */
  for (i = 1; i < num_loops; i++)
    flow_loop_tree_node_add (loops->tree, &loops->array[i]);
}
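
/* Illustrative sketch: once built, the tree can be walked through the
   inner (first child) and next (sibling) links.  The function
   dump_loop_tree is hypothetical and dumps to stderr only for
   illustration.  */
#if 0
static void
dump_loop_tree (loop)
     struct loop *loop;
{
  for (; loop; loop = loop->next)
    {
      fprintf (stderr, ";; loop with header %d at depth %d\n",
	       loop->header->index, loop->depth);
      dump_loop_tree (loop->inner);
    }
}
#endif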
/* Helper function to compute loop nesting depth and enclosed loop level
   for the natural loop specified by LOOP at the loop depth DEPTH.
   Returns the loop level.  */
static int
flow_loop_level_compute (loop, depth)
     struct loop *loop;
     int depth;
{
  struct loop *inner;
  int level = 0;

  if (! loop)
    return 0;

  /* Traverse loop tree assigning depth and computing level as the
     maximum level of all the inner loops of this loop.  The loop
     level is equivalent to the height of the loop in the loop tree
     and corresponds to the number of enclosed loop levels.  */
  for (inner = loop->inner; inner; inner = inner->next)
    {
      int ilevel;

      ilevel = flow_loop_level_compute (inner, depth + 1) + 1;

      if (ilevel > level)
	level = ilevel;
    }
  loop->level = level;
  loop->depth = depth;
  return level;
}
/* Compute the loop nesting depth and enclosed loop level for the loop
   hierarchy tree specified by LOOPS.  Return the maximum enclosed loop
   level.  */
static int
flow_loops_level_compute (loops)
     struct loops *loops;
{
  return flow_loop_level_compute (loops->tree, 0);
}
/* Find all the natural loops in the function and save in LOOPS structure.
   Return the number of natural loops found.  */
int
flow_loops_find (loops)
     struct loops *loops;
{
  int i;
  int b;
  int num_loops;
  edge e;
  sbitmap headers;
  sbitmap *dom;
  int *dfs_order;

  loops->num = 0;
  loops->array = NULL;
  loops->tree = NULL;
  dfs_order = NULL;

  /* Taking care of this degenerate case makes the rest of
     this code simpler.  */
  if (n_basic_blocks == 0)
    return 0;

  /* Compute the dominators.  */
  dom = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
  compute_flow_dominators (dom, NULL);

  /* Count the number of loop edges (back edges).  This should be the
     same as the number of natural loops.  */
  num_loops = 0;
  for (b = 0; b < n_basic_blocks; b++)
    {
      for (e = BASIC_BLOCK (b)->pred; e; e = e->pred_next)
	{
	  basic_block latch = e->src;

	  /* Look for back edges where a predecessor is dominated
	     by this block.  A natural loop has a single entry
	     node (header) that dominates all the nodes in the
	     loop.  It also has a single back edge to the header
	     from a latch node.  Note that multiple natural loops
	     may share the same header.  */
	  if (latch != ENTRY_BLOCK_PTR && TEST_BIT (dom[latch->index], b))
	    num_loops++;
	}
    }

  if (num_loops)
    {
      /* Compute depth first search order of the CFG so that outer
	 natural loops will be found before inner natural loops.  */
      dfs_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
      flow_depth_first_order_compute (dfs_order);

      /* Allocate loop structures.  */
      loops->array = (struct loop *)
	xcalloc (num_loops, sizeof (struct loop));

      headers = sbitmap_alloc (n_basic_blocks);
      sbitmap_zero (headers);

      loops->shared_headers = sbitmap_alloc (n_basic_blocks);
      sbitmap_zero (loops->shared_headers);

      /* Find and record information about all the natural loops
	 in the CFG.  */
      num_loops = 0;
      for (b = 0; b < n_basic_blocks; b++)
	{
	  basic_block header;

	  /* Search the nodes of the CFG in DFS order so that we can
	     find outer loops first.  */
	  header = BASIC_BLOCK (dfs_order[b]);

	  /* Look for all the possible latch blocks for this header.  */
	  for (e = header->pred; e; e = e->pred_next)
	    {
	      basic_block latch = e->src;

	      /* Look for back edges where a predecessor is dominated
		 by this block.  A natural loop has a single entry
		 node (header) that dominates all the nodes in the
		 loop.  It also has a single back edge to the header
		 from a latch node.  Note that multiple natural loops
		 may share the same header.  */
	      if (latch != ENTRY_BLOCK_PTR
		  && TEST_BIT (dom[latch->index], header->index))
		{
		  struct loop *loop;

		  loop = loops->array + num_loops;

		  loop->header = header;
		  loop->latch = latch;

		  /* Keep track of blocks that are loop headers so
		     that we can tell which loops should be merged.  */
		  if (TEST_BIT (headers, header->index))
		    SET_BIT (loops->shared_headers, header->index);
		  SET_BIT (headers, header->index);

		  /* Find nodes contained within the loop.  */
		  loop->nodes = sbitmap_alloc (n_basic_blocks);
		  loop->num_nodes =
		    flow_loop_nodes_find (header, latch, loop->nodes);

		  /* Find edges which exit the loop.  Note that a node
		     may have several exit edges.  */
		  loop->num_exits
		    = flow_loop_exits_find (loop->nodes, &loop->exits);

		  /* Look to see if the loop has a pre-header node.  */
		  loop->pre_header
		    = flow_loop_pre_header_find (header, dom);

		  num_loops++;
		}
	    }
	}

      /* Natural loops with shared headers may either be disjoint or
	 nested.  Disjoint loops with shared headers cannot be inner
	 loops and should be merged.  For now just mark loops that share
	 headers.  */
      for (i = 0; i < num_loops; i++)
	if (TEST_BIT (loops->shared_headers, loops->array[i].header->index))
	  loops->array[i].shared = 1;

      sbitmap_free (headers);
    }

  loops->num = num_loops;

  /* Save CFG derived information to avoid recomputing it.  */
  loops->cfg.dom = dom;
  loops->cfg.dfs_order = dfs_order;

  /* Build the loop hierarchy tree.  */
  flow_loops_tree_build (loops);

  /* Assign the loop nesting depth and enclosed loop level for each
     loop.  */
  flow_loops_level_compute (loops);

  return num_loops;
}
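
/* Typical usage sketch (illustration only, assuming the usual RTL dump
   stream rtl_dump_file): find the natural loops, dump them for
   debugging, and release the memory once the client is finished.  */
#if 0
  {
    struct loops loops;

    if (flow_loops_find (&loops))
      {
	flow_loops_dump (&loops, rtl_dump_file, 1);
	flow_loops_free (&loops);
      }
  }
#endif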
/* Return non-zero if edge E enters header of LOOP from outside of LOOP.  */

int
flow_loop_outside_edge_p (loop, e)
     const struct loop *loop;
     edge e;
{
  if (e->dest != loop->header)
    abort ();
  return (e->src == ENTRY_BLOCK_PTR)
    || ! TEST_BIT (loop->nodes, e->src->index);
}