Daily bump.
[official-gcc.git] / gcc / flow.c
blobedcee2919ad7cadf7685784be1c2a1d72f9e65f1
1 /* Data flow analysis for GNU compiler.
2 Copyright (C) 1987, 88, 92-98, 1999 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
22 /* This file contains the data flow analysis pass of the compiler. It
23 computes data flow information which tells combine_instructions
24 which insns to consider combining and controls register allocation.
26 Additional data flow information that is too bulky to record is
27 generated during the analysis, and is used at that time to create
28 autoincrement and autodecrement addressing.
30 The first step is dividing the function into basic blocks.
31 find_basic_blocks does this. Then life_analysis determines
32 where each register is live and where it is dead.
34 ** find_basic_blocks **
36 find_basic_blocks divides the current function's rtl into basic
37 blocks and constructs the CFG. The blocks are recorded in the
38 basic_block_info array; the CFG exists in the edge structures
39 referenced by the blocks.
41 find_basic_blocks also finds any unreachable loops and deletes them.
43 ** life_analysis **
45 life_analysis is called immediately after find_basic_blocks.
46 It uses the basic block information to determine where each
47 hard or pseudo register is live.
49 ** live-register info **
51 The information about where each register is live is in two parts:
52 the REG_NOTES of insns, and the vector basic_block->global_live_at_start.
54 basic_block->global_live_at_start has an element for each basic
55 block, and the element is a bit-vector with a bit for each hard or
56 pseudo register. The bit is 1 if the register is live at the
57 beginning of the basic block.
59 Two types of elements can be added to an insn's REG_NOTES.
60 A REG_DEAD note is added to an insn's REG_NOTES for any register
61 that meets both of two conditions: The value in the register is not
62 needed in subsequent insns and the insn does not replace the value in
63 the register (in the case of multi-word hard registers, the value in
64 each register must be replaced by the insn to avoid a REG_DEAD note).
66 In the vast majority of cases, an object in a REG_DEAD note will be
67 used somewhere in the insn. The (rare) exception to this is if an
68 insn uses a multi-word hard register and only some of the registers are
69 needed in subsequent insns. In that case, REG_DEAD notes will be
70 provided for those hard registers that are not subsequently needed.
71 Partial REG_DEAD notes of this type do not occur when an insn sets
72 only some of the hard registers used in such a multi-word operand;
73 omitting REG_DEAD notes for objects stored in an insn is optional and
74 the desire to do so does not justify the complexity of the partial
75 REG_DEAD notes.
77 REG_UNUSED notes are added for each register that is set by the insn
78 but is unused subsequently (if every register set by the insn is unused
79 and the insn does not reference memory or have some other side-effect,
80 the insn is deleted instead). If only part of a multi-word hard
81 register is used in a subsequent insn, REG_UNUSED notes are made for
82 the parts that will not be used.
84 To determine which registers are live after any insn, one can
85 start from the beginning of the basic block and scan insns, noting
86 which registers are set by each insn and which die there.
88 ** Other actions of life_analysis **
90 life_analysis sets up the LOG_LINKS fields of insns because the
91 information needed to do so is readily available.
93 life_analysis deletes insns whose only effect is to store a value
94 that is never used.
96 life_analysis notices cases where a reference to a register as
97 a memory address can be combined with a preceding or following
98 incrementation or decrementation of the register. The separate
99 instruction to increment or decrement is deleted and the address
100 is changed to a POST_INC or similar rtx.
102 Each time an incrementing or decrementing address is created,
103 a REG_INC element is added to the insn's REG_NOTES list.
105 life_analysis fills in certain vectors containing information about
106 register usage: reg_n_refs, reg_n_deaths, reg_n_sets, reg_live_length,
107 reg_n_calls_crosses and reg_basic_block.
109 life_analysis sets current_function_sp_is_unchanging if the function
110 doesn't modify the stack pointer. */
112 /* TODO:
114 Split out from life_analysis:
115 - local property discovery (bb->local_live, bb->local_set)
116 - global property computation
117 - log links creation
118 - pre/post modify transformation
121 #include "config.h"
122 #include "system.h"
123 #include "rtl.h"
124 #include "basic-block.h"
125 #include "insn-config.h"
126 #include "regs.h"
127 #include "hard-reg-set.h"
128 #include "flags.h"
129 #include "output.h"
130 #include "except.h"
131 #include "toplev.h"
132 #include "recog.h"
133 #include "insn-flags.h"
135 #include "obstack.h"
136 #define obstack_chunk_alloc xmalloc
137 #define obstack_chunk_free free
140 /* EXIT_IGNORE_STACK should be nonzero if, when returning from a function,
141 the stack pointer does not matter. The value is tested only in
142 functions that have frame pointers.
143 No definition is equivalent to always zero. */
144 #ifndef EXIT_IGNORE_STACK
145 #define EXIT_IGNORE_STACK 0
146 #endif
149 /* The contents of the current function definition are allocated
150 in this obstack, and all are freed at the end of the function.
151 For top-level functions, this is temporary_obstack.
152 Separate obstacks are made for nested functions. */
154 extern struct obstack *function_obstack;
156 /* List of labels that must never be deleted. */
157 extern rtx forced_labels;
159 /* Number of basic blocks in the current function. */
161 int n_basic_blocks;
163 /* The basic block array. */
165 varray_type basic_block_info;
167 /* The special entry and exit blocks. */
169 struct basic_block_def entry_exit_blocks[2] =
172 NULL, /* head */
173 NULL, /* end */
174 NULL, /* pred */
175 NULL, /* succ */
176 NULL, /* local_set */
177 NULL, /* global_live_at_start */
178 NULL, /* global_live_at_end */
179 NULL, /* aux */
180 ENTRY_BLOCK, /* index */
181 0 /* loop_depth */
184 NULL, /* head */
185 NULL, /* end */
186 NULL, /* pred */
187 NULL, /* succ */
188 NULL, /* local_set */
189 NULL, /* global_live_at_start */
190 NULL, /* global_live_at_end */
191 NULL, /* aux */
192 EXIT_BLOCK, /* index */
193 0 /* loop_depth */
197 /* Nonzero if the second flow pass has completed. */
198 int flow2_completed;
200 /* Maximum register number used in this function, plus one. */
202 int max_regno;
204 /* Indexed by n, giving various register information */
206 varray_type reg_n_info;
208 /* Size of the reg_n_info table. */
210 unsigned int reg_n_max;
212 /* Element N is the next insn that uses (hard or pseudo) register number N
213 within the current basic block; or zero, if there is no such insn.
214 This is valid only during the final backward scan in propagate_block. */
216 static rtx *reg_next_use;
218 /* Size of a regset for the current function,
219 in (1) bytes and (2) elements. */
221 int regset_bytes;
222 int regset_size;
224 /* Regset of regs live when calls to `setjmp'-like functions happen. */
225 /* ??? Does this exist only for the setjmp-clobbered warning message? */
227 regset regs_live_at_setjmp;
229 /* List made of EXPR_LIST rtx's which gives pairs of pseudo registers
230 that have to go in the same hard reg.
231 The first two regs in the list are a pair, and the next two
232 are another pair, etc. */
233 rtx regs_may_share;
235 /* Depth within loops of basic block being scanned for lifetime analysis,
236 plus one. This is the weight attached to references to registers. */
238 static int loop_depth;
240 /* During propagate_block, this is non-zero if the value of CC0 is live. */
242 static int cc0_live;
244 /* During propagate_block, this contains a list of all the MEMs we are
245 tracking for dead store elimination.
247 ?!? Note we leak memory by not free-ing items on this list. We need to
248 write some generic routines to operate on memory lists since cse, gcse,
249 loop, sched, flow and possibly other passes all need to do basically the
250 same operations on these lists. */
252 static rtx mem_set_list;
254 /* Set of registers that may be eliminable. These are handled specially
255 in updating regs_ever_live. */
257 static HARD_REG_SET elim_reg_set;
259 /* The basic block structure for every insn, indexed by uid. */
261 varray_type basic_block_for_insn;
263 /* The labels mentioned in non-jump rtl. Valid during find_basic_blocks. */
264 /* ??? Should probably be using LABEL_NUSES instead. It would take a
265 bit of surgery to be able to use or co-opt the routines in jump. */
267 static rtx label_value_list;
269 /* INSN_VOLATILE (insn) is 1 if the insn refers to anything volatile. */
271 #define INSN_VOLATILE(INSN) bitmap_bit_p (uid_volatile, INSN_UID (INSN))
272 #define SET_INSN_VOLATILE(INSN) bitmap_set_bit (uid_volatile, INSN_UID (INSN))
273 static bitmap uid_volatile;
275 /* Forward declarations */
276 static int count_basic_blocks PROTO((rtx));
277 static rtx find_basic_blocks_1 PROTO((rtx, rtx*));
278 static void create_basic_block PROTO((int, rtx, rtx, rtx));
279 static void compute_bb_for_insn PROTO((varray_type, int));
280 static void clear_edges PROTO((void));
281 static void make_edges PROTO((rtx, rtx*));
282 static void make_edge PROTO((basic_block, basic_block, int));
283 static void make_label_edge PROTO((basic_block, rtx, int));
284 static void mark_critical_edges PROTO((void));
286 static void commit_one_edge_insertion PROTO((edge));
288 static void delete_unreachable_blocks PROTO((void));
289 static void delete_eh_regions PROTO((void));
290 static int can_delete_note_p PROTO((rtx));
291 static void delete_insn_chain PROTO((rtx, rtx));
292 static int delete_block PROTO((basic_block));
293 static void expunge_block PROTO((basic_block));
294 static rtx flow_delete_insn PROTO((rtx));
295 static int can_delete_label_p PROTO((rtx));
296 static void merge_blocks_nomove PROTO((basic_block, basic_block));
297 static int merge_blocks PROTO((edge,basic_block,basic_block));
298 static void tidy_fallthru_edge PROTO((edge,basic_block,basic_block));
299 static void calculate_loop_depth PROTO((rtx));
301 static int set_noop_p PROTO((rtx));
302 static int noop_move_p PROTO((rtx));
303 static void notice_stack_pointer_modification PROTO ((rtx, rtx));
304 static void record_volatile_insns PROTO((rtx));
305 static void mark_regs_live_at_end PROTO((regset));
306 static void life_analysis_1 PROTO((rtx, int, int));
307 static void init_regset_vector PROTO ((regset *, int,
308 struct obstack *));
309 static void propagate_block PROTO((regset, rtx, rtx, int,
310 regset, int, int));
311 static int insn_dead_p PROTO((rtx, regset, int, rtx));
312 static int libcall_dead_p PROTO((rtx, regset, rtx, rtx));
313 static void mark_set_regs PROTO((regset, regset, rtx,
314 rtx, regset));
315 static void mark_set_1 PROTO((regset, regset, rtx,
316 rtx, regset));
317 #ifdef AUTO_INC_DEC
318 static void find_auto_inc PROTO((regset, rtx, rtx));
319 static int try_pre_increment_1 PROTO((rtx));
320 static int try_pre_increment PROTO((rtx, rtx, HOST_WIDE_INT));
321 #endif
322 static void mark_used_regs PROTO((regset, regset, rtx, int, rtx));
323 void dump_flow_info PROTO((FILE *));
324 static void dump_edge_info PROTO((FILE *, edge, int));
326 static int_list_ptr alloc_int_list_node PROTO ((int_list_block **));
327 static int_list_ptr add_int_list_node PROTO ((int_list_block **,
328 int_list **, int));
330 static void add_pred_succ PROTO ((int, int, int_list_ptr *,
331 int_list_ptr *, int *, int *));
333 static void count_reg_sets_1 PROTO ((rtx));
334 static void count_reg_sets PROTO ((rtx));
335 static void count_reg_references PROTO ((rtx));
336 static void notice_stack_pointer_modification PROTO ((rtx, rtx));
337 static void invalidate_mems_from_autoinc PROTO ((rtx));
338 void verify_flow_info PROTO ((void));
340 /* Find basic blocks of the current function.
341 F is the first insn of the function and NREGS the number of register
342 numbers in use. */
344 void
345 find_basic_blocks (f, nregs, file, do_cleanup)
346 rtx f;
347 int nregs ATTRIBUTE_UNUSED;
348 FILE *file ATTRIBUTE_UNUSED;
349 int do_cleanup;
351 rtx *bb_eh_end;
352 int max_uid;
354 /* Flush out existing data. */
355 if (basic_block_info != NULL)
357 int i;
359 clear_edges ();
361 /* Clear bb->aux on all extant basic blocks. We'll use this as a
362 tag for reuse during create_basic_block, just in case some pass
363 copies around basic block notes improperly. */
364 for (i = 0; i < n_basic_blocks; ++i)
365 BASIC_BLOCK (i)->aux = NULL;
367 VARRAY_FREE (basic_block_info);
370 n_basic_blocks = count_basic_blocks (f);
372 /* Size the basic block table. The actual structures will be allocated
373 by find_basic_blocks_1, since we want to keep the structure pointers
374 stable across calls to find_basic_blocks. */
375 /* ??? This whole issue would be much simpler if we called find_basic_blocks
376 exactly once, and thereafter we don't have a single long chain of
377 instructions at all until close to the end of compilation when we
378 actually lay them out. */
380 VARRAY_BB_INIT (basic_block_info, n_basic_blocks, "basic_block_info");
382 /* An array to record the active exception region at the end of each
383 basic block. It is filled in by find_basic_blocks_1 for make_edges. */
384 bb_eh_end = (rtx *) alloca (n_basic_blocks * sizeof (rtx));
386 label_value_list = find_basic_blocks_1 (f, bb_eh_end);
388 /* Record the block to which an insn belongs. */
389 /* ??? This should be done another way, by which (perhaps) a label is
390 tagged directly with the basic block that it starts. It is used for
391 more than that currently, but IMO that is the only valid use. */
393 max_uid = get_max_uid ();
394 #ifdef AUTO_INC_DEC
395 /* Leave space for insns life_analysis makes in some cases for auto-inc.
396 These cases are rare, so we don't need too much space. */
397 max_uid += max_uid / 10;
398 #endif
400 VARRAY_BB_INIT (basic_block_for_insn, max_uid, "basic_block_for_insn");
401 compute_bb_for_insn (basic_block_for_insn, max_uid);
403 /* Discover the edges of our cfg. */
405 make_edges (label_value_list, bb_eh_end);
407 /* Delete unreachable blocks. */
409 if (do_cleanup)
410 delete_unreachable_blocks ();
412 /* Mark critical edges. */
414 mark_critical_edges ();
416 /* Discover the loop depth at the start of each basic block to aid
417 register allocation. */
418 calculate_loop_depth (f);
420 /* Kill the data we won't maintain. */
421 label_value_list = 0;
423 #ifdef ENABLE_CHECKING
424 verify_flow_info ();
425 #endif
428 /* Count the basic blocks of the function. */
430 static int
431 count_basic_blocks (f)
432 rtx f;
434 register rtx insn;
435 register RTX_CODE prev_code;
436 register int count = 0;
437 int eh_region = 0;
438 int call_had_abnormal_edge = 0;
439 rtx prev_call = NULL_RTX;
441 prev_code = JUMP_INSN;
442 for (insn = f; insn; insn = NEXT_INSN (insn))
444 register RTX_CODE code = GET_CODE (insn);
446 if (code == CODE_LABEL
447 || (GET_RTX_CLASS (code) == 'i'
448 && (prev_code == JUMP_INSN
449 || prev_code == BARRIER
450 || (prev_code == CALL_INSN && call_had_abnormal_edge))))
452 count++;
454 /* If the previous insn was a call that did not create an
455 abnormal edge, we want to add a nop so that the CALL_INSN
456 itself is not at basic_block_end. This allows us to
457 easily distinguish between normal calls and those which
458 create abnormal edges in the flow graph. */
460 if (count > 0 && prev_call != 0 && !call_had_abnormal_edge)
462 rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
463 emit_insn_after (nop, prev_call);
467 /* Record whether this call created an edge. */
468 if (code == CALL_INSN)
470 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
471 int region = (note ? XINT (XEXP (note, 0), 0) : 1);
472 prev_call = insn;
473 call_had_abnormal_edge = 0;
475 /* If there is a specified EH region, we have an edge. */
476 if (eh_region && region > 0)
477 call_had_abnormal_edge = 1;
478 else
480 /* If there is a nonlocal goto label and the specified
481 region number isn't -1, we have an edge. (0 means
482 no throw, but might have a nonlocal goto). */
483 if (nonlocal_goto_handler_labels && region >= 0)
484 call_had_abnormal_edge = 1;
487 else if (code != NOTE)
488 prev_call = NULL_RTX;
490 if (code != NOTE)
491 prev_code = code;
492 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
493 ++eh_region;
494 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
495 --eh_region;
499 /* The rest of the compiler works a bit smoother when we don't have to
500 check for the edge case of do-nothing functions with no basic blocks. */
501 if (count == 0)
503 emit_insn (gen_rtx_USE (VOIDmode, const0_rtx));
504 count = 1;
507 return count;
510 /* Find all basic blocks of the function whose first insn is F.
511 Store the correct data in the tables that describe the basic blocks,
512 set up the chains of references for each CODE_LABEL, and
513 delete any entire basic blocks that cannot be reached.
515 NONLOCAL_LABEL_LIST is a list of non-local labels in the function. Blocks
516 that are otherwise unreachable may be reachable with a non-local goto.
518 BB_EH_END is an array in which we record the list of exception regions
519 active at the end of every basic block. */
521 static rtx
522 find_basic_blocks_1 (f, bb_eh_end)
523 rtx f;
524 rtx *bb_eh_end;
526 register rtx insn, next;
527 int call_has_abnormal_edge = 0;
528 int i = 0;
529 rtx bb_note = NULL_RTX;
530 rtx eh_list = NULL_RTX;
531 rtx label_value_list = NULL_RTX;
532 rtx head = NULL_RTX;
533 rtx end = NULL_RTX;
535 /* We process the instructions in a slightly different way than we did
536 previously. This is so that we see a NOTE_BASIC_BLOCK after we have
537 closed out the previous block, so that it gets attached at the proper
538 place. Since this form should be equivalent to the previous,
539 find_basic_blocks_0 continues to use the old form as a check. */
541 for (insn = f; insn; insn = next)
543 enum rtx_code code = GET_CODE (insn);
545 next = NEXT_INSN (insn);
547 if (code == CALL_INSN)
549 /* Record whether this call created an edge. */
550 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
551 int region = (note ? XINT (XEXP (note, 0), 0) : 1);
552 call_has_abnormal_edge = 0;
554 /* If there is an EH region, we have an edge. */
555 if (eh_list && region > 0)
556 call_has_abnormal_edge = 1;
557 else
559 /* If there is a nonlocal goto label and the specified
560 region number isn't -1, we have an edge. (0 means
561 no throw, but might have a nonlocal goto). */
562 if (nonlocal_goto_handler_labels && region >= 0)
563 call_has_abnormal_edge = 1;
567 switch (code)
569 case NOTE:
571 int kind = NOTE_LINE_NUMBER (insn);
573 /* Keep a LIFO list of the currently active exception notes. */
574 if (kind == NOTE_INSN_EH_REGION_BEG)
575 eh_list = gen_rtx_INSN_LIST (VOIDmode, insn, eh_list);
576 else if (kind == NOTE_INSN_EH_REGION_END)
577 eh_list = XEXP (eh_list, 1);
579 /* Look for basic block notes with which to keep the
580 basic_block_info pointers stable. Unthread the note now;
581 we'll put it back at the right place in create_basic_block.
582 Or not at all if we've already found a note in this block. */
583 else if (kind == NOTE_INSN_BASIC_BLOCK)
585 if (bb_note == NULL_RTX)
586 bb_note = insn;
587 next = flow_delete_insn (insn);
590 break;
593 case CODE_LABEL:
594 /* A basic block starts at a label. If we've closed one off due
595 to a barrier or some such, no need to do it again. */
596 if (head != NULL_RTX)
598 /* While we now have edge lists with which other portions of
599 the compiler might determine a call ending a basic block
600 does not imply an abnormal edge, it will be a bit before
601 everything can be updated. So continue to emit a noop at
602 the end of such a block. */
603 if (GET_CODE (end) == CALL_INSN)
605 rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
606 end = emit_insn_after (nop, end);
609 bb_eh_end[i] = eh_list;
610 create_basic_block (i++, head, end, bb_note);
611 bb_note = NULL_RTX;
613 head = end = insn;
614 break;
616 case JUMP_INSN:
617 /* A basic block ends at a jump. */
618 if (head == NULL_RTX)
619 head = insn;
620 else
622 /* ??? Make a special check for table jumps. The way this
623 happens is truely and amazingly gross. We are about to
624 create a basic block that contains just a code label and
625 an addr*vec jump insn. Worse, an addr_diff_vec creates
626 its own natural loop.
628 Prevent this bit of brain damage, pasting things together
629 correctly in make_edges.
631 The correct solution involves emitting the table directly
632 on the tablejump instruction as a note, or JUMP_LABEL. */
634 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
635 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
637 head = end = NULL;
638 n_basic_blocks--;
639 break;
642 end = insn;
643 goto new_bb_inclusive;
645 case BARRIER:
646 /* A basic block ends at a barrier. It may be that an unconditional
647 jump already closed the basic block -- no need to do it again. */
648 if (head == NULL_RTX)
649 break;
651 /* While we now have edge lists with which other portions of the
652 compiler might determine a call ending a basic block does not
653 imply an abnormal edge, it will be a bit before everything can
654 be updated. So continue to emit a noop at the end of such a
655 block. */
656 if (GET_CODE (end) == CALL_INSN)
658 rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
659 end = emit_insn_after (nop, end);
661 goto new_bb_exclusive;
663 case CALL_INSN:
664 /* A basic block ends at a call that can either throw or
665 do a non-local goto. */
666 if (call_has_abnormal_edge)
668 new_bb_inclusive:
669 if (head == NULL_RTX)
670 head = insn;
671 end = insn;
673 new_bb_exclusive:
674 bb_eh_end[i] = eh_list;
675 create_basic_block (i++, head, end, bb_note);
676 head = end = NULL_RTX;
677 bb_note = NULL_RTX;
678 break;
680 /* FALLTHRU */
682 default:
683 if (GET_RTX_CLASS (code) == 'i')
685 if (head == NULL_RTX)
686 head = insn;
687 end = insn;
689 break;
692 if (GET_RTX_CLASS (code) == 'i')
694 rtx note;
696 /* Make a list of all labels referred to other than by jumps
697 (which just don't have the REG_LABEL notes).
699 Make a special exception for labels followed by an ADDR*VEC,
700 as this would be a part of the tablejump setup code.
702 Make a special exception for the eh_return_stub_label, which
703 we know isn't part of any otherwise visible control flow. */
705 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
706 if (REG_NOTE_KIND (note) == REG_LABEL)
708 rtx lab = XEXP (note, 0), next;
710 if (lab == eh_return_stub_label)
712 else if ((next = next_nonnote_insn (lab)) != NULL
713 && GET_CODE (next) == JUMP_INSN
714 && (GET_CODE (PATTERN (next)) == ADDR_VEC
715 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
717 else
718 label_value_list
719 = gen_rtx_EXPR_LIST (VOIDmode, XEXP (note, 0),
720 label_value_list);
725 if (head != NULL_RTX)
727 bb_eh_end[i] = eh_list;
728 create_basic_block (i++, head, end, bb_note);
731 if (i != n_basic_blocks)
732 abort ();
734 return label_value_list;
737 /* Create a new basic block consisting of the instructions between
738 HEAD and END inclusive. Reuses the note and basic block struct
739 in BB_NOTE, if any. */
741 static void
742 create_basic_block (index, head, end, bb_note)
743 int index;
744 rtx head, end, bb_note;
746 basic_block bb;
748 if (bb_note
749 && ! RTX_INTEGRATED_P (bb_note)
750 && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
751 && bb->aux == NULL)
753 /* If we found an existing note, thread it back onto the chain. */
755 if (GET_CODE (head) == CODE_LABEL)
756 add_insn_after (bb_note, head);
757 else
759 add_insn_before (bb_note, head);
760 head = bb_note;
763 else
765 /* Otherwise we must create a note and a basic block structure.
766 Since we allow basic block structs in rtl, give the struct
767 the same lifetime by allocating it off the function obstack
768 rather than using malloc. */
770 bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
771 memset (bb, 0, sizeof (*bb));
773 if (GET_CODE (head) == CODE_LABEL)
774 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
775 else
777 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
778 head = bb_note;
780 NOTE_BASIC_BLOCK (bb_note) = bb;
783 /* Always include the bb note in the block. */
784 if (NEXT_INSN (end) == bb_note)
785 end = bb_note;
787 bb->head = head;
788 bb->end = end;
789 bb->index = index;
790 BASIC_BLOCK (index) = bb;
792 /* Tag the block so that we know it has been used when considering
793 other basic block notes. */
794 bb->aux = bb;
797 /* Records the basic block struct in BB_FOR_INSN, for every instruction
798 indexed by INSN_UID. MAX is the size of the array. */
800 static void
801 compute_bb_for_insn (bb_for_insn, max)
802 varray_type bb_for_insn;
803 int max;
805 int i;
807 for (i = 0; i < n_basic_blocks; ++i)
809 basic_block bb = BASIC_BLOCK (i);
810 rtx insn, end;
812 end = bb->end;
813 insn = bb->head;
814 while (1)
816 int uid = INSN_UID (insn);
817 if (uid < max)
818 VARRAY_BB (bb_for_insn, uid) = bb;
819 if (insn == end)
820 break;
821 insn = NEXT_INSN (insn);
826 /* Free the memory associated with the edge structures. */
828 static void
829 clear_edges ()
831 int i;
832 edge n, e;
834 for (i = 0; i < n_basic_blocks; ++i)
836 basic_block bb = BASIC_BLOCK (i);
838 for (e = bb->succ; e ; e = n)
840 n = e->succ_next;
841 free (e);
844 bb->succ = 0;
845 bb->pred = 0;
848 for (e = ENTRY_BLOCK_PTR->succ; e ; e = n)
850 n = e->succ_next;
851 free (e);
854 ENTRY_BLOCK_PTR->succ = 0;
855 EXIT_BLOCK_PTR->pred = 0;
858 /* Identify the edges between basic blocks.
860 NONLOCAL_LABEL_LIST is a list of non-local labels in the function. Blocks
861 that are otherwise unreachable may be reachable with a non-local goto.
863 BB_EH_END is an array indexed by basic block number in which we record
864 the list of exception regions active at the end of the basic block. */
866 static void
867 make_edges (label_value_list, bb_eh_end)
868 rtx label_value_list;
869 rtx *bb_eh_end;
871 int i;
873 /* Assume no computed jump; revise as we create edges. */
874 current_function_has_computed_jump = 0;
876 /* By nature of the way these get numbered, block 0 is always the entry. */
877 make_edge (ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU);
879 for (i = 0; i < n_basic_blocks; ++i)
881 basic_block bb = BASIC_BLOCK (i);
882 rtx insn, x, eh_list;
883 enum rtx_code code;
884 int force_fallthru = 0;
886 /* If we have asynchronous exceptions, scan the notes for all exception
887 regions active in the block. In the normal case, we only need the
888 one active at the end of the block, which is bb_eh_end[i]. */
890 eh_list = bb_eh_end[i];
891 if (asynchronous_exceptions)
893 for (insn = bb->end; insn != bb->head; insn = PREV_INSN (insn))
895 if (GET_CODE (insn) == NOTE
896 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
897 eh_list = gen_rtx_INSN_LIST (VOIDmode, insn, eh_list);
901 /* Now examine the last instruction of the block, and discover the
902 ways we can leave the block. */
904 insn = bb->end;
905 code = GET_CODE (insn);
907 /* A branch. */
908 if (code == JUMP_INSN)
910 rtx tmp;
912 /* ??? Recognize a tablejump and do the right thing. */
913 if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
914 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
915 && GET_CODE (tmp) == JUMP_INSN
916 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
917 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
919 rtvec vec;
920 int j;
922 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
923 vec = XVEC (PATTERN (tmp), 0);
924 else
925 vec = XVEC (PATTERN (tmp), 1);
927 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
928 make_label_edge (bb, XEXP (RTVEC_ELT (vec, j), 0), 0);
930 /* Some targets (eg, ARM) emit a conditional jump that also
931 contains the out-of-range target. Scan for these and
932 add an edge if necessary. */
933 if ((tmp = single_set (insn)) != NULL
934 && SET_DEST (tmp) == pc_rtx
935 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
936 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
937 make_label_edge (bb, XEXP (XEXP (SET_SRC (tmp), 2), 0), 0);
939 #ifdef CASE_DROPS_THROUGH
940 /* Silly VAXen. The ADDR_VEC is going to be in the way of
941 us naturally detecting fallthru into the next block. */
942 force_fallthru = 1;
943 #endif
946 /* If this is a computed jump, then mark it as reaching
947 everything on the label_value_list and forced_labels list. */
948 else if (computed_jump_p (insn))
950 current_function_has_computed_jump = 1;
952 for (x = label_value_list; x; x = XEXP (x, 1))
953 make_label_edge (bb, XEXP (x, 0), EDGE_ABNORMAL);
955 for (x = forced_labels; x; x = XEXP (x, 1))
956 make_label_edge (bb, XEXP (x, 0), EDGE_ABNORMAL);
959 /* Returns create an exit out. */
960 else if (returnjump_p (insn))
961 make_edge (bb, EXIT_BLOCK_PTR, 0);
963 /* Otherwise, we have a plain conditional or unconditional jump. */
964 else
966 if (! JUMP_LABEL (insn))
967 abort ();
968 make_label_edge (bb, JUMP_LABEL (insn), 0);
972 /* If this is a CALL_INSN, then mark it as reaching the active EH
973 handler for this CALL_INSN. If we're handling asynchronous
974 exceptions then any insn can reach any of the active handlers.
976 Also mark the CALL_INSN as reaching any nonlocal goto handler. */
978 if (code == CALL_INSN || asynchronous_exceptions)
980 int is_call = (code == CALL_INSN ? EDGE_ABNORMAL_CALL : 0);
981 handler_info *ptr;
983 /* Use REG_EH_RETHROW and REG_EH_REGION if available. */
984 /* ??? REG_EH_REGION is not generated presently. Is it
985 inteded that there be multiple notes for the regions?
986 or is my eh_list collection redundant with handler linking? */
988 x = find_reg_note (insn, REG_EH_RETHROW, 0);
989 if (!x)
990 x = find_reg_note (insn, REG_EH_REGION, 0);
991 if (x)
993 if (XINT (XEXP (x, 0), 0) > 0)
995 ptr = get_first_handler (XINT (XEXP (x, 0), 0));
996 while (ptr)
998 make_label_edge (bb, ptr->handler_label,
999 EDGE_ABNORMAL | EDGE_EH | is_call);
1000 ptr = ptr->next;
1004 else
1006 for (x = eh_list; x; x = XEXP (x, 1))
1008 ptr = get_first_handler (NOTE_BLOCK_NUMBER (XEXP (x, 0)));
1009 while (ptr)
1011 make_label_edge (bb, ptr->handler_label,
1012 EDGE_ABNORMAL | EDGE_EH | is_call);
1013 ptr = ptr->next;
1018 if (code == CALL_INSN && nonlocal_goto_handler_labels)
1020 /* ??? This could be made smarter: in some cases it's possible
1021 to tell that certain calls will not do a nonlocal goto.
1023 For example, if the nested functions that do the nonlocal
1024 gotos do not have their addresses taken, then only calls to
1025 those functions or to other nested functions that use them
1026 could possibly do nonlocal gotos. */
1028 for (x = nonlocal_goto_handler_labels; x ; x = XEXP (x, 1))
1029 make_label_edge (bb, XEXP (x, 0),
1030 EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
1034 /* We know something about the structure of the function __throw in
1035 libgcc2.c. It is the only function that ever contains eh_stub
1036 labels. It modifies its return address so that the last block
1037 returns to one of the eh_stub labels within it. So we have to
1038 make additional edges in the flow graph. */
1039 if (i + 1 == n_basic_blocks && eh_return_stub_label != 0)
1040 make_label_edge (bb, eh_return_stub_label, EDGE_EH);
1042 /* Find out if we can drop through to the next block. */
1043 insn = next_nonnote_insn (insn);
1044 if (!insn || (i + 1 == n_basic_blocks && force_fallthru))
1045 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
1046 else if (i + 1 < n_basic_blocks)
1048 rtx tmp = BLOCK_HEAD (i + 1);
1049 if (GET_CODE (tmp) == NOTE)
1050 tmp = next_nonnote_insn (tmp);
1051 if (force_fallthru || insn == tmp)
1052 make_edge (bb, BASIC_BLOCK (i + 1), EDGE_FALLTHRU);
1057 /* Create an edge between two basic blocks. FLAGS are auxiliary information
1058 about the edge that is accumulated between calls. */
1060 static void
1061 make_edge (src, dst, flags)
1062 basic_block src, dst;
1063 int flags;
1065 edge e;
1067 /* Make sure we don't add duplicate edges. */
1069 for (e = src->succ; e ; e = e->succ_next)
1070 if (e->dest == dst)
1072 e->flags |= flags;
1073 return;
1076 e = (edge) xcalloc (1, sizeof (*e));
1078 e->succ_next = src->succ;
1079 e->pred_next = dst->pred;
1080 e->src = src;
1081 e->dest = dst;
1082 e->flags = flags;
1084 src->succ = e;
1085 dst->pred = e;
1088 /* Create an edge from a basic block to a label. */
1090 static void
1091 make_label_edge (src, label, flags)
1092 basic_block src;
1093 rtx label;
1094 int flags;
1096 if (GET_CODE (label) != CODE_LABEL)
1097 abort ();
1099 /* If the label was never emitted, this insn is junk, but avoid a
1100 crash trying to refer to BLOCK_FOR_INSN (label). This can happen
1101 as a result of a syntax error and a diagnostic has already been
1102 printed. */
1104 if (INSN_UID (label) == 0)
1105 return;
1107 make_edge (src, BLOCK_FOR_INSN (label), flags);
1110 /* Identify critical edges and set the bits appropriately. */
1111 static void
1112 mark_critical_edges ()
1114 int i, n = n_basic_blocks;
1115 basic_block bb;
1117 /* We begin with the entry block. This is not terribly important now,
1118 but could be if a front end (Fortran) implemented alternate entry
1119 points. */
1120 bb = ENTRY_BLOCK_PTR;
1121 i = -1;
1123 while (1)
1125 edge e;
1127 /* (1) Critical edges must have a source with multiple successors. */
1128 if (bb->succ && bb->succ->succ_next)
1130 for (e = bb->succ; e ; e = e->succ_next)
1132 /* (2) Critical edges must have a destination with multiple
1133 predecessors. Note that we know there is at least one
1134 predecessor -- the edge we followed to get here. */
1135 if (e->dest->pred->pred_next)
1136 e->flags |= EDGE_CRITICAL;
1137 else
1138 e->flags &= ~EDGE_CRITICAL;
1141 else
1143 for (e = bb->succ; e ; e = e->succ_next)
1144 e->flags &= ~EDGE_CRITICAL;
1147 if (++i >= n)
1148 break;
1149 bb = BASIC_BLOCK (i);
1153 /* Split a (typically critical) edge. Return the new block.
1154 Abort on abnormal edges.
1156 ??? The code generally expects to be called on critical edges.
1157 The case of a block ending in an unconditional jump to a
1158 block with multiple predecessors is not handled optimally. */
1160 basic_block
1161 split_edge (edge_in)
1162 edge edge_in;
1164 basic_block old_pred, bb, old_succ;
1165 edge edge_out;
1166 rtx bb_note;
1167 int i;
1169 /* Abnormal edges cannot be split. */
1170 if ((edge_in->flags & EDGE_ABNORMAL) != 0)
1171 abort ();
1173 old_pred = edge_in->src;
1174 old_succ = edge_in->dest;
1176 /* Remove the existing edge from the destination's pred list. */
1178 edge *pp;
1179 for (pp = &old_succ->pred; *pp != edge_in; pp = &(*pp)->pred_next)
1180 continue;
1181 *pp = edge_in->pred_next;
1182 edge_in->pred_next = NULL;
1185 /* Create the new structures. */
1186 bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
1187 edge_out = (edge) xcalloc (1, sizeof (*edge_out));
1189 memset (bb, 0, sizeof (*bb));
1190 bb->local_set = OBSTACK_ALLOC_REG_SET (function_obstack);
1191 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
1192 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);
1194 /* ??? This info is likely going to be out of date very soon. */
1195 CLEAR_REG_SET (bb->local_set);
1196 if (old_succ->global_live_at_start)
1198 COPY_REG_SET (bb->global_live_at_start, old_succ->global_live_at_start);
1199 COPY_REG_SET (bb->global_live_at_end, old_succ->global_live_at_start);
1201 else
1203 CLEAR_REG_SET (bb->global_live_at_start);
1204 CLEAR_REG_SET (bb->global_live_at_end);
1207 /* Wire them up. */
1208 bb->pred = edge_in;
1209 bb->succ = edge_out;
1211 edge_in->dest = bb;
1212 edge_in->flags &= ~EDGE_CRITICAL;
1214 edge_out->pred_next = old_succ->pred;
1215 edge_out->succ_next = NULL;
1216 edge_out->src = bb;
1217 edge_out->dest = old_succ;
1218 edge_out->flags = EDGE_FALLTHRU;
1219 edge_out->probability = REG_BR_PROB_BASE;
1221 old_succ->pred = edge_out;
1223 /* Tricky case -- if there existed a fallthru into the successor
1224 (and we're not it) we must add a new unconditional jump around
1225 the new block we're actually interested in.
1227 Further, if that edge is critical, this means a second new basic
1228 block must be created to hold it. In order to simplify correct
1229 insn placement, do this before we touch the existing basic block
1230 ordering for the block we were really wanting. */
1231 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1233 edge e;
1234 for (e = edge_out->pred_next; e ; e = e->pred_next)
1235 if (e->flags & EDGE_FALLTHRU)
1236 break;
1238 if (e)
1240 basic_block jump_block;
1241 rtx pos;
1243 if ((e->flags & EDGE_CRITICAL) == 0)
1245 /* Non critical -- we can simply add a jump to the end
1246 of the existing predecessor. */
1247 jump_block = e->src;
1249 else
1251 /* We need a new block to hold the jump. The simplest
1252 way to do the bulk of the work here is to recursively
1253 call ourselves. */
1254 jump_block = split_edge (e);
1255 e = jump_block->succ;
1258 /* Now add the jump insn ... */
1259 pos = emit_jump_insn_after (gen_jump (old_succ->head),
1260 jump_block->end);
1261 jump_block->end = pos;
1262 emit_barrier_after (pos);
1264 /* ... let jump know that label is in use, ... */
1265 ++LABEL_NUSES (old_succ->head);
1267 /* ... and clear fallthru on the outgoing edge. */
1268 e->flags &= ~EDGE_FALLTHRU;
1270 /* Continue splitting the interesting edge. */
1274 /* Place the new block just in front of the successor. */
1275 VARRAY_GROW (basic_block_info, ++n_basic_blocks);
1276 for (i = n_basic_blocks - 1; i > old_succ->index; --i)
1278 basic_block tmp = BASIC_BLOCK (i - 1);
1279 BASIC_BLOCK (i) = tmp;
1280 tmp->index = i;
1282 BASIC_BLOCK (i) = bb;
1283 bb->index = i;
1285 /* Create the basic block note. */
1286 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, old_succ->head);
1287 NOTE_BASIC_BLOCK (bb_note) = bb;
1288 bb->head = bb->end = bb_note;
1290 /* Not quite simple -- for non-fallthru edges, we must adjust the
1291 predecessor's jump instruction to target our new block. */
1292 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1294 rtx tmp, insn = old_pred->end;
1295 rtx old_label = old_succ->head;
1296 rtx new_label = gen_label_rtx ();
1298 if (GET_CODE (insn) != JUMP_INSN)
1299 abort ();
1301 /* ??? Recognize a tablejump and adjust all matching cases. */
1302 if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
1303 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
1304 && GET_CODE (tmp) == JUMP_INSN
1305 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
1306 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
1308 rtvec vec;
1309 int j;
1311 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
1312 vec = XVEC (PATTERN (tmp), 0);
1313 else
1314 vec = XVEC (PATTERN (tmp), 1);
1316 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
1317 if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
1319 RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (VOIDmode, new_label);
1320 --LABEL_NUSES (old_label);
1321 ++LABEL_NUSES (new_label);
1324 else
1326 /* This would have indicated an abnormal edge. */
1327 if (computed_jump_p (insn))
1328 abort ();
1330 /* A return instruction can't be redirected. */
1331 if (returnjump_p (insn))
1332 abort ();
1334 /* If the insn doesn't go where we think, we're confused. */
1335 if (JUMP_LABEL (insn) != old_label)
1336 abort ();
1338 redirect_jump (insn, new_label);
1341 emit_label_before (new_label, bb_note);
1342 bb->head = new_label;
1345 return bb;
1348 /* Queue instructions for insertion on an edge between two basic blocks.
1349 The new instructions and basic blocks (if any) will not appear in the
1350 CFG until commit_edge_insertions is called. */
1352 void
1353 insert_insn_on_edge (pattern, e)
1354 rtx pattern;
1355 edge e;
1357 /* We cannot insert instructions on an abnormal critical edge.
1358 It will be easier to find the culprit if we die now. */
1359 if ((e->flags & (EDGE_ABNORMAL|EDGE_CRITICAL))
1360 == (EDGE_ABNORMAL|EDGE_CRITICAL))
1361 abort ();
1363 if (e->insns == NULL_RTX)
1364 start_sequence ();
1365 else
1366 push_to_sequence (e->insns);
1368 emit_insn (pattern);
1370 e->insns = get_insns ();
1371 end_sequence();
1374 /* Update the CFG for the instructions queued on edge E. */
1376 static void
1377 commit_one_edge_insertion (e)
1378 edge e;
1380 rtx before = NULL_RTX, after = NULL_RTX, tmp;
1381 basic_block bb;
1383 /* Figure out where to put these things. If the destination has
1384 one predecessor, insert there. Except for the exit block. */
1385 if (e->dest->pred->pred_next == NULL
1386 && e->dest != EXIT_BLOCK_PTR)
1388 bb = e->dest;
1390 /* Get the location correct wrt a code label, and "nice" wrt
1391 a basic block note, and before everything else. */
1392 tmp = bb->head;
1393 if (GET_CODE (tmp) == CODE_LABEL)
1394 tmp = NEXT_INSN (tmp);
1395 if (GET_CODE (tmp) == NOTE
1396 && NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BASIC_BLOCK)
1397 tmp = NEXT_INSN (tmp);
1398 if (tmp == bb->head)
1399 before = tmp;
1400 else
1401 after = PREV_INSN (tmp);
1404 /* If the source has one successor and the edge is not abnormal,
1405 insert there. Except for the entry block. */
1406 else if ((e->flags & EDGE_ABNORMAL) == 0
1407 && e->src->succ->succ_next == NULL
1408 && e->src != ENTRY_BLOCK_PTR)
1410 bb = e->src;
1411 if (GET_CODE (bb->end) == JUMP_INSN)
1413 /* ??? Is it possible to wind up with non-simple jumps? Perhaps
1414 a jump with delay slots already filled? */
1415 if (! simplejump_p (bb->end))
1416 abort ();
1418 before = bb->end;
1420 else
1422 /* We'd better be fallthru, or we've lost track of what's what. */
1423 if ((e->flags & EDGE_FALLTHRU) == 0)
1424 abort ();
1426 after = bb->end;
1430 /* Otherwise we must split the edge. */
1431 else
1433 bb = split_edge (e);
1434 after = bb->end;
1437 /* Now that we've found the spot, do the insertion. */
1438 tmp = e->insns;
1439 e->insns = NULL_RTX;
1440 if (before)
1442 emit_insns_before (tmp, before);
1443 if (before == bb->head)
1444 bb->head = before;
1446 else
1448 tmp = emit_insns_after (tmp, after);
1449 if (after == bb->end)
1450 bb->end = tmp;
1454 /* Update the CFG for all queued instructions. */
1456 void
1457 commit_edge_insertions ()
1459 int i;
1460 basic_block bb;
1462 i = -1;
1463 bb = ENTRY_BLOCK_PTR;
1464 while (1)
1466 edge e, next;
1468 for (e = bb->succ; e ; e = next)
1470 next = e->succ_next;
1471 if (e->insns)
1472 commit_one_edge_insertion (e);
1475 if (++i >= n_basic_blocks)
1476 break;
1477 bb = BASIC_BLOCK (i);
1481 /* Delete all unreachable basic blocks. */
1483 static void
1484 delete_unreachable_blocks ()
1486 basic_block *worklist, *tos;
1487 int deleted_handler;
1488 edge e;
1489 int i, n;
1491 n = n_basic_blocks;
1492 tos = worklist = (basic_block *) alloca (sizeof (basic_block) * n);
1494 /* Use basic_block->aux as a marker. Clear them all. */
1496 for (i = 0; i < n; ++i)
1497 BASIC_BLOCK (i)->aux = NULL;
1499 /* Add our starting points to the worklist. Almost always there will
1500 be only one. It isn't inconcievable that we might one day directly
1501 support Fortran alternate entry points. */
1503 for (e = ENTRY_BLOCK_PTR->succ; e ; e = e->succ_next)
1505 *tos++ = e->dest;
1507 /* Mark the block with a handy non-null value. */
1508 e->dest->aux = e;
1511 /* Iterate: find everything reachable from what we've already seen. */
1513 while (tos != worklist)
1515 basic_block b = *--tos;
1517 for (e = b->succ; e ; e = e->succ_next)
1518 if (!e->dest->aux)
1520 *tos++ = e->dest;
1521 e->dest->aux = e;
1525 /* Delete all unreachable basic blocks. Count down so that we don't
1526 interfere with the block renumbering that happens in delete_block. */
1528 deleted_handler = 0;
1530 for (i = n - 1; i >= 0; --i)
1532 basic_block b = BASIC_BLOCK (i);
1534 if (b->aux != NULL)
1535 /* This block was found. Tidy up the mark. */
1536 b->aux = NULL;
1537 else
1538 deleted_handler |= delete_block (b);
1541 /* Fix up edges that now fall through, or rather should now fall through
1542 but previously required a jump around now deleted blocks. Simplify
1543 the search by only examining blocks numerically adjacent, since this
1544 is how find_basic_blocks created them. */
1546 for (i = 1; i < n_basic_blocks; ++i)
1548 basic_block b = BASIC_BLOCK (i - 1);
1549 basic_block c = BASIC_BLOCK (i);
1550 edge s;
1552 /* We care about simple conditional or unconditional jumps with
1553 a single successor.
1555 If we had a conditional branch to the next instruction when
1556 find_basic_blocks was called, then there will only be one
1557 out edge for the block which ended with the conditional
1558 branch (since we do not create duplicate edges).
1560 Furthermore, the edge will be marked as a fallthru because we
1561 merge the flags for the duplicate edges. So we do not want to
1562 check that the edge is not a FALLTHRU edge. */
1563 if ((s = b->succ) != NULL
1564 && s->succ_next == NULL
1565 && s->dest == c)
1566 tidy_fallthru_edge (s, b, c);
1569 /* Attempt to merge blocks as made possible by edge removal. If a block
1570 has only one successor, and the successor has only one predecessor,
1571 they may be combined. */
1573 for (i = 0; i < n_basic_blocks; )
1575 basic_block c, b = BASIC_BLOCK (i);
1576 edge s;
1578 /* A loop because chains of blocks might be combineable. */
1579 while ((s = b->succ) != NULL
1580 && s->succ_next == NULL
1581 && (s->flags & EDGE_EH) == 0
1582 && (c = s->dest) != EXIT_BLOCK_PTR
1583 && c->pred->pred_next == NULL
1584 && merge_blocks (s, b, c))
1585 continue;
1587 /* Don't get confused by the index shift caused by deleting blocks. */
1588 i = b->index + 1;
1591 /* If we deleted an exception handler, we may have EH region begin/end
1592 blocks to remove as well. */
1593 if (deleted_handler)
1594 delete_eh_regions ();
1597 /* Find EH regions for which there is no longer a handler, and delete them. */
1599 static void
1600 delete_eh_regions ()
1602 rtx insn;
1604 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1605 if (GET_CODE (insn) == NOTE)
1607 if ((NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG) ||
1608 (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
1610 int num = CODE_LABEL_NUMBER (insn);
1611 /* A NULL handler indicates a region is no longer needed */
1612 if (get_first_handler (num) == NULL)
1614 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1615 NOTE_SOURCE_FILE (insn) = 0;
1621 /* Return true if NOTE is not one of the ones that must be kept paired,
1622 so that we may simply delete them. */
1624 static int
1625 can_delete_note_p (note)
1626 rtx note;
1628 return (NOTE_LINE_NUMBER (note) == NOTE_INSN_DELETED
1629 || NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK);
1632 /* Unlink a chain of insns between START and FINISH, leaving notes
1633 that must be paired. */
1635 static void
1636 delete_insn_chain (start, finish)
1637 rtx start, finish;
1639 /* Unchain the insns one by one. It would be quicker to delete all
1640 of these with a single unchaining, rather than one at a time, but
1641 we need to keep the NOTE's. */
1643 rtx next;
1645 while (1)
1647 next = NEXT_INSN (start);
1648 if (GET_CODE (start) == NOTE && !can_delete_note_p (start))
1650 else if (GET_CODE (start) == CODE_LABEL && !can_delete_label_p (start))
1652 else
1653 next = flow_delete_insn (start);
1655 if (start == finish)
1656 break;
1657 start = next;
1661 /* Delete the insns in a (non-live) block. We physically delete every
1662 non-deleted-note insn, and update the flow graph appropriately.
1664 Return nonzero if we deleted an exception handler. */
1666 /* ??? Preserving all such notes strikes me as wrong. It would be nice
1667 to post-process the stream to remove empty blocks, loops, ranges, etc. */
1669 static int
1670 delete_block (b)
1671 basic_block b;
1673 int deleted_handler = 0;
1674 rtx insn, end;
1676 /* If the head of this block is a CODE_LABEL, then it might be the
1677 label for an exception handler which can't be reached.
1679 We need to remove the label from the exception_handler_label list
1680 and remove the associated NOTE_EH_REGION_BEG and NOTE_EH_REGION_END
1681 notes. */
1683 insn = b->head;
1685 if (GET_CODE (insn) == CODE_LABEL)
1687 rtx x, *prev = &exception_handler_labels;
1689 for (x = exception_handler_labels; x; x = XEXP (x, 1))
1691 if (XEXP (x, 0) == insn)
1693 /* Found a match, splice this label out of the EH label list. */
1694 *prev = XEXP (x, 1);
1695 XEXP (x, 1) = NULL_RTX;
1696 XEXP (x, 0) = NULL_RTX;
1698 /* Remove the handler from all regions */
1699 remove_handler (insn);
1700 deleted_handler = 1;
1701 break;
1703 prev = &XEXP (x, 1);
1706 /* This label may be referenced by code solely for its value, or
1707 referenced by static data, or something. We have determined
1708 that it is not reachable, but cannot delete the label itself.
1709 Save code space and continue to delete the balance of the block,
1710 along with properly updating the cfg. */
1711 if (!can_delete_label_p (insn))
1713 /* If we've only got one of these, skip the whole deleting
1714 insns thing. */
1715 if (insn == b->end)
1716 goto no_delete_insns;
1717 insn = NEXT_INSN (insn);
1721 /* Selectively unlink the insn chain. Include any BARRIER that may
1722 follow the basic block. */
1723 end = next_nonnote_insn (b->end);
1724 if (!end || GET_CODE (end) != BARRIER)
1725 end = b->end;
1726 delete_insn_chain (insn, end);
1728 no_delete_insns:
1730 /* Remove the edges into and out of this block. Note that there may
1731 indeed be edges in, if we are removing an unreachable loop. */
1733 edge e, next, *q;
1735 for (e = b->pred; e ; e = next)
1737 for (q = &e->src->succ; *q != e; q = &(*q)->succ_next)
1738 continue;
1739 *q = e->succ_next;
1740 next = e->pred_next;
1741 free (e);
1743 for (e = b->succ; e ; e = next)
1745 for (q = &e->dest->pred; *q != e; q = &(*q)->pred_next)
1746 continue;
1747 *q = e->pred_next;
1748 next = e->succ_next;
1749 free (e);
1752 b->pred = NULL;
1753 b->succ = NULL;
1756 /* Remove the basic block from the array, and compact behind it. */
1757 expunge_block (b);
1759 return deleted_handler;
1762 /* Remove block B from the basic block array and compact behind it. */
1764 static void
1765 expunge_block (b)
1766 basic_block b;
1768 int i, n = n_basic_blocks;
1770 for (i = b->index; i + 1 < n; ++i)
1772 basic_block x = BASIC_BLOCK (i + 1);
1773 BASIC_BLOCK (i) = x;
1774 x->index = i;
1777 basic_block_info->num_elements--;
1778 n_basic_blocks--;
1781 /* Delete INSN by patching it out. Return the next insn. */
1783 static rtx
1784 flow_delete_insn (insn)
1785 rtx insn;
1787 rtx prev = PREV_INSN (insn);
1788 rtx next = NEXT_INSN (insn);
1790 PREV_INSN (insn) = NULL_RTX;
1791 NEXT_INSN (insn) = NULL_RTX;
1793 if (prev)
1794 NEXT_INSN (prev) = next;
1795 if (next)
1796 PREV_INSN (next) = prev;
1797 else
1798 set_last_insn (prev);
1800 if (GET_CODE (insn) == CODE_LABEL)
1801 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
1803 /* If deleting a jump, decrement the use count of the label. Deleting
1804 the label itself should happen in the normal course of block merging. */
1805 if (GET_CODE (insn) == JUMP_INSN && JUMP_LABEL (insn))
1806 LABEL_NUSES (JUMP_LABEL (insn))--;
1808 return next;
1811 /* True if a given label can be deleted. */
1813 static int
1814 can_delete_label_p (label)
1815 rtx label;
1817 rtx x;
1819 if (LABEL_PRESERVE_P (label))
1820 return 0;
1822 for (x = forced_labels; x ; x = XEXP (x, 1))
1823 if (label == XEXP (x, 0))
1824 return 0;
1825 for (x = label_value_list; x ; x = XEXP (x, 1))
1826 if (label == XEXP (x, 0))
1827 return 0;
1828 for (x = exception_handler_labels; x ; x = XEXP (x, 1))
1829 if (label == XEXP (x, 0))
1830 return 0;
1832 /* User declared labels must be preserved. */
1833 if (LABEL_NAME (label) != 0)
1834 return 0;
1836 return 1;
1839 /* Blocks A and B are to be merged into a single block. The insns
1840 are already contiguous, hence `nomove'. */
1842 static void
1843 merge_blocks_nomove (a, b)
1844 basic_block a, b;
1846 edge e;
1847 rtx b_head, b_end, a_end;
1848 int b_empty = 0;
1850 /* If there was a CODE_LABEL beginning B, delete it. */
1851 b_head = b->head;
1852 b_end = b->end;
1853 if (GET_CODE (b_head) == CODE_LABEL)
1855 /* Detect basic blocks with nothing but a label. This can happen
1856 in particular at the end of a function. */
1857 if (b_head == b_end)
1858 b_empty = 1;
1859 b_head = flow_delete_insn (b_head);
1862 /* Delete the basic block note. */
1863 if (GET_CODE (b_head) == NOTE
1864 && NOTE_LINE_NUMBER (b_head) == NOTE_INSN_BASIC_BLOCK)
1866 if (b_head == b_end)
1867 b_empty = 1;
1868 b_head = flow_delete_insn (b_head);
1871 /* If there was a jump out of A, delete it. */
1872 a_end = a->end;
1873 if (GET_CODE (a_end) == JUMP_INSN)
1875 rtx prev;
1877 prev = prev_nonnote_insn (a_end);
1878 if (!prev)
1879 prev = a->head;
1881 #ifdef HAVE_cc0
1882 /* If this was a conditional jump, we need to also delete
1883 the insn that set cc0. */
1885 if (prev && sets_cc0_p (prev))
1887 rtx tmp = prev;
1888 prev = prev_nonnote_insn (prev);
1889 if (!prev)
1890 prev = a->head;
1891 flow_delete_insn (tmp);
1893 #endif
1895 /* Note that a->head != a->end, since we should have at least a
1896 bb note plus the jump, so prev != insn. */
1897 flow_delete_insn (a_end);
1898 a_end = prev;
1901 /* By definition, there should only be one successor of A, and that is
1902 B. Free that edge struct. */
1903 free (a->succ);
1905 /* Adjust the edges out of B for the new owner. */
1906 for (e = b->succ; e ; e = e->succ_next)
1907 e->src = a;
1908 a->succ = b->succ;
1910 /* Reassociate the insns of B with A. */
1911 if (!b_empty)
1913 BLOCK_FOR_INSN (b_head) = a;
1914 while (b_head != b_end)
1916 b_head = NEXT_INSN (b_head);
1917 BLOCK_FOR_INSN (b_head) = a;
1919 a_end = b_head;
1921 a->end = a_end;
1923 /* Compact the basic block array. */
1924 expunge_block (b);
1927 /* Attempt to merge basic blocks that are potentially non-adjacent.
1928 Return true iff the attempt succeeded. */
1930 static int
1931 merge_blocks (e, b, c)
1932 edge e;
1933 basic_block b, c;
1935 /* If B has a fallthru edge to C, no need to move anything. */
1936 if (!(e->flags & EDGE_FALLTHRU))
1938 /* ??? From here on out we must make sure to not munge nesting
1939 of exception regions and lexical blocks. Need to think about
1940 these cases before this gets implemented. */
1941 return 0;
1943 /* If C has an outgoing fallthru, and B does not have an incoming
1944 fallthru, move B before C. The later clause is somewhat arbitrary,
1945 but avoids modifying blocks other than the two we've been given. */
1947 /* Otherwise, move C after B. If C had a fallthru, which doesn't
1948 happen to be the physical successor to B, insert an unconditional
1949 branch. If C already ended with a conditional branch, the new
1950 jump must go in a new basic block D. */
1953 /* If a label still appears somewhere and we cannot delete the label,
1954 then we cannot merge the blocks. The edge was tidied already. */
1956 rtx insn, stop = NEXT_INSN (c->head);
1957 for (insn = NEXT_INSN (b->end); insn != stop; insn = NEXT_INSN (insn))
1958 if (GET_CODE (insn) == CODE_LABEL && !can_delete_label_p (insn))
1959 return 0;
1962 merge_blocks_nomove (b, c);
1963 return 1;
1966 /* The given edge should potentially a fallthru edge. If that is in
1967 fact true, delete the unconditional jump and barriers that are in
1968 the way. */
1970 static void
1971 tidy_fallthru_edge (e, b, c)
1972 edge e;
1973 basic_block b, c;
1975 rtx q;
1977 /* ??? In a late-running flow pass, other folks may have deleted basic
1978 blocks by nopping out blocks, leaving multiple BARRIERs between here
1979 and the target label. They ought to be chastized and fixed.
1981 We can also wind up with a sequence of undeletable labels between
1982 one block and the next.
1984 So search through a sequence of barriers, labels, and notes for
1985 the head of block C and assert that we really do fall through. */
1987 if (next_real_insn (b->end) != next_real_insn (PREV_INSN (c->head)))
1988 return;
1990 /* Remove what will soon cease being the jump insn from the source block.
1991 If block B consisted only of this single jump, turn it into a deleted
1992 note. */
1993 q = b->end;
1994 if (GET_CODE (q) == JUMP_INSN)
1996 #ifdef HAVE_cc0
1997 /* If this was a conditional jump, we need to also delete
1998 the insn that set cc0. */
1999 if (! simplejump_p (q) && condjump_p (q))
2000 q = PREV_INSN (q);
2001 #endif
2003 if (b->head == q)
2005 PUT_CODE (q, NOTE);
2006 NOTE_LINE_NUMBER (q) = NOTE_INSN_DELETED;
2007 NOTE_SOURCE_FILE (q) = 0;
2009 else
2010 b->end = q = PREV_INSN (q);
2013 /* Selectively unlink the sequence. */
2014 if (q != PREV_INSN (c->head))
2015 delete_insn_chain (NEXT_INSN (q), PREV_INSN (c->head));
2017 e->flags |= EDGE_FALLTHRU;
2020 /* Discover and record the loop depth at the head of each basic block. */
2022 static void
2023 calculate_loop_depth (insns)
2024 rtx insns;
2026 basic_block bb;
2027 rtx insn;
2028 int i = 0, depth = 1;
2030 bb = BASIC_BLOCK (i);
2031 for (insn = insns; insn ; insn = NEXT_INSN (insn))
2033 if (insn == bb->head)
2035 bb->loop_depth = depth;
2036 if (++i >= n_basic_blocks)
2037 break;
2038 bb = BASIC_BLOCK (i);
2041 if (GET_CODE (insn) == NOTE)
2043 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
2044 depth++;
2045 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END)
2046 depth--;
2048 /* If we have LOOP_DEPTH == 0, there has been a bookkeeping error. */
2049 if (depth == 0)
2050 abort ();
2055 /* Perform data flow analysis.
2056 F is the first insn of the function and NREGS the number of register numbers
2057 in use. */
2059 void
2060 life_analysis (f, nregs, file, remove_dead_code)
2061 rtx f;
2062 int nregs;
2063 FILE *file;
2064 int remove_dead_code;
2066 #ifdef ELIMINABLE_REGS
2067 register size_t i;
2068 static struct {int from, to; } eliminables[] = ELIMINABLE_REGS;
2069 #endif
2071 /* Record which registers will be eliminated. We use this in
2072 mark_used_regs. */
2074 CLEAR_HARD_REG_SET (elim_reg_set);
2076 #ifdef ELIMINABLE_REGS
2077 for (i = 0; i < sizeof eliminables / sizeof eliminables[0]; i++)
2078 SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
2079 #else
2080 SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
2081 #endif
2083 /* Allocate a bitmap to be filled in by record_volatile_insns. */
2084 uid_volatile = BITMAP_ALLOCA ();
2086 /* We want alias analysis information for local dead store elimination. */
2087 init_alias_analysis ();
2088 life_analysis_1 (f, nregs, remove_dead_code);
2089 end_alias_analysis ();
2091 if (file)
2092 dump_flow_info (file);
2094 BITMAP_FREE (uid_volatile);
2095 free_basic_block_vars (1);
2098 /* Free the variables allocated by find_basic_blocks.
2100 KEEP_HEAD_END_P is non-zero if basic_block_info is not to be freed. */
2102 void
2103 free_basic_block_vars (keep_head_end_p)
2104 int keep_head_end_p;
2106 if (basic_block_for_insn)
2108 VARRAY_FREE (basic_block_for_insn);
2109 basic_block_for_insn = NULL;
2112 if (! keep_head_end_p)
2114 clear_edges ();
2115 VARRAY_FREE (basic_block_info);
2116 n_basic_blocks = 0;
2118 ENTRY_BLOCK_PTR->aux = NULL;
2119 ENTRY_BLOCK_PTR->global_live_at_end = NULL;
2120 EXIT_BLOCK_PTR->aux = NULL;
2121 EXIT_BLOCK_PTR->global_live_at_start = NULL;
2125 /* Return nonzero if the destination of SET equals the source. */
2126 static int
2127 set_noop_p (set)
2128 rtx set;
2130 rtx src = SET_SRC (set);
2131 rtx dst = SET_DEST (set);
2132 if (GET_CODE (src) == REG && GET_CODE (dst) == REG
2133 && REGNO (src) == REGNO (dst))
2134 return 1;
2135 if (GET_CODE (src) != SUBREG || GET_CODE (dst) != SUBREG
2136 || SUBREG_WORD (src) != SUBREG_WORD (dst))
2137 return 0;
2138 src = SUBREG_REG (src);
2139 dst = SUBREG_REG (dst);
2140 if (GET_CODE (src) == REG && GET_CODE (dst) == REG
2141 && REGNO (src) == REGNO (dst))
2142 return 1;
2143 return 0;
2146 /* Return nonzero if an insn consists only of SETs, each of which only sets a
2147 value to itself. */
2148 static int
2149 noop_move_p (insn)
2150 rtx insn;
2152 rtx pat = PATTERN (insn);
2154 /* Insns carrying these notes are useful later on. */
2155 if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
2156 return 0;
2158 if (GET_CODE (pat) == SET && set_noop_p (pat))
2159 return 1;
2161 if (GET_CODE (pat) == PARALLEL)
2163 int i;
2164 /* If nothing but SETs of registers to themselves,
2165 this insn can also be deleted. */
2166 for (i = 0; i < XVECLEN (pat, 0); i++)
2168 rtx tem = XVECEXP (pat, 0, i);
2170 if (GET_CODE (tem) == USE
2171 || GET_CODE (tem) == CLOBBER)
2172 continue;
2174 if (GET_CODE (tem) != SET || ! set_noop_p (tem))
2175 return 0;
2178 return 1;
2180 return 0;
2183 static void
2184 notice_stack_pointer_modification (x, pat)
2185 rtx x;
2186 rtx pat ATTRIBUTE_UNUSED;
2188 if (x == stack_pointer_rtx
2189 /* The stack pointer is only modified indirectly as the result
2190 of a push until later in flow. See the comments in rtl.texi
2191 regarding Embedded Side-Effects on Addresses. */
2192 || (GET_CODE (x) == MEM
2193 && (GET_CODE (XEXP (x, 0)) == PRE_DEC
2194 || GET_CODE (XEXP (x, 0)) == PRE_INC
2195 || GET_CODE (XEXP (x, 0)) == POST_DEC
2196 || GET_CODE (XEXP (x, 0)) == POST_INC)
2197 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx))
2198 current_function_sp_is_unchanging = 0;
2201 /* Record which insns refer to any volatile memory
2202 or for any reason can't be deleted just because they are dead stores.
2203 Also, delete any insns that copy a register to itself.
2204 And see if the stack pointer is modified. */
2205 static void
2206 record_volatile_insns (f)
2207 rtx f;
2209 rtx insn;
2210 for (insn = f; insn; insn = NEXT_INSN (insn))
2212 enum rtx_code code1 = GET_CODE (insn);
2213 if (code1 == CALL_INSN)
2214 SET_INSN_VOLATILE (insn);
2215 else if (code1 == INSN || code1 == JUMP_INSN)
2217 if (GET_CODE (PATTERN (insn)) != USE
2218 && volatile_refs_p (PATTERN (insn)))
2219 SET_INSN_VOLATILE (insn);
2221 /* A SET that makes space on the stack cannot be dead.
2222 (Such SETs occur only for allocating variable-size data,
2223 so they will always have a PLUS or MINUS according to the
2224 direction of stack growth.)
2225 Even if this function never uses this stack pointer value,
2226 signal handlers do! */
2227 else if (code1 == INSN && GET_CODE (PATTERN (insn)) == SET
2228 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
2229 #ifdef STACK_GROWS_DOWNWARD
2230 && GET_CODE (SET_SRC (PATTERN (insn))) == MINUS
2231 #else
2232 && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
2233 #endif
2234 && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx)
2235 SET_INSN_VOLATILE (insn);
2237 /* Delete (in effect) any obvious no-op moves. */
2238 else if (noop_move_p (insn))
2240 PUT_CODE (insn, NOTE);
2241 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2242 NOTE_SOURCE_FILE (insn) = 0;
2246 /* Check if insn modifies the stack pointer. */
2247 if ( current_function_sp_is_unchanging
2248 && GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2249 note_stores (PATTERN (insn), notice_stack_pointer_modification);
2253 /* Mark those regs which are needed at the end of the function as live
2254 at the end of the last basic block. */
2255 static void
2256 mark_regs_live_at_end (set)
2257 regset set;
2259 int i;
2261 /* If exiting needs the right stack value, consider the stack pointer
2262 live at the end of the function. */
2263 if (! EXIT_IGNORE_STACK
2264 || (! FRAME_POINTER_REQUIRED
2265 && ! current_function_calls_alloca
2266 && flag_omit_frame_pointer)
2267 || current_function_sp_is_unchanging)
2269 SET_REGNO_REG_SET (set, STACK_POINTER_REGNUM);
2272 /* Mark the frame pointer if needed at the end of the function. If
2273 we end up eliminating it, it will be removed from the live list
2274 of each basic block by reload. */
2276 if (! reload_completed || frame_pointer_needed)
2278 SET_REGNO_REG_SET (set, FRAME_POINTER_REGNUM);
2279 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2280 /* If they are different, also mark the hard frame pointer as live */
2281 SET_REGNO_REG_SET (set, HARD_FRAME_POINTER_REGNUM);
2282 #endif
2285 /* Mark all global registers, and all registers used by the epilogue
2286 as being live at the end of the function since they may be
2287 referenced by our caller. */
2288 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2289 if (global_regs[i]
2290 #ifdef EPILOGUE_USES
2291 || EPILOGUE_USES (i)
2292 #endif
2294 SET_REGNO_REG_SET (set, i);
2296 /* ??? Mark function return value here rather than as uses. */
2299 /* Determine which registers are live at the start of each
2300 basic block of the function whose first insn is F.
2301 NREGS is the number of registers used in F.
2302 We allocate the vector basic_block_live_at_start
2303 and the regsets that it points to, and fill them with the data.
2304 regset_size and regset_bytes are also set here. */
2306 static void
2307 life_analysis_1 (f, nregs, remove_dead_code)
2308 rtx f;
2309 int nregs;
2310 int remove_dead_code;
2312 int first_pass;
2313 int changed;
2314 register int i;
2315 char save_regs_ever_live[FIRST_PSEUDO_REGISTER];
2316 regset *new_live_at_end;
2318 struct obstack flow_obstack;
2320 gcc_obstack_init (&flow_obstack);
2322 max_regno = nregs;
2324 /* Allocate and zero out many data structures
2325 that will record the data from lifetime analysis. */
2327 allocate_reg_life_data ();
2328 allocate_bb_life_data ();
2330 reg_next_use = (rtx *) alloca (nregs * sizeof (rtx));
2331 memset (reg_next_use, 0, nregs * sizeof (rtx));
2333 /* Set up regset-vectors used internally within this function.
2334 Their meanings are documented above, with their declarations. */
2336 new_live_at_end = (regset *) alloca ((n_basic_blocks + 1) * sizeof (regset));
2337 init_regset_vector (new_live_at_end, n_basic_blocks + 1, &flow_obstack);
2339 /* Stick these vectors into the AUX field of the basic block, so that
2340 we don't have to keep going through the index. */
2342 for (i = 0; i < n_basic_blocks; ++i)
2343 BASIC_BLOCK (i)->aux = new_live_at_end[i];
2344 ENTRY_BLOCK_PTR->aux = new_live_at_end[i];
2346 /* Assume that the stack pointer is unchanging if alloca hasn't been used.
2347 This will be cleared by record_volatile_insns if it encounters an insn
2348 which modifies the stack pointer. */
2349 current_function_sp_is_unchanging = !current_function_calls_alloca;
2351 record_volatile_insns (f);
2353 if (n_basic_blocks > 0)
2355 regset theend;
2356 register edge e;
2358 theend = EXIT_BLOCK_PTR->global_live_at_start;
2359 mark_regs_live_at_end (theend);
2361 /* Propogate this exit data to each of EXIT's predecessors. */
2362 for (e = EXIT_BLOCK_PTR->pred; e ; e = e->pred_next)
2364 COPY_REG_SET (e->src->global_live_at_end, theend);
2365 COPY_REG_SET ((regset) e->src->aux, theend);
2369 /* The post-reload life analysis have (on a global basis) the same registers
2370 live as was computed by reload itself.
2372 Otherwise elimination offsets and such may be incorrect.
2374 Reload will make some registers as live even though they do not appear
2375 in the rtl. */
2376 if (reload_completed)
2377 memcpy (save_regs_ever_live, regs_ever_live, sizeof (regs_ever_live));
2378 memset (regs_ever_live, 0, sizeof regs_ever_live);
2380 /* Propagate life info through the basic blocks
2381 around the graph of basic blocks.
2383 This is a relaxation process: each time a new register
2384 is live at the end of the basic block, we must scan the block
2385 to determine which registers are, as a consequence, live at the beginning
2386 of that block. These registers must then be marked live at the ends
2387 of all the blocks that can transfer control to that block.
2388 The process continues until it reaches a fixed point. */
2390 first_pass = 1;
2391 changed = 1;
2392 while (changed)
2394 changed = 0;
2395 for (i = n_basic_blocks - 1; i >= 0; i--)
2397 basic_block bb = BASIC_BLOCK (i);
2398 int consider = first_pass;
2399 int must_rescan = first_pass;
2400 register int j;
2402 if (!first_pass)
2404 /* Set CONSIDER if this block needs thinking about at all
2405 (that is, if the regs live now at the end of it
2406 are not the same as were live at the end of it when
2407 we last thought about it).
2408 Set must_rescan if it needs to be thought about
2409 instruction by instruction (that is, if any additional
2410 reg that is live at the end now but was not live there before
2411 is one of the significant regs of this basic block). */
2413 EXECUTE_IF_AND_COMPL_IN_REG_SET
2414 ((regset) bb->aux, bb->global_live_at_end, 0, j,
2416 consider = 1;
2417 if (REGNO_REG_SET_P (bb->local_set, j))
2419 must_rescan = 1;
2420 goto done;
2423 done:
2424 if (! consider)
2425 continue;
2428 /* The live_at_start of this block may be changing,
2429 so another pass will be required after this one. */
2430 changed = 1;
2432 if (! must_rescan)
2434 /* No complete rescan needed;
2435 just record those variables newly known live at end
2436 as live at start as well. */
2437 IOR_AND_COMPL_REG_SET (bb->global_live_at_start,
2438 (regset) bb->aux,
2439 bb->global_live_at_end);
2441 IOR_AND_COMPL_REG_SET (bb->global_live_at_end,
2442 (regset) bb->aux,
2443 bb->global_live_at_end);
2445 else
2447 /* Update the basic_block_live_at_start
2448 by propagation backwards through the block. */
2449 COPY_REG_SET (bb->global_live_at_end, (regset) bb->aux);
2450 COPY_REG_SET (bb->global_live_at_start,
2451 bb->global_live_at_end);
2452 propagate_block (bb->global_live_at_start,
2453 bb->head, bb->end, 0,
2454 first_pass ? bb->local_set : (regset) 0,
2455 i, remove_dead_code);
2458 /* Update the new_live_at_end's of the block's predecessors. */
2460 register edge e;
2462 for (e = bb->pred; e ; e = e->pred_next)
2463 IOR_REG_SET ((regset) e->src->aux, bb->global_live_at_start);
2466 #ifdef USE_C_ALLOCA
2467 alloca (0);
2468 #endif
2470 first_pass = 0;
2473 /* The only pseudos that are live at the beginning of the function are
2474 those that were not set anywhere in the function. local-alloc doesn't
2475 know how to handle these correctly, so mark them as not local to any
2476 one basic block. */
2478 if (n_basic_blocks > 0)
2479 EXECUTE_IF_SET_IN_REG_SET (BASIC_BLOCK (0)->global_live_at_start,
2480 FIRST_PSEUDO_REGISTER, i,
2482 REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL;
2485 /* Now the life information is accurate. Make one more pass over each
2486 basic block to delete dead stores, create autoincrement addressing
2487 and record how many times each register is used, is set, or dies. */
2489 for (i = 0; i < n_basic_blocks; i++)
2491 basic_block bb = BASIC_BLOCK (i);
2493 /* We start with global_live_at_end to determine which stores are
2494 dead. This process is destructive, and we wish to preserve the
2495 contents of global_live_at_end for posterity. Fortunately,
2496 new_live_at_end, due to the way we converged on a solution,
2497 contains a duplicate of global_live_at_end that we can kill. */
2498 propagate_block ((regset) bb->aux, bb->head, bb->end, 1, (regset) 0, i, remove_dead_code);
2500 #ifdef USE_C_ALLOCA
2501 alloca (0);
2502 #endif
2505 /* We have a problem with any pseudoreg that lives across the setjmp.
2506 ANSI says that if a user variable does not change in value between
2507 the setjmp and the longjmp, then the longjmp preserves it. This
2508 includes longjmp from a place where the pseudo appears dead.
2509 (In principle, the value still exists if it is in scope.)
2510 If the pseudo goes in a hard reg, some other value may occupy
2511 that hard reg where this pseudo is dead, thus clobbering the pseudo.
2512 Conclusion: such a pseudo must not go in a hard reg. */
2513 EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
2514 FIRST_PSEUDO_REGISTER, i,
2516 if (regno_reg_rtx[i] != 0)
2518 REG_LIVE_LENGTH (i) = -1;
2519 REG_BASIC_BLOCK (i) = -1;
2523 /* Restore regs_ever_live that was provided by reload. */
2524 if (reload_completed)
2525 memcpy (regs_ever_live, save_regs_ever_live, sizeof (regs_ever_live));
2527 free_regset_vector (new_live_at_end, n_basic_blocks);
2528 obstack_free (&flow_obstack, NULL_PTR);
2530 for (i = 0; i < n_basic_blocks; ++i)
2531 BASIC_BLOCK (i)->aux = NULL;
2532 ENTRY_BLOCK_PTR->aux = NULL;
2535 /* Subroutines of life analysis. */
2537 /* Allocate the permanent data structures that represent the results
2538 of life analysis. Not static since used also for stupid life analysis. */
2540 void
2541 allocate_bb_life_data ()
2543 register int i;
2545 for (i = 0; i < n_basic_blocks; i++)
2547 basic_block bb = BASIC_BLOCK (i);
2549 bb->local_set = OBSTACK_ALLOC_REG_SET (function_obstack);
2550 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
2551 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);
2554 ENTRY_BLOCK_PTR->global_live_at_end
2555 = OBSTACK_ALLOC_REG_SET (function_obstack);
2556 EXIT_BLOCK_PTR->global_live_at_start
2557 = OBSTACK_ALLOC_REG_SET (function_obstack);
2559 regs_live_at_setjmp = OBSTACK_ALLOC_REG_SET (function_obstack);
2562 void
2563 allocate_reg_life_data ()
2565 int i;
2567 /* Recalculate the register space, in case it has grown. Old style
2568 vector oriented regsets would set regset_{size,bytes} here also. */
2569 allocate_reg_info (max_regno, FALSE, FALSE);
2571 /* Because both reg_scan and flow_analysis want to set up the REG_N_SETS
2572 information, explicitly reset it here. The allocation should have
2573 already happened on the previous reg_scan pass. Make sure in case
2574 some more registers were allocated. */
2575 for (i = 0; i < max_regno; i++)
2576 REG_N_SETS (i) = 0;
2579 /* Make each element of VECTOR point at a regset. The vector has
2580 NELTS elements, and space is allocated from the ALLOC_OBSTACK
2581 obstack. */
2583 static void
2584 init_regset_vector (vector, nelts, alloc_obstack)
2585 regset *vector;
2586 int nelts;
2587 struct obstack *alloc_obstack;
2589 register int i;
2591 for (i = 0; i < nelts; i++)
2593 vector[i] = OBSTACK_ALLOC_REG_SET (alloc_obstack);
2594 CLEAR_REG_SET (vector[i]);
2598 /* Release any additional space allocated for each element of VECTOR point
2599 other than the regset header itself. The vector has NELTS elements. */
2601 void
2602 free_regset_vector (vector, nelts)
2603 regset *vector;
2604 int nelts;
2606 register int i;
2608 for (i = 0; i < nelts; i++)
2609 FREE_REG_SET (vector[i]);
2612 /* Compute the registers live at the beginning of a basic block
2613 from those live at the end.
2615 When called, OLD contains those live at the end.
2616 On return, it contains those live at the beginning.
2617 FIRST and LAST are the first and last insns of the basic block.
2619 FINAL is nonzero if we are doing the final pass which is not
2620 for computing the life info (since that has already been done)
2621 but for acting on it. On this pass, we delete dead stores,
2622 set up the logical links and dead-variables lists of instructions,
2623 and merge instructions for autoincrement and autodecrement addresses.
2625 SIGNIFICANT is nonzero only the first time for each basic block.
2626 If it is nonzero, it points to a regset in which we store
2627 a 1 for each register that is set within the block.
2629 BNUM is the number of the basic block. */
2631 static void
2632 propagate_block (old, first, last, final, significant, bnum, remove_dead_code)
2633 register regset old;
2634 rtx first;
2635 rtx last;
2636 int final;
2637 regset significant;
2638 int bnum;
2639 int remove_dead_code;
2641 register rtx insn;
2642 rtx prev;
2643 regset live;
2644 regset dead;
2646 /* Find the loop depth for this block. Ignore loop level changes in the
2647 middle of the basic block -- for register allocation purposes, the
2648 important uses will be in the blocks wholely contained within the loop
2649 not in the loop pre-header or post-trailer. */
2650 loop_depth = BASIC_BLOCK (bnum)->loop_depth;
2652 dead = ALLOCA_REG_SET ();
2653 live = ALLOCA_REG_SET ();
2655 cc0_live = 0;
2656 mem_set_list = NULL_RTX;
2658 if (final)
2660 register int i;
2662 /* Process the regs live at the end of the block.
2663 Mark them as not local to any one basic block. */
2664 EXECUTE_IF_SET_IN_REG_SET (old, 0, i,
2666 REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL;
2670 /* Scan the block an insn at a time from end to beginning. */
2672 for (insn = last; ; insn = prev)
2674 prev = PREV_INSN (insn);
2676 if (GET_CODE (insn) == NOTE)
2678 /* If this is a call to `setjmp' et al,
2679 warn if any non-volatile datum is live. */
2681 if (final && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
2682 IOR_REG_SET (regs_live_at_setjmp, old);
2685 /* Update the life-status of regs for this insn.
2686 First DEAD gets which regs are set in this insn
2687 then LIVE gets which regs are used in this insn.
2688 Then the regs live before the insn
2689 are those live after, with DEAD regs turned off,
2690 and then LIVE regs turned on. */
2692 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2694 register int i;
2695 rtx note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
2696 int insn_is_dead = 0;
2697 int libcall_is_dead = 0;
2699 if (remove_dead_code)
2701 insn_is_dead = (insn_dead_p (PATTERN (insn), old, 0, REG_NOTES (insn))
2702 /* Don't delete something that refers to volatile storage! */
2703 && ! INSN_VOLATILE (insn));
2704 libcall_is_dead = (insn_is_dead && note != 0
2705 && libcall_dead_p (PATTERN (insn), old, note, insn));
2708 /* If an instruction consists of just dead store(s) on final pass,
2709 "delete" it by turning it into a NOTE of type NOTE_INSN_DELETED.
2710 We could really delete it with delete_insn, but that
2711 can cause trouble for first or last insn in a basic block. */
2712 if (final && insn_is_dead)
2714 PUT_CODE (insn, NOTE);
2715 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2716 NOTE_SOURCE_FILE (insn) = 0;
2718 /* CC0 is now known to be dead. Either this insn used it,
2719 in which case it doesn't anymore, or clobbered it,
2720 so the next insn can't use it. */
2721 cc0_live = 0;
2723 /* If this insn is copying the return value from a library call,
2724 delete the entire library call. */
2725 if (libcall_is_dead)
2727 rtx first = XEXP (note, 0);
2728 rtx p = insn;
2729 while (INSN_DELETED_P (first))
2730 first = NEXT_INSN (first);
2731 while (p != first)
2733 p = PREV_INSN (p);
2734 PUT_CODE (p, NOTE);
2735 NOTE_LINE_NUMBER (p) = NOTE_INSN_DELETED;
2736 NOTE_SOURCE_FILE (p) = 0;
2739 goto flushed;
2742 CLEAR_REG_SET (dead);
2743 CLEAR_REG_SET (live);
2745 /* See if this is an increment or decrement that can be
2746 merged into a following memory address. */
2747 #ifdef AUTO_INC_DEC
2749 register rtx x = single_set (insn);
2751 /* Does this instruction increment or decrement a register? */
2752 if (!reload_completed
2753 && final && x != 0
2754 && GET_CODE (SET_DEST (x)) == REG
2755 && (GET_CODE (SET_SRC (x)) == PLUS
2756 || GET_CODE (SET_SRC (x)) == MINUS)
2757 && XEXP (SET_SRC (x), 0) == SET_DEST (x)
2758 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
2759 /* Ok, look for a following memory ref we can combine with.
2760 If one is found, change the memory ref to a PRE_INC
2761 or PRE_DEC, cancel this insn, and return 1.
2762 Return 0 if nothing has been done. */
2763 && try_pre_increment_1 (insn))
2764 goto flushed;
2766 #endif /* AUTO_INC_DEC */
2768 /* If this is not the final pass, and this insn is copying the
2769 value of a library call and it's dead, don't scan the
2770 insns that perform the library call, so that the call's
2771 arguments are not marked live. */
2772 if (libcall_is_dead)
2774 /* Mark the dest reg as `significant'. */
2775 mark_set_regs (old, dead, PATTERN (insn), NULL_RTX, significant);
2777 insn = XEXP (note, 0);
2778 prev = PREV_INSN (insn);
2780 else if (GET_CODE (PATTERN (insn)) == SET
2781 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
2782 && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
2783 && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx
2784 && GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == CONST_INT)
2785 /* We have an insn to pop a constant amount off the stack.
2786 (Such insns use PLUS regardless of the direction of the stack,
2787 and any insn to adjust the stack by a constant is always a pop.)
2788 These insns, if not dead stores, have no effect on life. */
2790 else
2792 /* Any regs live at the time of a call instruction
2793 must not go in a register clobbered by calls.
2794 Find all regs now live and record this for them. */
2796 if (GET_CODE (insn) == CALL_INSN && final)
2797 EXECUTE_IF_SET_IN_REG_SET (old, 0, i,
2799 REG_N_CALLS_CROSSED (i)++;
2802 /* LIVE gets the regs used in INSN;
2803 DEAD gets those set by it. Dead insns don't make anything
2804 live. */
2806 mark_set_regs (old, dead, PATTERN (insn),
2807 final ? insn : NULL_RTX, significant);
2809 /* If an insn doesn't use CC0, it becomes dead since we
2810 assume that every insn clobbers it. So show it dead here;
2811 mark_used_regs will set it live if it is referenced. */
2812 cc0_live = 0;
2814 if (! insn_is_dead)
2815 mark_used_regs (old, live, PATTERN (insn), final, insn);
2817 /* Sometimes we may have inserted something before INSN (such as
2818 a move) when we make an auto-inc. So ensure we will scan
2819 those insns. */
2820 #ifdef AUTO_INC_DEC
2821 prev = PREV_INSN (insn);
2822 #endif
2824 if (! insn_is_dead && GET_CODE (insn) == CALL_INSN)
2826 register int i;
2828 rtx note;
2830 for (note = CALL_INSN_FUNCTION_USAGE (insn);
2831 note;
2832 note = XEXP (note, 1))
2833 if (GET_CODE (XEXP (note, 0)) == USE)
2834 mark_used_regs (old, live, SET_DEST (XEXP (note, 0)),
2835 final, insn);
2837 /* Each call clobbers all call-clobbered regs that are not
2838 global or fixed. Note that the function-value reg is a
2839 call-clobbered reg, and mark_set_regs has already had
2840 a chance to handle it. */
2842 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2843 if (call_used_regs[i] && ! global_regs[i]
2844 && ! fixed_regs[i])
2845 SET_REGNO_REG_SET (dead, i);
2847 /* The stack ptr is used (honorarily) by a CALL insn. */
2848 SET_REGNO_REG_SET (live, STACK_POINTER_REGNUM);
2850 /* Calls may also reference any of the global registers,
2851 so they are made live. */
2852 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2853 if (global_regs[i])
2854 mark_used_regs (old, live,
2855 gen_rtx_REG (reg_raw_mode[i], i),
2856 final, insn);
2858 /* Calls also clobber memory. */
2859 mem_set_list = NULL_RTX;
2862 /* Update OLD for the registers used or set. */
2863 AND_COMPL_REG_SET (old, dead);
2864 IOR_REG_SET (old, live);
2868 /* On final pass, update counts of how many insns each reg is live
2869 at. */
2870 if (final)
2871 EXECUTE_IF_SET_IN_REG_SET (old, 0, i,
2872 { REG_LIVE_LENGTH (i)++; });
2874 flushed: ;
2875 if (insn == first)
2876 break;
2879 FREE_REG_SET (dead);
2880 FREE_REG_SET (live);
2883 /* Return 1 if X (the body of an insn, or part of it) is just dead stores
2884 (SET expressions whose destinations are registers dead after the insn).
2885 NEEDED is the regset that says which regs are alive after the insn.
2887 Unless CALL_OK is non-zero, an insn is needed if it contains a CALL.
2889 If X is the entire body of an insn, NOTES contains the reg notes
2890 pertaining to the insn. */
2892 static int
2893 insn_dead_p (x, needed, call_ok, notes)
2894 rtx x;
2895 regset needed;
2896 int call_ok;
2897 rtx notes ATTRIBUTE_UNUSED;
2899 enum rtx_code code = GET_CODE (x);
2901 #ifdef AUTO_INC_DEC
2902 /* If flow is invoked after reload, we must take existing AUTO_INC
2903 expresions into account. */
2904 if (reload_completed)
2906 for ( ; notes; notes = XEXP (notes, 1))
2908 if (REG_NOTE_KIND (notes) == REG_INC)
2910 int regno = REGNO (XEXP (notes, 0));
2912 /* Don't delete insns to set global regs. */
2913 if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
2914 || REGNO_REG_SET_P (needed, regno))
2915 return 0;
2919 #endif
2921 /* If setting something that's a reg or part of one,
2922 see if that register's altered value will be live. */
2924 if (code == SET)
2926 rtx r = SET_DEST (x);
2928 /* A SET that is a subroutine call cannot be dead. */
2929 if (! call_ok && GET_CODE (SET_SRC (x)) == CALL)
2930 return 0;
2932 #ifdef HAVE_cc0
2933 if (GET_CODE (r) == CC0)
2934 return ! cc0_live;
2935 #endif
2937 if (GET_CODE (r) == MEM && ! MEM_VOLATILE_P (r))
2939 rtx temp;
2940 /* Walk the set of memory locations we are currently tracking
2941 and see if one is an identical match to this memory location.
2942 If so, this memory write is dead (remember, we're walking
2943 backwards from the end of the block to the start. */
2944 temp = mem_set_list;
2945 while (temp)
2947 if (rtx_equal_p (XEXP (temp, 0), r))
2948 return 1;
2949 temp = XEXP (temp, 1);
2953 while (GET_CODE (r) == SUBREG || GET_CODE (r) == STRICT_LOW_PART
2954 || GET_CODE (r) == ZERO_EXTRACT)
2955 r = SUBREG_REG (r);
2957 if (GET_CODE (r) == REG)
2959 int regno = REGNO (r);
2961 /* Don't delete insns to set global regs. */
2962 if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
2963 /* Make sure insns to set frame pointer aren't deleted. */
2964 || (regno == FRAME_POINTER_REGNUM
2965 && (! reload_completed || frame_pointer_needed))
2966 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2967 || (regno == HARD_FRAME_POINTER_REGNUM
2968 && (! reload_completed || frame_pointer_needed))
2969 #endif
2970 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
2971 /* Make sure insns to set arg pointer are never deleted
2972 (if the arg pointer isn't fixed, there will be a USE for
2973 it, so we can treat it normally). */
2974 || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
2975 #endif
2976 || REGNO_REG_SET_P (needed, regno))
2977 return 0;
2979 /* If this is a hard register, verify that subsequent words are
2980 not needed. */
2981 if (regno < FIRST_PSEUDO_REGISTER)
2983 int n = HARD_REGNO_NREGS (regno, GET_MODE (r));
2985 while (--n > 0)
2986 if (REGNO_REG_SET_P (needed, regno+n))
2987 return 0;
2990 return 1;
2994 /* If performing several activities,
2995 insn is dead if each activity is individually dead.
2996 Also, CLOBBERs and USEs can be ignored; a CLOBBER or USE
2997 that's inside a PARALLEL doesn't make the insn worth keeping. */
2998 else if (code == PARALLEL)
3000 int i = XVECLEN (x, 0);
3002 for (i--; i >= 0; i--)
3003 if (GET_CODE (XVECEXP (x, 0, i)) != CLOBBER
3004 && GET_CODE (XVECEXP (x, 0, i)) != USE
3005 && ! insn_dead_p (XVECEXP (x, 0, i), needed, call_ok, NULL_RTX))
3006 return 0;
3008 return 1;
3011 /* A CLOBBER of a pseudo-register that is dead serves no purpose. That
3012 is not necessarily true for hard registers. */
3013 else if (code == CLOBBER && GET_CODE (XEXP (x, 0)) == REG
3014 && REGNO (XEXP (x, 0)) >= FIRST_PSEUDO_REGISTER
3015 && ! REGNO_REG_SET_P (needed, REGNO (XEXP (x, 0))))
3016 return 1;
3018 /* We do not check other CLOBBER or USE here. An insn consisting of just
3019 a CLOBBER or just a USE should not be deleted. */
3020 return 0;
3023 /* If X is the pattern of the last insn in a libcall, and assuming X is dead,
3024 return 1 if the entire library call is dead.
3025 This is true if X copies a register (hard or pseudo)
3026 and if the hard return reg of the call insn is dead.
3027 (The caller should have tested the destination of X already for death.)
3029 If this insn doesn't just copy a register, then we don't
3030 have an ordinary libcall. In that case, cse could not have
3031 managed to substitute the source for the dest later on,
3032 so we can assume the libcall is dead.
3034 NEEDED is the bit vector of pseudoregs live before this insn.
3035 NOTE is the REG_RETVAL note of the insn. INSN is the insn itself. */
3037 static int
3038 libcall_dead_p (x, needed, note, insn)
3039 rtx x;
3040 regset needed;
3041 rtx note;
3042 rtx insn;
3044 register RTX_CODE code = GET_CODE (x);
3046 if (code == SET)
3048 register rtx r = SET_SRC (x);
3049 if (GET_CODE (r) == REG)
3051 rtx call = XEXP (note, 0);
3052 rtx call_pat;
3053 register int i;
3055 /* Find the call insn. */
3056 while (call != insn && GET_CODE (call) != CALL_INSN)
3057 call = NEXT_INSN (call);
3059 /* If there is none, do nothing special,
3060 since ordinary death handling can understand these insns. */
3061 if (call == insn)
3062 return 0;
3064 /* See if the hard reg holding the value is dead.
3065 If this is a PARALLEL, find the call within it. */
3066 call_pat = PATTERN (call);
3067 if (GET_CODE (call_pat) == PARALLEL)
3069 for (i = XVECLEN (call_pat, 0) - 1; i >= 0; i--)
3070 if (GET_CODE (XVECEXP (call_pat, 0, i)) == SET
3071 && GET_CODE (SET_SRC (XVECEXP (call_pat, 0, i))) == CALL)
3072 break;
3074 /* This may be a library call that is returning a value
3075 via invisible pointer. Do nothing special, since
3076 ordinary death handling can understand these insns. */
3077 if (i < 0)
3078 return 0;
3080 call_pat = XVECEXP (call_pat, 0, i);
3083 return insn_dead_p (call_pat, needed, 1, REG_NOTES (call));
3086 return 1;
3089 /* Return 1 if register REGNO was used before it was set, i.e. if it is
3090 live at function entry. Don't count global register variables, variables
3091 in registers that can be used for function arg passing, or variables in
3092 fixed hard registers. */
3095 regno_uninitialized (regno)
3096 int regno;
3098 if (n_basic_blocks == 0
3099 || (regno < FIRST_PSEUDO_REGISTER
3100 && (global_regs[regno]
3101 || fixed_regs[regno]
3102 || FUNCTION_ARG_REGNO_P (regno))))
3103 return 0;
3105 return REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno);
3108 /* 1 if register REGNO was alive at a place where `setjmp' was called
3109 and was set more than once or is an argument.
3110 Such regs may be clobbered by `longjmp'. */
3113 regno_clobbered_at_setjmp (regno)
3114 int regno;
3116 if (n_basic_blocks == 0)
3117 return 0;
3119 return ((REG_N_SETS (regno) > 1
3120 || REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno))
3121 && REGNO_REG_SET_P (regs_live_at_setjmp, regno));
3124 /* INSN references memory, possibly using autoincrement addressing modes.
3125 Find any entries on the mem_set_list that need to be invalidated due
3126 to an address change. */
3127 static void
3128 invalidate_mems_from_autoinc (insn)
3129 rtx insn;
3131 rtx note = REG_NOTES (insn);
3132 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
3134 if (REG_NOTE_KIND (note) == REG_INC)
3136 rtx temp = mem_set_list;
3137 rtx prev = NULL_RTX;
3139 while (temp)
3141 if (reg_overlap_mentioned_p (XEXP (note, 0), XEXP (temp, 0)))
3143 /* Splice temp out of list. */
3144 if (prev)
3145 XEXP (prev, 1) = XEXP (temp, 1);
3146 else
3147 mem_set_list = XEXP (temp, 1);
3149 else
3150 prev = temp;
3151 temp = XEXP (temp, 1);
3157 /* Process the registers that are set within X.
3158 Their bits are set to 1 in the regset DEAD,
3159 because they are dead prior to this insn.
3161 If INSN is nonzero, it is the insn being processed
3162 and the fact that it is nonzero implies this is the FINAL pass
3163 in propagate_block. In this case, various info about register
3164 usage is stored, LOG_LINKS fields of insns are set up. */
3166 static void
3167 mark_set_regs (needed, dead, x, insn, significant)
3168 regset needed;
3169 regset dead;
3170 rtx x;
3171 rtx insn;
3172 regset significant;
3174 register RTX_CODE code = GET_CODE (x);
3176 if (code == SET || code == CLOBBER)
3177 mark_set_1 (needed, dead, x, insn, significant);
3178 else if (code == PARALLEL)
3180 register int i;
3181 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
3183 code = GET_CODE (XVECEXP (x, 0, i));
3184 if (code == SET || code == CLOBBER)
3185 mark_set_1 (needed, dead, XVECEXP (x, 0, i), insn, significant);
3190 /* Process a single SET rtx, X. */
3192 static void
3193 mark_set_1 (needed, dead, x, insn, significant)
3194 regset needed;
3195 regset dead;
3196 rtx x;
3197 rtx insn;
3198 regset significant;
3200 register int regno;
3201 register rtx reg = SET_DEST (x);
3203 /* Some targets place small structures in registers for
3204 return values of functions. We have to detect this
3205 case specially here to get correct flow information. */
3206 if (GET_CODE (reg) == PARALLEL
3207 && GET_MODE (reg) == BLKmode)
3209 register int i;
3211 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
3212 mark_set_1 (needed, dead, XVECEXP (reg, 0, i), insn, significant);
3213 return;
3216 /* Modifying just one hardware register of a multi-reg value
3217 or just a byte field of a register
3218 does not mean the value from before this insn is now dead.
3219 But it does mean liveness of that register at the end of the block
3220 is significant.
3222 Within mark_set_1, however, we treat it as if the register is
3223 indeed modified. mark_used_regs will, however, also treat this
3224 register as being used. Thus, we treat these insns as setting a
3225 new value for the register as a function of its old value. This
3226 cases LOG_LINKS to be made appropriately and this will help combine. */
3228 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
3229 || GET_CODE (reg) == SIGN_EXTRACT
3230 || GET_CODE (reg) == STRICT_LOW_PART)
3231 reg = XEXP (reg, 0);
3233 /* If this set is a MEM, then it kills any aliased writes.
3234 If this set is a REG, then it kills any MEMs which use the reg. */
3235 if (GET_CODE (reg) == MEM
3236 || GET_CODE (reg) == REG)
3238 rtx temp = mem_set_list;
3239 rtx prev = NULL_RTX;
3241 while (temp)
3243 if ((GET_CODE (reg) == MEM
3244 && output_dependence (XEXP (temp, 0), reg))
3245 || (GET_CODE (reg) == REG
3246 && reg_overlap_mentioned_p (reg, XEXP (temp, 0))))
3248 /* Splice this entry out of the list. */
3249 if (prev)
3250 XEXP (prev, 1) = XEXP (temp, 1);
3251 else
3252 mem_set_list = XEXP (temp, 1);
3254 else
3255 prev = temp;
3256 temp = XEXP (temp, 1);
3260 /* If the memory reference had embedded side effects (autoincrement
3261 address modes. Then we may need to kill some entries on the
3262 memory set list. */
3263 if (insn && GET_CODE (reg) == MEM)
3264 invalidate_mems_from_autoinc (insn);
3266 if (GET_CODE (reg) == MEM && ! side_effects_p (reg)
3267 /* We do not know the size of a BLKmode store, so we do not track
3268 them for redundant store elimination. */
3269 && GET_MODE (reg) != BLKmode
3270 /* There are no REG_INC notes for SP, so we can't assume we'll see
3271 everything that invalidates it. To be safe, don't eliminate any
3272 stores though SP; none of them should be redundant anyway. */
3273 && ! reg_mentioned_p (stack_pointer_rtx, reg))
3274 mem_set_list = gen_rtx_EXPR_LIST (VOIDmode, reg, mem_set_list);
3276 if (GET_CODE (reg) == REG
3277 && (regno = REGNO (reg), ! (regno == FRAME_POINTER_REGNUM
3278 && (! reload_completed || frame_pointer_needed)))
3279 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3280 && ! (regno == HARD_FRAME_POINTER_REGNUM
3281 && (! reload_completed || frame_pointer_needed))
3282 #endif
3283 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3284 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
3285 #endif
3286 && ! (regno < FIRST_PSEUDO_REGISTER && global_regs[regno]))
3287 /* && regno != STACK_POINTER_REGNUM) -- let's try without this. */
3289 int some_needed = REGNO_REG_SET_P (needed, regno);
3290 int some_not_needed = ! some_needed;
3292 /* Mark it as a significant register for this basic block. */
3293 if (significant)
3294 SET_REGNO_REG_SET (significant, regno);
3296 /* Mark it as dead before this insn. */
3297 SET_REGNO_REG_SET (dead, regno);
3299 /* A hard reg in a wide mode may really be multiple registers.
3300 If so, mark all of them just like the first. */
3301 if (regno < FIRST_PSEUDO_REGISTER)
3303 int n;
3305 /* Nothing below is needed for the stack pointer; get out asap.
3306 Eg, log links aren't needed, since combine won't use them. */
3307 if (regno == STACK_POINTER_REGNUM)
3308 return;
3310 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
3311 while (--n > 0)
3313 int regno_n = regno + n;
3314 int needed_regno = REGNO_REG_SET_P (needed, regno_n);
3315 if (significant)
3316 SET_REGNO_REG_SET (significant, regno_n);
3318 SET_REGNO_REG_SET (dead, regno_n);
3319 some_needed |= needed_regno;
3320 some_not_needed |= ! needed_regno;
3323 /* Additional data to record if this is the final pass. */
3324 if (insn)
3326 register rtx y = reg_next_use[regno];
3327 register int blocknum = BLOCK_NUM (insn);
3329 /* If this is a hard reg, record this function uses the reg. */
3331 if (regno < FIRST_PSEUDO_REGISTER)
3333 register int i;
3334 int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (reg));
3336 for (i = regno; i < endregno; i++)
3338 /* The next use is no longer "next", since a store
3339 intervenes. */
3340 reg_next_use[i] = 0;
3342 regs_ever_live[i] = 1;
3343 REG_N_SETS (i)++;
3346 else
3348 /* The next use is no longer "next", since a store
3349 intervenes. */
3350 reg_next_use[regno] = 0;
3352 /* Keep track of which basic blocks each reg appears in. */
3354 if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
3355 REG_BASIC_BLOCK (regno) = blocknum;
3356 else if (REG_BASIC_BLOCK (regno) != blocknum)
3357 REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
3359 /* Count (weighted) references, stores, etc. This counts a
3360 register twice if it is modified, but that is correct. */
3361 REG_N_SETS (regno)++;
3363 REG_N_REFS (regno) += loop_depth;
3365 /* The insns where a reg is live are normally counted
3366 elsewhere, but we want the count to include the insn
3367 where the reg is set, and the normal counting mechanism
3368 would not count it. */
3369 REG_LIVE_LENGTH (regno)++;
3372 if (! some_not_needed)
3374 /* Make a logical link from the next following insn
3375 that uses this register, back to this insn.
3376 The following insns have already been processed.
3378 We don't build a LOG_LINK for hard registers containing
3379 in ASM_OPERANDs. If these registers get replaced,
3380 we might wind up changing the semantics of the insn,
3381 even if reload can make what appear to be valid assignments
3382 later. */
3383 if (y && (BLOCK_NUM (y) == blocknum)
3384 && (regno >= FIRST_PSEUDO_REGISTER
3385 || asm_noperands (PATTERN (y)) < 0))
3386 LOG_LINKS (y)
3387 = gen_rtx_INSN_LIST (VOIDmode, insn, LOG_LINKS (y));
3389 else if (! some_needed)
3391 /* Note that dead stores have already been deleted when possible
3392 If we get here, we have found a dead store that cannot
3393 be eliminated (because the same insn does something useful).
3394 Indicate this by marking the reg being set as dying here. */
3395 REG_NOTES (insn)
3396 = gen_rtx_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
3397 REG_N_DEATHS (REGNO (reg))++;
3399 else
3401 /* This is a case where we have a multi-word hard register
3402 and some, but not all, of the words of the register are
3403 needed in subsequent insns. Write REG_UNUSED notes
3404 for those parts that were not needed. This case should
3405 be rare. */
3407 int i;
3409 for (i = HARD_REGNO_NREGS (regno, GET_MODE (reg)) - 1;
3410 i >= 0; i--)
3411 if (!REGNO_REG_SET_P (needed, regno + i))
3412 REG_NOTES (insn)
3413 = gen_rtx_EXPR_LIST (REG_UNUSED,
3414 gen_rtx_REG (reg_raw_mode[regno + i],
3415 regno + i),
3416 REG_NOTES (insn));
3420 else if (GET_CODE (reg) == REG)
3421 reg_next_use[regno] = 0;
3423 /* If this is the last pass and this is a SCRATCH, show it will be dying
3424 here and count it. */
3425 else if (GET_CODE (reg) == SCRATCH && insn != 0)
3427 REG_NOTES (insn)
3428 = gen_rtx_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
3432 #ifdef AUTO_INC_DEC
3434 /* X is a MEM found in INSN. See if we can convert it into an auto-increment
3435 reference. */
3437 static void
3438 find_auto_inc (needed, x, insn)
3439 regset needed;
3440 rtx x;
3441 rtx insn;
3443 rtx addr = XEXP (x, 0);
3444 HOST_WIDE_INT offset = 0;
3445 rtx set;
3447 /* Here we detect use of an index register which might be good for
3448 postincrement, postdecrement, preincrement, or predecrement. */
3450 if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
3451 offset = INTVAL (XEXP (addr, 1)), addr = XEXP (addr, 0);
3453 if (GET_CODE (addr) == REG)
3455 register rtx y;
3456 register int size = GET_MODE_SIZE (GET_MODE (x));
3457 rtx use;
3458 rtx incr;
3459 int regno = REGNO (addr);
3461 /* Is the next use an increment that might make auto-increment? */
3462 if ((incr = reg_next_use[regno]) != 0
3463 && (set = single_set (incr)) != 0
3464 && GET_CODE (set) == SET
3465 && BLOCK_NUM (incr) == BLOCK_NUM (insn)
3466 /* Can't add side effects to jumps; if reg is spilled and
3467 reloaded, there's no way to store back the altered value. */
3468 && GET_CODE (insn) != JUMP_INSN
3469 && (y = SET_SRC (set), GET_CODE (y) == PLUS)
3470 && XEXP (y, 0) == addr
3471 && GET_CODE (XEXP (y, 1)) == CONST_INT
3472 && ((HAVE_POST_INCREMENT
3473 && (INTVAL (XEXP (y, 1)) == size && offset == 0))
3474 || (HAVE_POST_DECREMENT
3475 && (INTVAL (XEXP (y, 1)) == - size && offset == 0))
3476 || (HAVE_PRE_INCREMENT
3477 && (INTVAL (XEXP (y, 1)) == size && offset == size))
3478 || (HAVE_PRE_DECREMENT
3479 && (INTVAL (XEXP (y, 1)) == - size && offset == - size)))
3480 /* Make sure this reg appears only once in this insn. */
3481 && (use = find_use_as_address (PATTERN (insn), addr, offset),
3482 use != 0 && use != (rtx) 1))
3484 rtx q = SET_DEST (set);
3485 enum rtx_code inc_code = (INTVAL (XEXP (y, 1)) == size
3486 ? (offset ? PRE_INC : POST_INC)
3487 : (offset ? PRE_DEC : POST_DEC));
3489 if (dead_or_set_p (incr, addr))
3491 /* This is the simple case. Try to make the auto-inc. If
3492 we can't, we are done. Otherwise, we will do any
3493 needed updates below. */
3494 if (! validate_change (insn, &XEXP (x, 0),
3495 gen_rtx_fmt_e (inc_code, Pmode, addr),
3497 return;
3499 else if (GET_CODE (q) == REG
3500 /* PREV_INSN used here to check the semi-open interval
3501 [insn,incr). */
3502 && ! reg_used_between_p (q, PREV_INSN (insn), incr)
3503 /* We must also check for sets of q as q may be
3504 a call clobbered hard register and there may
3505 be a call between PREV_INSN (insn) and incr. */
3506 && ! reg_set_between_p (q, PREV_INSN (insn), incr))
3508 /* We have *p followed sometime later by q = p+size.
3509 Both p and q must be live afterward,
3510 and q is not used between INSN and its assignment.
3511 Change it to q = p, ...*q..., q = q+size.
3512 Then fall into the usual case. */
3513 rtx insns, temp;
3514 basic_block bb;
3516 start_sequence ();
3517 emit_move_insn (q, addr);
3518 insns = get_insns ();
3519 end_sequence ();
3521 bb = BLOCK_FOR_INSN (insn);
3522 for (temp = insns; temp; temp = NEXT_INSN (temp))
3523 set_block_for_insn (temp, bb);
3525 /* If we can't make the auto-inc, or can't make the
3526 replacement into Y, exit. There's no point in making
3527 the change below if we can't do the auto-inc and doing
3528 so is not correct in the pre-inc case. */
3530 validate_change (insn, &XEXP (x, 0),
3531 gen_rtx_fmt_e (inc_code, Pmode, q),
3533 validate_change (incr, &XEXP (y, 0), q, 1);
3534 if (! apply_change_group ())
3535 return;
3537 /* We now know we'll be doing this change, so emit the
3538 new insn(s) and do the updates. */
3539 emit_insns_before (insns, insn);
3541 if (BLOCK_FOR_INSN (insn)->head == insn)
3542 BLOCK_FOR_INSN (insn)->head = insns;
3544 /* INCR will become a NOTE and INSN won't contain a
3545 use of ADDR. If a use of ADDR was just placed in
3546 the insn before INSN, make that the next use.
3547 Otherwise, invalidate it. */
3548 if (GET_CODE (PREV_INSN (insn)) == INSN
3549 && GET_CODE (PATTERN (PREV_INSN (insn))) == SET
3550 && SET_SRC (PATTERN (PREV_INSN (insn))) == addr)
3551 reg_next_use[regno] = PREV_INSN (insn);
3552 else
3553 reg_next_use[regno] = 0;
3555 addr = q;
3556 regno = REGNO (q);
3558 /* REGNO is now used in INCR which is below INSN, but
3559 it previously wasn't live here. If we don't mark
3560 it as needed, we'll put a REG_DEAD note for it
3561 on this insn, which is incorrect. */
3562 SET_REGNO_REG_SET (needed, regno);
3564 /* If there are any calls between INSN and INCR, show
3565 that REGNO now crosses them. */
3566 for (temp = insn; temp != incr; temp = NEXT_INSN (temp))
3567 if (GET_CODE (temp) == CALL_INSN)
3568 REG_N_CALLS_CROSSED (regno)++;
3570 else
3571 return;
3573 /* If we haven't returned, it means we were able to make the
3574 auto-inc, so update the status. First, record that this insn
3575 has an implicit side effect. */
3577 REG_NOTES (insn)
3578 = gen_rtx_EXPR_LIST (REG_INC, addr, REG_NOTES (insn));
3580 /* Modify the old increment-insn to simply copy
3581 the already-incremented value of our register. */
3582 if (! validate_change (incr, &SET_SRC (set), addr, 0))
3583 abort ();
3585 /* If that makes it a no-op (copying the register into itself) delete
3586 it so it won't appear to be a "use" and a "set" of this
3587 register. */
3588 if (SET_DEST (set) == addr)
3590 PUT_CODE (incr, NOTE);
3591 NOTE_LINE_NUMBER (incr) = NOTE_INSN_DELETED;
3592 NOTE_SOURCE_FILE (incr) = 0;
3595 if (regno >= FIRST_PSEUDO_REGISTER)
3597 /* Count an extra reference to the reg. When a reg is
3598 incremented, spilling it is worse, so we want to make
3599 that less likely. */
3600 REG_N_REFS (regno) += loop_depth;
3602 /* Count the increment as a setting of the register,
3603 even though it isn't a SET in rtl. */
3604 REG_N_SETS (regno)++;
3609 #endif /* AUTO_INC_DEC */
3611 /* Scan expression X and store a 1-bit in LIVE for each reg it uses.
3612 This is done assuming the registers needed from X
3613 are those that have 1-bits in NEEDED.
3615 On the final pass, FINAL is 1. This means try for autoincrement
3616 and count the uses and deaths of each pseudo-reg.
3618 INSN is the containing instruction. If INSN is dead, this function is not
3619 called. */
3621 static void
3622 mark_used_regs (needed, live, x, final, insn)
3623 regset needed;
3624 regset live;
3625 rtx x;
3626 int final;
3627 rtx insn;
3629 register RTX_CODE code;
3630 register int regno;
3631 int i;
3633 retry:
3634 code = GET_CODE (x);
3635 switch (code)
3637 case LABEL_REF:
3638 case SYMBOL_REF:
3639 case CONST_INT:
3640 case CONST:
3641 case CONST_DOUBLE:
3642 case PC:
3643 case ADDR_VEC:
3644 case ADDR_DIFF_VEC:
3645 return;
3647 #ifdef HAVE_cc0
3648 case CC0:
3649 cc0_live = 1;
3650 return;
3651 #endif
3653 case CLOBBER:
3654 /* If we are clobbering a MEM, mark any registers inside the address
3655 as being used. */
3656 if (GET_CODE (XEXP (x, 0)) == MEM)
3657 mark_used_regs (needed, live, XEXP (XEXP (x, 0), 0), final, insn);
3658 return;
3660 case MEM:
3661 /* Invalidate the data for the last MEM stored, but only if MEM is
3662 something that can be stored into. */
3663 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
3664 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
3665 ; /* needn't clear the memory set list */
3666 else
3668 rtx temp = mem_set_list;
3669 rtx prev = NULL_RTX;
3671 while (temp)
3673 if (anti_dependence (XEXP (temp, 0), x))
3675 /* Splice temp out of the list. */
3676 if (prev)
3677 XEXP (prev, 1) = XEXP (temp, 1);
3678 else
3679 mem_set_list = XEXP (temp, 1);
3681 else
3682 prev = temp;
3683 temp = XEXP (temp, 1);
3687 /* If the memory reference had embedded side effects (autoincrement
3688 address modes. Then we may need to kill some entries on the
3689 memory set list. */
3690 if (insn)
3691 invalidate_mems_from_autoinc (insn);
3693 #ifdef AUTO_INC_DEC
3694 if (final)
3695 find_auto_inc (needed, x, insn);
3696 #endif
3697 break;
3699 case SUBREG:
3700 if (GET_CODE (SUBREG_REG (x)) == REG
3701 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER
3702 && (GET_MODE_SIZE (GET_MODE (x))
3703 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
3704 REG_CHANGES_SIZE (REGNO (SUBREG_REG (x))) = 1;
3706 /* While we're here, optimize this case. */
3707 x = SUBREG_REG (x);
3709 /* In case the SUBREG is not of a register, don't optimize */
3710 if (GET_CODE (x) != REG)
3712 mark_used_regs (needed, live, x, final, insn);
3713 return;
3716 /* ... fall through ... */
3718 case REG:
3719 /* See a register other than being set
3720 => mark it as needed. */
3722 regno = REGNO (x);
3724 int some_needed = REGNO_REG_SET_P (needed, regno);
3725 int some_not_needed = ! some_needed;
3727 SET_REGNO_REG_SET (live, regno);
3729 /* A hard reg in a wide mode may really be multiple registers.
3730 If so, mark all of them just like the first. */
3731 if (regno < FIRST_PSEUDO_REGISTER)
3733 int n;
3735 /* For stack ptr or fixed arg pointer,
3736 nothing below can be necessary, so waste no more time. */
3737 if (regno == STACK_POINTER_REGNUM
3738 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3739 || (regno == HARD_FRAME_POINTER_REGNUM
3740 && (! reload_completed || frame_pointer_needed))
3741 #endif
3742 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3743 || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
3744 #endif
3745 || (regno == FRAME_POINTER_REGNUM
3746 && (! reload_completed || frame_pointer_needed)))
3748 /* If this is a register we are going to try to eliminate,
3749 don't mark it live here. If we are successful in
3750 eliminating it, it need not be live unless it is used for
3751 pseudos, in which case it will have been set live when
3752 it was allocated to the pseudos. If the register will not
3753 be eliminated, reload will set it live at that point. */
3755 if (! TEST_HARD_REG_BIT (elim_reg_set, regno))
3756 regs_ever_live[regno] = 1;
3757 return;
3759 /* No death notes for global register variables;
3760 their values are live after this function exits. */
3761 if (global_regs[regno])
3763 if (final)
3764 reg_next_use[regno] = insn;
3765 return;
3768 n = HARD_REGNO_NREGS (regno, GET_MODE (x));
3769 while (--n > 0)
3771 int regno_n = regno + n;
3772 int needed_regno = REGNO_REG_SET_P (needed, regno_n);
3774 SET_REGNO_REG_SET (live, regno_n);
3775 some_needed |= needed_regno;
3776 some_not_needed |= ! needed_regno;
3779 if (final)
3781 /* Record where each reg is used, so when the reg
3782 is set we know the next insn that uses it. */
3784 reg_next_use[regno] = insn;
3786 if (regno < FIRST_PSEUDO_REGISTER)
3788 /* If a hard reg is being used,
3789 record that this function does use it. */
3791 i = HARD_REGNO_NREGS (regno, GET_MODE (x));
3792 if (i == 0)
3793 i = 1;
3795 regs_ever_live[regno + --i] = 1;
3796 while (i > 0);
3798 else
3800 /* Keep track of which basic block each reg appears in. */
3802 register int blocknum = BLOCK_NUM (insn);
3804 if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
3805 REG_BASIC_BLOCK (regno) = blocknum;
3806 else if (REG_BASIC_BLOCK (regno) != blocknum)
3807 REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
3809 /* Count (weighted) number of uses of each reg. */
3811 REG_N_REFS (regno) += loop_depth;
3814 /* Record and count the insns in which a reg dies.
3815 If it is used in this insn and was dead below the insn
3816 then it dies in this insn. If it was set in this insn,
3817 we do not make a REG_DEAD note; likewise if we already
3818 made such a note. */
3820 if (some_not_needed
3821 && ! dead_or_set_p (insn, x)
3822 #if 0
3823 && (regno >= FIRST_PSEUDO_REGISTER || ! fixed_regs[regno])
3824 #endif
3827 /* Check for the case where the register dying partially
3828 overlaps the register set by this insn. */
3829 if (regno < FIRST_PSEUDO_REGISTER
3830 && HARD_REGNO_NREGS (regno, GET_MODE (x)) > 1)
3832 int n = HARD_REGNO_NREGS (regno, GET_MODE (x));
3833 while (--n >= 0)
3834 some_needed |= dead_or_set_regno_p (insn, regno + n);
3837 /* If none of the words in X is needed, make a REG_DEAD
3838 note. Otherwise, we must make partial REG_DEAD notes. */
3839 if (! some_needed)
3841 REG_NOTES (insn)
3842 = gen_rtx_EXPR_LIST (REG_DEAD, x, REG_NOTES (insn));
3843 REG_N_DEATHS (regno)++;
3845 else
3847 int i;
3849 /* Don't make a REG_DEAD note for a part of a register
3850 that is set in the insn. */
3852 for (i = HARD_REGNO_NREGS (regno, GET_MODE (x)) - 1;
3853 i >= 0; i--)
3854 if (!REGNO_REG_SET_P (needed, regno + i)
3855 && ! dead_or_set_regno_p (insn, regno + i))
3856 REG_NOTES (insn)
3857 = gen_rtx_EXPR_LIST (REG_DEAD,
3858 gen_rtx_REG (reg_raw_mode[regno + i],
3859 regno + i),
3860 REG_NOTES (insn));
3865 return;
3867 case SET:
3869 register rtx testreg = SET_DEST (x);
3870 int mark_dest = 0;
3872 /* If storing into MEM, don't show it as being used. But do
3873 show the address as being used. */
3874 if (GET_CODE (testreg) == MEM)
3876 #ifdef AUTO_INC_DEC
3877 if (final)
3878 find_auto_inc (needed, testreg, insn);
3879 #endif
3880 mark_used_regs (needed, live, XEXP (testreg, 0), final, insn);
3881 mark_used_regs (needed, live, SET_SRC (x), final, insn);
3882 return;
3885 /* Storing in STRICT_LOW_PART is like storing in a reg
3886 in that this SET might be dead, so ignore it in TESTREG.
3887 but in some other ways it is like using the reg.
3889 Storing in a SUBREG or a bit field is like storing the entire
3890 register in that if the register's value is not used
3891 then this SET is not needed. */
3892 while (GET_CODE (testreg) == STRICT_LOW_PART
3893 || GET_CODE (testreg) == ZERO_EXTRACT
3894 || GET_CODE (testreg) == SIGN_EXTRACT
3895 || GET_CODE (testreg) == SUBREG)
3897 if (GET_CODE (testreg) == SUBREG
3898 && GET_CODE (SUBREG_REG (testreg)) == REG
3899 && REGNO (SUBREG_REG (testreg)) >= FIRST_PSEUDO_REGISTER
3900 && (GET_MODE_SIZE (GET_MODE (testreg))
3901 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (testreg)))))
3902 REG_CHANGES_SIZE (REGNO (SUBREG_REG (testreg))) = 1;
3904 /* Modifying a single register in an alternate mode
3905 does not use any of the old value. But these other
3906 ways of storing in a register do use the old value. */
3907 if (GET_CODE (testreg) == SUBREG
3908 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
3910 else
3911 mark_dest = 1;
3913 testreg = XEXP (testreg, 0);
3916 /* If this is a store into a register,
3917 recursively scan the value being stored. */
3919 if ((GET_CODE (testreg) == PARALLEL
3920 && GET_MODE (testreg) == BLKmode)
3921 || (GET_CODE (testreg) == REG
3922 && (regno = REGNO (testreg), ! (regno == FRAME_POINTER_REGNUM
3923 && (! reload_completed || frame_pointer_needed)))
3924 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3925 && ! (regno == HARD_FRAME_POINTER_REGNUM
3926 && (! reload_completed || frame_pointer_needed))
3927 #endif
3928 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3929 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
3930 #endif
3932 /* We used to exclude global_regs here, but that seems wrong.
3933 Storing in them is like storing in mem. */
3935 mark_used_regs (needed, live, SET_SRC (x), final, insn);
3936 if (mark_dest)
3937 mark_used_regs (needed, live, SET_DEST (x), final, insn);
3938 return;
3941 break;
3943 case RETURN:
3944 /* If exiting needs the right stack value, consider this insn as
3945 using the stack pointer. In any event, consider it as using
3946 all global registers and all registers used by return. */
3947 if (! EXIT_IGNORE_STACK
3948 || (! FRAME_POINTER_REQUIRED
3949 && ! current_function_calls_alloca
3950 && flag_omit_frame_pointer)
3951 || current_function_sp_is_unchanging)
3952 SET_REGNO_REG_SET (live, STACK_POINTER_REGNUM);
3954 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3955 if (global_regs[i]
3956 #ifdef EPILOGUE_USES
3957 || EPILOGUE_USES (i)
3958 #endif
3960 SET_REGNO_REG_SET (live, i);
3961 break;
3963 case ASM_OPERANDS:
3964 case UNSPEC_VOLATILE:
3965 case TRAP_IF:
3966 case ASM_INPUT:
3968 /* Traditional and volatile asm instructions must be considered to use
3969 and clobber all hard registers, all pseudo-registers and all of
3970 memory. So must TRAP_IF and UNSPEC_VOLATILE operations.
3972 Consider for instance a volatile asm that changes the fpu rounding
3973 mode. An insn should not be moved across this even if it only uses
3974 pseudo-regs because it might give an incorrectly rounded result.
3976 ?!? Unfortunately, marking all hard registers as live causes massive
3977 problems for the register allocator and marking all pseudos as live
3978 creates mountains of uninitialized variable warnings.
3980 So for now, just clear the memory set list and mark any regs
3981 we can find in ASM_OPERANDS as used. */
3982 if (code != ASM_OPERANDS || MEM_VOLATILE_P (x))
3983 mem_set_list = NULL_RTX;
3985 /* For all ASM_OPERANDS, we must traverse the vector of input operands.
3986 We can not just fall through here since then we would be confused
3987 by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate
3988 traditional asms unlike their normal usage. */
3989 if (code == ASM_OPERANDS)
3991 int j;
3993 for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
3994 mark_used_regs (needed, live, ASM_OPERANDS_INPUT (x, j),
3995 final, insn);
3997 break;
4001 default:
4002 break;
4005 /* Recursively scan the operands of this expression. */
4008 register char *fmt = GET_RTX_FORMAT (code);
4009 register int i;
4011 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4013 if (fmt[i] == 'e')
4015 /* Tail recursive case: save a function call level. */
4016 if (i == 0)
4018 x = XEXP (x, 0);
4019 goto retry;
4021 mark_used_regs (needed, live, XEXP (x, i), final, insn);
4023 else if (fmt[i] == 'E')
4025 register int j;
4026 for (j = 0; j < XVECLEN (x, i); j++)
4027 mark_used_regs (needed, live, XVECEXP (x, i, j), final, insn);
4033 #ifdef AUTO_INC_DEC
4035 static int
4036 try_pre_increment_1 (insn)
4037 rtx insn;
4039 /* Find the next use of this reg. If in same basic block,
4040 make it do pre-increment or pre-decrement if appropriate. */
4041 rtx x = single_set (insn);
4042 HOST_WIDE_INT amount = ((GET_CODE (SET_SRC (x)) == PLUS ? 1 : -1)
4043 * INTVAL (XEXP (SET_SRC (x), 1)));
4044 int regno = REGNO (SET_DEST (x));
4045 rtx y = reg_next_use[regno];
4046 if (y != 0
4047 && BLOCK_NUM (y) == BLOCK_NUM (insn)
4048 /* Don't do this if the reg dies, or gets set in y; a standard addressing
4049 mode would be better. */
4050 && ! dead_or_set_p (y, SET_DEST (x))
4051 && try_pre_increment (y, SET_DEST (x), amount))
4053 /* We have found a suitable auto-increment
4054 and already changed insn Y to do it.
4055 So flush this increment-instruction. */
4056 PUT_CODE (insn, NOTE);
4057 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
4058 NOTE_SOURCE_FILE (insn) = 0;
4059 /* Count a reference to this reg for the increment
4060 insn we are deleting. When a reg is incremented.
4061 spilling it is worse, so we want to make that
4062 less likely. */
4063 if (regno >= FIRST_PSEUDO_REGISTER)
4065 REG_N_REFS (regno) += loop_depth;
4066 REG_N_SETS (regno)++;
4068 return 1;
4070 return 0;
4073 /* Try to change INSN so that it does pre-increment or pre-decrement
4074 addressing on register REG in order to add AMOUNT to REG.
4075 AMOUNT is negative for pre-decrement.
4076 Returns 1 if the change could be made.
4077 This checks all about the validity of the result of modifying INSN. */
4079 static int
4080 try_pre_increment (insn, reg, amount)
4081 rtx insn, reg;
4082 HOST_WIDE_INT amount;
4084 register rtx use;
4086 /* Nonzero if we can try to make a pre-increment or pre-decrement.
4087 For example, addl $4,r1; movl (r1),... can become movl +(r1),... */
4088 int pre_ok = 0;
4089 /* Nonzero if we can try to make a post-increment or post-decrement.
4090 For example, addl $4,r1; movl -4(r1),... can become movl (r1)+,...
4091 It is possible for both PRE_OK and POST_OK to be nonzero if the machine
4092 supports both pre-inc and post-inc, or both pre-dec and post-dec. */
4093 int post_ok = 0;
4095 /* Nonzero if the opportunity actually requires post-inc or post-dec. */
4096 int do_post = 0;
4098 /* From the sign of increment, see which possibilities are conceivable
4099 on this target machine. */
4100 if (HAVE_PRE_INCREMENT && amount > 0)
4101 pre_ok = 1;
4102 if (HAVE_POST_INCREMENT && amount > 0)
4103 post_ok = 1;
4105 if (HAVE_PRE_DECREMENT && amount < 0)
4106 pre_ok = 1;
4107 if (HAVE_POST_DECREMENT && amount < 0)
4108 post_ok = 1;
4110 if (! (pre_ok || post_ok))
4111 return 0;
4113 /* It is not safe to add a side effect to a jump insn
4114 because if the incremented register is spilled and must be reloaded
4115 there would be no way to store the incremented value back in memory. */
4117 if (GET_CODE (insn) == JUMP_INSN)
4118 return 0;
4120 use = 0;
4121 if (pre_ok)
4122 use = find_use_as_address (PATTERN (insn), reg, 0);
4123 if (post_ok && (use == 0 || use == (rtx) 1))
4125 use = find_use_as_address (PATTERN (insn), reg, -amount);
4126 do_post = 1;
4129 if (use == 0 || use == (rtx) 1)
4130 return 0;
4132 if (GET_MODE_SIZE (GET_MODE (use)) != (amount > 0 ? amount : - amount))
4133 return 0;
4135 /* See if this combination of instruction and addressing mode exists. */
4136 if (! validate_change (insn, &XEXP (use, 0),
4137 gen_rtx_fmt_e (amount > 0
4138 ? (do_post ? POST_INC : PRE_INC)
4139 : (do_post ? POST_DEC : PRE_DEC),
4140 Pmode, reg), 0))
4141 return 0;
4143 /* Record that this insn now has an implicit side effect on X. */
4144 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_INC, reg, REG_NOTES (insn));
4145 return 1;
4148 #endif /* AUTO_INC_DEC */
4150 /* Find the place in the rtx X where REG is used as a memory address.
4151 Return the MEM rtx that so uses it.
4152 If PLUSCONST is nonzero, search instead for a memory address equivalent to
4153 (plus REG (const_int PLUSCONST)).
4155 If such an address does not appear, return 0.
4156 If REG appears more than once, or is used other than in such an address,
4157 return (rtx)1. */
4160 find_use_as_address (x, reg, plusconst)
4161 register rtx x;
4162 rtx reg;
4163 HOST_WIDE_INT plusconst;
4165 enum rtx_code code = GET_CODE (x);
4166 char *fmt = GET_RTX_FORMAT (code);
4167 register int i;
4168 register rtx value = 0;
4169 register rtx tem;
4171 if (code == MEM && XEXP (x, 0) == reg && plusconst == 0)
4172 return x;
4174 if (code == MEM && GET_CODE (XEXP (x, 0)) == PLUS
4175 && XEXP (XEXP (x, 0), 0) == reg
4176 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4177 && INTVAL (XEXP (XEXP (x, 0), 1)) == plusconst)
4178 return x;
4180 if (code == SIGN_EXTRACT || code == ZERO_EXTRACT)
4182 /* If REG occurs inside a MEM used in a bit-field reference,
4183 that is unacceptable. */
4184 if (find_use_as_address (XEXP (x, 0), reg, 0) != 0)
4185 return (rtx) (HOST_WIDE_INT) 1;
4188 if (x == reg)
4189 return (rtx) (HOST_WIDE_INT) 1;
4191 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4193 if (fmt[i] == 'e')
4195 tem = find_use_as_address (XEXP (x, i), reg, plusconst);
4196 if (value == 0)
4197 value = tem;
4198 else if (tem != 0)
4199 return (rtx) (HOST_WIDE_INT) 1;
4201 if (fmt[i] == 'E')
4203 register int j;
4204 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4206 tem = find_use_as_address (XVECEXP (x, i, j), reg, plusconst);
4207 if (value == 0)
4208 value = tem;
4209 else if (tem != 0)
4210 return (rtx) (HOST_WIDE_INT) 1;
4215 return value;
4218 /* Write information about registers and basic blocks into FILE.
4219 This is part of making a debugging dump. */
4221 void
4222 dump_flow_info (file)
4223 FILE *file;
4225 register int i;
4226 static char *reg_class_names[] = REG_CLASS_NAMES;
4228 fprintf (file, "%d registers.\n", max_regno);
4229 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
4230 if (REG_N_REFS (i))
4232 enum reg_class class, altclass;
4233 fprintf (file, "\nRegister %d used %d times across %d insns",
4234 i, REG_N_REFS (i), REG_LIVE_LENGTH (i));
4235 if (REG_BASIC_BLOCK (i) >= 0)
4236 fprintf (file, " in block %d", REG_BASIC_BLOCK (i));
4237 if (REG_N_SETS (i))
4238 fprintf (file, "; set %d time%s", REG_N_SETS (i),
4239 (REG_N_SETS (i) == 1) ? "" : "s");
4240 if (REG_USERVAR_P (regno_reg_rtx[i]))
4241 fprintf (file, "; user var");
4242 if (REG_N_DEATHS (i) != 1)
4243 fprintf (file, "; dies in %d places", REG_N_DEATHS (i));
4244 if (REG_N_CALLS_CROSSED (i) == 1)
4245 fprintf (file, "; crosses 1 call");
4246 else if (REG_N_CALLS_CROSSED (i))
4247 fprintf (file, "; crosses %d calls", REG_N_CALLS_CROSSED (i));
4248 if (PSEUDO_REGNO_BYTES (i) != UNITS_PER_WORD)
4249 fprintf (file, "; %d bytes", PSEUDO_REGNO_BYTES (i));
4250 class = reg_preferred_class (i);
4251 altclass = reg_alternate_class (i);
4252 if (class != GENERAL_REGS || altclass != ALL_REGS)
4254 if (altclass == ALL_REGS || class == ALL_REGS)
4255 fprintf (file, "; pref %s", reg_class_names[(int) class]);
4256 else if (altclass == NO_REGS)
4257 fprintf (file, "; %s or none", reg_class_names[(int) class]);
4258 else
4259 fprintf (file, "; pref %s, else %s",
4260 reg_class_names[(int) class],
4261 reg_class_names[(int) altclass]);
4263 if (REGNO_POINTER_FLAG (i))
4264 fprintf (file, "; pointer");
4265 fprintf (file, ".\n");
4268 fprintf (file, "\n%d basic blocks.\n", n_basic_blocks);
4269 for (i = 0; i < n_basic_blocks; i++)
4271 register basic_block bb = BASIC_BLOCK (i);
4272 register int regno;
4273 register edge e;
4275 fprintf (file, "\nBasic block %d: first insn %d, last %d.\n",
4276 i, INSN_UID (bb->head), INSN_UID (bb->end));
4278 fprintf (file, "Predecessors: ");
4279 for (e = bb->pred; e ; e = e->pred_next)
4280 dump_edge_info (file, e, 0);
4282 fprintf (file, "\nSuccessors: ");
4283 for (e = bb->succ; e ; e = e->succ_next)
4284 dump_edge_info (file, e, 1);
4286 fprintf (file, "\nRegisters live at start:");
4287 if (bb->global_live_at_start)
4289 for (regno = 0; regno < max_regno; regno++)
4290 if (REGNO_REG_SET_P (bb->global_live_at_start, regno))
4291 fprintf (file, " %d", regno);
4293 else
4294 fprintf (file, " n/a");
4296 fprintf (file, "\nRegisters live at end:");
4297 if (bb->global_live_at_end)
4299 for (regno = 0; regno < max_regno; regno++)
4300 if (REGNO_REG_SET_P (bb->global_live_at_end, regno))
4301 fprintf (file, " %d", regno);
4303 else
4304 fprintf (file, " n/a");
4306 putc('\n', file);
4309 putc('\n', file);
4312 static void
4313 dump_edge_info (file, e, do_succ)
4314 FILE *file;
4315 edge e;
4316 int do_succ;
4318 basic_block side = (do_succ ? e->dest : e->src);
4320 if (side == ENTRY_BLOCK_PTR)
4321 fputs (" ENTRY", file);
4322 else if (side == EXIT_BLOCK_PTR)
4323 fputs (" EXIT", file);
4324 else
4325 fprintf (file, " %d", side->index);
4327 if (e->flags)
4329 static char * bitnames[] = {
4330 "fallthru", "crit", "ab", "abcall", "eh", "fake"
4332 int comma = 0;
4333 int i, flags = e->flags;
4335 fputc (' ', file);
4336 fputc ('(', file);
4337 for (i = 0; flags; i++)
4338 if (flags & (1 << i))
4340 flags &= ~(1 << i);
4342 if (comma)
4343 fputc (',', file);
4344 if (i < (int)(sizeof (bitnames) / sizeof (*bitnames)))
4345 fputs (bitnames[i], file);
4346 else
4347 fprintf (file, "%d", i);
4348 comma = 1;
4350 fputc (')', file);
4355 /* Like print_rtl, but also print out live information for the start of each
4356 basic block. */
4358 void
4359 print_rtl_with_bb (outf, rtx_first)
4360 FILE *outf;
4361 rtx rtx_first;
4363 register rtx tmp_rtx;
4365 if (rtx_first == 0)
4366 fprintf (outf, "(nil)\n");
4367 else
4369 int i;
4370 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
4371 int max_uid = get_max_uid ();
4372 basic_block *start = (basic_block *)
4373 alloca (max_uid * sizeof (basic_block));
4374 basic_block *end = (basic_block *)
4375 alloca (max_uid * sizeof (basic_block));
4376 enum bb_state *in_bb_p = (enum bb_state *)
4377 alloca (max_uid * sizeof (enum bb_state));
4379 memset (start, 0, max_uid * sizeof (basic_block));
4380 memset (end, 0, max_uid * sizeof (basic_block));
4381 memset (in_bb_p, 0, max_uid * sizeof (enum bb_state));
4383 for (i = n_basic_blocks - 1; i >= 0; i--)
4385 basic_block bb = BASIC_BLOCK (i);
4386 rtx x;
4388 start[INSN_UID (bb->head)] = bb;
4389 end[INSN_UID (bb->end)] = bb;
4390 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
4392 enum bb_state state = IN_MULTIPLE_BB;
4393 if (in_bb_p[INSN_UID(x)] == NOT_IN_BB)
4394 state = IN_ONE_BB;
4395 in_bb_p[INSN_UID(x)] = state;
4397 if (x == bb->end)
4398 break;
4402 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
4404 int did_output;
4405 basic_block bb;
4407 if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
4409 fprintf (outf, ";; Start of basic block %d, registers live:",
4410 bb->index);
4412 EXECUTE_IF_SET_IN_REG_SET (bb->global_live_at_start, 0, i,
4414 fprintf (outf, " %d", i);
4415 if (i < FIRST_PSEUDO_REGISTER)
4416 fprintf (outf, " [%s]",
4417 reg_names[i]);
4419 putc ('\n', outf);
4422 if (in_bb_p[INSN_UID(tmp_rtx)] == NOT_IN_BB
4423 && GET_CODE (tmp_rtx) != NOTE
4424 && GET_CODE (tmp_rtx) != BARRIER
4425 && ! obey_regdecls)
4426 fprintf (outf, ";; Insn is not within a basic block\n");
4427 else if (in_bb_p[INSN_UID(tmp_rtx)] == IN_MULTIPLE_BB)
4428 fprintf (outf, ";; Insn is in multiple basic blocks\n");
4430 did_output = print_rtl_single (outf, tmp_rtx);
4432 if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
4433 fprintf (outf, ";; End of basic block %d\n", bb->index);
4435 if (did_output)
4436 putc ('\n', outf);
4442 /* Integer list support. */
4444 /* Allocate a node from list *HEAD_PTR. */
4446 static int_list_ptr
4447 alloc_int_list_node (head_ptr)
4448 int_list_block **head_ptr;
4450 struct int_list_block *first_blk = *head_ptr;
4452 if (first_blk == NULL || first_blk->nodes_left <= 0)
4454 first_blk = (struct int_list_block *) xmalloc (sizeof (struct int_list_block));
4455 first_blk->nodes_left = INT_LIST_NODES_IN_BLK;
4456 first_blk->next = *head_ptr;
4457 *head_ptr = first_blk;
4460 first_blk->nodes_left--;
4461 return &first_blk->nodes[first_blk->nodes_left];
4464 /* Pointer to head of predecessor/successor block list. */
4465 static int_list_block *pred_int_list_blocks;
4467 /* Add a new node to integer list LIST with value VAL.
4468 LIST is a pointer to a list object to allow for different implementations.
4469 If *LIST is initially NULL, the list is empty.
4470 The caller must not care whether the element is added to the front or
4471 to the end of the list (to allow for different implementations). */
4473 static int_list_ptr
4474 add_int_list_node (blk_list, list, val)
4475 int_list_block **blk_list;
4476 int_list **list;
4477 int val;
4479 int_list_ptr p = alloc_int_list_node (blk_list);
4481 p->val = val;
4482 p->next = *list;
4483 *list = p;
4484 return p;
4487 /* Free the blocks of lists at BLK_LIST. */
4489 void
4490 free_int_list (blk_list)
4491 int_list_block **blk_list;
4493 int_list_block *p, *next;
4495 for (p = *blk_list; p != NULL; p = next)
4497 next = p->next;
4498 free (p);
4501 /* Mark list as empty for the next function we compile. */
4502 *blk_list = NULL;
4505 /* Predecessor/successor computation. */
4507 /* Mark PRED_BB a precessor of SUCC_BB,
4508 and conversely SUCC_BB a successor of PRED_BB. */
4510 static void
4511 add_pred_succ (pred_bb, succ_bb, s_preds, s_succs, num_preds, num_succs)
4512 int pred_bb;
4513 int succ_bb;
4514 int_list_ptr *s_preds;
4515 int_list_ptr *s_succs;
4516 int *num_preds;
4517 int *num_succs;
4519 if (succ_bb != EXIT_BLOCK)
4521 add_int_list_node (&pred_int_list_blocks, &s_preds[succ_bb], pred_bb);
4522 num_preds[succ_bb]++;
4524 if (pred_bb != ENTRY_BLOCK)
4526 add_int_list_node (&pred_int_list_blocks, &s_succs[pred_bb], succ_bb);
4527 num_succs[pred_bb]++;
4531 /* Convert edge lists into pred/succ lists for backward compatibility. */
4533 void
4534 compute_preds_succs (s_preds, s_succs, num_preds, num_succs)
4535 int_list_ptr *s_preds;
4536 int_list_ptr *s_succs;
4537 int *num_preds;
4538 int *num_succs;
4540 int i, n = n_basic_blocks;
4541 edge e;
4543 memset (s_preds, 0, n_basic_blocks * sizeof (int_list_ptr));
4544 memset (s_succs, 0, n_basic_blocks * sizeof (int_list_ptr));
4545 memset (num_preds, 0, n_basic_blocks * sizeof (int));
4546 memset (num_succs, 0, n_basic_blocks * sizeof (int));
4548 for (i = 0; i < n; ++i)
4550 basic_block bb = BASIC_BLOCK (i);
4552 for (e = bb->succ; e ; e = e->succ_next)
4553 add_pred_succ (i, e->dest->index, s_preds, s_succs,
4554 num_preds, num_succs);
4557 for (e = ENTRY_BLOCK_PTR->succ; e ; e = e->succ_next)
4558 add_pred_succ (ENTRY_BLOCK, e->dest->index, s_preds, s_succs,
4559 num_preds, num_succs);
4562 void
4563 dump_bb_data (file, preds, succs, live_info)
4564 FILE *file;
4565 int_list_ptr *preds;
4566 int_list_ptr *succs;
4567 int live_info;
4569 int bb;
4570 int_list_ptr p;
4572 fprintf (file, "BB data\n\n");
4573 for (bb = 0; bb < n_basic_blocks; bb++)
4575 fprintf (file, "BB %d, start %d, end %d\n", bb,
4576 INSN_UID (BLOCK_HEAD (bb)), INSN_UID (BLOCK_END (bb)));
4577 fprintf (file, " preds:");
4578 for (p = preds[bb]; p != NULL; p = p->next)
4580 int pred_bb = INT_LIST_VAL (p);
4581 if (pred_bb == ENTRY_BLOCK)
4582 fprintf (file, " entry");
4583 else
4584 fprintf (file, " %d", pred_bb);
4586 fprintf (file, "\n");
4587 fprintf (file, " succs:");
4588 for (p = succs[bb]; p != NULL; p = p->next)
4590 int succ_bb = INT_LIST_VAL (p);
4591 if (succ_bb == EXIT_BLOCK)
4592 fprintf (file, " exit");
4593 else
4594 fprintf (file, " %d", succ_bb);
4596 if (live_info)
4598 int regno;
4599 fprintf (file, "\nRegisters live at start:");
4600 for (regno = 0; regno < max_regno; regno++)
4601 if (REGNO_REG_SET_P (BASIC_BLOCK (bb)->global_live_at_start, regno))
4602 fprintf (file, " %d", regno);
4603 fprintf (file, "\n");
4605 fprintf (file, "\n");
4607 fprintf (file, "\n");
4610 /* Free basic block data storage. */
4612 void
4613 free_bb_mem ()
4615 free_int_list (&pred_int_list_blocks);
4618 /* Compute dominator relationships. */
4619 void
4620 compute_dominators (dominators, post_dominators, s_preds, s_succs)
4621 sbitmap *dominators;
4622 sbitmap *post_dominators;
4623 int_list_ptr *s_preds;
4624 int_list_ptr *s_succs;
4626 int bb, changed, passes;
4627 sbitmap *temp_bitmap;
4629 temp_bitmap = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
4630 sbitmap_vector_ones (dominators, n_basic_blocks);
4631 sbitmap_vector_ones (post_dominators, n_basic_blocks);
4632 sbitmap_vector_zero (temp_bitmap, n_basic_blocks);
4634 sbitmap_zero (dominators[0]);
4635 SET_BIT (dominators[0], 0);
4637 sbitmap_zero (post_dominators[n_basic_blocks - 1]);
4638 SET_BIT (post_dominators[n_basic_blocks - 1], 0);
4640 passes = 0;
4641 changed = 1;
4642 while (changed)
4644 changed = 0;
4645 for (bb = 1; bb < n_basic_blocks; bb++)
4647 sbitmap_intersect_of_predecessors (temp_bitmap[bb], dominators,
4648 bb, s_preds);
4649 SET_BIT (temp_bitmap[bb], bb);
4650 changed |= sbitmap_a_and_b (dominators[bb],
4651 dominators[bb],
4652 temp_bitmap[bb]);
4653 sbitmap_intersect_of_successors (temp_bitmap[bb], post_dominators,
4654 bb, s_succs);
4655 SET_BIT (temp_bitmap[bb], bb);
4656 changed |= sbitmap_a_and_b (post_dominators[bb],
4657 post_dominators[bb],
4658 temp_bitmap[bb]);
4660 passes++;
4663 free (temp_bitmap);
4666 /* Given DOMINATORS, compute the immediate dominators into IDOM. */
4668 void
4669 compute_immediate_dominators (idom, dominators)
4670 int *idom;
4671 sbitmap *dominators;
4673 sbitmap *tmp;
4674 int b;
4676 tmp = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
4678 /* Begin with tmp(n) = dom(n) - { n }. */
4679 for (b = n_basic_blocks; --b >= 0; )
4681 sbitmap_copy (tmp[b], dominators[b]);
4682 RESET_BIT (tmp[b], b);
4685 /* Subtract out all of our dominator's dominators. */
4686 for (b = n_basic_blocks; --b >= 0; )
4688 sbitmap tmp_b = tmp[b];
4689 int s;
4691 for (s = n_basic_blocks; --s >= 0; )
4692 if (TEST_BIT (tmp_b, s))
4693 sbitmap_difference (tmp_b, tmp_b, tmp[s]);
4696 /* Find the one bit set in the bitmap and put it in the output array. */
4697 for (b = n_basic_blocks; --b >= 0; )
4699 int t;
4700 EXECUTE_IF_SET_IN_SBITMAP (tmp[b], 0, t, { idom[b] = t; });
4703 sbitmap_vector_free (tmp);
4706 /* Count for a single SET rtx, X. */
4708 static void
4709 count_reg_sets_1 (x)
4710 rtx x;
4712 register int regno;
4713 register rtx reg = SET_DEST (x);
4715 /* Find the register that's set/clobbered. */
4716 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
4717 || GET_CODE (reg) == SIGN_EXTRACT
4718 || GET_CODE (reg) == STRICT_LOW_PART)
4719 reg = XEXP (reg, 0);
4721 if (GET_CODE (reg) == PARALLEL
4722 && GET_MODE (reg) == BLKmode)
4724 register int i;
4725 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
4726 count_reg_sets_1 (XVECEXP (reg, 0, i));
4727 return;
4730 if (GET_CODE (reg) == REG)
4732 regno = REGNO (reg);
4733 if (regno >= FIRST_PSEUDO_REGISTER)
4735 /* Count (weighted) references, stores, etc. This counts a
4736 register twice if it is modified, but that is correct. */
4737 REG_N_SETS (regno)++;
4739 REG_N_REFS (regno) += loop_depth;
4744 /* Increment REG_N_SETS for each SET or CLOBBER found in X; also increment
4745 REG_N_REFS by the current loop depth for each SET or CLOBBER found. */
4747 static void
4748 count_reg_sets (x)
4749 rtx x;
4751 register RTX_CODE code = GET_CODE (x);
4753 if (code == SET || code == CLOBBER)
4754 count_reg_sets_1 (x);
4755 else if (code == PARALLEL)
4757 register int i;
4758 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
4760 code = GET_CODE (XVECEXP (x, 0, i));
4761 if (code == SET || code == CLOBBER)
4762 count_reg_sets_1 (XVECEXP (x, 0, i));
4767 /* Increment REG_N_REFS by the current loop depth each register reference
4768 found in X. */
4770 static void
4771 count_reg_references (x)
4772 rtx x;
4774 register RTX_CODE code;
4776 retry:
4777 code = GET_CODE (x);
4778 switch (code)
4780 case LABEL_REF:
4781 case SYMBOL_REF:
4782 case CONST_INT:
4783 case CONST:
4784 case CONST_DOUBLE:
4785 case PC:
4786 case ADDR_VEC:
4787 case ADDR_DIFF_VEC:
4788 case ASM_INPUT:
4789 return;
4791 #ifdef HAVE_cc0
4792 case CC0:
4793 return;
4794 #endif
4796 case CLOBBER:
4797 /* If we are clobbering a MEM, mark any registers inside the address
4798 as being used. */
4799 if (GET_CODE (XEXP (x, 0)) == MEM)
4800 count_reg_references (XEXP (XEXP (x, 0), 0));
4801 return;
4803 case SUBREG:
4804 /* While we're here, optimize this case. */
4805 x = SUBREG_REG (x);
4807 /* In case the SUBREG is not of a register, don't optimize */
4808 if (GET_CODE (x) != REG)
4810 count_reg_references (x);
4811 return;
4814 /* ... fall through ... */
4816 case REG:
4817 if (REGNO (x) >= FIRST_PSEUDO_REGISTER)
4818 REG_N_REFS (REGNO (x)) += loop_depth;
4819 return;
4821 case SET:
4823 register rtx testreg = SET_DEST (x);
4824 int mark_dest = 0;
4826 /* If storing into MEM, don't show it as being used. But do
4827 show the address as being used. */
4828 if (GET_CODE (testreg) == MEM)
4830 count_reg_references (XEXP (testreg, 0));
4831 count_reg_references (SET_SRC (x));
4832 return;
4835 /* Storing in STRICT_LOW_PART is like storing in a reg
4836 in that this SET might be dead, so ignore it in TESTREG.
4837 but in some other ways it is like using the reg.
4839 Storing in a SUBREG or a bit field is like storing the entire
4840 register in that if the register's value is not used
4841 then this SET is not needed. */
4842 while (GET_CODE (testreg) == STRICT_LOW_PART
4843 || GET_CODE (testreg) == ZERO_EXTRACT
4844 || GET_CODE (testreg) == SIGN_EXTRACT
4845 || GET_CODE (testreg) == SUBREG)
4847 /* Modifying a single register in an alternate mode
4848 does not use any of the old value. But these other
4849 ways of storing in a register do use the old value. */
4850 if (GET_CODE (testreg) == SUBREG
4851 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
4853 else
4854 mark_dest = 1;
4856 testreg = XEXP (testreg, 0);
4859 /* If this is a store into a register,
4860 recursively scan the value being stored. */
4862 if ((GET_CODE (testreg) == PARALLEL
4863 && GET_MODE (testreg) == BLKmode)
4864 || GET_CODE (testreg) == REG)
4866 count_reg_references (SET_SRC (x));
4867 if (mark_dest)
4868 count_reg_references (SET_DEST (x));
4869 return;
4872 break;
4874 default:
4875 break;
4878 /* Recursively scan the operands of this expression. */
4881 register char *fmt = GET_RTX_FORMAT (code);
4882 register int i;
4884 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4886 if (fmt[i] == 'e')
4888 /* Tail recursive case: save a function call level. */
4889 if (i == 0)
4891 x = XEXP (x, 0);
4892 goto retry;
4894 count_reg_references (XEXP (x, i));
4896 else if (fmt[i] == 'E')
4898 register int j;
4899 for (j = 0; j < XVECLEN (x, i); j++)
4900 count_reg_references (XVECEXP (x, i, j));
4906 /* Recompute register set/reference counts immediately prior to register
4907 allocation.
4909 This avoids problems with set/reference counts changing to/from values
4910 which have special meanings to the register allocators.
4912 Additionally, the reference counts are the primary component used by the
4913 register allocators to prioritize pseudos for allocation to hard regs.
4914 More accurate reference counts generally lead to better register allocation.
4916 F is the first insn to be scanned.
4917 LOOP_STEP denotes how much loop_depth should be incremented per
4918 loop nesting level in order to increase the ref count more for references
4919 in a loop.
4921 It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and
4922 possibly other information which is used by the register allocators. */
4924 void
4925 recompute_reg_usage (f, loop_step)
4926 rtx f;
4927 int loop_step;
4929 rtx insn;
4930 int i, max_reg;
4932 /* Clear out the old data. */
4933 max_reg = max_reg_num ();
4934 for (i = FIRST_PSEUDO_REGISTER; i < max_reg; i++)
4936 REG_N_SETS (i) = 0;
4937 REG_N_REFS (i) = 0;
4940 /* Scan each insn in the chain and count how many times each register is
4941 set/used. */
4942 loop_depth = 1;
4943 for (insn = f; insn; insn = NEXT_INSN (insn))
4945 /* Keep track of loop depth. */
4946 if (GET_CODE (insn) == NOTE)
4948 /* Look for loop boundaries. */
4949 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END)
4950 loop_depth -= loop_step;
4951 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
4952 loop_depth += loop_step;
4954 /* If we have LOOP_DEPTH == 0, there has been a bookkeeping error.
4955 Abort now rather than setting register status incorrectly. */
4956 if (loop_depth == 0)
4957 abort ();
4959 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
4961 rtx links;
4963 /* This call will increment REG_N_SETS for each SET or CLOBBER
4964 of a register in INSN. It will also increment REG_N_REFS
4965 by the loop depth for each set of a register in INSN. */
4966 count_reg_sets (PATTERN (insn));
4968 /* count_reg_sets does not detect autoincrement address modes, so
4969 detect them here by looking at the notes attached to INSN. */
4970 for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
4972 if (REG_NOTE_KIND (links) == REG_INC)
4973 /* Count (weighted) references, stores, etc. This counts a
4974 register twice if it is modified, but that is correct. */
4975 REG_N_SETS (REGNO (XEXP (links, 0)))++;
4978 /* This call will increment REG_N_REFS by the current loop depth for
4979 each reference to a register in INSN. */
4980 count_reg_references (PATTERN (insn));
4982 /* count_reg_references will not include counts for arguments to
4983 function calls, so detect them here by examining the
4984 CALL_INSN_FUNCTION_USAGE data. */
4985 if (GET_CODE (insn) == CALL_INSN)
4987 rtx note;
4989 for (note = CALL_INSN_FUNCTION_USAGE (insn);
4990 note;
4991 note = XEXP (note, 1))
4992 if (GET_CODE (XEXP (note, 0)) == USE)
4993 count_reg_references (SET_DEST (XEXP (note, 0)));
4999 /* Record INSN's block as BB. */
5001 void
5002 set_block_for_insn (insn, bb)
5003 rtx insn;
5004 basic_block bb;
5006 size_t uid = INSN_UID (insn);
5007 if (uid >= basic_block_for_insn->num_elements)
5009 int new_size;
5011 /* Add one-eighth the size so we don't keep calling xrealloc. */
5012 new_size = uid + (uid + 7) / 8;
5014 VARRAY_GROW (basic_block_for_insn, new_size);
5016 VARRAY_BB (basic_block_for_insn, uid) = bb;
5019 /* Record INSN's block number as BB. */
5020 /* ??? This has got to go. */
5022 void
5023 set_block_num (insn, bb)
5024 rtx insn;
5025 int bb;
5027 set_block_for_insn (insn, BASIC_BLOCK (bb));
5030 /* Verify the CFG consistency. This function check some CFG invariants and
5031 aborts when something is wrong. Hope that this function will help to
5032 convert many optimization passes to preserve CFG consistent.
5034 Currently it does following checks:
5036 - test head/end pointers
5037 - overlapping of basic blocks
5038 - edge list corectness
5039 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
5040 - tails of basic blocks (ensure that boundary is necesary)
5041 - scans body of the basic block for JUMP_INSN, CODE_LABEL
5042 and NOTE_INSN_BASIC_BLOCK
5043 - check that all insns are in the basic blocks
5044 (except the switch handling code, barriers and notes)
5046 In future it can be extended check a lot of other stuff as well
5047 (reachability of basic blocks, life information, etc. etc.). */
5049 void
5050 verify_flow_info ()
5052 const int max_uid = get_max_uid ();
5053 const rtx rtx_first = get_insns ();
5054 basic_block *bb_info;
5055 rtx x;
5056 int i;
5058 bb_info = (basic_block *) alloca (max_uid * sizeof (basic_block));
5059 memset (bb_info, 0, max_uid * sizeof (basic_block));
5061 /* First pass check head/end pointers and set bb_info array used by
5062 later passes. */
5063 for (i = n_basic_blocks - 1; i >= 0; i--)
5065 basic_block bb = BASIC_BLOCK (i);
5067 /* Check the head pointer and make sure that it is pointing into
5068 insn list. */
5069 for (x = rtx_first; x != NULL_RTX; x = NEXT_INSN (x))
5070 if (x == bb->head)
5071 break;
5072 if (!x)
5074 fatal ("verify_flow_info: Head insn %d for block %d not found in the insn stream.\n",
5075 INSN_UID (bb->head), bb->index);
5078 /* Check the end pointer and make sure that it is pointing into
5079 insn list. */
5080 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
5082 if (bb_info[INSN_UID (x)] != NULL)
5084 fatal ("verify_flow_info: Insn %d is in multiple basic blocks (%d and %d)",
5085 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
5087 bb_info[INSN_UID (x)] = bb;
5089 if (x == bb->end)
5090 break;
5092 if (!x)
5094 fatal ("verify_flow_info: End insn %d for block %d not found in the insn stream.\n",
5095 INSN_UID (bb->end), bb->index);
5099 /* Now check the basic blocks (boundaries etc.) */
5100 for (i = n_basic_blocks - 1; i >= 0; i--)
5102 basic_block bb = BASIC_BLOCK (i);
5103 /* Check corectness of edge lists */
5104 edge e;
5106 e = bb->succ;
5107 while (e)
5109 if (e->src != bb)
5111 fprintf (stderr, "verify_flow_info: Basic block %d succ edge is corrupted\n",
5112 bb->index);
5113 fprintf (stderr, "Predecessor: ");
5114 dump_edge_info (stderr, e, 0);
5115 fprintf (stderr, "\nSuccessor: ");
5116 dump_edge_info (stderr, e, 1);
5117 fflush (stderr);
5118 abort ();
5120 if (e->dest != EXIT_BLOCK_PTR)
5122 edge e2 = e->dest->pred;
5123 while (e2 && e2 != e)
5124 e2 = e2->pred_next;
5125 if (!e2)
5127 fatal ("verify_flow_info: Basic block %i edge lists are corrupted\n",
5128 bb->index);
5131 e = e->succ_next;
5134 e = bb->pred;
5135 while (e)
5137 if (e->dest != bb)
5139 fprintf (stderr, "verify_flow_info: Basic block %d pred edge is corrupted\n",
5140 bb->index);
5141 fprintf (stderr, "Predecessor: ");
5142 dump_edge_info (stderr, e, 0);
5143 fprintf (stderr, "\nSuccessor: ");
5144 dump_edge_info (stderr, e, 1);
5145 fflush (stderr);
5146 abort ();
5148 if (e->src != ENTRY_BLOCK_PTR)
5150 edge e2 = e->src->succ;
5151 while (e2 && e2 != e)
5152 e2 = e2->succ_next;
5153 if (!e2)
5155 fatal ("verify_flow_info: Basic block %i edge lists are corrupted\n",
5156 bb->index);
5159 e = e->pred_next;
5162 /* OK pointers are correct. Now check the header of basic
5163 block. It ought to contain optional CODE_LABEL followed
5164 by NOTE_BASIC_BLOCK. */
5165 x = bb->head;
5166 if (GET_CODE (x) == CODE_LABEL)
5168 if (bb->end == x)
5170 fatal ("verify_flow_info: Basic block contains only CODE_LABEL and no NOTE_INSN_BASIC_BLOCK note\n");
5172 x = NEXT_INSN (x);
5174 if (GET_CODE (x) != NOTE
5175 || NOTE_LINE_NUMBER (x) != NOTE_INSN_BASIC_BLOCK
5176 || NOTE_BASIC_BLOCK (x) != bb)
5178 fatal ("verify_flow_info: NOTE_INSN_BASIC_BLOCK is missing for block %d\n",
5179 bb->index);
5182 if (bb->end == x)
5184 /* Do checks for empty blocks here */
5186 else
5188 x = NEXT_INSN (x);
5189 while (x)
5191 if (GET_CODE (x) == NOTE
5192 && NOTE_LINE_NUMBER (x) == NOTE_INSN_BASIC_BLOCK)
5194 fatal ("verify_flow_info: NOTE_INSN_BASIC_BLOCK %d in the middle of basic block %d\n",
5195 INSN_UID (x), bb->index);
5198 if (x == bb->end)
5199 break;
5201 if (GET_CODE (x) == JUMP_INSN
5202 || GET_CODE (x) == CODE_LABEL
5203 || GET_CODE (x) == BARRIER)
5205 fatal_insn ("verify_flow_info: Incorrect insn in the middle of basic block %d\n",
5206 x, bb->index);
5209 x = NEXT_INSN (x);
5214 x = rtx_first;
5215 while (x)
5217 if (!bb_info[INSN_UID (x)])
5219 switch (GET_CODE (x))
5221 case BARRIER:
5222 case NOTE:
5223 break;
5225 case CODE_LABEL:
5226 /* An addr_vec is placed outside any block block. */
5227 if (NEXT_INSN (x)
5228 && GET_CODE (NEXT_INSN (x)) == JUMP_INSN
5229 && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
5230 || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC))
5232 x = NEXT_INSN (x);
5235 /* But in any case, non-deletable labels can appear anywhere. */
5236 break;
5238 default:
5239 fatal_insn ("verify_flow_info: Insn outside basic block\n", x);
5243 x = NEXT_INSN (x);