/* Data flow analysis for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
/* This file contains the data flow analysis pass of the compiler.  It
   computes data flow information which tells combine_instructions
   which insns to consider combining and controls register allocation.

   Additional data flow information that is too bulky to record is
   generated during the analysis, and is used at that time to create
   autoincrement and autodecrement addressing.

   The first step is dividing the function into basic blocks.
   find_basic_blocks does this.  Then life_analysis determines
   where each register is live and where it is dead.

   ** find_basic_blocks **

   find_basic_blocks divides the current function's rtl into basic
   blocks and constructs the CFG.  The blocks are recorded in the
   basic_block_info array; the CFG exists in the edge structures
   referenced by the blocks.

   find_basic_blocks also finds any unreachable loops and deletes them.

   ** life_analysis **

   life_analysis is called immediately after find_basic_blocks.
   It uses the basic block information to determine where each
   hard or pseudo register is live.

   ** live-register info **

   The information about where each register is live is in two parts:
   the REG_NOTES of insns, and the vector basic_block->global_live_at_start.

   basic_block->global_live_at_start has an element for each basic
   block, and the element is a bit-vector with a bit for each hard or
   pseudo register.  The bit is 1 if the register is live at the
   beginning of the basic block.

   Two types of elements can be added to an insn's REG_NOTES.
   A REG_DEAD note is added to an insn's REG_NOTES for any register
   that meets both of two conditions:  The value in the register is not
   needed in subsequent insns and the insn does not replace the value in
   the register (in the case of multi-word hard registers, the value in
   each register must be replaced by the insn to avoid a REG_DEAD note).

   In the vast majority of cases, an object in a REG_DEAD note will be
   used somewhere in the insn.  The (rare) exception to this is if an
   insn uses a multi-word hard register and only some of the registers are
   needed in subsequent insns.  In that case, REG_DEAD notes will be
   provided for those hard registers that are not subsequently needed.
   Partial REG_DEAD notes of this type do not occur when an insn sets
   only some of the hard registers used in such a multi-word operand;
   omitting REG_DEAD notes for objects stored in an insn is optional and
   the desire to do so does not justify the complexity of the partial
   REG_DEAD notes.

   REG_UNUSED notes are added for each register that is set by the insn
   but is unused subsequently (if every register set by the insn is unused
   and the insn does not reference memory or have some other side-effect,
   the insn is deleted instead).  If only part of a multi-word hard
   register is used in a subsequent insn, REG_UNUSED notes are made for
   the parts that will not be used.
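
   For example (an illustrative sketch, not the output of any particular
   target), an insn that makes the last use of pseudo 117 while setting
   pseudo 118 might be annotated as

	(insn 42 41 43 (set (reg:SI 118)
		(plus:SI (reg:SI 117) (const_int 4))) ...
	   (expr_list:REG_DEAD (reg:SI 117) (nil)))

   and if the value computed into 118 were never needed, the insn would
   instead receive a REG_UNUSED note for (reg:SI 118), or be deleted
   outright if it had no other side effects.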

   To determine which registers are live after any insn, one can
   start from the beginning of the basic block and scan insns, noting
   which registers are set by each insn and which die there.
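
   As a hedged sketch of that scan (pseudo-code, not an interface of this
   file; propagate_block and struct propagate_block_info below implement
   the real thing):

	live = COPY (bb->global_live_at_start);
	for (insn = bb->head; ; insn = NEXT_INSN (insn))
	  {
	    live -= registers mentioned in INSN's REG_DEAD notes;
	    live += registers set by INSN;
	    live -= registers mentioned in INSN's REG_UNUSED notes;
	    if (insn == bb->end)
	      break;
	  }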

   ** Other actions of life_analysis **

   life_analysis sets up the LOG_LINKS fields of insns because the
   information needed to do so is readily available.

   life_analysis deletes insns whose only effect is to store a value
   that is never used.

   life_analysis notices cases where a reference to a register as
   a memory address can be combined with a preceding or following
   incrementation or decrementation of the register.  The separate
   instruction to increment or decrement is deleted and the address
   is changed to a POST_INC or similar rtx.

   Each time an incrementing or decrementing address is created,
   a REG_INC element is added to the insn's REG_NOTES list.

   life_analysis fills in certain vectors containing information about
   register usage: REG_N_REFS, REG_N_DEATHS, REG_N_SETS, REG_LIVE_LENGTH,
   REG_N_CALLS_CROSSED and REG_BASIC_BLOCK.

   life_analysis sets current_function_sp_is_unchanging if the function
   doesn't modify the stack pointer.  */
/* TODO:

   Split out from life_analysis:
	- local property discovery (bb->local_live, bb->local_set)
	- global property computation
	- log links creation
	- pre/post modify transformation  */
#include "config.h"
#include "system.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-config.h"
#include "regs.h"
#include "flags.h"
#include "output.h"
#include "function.h"
#include "except.h"
#include "toplev.h"
#include "recog.h"
#include "insn-flags.h"
#include "expr.h"

#include "obstack.h"
#include "splay-tree.h"

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free
/* EXIT_IGNORE_STACK should be nonzero if, when returning from a function,
   the stack pointer does not matter.  The value is tested only in
   functions that have frame pointers.
   No definition is equivalent to always zero.  */
#ifndef EXIT_IGNORE_STACK
#define EXIT_IGNORE_STACK 0
#endif

#ifndef HAVE_epilogue
#define HAVE_epilogue 0
#endif
#ifndef HAVE_prologue
#define HAVE_prologue 0
#endif
#ifndef HAVE_sibcall_epilogue
#define HAVE_sibcall_epilogue 0
#endif
/* The contents of the current function definition are allocated
   in this obstack, and all are freed at the end of the function.
   For top-level functions, this is temporary_obstack.
   Separate obstacks are made for nested functions.  */

extern struct obstack *function_obstack;

/* Number of basic blocks in the current function.  */

int n_basic_blocks;

/* Number of edges in the current function.  */

int n_edges;

/* The basic block array.  */

varray_type basic_block_info;
/* The special entry and exit blocks.  */

struct basic_block_def entry_exit_blocks[2]
= {{NULL,			/* head */
    NULL,			/* end */
    NULL,			/* pred */
    NULL,			/* succ */
    NULL,			/* local_set */
    NULL,			/* global_live_at_start */
    NULL,			/* global_live_at_end */
    NULL,			/* aux */
    ENTRY_BLOCK,		/* index */
    0,				/* loop_depth */
    -1, -1,			/* eh_beg, eh_end */
    0				/* count */
   },
   {
    NULL,			/* head */
    NULL,			/* end */
    NULL,			/* pred */
    NULL,			/* succ */
    NULL,			/* local_set */
    NULL,			/* global_live_at_start */
    NULL,			/* global_live_at_end */
    NULL,			/* aux */
    EXIT_BLOCK,			/* index */
    0,				/* loop_depth */
    -1, -1,			/* eh_beg, eh_end */
    0				/* count */
   }
};
/* Nonzero if the second flow pass has completed.  */
int flow2_completed;

/* Maximum register number used in this function, plus one.  */

int max_regno;

/* Indexed by n, giving various register information.  */

varray_type reg_n_info;

/* Size of a regset for the current function,
   in (1) bytes and (2) elements.  */

int regset_bytes;
int regset_size;

/* Regset of regs live when calls to `setjmp'-like functions happen.  */
/* ??? Does this exist only for the setjmp-clobbered warning message?  */

regset regs_live_at_setjmp;

/* List made of EXPR_LIST rtx's which gives pairs of pseudo registers
   that have to go in the same hard reg.
   The first two regs in the list are a pair, and the next two
   are another pair, etc.  */
rtx regs_may_share;

/* Set of registers that may be eliminable.  These are handled specially
   in updating regs_ever_live.  */

static HARD_REG_SET elim_reg_set;

/* The basic block structure for every insn, indexed by uid.  */

varray_type basic_block_for_insn;

/* The labels mentioned in non-jump rtl.  Valid during find_basic_blocks.  */
/* ??? Should probably be using LABEL_NUSES instead.  It would take a
   bit of surgery to be able to use or co-opt the routines in jump.  */

static rtx label_value_list;
static rtx tail_recursion_label_list;
/* Holds information for tracking conditional register life information.  */
struct reg_cond_life_info
{
  /* An EXPR_LIST of conditions under which a register is dead.  */
  rtx condition;

  /* ??? Could store mask of bytes that are dead, so that we could finally
     track lifetimes of multi-word registers accessed via subregs.  */
};
/* For use in communicating between propagate_block and its subroutines.
   Holds all information needed to compute life and def-use information.  */

struct propagate_block_info
{
  /* The basic block we're considering.  */
  basic_block bb;

  /* Bit N is set if register N is conditionally or unconditionally live.  */
  regset reg_live;

  /* Bit N is set if register N is set this insn.  */
  regset new_set;

  /* Element N is the next insn that uses (hard or pseudo) register N
     within the current basic block; or zero, if there is no such insn.  */
  rtx *reg_next_use;

  /* Contains a list of all the MEMs we are tracking for dead store
     elimination.  */
  rtx mem_set_list;

  /* If non-null, record the set of registers set in the basic block.  */
  regset local_set;

#ifdef HAVE_conditional_execution
  /* Indexed by register number, holds a reg_cond_life_info for each
     register that is not unconditionally live or dead.  */
  splay_tree reg_cond_dead;

  /* Bit N is set if register N is in an expression in reg_cond_dead.  */
  regset reg_cond_reg;
#endif

  /* Non-zero if the value of CC0 is live.  */
  int cc0_live;

  /* Flags controlling the set of information propagate_block collects.  */
  int flags;
};
/* Forward declarations */
static int count_basic_blocks		PARAMS ((rtx));
static void find_basic_blocks_1		PARAMS ((rtx));
static rtx find_label_refs		PARAMS ((rtx, rtx));
static void clear_edges			PARAMS ((void));
static void make_edges			PARAMS ((rtx));
static void make_label_edge		PARAMS ((sbitmap *, basic_block,
						 rtx, int));
static void make_eh_edge		PARAMS ((sbitmap *, eh_nesting_info *,
						 basic_block, rtx, int));
static void mark_critical_edges		PARAMS ((void));
static void move_stray_eh_region_notes	PARAMS ((void));
static void record_active_eh_regions	PARAMS ((rtx));

static void commit_one_edge_insertion	PARAMS ((edge));

static void delete_unreachable_blocks	PARAMS ((void));
static void delete_eh_regions		PARAMS ((void));
static int can_delete_note_p		PARAMS ((rtx));
static void expunge_block		PARAMS ((basic_block));
static int can_delete_label_p		PARAMS ((rtx));
static int tail_recursion_label_p	PARAMS ((rtx));
static int merge_blocks_move_predecessor_nojumps PARAMS ((basic_block,
							  basic_block));
static int merge_blocks_move_successor_nojumps PARAMS ((basic_block,
							basic_block));
static int merge_blocks			PARAMS ((edge, basic_block,
						 basic_block));
static void try_merge_blocks		PARAMS ((void));
static void tidy_fallthru_edges		PARAMS ((void));
static int verify_wide_reg_1		PARAMS ((rtx *, void *));
static void verify_wide_reg		PARAMS ((int, rtx, rtx));
static void verify_local_live_at_start	PARAMS ((regset, basic_block));
static int set_noop_p			PARAMS ((rtx));
static int noop_move_p			PARAMS ((rtx));
static void delete_noop_moves		PARAMS ((rtx));
static void notice_stack_pointer_modification_1 PARAMS ((rtx, rtx, void *));
static void notice_stack_pointer_modification PARAMS ((rtx));
static void mark_reg			PARAMS ((rtx, void *));
static void mark_regs_live_at_end	PARAMS ((regset));
static int set_phi_alternative_reg	PARAMS ((rtx, int, int, void *));
static void calculate_global_regs_live	PARAMS ((sbitmap, sbitmap, int));
static void propagate_block_delete_insn PARAMS ((basic_block, rtx));
static rtx propagate_block_delete_libcall PARAMS ((basic_block, rtx, rtx));
static int insn_dead_p			PARAMS ((struct propagate_block_info *,
						 rtx, int, rtx));
static int libcall_dead_p		PARAMS ((struct propagate_block_info *,
						 rtx, rtx, rtx));
static void mark_set_regs		PARAMS ((struct propagate_block_info *,
						 rtx, rtx));
static void mark_set_1			PARAMS ((struct propagate_block_info *,
						 enum rtx_code, rtx, rtx,
						 rtx, int));
#ifdef HAVE_conditional_execution
static int mark_regno_cond_dead		PARAMS ((struct propagate_block_info *,
						 int, rtx));
static void free_reg_cond_life_info	PARAMS ((splay_tree_value));
static int flush_reg_cond_reg_1		PARAMS ((splay_tree_node, void *));
static void flush_reg_cond_reg		PARAMS ((struct propagate_block_info *,
						 int));
static rtx ior_reg_cond			PARAMS ((rtx, rtx));
static rtx not_reg_cond			PARAMS ((rtx));
static rtx nand_reg_cond		PARAMS ((rtx, rtx));
#endif
#ifdef AUTO_INC_DEC
static void find_auto_inc		PARAMS ((struct propagate_block_info *,
						 rtx, rtx));
static int try_pre_increment_1		PARAMS ((struct propagate_block_info *,
						 rtx));
static int try_pre_increment		PARAMS ((rtx, rtx, HOST_WIDE_INT));
#endif
static void mark_used_reg		PARAMS ((struct propagate_block_info *,
						 rtx, rtx, rtx));
static void mark_used_regs		PARAMS ((struct propagate_block_info *,
						 rtx, rtx, rtx));
void dump_flow_info			PARAMS ((FILE *));
void debug_flow_info			PARAMS ((void));
static void dump_edge_info		PARAMS ((FILE *, edge, int));

static void invalidate_mems_from_autoinc PARAMS ((struct propagate_block_info *,
						  rtx));
static void remove_fake_successors	PARAMS ((basic_block));
static void flow_nodes_print		PARAMS ((const char *, const sbitmap, FILE *));
static void flow_exits_print		PARAMS ((const char *, const edge *, int, FILE *));
static void flow_loops_cfg_dump		PARAMS ((const struct loops *, FILE *));
static int flow_loop_nested_p		PARAMS ((struct loop *, struct loop *));
static int flow_loop_exits_find		PARAMS ((const sbitmap, edge **));
static int flow_loop_nodes_find		PARAMS ((basic_block, basic_block, sbitmap));
static int flow_depth_first_order_compute PARAMS ((int *));
static basic_block flow_loop_pre_header_find PARAMS ((basic_block, const sbitmap *));
static void flow_loop_tree_node_add	PARAMS ((struct loop *, struct loop *));
static void flow_loops_tree_build	PARAMS ((struct loops *));
static int flow_loop_level_compute	PARAMS ((struct loop *, int));
static int flow_loops_level_compute	PARAMS ((struct loops *));
/* Find basic blocks of the current function.
   F is the first insn of the function and NREGS the number of register
   numbers in use.  */

void
find_basic_blocks (f, nregs, file)
     rtx f;
     int nregs ATTRIBUTE_UNUSED;
     FILE *file ATTRIBUTE_UNUSED;
{
  int max_uid;

  /* Flush out existing data.  */
  if (basic_block_info != NULL)
    {
      int i;

      clear_edges ();

      /* Clear bb->aux on all extant basic blocks.  We'll use this as a
	 tag for reuse during create_basic_block, just in case some pass
	 copies around basic block notes improperly.  */
      for (i = 0; i < n_basic_blocks; ++i)
	BASIC_BLOCK (i)->aux = NULL;

      VARRAY_FREE (basic_block_info);
    }

  n_basic_blocks = count_basic_blocks (f);

  /* Size the basic block table.  The actual structures will be allocated
     by find_basic_blocks_1, since we want to keep the structure pointers
     stable across calls to find_basic_blocks.  */
  /* ??? This whole issue would be much simpler if we called find_basic_blocks
     exactly once, and thereafter we don't have a single long chain of
     instructions at all until close to the end of compilation when we
     actually lay them out.  */

  VARRAY_BB_INIT (basic_block_info, n_basic_blocks, "basic_block_info");

  find_basic_blocks_1 (f);

  /* Record the block to which an insn belongs.  */
  /* ??? This should be done another way, by which (perhaps) a label is
     tagged directly with the basic block that it starts.  It is used for
     more than that currently, but IMO that is the only valid use.  */

  max_uid = get_max_uid ();
#ifdef AUTO_INC_DEC
  /* Leave space for insns life_analysis makes in some cases for auto-inc.
     These cases are rare, so we don't need too much space.  */
  max_uid += max_uid / 10;
#endif

  compute_bb_for_insn (max_uid);

  /* Discover the edges of our cfg.  */
  record_active_eh_regions (f);
  make_edges (label_value_list);

  /* Do very simple cleanup now, for the benefit of code that runs between
     here and cleanup_cfg, e.g. thread_prologue_and_epilogue_insns.  */
  tidy_fallthru_edges ();

  mark_critical_edges ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
}
/* Count the basic blocks of the function.  */

static int
count_basic_blocks (f)
     rtx f;
{
  register rtx insn;
  register RTX_CODE prev_code;
  register int count = 0;
  int eh_region = 0;
  int call_had_abnormal_edge = 0;

  prev_code = JUMP_INSN;
  for (insn = f; insn; insn = NEXT_INSN (insn))
    {
      register RTX_CODE code = GET_CODE (insn);

      if (code == CODE_LABEL
	  || (GET_RTX_CLASS (code) == 'i'
	      && (prev_code == JUMP_INSN
		  || prev_code == BARRIER
		  || (prev_code == CALL_INSN && call_had_abnormal_edge))))
	count++;

      /* Record whether this call created an edge.  */
      if (code == CALL_INSN)
	{
	  rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
	  int region = (note ? INTVAL (XEXP (note, 0)) : 1);

	  call_had_abnormal_edge = 0;

	  /* If there is an EH region or rethrow, we have an edge.  */
	  if ((eh_region && region > 0)
	      || find_reg_note (insn, REG_EH_RETHROW, NULL_RTX))
	    call_had_abnormal_edge = 1;
	  else if (nonlocal_goto_handler_labels && region >= 0)
	    /* If there is a nonlocal goto label and the specified
	       region number isn't -1, we have an edge.  (0 means
	       no throw, but might have a nonlocal goto).  */
	    call_had_abnormal_edge = 1;
	}

      if (code != NOTE)
	prev_code = code;
      else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
	++eh_region;
      else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
	--eh_region;
    }

  /* The rest of the compiler works a bit smoother when we don't have to
     check for the edge case of do-nothing functions with no basic blocks.  */
  if (count == 0)
    {
      emit_insn (gen_rtx_USE (VOIDmode, const0_rtx));
      count = 1;
    }

  return count;
}
/* Scan a list of insns for labels referred to other than by jumps.
   This is used to scan the alternatives of a call placeholder.  */

static rtx
find_label_refs (f, lvl)
     rtx f;
     rtx lvl;
{
  rtx insn;

  for (insn = f; insn; insn = NEXT_INSN (insn))
    if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
      {
	rtx note;

	/* Make a list of all labels referred to other than by jumps
	   (which just don't have the REG_LABEL notes).

	   Make a special exception for labels followed by an ADDR*VEC,
	   as this would be a part of the tablejump setup code.

	   Make a special exception for the eh_return_stub_label, which
	   we know isn't part of any otherwise visible control flow.  */

	for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	  if (REG_NOTE_KIND (note) == REG_LABEL)
	    {
	      rtx lab = XEXP (note, 0), next;

	      if (lab == eh_return_stub_label)
		;
	      else if ((next = next_nonnote_insn (lab)) != NULL
		       && GET_CODE (next) == JUMP_INSN
		       && (GET_CODE (PATTERN (next)) == ADDR_VEC
			   || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
		;
	      else if (GET_CODE (lab) == NOTE)
		;
	      else
		lvl = alloc_EXPR_LIST (0, XEXP (note, 0), lvl);
	    }
      }

  return lvl;
}
/* Find all basic blocks of the function whose first insn is F.

   Collect and return a list of labels whose addresses are taken.  This
   will be used in make_edges for use with computed gotos.  */

static void
find_basic_blocks_1 (f)
     rtx f;
{
  register rtx insn, next;
  int i = 0;
  rtx bb_note = NULL_RTX;
  rtx eh_list = NULL_RTX;
  rtx lvl = NULL_RTX;
  rtx trll = NULL_RTX;
  rtx head = NULL_RTX;
  rtx end = NULL_RTX;

  /* We process the instructions in a slightly different way than we did
     previously.  This is so that we see a NOTE_BASIC_BLOCK after we have
     closed out the previous block, so that it gets attached at the proper
     place.  Since this form should be equivalent to the previous,
     count_basic_blocks continues to use the old form as a check.  */

  for (insn = f; insn; insn = next)
    {
      enum rtx_code code = GET_CODE (insn);

      next = NEXT_INSN (insn);

      switch (code)
	{
	case NOTE:
	  {
	    int kind = NOTE_LINE_NUMBER (insn);

	    /* Keep a LIFO list of the currently active exception notes.  */
	    if (kind == NOTE_INSN_EH_REGION_BEG)
	      eh_list = alloc_INSN_LIST (insn, eh_list);
	    else if (kind == NOTE_INSN_EH_REGION_END)
	      {
		rtx t = eh_list;

		eh_list = XEXP (eh_list, 1);
		free_INSN_LIST_node (t);
	      }

	    /* Look for basic block notes with which to keep the
	       basic_block_info pointers stable.  Unthread the note now;
	       we'll put it back at the right place in create_basic_block.
	       Or not at all if we've already found a note in this block.  */
	    else if (kind == NOTE_INSN_BASIC_BLOCK)
	      {
		if (bb_note == NULL_RTX)
		  bb_note = insn;
		else
		  next = flow_delete_insn (insn);
	      }
	    break;
	  }

	case CODE_LABEL:
	  /* A basic block starts at a label.  If we've closed one off due
	     to a barrier or some such, no need to do it again.  */
	  if (head != NULL_RTX)
	    {
	      /* While we now have edge lists with which other portions of
		 the compiler might determine a call ending a basic block
		 does not imply an abnormal edge, it will be a bit before
		 everything can be updated.  So continue to emit a noop at
		 the end of such a block.  */
	      if (GET_CODE (end) == CALL_INSN && ! SIBLING_CALL_P (end))
		{
		  rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
		  end = emit_insn_after (nop, end);
		}

	      create_basic_block (i++, head, end, bb_note);
	      bb_note = NULL_RTX;
	    }

	  head = end = insn;
	  break;

	case JUMP_INSN:
	  /* A basic block ends at a jump.  */
	  if (head == NULL_RTX)
	    head = insn;
	  else
	    {
	      /* ??? Make a special check for table jumps.  The way this
		 happens is truly and amazingly gross.  We are about to
		 create a basic block that contains just a code label and
		 an addr*vec jump insn.  Worse, an addr_diff_vec creates
		 its own natural loop.

		 Prevent this bit of brain damage, pasting things together
		 correctly in make_edges.

		 The correct solution involves emitting the table directly
		 on the tablejump instruction as a note, or JUMP_LABEL.  */

	      if (GET_CODE (PATTERN (insn)) == ADDR_VEC
		  || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
		{
		  head = end = NULL;
		  n_basic_blocks--;
		  break;
		}
	    }
	  end = insn;
	  goto new_bb_inclusive;

	case BARRIER:
	  /* A basic block ends at a barrier.  It may be that an unconditional
	     jump already closed the basic block -- no need to do it again.  */
	  if (head == NULL_RTX)
	    break;

	  /* While we now have edge lists with which other portions of the
	     compiler might determine a call ending a basic block does not
	     imply an abnormal edge, it will be a bit before everything can
	     be updated.  So continue to emit a noop at the end of such a
	     block.  */
	  if (GET_CODE (end) == CALL_INSN && ! SIBLING_CALL_P (end))
	    {
	      rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
	      end = emit_insn_after (nop, end);
	    }
	  goto new_bb_exclusive;

	case CALL_INSN:
	  {
	    /* Record whether this call created an edge.  */
	    rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
	    int region = (note ? INTVAL (XEXP (note, 0)) : 1);
	    int call_has_abnormal_edge = 0;

	    if (GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
	      {
		/* Scan each of the alternatives for label refs.  */
		lvl = find_label_refs (XEXP (PATTERN (insn), 0), lvl);
		lvl = find_label_refs (XEXP (PATTERN (insn), 1), lvl);
		lvl = find_label_refs (XEXP (PATTERN (insn), 2), lvl);
		/* Record its tail recursion label, if any.  */
		if (XEXP (PATTERN (insn), 3) != NULL_RTX)
		  trll = alloc_EXPR_LIST (0, XEXP (PATTERN (insn), 3), trll);
	      }

	    /* If there is an EH region or rethrow, we have an edge.  */
	    if ((eh_list && region > 0)
		|| find_reg_note (insn, REG_EH_RETHROW, NULL_RTX))
	      call_has_abnormal_edge = 1;
	    else if (nonlocal_goto_handler_labels && region >= 0)
	      /* If there is a nonlocal goto label and the specified
		 region number isn't -1, we have an edge.  (0 means
		 no throw, but might have a nonlocal goto).  */
	      call_has_abnormal_edge = 1;

	    /* A basic block ends at a call that can either throw or
	       do a non-local goto.  */
	    if (call_has_abnormal_edge)
	      {
	      new_bb_inclusive:
		if (head == NULL_RTX)
		  head = insn;
		end = insn;

	      new_bb_exclusive:
		create_basic_block (i++, head, end, bb_note);
		head = end = NULL_RTX;
		bb_note = NULL_RTX;
		break;
	      }
	  }
	  /* FALLTHRU */

	default:
	  if (GET_RTX_CLASS (code) == 'i')
	    {
	      if (head == NULL_RTX)
		head = insn;
	      end = insn;
	    }
	  break;
	}

      if (GET_RTX_CLASS (code) == 'i')
	{
	  rtx note;

	  /* Make a list of all labels referred to other than by jumps
	     (which just don't have the REG_LABEL notes).

	     Make a special exception for labels followed by an ADDR*VEC,
	     as this would be a part of the tablejump setup code.

	     Make a special exception for the eh_return_stub_label, which
	     we know isn't part of any otherwise visible control flow.  */

	  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	    if (REG_NOTE_KIND (note) == REG_LABEL)
	      {
		rtx lab = XEXP (note, 0), next;

		if (lab == eh_return_stub_label)
		  ;
		else if ((next = next_nonnote_insn (lab)) != NULL
			 && GET_CODE (next) == JUMP_INSN
			 && (GET_CODE (PATTERN (next)) == ADDR_VEC
			     || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
		  ;
		else if (GET_CODE (lab) == NOTE)
		  ;
		else
		  lvl = alloc_EXPR_LIST (0, XEXP (note, 0), lvl);
	      }
	}
    }

  if (head != NULL_RTX)
    create_basic_block (i++, head, end, bb_note);
  else if (bb_note)
    flow_delete_insn (bb_note);

  if (i != n_basic_blocks)
    abort ();

  label_value_list = lvl;
  tail_recursion_label_list = trll;
}
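
/* An illustrative sketch (not the output of any target) of the block
   boundaries the loop above establishes:

	(code_label 10)		;; starts a new basic block
	(insn ...)
	(jump_insn ...)		;; ends the block
	(barrier)		;; ends a block if one is still open
	(code_label 11)		;; starts the next block
	(call_insn ...)		;; ends its block only if the call can
				;; throw or perform a nonlocal goto
*/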
/* Tidy the CFG by deleting unreachable code and whatnot.  */

void
cleanup_cfg (f)
     rtx f;
{
  delete_unreachable_blocks ();
  move_stray_eh_region_notes ();
  record_active_eh_regions (f);
  try_merge_blocks ();
  mark_critical_edges ();

  /* Kill the data we won't maintain.  */
  free_EXPR_LIST_list (&label_value_list);
  free_EXPR_LIST_list (&tail_recursion_label_list);
}
/* Create a new basic block consisting of the instructions between
   HEAD and END inclusive.  Reuses the note and basic block struct
   in BB_NOTE, if any.  */

void
create_basic_block (index, head, end, bb_note)
     int index;
     rtx head, end, bb_note;
{
  basic_block bb;

  if (bb_note
      && ! RTX_INTEGRATED_P (bb_note)
      && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
      && bb->aux == NULL)
    {
      /* If we found an existing note, thread it back onto the chain.  */

      rtx after;

      if (GET_CODE (head) == CODE_LABEL)
	after = head;
      else
	{
	  after = PREV_INSN (head);
	  head = bb_note;
	}

      if (after != bb_note && NEXT_INSN (after) != bb_note)
	reorder_insns (bb_note, bb_note, after);
    }
  else
    {
      /* Otherwise we must create a note and a basic block structure.
	 Since we allow basic block structs in rtl, give the struct
	 the same lifetime by allocating it off the function obstack
	 rather than using malloc.  */

      bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
      memset (bb, 0, sizeof (*bb));

      if (GET_CODE (head) == CODE_LABEL)
	bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
      else
	{
	  bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
	  head = bb_note;
	}
      NOTE_BASIC_BLOCK (bb_note) = bb;
    }

  /* Always include the bb note in the block.  */
  if (NEXT_INSN (end) == bb_note)
    end = bb_note;

  bb->head = head;
  bb->end = end;
  bb->index = index;
  BASIC_BLOCK (index) = bb;

  /* Tag the block so that we know it has been used when considering
     other basic block notes.  */
  bb->aux = bb;
}
/* Records the basic block struct in BB_FOR_INSN, for every instruction
   indexed by INSN_UID.  MAX is the size of the array.  */

void
compute_bb_for_insn (max)
     int max;
{
  int i;

  if (basic_block_for_insn)
    VARRAY_FREE (basic_block_for_insn);
  VARRAY_BB_INIT (basic_block_for_insn, max, "basic_block_for_insn");

  for (i = 0; i < n_basic_blocks; ++i)
    {
      basic_block bb = BASIC_BLOCK (i);
      rtx insn, end;

      end = bb->end;
      insn = bb->head;
      while (1)
	{
	  int uid = INSN_UID (insn);
	  if (uid < max)
	    VARRAY_BB (basic_block_for_insn, uid) = bb;
	  if (insn == end)
	    break;
	  insn = NEXT_INSN (insn);
	}
    }
}
/* Free the memory associated with the edge structures.  */

static void
clear_edges ()
{
  int i;
  edge n, e;

  for (i = 0; i < n_basic_blocks; ++i)
    {
      basic_block bb = BASIC_BLOCK (i);

      for (e = bb->succ; e; e = n)
	{
	  n = e->succ_next;
	  free (e);
	}

      bb->succ = 0;
      bb->pred = 0;
    }

  for (e = ENTRY_BLOCK_PTR->succ; e; e = n)
    {
      n = e->succ_next;
      free (e);
    }

  ENTRY_BLOCK_PTR->succ = 0;
  EXIT_BLOCK_PTR->pred = 0;

  n_edges = 0;
}
/* Identify the edges between basic blocks.

   NONLOCAL_LABEL_LIST is a list of non-local labels in the function.  Blocks
   that are otherwise unreachable may be reachable with a non-local goto.

   BB_EH_END is an array indexed by basic block number in which we record
   the list of exception regions active at the end of the basic block.  */

static void
make_edges (label_value_list)
     rtx label_value_list;
{
  int i;
  eh_nesting_info *eh_nest_info = init_eh_nesting_info ();
  sbitmap *edge_cache = NULL;

  /* Assume no computed jump; revise as we create edges.  */
  current_function_has_computed_jump = 0;

  /* Heavy use of computed goto in machine-generated code can lead to
     nearly fully-connected CFGs.  In that case we spend a significant
     amount of time searching the edge lists for duplicates.  */
  if (forced_labels || label_value_list)
    {
      edge_cache = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
      sbitmap_vector_zero (edge_cache, n_basic_blocks);
    }

  /* By nature of the way these get numbered, block 0 is always the entry.  */
  make_edge (edge_cache, ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU);

  for (i = 0; i < n_basic_blocks; ++i)
    {
      basic_block bb = BASIC_BLOCK (i);
      rtx insn, x;
      enum rtx_code code;
      int force_fallthru = 0;

      /* Examine the last instruction of the block, and discover the
	 ways we can leave the block.  */

      insn = bb->end;
      code = GET_CODE (insn);

      /* A branch.  */
      if (code == JUMP_INSN)
	{
	  rtx tmp;

	  /* ??? Recognize a tablejump and do the right thing.  */
	  if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
	      && (tmp = NEXT_INSN (tmp)) != NULL_RTX
	      && GET_CODE (tmp) == JUMP_INSN
	      && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
		  || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
	    {
	      rtvec vec;
	      int j;

	      if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
		vec = XVEC (PATTERN (tmp), 0);
	      else
		vec = XVEC (PATTERN (tmp), 1);

	      for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
		make_label_edge (edge_cache, bb,
				 XEXP (RTVEC_ELT (vec, j), 0), 0);

	      /* Some targets (eg, ARM) emit a conditional jump that also
		 contains the out-of-range target.  Scan for these and
		 add an edge if necessary.  */
	      if ((tmp = single_set (insn)) != NULL
		  && SET_DEST (tmp) == pc_rtx
		  && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
		  && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
		make_label_edge (edge_cache, bb,
				 XEXP (XEXP (SET_SRC (tmp), 2), 0), 0);

#ifdef CASE_DROPS_THROUGH
	      /* Silly VAXen.  The ADDR_VEC is going to be in the way of
		 us naturally detecting fallthru into the next block.  */
	      force_fallthru = 1;
#endif
	    }

	  /* If this is a computed jump, then mark it as reaching
	     everything on the label_value_list and forced_labels list.  */
	  else if (computed_jump_p (insn))
	    {
	      current_function_has_computed_jump = 1;

	      for (x = label_value_list; x; x = XEXP (x, 1))
		make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);

	      for (x = forced_labels; x; x = XEXP (x, 1))
		make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
	    }

	  /* A return insn creates an edge to the exit block.  */
	  else if (returnjump_p (insn))
	    make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);

	  /* Otherwise, we have a plain conditional or unconditional jump.  */
	  else
	    {
	      if (! JUMP_LABEL (insn))
		abort ();
	      make_label_edge (edge_cache, bb, JUMP_LABEL (insn), 0);
	    }
	}

      /* If this is a sibling call insn, then this is in effect a
	 combined call and return, and so we need an edge to the
	 exit block.  No need to worry about EH edges, since we
	 wouldn't have created the sibling call in the first place.  */

      if (code == CALL_INSN && SIBLING_CALL_P (insn))
	make_edge (edge_cache, bb, EXIT_BLOCK_PTR,
		   EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
      else
	{
	  /* If this is a CALL_INSN, then mark it as reaching the active EH
	     handler for this CALL_INSN.  If we're handling asynchronous
	     exceptions then any insn can reach any of the active handlers.

	     Also mark the CALL_INSN as reaching any nonlocal goto handler.  */

	  if (code == CALL_INSN || asynchronous_exceptions)
	    {
	      /* Add any appropriate EH edges.  We do this unconditionally
		 since there may be a REG_EH_REGION or REG_EH_RETHROW note
		 on the call, and this needn't be within an EH region.  */
	      make_eh_edge (edge_cache, eh_nest_info, bb, insn, bb->eh_end);

	      /* If we have asynchronous exceptions, do the same for *all*
		 exception regions active in the block.  */
	      if (asynchronous_exceptions
		  && bb->eh_beg != bb->eh_end)
		{
		  if (bb->eh_beg >= 0)
		    make_eh_edge (edge_cache, eh_nest_info, bb,
				  NULL_RTX, bb->eh_beg);

		  for (x = bb->head; x != bb->end; x = NEXT_INSN (x))
		    if (GET_CODE (x) == NOTE
			&& (NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_BEG
			    || NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_END))
		      {
			int region = NOTE_EH_HANDLER (x);
			make_eh_edge (edge_cache, eh_nest_info, bb,
				      NULL_RTX, region);
		      }
		}

	      if (code == CALL_INSN && nonlocal_goto_handler_labels)
		{
		  /* ??? This could be made smarter: in some cases it's
		     possible to tell that certain calls will not do a
		     nonlocal goto.

		     For example, if the nested functions that do the
		     nonlocal gotos do not have their addresses taken, then
		     only calls to those functions or to other nested
		     functions that use them could possibly do nonlocal
		     gotos.  */
		  /* We do know that a REG_EH_REGION note with a value less
		     than 0 is guaranteed not to perform a non-local goto.  */
		  rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
		  if (!note || INTVAL (XEXP (note, 0)) >= 0)
		    for (x = nonlocal_goto_handler_labels; x; x = XEXP (x, 1))
		      make_label_edge (edge_cache, bb, XEXP (x, 0),
				       EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
		}
	    }
	}

      /* We know something about the structure of the function __throw in
	 libgcc2.c.  It is the only function that ever contains eh_stub
	 labels.  It modifies its return address so that the last block
	 returns to one of the eh_stub labels within it.  So we have to
	 make additional edges in the flow graph.  */
      if (i + 1 == n_basic_blocks && eh_return_stub_label != 0)
	make_label_edge (edge_cache, bb, eh_return_stub_label, EDGE_EH);

      /* Find out if we can drop through to the next block.  */
      insn = next_nonnote_insn (insn);
      if (!insn || (i + 1 == n_basic_blocks && force_fallthru))
	make_edge (edge_cache, bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
      else if (i + 1 < n_basic_blocks)
	{
	  rtx tmp = BLOCK_HEAD (i + 1);
	  if (GET_CODE (tmp) == NOTE)
	    tmp = next_nonnote_insn (tmp);
	  if (force_fallthru || insn == tmp)
	    make_edge (edge_cache, bb, BASIC_BLOCK (i + 1), EDGE_FALLTHRU);
	}
    }

  free_eh_nesting_info (eh_nest_info);
  if (edge_cache)
    sbitmap_vector_free (edge_cache);
}
/* Create an edge between two basic blocks.  FLAGS are auxiliary information
   about the edge that is accumulated between calls.  */

void
make_edge (edge_cache, src, dst, flags)
     sbitmap *edge_cache;
     basic_block src, dst;
     int flags;
{
  int use_edge_cache;
  edge e;

  /* Don't bother with edge cache for ENTRY or EXIT; there aren't that
     many edges to them, and we didn't allocate memory for it.  */
  use_edge_cache = (edge_cache
		    && src != ENTRY_BLOCK_PTR
		    && dst != EXIT_BLOCK_PTR);

  /* Make sure we don't add duplicate edges.  */
  if (! use_edge_cache || TEST_BIT (edge_cache[src->index], dst->index))
    for (e = src->succ; e; e = e->succ_next)
      if (e->dest == dst)
	{
	  e->flags |= flags;
	  return;
	}

  e = (edge) xcalloc (1, sizeof (*e));
  n_edges++;

  e->succ_next = src->succ;
  e->pred_next = dst->pred;
  e->src = src;
  e->dest = dst;
  e->flags = flags;

  src->succ = e;
  dst->pred = e;

  if (use_edge_cache)
    SET_BIT (edge_cache[src->index], dst->index);
}
/* Create an edge from a basic block to a label.  */

static void
make_label_edge (edge_cache, src, label, flags)
     sbitmap *edge_cache;
     basic_block src;
     rtx label;
     int flags;
{
  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If the label was never emitted, this insn is junk, but avoid a
     crash trying to refer to BLOCK_FOR_INSN (label).  This can happen
     as a result of a syntax error and a diagnostic has already been
     printed.  */

  if (INSN_UID (label) == 0)
    return;

  make_edge (edge_cache, src, BLOCK_FOR_INSN (label), flags);
}
/* Create the edges generated by INSN in REGION.  */

static void
make_eh_edge (edge_cache, eh_nest_info, src, insn, region)
     sbitmap *edge_cache;
     eh_nesting_info *eh_nest_info;
     basic_block src;
     rtx insn;
     int region;
{
  handler_info **handler_list;
  int num, is_call;

  is_call = (insn && GET_CODE (insn) == CALL_INSN ? EDGE_ABNORMAL_CALL : 0);
  num = reachable_handlers (region, eh_nest_info, insn, &handler_list);
  while (--num >= 0)
    {
      make_label_edge (edge_cache, src, handler_list[num]->handler_label,
		       EDGE_ABNORMAL | EDGE_EH | is_call);
    }
}
/* EH_REGION notes appearing between basic blocks are ambiguous, and even
   dangerous if we intend to move basic blocks around.  Move such notes
   into the following block.  */

static void
move_stray_eh_region_notes ()
{
  int i;
  basic_block b1, b2;

  if (n_basic_blocks < 2)
    return;

  b2 = BASIC_BLOCK (n_basic_blocks - 1);
  for (i = n_basic_blocks - 2; i >= 0; --i, b2 = b1)
    {
      rtx insn, next, list = NULL_RTX;

      b1 = BASIC_BLOCK (i);
      for (insn = NEXT_INSN (b1->end); insn != b2->head; insn = next)
	{
	  next = NEXT_INSN (insn);
	  if (GET_CODE (insn) == NOTE
	      && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG
		  || NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
	    {
	      /* Unlink from the insn chain.  */
	      NEXT_INSN (PREV_INSN (insn)) = next;
	      PREV_INSN (next) = PREV_INSN (insn);

	      /* Queue it.  */
	      NEXT_INSN (insn) = list;
	      list = insn;
	    }
	}

      if (list == NULL_RTX)
	continue;

      /* Find where to insert these things.  */
      insn = b2->head;
      if (GET_CODE (insn) == CODE_LABEL)
	insn = NEXT_INSN (insn);

      while (list)
	{
	  next = NEXT_INSN (list);
	  add_insn_after (list, insn);
	  list = next;
	}
    }
}
/* Recompute eh_beg/eh_end for each basic block.  */

static void
record_active_eh_regions (f)
     rtx f;
{
  rtx insn, eh_list = NULL_RTX;
  int i = 0;
  basic_block bb = BASIC_BLOCK (0);

  for (insn = f; insn; insn = NEXT_INSN (insn))
    {
      if (bb->head == insn)
	bb->eh_beg = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);

      if (GET_CODE (insn) == NOTE)
	{
	  int kind = NOTE_LINE_NUMBER (insn);
	  if (kind == NOTE_INSN_EH_REGION_BEG)
	    eh_list = alloc_INSN_LIST (insn, eh_list);
	  else if (kind == NOTE_INSN_EH_REGION_END)
	    {
	      rtx t = XEXP (eh_list, 1);
	      free_INSN_LIST_node (eh_list);
	      eh_list = t;
	    }
	}

      if (bb->end == insn)
	{
	  bb->eh_end = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);
	  i += 1;
	  if (i == n_basic_blocks)
	    break;
	  bb = BASIC_BLOCK (i);
	}
    }
}
/* Identify critical edges and set the bits appropriately.  */

static void
mark_critical_edges ()
{
  int i, n = n_basic_blocks;
  basic_block bb;

  /* We begin with the entry block.  This is not terribly important now,
     but could be if a front end (Fortran) implemented alternate entry
     points.  */
  bb = ENTRY_BLOCK_PTR;
  i = -1;

  while (1)
    {
      edge e;

      /* (1) Critical edges must have a source with multiple successors.  */
      if (bb->succ && bb->succ->succ_next)
	{
	  for (e = bb->succ; e; e = e->succ_next)
	    {
	      /* (2) Critical edges must have a destination with multiple
		 predecessors.  Note that we know there is at least one
		 predecessor -- the edge we followed to get here.  */
	      if (e->dest->pred->pred_next)
		e->flags |= EDGE_CRITICAL;
	      else
		e->flags &= ~EDGE_CRITICAL;
	    }
	}
      else
	{
	  for (e = bb->succ; e; e = e->succ_next)
	    e->flags &= ~EDGE_CRITICAL;
	}

      if (++i >= n)
	break;
      bb = BASIC_BLOCK (i);
    }
}
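
/* For instance (illustrative): if block A ends in a conditional jump with
   successors B and C, and C is also reached from another block D, then the
   edge A->C is critical -- it leaves a block with multiple successors and
   enters a block with multiple predecessors.  Code can be inserted "on"
   such an edge only by splitting it with a new block; see split_edge
   below.  */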
/* Split a (typically critical) edge.  Return the new block.
   Abort on abnormal edges.

   ??? The code generally expects to be called on critical edges.
   The case of a block ending in an unconditional jump to a
   block with multiple predecessors is not handled optimally.  */

basic_block
split_edge (edge_in)
     edge edge_in;
{
  basic_block old_pred, bb, old_succ;
  edge edge_out;
  rtx bb_note;
  int i, j;

  /* Abnormal edges cannot be split.  */
  if ((edge_in->flags & EDGE_ABNORMAL) != 0)
    abort ();

  old_pred = edge_in->src;
  old_succ = edge_in->dest;

  /* Remove the existing edge from the destination's pred list.  */
  {
    edge *pp;
    for (pp = &old_succ->pred; *pp != edge_in; pp = &(*pp)->pred_next)
      continue;
    *pp = edge_in->pred_next;
    edge_in->pred_next = NULL;
  }

  /* Create the new structures.  */
  bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
  edge_out = (edge) xcalloc (1, sizeof (*edge_out));
  n_edges++;

  memset (bb, 0, sizeof (*bb));

  /* ??? This info is likely going to be out of date very soon.  */
  if (old_succ->global_live_at_start)
    {
      bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
      bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);
      COPY_REG_SET (bb->global_live_at_start, old_succ->global_live_at_start);
      COPY_REG_SET (bb->global_live_at_end, old_succ->global_live_at_start);
    }

  /* Wire them up.  */
  bb->pred = edge_in;
  bb->succ = edge_out;
  bb->count = edge_in->count;

  edge_in->dest = bb;
  edge_in->flags &= ~EDGE_CRITICAL;

  edge_out->pred_next = old_succ->pred;
  edge_out->succ_next = NULL;
  edge_out->src = bb;
  edge_out->dest = old_succ;
  edge_out->flags = EDGE_FALLTHRU;
  edge_out->probability = REG_BR_PROB_BASE;
  edge_out->count = edge_in->count;

  old_succ->pred = edge_out;

  /* Tricky case -- if there existed a fallthru into the successor
     (and we're not it) we must add a new unconditional jump around
     the new block we're actually interested in.

     Further, if that edge is critical, this means a second new basic
     block must be created to hold it.  In order to simplify correct
     insn placement, do this before we touch the existing basic block
     ordering for the block we were really wanting.  */
  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
    {
      edge e;
      for (e = edge_out->pred_next; e; e = e->pred_next)
	if (e->flags & EDGE_FALLTHRU)
	  break;

      if (e)
	{
	  basic_block jump_block;
	  rtx pos;

	  if ((e->flags & EDGE_CRITICAL) == 0
	      && e->src != ENTRY_BLOCK_PTR)
	    {
	      /* Non critical -- we can simply add a jump to the end
		 of the existing predecessor.  */
	      jump_block = e->src;
	    }
	  else
	    {
	      /* We need a new block to hold the jump.  The simplest
		 way to do the bulk of the work here is to recursively
		 call ourselves.  */
	      jump_block = split_edge (e);
	      e = jump_block->succ;
	    }

	  /* Now add the jump insn ...  */
	  pos = emit_jump_insn_after (gen_jump (old_succ->head),
				      jump_block->end);
	  jump_block->end = pos;
	  if (basic_block_for_insn)
	    set_block_for_insn (pos, jump_block);
	  emit_barrier_after (pos);

	  /* ... let jump know that label is in use, ...  */
	  JUMP_LABEL (pos) = old_succ->head;
	  ++LABEL_NUSES (old_succ->head);

	  /* ... and clear fallthru on the outgoing edge.  */
	  e->flags &= ~EDGE_FALLTHRU;

	  /* Continue splitting the interesting edge.  */
	}
    }

  /* Place the new block just in front of the successor.  */
  VARRAY_GROW (basic_block_info, ++n_basic_blocks);
  if (old_succ == EXIT_BLOCK_PTR)
    j = n_basic_blocks - 1;
  else
    j = old_succ->index;
  for (i = n_basic_blocks - 1; i > j; --i)
    {
      basic_block tmp = BASIC_BLOCK (i - 1);
      BASIC_BLOCK (i) = tmp;
      tmp->index = i;
    }
  BASIC_BLOCK (i) = bb;
  bb->index = i;

  /* Create the basic block note.

     Where we place the note can have a noticeable impact on the generated
     code.  Consider this cfg:

			E
			|
			0
			|
		       +->1-->2--->E
		       |  |
		       +--+

     If we need to insert an insn on the edge from block 0 to block 1,
     we want to ensure the instructions we insert are outside of any
     loop notes that physically sit between block 0 and block 1.  Otherwise
     we confuse the loop optimizer into thinking the loop is a phony.  */
  if (old_succ != EXIT_BLOCK_PTR
      && PREV_INSN (old_succ->head)
      && GET_CODE (PREV_INSN (old_succ->head)) == NOTE
      && NOTE_LINE_NUMBER (PREV_INSN (old_succ->head)) == NOTE_INSN_LOOP_BEG)
    bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK,
				PREV_INSN (old_succ->head));
  else if (old_succ != EXIT_BLOCK_PTR)
    bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, old_succ->head);
  else
    bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
  NOTE_BASIC_BLOCK (bb_note) = bb;
  bb->head = bb->end = bb_note;

  /* Not quite simple -- for non-fallthru edges, we must adjust the
     predecessor's jump instruction to target our new block.  */
  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
    {
      rtx tmp, insn = old_pred->end;
      rtx old_label = old_succ->head;
      rtx new_label = gen_label_rtx ();

      if (GET_CODE (insn) != JUMP_INSN)
	abort ();

      /* ??? Recognize a tablejump and adjust all matching cases.  */
      if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
	  && (tmp = NEXT_INSN (tmp)) != NULL_RTX
	  && GET_CODE (tmp) == JUMP_INSN
	  && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
	      || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
	{
	  rtvec vec;
	  int j;

	  if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
	    vec = XVEC (PATTERN (tmp), 0);
	  else
	    vec = XVEC (PATTERN (tmp), 1);

	  for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
	    if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
	      {
		RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (VOIDmode, new_label);
		--LABEL_NUSES (old_label);
		++LABEL_NUSES (new_label);
	      }

	  /* Handle casesi dispatch insns.  */
	  if ((tmp = single_set (insn)) != NULL
	      && SET_DEST (tmp) == pc_rtx
	      && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
	      && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
	      && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
	    {
	      XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (VOIDmode,
							   new_label);
	      --LABEL_NUSES (old_label);
	      ++LABEL_NUSES (new_label);
	    }
	}
      else
	{
	  /* This would have indicated an abnormal edge.  */
	  if (computed_jump_p (insn))
	    abort ();

	  /* A return instruction can't be redirected.  */
	  if (returnjump_p (insn))
	    abort ();

	  /* If the insn doesn't go where we think, we're confused.  */
	  if (JUMP_LABEL (insn) != old_label)
	    abort ();

	  redirect_jump (insn, new_label, 0);
	}

      emit_label_before (new_label, bb_note);
      bb->head = new_label;
    }

  return bb;
}
/* Queue instructions for insertion on an edge between two basic blocks.
   The new instructions and basic blocks (if any) will not appear in the
   CFG until commit_edge_insertions is called.  */

void
insert_insn_on_edge (pattern, e)
     rtx pattern;
     edge e;
{
  /* We cannot insert instructions on an abnormal critical edge.
     It will be easier to find the culprit if we die now.  */
  if ((e->flags & (EDGE_ABNORMAL | EDGE_CRITICAL))
      == (EDGE_ABNORMAL | EDGE_CRITICAL))
    abort ();

  if (e->insns == NULL_RTX)
    start_sequence ();
  else
    push_to_sequence (e->insns);

  emit_insn (pattern);

  e->insns = get_insns ();
  end_sequence ();
}
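
/* A sketch of typical usage (illustrative; the pattern emitted and the
   edge chosen depend entirely on the caller):

	insert_insn_on_edge (gen_move_insn (dest, src), e);
	...
	commit_edge_insertions ();

   Nothing appears in the insn chain until commit_edge_insertions runs;
   at that point critical edges are split as needed and the queued
   sequences are spliced into place.  */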
/* Update the CFG for the instructions queued on edge E.  */

static void
commit_one_edge_insertion (e)
     edge e;
{
  rtx before = NULL_RTX, after = NULL_RTX, insns, tmp, last;
  basic_block bb;

  /* Pull the insns off the edge now since the edge might go away.  */
  insns = e->insns;
  e->insns = NULL_RTX;

  /* Figure out where to put these things.  If the destination has
     one predecessor, insert there.  Except for the exit block.  */
  if (e->dest->pred->pred_next == NULL
      && e->dest != EXIT_BLOCK_PTR)
    {
      bb = e->dest;

      /* Get the location correct wrt a code label, and "nice" wrt
	 a basic block note, and before everything else.  */
      tmp = bb->head;
      if (GET_CODE (tmp) == CODE_LABEL)
	tmp = NEXT_INSN (tmp);
      if (GET_CODE (tmp) == NOTE
	  && NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BASIC_BLOCK)
	tmp = NEXT_INSN (tmp);
      if (tmp == bb->head)
	before = tmp;
      else
	after = PREV_INSN (tmp);
    }

  /* If the source has one successor and the edge is not abnormal,
     insert there.  Except for the entry block.  */
  else if ((e->flags & EDGE_ABNORMAL) == 0
	   && e->src->succ->succ_next == NULL
	   && e->src != ENTRY_BLOCK_PTR)
    {
      bb = e->src;
      /* It is possible to have a non-simple jump here.  Consider a target
	 where some forms of unconditional jumps clobber a register.  This
	 happens on the fr30 for example.

	 We know this block has a single successor, so we can just emit
	 the queued insns before the jump.  */
      if (GET_CODE (bb->end) == JUMP_INSN)
	{
	  before = bb->end;
	}
      else
	{
	  /* We'd better be fallthru, or we've lost track of what's what.  */
	  if ((e->flags & EDGE_FALLTHRU) == 0)
	    abort ();

	  after = bb->end;
	}
    }

  /* Otherwise we must split the edge.  */
  else
    {
      bb = split_edge (e);
      after = bb->end;
    }

  /* Now that we've found the spot, do the insertion.  */

  /* Set the new block number for these insns, if structure is allocated.  */
  if (basic_block_for_insn)
    {
      rtx i;
      for (i = insns; i != NULL_RTX; i = NEXT_INSN (i))
	set_block_for_insn (i, bb);
    }

  if (before)
    {
      emit_insns_before (insns, before);
      if (before == bb->head)
	bb->head = insns;

      last = prev_nonnote_insn (before);
    }
  else
    {
      last = emit_insns_after (insns, after);
      if (after == bb->end)
	bb->end = last;
    }

  if (returnjump_p (last))
    {
      /* ??? Remove all outgoing edges from BB and add one for EXIT.
	 This is not currently a problem because this only happens
	 for the (single) epilogue, which already has a fallthru edge
	 to EXIT.  */

      e = bb->succ;
      if (e->dest != EXIT_BLOCK_PTR
	  || e->succ_next != NULL
	  || (e->flags & EDGE_FALLTHRU) == 0)
	abort ();
      e->flags &= ~EDGE_FALLTHRU;

      emit_barrier_after (last);
      bb->end = last;

      if (before)
	flow_delete_insn (before);
    }
  else if (GET_CODE (last) == JUMP_INSN)
    abort ();
}
/* Update the CFG for all queued instructions.  */

void
commit_edge_insertions ()
{
  int i;
  basic_block bb;

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  i = -1;
  bb = ENTRY_BLOCK_PTR;
  while (1)
    {
      edge e, next;

      for (e = bb->succ; e; e = next)
	{
	  next = e->succ_next;
	  if (e->insns)
	    commit_one_edge_insertion (e);
	}

      if (++i >= n_basic_blocks)
	break;
      bb = BASIC_BLOCK (i);
    }
}
/* Delete all unreachable basic blocks.  */

static void
delete_unreachable_blocks ()
{
  basic_block *worklist, *tos;
  int deleted_handler;
  edge e;
  int i, n;

  n = n_basic_blocks;
  tos = worklist = (basic_block *) xmalloc (sizeof (basic_block) * n);

  /* Use basic_block->aux as a marker.  Clear them all.  */

  for (i = 0; i < n; ++i)
    BASIC_BLOCK (i)->aux = NULL;

  /* Add our starting points to the worklist.  Almost always there will
     be only one.  It isn't inconceivable that we might one day directly
     support Fortran alternate entry points.  */

  for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
    {
      *tos++ = e->dest;

      /* Mark the block with a handy non-null value.  */
      e->dest->aux = e;
    }

  /* Iterate: find everything reachable from what we've already seen.  */

  while (tos != worklist)
    {
      basic_block b = *--tos;

      for (e = b->succ; e; e = e->succ_next)
	if (!e->dest->aux)
	  {
	    *tos++ = e->dest;
	    e->dest->aux = e;
	  }
    }

  /* Delete all unreachable basic blocks.  Count down so that we don't
     interfere with the block renumbering that happens in
     flow_delete_block.  */

  deleted_handler = 0;

  for (i = n - 1; i >= 0; --i)
    {
      basic_block b = BASIC_BLOCK (i);

      if (b->aux != NULL)
	/* This block was found.  Tidy up the mark.  */
	b->aux = NULL;
      else
	deleted_handler |= flow_delete_block (b);
    }

  tidy_fallthru_edges ();

  /* If we deleted an exception handler, we may have EH region begin/end
     blocks to remove as well.  */
  if (deleted_handler)
    delete_eh_regions ();

  free (worklist);
}
/* Find EH regions for which there is no longer a handler, and delete them.  */

static void
delete_eh_regions ()
{
  rtx insn;

  update_rethrow_references ();

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == NOTE)
      {
	if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG
	    || NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
	  {
	    int num = NOTE_EH_HANDLER (insn);
	    /* A NULL handler indicates a region is no longer needed,
	       as long as its rethrow label isn't used.  */
	    if (get_first_handler (num) == NULL && ! rethrow_used (num))
	      {
		NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
		NOTE_SOURCE_FILE (insn) = 0;
	      }
	  }
      }
}
/* Return true if NOTE is not one of the ones that must be kept paired,
   so that we may simply delete them.  */

static int
can_delete_note_p (note)
     rtx note;
{
  return (NOTE_LINE_NUMBER (note) == NOTE_INSN_DELETED
	  || NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK);
}
/* Unlink a chain of insns between START and FINISH, leaving notes
   that must be paired.  */

void
flow_delete_insn_chain (start, finish)
     rtx start, finish;
{
  /* Unchain the insns one by one.  It would be quicker to delete all
     of these with a single unchaining, rather than one at a time, but
     we need to keep the NOTE's.  */

  rtx next;

  while (1)
    {
      next = NEXT_INSN (start);
      if (GET_CODE (start) == NOTE && !can_delete_note_p (start))
	;
      else if (GET_CODE (start) == CODE_LABEL
	       && ! can_delete_label_p (start))
	{
	  const char *name = LABEL_NAME (start);
	  PUT_CODE (start, NOTE);
	  NOTE_LINE_NUMBER (start) = NOTE_INSN_DELETED_LABEL;
	  NOTE_SOURCE_FILE (start) = name;
	}
      else
	next = flow_delete_insn (start);

      if (start == finish)
	break;
      start = next;
    }
}
1941 /* Delete the insns in a (non-live) block. We physically delete every
1942 non-deleted-note insn, and update the flow graph appropriately.
1944 Return nonzero if we deleted an exception handler. */
1946 /* ??? Preserving all such notes strikes me as wrong. It would be nice
1947 to post-process the stream to remove empty blocks, loops, ranges, etc. */
1949 int
1950 flow_delete_block (b)
1951 basic_block b;
1953 int deleted_handler = 0;
1954 rtx insn, end, tmp;
1956 /* If the head of this block is a CODE_LABEL, then it might be the
1957 label for an exception handler which can't be reached.
1959 We need to remove the label from the exception_handler_label list
1960 and remove the associated NOTE_INSN_EH_REGION_BEG and
1961 NOTE_INSN_EH_REGION_END notes. */
1963 insn = b->head;
1965 never_reached_warning (insn);
1967 if (GET_CODE (insn) == CODE_LABEL)
1969 rtx x, *prev = &exception_handler_labels;
1971 for (x = exception_handler_labels; x; x = XEXP (x, 1))
1973 if (XEXP (x, 0) == insn)
1975 /* Found a match, splice this label out of the EH label list. */
1976 *prev = XEXP (x, 1);
1977 XEXP (x, 1) = NULL_RTX;
1978 XEXP (x, 0) = NULL_RTX;
1980 /* Remove the handler from all regions. */
1981 remove_handler (insn);
1982 deleted_handler = 1;
1983 break;
1985 prev = &XEXP (x, 1);
1989 /* Include any jump table following the basic block. */
1990 end = b->end;
1991 if (GET_CODE (end) == JUMP_INSN
1992 && (tmp = JUMP_LABEL (end)) != NULL_RTX
1993 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
1994 && GET_CODE (tmp) == JUMP_INSN
1995 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
1996 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
1997 end = tmp;
1999 /* Include any barrier that may follow the basic block. */
2000 tmp = next_nonnote_insn (end);
2001 if (tmp && GET_CODE (tmp) == BARRIER)
2002 end = tmp;
2004 /* Selectively delete the entire chain. */
2005 flow_delete_insn_chain (insn, end);
2007 /* Remove the edges into and out of this block. Note that there may
2008 indeed be edges in, if we are removing an unreachable loop. */
2010 edge e, next, *q;
2012 for (e = b->pred; e ; e = next)
2014 for (q = &e->src->succ; *q != e; q = &(*q)->succ_next)
2015 continue;
2016 *q = e->succ_next;
2017 next = e->pred_next;
2018 n_edges--;
2019 free (e);
2021 for (e = b->succ; e ; e = next)
2023 for (q = &e->dest->pred; *q != e; q = &(*q)->pred_next)
2024 continue;
2025 *q = e->pred_next;
2026 next = e->succ_next;
2027 n_edges--;
2028 free (e);
2031 b->pred = NULL;
2032 b->succ = NULL;
2035 /* Remove the basic block from the array, and compact behind it. */
2036 expunge_block (b);
2038 return deleted_handler;
2041 /* Remove block B from the basic block array and compact behind it. */
2043 static void
2044 expunge_block (b)
2045 basic_block b;
2047 int i, n = n_basic_blocks;
2049 for (i = b->index; i + 1 < n; ++i)
2051 basic_block x = BASIC_BLOCK (i + 1);
2052 BASIC_BLOCK (i) = x;
2053 x->index = i;
2056 basic_block_info->num_elements--;
2057 n_basic_blocks--;
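/* Compaction in pictures (illustrative only): with five blocks
   [B0 B1 B2 B3 B4], expunging B2 shifts B3 and B4 down one slot each
   and renumbers them, leaving [B0 B1 B3 B4] with indices 0 through 3.  */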
2060 /* Delete INSN by patching it out. Return the next insn. */
2062 rtx
2063 flow_delete_insn (insn)
2064 rtx insn;
2066 rtx prev = PREV_INSN (insn);
2067 rtx next = NEXT_INSN (insn);
2068 rtx note;
2070 PREV_INSN (insn) = NULL_RTX;
2071 NEXT_INSN (insn) = NULL_RTX;
2072 INSN_DELETED_P (insn) = 1;
2074 if (prev)
2075 NEXT_INSN (prev) = next;
2076 if (next)
2077 PREV_INSN (next) = prev;
2078 else
2079 set_last_insn (prev);
2081 if (GET_CODE (insn) == CODE_LABEL)
2082 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
2084 /* If deleting a jump, decrement the use count of the label. Deleting
2085 the label itself should happen in the normal course of block merging. */
2086 if (GET_CODE (insn) == JUMP_INSN
2087 && JUMP_LABEL (insn)
2088 && GET_CODE (JUMP_LABEL (insn)) == CODE_LABEL)
2089 LABEL_NUSES (JUMP_LABEL (insn))--;
2091 /* Also if deleting an insn that references a label. */
2092 else if ((note = find_reg_note (insn, REG_LABEL, NULL_RTX)) != NULL_RTX
2093 && GET_CODE (XEXP (note, 0)) == CODE_LABEL)
2094 LABEL_NUSES (XEXP (note, 0))--;
2096 return next;
2099 /* True if a given label can be deleted. */
2101 static int
2102 can_delete_label_p (label)
2103 rtx label;
2105 rtx x;
2107 if (LABEL_PRESERVE_P (label))
2108 return 0;
2110 for (x = forced_labels; x ; x = XEXP (x, 1))
2111 if (label == XEXP (x, 0))
2112 return 0;
2113 for (x = label_value_list; x ; x = XEXP (x, 1))
2114 if (label == XEXP (x, 0))
2115 return 0;
2116 for (x = exception_handler_labels; x ; x = XEXP (x, 1))
2117 if (label == XEXP (x, 0))
2118 return 0;
2120 /* User declared labels must be preserved. */
2121 if (LABEL_NAME (label) != 0)
2122 return 0;
2124 return 1;
2127 static int
2128 tail_recursion_label_p (label)
2129 rtx label;
2131 rtx x;
2133 for (x = tail_recursion_label_list; x ; x = XEXP (x, 1))
2134 if (label == XEXP (x, 0))
2135 return 1;
2137 return 0;
2140 /* Blocks A and B are to be merged into a single block A. The insns
2141 are already contiguous, hence `nomove'. */
2143 void
2144 merge_blocks_nomove (a, b)
2145 basic_block a, b;
2147 edge e;
2148 rtx b_head, b_end, a_end;
2149 rtx del_first = NULL_RTX, del_last = NULL_RTX;
2150 int b_empty = 0;
2152 /* If there was a CODE_LABEL beginning B, delete it. */
2153 b_head = b->head;
2154 b_end = b->end;
2155 if (GET_CODE (b_head) == CODE_LABEL)
2157 /* Detect basic blocks with nothing but a label. This can happen
2158 in particular at the end of a function. */
2159 if (b_head == b_end)
2160 b_empty = 1;
2161 del_first = del_last = b_head;
2162 b_head = NEXT_INSN (b_head);
2165 /* Delete the basic block note. */
2166 if (GET_CODE (b_head) == NOTE
2167 && NOTE_LINE_NUMBER (b_head) == NOTE_INSN_BASIC_BLOCK)
2169 if (b_head == b_end)
2170 b_empty = 1;
2171 if (! del_last)
2172 del_first = b_head;
2173 del_last = b_head;
2174 b_head = NEXT_INSN (b_head);
2177 /* If there was a jump out of A, delete it. */
2178 a_end = a->end;
2179 if (GET_CODE (a_end) == JUMP_INSN)
2181 rtx prev;
2183 prev = prev_nonnote_insn (a_end);
2184 if (!prev)
2185 prev = a->head;
2187 del_first = a_end;
2189 #ifdef HAVE_cc0
2190 /* If this was a conditional jump, we need to also delete
2191 the insn that set cc0. */
2192 if (prev && sets_cc0_p (prev))
2194 rtx tmp = prev;
2195 prev = prev_nonnote_insn (prev);
2196 if (!prev)
2197 prev = a->head;
2198 del_first = tmp;
2200 #endif
2202 a_end = prev;
2204 else if (GET_CODE (NEXT_INSN (a_end)) == BARRIER)
2205 del_first = NEXT_INSN (a_end);
2207 /* Delete everything marked above as well as crap that might be
2208 hanging out between the two blocks. */
2209 flow_delete_insn_chain (del_first, del_last);
2211 /* Normally there should only be one successor of A and that is B, but
2212 partway through the merge of blocks for conditional_execution we'll
2213 be merging a TEST block with THEN and ELSE successors. Free the
2214 whole lot of them and hope the caller knows what they're doing. */
2215 while (a->succ)
2216 remove_edge (a->succ);
2218 /* Adjust the edges out of B for the new owner. */
2219 for (e = b->succ; e ; e = e->succ_next)
2220 e->src = a;
2221 a->succ = b->succ;
2223 /* B hasn't quite yet ceased to exist. Attempt to prevent mishap. */
2224 b->pred = b->succ = NULL;
2226 /* Reassociate the insns of B with A. */
2227 if (!b_empty)
2229 if (basic_block_for_insn)
2231 BLOCK_FOR_INSN (b_head) = a;
2232 while (b_head != b_end)
2234 b_head = NEXT_INSN (b_head);
2235 BLOCK_FOR_INSN (b_head) = a;
2238 a_end = b_end;
2240 a->end = a_end;
2242 expunge_block (b);
2245 /* Blocks A and B are to be merged into a single block. A has no incoming
2246 fallthru edge, so it can be moved before B without adding or modifying
2247 any jumps (aside from the jump from A to B). */
2249 static int
2250 merge_blocks_move_predecessor_nojumps (a, b)
2251 basic_block a, b;
2253 rtx start, end, barrier;
2254 int index;
2256 start = a->head;
2257 end = a->end;
2259 barrier = next_nonnote_insn (end);
2260 if (GET_CODE (barrier) != BARRIER)
2261 abort ();
2262 flow_delete_insn (barrier);
2264 /* Move block and loop notes out of the chain so that we do not
2265 disturb their order.
2267 ??? A better solution would be to squeeze out all the non-nested notes
2268 and adjust the block trees appropriately. Even better would be to have
2269 a tighter connection between block trees and rtl so that this is not
2270 necessary. */
2271 start = squeeze_notes (start, end);
2273 /* Scramble the insn chain. */
2274 if (end != PREV_INSN (b->head))
2275 reorder_insns (start, end, PREV_INSN (b->head));
2277 if (rtl_dump_file)
2279 fprintf (rtl_dump_file, "Moved block %d before %d and merged.\n",
2280 a->index, b->index);
2283 /* Swap the records for the two blocks around. Although we are deleting B,
2284 A is now where B was and we want to compact the BB array from where
2285 A used to be. */
2286 BASIC_BLOCK (a->index) = b;
2287 BASIC_BLOCK (b->index) = a;
2288 index = a->index;
2289 a->index = b->index;
2290 b->index = index;
2292 /* Now blocks A and B are contiguous. Merge them. */
2293 merge_blocks_nomove (a, b);
2295 return 1;
2298 /* Blocks A and B are to be merged into a single block. B has no outgoing
2299 fallthru edge, so it can be moved after A without adding or modifying
2300 any jumps (aside from the jump from A to B). */
2302 static int
2303 merge_blocks_move_successor_nojumps (a, b)
2304 basic_block a, b;
2306 rtx start, end, barrier;
2308 start = b->head;
2309 end = b->end;
2310 barrier = NEXT_INSN (end);
2312 /* Recognize a jump table following block B. */
2313 if (GET_CODE (barrier) == CODE_LABEL
2314 && NEXT_INSN (barrier)
2315 && GET_CODE (NEXT_INSN (barrier)) == JUMP_INSN
2316 && (GET_CODE (PATTERN (NEXT_INSN (barrier))) == ADDR_VEC
2317 || GET_CODE (PATTERN (NEXT_INSN (barrier))) == ADDR_DIFF_VEC))
2319 end = NEXT_INSN (barrier);
2320 barrier = NEXT_INSN (end);
2323 /* There had better have been a barrier there. Delete it. */
2324 if (GET_CODE (barrier) != BARRIER)
2325 abort ();
2326 flow_delete_insn (barrier);
2328 /* Move block and loop notes out of the chain so that we do not
2329 disturb their order.
2331 ??? A better solution would be to squeeze out all the non-nested notes
2332 and adjust the block trees appropriately. Even better would be to have
2333 a tighter connection between block trees and rtl so that this is not
2334 necessary. */
2335 start = squeeze_notes (start, end);
2337 /* Scramble the insn chain. */
2338 reorder_insns (start, end, a->end);
2340 /* Now blocks A and B are contiguous. Merge them. */
2341 merge_blocks_nomove (a, b);
2343 if (rtl_dump_file)
2345 fprintf (rtl_dump_file, "Moved block %d after %d and merged.\n",
2346 b->index, a->index);
2349 return 1;
2352 /* Attempt to merge basic blocks that are potentially non-adjacent.
2353 Return true iff the attempt succeeded. */
2355 static int
2356 merge_blocks (e, b, c)
2357 edge e;
2358 basic_block b, c;
2360 /* If C has a tail recursion label, do not merge. There is no
2361 edge recorded from the call_placeholder back to this label, as
2362 that would make optimize_sibling_and_tail_recursive_calls more
2363 complex for no gain. */
2364 if (GET_CODE (c->head) == CODE_LABEL
2365 && tail_recursion_label_p (c->head))
2366 return 0;
2368 /* If B has a fallthru edge to C, no need to move anything. */
2369 if (e->flags & EDGE_FALLTHRU)
2371 merge_blocks_nomove (b, c);
2373 if (rtl_dump_file)
2375 fprintf (rtl_dump_file, "Merged %d and %d without moving.\n",
2376 b->index, c->index);
2379 return 1;
2381 else
2383 edge tmp_edge;
2384 basic_block d;
2385 int c_has_outgoing_fallthru;
2386 int b_has_incoming_fallthru;
2388 /* We must make sure to not munge nesting of exception regions,
2389 lexical blocks, and loop notes.
2391 The first is taken care of by requiring that the active eh
2392 region at the end of one block always matches the active eh
2393 region at the beginning of the next block.
2395 The latter two are taken care of by squeezing out all the notes. */
2397 /* ??? A throw/catch edge (or any abnormal edge) should be rarely
2398 executed and we may want to treat blocks which have two out
2399 edges, one normal, one abnormal as only having one edge for
2400 block merging purposes. */
2402 for (tmp_edge = c->succ; tmp_edge ; tmp_edge = tmp_edge->succ_next)
2403 if (tmp_edge->flags & EDGE_FALLTHRU)
2404 break;
2405 c_has_outgoing_fallthru = (tmp_edge != NULL);
2407 for (tmp_edge = b->pred; tmp_edge ; tmp_edge = tmp_edge->pred_next)
2408 if (tmp_edge->flags & EDGE_FALLTHRU)
2409 break;
2410 b_has_incoming_fallthru = (tmp_edge != NULL);
2412 /* If B does not have an incoming fallthru, and the exception regions
2413 match, then it can be moved immediately before C without introducing
2414 or modifying jumps.
2416 C cannot be the first block, so we do not have to worry about
2417 accessing a non-existent block. */
2418 d = BASIC_BLOCK (c->index - 1);
2419 if (! b_has_incoming_fallthru
2420 && d->eh_end == b->eh_beg
2421 && b->eh_end == c->eh_beg)
2422 return merge_blocks_move_predecessor_nojumps (b, c);
2424 /* Otherwise, we're going to try to move C after B. Make sure the
2425 exception regions match.
2427 If B is the last basic block, then we must not try to access the
2428 block structure for block B + 1. Luckily in that case we do not
2429 need to worry about matching exception regions. */
2430 d = (b->index + 1 < n_basic_blocks ? BASIC_BLOCK (b->index + 1) : NULL);
2431 if (b->eh_end == c->eh_beg
2432 && (d == NULL || c->eh_end == d->eh_beg))
2434 /* If C does not have an outgoing fallthru, then it can be moved
2435 immediately after B without introducing or modifying jumps. */
2436 if (! c_has_outgoing_fallthru)
2437 return merge_blocks_move_successor_nojumps (b, c);
2439 /* Otherwise, we'll need to insert an extra jump, and possibly
2440 a new block to contain it. */
2441 /* ??? Not implemented yet. */
2444 return 0;
2448 /* Top level driver for merge_blocks. */
2450 static void
2451 try_merge_blocks ()
2453 int i;
2455 /* Attempt to merge blocks as made possible by edge removal. If a block
2456 has only one successor, and the successor has only one predecessor,
2457 they may be combined. */
2459 for (i = 0; i < n_basic_blocks; )
2461 basic_block c, b = BASIC_BLOCK (i);
2462 edge s;
2464 /* A loop because chains of blocks might be combinable. */
2465 while ((s = b->succ) != NULL
2466 && s->succ_next == NULL
2467 && (s->flags & EDGE_EH) == 0
2468 && (c = s->dest) != EXIT_BLOCK_PTR
2469 && c->pred->pred_next == NULL
2470 /* If the jump insn has side effects, we can't kill the edge. */
2471 && (GET_CODE (b->end) != JUMP_INSN
2472 || onlyjump_p (b->end))
2473 && merge_blocks (s, b, c))
2474 continue;
2476 /* Don't get confused by the index shift caused by deleting blocks. */
2477 i = b->index + 1;
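/* For example (illustrative only), a chain B0 -> B1 -> B2 in which each
   edge is its block's sole successor and sole predecessor collapses in
   two trips around the inner loop: B1 is first merged into B0, after
   which B0's single successor is B2 and it is merged as well -- all
   without advancing I.  */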
2481 /* The given edge should potentially be a fallthru edge. If that is in
2482 fact true, delete the jump and barriers that are in the way. */
2484 void
2485 tidy_fallthru_edge (e, b, c)
2486 edge e;
2487 basic_block b, c;
2489 rtx q;
2491 /* ??? In a late-running flow pass, other folks may have deleted basic
2492 blocks by nopping out blocks, leaving multiple BARRIERs between here
2493 and the target label. They ought to be chastised and fixed.
2495 We can also wind up with a sequence of undeletable labels between
2496 one block and the next.
2498 So search through a sequence of barriers, labels, and notes for
2499 the head of block C and assert that we really do fall through. */
2501 if (next_real_insn (b->end) != next_real_insn (PREV_INSN (c->head)))
2502 return;
2504 /* Remove what will soon cease being the jump insn from the source block.
2505 If block B consisted only of this single jump, turn it into a deleted
2506 note. */
2507 q = b->end;
2508 if (GET_CODE (q) == JUMP_INSN
2509 && onlyjump_p (q)
2510 && (any_uncondjump_p (q)
2511 || (b->succ == e && e->succ_next == NULL)))
2513 #ifdef HAVE_cc0
2514 /* If this was a conditional jump, we need to also delete
2515 the insn that set cc0. */
2516 if (any_condjump_p (q) && sets_cc0_p (PREV_INSN (q)))
2517 q = PREV_INSN (q);
2518 #endif
2520 if (b->head == q)
2522 PUT_CODE (q, NOTE);
2523 NOTE_LINE_NUMBER (q) = NOTE_INSN_DELETED;
2524 NOTE_SOURCE_FILE (q) = 0;
2526 else
2527 b->end = q = PREV_INSN (q);
2530 /* Selectively unlink the sequence. */
2531 if (q != PREV_INSN (c->head))
2532 flow_delete_insn_chain (NEXT_INSN (q), PREV_INSN (c->head));
2534 e->flags |= EDGE_FALLTHRU;
2537 /* Fix up edges that now fall through, or rather should now fall through
2538 but previously required a jump around now deleted blocks. Simplify
2539 the search by only examining blocks numerically adjacent, since this
2540 is how find_basic_blocks created them. */
2542 static void
2543 tidy_fallthru_edges ()
2545 int i;
2547 for (i = 1; i < n_basic_blocks; ++i)
2549 basic_block b = BASIC_BLOCK (i - 1);
2550 basic_block c = BASIC_BLOCK (i);
2551 edge s;
2553 /* We care about simple conditional or unconditional jumps with
2554 a single successor.
2556 If we had a conditional branch to the next instruction when
2557 find_basic_blocks was called, then there will only be one
2558 out edge for the block which ended with the conditional
2559 branch (since we do not create duplicate edges).
2561 Furthermore, the edge will be marked as a fallthru because we
2562 merge the flags for the duplicate edges. So we do not want to
2563 check that the edge is not a FALLTHRU edge. */
2564 if ((s = b->succ) != NULL
2565 && s->succ_next == NULL
2566 && s->dest == c
2567 /* If the jump insn has side effects, we can't tidy the edge. */
2568 && (GET_CODE (b->end) != JUMP_INSN
2569 || onlyjump_p (b->end)))
2570 tidy_fallthru_edge (s, b, c);
2574 /* Perform data flow analysis.
2575 F is the first insn of the function; FLAGS is a set of PROP_* flags
2576 to be used in accumulating flow info. */
2578 void
2579 life_analysis (f, file, flags)
2580 rtx f;
2581 FILE *file;
2582 int flags;
2584 #ifdef ELIMINABLE_REGS
2585 register int i;
2586 static struct {int from, to; } eliminables[] = ELIMINABLE_REGS;
2587 #endif
2589 /* Record which registers will be eliminated. We use this in
2590 mark_used_regs. */
2592 CLEAR_HARD_REG_SET (elim_reg_set);
2594 #ifdef ELIMINABLE_REGS
2595 for (i = 0; i < (int) (sizeof eliminables / sizeof eliminables[0]); i++)
2596 SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
2597 #else
2598 SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
2599 #endif
2601 if (! optimize)
2602 flags &= PROP_DEATH_NOTES | PROP_REG_INFO;
2604 /* The post-reload life analysis has (on a global basis) the same
2605 registers live as were computed by reload itself; otherwise the
2606 register elimination offsets and such may be incorrect.
2608 Reload will mark some registers as live even though they do not
2609 appear in the rtl.
2611 We don't want to create new auto-incs after reload, since they
2612 are unlikely to be useful and can cause problems with shared
2613 stack slots. */
2614 if (reload_completed)
2615 flags &= ~(PROP_REG_INFO | PROP_AUTOINC);
2617 /* We want alias analysis information for local dead store elimination. */
2618 if (flags & PROP_SCAN_DEAD_CODE)
2619 init_alias_analysis ();
2621 /* Always remove no-op moves. Do this before other processing so
2622 that we don't have to keep re-scanning them. */
2623 delete_noop_moves (f);
2625 /* Some targets can emit simpler epilogues if they know that sp was
2626 not ever modified during the function. After reload, of course,
2627 we've already emitted the epilogue so there's no sense searching. */
2628 if (! reload_completed)
2629 notice_stack_pointer_modification (f);
2631 /* Allocate and zero out data structures that will record the
2632 data from lifetime analysis. */
2633 allocate_reg_life_data ();
2634 allocate_bb_life_data ();
2636 /* Find the set of registers live on function exit. */
2637 mark_regs_live_at_end (EXIT_BLOCK_PTR->global_live_at_start);
2639 /* "Update" life info from zero. It'd be nice to begin the
2640 relaxation with just the exit and noreturn blocks, but that set
2641 is not immediately handy. */
2643 if (flags & PROP_REG_INFO)
2644 memset (regs_ever_live, 0, sizeof(regs_ever_live));
2645 update_life_info (NULL, UPDATE_LIFE_GLOBAL, flags);
2647 /* Clean up. */
2648 if (flags & PROP_SCAN_DEAD_CODE)
2649 end_alias_analysis ();
2651 if (file)
2652 dump_flow_info (file);
2654 free_basic_block_vars (1);
2657 /* A subroutine of verify_wide_reg, called through for_each_rtx.
2658 Search for REGNO. If found, abort if it is not wider than word_mode. */
2660 static int
2661 verify_wide_reg_1 (px, pregno)
2662 rtx *px;
2663 void *pregno;
2665 rtx x = *px;
2666 unsigned int regno = *(int *) pregno;
2668 if (GET_CODE (x) == REG && REGNO (x) == regno)
2670 if (GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD)
2671 abort ();
2672 return 1;
2674 return 0;
2677 /* A subroutine of verify_local_live_at_start. Search through insns
2678 between HEAD and END looking for register REGNO. */
2680 static void
2681 verify_wide_reg (regno, head, end)
2682 int regno;
2683 rtx head, end;
2685 while (1)
2687 if (GET_RTX_CLASS (GET_CODE (head)) == 'i'
2688 && for_each_rtx (&PATTERN (head), verify_wide_reg_1, &regno))
2689 return;
2690 if (head == end)
2691 break;
2692 head = NEXT_INSN (head);
2695 /* We didn't find the register at all. Something's way screwy. */
2696 abort ();
2699 /* A subroutine of update_life_info. Verify that there are no untoward
2700 changes in live_at_start during a local update. */
2702 static void
2703 verify_local_live_at_start (new_live_at_start, bb)
2704 regset new_live_at_start;
2705 basic_block bb;
2707 if (reload_completed)
2709 /* After reload, there are no pseudos, nor subregs of multi-word
2710 registers. The regsets should exactly match. */
2711 if (! REG_SET_EQUAL_P (new_live_at_start, bb->global_live_at_start))
2712 abort ();
2714 else
2716 int i;
2718 /* Find the set of changed registers. */
2719 XOR_REG_SET (new_live_at_start, bb->global_live_at_start);
2721 EXECUTE_IF_SET_IN_REG_SET (new_live_at_start, 0, i,
2723 /* No registers should die. */
2724 if (REGNO_REG_SET_P (bb->global_live_at_start, i))
2725 abort ();
2726 /* Verify that the now-live register is wider than word_mode. */
2727 verify_wide_reg (i, bb->head, bb->end);
2732 /* Updates life information starting with the basic blocks set in BLOCKS.
2733 If BLOCKS is null, consider it to be the universal set.
2735 If EXTENT is UPDATE_LIFE_LOCAL, such as after splitting or peepholing,
2736 we are only expecting local modifications to basic blocks. If we find
2737 extra registers live at the beginning of a block, then we either killed
2738 useful data, or we have a broken split that wants data not provided.
2739 If we find registers removed from live_at_start, that means we have
2740 a broken peephole that is killing a register it shouldn't.
2742 ??? This is not true in one situation -- when a pre-reload splitter
2743 generates subregs of a multi-word pseudo, current life analysis will
2744 lose the kill. So we _can_ have a pseudo go live. How irritating.
2746 Including PROP_REG_INFO does not properly refresh regs_ever_live
2747 unless the caller resets it to zero. */
2749 void
2750 update_life_info (blocks, extent, prop_flags)
2751 sbitmap blocks;
2752 enum update_life_extent extent;
2753 int prop_flags;
2755 regset tmp;
2756 regset_head tmp_head;
2757 int i;
2759 tmp = INITIALIZE_REG_SET (tmp_head);
2761 /* For a global update, we go through the relaxation process again. */
2762 if (extent != UPDATE_LIFE_LOCAL)
2764 calculate_global_regs_live (blocks, blocks,
2765 prop_flags & PROP_SCAN_DEAD_CODE);
2767 /* If asked, remove notes from the blocks we'll update. */
2768 if (extent == UPDATE_LIFE_GLOBAL_RM_NOTES)
2769 count_or_remove_death_notes (blocks, 1);
2772 if (blocks)
2774 EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
2776 basic_block bb = BASIC_BLOCK (i);
2778 COPY_REG_SET (tmp, bb->global_live_at_end);
2779 propagate_block (bb, tmp, (regset) NULL, prop_flags);
2781 if (extent == UPDATE_LIFE_LOCAL)
2782 verify_local_live_at_start (tmp, bb);
2785 else
2787 for (i = n_basic_blocks - 1; i >= 0; --i)
2789 basic_block bb = BASIC_BLOCK (i);
2791 COPY_REG_SET (tmp, bb->global_live_at_end);
2792 propagate_block (bb, tmp, (regset) NULL, prop_flags);
2794 if (extent == UPDATE_LIFE_LOCAL)
2795 verify_local_live_at_start (tmp, bb);
2799 FREE_REG_SET (tmp);
2801 if (prop_flags & PROP_REG_INFO)
2803 /* The only pseudos that are live at the beginning of the function
2804 are those that were not set anywhere in the function. local-alloc
2805 doesn't know how to handle these correctly, so mark them as not
2806 local to any one basic block. */
2807 EXECUTE_IF_SET_IN_REG_SET (ENTRY_BLOCK_PTR->global_live_at_end,
2808 FIRST_PSEUDO_REGISTER, i,
2809 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
2811 /* We have a problem with any pseudoreg that lives across the setjmp.
2812 ANSI says that if a user variable does not change in value between
2813 the setjmp and the longjmp, then the longjmp preserves it. This
2814 includes longjmp from a place where the pseudo appears dead.
2815 (In principle, the value still exists if it is in scope.)
2816 If the pseudo goes in a hard reg, some other value may occupy
2817 that hard reg where this pseudo is dead, thus clobbering the pseudo.
2818 Conclusion: such a pseudo must not go in a hard reg. */
2819 EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
2820 FIRST_PSEUDO_REGISTER, i,
2822 if (regno_reg_rtx[i] != 0)
2824 REG_LIVE_LENGTH (i) = -1;
2825 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
2831 /* Free the variables allocated by find_basic_blocks.
2833 KEEP_HEAD_END_P is non-zero if basic_block_info is not to be freed. */
2835 void
2836 free_basic_block_vars (keep_head_end_p)
2837 int keep_head_end_p;
2839 if (basic_block_for_insn)
2841 VARRAY_FREE (basic_block_for_insn);
2842 basic_block_for_insn = NULL;
2845 if (! keep_head_end_p)
2847 clear_edges ();
2848 VARRAY_FREE (basic_block_info);
2849 n_basic_blocks = 0;
2851 ENTRY_BLOCK_PTR->aux = NULL;
2852 ENTRY_BLOCK_PTR->global_live_at_end = NULL;
2853 EXIT_BLOCK_PTR->aux = NULL;
2854 EXIT_BLOCK_PTR->global_live_at_start = NULL;
2858 /* Return nonzero if the destination of SET equals the source. */
2859 static int
2860 set_noop_p (set)
2861 rtx set;
2863 rtx src = SET_SRC (set);
2864 rtx dst = SET_DEST (set);
2866 if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
2868 if (SUBREG_WORD (src) != SUBREG_WORD (dst))
2869 return 0;
2870 src = SUBREG_REG (src);
2871 dst = SUBREG_REG (dst);
2874 return (GET_CODE (src) == REG && GET_CODE (dst) == REG
2875 && REGNO (src) == REGNO (dst));
2878 /* Return nonzero if an insn consists only of SETs, each of which only sets a
2879 value to itself. */
2880 static int
2881 noop_move_p (insn)
2882 rtx insn;
2884 rtx pat = PATTERN (insn);
2886 /* Insns carrying these notes are useful later on. */
2887 if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
2888 return 0;
2890 if (GET_CODE (pat) == SET && set_noop_p (pat))
2891 return 1;
2893 if (GET_CODE (pat) == PARALLEL)
2895 int i;
2896 /* If nothing but SETs of registers to themselves,
2897 this insn can also be deleted. */
2898 for (i = 0; i < XVECLEN (pat, 0); i++)
2900 rtx tem = XVECEXP (pat, 0, i);
2902 if (GET_CODE (tem) == USE
2903 || GET_CODE (tem) == CLOBBER)
2904 continue;
2906 if (GET_CODE (tem) != SET || ! set_noop_p (tem))
2907 return 0;
2910 return 1;
2912 return 0;
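/* For reference, the shapes recognized above (illustrative RTL only;
   the register numbers are hypothetical):

     (set (reg 58) (reg 58))                     -- a lone no-op SET

     (parallel [(set (reg 58) (reg 58))
                (clobber (reg:CC 17))])          -- no-ops plus USE/CLOBBER

   An insn carrying a REG_EQUAL note is never treated as a no-op, since
   later passes may want the note.  */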
2915 /* Delete any insns that copy a register to itself. */
2917 static void
2918 delete_noop_moves (f)
2919 rtx f;
2921 rtx insn;
2922 for (insn = f; insn; insn = NEXT_INSN (insn))
2924 if (GET_CODE (insn) == INSN && noop_move_p (insn))
2926 PUT_CODE (insn, NOTE);
2927 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2928 NOTE_SOURCE_FILE (insn) = 0;
2933 /* Determine if the stack pointer is constant over the life of the function.
2934 Only useful before prologues have been emitted. */
2936 static void
2937 notice_stack_pointer_modification_1 (x, pat, data)
2938 rtx x;
2939 rtx pat ATTRIBUTE_UNUSED;
2940 void *data ATTRIBUTE_UNUSED;
2942 if (x == stack_pointer_rtx
2943 /* The stack pointer is only modified indirectly as the result
2944 of a push until later in flow. See the comments in rtl.texi
2945 regarding Embedded Side-Effects on Addresses. */
2946 || (GET_CODE (x) == MEM
2947 && (GET_CODE (XEXP (x, 0)) == PRE_DEC
2948 || GET_CODE (XEXP (x, 0)) == PRE_INC
2949 || GET_CODE (XEXP (x, 0)) == POST_DEC
2950 || GET_CODE (XEXP (x, 0)) == POST_INC)
2951 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx))
2952 current_function_sp_is_unchanging = 0;
2955 static void
2956 notice_stack_pointer_modification (f)
2957 rtx f;
2959 rtx insn;
2961 /* Assume that the stack pointer is unchanging if alloca hasn't
2962 been used. */
2963 current_function_sp_is_unchanging = !current_function_calls_alloca;
2964 if (! current_function_sp_is_unchanging)
2965 return;
2967 for (insn = f; insn; insn = NEXT_INSN (insn))
2969 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2971 /* Check if insn modifies the stack pointer. */
2972 note_stores (PATTERN (insn), notice_stack_pointer_modification_1,
2973 NULL);
2974 if (! current_function_sp_is_unchanging)
2975 return;
2980 /* Mark a register in SET. Hard registers in large modes get all
2981 of their component registers set as well. */
2982 static void
2983 mark_reg (reg, xset)
2984 rtx reg;
2985 void *xset;
2987 regset set = (regset) xset;
2988 int regno = REGNO (reg);
2990 if (GET_MODE (reg) == BLKmode)
2991 abort ();
2993 SET_REGNO_REG_SET (set, regno);
2994 if (regno < FIRST_PSEUDO_REGISTER)
2996 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
2997 while (--n > 0)
2998 SET_REGNO_REG_SET (set, regno + n);
3002 /* Mark those regs which are needed at the end of the function as live
3003 at the end of the last basic block. */
3004 static void
3005 mark_regs_live_at_end (set)
3006 regset set;
3008 int i;
3010 /* If exiting needs the right stack value, consider the stack pointer
3011 live at the end of the function. */
3012 if ((HAVE_epilogue && reload_completed)
3013 || ! EXIT_IGNORE_STACK
3014 || (! FRAME_POINTER_REQUIRED
3015 && ! current_function_calls_alloca
3016 && flag_omit_frame_pointer)
3017 || current_function_sp_is_unchanging)
3019 SET_REGNO_REG_SET (set, STACK_POINTER_REGNUM);
3022 /* Mark the frame pointer if needed at the end of the function. If
3023 we end up eliminating it, it will be removed from the live list
3024 of each basic block by reload. */
3026 if (! reload_completed || frame_pointer_needed)
3028 SET_REGNO_REG_SET (set, FRAME_POINTER_REGNUM);
3029 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3031 /* If they are different, also mark the hard frame pointer as live. */
3031 SET_REGNO_REG_SET (set, HARD_FRAME_POINTER_REGNUM);
3032 #endif
3035 #ifdef PIC_OFFSET_TABLE_REGNUM
3036 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
3037 /* Many architectures have a GP register even without flag_pic.
3038 Assume the pic register is not in use, or will be handled by
3039 other means, if it is not fixed. */
3040 if (fixed_regs[PIC_OFFSET_TABLE_REGNUM])
3041 SET_REGNO_REG_SET (set, PIC_OFFSET_TABLE_REGNUM);
3042 #endif
3043 #endif
3045 /* Mark all global registers, and all registers used by the epilogue
3046 as being live at the end of the function since they may be
3047 referenced by our caller. */
3048 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3049 if (global_regs[i]
3050 #ifdef EPILOGUE_USES
3051 || EPILOGUE_USES (i)
3052 #endif
3054 SET_REGNO_REG_SET (set, i);
3056 /* Mark all call-saved registers that we actually used. */
3057 if (HAVE_epilogue && reload_completed)
3059 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3060 if (! call_used_regs[i] && regs_ever_live[i])
3061 SET_REGNO_REG_SET (set, i);
3064 /* Mark function return value. */
3065 diddle_return_value (mark_reg, set);
3068 /* Callback function for for_each_successor_phi. DATA is a regset.
3069 Sets the SRC_REGNO, the regno of the phi alternative for phi node
3070 INSN, in the regset. */
3072 static int
3073 set_phi_alternative_reg (insn, dest_regno, src_regno, data)
3074 rtx insn ATTRIBUTE_UNUSED;
3075 int dest_regno ATTRIBUTE_UNUSED;
3076 int src_regno;
3077 void *data;
3079 regset live = (regset) data;
3080 SET_REGNO_REG_SET (live, src_regno);
3081 return 0;
3084 /* Propagate global life info around the graph of basic blocks. Begin
3085 considering blocks with their corresponding bit set in BLOCKS_IN.
3086 If BLOCKS_IN is null, consider it the universal set.
3088 BLOCKS_OUT is set for every block that was changed. */
3090 static void
3091 calculate_global_regs_live (blocks_in, blocks_out, flags)
3092 sbitmap blocks_in, blocks_out;
3093 int flags;
3095 basic_block *queue, *qhead, *qtail, *qend;
3096 regset tmp, new_live_at_end;
3097 regset_head tmp_head;
3098 regset_head new_live_at_end_head;
3099 int i;
3101 tmp = INITIALIZE_REG_SET (tmp_head);
3102 new_live_at_end = INITIALIZE_REG_SET (new_live_at_end_head);
3104 /* Create a worklist. Allocate an extra slot for ENTRY_BLOCK, and one
3105 more because the `head == tail' style test for an empty queue doesn't
3106 work with a full queue. */
3107 queue = (basic_block *) xmalloc ((n_basic_blocks + 2) * sizeof (*queue));
3108 qtail = queue;
3109 qhead = qend = queue + n_basic_blocks + 2;
3111 /* Clear out the garbage that might be hanging out in bb->aux. */
3112 for (i = n_basic_blocks - 1; i >= 0; --i)
3113 BASIC_BLOCK (i)->aux = NULL;
3115 /* Queue the blocks set in the initial mask. Do this in reverse block
3116 number order so that the first round is more likely to do
3117 useful work. We use AUX non-null to flag that the block is queued. */
3118 if (blocks_in)
3120 EXECUTE_IF_SET_IN_SBITMAP (blocks_in, 0, i,
3122 basic_block bb = BASIC_BLOCK (i);
3123 *--qhead = bb;
3124 bb->aux = bb;
3127 else
3129 for (i = 0; i < n_basic_blocks; ++i)
3131 basic_block bb = BASIC_BLOCK (i);
3132 *--qhead = bb;
3133 bb->aux = bb;
3137 if (blocks_out)
3138 sbitmap_zero (blocks_out);
3140 while (qhead != qtail)
3142 int rescan, changed;
3143 basic_block bb;
3144 edge e;
3146 bb = *qhead++;
3147 if (qhead == qend)
3148 qhead = queue;
3149 bb->aux = NULL;
3151 /* Begin by propagating live_at_start from the successor blocks. */
3152 CLEAR_REG_SET (new_live_at_end);
3153 for (e = bb->succ; e ; e = e->succ_next)
3155 basic_block sb = e->dest;
3156 IOR_REG_SET (new_live_at_end, sb->global_live_at_start);
3159 /* Force the stack pointer to be live -- which might not already be
3160 the case for blocks within infinite loops. */
3161 SET_REGNO_REG_SET (new_live_at_end, STACK_POINTER_REGNUM);
3163 /* Regs used in phi nodes are not included in
3164 global_live_at_start, since they are live only along a
3165 particular edge. Set those regs that are live because of a
3166 phi node alternative corresponding to this particular block. */
3167 if (in_ssa_form)
3168 for_each_successor_phi (bb, &set_phi_alternative_reg,
3169 new_live_at_end);
3171 if (bb == ENTRY_BLOCK_PTR)
3173 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3174 continue;
3177 /* On our first pass through this block, we'll go ahead and continue.
3178 Recognize the first pass by a NULL local_set. On subsequent passes, we
3179 get to skip out early if live_at_end wouldn't have changed. */
3181 if (bb->local_set == NULL)
3183 bb->local_set = OBSTACK_ALLOC_REG_SET (function_obstack);
3184 rescan = 1;
3186 else
3188 /* If any bits were removed from live_at_end, we'll have to
3189 rescan the block. This wouldn't be necessary if we had
3190 precalculated local_live, however with PROP_SCAN_DEAD_CODE
3191 local_live is really dependent on live_at_end. */
3192 CLEAR_REG_SET (tmp);
3193 rescan = bitmap_operation (tmp, bb->global_live_at_end,
3194 new_live_at_end, BITMAP_AND_COMPL);
3196 if (! rescan)
3198 /* Find the set of changed bits. Take this opportunity
3199 to early out if it turns out to be empty. */
3200 CLEAR_REG_SET (tmp);
3201 changed = bitmap_operation (tmp, bb->global_live_at_end,
3202 new_live_at_end, BITMAP_XOR);
3203 if (! changed)
3204 continue;
3206 /* If any of the changed bits overlap with local_set,
3207 we'll have to rescan the block. Detect overlap by
3208 the AND with ~local_set turning off bits. */
3209 rescan = bitmap_operation (tmp, tmp, bb->local_set,
3210 BITMAP_AND_COMPL);
3214 /* Let our caller know that BB changed enough to require its
3215 death notes updated. */
3216 if (blocks_out)
3217 SET_BIT (blocks_out, bb->index);
3219 if (! rescan)
3221 /* Add to live_at_start the set of all registers in
3222 new_live_at_end that aren't in the old live_at_end. */
3224 bitmap_operation (tmp, new_live_at_end, bb->global_live_at_end,
3225 BITMAP_AND_COMPL);
3226 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3228 changed = bitmap_operation (bb->global_live_at_start,
3229 bb->global_live_at_start,
3230 tmp, BITMAP_IOR);
3231 if (! changed)
3232 continue;
3234 else
3236 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3238 /* Rescan the block insn by insn to turn (a copy of) live_at_end
3239 into live_at_start. */
3240 propagate_block (bb, new_live_at_end, bb->local_set, flags);
3242 /* If live_at_start didn't change, no need to go farther. */
3243 if (REG_SET_EQUAL_P (bb->global_live_at_start, new_live_at_end))
3244 continue;
3246 COPY_REG_SET (bb->global_live_at_start, new_live_at_end);
3249 /* Queue all predecessors of BB so that we may re-examine
3250 their live_at_end. */
3251 for (e = bb->pred; e ; e = e->pred_next)
3253 basic_block pb = e->src;
3254 if (pb->aux == NULL)
3256 *qtail++ = pb;
3257 if (qtail == qend)
3258 qtail = queue;
3259 pb->aux = pb;
3264 FREE_REG_SET (tmp);
3265 FREE_REG_SET (new_live_at_end);
3267 if (blocks_out)
3269 EXECUTE_IF_SET_IN_SBITMAP (blocks_out, 0, i,
3271 basic_block bb = BASIC_BLOCK (i);
3272 FREE_REG_SET (bb->local_set);
3275 else
3277 for (i = n_basic_blocks - 1; i >= 0; --i)
3279 basic_block bb = BASIC_BLOCK (i);
3280 FREE_REG_SET (bb->local_set);
3284 free (queue);
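/* In conventional dataflow terms, the loop above solves the backward
   liveness equations

     live_at_end (B)   = UNION over successors S of live_at_start (S)
     live_at_start (B) = local_effects (B, live_at_end (B))

   to a fixed point, where local_effects is computed insn by insn via
   propagate_block, and a block's predecessors are requeued whenever its
   live_at_start grows.  The local_set early-out above is just a cache
   of which registers B's local_effects can possibly kill.  */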
3287 /* Subroutines of life analysis. */
3289 /* Allocate the permanent data structures that represent the results
3290 of life analysis. Not static since used also for stupid life analysis. */
3292 void
3293 allocate_bb_life_data ()
3295 register int i;
3297 for (i = 0; i < n_basic_blocks; i++)
3299 basic_block bb = BASIC_BLOCK (i);
3301 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
3302 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);
3305 ENTRY_BLOCK_PTR->global_live_at_end
3306 = OBSTACK_ALLOC_REG_SET (function_obstack);
3307 EXIT_BLOCK_PTR->global_live_at_start
3308 = OBSTACK_ALLOC_REG_SET (function_obstack);
3310 regs_live_at_setjmp = OBSTACK_ALLOC_REG_SET (function_obstack);
3313 void
3314 allocate_reg_life_data ()
3316 int i;
3318 max_regno = max_reg_num ();
3320 /* Recalculate the register space, in case it has grown. Old style
3321 vector oriented regsets would set regset_{size,bytes} here also. */
3322 allocate_reg_info (max_regno, FALSE, FALSE);
3324 /* Reset all the data we'll collect in propagate_block and its
3325 subroutines. */
3326 for (i = 0; i < max_regno; i++)
3328 REG_N_SETS (i) = 0;
3329 REG_N_REFS (i) = 0;
3330 REG_N_DEATHS (i) = 0;
3331 REG_N_CALLS_CROSSED (i) = 0;
3332 REG_LIVE_LENGTH (i) = 0;
3333 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
3337 /* Delete dead instructions for propagate_block. */
3339 static void
3340 propagate_block_delete_insn (bb, insn)
3341 basic_block bb;
3342 rtx insn;
3344 rtx inote = find_reg_note (insn, REG_LABEL, NULL_RTX);
3346 /* If the insn referred to a label, and that label was attached to
3347 an ADDR_VEC, it's safe to delete the ADDR_VEC. In fact, it's
3348 pretty much mandatory to delete it, because the ADDR_VEC may be
3349 referencing labels that no longer exist. */
3351 if (inote)
3353 rtx label = XEXP (inote, 0);
3354 rtx next;
3356 if (LABEL_NUSES (label) == 1
3357 && (next = next_nonnote_insn (label)) != NULL
3358 && GET_CODE (next) == JUMP_INSN
3359 && (GET_CODE (PATTERN (next)) == ADDR_VEC
3360 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
3362 rtx pat = PATTERN (next);
3363 int diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
3364 int len = XVECLEN (pat, diff_vec_p);
3365 int i;
3367 for (i = 0; i < len; i++)
3368 LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0))--;
3370 flow_delete_insn (next);
3374 if (bb->end == insn)
3375 bb->end = PREV_INSN (insn);
3376 flow_delete_insn (insn);
3379 /* Delete dead libcalls for propagate_block. Return the insn
3380 before the libcall. */
3382 static rtx
3383 propagate_block_delete_libcall (bb, insn, note)
3384 basic_block bb;
3385 rtx insn, note;
3387 rtx first = XEXP (note, 0);
3388 rtx before = PREV_INSN (first);
3390 if (insn == bb->end)
3391 bb->end = before;
3393 flow_delete_insn_chain (first, insn);
3394 return before;
3397 /* Update the life-status of regs for one insn. Return the previous insn. */
3399 rtx
3400 propagate_one_insn (pbi, insn)
3401 struct propagate_block_info *pbi;
3402 rtx insn;
3404 rtx prev = PREV_INSN (insn);
3405 int flags = pbi->flags;
3406 int insn_is_dead = 0;
3407 int libcall_is_dead = 0;
3408 rtx note;
3409 int i;
3411 if (! INSN_P (insn))
3412 return prev;
3414 note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
3415 if (flags & PROP_SCAN_DEAD_CODE)
3417 insn_is_dead = insn_dead_p (pbi, PATTERN (insn), 0,
3418 REG_NOTES (insn));
3419 libcall_is_dead = (insn_is_dead && note != 0
3420 && libcall_dead_p (pbi, PATTERN (insn),
3421 note, insn));
3424 /* We almost certainly don't want to delete prologue or epilogue
3425 instructions. Warn about probable compiler lossage. */
3426 if (insn_is_dead
3427 && reload_completed
3428 && (((HAVE_epilogue || HAVE_prologue)
3429 && prologue_epilogue_contains (insn))
3430 || (HAVE_sibcall_epilogue
3431 && sibcall_epilogue_contains (insn))))
3433 if (flags & PROP_KILL_DEAD_CODE)
3435 warning ("ICE: would have deleted prologue/epilogue insn");
3436 if (!inhibit_warnings)
3437 debug_rtx (insn);
3439 libcall_is_dead = insn_is_dead = 0;
3442 /* If an instruction consists of just dead store(s) on final pass,
3443 delete it. */
3444 if ((flags & PROP_KILL_DEAD_CODE) && insn_is_dead)
3446 /* Record sets. Do this even for dead instructions, since they
3447 would have killed the values if they hadn't been deleted. */
3448 mark_set_regs (pbi, PATTERN (insn), insn);
3450 /* CC0 is now known to be dead. Either this insn used it,
3451 in which case it doesn't anymore, or clobbered it,
3452 so the next insn can't use it. */
3453 pbi->cc0_live = 0;
3455 if (libcall_is_dead)
3457 prev = propagate_block_delete_libcall (pbi->bb, insn, note);
3458 insn = NEXT_INSN (prev);
3460 else
3461 propagate_block_delete_insn (pbi->bb, insn);
3463 return prev;
3466 /* See if this is an increment or decrement that can be merged into
3467 a following memory address. */
3468 #ifdef AUTO_INC_DEC
3470 register rtx x = single_set (insn);
3472 /* Does this instruction increment or decrement a register? */
3473 if ((flags & PROP_AUTOINC)
3474 && x != 0
3475 && GET_CODE (SET_DEST (x)) == REG
3476 && (GET_CODE (SET_SRC (x)) == PLUS
3477 || GET_CODE (SET_SRC (x)) == MINUS)
3478 && XEXP (SET_SRC (x), 0) == SET_DEST (x)
3479 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
3480 /* Ok, look for a following memory ref we can combine with.
3481 If one is found, change the memory ref to a PRE_INC
3482 or PRE_DEC, cancel this insn, and return 1.
3483 Return 0 if nothing has been done. */
3484 && try_pre_increment_1 (pbi, insn))
3485 return prev;
3487 #endif /* AUTO_INC_DEC */
3489 CLEAR_REG_SET (pbi->new_set);
3491 /* If this is not the final pass, and this insn is copying the value of
3492 a library call and it's dead, don't scan the insns that perform the
3493 library call, so that the call's arguments are not marked live. */
3494 if (libcall_is_dead)
3496 /* Record the death of the dest reg. */
3497 mark_set_regs (pbi, PATTERN (insn), insn);
3499 insn = XEXP (note, 0);
3500 return PREV_INSN (insn);
3502 else if (GET_CODE (PATTERN (insn)) == SET
3503 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
3504 && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
3505 && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx
3506 && GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == CONST_INT)
3507 /* We have an insn to pop a constant amount off the stack.
3508 (Such insns use PLUS regardless of the direction of the stack,
3509 and any insn to adjust the stack by a constant is always a pop.)
3510 These insns, if not dead stores, have no effect on life. */
3512 else
3514 /* Any regs live at the time of a call instruction must not go
3515 in a register clobbered by calls. Find all regs now live and
3516 record this for them. */
3518 if (GET_CODE (insn) == CALL_INSN && (flags & PROP_REG_INFO))
3519 EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i,
3520 { REG_N_CALLS_CROSSED (i)++; });
3522 /* Record sets. Do this even for dead instructions, since they
3523 would have killed the values if they hadn't been deleted. */
3524 mark_set_regs (pbi, PATTERN (insn), insn);
3526 if (GET_CODE (insn) == CALL_INSN)
3528 register int i;
3529 rtx note, cond;
3531 cond = NULL_RTX;
3532 if (GET_CODE (PATTERN (insn)) == COND_EXEC)
3533 cond = COND_EXEC_TEST (PATTERN (insn));
3535 /* Non-constant calls clobber memory. */
3536 if (! CONST_CALL_P (insn))
3537 free_EXPR_LIST_list (&pbi->mem_set_list);
3539 /* There may be extra registers to be clobbered. */
3540 for (note = CALL_INSN_FUNCTION_USAGE (insn);
3541 note;
3542 note = XEXP (note, 1))
3543 if (GET_CODE (XEXP (note, 0)) == CLOBBER)
3544 mark_set_1 (pbi, CLOBBER, XEXP (XEXP (note, 0), 0),
3545 cond, insn, pbi->flags);
3547 /* Calls change all call-used and global registers. */
3548 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3549 if (call_used_regs[i] && ! global_regs[i]
3550 && ! fixed_regs[i])
3552 /* We do not want REG_UNUSED notes for these registers. */
3553 mark_set_1 (pbi, CLOBBER, gen_rtx_REG (reg_raw_mode[i], i),
3554 cond, insn,
3555 pbi->flags & ~(PROP_DEATH_NOTES | PROP_REG_INFO));
3559 /* If an insn doesn't use CC0, it becomes dead since we assume
3560 that every insn clobbers it. So show it dead here;
3561 mark_used_regs will set it live if it is referenced. */
3562 pbi->cc0_live = 0;
3564 /* Record uses. */
3565 if (! insn_is_dead)
3566 mark_used_regs (pbi, PATTERN (insn), NULL_RTX, insn);
3568 /* Sometimes we may have inserted something before INSN (such as a move)
3569 when we make an auto-inc. So ensure we will scan those insns. */
3570 #ifdef AUTO_INC_DEC
3571 prev = PREV_INSN (insn);
3572 #endif
3574 if (! insn_is_dead && GET_CODE (insn) == CALL_INSN)
3576 register int i;
3577 rtx note, cond;
3579 cond = NULL_RTX;
3580 if (GET_CODE (PATTERN (insn)) == COND_EXEC)
3581 cond = COND_EXEC_TEST (PATTERN (insn));
3583 /* Calls use their arguments. */
3584 for (note = CALL_INSN_FUNCTION_USAGE (insn);
3585 note;
3586 note = XEXP (note, 1))
3587 if (GET_CODE (XEXP (note, 0)) == USE)
3588 mark_used_regs (pbi, XEXP (XEXP (note, 0), 0),
3589 cond, insn);
3591 /* The stack ptr is used (honorarily) by a CALL insn. */
3592 SET_REGNO_REG_SET (pbi->reg_live, STACK_POINTER_REGNUM);
3594 /* Calls may also reference any of the global registers,
3595 so they are made live. */
3596 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3597 if (global_regs[i])
3598 mark_used_reg (pbi, gen_rtx_REG (reg_raw_mode[i], i),
3599 cond, insn);
3603 /* On final pass, update counts of how many insns in which each reg
3604 is live. */
3605 if (flags & PROP_REG_INFO)
3606 EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i,
3607 { REG_LIVE_LENGTH (i)++; });
3609 return prev;
3612 /* Initialize a propagate_block_info struct for public consumption.
3613 Note that the structure itself is opaque outside this file, but that
3614 the user can use the regsets provided here. */
3616 struct propagate_block_info *
3617 init_propagate_block_info (bb, live, local_set, flags)
3618 basic_block bb;
3619 regset live;
3620 regset local_set;
3621 int flags;
3623 struct propagate_block_info *pbi = xmalloc (sizeof(*pbi));
3625 pbi->bb = bb;
3626 pbi->reg_live = live;
3627 pbi->mem_set_list = NULL_RTX;
3628 pbi->local_set = local_set;
3629 pbi->cc0_live = 0;
3630 pbi->flags = flags;
3632 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
3633 pbi->reg_next_use = (rtx *) xcalloc (max_reg_num (), sizeof (rtx));
3634 else
3635 pbi->reg_next_use = NULL;
3637 pbi->new_set = BITMAP_XMALLOC ();
3639 #ifdef HAVE_conditional_execution
3640 pbi->reg_cond_dead = splay_tree_new (splay_tree_compare_ints, NULL,
3641 free_reg_cond_life_info);
3642 pbi->reg_cond_reg = BITMAP_XMALLOC ();
3644 /* If this block ends in a conditional branch, for each register live
3645 from one side of the branch and not the other, record the register
3646 as conditionally dead. */
3647 if ((flags & (PROP_DEATH_NOTES | PROP_SCAN_DEAD_CODE))
3648 && GET_CODE (bb->end) == JUMP_INSN
3649 && any_condjump_p (bb->end))
3651 regset_head diff_head;
3652 regset diff = INITIALIZE_REG_SET (diff_head);
3653 basic_block bb_true, bb_false;
3654 rtx cond_true, cond_false;
3655 int i;
3657 /* Identify the successor blocks. */
3658 bb_true = bb->succ->dest;
3659 if (bb->succ->succ_next != NULL)
3661 bb_false = bb->succ->succ_next->dest;
3663 if (bb->succ->flags & EDGE_FALLTHRU)
3665 basic_block t = bb_false;
3666 bb_false = bb_true;
3667 bb_true = t;
3669 else if (! (bb->succ->succ_next->flags & EDGE_FALLTHRU))
3670 abort ();
3672 else
3674 /* This can happen with a conditional jump to the next insn. */
3675 if (JUMP_LABEL (bb->end) != bb_true->head)
3676 abort ();
3678 /* Simplest way to do nothing. */
3679 bb_false = bb_true;
3682 /* Extract the condition from the branch. */
3683 cond_true = XEXP (SET_SRC (PATTERN (bb->end)), 0);
3684 cond_false = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond_true)),
3685 GET_MODE (cond_true), XEXP (cond_true, 0),
3686 XEXP (cond_true, 1));
3687 if (GET_CODE (XEXP (SET_SRC (PATTERN (bb->end)), 1)) == PC)
3689 rtx t = cond_false;
3690 cond_false = cond_true;
3691 cond_true = t;
3694 /* Compute which registers lead different lives in the successors. */
3695 if (bitmap_operation (diff, bb_true->global_live_at_start,
3696 bb_false->global_live_at_start, BITMAP_XOR))
3698 if (GET_CODE (XEXP (cond_true, 0)) != REG)
3699 abort ();
3700 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond_true, 0)));
3702 /* For each such register, mark it conditionally dead. */
3703 EXECUTE_IF_SET_IN_REG_SET
3704 (diff, 0, i,
3706 struct reg_cond_life_info *rcli;
3707 rtx cond;
3709 rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
3711 if (REGNO_REG_SET_P (bb_true->global_live_at_start, i))
3712 cond = cond_false;
3713 else
3714 cond = cond_true;
3715 rcli->condition = alloc_EXPR_LIST (0, cond, NULL_RTX);
3717 splay_tree_insert (pbi->reg_cond_dead, i,
3718 (splay_tree_value) rcli);
3722 FREE_REG_SET (diff);
3724 #endif
3726 /* If this block has no successors, any stores to the frame that aren't
3727 used later in the block are dead. So make a pass over the block
3728 recording any such that are made and show them dead at the end. We do
3729 a very conservative and simple job here. */
3730 if ((flags & PROP_SCAN_DEAD_CODE)
3731 && (bb->succ == NULL
3732 || (bb->succ->succ_next == NULL
3733 && bb->succ->dest == EXIT_BLOCK_PTR)))
3735 rtx insn;
3736 for (insn = bb->end; insn != bb->head; insn = PREV_INSN (insn))
3737 if (GET_CODE (insn) == INSN
3738 && GET_CODE (PATTERN (insn)) == SET
3739 && GET_CODE (SET_DEST (PATTERN (insn))) == MEM)
3741 rtx mem = SET_DEST (PATTERN (insn));
3743 if (XEXP (mem, 0) == frame_pointer_rtx
3744 || (GET_CODE (XEXP (mem, 0)) == PLUS
3745 && XEXP (XEXP (mem, 0), 0) == frame_pointer_rtx
3746 && GET_CODE (XEXP (XEXP (mem, 0), 1)) == CONST_INT))
3747 pbi->mem_set_list = alloc_EXPR_LIST (0, mem, pbi->mem_set_list);
3751 return pbi;
3754 /* Release a propagate_block_info struct. */
3756 void
3757 free_propagate_block_info (pbi)
3758 struct propagate_block_info *pbi;
3760 free_EXPR_LIST_list (&pbi->mem_set_list);
3762 BITMAP_XFREE (pbi->new_set);
3764 #ifdef HAVE_conditional_execution
3765 splay_tree_delete (pbi->reg_cond_dead);
3766 BITMAP_XFREE (pbi->reg_cond_reg);
3767 #endif
3769 if (pbi->reg_next_use)
3770 free (pbi->reg_next_use);
3772 free (pbi);
3775 /* Compute the registers live at the beginning of a basic block BB from
3776 those live at the end.
3778 When called, REG_LIVE contains those live at the end. On return, it
3779 contains those live at the beginning.
3781 LOCAL_SET, if non-null, will be set with all registers killed by
3782 this basic block. */
3784 void
3785 propagate_block (bb, live, local_set, flags)
3786 basic_block bb;
3787 regset live;
3788 regset local_set;
3789 int flags;
3791 struct propagate_block_info *pbi;
3792 rtx insn, prev;
3794 pbi = init_propagate_block_info (bb, live, local_set, flags);
3796 if (flags & PROP_REG_INFO)
3798 register int i;
3800 /* Process the regs live at the end of the block.
3801 Mark them as not local to any one basic block. */
3802 EXECUTE_IF_SET_IN_REG_SET (live, 0, i,
3803 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
3806 /* Scan the block an insn at a time from end to beginning. */
3808 for (insn = bb->end; ; insn = prev)
3810 /* If this is a call to `setjmp' et al, warn if any
3811 non-volatile datum is live. */
3812 if ((flags & PROP_REG_INFO)
3813 && GET_CODE (insn) == NOTE
3814 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
3815 IOR_REG_SET (regs_live_at_setjmp, pbi->reg_live);
3817 prev = propagate_one_insn (pbi, insn);
3819 if (insn == bb->head)
3820 break;
3823 free_propagate_block_info (pbi);
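/* Typical call sequence, mirroring update_life_info above -- copy the
   live-at-end set into a scratch regset and let propagate_block turn it
   into the live-at-start set (sketch only):  */
#if 0
regset_head tmp_head;
regset tmp = INITIALIZE_REG_SET (tmp_head);

COPY_REG_SET (tmp, bb->global_live_at_end);
propagate_block (bb, tmp, (regset) NULL, flags);
/* TMP now holds the registers live at bb->head.  */
FREE_REG_SET (tmp);
#endif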
3826 /* Return 1 if X (the body of an insn, or part of it) is just dead stores
3827 (SET expressions whose destinations are registers dead after the insn).
3828 NEEDED is the regset that says which regs are alive after the insn.
3830 Unless CALL_OK is non-zero, an insn is needed if it contains a CALL.
3832 If X is the entire body of an insn, NOTES contains the reg notes
3833 pertaining to the insn. */
3835 static int
3836 insn_dead_p (pbi, x, call_ok, notes)
3837 struct propagate_block_info *pbi;
3838 rtx x;
3839 int call_ok;
3840 rtx notes ATTRIBUTE_UNUSED;
3842 enum rtx_code code = GET_CODE (x);
3844 #ifdef AUTO_INC_DEC
3845 /* If flow is invoked after reload, we must take existing AUTO_INC
3846 expressions into account. */
3847 if (reload_completed)
3849 for ( ; notes; notes = XEXP (notes, 1))
3851 if (REG_NOTE_KIND (notes) == REG_INC)
3853 int regno = REGNO (XEXP (notes, 0));
3855 /* Don't delete insns to set global regs. */
3856 if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
3857 || REGNO_REG_SET_P (pbi->reg_live, regno))
3858 return 0;
3862 #endif
3864 /* If setting something that's a reg or part of one,
3865 see if that register's altered value will be live. */
3867 if (code == SET)
3869 rtx r = SET_DEST (x);
3871 #ifdef HAVE_cc0
3872 if (GET_CODE (r) == CC0)
3873 return ! pbi->cc0_live;
3874 #endif
3876 /* A SET that is a subroutine call cannot be dead. */
3877 if (GET_CODE (SET_SRC (x)) == CALL)
3879 if (! call_ok)
3880 return 0;
3883 /* Don't eliminate loads from volatile memory or volatile asms. */
3884 else if (volatile_refs_p (SET_SRC (x)))
3885 return 0;
3887 if (GET_CODE (r) == MEM)
3889 rtx temp;
3891 if (MEM_VOLATILE_P (r))
3892 return 0;
3894 /* Walk the set of memory locations we are currently tracking
3895 and see if one is an identical match to this memory location.
3896 If so, this memory write is dead (remember, we're walking
3897 backwards from the end of the block to the start). */
3898 temp = pbi->mem_set_list;
3899 while (temp)
3901 if (rtx_equal_p (XEXP (temp, 0), r))
3902 return 1;
3903 temp = XEXP (temp, 1);
3906 else
3908 while (GET_CODE (r) == SUBREG
3909 || GET_CODE (r) == STRICT_LOW_PART
3910 || GET_CODE (r) == ZERO_EXTRACT)
3911 r = XEXP (r, 0);
3913 if (GET_CODE (r) == REG)
3915 int regno = REGNO (r);
3917 /* Obvious. */
3918 if (REGNO_REG_SET_P (pbi->reg_live, regno))
3919 return 0;
3921 /* If this is a hard register, verify that subsequent
3922 words are not needed. */
3923 if (regno < FIRST_PSEUDO_REGISTER)
3925 int n = HARD_REGNO_NREGS (regno, GET_MODE (r));
3927 while (--n > 0)
3928 if (REGNO_REG_SET_P (pbi->reg_live, regno+n))
3929 return 0;
3932 /* Don't delete insns to set global regs. */
3933 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
3934 return 0;
3936 /* Make sure insns to set the stack pointer aren't deleted. */
3937 if (regno == STACK_POINTER_REGNUM)
3938 return 0;
3940 /* Make sure insns to set the frame pointer aren't deleted. */
3941 if (regno == FRAME_POINTER_REGNUM
3942 && (! reload_completed || frame_pointer_needed))
3943 return 0;
3944 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3945 if (regno == HARD_FRAME_POINTER_REGNUM
3946 && (! reload_completed || frame_pointer_needed))
3947 return 0;
3948 #endif
3950 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3951 /* Make sure insns to set arg pointer are never deleted
3952 (if the arg pointer isn't fixed, there will be a USE
3953 for it, so we can treat it normally). */
3954 if (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
3955 return 0;
3956 #endif
3958 #ifdef PIC_OFFSET_TABLE_REGNUM
3959 /* Before reload, do not allow sets of the pic register
3960 to be deleted. Reload can insert references to
3961 constant pool memory anywhere in the function, making
3962 the PIC register live where it wasn't before. */
3963 if (regno == PIC_OFFSET_TABLE_REGNUM && fixed_regs[regno]
3964 && ! reload_completed)
3965 return 0;
3966 #endif
3968 /* Otherwise, the set is dead. */
3969 return 1;
3974 /* If performing several activities, insn is dead if each activity
3975 is individually dead. Also, CLOBBERs and USEs can be ignored; a
3976 CLOBBER or USE that's inside a PARALLEL doesn't make the insn
3977 worth keeping. */
3978 else if (code == PARALLEL)
3980 int i = XVECLEN (x, 0);
3982 for (i--; i >= 0; i--)
3983 if (GET_CODE (XVECEXP (x, 0, i)) != CLOBBER
3984 && GET_CODE (XVECEXP (x, 0, i)) != USE
3985 && ! insn_dead_p (pbi, XVECEXP (x, 0, i), call_ok, NULL_RTX))
3986 return 0;
3988 return 1;
3991 /* A CLOBBER of a pseudo-register that is dead serves no purpose. That
3992 is not necessarily true for hard registers. */
3993 else if (code == CLOBBER && GET_CODE (XEXP (x, 0)) == REG
3994 && REGNO (XEXP (x, 0)) >= FIRST_PSEUDO_REGISTER
3995 && ! REGNO_REG_SET_P (pbi->reg_live, REGNO (XEXP (x, 0))))
3996 return 1;
3998 /* We do not check other CLOBBER or USE here. An insn consisting of just
3999 a CLOBBER or just a USE should not be deleted. */
4000 return 0;
4003 /* If X is the pattern of the last insn in a libcall, and assuming X is dead,
4004 return 1 if the entire library call is dead.
4005 This is true if X copies a register (hard or pseudo)
4006 and if the hard return reg of the call insn is dead.
4007 (The caller should have tested the destination of X already for death.)
4009 If this insn doesn't just copy a register, then we don't
4010 have an ordinary libcall. In that case, cse could not have
4011 managed to substitute the source for the dest later on,
4012 so we can assume the libcall is dead.
4014 PBI holds the propagation state, including which regs are currently live.
4015 NOTE is the REG_RETVAL note of the insn. INSN is the insn itself. */
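/* A typical libcall block looks like this (register numbers are
   hypothetical, for illustration only):

       (insn ... (set (reg:SI 4) (reg:SI 100)) ...)     ; load the argument
       (call_insn ... (set (reg:SI 0) (call ...)) ...)  ; the library call
       (insn ... (set (reg:SI 101) (reg:SI 0)) ...)     ; X: copy hard return
                                                        ; reg, has REG_RETVAL

   If (reg:SI 101) is dead and the hard return reg (reg:SI 0) is dead
   after the copy, the entire block may be deleted.  */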
4017 static int
4018 libcall_dead_p (pbi, x, note, insn)
4019 struct propagate_block_info *pbi;
4020 rtx x;
4021 rtx note;
4022 rtx insn;
4024 register RTX_CODE code = GET_CODE (x);
4026 if (code == SET)
4028 register rtx r = SET_SRC (x);
4029 if (GET_CODE (r) == REG)
4031 rtx call = XEXP (note, 0);
4032 rtx call_pat;
4033 register int i;
4035 /* Find the call insn. */
4036 while (call != insn && GET_CODE (call) != CALL_INSN)
4037 call = NEXT_INSN (call);
4039 /* If there is none, do nothing special,
4040 since ordinary death handling can understand these insns. */
4041 if (call == insn)
4042 return 0;
4044 /* See if the hard reg holding the value is dead.
4045 If this is a PARALLEL, find the call within it. */
4046 call_pat = PATTERN (call);
4047 if (GET_CODE (call_pat) == PARALLEL)
4049 for (i = XVECLEN (call_pat, 0) - 1; i >= 0; i--)
4050 if (GET_CODE (XVECEXP (call_pat, 0, i)) == SET
4051 && GET_CODE (SET_SRC (XVECEXP (call_pat, 0, i))) == CALL)
4052 break;
4054 /* This may be a library call that is returning a value
4055 via invisible pointer. Do nothing special, since
4056 ordinary death handling can understand these insns. */
4057 if (i < 0)
4058 return 0;
4060 call_pat = XVECEXP (call_pat, 0, i);
4063 return insn_dead_p (pbi, call_pat, 1, REG_NOTES (call));
4066 return 1;
4069 /* Return 1 if register REGNO was used before it was set, i.e. if it is
4070 live at function entry. Don't count global register variables, variables
4071 in registers that can be used for function arg passing, or variables in
4072 fixed hard registers. */
4074 int
4075 regno_uninitialized (regno)
4076 int regno;
4078 if (n_basic_blocks == 0
4079 || (regno < FIRST_PSEUDO_REGISTER
4080 && (global_regs[regno]
4081 || fixed_regs[regno]
4082 || FUNCTION_ARG_REGNO_P (regno))))
4083 return 0;
4085 return REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno);
4088 /* 1 if register REGNO was alive at a place where `setjmp' was called
4089 and was set more than once or is an argument.
4090 Such regs may be clobbered by `longjmp'. */
4092 int
4093 regno_clobbered_at_setjmp (regno)
4094 int regno;
4096 if (n_basic_blocks == 0)
4097 return 0;
4099 return ((REG_N_SETS (regno) > 1
4100 || REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno))
4101 && REGNO_REG_SET_P (regs_live_at_setjmp, regno));
4104 /* INSN references memory, possibly using autoincrement addressing modes.
4105 Find any entries on the mem_set_list that need to be invalidated due
4106 to an address change. */
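/* For example (illustrative): if (mem (reg 100)) is on mem_set_list and
   this insn carries a REG_INC note for (reg 100), the recorded rtx no
   longer names the same location, so that entry must be removed.  */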
4108 static void
4109 invalidate_mems_from_autoinc (pbi, insn)
4110 struct propagate_block_info *pbi;
4111 rtx insn;
4113 rtx note;
4114 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
4116 if (REG_NOTE_KIND (note) == REG_INC)
4118 rtx temp = pbi->mem_set_list;
4119 rtx prev = NULL_RTX;
4120 rtx next;
4122 while (temp)
4124 next = XEXP (temp, 1);
4125 if (reg_overlap_mentioned_p (XEXP (note, 0), XEXP (temp, 0)))
4127 /* Splice temp out of list. */
4128 if (prev)
4129 XEXP (prev, 1) = next;
4130 else
4131 pbi->mem_set_list = next;
4132 free_EXPR_LIST_node (temp);
4134 else
4135 prev = temp;
4136 temp = next;
4142 /* Process the registers that are set within X. Mark them in PBI as
4143 set by this insn; the values they held are dead prior to this insn.
4145 If INSN is nonzero, it is the insn being processed.
4147 PBI->FLAGS is the set of operations to perform. */
4149 static void
4150 mark_set_regs (pbi, x, insn)
4151 struct propagate_block_info *pbi;
4152 rtx x, insn;
4154 rtx cond = NULL_RTX;
4155 rtx link;
4156 enum rtx_code code;
4158 if (insn)
4159 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
4161 if (REG_NOTE_KIND (link) == REG_INC)
4162 mark_set_1 (pbi, SET, XEXP (link, 0),
4163 (GET_CODE (x) == COND_EXEC
4164 ? COND_EXEC_TEST (x) : NULL_RTX),
4165 insn, pbi->flags);
4167 retry:
4168 switch (code = GET_CODE (x))
4170 case SET:
4171 case CLOBBER:
4172 mark_set_1 (pbi, code, SET_DEST (x), cond, insn, pbi->flags);
4173 return;
4175 case COND_EXEC:
4176 cond = COND_EXEC_TEST (x);
4177 x = COND_EXEC_CODE (x);
4178 goto retry;
4180 case PARALLEL:
4182 register int i;
4183 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
4185 rtx sub = XVECEXP (x, 0, i);
4186 switch (code = GET_CODE (sub))
4188 case COND_EXEC:
4189 if (cond != NULL_RTX)
4190 abort ();
4192 cond = COND_EXEC_TEST (sub);
4193 sub = COND_EXEC_CODE (sub);
4194 if (GET_CODE (sub) != SET && GET_CODE (sub) != CLOBBER)
4195 break;
4196 /* FALLTHRU */
4198 case SET:
4199 case CLOBBER:
4200 mark_set_1 (pbi, code, SET_DEST (sub), cond, insn, pbi->flags);
4201 break;
4203 default:
4204 break;
4207 break;
4210 default:
4211 break;
4215 /* Process a single SET rtx, X. */
4217 static void
4218 mark_set_1 (pbi, code, reg, cond, insn, flags)
4219 struct propagate_block_info *pbi;
4220 enum rtx_code code;
4221 rtx reg, cond, insn;
4222 int flags;
4224 int regno_first = -1, regno_last = -1;
4225 int not_dead = 0;
4226 int i;
4228 /* Some targets place small structures in registers for
4229 return values of functions. We have to detect this
4230 case specially here to get correct flow information. */
4231 if (GET_CODE (reg) == PARALLEL
4232 && GET_MODE (reg) == BLKmode)
4234 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
4235 mark_set_1 (pbi, code, XVECEXP (reg, 0, i), cond, insn, flags);
4236 return;
4239 /* Modifying just one hardware register of a multi-reg value or just a
4240 byte field of a register does not mean the value from before this insn
4241 is now dead. Of course, if it was dead after the insn, it is unused now. */
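/* For instance (an illustrative sketch):

       (set (strict_low_part (subreg:QI (reg:SI 100) 0)) (reg:QI 101))

   stores only the low byte, so (reg:SI 100) must remain live if it was
   live before; NOT_DEAD records that below.  */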
4243 switch (GET_CODE (reg))
4245 case ZERO_EXTRACT:
4246 case SIGN_EXTRACT:
4247 case STRICT_LOW_PART:
4248 /* ??? Assumes STRICT_LOW_PART not used on multi-word registers. */
4249 do
4250 reg = XEXP (reg, 0);
4251 while (GET_CODE (reg) == SUBREG
4252 || GET_CODE (reg) == ZERO_EXTRACT
4253 || GET_CODE (reg) == SIGN_EXTRACT
4254 || GET_CODE (reg) == STRICT_LOW_PART);
4255 if (GET_CODE (reg) == MEM)
4256 break;
4257 not_dead = REGNO_REG_SET_P (pbi->reg_live, REGNO (reg));
4258 /* FALLTHRU */
4260 case REG:
4261 regno_last = regno_first = REGNO (reg);
4262 if (regno_first < FIRST_PSEUDO_REGISTER)
4263 regno_last += HARD_REGNO_NREGS (regno_first, GET_MODE (reg)) - 1;
4264 break;
4266 case SUBREG:
4267 if (GET_CODE (SUBREG_REG (reg)) == REG)
4269 enum machine_mode outer_mode = GET_MODE (reg);
4270 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (reg));
4272 /* Identify the range of registers affected. This is moderately
4273 tricky for hard registers. See alter_subreg. */
4275 regno_last = regno_first = REGNO (SUBREG_REG (reg));
4276 if (regno_first < FIRST_PSEUDO_REGISTER)
4278 #ifdef ALTER_HARD_SUBREG
4279 regno_first = ALTER_HARD_SUBREG (outer_mode, SUBREG_WORD (reg),
4280 inner_mode, regno_first);
4281 #else
4282 regno_first += SUBREG_WORD (reg);
4283 #endif
4284 regno_last = (regno_first
4285 + HARD_REGNO_NREGS (regno_first, outer_mode) - 1);
4287 /* Since we've just adjusted the register number ranges, make
4288 sure REG matches. Otherwise some_was_live will be clear
4289 when it shouldn't have been, and we'll create incorrect
4290 REG_UNUSED notes. */
4291 reg = gen_rtx_REG (outer_mode, regno_first);
4293 else
4295 /* If the number of words in the subreg is less than the number
4296 of words in the full register, we have a well-defined partial
4297 set. Otherwise the high bits are undefined.
4299 This is only really applicable to pseudos, since we just took
4300 care of multi-word hard registers. */
4301 if (((GET_MODE_SIZE (outer_mode)
4302 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
4303 < ((GET_MODE_SIZE (inner_mode)
4304 + UNITS_PER_WORD - 1) / UNITS_PER_WORD))
4305 not_dead = REGNO_REG_SET_P (pbi->reg_live, regno_first);
4307 reg = SUBREG_REG (reg);
4310 else
4311 reg = SUBREG_REG (reg);
4312 break;
4314 default:
4315 break;
4318 /* If this set is a MEM, then it kills any aliased writes.
4319 If this set is a REG, then it kills any MEMs which use the reg. */
4320 if (flags & PROP_SCAN_DEAD_CODE)
4322 if (GET_CODE (reg) == MEM || GET_CODE (reg) == REG)
4324 rtx temp = pbi->mem_set_list;
4325 rtx prev = NULL_RTX;
4326 rtx next;
4328 while (temp)
4330 next = XEXP (temp, 1);
4331 if ((GET_CODE (reg) == MEM
4332 && output_dependence (XEXP (temp, 0), reg))
4333 || (GET_CODE (reg) == REG
4334 && reg_overlap_mentioned_p (reg, XEXP (temp, 0))))
4336 /* Splice this entry out of the list. */
4337 if (prev)
4338 XEXP (prev, 1) = next;
4339 else
4340 pbi->mem_set_list = next;
4341 free_EXPR_LIST_node (temp);
4343 else
4344 prev = temp;
4345 temp = next;
4349 /* If the memory reference has embedded side effects (autoincrement
4350 address modes), then we may need to kill some entries on the
4351 memory set list. */
4352 if (insn && GET_CODE (reg) == MEM)
4353 invalidate_mems_from_autoinc (pbi, insn);
4355 if (GET_CODE (reg) == MEM && ! side_effects_p (reg)
4356 /* ??? With more effort we could track conditional memory life. */
4357 && ! cond
4358 /* We do not know the size of a BLKmode store, so we do not track
4359 them for redundant store elimination. */
4360 && GET_MODE (reg) != BLKmode
4361 /* There are no REG_INC notes for SP, so we can't assume we'll see
4362 everything that invalidates it. To be safe, don't eliminate any
4363 stores through SP; none of them should be redundant anyway. */
4364 && ! reg_mentioned_p (stack_pointer_rtx, reg))
4365 pbi->mem_set_list = alloc_EXPR_LIST (0, reg, pbi->mem_set_list);
4368 if (GET_CODE (reg) == REG
4369 && ! (regno_first == FRAME_POINTER_REGNUM
4370 && (! reload_completed || frame_pointer_needed))
4371 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4372 && ! (regno_first == HARD_FRAME_POINTER_REGNUM
4373 && (! reload_completed || frame_pointer_needed))
4374 #endif
4375 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4376 && ! (regno_first == ARG_POINTER_REGNUM && fixed_regs[regno_first])
4377 #endif
4380 int some_was_live = 0, some_was_dead = 0;
4382 for (i = regno_first; i <= regno_last; ++i)
4384 int needed_regno = REGNO_REG_SET_P (pbi->reg_live, i);
4385 if (pbi->local_set)
4386 SET_REGNO_REG_SET (pbi->local_set, i);
4387 if (code != CLOBBER)
4388 SET_REGNO_REG_SET (pbi->new_set, i);
4390 some_was_live |= needed_regno;
4391 some_was_dead |= ! needed_regno;
4394 #ifdef HAVE_conditional_execution
4395 /* Consider conditional death in deciding that the register needs
4396 a death note. */
4397 if (some_was_live && ! not_dead
4398 /* The stack pointer is never dead. Well, not strictly true,
4399 but it's very difficult to tell from here. Hopefully
4400 combine_stack_adjustments will fix up the most egregious
4401 errors. */
4402 && regno_first != STACK_POINTER_REGNUM)
4404 for (i = regno_first; i <= regno_last; ++i)
4405 if (! mark_regno_cond_dead (pbi, i, cond))
4406 not_dead = 1;
4408 #endif
4410 /* Additional data to record if this is the final pass. */
4411 if (flags & (PROP_LOG_LINKS | PROP_REG_INFO
4412 | PROP_DEATH_NOTES | PROP_AUTOINC))
4414 register rtx y;
4415 register int blocknum = pbi->bb->index;
4417 y = NULL_RTX;
4418 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4420 y = pbi->reg_next_use[regno_first];
4422 /* The next use is no longer next, since a store intervenes. */
4423 for (i = regno_first; i <= regno_last; ++i)
4424 pbi->reg_next_use[i] = 0;
4427 if (flags & PROP_REG_INFO)
4429 for (i = regno_first; i <= regno_last; ++i)
4431 /* Count (weighted) references, stores, etc. This counts a
4432 register twice if it is modified, but that is correct. */
4433 REG_N_SETS (i) += 1;
4434 REG_N_REFS (i) += (optimize_size ? 1
4435 : pbi->bb->loop_depth + 1);
4437 /* The insns where a reg is live are normally counted
4438 elsewhere, but we want the count to include the insn
4439 where the reg is set, and the normal counting mechanism
4440 would not count it. */
4441 REG_LIVE_LENGTH (i) += 1;
4444 /* If this is a hard reg, record that this function uses the reg. */
4445 if (regno_first < FIRST_PSEUDO_REGISTER)
4447 for (i = regno_first; i <= regno_last; i++)
4448 regs_ever_live[i] = 1;
4450 else
4452 /* Keep track of which basic blocks each reg appears in. */
4453 if (REG_BASIC_BLOCK (regno_first) == REG_BLOCK_UNKNOWN)
4454 REG_BASIC_BLOCK (regno_first) = blocknum;
4455 else if (REG_BASIC_BLOCK (regno_first) != blocknum)
4456 REG_BASIC_BLOCK (regno_first) = REG_BLOCK_GLOBAL;
4460 if (! some_was_dead)
4462 if (flags & PROP_LOG_LINKS)
4464 /* Make a logical link from the next following insn
4465 that uses this register, back to this insn.
4466 The following insns have already been processed.
4468 We don't build a LOG_LINK for hard registers contained
4469 in ASM_OPERANDs. If these registers get replaced,
4470 we might wind up changing the semantics of the insn,
4471 even if reload can make what appear to be valid
4472 assignments later. */
4473 if (y && (BLOCK_NUM (y) == blocknum)
4474 && (regno_first >= FIRST_PSEUDO_REGISTER
4475 || asm_noperands (PATTERN (y)) < 0))
4476 LOG_LINKS (y) = alloc_INSN_LIST (insn, LOG_LINKS (y));
4479 else if (not_dead)
4480 ;
4481 else if (! some_was_live)
4483 if (flags & PROP_REG_INFO)
4484 REG_N_DEATHS (regno_first) += 1;
4486 if (flags & PROP_DEATH_NOTES)
4488 /* Note that dead stores have already been deleted
4489 when possible. If we get here, we have found a
4490 dead store that cannot be eliminated (because the
4491 same insn does something useful). Indicate this
4492 by marking the reg being set as dying here. */
4493 REG_NOTES (insn)
4494 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
4497 else
4499 if (flags & PROP_DEATH_NOTES)
4501 /* This is a case where we have a multi-word hard register
4502 and some, but not all, of the words of the register are
4503 needed in subsequent insns. Write REG_UNUSED notes
4504 for those parts that were not needed. This case should
4505 be rare. */
4507 for (i = regno_first; i <= regno_last; ++i)
4508 if (! REGNO_REG_SET_P (pbi->reg_live, i))
4509 REG_NOTES (insn)
4510 = alloc_EXPR_LIST (REG_UNUSED,
4511 gen_rtx_REG (reg_raw_mode[i], i),
4512 REG_NOTES (insn));
4517 /* Mark the register as being dead. */
4518 if (some_was_live
4519 && ! not_dead
4520 /* The stack pointer is never dead. Well, not strictly true,
4521 but it's very difficult to tell from here. Hopefully
4522 combine_stack_adjustments will fix up the most egregious
4523 errors. */
4524 && regno_first != STACK_POINTER_REGNUM)
4526 for (i = regno_first; i <= regno_last; ++i)
4527 CLEAR_REGNO_REG_SET (pbi->reg_live, i);
4530 else if (GET_CODE (reg) == REG)
4532 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4533 pbi->reg_next_use[regno_first] = 0;
4536 /* If this is the last pass and this is a SCRATCH, show it will be dying
4537 here and count it. */
4538 else if (GET_CODE (reg) == SCRATCH)
4540 if (flags & PROP_DEATH_NOTES)
4541 REG_NOTES (insn)
4542 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
4546 #ifdef HAVE_conditional_execution
4547 /* Mark REGNO conditionally dead. Return true if the register is
4548 now unconditionally dead. */
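/* For example (illustrative): a set under (eq (reg 70) (const_int 0)),
   combined during the backward scan with an earlier-recorded death
   condition (ne (reg 70) (const_int 0)), covers both values of the
   predicate; ior_reg_cond then yields const1_rtx and the register is
   unconditionally dead.  */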
4550 static int
4551 mark_regno_cond_dead (pbi, regno, cond)
4552 struct propagate_block_info *pbi;
4553 int regno;
4554 rtx cond;
4556 /* If this is a store to a predicate register, the value of the
4557 predicate is changing; we don't know whether the predicate as seen
4558 before is the same as that seen after. Flush all dependent
4559 conditions from reg_cond_dead. This will make all such
4560 conditionally live registers unconditionally live. */
4561 if (REGNO_REG_SET_P (pbi->reg_cond_reg, regno))
4562 flush_reg_cond_reg (pbi, regno);
4564 /* If this is an unconditional store, remove any conditional
4565 life that may have existed. */
4566 if (cond == NULL_RTX)
4567 splay_tree_remove (pbi->reg_cond_dead, regno);
4568 else
4570 splay_tree_node node;
4571 struct reg_cond_life_info *rcli;
4572 rtx ncond;
4574 /* Otherwise this is a conditional set. Record that fact.
4575 It may have been conditionally used, or there may be a
4576 subsequent set with a complementary condition. */
4578 node = splay_tree_lookup (pbi->reg_cond_dead, regno);
4579 if (node == NULL)
4581 /* The register was unconditionally live previously.
4582 Record the current condition as the condition under
4583 which it is dead. */
4584 rcli = (struct reg_cond_life_info *)
4585 xmalloc (sizeof (*rcli));
4586 rcli->condition = alloc_EXPR_LIST (0, cond, NULL_RTX);
4587 splay_tree_insert (pbi->reg_cond_dead, regno,
4588 (splay_tree_value) rcli);
4590 SET_REGNO_REG_SET (pbi->reg_cond_reg,
4591 REGNO (XEXP (cond, 0)));
4593 /* Not unconditionally dead. */
4594 return 0;
4596 else
4598 /* The register was conditionally live previously.
4599 Add the new condition to the old. */
4600 rcli = (struct reg_cond_life_info *) node->value;
4601 ncond = rcli->condition;
4602 ncond = ior_reg_cond (ncond, cond);
4604 /* If the register is now unconditionally dead,
4605 remove the entry in the splay_tree. */
4606 if (ncond == const1_rtx)
4607 splay_tree_remove (pbi->reg_cond_dead, regno);
4608 else
4610 rcli->condition = ncond;
4612 SET_REGNO_REG_SET (pbi->reg_cond_reg,
4613 REGNO (XEXP (cond, 0)));
4616 /* Not unconditionally dead. */
4616 return 0;
4621 return 1;
4624 /* Called from splay_tree_delete for pbi->reg_cond_life. */
4626 static void
4627 free_reg_cond_life_info (value)
4628 splay_tree_value value;
4630 struct reg_cond_life_info *rcli = (struct reg_cond_life_info *) value;
4631 free_EXPR_LIST_list (&rcli->condition);
4632 free (rcli);
4635 /* Helper function for flush_reg_cond_reg. */
4637 static int
4638 flush_reg_cond_reg_1 (node, data)
4639 splay_tree_node node;
4640 void *data;
4642 struct reg_cond_life_info *rcli;
4643 int *xdata = (int *) data;
4644 unsigned int regno = xdata[0];
4645 rtx c, *prev;
4647 /* Don't need to search if last flushed value was farther on in
4648 the in-order traversal. */
4649 if (xdata[1] >= (int) node->key)
4650 return 0;
4652 /* Splice out portions of the expression that refer to regno. */
4653 rcli = (struct reg_cond_life_info *) node->value;
4654 c = *(prev = &rcli->condition);
4655 while (c)
4657 if (regno == REGNO (XEXP (XEXP (c, 0), 0)))
4659 rtx next = XEXP (c, 1);
4660 free_EXPR_LIST_node (c);
4661 c = *prev = next;
4663 else
4664 c = *(prev = &XEXP (c, 1));
4667 /* If the entire condition is now NULL, signal the node to be removed. */
4668 if (! rcli->condition)
4670 xdata[1] = node->key;
4671 return -1;
4673 else
4674 return 0;
4677 /* Flush all (sub) expressions referring to REGNO from REG_COND_LIVE. */
4679 static void
4680 flush_reg_cond_reg (pbi, regno)
4681 struct propagate_block_info *pbi;
4682 int regno;
4684 int pair[2];
4686 pair[0] = regno;
4687 pair[1] = -1;
4688 while (splay_tree_foreach (pbi->reg_cond_dead,
4689 flush_reg_cond_reg_1, pair) == -1)
4690 splay_tree_remove (pbi->reg_cond_dead, pair[1]);
4692 CLEAR_REGNO_REG_SET (pbi->reg_cond_reg, regno);
4695 /* Logical arithmetic on predicate conditions. IOR, NOT and NAND.
4696 We actually use EXPR_LIST to chain the sub-expressions together
4697 instead of IOR because it's easier to manipulate and we have
4698 the lists.c functions to reuse nodes.
4700 Return a new rtl expression as appropriate. */
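/* For example (illustrative): the condition "r70 == 0 || r71 != 0" is
   chained as

       (expr_list (eq (reg 70) (const_int 0))
                  (expr_list (ne (reg 71) (const_int 0))
                             nil))

   and ior_reg_cond collapses the chain to const1_rtx as soon as a
   complementary pair makes the disjunction always true.  */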
4702 static rtx
4703 ior_reg_cond (old, x)
4704 rtx old, x;
4706 enum rtx_code x_code;
4707 rtx x_reg;
4708 rtx c;
4710 /* We expect these conditions to be of the form (eq reg 0). */
4711 x_code = GET_CODE (x);
4712 if (GET_RTX_CLASS (x_code) != '<'
4713 || GET_CODE (x_reg = XEXP (x, 0)) != REG
4714 || XEXP (x, 1) != const0_rtx)
4715 abort ();
4717 /* Search the expression for an existing sub-expression of X_REG. */
4718 for (c = old; c ; c = XEXP (c, 1))
4720 rtx y = XEXP (c, 0);
4721 if (REGNO (XEXP (y, 0)) == REGNO (x_reg))
4723 /* If we find X already present in OLD, we need do nothing. */
4724 if (GET_CODE (y) == x_code)
4725 return old;
4727 /* If we find X being the complement of a condition in OLD,
4728 then the entire condition is true. */
4729 if (GET_CODE (y) == reverse_condition (x_code))
4730 return const1_rtx;
4734 /* Otherwise just add to the chain. */
4735 return alloc_EXPR_LIST (0, x, old);
4738 static rtx
4739 not_reg_cond (x)
4740 rtx x;
4742 enum rtx_code x_code;
4743 rtx x_reg;
4745 /* We expect these conditions to be of the form (eq reg 0). */
4746 x_code = GET_CODE (x);
4747 if (GET_RTX_CLASS (x_code) != '<'
4748 || GET_CODE (x_reg = XEXP (x, 0)) != REG
4749 || XEXP (x, 1) != const0_rtx)
4750 abort ();
4752 return alloc_EXPR_LIST (0, gen_rtx_fmt_ee (reverse_condition (x_code),
4753 VOIDmode, x_reg, const0_rtx),
4754 NULL_RTX);
4757 static rtx
4758 nand_reg_cond (old, x)
4759 rtx old, x;
4761 enum rtx_code x_code;
4762 rtx x_reg;
4763 rtx c, *prev;
4765 /* We expect these conditions to be of the form (eq reg 0). */
4766 x_code = GET_CODE (x);
4767 if (GET_RTX_CLASS (x_code) != '<'
4768 || GET_CODE (x_reg = XEXP (x, 0)) != REG
4769 || XEXP (x, 1) != const0_rtx)
4770 abort ();
4772 /* Search the expression for an existing sub-expression of X_REG. */
4774 for (c = *(prev = &old); c ; c = *(prev = &XEXP (c, 1)))
4776 rtx y = XEXP (c, 0);
4777 if (REGNO (XEXP (y, 0)) == REGNO (x_reg))
4779 /* If we find X already present in OLD, then we need to
4780 splice it out. */
4781 if (GET_CODE (y) == x_code)
4783 *prev = XEXP (c, 1);
4784 free_EXPR_LIST_node (c);
4785 return old ? old : const0_rtx;
4788 /* If we find X being the complement of a condition in OLD,
4789 then we need do nothing. */
4790 if (GET_CODE (y) == reverse_condition (x_code))
4791 return old;
4795 /* Otherwise, by implication, the register in question is now live for
4796 the inverse of the condition X. */
4797 return alloc_EXPR_LIST (0, gen_rtx_fmt_ee (reverse_condition (x_code),
4798 VOIDmode, x_reg, const0_rtx),
4799 old);
4801 #endif /* HAVE_conditional_execution */
4803 #ifdef AUTO_INC_DEC
4805 /* X is a MEM found in INSN. See if we can convert it into an auto-increment
4806 reference. */
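/* For example (an illustrative sketch), on a target with
   HAVE_POST_INCREMENT:

       (set (reg:SI 3) (mem:SI (reg:SI 100)))
       ...
       (set (reg:SI 100) (plus:SI (reg:SI 100) (const_int 4)))

   can become

       (set (reg:SI 3) (mem:SI (post_inc:SI (reg:SI 100))))

   with the separate increment insn reduced to a deleted note.  */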
4808 static void
4809 find_auto_inc (pbi, x, insn)
4810 struct propagate_block_info *pbi;
4811 rtx x;
4812 rtx insn;
4814 rtx addr = XEXP (x, 0);
4815 HOST_WIDE_INT offset = 0;
4816 rtx set;
4818 /* Here we detect use of an index register which might be good for
4819 postincrement, postdecrement, preincrement, or predecrement. */
4821 if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
4822 offset = INTVAL (XEXP (addr, 1)), addr = XEXP (addr, 0);
4824 if (GET_CODE (addr) == REG)
4826 register rtx y;
4827 register int size = GET_MODE_SIZE (GET_MODE (x));
4828 rtx use;
4829 rtx incr;
4830 int regno = REGNO (addr);
4832 /* Is the next use an increment that might make auto-increment? */
4833 if ((incr = pbi->reg_next_use[regno]) != 0
4834 && (set = single_set (incr)) != 0
4835 && GET_CODE (set) == SET
4836 && BLOCK_NUM (incr) == BLOCK_NUM (insn)
4837 /* Can't add side effects to jumps; if reg is spilled and
4838 reloaded, there's no way to store back the altered value. */
4839 && GET_CODE (insn) != JUMP_INSN
4840 && (y = SET_SRC (set), GET_CODE (y) == PLUS)
4841 && XEXP (y, 0) == addr
4842 && GET_CODE (XEXP (y, 1)) == CONST_INT
4843 && ((HAVE_POST_INCREMENT
4844 && (INTVAL (XEXP (y, 1)) == size && offset == 0))
4845 || (HAVE_POST_DECREMENT
4846 && (INTVAL (XEXP (y, 1)) == - size && offset == 0))
4847 || (HAVE_PRE_INCREMENT
4848 && (INTVAL (XEXP (y, 1)) == size && offset == size))
4849 || (HAVE_PRE_DECREMENT
4850 && (INTVAL (XEXP (y, 1)) == - size && offset == - size)))
4851 /* Make sure this reg appears only once in this insn. */
4852 && (use = find_use_as_address (PATTERN (insn), addr, offset),
4853 use != 0 && use != (rtx) 1))
4855 rtx q = SET_DEST (set);
4856 enum rtx_code inc_code = (INTVAL (XEXP (y, 1)) == size
4857 ? (offset ? PRE_INC : POST_INC)
4858 : (offset ? PRE_DEC : POST_DEC));
4860 if (dead_or_set_p (incr, addr)
4861 /* Mustn't autoinc an eliminable register. */
4862 && (regno >= FIRST_PSEUDO_REGISTER
4863 || ! TEST_HARD_REG_BIT (elim_reg_set, regno)))
4865 /* This is the simple case. Try to make the auto-inc. If
4866 we can't, we are done. Otherwise, we will do any
4867 needed updates below. */
4868 if (! validate_change (insn, &XEXP (x, 0),
4869 gen_rtx_fmt_e (inc_code, Pmode, addr),
4870 0))
4871 return;
4873 else if (GET_CODE (q) == REG
4874 /* PREV_INSN used here to check the semi-open interval
4875 [insn,incr). */
4876 && ! reg_used_between_p (q, PREV_INSN (insn), incr)
4877 /* We must also check for sets of q as q may be
4878 a call clobbered hard register and there may
4879 be a call between PREV_INSN (insn) and incr. */
4880 && ! reg_set_between_p (q, PREV_INSN (insn), incr))
4882 /* We have *p followed sometime later by q = p+size.
4883 Both p and q must be live afterward,
4884 and q is not used between INSN and its assignment.
4885 Change it to q = p, ...*q..., q = q+size.
4886 Then fall into the usual case. */
4887 rtx insns, temp;
4889 start_sequence ();
4890 emit_move_insn (q, addr);
4891 insns = get_insns ();
4892 end_sequence ();
4894 if (basic_block_for_insn)
4895 for (temp = insns; temp; temp = NEXT_INSN (temp))
4896 set_block_for_insn (temp, pbi->bb);
4898 /* If we can't make the auto-inc, or can't make the
4899 replacement into Y, exit. There's no point in making
4900 the change below if we can't do the auto-inc and doing
4901 so is not correct in the pre-inc case. */
4903 validate_change (insn, &XEXP (x, 0),
4904 gen_rtx_fmt_e (inc_code, Pmode, q),
4905 1);
4906 validate_change (incr, &XEXP (y, 0), q, 1);
4907 if (! apply_change_group ())
4908 return;
4910 /* We now know we'll be doing this change, so emit the
4911 new insn(s) and do the updates. */
4912 emit_insns_before (insns, insn);
4914 if (pbi->bb->head == insn)
4915 pbi->bb->head = insns;
4917 /* INCR will become a NOTE and INSN won't contain a
4918 use of ADDR. If a use of ADDR was just placed in
4919 the insn before INSN, make that the next use.
4920 Otherwise, invalidate it. */
4921 if (GET_CODE (PREV_INSN (insn)) == INSN
4922 && GET_CODE (PATTERN (PREV_INSN (insn))) == SET
4923 && SET_SRC (PATTERN (PREV_INSN (insn))) == addr)
4924 pbi->reg_next_use[regno] = PREV_INSN (insn);
4925 else
4926 pbi->reg_next_use[regno] = 0;
4928 addr = q;
4929 regno = REGNO (q);
4931 /* REGNO is now used in INCR which is below INSN, but it
4932 previously wasn't live here. If we don't mark it as
4933 live, we'll put a REG_DEAD note for it on this insn,
4934 which is incorrect. */
4935 SET_REGNO_REG_SET (pbi->reg_live, regno);
4937 /* If there are any calls between INSN and INCR, show
4938 that REGNO now crosses them. */
4939 for (temp = insn; temp != incr; temp = NEXT_INSN (temp))
4940 if (GET_CODE (temp) == CALL_INSN)
4941 REG_N_CALLS_CROSSED (regno)++;
4943 else
4944 return;
4946 /* If we haven't returned, it means we were able to make the
4947 auto-inc, so update the status. First, record that this insn
4948 has an implicit side effect. */
4950 REG_NOTES (insn)
4951 = alloc_EXPR_LIST (REG_INC, addr, REG_NOTES (insn));
4953 /* Modify the old increment-insn to simply copy
4954 the already-incremented value of our register. */
4955 if (! validate_change (incr, &SET_SRC (set), addr, 0))
4956 abort ();
4958 /* If that makes it a no-op (copying the register into itself) delete
4959 it so it won't appear to be a "use" and a "set" of this
4960 register. */
4961 if (SET_DEST (set) == addr)
4963 /* If the original source was dead, it's dead now. */
4964 rtx note = find_reg_note (incr, REG_DEAD, NULL_RTX);
4965 if (note && XEXP (note, 0) != addr)
4966 CLEAR_REGNO_REG_SET (pbi->reg_live, REGNO (XEXP (note, 0)));
4968 PUT_CODE (incr, NOTE);
4969 NOTE_LINE_NUMBER (incr) = NOTE_INSN_DELETED;
4970 NOTE_SOURCE_FILE (incr) = 0;
4973 if (regno >= FIRST_PSEUDO_REGISTER)
4975 /* Count an extra reference to the reg. When a reg is
4976 incremented, spilling it is worse, so we want to make
4977 that less likely. */
4978 REG_N_REFS (regno) += (optimize_size ? 1
4979 : pbi->bb->loop_depth + 1);
4981 /* Count the increment as a setting of the register,
4982 even though it isn't a SET in rtl. */
4983 REG_N_SETS (regno)++;
4988 #endif /* AUTO_INC_DEC */
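/* Mark the register REG as used by INSN, under condition COND if the use
   is conditional. Update life information, next-use records, REG_DEAD
   notes and (weighted) reference counts as requested by PBI->FLAGS. */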
4990 static void
4991 mark_used_reg (pbi, reg, cond, insn)
4992 struct propagate_block_info *pbi;
4993 rtx reg;
4994 rtx cond ATTRIBUTE_UNUSED;
4995 rtx insn;
4997 int regno = REGNO (reg);
4998 int some_was_live = REGNO_REG_SET_P (pbi->reg_live, regno);
4999 int some_was_dead = ! some_was_live;
5000 int some_not_set;
5001 int n;
5003 /* A hard reg in a wide mode may really be multiple registers.
5004 If so, mark all of them just like the first. */
5005 if (regno < FIRST_PSEUDO_REGISTER)
5007 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
5008 while (--n > 0)
5010 int needed_regno = REGNO_REG_SET_P (pbi->reg_live, regno + n);
5011 some_was_live |= needed_regno;
5012 some_was_dead |= ! needed_regno;
5016 if (pbi->flags & (PROP_LOG_LINKS | PROP_AUTOINC))
5018 /* Record where each reg is used, so when the reg is set we know
5019 the next insn that uses it. */
5020 pbi->reg_next_use[regno] = insn;
5023 if (pbi->flags & PROP_REG_INFO)
5025 if (regno < FIRST_PSEUDO_REGISTER)
5027 /* If this is a register we are going to try to eliminate,
5028 don't mark it live here. If we are successful in
5029 eliminating it, it need not be live unless it is used for
5030 pseudos, in which case it will have been set live when it
5031 was allocated to the pseudos. If the register will not
5032 be eliminated, reload will set it live at that point.
5034 Otherwise, record that this function uses this register. */
5035 /* ??? The PPC backend tries to "eliminate" the pic
5036 register to itself. This should be fixed. In the
5037 meantime, hack around it. */
5039 if (! (TEST_HARD_REG_BIT (elim_reg_set, regno)
5040 && (regno == FRAME_POINTER_REGNUM
5041 || regno == ARG_POINTER_REGNUM)))
5043 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
5044 do
5045 regs_ever_live[regno + --n] = 1;
5046 while (n > 0);
5049 else
5051 /* Keep track of which basic block each reg appears in. */
5053 register int blocknum = pbi->bb->index;
5054 if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
5055 REG_BASIC_BLOCK (regno) = blocknum;
5056 else if (REG_BASIC_BLOCK (regno) != blocknum)
5057 REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
5059 /* Count (weighted) number of uses of each reg. */
5060 REG_N_REFS (regno) += (optimize_size ? 1
5061 : pbi->bb->loop_depth + 1);
5065 /* Find out if any of the register was set this insn. */
5066 some_not_set = ! REGNO_REG_SET_P (pbi->new_set, regno);
5067 if (regno < FIRST_PSEUDO_REGISTER)
5069 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
5070 while (--n > 0)
5071 some_not_set |= ! REGNO_REG_SET_P (pbi->new_set, regno + n);
5074 /* Record and count the insns in which a reg dies. If it is used in
5075 this insn and was dead below the insn then it dies in this insn.
5076 If it was set in this insn, we do not make a REG_DEAD note;
5077 likewise if we already made such a note. */
5078 if ((pbi->flags & (PROP_DEATH_NOTES | PROP_REG_INFO))
5079 && some_was_dead
5080 && some_not_set)
5082 /* Check for the case where the register dying partially
5083 overlaps the register set by this insn. */
5084 if (regno < FIRST_PSEUDO_REGISTER
5085 && HARD_REGNO_NREGS (regno, GET_MODE (reg)) > 1)
5087 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
5088 while (--n >= 0)
5089 some_was_live |= REGNO_REG_SET_P (pbi->new_set, regno + n);
5092 /* If none of the words in X is needed, make a REG_DEAD note.
5093 Otherwise, we must make partial REG_DEAD notes. */
5094 if (! some_was_live)
5096 if ((pbi->flags & PROP_DEATH_NOTES)
5097 && ! find_regno_note (insn, REG_DEAD, regno))
5098 REG_NOTES (insn)
5099 = alloc_EXPR_LIST (REG_DEAD, reg, REG_NOTES (insn));
5101 if (pbi->flags & PROP_REG_INFO)
5102 REG_N_DEATHS (regno)++;
5104 else
5106 /* Don't make a REG_DEAD note for a part of a register
5107 that is set in the insn. */
5109 n = regno + HARD_REGNO_NREGS (regno, GET_MODE (reg)) - 1;
5110 for (; n >= regno; n--)
5111 if (! REGNO_REG_SET_P (pbi->reg_live, n)
5112 && ! dead_or_set_regno_p (insn, n))
5113 REG_NOTES (insn)
5114 = alloc_EXPR_LIST (REG_DEAD,
5115 gen_rtx_REG (reg_raw_mode[n], n),
5116 REG_NOTES (insn));
5120 SET_REGNO_REG_SET (pbi->reg_live, regno);
5121 if (regno < FIRST_PSEUDO_REGISTER)
5123 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
5124 while (--n > 0)
5125 SET_REGNO_REG_SET (pbi->reg_live, regno + n);
5128 #ifdef HAVE_conditional_execution
5129 /* If this is a conditional use, record that fact. If it is later
5130 conditionally set, we'll know to kill the register. */
5131 if (cond != NULL_RTX)
5133 splay_tree_node node;
5134 struct reg_cond_life_info *rcli;
5135 rtx ncond;
5137 if (some_was_live)
5139 node = splay_tree_lookup (pbi->reg_cond_dead, regno);
5140 if (node == NULL)
5142 /* The register was unconditionally live previously.
5143 No need to do anything. */
5145 else
5147 /* The register was conditionally live previously.
5148 Subtract the new life cond from the old death cond. */
5149 rcli = (struct reg_cond_life_info *) node->value;
5150 ncond = rcli->condition;
5151 ncond = nand_reg_cond (ncond, cond);
5153 /* If the register is now unconditionally live, remove the
5154 entry in the splay_tree. */
5155 if (ncond == const0_rtx)
5157 rcli->condition = NULL_RTX;
5158 splay_tree_remove (pbi->reg_cond_dead, regno);
5160 else
5161 rcli->condition = ncond;
5164 else
5166 /* The register was not previously live at all. Record
5167 the condition under which it is still dead. */
5168 rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
5169 rcli->condition = not_reg_cond (cond);
5170 splay_tree_insert (pbi->reg_cond_dead, regno,
5171 (splay_tree_value) rcli);
5174 else if (some_was_live)
5176 splay_tree_node node;
5177 struct reg_cond_life_info *rcli;
5179 node = splay_tree_lookup (pbi->reg_cond_dead, regno);
5180 if (node != NULL)
5182 /* The register was conditionally live previously, but is now
5183 unconditionally so. Remove it from the conditionally dead
5184 list, so that a conditional set won't cause us to think
5185 it dead. */
5186 rcli = (struct reg_cond_life_info *) node->value;
5187 rcli->condition = NULL_RTX;
5188 splay_tree_remove (pbi->reg_cond_dead, regno);
5192 #endif
5195 /* Scan expression X and mark each register it uses as live in
5196 PBI->REG_LIVE, recording death notes and usage counts as dictated
5197 by PBI->FLAGS.
5199 INSN is the containing instruction. If INSN is dead, this function
5200 is not called. */
5202 static void
5203 mark_used_regs (pbi, x, cond, insn)
5204 struct propagate_block_info *pbi;
5205 rtx x, cond, insn;
5207 register RTX_CODE code;
5208 register int regno;
5209 int flags = pbi->flags;
5211 retry:
5212 code = GET_CODE (x);
5213 switch (code)
5215 case LABEL_REF:
5216 case SYMBOL_REF:
5217 case CONST_INT:
5218 case CONST:
5219 case CONST_DOUBLE:
5220 case PC:
5221 case ADDR_VEC:
5222 case ADDR_DIFF_VEC:
5223 return;
5225 #ifdef HAVE_cc0
5226 case CC0:
5227 pbi->cc0_live = 1;
5228 return;
5229 #endif
5231 case CLOBBER:
5232 /* If we are clobbering a MEM, mark any registers inside the address
5233 as being used. */
5234 if (GET_CODE (XEXP (x, 0)) == MEM)
5235 mark_used_regs (pbi, XEXP (XEXP (x, 0), 0), cond, insn);
5236 return;
5238 case MEM:
5239 /* Don't bother watching stores to mems if this is not the
5240 final pass. We'll not be deleting dead stores this round. */
5241 if (flags & PROP_SCAN_DEAD_CODE)
5243 /* Invalidate the data for the last MEM stored, but only if MEM is
5244 something that can be stored into. */
5245 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
5246 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
5247 ; /* needn't clear the memory set list */
5248 else
5250 rtx temp = pbi->mem_set_list;
5251 rtx prev = NULL_RTX;
5252 rtx next;
5254 while (temp)
5256 next = XEXP (temp, 1);
5257 if (anti_dependence (XEXP (temp, 0), x))
5259 /* Splice temp out of the list. */
5260 if (prev)
5261 XEXP (prev, 1) = next;
5262 else
5263 pbi->mem_set_list = next;
5264 free_EXPR_LIST_node (temp);
5266 else
5267 prev = temp;
5268 temp = next;
5272 /* If the memory reference has embedded side effects (autoincrement
5273 address modes), then we may need to kill some entries on the
5274 memory set list. */
5275 if (insn)
5276 invalidate_mems_from_autoinc (pbi, insn);
5279 #ifdef AUTO_INC_DEC
5280 if (flags & PROP_AUTOINC)
5281 find_auto_inc (pbi, x, insn);
5282 #endif
5283 break;
5285 case SUBREG:
5286 #ifdef CLASS_CANNOT_CHANGE_MODE
5287 if (GET_CODE (SUBREG_REG (x)) == REG
5288 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER
5289 && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (x),
5290 GET_MODE (SUBREG_REG (x))))
5291 REG_CHANGES_MODE (REGNO (SUBREG_REG (x))) = 1;
5292 #endif
5294 /* While we're here, optimize this case. */
5295 x = SUBREG_REG (x);
5296 if (GET_CODE (x) != REG)
5297 goto retry;
5298 /* FALLTHRU */
5300 case REG:
5301 /* A use of a register (rather than a set) => mark it as needed. */
5302 mark_used_reg (pbi, x, cond, insn);
5303 return;
5305 case SET:
5307 register rtx testreg = SET_DEST (x);
5308 int mark_dest = 0;
5310 /* If storing into MEM, don't show it as being used. But do
5311 show the address as being used. */
5312 if (GET_CODE (testreg) == MEM)
5314 #ifdef AUTO_INC_DEC
5315 if (flags & PROP_AUTOINC)
5316 find_auto_inc (pbi, testreg, insn);
5317 #endif
5318 mark_used_regs (pbi, XEXP (testreg, 0), cond, insn);
5319 mark_used_regs (pbi, SET_SRC (x), cond, insn);
5320 return;
5323 /* Storing in STRICT_LOW_PART is like storing in a reg
5324 in that this SET might be dead, so ignore it in TESTREG,
5325 but in some other ways it is like using the reg.
5327 Storing in a SUBREG or a bit field is like storing the entire
5328 register in that if the register's value is not used
5329 then this SET is not needed. */
5330 while (GET_CODE (testreg) == STRICT_LOW_PART
5331 || GET_CODE (testreg) == ZERO_EXTRACT
5332 || GET_CODE (testreg) == SIGN_EXTRACT
5333 || GET_CODE (testreg) == SUBREG)
5335 #ifdef CLASS_CANNOT_CHANGE_MODE
5336 if (GET_CODE (testreg) == SUBREG
5337 && GET_CODE (SUBREG_REG (testreg)) == REG
5338 && REGNO (SUBREG_REG (testreg)) >= FIRST_PSEUDO_REGISTER
5339 && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (SUBREG_REG (testreg)),
5340 GET_MODE (testreg)))
5341 REG_CHANGES_MODE (REGNO (SUBREG_REG (testreg))) = 1;
5342 #endif
5344 /* Modifying a single register in an alternate mode
5345 does not use any of the old value. But these other
5346 ways of storing in a register do use the old value. */
5347 if (GET_CODE (testreg) == SUBREG
5348 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
5350 else
5351 mark_dest = 1;
5353 testreg = XEXP (testreg, 0);
5356 /* If this is a store into a register, recursively scan the
5357 value being stored. */
5359 if ((GET_CODE (testreg) == PARALLEL
5360 && GET_MODE (testreg) == BLKmode)
5361 || (GET_CODE (testreg) == REG
5362 && (regno = REGNO (testreg),
5363 ! (regno == FRAME_POINTER_REGNUM
5364 && (! reload_completed || frame_pointer_needed)))
5365 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
5366 && ! (regno == HARD_FRAME_POINTER_REGNUM
5367 && (! reload_completed || frame_pointer_needed))
5368 #endif
5369 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
5370 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
5371 #endif
5374 if (mark_dest)
5375 mark_used_regs (pbi, SET_DEST (x), cond, insn);
5376 mark_used_regs (pbi, SET_SRC (x), cond, insn);
5377 return;
5380 break;
5382 case ASM_OPERANDS:
5383 case UNSPEC_VOLATILE:
5384 case TRAP_IF:
5385 case ASM_INPUT:
5387 /* Traditional and volatile asm instructions must be considered to use
5388 and clobber all hard registers, all pseudo-registers and all of
5389 memory. So must TRAP_IF and UNSPEC_VOLATILE operations.
5391 Consider for instance a volatile asm that changes the fpu rounding
5392 mode. An insn should not be moved across this even if it only uses
5393 pseudo-regs because it might give an incorrectly rounded result.
5395 ?!? Unfortunately, marking all hard registers as live causes massive
5396 problems for the register allocator and marking all pseudos as live
5397 creates mountains of uninitialized variable warnings.
5399 So for now, just clear the memory set list and mark any regs
5400 we can find in ASM_OPERANDS as used. */
5401 if (code != ASM_OPERANDS || MEM_VOLATILE_P (x))
5402 free_EXPR_LIST_list (&pbi->mem_set_list);
5404 /* For all ASM_OPERANDS, we must traverse the vector of input operands.
5405 We cannot just fall through here since then we would be confused
5406 by the ASM_INPUT rtx inside ASM_OPERANDS, which does not indicate
5407 a traditional asm, unlike its normal usage. */
5408 if (code == ASM_OPERANDS)
5410 int j;
5412 for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
5413 mark_used_regs (pbi, ASM_OPERANDS_INPUT (x, j), cond, insn);
5415 break;
5418 case COND_EXEC:
5419 if (cond != NULL_RTX)
5420 abort ();
5422 mark_used_regs (pbi, COND_EXEC_TEST (x), NULL_RTX, insn);
5424 cond = COND_EXEC_TEST (x);
5425 x = COND_EXEC_CODE (x);
5426 goto retry;
5428 case PHI:
5429 /* We _do_not_ want to scan operands of phi nodes. Operands of
5430 a phi function are evaluated only when control reaches this
5431 block along a particular edge. Therefore, regs that appear
5432 as arguments to phi should not be added to the global live at
5433 start. */
5434 return;
5436 default:
5437 break;
5440 /* Recursively scan the operands of this expression. */
5443 register const char *fmt = GET_RTX_FORMAT (code);
5444 register int i;
5446 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5448 if (fmt[i] == 'e')
5450 /* Tail recursive case: save a function call level. */
5451 if (i == 0)
5453 x = XEXP (x, 0);
5454 goto retry;
5456 mark_used_regs (pbi, XEXP (x, i), cond, insn);
5458 else if (fmt[i] == 'E')
5460 register int j;
5461 for (j = 0; j < XVECLEN (x, i); j++)
5462 mark_used_regs (pbi, XVECEXP (x, i, j), cond, insn);
5468 #ifdef AUTO_INC_DEC
5470 static int
5471 try_pre_increment_1 (pbi, insn)
5472 struct propagate_block_info *pbi;
5473 rtx insn;
5475 /* Find the next use of this reg. If in same basic block,
5476 make it do pre-increment or pre-decrement if appropriate. */
5477 rtx x = single_set (insn);
5478 HOST_WIDE_INT amount = ((GET_CODE (SET_SRC (x)) == PLUS ? 1 : -1)
5479 * INTVAL (XEXP (SET_SRC (x), 1)));
5480 int regno = REGNO (SET_DEST (x));
5481 rtx y = pbi->reg_next_use[regno];
5482 if (y != 0
5483 && BLOCK_NUM (y) == BLOCK_NUM (insn)
5484 /* Don't do this if the reg dies, or gets set in y; a standard addressing
5485 mode would be better. */
5486 && ! dead_or_set_p (y, SET_DEST (x))
5487 && try_pre_increment (y, SET_DEST (x), amount))
5489 /* We have found a suitable auto-increment
5490 and already changed insn Y to do it.
5491 So flush this increment-instruction. */
5492 PUT_CODE (insn, NOTE);
5493 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
5494 NOTE_SOURCE_FILE (insn) = 0;
5495 /* Count a reference to this reg for the increment
5496 insn we are deleting. When a reg is incremented,
5497 spilling it is worse, so we want to make that
5498 less likely. */
5499 if (regno >= FIRST_PSEUDO_REGISTER)
5501 REG_N_REFS (regno) += (optimize_size ? 1
5502 : pbi->bb->loop_depth + 1);
5503 REG_N_SETS (regno)++;
5505 return 1;
5507 return 0;
5510 /* Try to change INSN so that it does pre-increment or pre-decrement
5511 addressing on register REG in order to add AMOUNT to REG.
5512 AMOUNT is negative for pre-decrement.
5513 Returns 1 if the change could be made.
5514 This checks all about the validity of the result of modifying INSN. */
5516 static int
5517 try_pre_increment (insn, reg, amount)
5518 rtx insn, reg;
5519 HOST_WIDE_INT amount;
5521 register rtx use;
5523 /* Nonzero if we can try to make a pre-increment or pre-decrement.
5524 For example, addl $4,r1; movl (r1),... can become movl +(r1),... */
5525 int pre_ok = 0;
5526 /* Nonzero if we can try to make a post-increment or post-decrement.
5527 For example, addl $4,r1; movl -4(r1),... can become movl (r1)+,...
5528 It is possible for both PRE_OK and POST_OK to be nonzero if the machine
5529 supports both pre-inc and post-inc, or both pre-dec and post-dec. */
5530 int post_ok = 0;
5532 /* Nonzero if the opportunity actually requires post-inc or post-dec. */
5533 int do_post = 0;
5535 /* From the sign of increment, see which possibilities are conceivable
5536 on this target machine. */
5537 if (HAVE_PRE_INCREMENT && amount > 0)
5538 pre_ok = 1;
5539 if (HAVE_POST_INCREMENT && amount > 0)
5540 post_ok = 1;
5542 if (HAVE_PRE_DECREMENT && amount < 0)
5543 pre_ok = 1;
5544 if (HAVE_POST_DECREMENT && amount < 0)
5545 post_ok = 1;
5547 if (! (pre_ok || post_ok))
5548 return 0;
5550 /* It is not safe to add a side effect to a jump insn
5551 because if the incremented register is spilled and must be reloaded
5552 there would be no way to store the incremented value back in memory. */
5554 if (GET_CODE (insn) == JUMP_INSN)
5555 return 0;
5557 use = 0;
5558 if (pre_ok)
5559 use = find_use_as_address (PATTERN (insn), reg, 0);
5560 if (post_ok && (use == 0 || use == (rtx) 1))
5562 use = find_use_as_address (PATTERN (insn), reg, -amount);
5563 do_post = 1;
5566 if (use == 0 || use == (rtx) 1)
5567 return 0;
5569 if (GET_MODE_SIZE (GET_MODE (use)) != (amount > 0 ? amount : - amount))
5570 return 0;
5572 /* See if this combination of instruction and addressing mode exists. */
5573 if (! validate_change (insn, &XEXP (use, 0),
5574 gen_rtx_fmt_e (amount > 0
5575 ? (do_post ? POST_INC : PRE_INC)
5576 : (do_post ? POST_DEC : PRE_DEC),
5577 Pmode, reg), 0))
5578 return 0;
5580 /* Record that this insn now has an implicit side effect on X. */
5581 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, reg, REG_NOTES (insn));
5582 return 1;
5585 #endif /* AUTO_INC_DEC */
5587 /* Find the place in the rtx X where REG is used as a memory address.
5588 Return the MEM rtx that so uses it.
5589 If PLUSCONST is nonzero, search instead for a memory address equivalent to
5590 (plus REG (const_int PLUSCONST)).
5592 If such an address does not appear, return 0.
5593 If REG appears more than once, or is used other than in such an address,
5594 return (rtx)1. */
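/* For example (illustrative): with PLUSCONST == 4,
   find_use_as_address returns (mem (plus (reg) (const_int 4))) when that
   is the sole use of REG in X, and (rtx) 1 if REG also appears
   elsewhere in X.  */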
5596 rtx
5597 find_use_as_address (x, reg, plusconst)
5598 register rtx x;
5599 rtx reg;
5600 HOST_WIDE_INT plusconst;
5602 enum rtx_code code = GET_CODE (x);
5603 const char *fmt = GET_RTX_FORMAT (code);
5604 register int i;
5605 register rtx value = 0;
5606 register rtx tem;
5608 if (code == MEM && XEXP (x, 0) == reg && plusconst == 0)
5609 return x;
5611 if (code == MEM && GET_CODE (XEXP (x, 0)) == PLUS
5612 && XEXP (XEXP (x, 0), 0) == reg
5613 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
5614 && INTVAL (XEXP (XEXP (x, 0), 1)) == plusconst)
5615 return x;
5617 if (code == SIGN_EXTRACT || code == ZERO_EXTRACT)
5619 /* If REG occurs inside a MEM used in a bit-field reference,
5620 that is unacceptable. */
5621 if (find_use_as_address (XEXP (x, 0), reg, 0) != 0)
5622 return (rtx) (HOST_WIDE_INT) 1;
5625 if (x == reg)
5626 return (rtx) (HOST_WIDE_INT) 1;
5628 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5630 if (fmt[i] == 'e')
5632 tem = find_use_as_address (XEXP (x, i), reg, plusconst);
5633 if (value == 0)
5634 value = tem;
5635 else if (tem != 0)
5636 return (rtx) (HOST_WIDE_INT) 1;
5638 else if (fmt[i] == 'E')
5640 register int j;
5641 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
5643 tem = find_use_as_address (XVECEXP (x, i, j), reg, plusconst);
5644 if (value == 0)
5645 value = tem;
5646 else if (tem != 0)
5647 return (rtx) (HOST_WIDE_INT) 1;
5652 return value;
5655 /* Write information about registers and basic blocks into FILE.
5656 This is part of making a debugging dump. */
5658 void
5659 dump_regset (r, outf)
5660 regset r;
5661 FILE *outf;
5663 int i;
5664 if (r == NULL)
5666 fputs (" (nil)", outf);
5667 return;
5670 EXECUTE_IF_SET_IN_REG_SET (r, 0, i,
5672 fprintf (outf, " %d", i);
5673 if (i < FIRST_PSEUDO_REGISTER)
5674 fprintf (outf, " [%s]",
5675 reg_names[i]);
5679 void
5680 debug_regset (r)
5681 regset r;
5683 dump_regset (r, stderr);
5684 putc ('\n', stderr);
5687 void
5688 dump_flow_info (file)
5689 FILE *file;
5691 register int i;
5692 static const char * const reg_class_names[] = REG_CLASS_NAMES;
5694 fprintf (file, "%d registers.\n", max_regno);
5695 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
5696 if (REG_N_REFS (i))
5698 enum reg_class class, altclass;
5699 fprintf (file, "\nRegister %d used %d times across %d insns",
5700 i, REG_N_REFS (i), REG_LIVE_LENGTH (i));
5701 if (REG_BASIC_BLOCK (i) >= 0)
5702 fprintf (file, " in block %d", REG_BASIC_BLOCK (i));
5703 if (REG_N_SETS (i))
5704 fprintf (file, "; set %d time%s", REG_N_SETS (i),
5705 (REG_N_SETS (i) == 1) ? "" : "s");
5706 if (REG_USERVAR_P (regno_reg_rtx[i]))
5707 fprintf (file, "; user var");
5708 if (REG_N_DEATHS (i) != 1)
5709 fprintf (file, "; dies in %d places", REG_N_DEATHS (i));
5710 if (REG_N_CALLS_CROSSED (i) == 1)
5711 fprintf (file, "; crosses 1 call");
5712 else if (REG_N_CALLS_CROSSED (i))
5713 fprintf (file, "; crosses %d calls", REG_N_CALLS_CROSSED (i));
5714 if (PSEUDO_REGNO_BYTES (i) != UNITS_PER_WORD)
5715 fprintf (file, "; %d bytes", PSEUDO_REGNO_BYTES (i));
5716 class = reg_preferred_class (i);
5717 altclass = reg_alternate_class (i);
5718 if (class != GENERAL_REGS || altclass != ALL_REGS)
5720 if (altclass == ALL_REGS || class == ALL_REGS)
5721 fprintf (file, "; pref %s", reg_class_names[(int) class]);
5722 else if (altclass == NO_REGS)
5723 fprintf (file, "; %s or none", reg_class_names[(int) class]);
5724 else
5725 fprintf (file, "; pref %s, else %s",
5726 reg_class_names[(int) class],
5727 reg_class_names[(int) altclass]);
5729 if (REGNO_POINTER_FLAG (i))
5730 fprintf (file, "; pointer");
5731 fprintf (file, ".\n");
5734 fprintf (file, "\n%d basic blocks, %d edges.\n", n_basic_blocks, n_edges);
5735 for (i = 0; i < n_basic_blocks; i++)
5737 register basic_block bb = BASIC_BLOCK (i);
5738 register edge e;
5740 fprintf (file, "\nBasic block %d: first insn %d, last %d, loop_depth %d, count %d.\n",
5741 i, INSN_UID (bb->head), INSN_UID (bb->end), bb->loop_depth, bb->count);
5743 fprintf (file, "Predecessors: ");
5744 for (e = bb->pred; e ; e = e->pred_next)
5745 dump_edge_info (file, e, 0);
5747 fprintf (file, "\nSuccessors: ");
5748 for (e = bb->succ; e ; e = e->succ_next)
5749 dump_edge_info (file, e, 1);
5751 fprintf (file, "\nRegisters live at start:");
5752 dump_regset (bb->global_live_at_start, file);
5754 fprintf (file, "\nRegisters live at end:");
5755 dump_regset (bb->global_live_at_end, file);
5757 putc ('\n', file);
5760 putc ('\n', file);
5763 void
5764 debug_flow_info ()
5766 dump_flow_info (stderr);
5769 static void
5770 dump_edge_info (file, e, do_succ)
5771 FILE *file;
5772 edge e;
5773 int do_succ;
5775 basic_block side = (do_succ ? e->dest : e->src);
5777 if (side == ENTRY_BLOCK_PTR)
5778 fputs (" ENTRY", file);
5779 else if (side == EXIT_BLOCK_PTR)
5780 fputs (" EXIT", file);
5781 else
5782 fprintf (file, " %d", side->index);
5784 if (e->count)
5785 fprintf (file, " count:%d", e->count);
5787 if (e->flags)
5789 static const char * const bitnames[] = {
5790 "fallthru", "crit", "ab", "abcall", "eh", "fake"
5792 int comma = 0;
5793 int i, flags = e->flags;
5795 fputc (' ', file);
5796 fputc ('(', file);
5797 for (i = 0; flags; i++)
5798 if (flags & (1 << i))
5800 flags &= ~(1 << i);
5802 if (comma)
5803 fputc (',', file);
5804 if (i < (int)(sizeof (bitnames) / sizeof (*bitnames)))
5805 fputs (bitnames[i], file);
5806 else
5807 fprintf (file, "%d", i);
5808 comma = 1;
5810 fputc (')', file);
5815 /* Print out one basic block with live information at start and end. */
5816 void
5817 dump_bb (bb, outf)
5818 basic_block bb;
5819 FILE *outf;
5821 rtx insn;
5822 rtx last;
5823 edge e;
5825 fprintf (outf, ";; Basic block %d, loop depth %d, count %d",
5826 bb->index, bb->loop_depth, bb->count);
5827 if (bb->eh_beg != -1 || bb->eh_end != -1)
5828 fprintf (outf, ", eh regions %d/%d", bb->eh_beg, bb->eh_end);
5829 putc ('\n', outf);
5831 fputs (";; Predecessors: ", outf);
5832 for (e = bb->pred; e ; e = e->pred_next)
5833 dump_edge_info (outf, e, 0);
5834 putc ('\n', outf);
5836 fputs (";; Registers live at start:", outf);
5837 dump_regset (bb->global_live_at_start, outf);
5838 putc ('\n', outf);
5840 for (insn = bb->head, last = NEXT_INSN (bb->end);
5841 insn != last;
5842 insn = NEXT_INSN (insn))
5843 print_rtl_single (outf, insn);
5845 fputs (";; Registers live at end:", outf);
5846 dump_regset (bb->global_live_at_end, outf);
5847 putc ('\n', outf);
5849 fputs (";; Successors: ", outf);
5850 for (e = bb->succ; e; e = e->succ_next)
5851 dump_edge_info (outf, e, 1);
5852 putc ('\n', outf);
5855 void
5856 debug_bb (bb)
5857 basic_block bb;
5859 dump_bb (bb, stderr);
5862 void
5863 debug_bb_n (n)
5864 int n;
5866 dump_bb (BASIC_BLOCK (n), stderr);
5869 /* Like print_rtl, but also print out live information for the start of each
5870 basic block. */
5872 void
5873 print_rtl_with_bb (outf, rtx_first)
5874 FILE *outf;
5875 rtx rtx_first;
5877 register rtx tmp_rtx;
5879 if (rtx_first == 0)
5880 fprintf (outf, "(nil)\n");
5881 else
5883 int i;
5884 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
5885 int max_uid = get_max_uid ();
5886 basic_block *start = (basic_block *)
5887 xcalloc (max_uid, sizeof (basic_block));
5888 basic_block *end = (basic_block *)
5889 xcalloc (max_uid, sizeof (basic_block));
5890 enum bb_state *in_bb_p = (enum bb_state *)
5891 xcalloc (max_uid, sizeof (enum bb_state));
5893 for (i = n_basic_blocks - 1; i >= 0; i--)
5895 basic_block bb = BASIC_BLOCK (i);
5896 rtx x;
5898 start[INSN_UID (bb->head)] = bb;
5899 end[INSN_UID (bb->end)] = bb;
5900 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
5902 enum bb_state state = IN_MULTIPLE_BB;
5903 if (in_bb_p[INSN_UID(x)] == NOT_IN_BB)
5904 state = IN_ONE_BB;
5905 in_bb_p[INSN_UID(x)] = state;
5907 if (x == bb->end)
5908 break;
5912 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
5914 int did_output;
5915 basic_block bb;
5917 if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
5919 fprintf (outf, ";; Start of basic block %d, registers live:",
5920 bb->index);
5921 dump_regset (bb->global_live_at_start, outf);
5922 putc ('\n', outf);
5925 if (in_bb_p[INSN_UID(tmp_rtx)] == NOT_IN_BB
5926 && GET_CODE (tmp_rtx) != NOTE
5927 && GET_CODE (tmp_rtx) != BARRIER)
5928 fprintf (outf, ";; Insn is not within a basic block\n");
5929 else if (in_bb_p[INSN_UID(tmp_rtx)] == IN_MULTIPLE_BB)
5930 fprintf (outf, ";; Insn is in multiple basic blocks\n");
5932 did_output = print_rtl_single (outf, tmp_rtx);
5934 if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
5936 fprintf (outf, ";; End of basic block %d, registers live:\n",
5937 bb->index);
5938 dump_regset (bb->global_live_at_end, outf);
5939 putc ('\n', outf);
5942 if (did_output)
5943 putc ('\n', outf);
5946 free (start);
5947 free (end);
5948 free (in_bb_p);
5951 if (current_function_epilogue_delay_list != 0)
5953 fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
5954 for (tmp_rtx = current_function_epilogue_delay_list; tmp_rtx != 0;
5955 tmp_rtx = XEXP (tmp_rtx, 1))
5956 print_rtl_single (outf, XEXP (tmp_rtx, 0));
5960 /* Compute dominator relationships using new flow graph structures. */
5961 void
5962 compute_flow_dominators (dominators, post_dominators)
5963 sbitmap *dominators;
5964 sbitmap *post_dominators;
5966 int bb;
5967 sbitmap *temp_bitmap;
5968 edge e;
5969 basic_block *worklist, *workend, *qin, *qout;
5970 int qlen;
5972 /* Allocate a worklist array/queue. Entries are only added to the
5973 list if they were not already on the list. So the size is
5974 bounded by the number of basic blocks. */
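/* A note on the queue discipline used below: the worklist is managed as
   a circular buffer.  QIN is where new entries are written, QOUT is where
   they are read, and both wrap back to WORKLIST when they reach WORKEND.
   Since a block is only enqueued while its AUX field is clear, the queue
   can never hold more than n_basic_blocks entries at once.  */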
5975 worklist = (basic_block *) xmalloc (sizeof (basic_block) * n_basic_blocks);
5976 workend = &worklist[n_basic_blocks];
5978 temp_bitmap = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
5979 sbitmap_vector_zero (temp_bitmap, n_basic_blocks);
5981 if (dominators)
5983 /* The optimistic setting of dominators requires us to put every
5984 block on the work list initially. */
5985 qin = qout = worklist;
5986 for (bb = 0; bb < n_basic_blocks; bb++)
5988 *qin++ = BASIC_BLOCK (bb);
5989 BASIC_BLOCK (bb)->aux = BASIC_BLOCK (bb);
5991 qlen = n_basic_blocks;
5992 qin = worklist;
5994 /* We want a maximal solution, so initially assume everything dominates
5995 everything else. */
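/* In dataflow terms, the loop below computes the maximal fixed point of

     dom (b) = { b } union (intersection over p in pred (b) of dom (p))

   starting from the all-ones (optimistic) solution; blocks reached
   directly from ENTRY start from the empty intersection instead.  */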
5996 sbitmap_vector_ones (dominators, n_basic_blocks);
5998 /* Mark successors of the entry block so we can identify them below. */
5999 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6000 e->dest->aux = ENTRY_BLOCK_PTR;
6002 /* Iterate until the worklist is empty. */
6003 while (qlen)
6005 /* Take the first entry off the worklist. */
6006 basic_block b = *qout++;
6007 if (qout >= workend)
6008 qout = worklist;
6009 qlen--;
6011 bb = b->index;
6013 /* Compute the intersection of the dominators of all the
6014 predecessor blocks.
6016 If one of the predecessor blocks is the ENTRY block, then the
6017 intersection of the dominators of the predecessor blocks is
6018 defined as the null set. We can identify such blocks by the
6019 special value in the AUX field in the block structure. */
6020 if (b->aux == ENTRY_BLOCK_PTR)
6022 /* Do not clear the aux field for blocks which are
6023 successors of the ENTRY block. That way we never
6024 add them to the worklist again.
6026 The intersect of dominators of the preds of this block is
6027 defined as the null set. */
6028 sbitmap_zero (temp_bitmap[bb]);
6030 else
6032 /* Clear the aux field of this block so it can be added to
6033 the worklist again if necessary. */
6034 b->aux = NULL;
6035 sbitmap_intersection_of_preds (temp_bitmap[bb], dominators, bb);
6038 /* Make sure each block always dominates itself. */
6039 SET_BIT (temp_bitmap[bb], bb);
6041 /* If the out state of this block changed, then we need to
6042 add the successors of this block to the worklist if they
6043 are not already on the worklist. */
6044 if (sbitmap_a_and_b (dominators[bb], dominators[bb], temp_bitmap[bb]))
6046 for (e = b->succ; e; e = e->succ_next)
6048 if (!e->dest->aux && e->dest != EXIT_BLOCK_PTR)
6050 *qin++ = e->dest;
6051 if (qin >= workend)
6052 qin = worklist;
6053 qlen++;
6055 e->dest->aux = e;
6062 if (post_dominators)
6064 /* The optimistic setting of dominators requires us to put every
6065 block on the work list initially. */
6066 qin = qout = worklist;
6067 for (bb = 0; bb < n_basic_blocks; bb++)
6069 *qin++ = BASIC_BLOCK (bb);
6070 BASIC_BLOCK (bb)->aux = BASIC_BLOCK (bb);
6072 qlen = n_basic_blocks;
6073 qin = worklist;
6075 /* We want a maximal solution, so initially assume everything post
6076 dominates everything else. */
6077 sbitmap_vector_ones (post_dominators, n_basic_blocks);
6079 /* Mark predecessors of the exit block so we can identify them below. */
6080 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
6081 e->src->aux = EXIT_BLOCK_PTR;
6083 /* Iterate until the worklist is empty. */
6084 while (qlen)
6086 /* Take the first entry off the worklist. */
6087 basic_block b = *qout++;
6088 if (qout >= workend)
6089 qout = worklist;
6090 qlen--;
6092 bb = b->index;
6094 /* Compute the intersection of the post dominators of all the
6095 successor blocks.
6097 If one of the successor blocks is the EXIT block, then the
6098 intersection of the dominators of the successor blocks is
6099 defined as the null set. We can identify such blocks by the
6100 special value in the AUX field in the block structure. */
6101 if (b->aux == EXIT_BLOCK_PTR)
6103 /* Do not clear the aux field for blocks which are
6104 predecessors of the EXIT block. That way we never
6105 add them to the worklist again.
6107 The intersect of dominators of the succs of this block is
6108 defined as the null set. */
6109 sbitmap_zero (temp_bitmap[bb]);
6111 else
6113 /* Clear the aux field of this block so it can be added to
6114 the worklist again if necessary. */
6115 b->aux = NULL;
6116 sbitmap_intersection_of_succs (temp_bitmap[bb],
6117 post_dominators, bb);
6120 /* Make sure each block always post dominates itself. */
6121 SET_BIT (temp_bitmap[bb], bb);
6123 /* If the out state of this block changed, then we need to
6124 add the successors of this block to the worklist if they
6125 are not already on the worklist. */
6126 if (sbitmap_a_and_b (post_dominators[bb],
6127 post_dominators[bb],
6128 temp_bitmap[bb]))
6130 for (e = b->pred; e; e = e->pred_next)
6132 if (!e->src->aux && e->src != ENTRY_BLOCK_PTR)
6134 *qin++ = e->src;
6135 if (qin >= workend)
6136 qin = worklist;
6137 qlen++;
6139 e->src->aux = e;
6146 free (worklist);
6147 free (temp_bitmap);
6150 /* Given DOMINATORS, compute the immediate dominators into IDOM. */
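/* A small worked example of the subtraction step below (illustrative
   only): suppose dom (5) = { 0, 2, 5 } and dom (2) = { 0, 2 }.  After
   removing each block from its own set, tmp (5) = { 0, 2 } and
   tmp (2) = { 0 }; subtracting tmp (2) from tmp (5) leaves { 2 }, so
   idom (5) = 2.  */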
6152 void
6153 compute_immediate_dominators (idom, dominators)
6154 int *idom;
6155 sbitmap *dominators;
6157 sbitmap *tmp;
6158 int b;
6160 tmp = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
6162 /* Begin with tmp(n) = dom(n) - { n }. */
6163 for (b = n_basic_blocks; --b >= 0; )
6165 sbitmap_copy (tmp[b], dominators[b]);
6166 RESET_BIT (tmp[b], b);
6169 /* Subtract out all of our dominator's dominators. */
6170 for (b = n_basic_blocks; --b >= 0; )
6172 sbitmap tmp_b = tmp[b];
6173 int s;
6175 for (s = n_basic_blocks; --s >= 0; )
6176 if (TEST_BIT (tmp_b, s))
6177 sbitmap_difference (tmp_b, tmp_b, tmp[s]);
6180 /* Find the one bit set in the bitmap and put it in the output array. */
6181 for (b = n_basic_blocks; --b >= 0; )
6183 int t;
6184 EXECUTE_IF_SET_IN_SBITMAP (tmp[b], 0, t, { idom[b] = t; });
6187 sbitmap_vector_free (tmp);
6190 /* Recompute register set/reference counts immediately prior to register
6191 allocation.
6193 This avoids problems with set/reference counts changing to/from values
6194 which have special meanings to the register allocators.
6196 Additionally, the reference counts are the primary component used by the
6197 register allocators to prioritize pseudos for allocation to hard regs.
6198 More accurate reference counts generally lead to better register allocation.
6200 F is the first insn to be scanned.
6202 LOOP_STEP denotes how much loop_depth should be incremented per
6203 loop nesting level in order to increase the ref count more for
6204 references in a loop.
6206 It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and
6207 possibly other information which is used by the register allocators. */
6209 void
6210 recompute_reg_usage (f, loop_step)
6211 rtx f ATTRIBUTE_UNUSED;
6212 int loop_step ATTRIBUTE_UNUSED;
6214 allocate_reg_life_data ();
6215 update_life_info (NULL, UPDATE_LIFE_LOCAL, PROP_REG_INFO);
6218 /* Optionally removes all the REG_DEAD and REG_UNUSED notes from a set of
6219 blocks. If BLOCKS is NULL, assume the universal set. Returns a count
6220 of the number of registers that died. */
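/* Implementation note: when KILL is nonzero, the loop below splices a
   note out of the list in place.  PPREV always points at the link field
   that reached LINK, so "*pprev = link = next" simultaneously unlinks
   the freed note and advances the walk.  */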
6222 int
6223 count_or_remove_death_notes (blocks, kill)
6224 sbitmap blocks;
6225 int kill;
6227 int i, count = 0;
6229 for (i = n_basic_blocks - 1; i >= 0; --i)
6231 basic_block bb;
6232 rtx insn;
6234 if (blocks && ! TEST_BIT (blocks, i))
6235 continue;
6237 bb = BASIC_BLOCK (i);
6239 for (insn = bb->head; ; insn = NEXT_INSN (insn))
6241 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
6243 rtx *pprev = &REG_NOTES (insn);
6244 rtx link = *pprev;
6246 while (link)
6248 switch (REG_NOTE_KIND (link))
6250 case REG_DEAD:
6251 if (GET_CODE (XEXP (link, 0)) == REG)
6253 rtx reg = XEXP (link, 0);
6254 int n;
6256 if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
6257 n = 1;
6258 else
6259 n = HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg));
6260 count += n;
6262 /* FALLTHRU */
6264 case REG_UNUSED:
6265 if (kill)
6267 rtx next = XEXP (link, 1);
6268 free_EXPR_LIST_node (link);
6269 *pprev = link = next;
6270 break;
6272 /* FALLTHRU */
6274 default:
6275 pprev = &XEXP (link, 1);
6276 link = *pprev;
6277 break;
6282 if (insn == bb->end)
6283 break;
6287 return count;
6290 /* Record INSN's block as BB. */
6292 void
6293 set_block_for_insn (insn, bb)
6294 rtx insn;
6295 basic_block bb;
6297 size_t uid = INSN_UID (insn);
6298 if (uid >= basic_block_for_insn->num_elements)
6300 int new_size;
6302 /* Add one-eighth the size so we don't keep calling xrealloc. */
6303 new_size = uid + (uid + 7) / 8;
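/* For example, a uid of 800 grows the array to 800 + 100 = 900
   elements, i.e. roughly 1.125 times the requested index.  */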
6305 VARRAY_GROW (basic_block_for_insn, new_size);
6307 VARRAY_BB (basic_block_for_insn, uid) = bb;
6310 /* Record INSN's block number as BB. */
6311 /* ??? This has got to go. */
6313 void
6314 set_block_num (insn, bb)
6315 rtx insn;
6316 int bb;
6318 set_block_for_insn (insn, BASIC_BLOCK (bb));
6321 /* Verify the CFG consistency. This function checks some CFG invariants and
6322 aborts when something is wrong. The hope is that it will help to
6323 convert many optimization passes to preserve CFG consistency.
6325 Currently it does the following checks:
6327 - test head/end pointers
6328 - overlapping of basic blocks
6329 - edge list correctness
6330 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
6331 - tails of basic blocks (ensure that the boundary is necessary)
6332 - scans body of the basic block for JUMP_INSN, CODE_LABEL
6333 and NOTE_INSN_BASIC_BLOCK
6334 - check that all insns are in the basic blocks
6335 (except the switch handling code, barriers and notes)
6336 - check that all returns are followed by barriers
6338 In the future it can be extended to check a lot of other stuff as well
6339 (reachability of basic blocks, life information, etc. etc.). */
6341 void
6342 verify_flow_info ()
6344 const int max_uid = get_max_uid ();
6345 const rtx rtx_first = get_insns ();
6346 basic_block *bb_info;
6347 rtx x;
6348 int i, last_bb_num_seen, num_bb_notes, err = 0;
6350 bb_info = (basic_block *) xcalloc (max_uid, sizeof (basic_block));
6352 /* First pass check head/end pointers and set bb_info array used by
6353 later passes. */
6354 for (i = n_basic_blocks - 1; i >= 0; i--)
6356 basic_block bb = BASIC_BLOCK (i);
6358 /* Check the head pointer and make sure that it is pointing into
6359 insn list. */
6360 for (x = rtx_first; x != NULL_RTX; x = NEXT_INSN (x))
6361 if (x == bb->head)
6362 break;
6363 if (!x)
6365 error ("Head insn %d for block %d not found in the insn stream.",
6366 INSN_UID (bb->head), bb->index);
6367 err = 1;
6370 /* Check the end pointer and make sure that it is pointing into
6371 insn list. */
6372 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
6374 if (bb_info[INSN_UID (x)] != NULL)
6376 error ("Insn %d is in multiple basic blocks (%d and %d)",
6377 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
6378 err = 1;
6380 bb_info[INSN_UID (x)] = bb;
6382 if (x == bb->end)
6383 break;
6385 if (!x)
6387 error ("End insn %d for block %d not found in the insn stream.",
6388 INSN_UID (bb->end), bb->index);
6389 err = 1;
6393 /* Now check the basic blocks (boundaries etc.) */
6394 for (i = n_basic_blocks - 1; i >= 0; i--)
6396 basic_block bb = BASIC_BLOCK (i);
6397 /* Check correctness of edge lists. */
6398 edge e;
6400 e = bb->succ;
6401 while (e)
6403 if (e->src != bb)
6405 fprintf (stderr, "verify_flow_info: Basic block %d succ edge is corrupted\n",
6406 bb->index);
6407 fprintf (stderr, "Predecessor: ");
6408 dump_edge_info (stderr, e, 0);
6409 fprintf (stderr, "\nSuccessor: ");
6410 dump_edge_info (stderr, e, 1);
6411 fflush (stderr);
6412 err = 1;
6414 if (e->dest != EXIT_BLOCK_PTR)
6416 edge e2 = e->dest->pred;
6417 while (e2 && e2 != e)
6418 e2 = e2->pred_next;
6419 if (!e2)
6421 error ("Basic block %i edge lists are corrupted", bb->index);
6422 err = 1;
6425 e = e->succ_next;
6428 e = bb->pred;
6429 while (e)
6431 if (e->dest != bb)
6433 error ("Basic block %d pred edge is corrupted", bb->index);
6434 fputs ("Predecessor: ", stderr);
6435 dump_edge_info (stderr, e, 0);
6436 fputs ("\nSuccessor: ", stderr);
6437 dump_edge_info (stderr, e, 1);
6438 fputc ('\n', stderr);
6439 err = 1;
6441 if (e->src != ENTRY_BLOCK_PTR)
6443 edge e2 = e->src->succ;
6444 while (e2 && e2 != e)
6445 e2 = e2->succ_next;
6446 if (!e2)
6448 error ("Basic block %i edge lists are corrupted", bb->index);
6449 err = 1;
6452 e = e->pred_next;
6455 /* OK, pointers are correct. Now check the header of the basic
6456 block. It ought to contain an optional CODE_LABEL followed
6457 by a NOTE_BASIC_BLOCK note. */
6458 x = bb->head;
6459 if (GET_CODE (x) == CODE_LABEL)
6461 if (bb->end == x)
6463 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
6464 bb->index);
6465 err = 1;
6467 x = NEXT_INSN (x);
6469 if (GET_CODE (x) != NOTE
6470 || NOTE_LINE_NUMBER (x) != NOTE_INSN_BASIC_BLOCK
6471 || NOTE_BASIC_BLOCK (x) != bb)
6473 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d\n",
6474 bb->index);
6475 err = 1;
6478 if (bb->end == x)
6482 /* Do checks for empty blocks here. */
6482 else
6484 x = NEXT_INSN (x);
6485 while (x)
6487 if (GET_CODE (x) == NOTE
6488 && NOTE_LINE_NUMBER (x) == NOTE_INSN_BASIC_BLOCK)
6490 error ("NOTE_INSN_BASIC_BLOCK %d in the middle of basic block %d",
6491 INSN_UID (x), bb->index);
6492 err = 1;
6495 if (x == bb->end)
6496 break;
6498 if (GET_CODE (x) == JUMP_INSN
6499 || GET_CODE (x) == CODE_LABEL
6500 || GET_CODE (x) == BARRIER)
6502 error ("In basic block %d:", bb->index);
6503 fatal_insn ("Flow control insn inside a basic block", x);
6506 x = NEXT_INSN (x);
6511 last_bb_num_seen = -1;
6512 num_bb_notes = 0;
6513 x = rtx_first;
6514 while (x)
6516 if (GET_CODE (x) == NOTE
6517 && NOTE_LINE_NUMBER (x) == NOTE_INSN_BASIC_BLOCK)
6519 basic_block bb = NOTE_BASIC_BLOCK (x);
6520 num_bb_notes++;
6521 if (bb->index != last_bb_num_seen + 1)
6522 fatal ("Basic blocks not numbered consecutively");
6523 last_bb_num_seen = bb->index;
6526 if (!bb_info[INSN_UID (x)])
6528 switch (GET_CODE (x))
6530 case BARRIER:
6531 case NOTE:
6532 break;
6534 case CODE_LABEL:
6535 /* An addr_vec is placed outside any basic block. */
6536 if (NEXT_INSN (x)
6537 && GET_CODE (NEXT_INSN (x)) == JUMP_INSN
6538 && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
6539 || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC))
6541 x = NEXT_INSN (x);
6544 /* But in any case, non-deletable labels can appear anywhere. */
6545 break;
6547 default:
6548 fatal_insn ("Insn outside basic block", x);
6552 if (GET_RTX_CLASS (GET_CODE (x)) == 'i'
6553 && GET_CODE (x) == JUMP_INSN
6554 && returnjump_p (x) && ! condjump_p (x)
6555 && ! (NEXT_INSN (x) && GET_CODE (NEXT_INSN (x)) == BARRIER))
6556 fatal_insn ("Return not followed by barrier", x);
6558 x = NEXT_INSN (x);
6561 if (num_bb_notes != n_basic_blocks)
6562 fatal ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
6563 num_bb_notes, n_basic_blocks);
6565 if (err)
6566 abort ();
6568 /* Clean up. */
6569 free (bb_info);
6572 /* Functions to access an edge list with a vector representation.
6573 Enough data is kept such that given an index number, the
6574 pred and succ that the edge represents can be determined, or
6575 given a pred and a succ, its index number can be returned.
6576 This allows algorithms which consume a lot of memory to
6577 represent the normally full matrix of edge (pred,succ) with a
6578 single indexed vector, edge (EDGE_INDEX (pred, succ)), with no
6579 wasted space in the client code due to sparse flow graphs. */
6581 /* This function initializes the edge list. Basically the entire
6582 flow graph is processed, and all edges are assigned a number,
6583 and the data structure is filled in. */
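/* A hypothetical client of this interface (a sketch, not code from this
   file) would look something like:

     struct edge_list *elist = create_edge_list ();
     int ix = EDGE_INDEX (elist, some_pred_bb, some_succ_bb);
     if (ix != EDGE_INDEX_NO_EDGE)
       ... use INDEX_EDGE_PRED_BB / INDEX_EDGE_SUCC_BB (elist, ix) ...
     free_edge_list (elist);

   where some_pred_bb and some_succ_bb stand for whatever blocks the
   client cares about.  */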
6584 struct edge_list *
6585 create_edge_list ()
6587 struct edge_list *elist;
6588 edge e;
6589 int num_edges;
6590 int x;
6591 int block_count;
6593 block_count = n_basic_blocks + 2; /* Include the entry and exit blocks. */
6595 num_edges = 0;
6597 /* Determine the number of edges in the flow graph by counting successor
6598 edges on each basic block. */
6599 for (x = 0; x < n_basic_blocks; x++)
6601 basic_block bb = BASIC_BLOCK (x);
6603 for (e = bb->succ; e; e = e->succ_next)
6604 num_edges++;
6606 /* Don't forget successors of the entry block. */
6607 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6608 num_edges++;
6610 elist = (struct edge_list *) xmalloc (sizeof (struct edge_list));
6611 elist->num_blocks = block_count;
6612 elist->num_edges = num_edges;
6613 elist->index_to_edge = (edge *) xmalloc (sizeof (edge) * num_edges);
6615 num_edges = 0;
6617 /* Follow successors of the entry block, and register these edges. */
6618 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6620 elist->index_to_edge[num_edges] = e;
6621 num_edges++;
6624 for (x = 0; x < n_basic_blocks; x++)
6626 basic_block bb = BASIC_BLOCK (x);
6628 /* Follow all successors of blocks, and register these edges. */
6629 for (e = bb->succ; e; e = e->succ_next)
6631 elist->index_to_edge[num_edges] = e;
6632 num_edges++;
6635 return elist;
6638 /* This function frees the memory associated with an edge list. */
6639 void
6640 free_edge_list (elist)
6641 struct edge_list *elist;
6643 if (elist)
6645 free (elist->index_to_edge);
6646 free (elist);
6650 /* This function provides debug output showing an edge list. */
6651 void
6652 print_edge_list (f, elist)
6653 FILE *f;
6654 struct edge_list *elist;
6656 int x;
6657 fprintf (f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
6658 elist->num_blocks - 2, elist->num_edges);
6660 for (x = 0; x < elist->num_edges; x++)
6662 fprintf (f, " %-4d - edge(", x);
6663 if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR)
6664 fprintf (f,"entry,");
6665 else
6666 fprintf (f,"%d,", INDEX_EDGE_PRED_BB (elist, x)->index);
6668 if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR)
6669 fprintf (f,"exit)\n");
6670 else
6671 fprintf (f,"%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
6675 /* This function provides an internal consistency check of an edge list,
6676 verifying that all edges are present, and that there are no
6677 extra edges. */
6678 void
6679 verify_edge_list (f, elist)
6680 FILE *f;
6681 struct edge_list *elist;
6683 int x, pred, succ, index;
6684 edge e;
6686 for (x = 0; x < n_basic_blocks; x++)
6688 basic_block bb = BASIC_BLOCK (x);
6690 for (e = bb->succ; e; e = e->succ_next)
6692 pred = e->src->index;
6693 succ = e->dest->index;
6694 index = EDGE_INDEX (elist, e->src, e->dest);
6695 if (index == EDGE_INDEX_NO_EDGE)
6697 fprintf (f, "*p* No index for edge from %d to %d\n",pred, succ);
6698 continue;
6700 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
6701 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
6702 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
6703 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
6704 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
6705 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
6708 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6710 pred = e->src->index;
6711 succ = e->dest->index;
6712 index = EDGE_INDEX (elist, e->src, e->dest);
6713 if (index == EDGE_INDEX_NO_EDGE)
6715 fprintf (f, "*p* No index for edge from %d to %d\n",pred, succ);
6716 continue;
6718 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
6719 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
6720 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
6721 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
6722 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
6723 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
6725 /* We've verified that all the edges are in the list; now let's make sure
6726 there are no spurious edges in the list. */
6728 for (pred = 0 ; pred < n_basic_blocks; pred++)
6729 for (succ = 0 ; succ < n_basic_blocks; succ++)
6731 basic_block p = BASIC_BLOCK (pred);
6732 basic_block s = BASIC_BLOCK (succ);
6734 int found_edge = 0;
6736 for (e = p->succ; e; e = e->succ_next)
6737 if (e->dest == s)
6739 found_edge = 1;
6740 break;
6742 for (e = s->pred; e; e = e->pred_next)
6743 if (e->src == p)
6745 found_edge = 1;
6746 break;
6748 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
6749 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6750 fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
6751 pred, succ);
6752 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
6753 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6754 fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
6755 pred, succ, EDGE_INDEX (elist, BASIC_BLOCK (pred),
6756 BASIC_BLOCK (succ)));
6758 for (succ = 0 ; succ < n_basic_blocks; succ++)
6760 basic_block p = ENTRY_BLOCK_PTR;
6761 basic_block s = BASIC_BLOCK (succ);
6763 int found_edge = 0;
6765 for (e = p->succ; e; e = e->succ_next)
6766 if (e->dest == s)
6768 found_edge = 1;
6769 break;
6771 for (e = s->pred; e; e = e->pred_next)
6772 if (e->src == p)
6774 found_edge = 1;
6775 break;
6777 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
6778 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6779 fprintf (f, "*** Edge (entry, %d) appears to not have an index\n",
6780 succ);
6781 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
6782 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6783 fprintf (f, "*** Edge (entry, %d) has index %d, but no edge exists\n",
6784 succ, EDGE_INDEX (elist, ENTRY_BLOCK_PTR,
6785 BASIC_BLOCK (succ)));
6787 for (pred = 0 ; pred < n_basic_blocks; pred++)
6789 basic_block p = BASIC_BLOCK (pred);
6790 basic_block s = EXIT_BLOCK_PTR;
6792 int found_edge = 0;
6794 for (e = p->succ; e; e = e->succ_next)
6795 if (e->dest == s)
6797 found_edge = 1;
6798 break;
6800 for (e = s->pred; e; e = e->pred_next)
6801 if (e->src == p)
6803 found_edge = 1;
6804 break;
6806 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
6807 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6808 fprintf (f, "*** Edge (%d, exit) appears to not have an index\n",
6809 pred);
6810 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
6811 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6812 fprintf (f, "*** Edge (%d, exit) has index %d, but no edge exists\n",
6813 pred, EDGE_INDEX (elist, BASIC_BLOCK (pred),
6814 EXIT_BLOCK_PTR));
6818 /* This routine will determine what, if any, edge there is between
6819 a specified predecessor and successor. */
6821 int
6822 find_edge_index (edge_list, pred, succ)
6823 struct edge_list *edge_list;
6824 basic_block pred, succ;
6826 int x;
6827 for (x = 0; x < NUM_EDGES (edge_list); x++)
6829 if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
6830 && INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
6831 return x;
6833 return (EDGE_INDEX_NO_EDGE);
6836 /* This function will remove an edge from the flow graph. */
6837 void
6838 remove_edge (e)
6839 edge e;
6841 edge last_pred = NULL;
6842 edge last_succ = NULL;
6843 edge tmp;
6844 basic_block src, dest;
6845 src = e->src;
6846 dest = e->dest;
6847 for (tmp = src->succ; tmp && tmp != e; tmp = tmp->succ_next)
6848 last_succ = tmp;
6850 if (!tmp)
6851 abort ();
6852 if (last_succ)
6853 last_succ->succ_next = e->succ_next;
6854 else
6855 src->succ = e->succ_next;
6857 for (tmp = dest->pred; tmp && tmp != e; tmp = tmp->pred_next)
6858 last_pred = tmp;
6860 if (!tmp)
6861 abort ();
6862 if (last_pred)
6863 last_pred->pred_next = e->pred_next;
6864 else
6865 dest->pred = e->pred_next;
6867 n_edges--;
6868 free (e);
6871 /* This routine will remove any fake successor edges for a basic block.
6872 When the edge is removed, it is also removed from whatever predecessor
6873 list it is in. */
6874 static void
6875 remove_fake_successors (bb)
6876 basic_block bb;
6878 edge e;
6879 for (e = bb->succ; e ; )
6881 edge tmp = e;
6882 e = e->succ_next;
6883 if ((tmp->flags & EDGE_FAKE) == EDGE_FAKE)
6884 remove_edge (tmp);
6888 /* This routine will remove all fake edges from the flow graph. If
6889 we remove all fake successors, it will automatically remove all
6890 fake predecessors. */
6891 void
6892 remove_fake_edges ()
6894 int x;
6896 for (x = 0; x < n_basic_blocks; x++)
6897 remove_fake_successors (BASIC_BLOCK (x));
6899 /* We've handled all successors except the entry block's. */
6900 remove_fake_successors (ENTRY_BLOCK_PTR);
6903 /* This function will add a fake edge between any block which has no
6904 successors and the exit block. Some data flow equations require these
6905 edges to exist. */
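/* For instance, a block ending in a call to a function that never
   returns (such as abort) has no successor edges; the fake edge to
   EXIT_BLOCK_PTR added here keeps backward dataflow problems such as
   the post-dominator computation well defined for that block.  */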
6906 void
6907 add_noreturn_fake_exit_edges ()
6909 int x;
6911 for (x = 0; x < n_basic_blocks; x++)
6912 if (BASIC_BLOCK (x)->succ == NULL)
6913 make_edge (NULL, BASIC_BLOCK (x), EXIT_BLOCK_PTR, EDGE_FAKE);
6916 /* Redirect an edge's successor from one block to another. */
6918 void
6919 redirect_edge_succ (e, new_succ)
6920 edge e;
6921 basic_block new_succ;
6923 edge *pe;
6925 /* Disconnect the edge from the old successor block. */
6926 for (pe = &e->dest->pred; *pe != e ; pe = &(*pe)->pred_next)
6927 continue;
6928 *pe = (*pe)->pred_next;
6930 /* Reconnect the edge to the new successor block. */
6931 e->pred_next = new_succ->pred;
6932 new_succ->pred = e;
6933 e->dest = new_succ;
6936 /* Redirect an edge's predecessor from one block to another. */
6938 void
6939 redirect_edge_pred (e, new_pred)
6940 edge e;
6941 basic_block new_pred;
6943 edge *pe;
6945 /* Disconnect the edge from the old predecessor block. */
6946 for (pe = &e->src->succ; *pe != e ; pe = &(*pe)->succ_next)
6947 continue;
6948 *pe = (*pe)->succ_next;
6950 /* Reconnect the edge to the new predecessor block. */
6951 e->succ_next = new_pred->succ;
6952 new_pred->succ = e;
6953 e->src = new_pred;
6956 /* Dump the list of basic blocks in the bitmap NODES. */
6957 static void
6958 flow_nodes_print (str, nodes, file)
6959 const char *str;
6960 const sbitmap nodes;
6961 FILE *file;
6963 int node;
6965 fprintf (file, "%s { ", str);
6966 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {fprintf (file, "%d ", node);});
6967 fputs ("}\n", file);
6971 /* Dump the list of exiting edges in the array EDGES. */
6972 static void
6973 flow_exits_print (str, edges, num_edges, file)
6974 const char *str;
6975 const edge *edges;
6976 int num_edges;
6977 FILE *file;
6979 int i;
6981 fprintf (file, "%s { ", str);
6982 for (i = 0; i < num_edges; i++)
6983 fprintf (file, "%d->%d ", edges[i]->src->index, edges[i]->dest->index);
6984 fputs ("}\n", file);
6988 /* Dump loop related CFG information. */
6989 static void
6990 flow_loops_cfg_dump (loops, file)
6991 const struct loops *loops;
6992 FILE *file;
6994 int i;
6996 if (! loops->num || ! file || ! loops->cfg.dom)
6997 return;
6999 for (i = 0; i < n_basic_blocks; i++)
7001 edge succ;
7003 fprintf (file, ";; %d succs { ", i);
7004 for (succ = BASIC_BLOCK (i)->succ; succ; succ = succ->succ_next)
7005 fprintf (file, "%d ", succ->dest->index);
7006 flow_nodes_print ("} dom", loops->cfg.dom[i], file);
7010 /* Dump the DFS node order. */
7011 if (loops->cfg.dfs_order)
7013 fputs (";; DFS order: ", file);
7014 for (i = 0; i < n_basic_blocks; i++)
7015 fprintf (file, "%d ", loops->cfg.dfs_order[i]);
7016 fputs ("\n", file);
7021 /* Return non-zero if the nodes of LOOP are a subset of OUTER. */
7022 static int
7023 flow_loop_nested_p (outer, loop)
7024 struct loop *outer;
7025 struct loop *loop;
7027 return sbitmap_a_subset_b_p (loop->nodes, outer->nodes);
7031 /* Dump the loop information specified by LOOPS to the stream FILE. */
7032 void
7033 flow_loops_dump (loops, file, verbose)
7034 const struct loops *loops;
7035 FILE *file;
7036 int verbose;
7038 int i;
7039 int num_loops;
7041 num_loops = loops->num;
7042 if (! num_loops || ! file)
7043 return;
7045 fprintf (file, ";; %d loops found, %d levels\n",
7046 num_loops, loops->levels);
7048 for (i = 0; i < num_loops; i++)
7050 struct loop *loop = &loops->array[i];
7052 fprintf (file, ";; loop %d (%d to %d):\n;; header %d, latch %d, pre-header %d, depth %d, level %d, outer %ld\n",
7053 i, INSN_UID (loop->header->head), INSN_UID (loop->latch->end),
7054 loop->header->index, loop->latch->index,
7055 loop->pre_header ? loop->pre_header->index : -1,
7056 loop->depth, loop->level,
7057 (long) (loop->outer ? (loop->outer - loops->array) : -1));
7058 fprintf (file, ";; %d", loop->num_nodes);
7059 flow_nodes_print (" nodes", loop->nodes, file);
7060 fprintf (file, ";; %d", loop->num_exits);
7061 flow_exits_print (" exits", loop->exits, loop->num_exits, file);
7063 if (loop->shared)
7065 int j;
7067 for (j = 0; j < i; j++)
7069 struct loop *oloop = &loops->array[j];
7071 if (loop->header == oloop->header)
7073 int disjoint;
7074 int smaller;
7076 smaller = loop->num_nodes < oloop->num_nodes;
7078 /* If the union of LOOP and OLOOP is different from
7079 the larger of LOOP and OLOOP, then LOOP and OLOOP
7080 must be disjoint. */
7081 disjoint = ! flow_loop_nested_p (smaller ? loop : oloop,
7082 smaller ? oloop : loop);
7083 fprintf (file, ";; loop header %d shared by loops %d, %d %s\n",
7084 loop->header->index, i, j,
7085 disjoint ? "disjoint" : "nested");
7090 if (verbose)
7092 /* Print diagnostics to compare our concept of a loop with
7093 what the loop notes say. */
7094 if (GET_CODE (PREV_INSN (loop->first->head)) != NOTE
7095 || NOTE_LINE_NUMBER (PREV_INSN (loop->first->head))
7096 != NOTE_INSN_LOOP_BEG)
7097 fprintf (file, ";; No NOTE_INSN_LOOP_BEG at %d\n",
7098 INSN_UID (PREV_INSN (loop->first->head)));
7099 if (GET_CODE (NEXT_INSN (loop->last->end)) != NOTE
7100 || NOTE_LINE_NUMBER (NEXT_INSN (loop->last->end))
7101 != NOTE_INSN_LOOP_END)
7102 fprintf (file, ";; No NOTE_INSN_LOOP_END at %d\n",
7103 INSN_UID (NEXT_INSN (loop->last->end)));
7107 if (verbose)
7108 flow_loops_cfg_dump (loops, file);
7112 /* Free all the memory allocated for LOOPS. */
7113 void
7114 flow_loops_free (loops)
7115 struct loops *loops;
7117 if (loops->array)
7119 int i;
7121 if (! loops->num)
7122 abort ();
7124 /* Free the loop descriptors. */
7125 for (i = 0; i < loops->num; i++)
7127 struct loop *loop = &loops->array[i];
7129 if (loop->nodes)
7130 sbitmap_free (loop->nodes);
7131 if (loop->exits)
7132 free (loop->exits);
7134 free (loops->array);
7135 loops->array = NULL;
7137 if (loops->cfg.dom)
7138 sbitmap_vector_free (loops->cfg.dom);
7139 if (loops->cfg.dfs_order)
7140 free (loops->cfg.dfs_order);
7142 sbitmap_free (loops->shared_headers);
7147 /* Find the exits from the loop using the bitmap of loop nodes NODES
7148 and store in EXITS array. Return the number of exits from the
7149 loop. */
7150 static int
7151 flow_loop_exits_find (nodes, exits)
7152 const sbitmap nodes;
7153 edge **exits;
7155 edge e;
7156 int node;
7157 int num_exits;
7159 *exits = NULL;
7161 /* Check all nodes within the loop to see if there are any
7162 successors not in the loop. Note that a node may have multiple
7163 exiting edges. */
7164 num_exits = 0;
7165 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
7166 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
7168 basic_block dest = e->dest;
7170 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
7171 num_exits++;
7175 if (! num_exits)
7176 return 0;
7178 *exits = (edge *) xmalloc (num_exits * sizeof (edge));
7180 /* Store all exiting edges into an array. */
7181 num_exits = 0;
7182 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
7183 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
7185 basic_block dest = e->dest;
7187 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
7188 (*exits)[num_exits++] = e;
7192 return num_exits;
7196 /* Find the nodes contained within the loop with header HEADER and
7197 latch LATCH and store in NODES. Return the number of nodes within
7198 the loop. */
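/* This is the classical natural-loop body computation: starting from
   the latch, walk predecessor edges backwards, adding each new block,
   and never walk past the header (it is placed in NODES first, so the
   search stops there).  Every block reached this way can get to the
   back edge without passing through the header.  */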
7199 static int
7200 flow_loop_nodes_find (header, latch, nodes)
7201 basic_block header;
7202 basic_block latch;
7203 sbitmap nodes;
7205 basic_block *stack;
7206 int sp;
7207 int num_nodes = 0;
7209 stack = (basic_block *) xmalloc (n_basic_blocks * sizeof (basic_block));
7210 sp = 0;
7212 /* Start with only the loop header in the set of loop nodes. */
7213 sbitmap_zero (nodes);
7214 SET_BIT (nodes, header->index);
7215 num_nodes++;
7216 header->loop_depth++;
7218 /* Push the loop latch on to the stack. */
7219 if (! TEST_BIT (nodes, latch->index))
7221 SET_BIT (nodes, latch->index);
7222 latch->loop_depth++;
7223 num_nodes++;
7224 stack[sp++] = latch;
7227 while (sp)
7229 basic_block node;
7230 edge e;
7232 node = stack[--sp];
7233 for (e = node->pred; e; e = e->pred_next)
7235 basic_block ancestor = e->src;
7237 /* If this ancestor has not been marked as part of the loop, add
7238 it to the set of loop nodes and push it onto the stack. */
7239 if (ancestor != ENTRY_BLOCK_PTR
7240 && ! TEST_BIT (nodes, ancestor->index))
7242 SET_BIT (nodes, ancestor->index);
7243 ancestor->loop_depth++;
7244 num_nodes++;
7245 stack[sp++] = ancestor;
7249 free (stack);
7250 return num_nodes;
7254 /* Compute the depth first search order and store in the array
7255 DFS_ORDER, marking the nodes visited in VISITED. Returns the
7256 number of nodes visited. A depth first search tries to get as far
7257 away from the starting point as quickly as possible. */
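/* Implementation note: dfs_order is filled from the end towards the
   front as nodes finish, so on return dfs_order[] holds the blocks in
   reverse completion order (reverse post-order).  This is what lets
   flow_loops_find visit outer loop headers before inner ones.  */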
7258 static int
7259 flow_depth_first_order_compute (dfs_order)
7260 int *dfs_order;
7262 edge *stack;
7263 int sp;
7264 int dfsnum = 0;
7265 sbitmap visited;
7267 /* Allocate stack for back-tracking up CFG. */
7268 stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
7269 sp = 0;
7271 /* Allocate bitmap to track nodes that have been visited. */
7272 visited = sbitmap_alloc (n_basic_blocks);
7274 /* None of the nodes in the CFG have been visited yet. */
7275 sbitmap_zero (visited);
7277 /* Push the first edge on to the stack. */
7278 stack[sp++] = ENTRY_BLOCK_PTR->succ;
7280 while (sp)
7282 edge e;
7283 basic_block src;
7284 basic_block dest;
7286 /* Look at the edge on the top of the stack. */
7287 e = stack[sp - 1];
7288 src = e->src;
7289 dest = e->dest;
7291 /* Check if the edge destination has been visited yet. */
7292 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
7294 /* Mark that we have visited the destination. */
7295 SET_BIT (visited, dest->index);
7297 if (dest->succ)
7299 /* Since the DEST node has been visited for the first
7300 time, check its successors. */
7301 stack[sp++] = dest->succ;
7303 else
7305 /* There are no successors for the DEST node so assign
7306 its DFS number. */
7307 dfs_order[n_basic_blocks - ++dfsnum] = dest->index;
7310 else
7312 if (! e->succ_next && src != ENTRY_BLOCK_PTR)
7314 /* There are no more successors for the SRC node
7315 so assign its DFS number. */
7316 dfs_order[n_basic_blocks - ++dfsnum] = src->index;
7319 if (e->succ_next)
7320 stack[sp - 1] = e->succ_next;
7321 else
7322 sp--;
7326 free (stack);
7327 sbitmap_free (visited);
7329 /* The number of nodes visited should not be greater than
7330 n_basic_blocks. */
7331 if (dfsnum > n_basic_blocks)
7332 abort ();
7334 /* If fewer nodes were visited, some nodes in the CFG are unreachable. */
7335 if (dfsnum < n_basic_blocks)
7336 abort ();
7337 return dfsnum;
7341 /* Return the block for the pre-header of the loop with header
7342 HEADER where DOM specifies the dominator information. Return NULL if
7343 there is no pre-header. */
7344 static basic_block
7345 flow_loop_pre_header_find (header, dom)
7346 basic_block header;
7347 const sbitmap *dom;
7349 basic_block pre_header;
7350 edge e;
7352 /* If block p is a predecessor of the header and is the only block
7353 that the header does not dominate, then it is the pre-header. */
7354 pre_header = NULL;
7355 for (e = header->pred; e; e = e->pred_next)
7357 basic_block node = e->src;
7359 if (node != ENTRY_BLOCK_PTR
7360 && ! TEST_BIT (dom[node->index], header->index))
7362 if (pre_header == NULL)
7363 pre_header = node;
7364 else
7366 /* There are multiple edges into the header from outside
7367 the loop so there is no pre-header block. */
7368 pre_header = NULL;
7369 break;
7373 return pre_header;
7377 /* Add LOOP to the loop hierarchy tree where PREVLOOP was the loop
7378 previously added. The insertion algorithm assumes that the loops
7379 are added in the order found by a depth first search of the CFG. */
7380 static void
7381 flow_loop_tree_node_add (prevloop, loop)
7382 struct loop *prevloop;
7383 struct loop *loop;
7386 if (flow_loop_nested_p (prevloop, loop))
7388 prevloop->inner = loop;
7389 loop->outer = prevloop;
7390 return;
7393 while (prevloop->outer)
7395 if (flow_loop_nested_p (prevloop->outer, loop))
7397 prevloop->next = loop;
7398 loop->outer = prevloop->outer;
7399 return;
7401 prevloop = prevloop->outer;
7404 prevloop->next = loop;
7405 loop->outer = NULL;
7409 /* Build the loop hierarchy tree for LOOPS. */
7410 static void
7411 flow_loops_tree_build (loops)
7412 struct loops *loops;
7414 int i;
7415 int num_loops;
7417 num_loops = loops->num;
7418 if (! num_loops)
7419 return;
7421 /* Root the loop hierarchy tree with the first loop found.
7422 Since we used a depth first search this should be the
7423 outermost loop. */
7424 loops->tree = &loops->array[0];
7425 loops->tree->outer = loops->tree->inner = loops->tree->next = NULL;
7427 /* Add the remaining loops to the tree. */
7428 for (i = 1; i < num_loops; i++)
7429 flow_loop_tree_node_add (&loops->array[i - 1], &loops->array[i]);
7433 /* Helper function to compute loop nesting depth and enclosed loop level
7434 for the natural loop specified by LOOP at the loop depth DEPTH.
7435 Returns the loop level. */
7436 static int
7437 flow_loop_level_compute (loop, depth)
7438 struct loop *loop;
7439 int depth;
7441 struct loop *inner;
7442 int level = 1;
7444 if (! loop)
7445 return 0;
7447 /* Traverse loop tree assigning depth and computing level as the
7448 maximum level of all the inner loops of this loop. The loop
7449 level is equivalent to the height of the loop in the loop tree
7450 and corresponds to the number of enclosed loop levels (including
7451 itself). */
7452 for (inner = loop->inner; inner; inner = inner->next)
7454 int ilevel;
7456 ilevel = flow_loop_level_compute (inner, depth + 1) + 1;
7458 if (ilevel > level)
7459 level = ilevel;
7461 loop->level = level;
7462 loop->depth = depth;
7463 return level;
7467 /* Compute the loop nesting depth and enclosed loop level for the loop
7468 hierarchy tree specified by LOOPS. Return the maximum enclosed loop
7469 level. */
7471 static int
7472 flow_loops_level_compute (loops)
7473 struct loops *loops;
7475 struct loop *loop;
7476 int level;
7477 int levels = 0;
7479 /* Traverse all the outer level loops. */
7480 for (loop = loops->tree; loop; loop = loop->next)
7482 level = flow_loop_level_compute (loop, 1);
7483 if (level > levels)
7484 levels = level;
7486 return levels;
7490 /* Find all the natural loops in the function and save them in the LOOPS
7491 structure; also recalculate the loop_depth information in the basic block structures.
7492 Return the number of natural loops found. */
7494 int
7495 flow_loops_find (loops)
7496 struct loops *loops;
7498 int i;
7499 int b;
7500 int num_loops;
7501 edge e;
7502 sbitmap headers;
7503 sbitmap *dom;
7504 int *dfs_order;
7506 loops->num = 0;
7507 loops->array = NULL;
7508 loops->tree = NULL;
7509 dfs_order = NULL;
7511 /* Taking care of this degenerate case makes the rest of
7512 this code simpler. */
7513 if (n_basic_blocks == 0)
7514 return 0;
7516 /* Compute the dominators. */
7517 dom = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
7518 compute_flow_dominators (dom, NULL);
7520 /* Count the number of loop edges (back edges). This should be the
7521 same as the number of natural loops. Also clear the loop_depth
7522 fields; as we work from inner to outer in a loop nest, we call
7523 flow_loop_nodes_find, which will increment loop_depth for nodes
7524 within the current loop, which happens to enclose inner loops. */
7526 num_loops = 0;
7527 for (b = 0; b < n_basic_blocks; b++)
7529 BASIC_BLOCK (b)->loop_depth = 0;
7530 for (e = BASIC_BLOCK (b)->pred; e; e = e->pred_next)
7532 basic_block latch = e->src;
7534 /* Look for back edges where a predecessor is dominated
7535 by this block. A natural loop has a single entry
7536 node (header) that dominates all the nodes in the
7537 loop. It also has a single back edge to the header
7538 from a latch node. Note that multiple natural loops
7539 may share the same header. */
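/* The smallest case of this test is a self loop: an edge from B
   to itself.  B trivially dominates itself, so the edge counts as
   a back edge with B acting as both header and latch.  */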
7540 if (latch != ENTRY_BLOCK_PTR && TEST_BIT (dom[latch->index], b))
7541 num_loops++;
7545 if (num_loops)
7547 /* Compute depth first search order of the CFG so that outer
7548 natural loops will be found before inner natural loops. */
7549 dfs_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
7550 flow_depth_first_order_compute (dfs_order);
7552 /* Allocate loop structures. */
7553 loops->array
7554 = (struct loop *) xcalloc (num_loops, sizeof (struct loop));
7556 headers = sbitmap_alloc (n_basic_blocks);
7557 sbitmap_zero (headers);
7559 loops->shared_headers = sbitmap_alloc (n_basic_blocks);
7560 sbitmap_zero (loops->shared_headers);
7562 /* Find and record information about all the natural loops
7563 in the CFG. */
7564 num_loops = 0;
7565 for (b = 0; b < n_basic_blocks; b++)
7567 basic_block header;
7569 /* Search the nodes of the CFG in DFS order so that we find
7570 outer loops first. */
7571 header = BASIC_BLOCK (dfs_order[b]);
7573 /* Look for all the possible latch blocks for this header. */
7574 for (e = header->pred; e; e = e->pred_next)
7576 basic_block latch = e->src;
7578 /* Look for back edges where a predecessor is dominated
7579 by this block. A natural loop has a single entry
7580 node (header) that dominates all the nodes in the
7581 loop. It also has a single back edge to the header
7582 from a latch node. Note that multiple natural loops
7583 may share the same header. */
7584 if (latch != ENTRY_BLOCK_PTR
7585 && TEST_BIT (dom[latch->index], header->index))
7587 struct loop *loop;
7589 loop = loops->array + num_loops;
7591 loop->header = header;
7592 loop->latch = latch;
7594 /* Keep track of blocks that are loop headers so
7595 that we can tell which loops should be merged. */
7596 if (TEST_BIT (headers, header->index))
7597 SET_BIT (loops->shared_headers, header->index);
7598 SET_BIT (headers, header->index);
7600 /* Find nodes contained within the loop. */
7601 loop->nodes = sbitmap_alloc (n_basic_blocks);
7602 loop->num_nodes
7603 = flow_loop_nodes_find (header, latch, loop->nodes);
7605 /* Compute first and last blocks within the loop.
7606 These are often the same as the loop header and
7607 loop latch respectively, but this is not always
7608 the case. */
7609 loop->first
7610 = BASIC_BLOCK (sbitmap_first_set_bit (loop->nodes));
7611 loop->last
7612 = BASIC_BLOCK (sbitmap_last_set_bit (loop->nodes));
7614 /* Find edges which exit the loop. Note that a node
7615 may have several exit edges. */
7616 loop->num_exits
7617 = flow_loop_exits_find (loop->nodes, &loop->exits);
7619 /* Look to see if the loop has a pre-header node. */
7620 loop->pre_header
7621 = flow_loop_pre_header_find (header, dom);
7623 num_loops++;
7628 /* Natural loops with shared headers may either be disjoint or
7629 nested. Disjoint loops with shared headers cannot be inner
7630 loops and should be merged. For now just mark loops that share
7631 headers. */
7632 for (i = 0; i < num_loops; i++)
7633 if (TEST_BIT (loops->shared_headers, loops->array[i].header->index))
7634 loops->array[i].shared = 1;
7636 sbitmap_free (headers);
7639 loops->num = num_loops;
7641 /* Save CFG derived information to avoid recomputing it. */
7642 loops->cfg.dom = dom;
7643 loops->cfg.dfs_order = dfs_order;
7645 /* Build the loop hierarchy tree. */
7646 flow_loops_tree_build (loops);
7648 /* Assign the loop nesting depth and enclosed loop level for each
7649 loop. */
7650 loops->levels = flow_loops_level_compute (loops);
7652 return num_loops;
7656 /* Return non-zero if edge E enters the header of LOOP from outside of LOOP. */
7658 int
7659 flow_loop_outside_edge_p (loop, e)
7660 const struct loop *loop;
7661 edge e;
7663 if (e->dest != loop->header)
7664 abort ();
7665 return (e->src == ENTRY_BLOCK_PTR)
7666 || ! TEST_BIT (loop->nodes, e->src->index);
7670 /* Clear LOG_LINKS fields of insns in a chain. */
7672 void
7673 clear_log_links (insns)
7674 rtx insns;
7676 rtx i;
7677 for (i = insns; i; i = NEXT_INSN (i))
7678 if (GET_RTX_CLASS (GET_CODE (i)) == 'i')
7679 LOG_LINKS (i) = 0;
7682 /* Given a register bitmap, turn on the bits in a HARD_REG_SET that
7683 correspond to the hard registers, if any, set in that map. This
7684 could be done far more efficiently by having all sorts of special-cases
7685 with moving single words, but probably isn't worth the trouble. */
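/* Implementation note: EXECUTE_IF_SET_IN_BITMAP visits bits in
   increasing index order, so once the walk reaches the first pseudo
   register number no further hard registers can appear, and the early
   return below is safe.  */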
7687 void
7688 reg_set_to_hard_reg_set (to, from)
7689 HARD_REG_SET *to;
7690 bitmap from;
7692 int i;
7694 EXECUTE_IF_SET_IN_BITMAP
7695 (from, 0, i,
7697 if (i >= FIRST_PSEUDO_REGISTER)
7698 return;
7699 SET_HARD_REG_BIT (*to, i);