/* Data flow analysis for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
/* This file contains the data flow analysis pass of the compiler.  It
   computes data flow information which tells combine_instructions
   which insns to consider combining and controls register allocation.

   Additional data flow information that is too bulky to record is
   generated during the analysis, and is used at that time to create
   autoincrement and autodecrement addressing.

   The first step is dividing the function into basic blocks.
   find_basic_blocks does this.  Then life_analysis determines
   where each register is live and where it is dead.

   ** find_basic_blocks **

   find_basic_blocks divides the current function's rtl into basic
   blocks and constructs the CFG.  The blocks are recorded in the
   basic_block_info array; the CFG exists in the edge structures
   referenced by the blocks.

   find_basic_blocks also finds any unreachable loops and deletes them.

   ** life_analysis **

   life_analysis is called immediately after find_basic_blocks.
   It uses the basic block information to determine where each
   hard or pseudo register is live.

   ** live-register info **

   The information about where each register is live is in two parts:
   the REG_NOTES of insns, and the vector basic_block->global_live_at_start.

   basic_block->global_live_at_start has an element for each basic
   block, and the element is a bit-vector with a bit for each hard or
   pseudo register.  The bit is 1 if the register is live at the
   beginning of the basic block.
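
   For example, a later pass can ask whether register number REGNO is
   live on entry to block I with the regset accessor from basic-block.h
   (an illustrative sketch, not code quoted from this file):

	if (REGNO_REG_SET_P (BASIC_BLOCK (i)->global_live_at_start, regno))
	  ... REGNO's value may be needed in block I ...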

   Two types of elements can be added to an insn's REG_NOTES.
   A REG_DEAD note is added to an insn's REG_NOTES for any register
   that meets both of two conditions:  The value in the register is not
   needed in subsequent insns and the insn does not replace the value in
   the register (in the case of multi-word hard registers, the value in
   each register must be replaced by the insn to avoid a REG_DEAD note).

   In the vast majority of cases, an object in a REG_DEAD note will be
   used somewhere in the insn.  The (rare) exception to this is if an
   insn uses a multi-word hard register and only some of the registers are
   needed in subsequent insns.  In that case, REG_DEAD notes will be
   provided for those hard registers that are not subsequently needed.
   Partial REG_DEAD notes of this type do not occur when an insn sets
   only some of the hard registers used in such a multi-word operand;
   omitting REG_DEAD notes for objects stored in an insn is optional and
   the desire to do so does not justify the complexity of the partial
   REG_DEAD notes.

   REG_UNUSED notes are added for each register that is set by the insn
   but is unused subsequently (if every register set by the insn is unused
   and the insn does not reference memory or have some other side-effect,
   the insn is deleted instead).  If only part of a multi-word hard
   register is used in a subsequent insn, REG_UNUSED notes are made for
   the parts that will not be used.

   To determine which registers are live after any insn, one can
   start from the beginning of the basic block and scan insns, noting
   which registers are set by each insn and which die there.
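
   For example, in the (hypothetical) insn

	(insn 42 41 43 (set (reg:SI 58)
		(plus:SI (reg:SI 58) (reg:SI 59)))
	   (expr_list:REG_DEAD (reg:SI 59) (nil)))

   the REG_DEAD note records that pseudo 59 is used here and dies here,
   while pseudo 58 stays live because the insn replaces its value.  A
   scan as described above would drop 59 from the live set at this insn.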

   ** Other actions of life_analysis **

   life_analysis sets up the LOG_LINKS fields of insns because the
   information needed to do so is readily available.

   life_analysis deletes insns whose only effect is to store a value
   that is never used.

   life_analysis notices cases where a reference to a register as
   a memory address can be combined with a preceding or following
   incrementation or decrementation of the register.  The separate
   instruction to increment or decrement is deleted and the address
   is changed to a POST_INC or similar rtx.

   Each time an incrementing or decrementing address is created,
   a REG_INC element is added to the insn's REG_NOTES list.

   life_analysis fills in certain vectors containing information about
   register usage: REG_N_REFS, REG_N_DEATHS, REG_N_SETS, REG_LIVE_LENGTH,
   REG_N_CALLS_CROSSED and REG_BASIC_BLOCK.

   life_analysis sets current_function_sp_is_unchanging if the function
   doesn't modify the stack pointer.  */
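
/* As an illustration of the live-register information described above,
   a pass could print every register live on entry to each block like so.
   This is a hypothetical sketch, not code used by the compiler; it assumes
   only the regset iteration macro from basic-block.h and that life
   analysis has already run.  */
#if 0
static void
print_live_at_start (file)
     FILE *file;
{
  int i;

  for (i = 0; i < n_basic_blocks; ++i)
    {
      int regno;
      basic_block bb = BASIC_BLOCK (i);

      fprintf (file, "Registers live at start of block %d:", i);
      /* Walk every bit set in the block's entry regset.  */
      EXECUTE_IF_SET_IN_REG_SET (bb->global_live_at_start, 0, regno,
				 {
				   fprintf (file, " %d", regno);
				 });
      fprintf (file, "\n");
    }
}
#endif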
/* TODO:

   Split out from life_analysis:
	- local property discovery (bb->local_live, bb->local_set)
	- global property computation
	- log links creation
	- pre/post modify transformation
 */
#include "config.h"
#include "system.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-config.h"
#include "regs.h"
#include "flags.h"
#include "output.h"
#include "function.h"
#include "except.h"
#include "toplev.h"
#include "recog.h"
#include "insn-flags.h"
#include "expr.h"

#include "obstack.h"
#include "splay-tree.h"

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free
/* EXIT_IGNORE_STACK should be nonzero if, when returning from a function,
   the stack pointer does not matter.  The value is tested only in
   functions that have frame pointers.
   No definition is equivalent to always zero.  */
#ifndef EXIT_IGNORE_STACK
#define EXIT_IGNORE_STACK 0
#endif

#ifndef HAVE_epilogue
#define HAVE_epilogue 0
#endif
#ifndef HAVE_prologue
#define HAVE_prologue 0
#endif
#ifndef HAVE_sibcall_epilogue
#define HAVE_sibcall_epilogue 0
#endif
/* The contents of the current function definition are allocated
   in this obstack, and all are freed at the end of the function.
   For top-level functions, this is temporary_obstack.
   Separate obstacks are made for nested functions.  */

extern struct obstack *function_obstack;

/* Number of basic blocks in the current function.  */

int n_basic_blocks;

/* Number of edges in the current function.  */

int n_edges;

/* The basic block array.  */

varray_type basic_block_info;
/* The special entry and exit blocks.  */

struct basic_block_def entry_exit_blocks[2]
= {{NULL,			/* head */
    NULL,			/* end */
    NULL,			/* pred */
    NULL,			/* succ */
    NULL,			/* local_set */
    NULL,			/* global_live_at_start */
    NULL,			/* global_live_at_end */
    NULL,			/* aux */
    ENTRY_BLOCK,		/* index */
    0,				/* loop_depth */
    -1, -1,			/* eh_beg, eh_end */
    0				/* count */
   },
   {NULL,			/* head */
    NULL,			/* end */
    NULL,			/* pred */
    NULL,			/* succ */
    NULL,			/* local_set */
    NULL,			/* global_live_at_start */
    NULL,			/* global_live_at_end */
    NULL,			/* aux */
    EXIT_BLOCK,			/* index */
    0,				/* loop_depth */
    -1, -1,			/* eh_beg, eh_end */
    0				/* count */
   }
};
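/* Note for readers: the rest of this file reaches these two blocks
   through the ENTRY_BLOCK_PTR and EXIT_BLOCK_PTR macros, which (in the
   basic-block.h of this era) expand to &entry_exit_blocks[0] and
   &entry_exit_blocks[1] respectively.  */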
/* Nonzero if the second flow pass has completed.  */
int flow2_completed;

/* Maximum register number used in this function, plus one.  */

int max_regno;

/* Indexed by n, giving various register information */

varray_type reg_n_info;

/* Size of a regset for the current function,
   in (1) bytes and (2) elements.  */

int regset_bytes;
int regset_size;

/* Regset of regs live when calls to `setjmp'-like functions happen.  */
/* ??? Does this exist only for the setjmp-clobbered warning message?  */

regset regs_live_at_setjmp;

/* List made of EXPR_LIST rtx's which gives pairs of pseudo registers
   that have to go in the same hard reg.
   The first two regs in the list are a pair, and the next two
   are another pair, etc.  */
rtx regs_may_share;

/* Set of registers that may be eliminable.  These are handled specially
   in updating regs_ever_live.  */

static HARD_REG_SET elim_reg_set;

/* The basic block structure for every insn, indexed by uid.  */

varray_type basic_block_for_insn;

/* The labels mentioned in non-jump rtl.  Valid during find_basic_blocks.  */
/* ??? Should probably be using LABEL_NUSES instead.  It would take a
   bit of surgery to be able to use or co-opt the routines in jump.  */

static rtx label_value_list;
static rtx tail_recursion_label_list;
/* Holds information for tracking conditional register life information.  */
struct reg_cond_life_info
{
  /* An EXPR_LIST of conditions under which a register is dead.  */
  rtx condition;

  /* ??? Could store mask of bytes that are dead, so that we could finally
     track lifetimes of multi-word registers accessed via subregs.  */
};
/* For use in communicating between propagate_block and its subroutines.
   Holds all information needed to compute life and def-use information.  */

struct propagate_block_info
{
  /* The basic block we're considering.  */
  basic_block bb;

  /* Bit N is set if register N is conditionally or unconditionally live.  */
  regset reg_live;

  /* Bit N is set if register N is set this insn.  */
  regset new_set;

  /* Element N is the next insn that uses (hard or pseudo) register N
     within the current basic block; or zero, if there is no such insn.  */
  rtx *reg_next_use;

  /* Contains a list of all the MEMs we are tracking for dead store
     elimination.  */
  rtx mem_set_list;

  /* If non-null, record the set of registers set in the basic block.  */
  regset local_set;

#ifdef HAVE_conditional_execution
  /* Indexed by register number, holds a reg_cond_life_info for each
     register that is not unconditionally live or dead.  */
  splay_tree reg_cond_dead;

  /* Bit N is set if register N is in an expression in reg_cond_dead.  */
  regset reg_cond_reg;
#endif

  /* Non-zero if the value of CC0 is live.  */
  int cc0_live;

  /* Flags controlling the set of information propagate_block collects.  */
  int flags;
};
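/* The `flags' member above is a mask of the PROP_* bits defined in
   basic-block.h; for instance, a caller wanting death notes and register
   usage counts but no code removal might pass
   PROP_DEATH_NOTES | PROP_REG_INFO.  (Illustrative combination only.)  */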
/* Forward declarations */
static int count_basic_blocks		PARAMS ((rtx));
static void find_basic_blocks_1		PARAMS ((rtx));
static rtx find_label_refs		PARAMS ((rtx, rtx));
static void clear_edges			PARAMS ((void));
static void make_edges			PARAMS ((rtx));
static void make_label_edge		PARAMS ((sbitmap *, basic_block,
						 rtx, int));
static void make_eh_edge		PARAMS ((sbitmap *, eh_nesting_info *,
						 basic_block, rtx, int));
static void mark_critical_edges		PARAMS ((void));
static void move_stray_eh_region_notes	PARAMS ((void));
static void record_active_eh_regions	PARAMS ((rtx));

static void commit_one_edge_insertion	PARAMS ((edge));

static void delete_unreachable_blocks	PARAMS ((void));
static void delete_eh_regions		PARAMS ((void));
static int can_delete_note_p		PARAMS ((rtx));
static void expunge_block		PARAMS ((basic_block));
static int can_delete_label_p		PARAMS ((rtx));
static int tail_recursion_label_p	PARAMS ((rtx));
static int merge_blocks_move_predecessor_nojumps PARAMS ((basic_block,
							  basic_block));
static int merge_blocks_move_successor_nojumps PARAMS ((basic_block,
							basic_block));
static int merge_blocks			PARAMS ((edge, basic_block,
						 basic_block));
static void try_merge_blocks		PARAMS ((void));
static void tidy_fallthru_edges		PARAMS ((void));
static int verify_wide_reg_1		PARAMS ((rtx *, void *));
static void verify_wide_reg		PARAMS ((int, rtx, rtx));
static void verify_local_live_at_start	PARAMS ((regset, basic_block));
static int set_noop_p			PARAMS ((rtx));
static int noop_move_p			PARAMS ((rtx));
static void delete_noop_moves		PARAMS ((rtx));
static void notice_stack_pointer_modification_1 PARAMS ((rtx, rtx, void *));
static void notice_stack_pointer_modification PARAMS ((rtx));
static void mark_reg			PARAMS ((rtx, void *));
static void mark_regs_live_at_end	PARAMS ((regset));
static int set_phi_alternative_reg	PARAMS ((rtx, int, int, void *));
static void calculate_global_regs_live	PARAMS ((sbitmap, sbitmap, int));
static void propagate_block_delete_insn PARAMS ((basic_block, rtx));
static rtx propagate_block_delete_libcall PARAMS ((basic_block, rtx, rtx));
static int insn_dead_p			PARAMS ((struct propagate_block_info *,
						 rtx, int, rtx));
static int libcall_dead_p		PARAMS ((struct propagate_block_info *,
						 rtx, rtx));
static void mark_set_regs		PARAMS ((struct propagate_block_info *,
						 rtx, rtx));
static void mark_set_1			PARAMS ((struct propagate_block_info *,
						 enum rtx_code, rtx, rtx,
						 rtx, int));
#ifdef HAVE_conditional_execution
static int mark_regno_cond_dead		PARAMS ((struct propagate_block_info *,
						 int, rtx));
static void free_reg_cond_life_info	PARAMS ((splay_tree_value));
static int flush_reg_cond_reg_1		PARAMS ((splay_tree_node, void *));
static void flush_reg_cond_reg		PARAMS ((struct propagate_block_info *,
						 int));
static rtx ior_reg_cond			PARAMS ((rtx, rtx));
static rtx not_reg_cond			PARAMS ((rtx));
static rtx nand_reg_cond		PARAMS ((rtx, rtx));
#endif
#ifdef AUTO_INC_DEC
static void find_auto_inc		PARAMS ((struct propagate_block_info *,
						 rtx, rtx));
static int try_pre_increment_1		PARAMS ((struct propagate_block_info *,
						 rtx));
static int try_pre_increment		PARAMS ((rtx, rtx, HOST_WIDE_INT));
#endif
static void mark_used_reg		PARAMS ((struct propagate_block_info *,
						 rtx, rtx, rtx));
static void mark_used_regs		PARAMS ((struct propagate_block_info *,
						 rtx, rtx, rtx));
void dump_flow_info			PARAMS ((FILE *));
void debug_flow_info			PARAMS ((void));
static void dump_edge_info		PARAMS ((FILE *, edge, int));

static void invalidate_mems_from_autoinc PARAMS ((struct propagate_block_info *,
						  rtx));
static void remove_fake_successors	PARAMS ((basic_block));
static void flow_nodes_print		PARAMS ((const char *, const sbitmap,
						 FILE *));
static void flow_exits_print		PARAMS ((const char *, const edge *,
						 int, FILE *));
static void flow_loops_cfg_dump		PARAMS ((const struct loops *, FILE *));
static int flow_loop_nested_p		PARAMS ((struct loop *, struct loop *));
static int flow_loop_exits_find		PARAMS ((const sbitmap, edge **));
static int flow_loop_nodes_find		PARAMS ((basic_block, basic_block,
						 sbitmap));
static int flow_depth_first_order_compute PARAMS ((int *));
static basic_block flow_loop_pre_header_find PARAMS ((basic_block,
						      const sbitmap *));
static void flow_loop_tree_node_add	PARAMS ((struct loop *, struct loop *));
static void flow_loops_tree_build	PARAMS ((struct loops *));
static int flow_loop_level_compute	PARAMS ((struct loop *, int));
static int flow_loops_level_compute	PARAMS ((struct loops *));
/* Find basic blocks of the current function.
   F is the first insn of the function and NREGS the number of register
   numbers in use.  */

void
find_basic_blocks (f, nregs, file)
     rtx f;
     int nregs ATTRIBUTE_UNUSED;
     FILE *file ATTRIBUTE_UNUSED;
{
  int max_uid;

  /* Flush out existing data.  */
  if (basic_block_info != NULL)
    {
      int i;

      clear_edges ();

      /* Clear bb->aux on all extant basic blocks.  We'll use this as a
	 tag for reuse during create_basic_block, just in case some pass
	 copies around basic block notes improperly.  */
      for (i = 0; i < n_basic_blocks; ++i)
	BASIC_BLOCK (i)->aux = NULL;

      VARRAY_FREE (basic_block_info);
    }

  n_basic_blocks = count_basic_blocks (f);

  /* Size the basic block table.  The actual structures will be allocated
     by find_basic_blocks_1, since we want to keep the structure pointers
     stable across calls to find_basic_blocks.  */
  /* ??? This whole issue would be much simpler if we called find_basic_blocks
     exactly once, and thereafter we don't have a single long chain of
     instructions at all until close to the end of compilation when we
     actually lay them out.  */

  VARRAY_BB_INIT (basic_block_info, n_basic_blocks, "basic_block_info");

  find_basic_blocks_1 (f);

  /* Record the block to which an insn belongs.  */
  /* ??? This should be done another way, by which (perhaps) a label is
     tagged directly with the basic block that it starts.  It is used for
     more than that currently, but IMO that is the only valid use.  */

  max_uid = get_max_uid ();
#ifdef AUTO_INC_DEC
  /* Leave space for insns life_analysis makes in some cases for auto-inc.
     These cases are rare, so we don't need too much space.  */
  max_uid += max_uid / 10;
#endif

  compute_bb_for_insn (max_uid);

  /* Discover the edges of our cfg.  */
  record_active_eh_regions (f);
  make_edges (label_value_list);

  /* Do very simple cleanup now, for the benefit of code that runs between
     here and cleanup_cfg, e.g. thread_prologue_and_epilogue_insns.  */
  tidy_fallthru_edges ();

  mark_critical_edges ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
}
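/* For orientation, a caller typically invokes the routines in this file
   in roughly this order (a hypothetical sketch; the real sequencing lives
   in toplev.c and varies with optimization options):

	find_basic_blocks (insns, max_reg_num (), rtl_dump_file);
	cleanup_cfg (insns);
	life_analysis (insns, max_reg_num (), rtl_dump_file, 1);
 */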
/* Count the basic blocks of the function.  */

static int
count_basic_blocks (f)
     rtx f;
{
  register rtx insn;
  register RTX_CODE prev_code;
  register int count = 0;
  int eh_region = 0;
  int call_had_abnormal_edge = 0;

  prev_code = JUMP_INSN;
  for (insn = f; insn; insn = NEXT_INSN (insn))
    {
      register RTX_CODE code = GET_CODE (insn);

      if (code == CODE_LABEL
	  || (GET_RTX_CLASS (code) == 'i'
	      && (prev_code == JUMP_INSN
		  || prev_code == BARRIER
		  || (prev_code == CALL_INSN && call_had_abnormal_edge))))
	count++;

      /* Record whether this call created an edge.  */
      if (code == CALL_INSN)
	{
	  rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
	  int region = (note ? INTVAL (XEXP (note, 0)) : 1);

	  call_had_abnormal_edge = 0;

	  /* If there is an EH region or rethrow, we have an edge.  */
	  if ((eh_region && region > 0)
	      || find_reg_note (insn, REG_EH_RETHROW, NULL_RTX))
	    call_had_abnormal_edge = 1;
	  else if (nonlocal_goto_handler_labels && region >= 0)
	    /* If there is a nonlocal goto label and the specified
	       region number isn't -1, we have an edge.  (0 means
	       no throw, but might have a nonlocal goto).  */
	    call_had_abnormal_edge = 1;
	}

      if (code != NOTE)
	prev_code = code;
      else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
	++eh_region;
      else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
	--eh_region;
    }

  /* The rest of the compiler works a bit smoother when we don't have to
     check for the edge case of do-nothing functions with no basic blocks.  */
  if (count == 0)
    {
      emit_insn (gen_rtx_USE (VOIDmode, const0_rtx));
      count = 1;
    }

  return count;
}
/* Scan a list of insns for labels referred to other than by jumps.
   This is used to scan the alternatives of a call placeholder.  */

static rtx
find_label_refs (f, lvl)
     rtx f;
     rtx lvl;
{
  rtx insn;

  for (insn = f; insn; insn = NEXT_INSN (insn))
    if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
      {
	rtx note;

	/* Make a list of all labels referred to other than by jumps
	   (which just don't have the REG_LABEL notes).

	   Make a special exception for labels followed by an ADDR*VEC,
	   as this would be a part of the tablejump setup code.

	   Make a special exception for the eh_return_stub_label, which
	   we know isn't part of any otherwise visible control flow.  */

	for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	  if (REG_NOTE_KIND (note) == REG_LABEL)
	    {
	      rtx lab = XEXP (note, 0), next;

	      if (lab == eh_return_stub_label)
		;
	      else if ((next = next_nonnote_insn (lab)) != NULL
		       && GET_CODE (next) == JUMP_INSN
		       && (GET_CODE (PATTERN (next)) == ADDR_VEC
			   || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
		;
	      else if (GET_CODE (lab) == NOTE)
		;
	      else
		lvl = alloc_EXPR_LIST (0, XEXP (note, 0), lvl);
	    }
      }

  return lvl;
}
/* Find all basic blocks of the function whose first insn is F.

   Collect and return a list of labels whose addresses are taken.  This
   will be used in make_edges for use with computed gotos.  */

static void
find_basic_blocks_1 (f)
     rtx f;
{
  register rtx insn, next;
  int i = 0;
  rtx bb_note = NULL_RTX;
  rtx eh_list = NULL_RTX;
  rtx lvl = NULL_RTX;
  rtx trll = NULL_RTX;
  rtx head = NULL_RTX;
  rtx end = NULL_RTX;

  /* We process the instructions in a slightly different way than we did
     previously.  This is so that we see a NOTE_BASIC_BLOCK after we have
     closed out the previous block, so that it gets attached at the proper
     place.  Since this form should be equivalent to the previous,
     count_basic_blocks continues to use the old form as a check.  */

  for (insn = f; insn; insn = next)
    {
      enum rtx_code code = GET_CODE (insn);

      next = NEXT_INSN (insn);

      switch (code)
	{
	case NOTE:
	  {
	    int kind = NOTE_LINE_NUMBER (insn);

	    /* Keep a LIFO list of the currently active exception notes.  */
	    if (kind == NOTE_INSN_EH_REGION_BEG)
	      eh_list = alloc_INSN_LIST (insn, eh_list);
	    else if (kind == NOTE_INSN_EH_REGION_END)
	      {
		rtx t = eh_list;

		eh_list = XEXP (eh_list, 1);
		free_INSN_LIST_node (t);
	      }

	    /* Look for basic block notes with which to keep the
	       basic_block_info pointers stable.  Unthread the note now;
	       we'll put it back at the right place in create_basic_block.
	       Or not at all if we've already found a note in this block.  */
	    else if (kind == NOTE_INSN_BASIC_BLOCK)
	      {
		if (bb_note == NULL_RTX)
		  bb_note = insn;
		else
		  next = flow_delete_insn (insn);
	      }
	    break;
	  }

	case CODE_LABEL:
	  /* A basic block starts at a label.  If we've closed one off due
	     to a barrier or some such, no need to do it again.  */
	  if (head != NULL_RTX)
	    {
	      /* While we now have edge lists with which other portions of
		 the compiler might determine a call ending a basic block
		 does not imply an abnormal edge, it will be a bit before
		 everything can be updated.  So continue to emit a noop at
		 the end of such a block.  */
	      if (GET_CODE (end) == CALL_INSN && ! SIBLING_CALL_P (end))
		{
		  rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
		  end = emit_insn_after (nop, end);
		}

	      create_basic_block (i++, head, end, bb_note);
	      bb_note = NULL_RTX;
	    }

	  head = end = insn;
	  break;

	case JUMP_INSN:
	  /* A basic block ends at a jump.  */
	  if (head == NULL_RTX)
	    head = insn;
	  else
	    {
	      /* ??? Make a special check for table jumps.  The way this
		 happens is truly and amazingly gross.  We are about to
		 create a basic block that contains just a code label and
		 an addr*vec jump insn.  Worse, an addr_diff_vec creates
		 its own natural loop.

		 Prevent this bit of brain damage, pasting things together
		 correctly in make_edges.

		 The correct solution involves emitting the table directly
		 on the tablejump instruction as a note, or JUMP_LABEL.  */

	      if (GET_CODE (PATTERN (insn)) == ADDR_VEC
		  || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
		{
		  head = end = NULL;
		  n_basic_blocks--;
		  break;
		}
	    }
	  end = insn;
	  goto new_bb_inclusive;

	case BARRIER:
	  /* A basic block ends at a barrier.  It may be that an unconditional
	     jump already closed the basic block -- no need to do it again.  */
	  if (head == NULL_RTX)
	    break;

	  /* While we now have edge lists with which other portions of the
	     compiler might determine a call ending a basic block does not
	     imply an abnormal edge, it will be a bit before everything can
	     be updated.  So continue to emit a noop at the end of such a
	     block.  */
	  if (GET_CODE (end) == CALL_INSN && ! SIBLING_CALL_P (end))
	    {
	      rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
	      end = emit_insn_after (nop, end);
	    }
	  goto new_bb_exclusive;

	case CALL_INSN:
	  {
	    /* Record whether this call created an edge.  */
	    rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
	    int region = (note ? INTVAL (XEXP (note, 0)) : 1);
	    int call_has_abnormal_edge = 0;

	    if (GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
	      {
		/* Scan each of the alternatives for label refs.  */
		lvl = find_label_refs (XEXP (PATTERN (insn), 0), lvl);
		lvl = find_label_refs (XEXP (PATTERN (insn), 1), lvl);
		lvl = find_label_refs (XEXP (PATTERN (insn), 2), lvl);
		/* Record its tail recursion label, if any.  */
		if (XEXP (PATTERN (insn), 3) != NULL_RTX)
		  trll = alloc_EXPR_LIST (0, XEXP (PATTERN (insn), 3), trll);
	      }

	    /* If there is an EH region or rethrow, we have an edge.  */
	    if ((eh_list && region > 0)
		|| find_reg_note (insn, REG_EH_RETHROW, NULL_RTX))
	      call_has_abnormal_edge = 1;
	    else if (nonlocal_goto_handler_labels && region >= 0)
	      /* If there is a nonlocal goto label and the specified
		 region number isn't -1, we have an edge.  (0 means
		 no throw, but might have a nonlocal goto).  */
	      call_has_abnormal_edge = 1;

	    /* A basic block ends at a call that can either throw or
	       do a non-local goto.  */
	    if (call_has_abnormal_edge)
	      {
	      new_bb_inclusive:
		if (head == NULL_RTX)
		  head = insn;
		end = insn;

	      new_bb_exclusive:
		create_basic_block (i++, head, end, bb_note);
		head = end = NULL_RTX;
		bb_note = NULL_RTX;
		break;
	      }
	  }
	  /* FALLTHRU */

	default:
	  if (GET_RTX_CLASS (code) == 'i')
	    {
	      if (head == NULL_RTX)
		head = insn;
	      end = insn;
	    }
	  break;
	}

      if (GET_RTX_CLASS (code) == 'i')
	{
	  rtx note;

	  /* Make a list of all labels referred to other than by jumps
	     (which just don't have the REG_LABEL notes).

	     Make a special exception for labels followed by an ADDR*VEC,
	     as this would be a part of the tablejump setup code.

	     Make a special exception for the eh_return_stub_label, which
	     we know isn't part of any otherwise visible control flow.  */

	  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	    if (REG_NOTE_KIND (note) == REG_LABEL)
	      {
		rtx lab = XEXP (note, 0), next;

		if (lab == eh_return_stub_label)
		  ;
		else if ((next = next_nonnote_insn (lab)) != NULL
			 && GET_CODE (next) == JUMP_INSN
			 && (GET_CODE (PATTERN (next)) == ADDR_VEC
			     || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
		  ;
		else if (GET_CODE (lab) == NOTE)
		  ;
		else
		  lvl = alloc_EXPR_LIST (0, XEXP (note, 0), lvl);
	      }
	}
    }

  if (head != NULL_RTX)
    create_basic_block (i++, head, end, bb_note);
  else if (bb_note)
    flow_delete_insn (bb_note);

  if (i != n_basic_blocks)
    abort ();

  label_value_list = lvl;
  tail_recursion_label_list = trll;
}
/* Tidy the CFG by deleting unreachable code and whatnot.  */

void
cleanup_cfg (f)
     rtx f;
{
  delete_unreachable_blocks ();
  move_stray_eh_region_notes ();
  record_active_eh_regions (f);
  try_merge_blocks ();
  mark_critical_edges ();

  /* Kill the data we won't maintain.  */
  free_EXPR_LIST_list (&label_value_list);
  free_EXPR_LIST_list (&tail_recursion_label_list);
}
/* Create a new basic block consisting of the instructions between
   HEAD and END inclusive.  Reuses the note and basic block struct
   in BB_NOTE, if any.  */

void
create_basic_block (index, head, end, bb_note)
     int index;
     rtx head, end, bb_note;
{
  basic_block bb;

  if (bb_note
      && ! RTX_INTEGRATED_P (bb_note)
      && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
      && bb->aux == NULL)
    {
      /* If we found an existing note, thread it back onto the chain.  */

      rtx after;

      if (GET_CODE (head) == CODE_LABEL)
	after = head;
      else
	{
	  after = PREV_INSN (head);
	  head = bb_note;
	}

      if (after != bb_note && NEXT_INSN (after) != bb_note)
	reorder_insns (bb_note, bb_note, after);
    }
  else
    {
      /* Otherwise we must create a note and a basic block structure.
	 Since we allow basic block structs in rtl, give the struct
	 the same lifetime by allocating it off the function obstack
	 rather than using malloc.  */

      bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
      memset (bb, 0, sizeof (*bb));

      if (GET_CODE (head) == CODE_LABEL)
	bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
      else
	{
	  bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
	  head = bb_note;
	}
      NOTE_BASIC_BLOCK (bb_note) = bb;
    }

  /* Always include the bb note in the block.  */
  if (NEXT_INSN (end) == bb_note)
    end = bb_note;

  bb->head = head;
  bb->end = end;
  bb->index = index;
  BASIC_BLOCK (index) = bb;

  /* Tag the block so that we know it has been used when considering
     other basic block notes.  */
  bb->aux = bb;
}
/* Records the basic block struct in BB_FOR_INSN, for every instruction
   indexed by INSN_UID.  MAX is the size of the array.  */

void
compute_bb_for_insn (max)
     int max;
{
  int i;

  if (basic_block_for_insn)
    VARRAY_FREE (basic_block_for_insn);
  VARRAY_BB_INIT (basic_block_for_insn, max, "basic_block_for_insn");

  for (i = 0; i < n_basic_blocks; ++i)
    {
      basic_block bb = BASIC_BLOCK (i);
      rtx insn, end;

      end = bb->end;
      insn = bb->head;
      while (1)
	{
	  int uid = INSN_UID (insn);
	  if (uid < max)
	    VARRAY_BB (basic_block_for_insn, uid) = bb;
	  if (insn == end)
	    break;
	  insn = NEXT_INSN (insn);
	}
    }
}
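/* Once this mapping is computed, other code can go from an insn to its
   block in constant time via the BLOCK_FOR_INSN macro from basic-block.h,
   e.g. (illustrative only):

	basic_block bb = BLOCK_FOR_INSN (insn);
 */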
/* Free the memory associated with the edge structures.  */

static void
clear_edges ()
{
  int i;
  edge n, e;

  for (i = 0; i < n_basic_blocks; ++i)
    {
      basic_block bb = BASIC_BLOCK (i);

      for (e = bb->succ; e ; e = n)
	{
	  n = e->succ_next;
	  free (e);
	}

      bb->succ = 0;
      bb->pred = 0;
    }

  for (e = ENTRY_BLOCK_PTR->succ; e ; e = n)
    {
      n = e->succ_next;
      free (e);
    }

  ENTRY_BLOCK_PTR->succ = 0;
  EXIT_BLOCK_PTR->pred = 0;

  n_edges = 0;
}
/* Identify the edges between basic blocks.

   NONLOCAL_LABEL_LIST is a list of non-local labels in the function.  Blocks
   that are otherwise unreachable may be reachable with a non-local goto.

   BB_EH_END is an array indexed by basic block number in which we record
   the list of exception regions active at the end of the basic block.  */

static void
make_edges (label_value_list)
     rtx label_value_list;
{
  int i;
  eh_nesting_info *eh_nest_info = init_eh_nesting_info ();
  sbitmap *edge_cache = NULL;

  /* Assume no computed jump; revise as we create edges.  */
  current_function_has_computed_jump = 0;

  /* Heavy use of computed goto in machine-generated code can lead to
     nearly fully-connected CFGs.  In that case we spend a significant
     amount of time searching the edge lists for duplicates.  */
  if (forced_labels || label_value_list)
    {
      edge_cache = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
      sbitmap_vector_zero (edge_cache, n_basic_blocks);
    }

  /* By nature of the way these get numbered, block 0 is always the entry.  */
  make_edge (edge_cache, ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU);

  for (i = 0; i < n_basic_blocks; ++i)
    {
      basic_block bb = BASIC_BLOCK (i);
      rtx insn, x;
      enum rtx_code code;
      int force_fallthru = 0;

      /* Examine the last instruction of the block, and discover the
	 ways we can leave the block.  */

      insn = bb->end;
      code = GET_CODE (insn);

      /* A branch.  */
      if (code == JUMP_INSN)
	{
	  rtx tmp;

	  /* ??? Recognize a tablejump and do the right thing.  */
	  if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
	      && (tmp = NEXT_INSN (tmp)) != NULL_RTX
	      && GET_CODE (tmp) == JUMP_INSN
	      && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
		  || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
	    {
	      rtvec vec;
	      int j;

	      if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
		vec = XVEC (PATTERN (tmp), 0);
	      else
		vec = XVEC (PATTERN (tmp), 1);

	      for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
		make_label_edge (edge_cache, bb,
				 XEXP (RTVEC_ELT (vec, j), 0), 0);

	      /* Some targets (eg, ARM) emit a conditional jump that also
		 contains the out-of-range target.  Scan for these and
		 add an edge if necessary.  */
	      if ((tmp = single_set (insn)) != NULL
		  && SET_DEST (tmp) == pc_rtx
		  && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
		  && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
		make_label_edge (edge_cache, bb,
				 XEXP (XEXP (SET_SRC (tmp), 2), 0), 0);

#ifdef CASE_DROPS_THROUGH
	      /* Silly VAXen.  The ADDR_VEC is going to be in the way of
		 us naturally detecting fallthru into the next block.  */
	      force_fallthru = 1;
#endif
	    }

	  /* If this is a computed jump, then mark it as reaching
	     everything on the label_value_list and forced_labels list.  */
	  else if (computed_jump_p (insn))
	    {
	      current_function_has_computed_jump = 1;

	      for (x = label_value_list; x; x = XEXP (x, 1))
		make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);

	      for (x = forced_labels; x; x = XEXP (x, 1))
		make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
	    }

	  /* A return insn creates an exit out of the function.  */
	  else if (returnjump_p (insn))
	    make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);

	  /* Otherwise, we have a plain conditional or unconditional jump.  */
	  else
	    {
	      if (! JUMP_LABEL (insn))
		abort ();
	      make_label_edge (edge_cache, bb, JUMP_LABEL (insn), 0);
	    }
	}

      /* If this is a sibling call insn, then this is in effect a
	 combined call and return, and so we need an edge to the
	 exit block.  No need to worry about EH edges, since we
	 wouldn't have created the sibling call in the first place.  */

      if (code == CALL_INSN && SIBLING_CALL_P (insn))
	make_edge (edge_cache, bb, EXIT_BLOCK_PTR,
		   EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
      else

      /* If this is a CALL_INSN, then mark it as reaching the active EH
	 handler for this CALL_INSN.  If we're handling asynchronous
	 exceptions then any insn can reach any of the active handlers.

	 Also mark the CALL_INSN as reaching any nonlocal goto handler.  */

      if (code == CALL_INSN || asynchronous_exceptions)
	{
	  /* Add any appropriate EH edges.  We do this unconditionally
	     since there may be a REG_EH_REGION or REG_EH_RETHROW note
	     on the call, and this needn't be within an EH region.  */
	  make_eh_edge (edge_cache, eh_nest_info, bb, insn, bb->eh_end);

	  /* If we have asynchronous exceptions, do the same for *all*
	     exception regions active in the block.  */
	  if (asynchronous_exceptions
	      && bb->eh_beg != bb->eh_end)
	    {
	      if (bb->eh_beg >= 0)
		make_eh_edge (edge_cache, eh_nest_info, bb,
			      NULL_RTX, bb->eh_beg);

	      for (x = bb->head; x != bb->end; x = NEXT_INSN (x))
		if (GET_CODE (x) == NOTE
		    && (NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_BEG
			|| NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_END))
		  {
		    int region = NOTE_EH_HANDLER (x);
		    make_eh_edge (edge_cache, eh_nest_info, bb,
				  NULL_RTX, region);
		  }
	    }

	  if (code == CALL_INSN && nonlocal_goto_handler_labels)
	    {
	      /* ??? This could be made smarter: in some cases it's possible
		 to tell that certain calls will not do a nonlocal goto.

		 For example, if the nested functions that do the nonlocal
		 gotos do not have their addresses taken, then only calls to
		 those functions or to other nested functions that use them
		 could possibly do nonlocal gotos.  */
	      /* We do know that a REG_EH_REGION note with a value less
		 than 0 is guaranteed not to perform a non-local goto.  */
	      rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
	      if (!note || INTVAL (XEXP (note, 0)) >= 0)
		for (x = nonlocal_goto_handler_labels; x ; x = XEXP (x, 1))
		  make_label_edge (edge_cache, bb, XEXP (x, 0),
				   EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
	    }
	}

      /* We know something about the structure of the function __throw in
	 libgcc2.c.  It is the only function that ever contains eh_stub
	 labels.  It modifies its return address so that the last block
	 returns to one of the eh_stub labels within it.  So we have to
	 make additional edges in the flow graph.  */
      if (i + 1 == n_basic_blocks && eh_return_stub_label != 0)
	make_label_edge (edge_cache, bb, eh_return_stub_label, EDGE_EH);

      /* Find out if we can drop through to the next block.  */
      insn = next_nonnote_insn (insn);
      if (!insn || (i + 1 == n_basic_blocks && force_fallthru))
	make_edge (edge_cache, bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
      else if (i + 1 < n_basic_blocks)
	{
	  rtx tmp = BLOCK_HEAD (i + 1);
	  if (GET_CODE (tmp) == NOTE)
	    tmp = next_nonnote_insn (tmp);
	  if (force_fallthru || insn == tmp)
	    make_edge (edge_cache, bb, BASIC_BLOCK (i + 1), EDGE_FALLTHRU);
	}
    }

  free_eh_nesting_info (eh_nest_info);
  if (edge_cache)
    sbitmap_vector_free (edge_cache);
}
/* Create an edge between two basic blocks.  FLAGS are auxiliary information
   about the edge that is accumulated between calls.  */

void
make_edge (edge_cache, src, dst, flags)
     sbitmap *edge_cache;
     basic_block src, dst;
     int flags;
{
  int use_edge_cache;
  edge e;

  /* Don't bother with edge cache for ENTRY or EXIT; there aren't that
     many edges to them, and we didn't allocate memory for it.  */
  use_edge_cache = (edge_cache
		    && src != ENTRY_BLOCK_PTR
		    && dst != EXIT_BLOCK_PTR);

  /* Make sure we don't add duplicate edges.  */
  if (! use_edge_cache || TEST_BIT (edge_cache[src->index], dst->index))
    for (e = src->succ; e ; e = e->succ_next)
      if (e->dest == dst)
	{
	  e->flags |= flags;
	  return;
	}

  e = (edge) xcalloc (1, sizeof (*e));
  n_edges++;

  e->succ_next = src->succ;
  e->pred_next = dst->pred;
  e->src = src;
  e->dest = dst;
  e->flags = flags;

  src->succ = e;
  dst->pred = e;

  if (use_edge_cache)
    SET_BIT (edge_cache[src->index], dst->index);
}
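/* Example usage (hypothetical): connecting block BB to the next block
   with a fallthru edge, without the duplicate-suppression cache:

	make_edge (NULL, bb, BASIC_BLOCK (bb->index + 1), EDGE_FALLTHRU);

   Passing NULL for EDGE_CACHE is always safe; the cache is purely an
   optimization for densely connected graphs.  */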
/* Create an edge from a basic block to a label.  */

static void
make_label_edge (edge_cache, src, label, flags)
     sbitmap *edge_cache;
     basic_block src;
     rtx label;
     int flags;
{
  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If the label was never emitted, this insn is junk, but avoid a
     crash trying to refer to BLOCK_FOR_INSN (label).  This can happen
     as a result of a syntax error and a diagnostic has already been
     printed.  */

  if (INSN_UID (label) == 0)
    return;

  make_edge (edge_cache, src, BLOCK_FOR_INSN (label), flags);
}
/* Create the edges generated by INSN in REGION.  */

static void
make_eh_edge (edge_cache, eh_nest_info, src, insn, region)
     sbitmap *edge_cache;
     eh_nesting_info *eh_nest_info;
     basic_block src;
     rtx insn;
     int region;
{
  handler_info **handler_list;
  int num, is_call;

  is_call = (insn && GET_CODE (insn) == CALL_INSN ? EDGE_ABNORMAL_CALL : 0);
  num = reachable_handlers (region, eh_nest_info, insn, &handler_list);
  while (--num >= 0)
    make_label_edge (edge_cache, src, handler_list[num]->handler_label,
		     EDGE_ABNORMAL | EDGE_EH | is_call);
}
/* EH_REGION notes appearing between basic blocks are ambiguous, and even
   dangerous if we intend to move basic blocks around.  Move such notes
   into the following block.  */

static void
move_stray_eh_region_notes ()
{
  int i;
  basic_block b1, b2;

  if (n_basic_blocks < 2)
    return;

  b2 = BASIC_BLOCK (n_basic_blocks - 1);
  for (i = n_basic_blocks - 2; i >= 0; --i, b2 = b1)
    {
      rtx insn, next, list = NULL_RTX;

      b1 = BASIC_BLOCK (i);
      for (insn = NEXT_INSN (b1->end); insn != b2->head; insn = next)
	{
	  next = NEXT_INSN (insn);
	  if (GET_CODE (insn) == NOTE
	      && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG
		  || NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
	    {
	      /* Unlink from the insn chain.  */
	      NEXT_INSN (PREV_INSN (insn)) = next;
	      PREV_INSN (next) = PREV_INSN (insn);

	      /* Queue it.  */
	      NEXT_INSN (insn) = list;
	      list = insn;
	    }
	}

      if (list == NULL_RTX)
	continue;

      /* Find where to insert these things.  */
      insn = b2->head;
      if (GET_CODE (insn) == CODE_LABEL)
	insn = NEXT_INSN (insn);

      while (list)
	{
	  next = NEXT_INSN (list);
	  add_insn_after (list, insn);
	  list = next;
	}
    }
}
/* Recompute eh_beg/eh_end for each basic block.  */

static void
record_active_eh_regions (f)
     rtx f;
{
  rtx insn, eh_list = NULL_RTX;
  int i = 0;
  basic_block bb = BASIC_BLOCK (0);

  for (insn = f; insn ; insn = NEXT_INSN (insn))
    {
      if (bb->head == insn)
	bb->eh_beg = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);

      if (GET_CODE (insn) == NOTE)
	{
	  int kind = NOTE_LINE_NUMBER (insn);
	  if (kind == NOTE_INSN_EH_REGION_BEG)
	    eh_list = alloc_INSN_LIST (insn, eh_list);
	  else if (kind == NOTE_INSN_EH_REGION_END)
	    {
	      rtx t = XEXP (eh_list, 1);
	      free_INSN_LIST_node (eh_list);
	      eh_list = t;
	    }
	}

      if (bb->end == insn)
	{
	  bb->eh_end = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);
	  i += 1;
	  if (i == n_basic_blocks)
	    break;
	  bb = BASIC_BLOCK (i);
	}
    }
}
/* Identify critical edges and set the bits appropriately.  */

static void
mark_critical_edges ()
{
  int i, n = n_basic_blocks;
  basic_block bb;

  /* We begin with the entry block.  This is not terribly important now,
     but could be if a front end (Fortran) implemented alternate entry
     points.  */
  bb = ENTRY_BLOCK_PTR;
  i = -1;

  while (1)
    {
      edge e;

      /* (1) Critical edges must have a source with multiple successors.  */
      if (bb->succ && bb->succ->succ_next)
	{
	  for (e = bb->succ; e ; e = e->succ_next)
	    {
	      /* (2) Critical edges must have a destination with multiple
		 predecessors.  Note that we know there is at least one
		 predecessor -- the edge we followed to get here.  */
	      if (e->dest->pred->pred_next)
		e->flags |= EDGE_CRITICAL;
	      else
		e->flags &= ~EDGE_CRITICAL;
	    }
	}
      else
	{
	  for (e = bb->succ; e ; e = e->succ_next)
	    e->flags &= ~EDGE_CRITICAL;
	}

      if (++i >= n)
	break;
      bb = BASIC_BLOCK (i);
    }
}
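/* Restating the two tests above as a single predicate: an edge E is
   critical iff its source has more than one successor and its destination
   has more than one predecessor.  An equivalent stand-alone check would
   look like this (illustrative sketch only, not used by the compiler):  */
#if 0
static int
edge_is_critical_p (e)
     edge e;
{
  /* E is on its source's successor list and its destination's
     predecessor list, so both head pointers are known non-null.  */
  return (e->src->succ->succ_next != NULL
	  && e->dest->pred->pred_next != NULL);
}
#endif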
/* Split a (typically critical) edge.  Return the new block.
   Abort on abnormal edges.

   ??? The code generally expects to be called on critical edges.
   The case of a block ending in an unconditional jump to a
   block with multiple predecessors is not handled optimally.  */

basic_block
split_edge (edge_in)
     edge edge_in;
{
  basic_block old_pred, bb, old_succ;
  edge edge_out;
  rtx bb_note;
  int i, j;

  /* Abnormal edges cannot be split.  */
  if ((edge_in->flags & EDGE_ABNORMAL) != 0)
    abort ();

  old_pred = edge_in->src;
  old_succ = edge_in->dest;

  /* Remove the existing edge from the destination's pred list.  */
  {
    edge *pp;
    for (pp = &old_succ->pred; *pp != edge_in; pp = &(*pp)->pred_next)
      continue;
    *pp = edge_in->pred_next;
    edge_in->pred_next = NULL;
  }

  /* Create the new structures.  */
  bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
  edge_out = (edge) xcalloc (1, sizeof (*edge_out));
  n_edges++;

  memset (bb, 0, sizeof (*bb));

  /* ??? This info is likely going to be out of date very soon.  */
  if (old_succ->global_live_at_start)
    {
      bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
      bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);
      COPY_REG_SET (bb->global_live_at_start, old_succ->global_live_at_start);
      COPY_REG_SET (bb->global_live_at_end, old_succ->global_live_at_start);
    }

  /* Wire them up.  */
  bb->pred = edge_in;
  bb->succ = edge_out;
  bb->count = edge_in->count;

  edge_in->dest = bb;
  edge_in->flags &= ~EDGE_CRITICAL;

  edge_out->pred_next = old_succ->pred;
  edge_out->succ_next = NULL;
  edge_out->src = bb;
  edge_out->dest = old_succ;
  edge_out->flags = EDGE_FALLTHRU;
  edge_out->probability = REG_BR_PROB_BASE;
  edge_out->count = edge_in->count;

  old_succ->pred = edge_out;

  /* Tricky case -- if there existed a fallthru into the successor
     (and we're not it) we must add a new unconditional jump around
     the new block we're actually interested in.

     Further, if that edge is critical, this means a second new basic
     block must be created to hold it.  In order to simplify correct
     insn placement, do this before we touch the existing basic block
     ordering for the block we were really wanting.  */
  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
    {
      edge e;
      for (e = edge_out->pred_next; e ; e = e->pred_next)
	if (e->flags & EDGE_FALLTHRU)
	  break;

      if (e)
	{
	  basic_block jump_block;
	  rtx pos;

	  if ((e->flags & EDGE_CRITICAL) == 0
	      && e->src != ENTRY_BLOCK_PTR)
	    {
	      /* Non critical -- we can simply add a jump to the end
		 of the existing predecessor.  */
	      jump_block = e->src;
	    }
	  else
	    {
	      /* We need a new block to hold the jump.  The simplest
		 way to do the bulk of the work here is to recursively
		 call ourselves.  */
	      jump_block = split_edge (e);
	      e = jump_block->succ;
	    }

	  /* Now add the jump insn ...  */
	  pos = emit_jump_insn_after (gen_jump (old_succ->head),
				      jump_block->end);
	  jump_block->end = pos;
	  if (basic_block_for_insn)
	    set_block_for_insn (pos, jump_block);
	  emit_barrier_after (pos);

	  /* ... let jump know that label is in use, ...  */
	  JUMP_LABEL (pos) = old_succ->head;
	  ++LABEL_NUSES (old_succ->head);

	  /* ... and clear fallthru on the outgoing edge.  */
	  e->flags &= ~EDGE_FALLTHRU;

	  /* Continue splitting the interesting edge.  */
	}
    }

  /* Place the new block just in front of the successor.  */
  VARRAY_GROW (basic_block_info, ++n_basic_blocks);
  if (old_succ == EXIT_BLOCK_PTR)
    j = n_basic_blocks - 1;
  else
    j = old_succ->index;
  for (i = n_basic_blocks - 1; i > j; --i)
    {
      basic_block tmp = BASIC_BLOCK (i - 1);
      BASIC_BLOCK (i) = tmp;
      tmp->index = i;
    }
  BASIC_BLOCK (i) = bb;
  bb->index = i;

  /* Create the basic block note.

     Where we place the note can have a noticeable impact on the generated
     code.  Consider this cfg:

		        E
			|
			0
		       / \
		   +->1-->2--->E
		   |  |
		   +--+

     If we need to insert an insn on the edge from block 0 to block 1,
     we want to ensure the instructions we insert are outside of any
     loop notes that physically sit between block 0 and block 1.  Otherwise
     we confuse the loop optimizer into thinking the loop is a phony.  */
  if (old_succ != EXIT_BLOCK_PTR
      && PREV_INSN (old_succ->head)
      && GET_CODE (PREV_INSN (old_succ->head)) == NOTE
      && NOTE_LINE_NUMBER (PREV_INSN (old_succ->head)) == NOTE_INSN_LOOP_BEG)
    bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK,
				PREV_INSN (old_succ->head));
  else if (old_succ != EXIT_BLOCK_PTR)
    bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, old_succ->head);
  else
    bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
  NOTE_BASIC_BLOCK (bb_note) = bb;
  bb->head = bb->end = bb_note;

  /* Not quite simple -- for non-fallthru edges, we must adjust the
     predecessor's jump instruction to target our new block.  */
  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
    {
      rtx tmp, insn = old_pred->end;
      rtx old_label = old_succ->head;
      rtx new_label = gen_label_rtx ();

      if (GET_CODE (insn) != JUMP_INSN)
	abort ();

      /* ??? Recognize a tablejump and adjust all matching cases.  */
      if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
	  && (tmp = NEXT_INSN (tmp)) != NULL_RTX
	  && GET_CODE (tmp) == JUMP_INSN
	  && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
	      || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
	{
	  rtvec vec;
	  int j;

	  if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
	    vec = XVEC (PATTERN (tmp), 0);
	  else
	    vec = XVEC (PATTERN (tmp), 1);

	  for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
	    if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
	      {
		RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (VOIDmode, new_label);
		--LABEL_NUSES (old_label);
		++LABEL_NUSES (new_label);
	      }

	  /* Handle casesi dispatch insns */
	  if ((tmp = single_set (insn)) != NULL
	      && SET_DEST (tmp) == pc_rtx
	      && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
	      && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
	      && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
	    {
	      XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (VOIDmode,
							   new_label);
	      --LABEL_NUSES (old_label);
	      ++LABEL_NUSES (new_label);
	    }
	}
      else
	{
	  /* This would have indicated an abnormal edge.  */
	  if (computed_jump_p (insn))
	    abort ();

	  /* A return instruction can't be redirected.  */
	  if (returnjump_p (insn))
	    abort ();

	  /* If the insn doesn't go where we think, we're confused.  */
	  if (JUMP_LABEL (insn) != old_label)
	    abort ();

	  redirect_jump (insn, new_label, 0);
	}

      emit_label_before (new_label, bb_note);
      bb->head = new_label;
    }

  return bb;
}
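/* A typical use (hypothetical sketch): a pass that must place code on a
   critical edge first splits it, then emits into the new block:

	if (e->flags & EDGE_CRITICAL)
	  {
	    basic_block new_bb = split_edge (e);
	    ... emit code at new_bb->end ...
	  }

   In practice most callers use insert_insn_on_edge below, which defers
   the splitting to commit_one_edge_insertion.  */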
/* Queue instructions for insertion on an edge between two basic blocks.
   The new instructions and basic blocks (if any) will not appear in the
   CFG until commit_edge_insertions is called.  */

void
insert_insn_on_edge (pattern, e)
     rtx pattern;
     edge e;
{
  /* We cannot insert instructions on an abnormal critical edge.
     It will be easier to find the culprit if we die now.  */
  if ((e->flags & (EDGE_ABNORMAL|EDGE_CRITICAL))
      == (EDGE_ABNORMAL|EDGE_CRITICAL))
    abort ();

  if (e->insns == NULL_RTX)
    start_sequence ();
  else
    push_to_sequence (e->insns);

  emit_insn (pattern);

  e->insns = get_insns ();
  end_sequence ();
}
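/* Example (hypothetical): queueing a register-to-register copy on each
   edge leaving block BB, then realizing all of the queued code at once:

	for (e = bb->succ; e; e = e->succ_next)
	  insert_insn_on_edge (gen_move_insn (dest, src), e);
	...
	commit_edge_insertions ();

   gen_move_insn here stands in for whatever pattern the caller builds;
   nothing appears in the insn stream until commit_edge_insertions runs.  */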
/* Update the CFG for the instructions queued on edge E.  */

static void
commit_one_edge_insertion (e)
     edge e;
{
  rtx before = NULL_RTX, after = NULL_RTX, insns, tmp, last;
  basic_block bb;

  /* Pull the insns off the edge now since the edge might go away.  */
  insns = e->insns;
  e->insns = NULL_RTX;

  /* Figure out where to put these things.  If the destination has
     one predecessor, insert there.  Except for the exit block.  */
  if (e->dest->pred->pred_next == NULL
      && e->dest != EXIT_BLOCK_PTR)
    {
      bb = e->dest;

      /* Get the location correct wrt a code label, and "nice" wrt
	 a basic block note, and before everything else.  */
      tmp = bb->head;
      if (GET_CODE (tmp) == CODE_LABEL)
	tmp = NEXT_INSN (tmp);
      if (GET_CODE (tmp) == NOTE
	  && NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BASIC_BLOCK)
	tmp = NEXT_INSN (tmp);
      if (tmp == bb->head)
	before = tmp;
      else
	after = PREV_INSN (tmp);
    }

  /* If the source has one successor and the edge is not abnormal,
     insert there.  Except for the entry block.  */
  else if ((e->flags & EDGE_ABNORMAL) == 0
	   && e->src->succ->succ_next == NULL
	   && e->src != ENTRY_BLOCK_PTR)
    {
      bb = e->src;
      /* It is possible to have a non-simple jump here.  Consider a target
	 where some forms of unconditional jumps clobber a register.  This
	 happens on the fr30 for example.

	 We know this block has a single successor, so we can just emit
	 the queued insns before the jump.  */
      if (GET_CODE (bb->end) == JUMP_INSN)
	{
	  before = bb->end;
	}
      else
	{
	  /* We'd better be fallthru, or we've lost track of what's what.  */
	  if ((e->flags & EDGE_FALLTHRU) == 0)
	    abort ();

	  after = bb->end;
	}
    }

  /* Otherwise we must split the edge.  */
  else
    {
      bb = split_edge (e);
      after = bb->end;
    }

  /* Now that we've found the spot, do the insertion.  */

  /* Set the new block number for these insns, if structure is allocated.  */
  if (basic_block_for_insn)
    {
      rtx i;
      for (i = insns; i != NULL_RTX; i = NEXT_INSN (i))
	set_block_for_insn (i, bb);
    }

  if (before)
    {
      emit_insns_before (insns, before);
      if (before == bb->head)
	bb->head = insns;

      last = prev_nonnote_insn (before);
    }
  else
    {
      last = emit_insns_after (insns, after);
      if (after == bb->end)
	bb->end = last;
    }

  if (returnjump_p (last))
    {
      /* ??? Remove all outgoing edges from BB and add one for EXIT.
	 This is not currently a problem because this only happens
	 for the (single) epilogue, which already has a fallthru edge
	 to EXIT.  */

      e = bb->succ;
      if (e->dest != EXIT_BLOCK_PTR
	  || e->succ_next != NULL
	  || (e->flags & EDGE_FALLTHRU) == 0)
	abort ();
      e->flags &= ~EDGE_FALLTHRU;

      emit_barrier_after (last);
      bb->end = last;

      if (before)
	flow_delete_insn (before);
    }
  else if (GET_CODE (last) == JUMP_INSN)
    abort ();
}
/* Update the CFG for all queued instructions.  */

void
commit_edge_insertions ()
{
  int i;
  basic_block bb;

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  i = -1;
  bb = ENTRY_BLOCK_PTR;
  while (1)
    {
      edge e, next;

      for (e = bb->succ; e ; e = next)
	{
	  next = e->succ_next;
	  if (e->insns)
	    commit_one_edge_insertion (e);
	}

      if (++i >= n_basic_blocks)
	break;
      bb = BASIC_BLOCK (i);
    }
}
/* Delete all unreachable basic blocks.  */

static void
delete_unreachable_blocks ()
{
  basic_block *worklist, *tos;
  int deleted_handler;
  edge e;
  int i, n;

  n = n_basic_blocks;
  tos = worklist = (basic_block *) xmalloc (sizeof (basic_block) * n);

  /* Use basic_block->aux as a marker.  Clear them all.  */

  for (i = 0; i < n; ++i)
    BASIC_BLOCK (i)->aux = NULL;

  /* Add our starting points to the worklist.  Almost always there will
     be only one.  It isn't inconceivable that we might one day directly
     support Fortran alternate entry points.  */

  for (e = ENTRY_BLOCK_PTR->succ; e ; e = e->succ_next)
    {
      *tos++ = e->dest;

      /* Mark the block with a handy non-null value.  */
      e->dest->aux = e;
    }

  /* Iterate: find everything reachable from what we've already seen.  */

  while (tos != worklist)
    {
      basic_block b = *--tos;

      for (e = b->succ; e ; e = e->succ_next)
	if (!e->dest->aux)
	  {
	    *tos++ = e->dest;
	    e->dest->aux = e;
	  }
    }

  /* Delete all unreachable basic blocks.  Count down so that we don't
     interfere with the block renumbering that happens in flow_delete_block.  */

  deleted_handler = 0;

  for (i = n - 1; i >= 0; --i)
    {
      basic_block b = BASIC_BLOCK (i);

      if (b->aux != NULL)
	/* This block was found.  Tidy up the mark.  */
	b->aux = NULL;
      else
	deleted_handler |= flow_delete_block (b);
    }

  tidy_fallthru_edges ();

  /* If we deleted an exception handler, we may have EH region begin/end
     blocks to remove as well.  */
  if (deleted_handler)
    delete_eh_regions ();

  free (worklist);
}
/* Find EH regions for which there is no longer a handler, and delete them.  */

static void
delete_eh_regions ()
{
  rtx insn;

  update_rethrow_references ();

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == NOTE)
      {
	if ((NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
	    || (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
	  {
	    int num = NOTE_EH_HANDLER (insn);
	    /* A NULL handler indicates a region is no longer needed,
	       as long as its rethrow label isn't used.  */
	    if (get_first_handler (num) == NULL && ! rethrow_used (num))
	      {
		NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
		NOTE_SOURCE_FILE (insn) = 0;
	      }
	  }
      }
}
/* Return true if NOTE is not one of the ones that must be kept paired,
   so that we may simply delete them.  */

static int
can_delete_note_p (note)
     rtx note;
{
  return (NOTE_LINE_NUMBER (note) == NOTE_INSN_DELETED
	  || NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK);
}
/* Unlink a chain of insns between START and FINISH, leaving notes
   that must be paired.  */

void
flow_delete_insn_chain (start, finish)
     rtx start, finish;
{
  /* Unchain the insns one by one.  It would be quicker to delete all
     of these with a single unchaining, rather than one at a time, but
     we need to keep the NOTEs.  */

  rtx next;

  while (1)
    {
      next = NEXT_INSN (start);
      if (GET_CODE (start) == NOTE && !can_delete_note_p (start))
	;
      else if (GET_CODE (start) == CODE_LABEL
	       && ! can_delete_label_p (start))
	{
	  const char *name = LABEL_NAME (start);
	  PUT_CODE (start, NOTE);
	  NOTE_LINE_NUMBER (start) = NOTE_INSN_DELETED_LABEL;
	  NOTE_SOURCE_FILE (start) = name;
	}
      else
	next = flow_delete_insn (start);

      if (start == finish)
	break;
      start = next;
    }
}
1941 /* Delete the insns in a (non-live) block. We physically delete every
1942 non-deleted-note insn, and update the flow graph appropriately.
1944 Return nonzero if we deleted an exception handler. */
1946 /* ??? Preserving all such notes strikes me as wrong. It would be nice
1947 to post-process the stream to remove empty blocks, loops, ranges, etc. */
1949 int
1950 flow_delete_block (b)
1951 basic_block b;
1953 int deleted_handler = 0;
1954 rtx insn, end, tmp;
1956 /* If the head of this block is a CODE_LABEL, then it might be the
1957 label for an exception handler which can't be reached.
1959 We need to remove the label from the exception_handler_label list
1960 and remove the associated NOTE_INSN_EH_REGION_BEG and
1961 NOTE_INSN_EH_REGION_END notes. */
1963 insn = b->head;
1965 never_reached_warning (insn);
1967 if (GET_CODE (insn) == CODE_LABEL)
1969 rtx x, *prev = &exception_handler_labels;
1971 for (x = exception_handler_labels; x; x = XEXP (x, 1))
1973 if (XEXP (x, 0) == insn)
1975 /* Found a match, splice this label out of the EH label list. */
1976 *prev = XEXP (x, 1);
1977 XEXP (x, 1) = NULL_RTX;
1978 XEXP (x, 0) = NULL_RTX;
1980 /* Remove the handler from all regions. */
1981 remove_handler (insn);
1982 deleted_handler = 1;
1983 break;
1985 prev = &XEXP (x, 1);
1989 /* Include any jump table following the basic block. */
1990 end = b->end;
1991 if (GET_CODE (end) == JUMP_INSN
1992 && (tmp = JUMP_LABEL (end)) != NULL_RTX
1993 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
1994 && GET_CODE (tmp) == JUMP_INSN
1995 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
1996 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
1997 end = tmp;
1999 /* Include any barrier that may follow the basic block. */
2000 tmp = next_nonnote_insn (end);
2001 if (tmp && GET_CODE (tmp) == BARRIER)
2002 end = tmp;
2004 /* Selectively delete the entire chain. */
2005 flow_delete_insn_chain (insn, end);
2007 /* Remove the edges into and out of this block. Note that there may
2008 indeed be edges in, if we are removing an unreachable loop. */
2010 edge e, next, *q;
2012 for (e = b->pred; e ; e = next)
2014 for (q = &e->src->succ; *q != e; q = &(*q)->succ_next)
2015 continue;
2016 *q = e->succ_next;
2017 next = e->pred_next;
2018 n_edges--;
2019 free (e);
2021 for (e = b->succ; e ; e = next)
2023 for (q = &e->dest->pred; *q != e; q = &(*q)->pred_next)
2024 continue;
2025 *q = e->pred_next;
2026 next = e->succ_next;
2027 n_edges--;
2028 free (e);
2031 b->pred = NULL;
2032 b->succ = NULL;
2035 /* Remove the basic block from the array, and compact behind it. */
2036 expunge_block (b);
2038 return deleted_handler;
2041 /* Remove block B from the basic block array and compact behind it. */
2043 static void
2044 expunge_block (b)
2045 basic_block b;
2047 int i, n = n_basic_blocks;
2049 for (i = b->index; i + 1 < n; ++i)
2051 basic_block x = BASIC_BLOCK (i + 1);
2052 BASIC_BLOCK (i) = x;
2053 x->index = i;
2056 basic_block_info->num_elements--;
2057 n_basic_blocks--;
2060 /* Delete INSN by patching it out. Return the next insn. */
2062 rtx
2063 flow_delete_insn (insn)
2064 rtx insn;
2066 rtx prev = PREV_INSN (insn);
2067 rtx next = NEXT_INSN (insn);
2068 rtx note;
2070 PREV_INSN (insn) = NULL_RTX;
2071 NEXT_INSN (insn) = NULL_RTX;
2072 INSN_DELETED_P (insn) = 1;
2074 if (prev)
2075 NEXT_INSN (prev) = next;
2076 if (next)
2077 PREV_INSN (next) = prev;
2078 else
2079 set_last_insn (prev);
2081 if (GET_CODE (insn) == CODE_LABEL)
2082 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
2084 /* If deleting a jump, decrement the use count of the label. Deleting
2085 the label itself should happen in the normal course of block merging. */
2086 if (GET_CODE (insn) == JUMP_INSN
2087 && JUMP_LABEL (insn)
2088 && GET_CODE (JUMP_LABEL (insn)) == CODE_LABEL)
2089 LABEL_NUSES (JUMP_LABEL (insn))--;
2091 /* Also if deleting an insn that references a label. */
2092 else if ((note = find_reg_note (insn, REG_LABEL, NULL_RTX)) != NULL_RTX
2093 && GET_CODE (XEXP (note, 0)) == CODE_LABEL)
2094 LABEL_NUSES (XEXP (note, 0))--;
2096 return next;
2099 /* True if a given label can be deleted. */
2101 static int
2102 can_delete_label_p (label)
2103 rtx label;
2105 rtx x;
2107 if (LABEL_PRESERVE_P (label))
2108 return 0;
2110 for (x = forced_labels; x ; x = XEXP (x, 1))
2111 if (label == XEXP (x, 0))
2112 return 0;
2113 for (x = label_value_list; x ; x = XEXP (x, 1))
2114 if (label == XEXP (x, 0))
2115 return 0;
2116 for (x = exception_handler_labels; x ; x = XEXP (x, 1))
2117 if (label == XEXP (x, 0))
2118 return 0;
2120 /* User declared labels must be preserved. */
2121 if (LABEL_NAME (label) != 0)
2122 return 0;
2124 return 1;
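/* Return nonzero if LABEL appears in tail_recursion_label_list, i.e. it
   marks a tail-recursion entry point that must not be merged away.  */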
2127 static int
2128 tail_recursion_label_p (label)
2129 rtx label;
2131 rtx x;
2133 for (x = tail_recursion_label_list; x ; x = XEXP (x, 1))
2134 if (label == XEXP (x, 0))
2135 return 1;
2137 return 0;
2140 /* Blocks A and B are to be merged into a single block A. The insns
2141 are already contiguous, hence `nomove'. */
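/* In outline: strip B's label and basic block note, strip A's final
   jump (and, on cc0 machines, the insn that set cc0 for it), splice out
   everything so marked, retarget B's outgoing edges to A, and finally
   expunge B from the block array.  */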
2143 void
2144 merge_blocks_nomove (a, b)
2145 basic_block a, b;
2147 edge e;
2148 rtx b_head, b_end, a_end;
2149 rtx del_first = NULL_RTX, del_last = NULL_RTX;
2150 int b_empty = 0;
2152 /* If there was a CODE_LABEL beginning B, delete it. */
2153 b_head = b->head;
2154 b_end = b->end;
2155 if (GET_CODE (b_head) == CODE_LABEL)
2157 /* Detect basic blocks with nothing but a label. This can happen
2158 in particular at the end of a function. */
2159 if (b_head == b_end)
2160 b_empty = 1;
2161 del_first = del_last = b_head;
2162 b_head = NEXT_INSN (b_head);
2165 /* Delete the basic block note. */
2166 if (GET_CODE (b_head) == NOTE
2167 && NOTE_LINE_NUMBER (b_head) == NOTE_INSN_BASIC_BLOCK)
2169 if (b_head == b_end)
2170 b_empty = 1;
2171 if (! del_last)
2172 del_first = b_head;
2173 del_last = b_head;
2174 b_head = NEXT_INSN (b_head);
2177 /* If there was a jump out of A, delete it. */
2178 a_end = a->end;
2179 if (GET_CODE (a_end) == JUMP_INSN)
2181 rtx prev;
2183 prev = prev_nonnote_insn (a_end);
2184 if (!prev)
2185 prev = a->head;
2187 del_first = a_end;
2189 #ifdef HAVE_cc0
2190 /* If this was a conditional jump, we need to also delete
2191 the insn that set cc0. */
2192 if (prev && sets_cc0_p (prev))
2194 rtx tmp = prev;
2195 prev = prev_nonnote_insn (prev);
2196 if (!prev)
2197 prev = a->head;
2198 del_first = tmp;
2200 #endif
2202 a_end = prev;
2204 else if (GET_CODE (NEXT_INSN (a_end)) == BARRIER)
2205 del_first = NEXT_INSN (a_end);
2207 /* Delete everything marked above as well as crap that might be
2208 hanging out between the two blocks. */
2209 flow_delete_insn_chain (del_first, del_last);
2211 /* Normally there should only be one successor of A and that is B, but
2212 partway through the merge of blocks for conditional_execution we'll
2213 be merging a TEST block with THEN and ELSE successors. Free the
2214 whole lot of them and hope the caller knows what they're doing. */
2215 while (a->succ)
2216 remove_edge (a->succ);
2218 /* Adjust the edges out of B for the new owner. */
2219 for (e = b->succ; e ; e = e->succ_next)
2220 e->src = a;
2221 a->succ = b->succ;
2223 /* B hasn't quite yet ceased to exist. Attempt to prevent mishap. */
2224 b->pred = b->succ = NULL;
2226 /* Reassociate the insns of B with A. */
2227 if (!b_empty)
2229 if (basic_block_for_insn)
2231 BLOCK_FOR_INSN (b_head) = a;
2232 while (b_head != b_end)
2234 b_head = NEXT_INSN (b_head);
2235 BLOCK_FOR_INSN (b_head) = a;
2238 a_end = b_end;
2240 a->end = a_end;
2242 expunge_block (b);
2245 /* Blocks A and B are to be merged into a single block. A has no incoming
2246 fallthru edge, so it can be moved before B without adding or modifying
2247 any jumps (aside from the jump from A to B). */
2249 static int
2250 merge_blocks_move_predecessor_nojumps (a, b)
2251 basic_block a, b;
2253 rtx start, end, barrier;
2254 int index;
2256 start = a->head;
2257 end = a->end;
2259 barrier = next_nonnote_insn (end);
2260 if (GET_CODE (barrier) != BARRIER)
2261 abort ();
2262 flow_delete_insn (barrier);
2264 /* Move block and loop notes out of the chain so that we do not
2265 disturb their order.
2267 ??? A better solution would be to squeeze out all the non-nested notes
2268 and adjust the block trees appropriately. Even better would be to have
2269 a tighter connection between block trees and rtl so that this is not
2270 necessary. */
2271 start = squeeze_notes (start, end);
2273 /* Scramble the insn chain. */
2274 if (end != PREV_INSN (b->head))
2275 reorder_insns (start, end, PREV_INSN (b->head));
2277 if (rtl_dump_file)
2279 fprintf (rtl_dump_file, "Moved block %d before %d and merged.\n",
2280 a->index, b->index);
2283 /* Swap the records for the two blocks around. Although we are deleting B,
2284 A is now where B was and we want to compact the BB array from where
2285 A used to be. */
2286 BASIC_BLOCK (a->index) = b;
2287 BASIC_BLOCK (b->index) = a;
2288 index = a->index;
2289 a->index = b->index;
2290 b->index = index;
2292 /* Now blocks A and B are contiguous. Merge them. */
2293 merge_blocks_nomove (a, b);
2295 return 1;
2298 /* Blocks A and B are to be merged into a single block. B has no outgoing
2299 fallthru edge, so it can be moved after A without adding or modifying
2300 any jumps (aside from the jump from A to B). */
2302 static int
2303 merge_blocks_move_successor_nojumps (a, b)
2304 basic_block a, b;
2306 rtx start, end, barrier;
2308 start = b->head;
2309 end = b->end;
2310 barrier = NEXT_INSN (end);
2312 /* Recognize a jump table following block B. */
2313 if (GET_CODE (barrier) == CODE_LABEL
2314 && NEXT_INSN (barrier)
2315 && GET_CODE (NEXT_INSN (barrier)) == JUMP_INSN
2316 && (GET_CODE (PATTERN (NEXT_INSN (barrier))) == ADDR_VEC
2317 || GET_CODE (PATTERN (NEXT_INSN (barrier))) == ADDR_DIFF_VEC))
2319 end = NEXT_INSN (barrier);
2320 barrier = NEXT_INSN (end);
2323 /* There had better have been a barrier there. Delete it. */
2324 if (GET_CODE (barrier) != BARRIER)
2325 abort ();
2326 flow_delete_insn (barrier);
2328 /* Move block and loop notes out of the chain so that we do not
2329 disturb their order.
2331 ??? A better solution would be to squeeze out all the non-nested notes
2332 and adjust the block trees appropriately. Even better would be to have
2333 a tighter connection between block trees and rtl so that this is not
2334 necessary. */
2335 start = squeeze_notes (start, end);
2337 /* Scramble the insn chain. */
2338 reorder_insns (start, end, a->end);
2340 /* Now blocks A and B are contiguous. Merge them. */
2341 merge_blocks_nomove (a, b);
2343 if (rtl_dump_file)
2345 fprintf (rtl_dump_file, "Moved block %d after %d and merged.\n",
2346 b->index, a->index);
2349 return 1;
2352 /* Attempt to merge basic blocks that are potentially non-adjacent.
2353 Return true iff the attempt succeeded. */
2355 static int
2356 merge_blocks (e, b, c)
2357 edge e;
2358 basic_block b, c;
2360 /* If C has a tail recursion label, do not merge. There is no
2361 edge recorded from the call_placeholder back to this label, as
2362 that would make optimize_sibling_and_tail_recursive_calls more
2363 complex for no gain. */
2364 if (GET_CODE (c->head) == CODE_LABEL
2365 && tail_recursion_label_p (c->head))
2366 return 0;
2368 /* If B has a fallthru edge to C, no need to move anything. */
2369 if (e->flags & EDGE_FALLTHRU)
2371 merge_blocks_nomove (b, c);
2373 if (rtl_dump_file)
2375 fprintf (rtl_dump_file, "Merged %d and %d without moving.\n",
2376 b->index, c->index);
2379 return 1;
2381 else
2383 edge tmp_edge;
2384 basic_block d;
2385 int c_has_outgoing_fallthru;
2386 int b_has_incoming_fallthru;
2388 /* We must make sure to not munge nesting of exception regions,
2389 lexical blocks, and loop notes.
2391 The first is taken care of by requiring that the active eh
2392 region at the end of one block always matches the active eh
2393 region at the beginning of the next block.
2395 The latter two are taken care of by squeezing out all the notes. */
2397 /* ??? A throw/catch edge (or any abnormal edge) should be rarely
2398 executed and we may want to treat blocks which have two out
2399 edges, one normal, one abnormal as only having one edge for
2400 block merging purposes. */
2402 for (tmp_edge = c->succ; tmp_edge ; tmp_edge = tmp_edge->succ_next)
2403 if (tmp_edge->flags & EDGE_FALLTHRU)
2404 break;
2405 c_has_outgoing_fallthru = (tmp_edge != NULL);
2407 for (tmp_edge = b->pred; tmp_edge ; tmp_edge = tmp_edge->pred_next)
2408 if (tmp_edge->flags & EDGE_FALLTHRU)
2409 break;
2410 b_has_incoming_fallthru = (tmp_edge != NULL);
2412 /* If B does not have an incoming fallthru, and the exception regions
2413 match, then it can be moved immediately before C without introducing
2414 or modifying jumps.
2416 C cannot be the first block, so we do not have to worry about
2417 accessing a non-existent block. */
2418 d = BASIC_BLOCK (c->index - 1);
2419 if (! b_has_incoming_fallthru
2420 && d->eh_end == b->eh_beg
2421 && b->eh_end == c->eh_beg)
2422 return merge_blocks_move_predecessor_nojumps (b, c);
2424 /* Otherwise, we're going to try to move C after B. Make sure the
2425 exception regions match.
2427 If B is the last basic block, then we must not try to access the
2428 block structure for block B + 1. Luckily in that case we do not
2429 need to worry about matching exception regions. */
2430 d = (b->index + 1 < n_basic_blocks ? BASIC_BLOCK (b->index + 1) : NULL);
2431 if (b->eh_end == c->eh_beg
2432 && (d == NULL || c->eh_end == d->eh_beg))
2434 /* If C does not have an outgoing fallthru, then it can be moved
2435 immediately after B without introducing or modifying jumps. */
2436 if (! c_has_outgoing_fallthru)
2437 return merge_blocks_move_successor_nojumps (b, c);
2439 /* Otherwise, we'll need to insert an extra jump, and possibly
2440 a new block to contain it. */
2441 /* ??? Not implemented yet. */
2444 return 0;
2448 /* Top level driver for merge_blocks. */
2450 static void
2451 try_merge_blocks ()
2453 int i;
2455 /* Attempt to merge blocks as made possible by edge removal. If a block
2456 has only one successor, and the successor has only one predecessor,
2457 they may be combined. */
2459 for (i = 0; i < n_basic_blocks; )
2461 basic_block c, b = BASIC_BLOCK (i);
2462 edge s;
2464 /* A loop because chains of blocks might be combinable. */
2465 while ((s = b->succ) != NULL
2466 && s->succ_next == NULL
2467 && (s->flags & EDGE_EH) == 0
2468 && (c = s->dest) != EXIT_BLOCK_PTR
2469 && c->pred->pred_next == NULL
2470 /* If the jump insn has side effects, we can't kill the edge. */
2471 && (GET_CODE (b->end) != JUMP_INSN
2472 || onlyjump_p (b->end))
2473 && merge_blocks (s, b, c))
2474 continue;
2476 /* Don't get confused by the index shift caused by deleting blocks. */
2477 i = b->index + 1;
2481 /* The given edge should potentially be a fallthru edge. If that is in
2482 fact true, delete the jump and barriers that are in the way. */
2484 void
2485 tidy_fallthru_edge (e, b, c)
2486 edge e;
2487 basic_block b, c;
2489 rtx q;
2491 /* ??? In a late-running flow pass, other folks may have deleted basic
2492 blocks by nopping out blocks, leaving multiple BARRIERs between here
2493 and the target label. They ought to be chastised and fixed.
2495 We can also wind up with a sequence of undeletable labels between
2496 one block and the next.
2498 So search through a sequence of barriers, labels, and notes for
2499 the head of block C and assert that we really do fall through. */
2501 if (next_real_insn (b->end) != next_real_insn (PREV_INSN (c->head)))
2502 return;
2504 /* Remove what will soon cease being the jump insn from the source block.
2505 If block B consisted only of this single jump, turn it into a deleted
2506 note. */
2507 q = b->end;
2508 if (GET_CODE (q) == JUMP_INSN
2509 && onlyjump_p (q)
2510 && (any_uncondjump_p (q)
2511 || (b->succ == e && e->succ_next == NULL)))
2513 #ifdef HAVE_cc0
2514 /* If this was a conditional jump, we need to also delete
2515 the insn that set cc0. */
2516 if (any_condjump_p (q) && sets_cc0_p (PREV_INSN (q)))
2517 q = PREV_INSN (q);
2518 #endif
2520 if (b->head == q)
2522 PUT_CODE (q, NOTE);
2523 NOTE_LINE_NUMBER (q) = NOTE_INSN_DELETED;
2524 NOTE_SOURCE_FILE (q) = 0;
2526 else
2527 b->end = q = PREV_INSN (q);
2530 /* Selectively unlink the sequence. */
2531 if (q != PREV_INSN (c->head))
2532 flow_delete_insn_chain (NEXT_INSN (q), PREV_INSN (c->head));
2534 e->flags |= EDGE_FALLTHRU;
2537 /* Fix up edges that now fall through, or rather should now fall through
2538 but previously required a jump around now deleted blocks. Simplify
2539 the search by only examining blocks numerically adjacent, since this
2540 is how find_basic_blocks created them. */
2542 static void
2543 tidy_fallthru_edges ()
2545 int i;
2547 for (i = 1; i < n_basic_blocks; ++i)
2549 basic_block b = BASIC_BLOCK (i - 1);
2550 basic_block c = BASIC_BLOCK (i);
2551 edge s;
2553 /* We care about simple conditional or unconditional jumps with
2554 a single successor.
2556 If we had a conditional branch to the next instruction when
2557 find_basic_blocks was called, then there will only be one
2558 out edge for the block which ended with the conditional
2559 branch (since we do not create duplicate edges).
2561 Furthermore, the edge will be marked as a fallthru because we
2562 merge the flags for the duplicate edges. So we do not want to
2563 check that the edge is not a FALLTHRU edge. */
2564 if ((s = b->succ) != NULL
2565 && s->succ_next == NULL
2566 && s->dest == c
2567 /* If the jump insn has side effects, we can't tidy the edge. */
2568 && (GET_CODE (b->end) != JUMP_INSN
2569 || onlyjump_p (b->end)))
2570 tidy_fallthru_edge (s, b, c);
2574 /* Perform data flow analysis.
2575 F is the first insn of the function; FLAGS is a set of PROP_* flags
2576 to be used in accumulating flow info. */
2578 void
2579 life_analysis (f, file, flags)
2580 rtx f;
2581 FILE *file;
2582 int flags;
2584 #ifdef ELIMINABLE_REGS
2585 register int i;
2586 static struct {int from, to; } eliminables[] = ELIMINABLE_REGS;
2587 #endif
2589 /* Record which registers will be eliminated. We use this in
2590 mark_used_regs. */
2592 CLEAR_HARD_REG_SET (elim_reg_set);
2594 #ifdef ELIMINABLE_REGS
2595 for (i = 0; i < (int) (sizeof eliminables / sizeof eliminables[0]); i++)
2596 SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
2597 #else
2598 SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
2599 #endif
2601 if (! optimize)
2602 flags &= PROP_DEATH_NOTES | PROP_REG_INFO;
2604 /* The post-reload life analysis must have (on a global basis) the same
2605 registers live as were computed by reload itself. Otherwise elimination
2606 offsets and such may be incorrect.
2608 Reload will mark some registers as live even though they do not
2609 appear in the rtl.
2611 We don't want to create new auto-incs after reload, since they
2612 are unlikely to be useful and can cause problems with shared
2613 stack slots. */
2614 if (reload_completed)
2615 flags &= ~(PROP_REG_INFO | PROP_AUTOINC);
2617 /* We want alias analysis information for local dead store elimination. */
2618 if (flags & PROP_SCAN_DEAD_CODE)
2619 init_alias_analysis ();
2621 /* Always remove no-op moves. Do this before other processing so
2622 that we don't have to keep re-scanning them. */
2623 delete_noop_moves (f);
2625 /* Some targets can emit simpler epilogues if they know that sp was
2626 not ever modified during the function. After reload, of course,
2627 we've already emitted the epilogue so there's no sense searching. */
2628 if (! reload_completed)
2629 notice_stack_pointer_modification (f);
2631 /* Allocate and zero out data structures that will record the
2632 data from lifetime analysis. */
2633 allocate_reg_life_data ();
2634 allocate_bb_life_data ();
2636 /* Find the set of registers live on function exit. */
2637 mark_regs_live_at_end (EXIT_BLOCK_PTR->global_live_at_start);
2639 /* "Update" life info from zero. It'd be nice to begin the
2640 relaxation with just the exit and noreturn blocks, but that set
2641 is not immediately handy. */
2643 if (flags & PROP_REG_INFO)
2644 memset (regs_ever_live, 0, sizeof (regs_ever_live));
2645 update_life_info (NULL, UPDATE_LIFE_GLOBAL, flags);
2647 /* Clean up. */
2648 if (flags & PROP_SCAN_DEAD_CODE)
2649 end_alias_analysis ();
2651 if (file)
2652 dump_flow_info (file);
2654 free_basic_block_vars (1);
2657 /* A subroutine of verify_wide_reg, called through for_each_rtx.
2658 Search for REGNO. If found, abort if it is not wider than word_mode. */
2660 static int
2661 verify_wide_reg_1 (px, pregno)
2662 rtx *px;
2663 void *pregno;
2665 rtx x = *px;
2666 unsigned int regno = *(int *) pregno;
2668 if (GET_CODE (x) == REG && REGNO (x) == regno)
2670 if (GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD)
2671 abort ();
2672 return 1;
2674 return 0;
2677 /* A subroutine of verify_local_live_at_start. Search through insns
2678 between HEAD and END looking for register REGNO. */
2680 static void
2681 verify_wide_reg (regno, head, end)
2682 int regno;
2683 rtx head, end;
2685 while (1)
2687 if (GET_RTX_CLASS (GET_CODE (head)) == 'i'
2688 && for_each_rtx (&PATTERN (head), verify_wide_reg_1, &regno))
2689 return;
2690 if (head == end)
2691 break;
2692 head = NEXT_INSN (head);
2695 /* We didn't find the register at all. Something's way screwy. */
2696 abort ();
2699 /* A subroutine of update_life_info. Verify that there are no untoward
2700 changes in live_at_start during a local update. */
2702 static void
2703 verify_local_live_at_start (new_live_at_start, bb)
2704 regset new_live_at_start;
2705 basic_block bb;
2707 if (reload_completed)
2709 /* After reload, there are no pseudos, nor subregs of multi-word
2710 registers. The regsets should exactly match. */
2711 if (! REG_SET_EQUAL_P (new_live_at_start, bb->global_live_at_start))
2712 abort ();
2714 else
2716 int i;
2718 /* Find the set of changed registers. */
2719 XOR_REG_SET (new_live_at_start, bb->global_live_at_start);
2721 EXECUTE_IF_SET_IN_REG_SET (new_live_at_start, 0, i,
2723 /* No registers should die. */
2724 if (REGNO_REG_SET_P (bb->global_live_at_start, i))
2725 abort ();
2726 /* Verify that the now-live register is wider than word_mode. */
2727 verify_wide_reg (i, bb->head, bb->end);
2732 /* Updates life information starting with the basic blocks set in BLOCKS.
2733 If BLOCKS is null, consider it to be the universal set.
2735 If EXTENT is UPDATE_LIFE_LOCAL, such as after splitting or peepholing,
2736 we are only expecting local modifications to basic blocks. If we find
2737 extra registers live at the beginning of a block, then we either killed
2738 useful data, or we have a broken split that wants data not provided.
2739 If we find registers removed from live_at_start, that means we have
2740 a broken peephole that is killing a register it shouldn't.
2742 ??? This is not true in one situation -- when a pre-reload splitter
2743 generates subregs of a multi-word pseudo, current life analysis will
2744 lose the kill. So we _can_ have a pseudo go live. How irritating.
2746 Including PROP_REG_INFO does not properly refresh regs_ever_live
2747 unless the caller resets it to zero. */
2749 void
2750 update_life_info (blocks, extent, prop_flags)
2751 sbitmap blocks;
2752 enum update_life_extent extent;
2753 int prop_flags;
2755 regset tmp;
2756 regset_head tmp_head;
2757 int i;
2759 tmp = INITIALIZE_REG_SET (tmp_head);
2761 /* For a global update, we go through the relaxation process again. */
2762 if (extent != UPDATE_LIFE_LOCAL)
2764 calculate_global_regs_live (blocks, blocks,
2765 prop_flags & PROP_SCAN_DEAD_CODE);
2767 /* If asked, remove notes from the blocks we'll update. */
2768 if (extent == UPDATE_LIFE_GLOBAL_RM_NOTES)
2769 count_or_remove_death_notes (blocks, 1);
2772 if (blocks)
2774 EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
2776 basic_block bb = BASIC_BLOCK (i);
2778 COPY_REG_SET (tmp, bb->global_live_at_end);
2779 propagate_block (bb, tmp, (regset) NULL, prop_flags);
2781 if (extent == UPDATE_LIFE_LOCAL)
2782 verify_local_live_at_start (tmp, bb);
2785 else
2787 for (i = n_basic_blocks - 1; i >= 0; --i)
2789 basic_block bb = BASIC_BLOCK (i);
2791 COPY_REG_SET (tmp, bb->global_live_at_end);
2792 propagate_block (bb, tmp, (regset) NULL, prop_flags);
2794 if (extent == UPDATE_LIFE_LOCAL)
2795 verify_local_live_at_start (tmp, bb);
2799 FREE_REG_SET (tmp);
2801 if (prop_flags & PROP_REG_INFO)
2803 /* The only pseudos that are live at the beginning of the function
2804 are those that were not set anywhere in the function. local-alloc
2805 doesn't know how to handle these correctly, so mark them as not
2806 local to any one basic block. */
2807 EXECUTE_IF_SET_IN_REG_SET (ENTRY_BLOCK_PTR->global_live_at_end,
2808 FIRST_PSEUDO_REGISTER, i,
2809 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
2811 /* We have a problem with any pseudoreg that lives across the setjmp.
2812 ANSI says that if a user variable does not change in value between
2813 the setjmp and the longjmp, then the longjmp preserves it. This
2814 includes longjmp from a place where the pseudo appears dead.
2815 (In principle, the value still exists if it is in scope.)
2816 If the pseudo goes in a hard reg, some other value may occupy
2817 that hard reg where this pseudo is dead, thus clobbering the pseudo.
2818 Conclusion: such a pseudo must not go in a hard reg. */
2819 EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
2820 FIRST_PSEUDO_REGISTER, i,
2822 if (regno_reg_rtx[i] != 0)
2824 REG_LIVE_LENGTH (i) = -1;
2825 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
2831 /* Free the variables allocated by find_basic_blocks.
2833 KEEP_HEAD_END_P is non-zero if basic_block_info is not to be freed. */
2835 void
2836 free_basic_block_vars (keep_head_end_p)
2837 int keep_head_end_p;
2839 if (basic_block_for_insn)
2841 VARRAY_FREE (basic_block_for_insn);
2842 basic_block_for_insn = NULL;
2845 if (! keep_head_end_p)
2847 clear_edges ();
2848 VARRAY_FREE (basic_block_info);
2849 n_basic_blocks = 0;
2851 ENTRY_BLOCK_PTR->aux = NULL;
2852 ENTRY_BLOCK_PTR->global_live_at_end = NULL;
2853 EXIT_BLOCK_PTR->aux = NULL;
2854 EXIT_BLOCK_PTR->global_live_at_start = NULL;
2858 /* Return nonzero if the destination of SET equals the source. */
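/* For example, (set (reg 42) (reg 42)) is a no-op, as is a copy between
   the same word of the same register, e.g.
   (set (subreg:SI (reg:DI 40) 0) (subreg:SI (reg:DI 40) 0)).  */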
2859 static int
2860 set_noop_p (set)
2861 rtx set;
2863 rtx src = SET_SRC (set);
2864 rtx dst = SET_DEST (set);
2866 if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
2868 if (SUBREG_WORD (src) != SUBREG_WORD (dst))
2869 return 0;
2870 src = SUBREG_REG (src);
2871 dst = SUBREG_REG (dst);
2874 return (GET_CODE (src) == REG && GET_CODE (dst) == REG
2875 && REGNO (src) == REGNO (dst));
2878 /* Return nonzero if an insn consists only of SETs, each of which only sets a
2879 value to itself. */
2880 static int
2881 noop_move_p (insn)
2882 rtx insn;
2884 rtx pat = PATTERN (insn);
2886 /* Insns carrying these notes are useful later on. */
2887 if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
2888 return 0;
2890 if (GET_CODE (pat) == SET && set_noop_p (pat))
2891 return 1;
2893 if (GET_CODE (pat) == PARALLEL)
2895 int i;
2896 /* If nothing but SETs of registers to themselves,
2897 this insn can also be deleted. */
2898 for (i = 0; i < XVECLEN (pat, 0); i++)
2900 rtx tem = XVECEXP (pat, 0, i);
2902 if (GET_CODE (tem) == USE
2903 || GET_CODE (tem) == CLOBBER)
2904 continue;
2906 if (GET_CODE (tem) != SET || ! set_noop_p (tem))
2907 return 0;
2910 return 1;
2912 return 0;
2915 /* Delete any insns that copy a register to itself. */
2917 static void
2918 delete_noop_moves (f)
2919 rtx f;
2921 rtx insn;
2922 for (insn = f; insn; insn = NEXT_INSN (insn))
2924 if (GET_CODE (insn) == INSN && noop_move_p (insn))
2926 PUT_CODE (insn, NOTE);
2927 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2928 NOTE_SOURCE_FILE (insn) = 0;
2933 /* Determine if the stack pointer is constant over the life of the function.
2934 Only useful before prologues have been emitted. */
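/* Note that a push such as (set (mem (pre_dec (reg sp))) (reg X)) never
   names sp as a SET destination, which is why the helper below must also
   look inside MEM addresses for {PRE,POST}_{INC,DEC} of the stack
   pointer.  */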
2936 static void
2937 notice_stack_pointer_modification_1 (x, pat, data)
2938 rtx x;
2939 rtx pat ATTRIBUTE_UNUSED;
2940 void *data ATTRIBUTE_UNUSED;
2942 if (x == stack_pointer_rtx
2943 /* The stack pointer is only modified indirectly as the result
2944 of a push until later in flow. See the comments in rtl.texi
2945 regarding Embedded Side-Effects on Addresses. */
2946 || (GET_CODE (x) == MEM
2947 && (GET_CODE (XEXP (x, 0)) == PRE_DEC
2948 || GET_CODE (XEXP (x, 0)) == PRE_INC
2949 || GET_CODE (XEXP (x, 0)) == POST_DEC
2950 || GET_CODE (XEXP (x, 0)) == POST_INC)
2951 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx))
2952 current_function_sp_is_unchanging = 0;
2955 static void
2956 notice_stack_pointer_modification (f)
2957 rtx f;
2959 rtx insn;
2961 /* Assume that the stack pointer is unchanging if alloca hasn't
2962 been used. */
2963 current_function_sp_is_unchanging = !current_function_calls_alloca;
2964 if (! current_function_sp_is_unchanging)
2965 return;
2967 for (insn = f; insn; insn = NEXT_INSN (insn))
2969 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2971 /* Check if insn modifies the stack pointer. */
2972 note_stores (PATTERN (insn), notice_stack_pointer_modification_1,
2973 NULL);
2974 if (! current_function_sp_is_unchanging)
2975 return;
2980 /* Mark a register in SET. Hard registers in large modes get all
2981 of their component registers set as well. */
2982 static void
2983 mark_reg (reg, xset)
2984 rtx reg;
2985 void *xset;
2987 regset set = (regset) xset;
2988 int regno = REGNO (reg);
2990 if (GET_MODE (reg) == BLKmode)
2991 abort ();
2993 SET_REGNO_REG_SET (set, regno);
2994 if (regno < FIRST_PSEUDO_REGISTER)
2996 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
2997 while (--n > 0)
2998 SET_REGNO_REG_SET (set, regno + n);
3002 /* Mark those regs which are needed at the end of the function as live
3003 at the end of the last basic block. */
3004 static void
3005 mark_regs_live_at_end (set)
3006 regset set;
3008 int i;
3010 /* If exiting needs the right stack value, consider the stack pointer
3011 live at the end of the function. */
3012 if ((HAVE_epilogue && reload_completed)
3013 || ! EXIT_IGNORE_STACK
3014 || (! FRAME_POINTER_REQUIRED
3015 && ! current_function_calls_alloca
3016 && flag_omit_frame_pointer)
3017 || current_function_sp_is_unchanging)
3019 SET_REGNO_REG_SET (set, STACK_POINTER_REGNUM);
3022 /* Mark the frame pointer if needed at the end of the function. If
3023 we end up eliminating it, it will be removed from the live list
3024 of each basic block by reload. */
3026 if (! reload_completed || frame_pointer_needed)
3028 SET_REGNO_REG_SET (set, FRAME_POINTER_REGNUM);
3029 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3030 /* If they are different, also mark the hard frame pointer as live. */
3031 SET_REGNO_REG_SET (set, HARD_FRAME_POINTER_REGNUM);
3032 #endif
3035 #ifdef PIC_OFFSET_TABLE_REGNUM
3036 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
3037 /* Many architectures have a GP register even without flag_pic.
3038 Assume the pic register is not in use, or will be handled by
3039 other means, if it is not fixed. */
3040 if (fixed_regs[PIC_OFFSET_TABLE_REGNUM])
3041 SET_REGNO_REG_SET (set, PIC_OFFSET_TABLE_REGNUM);
3042 #endif
3043 #endif
3045 /* Mark all global registers, and all registers used by the epilogue
3046 as being live at the end of the function since they may be
3047 referenced by our caller. */
3048 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3049 if (global_regs[i]
3050 #ifdef EPILOGUE_USES
3051 || EPILOGUE_USES (i)
3052 #endif
3054 SET_REGNO_REG_SET (set, i);
3056 /* Mark all call-saved registers that we actually used. */
3057 if (HAVE_epilogue && reload_completed)
3059 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3060 if (! call_used_regs[i] && regs_ever_live[i])
3061 SET_REGNO_REG_SET (set, i);
3064 /* Mark function return value. */
3065 diddle_return_value (mark_reg, set);
3068 /* Callback function for for_each_successor_phi. DATA is a regset.
3069 Sets the SRC_REGNO, the regno of the phi alternative for phi node
3070 INSN, in the regset. */
3072 static int
3073 set_phi_alternative_reg (insn, dest_regno, src_regno, data)
3074 rtx insn ATTRIBUTE_UNUSED;
3075 int dest_regno ATTRIBUTE_UNUSED;
3076 int src_regno;
3077 void *data;
3079 regset live = (regset) data;
3080 SET_REGNO_REG_SET (live, src_regno);
3081 return 0;
3084 /* Propagate global life info around the graph of basic blocks. Begin
3085 considering blocks with their corresponding bit set in BLOCKS_IN.
3086 If BLOCKS_IN is null, consider it the universal set.
3088 BLOCKS_OUT is set for every block that was changed. */
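/* Conceptually this is the classical backward liveness relaxation,
   iterated to a fixed point over a worklist:

	live_at_end (BB) = union of live_at_start (S) over successors S
	live_at_start (BB) = use (BB) | (live_at_end (BB) & ~def (BB))

   propagate_block computes the second equation for one block, and the
   loop below re-queues the predecessors of any block whose
   live_at_start grew.  (A sketch only -- the code also special-cases
   the stack pointer and phi nodes, and caches local_set so unchanged
   blocks need not be rescanned.)  */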
3090 static void
3091 calculate_global_regs_live (blocks_in, blocks_out, flags)
3092 sbitmap blocks_in, blocks_out;
3093 int flags;
3095 basic_block *queue, *qhead, *qtail, *qend;
3096 regset tmp, new_live_at_end;
3097 regset_head tmp_head;
3098 regset_head new_live_at_end_head;
3099 int i;
3101 tmp = INITIALIZE_REG_SET (tmp_head);
3102 new_live_at_end = INITIALIZE_REG_SET (new_live_at_end_head);
3104 /* Create a worklist. Allocate an extra slot for ENTRY_BLOCK, and one
3105 because the `head == tail' style test for an empty queue doesn't
3106 work with a full queue. */
3107 queue = (basic_block *) xmalloc ((n_basic_blocks + 2) * sizeof (*queue));
3108 qtail = queue;
3109 qhead = qend = queue + n_basic_blocks + 2;
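/* The queue is used as a ring buffer: QHEAD and QTAIL wrap around at
   QEND. Of the two extra slots allocated above, one makes room for
   ENTRY_BLOCK and one keeps a completely full ring distinguishable
   from an empty one.  */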
3111 /* Clear out the garbage that might be hanging out in bb->aux. */
3112 for (i = n_basic_blocks - 1; i >= 0; --i)
3113 BASIC_BLOCK (i)->aux = NULL;
3115 /* Queue the blocks set in the initial mask. Do this in reverse block
3116 number order so that the first round is more likely to do useful
3117 work. We use AUX non-null to flag that the block is queued. */
3118 if (blocks_in)
3120 EXECUTE_IF_SET_IN_SBITMAP (blocks_in, 0, i,
3122 basic_block bb = BASIC_BLOCK (i);
3123 *--qhead = bb;
3124 bb->aux = bb;
3127 else
3129 for (i = 0; i < n_basic_blocks; ++i)
3131 basic_block bb = BASIC_BLOCK (i);
3132 *--qhead = bb;
3133 bb->aux = bb;
3137 if (blocks_out)
3138 sbitmap_zero (blocks_out);
3140 while (qhead != qtail)
3142 int rescan, changed;
3143 basic_block bb;
3144 edge e;
3146 bb = *qhead++;
3147 if (qhead == qend)
3148 qhead = queue;
3149 bb->aux = NULL;
3151 /* Begin by propagating live_at_start from the successor blocks. */
3152 CLEAR_REG_SET (new_live_at_end);
3153 for (e = bb->succ; e ; e = e->succ_next)
3155 basic_block sb = e->dest;
3156 IOR_REG_SET (new_live_at_end, sb->global_live_at_start);
3159 /* Force the stack pointer to be live -- which might not already be
3160 the case for blocks within infinite loops. */
3161 SET_REGNO_REG_SET (new_live_at_end, STACK_POINTER_REGNUM);
3163 /* Regs used in phi nodes are not included in
3164 global_live_at_start, since they are live only along a
3165 particular edge. Set those regs that are live because of a
3166 phi node alternative corresponding to this particular block. */
3167 if (in_ssa_form)
3168 for_each_successor_phi (bb, &set_phi_alternative_reg,
3169 new_live_at_end);
3171 if (bb == ENTRY_BLOCK_PTR)
3173 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3174 continue;
3177 /* On our first pass through this block, we'll go ahead and continue.
3178 Recognize the first pass by local_set being NULL. On subsequent passes, we
3179 get to skip out early if live_at_end wouldn't have changed. */
3181 if (bb->local_set == NULL)
3183 bb->local_set = OBSTACK_ALLOC_REG_SET (function_obstack);
3184 rescan = 1;
3186 else
3188 /* If any bits were removed from live_at_end, we'll have to
3189 rescan the block. This wouldn't be necessary if we had
3190 precalculated local_live, however with PROP_SCAN_DEAD_CODE
3191 local_live is really dependent on live_at_end. */
3192 CLEAR_REG_SET (tmp);
3193 rescan = bitmap_operation (tmp, bb->global_live_at_end,
3194 new_live_at_end, BITMAP_AND_COMPL);
3196 if (! rescan)
3198 /* Find the set of changed bits. Take this opportunity
3199 to notice that this set is empty and early out. */
3200 CLEAR_REG_SET (tmp);
3201 changed = bitmap_operation (tmp, bb->global_live_at_end,
3202 new_live_at_end, BITMAP_XOR);
3203 if (! changed)
3204 continue;
3206 /* If any of the changed bits overlap with local_set,
3207 we'll have to rescan the block. Detect overlap by
3208 the AND with ~local_set turning off bits. */
3209 rescan = bitmap_operation (tmp, tmp, bb->local_set,
3210 BITMAP_AND_COMPL);
3214 /* Let our caller know that BB changed enough to require its
3215 death notes updated. */
3216 if (blocks_out)
3217 SET_BIT (blocks_out, bb->index);
3219 if (! rescan)
3221 /* Add to live_at_start the set of all registers in
3222 new_live_at_end that aren't in the old live_at_end. */
3224 bitmap_operation (tmp, new_live_at_end, bb->global_live_at_end,
3225 BITMAP_AND_COMPL);
3226 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3228 changed = bitmap_operation (bb->global_live_at_start,
3229 bb->global_live_at_start,
3230 tmp, BITMAP_IOR);
3231 if (! changed)
3232 continue;
3234 else
3236 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3238 /* Rescan the block insn by insn to turn (a copy of) live_at_end
3239 into live_at_start. */
3240 propagate_block (bb, new_live_at_end, bb->local_set, flags);
3242 /* If live_at_start didn't change, no need to go further. */
3243 if (REG_SET_EQUAL_P (bb->global_live_at_start, new_live_at_end))
3244 continue;
3246 COPY_REG_SET (bb->global_live_at_start, new_live_at_end);
3249 /* Queue all predecessors of BB so that we may re-examine
3250 their live_at_end. */
3251 for (e = bb->pred; e ; e = e->pred_next)
3253 basic_block pb = e->src;
3254 if (pb->aux == NULL)
3256 *qtail++ = pb;
3257 if (qtail == qend)
3258 qtail = queue;
3259 pb->aux = pb;
3264 FREE_REG_SET (tmp);
3265 FREE_REG_SET (new_live_at_end);
3267 if (blocks_out)
3269 EXECUTE_IF_SET_IN_SBITMAP (blocks_out, 0, i,
3271 basic_block bb = BASIC_BLOCK (i);
3272 FREE_REG_SET (bb->local_set);
3275 else
3277 for (i = n_basic_blocks - 1; i >= 0; --i)
3279 basic_block bb = BASIC_BLOCK (i);
3280 FREE_REG_SET (bb->local_set);
3284 free (queue);
3287 /* Subroutines of life analysis. */
3289 /* Allocate the permanent data structures that represent the results
3290 of life analysis. Not static since used also for stupid life analysis. */
3292 void
3293 allocate_bb_life_data ()
3295 register int i;
3297 for (i = 0; i < n_basic_blocks; i++)
3299 basic_block bb = BASIC_BLOCK (i);
3301 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
3302 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);
3305 ENTRY_BLOCK_PTR->global_live_at_end
3306 = OBSTACK_ALLOC_REG_SET (function_obstack);
3307 EXIT_BLOCK_PTR->global_live_at_start
3308 = OBSTACK_ALLOC_REG_SET (function_obstack);
3310 regs_live_at_setjmp = OBSTACK_ALLOC_REG_SET (function_obstack);
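/* Allocate (or grow, if max_reg_num has increased) the per-register
   data used by life analysis, and clear the statistics that
   propagate_block and its subroutines will accumulate.  */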
3313 void
3314 allocate_reg_life_data ()
3316 int i;
3318 max_regno = max_reg_num ();
3320 /* Recalculate the register space, in case it has grown. Old style
3321 vector oriented regsets would set regset_{size,bytes} here also. */
3322 allocate_reg_info (max_regno, FALSE, FALSE);
3324 /* Reset all the data we'll collect in propagate_block and its
3325 subroutines. */
3326 for (i = 0; i < max_regno; i++)
3328 REG_N_SETS (i) = 0;
3329 REG_N_REFS (i) = 0;
3330 REG_N_DEATHS (i) = 0;
3331 REG_N_CALLS_CROSSED (i) = 0;
3332 REG_LIVE_LENGTH (i) = 0;
3333 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
3337 /* Delete dead instructions for propagate_block. */
3339 static void
3340 propagate_block_delete_insn (bb, insn)
3341 basic_block bb;
3342 rtx insn;
3344 rtx inote = find_reg_note (insn, REG_LABEL, NULL_RTX);
3346 /* If the insn referred to a label, and that label was attached to
3347 an ADDR_VEC, it's safe to delete the ADDR_VEC. In fact, it's
3348 pretty much mandatory to delete it, because the ADDR_VEC may be
3349 referencing labels that no longer exist. */
3351 if (inote)
3353 rtx label = XEXP (inote, 0);
3354 rtx next;
3356 if (LABEL_NUSES (label) == 1
3357 && (next = next_nonnote_insn (label)) != NULL
3358 && GET_CODE (next) == JUMP_INSN
3359 && (GET_CODE (PATTERN (next)) == ADDR_VEC
3360 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
3362 rtx pat = PATTERN (next);
3363 int diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
3364 int len = XVECLEN (pat, diff_vec_p);
3365 int i;
3367 for (i = 0; i < len; i++)
3368 LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0))--;
3370 flow_delete_insn (next);
3374 if (bb->end == insn)
3375 bb->end = PREV_INSN (insn);
3376 flow_delete_insn (insn);
3379 /* Delete dead libcalls for propagate_block. Return the insn
3380 before the libcall. */
3382 static rtx
3383 propagate_block_delete_libcall (bb, insn, note)
3384 basic_block bb;
3385 rtx insn, note;
3387 rtx first = XEXP (note, 0);
3388 rtx before = PREV_INSN (first);
3390 if (insn == bb->end)
3391 bb->end = before;
3393 flow_delete_insn_chain (first, insn);
3394 return before;
3397 /* Update the life-status of regs for one insn. Return the previous insn. */
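/* Since the containing block is scanned backwards, the transfer
   function applied here for one insn is, conceptually:

	live = (live & ~set (insn)) | use (insn)

   with death-note creation, auto-inc discovery and dead-insn deletion
   layered on top when the corresponding PROP_* flags are set.  */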
3399 rtx
3400 propagate_one_insn (pbi, insn)
3401 struct propagate_block_info *pbi;
3402 rtx insn;
3404 rtx prev = PREV_INSN (insn);
3405 int flags = pbi->flags;
3406 int insn_is_dead = 0;
3407 int libcall_is_dead = 0;
3408 rtx note;
3409 int i;
3411 if (! INSN_P (insn))
3412 return prev;
3414 note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
3415 if (flags & PROP_SCAN_DEAD_CODE)
3417 insn_is_dead = insn_dead_p (pbi, PATTERN (insn), 0,
3418 REG_NOTES (insn));
3419 libcall_is_dead = (insn_is_dead && note != 0
3420 && libcall_dead_p (pbi, note, insn));
3423 /* We almost certainly don't want to delete prologue or epilogue
3424 instructions. Warn about probable compiler lossage. */
3425 if (insn_is_dead
3426 && reload_completed
3427 && (((HAVE_epilogue || HAVE_prologue)
3428 && prologue_epilogue_contains (insn))
3429 || (HAVE_sibcall_epilogue
3430 && sibcall_epilogue_contains (insn))))
3432 if (flags & PROP_KILL_DEAD_CODE)
3434 warning ("ICE: would have deleted prologue/epilogue insn");
3435 if (!inhibit_warnings)
3436 debug_rtx (insn);
3438 libcall_is_dead = insn_is_dead = 0;
3441 /* If an instruction consists of just dead store(s) on final pass,
3442 delete it. */
3443 if ((flags & PROP_KILL_DEAD_CODE) && insn_is_dead)
3445 /* Record sets. Do this even for dead instructions, since they
3446 would have killed the values if they hadn't been deleted. */
3447 mark_set_regs (pbi, PATTERN (insn), insn);
3449 /* CC0 is now known to be dead. Either this insn used it,
3450 in which case it doesn't anymore, or clobbered it,
3451 so the next insn can't use it. */
3452 pbi->cc0_live = 0;
3454 if (libcall_is_dead)
3456 prev = propagate_block_delete_libcall (pbi->bb, insn, note);
3457 insn = NEXT_INSN (prev);
3459 else
3460 propagate_block_delete_insn (pbi->bb, insn);
3462 return prev;
3465 /* See if this is an increment or decrement that can be merged into
3466 a following memory address. */
3467 #ifdef AUTO_INC_DEC
3469 register rtx x = single_set (insn);
3471 /* Does this instruction increment or decrement a register? */
3472 if ((flags & PROP_AUTOINC)
3473 && x != 0
3474 && GET_CODE (SET_DEST (x)) == REG
3475 && (GET_CODE (SET_SRC (x)) == PLUS
3476 || GET_CODE (SET_SRC (x)) == MINUS)
3477 && XEXP (SET_SRC (x), 0) == SET_DEST (x)
3478 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
3479 /* Ok, look for a following memory ref we can combine with.
3480 If one is found, change the memory ref to a PRE_INC
3481 or PRE_DEC, cancel this insn, and return 1.
3482 Return 0 if nothing has been done. */
3483 && try_pre_increment_1 (pbi, insn))
3484 return prev;
3486 #endif /* AUTO_INC_DEC */
3488 CLEAR_REG_SET (pbi->new_set);
3490 /* If this is not the final pass, and this insn is copying the value of
3491 a library call and it's dead, don't scan the insns that perform the
3492 library call, so that the call's arguments are not marked live. */
3493 if (libcall_is_dead)
3495 /* Record the death of the dest reg. */
3496 mark_set_regs (pbi, PATTERN (insn), insn);
3498 insn = XEXP (note, 0);
3499 return PREV_INSN (insn);
3501 else if (GET_CODE (PATTERN (insn)) == SET
3502 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
3503 && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
3504 && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx
3505 && GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == CONST_INT)
3506 /* We have an insn to pop a constant amount off the stack.
3507 (Such insns use PLUS regardless of the direction of the stack,
3508 and any insn to adjust the stack by a constant is always a pop.)
3509 These insns, if not dead stores, have no effect on life. */
3511 else
3513 /* Any regs live at the time of a call instruction must not go
3514 in a register clobbered by calls. Find all regs now live and
3515 record this for them. */
3517 if (GET_CODE (insn) == CALL_INSN && (flags & PROP_REG_INFO))
3518 EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i,
3519 { REG_N_CALLS_CROSSED (i)++; });
3521 /* Record sets. Do this even for dead instructions, since they
3522 would have killed the values if they hadn't been deleted. */
3523 mark_set_regs (pbi, PATTERN (insn), insn);
3525 if (GET_CODE (insn) == CALL_INSN)
3527 register int i;
3528 rtx note, cond;
3530 cond = NULL_RTX;
3531 if (GET_CODE (PATTERN (insn)) == COND_EXEC)
3532 cond = COND_EXEC_TEST (PATTERN (insn));
3534 /* Non-constant calls clobber memory. */
3535 if (! CONST_CALL_P (insn))
3536 free_EXPR_LIST_list (&pbi->mem_set_list);
3538 /* There may be extra registers to be clobbered. */
3539 for (note = CALL_INSN_FUNCTION_USAGE (insn);
3540 note;
3541 note = XEXP (note, 1))
3542 if (GET_CODE (XEXP (note, 0)) == CLOBBER)
3543 mark_set_1 (pbi, CLOBBER, XEXP (XEXP (note, 0), 0),
3544 cond, insn, pbi->flags);
3546 /* Calls clobber all call-used registers that are not global or fixed. */
3547 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3548 if (call_used_regs[i] && ! global_regs[i]
3549 && ! fixed_regs[i])
3551 /* We do not want REG_UNUSED notes for these registers. */
3552 mark_set_1 (pbi, CLOBBER, gen_rtx_REG (reg_raw_mode[i], i),
3553 cond, insn,
3554 pbi->flags & ~(PROP_DEATH_NOTES | PROP_REG_INFO));
3558 /* If an insn doesn't use CC0, it becomes dead since we assume
3559 that every insn clobbers it. So show it dead here;
3560 mark_used_regs will set it live if it is referenced. */
3561 pbi->cc0_live = 0;
3563 /* Record uses. */
3564 if (! insn_is_dead)
3565 mark_used_regs (pbi, PATTERN (insn), NULL_RTX, insn);
3567 /* Sometimes we may have inserted something before INSN (such as a move)
3568 when we make an auto-inc. So ensure we will scan those insns. */
3569 #ifdef AUTO_INC_DEC
3570 prev = PREV_INSN (insn);
3571 #endif
3573 if (! insn_is_dead && GET_CODE (insn) == CALL_INSN)
3575 register int i;
3576 rtx note, cond;
3578 cond = NULL_RTX;
3579 if (GET_CODE (PATTERN (insn)) == COND_EXEC)
3580 cond = COND_EXEC_TEST (PATTERN (insn));
3582 /* Calls use their arguments. */
3583 for (note = CALL_INSN_FUNCTION_USAGE (insn);
3584 note;
3585 note = XEXP (note, 1))
3586 if (GET_CODE (XEXP (note, 0)) == USE)
3587 mark_used_regs (pbi, XEXP (XEXP (note, 0), 0),
3588 cond, insn);
3590 /* The stack ptr is used (honorarily) by a CALL insn. */
3591 SET_REGNO_REG_SET (pbi->reg_live, STACK_POINTER_REGNUM);
3593 /* Calls may also reference any of the global registers,
3594 so they are made live. */
3595 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3596 if (global_regs[i])
3597 mark_used_reg (pbi, gen_rtx_REG (reg_raw_mode[i], i),
3598 cond, insn);
3602 /* On the final pass, update the count of insns in which each reg
3603 is live. */
3604 if (flags & PROP_REG_INFO)
3605 EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i,
3606 { REG_LIVE_LENGTH (i)++; });
3608 return prev;
3611 /* Initialize a propagate_block_info struct for public consumption.
3612 Note that the structure itself is opaque to this file, but that
3613 the user can use the regsets provided here. */
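/* Every successful call must eventually be paired with
   free_propagate_block_info; propagate_block below shows the canonical
   init / scan / free sequence.  */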
3615 struct propagate_block_info *
3616 init_propagate_block_info (bb, live, local_set, flags)
3617 basic_block bb;
3618 regset live;
3619 regset local_set;
3620 int flags;
3622 struct propagate_block_info *pbi = xmalloc (sizeof (*pbi));
3624 pbi->bb = bb;
3625 pbi->reg_live = live;
3626 pbi->mem_set_list = NULL_RTX;
3627 pbi->local_set = local_set;
3628 pbi->cc0_live = 0;
3629 pbi->flags = flags;
3631 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
3632 pbi->reg_next_use = (rtx *) xcalloc (max_reg_num (), sizeof (rtx));
3633 else
3634 pbi->reg_next_use = NULL;
3636 pbi->new_set = BITMAP_XMALLOC ();
3638 #ifdef HAVE_conditional_execution
3639 pbi->reg_cond_dead = splay_tree_new (splay_tree_compare_ints, NULL,
3640 free_reg_cond_life_info);
3641 pbi->reg_cond_reg = BITMAP_XMALLOC ();
3643 /* If this block ends in a conditional branch, for each register live
3644 from one side of the branch and not the other, record the register
3645 as conditionally dead. */
3646 if ((flags & (PROP_DEATH_NOTES | PROP_SCAN_DEAD_CODE))
3647 && GET_CODE (bb->end) == JUMP_INSN
3648 && any_condjump_p (bb->end))
3650 regset_head diff_head;
3651 regset diff = INITIALIZE_REG_SET (diff_head);
3652 basic_block bb_true, bb_false;
3653 rtx cond_true, cond_false;
3654 int i;
3656 /* Identify the successor blocks. */
3657 bb_true = bb->succ->dest;
3658 if (bb->succ->succ_next != NULL)
3660 bb_false = bb->succ->succ_next->dest;
3662 if (bb->succ->flags & EDGE_FALLTHRU)
3664 basic_block t = bb_false;
3665 bb_false = bb_true;
3666 bb_true = t;
3668 else if (! (bb->succ->succ_next->flags & EDGE_FALLTHRU))
3669 abort ();
3671 else
3673 /* This can happen with a conditional jump to the next insn. */
3674 if (JUMP_LABEL (bb->end) != bb_true->head)
3675 abort ();
3677 /* Simplest way to do nothing. */
3678 bb_false = bb_true;
3681 /* Extract the condition from the branch. */
3682 cond_true = XEXP (SET_SRC (PATTERN (bb->end)), 0);
3683 cond_false = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond_true)),
3684 GET_MODE (cond_true), XEXP (cond_true, 0),
3685 XEXP (cond_true, 1));
3686 if (GET_CODE (XEXP (SET_SRC (PATTERN (bb->end)), 1)) == PC)
3688 rtx t = cond_false;
3689 cond_false = cond_true;
3690 cond_true = t;
3693 /* Compute which registers lead different lives in the successors. */
3694 if (bitmap_operation (diff, bb_true->global_live_at_start,
3695 bb_false->global_live_at_start, BITMAP_XOR))
3697 if (GET_CODE (XEXP (cond_true, 0)) != REG)
3698 abort ();
3699 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond_true, 0)));
3701 /* For each such register, mark it conditionally dead. */
3702 EXECUTE_IF_SET_IN_REG_SET
3703 (diff, 0, i,
3705 struct reg_cond_life_info *rcli;
3706 rtx cond;
3708 rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
3710 if (REGNO_REG_SET_P (bb_true->global_live_at_start, i))
3711 cond = cond_false;
3712 else
3713 cond = cond_true;
3714 rcli->condition = alloc_EXPR_LIST (0, cond, NULL_RTX);
3716 splay_tree_insert (pbi->reg_cond_dead, i,
3717 (splay_tree_value) rcli);
3721 FREE_REG_SET (diff);
3723 #endif
3725 /* If this block has no successors, any stores to the frame that aren't
3726 used later in the block are dead. So make a pass over the block
3727 recording any such stores and showing them dead at the end. We do
3728 a very conservative and simple job here. */
3729 if ((flags & PROP_SCAN_DEAD_CODE)
3730 && (bb->succ == NULL
3731 || (bb->succ->succ_next == NULL
3732 && bb->succ->dest == EXIT_BLOCK_PTR)))
3734 rtx insn;
3735 for (insn = bb->end; insn != bb->head; insn = PREV_INSN (insn))
3736 if (GET_CODE (insn) == INSN
3737 && GET_CODE (PATTERN (insn)) == SET
3738 && GET_CODE (SET_DEST (PATTERN (insn))) == MEM)
3740 rtx mem = SET_DEST (PATTERN (insn));
3742 if (XEXP (mem, 0) == frame_pointer_rtx
3743 || (GET_CODE (XEXP (mem, 0)) == PLUS
3744 && XEXP (XEXP (mem, 0), 0) == frame_pointer_rtx
3745 && GET_CODE (XEXP (XEXP (mem, 0), 1)) == CONST_INT))
3746 pbi->mem_set_list = alloc_EXPR_LIST (0, mem, pbi->mem_set_list);
3750 return pbi;
3753 /* Release a propagate_block_info struct. */
3755 void
3756 free_propagate_block_info (pbi)
3757 struct propagate_block_info *pbi;
3759 free_EXPR_LIST_list (&pbi->mem_set_list);
3761 BITMAP_XFREE (pbi->new_set);
3763 #ifdef HAVE_conditional_execution
3764 splay_tree_delete (pbi->reg_cond_dead);
3765 BITMAP_XFREE (pbi->reg_cond_reg);
3766 #endif
3768 if (pbi->reg_next_use)
3769 free (pbi->reg_next_use);
3771 free (pbi);
3774 /* Compute the registers live at the beginning of a basic block BB from
3775 those live at the end.
3777 When called, REG_LIVE contains those live at the end. On return, it
3778 contains those live at the beginning.
3780 LOCAL_SET, if non-null, will be set with all registers killed by
3781 this basic block. */
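/* A typical use, as in update_life_info above:

	COPY_REG_SET (tmp, bb->global_live_at_end);
	propagate_block (bb, tmp, (regset) NULL, flags);

   which leaves TMP holding the registers live at the start of BB.  */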
3783 void
3784 propagate_block (bb, live, local_set, flags)
3785 basic_block bb;
3786 regset live;
3787 regset local_set;
3788 int flags;
3790 struct propagate_block_info *pbi;
3791 rtx insn, prev;
3793 pbi = init_propagate_block_info (bb, live, local_set, flags);
3795 if (flags & PROP_REG_INFO)
3797 register int i;
3799 /* Process the regs live at the end of the block.
3800 Mark them as not local to any one basic block. */
3801 EXECUTE_IF_SET_IN_REG_SET (live, 0, i,
3802 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
3805 /* Scan the block an insn at a time from end to beginning. */
3807 for (insn = bb->end; ; insn = prev)
3809 /* If this is a call to `setjmp' et al, warn if any
3810 non-volatile datum is live. */
3811 if ((flags & PROP_REG_INFO)
3812 && GET_CODE (insn) == NOTE
3813 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
3814 IOR_REG_SET (regs_live_at_setjmp, pbi->reg_live);
3816 prev = propagate_one_insn (pbi, insn);
3818 if (insn == bb->head)
3819 break;
3822 free_propagate_block_info (pbi);
3825 /* Return 1 if X (the body of an insn, or part of it) is just dead stores
3826 (SET expressions whose destinations are registers dead after the insn).
3827 PBI->reg_live is the regset that says which regs are alive after the insn.
3829 Unless CALL_OK is non-zero, an insn is needed if it contains a CALL.
3831 If X is the entire body of an insn, NOTES contains the reg notes
3832 pertaining to the insn. */
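/* For example, (set (reg 100) (plus (reg 101) (reg 102))) is dead when
   pseudo 100 is not in pbi->reg_live here, and a store into tracked
   frame memory is dead when an identical store later in the block
   shadows it.  */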
3834 static int
3835 insn_dead_p (pbi, x, call_ok, notes)
3836 struct propagate_block_info *pbi;
3837 rtx x;
3838 int call_ok;
3839 rtx notes ATTRIBUTE_UNUSED;
3841 enum rtx_code code = GET_CODE (x);
3843 #ifdef AUTO_INC_DEC
3844 /* If flow is invoked after reload, we must take existing AUTO_INC
3845 expressions into account. */
3846 if (reload_completed)
3848 for ( ; notes; notes = XEXP (notes, 1))
3850 if (REG_NOTE_KIND (notes) == REG_INC)
3852 int regno = REGNO (XEXP (notes, 0));
3854 /* Don't delete insns to set global regs. */
3855 if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
3856 || REGNO_REG_SET_P (pbi->reg_live, regno))
3857 return 0;
3861 #endif
3863 /* If setting something that's a reg or part of one,
3864 see if that register's altered value will be live. */
3866 if (code == SET)
3868 rtx r = SET_DEST (x);
3870 #ifdef HAVE_cc0
3871 if (GET_CODE (r) == CC0)
3872 return ! pbi->cc0_live;
3873 #endif
3875 /* A SET that is a subroutine call cannot be dead. */
3876 if (GET_CODE (SET_SRC (x)) == CALL)
3878 if (! call_ok)
3879 return 0;
3882 /* Don't eliminate loads from volatile memory or volatile asms. */
3883 else if (volatile_refs_p (SET_SRC (x)))
3884 return 0;
3886 if (GET_CODE (r) == MEM)
3888 rtx temp;
3890 if (MEM_VOLATILE_P (r))
3891 return 0;
3893 /* Walk the set of memory locations we are currently tracking
3894 and see if one is an identical match to this memory location.
3895 If so, this memory write is dead (remember, we're walking
3896 backwards from the end of the block to the start). */
3897 temp = pbi->mem_set_list;
3898 while (temp)
3900 if (rtx_equal_p (XEXP (temp, 0), r))
3901 return 1;
3902 temp = XEXP (temp, 1);
3905 else
3907 while (GET_CODE (r) == SUBREG
3908 || GET_CODE (r) == STRICT_LOW_PART
3909 || GET_CODE (r) == ZERO_EXTRACT)
3910 r = XEXP (r, 0);
3912 if (GET_CODE (r) == REG)
3914 int regno = REGNO (r);
3916 /* Obvious. */
3917 if (REGNO_REG_SET_P (pbi->reg_live, regno))
3918 return 0;
3920 /* If this is a hard register, verify that subsequent
3921 words are not needed. */
3922 if (regno < FIRST_PSEUDO_REGISTER)
3924 int n = HARD_REGNO_NREGS (regno, GET_MODE (r));
3926 while (--n > 0)
3927 if (REGNO_REG_SET_P (pbi->reg_live, regno+n))
3928 return 0;
3931 /* Don't delete insns to set global regs. */
3932 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
3933 return 0;
3935 /* Make sure insns to set the stack pointer aren't deleted. */
3936 if (regno == STACK_POINTER_REGNUM)
3937 return 0;
3939 /* Make sure insns to set the frame pointer aren't deleted. */
3940 if (regno == FRAME_POINTER_REGNUM
3941 && (! reload_completed || frame_pointer_needed))
3942 return 0;
3943 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3944 if (regno == HARD_FRAME_POINTER_REGNUM
3945 && (! reload_completed || frame_pointer_needed))
3946 return 0;
3947 #endif
3949 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3950 /* Make sure insns to set arg pointer are never deleted
3951 (if the arg pointer isn't fixed, there will be a USE
3952 for it, so we can treat it normally). */
3953 if (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
3954 return 0;
3955 #endif
3957 #ifdef PIC_OFFSET_TABLE_REGNUM
3958 /* Before reload, do not allow sets of the pic register
3959 to be deleted. Reload can insert references to
3960 constant pool memory anywhere in the function, making
3961 the PIC register live where it wasn't before. */
3962 if (regno == PIC_OFFSET_TABLE_REGNUM && fixed_regs[regno]
3963 && ! reload_completed)
3964 return 0;
3965 #endif
3967 /* Otherwise, the set is dead. */
3968 return 1;
3973 /* If performing several activities, insn is dead if each activity
3974 is individually dead. Also, CLOBBERs and USEs can be ignored; a
3975 CLOBBER or USE that's inside a PARALLEL doesn't make the insn
3976 worth keeping. */
3977 else if (code == PARALLEL)
3979 int i = XVECLEN (x, 0);
3981 for (i--; i >= 0; i--)
3982 if (GET_CODE (XVECEXP (x, 0, i)) != CLOBBER
3983 && GET_CODE (XVECEXP (x, 0, i)) != USE
3984 && ! insn_dead_p (pbi, XVECEXP (x, 0, i), call_ok, NULL_RTX))
3985 return 0;
3987 return 1;
3990 /* A CLOBBER of a pseudo-register that is dead serves no purpose. That
3991 is not necessarily true for hard registers. */
3992 else if (code == CLOBBER && GET_CODE (XEXP (x, 0)) == REG
3993 && REGNO (XEXP (x, 0)) >= FIRST_PSEUDO_REGISTER
3994 && ! REGNO_REG_SET_P (pbi->reg_live, REGNO (XEXP (x, 0))))
3995 return 1;
3997 /* We do not check other CLOBBER or USE here. An insn consisting of just
3998 a CLOBBER or just a USE should not be deleted. */
3999 return 0;
4002 /* If INSN is the last insn in a libcall, and assuming INSN is dead,
4003 return 1 if the entire library call is dead.
4004 This is true if INSN copies a register (hard or pseudo)
4005 and if the hard return reg of the call insn is dead.
4006 (The caller should have tested the destination of the SET inside
4007 INSN already for death.)
4009 If this insn doesn't just copy a register, then we don't
4010 have an ordinary libcall. In that case, cse could not have
4011 managed to substitute the source for the dest later on,
4012 so we can assume the libcall is dead.
4014 PBI is the block info giving pseudoregs live before this insn.
4015 NOTE is the REG_RETVAL note of the insn. */
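/* An illustrative (simplified) libcall sequence:

     (call_insn ... (set (reg:SI 0) (call ...)))    <- the CALL_INSN
     (insn ... (set (reg:SI 100) (reg:SI 0)))       <- INSN, carries REG_RETVAL

   Assuming (reg 100) is dead, the whole sequence is dead only if the
   hard return register (reg:SI 0) set by the call is also dead after
   INSN; that is what the insn_dead_p call on the CALL pattern below
   verifies. */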
4017 static int
4018 libcall_dead_p (pbi, note, insn)
4019 struct propagate_block_info *pbi;
4020 rtx note;
4021 rtx insn;
4023 rtx x = single_set (insn);
4025 if (x)
4027 register rtx r = SET_SRC (x);
4028 if (GET_CODE (r) == REG)
4030 rtx call = XEXP (note, 0);
4031 rtx call_pat;
4032 register int i;
4034 /* Find the call insn. */
4035 while (call != insn && GET_CODE (call) != CALL_INSN)
4036 call = NEXT_INSN (call);
4038 /* If there is none, do nothing special,
4039 since ordinary death handling can understand these insns. */
4040 if (call == insn)
4041 return 0;
4043 /* See if the hard reg holding the value is dead.
4044 If this is a PARALLEL, find the call within it. */
4045 call_pat = PATTERN (call);
4046 if (GET_CODE (call_pat) == PARALLEL)
4048 for (i = XVECLEN (call_pat, 0) - 1; i >= 0; i--)
4049 if (GET_CODE (XVECEXP (call_pat, 0, i)) == SET
4050 && GET_CODE (SET_SRC (XVECEXP (call_pat, 0, i))) == CALL)
4051 break;
4053 /* This may be a library call that is returning a value
4054 via invisible pointer. Do nothing special, since
4055 ordinary death handling can understand these insns. */
4056 if (i < 0)
4057 return 0;
4059 call_pat = XVECEXP (call_pat, 0, i);
4062 return insn_dead_p (pbi, call_pat, 1, REG_NOTES (call));
4065 return 1;
4068 /* Return 1 if register REGNO was used before it was set, i.e. if it is
4069 live at function entry. Don't count global register variables, variables
4070 in registers that can be used for function arg passing, or variables in
4071 fixed hard registers. */
4073 int
4074 regno_uninitialized (regno)
4075 int regno;
4077 if (n_basic_blocks == 0
4078 || (regno < FIRST_PSEUDO_REGISTER
4079 && (global_regs[regno]
4080 || fixed_regs[regno]
4081 || FUNCTION_ARG_REGNO_P (regno))))
4082 return 0;
4084 return REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno);
4087 /* 1 if register REGNO was alive at a place where `setjmp' was called
4088 and was set more than once or is an argument.
4089 Such regs may be clobbered by `longjmp'. */
4091 int
4092 regno_clobbered_at_setjmp (regno)
4093 int regno;
4095 if (n_basic_blocks == 0)
4096 return 0;
4098 return ((REG_N_SETS (regno) > 1
4099 || REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno))
4100 && REGNO_REG_SET_P (regs_live_at_setjmp, regno));
4103 /* INSN references memory, possibly using autoincrement addressing modes.
4104 Find any entries on the mem_set_list that need to be invalidated due
4105 to an address change. */
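/* For example (illustrative): if the list is tracking

     (mem:SI (plus:SI (reg:SI 30) (const_int 8)))

   and INSN carries a REG_INC note for (reg 30), the tracked address no
   longer names the same location after the increment, so the entry is
   spliced out of the list below. */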
4107 static void
4108 invalidate_mems_from_autoinc (pbi, insn)
4109 struct propagate_block_info *pbi;
4110 rtx insn;
4112 rtx note = REG_NOTES (insn);
4113 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
4115 if (REG_NOTE_KIND (note) == REG_INC)
4117 rtx temp = pbi->mem_set_list;
4118 rtx prev = NULL_RTX;
4119 rtx next;
4121 while (temp)
4123 next = XEXP (temp, 1);
4124 if (reg_overlap_mentioned_p (XEXP (note, 0), XEXP (temp, 0)))
4126 /* Splice temp out of list. */
4127 if (prev)
4128 XEXP (prev, 1) = next;
4129 else
4130 pbi->mem_set_list = next;
4131 free_EXPR_LIST_node (temp);
4133 else
4134 prev = temp;
4135 temp = next;
4141 /* Process the registers that are set within X. Their bits are
4142 cleared in the regset PBI->reg_live, because they are dead prior to
4144 this insn. If INSN is nonzero, it is the insn being processed.
4146 The operations to perform are taken from PBI->flags. */
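/* For example (illustrative): given

     (parallel [(set (reg:SI 60) (plus:SI (reg:SI 61) (reg:SI 62)))
                (clobber (reg:CC 17))])

   mark_set_1 is applied to (reg 60) with code SET and to (reg:CC 17)
   with code CLOBBER; a COND_EXEC wrapper would supply its test as the
   COND argument instead of NULL_RTX. */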
4148 static void
4149 mark_set_regs (pbi, x, insn)
4150 struct propagate_block_info *pbi;
4151 rtx x, insn;
4153 rtx cond = NULL_RTX;
4154 rtx link;
4155 enum rtx_code code;
4157 if (insn)
4158 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
4160 if (REG_NOTE_KIND (link) == REG_INC)
4161 mark_set_1 (pbi, SET, XEXP (link, 0),
4162 (GET_CODE (x) == COND_EXEC
4163 ? COND_EXEC_TEST (x) : NULL_RTX),
4164 insn, pbi->flags);
4166 retry:
4167 switch (code = GET_CODE (x))
4169 case SET:
4170 case CLOBBER:
4171 mark_set_1 (pbi, code, SET_DEST (x), cond, insn, pbi->flags);
4172 return;
4174 case COND_EXEC:
4175 cond = COND_EXEC_TEST (x);
4176 x = COND_EXEC_CODE (x);
4177 goto retry;
4179 case PARALLEL:
4181 register int i;
4182 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
4184 rtx sub = XVECEXP (x, 0, i);
4185 switch (code = GET_CODE (sub))
4187 case COND_EXEC:
4188 if (cond != NULL_RTX)
4189 abort ();
4191 cond = COND_EXEC_TEST (sub);
4192 sub = COND_EXEC_CODE (sub);
4193 if (GET_CODE (sub) != SET && GET_CODE (sub) != CLOBBER)
4194 break;
4195 /* FALLTHRU */
4197 case SET:
4198 case CLOBBER:
4199 mark_set_1 (pbi, code, SET_DEST (sub), cond, insn, pbi->flags);
4200 break;
4202 default:
4203 break;
4206 break;
4209 default:
4210 break;
4214 /* Process a single set (CODE is SET or CLOBBER) whose destination is REG. */
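/* For example (illustrative): for

     (set (strict_low_part (subreg:QI (reg:SI 65) 0)) (reg:QI 66))

   only the low byte of (reg 65) is replaced, so if (reg 65) is live
   after the insn it stays live: NOT_DEAD is set below and the life
   bits are not cleared. */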
4216 static void
4217 mark_set_1 (pbi, code, reg, cond, insn, flags)
4218 struct propagate_block_info *pbi;
4219 enum rtx_code code;
4220 rtx reg, cond, insn;
4221 int flags;
4223 int regno_first = -1, regno_last = -1;
4224 int not_dead = 0;
4225 int i;
4227 /* Some targets place small structures in registers for
4228 return values of functions. We have to detect this
4229 case specially here to get correct flow information. */
4230 if (GET_CODE (reg) == PARALLEL
4231 && GET_MODE (reg) == BLKmode)
4233 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
4234 mark_set_1 (pbi, code, XVECEXP (reg, 0, i), cond, insn, flags);
4235 return;
4238 /* Modifying just one hardware register of a multi-reg value or just a
4239 byte field of a register does not mean the value from before this insn
4240 is now dead. Of course, if it was dead after the insn, it's unused now.
4242 switch (GET_CODE (reg))
4244 case ZERO_EXTRACT:
4245 case SIGN_EXTRACT:
4246 case STRICT_LOW_PART:
4247 /* ??? Assumes STRICT_LOW_PART not used on multi-word registers. */
4248 do
4249 reg = XEXP (reg, 0);
4250 while (GET_CODE (reg) == SUBREG
4251 || GET_CODE (reg) == ZERO_EXTRACT
4252 || GET_CODE (reg) == SIGN_EXTRACT
4253 || GET_CODE (reg) == STRICT_LOW_PART);
4254 if (GET_CODE (reg) == MEM)
4255 break;
4256 not_dead = REGNO_REG_SET_P (pbi->reg_live, REGNO (reg));
4257 /* FALLTHRU */
4259 case REG:
4260 regno_last = regno_first = REGNO (reg);
4261 if (regno_first < FIRST_PSEUDO_REGISTER)
4262 regno_last += HARD_REGNO_NREGS (regno_first, GET_MODE (reg)) - 1;
4263 break;
4265 case SUBREG:
4266 if (GET_CODE (SUBREG_REG (reg)) == REG)
4268 enum machine_mode outer_mode = GET_MODE (reg);
4269 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (reg));
4271 /* Identify the range of registers affected. This is moderately
4272 tricky for hard registers. See alter_subreg. */
4274 regno_last = regno_first = REGNO (SUBREG_REG (reg));
4275 if (regno_first < FIRST_PSEUDO_REGISTER)
4277 #ifdef ALTER_HARD_SUBREG
4278 regno_first = ALTER_HARD_SUBREG (outer_mode, SUBREG_WORD (reg),
4279 inner_mode, regno_first);
4280 #else
4281 regno_first += SUBREG_WORD (reg);
4282 #endif
4283 regno_last = (regno_first
4284 + HARD_REGNO_NREGS (regno_first, outer_mode) - 1);
4286 /* Since we've just adjusted the register number ranges, make
4287 sure REG matches. Otherwise some_was_live will be clear
4288 when it shouldn't have been, and we'll create incorrect
4289 REG_UNUSED notes. */
4290 reg = gen_rtx_REG (outer_mode, regno_first);
4292 else
4294 /* If the number of words in the subreg is less than the number
4295 of words in the full register, we have a well-defined partial
4296 set. Otherwise the high bits are undefined.
4298 This is only really applicable to pseudos, since we just took
4299 care of multi-word hard registers. */
4300 if (((GET_MODE_SIZE (outer_mode)
4301 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
4302 < ((GET_MODE_SIZE (inner_mode)
4303 + UNITS_PER_WORD - 1) / UNITS_PER_WORD))
4304 not_dead = REGNO_REG_SET_P (pbi->reg_live, regno_first);
4306 reg = SUBREG_REG (reg);
4309 else
4310 reg = SUBREG_REG (reg);
4311 break;
4313 default:
4314 break;
4317 /* If this set is a MEM, then it kills any aliased writes.
4318 If this set is a REG, then it kills any MEMs which use the reg. */
4319 if (flags & PROP_SCAN_DEAD_CODE)
4321 if (GET_CODE (reg) == MEM || GET_CODE (reg) == REG)
4323 rtx temp = pbi->mem_set_list;
4324 rtx prev = NULL_RTX;
4325 rtx next;
4327 while (temp)
4329 next = XEXP (temp, 1);
4330 if ((GET_CODE (reg) == MEM
4331 && output_dependence (XEXP (temp, 0), reg))
4332 || (GET_CODE (reg) == REG
4333 && reg_overlap_mentioned_p (reg, XEXP (temp, 0))))
4335 /* Splice this entry out of the list. */
4336 if (prev)
4337 XEXP (prev, 1) = next;
4338 else
4339 pbi->mem_set_list = next;
4340 free_EXPR_LIST_node (temp);
4342 else
4343 prev = temp;
4344 temp = next;
4348 /* If the memory reference had embedded side effects (autoincrement
4349 address modes), then we may need to kill some entries on the
4350 memory set list. */
4351 if (insn && GET_CODE (reg) == MEM)
4352 invalidate_mems_from_autoinc (pbi, insn);
4354 if (GET_CODE (reg) == MEM && ! side_effects_p (reg)
4355 /* ??? With more effort we could track conditional memory life. */
4356 && ! cond
4357 /* We do not know the size of a BLKmode store, so we do not track
4358 them for redundant store elimination. */
4359 && GET_MODE (reg) != BLKmode
4360 /* There are no REG_INC notes for SP, so we can't assume we'll see
4361 everything that invalidates it. To be safe, don't eliminate any
4362 stores through SP; none of them should be redundant anyway. */
4363 && ! reg_mentioned_p (stack_pointer_rtx, reg))
4364 pbi->mem_set_list = alloc_EXPR_LIST (0, reg, pbi->mem_set_list);
4367 if (GET_CODE (reg) == REG
4368 && ! (regno_first == FRAME_POINTER_REGNUM
4369 && (! reload_completed || frame_pointer_needed))
4370 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4371 && ! (regno_first == HARD_FRAME_POINTER_REGNUM
4372 && (! reload_completed || frame_pointer_needed))
4373 #endif
4374 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4375 && ! (regno_first == ARG_POINTER_REGNUM && fixed_regs[regno_first])
4376 #endif
4379 int some_was_live = 0, some_was_dead = 0;
4381 for (i = regno_first; i <= regno_last; ++i)
4383 int needed_regno = REGNO_REG_SET_P (pbi->reg_live, i);
4384 if (pbi->local_set)
4385 SET_REGNO_REG_SET (pbi->local_set, i);
4386 if (code != CLOBBER)
4387 SET_REGNO_REG_SET (pbi->new_set, i);
4389 some_was_live |= needed_regno;
4390 some_was_dead |= ! needed_regno;
4393 #ifdef HAVE_conditional_execution
4394 /* Consider conditional death in deciding that the register needs
4395 a death note. */
4396 if (some_was_live && ! not_dead
4397 /* The stack pointer is never dead. Well, not strictly true,
4398 but it's very difficult to tell from here. Hopefully
4399 combine_stack_adjustments will fix up the most egregious
4400 errors. */
4401 && regno_first != STACK_POINTER_REGNUM)
4403 for (i = regno_first; i <= regno_last; ++i)
4404 if (! mark_regno_cond_dead (pbi, i, cond))
4405 not_dead = 1;
4407 #endif
4409 /* Additional data to record if this is the final pass. */
4410 if (flags & (PROP_LOG_LINKS | PROP_REG_INFO
4411 | PROP_DEATH_NOTES | PROP_AUTOINC))
4413 register rtx y;
4414 register int blocknum = pbi->bb->index;
4416 y = NULL_RTX;
4417 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4419 y = pbi->reg_next_use[regno_first];
4421 /* The next use is no longer next, since a store intervenes. */
4422 for (i = regno_first; i <= regno_last; ++i)
4423 pbi->reg_next_use[i] = 0;
4426 if (flags & PROP_REG_INFO)
4428 for (i = regno_first; i <= regno_last; ++i)
4430 /* Count (weighted) references, stores, etc. This counts a
4431 register twice if it is modified, but that is correct. */
4432 REG_N_SETS (i) += 1;
4433 REG_N_REFS (i) += (optimize_size ? 1
4434 : pbi->bb->loop_depth + 1);
4436 /* The insns where a reg is live are normally counted
4437 elsewhere, but we want the count to include the insn
4438 where the reg is set, and the normal counting mechanism
4439 would not count it. */
4440 REG_LIVE_LENGTH (i) += 1;
4443 /* If this is a hard reg, record this function uses the reg. */
4444 if (regno_first < FIRST_PSEUDO_REGISTER)
4446 for (i = regno_first; i <= regno_last; i++)
4447 regs_ever_live[i] = 1;
4449 else
4451 /* Keep track of which basic blocks each reg appears in. */
4452 if (REG_BASIC_BLOCK (regno_first) == REG_BLOCK_UNKNOWN)
4453 REG_BASIC_BLOCK (regno_first) = blocknum;
4454 else if (REG_BASIC_BLOCK (regno_first) != blocknum)
4455 REG_BASIC_BLOCK (regno_first) = REG_BLOCK_GLOBAL;
4459 if (! some_was_dead)
4461 if (flags & PROP_LOG_LINKS)
4463 /* Make a logical link from the next following insn
4464 that uses this register, back to this insn.
4465 The following insns have already been processed.
4467 We don't build a LOG_LINK for hard registers contained
4468 in ASM_OPERANDs. If these registers get replaced,
4469 we might wind up changing the semantics of the insn,
4470 even if reload can make what appear to be valid
4471 assignments later. */
4472 if (y && (BLOCK_NUM (y) == blocknum)
4473 && (regno_first >= FIRST_PSEUDO_REGISTER
4474 || asm_noperands (PATTERN (y)) < 0))
4475 LOG_LINKS (y) = alloc_INSN_LIST (insn, LOG_LINKS (y));
4478 else if (not_dead)
4479 ;
4480 else if (! some_was_live)
4482 if (flags & PROP_REG_INFO)
4483 REG_N_DEATHS (regno_first) += 1;
4485 if (flags & PROP_DEATH_NOTES)
4487 /* Note that dead stores have already been deleted
4488 when possible. If we get here, we have found a
4489 dead store that cannot be eliminated (because the
4490 same insn does something useful). Indicate this
4491 by marking the reg being set as dying here. */
4492 REG_NOTES (insn)
4493 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
4496 else
4498 if (flags & PROP_DEATH_NOTES)
4500 /* This is a case where we have a multi-word hard register
4501 and some, but not all, of the words of the register are
4502 needed in subsequent insns. Write REG_UNUSED notes
4503 for those parts that were not needed. This case should
4504 be rare. */
4506 for (i = regno_first; i <= regno_last; ++i)
4507 if (! REGNO_REG_SET_P (pbi->reg_live, i))
4508 REG_NOTES (insn)
4509 = alloc_EXPR_LIST (REG_UNUSED,
4510 gen_rtx_REG (reg_raw_mode[i], i),
4511 REG_NOTES (insn));
4516 /* Mark the register as being dead. */
4517 if (some_was_live
4518 && ! not_dead
4519 /* The stack pointer is never dead. Well, not strictly true,
4520 but it's very difficult to tell from here. Hopefully
4521 combine_stack_adjustments will fix up the most egregious
4522 errors. */
4523 && regno_first != STACK_POINTER_REGNUM)
4525 for (i = regno_first; i <= regno_last; ++i)
4526 CLEAR_REGNO_REG_SET (pbi->reg_live, i);
4529 else if (GET_CODE (reg) == REG)
4531 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4532 pbi->reg_next_use[regno_first] = 0;
4535 /* If this is the last pass and this is a SCRATCH, show it will be dying
4536 here and count it. */
4537 else if (GET_CODE (reg) == SCRATCH)
4539 if (flags & PROP_DEATH_NOTES)
4540 REG_NOTES (insn)
4541 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
4545 #ifdef HAVE_conditional_execution
4546 /* Mark REGNO conditionally dead. Return true if the register is
4547 now unconditionally dead. */
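/* For example (illustrative): a set of (reg 70) under the condition
   (eq (reg 80) (const_int 0)) records that (reg 70) is dead only when
   that test holds. If, scanning backwards, another set is seen under
   the complementary (ne (reg 80) (const_int 0)), ior_reg_cond folds
   the two conditions to const1_rtx and the register becomes
   unconditionally dead. */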
4549 static int
4550 mark_regno_cond_dead (pbi, regno, cond)
4551 struct propagate_block_info *pbi;
4552 int regno;
4553 rtx cond;
4555 /* If this is a store to a predicate register, the value of the
4556 predicate is changing; we don't know whether the predicate as seen
4557 before is the same as that seen after. Flush all dependent
4558 conditions from reg_cond_dead. This will make all such
4559 conditionally live registers unconditionally live. */
4560 if (REGNO_REG_SET_P (pbi->reg_cond_reg, regno))
4561 flush_reg_cond_reg (pbi, regno);
4563 /* If this is an unconditional store, remove any conditional
4564 life that may have existed. */
4565 if (cond == NULL_RTX)
4566 splay_tree_remove (pbi->reg_cond_dead, regno);
4567 else
4569 splay_tree_node node;
4570 struct reg_cond_life_info *rcli;
4571 rtx ncond;
4573 /* Otherwise this is a conditional set. Record that fact.
4574 It may have been conditionally used, or there may be a
4575 subsequent set with a complementary condition. */
4577 node = splay_tree_lookup (pbi->reg_cond_dead, regno);
4578 if (node == NULL)
4580 /* The register was unconditionally live previously.
4581 Record the current condition as the condition under
4582 which it is dead. */
4583 rcli = (struct reg_cond_life_info *)
4584 xmalloc (sizeof (*rcli));
4585 rcli->condition = alloc_EXPR_LIST (0, cond, NULL_RTX);
4586 splay_tree_insert (pbi->reg_cond_dead, regno,
4587 (splay_tree_value) rcli);
4589 SET_REGNO_REG_SET (pbi->reg_cond_reg,
4590 REGNO (XEXP (cond, 0)));
4592 /* Not unconditionally dead. */
4593 return 0;
4595 else
4597 /* The register was conditionally live previously.
4598 Add the new condition to the old. */
4599 rcli = (struct reg_cond_life_info *) node->value;
4600 ncond = rcli->condition;
4601 ncond = ior_reg_cond (ncond, cond);
4603 /* If the register is now unconditionally dead,
4604 remove the entry in the splay_tree. */
4605 if (ncond == const1_rtx)
4606 splay_tree_remove (pbi->reg_cond_dead, regno);
4607 else
4609 rcli->condition = ncond;
4611 SET_REGNO_REG_SET (pbi->reg_cond_reg,
4612 REGNO (XEXP (cond, 0)));
4614 /* Not unconditionally dead. */
4615 return 0;
4620 return 1;
4623 /* Called from splay_tree_delete for pbi->reg_cond_dead. */
4625 static void
4626 free_reg_cond_life_info (value)
4627 splay_tree_value value;
4629 struct reg_cond_life_info *rcli = (struct reg_cond_life_info *) value;
4630 free_EXPR_LIST_list (&rcli->condition);
4631 free (rcli);
4634 /* Helper function for flush_reg_cond_reg. */
4636 static int
4637 flush_reg_cond_reg_1 (node, data)
4638 splay_tree_node node;
4639 void *data;
4641 struct reg_cond_life_info *rcli;
4642 int *xdata = (int *) data;
4643 unsigned int regno = xdata[0];
4644 rtx c, *prev;
4646 /* Don't need to search if last flushed value was farther on in
4647 the in-order traversal. */
4648 if (xdata[1] >= (int) node->key)
4649 return 0;
4651 /* Splice out portions of the expression that refer to regno. */
4652 rcli = (struct reg_cond_life_info *) node->value;
4653 c = *(prev = &rcli->condition);
4654 while (c)
4656 if (regno == REGNO (XEXP (XEXP (c, 0), 0)))
4658 rtx next = XEXP (c, 1);
4659 free_EXPR_LIST_node (c);
4660 c = *prev = next;
4662 else
4663 c = *(prev = &XEXP (c, 1));
4666 /* If the entire condition is now NULL, signal the node to be removed. */
4667 if (! rcli->condition)
4669 xdata[1] = node->key;
4670 return -1;
4672 else
4673 return 0;
4676 /* Flush all (sub) expressions referring to REGNO from REG_COND_DEAD. */
4678 static void
4679 flush_reg_cond_reg (pbi, regno)
4680 struct propagate_block_info *pbi;
4681 int regno;
4683 int pair[2];
4685 pair[0] = regno;
4686 pair[1] = -1;
4687 while (splay_tree_foreach (pbi->reg_cond_dead,
4688 flush_reg_cond_reg_1, pair) == -1)
4689 splay_tree_remove (pbi->reg_cond_dead, pair[1]);
4691 CLEAR_REGNO_REG_SET (pbi->reg_cond_reg, regno);
4694 /* Logical arithmetic on predicate conditions. IOR, NOT and NAND.
4695 We actually use EXPR_LIST to chain the sub-expressions together
4696 instead of IOR because it's easier to manipulate and we have
4697 the lists.c functions to reuse nodes.
4699 Return a new rtl expression as appropriate. */
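/* For example (illustrative): the chain

     (expr_list (eq (reg 80) (const_int 0))
                (expr_list (ne (reg 81) (const_int 0)) nil))

   stands for the IOR of the two tests. ior_reg_cond of that chain
   with (ne (reg 80) (const_int 0)) yields const1_rtx, since a test
   and its complement together cover all cases. */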
4701 static rtx
4702 ior_reg_cond (old, x)
4703 rtx old, x;
4705 enum rtx_code x_code;
4706 rtx x_reg;
4707 rtx c;
4709 /* We expect these conditions to be of the form (eq reg 0). */
4710 x_code = GET_CODE (x);
4711 if (GET_RTX_CLASS (x_code) != '<'
4712 || GET_CODE (x_reg = XEXP (x, 0)) != REG
4713 || XEXP (x, 1) != const0_rtx)
4714 abort ();
4716 /* Search the expression for an existing sub-expression of X_REG. */
4717 for (c = old; c ; c = XEXP (c, 1))
4719 rtx y = XEXP (c, 0);
4720 if (REGNO (XEXP (y, 0)) == REGNO (x_reg))
4722 /* If we find X already present in OLD, we need do nothing. */
4723 if (GET_CODE (y) == x_code)
4724 return old;
4726 /* If we find X being a complement of a condition in OLD,
4727 then the entire condition is true. */
4728 if (GET_CODE (y) == reverse_condition (x_code))
4729 return const1_rtx;
4733 /* Otherwise just add to the chain. */
4734 return alloc_EXPR_LIST (0, x, old);
4737 static rtx
4738 not_reg_cond (x)
4739 rtx x;
4741 enum rtx_code x_code;
4742 rtx x_reg;
4744 /* We expect these conditions to be of the form (eq reg 0). */
4745 x_code = GET_CODE (x);
4746 if (GET_RTX_CLASS (x_code) != '<'
4747 || GET_CODE (x_reg = XEXP (x, 0)) != REG
4748 || XEXP (x, 1) != const0_rtx)
4749 abort ();
4751 return alloc_EXPR_LIST (0, gen_rtx_fmt_ee (reverse_condition (x_code),
4752 VOIDmode, x_reg, const0_rtx),
4753 NULL_RTX);
4756 static rtx
4757 nand_reg_cond (old, x)
4758 rtx old, x;
4760 enum rtx_code x_code;
4761 rtx x_reg;
4762 rtx c, *prev;
4764 /* We expect these conditions to be of the form (eq reg 0). */
4765 x_code = GET_CODE (x);
4766 if (GET_RTX_CLASS (x_code) != '<'
4767 || GET_CODE (x_reg = XEXP (x, 0)) != REG
4768 || XEXP (x, 1) != const0_rtx)
4769 abort ();
4771 /* Search the expression for an existing sub-expression of X_REG. */
4773 for (c = *(prev = &old); c ; c = *(prev = &XEXP (c, 1)))
4775 rtx y = XEXP (c, 0);
4776 if (REGNO (XEXP (y, 0)) == REGNO (x_reg))
4778 /* If we find X already present in OLD, then we need to
4779 splice it out. */
4780 if (GET_CODE (y) == x_code)
4782 *prev = XEXP (c, 1);
4783 free_EXPR_LIST_node (c);
4784 return old ? old : const0_rtx;
4787 /* If we find X being a complement of a condition in OLD,
4788 then we need do nothing. */
4789 if (GET_CODE (y) == reverse_condition (x_code))
4790 return old;
4794 /* Otherwise, by implication, the register in question is now live for
4795 the inverse of the condition X. */
4796 return alloc_EXPR_LIST (0, gen_rtx_fmt_ee (reverse_condition (x_code),
4797 VOIDmode, x_reg, const0_rtx),
4798 old);
4800 #endif /* HAVE_conditional_execution */
4802 #ifdef AUTO_INC_DEC
4804 /* X is a MEM found in INSN. See if we can convert it into an auto-increment
4805 reference. */
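/* For example (illustrative, on a target with HAVE_POST_INCREMENT):

     ... (mem:SI (reg:SI 30)) ...                            <- INSN
     (set (reg:SI 30) (plus:SI (reg:SI 30) (const_int 4)))   <- INCR

   becomes ... (mem:SI (post_inc:SI (reg:SI 30))) ...; INSN gains a
   REG_INC note for (reg 30) and INCR is turned into a deleted-insn
   note (or into a plain copy when its destination is a different
   register). */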
4807 static void
4808 find_auto_inc (pbi, x, insn)
4809 struct propagate_block_info *pbi;
4810 rtx x;
4811 rtx insn;
4813 rtx addr = XEXP (x, 0);
4814 HOST_WIDE_INT offset = 0;
4815 rtx set;
4817 /* Here we detect use of an index register which might be good for
4818 postincrement, postdecrement, preincrement, or predecrement. */
4820 if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
4821 offset = INTVAL (XEXP (addr, 1)), addr = XEXP (addr, 0);
4823 if (GET_CODE (addr) == REG)
4825 register rtx y;
4826 register int size = GET_MODE_SIZE (GET_MODE (x));
4827 rtx use;
4828 rtx incr;
4829 int regno = REGNO (addr);
4831 /* Is the next use an increment that might make auto-increment? */
4832 if ((incr = pbi->reg_next_use[regno]) != 0
4833 && (set = single_set (incr)) != 0
4834 && GET_CODE (set) == SET
4835 && BLOCK_NUM (incr) == BLOCK_NUM (insn)
4836 /* Can't add side effects to jumps; if reg is spilled and
4837 reloaded, there's no way to store back the altered value. */
4838 && GET_CODE (insn) != JUMP_INSN
4839 && (y = SET_SRC (set), GET_CODE (y) == PLUS)
4840 && XEXP (y, 0) == addr
4841 && GET_CODE (XEXP (y, 1)) == CONST_INT
4842 && ((HAVE_POST_INCREMENT
4843 && (INTVAL (XEXP (y, 1)) == size && offset == 0))
4844 || (HAVE_POST_DECREMENT
4845 && (INTVAL (XEXP (y, 1)) == - size && offset == 0))
4846 || (HAVE_PRE_INCREMENT
4847 && (INTVAL (XEXP (y, 1)) == size && offset == size))
4848 || (HAVE_PRE_DECREMENT
4849 && (INTVAL (XEXP (y, 1)) == - size && offset == - size)))
4850 /* Make sure this reg appears only once in this insn. */
4851 && (use = find_use_as_address (PATTERN (insn), addr, offset),
4852 use != 0 && use != (rtx) 1))
4854 rtx q = SET_DEST (set);
4855 enum rtx_code inc_code = (INTVAL (XEXP (y, 1)) == size
4856 ? (offset ? PRE_INC : POST_INC)
4857 : (offset ? PRE_DEC : POST_DEC));
4859 if (dead_or_set_p (incr, addr)
4860 /* Mustn't autoinc an eliminable register. */
4861 && (regno >= FIRST_PSEUDO_REGISTER
4862 || ! TEST_HARD_REG_BIT (elim_reg_set, regno)))
4864 /* This is the simple case. Try to make the auto-inc. If
4865 we can't, we are done. Otherwise, we will do any
4866 needed updates below. */
4867 if (! validate_change (insn, &XEXP (x, 0),
4868 gen_rtx_fmt_e (inc_code, Pmode, addr),
4869 0))
4870 return;
4872 else if (GET_CODE (q) == REG
4873 /* PREV_INSN used here to check the semi-open interval
4874 [insn,incr). */
4875 && ! reg_used_between_p (q, PREV_INSN (insn), incr)
4876 /* We must also check for sets of q as q may be
4877 a call clobbered hard register and there may
4878 be a call between PREV_INSN (insn) and incr. */
4879 && ! reg_set_between_p (q, PREV_INSN (insn), incr))
4881 /* We have *p followed sometime later by q = p+size.
4882 Both p and q must be live afterward,
4883 and q is not used between INSN and its assignment.
4884 Change it to q = p, ...*q..., q = q+size.
4885 Then fall into the usual case. */
4886 rtx insns, temp;
4888 start_sequence ();
4889 emit_move_insn (q, addr);
4890 insns = get_insns ();
4891 end_sequence ();
4893 if (basic_block_for_insn)
4894 for (temp = insns; temp; temp = NEXT_INSN (temp))
4895 set_block_for_insn (temp, pbi->bb);
4897 /* If we can't make the auto-inc, or can't make the
4898 replacement into Y, exit. There's no point in making
4899 the change below if we can't do the auto-inc and doing
4900 so is not correct in the pre-inc case. */
4902 validate_change (insn, &XEXP (x, 0),
4903 gen_rtx_fmt_e (inc_code, Pmode, q),
4904 1);
4905 validate_change (incr, &XEXP (y, 0), q, 1);
4906 if (! apply_change_group ())
4907 return;
4909 /* We now know we'll be doing this change, so emit the
4910 new insn(s) and do the updates. */
4911 emit_insns_before (insns, insn);
4913 if (pbi->bb->head == insn)
4914 pbi->bb->head = insns;
4916 /* INCR will become a NOTE and INSN won't contain a
4917 use of ADDR. If a use of ADDR was just placed in
4918 the insn before INSN, make that the next use.
4919 Otherwise, invalidate it. */
4920 if (GET_CODE (PREV_INSN (insn)) == INSN
4921 && GET_CODE (PATTERN (PREV_INSN (insn))) == SET
4922 && SET_SRC (PATTERN (PREV_INSN (insn))) == addr)
4923 pbi->reg_next_use[regno] = PREV_INSN (insn);
4924 else
4925 pbi->reg_next_use[regno] = 0;
4927 addr = q;
4928 regno = REGNO (q);
4930 /* REGNO is now used in INCR which is below INSN, but it
4931 previously wasn't live here. If we don't mark it as
4932 live, we'll put a REG_DEAD note for it on this insn,
4933 which is incorrect. */
4934 SET_REGNO_REG_SET (pbi->reg_live, regno);
4936 /* If there are any calls between INSN and INCR, show
4937 that REGNO now crosses them. */
4938 for (temp = insn; temp != incr; temp = NEXT_INSN (temp))
4939 if (GET_CODE (temp) == CALL_INSN)
4940 REG_N_CALLS_CROSSED (regno)++;
4942 else
4943 return;
4945 /* If we haven't returned, it means we were able to make the
4946 auto-inc, so update the status. First, record that this insn
4947 has an implicit side effect. */
4949 REG_NOTES (insn)
4950 = alloc_EXPR_LIST (REG_INC, addr, REG_NOTES (insn));
4952 /* Modify the old increment-insn to simply copy
4953 the already-incremented value of our register. */
4954 if (! validate_change (incr, &SET_SRC (set), addr, 0))
4955 abort ();
4957 /* If that makes it a no-op (copying the register into itself), delete
4958 it so it won't appear to be a "use" and a "set" of this
4959 register. */
4960 if (SET_DEST (set) == addr)
4962 /* If the original source was dead, it's dead now. */
4963 rtx note = find_reg_note (incr, REG_DEAD, NULL_RTX);
4964 if (note && XEXP (note, 0) != addr)
4965 CLEAR_REGNO_REG_SET (pbi->reg_live, REGNO (XEXP (note, 0)));
4967 PUT_CODE (incr, NOTE);
4968 NOTE_LINE_NUMBER (incr) = NOTE_INSN_DELETED;
4969 NOTE_SOURCE_FILE (incr) = 0;
4972 if (regno >= FIRST_PSEUDO_REGISTER)
4974 /* Count an extra reference to the reg. When a reg is
4975 incremented, spilling it is worse, so we want to make
4976 that less likely. */
4977 REG_N_REFS (regno) += (optimize_size ? 1
4978 : pbi->bb->loop_depth + 1);
4980 /* Count the increment as a setting of the register,
4981 even though it isn't a SET in rtl. */
4982 REG_N_SETS (regno)++;
4987 #endif /* AUTO_INC_DEC */
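/* Mark the register REG as used in INSN. COND, if nonzero, is the
   condition under which the use occurs. Update the liveness and
   next-use information in PBI and, on the final pass, the
   per-register statistics. */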
4989 static void
4990 mark_used_reg (pbi, reg, cond, insn)
4991 struct propagate_block_info *pbi;
4992 rtx reg;
4993 rtx cond ATTRIBUTE_UNUSED;
4994 rtx insn;
4996 int regno = REGNO (reg);
4997 int some_was_live = REGNO_REG_SET_P (pbi->reg_live, regno);
4998 int some_was_dead = ! some_was_live;
4999 int some_not_set;
5000 int n;
5002 /* A hard reg in a wide mode may really be multiple registers.
5003 If so, mark all of them just like the first. */
5004 if (regno < FIRST_PSEUDO_REGISTER)
5006 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
5007 while (--n > 0)
5009 int needed_regno = REGNO_REG_SET_P (pbi->reg_live, regno + n);
5010 some_was_live |= needed_regno;
5011 some_was_dead |= ! needed_regno;
5015 if (pbi->flags & (PROP_LOG_LINKS | PROP_AUTOINC))
5017 /* Record where each reg is used, so when the reg is set we know
5018 the next insn that uses it. */
5019 pbi->reg_next_use[regno] = insn;
5022 if (pbi->flags & PROP_REG_INFO)
5024 if (regno < FIRST_PSEUDO_REGISTER)
5026 /* If this is a register we are going to try to eliminate,
5027 don't mark it live here. If we are successful in
5028 eliminating it, it need not be live unless it is used for
5029 pseudos, in which case it will have been set live when it
5030 was allocated to the pseudos. If the register will not
5031 be eliminated, reload will set it live at that point.
5033 Otherwise, record that this function uses this register. */
5034 /* ??? The PPC backend tries to "eliminate" the pic register
5035 to itself. This should be fixed. In the meantime, hack
5036 around it. */
5038 if (! (TEST_HARD_REG_BIT (elim_reg_set, regno)
5039 && (regno == FRAME_POINTER_REGNUM
5040 || regno == ARG_POINTER_REGNUM)))
5042 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
5043 do
5044 regs_ever_live[regno + --n] = 1;
5045 while (n > 0);
5048 else
5050 /* Keep track of which basic block each reg appears in. */
5052 register int blocknum = pbi->bb->index;
5053 if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
5054 REG_BASIC_BLOCK (regno) = blocknum;
5055 else if (REG_BASIC_BLOCK (regno) != blocknum)
5056 REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
5058 /* Count (weighted) number of uses of each reg. */
5059 REG_N_REFS (regno) += (optimize_size ? 1
5060 : pbi->bb->loop_depth + 1);
5064 /* Find out if any part of the register was set in this insn. */
5065 some_not_set = ! REGNO_REG_SET_P (pbi->new_set, regno);
5066 if (regno < FIRST_PSEUDO_REGISTER)
5068 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
5069 while (--n > 0)
5070 some_not_set |= ! REGNO_REG_SET_P (pbi->new_set, regno + n);
5073 /* Record and count the insns in which a reg dies. If it is used in
5074 this insn and was dead below the insn then it dies in this insn.
5075 If it was set in this insn, we do not make a REG_DEAD note;
5076 likewise if we already made such a note. */
5077 if ((pbi->flags & (PROP_DEATH_NOTES | PROP_REG_INFO))
5078 && some_was_dead
5079 && some_not_set)
5081 /* Check for the case where the register dying partially
5082 overlaps the register set by this insn. */
5083 if (regno < FIRST_PSEUDO_REGISTER
5084 && HARD_REGNO_NREGS (regno, GET_MODE (reg)) > 1)
5086 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
5087 while (--n >= 0)
5088 some_was_live |= REGNO_REG_SET_P (pbi->new_set, regno + n);
5091 /* If none of the words in X is needed, make a REG_DEAD note.
5092 Otherwise, we must make partial REG_DEAD notes. */
5093 if (! some_was_live)
5095 if ((pbi->flags & PROP_DEATH_NOTES)
5096 && ! find_regno_note (insn, REG_DEAD, regno))
5097 REG_NOTES (insn)
5098 = alloc_EXPR_LIST (REG_DEAD, reg, REG_NOTES (insn));
5100 if (pbi->flags & PROP_REG_INFO)
5101 REG_N_DEATHS (regno)++;
5103 else
5105 /* Don't make a REG_DEAD note for a part of a register
5106 that is set in the insn. */
5108 n = regno + HARD_REGNO_NREGS (regno, GET_MODE (reg)) - 1;
5109 for (; n >= regno; n--)
5110 if (! REGNO_REG_SET_P (pbi->reg_live, n)
5111 && ! dead_or_set_regno_p (insn, n))
5112 REG_NOTES (insn)
5113 = alloc_EXPR_LIST (REG_DEAD,
5114 gen_rtx_REG (reg_raw_mode[n], n),
5115 REG_NOTES (insn));
5119 SET_REGNO_REG_SET (pbi->reg_live, regno);
5120 if (regno < FIRST_PSEUDO_REGISTER)
5122 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
5123 while (--n > 0)
5124 SET_REGNO_REG_SET (pbi->reg_live, regno + n);
5127 #ifdef HAVE_conditional_execution
5128 /* If this is a conditional use, record that fact. If it is later
5129 conditionally set, we'll know to kill the register. */
5130 if (cond != NULL_RTX)
5132 splay_tree_node node;
5133 struct reg_cond_life_info *rcli;
5134 rtx ncond;
5136 if (some_was_live)
5138 node = splay_tree_lookup (pbi->reg_cond_dead, regno);
5139 if (node == NULL)
5141 /* The register was unconditionally live previously.
5142 No need to do anything. */
5144 else
5146 /* The register was conditionally live previously.
5147 Subtract the new life cond from the old death cond. */
5148 rcli = (struct reg_cond_life_info *) node->value;
5149 ncond = rcli->condition;
5150 ncond = nand_reg_cond (ncond, cond);
5152 /* If the register is now unconditionally live, remove the
5153 entry in the splay_tree. */
5154 if (ncond == const0_rtx)
5156 rcli->condition = NULL_RTX;
5157 splay_tree_remove (pbi->reg_cond_dead, regno);
5159 else
5160 rcli->condition = ncond;
5163 else
5165 /* The register was not previously live at all. Record
5166 the condition under which it is still dead. */
5167 rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
5168 rcli->condition = not_reg_cond (cond);
5169 splay_tree_insert (pbi->reg_cond_dead, regno,
5170 (splay_tree_value) rcli);
5173 else if (some_was_live)
5175 splay_tree_node node;
5176 struct reg_cond_life_info *rcli;
5178 node = splay_tree_lookup (pbi->reg_cond_dead, regno);
5179 if (node != NULL)
5181 /* The register was conditionally live previously, but is now
5182 unconditionally so. Remove it from the conditionally dead
5183 list, so that a conditional set won't cause us to think
5184 it dead. */
5185 rcli = (struct reg_cond_life_info *) node->value;
5186 rcli->condition = NULL_RTX;
5187 splay_tree_remove (pbi->reg_cond_dead, regno);
5191 #endif
5194 /* Scan expression X and mark each register it uses as live by
5195 setting the corresponding 1-bit in PBI->REG_LIVE. COND, if
5196 nonzero, is the condition under which the uses occur.
5198 INSN is the containing instruction. If INSN is dead, this function
5199 is not called. */
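/* For example (illustrative): in

     (set (mem:SI (reg:SI 30)) (reg:SI 31))

   the uses are (reg 30), as part of the store address, and (reg 31),
   the value stored; the MEM itself is a store, so SET_DEST contributes
   only its address as a use. */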
5201 static void
5202 mark_used_regs (pbi, x, cond, insn)
5203 struct propagate_block_info *pbi;
5204 rtx x, cond, insn;
5206 register RTX_CODE code;
5207 register int regno;
5208 int flags = pbi->flags;
5210 retry:
5211 code = GET_CODE (x);
5212 switch (code)
5214 case LABEL_REF:
5215 case SYMBOL_REF:
5216 case CONST_INT:
5217 case CONST:
5218 case CONST_DOUBLE:
5219 case PC:
5220 case ADDR_VEC:
5221 case ADDR_DIFF_VEC:
5222 return;
5224 #ifdef HAVE_cc0
5225 case CC0:
5226 pbi->cc0_live = 1;
5227 return;
5228 #endif
5230 case CLOBBER:
5231 /* If we are clobbering a MEM, mark any registers inside the address
5232 as being used. */
5233 if (GET_CODE (XEXP (x, 0)) == MEM)
5234 mark_used_regs (pbi, XEXP (XEXP (x, 0), 0), cond, insn);
5235 return;
5237 case MEM:
5238 /* Don't bother watching stores to mems if this is not the
5239 final pass. We'll not be deleting dead stores this round. */
5240 if (flags & PROP_SCAN_DEAD_CODE)
5242 /* Invalidate the data for the last MEM stored, but only if MEM is
5243 something that can be stored into. */
5244 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
5245 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
5246 ; /* needn't clear the memory set list */
5247 else
5249 rtx temp = pbi->mem_set_list;
5250 rtx prev = NULL_RTX;
5251 rtx next;
5253 while (temp)
5255 next = XEXP (temp, 1);
5256 if (anti_dependence (XEXP (temp, 0), x))
5258 /* Splice temp out of the list. */
5259 if (prev)
5260 XEXP (prev, 1) = next;
5261 else
5262 pbi->mem_set_list = next;
5263 free_EXPR_LIST_node (temp);
5265 else
5266 prev = temp;
5267 temp = next;
5271 /* If the memory reference had embedded side effects (autoincrement
5272 address modes), then we may need to kill some entries on the
5273 memory set list. */
5274 if (insn)
5275 invalidate_mems_from_autoinc (pbi, insn);
5278 #ifdef AUTO_INC_DEC
5279 if (flags & PROP_AUTOINC)
5280 find_auto_inc (pbi, x, insn);
5281 #endif
5282 break;
5284 case SUBREG:
5285 #ifdef CLASS_CANNOT_CHANGE_MODE
5286 if (GET_CODE (SUBREG_REG (x)) == REG
5287 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER
5288 && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (x),
5289 GET_MODE (SUBREG_REG (x))))
5290 REG_CHANGES_MODE (REGNO (SUBREG_REG (x))) = 1;
5291 #endif
5293 /* While we're here, optimize this case. */
5294 x = SUBREG_REG (x);
5295 if (GET_CODE (x) != REG)
5296 goto retry;
5297 /* FALLTHRU */
5299 case REG:
5300 /* Seeing a register used (rather than set) => mark it as needed. */
5301 mark_used_reg (pbi, x, cond, insn);
5302 return;
5304 case SET:
5306 register rtx testreg = SET_DEST (x);
5307 int mark_dest = 0;
5309 /* If storing into MEM, don't show it as being used. But do
5310 show the address as being used. */
5311 if (GET_CODE (testreg) == MEM)
5313 #ifdef AUTO_INC_DEC
5314 if (flags & PROP_AUTOINC)
5315 find_auto_inc (pbi, testreg, insn);
5316 #endif
5317 mark_used_regs (pbi, XEXP (testreg, 0), cond, insn);
5318 mark_used_regs (pbi, SET_SRC (x), cond, insn);
5319 return;
5322 /* Storing in STRICT_LOW_PART is like storing in a reg
5323 in that this SET might be dead, so ignore it in TESTREG,
5324 but in some other ways it is like using the reg.
5326 Storing in a SUBREG or a bit field is like storing the entire
5327 register in that if the register's value is not used
5328 then this SET is not needed. */
5329 while (GET_CODE (testreg) == STRICT_LOW_PART
5330 || GET_CODE (testreg) == ZERO_EXTRACT
5331 || GET_CODE (testreg) == SIGN_EXTRACT
5332 || GET_CODE (testreg) == SUBREG)
5334 #ifdef CLASS_CANNOT_CHANGE_MODE
5335 if (GET_CODE (testreg) == SUBREG
5336 && GET_CODE (SUBREG_REG (testreg)) == REG
5337 && REGNO (SUBREG_REG (testreg)) >= FIRST_PSEUDO_REGISTER
5338 && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (SUBREG_REG (testreg)),
5339 GET_MODE (testreg)))
5340 REG_CHANGES_MODE (REGNO (SUBREG_REG (testreg))) = 1;
5341 #endif
5343 /* Modifying a single register in an alternate mode
5344 does not use any of the old value. But these other
5345 ways of storing in a register do use the old value. */
5346 if (GET_CODE (testreg) == SUBREG
5347 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
5348 ;
5349 else
5350 mark_dest = 1;
5352 testreg = XEXP (testreg, 0);
5355 /* If this is a store into a register, recursively scan the
5356 value being stored. */
5358 if ((GET_CODE (testreg) == PARALLEL
5359 && GET_MODE (testreg) == BLKmode)
5360 || (GET_CODE (testreg) == REG
5361 && (regno = REGNO (testreg),
5362 ! (regno == FRAME_POINTER_REGNUM
5363 && (! reload_completed || frame_pointer_needed)))
5364 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
5365 && ! (regno == HARD_FRAME_POINTER_REGNUM
5366 && (! reload_completed || frame_pointer_needed))
5367 #endif
5368 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
5369 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
5370 #endif
5373 if (mark_dest)
5374 mark_used_regs (pbi, SET_DEST (x), cond, insn);
5375 mark_used_regs (pbi, SET_SRC (x), cond, insn);
5376 return;
5379 break;
5381 case ASM_OPERANDS:
5382 case UNSPEC_VOLATILE:
5383 case TRAP_IF:
5384 case ASM_INPUT:
5386 /* Traditional and volatile asm instructions must be considered to use
5387 and clobber all hard registers, all pseudo-registers and all of
5388 memory. So must TRAP_IF and UNSPEC_VOLATILE operations.
5390 Consider for instance a volatile asm that changes the fpu rounding
5391 mode. An insn should not be moved across this even if it only uses
5392 pseudo-regs because it might give an incorrectly rounded result.
5394 ?!? Unfortunately, marking all hard registers as live causes massive
5395 problems for the register allocator and marking all pseudos as live
5396 creates mountains of uninitialized variable warnings.
5398 So for now, just clear the memory set list and mark any regs
5399 we can find in ASM_OPERANDS as used. */
5400 if (code != ASM_OPERANDS || MEM_VOLATILE_P (x))
5401 free_EXPR_LIST_list (&pbi->mem_set_list);
5403 /* For all ASM_OPERANDS, we must traverse the vector of input operands.
5404 We cannot just fall through here since then we would be confused
5405 by the ASM_INPUT rtx inside ASM_OPERANDS, which does not indicate
5406 a traditional asm the way a standalone ASM_INPUT does. */
5407 if (code == ASM_OPERANDS)
5409 int j;
5411 for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
5412 mark_used_regs (pbi, ASM_OPERANDS_INPUT (x, j), cond, insn);
5414 break;
5417 case COND_EXEC:
5418 if (cond != NULL_RTX)
5419 abort ();
5421 mark_used_regs (pbi, COND_EXEC_TEST (x), NULL_RTX, insn);
5423 cond = COND_EXEC_TEST (x);
5424 x = COND_EXEC_CODE (x);
5425 goto retry;
5427 case PHI:
5428 /* We _do_not_ want to scan operands of phi nodes. Operands of
5429 a phi function are evaluated only when control reaches this
5430 block along a particular edge. Therefore, regs that appear
5431 as arguments to phi should not be added to the global live at
5432 start. */
5433 return;
5435 default:
5436 break;
5439 /* Recursively scan the operands of this expression. */
5442 register const char *fmt = GET_RTX_FORMAT (code);
5443 register int i;
5445 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5447 if (fmt[i] == 'e')
5449 /* Tail recursive case: save a function call level. */
5450 if (i == 0)
5452 x = XEXP (x, 0);
5453 goto retry;
5455 mark_used_regs (pbi, XEXP (x, i), cond, insn);
5457 else if (fmt[i] == 'E')
5459 register int j;
5460 for (j = 0; j < XVECLEN (x, i); j++)
5461 mark_used_regs (pbi, XVECEXP (x, i, j), cond, insn);
5467 #ifdef AUTO_INC_DEC
5469 static int
5470 try_pre_increment_1 (pbi, insn)
5471 struct propagate_block_info *pbi;
5472 rtx insn;
5474 /* Find the next use of this reg. If in same basic block,
5475 make it do pre-increment or pre-decrement if appropriate. */
5476 rtx x = single_set (insn);
5477 HOST_WIDE_INT amount = ((GET_CODE (SET_SRC (x)) == PLUS ? 1 : -1)
5478 * INTVAL (XEXP (SET_SRC (x), 1)));
5479 int regno = REGNO (SET_DEST (x));
5480 rtx y = pbi->reg_next_use[regno];
5481 if (y != 0
5482 && BLOCK_NUM (y) == BLOCK_NUM (insn)
5483 /* Don't do this if the reg dies, or gets set in y; a standard addressing
5484 mode would be better. */
5485 && ! dead_or_set_p (y, SET_DEST (x))
5486 && try_pre_increment (y, SET_DEST (x), amount))
5488 /* We have found a suitable auto-increment
5489 and already changed insn Y to do it.
5490 So flush this increment-instruction. */
5491 PUT_CODE (insn, NOTE);
5492 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
5493 NOTE_SOURCE_FILE (insn) = 0;
5494 /* Count a reference to this reg for the increment
5495 insn we are deleting. When a reg is incremented,
5496 spilling it is worse, so we want to make that
5497 less likely. */
5498 if (regno >= FIRST_PSEUDO_REGISTER)
5500 REG_N_REFS (regno) += (optimize_size ? 1
5501 : pbi->bb->loop_depth + 1);
5502 REG_N_SETS (regno)++;
5504 return 1;
5506 return 0;
5509 /* Try to change INSN so that it does pre-increment or pre-decrement
5510 addressing on register REG in order to add AMOUNT to REG.
5511 AMOUNT is negative for pre-decrement.
5512 Returns 1 if the change could be made.
5513 This checks all about the validity of the result of modifying INSN. */
5515 static int
5516 try_pre_increment (insn, reg, amount)
5517 rtx insn, reg;
5518 HOST_WIDE_INT amount;
5520 register rtx use;
5522 /* Nonzero if we can try to make a pre-increment or pre-decrement.
5523 For example, addl $4,r1; movl (r1),... can become movl +(r1),... */
5524 int pre_ok = 0;
5525 /* Nonzero if we can try to make a post-increment or post-decrement.
5526 For example, addl $4,r1; movl -4(r1),... can become movl (r1)+,...
5527 It is possible for both PRE_OK and POST_OK to be nonzero if the machine
5528 supports both pre-inc and post-inc, or both pre-dec and post-dec. */
5529 int post_ok = 0;
5531 /* Nonzero if the opportunity actually requires post-inc or post-dec. */
5532 int do_post = 0;
5534 /* From the sign of increment, see which possibilities are conceivable
5535 on this target machine. */
5536 if (HAVE_PRE_INCREMENT && amount > 0)
5537 pre_ok = 1;
5538 if (HAVE_POST_INCREMENT && amount > 0)
5539 post_ok = 1;
5541 if (HAVE_PRE_DECREMENT && amount < 0)
5542 pre_ok = 1;
5543 if (HAVE_POST_DECREMENT && amount < 0)
5544 post_ok = 1;
5546 if (! (pre_ok || post_ok))
5547 return 0;
5549 /* It is not safe to add a side effect to a jump insn
5550 because if the incremented register is spilled and must be reloaded
5551 there would be no way to store the incremented value back in memory. */
5553 if (GET_CODE (insn) == JUMP_INSN)
5554 return 0;
5556 use = 0;
5557 if (pre_ok)
5558 use = find_use_as_address (PATTERN (insn), reg, 0);
5559 if (post_ok && (use == 0 || use == (rtx) 1))
5561 use = find_use_as_address (PATTERN (insn), reg, -amount);
5562 do_post = 1;
5565 if (use == 0 || use == (rtx) 1)
5566 return 0;
5568 if (GET_MODE_SIZE (GET_MODE (use)) != (amount > 0 ? amount : - amount))
5569 return 0;
5571 /* See if this combination of instruction and addressing mode exists. */
5572 if (! validate_change (insn, &XEXP (use, 0),
5573 gen_rtx_fmt_e (amount > 0
5574 ? (do_post ? POST_INC : PRE_INC)
5575 : (do_post ? POST_DEC : PRE_DEC),
5576 Pmode, reg), 0))
5577 return 0;
5579 /* Record that this insn now has an implicit side effect on REG. */
5580 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, reg, REG_NOTES (insn));
5581 return 1;
5584 #endif /* AUTO_INC_DEC */
5586 /* Find the place in the rtx X where REG is used as a memory address.
5587 Return the MEM rtx that so uses it.
5588 If PLUSCONST is nonzero, search instead for a memory address equivalent to
5589 (plus REG (const_int PLUSCONST)).
5591 If such an address does not appear, return 0.
5592 If REG appears more than once, or is used other than in such an address,
5593 return (rtx)1. */
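/* For example (illustrative): within

     (set (reg:SI 50) (mem:SI (plus:SI (reg:SI 30) (const_int 4))))

   find_use_as_address with REG == (reg 30) and PLUSCONST == 4 returns
   the MEM, while PLUSCONST == 0 yields (rtx) 1, because (reg 30) then
   occurs somewhere other than a matching address. */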
5595 rtx
5596 find_use_as_address (x, reg, plusconst)
5597 register rtx x;
5598 rtx reg;
5599 HOST_WIDE_INT plusconst;
5601 enum rtx_code code = GET_CODE (x);
5602 const char *fmt = GET_RTX_FORMAT (code);
5603 register int i;
5604 register rtx value = 0;
5605 register rtx tem;
5607 if (code == MEM && XEXP (x, 0) == reg && plusconst == 0)
5608 return x;
5610 if (code == MEM && GET_CODE (XEXP (x, 0)) == PLUS
5611 && XEXP (XEXP (x, 0), 0) == reg
5612 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
5613 && INTVAL (XEXP (XEXP (x, 0), 1)) == plusconst)
5614 return x;
5616 if (code == SIGN_EXTRACT || code == ZERO_EXTRACT)
5618 /* If REG occurs inside a MEM used in a bit-field reference,
5619 that is unacceptable. */
5620 if (find_use_as_address (XEXP (x, 0), reg, 0) != 0)
5621 return (rtx) (HOST_WIDE_INT) 1;
5624 if (x == reg)
5625 return (rtx) (HOST_WIDE_INT) 1;
5627 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5629 if (fmt[i] == 'e')
5631 tem = find_use_as_address (XEXP (x, i), reg, plusconst);
5632 if (value == 0)
5633 value = tem;
5634 else if (tem != 0)
5635 return (rtx) (HOST_WIDE_INT) 1;
5637 else if (fmt[i] == 'E')
5639 register int j;
5640 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
5642 tem = find_use_as_address (XVECEXP (x, i, j), reg, plusconst);
5643 if (value == 0)
5644 value = tem;
5645 else if (tem != 0)
5646 return (rtx) (HOST_WIDE_INT) 1;
5651 return value;
5654 /* Write information about registers and basic blocks into FILE.
5655 This is part of making a debugging dump. */
5657 void
5658 dump_regset (r, outf)
5659 regset r;
5660 FILE *outf;
5662 int i;
5663 if (r == NULL)
5665 fputs (" (nil)", outf);
5666 return;
5669 EXECUTE_IF_SET_IN_REG_SET (r, 0, i,
5671 fprintf (outf, " %d", i);
5672 if (i < FIRST_PSEUDO_REGISTER)
5673 fprintf (outf, " [%s]",
5674 reg_names[i]);
5678 void
5679 debug_regset (r)
5680 regset r;
5682 dump_regset (r, stderr);
5683 putc ('\n', stderr);
5686 void
5687 dump_flow_info (file)
5688 FILE *file;
5690 register int i;
5691 static const char * const reg_class_names[] = REG_CLASS_NAMES;
5693 fprintf (file, "%d registers.\n", max_regno);
5694 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
5695 if (REG_N_REFS (i))
5697 enum reg_class class, altclass;
5698 fprintf (file, "\nRegister %d used %d times across %d insns",
5699 i, REG_N_REFS (i), REG_LIVE_LENGTH (i));
5700 if (REG_BASIC_BLOCK (i) >= 0)
5701 fprintf (file, " in block %d", REG_BASIC_BLOCK (i));
5702 if (REG_N_SETS (i))
5703 fprintf (file, "; set %d time%s", REG_N_SETS (i),
5704 (REG_N_SETS (i) == 1) ? "" : "s");
5705 if (REG_USERVAR_P (regno_reg_rtx[i]))
5706 fprintf (file, "; user var");
5707 if (REG_N_DEATHS (i) != 1)
5708 fprintf (file, "; dies in %d places", REG_N_DEATHS (i));
5709 if (REG_N_CALLS_CROSSED (i) == 1)
5710 fprintf (file, "; crosses 1 call");
5711 else if (REG_N_CALLS_CROSSED (i))
5712 fprintf (file, "; crosses %d calls", REG_N_CALLS_CROSSED (i));
5713 if (PSEUDO_REGNO_BYTES (i) != UNITS_PER_WORD)
5714 fprintf (file, "; %d bytes", PSEUDO_REGNO_BYTES (i));
5715 class = reg_preferred_class (i);
5716 altclass = reg_alternate_class (i);
5717 if (class != GENERAL_REGS || altclass != ALL_REGS)
5719 if (altclass == ALL_REGS || class == ALL_REGS)
5720 fprintf (file, "; pref %s", reg_class_names[(int) class]);
5721 else if (altclass == NO_REGS)
5722 fprintf (file, "; %s or none", reg_class_names[(int) class]);
5723 else
5724 fprintf (file, "; pref %s, else %s",
5725 reg_class_names[(int) class],
5726 reg_class_names[(int) altclass]);
5728 if (REGNO_POINTER_FLAG (i))
5729 fprintf (file, "; pointer");
5730 fprintf (file, ".\n");
5733 fprintf (file, "\n%d basic blocks, %d edges.\n", n_basic_blocks, n_edges);
5734 for (i = 0; i < n_basic_blocks; i++)
5736 register basic_block bb = BASIC_BLOCK (i);
5737 register edge e;
5739 fprintf (file, "\nBasic block %d: first insn %d, last %d, loop_depth %d, count %d.\n",
5740 i, INSN_UID (bb->head), INSN_UID (bb->end), bb->loop_depth, bb->count);
5742 fprintf (file, "Predecessors: ");
5743 for (e = bb->pred; e ; e = e->pred_next)
5744 dump_edge_info (file, e, 0);
5746 fprintf (file, "\nSuccessors: ");
5747 for (e = bb->succ; e ; e = e->succ_next)
5748 dump_edge_info (file, e, 1);
5750 fprintf (file, "\nRegisters live at start:");
5751 dump_regset (bb->global_live_at_start, file);
5753 fprintf (file, "\nRegisters live at end:");
5754 dump_regset (bb->global_live_at_end, file);
5756 putc('\n', file);
5759 putc('\n', file);
5762 void
5763 debug_flow_info ()
5765 dump_flow_info (stderr);
5768 static void
5769 dump_edge_info (file, e, do_succ)
5770 FILE *file;
5771 edge e;
5772 int do_succ;
5774 basic_block side = (do_succ ? e->dest : e->src);
5776 if (side == ENTRY_BLOCK_PTR)
5777 fputs (" ENTRY", file);
5778 else if (side == EXIT_BLOCK_PTR)
5779 fputs (" EXIT", file);
5780 else
5781 fprintf (file, " %d", side->index);
5783 if (e->count)
5784 fprintf (file, " count:%d", e->count);
5786 if (e->flags)
5788 static const char * const bitnames[] = {
5789 "fallthru", "crit", "ab", "abcall", "eh", "fake"
5791 int comma = 0;
5792 int i, flags = e->flags;
5794 fputc (' ', file);
5795 fputc ('(', file);
5796 for (i = 0; flags; i++)
5797 if (flags & (1 << i))
5799 flags &= ~(1 << i);
5801 if (comma)
5802 fputc (',', file);
5803 if (i < (int)(sizeof (bitnames) / sizeof (*bitnames)))
5804 fputs (bitnames[i], file);
5805 else
5806 fprintf (file, "%d", i);
5807 comma = 1;
5809 fputc (')', file);
5814 /* Print out one basic block with live information at start and end. */
5815 void
5816 dump_bb (bb, outf)
5817 basic_block bb;
5818 FILE *outf;
5820 rtx insn;
5821 rtx last;
5822 edge e;
5824 fprintf (outf, ";; Basic block %d, loop depth %d, count %d",
5825 bb->index, bb->loop_depth, bb->count);
5826 if (bb->eh_beg != -1 || bb->eh_end != -1)
5827 fprintf (outf, ", eh regions %d/%d", bb->eh_beg, bb->eh_end);
5828 putc ('\n', outf);
5830 fputs (";; Predecessors: ", outf);
5831 for (e = bb->pred; e ; e = e->pred_next)
5832 dump_edge_info (outf, e, 0);
5833 putc ('\n', outf);
5835 fputs (";; Registers live at start:", outf);
5836 dump_regset (bb->global_live_at_start, outf);
5837 putc ('\n', outf);
5839 for (insn = bb->head, last = NEXT_INSN (bb->end);
5840 insn != last;
5841 insn = NEXT_INSN (insn))
5842 print_rtl_single (outf, insn);
5844 fputs (";; Registers live at end:", outf);
5845 dump_regset (bb->global_live_at_end, outf);
5846 putc ('\n', outf);
5848 fputs (";; Successors: ", outf);
5849 for (e = bb->succ; e; e = e->succ_next)
5850 dump_edge_info (outf, e, 1);
5851 putc ('\n', outf);
5854 void
5855 debug_bb (bb)
5856 basic_block bb;
5858 dump_bb (bb, stderr);
5861 void
5862 debug_bb_n (n)
5863 int n;
5865 dump_bb (BASIC_BLOCK(n), stderr);
5868 /* Like print_rtl, but also print out live information for the start of each
5869 basic block. */
5871 void
5872 print_rtl_with_bb (outf, rtx_first)
5873 FILE *outf;
5874 rtx rtx_first;
5876 register rtx tmp_rtx;
5878 if (rtx_first == 0)
5879 fprintf (outf, "(nil)\n");
5880 else
5882 int i;
5883 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
5884 int max_uid = get_max_uid ();
5885 basic_block *start = (basic_block *)
5886 xcalloc (max_uid, sizeof (basic_block));
5887 basic_block *end = (basic_block *)
5888 xcalloc (max_uid, sizeof (basic_block));
5889 enum bb_state *in_bb_p = (enum bb_state *)
5890 xcalloc (max_uid, sizeof (enum bb_state));
5892 for (i = n_basic_blocks - 1; i >= 0; i--)
5894 basic_block bb = BASIC_BLOCK (i);
5895 rtx x;
5897 start[INSN_UID (bb->head)] = bb;
5898 end[INSN_UID (bb->end)] = bb;
5899 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
5901 enum bb_state state = IN_MULTIPLE_BB;
5902 if (in_bb_p[INSN_UID(x)] == NOT_IN_BB)
5903 state = IN_ONE_BB;
5904 in_bb_p[INSN_UID(x)] = state;
5906 if (x == bb->end)
5907 break;
5911 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
5913 int did_output;
5914 basic_block bb;
5916 if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
5918 fprintf (outf, ";; Start of basic block %d, registers live:",
5919 bb->index);
5920 dump_regset (bb->global_live_at_start, outf);
5921 putc ('\n', outf);
5924 if (in_bb_p[INSN_UID(tmp_rtx)] == NOT_IN_BB
5925 && GET_CODE (tmp_rtx) != NOTE
5926 && GET_CODE (tmp_rtx) != BARRIER)
5927 fprintf (outf, ";; Insn is not within a basic block\n");
5928 else if (in_bb_p[INSN_UID(tmp_rtx)] == IN_MULTIPLE_BB)
5929 fprintf (outf, ";; Insn is in multiple basic blocks\n");
5931 did_output = print_rtl_single (outf, tmp_rtx);
5933 if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
5935 fprintf (outf, ";; End of basic block %d, registers live:\n",
5936 bb->index);
5937 dump_regset (bb->global_live_at_end, outf);
5938 putc ('\n', outf);
5941 if (did_output)
5942 putc ('\n', outf);
5945 free (start);
5946 free (end);
5947 free (in_bb_p);
5950 if (current_function_epilogue_delay_list != 0)
5952 fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
5953 for (tmp_rtx = current_function_epilogue_delay_list; tmp_rtx != 0;
5954 tmp_rtx = XEXP (tmp_rtx, 1))
5955 print_rtl_single (outf, XEXP (tmp_rtx, 0));
5959 /* Compute dominator relationships using new flow graph structures. */
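/* The iteration below solves, for every block B other than ENTRY,

     Dom (B) = {B} union (intersection of Dom (P) over all preds P)

   starting from the optimistic guess that every block dominates every
   other block, so the fixed point reached is the maximal solution. */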
5960 void
5961 compute_flow_dominators (dominators, post_dominators)
5962 sbitmap *dominators;
5963 sbitmap *post_dominators;
5965 int bb;
5966 sbitmap *temp_bitmap;
5967 edge e;
5968 basic_block *worklist, *workend, *qin, *qout;
5969 int qlen;
5971 /* Allocate a worklist array/queue. Entries are only added to the
5972 list if they were not already on the list. So the size is
5973 bounded by the number of basic blocks. */
5974 worklist = (basic_block *) xmalloc (sizeof (basic_block) * n_basic_blocks);
5975 workend = &worklist[n_basic_blocks];
5977 temp_bitmap = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
5978 sbitmap_vector_zero (temp_bitmap, n_basic_blocks);
5980 if (dominators)
5982 /* The optimistic setting of dominators requires us to put every
5983 block on the work list initially. */
5984 qin = qout = worklist;
5985 for (bb = 0; bb < n_basic_blocks; bb++)
5987 *qin++ = BASIC_BLOCK (bb);
5988 BASIC_BLOCK (bb)->aux = BASIC_BLOCK (bb);
5990 qlen = n_basic_blocks;
5991 qin = worklist;
5993 /* We want a maximal solution, so initially assume everything dominates
5994 everything else. */
5995 sbitmap_vector_ones (dominators, n_basic_blocks);
5997 /* Mark successors of the entry block so we can identify them below. */
5998 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
5999 e->dest->aux = ENTRY_BLOCK_PTR;
6001 /* Iterate until the worklist is empty. */
6002 while (qlen)
6004 /* Take the first entry off the worklist. */
6005 basic_block b = *qout++;
6006 if (qout >= workend)
6007 qout = worklist;
6008 qlen--;
6010 bb = b->index;
6012 /* Compute the intersection of the dominators of all the
6013 predecessor blocks.
6015 If one of the predecessor blocks is the ENTRY block, then the
6016 intersection of the dominators of the predecessor blocks is
6017 defined as the null set. We can identify such blocks by the
6018 special value in the AUX field in the block structure. */
6019 if (b->aux == ENTRY_BLOCK_PTR)
6021 /* Do not clear the aux field for blocks which are
6022 successors of the ENTRY block. That way we never
6023 add them to the worklist again.
6025 The intersection of the dominators of this block's
6026 predecessors is defined as the null set. */
6027 sbitmap_zero (temp_bitmap[bb]);
6029 else
6031 /* Clear the aux field of this block so it can be added to
6032 the worklist again if necessary. */
6033 b->aux = NULL;
6034 sbitmap_intersection_of_preds (temp_bitmap[bb], dominators, bb);
6037 /* Make sure each block always dominates itself. */
6038 SET_BIT (temp_bitmap[bb], bb);
6040 /* If the out state of this block changed, then we need to
6041 add the successors of this block to the worklist if they
6042 are not already on the worklist. */
6043 if (sbitmap_a_and_b (dominators[bb], dominators[bb], temp_bitmap[bb]))
6045 for (e = b->succ; e; e = e->succ_next)
6047 if (!e->dest->aux && e->dest != EXIT_BLOCK_PTR)
6049 *qin++ = e->dest;
6050 if (qin >= workend)
6051 qin = worklist;
6052 qlen++;
6054 e->dest->aux = e;
6061 if (post_dominators)
6063 /* The optimistic setting of post-dominators requires us to put every
6064 block on the work list initially. */
6065 qin = qout = worklist;
6066 for (bb = 0; bb < n_basic_blocks; bb++)
6068 *qin++ = BASIC_BLOCK (bb);
6069 BASIC_BLOCK (bb)->aux = BASIC_BLOCK (bb);
6071 qlen = n_basic_blocks;
6072 qin = worklist;
6074 /* We want a maximal solution, so initially assume everything post
6075 dominates everything else. */
6076 sbitmap_vector_ones (post_dominators, n_basic_blocks);
6078 /* Mark predecessors of the exit block so we can identify them below. */
6079 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
6080 e->src->aux = EXIT_BLOCK_PTR;
6082 /* Iterate until the worklist is empty. */
6083 while (qlen)
6085 /* Take the first entry off the worklist. */
6086 basic_block b = *qout++;
6087 if (qout >= workend)
6088 qout = worklist;
6089 qlen--;
6091 bb = b->index;
6093 /* Compute the intersection of the post dominators of all the
6094 successor blocks.
6096 If one of the successor blocks is the EXIT block, then the
6097 intersection of the post dominators of the successor blocks is
6098 defined as the null set. We can identify such blocks by the
6099 special value in the AUX field in the block structure. */
6100 if (b->aux == EXIT_BLOCK_PTR)
6102 /* Do not clear the aux field for blocks which are
6103 predecessors of the EXIT block. That way we never
6104 add them to the worklist again.
6106 The intersection of the post dominators of this block's
6107 successors is defined as the null set. */
6108 sbitmap_zero (temp_bitmap[bb]);
6110 else
6112 /* Clear the aux field of this block so it can be added to
6113 the worklist again if necessary. */
6114 b->aux = NULL;
6115 sbitmap_intersection_of_succs (temp_bitmap[bb],
6116 post_dominators, bb);
6119 /* Make sure each block always post dominates itself. */
6120 SET_BIT (temp_bitmap[bb], bb);
6122 /* If the out state of this block changed, then we need to
6123 add the predecessors of this block to the worklist if they
6124 are not already on the worklist. */
6125 if (sbitmap_a_and_b (post_dominators[bb],
6126 post_dominators[bb],
6127 temp_bitmap[bb]))
6129 for (e = b->pred; e; e = e->pred_next)
6131 if (!e->src->aux && e->src != ENTRY_BLOCK_PTR)
6133 *qin++ = e->src;
6134 if (qin >= workend)
6135 qin = worklist;
6136 qlen++;
6138 e->src->aux = e;
6145 free (worklist);
6146 free (temp_bitmap);
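/* For illustration only, a typical caller (compare flow_loops_find
   below) computes just the dominators like so:

       sbitmap *dom = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
       compute_flow_dominators (dom, NULL);
       ...
       sbitmap_vector_free (dom);

   Afterwards TEST_BIT (dom[b], a) is non-zero exactly when block A
   dominates block B.  */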
6149 /* Given DOMINATORS, compute the immediate dominators into IDOM. */
6151 void
6152 compute_immediate_dominators (idom, dominators)
6153 int *idom;
6154 sbitmap *dominators;
6156 sbitmap *tmp;
6157 int b;
6159 tmp = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
6161 /* Begin with tmp(n) = dom(n) - { n }. */
6162 for (b = n_basic_blocks; --b >= 0; )
6164 sbitmap_copy (tmp[b], dominators[b]);
6165 RESET_BIT (tmp[b], b);
6169 /* Subtract out all of our dominators' dominators. */
6169 for (b = n_basic_blocks; --b >= 0; )
6171 sbitmap tmp_b = tmp[b];
6172 int s;
6174 for (s = n_basic_blocks; --s >= 0; )
6175 if (TEST_BIT (tmp_b, s))
6176 sbitmap_difference (tmp_b, tmp_b, tmp[s]);
6179 /* Find the one bit set in the bitmap and put it in the output array. */
6180 for (b = n_basic_blocks; --b >= 0; )
6182 int t;
6183 EXECUTE_IF_SET_IN_SBITMAP (tmp[b], 0, t, { idom[b] = t; });
6186 sbitmap_vector_free (tmp);
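/* Sketch of the expected calling sequence, assuming DOM was filled in
   by compute_flow_dominators above:

       int *idom = (int *) xmalloc (n_basic_blocks * sizeof (int));
       compute_immediate_dominators (idom, dom);
       ...
       free (idom);

   On return idom[b] holds the index of block B's immediate dominator.  */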
6189 /* Recompute register set/reference counts immediately prior to register
6190 allocation.
6192 This avoids problems with set/reference counts changing to/from values
6193 which have special meanings to the register allocators.
6195 Additionally, the reference counts are the primary component used by the
6196 register allocators to prioritize pseudos for allocation to hard regs.
6197 More accurate reference counts generally lead to better register allocation.
6199 F is the first insn to be scanned.
6201 LOOP_STEP denotes how much loop_depth should be incremented per
6202 loop nesting level in order to increase the ref count more for
6203 references in a loop.
6205 It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and
6206 possibly other information which is used by the register allocators. */
6208 void
6209 recompute_reg_usage (f, loop_step)
6210 rtx f ATTRIBUTE_UNUSED;
6211 int loop_step ATTRIBUTE_UNUSED;
6213 allocate_reg_life_data ();
6214 update_life_info (NULL, UPDATE_LIFE_LOCAL, PROP_REG_INFO);
6217 /* Optionally removes all the REG_DEAD and REG_UNUSED notes from a set of
6218 blocks. If BLOCKS is NULL, assume the universal set. Returns a count
6219 of the number of registers that died. */
6221 int
6222 count_or_remove_death_notes (blocks, kill)
6223 sbitmap blocks;
6224 int kill;
6226 int i, count = 0;
6228 for (i = n_basic_blocks - 1; i >= 0; --i)
6230 basic_block bb;
6231 rtx insn;
6233 if (blocks && ! TEST_BIT (blocks, i))
6234 continue;
6236 bb = BASIC_BLOCK (i);
6238 for (insn = bb->head; ; insn = NEXT_INSN (insn))
6240 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
6242 rtx *pprev = &REG_NOTES (insn);
6243 rtx link = *pprev;
6245 while (link)
6247 switch (REG_NOTE_KIND (link))
6249 case REG_DEAD:
6250 if (GET_CODE (XEXP (link, 0)) == REG)
6252 rtx reg = XEXP (link, 0);
6253 int n;
6255 if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
6256 n = 1;
6257 else
6258 n = HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg));
6259 count += n;
6261 /* FALLTHRU */
6263 case REG_UNUSED:
6264 if (kill)
6266 rtx next = XEXP (link, 1);
6267 free_EXPR_LIST_node (link);
6268 *pprev = link = next;
6269 break;
6271 /* FALLTHRU */
6273 default:
6274 pprev = &XEXP (link, 1);
6275 link = *pprev;
6276 break;
6281 if (insn == bb->end)
6282 break;
6286 return count;
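/* Illustrative only: to count the death notes in the whole function
   without deleting them, pass the universal block set and KILL == 0:

       int ndead = count_or_remove_death_notes (NULL, 0);

   Passing KILL == 1 removes the notes while producing the same count.  */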
6289 /* Record INSN's block as BB. */
6291 void
6292 set_block_for_insn (insn, bb)
6293 rtx insn;
6294 basic_block bb;
6296 size_t uid = INSN_UID (insn);
6297 if (uid >= basic_block_for_insn->num_elements)
6299 int new_size;
6301 /* Add one-eighth the size so we don't keep calling xrealloc. */
6302 new_size = uid + (uid + 7) / 8;
6304 VARRAY_GROW (basic_block_for_insn, new_size);
6306 VARRAY_BB (basic_block_for_insn, uid) = bb;
6309 /* Record INSN's block number as BB. */
6310 /* ??? This has got to go. */
6312 void
6313 set_block_num (insn, bb)
6314 rtx insn;
6315 int bb;
6317 set_block_for_insn (insn, BASIC_BLOCK (bb));
6320 /* Verify the CFG consistency. This function checks some CFG invariants
6321 and aborts when something is wrong. The hope is that it will help to
6322 convert many optimization passes to preserve the CFG consistently.
6324 Currently it performs the following checks:
6326 - test head/end pointers
6327 - overlapping of basic blocks
6328 - edge list correctness
6329 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
6330 - tails of basic blocks (ensure that boundary is necessary)
6331 - scans body of the basic block for JUMP_INSN, CODE_LABEL
6332 and NOTE_INSN_BASIC_BLOCK
6333 - check that all insns are in the basic blocks
6334 (except the switch handling code, barriers and notes)
6335 - check that all returns are followed by barriers
6337 In the future it can be extended to check a lot of other stuff as well
6338 (reachability of basic blocks, life information, etc. etc.). */
6340 void
6341 verify_flow_info ()
6343 const int max_uid = get_max_uid ();
6344 const rtx rtx_first = get_insns ();
6345 basic_block *bb_info;
6346 rtx x;
6347 int i, last_bb_num_seen, num_bb_notes, err = 0;
6349 bb_info = (basic_block *) xcalloc (max_uid, sizeof (basic_block));
6351 /* First pass check head/end pointers and set bb_info array used by
6352 later passes. */
6353 for (i = n_basic_blocks - 1; i >= 0; i--)
6355 basic_block bb = BASIC_BLOCK (i);
6357 /* Check the head pointer and make sure that it is pointing into the
6358 insn list. */
6359 for (x = rtx_first; x != NULL_RTX; x = NEXT_INSN (x))
6360 if (x == bb->head)
6361 break;
6362 if (!x)
6364 error ("Head insn %d for block %d not found in the insn stream.",
6365 INSN_UID (bb->head), bb->index);
6366 err = 1;
6369 /* Check the end pointer and make sure that it is pointing into the
6370 insn list. */
6371 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
6373 if (bb_info[INSN_UID (x)] != NULL)
6375 error ("Insn %d is in multiple basic blocks (%d and %d)",
6376 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
6377 err = 1;
6379 bb_info[INSN_UID (x)] = bb;
6381 if (x == bb->end)
6382 break;
6384 if (!x)
6386 error ("End insn %d for block %d not found in the insn stream.",
6387 INSN_UID (bb->end), bb->index);
6388 err = 1;
6392 /* Now check the basic blocks (boundaries etc.) */
6393 for (i = n_basic_blocks - 1; i >= 0; i--)
6395 basic_block bb = BASIC_BLOCK (i);
6396 /* Check correctness of edge lists */
6397 edge e;
6399 e = bb->succ;
6400 while (e)
6402 if (e->src != bb)
6404 fprintf (stderr, "verify_flow_info: Basic block %d succ edge is corrupted\n",
6405 bb->index);
6406 fprintf (stderr, "Predecessor: ");
6407 dump_edge_info (stderr, e, 0);
6408 fprintf (stderr, "\nSuccessor: ");
6409 dump_edge_info (stderr, e, 1);
6410 fflush (stderr);
6411 err = 1;
6413 if (e->dest != EXIT_BLOCK_PTR)
6415 edge e2 = e->dest->pred;
6416 while (e2 && e2 != e)
6417 e2 = e2->pred_next;
6418 if (!e2)
6420 error ("Basic block %i edge lists are corrupted", bb->index);
6421 err = 1;
6424 e = e->succ_next;
6427 e = bb->pred;
6428 while (e)
6430 if (e->dest != bb)
6432 error ("Basic block %d pred edge is corrupted", bb->index);
6433 fputs ("Predecessor: ", stderr);
6434 dump_edge_info (stderr, e, 0);
6435 fputs ("\nSuccessor: ", stderr);
6436 dump_edge_info (stderr, e, 1);
6437 fputc ('\n', stderr);
6438 err = 1;
6440 if (e->src != ENTRY_BLOCK_PTR)
6442 edge e2 = e->src->succ;
6443 while (e2 && e2 != e)
6444 e2 = e2->succ_next;
6445 if (!e2)
6447 error ("Basic block %i edge lists are corrupted", bb->index);
6448 err = 1;
6451 e = e->pred_next;
6454 /* OK, pointers are correct. Now check the header of the basic
6455 block. It ought to contain an optional CODE_LABEL followed
6456 by the NOTE_INSN_BASIC_BLOCK note. */
6457 x = bb->head;
6458 if (GET_CODE (x) == CODE_LABEL)
6460 if (bb->end == x)
6462 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
6463 bb->index);
6464 err = 1;
6466 x = NEXT_INSN (x);
6468 if (GET_CODE (x) != NOTE
6469 || NOTE_LINE_NUMBER (x) != NOTE_INSN_BASIC_BLOCK
6470 || NOTE_BASIC_BLOCK (x) != bb)
6472 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d\n",
6473 bb->index);
6474 err = 1;
6477 if (bb->end == x)
6479 /* Do checks for empty blocks here */
6481 else
6483 x = NEXT_INSN (x);
6484 while (x)
6486 if (GET_CODE (x) == NOTE
6487 && NOTE_LINE_NUMBER (x) == NOTE_INSN_BASIC_BLOCK)
6489 error ("NOTE_INSN_BASIC_BLOCK %d in the middle of basic block %d",
6490 INSN_UID (x), bb->index);
6491 err = 1;
6494 if (x == bb->end)
6495 break;
6497 if (GET_CODE (x) == JUMP_INSN
6498 || GET_CODE (x) == CODE_LABEL
6499 || GET_CODE (x) == BARRIER)
6501 error ("In basic block %d:", bb->index);
6502 fatal_insn ("Flow control insn inside a basic block", x);
6505 x = NEXT_INSN (x);
6510 last_bb_num_seen = -1;
6511 num_bb_notes = 0;
6512 x = rtx_first;
6513 while (x)
6515 if (GET_CODE (x) == NOTE
6516 && NOTE_LINE_NUMBER (x) == NOTE_INSN_BASIC_BLOCK)
6518 basic_block bb = NOTE_BASIC_BLOCK (x);
6519 num_bb_notes++;
6520 if (bb->index != last_bb_num_seen + 1)
6521 fatal ("Basic blocks not numbered consecutively");
6522 last_bb_num_seen = bb->index;
6525 if (!bb_info[INSN_UID (x)])
6527 switch (GET_CODE (x))
6529 case BARRIER:
6530 case NOTE:
6531 break;
6533 case CODE_LABEL:
6534 /* An addr_vec is placed outside any basic block. */
6535 if (NEXT_INSN (x)
6536 && GET_CODE (NEXT_INSN (x)) == JUMP_INSN
6537 && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
6538 || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC))
6540 x = NEXT_INSN (x);
6543 /* But in any case, non-deletable labels can appear anywhere. */
6544 break;
6546 default:
6547 fatal_insn ("Insn outside basic block", x);
6551 if (GET_RTX_CLASS (GET_CODE (x)) == 'i'
6552 && GET_CODE (x) == JUMP_INSN
6553 && returnjump_p (x) && ! condjump_p (x)
6554 && ! (NEXT_INSN (x) && GET_CODE (NEXT_INSN (x)) == BARRIER))
6555 fatal_insn ("Return not followed by barrier", x);
6557 x = NEXT_INSN (x);
6560 if (num_bb_notes != n_basic_blocks)
6561 fatal ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
6562 num_bb_notes, n_basic_blocks);
6564 if (err)
6565 abort ();
6567 /* Clean up. */
6568 free (bb_info);
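/* Illustrative use: a pass that rewrites the insn chain or the edge
   lists can check itself by calling

       verify_flow_info ();

   right after its transformations; the function reports every problem
   it finds and then aborts if any were detected.  */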
6571 /* Functions to access an edge list with a vector representation.
6572 Enough data is kept such that given an index number, the
6573 pred and succ that edge represents can be determined, or
6574 given a pred and a succ, its index number can be returned.
6575 This allows algorithms which consume a lot of memory to
6576 represent the normally full matrix of edge (pred,succ) with a
6577 single indexed vector, edge (EDGE_INDEX (pred, succ)), with no
6578 wasted space in the client code due to sparse flow graphs. */
6580 /* This function initializes the edge list. Basically the entire
6581 flowgraph is processed, and all edges are assigned a number,
6582 and the data structure is filled in. */
6583 struct edge_list *
6584 create_edge_list ()
6586 struct edge_list *elist;
6587 edge e;
6588 int num_edges;
6589 int x;
6590 int block_count;
6592 block_count = n_basic_blocks + 2; /* Include the entry and exit blocks. */
6594 num_edges = 0;
6596 /* Determine the number of edges in the flow graph by counting successor
6597 edges on each basic block. */
6598 for (x = 0; x < n_basic_blocks; x++)
6600 basic_block bb = BASIC_BLOCK (x);
6602 for (e = bb->succ; e; e = e->succ_next)
6603 num_edges++;
6605 /* Don't forget successors of the entry block. */
6606 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6607 num_edges++;
6609 elist = (struct edge_list *) xmalloc (sizeof (struct edge_list));
6610 elist->num_blocks = block_count;
6611 elist->num_edges = num_edges;
6612 elist->index_to_edge = (edge *) xmalloc (sizeof (edge) * num_edges);
6614 num_edges = 0;
6616 /* Follow successors of the entry block, and register these edges. */
6617 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6619 elist->index_to_edge[num_edges] = e;
6620 num_edges++;
6623 for (x = 0; x < n_basic_blocks; x++)
6625 basic_block bb = BASIC_BLOCK (x);
6627 /* Follow all successors of blocks, and register these edges. */
6628 for (e = bb->succ; e; e = e->succ_next)
6630 elist->index_to_edge[num_edges] = e;
6631 num_edges++;
6634 return elist;
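/* A typical lifetime for an edge list (illustrative):

       struct edge_list *elist = create_edge_list ();
       int n = NUM_EDGES (elist);
       ...
       free_edge_list (elist);

   Between the two calls, INDEX_EDGE_PRED_BB (elist, i) and
   INDEX_EDGE_SUCC_BB (elist, i) recover the endpoints of edge I for
   0 <= i < n.  */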
6637 /* This function frees the memory associated with an edge list. */
6638 void
6639 free_edge_list (elist)
6640 struct edge_list *elist;
6642 if (elist)
6644 free (elist->index_to_edge);
6645 free (elist);
6649 /* This function provides debug output showing an edge list. */
6650 void
6651 print_edge_list (f, elist)
6652 FILE *f;
6653 struct edge_list *elist;
6655 int x;
6656 fprintf(f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
6657 elist->num_blocks - 2, elist->num_edges);
6659 for (x = 0; x < elist->num_edges; x++)
6661 fprintf (f, " %-4d - edge(", x);
6662 if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR)
6663 fprintf (f,"entry,");
6664 else
6665 fprintf (f,"%d,", INDEX_EDGE_PRED_BB (elist, x)->index);
6667 if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR)
6668 fprintf (f,"exit)\n");
6669 else
6670 fprintf (f,"%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
6674 /* This function provides an internal consistency check of an edge list,
6675 verifying that all edges are present, and that there are no
6676 extra edges. */
6677 void
6678 verify_edge_list (f, elist)
6679 FILE *f;
6680 struct edge_list *elist;
6682 int x, pred, succ, index;
6683 edge e;
6685 for (x = 0; x < n_basic_blocks; x++)
6687 basic_block bb = BASIC_BLOCK (x);
6689 for (e = bb->succ; e; e = e->succ_next)
6691 pred = e->src->index;
6692 succ = e->dest->index;
6693 index = EDGE_INDEX (elist, e->src, e->dest);
6694 if (index == EDGE_INDEX_NO_EDGE)
6696 fprintf (f, "*p* No index for edge from %d to %d\n",pred, succ);
6697 continue;
6699 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
6700 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
6701 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
6702 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
6703 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
6704 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
6707 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6709 pred = e->src->index;
6710 succ = e->dest->index;
6711 index = EDGE_INDEX (elist, e->src, e->dest);
6712 if (index == EDGE_INDEX_NO_EDGE)
6714 fprintf (f, "*p* No index for edge from %d to %d\n",pred, succ);
6715 continue;
6717 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
6718 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
6719 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
6720 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
6721 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
6722 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
6724 /* We've verified that all the edges are in the list; now let's make sure
6725 there are no spurious edges in the list. */
6727 for (pred = 0 ; pred < n_basic_blocks; pred++)
6728 for (succ = 0 ; succ < n_basic_blocks; succ++)
6730 basic_block p = BASIC_BLOCK (pred);
6731 basic_block s = BASIC_BLOCK (succ);
6733 int found_edge = 0;
6735 for (e = p->succ; e; e = e->succ_next)
6736 if (e->dest == s)
6738 found_edge = 1;
6739 break;
6741 for (e = s->pred; e; e = e->pred_next)
6742 if (e->src == p)
6744 found_edge = 1;
6745 break;
6747 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
6748 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6749 fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
6750 pred, succ);
6751 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
6752 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6753 fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
6754 pred, succ, EDGE_INDEX (elist, BASIC_BLOCK (pred),
6755 BASIC_BLOCK (succ)));
6757 for (succ = 0 ; succ < n_basic_blocks; succ++)
6759 basic_block p = ENTRY_BLOCK_PTR;
6760 basic_block s = BASIC_BLOCK (succ);
6762 int found_edge = 0;
6764 for (e = p->succ; e; e = e->succ_next)
6765 if (e->dest == s)
6767 found_edge = 1;
6768 break;
6770 for (e = s->pred; e; e = e->pred_next)
6771 if (e->src == p)
6773 found_edge = 1;
6774 break;
6776 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
6777 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6778 fprintf (f, "*** Edge (entry, %d) appears to not have an index\n",
6779 succ);
6780 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
6781 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6782 fprintf (f, "*** Edge (entry, %d) has index %d, but no edge exists\n",
6783 succ, EDGE_INDEX (elist, ENTRY_BLOCK_PTR,
6784 BASIC_BLOCK (succ)));
6786 for (pred = 0 ; pred < n_basic_blocks; pred++)
6788 basic_block p = BASIC_BLOCK (pred);
6789 basic_block s = EXIT_BLOCK_PTR;
6791 int found_edge = 0;
6793 for (e = p->succ; e; e = e->succ_next)
6794 if (e->dest == s)
6796 found_edge = 1;
6797 break;
6799 for (e = s->pred; e; e = e->pred_next)
6800 if (e->src == p)
6802 found_edge = 1;
6803 break;
6805 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
6806 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6807 fprintf (f, "*** Edge (%d, exit) appears to not have an index\n",
6808 pred);
6809 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
6810 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6811 fprintf (f, "*** Edge (%d, exit) has index %d, but no edge exists\n",
6812 pred, EDGE_INDEX (elist, BASIC_BLOCK (pred),
6813 EXIT_BLOCK_PTR));
6817 /* This routine will determine what, if any, edge there is between
6818 a specified predecessor and successor. */
6820 int
6821 find_edge_index (edge_list, pred, succ)
6822 struct edge_list *edge_list;
6823 basic_block pred, succ;
6825 int x;
6826 for (x = 0; x < NUM_EDGES (edge_list); x++)
6828 if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
6829 && INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
6830 return x;
6832 return (EDGE_INDEX_NO_EDGE);
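/* Illustrative: lookups in this file go through the EDGE_INDEX macro,
   and a lookup that may fail is written as

       int ix = EDGE_INDEX (elist, bb_a, bb_b);
       if (ix == EDGE_INDEX_NO_EDGE)
         ...

   where bb_a and bb_b stand for any two basic_block values.  */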
6835 /* This function will remove an edge from the flow graph. */
6836 void
6837 remove_edge (e)
6838 edge e;
6840 edge last_pred = NULL;
6841 edge last_succ = NULL;
6842 edge tmp;
6843 basic_block src, dest;
6844 src = e->src;
6845 dest = e->dest;
6846 for (tmp = src->succ; tmp && tmp != e; tmp = tmp->succ_next)
6847 last_succ = tmp;
6849 if (!tmp)
6850 abort ();
6851 if (last_succ)
6852 last_succ->succ_next = e->succ_next;
6853 else
6854 src->succ = e->succ_next;
6856 for (tmp = dest->pred; tmp && tmp != e; tmp = tmp->pred_next)
6857 last_pred = tmp;
6859 if (!tmp)
6860 abort ();
6861 if (last_pred)
6862 last_pred->pred_next = e->pred_next;
6863 else
6864 dest->pred = e->pred_next;
6866 n_edges--;
6867 free (e);
6870 /* This routine will remove any fake successor edges for a basic block.
6871 When the edge is removed, it is also removed from whatever predecessor
6872 list it is in. */
6873 static void
6874 remove_fake_successors (bb)
6875 basic_block bb;
6877 edge e;
6878 for (e = bb->succ; e ; )
6880 edge tmp = e;
6881 e = e->succ_next;
6882 if ((tmp->flags & EDGE_FAKE) == EDGE_FAKE)
6883 remove_edge (tmp);
6887 /* This routine will remove all fake edges from the flow graph. If
6888 we remove all fake successors, it will automatically remove all
6889 fake predecessors. */
6890 void
6891 remove_fake_edges ()
6893 int x;
6895 for (x = 0; x < n_basic_blocks; x++)
6896 remove_fake_successors (BASIC_BLOCK (x));
6898 /* We've handled all successors except the entry block's. */
6899 remove_fake_successors (ENTRY_BLOCK_PTR);
6902 /* This function will add a fake edge between any block which has no
6903 successors, and the exit block. Some data flow equations require these
6904 edges to exist. */
6905 void
6906 add_noreturn_fake_exit_edges ()
6908 int x;
6910 for (x = 0; x < n_basic_blocks; x++)
6911 if (BASIC_BLOCK (x)->succ == NULL)
6912 make_edge (NULL, BASIC_BLOCK (x), EXIT_BLOCK_PTR, EDGE_FAKE);
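/* The usual pattern (an illustrative sketch) brackets a dataflow
   problem that needs every block to reach the exit:

       add_noreturn_fake_exit_edges ();
       ...
       remove_fake_edges ();

   The fake edges satisfy the dataflow equations without changing the
   insns themselves.  */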
6915 /* Redirect an edge's successor from one block to another. */
6917 void
6918 redirect_edge_succ (e, new_succ)
6919 edge e;
6920 basic_block new_succ;
6922 edge *pe;
6924 /* Disconnect the edge from the old successor block. */
6925 for (pe = &e->dest->pred; *pe != e ; pe = &(*pe)->pred_next)
6926 continue;
6927 *pe = (*pe)->pred_next;
6929 /* Reconnect the edge to the new successor block. */
6930 e->pred_next = new_succ->pred;
6931 new_succ->pred = e;
6932 e->dest = new_succ;
6935 /* Redirect an edge's predecessor from one block to another. */
6937 void
6938 redirect_edge_pred (e, new_pred)
6939 edge e;
6940 basic_block new_pred;
6942 edge *pe;
6944 /* Disconnect the edge from the old predecessor block. */
6945 for (pe = &e->src->succ; *pe != e ; pe = &(*pe)->succ_next)
6946 continue;
6947 *pe = (*pe)->succ_next;
6949 /* Reconnect the edge to the new predecessor block. */
6950 e->succ_next = new_pred->succ;
6951 new_pred->succ = e;
6952 e->src = new_pred;
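/* Illustrative: retargeting an existing edge E so that it now runs
   from its present source to NEW_BB is a single call:

       redirect_edge_succ (e, new_bb);

   The edge object is reused; only e->dest and the affected pred lists
   are updated, so the edge flags survive.  */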
6955 /* Dump the list of basic blocks in the bitmap NODES. */
6956 static void
6957 flow_nodes_print (str, nodes, file)
6958 const char *str;
6959 const sbitmap nodes;
6960 FILE *file;
6962 int node;
6964 fprintf (file, "%s { ", str);
6965 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {fprintf (file, "%d ", node);});
6966 fputs ("}\n", file);
6970 /* Dump the list of exiting edges in the array EDGES. */
6971 static void
6972 flow_exits_print (str, edges, num_edges, file)
6973 const char *str;
6974 const edge *edges;
6975 int num_edges;
6976 FILE *file;
6978 int i;
6980 fprintf (file, "%s { ", str);
6981 for (i = 0; i < num_edges; i++)
6982 fprintf (file, "%d->%d ", edges[i]->src->index, edges[i]->dest->index);
6983 fputs ("}\n", file);
6987 /* Dump loop related CFG information. */
6988 static void
6989 flow_loops_cfg_dump (loops, file)
6990 const struct loops *loops;
6991 FILE *file;
6993 int i;
6995 if (! loops->num || ! file || ! loops->cfg.dom)
6996 return;
6998 for (i = 0; i < n_basic_blocks; i++)
7000 edge succ;
7002 fprintf (file, ";; %d succs { ", i);
7003 for (succ = BASIC_BLOCK (i)->succ; succ; succ = succ->succ_next)
7004 fprintf (file, "%d ", succ->dest->index);
7005 flow_nodes_print ("} dom", loops->cfg.dom[i], file);
7009 /* Dump the DFS node order. */
7010 if (loops->cfg.dfs_order)
7012 fputs (";; DFS order: ", file);
7013 for (i = 0; i < n_basic_blocks; i++)
7014 fprintf (file, "%d ", loops->cfg.dfs_order[i]);
7015 fputs ("\n", file);
7020 /* Return non-zero if the nodes of LOOP are a subset of OUTER. */
7021 static int
7022 flow_loop_nested_p (outer, loop)
7023 struct loop *outer;
7024 struct loop *loop;
7026 return sbitmap_a_subset_b_p (loop->nodes, outer->nodes);
7030 /* Dump the loop information specified by LOOPS to the stream FILE. */
7031 void
7032 flow_loops_dump (loops, file, verbose)
7033 const struct loops *loops;
7034 FILE *file;
7035 int verbose;
7037 int i;
7038 int num_loops;
7040 num_loops = loops->num;
7041 if (! num_loops || ! file)
7042 return;
7044 fprintf (file, ";; %d loops found, %d levels\n",
7045 num_loops, loops->levels);
7047 for (i = 0; i < num_loops; i++)
7049 struct loop *loop = &loops->array[i];
7051 fprintf (file, ";; loop %d (%d to %d):\n;; header %d, latch %d, pre-header %d, depth %d, level %d, outer %ld\n",
7052 i, INSN_UID (loop->header->head), INSN_UID (loop->latch->end),
7053 loop->header->index, loop->latch->index,
7054 loop->pre_header ? loop->pre_header->index : -1,
7055 loop->depth, loop->level,
7056 (long) (loop->outer ? (loop->outer - loops->array) : -1));
7057 fprintf (file, ";; %d", loop->num_nodes);
7058 flow_nodes_print (" nodes", loop->nodes, file);
7059 fprintf (file, ";; %d", loop->num_exits);
7060 flow_exits_print (" exits", loop->exits, loop->num_exits, file);
7062 if (loop->shared)
7064 int j;
7066 for (j = 0; j < i; j++)
7068 struct loop *oloop = &loops->array[j];
7070 if (loop->header == oloop->header)
7072 int disjoint;
7073 int smaller;
7075 smaller = loop->num_nodes < oloop->num_nodes;
7077 /* If the union of LOOP and OLOOP is different from
7078 the larger of LOOP and OLOOP then LOOP and OLOOP
7079 must be disjoint. */
7080 disjoint = ! flow_loop_nested_p (smaller ? loop : oloop,
7081 smaller ? oloop : loop);
7082 fprintf (file, ";; loop header %d shared by loops %d, %d %s\n",
7083 loop->header->index, i, j,
7084 disjoint ? "disjoint" : "nested");
7089 if (verbose)
7091 /* Print diagnostics to compare our concept of a loop with
7092 what the loop notes say. */
7093 if (GET_CODE (PREV_INSN (loop->first->head)) != NOTE
7094 || NOTE_LINE_NUMBER (PREV_INSN (loop->first->head))
7095 != NOTE_INSN_LOOP_BEG)
7096 fprintf (file, ";; No NOTE_INSN_LOOP_BEG at %d\n",
7097 INSN_UID (PREV_INSN (loop->first->head)));
7098 if (GET_CODE (NEXT_INSN (loop->last->end)) != NOTE
7099 || NOTE_LINE_NUMBER (NEXT_INSN (loop->last->end))
7100 != NOTE_INSN_LOOP_END)
7101 fprintf (file, ";; No NOTE_INSN_LOOP_END at %d\n",
7102 INSN_UID (NEXT_INSN (loop->last->end)));
7106 if (verbose)
7107 flow_loops_cfg_dump (loops, file);
7111 /* Free all the memory allocated for LOOPS. */
7112 void
7113 flow_loops_free (loops)
7114 struct loops *loops;
7116 if (loops->array)
7118 int i;
7120 if (! loops->num)
7121 abort ();
7123 /* Free the loop descriptors. */
7124 for (i = 0; i < loops->num; i++)
7126 struct loop *loop = &loops->array[i];
7128 if (loop->nodes)
7129 sbitmap_free (loop->nodes);
7130 if (loop->exits)
7131 free (loop->exits);
7133 free (loops->array);
7134 loops->array = NULL;
7136 if (loops->cfg.dom)
7137 sbitmap_vector_free (loops->cfg.dom);
7138 if (loops->cfg.dfs_order)
7139 free (loops->cfg.dfs_order);
7141 sbitmap_free (loops->shared_headers);
7146 /* Find the exits from the loop using the bitmap of loop nodes NODES
7147 and store them in the EXITS array. Return the number of exits from the
7148 loop. */
7149 static int
7150 flow_loop_exits_find (nodes, exits)
7151 const sbitmap nodes;
7152 edge **exits;
7154 edge e;
7155 int node;
7156 int num_exits;
7158 *exits = NULL;
7160 /* Check all nodes within the loop to see if there are any
7161 successors not in the loop. Note that a node may have multiple
7162 exiting edges. */
7163 num_exits = 0;
7164 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
7165 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
7167 basic_block dest = e->dest;
7169 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
7170 num_exits++;
7174 if (! num_exits)
7175 return 0;
7177 *exits = (edge *) xmalloc (num_exits * sizeof (edge));
7179 /* Store all exiting edges into an array. */
7180 num_exits = 0;
7181 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
7182 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
7184 basic_block dest = e->dest;
7186 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
7187 (*exits)[num_exits++] = e;
7191 return num_exits;
7195 /* Find the nodes contained within the loop with header HEADER and
7196 latch LATCH and store in NODES. Return the number of nodes within
7197 the loop. */
7198 static int
7199 flow_loop_nodes_find (header, latch, nodes)
7200 basic_block header;
7201 basic_block latch;
7202 sbitmap nodes;
7204 basic_block *stack;
7205 int sp;
7206 int num_nodes = 0;
7208 stack = (basic_block *) xmalloc (n_basic_blocks * sizeof (basic_block));
7209 sp = 0;
7211 /* Start with only the loop header in the set of loop nodes. */
7212 sbitmap_zero (nodes);
7213 SET_BIT (nodes, header->index);
7214 num_nodes++;
7215 header->loop_depth++;
7217 /* Push the loop latch on to the stack. */
7218 if (! TEST_BIT (nodes, latch->index))
7220 SET_BIT (nodes, latch->index);
7221 latch->loop_depth++;
7222 num_nodes++;
7223 stack[sp++] = latch;
7226 while (sp)
7228 basic_block node;
7229 edge e;
7231 node = stack[--sp];
7232 for (e = node->pred; e; e = e->pred_next)
7234 basic_block ancestor = e->src;
7236 /* If the ancestor is not marked as part of the loop, add it to
7237 the set of loop nodes and push it on to the stack. */
7238 if (ancestor != ENTRY_BLOCK_PTR
7239 && ! TEST_BIT (nodes, ancestor->index))
7241 SET_BIT (nodes, ancestor->index);
7242 ancestor->loop_depth++;
7243 num_nodes++;
7244 stack[sp++] = ancestor;
7248 free (stack);
7249 return num_nodes;
7253 /* Compute the depth first search order and store in the array
7254 DFS_ORDER, marking the nodes visited in VISITED. Returns the
7255 number of nodes visited. A depth first search tries to get as far
7256 away from the starting point as quickly as possible. */
7257 static int
7258 flow_depth_first_order_compute (dfs_order)
7259 int *dfs_order;
7261 edge *stack;
7262 int sp;
7263 int dfsnum = 0;
7264 sbitmap visited;
7266 /* Allocate stack for back-tracking up CFG. */
7267 stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
7268 sp = 0;
7270 /* Allocate bitmap to track nodes that have been visited. */
7271 visited = sbitmap_alloc (n_basic_blocks);
7273 /* None of the nodes in the CFG have been visited yet. */
7274 sbitmap_zero (visited);
7276 /* Push the first edge on to the stack. */
7277 stack[sp++] = ENTRY_BLOCK_PTR->succ;
7279 while (sp)
7281 edge e;
7282 basic_block src;
7283 basic_block dest;
7285 /* Look at the edge on the top of the stack. */
7286 e = stack[sp - 1];
7287 src = e->src;
7288 dest = e->dest;
7290 /* Check if the edge destination has been visited yet. */
7291 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
7293 /* Mark that we have visited the destination. */
7294 SET_BIT (visited, dest->index);
7296 if (dest->succ)
7298 /* Since the DEST node has been visited for the first
7299 time, check its successors. */
7300 stack[sp++] = dest->succ;
7302 else
7304 /* There are no successors for the DEST node so assign
7305 its DFS number. */
7306 dfs_order[n_basic_blocks - ++dfsnum] = dest->index;
7309 else
7311 if (! e->succ_next && src != ENTRY_BLOCK_PTR)
7313 /* There are no more successors for the SRC node
7314 so assign its DFS number. */
7315 dfs_order[n_basic_blocks - ++dfsnum] = src->index;
7318 if (e->succ_next)
7319 stack[sp - 1] = e->succ_next;
7320 else
7321 sp--;
7325 free (stack);
7326 sbitmap_free (visited);
7328 /* The number of nodes visited should not be greater than
7329 n_basic_blocks. */
7330 if (dfsnum > n_basic_blocks)
7331 abort ();
7333 /* If fewer nodes were visited, some blocks in the CFG are unreachable. */
7334 if (dfsnum < n_basic_blocks)
7335 abort ();
7336 return dfsnum;
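/* Typical use (illustrative; compare flow_loops_find below):

       int *dfs_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
       flow_depth_first_order_compute (dfs_order);
       ...
       free (dfs_order);

   The array is arranged so that scanning it from index 0 reaches the
   header of an outer natural loop before the headers of the loops
   nested inside it.  */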
7340 /* Return the block for the pre-header of the loop with header
7341 HEADER where DOM specifies the dominator information. Return NULL if
7342 there is no pre-header. */
7343 static basic_block
7344 flow_loop_pre_header_find (header, dom)
7345 basic_block header;
7346 const sbitmap *dom;
7348 basic_block pre_header;
7349 edge e;
7351 /* If block P is a predecessor of the header and is the only such
7352 block that the header does not dominate, then it is the pre-header. */
7353 pre_header = NULL;
7354 for (e = header->pred; e; e = e->pred_next)
7356 basic_block node = e->src;
7358 if (node != ENTRY_BLOCK_PTR
7359 && ! TEST_BIT (dom[node->index], header->index))
7361 if (pre_header == NULL)
7362 pre_header = node;
7363 else
7365 /* There are multiple edges into the header from outside
7366 the loop so there is no pre-header block. */
7367 pre_header = NULL;
7368 break;
7372 return pre_header;
7376 /* Add LOOP to the loop hierarchy tree where PREVLOOP was the loop
7377 previously added. The insertion algorithm assumes that the loops
7378 are added in the order found by a depth first search of the CFG. */
7379 static void
7380 flow_loop_tree_node_add (prevloop, loop)
7381 struct loop *prevloop;
7382 struct loop *loop;
7385 if (flow_loop_nested_p (prevloop, loop))
7387 prevloop->inner = loop;
7388 loop->outer = prevloop;
7389 return;
7392 while (prevloop->outer)
7394 if (flow_loop_nested_p (prevloop->outer, loop))
7396 prevloop->next = loop;
7397 loop->outer = prevloop->outer;
7398 return;
7400 prevloop = prevloop->outer;
7403 prevloop->next = loop;
7404 loop->outer = NULL;
7408 /* Build the loop hierarchy tree for LOOPS. */
7409 static void
7410 flow_loops_tree_build (loops)
7411 struct loops *loops;
7413 int i;
7414 int num_loops;
7416 num_loops = loops->num;
7417 if (! num_loops)
7418 return;
7420 /* Root the loop hierarchy tree with the first loop found.
7421 Since we used a depth first search this should be the
7422 outermost loop. */
7423 loops->tree = &loops->array[0];
7424 loops->tree->outer = loops->tree->inner = loops->tree->next = NULL;
7426 /* Add the remaining loops to the tree. */
7427 for (i = 1; i < num_loops; i++)
7428 flow_loop_tree_node_add (&loops->array[i - 1], &loops->array[i]);
7432 /* Helper function to compute loop nesting depth and enclosed loop level
7433 for the natural loop specified by LOOP at the loop depth DEPTH.
7434 Returns the loop level. */
7435 static int
7436 flow_loop_level_compute (loop, depth)
7437 struct loop *loop;
7438 int depth;
7440 struct loop *inner;
7441 int level = 1;
7443 if (! loop)
7444 return 0;
7446 /* Traverse loop tree assigning depth and computing level as the
7447 maximum level of all the inner loops of this loop. The loop
7448 level is equivalent to the height of the loop in the loop tree
7449 and corresponds to the number of enclosed loop levels (including
7450 itself). */
7451 for (inner = loop->inner; inner; inner = inner->next)
7453 int ilevel;
7455 ilevel = flow_loop_level_compute (inner, depth + 1) + 1;
7457 if (ilevel > level)
7458 level = ilevel;
7460 loop->level = level;
7461 loop->depth = depth;
7462 return level;
7466 /* Compute the loop nesting depth and enclosed loop level for the loop
7467 hierarchy tree specified by LOOPS. Return the maximum enclosed loop
7468 level. */
7470 static int
7471 flow_loops_level_compute (loops)
7472 struct loops *loops;
7474 struct loop *loop;
7475 int level;
7476 int levels = 0;
7478 /* Traverse all the outer level loops. */
7479 for (loop = loops->tree; loop; loop = loop->next)
7481 level = flow_loop_level_compute (loop, 1);
7482 if (level > levels)
7483 levels = level;
7485 return levels;
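/* For intuition (illustrative): in the nest

       for (...)            depth 1, level 2
         for (...)          depth 2, level 1

   depth counts the loops enclosing a loop (including itself) while
   level counts the loop levels it encloses (including itself).  */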
7489 /* Find all the natural loops in the function and save in LOOPS structure
7490 and recalculate loop_depth information in basic block structures.
7491 Return the number of natural loops found. */
7493 int
7494 flow_loops_find (loops)
7495 struct loops *loops;
7497 int i;
7498 int b;
7499 int num_loops;
7500 edge e;
7501 sbitmap headers;
7502 sbitmap *dom;
7503 int *dfs_order;
7505 loops->num = 0;
7506 loops->array = NULL;
7507 loops->tree = NULL;
7508 dfs_order = NULL;
7510 /* Taking care of this degenerate case makes the rest of
7511 this code simpler. */
7512 if (n_basic_blocks == 0)
7513 return 0;
7515 /* Compute the dominators. */
7516 dom = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
7517 compute_flow_dominators (dom, NULL);
7519 /* Count the number of loop edges (back edges). This should be the
7520 same as the number of natural loops. Also clear the loop_depth
7521 fields; as we work from inner to outer in a loop nest we call
7522 flow_loop_nodes_find, which will increment loop_depth for nodes
7523 within the current loop, which happens to enclose inner loops. */
7525 num_loops = 0;
7526 for (b = 0; b < n_basic_blocks; b++)
7528 BASIC_BLOCK (b)->loop_depth = 0;
7529 for (e = BASIC_BLOCK (b)->pred; e; e = e->pred_next)
7531 basic_block latch = e->src;
7533 /* Look for back edges where a predecessor is dominated
7534 by this block. A natural loop has a single entry
7535 node (header) that dominates all the nodes in the
7536 loop. It also has a single back edge to the header
7537 from a latch node. Note that multiple natural loops
7538 may share the same header. */
7539 if (latch != ENTRY_BLOCK_PTR && TEST_BIT (dom[latch->index], b))
7540 num_loops++;
7544 if (num_loops)
7546 /* Compute depth first search order of the CFG so that outer
7547 natural loops will be found before inner natural loops. */
7548 dfs_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
7549 flow_depth_first_order_compute (dfs_order);
7551 /* Allocate loop structures. */
7552 loops->array
7553 = (struct loop *) xcalloc (num_loops, sizeof (struct loop));
7555 headers = sbitmap_alloc (n_basic_blocks);
7556 sbitmap_zero (headers);
7558 loops->shared_headers = sbitmap_alloc (n_basic_blocks);
7559 sbitmap_zero (loops->shared_headers);
7561 /* Find and record information about all the natural loops
7562 in the CFG. */
7563 num_loops = 0;
7564 for (b = 0; b < n_basic_blocks; b++)
7566 basic_block header;
7568 /* Search the nodes of the CFG in DFS order so that we can find
7569 outer loops first. */
7570 header = BASIC_BLOCK (dfs_order[b]);
7572 /* Look for all the possible latch blocks for this header. */
7573 for (e = header->pred; e; e = e->pred_next)
7575 basic_block latch = e->src;
7577 /* Look for back edges where a predecessor is dominated
7578 by this block. A natural loop has a single entry
7579 node (header) that dominates all the nodes in the
7580 loop. It also has a single back edge to the header
7581 from a latch node. Note that multiple natural loops
7582 may share the same header. */
7583 if (latch != ENTRY_BLOCK_PTR
7584 && TEST_BIT (dom[latch->index], header->index))
7586 struct loop *loop;
7588 loop = loops->array + num_loops;
7590 loop->header = header;
7591 loop->latch = latch;
7593 /* Keep track of blocks that are loop headers so
7594 that we can tell which loops should be merged. */
7595 if (TEST_BIT (headers, header->index))
7596 SET_BIT (loops->shared_headers, header->index);
7597 SET_BIT (headers, header->index);
7599 /* Find nodes contained within the loop. */
7600 loop->nodes = sbitmap_alloc (n_basic_blocks);
7601 loop->num_nodes
7602 = flow_loop_nodes_find (header, latch, loop->nodes);
7604 /* Compute first and last blocks within the loop.
7605 These are often the same as the loop header and
7606 loop latch respectively, but this is not always
7607 the case. */
7608 loop->first
7609 = BASIC_BLOCK (sbitmap_first_set_bit (loop->nodes));
7610 loop->last
7611 = BASIC_BLOCK (sbitmap_last_set_bit (loop->nodes));
7613 /* Find edges which exit the loop. Note that a node
7614 may have several exit edges. */
7615 loop->num_exits
7616 = flow_loop_exits_find (loop->nodes, &loop->exits);
7618 /* Look to see if the loop has a pre-header node. */
7619 loop->pre_header
7620 = flow_loop_pre_header_find (header, dom);
7622 num_loops++;
7627 /* Natural loops with shared headers may either be disjoint or
7628 nested. Disjoint loops with shared headers cannot be inner
7629 loops and should be merged. For now just mark loops that share
7630 headers. */
7631 for (i = 0; i < num_loops; i++)
7632 if (TEST_BIT (loops->shared_headers, loops->array[i].header->index))
7633 loops->array[i].shared = 1;
7635 sbitmap_free (headers);
7638 loops->num = num_loops;
7640 /* Save CFG derived information to avoid recomputing it. */
7641 loops->cfg.dom = dom;
7642 loops->cfg.dfs_order = dfs_order;
7644 /* Build the loop hierarchy tree. */
7645 flow_loops_tree_build (loops);
7647 /* Assign the loop nesting depth and enclosed loop level for each
7648 loop. */
7649 loops->levels = flow_loops_level_compute (loops);
7651 return num_loops;
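/* An illustrative driver, assuming some FILE * dump stream is at hand:

       struct loops loops;
       if (flow_loops_find (&loops))
         {
           flow_loops_dump (&loops, dump_stream, 0);
           flow_loops_free (&loops);
         }

   flow_loops_free should only be called when loops were actually
   found, since it unconditionally releases the shared_headers bitmap,
   which is allocated only in that case.  */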
7655 /* Return non-zero if edge E enters header of LOOP from outside of LOOP. */
7657 int
7658 flow_loop_outside_edge_p (loop, e)
7659 const struct loop *loop;
7660 edge e;
7662 if (e->dest != loop->header)
7663 abort ();
7664 return (e->src == ENTRY_BLOCK_PTR)
7665 || ! TEST_BIT (loop->nodes, e->src->index);
7669 /* Clear LOG_LINKS fields of insns in a chain. */
7671 void
7672 clear_log_links (insns)
7673 rtx insns;
7675 rtx i;
7676 for (i = insns; i; i = NEXT_INSN (i))
7677 if (GET_RTX_CLASS (GET_CODE (i)) == 'i')
7678 LOG_LINKS (i) = 0;
7681 /* Given a register bitmap, turn on the bits in a HARD_REG_SET that
7682 correspond to the hard registers, if any, set in that map. This
7683 could be done far more efficiently by having all sorts of special-cases
7684 with moving single words, but probably isn't worth the trouble. */
7686 void
7687 reg_set_to_hard_reg_set (to, from)
7688 HARD_REG_SET *to;
7689 bitmap from;
7691 int i;
7693 EXECUTE_IF_SET_IN_BITMAP
7694 (from, 0, i,
7696 if (i >= FIRST_PSEUDO_REGISTER)
7697 return;
7698 SET_HARD_REG_BIT (*to, i);