/* Data flow analysis for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
/* This file contains the data flow analysis pass of the compiler.  It
   computes data flow information which tells combine_instructions
   which insns to consider combining and controls register allocation.

   Additional data flow information that is too bulky to record is
   generated during the analysis, and is used at that time to create
   autoincrement and autodecrement addressing.

   The first step is dividing the function into basic blocks.
   find_basic_blocks does this.  Then life_analysis determines
   where each register is live and where it is dead.

   ** find_basic_blocks **

   find_basic_blocks divides the current function's rtl into basic
   blocks and constructs the CFG.  The blocks are recorded in the
   basic_block_info array; the CFG exists in the edge structures
   referenced by the blocks.

   find_basic_blocks also finds any unreachable loops and deletes them.

   ** life_analysis **

   life_analysis is called immediately after find_basic_blocks.
   It uses the basic block information to determine where each
   hard or pseudo register is live.

   ** live-register info **

   The information about where each register is live is in two parts:
   the REG_NOTES of insns, and the vector basic_block->global_live_at_start.

   basic_block->global_live_at_start has an element for each basic
   block, and the element is a bit-vector with a bit for each hard or
   pseudo register.  The bit is 1 if the register is live at the
   beginning of the basic block.
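   For example, a pass can ask whether a (hypothetical) pseudo 64 is
   live on entry to block BB with
   REGNO_REG_SET_P (bb->global_live_at_start, 64); the test is nonzero
   exactly when the bit for register 64 is set.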
   Two types of elements can be added to an insn's REG_NOTES.
   A REG_DEAD note is added to an insn's REG_NOTES for any register
   that meets both of two conditions:  The value in the register is not
   needed in subsequent insns and the insn does not replace the value in
   the register (in the case of multi-word hard registers, the value in
   each register must be replaced by the insn to avoid a REG_DEAD note).

   In the vast majority of cases, an object in a REG_DEAD note will be
   used somewhere in the insn.  The (rare) exception to this is if an
   insn uses a multi-word hard register and only some of the registers are
   needed in subsequent insns.  In that case, REG_DEAD notes will be
   provided for those hard registers that are not subsequently needed.
   Partial REG_DEAD notes of this type do not occur when an insn sets
   only some of the hard registers used in such a multi-word operand;
   omitting REG_DEAD notes for objects stored in an insn is optional and
   the desire to do so does not justify the complexity of the partial
   REG_DEAD notes.
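   As a concrete (hypothetical) illustration, in

	(insn 42 41 43 (set (reg:SI 65)
		(plus:SI (reg:SI 64) (const_int 1))) -1 (nil)
	    (expr_list:REG_DEAD (reg:SI 64) (nil)))

   pseudo 64 is used by insn 42 and its value is not needed by any
   later insn, so insn 42 carries a REG_DEAD note for it.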
   REG_UNUSED notes are added for each register that is set by the insn
   but is unused subsequently (if every register set by the insn is unused
   and the insn does not reference memory or have some other side-effect,
   the insn is deleted instead).  If only part of a multi-word hard
   register is used in a subsequent insn, REG_UNUSED notes are made for
   the parts that will not be used.
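   Continuing the hypothetical example above: if the value stored into
   pseudo 65 were never read, but the insn had some other side effect
   keeping it alive, the note would instead be
   (expr_list:REG_UNUSED (reg:SI 65) (nil)).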
   To determine which registers are live after any insn, one can
   start from the beginning of the basic block and scan insns, noting
   which registers are set by each insn and which die there.
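   A minimal sketch of that scan (not the real propagate_block logic;
   it ignores PARALLELs, CLOBBERs, REG_UNUSED notes and multi-word
   hard registers):

	COPY_REG_SET (live, bb->global_live_at_start);
	for (insn = bb->head; ; insn = NEXT_INSN (insn))
	  {
	    if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	      {
		rtx note;

		for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
		  if (REG_NOTE_KIND (note) == REG_DEAD)
		    CLEAR_REGNO_REG_SET (live, REGNO (XEXP (note, 0)));

		if (GET_CODE (PATTERN (insn)) == SET
		    && GET_CODE (SET_DEST (PATTERN (insn))) == REG)
		  SET_REGNO_REG_SET (live, REGNO (SET_DEST (PATTERN (insn))));
	      }
	    if (insn == bb->end)
	      break;
	  }

   Registers named in REG_DEAD notes die at the insn; a register stored
   by a simple SET becomes live.  After each iteration LIVE is the set
   of registers live just after INSN.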
   ** Other actions of life_analysis **

   life_analysis sets up the LOG_LINKS fields of insns because the
   information needed to do so is readily available.

   life_analysis deletes insns whose only effect is to store a value
   that is never used.

   life_analysis notices cases where a reference to a register as
   a memory address can be combined with a preceding or following
   incrementation or decrementation of the register.  The separate
   instruction to increment or decrement is deleted and the address
   is changed to a POST_INC or similar rtx.
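   For example (a hypothetical sketch; the exact insns vary by target),
   the pair

	(set (mem:SI (reg:SI 80)) (reg:SI 81))
	(set (reg:SI 80) (plus:SI (reg:SI 80) (const_int 4)))

   can be rewritten as the single insn

	(set (mem:SI (post_inc:SI (reg:SI 80))) (reg:SI 81))

   with the separate increment insn deleted.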
   Each time an incrementing or decrementing address is created,
   a REG_INC element is added to the insn's REG_NOTES list.

   life_analysis fills in certain vectors containing information about
   register usage: REG_N_REFS, REG_N_DEATHS, REG_N_SETS, REG_LIVE_LENGTH,
   REG_N_CALLS_CROSSED and REG_BASIC_BLOCK.

   life_analysis sets current_function_sp_is_unchanging if the function
   doesn't modify the stack pointer.  */
/* TODO:

   Split out from life_analysis:
	- local property discovery (bb->local_live, bb->local_set)
	- global property computation
	- log links creation
	- pre/post modify transformation
*/
#include "config.h"
#include "system.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-config.h"
#include "regs.h"
#include "flags.h"
#include "output.h"
#include "function.h"
#include "except.h"
#include "toplev.h"
#include "recog.h"
#include "insn-flags.h"
#include "expr.h"

#include "obstack.h"
#include "splay-tree.h"

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free
/* EXIT_IGNORE_STACK should be nonzero if, when returning from a function,
   the stack pointer does not matter.  The value is tested only in
   functions that have frame pointers.
   No definition is equivalent to always zero.  */
#ifndef EXIT_IGNORE_STACK
#define EXIT_IGNORE_STACK 0
#endif

#ifndef HAVE_epilogue
#define HAVE_epilogue 0
#endif
#ifndef HAVE_prologue
#define HAVE_prologue 0
#endif
#ifndef HAVE_sibcall_epilogue
#define HAVE_sibcall_epilogue 0
#endif
/* The contents of the current function definition are allocated
   in this obstack, and all are freed at the end of the function.
   For top-level functions, this is temporary_obstack.
   Separate obstacks are made for nested functions.  */

extern struct obstack *function_obstack;
/* Number of basic blocks in the current function.  */

int n_basic_blocks;

/* Number of edges in the current function.  */

int n_edges;

/* The basic block array.  */

varray_type basic_block_info;

/* The special entry and exit blocks.  */

struct basic_block_def entry_exit_blocks[2]
= {{NULL,			/* head */
    NULL,			/* end */
    NULL,			/* pred */
    NULL,			/* succ */
    NULL,			/* local_set */
    NULL,			/* global_live_at_start */
    NULL,			/* global_live_at_end */
    NULL,			/* aux */
    ENTRY_BLOCK,		/* index */
    0,				/* loop_depth */
    -1, -1,			/* eh_beg, eh_end */
    0				/* count */
   },
   {
    NULL,			/* head */
    NULL,			/* end */
    NULL,			/* pred */
    NULL,			/* succ */
    NULL,			/* local_set */
    NULL,			/* global_live_at_start */
    NULL,			/* global_live_at_end */
    NULL,			/* aux */
    EXIT_BLOCK,			/* index */
    0,				/* loop_depth */
    -1, -1,			/* eh_beg, eh_end */
    0				/* count */
   }};

/* Nonzero if the second flow pass has completed.  */
int flow2_completed;

/* Maximum register number used in this function, plus one.  */

int max_regno;

/* Indexed by n, giving various register information */

varray_type reg_n_info;

/* Size of a regset for the current function,
   in (1) bytes and (2) elements.  */

int regset_bytes;
int regset_size;

/* Regset of regs live when calls to `setjmp'-like functions happen.  */
/* ??? Does this exist only for the setjmp-clobbered warning message?  */

regset regs_live_at_setjmp;

/* List made of EXPR_LIST rtx's which gives pairs of pseudo registers
   that have to go in the same hard reg.
   The first two regs in the list are a pair, and the next two
   are another pair, etc.  */
rtx regs_may_share;
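/* For instance (hypothetical pseudos), a list printed as
	(expr_list (reg 66) (expr_list (reg 65) (nil)))
   asks that pseudos 66 and 65 end up in the same hard register.  */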
/* Set of registers that may be eliminable.  These are handled specially
   in updating regs_ever_live.  */

static HARD_REG_SET elim_reg_set;

/* The basic block structure for every insn, indexed by uid.  */

varray_type basic_block_for_insn;

/* The labels mentioned in non-jump rtl.  Valid during find_basic_blocks.  */
/* ??? Should probably be using LABEL_NUSES instead.  It would take a
   bit of surgery to be able to use or co-opt the routines in jump.  */

static rtx label_value_list;
static rtx tail_recursion_label_list;

/* Holds information for tracking conditional register life information.  */
struct reg_cond_life_info
{
  /* An EXPR_LIST of conditions under which a register is dead.  */
  rtx condition;

  /* ??? Could store mask of bytes that are dead, so that we could finally
     track lifetimes of multi-word registers accessed via subregs.  */
};

/* For use in communicating between propagate_block and its subroutines.
   Holds all information needed to compute life and def-use information.  */

struct propagate_block_info
{
  /* The basic block we're considering.  */
  basic_block bb;

  /* Bit N is set if register N is conditionally or unconditionally live.  */
  regset reg_live;

  /* Bit N is set if register N is set this insn.  */
  regset new_set;

  /* Element N is the next insn that uses (hard or pseudo) register N
     within the current basic block; or zero, if there is no such insn.  */
  rtx *reg_next_use;

  /* Contains a list of all the MEMs we are tracking for dead store
     elimination.  */
  rtx mem_set_list;

  /* If non-null, record the set of registers set in the basic block.  */
  regset local_set;

#ifdef HAVE_conditional_execution
  /* Indexed by register number, holds a reg_cond_life_info for each
     register that is not unconditionally live or dead.  */
  splay_tree reg_cond_dead;

  /* Bit N is set if register N is in an expression in reg_cond_dead.  */
  regset reg_cond_reg;
#endif

  /* Non-zero if the value of CC0 is live.  */
  int cc0_live;

  /* Flags controlling the set of information propagate_block collects.  */
  int flags;
};
/* Forward declarations */
static int count_basic_blocks		PARAMS ((rtx));
static void find_basic_blocks_1		PARAMS ((rtx));
static rtx find_label_refs		PARAMS ((rtx, rtx));
static void clear_edges			PARAMS ((void));
static void make_edges			PARAMS ((rtx));
static void make_label_edge		PARAMS ((sbitmap *, basic_block,
						 rtx, int));
static void make_eh_edge		PARAMS ((sbitmap *, eh_nesting_info *,
						 basic_block, rtx, int));
static void mark_critical_edges		PARAMS ((void));
static void move_stray_eh_region_notes	PARAMS ((void));
static void record_active_eh_regions	PARAMS ((rtx));

static void commit_one_edge_insertion	PARAMS ((edge));

static void delete_unreachable_blocks	PARAMS ((void));
static void delete_eh_regions		PARAMS ((void));
static int can_delete_note_p		PARAMS ((rtx));
static void expunge_block		PARAMS ((basic_block));
static int can_delete_label_p		PARAMS ((rtx));
static int tail_recursion_label_p	PARAMS ((rtx));
static int merge_blocks_move_predecessor_nojumps PARAMS ((basic_block,
							  basic_block));
static int merge_blocks_move_successor_nojumps PARAMS ((basic_block,
							basic_block));
static int merge_blocks			PARAMS ((edge,basic_block,basic_block));
static void try_merge_blocks		PARAMS ((void));
static void tidy_fallthru_edges		PARAMS ((void));
static int verify_wide_reg_1		PARAMS ((rtx *, void *));
static void verify_wide_reg		PARAMS ((int, rtx, rtx));
static void verify_local_live_at_start	PARAMS ((regset, basic_block));
static int set_noop_p			PARAMS ((rtx));
static int noop_move_p			PARAMS ((rtx));
static void delete_noop_moves		PARAMS ((rtx));
static void notice_stack_pointer_modification_1 PARAMS ((rtx, rtx, void *));
static void notice_stack_pointer_modification PARAMS ((rtx));
static void mark_reg			PARAMS ((rtx, void *));
static void mark_regs_live_at_end	PARAMS ((regset));
static int set_phi_alternative_reg	PARAMS ((rtx, int, int, void *));
static void calculate_global_regs_live	PARAMS ((sbitmap, sbitmap, int));
static void propagate_block_delete_insn PARAMS ((basic_block, rtx));
static rtx propagate_block_delete_libcall PARAMS ((basic_block, rtx, rtx));
static int insn_dead_p			PARAMS ((struct propagate_block_info *,
						 rtx, int, rtx));
static int libcall_dead_p		PARAMS ((struct propagate_block_info *,
						 rtx, rtx, rtx));
static void mark_set_regs		PARAMS ((struct propagate_block_info *,
						 rtx, rtx));
static void mark_set_1			PARAMS ((struct propagate_block_info *,
						 enum rtx_code, rtx, rtx,
						 rtx, int));
#ifdef HAVE_conditional_execution
static int mark_regno_cond_dead		PARAMS ((struct propagate_block_info *,
						 int, rtx));
static void free_reg_cond_life_info	PARAMS ((splay_tree_value));
static int flush_reg_cond_reg_1		PARAMS ((splay_tree_node, void *));
static void flush_reg_cond_reg		PARAMS ((struct propagate_block_info *,
						 int));
static rtx ior_reg_cond			PARAMS ((rtx, rtx));
static rtx not_reg_cond			PARAMS ((rtx));
static rtx nand_reg_cond		PARAMS ((rtx, rtx));
#endif
#ifdef AUTO_INC_DEC
static void find_auto_inc		PARAMS ((struct propagate_block_info *,
						 rtx, rtx));
static int try_pre_increment_1		PARAMS ((struct propagate_block_info *,
						 rtx));
static int try_pre_increment		PARAMS ((rtx, rtx, HOST_WIDE_INT));
#endif
static void mark_used_reg		PARAMS ((struct propagate_block_info *,
						 rtx, rtx, rtx));
static void mark_used_regs		PARAMS ((struct propagate_block_info *,
						 rtx, rtx, rtx));
void dump_flow_info			PARAMS ((FILE *));
void debug_flow_info			PARAMS ((void));
static void dump_edge_info		PARAMS ((FILE *, edge, int));

static void invalidate_mems_from_autoinc PARAMS ((struct propagate_block_info *,
						  rtx));
static void remove_fake_successors	PARAMS ((basic_block));
static void flow_nodes_print		PARAMS ((const char *, const sbitmap, FILE *));
static void flow_exits_print		PARAMS ((const char *, const edge *, int, FILE *));
static void flow_loops_cfg_dump		PARAMS ((const struct loops *, FILE *));
static int flow_loop_nested_p		PARAMS ((struct loop *, struct loop *));
static int flow_loop_exits_find		PARAMS ((const sbitmap, edge **));
static int flow_loop_nodes_find		PARAMS ((basic_block, basic_block, sbitmap));
static int flow_depth_first_order_compute PARAMS ((int *));
static basic_block flow_loop_pre_header_find PARAMS ((basic_block, const sbitmap *));
static void flow_loop_tree_node_add	PARAMS ((struct loop *, struct loop *));
static void flow_loops_tree_build	PARAMS ((struct loops *));
static int flow_loop_level_compute	PARAMS ((struct loop *, int));
static int flow_loops_level_compute	PARAMS ((struct loops *));
/* Find basic blocks of the current function.
   F is the first insn of the function and NREGS the number of register
   numbers in use.  */

void
find_basic_blocks (f, nregs, file)
     rtx f;
     int nregs ATTRIBUTE_UNUSED;
     FILE *file ATTRIBUTE_UNUSED;
{
  int max_uid;

  /* Flush out existing data.  */
  if (basic_block_info != NULL)
    {
      int i;

      clear_edges ();

      /* Clear bb->aux on all extant basic blocks.  We'll use this as a
	 tag for reuse during create_basic_block, just in case some pass
	 copies around basic block notes improperly.  */
      for (i = 0; i < n_basic_blocks; ++i)
	BASIC_BLOCK (i)->aux = NULL;

      VARRAY_FREE (basic_block_info);
    }

  n_basic_blocks = count_basic_blocks (f);

  /* Size the basic block table.  The actual structures will be allocated
     by find_basic_blocks_1, since we want to keep the structure pointers
     stable across calls to find_basic_blocks.  */
  /* ??? This whole issue would be much simpler if we called find_basic_blocks
     exactly once, and thereafter we don't have a single long chain of
     instructions at all until close to the end of compilation when we
     actually lay them out.  */

  VARRAY_BB_INIT (basic_block_info, n_basic_blocks, "basic_block_info");

  find_basic_blocks_1 (f);

  /* Record the block to which an insn belongs.  */
  /* ??? This should be done another way, by which (perhaps) a label is
     tagged directly with the basic block that it starts.  It is used for
     more than that currently, but IMO that is the only valid use.  */

  max_uid = get_max_uid ();
#ifdef AUTO_INC_DEC
  /* Leave space for insns life_analysis makes in some cases for auto-inc.
     These cases are rare, so we don't need too much space.  */
  max_uid += max_uid / 10;
#endif

  compute_bb_for_insn (max_uid);

  /* Discover the edges of our cfg.  */
  record_active_eh_regions (f);
  make_edges (label_value_list);

  /* Do very simple cleanup now, for the benefit of code that runs between
     here and cleanup_cfg, e.g. thread_prologue_and_epilogue_insns.  */
  tidy_fallthru_edges ();

  mark_critical_edges ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
}
/* Count the basic blocks of the function.  */

static int
count_basic_blocks (f)
     rtx f;
{
  register rtx insn;
  register RTX_CODE prev_code;
  register int count = 0;
  int eh_region = 0;
  int call_had_abnormal_edge = 0;

  prev_code = JUMP_INSN;
  for (insn = f; insn; insn = NEXT_INSN (insn))
    {
      register RTX_CODE code = GET_CODE (insn);

      if (code == CODE_LABEL
	  || (GET_RTX_CLASS (code) == 'i'
	      && (prev_code == JUMP_INSN
		  || prev_code == BARRIER
		  || (prev_code == CALL_INSN && call_had_abnormal_edge))))
	count++;

      /* Record whether this call created an edge.  */
      if (code == CALL_INSN)
	{
	  rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
	  int region = (note ? INTVAL (XEXP (note, 0)) : 1);

	  call_had_abnormal_edge = 0;

	  /* If there is an EH region or rethrow, we have an edge.  */
	  if ((eh_region && region > 0)
	      || find_reg_note (insn, REG_EH_RETHROW, NULL_RTX))
	    call_had_abnormal_edge = 1;
	  else if (nonlocal_goto_handler_labels && region >= 0)
	    /* If there is a nonlocal goto label and the specified
	       region number isn't -1, we have an edge.  (0 means
	       no throw, but might have a nonlocal goto).  */
	    call_had_abnormal_edge = 1;
	}

      if (code != NOTE)
	prev_code = code;
      else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
	++eh_region;
      else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
	--eh_region;
    }

  /* The rest of the compiler works a bit smoother when we don't have to
     check for the edge case of do-nothing functions with no basic blocks.  */
  if (count == 0)
    {
      emit_insn (gen_rtx_USE (VOIDmode, const0_rtx));
      count = 1;
    }

  return count;
}
/* Scan a list of insns for labels referred to other than by jumps.
   This is used to scan the alternatives of a call placeholder.  */

static rtx
find_label_refs (f, lvl)
     rtx f;
     rtx lvl;
{
  rtx insn;

  for (insn = f; insn; insn = NEXT_INSN (insn))
    if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
      {
	rtx note;

	/* Make a list of all labels referred to other than by jumps
	   (which just don't have the REG_LABEL notes).

	   Make a special exception for labels followed by an ADDR*VEC,
	   as this would be a part of the tablejump setup code.

	   Make a special exception for the eh_return_stub_label, which
	   we know isn't part of any otherwise visible control flow.  */

	for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	  if (REG_NOTE_KIND (note) == REG_LABEL)
	    {
	      rtx lab = XEXP (note, 0), next;

	      if (lab == eh_return_stub_label)
		;
	      else if ((next = next_nonnote_insn (lab)) != NULL
		       && GET_CODE (next) == JUMP_INSN
		       && (GET_CODE (PATTERN (next)) == ADDR_VEC
			   || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
		;
	      else if (GET_CODE (lab) == NOTE)
		;
	      else
		lvl = alloc_EXPR_LIST (0, XEXP (note, 0), lvl);
	    }
      }

  return lvl;
}
/* Find all basic blocks of the function whose first insn is F.

   Collect and return a list of labels whose addresses are taken.  This
   will be used in make_edges for use with computed gotos.  */

static void
find_basic_blocks_1 (f)
     rtx f;
{
  register rtx insn, next;
  int i = 0;
  rtx bb_note = NULL_RTX;
  rtx eh_list = NULL_RTX;
  rtx lvl = NULL_RTX;
  rtx trll = NULL_RTX;
  rtx head = NULL_RTX;
  rtx end = NULL_RTX;

  /* We process the instructions in a slightly different way than we did
     previously.  This is so that we see a NOTE_BASIC_BLOCK after we have
     closed out the previous block, so that it gets attached at the proper
     place.  Since this form should be equivalent to the previous,
     count_basic_blocks continues to use the old form as a check.  */

  for (insn = f; insn; insn = next)
    {
      enum rtx_code code = GET_CODE (insn);

      next = NEXT_INSN (insn);

      switch (code)
	{
	case NOTE:
	  {
	    int kind = NOTE_LINE_NUMBER (insn);

	    /* Keep a LIFO list of the currently active exception notes.  */
	    if (kind == NOTE_INSN_EH_REGION_BEG)
	      eh_list = alloc_INSN_LIST (insn, eh_list);
	    else if (kind == NOTE_INSN_EH_REGION_END)
	      {
		rtx t = eh_list;

		eh_list = XEXP (eh_list, 1);
		free_INSN_LIST_node (t);
	      }

	    /* Look for basic block notes with which to keep the
	       basic_block_info pointers stable.  Unthread the note now;
	       we'll put it back at the right place in create_basic_block.
	       Or not at all if we've already found a note in this block.  */
	    else if (kind == NOTE_INSN_BASIC_BLOCK)
	      {
		if (bb_note == NULL_RTX)
		  bb_note = insn;
		else
		  next = flow_delete_insn (insn);
	      }
	    break;
	  }

	case CODE_LABEL:
	  /* A basic block starts at a label.  If we've closed one off due
	     to a barrier or some such, no need to do it again.  */
	  if (head != NULL_RTX)
	    {
	      /* While we now have edge lists with which other portions of
		 the compiler might determine a call ending a basic block
		 does not imply an abnormal edge, it will be a bit before
		 everything can be updated.  So continue to emit a noop at
		 the end of such a block.  */
	      if (GET_CODE (end) == CALL_INSN && ! SIBLING_CALL_P (end))
		{
		  rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
		  end = emit_insn_after (nop, end);
		}

	      create_basic_block (i++, head, end, bb_note);
	      bb_note = NULL_RTX;
	    }

	  head = end = insn;
	  break;

	case JUMP_INSN:
	  /* A basic block ends at a jump.  */
	  if (head == NULL_RTX)
	    head = insn;
	  else
	    {
	      /* ??? Make a special check for table jumps.  The way this
		 happens is truly and amazingly gross.  We are about to
		 create a basic block that contains just a code label and
		 an addr*vec jump insn.  Worse, an addr_diff_vec creates
		 its own natural loop.

		 Prevent this bit of brain damage, pasting things together
		 correctly in make_edges.

		 The correct solution involves emitting the table directly
		 on the tablejump instruction as a note, or JUMP_LABEL.  */

	      if (GET_CODE (PATTERN (insn)) == ADDR_VEC
		  || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
		{
		  head = end = NULL;
		  n_basic_blocks--;
		  break;
		}
	    }
	  end = insn;
	  goto new_bb_inclusive;

	case BARRIER:
	  /* A basic block ends at a barrier.  It may be that an unconditional
	     jump already closed the basic block -- no need to do it again.  */
	  if (head == NULL_RTX)
	    break;

	  /* While we now have edge lists with which other portions of the
	     compiler might determine a call ending a basic block does not
	     imply an abnormal edge, it will be a bit before everything can
	     be updated.  So continue to emit a noop at the end of such a
	     block.  */
	  if (GET_CODE (end) == CALL_INSN && ! SIBLING_CALL_P (end))
	    {
	      rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
	      end = emit_insn_after (nop, end);
	    }
	  goto new_bb_exclusive;

	case CALL_INSN:
	  {
	    /* Record whether this call created an edge.  */
	    rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
	    int region = (note ? INTVAL (XEXP (note, 0)) : 1);
	    int call_has_abnormal_edge = 0;

	    if (GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
	      {
		/* Scan each of the alternatives for label refs.  */
		lvl = find_label_refs (XEXP (PATTERN (insn), 0), lvl);
		lvl = find_label_refs (XEXP (PATTERN (insn), 1), lvl);
		lvl = find_label_refs (XEXP (PATTERN (insn), 2), lvl);
		/* Record its tail recursion label, if any.  */
		if (XEXP (PATTERN (insn), 3) != NULL_RTX)
		  trll = alloc_EXPR_LIST (0, XEXP (PATTERN (insn), 3), trll);
	      }

	    /* If there is an EH region or rethrow, we have an edge.  */
	    if ((eh_list && region > 0)
		|| find_reg_note (insn, REG_EH_RETHROW, NULL_RTX))
	      call_has_abnormal_edge = 1;
	    else if (nonlocal_goto_handler_labels && region >= 0)
	      /* If there is a nonlocal goto label and the specified
		 region number isn't -1, we have an edge.  (0 means
		 no throw, but might have a nonlocal goto).  */
	      call_has_abnormal_edge = 1;

	    /* A basic block ends at a call that can either throw or
	       do a non-local goto.  */
	    if (call_has_abnormal_edge)
	      {
	      new_bb_inclusive:
		if (head == NULL_RTX)
		  head = insn;
		end = insn;

	      new_bb_exclusive:
		create_basic_block (i++, head, end, bb_note);
		head = end = NULL_RTX;
		bb_note = NULL_RTX;
		break;
	      }
	  }
	  /* FALLTHRU */

	default:
	  if (GET_RTX_CLASS (code) == 'i')
	    {
	      if (head == NULL_RTX)
		head = insn;
	      end = insn;
	    }
	  break;
	}

      if (GET_RTX_CLASS (code) == 'i')
	{
	  rtx note;

	  /* Make a list of all labels referred to other than by jumps
	     (which just don't have the REG_LABEL notes).

	     Make a special exception for labels followed by an ADDR*VEC,
	     as this would be a part of the tablejump setup code.

	     Make a special exception for the eh_return_stub_label, which
	     we know isn't part of any otherwise visible control flow.  */

	  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	    if (REG_NOTE_KIND (note) == REG_LABEL)
	      {
		rtx lab = XEXP (note, 0), next;

		if (lab == eh_return_stub_label)
		  ;
		else if ((next = next_nonnote_insn (lab)) != NULL
			 && GET_CODE (next) == JUMP_INSN
			 && (GET_CODE (PATTERN (next)) == ADDR_VEC
			     || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
		  ;
		else if (GET_CODE (lab) == NOTE)
		  ;
		else
		  lvl = alloc_EXPR_LIST (0, XEXP (note, 0), lvl);
	      }
	}
    }

  if (head != NULL_RTX)
    create_basic_block (i++, head, end, bb_note);
  else if (bb_note)
    flow_delete_insn (bb_note);

  if (i != n_basic_blocks)
    abort ();

  label_value_list = lvl;
  tail_recursion_label_list = trll;
}
/* Tidy the CFG by deleting unreachable code and whatnot.  */

void
cleanup_cfg (f)
     rtx f;
{
  delete_unreachable_blocks ();
  move_stray_eh_region_notes ();
  record_active_eh_regions (f);
  try_merge_blocks ();
  mark_critical_edges ();

  /* Kill the data we won't maintain.  */
  free_EXPR_LIST_list (&label_value_list);
  free_EXPR_LIST_list (&tail_recursion_label_list);
}
/* Create a new basic block consisting of the instructions between
   HEAD and END inclusive.  Reuses the note and basic block struct
   in BB_NOTE, if any.  */

void
create_basic_block (index, head, end, bb_note)
     int index;
     rtx head, end, bb_note;
{
  basic_block bb;

  if (bb_note
      && ! RTX_INTEGRATED_P (bb_note)
      && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
      && bb->aux == NULL)
    {
      /* If we found an existing note, thread it back onto the chain.  */

      rtx after;

      if (GET_CODE (head) == CODE_LABEL)
	after = head;
      else
	{
	  after = PREV_INSN (head);
	  head = bb_note;
	}

      if (after != bb_note && NEXT_INSN (after) != bb_note)
	reorder_insns (bb_note, bb_note, after);
    }
  else
    {
      /* Otherwise we must create a note and a basic block structure.
	 Since we allow basic block structs in rtl, give the struct
	 the same lifetime by allocating it off the function obstack
	 rather than using malloc.  */

      bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
      memset (bb, 0, sizeof (*bb));

      if (GET_CODE (head) == CODE_LABEL)
	bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
      else
	{
	  bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
	  head = bb_note;
	}
      NOTE_BASIC_BLOCK (bb_note) = bb;
    }

  /* Always include the bb note in the block.  */
  if (NEXT_INSN (end) == bb_note)
    end = bb_note;

  bb->head = head;
  bb->end = end;
  bb->index = index;
  BASIC_BLOCK (index) = bb;

  /* Tag the block so that we know it has been used when considering
     other basic block notes.  */
  bb->aux = bb;
}
/* Records the basic block struct in BB_FOR_INSN, for every instruction
   indexed by INSN_UID.  MAX is the size of the array.  */

void
compute_bb_for_insn (max)
     int max;
{
  int i;

  if (basic_block_for_insn)
    VARRAY_FREE (basic_block_for_insn);
  VARRAY_BB_INIT (basic_block_for_insn, max, "basic_block_for_insn");

  for (i = 0; i < n_basic_blocks; ++i)
    {
      basic_block bb = BASIC_BLOCK (i);
      rtx insn, end;

      end = bb->end;
      insn = bb->head;
      while (1)
	{
	  int uid = INSN_UID (insn);
	  if (uid < max)
	    VARRAY_BB (basic_block_for_insn, uid) = bb;
	  if (insn == end)
	    break;
	  insn = NEXT_INSN (insn);
	}
    }
}
/* Free the memory associated with the edge structures.  */

static void
clear_edges ()
{
  int i;
  edge n, e;

  for (i = 0; i < n_basic_blocks; ++i)
    {
      basic_block bb = BASIC_BLOCK (i);

      for (e = bb->succ; e ; e = n)
	{
	  n = e->succ_next;
	  free (e);
	}

      bb->succ = 0;
      bb->pred = 0;
    }

  for (e = ENTRY_BLOCK_PTR->succ; e ; e = n)
    {
      n = e->succ_next;
      free (e);
    }

  ENTRY_BLOCK_PTR->succ = 0;
  EXIT_BLOCK_PTR->pred = 0;

  n_edges = 0;
}
/* Identify the edges between basic blocks.

   NONLOCAL_LABEL_LIST is a list of non-local labels in the function.  Blocks
   that are otherwise unreachable may be reachable with a non-local goto.

   BB_EH_END is an array indexed by basic block number in which we record
   the list of exception regions active at the end of the basic block.  */

static void
make_edges (label_value_list)
     rtx label_value_list;
{
  int i;
  eh_nesting_info *eh_nest_info = init_eh_nesting_info ();
  sbitmap *edge_cache = NULL;

  /* Assume no computed jump; revise as we create edges.  */
  current_function_has_computed_jump = 0;

  /* Heavy use of computed goto in machine-generated code can lead to
     nearly fully-connected CFGs.  In that case we spend a significant
     amount of time searching the edge lists for duplicates.  */
  if (forced_labels || label_value_list)
    {
      edge_cache = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
      sbitmap_vector_zero (edge_cache, n_basic_blocks);
    }

  /* By nature of the way these get numbered, block 0 is always the entry.  */
  make_edge (edge_cache, ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU);

  for (i = 0; i < n_basic_blocks; ++i)
    {
      basic_block bb = BASIC_BLOCK (i);
      rtx insn, x;
      enum rtx_code code;
      int force_fallthru = 0;

      /* Examine the last instruction of the block, and discover the
	 ways we can leave the block.  */

      insn = bb->end;
      code = GET_CODE (insn);

      /* A branch.  */
      if (code == JUMP_INSN)
	{
	  rtx tmp;

	  /* ??? Recognize a tablejump and do the right thing.  */
	  if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
	      && (tmp = NEXT_INSN (tmp)) != NULL_RTX
	      && GET_CODE (tmp) == JUMP_INSN
	      && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
		  || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
	    {
	      rtvec vec;
	      int j;

	      if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
		vec = XVEC (PATTERN (tmp), 0);
	      else
		vec = XVEC (PATTERN (tmp), 1);

	      for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
		make_label_edge (edge_cache, bb,
				 XEXP (RTVEC_ELT (vec, j), 0), 0);

	      /* Some targets (eg, ARM) emit a conditional jump that also
		 contains the out-of-range target.  Scan for these and
		 add an edge if necessary.  */
	      if ((tmp = single_set (insn)) != NULL
		  && SET_DEST (tmp) == pc_rtx
		  && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
		  && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
		make_label_edge (edge_cache, bb,
				 XEXP (XEXP (SET_SRC (tmp), 2), 0), 0);

#ifdef CASE_DROPS_THROUGH
	      /* Silly VAXen.  The ADDR_VEC is going to be in the way of
		 us naturally detecting fallthru into the next block.  */
	      force_fallthru = 1;
#endif
	    }

	  /* If this is a computed jump, then mark it as reaching
	     everything on the label_value_list and forced_labels list.  */
	  else if (computed_jump_p (insn))
	    {
	      current_function_has_computed_jump = 1;

	      for (x = label_value_list; x; x = XEXP (x, 1))
		make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);

	      for (x = forced_labels; x; x = XEXP (x, 1))
		make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
	    }

	  /* A return insn creates an edge to the exit block.  */
	  else if (returnjump_p (insn))
	    make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);

	  /* Otherwise, we have a plain conditional or unconditional jump.  */
	  else
	    {
	      if (! JUMP_LABEL (insn))
		abort ();
	      make_label_edge (edge_cache, bb, JUMP_LABEL (insn), 0);
	    }
	}

      /* If this is a sibling call insn, then this is in effect a
	 combined call and return, and so we need an edge to the
	 exit block.  No need to worry about EH edges, since we
	 wouldn't have created the sibling call in the first place.  */

      if (code == CALL_INSN && SIBLING_CALL_P (insn))
	make_edge (edge_cache, bb, EXIT_BLOCK_PTR,
		   EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
      else
	{
	  /* If this is a CALL_INSN, then mark it as reaching the active EH
	     handler for this CALL_INSN.  If we're handling asynchronous
	     exceptions then any insn can reach any of the active handlers.

	     Also mark the CALL_INSN as reaching any nonlocal goto handler.  */

	  if (code == CALL_INSN || asynchronous_exceptions)
	    {
	      /* Add any appropriate EH edges.  We do this unconditionally
		 since there may be a REG_EH_REGION or REG_EH_RETHROW note
		 on the call, and this needn't be within an EH region.  */
	      make_eh_edge (edge_cache, eh_nest_info, bb, insn, bb->eh_end);

	      /* If we have asynchronous exceptions, do the same for *all*
		 exception regions active in the block.  */
	      if (asynchronous_exceptions
		  && bb->eh_beg != bb->eh_end)
		{
		  if (bb->eh_beg >= 0)
		    make_eh_edge (edge_cache, eh_nest_info, bb,
				  NULL_RTX, bb->eh_beg);

		  for (x = bb->head; x != bb->end; x = NEXT_INSN (x))
		    if (GET_CODE (x) == NOTE
			&& (NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_BEG
			    || NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_END))
		      {
			int region = NOTE_EH_HANDLER (x);
			make_eh_edge (edge_cache, eh_nest_info, bb,
				      NULL_RTX, region);
		      }
		}

	      if (code == CALL_INSN && nonlocal_goto_handler_labels)
		{
		  /* ??? This could be made smarter: in some cases it's possible
		     to tell that certain calls will not do a nonlocal goto.

		     For example, if the nested functions that do the nonlocal
		     gotos do not have their addresses taken, then only calls to
		     those functions or to other nested functions that use them
		     could possibly do nonlocal gotos.  */
		  /* We do know that a REG_EH_REGION note with a value less
		     than 0 is guaranteed not to perform a non-local goto.  */
		  rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
		  if (!note || INTVAL (XEXP (note, 0)) >= 0)
		    for (x = nonlocal_goto_handler_labels; x ; x = XEXP (x, 1))
		      make_label_edge (edge_cache, bb, XEXP (x, 0),
				       EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
		}
	    }
	}

      /* We know something about the structure of the function __throw in
	 libgcc2.c.  It is the only function that ever contains eh_stub
	 labels.  It modifies its return address so that the last block
	 returns to one of the eh_stub labels within it.  So we have to
	 make additional edges in the flow graph.  */
      if (i + 1 == n_basic_blocks && eh_return_stub_label != 0)
	make_label_edge (edge_cache, bb, eh_return_stub_label, EDGE_EH);

      /* Find out if we can drop through to the next block.  */
      insn = next_nonnote_insn (insn);
      if (!insn || (i + 1 == n_basic_blocks && force_fallthru))
	make_edge (edge_cache, bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
      else if (i + 1 < n_basic_blocks)
	{
	  rtx tmp = BLOCK_HEAD (i + 1);
	  if (GET_CODE (tmp) == NOTE)
	    tmp = next_nonnote_insn (tmp);
	  if (force_fallthru || insn == tmp)
	    make_edge (edge_cache, bb, BASIC_BLOCK (i + 1), EDGE_FALLTHRU);
	}
    }

  free_eh_nesting_info (eh_nest_info);
  if (edge_cache)
    sbitmap_vector_free (edge_cache);
}
/* Create an edge between two basic blocks.  FLAGS are auxiliary information
   about the edge that is accumulated between calls.  */

void
make_edge (edge_cache, src, dst, flags)
     sbitmap *edge_cache;
     basic_block src, dst;
     int flags;
{
  int use_edge_cache;
  edge e;

  /* Don't bother with edge cache for ENTRY or EXIT; there aren't that
     many edges to them, and we didn't allocate memory for it.  */
  use_edge_cache = (edge_cache
		    && src != ENTRY_BLOCK_PTR
		    && dst != EXIT_BLOCK_PTR);

  /* Make sure we don't add duplicate edges.  */
  if (! use_edge_cache || TEST_BIT (edge_cache[src->index], dst->index))
    for (e = src->succ; e ; e = e->succ_next)
      if (e->dest == dst)
	{
	  e->flags |= flags;
	  return;
	}

  e = (edge) xcalloc (1, sizeof (*e));
  n_edges++;

  e->succ_next = src->succ;
  e->pred_next = dst->pred;
  e->src = src;
  e->dest = dst;
  e->flags = flags;

  src->succ = e;
  dst->pred = e;

  if (use_edge_cache)
    SET_BIT (edge_cache[src->index], dst->index);
}
/* Create an edge from a basic block to a label.  */

static void
make_label_edge (edge_cache, src, label, flags)
     sbitmap *edge_cache;
     basic_block src;
     rtx label;
     int flags;
{
  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If the label was never emitted, this insn is junk, but avoid a
     crash trying to refer to BLOCK_FOR_INSN (label).  This can happen
     as a result of a syntax error and a diagnostic has already been
     printed.  */

  if (INSN_UID (label) == 0)
    return;

  make_edge (edge_cache, src, BLOCK_FOR_INSN (label), flags);
}
/* Create the edges generated by INSN in REGION.  */

static void
make_eh_edge (edge_cache, eh_nest_info, src, insn, region)
     sbitmap *edge_cache;
     eh_nesting_info *eh_nest_info;
     basic_block src;
     rtx insn;
     int region;
{
  handler_info **handler_list;
  int num, is_call;

  is_call = (insn && GET_CODE (insn) == CALL_INSN ? EDGE_ABNORMAL_CALL : 0);
  num = reachable_handlers (region, eh_nest_info, insn, &handler_list);
  while (--num >= 0)
    {
      make_label_edge (edge_cache, src, handler_list[num]->handler_label,
		       EDGE_ABNORMAL | EDGE_EH | is_call);
    }
}
/* EH_REGION notes appearing between basic blocks are ambiguous, and even
   dangerous if we intend to move basic blocks around.  Move such notes
   into the following block.  */

static void
move_stray_eh_region_notes ()
{
  int i;
  basic_block b1, b2;

  if (n_basic_blocks < 2)
    return;

  b2 = BASIC_BLOCK (n_basic_blocks - 1);
  for (i = n_basic_blocks - 2; i >= 0; --i, b2 = b1)
    {
      rtx insn, next, list = NULL_RTX;

      b1 = BASIC_BLOCK (i);
      for (insn = NEXT_INSN (b1->end); insn != b2->head; insn = next)
	{
	  next = NEXT_INSN (insn);
	  if (GET_CODE (insn) == NOTE
	      && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG
		  || NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
	    {
	      /* Unlink from the insn chain.  */
	      NEXT_INSN (PREV_INSN (insn)) = next;
	      PREV_INSN (next) = PREV_INSN (insn);

	      /* Queue it.  */
	      NEXT_INSN (insn) = list;
	      list = insn;
	    }
	}

      if (list == NULL_RTX)
	continue;

      /* Find where to insert these things.  */
      insn = b2->head;
      if (GET_CODE (insn) == CODE_LABEL)
	insn = NEXT_INSN (insn);

      while (list)
	{
	  next = NEXT_INSN (list);
	  add_insn_after (list, insn);
	  list = next;
	}
    }
}
/* Recompute eh_beg/eh_end for each basic block.  */

static void
record_active_eh_regions (f)
     rtx f;
{
  rtx insn, eh_list = NULL_RTX;
  int i = 0;
  basic_block bb = BASIC_BLOCK (0);

  for (insn = f; insn ; insn = NEXT_INSN (insn))
    {
      if (bb->head == insn)
	bb->eh_beg = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);

      if (GET_CODE (insn) == NOTE)
	{
	  int kind = NOTE_LINE_NUMBER (insn);
	  if (kind == NOTE_INSN_EH_REGION_BEG)
	    eh_list = alloc_INSN_LIST (insn, eh_list);
	  else if (kind == NOTE_INSN_EH_REGION_END)
	    {
	      rtx t = XEXP (eh_list, 1);
	      free_INSN_LIST_node (eh_list);
	      eh_list = t;
	    }
	}

      if (bb->end == insn)
	{
	  bb->eh_end = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);
	  i += 1;
	  if (i == n_basic_blocks)
	    break;
	  bb = BASIC_BLOCK (i);
	}
    }
}
/* Identify critical edges and set the bits appropriately.  */

static void
mark_critical_edges ()
{
  int i, n = n_basic_blocks;
  basic_block bb;

  /* We begin with the entry block.  This is not terribly important now,
     but could be if a front end (Fortran) implemented alternate entry
     points.  */
  bb = ENTRY_BLOCK_PTR;
  i = -1;

  while (1)
    {
      edge e;

      /* (1) Critical edges must have a source with multiple successors.  */
      if (bb->succ && bb->succ->succ_next)
	{
	  for (e = bb->succ; e ; e = e->succ_next)
	    {
	      /* (2) Critical edges must have a destination with multiple
		 predecessors.  Note that we know there is at least one
		 predecessor -- the edge we followed to get here.  */
	      if (e->dest->pred->pred_next)
		e->flags |= EDGE_CRITICAL;
	      else
		e->flags &= ~EDGE_CRITICAL;
	    }
	}
      else
	{
	  for (e = bb->succ; e ; e = e->succ_next)
	    e->flags &= ~EDGE_CRITICAL;
	}

      if (++i >= n)
	break;
      bb = BASIC_BLOCK (i);
    }
}
/* Split a (typically critical) edge.  Return the new block.
   Abort on abnormal edges.

   ??? The code generally expects to be called on critical edges.
   The case of a block ending in an unconditional jump to a
   block with multiple predecessors is not handled optimally.  */

basic_block
split_edge (edge_in)
     edge edge_in;
{
  basic_block old_pred, bb, old_succ;
  edge edge_out;
  rtx bb_note;
  int i, j;

  /* Abnormal edges cannot be split.  */
  if ((edge_in->flags & EDGE_ABNORMAL) != 0)
    abort ();

  old_pred = edge_in->src;
  old_succ = edge_in->dest;

  /* Remove the existing edge from the destination's pred list.  */
  {
    edge *pp;
    for (pp = &old_succ->pred; *pp != edge_in; pp = &(*pp)->pred_next)
      continue;
    *pp = edge_in->pred_next;
    edge_in->pred_next = NULL;
  }

  /* Create the new structures.  */
  bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
  edge_out = (edge) xcalloc (1, sizeof (*edge_out));
  n_edges++;

  memset (bb, 0, sizeof (*bb));

  /* ??? This info is likely going to be out of date very soon.  */
  if (old_succ->global_live_at_start)
    {
      bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
      bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);
      COPY_REG_SET (bb->global_live_at_start, old_succ->global_live_at_start);
      COPY_REG_SET (bb->global_live_at_end, old_succ->global_live_at_start);
    }

  /* Wire them up.  */
  bb->pred = edge_in;
  bb->succ = edge_out;
  bb->count = edge_in->count;

  edge_in->dest = bb;
  edge_in->flags &= ~EDGE_CRITICAL;

  edge_out->pred_next = old_succ->pred;
  edge_out->succ_next = NULL;
  edge_out->src = bb;
  edge_out->dest = old_succ;
  edge_out->flags = EDGE_FALLTHRU;
  edge_out->probability = REG_BR_PROB_BASE;
  edge_out->count = edge_in->count;

  old_succ->pred = edge_out;

  /* Tricky case -- if there existed a fallthru into the successor
     (and we're not it) we must add a new unconditional jump around
     the new block we're actually interested in.

     Further, if that edge is critical, this means a second new basic
     block must be created to hold it.  In order to simplify correct
     insn placement, do this before we touch the existing basic block
     ordering for the block we were really wanting.  */
  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
    {
      edge e;
      for (e = edge_out->pred_next; e ; e = e->pred_next)
	if (e->flags & EDGE_FALLTHRU)
	  break;

      if (e)
	{
	  basic_block jump_block;
	  rtx pos;

	  if ((e->flags & EDGE_CRITICAL) == 0
	      && e->src != ENTRY_BLOCK_PTR)
	    {
	      /* Non critical -- we can simply add a jump to the end
		 of the existing predecessor.  */
	      jump_block = e->src;
	    }
	  else
	    {
	      /* We need a new block to hold the jump.  The simplest
		 way to do the bulk of the work here is to recursively
		 call ourselves.  */
	      jump_block = split_edge (e);
	      e = jump_block->succ;
	    }

	  /* Now add the jump insn ...  */
	  pos = emit_jump_insn_after (gen_jump (old_succ->head),
				      jump_block->end);
	  jump_block->end = pos;
	  if (basic_block_for_insn)
	    set_block_for_insn (pos, jump_block);
	  emit_barrier_after (pos);

	  /* ... let jump know that label is in use, ...  */
	  JUMP_LABEL (pos) = old_succ->head;
	  ++LABEL_NUSES (old_succ->head);

	  /* ... and clear fallthru on the outgoing edge.  */
	  e->flags &= ~EDGE_FALLTHRU;

	  /* Continue splitting the interesting edge.  */
	}
    }

  /* Place the new block just in front of the successor.  */
  VARRAY_GROW (basic_block_info, ++n_basic_blocks);
  if (old_succ == EXIT_BLOCK_PTR)
    j = n_basic_blocks - 1;
  else
    j = old_succ->index;
  for (i = n_basic_blocks - 1; i > j; --i)
    {
      basic_block tmp = BASIC_BLOCK (i - 1);
      BASIC_BLOCK (i) = tmp;
      tmp->index = i;
    }
  BASIC_BLOCK (i) = bb;
  bb->index = i;

  /* Create the basic block note.

     Where we place the note can have a noticeable impact on the generated
     code.  Consider this cfg:

			E
			|
			0
		       / \
		   +->1-->2--->E
		   |  |
		   +--+

     If we need to insert an insn on the edge from block 0 to block 1,
     we want to ensure the instructions we insert are outside of any
     loop notes that physically sit between block 0 and block 1.  Otherwise
     we confuse the loop optimizer into thinking the loop is a phony.  */
  if (old_succ != EXIT_BLOCK_PTR
      && PREV_INSN (old_succ->head)
      && GET_CODE (PREV_INSN (old_succ->head)) == NOTE
      && NOTE_LINE_NUMBER (PREV_INSN (old_succ->head)) == NOTE_INSN_LOOP_BEG)
    bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK,
				PREV_INSN (old_succ->head));
  else if (old_succ != EXIT_BLOCK_PTR)
    bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, old_succ->head);
  else
    bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
  NOTE_BASIC_BLOCK (bb_note) = bb;
  bb->head = bb->end = bb_note;

  /* Not quite simple -- for non-fallthru edges, we must adjust the
     predecessor's jump instruction to target our new block.  */
  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
    {
      rtx tmp, insn = old_pred->end;
      rtx old_label = old_succ->head;
      rtx new_label = gen_label_rtx ();

      if (GET_CODE (insn) != JUMP_INSN)
	abort ();

      /* ??? Recognize a tablejump and adjust all matching cases.  */
      if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
	  && (tmp = NEXT_INSN (tmp)) != NULL_RTX
	  && GET_CODE (tmp) == JUMP_INSN
	  && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
	      || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
	{
	  rtvec vec;
	  int j;

	  if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
	    vec = XVEC (PATTERN (tmp), 0);
	  else
	    vec = XVEC (PATTERN (tmp), 1);

	  for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
	    if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
	      {
		RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (VOIDmode, new_label);
		--LABEL_NUSES (old_label);
		++LABEL_NUSES (new_label);
	      }

	  /* Handle casesi dispatch insns */
	  if ((tmp = single_set (insn)) != NULL
	      && SET_DEST (tmp) == pc_rtx
	      && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
	      && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
	      && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
	    {
	      XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (VOIDmode,
							   new_label);
	      --LABEL_NUSES (old_label);
	      ++LABEL_NUSES (new_label);
	    }
	}
      else
	{
	  /* This would have indicated an abnormal edge.  */
	  if (computed_jump_p (insn))
	    abort ();

	  /* A return instruction can't be redirected.  */
	  if (returnjump_p (insn))
	    abort ();

	  /* If the insn doesn't go where we think, we're confused.  */
	  if (JUMP_LABEL (insn) != old_label)
	    abort ();

	  redirect_jump (insn, new_label, 0);
	}

      emit_label_before (new_label, bb_note);
      bb->head = new_label;
    }

  return bb;
}
/* Queue instructions for insertion on an edge between two basic blocks.
   The new instructions and basic blocks (if any) will not appear in the
   CFG until commit_edge_insertions is called.  */

void
insert_insn_on_edge (pattern, e)
     rtx pattern;
     edge e;
{
  /* We cannot insert instructions on an abnormal critical edge.
     It will be easier to find the culprit if we die now.  */
  if ((e->flags & (EDGE_ABNORMAL|EDGE_CRITICAL))
      == (EDGE_ABNORMAL|EDGE_CRITICAL))
    abort ();

  if (e->insns == NULL_RTX)
    start_sequence ();
  else
    push_to_sequence (e->insns);

  emit_insn (pattern);

  e->insns = get_insns ();
  end_sequence ();
}
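/* A typical use (a hypothetical sketch, not taken from this file): a
   pass queues a register copy on each edge that needs one, then
   flushes them all at once:

	insert_insn_on_edge (gen_move_insn (dest, src), e);
	...
	commit_edge_insertions ();

   Queueing matters because committing may split edges and renumber
   basic blocks, which would invalidate the caller's own CFG walk if
   it happened eagerly.  */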
/* Update the CFG for the instructions queued on edge E.  */

static void
commit_one_edge_insertion (e)
     edge e;
{
  rtx before = NULL_RTX, after = NULL_RTX, insns, tmp, last;
  basic_block bb;

  /* Pull the insns off the edge now since the edge might go away.  */
  insns = e->insns;
  e->insns = NULL_RTX;

  /* Figure out where to put these things.  If the destination has
     one predecessor, insert there.  Except for the exit block.  */
  if (e->dest->pred->pred_next == NULL
      && e->dest != EXIT_BLOCK_PTR)
    {
      bb = e->dest;

      /* Get the location correct wrt a code label, and "nice" wrt
	 a basic block note, and before everything else.  */
      tmp = bb->head;
      if (GET_CODE (tmp) == CODE_LABEL)
	tmp = NEXT_INSN (tmp);
      if (GET_CODE (tmp) == NOTE
	  && NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BASIC_BLOCK)
	tmp = NEXT_INSN (tmp);
      if (tmp == bb->head)
	before = tmp;
      else
	after = PREV_INSN (tmp);
    }

  /* If the source has one successor and the edge is not abnormal,
     insert there.  Except for the entry block.  */
  else if ((e->flags & EDGE_ABNORMAL) == 0
	   && e->src->succ->succ_next == NULL
	   && e->src != ENTRY_BLOCK_PTR)
    {
      bb = e->src;
      /* It is possible to have a non-simple jump here.  Consider a target
	 where some forms of unconditional jumps clobber a register.  This
	 happens on the fr30 for example.

	 We know this block has a single successor, so we can just emit
	 the queued insns before the jump.  */
      if (GET_CODE (bb->end) == JUMP_INSN)
	{
	  before = bb->end;
	}
      else
	{
	  /* We'd better be fallthru, or we've lost track of what's what.  */
	  if ((e->flags & EDGE_FALLTHRU) == 0)
	    abort ();

	  after = bb->end;
	}
    }

  /* Otherwise we must split the edge.  */
  else
    {
      bb = split_edge (e);
      after = bb->end;
    }

  /* Now that we've found the spot, do the insertion.  */

  /* Set the new block number for these insns, if structure is allocated.  */
  if (basic_block_for_insn)
    {
      rtx i;
      for (i = insns; i != NULL_RTX; i = NEXT_INSN (i))
	set_block_for_insn (i, bb);
    }

  if (before)
    {
      emit_insns_before (insns, before);
      if (before == bb->head)
	bb->head = insns;

      last = prev_nonnote_insn (before);
    }
  else
    {
      last = emit_insns_after (insns, after);
      if (after == bb->end)
	bb->end = last;
    }

  if (returnjump_p (last))
    {
      /* ??? Remove all outgoing edges from BB and add one for EXIT.
	 This is not currently a problem because this only happens
	 for the (single) epilogue, which already has a fallthru edge
	 to EXIT.  */

      e = bb->succ;
      if (e->dest != EXIT_BLOCK_PTR
	  || e->succ_next != NULL
	  || (e->flags & EDGE_FALLTHRU) == 0)
	abort ();
      e->flags &= ~EDGE_FALLTHRU;

      emit_barrier_after (last);
      bb->end = last;

      if (before)
	flow_delete_insn (before);
    }
  else if (GET_CODE (last) == JUMP_INSN)
    abort ();
}
/* Update the CFG for all queued instructions.  */

void
commit_edge_insertions ()
{
  int i;
  basic_block bb;

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  i = -1;
  bb = ENTRY_BLOCK_PTR;
  while (1)
    {
      edge e, next;

      for (e = bb->succ; e ; e = next)
	{
	  next = e->succ_next;
	  if (e->insns)
	    commit_one_edge_insertion (e);
	}

      if (++i >= n_basic_blocks)
	break;
      bb = BASIC_BLOCK (i);
    }
}
/* Delete all unreachable basic blocks.  */

static void
delete_unreachable_blocks ()
{
  basic_block *worklist, *tos;
  int deleted_handler;
  edge e;
  int i, n;

  n = n_basic_blocks;
  tos = worklist = (basic_block *) xmalloc (sizeof (basic_block) * n);

  /* Use basic_block->aux as a marker.  Clear them all.  */

  for (i = 0; i < n; ++i)
    BASIC_BLOCK (i)->aux = NULL;

  /* Add our starting points to the worklist.  Almost always there will
     be only one.  It isn't inconceivable that we might one day directly
     support Fortran alternate entry points.  */

  for (e = ENTRY_BLOCK_PTR->succ; e ; e = e->succ_next)
    {
      *tos++ = e->dest;

      /* Mark the block with a handy non-null value.  */
      e->dest->aux = e;
    }

  /* Iterate: find everything reachable from what we've already seen.  */

  while (tos != worklist)
    {
      basic_block b = *--tos;

      for (e = b->succ; e ; e = e->succ_next)
	if (!e->dest->aux)
	  {
	    *tos++ = e->dest;
	    e->dest->aux = e;
	  }
    }

  /* Delete all unreachable basic blocks.  Count down so that we don't
     interfere with the block renumbering that happens in flow_delete_block.  */

  deleted_handler = 0;

  for (i = n - 1; i >= 0; --i)
    {
      basic_block b = BASIC_BLOCK (i);

      if (b->aux != NULL)
	/* This block was found.  Tidy up the mark.  */
	b->aux = NULL;
      else
	deleted_handler |= flow_delete_block (b);
    }

  tidy_fallthru_edges ();

  /* If we deleted an exception handler, we may have EH region begin/end
     blocks to remove as well.  */
  if (deleted_handler)
    delete_eh_regions ();

  free (worklist);
}
/* Find EH regions for which there is no longer a handler, and delete them.  */

static void
delete_eh_regions ()
{
  rtx insn;

  update_rethrow_references ();

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == NOTE)
      {
	if ((NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG) ||
	    (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
	  {
	    int num = NOTE_EH_HANDLER (insn);
	    /* A NULL handler indicates a region is no longer needed,
	       as long as its rethrow label isn't used.  */
	    if (get_first_handler (num) == NULL && ! rethrow_used (num))
	      {
		NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
		NOTE_SOURCE_FILE (insn) = 0;
	      }
	  }
      }
}
/* Return true if NOTE is not one of the ones that must be kept paired,
   so that we may simply delete them.  */

static int
can_delete_note_p (note)
     rtx note;
{
  return (NOTE_LINE_NUMBER (note) == NOTE_INSN_DELETED
	  || NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK);
}
/* Unlink a chain of insns between START and FINISH, leaving notes
   that must be paired.  */

void
flow_delete_insn_chain (start, finish)
     rtx start, finish;
{
  /* Unchain the insns one by one.  It would be quicker to delete all
     of these with a single unchaining, rather than one at a time, but
     we need to keep the NOTE's.  */

  rtx next;

  while (1)
    {
      next = NEXT_INSN (start);
      if (GET_CODE (start) == NOTE && !can_delete_note_p (start))
	;
      else if (GET_CODE (start) == CODE_LABEL
	       && ! can_delete_label_p (start))
	{
	  const char *name = LABEL_NAME (start);
	  PUT_CODE (start, NOTE);
	  NOTE_LINE_NUMBER (start) = NOTE_INSN_DELETED_LABEL;
	  NOTE_SOURCE_FILE (start) = name;
	}
      else
	next = flow_delete_insn (start);

      if (start == finish)
	break;
      start = next;
    }
}
1941 /* Delete the insns in a (non-live) block. We physically delete every
1942 non-deleted-note insn, and update the flow graph appropriately.
1944 Return nonzero if we deleted an exception handler. */
1946 /* ??? Preserving all such notes strikes me as wrong. It would be nice
1947 to post-process the stream to remove empty blocks, loops, ranges, etc. */
1950 flow_delete_block (b)
1951 basic_block b;
1953 int deleted_handler = 0;
1954 rtx insn, end, tmp;
1956 /* If the head of this block is a CODE_LABEL, then it might be the
1957 label for an exception handler which can't be reached.
1959 We need to remove the label from the exception_handler_label list
1960 and remove the associated NOTE_INSN_EH_REGION_BEG and
1961 NOTE_INSN_EH_REGION_END notes. */
1963 insn = b->head;
1965 never_reached_warning (insn);
1967 if (GET_CODE (insn) == CODE_LABEL)
1969 rtx x, *prev = &exception_handler_labels;
1971 for (x = exception_handler_labels; x; x = XEXP (x, 1))
1973 if (XEXP (x, 0) == insn)
1975 /* Found a match, splice this label out of the EH label list. */
1976 *prev = XEXP (x, 1);
1977 XEXP (x, 1) = NULL_RTX;
1978 XEXP (x, 0) = NULL_RTX;
1980 /* Remove the handler from all regions. */
1981 remove_handler (insn);
1982 deleted_handler = 1;
1983 break;
1985 prev = &XEXP (x, 1);
1989 /* Include any jump table following the basic block. */
1990 end = b->end;
1991 if (GET_CODE (end) == JUMP_INSN
1992 && (tmp = JUMP_LABEL (end)) != NULL_RTX
1993 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
1994 && GET_CODE (tmp) == JUMP_INSN
1995 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
1996 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
1997 end = tmp;
1999 /* Include any barrier that may follow the basic block. */
2000 tmp = next_nonnote_insn (end);
2001 if (tmp && GET_CODE (tmp) == BARRIER)
2002 end = tmp;
2004 /* Selectively delete the entire chain. */
2005 flow_delete_insn_chain (insn, end);
2007 /* Remove the edges into and out of this block. Note that there may
2008 indeed be edges in, if we are removing an unreachable loop. */
2010 edge e, next, *q;
2012 for (e = b->pred; e ; e = next)
2014 for (q = &e->src->succ; *q != e; q = &(*q)->succ_next)
2015 continue;
2016 *q = e->succ_next;
2017 next = e->pred_next;
2018 n_edges--;
2019 free (e);
2021 for (e = b->succ; e ; e = next)
2023 for (q = &e->dest->pred; *q != e; q = &(*q)->pred_next)
2024 continue;
2025 *q = e->pred_next;
2026 next = e->succ_next;
2027 n_edges--;
2028 free (e);
2031 b->pred = NULL;
2032 b->succ = NULL;
2035 /* Remove the basic block from the array, and compact behind it. */
2036 expunge_block (b);
2038 return deleted_handler;
2041 /* Remove block B from the basic block array and compact behind it. */
2043 static void
2044 expunge_block (b)
2045 basic_block b;
2047 int i, n = n_basic_blocks;
2049 for (i = b->index; i + 1 < n; ++i)
2051 basic_block x = BASIC_BLOCK (i + 1);
2052 BASIC_BLOCK (i) = x;
2053 x->index = i;
2056 basic_block_info->num_elements--;
2057 n_basic_blocks--;
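/* Editorial example (invented numbers): with six blocks indexed
   0..5, expunging the block at index 2 copies blocks 3..5 down to
   indices 2..4, updates each moved block's ->index to match, and
   leaves n_basic_blocks == 5.  */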
2060 /* Delete INSN by patching it out. Return the next insn. */
2063 flow_delete_insn (insn)
2064 rtx insn;
2066 rtx prev = PREV_INSN (insn);
2067 rtx next = NEXT_INSN (insn);
2068 rtx note;
2070 PREV_INSN (insn) = NULL_RTX;
2071 NEXT_INSN (insn) = NULL_RTX;
2072 INSN_DELETED_P (insn) = 1;
2074 if (prev)
2075 NEXT_INSN (prev) = next;
2076 if (next)
2077 PREV_INSN (next) = prev;
2078 else
2079 set_last_insn (prev);
2081 if (GET_CODE (insn) == CODE_LABEL)
2082 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
2084 /* If deleting a jump, decrement the use count of the label. Deleting
2085 the label itself should happen in the normal course of block merging. */
2086 if (GET_CODE (insn) == JUMP_INSN
2087 && JUMP_LABEL (insn)
2088 && GET_CODE (JUMP_LABEL (insn)) == CODE_LABEL)
2089 LABEL_NUSES (JUMP_LABEL (insn))--;
2091 /* Also if deleting an insn that references a label. */
2092 else if ((note = find_reg_note (insn, REG_LABEL, NULL_RTX)) != NULL_RTX
2093 && GET_CODE (XEXP (note, 0)) == CODE_LABEL)
2094 LABEL_NUSES (XEXP (note, 0))--;
2096 return next;
2099 /* True if a given label can be deleted. */
2101 static int
2102 can_delete_label_p (label)
2103 rtx label;
2105 rtx x;
2107 if (LABEL_PRESERVE_P (label))
2108 return 0;
2110 for (x = forced_labels; x ; x = XEXP (x, 1))
2111 if (label == XEXP (x, 0))
2112 return 0;
2113 for (x = label_value_list; x ; x = XEXP (x, 1))
2114 if (label == XEXP (x, 0))
2115 return 0;
2116 for (x = exception_handler_labels; x ; x = XEXP (x, 1))
2117 if (label == XEXP (x, 0))
2118 return 0;
2120 /* User declared labels must be preserved. */
2121 if (LABEL_NAME (label) != 0)
2122 return 0;
2124 return 1;
2127 static int
2128 tail_recursion_label_p (label)
2129 rtx label;
2131 rtx x;
2133 for (x = tail_recursion_label_list; x ; x = XEXP (x, 1))
2134 if (label == XEXP (x, 0))
2135 return 1;
2137 return 0;
2140 /* Blocks A and B are to be merged into a single block A. The insns
2141 are already contiguous, hence `nomove'. */
2143 void
2144 merge_blocks_nomove (a, b)
2145 basic_block a, b;
2147 edge e;
2148 rtx b_head, b_end, a_end;
2149 rtx del_first = NULL_RTX, del_last = NULL_RTX;
2150 int b_empty = 0;
2152 /* If there was a CODE_LABEL beginning B, delete it. */
2153 b_head = b->head;
2154 b_end = b->end;
2155 if (GET_CODE (b_head) == CODE_LABEL)
2157 /* Detect basic blocks with nothing but a label. This can happen
2158 in particular at the end of a function. */
2159 if (b_head == b_end)
2160 b_empty = 1;
2161 del_first = del_last = b_head;
2162 b_head = NEXT_INSN (b_head);
2165 /* Delete the basic block note. */
2166 if (GET_CODE (b_head) == NOTE
2167 && NOTE_LINE_NUMBER (b_head) == NOTE_INSN_BASIC_BLOCK)
2169 if (b_head == b_end)
2170 b_empty = 1;
2171 if (! del_last)
2172 del_first = b_head;
2173 del_last = b_head;
2174 b_head = NEXT_INSN (b_head);
2177 /* If there was a jump out of A, delete it. */
2178 a_end = a->end;
2179 if (GET_CODE (a_end) == JUMP_INSN)
2181 rtx prev;
2183 prev = prev_nonnote_insn (a_end);
2184 if (!prev)
2185 prev = a->head;
2187 del_first = a_end;
2189 #ifdef HAVE_cc0
2190 /* If this was a conditional jump, we need to also delete
2191 the insn that set cc0. */
2192 if (prev && sets_cc0_p (prev))
2194 rtx tmp = prev;
2195 prev = prev_nonnote_insn (prev);
2196 if (!prev)
2197 prev = a->head;
2198 del_first = tmp;
2200 #endif
2202 a_end = prev;
2204 else if (GET_CODE (NEXT_INSN (a_end)) == BARRIER)
2205 del_first = NEXT_INSN (a_end);
2207 /* Delete everything marked above as well as crap that might be
2208 hanging out between the two blocks. */
2209 flow_delete_insn_chain (del_first, del_last);
2211 /* Normally there should only be one successor of A and that is B, but
2212 partway through the merge of blocks for conditional_execution we'll
2213 be merging a TEST block with THEN and ELSE successors. Free the
2214 whole lot of them and hope the caller knows what they're doing. */
2215 while (a->succ)
2216 remove_edge (a->succ);
2218 /* Adjust the edges out of B for the new owner. */
2219 for (e = b->succ; e ; e = e->succ_next)
2220 e->src = a;
2221 a->succ = b->succ;
2223 /* B hasn't quite yet ceased to exist. Attempt to prevent mishap. */
2224 b->pred = b->succ = NULL;
2226 /* Reassociate the insns of B with A. */
2227 if (!b_empty)
2229 if (basic_block_for_insn)
2231 BLOCK_FOR_INSN (b_head) = a;
2232 while (b_head != b_end)
2234 b_head = NEXT_INSN (b_head);
2235 BLOCK_FOR_INSN (b_head) = a;
2238 a_end = b_end;
2240 a->end = a_end;
2242 expunge_block (b);
2245 /* Blocks A and B are to be merged into a single block. A has no incoming
2246 fallthru edge, so it can be moved before B without adding or modifying
2247 any jumps (aside from the jump from A to B). */
2249 static int
2250 merge_blocks_move_predecessor_nojumps (a, b)
2251 basic_block a, b;
2253 rtx start, end, barrier;
2254 int index;
2256 start = a->head;
2257 end = a->end;
2259 barrier = next_nonnote_insn (end);
2260 if (GET_CODE (barrier) != BARRIER)
2261 abort ();
2262 flow_delete_insn (barrier);
2264 /* Move block and loop notes out of the chain so that we do not
2265 disturb their order.
2267 ??? A better solution would be to squeeze out all the non-nested notes
2268 and adjust the block trees appropriately. Even better would be to have
2269 a tighter connection between block trees and rtl so that this is not
2270 necessary. */
2271 start = squeeze_notes (start, end);
2273 /* Scramble the insn chain. */
2274 if (end != PREV_INSN (b->head))
2275 reorder_insns (start, end, PREV_INSN (b->head));
2277 if (rtl_dump_file)
2279 fprintf (rtl_dump_file, "Moved block %d before %d and merged.\n",
2280 a->index, b->index);
2283 /* Swap the records for the two blocks around. Although we are deleting B,
2284 A is now where B was and we want to compact the BB array from where
2285 A used to be. */
2286 BASIC_BLOCK (a->index) = b;
2287 BASIC_BLOCK (b->index) = a;
2288 index = a->index;
2289 a->index = b->index;
2290 b->index = index;
2292 /* Now blocks A and B are contiguous. Merge them. */
2293 merge_blocks_nomove (a, b);
2295 return 1;
2298 /* Blocks A and B are to be merged into a single block. B has no outgoing
2299 fallthru edge, so it can be moved after A without adding or modifying
2300 any jumps (aside from the jump from A to B). */
2302 static int
2303 merge_blocks_move_successor_nojumps (a, b)
2304 basic_block a, b;
2306 rtx start, end, barrier;
2308 start = b->head;
2309 end = b->end;
2310 barrier = NEXT_INSN (end);
2312 /* Recognize a jump table following block B. */
2313 if (GET_CODE (barrier) == CODE_LABEL
2314 && NEXT_INSN (barrier)
2315 && GET_CODE (NEXT_INSN (barrier)) == JUMP_INSN
2316 && (GET_CODE (PATTERN (NEXT_INSN (barrier))) == ADDR_VEC
2317 || GET_CODE (PATTERN (NEXT_INSN (barrier))) == ADDR_DIFF_VEC))
2319 end = NEXT_INSN (barrier);
2320 barrier = NEXT_INSN (end);
2323 /* There had better have been a barrier there. Delete it. */
2324 if (GET_CODE (barrier) != BARRIER)
2325 abort ();
2326 flow_delete_insn (barrier);
2328 /* Move block and loop notes out of the chain so that we do not
2329 disturb their order.
2331 ??? A better solution would be to squeeze out all the non-nested notes
2332 and adjust the block trees appropriately. Even better would be to have
2333 a tighter connection between block trees and rtl so that this is not
2334 necessary. */
2335 start = squeeze_notes (start, end);
2337 /* Scramble the insn chain. */
2338 reorder_insns (start, end, a->end);
2340 /* Now blocks A and B are contiguous. Merge them. */
2341 merge_blocks_nomove (a, b);
2343 if (rtl_dump_file)
2345 fprintf (rtl_dump_file, "Moved block %d after %d and merged.\n",
2346 b->index, a->index);
2349 return 1;
2352 /* Attempt to merge basic blocks that are potentially non-adjacent.
2353 Return true iff the attempt succeeded. */
2355 static int
2356 merge_blocks (e, b, c)
2357 edge e;
2358 basic_block b, c;
2360 /* If C has a tail recursion label, do not merge. There is no
2361 edge recorded from the call_placeholder back to this label, as
2362 that would make optimize_sibling_and_tail_recursive_calls more
2363 complex for no gain. */
2364 if (GET_CODE (c->head) == CODE_LABEL
2365 && tail_recursion_label_p (c->head))
2366 return 0;
2368 /* If B has a fallthru edge to C, no need to move anything. */
2369 if (e->flags & EDGE_FALLTHRU)
2371 merge_blocks_nomove (b, c);
2373 if (rtl_dump_file)
2375 fprintf (rtl_dump_file, "Merged %d and %d without moving.\n",
2376 b->index, c->index);
2379 return 1;
2381 else
2383 edge tmp_edge;
2384 basic_block d;
2385 int c_has_outgoing_fallthru;
2386 int b_has_incoming_fallthru;
2388 /* We must make sure to not munge nesting of exception regions,
2389 lexical blocks, and loop notes.
2391 The first is taken care of by requiring that the active eh
2392 region at the end of one block always matches the active eh
2393 region at the beginning of the next block.
2395 The latter two are taken care of by squeezing out all the notes. */
2397 /* ??? A throw/catch edge (or any abnormal edge) should be rarely
2398 executed and we may want to treat blocks which have two out
2399 edges, one normal, one abnormal as only having one edge for
2400 block merging purposes. */
2402 for (tmp_edge = c->succ; tmp_edge ; tmp_edge = tmp_edge->succ_next)
2403 if (tmp_edge->flags & EDGE_FALLTHRU)
2404 break;
2405 c_has_outgoing_fallthru = (tmp_edge != NULL);
2407 for (tmp_edge = b->pred; tmp_edge ; tmp_edge = tmp_edge->pred_next)
2408 if (tmp_edge->flags & EDGE_FALLTHRU)
2409 break;
2410 b_has_incoming_fallthru = (tmp_edge != NULL);
2412 /* If B does not have an incoming fallthru, and the exception regions
2413 match, then it can be moved immediately before C without introducing
2414 or modifying jumps.
2416 C cannot be the first block, so we do not have to worry about
2417 accessing a non-existent block. */
2418 d = BASIC_BLOCK (c->index - 1);
2419 if (! b_has_incoming_fallthru
2420 && d->eh_end == b->eh_beg
2421 && b->eh_end == c->eh_beg)
2422 return merge_blocks_move_predecessor_nojumps (b, c);
2424 /* Otherwise, we're going to try to move C after B. Make sure the
2425 exception regions match.
2427 If B is the last basic block, then we must not try to access the
2428 block structure for block B + 1. Luckily in that case we do not
2429 need to worry about matching exception regions. */
2430 d = (b->index + 1 < n_basic_blocks ? BASIC_BLOCK (b->index + 1) : NULL);
2431 if (b->eh_end == c->eh_beg
2432 && (d == NULL || c->eh_end == d->eh_beg))
2434 /* If C does not have an outgoing fallthru, then it can be moved
2435 immediately after B without introducing or modifying jumps. */
2436 if (! c_has_outgoing_fallthru)
2437 return merge_blocks_move_successor_nojumps (b, c);
2439 /* Otherwise, we'll need to insert an extra jump, and possibly
2440 a new block to contain it. */
2441 /* ??? Not implemented yet. */
2444 return 0;
2448 /* Top level driver for merge_blocks. */
2450 static void
2451 try_merge_blocks ()
2453 int i;
2455 /* Attempt to merge blocks as made possible by edge removal. If a block
2456 has only one successor, and the successor has only one predecessor,
2457 they may be combined. */
2459 for (i = 0; i < n_basic_blocks; )
2461 basic_block c, b = BASIC_BLOCK (i);
2462 edge s;
2464 /* A loop because chains of blocks might be combinable. */
2465 while ((s = b->succ) != NULL
2466 && s->succ_next == NULL
2467 && (s->flags & EDGE_EH) == 0
2468 && (c = s->dest) != EXIT_BLOCK_PTR
2469 && c->pred->pred_next == NULL
2470 /* If the jump insn has side effects, we can't kill the edge. */
2471 && (GET_CODE (b->end) != JUMP_INSN
2472 || onlyjump_p (b->end))
2473 && merge_blocks (s, b, c))
2474 continue;
2476 /* Don't get confused by the index shift caused by deleting blocks. */
2477 i = b->index + 1;
2481 /* The given edge should potentially be a fallthru edge. If that is in
2482 fact true, delete the jump and barriers that are in the way. */
2484 void
2485 tidy_fallthru_edge (e, b, c)
2486 edge e;
2487 basic_block b, c;
2489 rtx q;
2491 /* ??? In a late-running flow pass, other folks may have deleted basic
2492 blocks by nopping them out, leaving multiple BARRIERs between here
2493 and the target label. They ought to be chastised and fixed.
2495 We can also wind up with a sequence of undeletable labels between
2496 one block and the next.
2498 So search through a sequence of barriers, labels, and notes for
2499 the head of block C and assert that we really do fall through. */
2501 if (next_real_insn (b->end) != next_real_insn (PREV_INSN (c->head)))
2502 return;
2504 /* Remove what will soon cease being the jump insn from the source block.
2505 If block B consisted only of this single jump, turn it into a deleted
2506 note. */
2507 q = b->end;
2508 if (GET_CODE (q) == JUMP_INSN
2509 && onlyjump_p (q)
2510 && (any_uncondjump_p (q)
2511 || (b->succ == e && e->succ_next == NULL)))
2513 #ifdef HAVE_cc0
2514 /* If this was a conditional jump, we need to also delete
2515 the insn that set cc0. */
2516 if (any_condjump_p (q) && sets_cc0_p (PREV_INSN (q)))
2517 q = PREV_INSN (q);
2518 #endif
2520 if (b->head == q)
2522 PUT_CODE (q, NOTE);
2523 NOTE_LINE_NUMBER (q) = NOTE_INSN_DELETED;
2524 NOTE_SOURCE_FILE (q) = 0;
2526 else
2527 b->end = q = PREV_INSN (q);
2530 /* Selectively unlink the sequence. */
2531 if (q != PREV_INSN (c->head))
2532 flow_delete_insn_chain (NEXT_INSN (q), PREV_INSN (c->head));
2534 e->flags |= EDGE_FALLTHRU;
2537 /* Fix up edges that now fall through, or rather should now fall through
2538 but previously required a jump around now deleted blocks. Simplify
2539 the search by only examining blocks numerically adjacent, since this
2540 is how find_basic_blocks created them. */
2542 static void
2543 tidy_fallthru_edges ()
2545 int i;
2547 for (i = 1; i < n_basic_blocks; ++i)
2549 basic_block b = BASIC_BLOCK (i - 1);
2550 basic_block c = BASIC_BLOCK (i);
2551 edge s;
2553 /* We care about simple conditional or unconditional jumps with
2554 a single successor.
2556 If we had a conditional branch to the next instruction when
2557 find_basic_blocks was called, then there will only be one
2558 out edge for the block which ended with the conditional
2559 branch (since we do not create duplicate edges).
2561 Furthermore, the edge will be marked as a fallthru because we
2562 merge the flags for the duplicate edges. So we do not want to
2563 check that the edge is not a FALLTHRU edge. */
2564 if ((s = b->succ) != NULL
2565 && s->succ_next == NULL
2566 && s->dest == c
2567 /* If the jump insn has side effects, we can't tidy the edge. */
2568 && (GET_CODE (b->end) != JUMP_INSN
2569 || onlyjump_p (b->end)))
2570 tidy_fallthru_edge (s, b, c);
2574 /* Perform data flow analysis.
2575 F is the first insn of the function; FLAGS is a set of PROP_* flags
2576 to be used in accumulating flow info. */
2578 void
2579 life_analysis (f, file, flags)
2580 rtx f;
2581 FILE *file;
2582 int flags;
2584 #ifdef ELIMINABLE_REGS
2585 register int i;
2586 static struct {int from, to; } eliminables[] = ELIMINABLE_REGS;
2587 #endif
2589 /* Record which registers will be eliminated. We use this in
2590 mark_used_regs. */
2592 CLEAR_HARD_REG_SET (elim_reg_set);
2594 #ifdef ELIMINABLE_REGS
2595 for (i = 0; i < (int) (sizeof eliminables / sizeof eliminables[0]); i++)
2596 SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
2597 #else
2598 SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
2599 #endif
2601 if (! optimize)
2602 flags &= PROP_DEATH_NOTES | PROP_REG_INFO;
2604 /* The post-reload life analysis must have (on a global basis) the
2605 same registers live as were computed by reload itself. Otherwise
2606 elimination offsets and such may be incorrect.
2608 Reload will mark some registers as live even though they do not
2609 appear in the rtl. */
2610 if (reload_completed)
2611 flags &= ~PROP_REG_INFO;
2613 /* We want alias analysis information for local dead store elimination. */
2614 if (flags & PROP_SCAN_DEAD_CODE)
2615 init_alias_analysis ();
2617 /* Always remove no-op moves. Do this before other processing so
2618 that we don't have to keep re-scanning them. */
2619 delete_noop_moves (f);
2621 /* Some targets can emit simpler epilogues if they know that sp was
2622 not ever modified during the function. After reload, of course,
2623 we've already emitted the epilogue so there's no sense searching. */
2624 if (! reload_completed)
2625 notice_stack_pointer_modification (f);
2627 /* Allocate and zero out data structures that will record the
2628 data from lifetime analysis. */
2629 allocate_reg_life_data ();
2630 allocate_bb_life_data ();
2632 /* Find the set of registers live on function exit. */
2633 mark_regs_live_at_end (EXIT_BLOCK_PTR->global_live_at_start);
2635 /* "Update" life info from zero. It'd be nice to begin the
2636 relaxation with just the exit and noreturn blocks, but that set
2637 is not immediately handy. */
2639 if (flags & PROP_REG_INFO)
2640 memset (regs_ever_live, 0, sizeof (regs_ever_live));
2641 update_life_info (NULL, UPDATE_LIFE_GLOBAL, flags);
2643 /* Clean up. */
2644 if (flags & PROP_SCAN_DEAD_CODE)
2645 end_alias_analysis ();
2647 if (file)
2648 dump_flow_info (file);
2650 free_basic_block_vars (1);
2653 /* A subroutine of verify_wide_reg, called through for_each_rtx.
2654 Search for REGNO. If found, abort if it is not wider than word_mode. */
2656 static int
2657 verify_wide_reg_1 (px, pregno)
2658 rtx *px;
2659 void *pregno;
2661 rtx x = *px;
2662 unsigned int regno = *(int *) pregno;
2664 if (GET_CODE (x) == REG && REGNO (x) == regno)
2666 if (GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD)
2667 abort ();
2668 return 1;
2670 return 0;
2673 /* A subroutine of verify_local_live_at_start. Search through insns
2674 between HEAD and END looking for register REGNO. */
2676 static void
2677 verify_wide_reg (regno, head, end)
2678 int regno;
2679 rtx head, end;
2681 while (1)
2683 if (GET_RTX_CLASS (GET_CODE (head)) == 'i'
2684 && for_each_rtx (&PATTERN (head), verify_wide_reg_1, &regno))
2685 return;
2686 if (head == end)
2687 break;
2688 head = NEXT_INSN (head);
2691 /* We didn't find the register at all. Something's way screwy. */
2692 abort ();
2695 /* A subroutine of update_life_info. Verify that there are no untoward
2696 changes in live_at_start during a local update. */
2698 static void
2699 verify_local_live_at_start (new_live_at_start, bb)
2700 regset new_live_at_start;
2701 basic_block bb;
2703 if (reload_completed)
2705 /* After reload, there are no pseudos, nor subregs of multi-word
2706 registers. The regsets should exactly match. */
2707 if (! REG_SET_EQUAL_P (new_live_at_start, bb->global_live_at_start))
2708 abort ();
2710 else
2712 int i;
2714 /* Find the set of changed registers. */
2715 XOR_REG_SET (new_live_at_start, bb->global_live_at_start);
2717 EXECUTE_IF_SET_IN_REG_SET (new_live_at_start, 0, i,
2719 /* No registers should die. */
2720 if (REGNO_REG_SET_P (bb->global_live_at_start, i))
2721 abort ();
2722 /* Verify that the now-live register is wider than word_mode. */
2723 verify_wide_reg (i, bb->head, bb->end);
2728 /* Updates life information starting with the basic blocks set in BLOCKS.
2729 If BLOCKS is null, consider it to be the universal set.
2731 If EXTENT is UPDATE_LIFE_LOCAL, such as after splitting or peepholeing,
2732 we are only expecting local modifications to basic blocks. If we find
2733 extra registers live at the beginning of a block, then we either killed
2734 useful data, or we have a broken split that wants data not provided.
2735 If we find registers removed from live_at_start, that means we have
2736 a broken peephole that is killing a register it shouldn't.
2738 ??? This is not true in one situation -- when a pre-reload splitter
2739 generates subregs of a multi-word pseudo, current life analysis will
2740 lose the kill. So we _can_ have a pseudo go live. How irritating.
2742 Including PROP_REG_INFO does not properly refresh regs_ever_live
2743 unless the caller resets it to zero. */
2745 void
2746 update_life_info (blocks, extent, prop_flags)
2747 sbitmap blocks;
2748 enum update_life_extent extent;
2749 int prop_flags;
2751 regset tmp;
2752 regset_head tmp_head;
2753 int i;
2755 tmp = INITIALIZE_REG_SET (tmp_head);
2757 /* For a global update, we go through the relaxation process again. */
2758 if (extent != UPDATE_LIFE_LOCAL)
2760 calculate_global_regs_live (blocks, blocks,
2761 prop_flags & PROP_SCAN_DEAD_CODE);
2763 /* If asked, remove notes from the blocks we'll update. */
2764 if (extent == UPDATE_LIFE_GLOBAL_RM_NOTES)
2765 count_or_remove_death_notes (blocks, 1);
2768 if (blocks)
2770 EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
2772 basic_block bb = BASIC_BLOCK (i);
2774 COPY_REG_SET (tmp, bb->global_live_at_end);
2775 propagate_block (bb, tmp, (regset) NULL, prop_flags);
2777 if (extent == UPDATE_LIFE_LOCAL)
2778 verify_local_live_at_start (tmp, bb);
2781 else
2783 for (i = n_basic_blocks - 1; i >= 0; --i)
2785 basic_block bb = BASIC_BLOCK (i);
2787 COPY_REG_SET (tmp, bb->global_live_at_end);
2788 propagate_block (bb, tmp, (regset) NULL, prop_flags);
2790 if (extent == UPDATE_LIFE_LOCAL)
2791 verify_local_live_at_start (tmp, bb);
2795 FREE_REG_SET (tmp);
2797 if (prop_flags & PROP_REG_INFO)
2799 /* The only pseudos that are live at the beginning of the function
2800 are those that were not set anywhere in the function. local-alloc
2801 doesn't know how to handle these correctly, so mark them as not
2802 local to any one basic block. */
2803 EXECUTE_IF_SET_IN_REG_SET (ENTRY_BLOCK_PTR->global_live_at_end,
2804 FIRST_PSEUDO_REGISTER, i,
2805 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
2807 /* We have a problem with any pseudoreg that lives across the setjmp.
2808 ANSI says that if a user variable does not change in value between
2809 the setjmp and the longjmp, then the longjmp preserves it. This
2810 includes longjmp from a place where the pseudo appears dead.
2811 (In principle, the value still exists if it is in scope.)
2812 If the pseudo goes in a hard reg, some other value may occupy
2813 that hard reg where this pseudo is dead, thus clobbering the pseudo.
2814 Conclusion: such a pseudo must not go in a hard reg. */
2815 EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
2816 FIRST_PSEUDO_REGISTER, i,
2818 if (regno_reg_rtx[i] != 0)
2820 REG_LIVE_LENGTH (i) = -1;
2821 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
2827 /* Free the variables allocated by find_basic_blocks.
2829 KEEP_HEAD_END_P is non-zero if basic_block_info is not to be freed. */
2831 void
2832 free_basic_block_vars (keep_head_end_p)
2833 int keep_head_end_p;
2835 if (basic_block_for_insn)
2837 VARRAY_FREE (basic_block_for_insn);
2838 basic_block_for_insn = NULL;
2841 if (! keep_head_end_p)
2843 clear_edges ();
2844 VARRAY_FREE (basic_block_info);
2845 n_basic_blocks = 0;
2847 ENTRY_BLOCK_PTR->aux = NULL;
2848 ENTRY_BLOCK_PTR->global_live_at_end = NULL;
2849 EXIT_BLOCK_PTR->aux = NULL;
2850 EXIT_BLOCK_PTR->global_live_at_start = NULL;
2854 /* Return nonzero if the destination of SET equals the source. */
2855 static int
2856 set_noop_p (set)
2857 rtx set;
2859 rtx src = SET_SRC (set);
2860 rtx dst = SET_DEST (set);
2862 if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
2864 if (SUBREG_WORD (src) != SUBREG_WORD (dst))
2865 return 0;
2866 src = SUBREG_REG (src);
2867 dst = SUBREG_REG (dst);
2870 return (GET_CODE (src) == REG && GET_CODE (dst) == REG
2871 && REGNO (src) == REGNO (dst));
2874 /* Return nonzero if an insn consists only of SETs, each of which only sets a
2875 value to itself. */
2876 static int
2877 noop_move_p (insn)
2878 rtx insn;
2880 rtx pat = PATTERN (insn);
2882 /* Insns carrying these notes are useful later on. */
2883 if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
2884 return 0;
2886 if (GET_CODE (pat) == SET && set_noop_p (pat))
2887 return 1;
2889 if (GET_CODE (pat) == PARALLEL)
2891 int i;
2892 /* If nothing but SETs of registers to themselves,
2893 this insn can also be deleted. */
2894 for (i = 0; i < XVECLEN (pat, 0); i++)
2896 rtx tem = XVECEXP (pat, 0, i);
2898 if (GET_CODE (tem) == USE
2899 || GET_CODE (tem) == CLOBBER)
2900 continue;
2902 if (GET_CODE (tem) != SET || ! set_noop_p (tem))
2903 return 0;
2906 return 1;
2908 return 0;
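/* Editorial examples (invented RTL, not from the sources): the
   patterns the two functions above recognize include

       (set (reg 42) (reg 42))

   and, because USEs and CLOBBERs inside a PARALLEL are skipped,

       (parallel [(set (reg 42) (reg 42))
                  (clobber (reg:CC 17))])

   whereas (set (subreg:SI (reg:DI 42) 0) (subreg:SI (reg:DI 43) 0))
   is not a no-op, since the underlying registers differ.  */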
2911 /* Delete any insns that copy a register to itself. */
2913 static void
2914 delete_noop_moves (f)
2915 rtx f;
2917 rtx insn;
2918 for (insn = f; insn; insn = NEXT_INSN (insn))
2920 if (GET_CODE (insn) == INSN && noop_move_p (insn))
2922 PUT_CODE (insn, NOTE);
2923 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2924 NOTE_SOURCE_FILE (insn) = 0;
2929 /* Determine if the stack pointer is constant over the life of the function.
2930 Only useful before prologues have been emitted. */
2932 static void
2933 notice_stack_pointer_modification_1 (x, pat, data)
2934 rtx x;
2935 rtx pat ATTRIBUTE_UNUSED;
2936 void *data ATTRIBUTE_UNUSED;
2938 if (x == stack_pointer_rtx
2939 /* The stack pointer is only modified indirectly as the result
2940 of a push until later in flow. See the comments in rtl.texi
2941 regarding Embedded Side-Effects on Addresses. */
2942 || (GET_CODE (x) == MEM
2943 && (GET_CODE (XEXP (x, 0)) == PRE_DEC
2944 || GET_CODE (XEXP (x, 0)) == PRE_INC
2945 || GET_CODE (XEXP (x, 0)) == POST_DEC
2946 || GET_CODE (XEXP (x, 0)) == POST_INC)
2947 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx))
2948 current_function_sp_is_unchanging = 0;
2951 static void
2952 notice_stack_pointer_modification (f)
2953 rtx f;
2955 rtx insn;
2957 /* Assume that the stack pointer is unchanging if alloca hasn't
2958 been used. */
2959 current_function_sp_is_unchanging = !current_function_calls_alloca;
2960 if (! current_function_sp_is_unchanging)
2961 return;
2963 for (insn = f; insn; insn = NEXT_INSN (insn))
2965 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2967 /* Check if insn modifies the stack pointer. */
2968 note_stores (PATTERN (insn), notice_stack_pointer_modification_1,
2969 NULL);
2970 if (! current_function_sp_is_unchanging)
2971 return;
2976 /* Mark a register in SET. Hard registers in large modes get all
2977 of their component registers set as well. */
2978 static void
2979 mark_reg (reg, xset)
2980 rtx reg;
2981 void *xset;
2983 regset set = (regset) xset;
2984 int regno = REGNO (reg);
2986 if (GET_MODE (reg) == BLKmode)
2987 abort ();
2989 SET_REGNO_REG_SET (set, regno);
2990 if (regno < FIRST_PSEUDO_REGISTER)
2992 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
2993 while (--n > 0)
2994 SET_REGNO_REG_SET (set, regno + n);
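/* Editorial example (assumed 32-bit target): if HARD_REGNO_NREGS
   (0, DImode) is 2, then marking a DImode value in hard register 0
   sets the bits for hard registers 0 and 1; a pseudo register
   always gets exactly one bit.  */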
2998 /* Mark those regs which are needed at the end of the function as live
2999 at the end of the last basic block. */
3000 static void
3001 mark_regs_live_at_end (set)
3002 regset set;
3004 int i;
3006 /* If exiting needs the right stack value, consider the stack pointer
3007 live at the end of the function. */
3008 if ((HAVE_epilogue && reload_completed)
3009 || ! EXIT_IGNORE_STACK
3010 || (! FRAME_POINTER_REQUIRED
3011 && ! current_function_calls_alloca
3012 && flag_omit_frame_pointer)
3013 || current_function_sp_is_unchanging)
3015 SET_REGNO_REG_SET (set, STACK_POINTER_REGNUM);
3018 /* Mark the frame pointer if needed at the end of the function. If
3019 we end up eliminating it, it will be removed from the live list
3020 of each basic block by reload. */
3022 if (! reload_completed || frame_pointer_needed)
3024 SET_REGNO_REG_SET (set, FRAME_POINTER_REGNUM);
3025 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3026 /* If they are different, also mark the hard frame pointer as live. */
3027 SET_REGNO_REG_SET (set, HARD_FRAME_POINTER_REGNUM);
3028 #endif
3031 #ifdef PIC_OFFSET_TABLE_REGNUM
3032 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
3033 /* Many architectures have a GP register even without flag_pic.
3034 Assume the pic register is not in use, or will be handled by
3035 other means, if it is not fixed. */
3036 if (fixed_regs[PIC_OFFSET_TABLE_REGNUM])
3037 SET_REGNO_REG_SET (set, PIC_OFFSET_TABLE_REGNUM);
3038 #endif
3039 #endif
3041 /* Mark all global registers, and all registers used by the epilogue
3042 as being live at the end of the function since they may be
3043 referenced by our caller. */
3044 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3045 if (global_regs[i]
3046 #ifdef EPILOGUE_USES
3047 || EPILOGUE_USES (i)
3048 #endif
3050 SET_REGNO_REG_SET (set, i);
3052 /* Mark all call-saved registers that we actually used. */
3053 if (HAVE_epilogue && reload_completed)
3055 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3056 if (! call_used_regs[i] && regs_ever_live[i])
3057 SET_REGNO_REG_SET (set, i);
3060 /* Mark function return value. */
3061 diddle_return_value (mark_reg, set);
3064 /* Callback function for for_each_successor_phi. DATA is a regset.
3065 Sets the bit for SRC_REGNO, the regno of the phi alternative for phi node
3066 INSN, in the regset. */
3068 static int
3069 set_phi_alternative_reg (insn, dest_regno, src_regno, data)
3070 rtx insn ATTRIBUTE_UNUSED;
3071 int dest_regno ATTRIBUTE_UNUSED;
3072 int src_regno;
3073 void *data;
3075 regset live = (regset) data;
3076 SET_REGNO_REG_SET (live, src_regno);
3077 return 0;
3080 /* Propagate global life info around the graph of basic blocks. Begin
3081 considering blocks with their corresponding bit set in BLOCKS_IN.
3082 If BLOCKS_IN is null, consider it the universal set.
3084 BLOCKS_OUT is set for every block that was changed. */
3086 static void
3087 calculate_global_regs_live (blocks_in, blocks_out, flags)
3088 sbitmap blocks_in, blocks_out;
3089 int flags;
3091 basic_block *queue, *qhead, *qtail, *qend;
3092 regset tmp, new_live_at_end;
3093 regset_head tmp_head;
3094 regset_head new_live_at_end_head;
3095 int i;
3097 tmp = INITIALIZE_REG_SET (tmp_head);
3098 new_live_at_end = INITIALIZE_REG_SET (new_live_at_end_head);
3100 /* Create a worklist. Allocate an extra slot for ENTRY_BLOCK, and one
3101 because the `head == tail' style test for an empty queue doesn't
3102 work with a full queue. */
3103 queue = (basic_block *) xmalloc ((n_basic_blocks + 2) * sizeof (*queue));
3104 qtail = queue;
3105 qhead = qend = queue + n_basic_blocks + 2;
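/* Editorial note: an empty ring and a completely full one would
   both satisfy qhead == qtail, hence the spare slot; at most
   n_basic_blocks + 1 blocks (including ENTRY_BLOCK) can ever be
   queued at once, in n_basic_blocks + 2 slots.  */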
3107 /* Clear out the garbage that might be hanging out in bb->aux. */
3108 for (i = n_basic_blocks - 1; i >= 0; --i)
3109 BASIC_BLOCK (i)->aux = NULL;
3111 /* Queue the blocks set in the initial mask. Do this in reverse block
3112 number order so that the first round is more likely to do useful
3113 work. We use AUX non-null to flag that the block is queued. */
3114 if (blocks_in)
3116 EXECUTE_IF_SET_IN_SBITMAP (blocks_in, 0, i,
3118 basic_block bb = BASIC_BLOCK (i);
3119 *--qhead = bb;
3120 bb->aux = bb;
3123 else
3125 for (i = 0; i < n_basic_blocks; ++i)
3127 basic_block bb = BASIC_BLOCK (i);
3128 *--qhead = bb;
3129 bb->aux = bb;
3133 if (blocks_out)
3134 sbitmap_zero (blocks_out);
3136 while (qhead != qtail)
3138 int rescan, changed;
3139 basic_block bb;
3140 edge e;
3142 bb = *qhead++;
3143 if (qhead == qend)
3144 qhead = queue;
3145 bb->aux = NULL;
3147 /* Begin by propagating live_at_start from the successor blocks. */
3148 CLEAR_REG_SET (new_live_at_end);
3149 for (e = bb->succ; e ; e = e->succ_next)
3151 basic_block sb = e->dest;
3152 IOR_REG_SET (new_live_at_end, sb->global_live_at_start);
3155 /* Force the stack pointer to be live -- which might not already be
3156 the case for blocks within infinite loops. */
3157 SET_REGNO_REG_SET (new_live_at_end, STACK_POINTER_REGNUM);
3159 /* Regs used in phi nodes are not included in
3160 global_live_at_start, since they are live only along a
3161 particular edge. Set those regs that are live because of a
3162 phi node alternative corresponding to this particular block. */
3163 if (in_ssa_form)
3164 for_each_successor_phi (bb, &set_phi_alternative_reg,
3165 new_live_at_end);
3167 if (bb == ENTRY_BLOCK_PTR)
3169 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3170 continue;
3173 /* On our first pass through this block, we'll go ahead and continue.
3174 Recognize first pass by local_set NULL. On subsequent passes, we
3175 get to skip out early if live_at_end wouldn't have changed. */
3177 if (bb->local_set == NULL)
3179 bb->local_set = OBSTACK_ALLOC_REG_SET (function_obstack);
3180 rescan = 1;
3182 else
3184 /* If any bits were removed from live_at_end, we'll have to
3185 rescan the block. This wouldn't be necessary if we had
3186 precalculated local_live; however, with PROP_SCAN_DEAD_CODE,
3187 local_live is really dependent on live_at_end. */
3188 CLEAR_REG_SET (tmp);
3189 rescan = bitmap_operation (tmp, bb->global_live_at_end,
3190 new_live_at_end, BITMAP_AND_COMPL);
3192 if (! rescan)
3194 /* Find the set of changed bits. Take this opportunity
3195 to notice when this set is empty and exit early. */
3196 CLEAR_REG_SET (tmp);
3197 changed = bitmap_operation (tmp, bb->global_live_at_end,
3198 new_live_at_end, BITMAP_XOR);
3199 if (! changed)
3200 continue;
3202 /* If any of the changed bits overlap with local_set,
3203 we'll have to rescan the block. Detect overlap by
3204 the AND with ~local_set turning off bits. */
3205 rescan = bitmap_operation (tmp, tmp, bb->local_set,
3206 BITMAP_AND_COMPL);
3210 /* Let our caller know that BB changed enough to require its
3211 death notes updated. */
3212 if (blocks_out)
3213 SET_BIT (blocks_out, bb->index);
3215 if (! rescan)
3217 /* Add to live_at_start the set of all registers in
3218 new_live_at_end that aren't in the old live_at_end. */
3220 bitmap_operation (tmp, new_live_at_end, bb->global_live_at_end,
3221 BITMAP_AND_COMPL);
3222 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3224 changed = bitmap_operation (bb->global_live_at_start,
3225 bb->global_live_at_start,
3226 tmp, BITMAP_IOR);
3227 if (! changed)
3228 continue;
3230 else
3232 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3234 /* Rescan the block insn by insn to turn (a copy of) live_at_end
3235 into live_at_start. */
3236 propagate_block (bb, new_live_at_end, bb->local_set, flags);
3238 /* If live_at_start didn't change, no need to go further. */
3239 if (REG_SET_EQUAL_P (bb->global_live_at_start, new_live_at_end))
3240 continue;
3242 COPY_REG_SET (bb->global_live_at_start, new_live_at_end);
3245 /* Queue all predecessors of BB so that we may re-examine
3246 their live_at_end. */
3247 for (e = bb->pred; e ; e = e->pred_next)
3249 basic_block pb = e->src;
3250 if (pb->aux == NULL)
3252 *qtail++ = pb;
3253 if (qtail == qend)
3254 qtail = queue;
3255 pb->aux = pb;
3260 FREE_REG_SET (tmp);
3261 FREE_REG_SET (new_live_at_end);
3263 if (blocks_out)
3265 EXECUTE_IF_SET_IN_SBITMAP (blocks_out, 0, i,
3267 basic_block bb = BASIC_BLOCK (i);
3268 FREE_REG_SET (bb->local_set);
3271 else
3273 for (i = n_basic_blocks - 1; i >= 0; --i)
3275 basic_block bb = BASIC_BLOCK (i);
3276 FREE_REG_SET (bb->local_set);
3280 free (queue);
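/* Editorial note: the loop above computes the standard backward
   liveness fixed point.  For each block B,

       live_at_end (B)   = union over successors S of live_at_start (S)
       live_at_start (B) = (live_at_end (B) - def (B)) | use (B)

   where propagate_block supplies the second (transfer) equation and
   the re-queueing of predecessors drives the iteration to a fixed
   point.  */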
3283 /* Subroutines of life analysis. */
3285 /* Allocate the permanent data structures that represent the results
3286 of life analysis. Not static since used also for stupid life analysis. */
3288 void
3289 allocate_bb_life_data ()
3291 register int i;
3293 for (i = 0; i < n_basic_blocks; i++)
3295 basic_block bb = BASIC_BLOCK (i);
3297 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
3298 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);
3301 ENTRY_BLOCK_PTR->global_live_at_end
3302 = OBSTACK_ALLOC_REG_SET (function_obstack);
3303 EXIT_BLOCK_PTR->global_live_at_start
3304 = OBSTACK_ALLOC_REG_SET (function_obstack);
3306 regs_live_at_setjmp = OBSTACK_ALLOC_REG_SET (function_obstack);
3309 void
3310 allocate_reg_life_data ()
3312 int i;
3314 max_regno = max_reg_num ();
3316 /* Recalculate the register space, in case it has grown. Old style
3317 vector oriented regsets would set regset_{size,bytes} here also. */
3318 allocate_reg_info (max_regno, FALSE, FALSE);
3320 /* Reset all the data we'll collect in propagate_block and its
3321 subroutines. */
3322 for (i = 0; i < max_regno; i++)
3324 REG_N_SETS (i) = 0;
3325 REG_N_REFS (i) = 0;
3326 REG_N_DEATHS (i) = 0;
3327 REG_N_CALLS_CROSSED (i) = 0;
3328 REG_LIVE_LENGTH (i) = 0;
3329 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
3333 /* Delete dead instructions for propagate_block. */
3335 static void
3336 propagate_block_delete_insn (bb, insn)
3337 basic_block bb;
3338 rtx insn;
3340 rtx inote = find_reg_note (insn, REG_LABEL, NULL_RTX);
3342 /* If the insn referred to a label, and that label was attached to
3343 an ADDR_VEC, it's safe to delete the ADDR_VEC. In fact, it's
3344 pretty much mandatory to delete it, because the ADDR_VEC may be
3345 referencing labels that no longer exist. */
3347 if (inote)
3349 rtx label = XEXP (inote, 0);
3350 rtx next;
3352 if (LABEL_NUSES (label) == 1
3353 && (next = next_nonnote_insn (label)) != NULL
3354 && GET_CODE (next) == JUMP_INSN
3355 && (GET_CODE (PATTERN (next)) == ADDR_VEC
3356 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
3358 rtx pat = PATTERN (next);
3359 int diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
3360 int len = XVECLEN (pat, diff_vec_p);
3361 int i;
3363 for (i = 0; i < len; i++)
3364 LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0))--;
3366 flow_delete_insn (next);
3370 if (bb->end == insn)
3371 bb->end = PREV_INSN (insn);
3372 flow_delete_insn (insn);
3375 /* Delete dead libcalls for propagate_block. Return the insn
3376 before the libcall. */
3378 static rtx
3379 propagate_block_delete_libcall (bb, insn, note)
3380 basic_block bb;
3381 rtx insn, note;
3383 rtx first = XEXP (note, 0);
3384 rtx before = PREV_INSN (first);
3386 if (insn == bb->end)
3387 bb->end = before;
3389 flow_delete_insn_chain (first, insn);
3390 return before;
3393 /* Update the life-status of regs for one insn. Return the previous insn. */
3396 propagate_one_insn (pbi, insn)
3397 struct propagate_block_info *pbi;
3398 rtx insn;
3400 rtx prev = PREV_INSN (insn);
3401 int flags = pbi->flags;
3402 int insn_is_dead = 0;
3403 int libcall_is_dead = 0;
3404 rtx note;
3405 int i;
3407 if (! INSN_P (insn))
3408 return prev;
3410 note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
3411 if (flags & PROP_SCAN_DEAD_CODE)
3413 insn_is_dead = insn_dead_p (pbi, PATTERN (insn), 0,
3414 REG_NOTES (insn));
3415 libcall_is_dead = (insn_is_dead && note != 0
3416 && libcall_dead_p (pbi, PATTERN (insn),
3417 note, insn));
3420 /* We almost certainly don't want to delete prologue or epilogue
3421 instructions. Warn about probable compiler lossage. */
3422 if (insn_is_dead
3423 && reload_completed
3424 && (((HAVE_epilogue || HAVE_prologue)
3425 && prologue_epilogue_contains (insn))
3426 || (HAVE_sibcall_epilogue
3427 && sibcall_epilogue_contains (insn))))
3429 if (flags & PROP_KILL_DEAD_CODE)
3431 warning ("ICE: would have deleted prologue/epilogue insn");
3432 if (!inhibit_warnings)
3433 debug_rtx (insn);
3435 libcall_is_dead = insn_is_dead = 0;
3438 /* If an instruction consists of just dead store(s) on final pass,
3439 delete it. */
3440 if ((flags & PROP_KILL_DEAD_CODE) && insn_is_dead)
3442 /* Record sets. Do this even for dead instructions, since they
3443 would have killed the values if they hadn't been deleted. */
3444 mark_set_regs (pbi, PATTERN (insn), insn);
3446 /* CC0 is now known to be dead. Either this insn used it,
3447 in which case it doesn't anymore, or clobbered it,
3448 so the next insn can't use it. */
3449 pbi->cc0_live = 0;
3451 if (libcall_is_dead)
3453 prev = propagate_block_delete_libcall (pbi->bb, insn, note);
3454 insn = NEXT_INSN (prev);
3456 else
3457 propagate_block_delete_insn (pbi->bb, insn);
3459 return prev;
3462 /* See if this is an increment or decrement that can be merged into
3463 a following memory address. */
3464 #ifdef AUTO_INC_DEC
3466 register rtx x = single_set (insn);
3468 /* Does this instruction increment or decrement a register? */
3469 if (!reload_completed
3470 && (flags & PROP_AUTOINC)
3471 && x != 0
3472 && GET_CODE (SET_DEST (x)) == REG
3473 && (GET_CODE (SET_SRC (x)) == PLUS
3474 || GET_CODE (SET_SRC (x)) == MINUS)
3475 && XEXP (SET_SRC (x), 0) == SET_DEST (x)
3476 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
3477 /* Ok, look for a following memory ref we can combine with.
3478 If one is found, change the memory ref to a PRE_INC
3479 or PRE_DEC, cancel this insn, and return 1.
3480 Return 0 if nothing has been done. */
3481 && try_pre_increment_1 (pbi, insn))
3482 return prev;
3484 #endif /* AUTO_INC_DEC */
3486 CLEAR_REG_SET (pbi->new_set);
3488 /* If this is not the final pass, and this insn is copying the value of
3489 a library call and it's dead, don't scan the insns that perform the
3490 library call, so that the call's arguments are not marked live. */
3491 if (libcall_is_dead)
3493 /* Record the death of the dest reg. */
3494 mark_set_regs (pbi, PATTERN (insn), insn);
3496 insn = XEXP (note, 0);
3497 return PREV_INSN (insn);
3499 else if (GET_CODE (PATTERN (insn)) == SET
3500 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
3501 && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
3502 && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx
3503 && GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == CONST_INT)
3504 /* We have an insn to pop a constant amount off the stack.
3505 (Such insns use PLUS regardless of the direction of the stack,
3506 and any insn to adjust the stack by a constant is always a pop.)
3507 These insns, if not dead stores, have no effect on life. */
3509 else
3511 /* Any regs live at the time of a call instruction must not go
3512 in a register clobbered by calls. Find all regs now live and
3513 record this for them. */
3515 if (GET_CODE (insn) == CALL_INSN && (flags & PROP_REG_INFO))
3516 EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i,
3517 { REG_N_CALLS_CROSSED (i)++; });
3519 /* Record sets. Do this even for dead instructions, since they
3520 would have killed the values if they hadn't been deleted. */
3521 mark_set_regs (pbi, PATTERN (insn), insn);
3523 if (GET_CODE (insn) == CALL_INSN)
3525 register int i;
3526 rtx note, cond;
3528 cond = NULL_RTX;
3529 if (GET_CODE (PATTERN (insn)) == COND_EXEC)
3530 cond = COND_EXEC_TEST (PATTERN (insn));
3532 /* Non-constant calls clobber memory. */
3533 if (! CONST_CALL_P (insn))
3534 free_EXPR_LIST_list (&pbi->mem_set_list);
3536 /* There may be extra registers to be clobbered. */
3537 for (note = CALL_INSN_FUNCTION_USAGE (insn);
3538 note;
3539 note = XEXP (note, 1))
3540 if (GET_CODE (XEXP (note, 0)) == CLOBBER)
3541 mark_set_1 (pbi, CLOBBER, XEXP (XEXP (note, 0), 0),
3542 cond, insn, pbi->flags);
3544 /* Calls clobber all call-used registers that are not global or fixed. */
3545 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3546 if (call_used_regs[i] && ! global_regs[i]
3547 && ! fixed_regs[i])
3549 /* We do not want REG_UNUSED notes for these registers. */
3550 mark_set_1 (pbi, CLOBBER, gen_rtx_REG (reg_raw_mode[i], i),
3551 cond, insn,
3552 pbi->flags & ~(PROP_DEATH_NOTES | PROP_REG_INFO));
3556 /* If an insn doesn't use CC0, it becomes dead since we assume
3557 that every insn clobbers it. So show it dead here;
3558 mark_used_regs will set it live if it is referenced. */
3559 pbi->cc0_live = 0;
3561 /* Record uses. */
3562 if (! insn_is_dead)
3563 mark_used_regs (pbi, PATTERN (insn), NULL_RTX, insn);
3565 /* Sometimes we may have inserted something before INSN (such as a move)
3566 when we make an auto-inc. So ensure we will scan those insns. */
3567 #ifdef AUTO_INC_DEC
3568 prev = PREV_INSN (insn);
3569 #endif
3571 if (! insn_is_dead && GET_CODE (insn) == CALL_INSN)
3573 register int i;
3574 rtx note, cond;
3576 cond = NULL_RTX;
3577 if (GET_CODE (PATTERN (insn)) == COND_EXEC)
3578 cond = COND_EXEC_TEST (PATTERN (insn));
3580 /* Calls use their arguments. */
3581 for (note = CALL_INSN_FUNCTION_USAGE (insn);
3582 note;
3583 note = XEXP (note, 1))
3584 if (GET_CODE (XEXP (note, 0)) == USE)
3585 mark_used_regs (pbi, XEXP (XEXP (note, 0), 0),
3586 cond, insn);
3588 /* The stack ptr is used (honorarily) by a CALL insn. */
3589 SET_REGNO_REG_SET (pbi->reg_live, STACK_POINTER_REGNUM);
3591 /* Calls may also reference any of the global registers,
3592 so they are made live. */
3593 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3594 if (global_regs[i])
3595 mark_used_reg (pbi, gen_rtx_REG (reg_raw_mode[i], i),
3596 cond, insn);
3600 /* On final pass, update counts of how many insns in which each reg
3601 is live. */
3602 if (flags & PROP_REG_INFO)
3603 EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i,
3604 { REG_LIVE_LENGTH (i)++; });
3606 return prev;
3609 /* Initialize a propagate_block_info struct for public consumption.
3610 Note that the structure itself is opaque outside this file, but that
3611 the user can use the regsets provided here. */
3613 struct propagate_block_info *
3614 init_propagate_block_info (bb, live, local_set, flags)
3615 basic_block bb;
3616 regset live;
3617 regset local_set;
3618 int flags;
3620 struct propagate_block_info *pbi = xmalloc (sizeof (*pbi));
3622 pbi->bb = bb;
3623 pbi->reg_live = live;
3624 pbi->mem_set_list = NULL_RTX;
3625 pbi->local_set = local_set;
3626 pbi->cc0_live = 0;
3627 pbi->flags = flags;
3629 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
3630 pbi->reg_next_use = (rtx *) xcalloc (max_reg_num (), sizeof (rtx));
3631 else
3632 pbi->reg_next_use = NULL;
3634 pbi->new_set = BITMAP_XMALLOC ();
3636 #ifdef HAVE_conditional_execution
3637 pbi->reg_cond_dead = splay_tree_new (splay_tree_compare_ints, NULL,
3638 free_reg_cond_life_info);
3639 pbi->reg_cond_reg = BITMAP_XMALLOC ();
3641 /* If this block ends in a conditional branch, for each register live
3642 from one side of the branch and not the other, record the register
3643 as conditionally dead. */
3644 if ((flags & (PROP_DEATH_NOTES | PROP_SCAN_DEAD_CODE))
3645 && GET_CODE (bb->end) == JUMP_INSN
3646 && any_condjump_p (bb->end))
3648 regset_head diff_head;
3649 regset diff = INITIALIZE_REG_SET (diff_head);
3650 basic_block bb_true, bb_false;
3651 rtx cond_true, cond_false;
3652 int i;
3654 /* Identify the successor blocks. */
3655 bb_true = bb->succ->dest;
3656 if (bb->succ->succ_next != NULL)
3658 bb_false = bb->succ->succ_next->dest;
3660 if (bb->succ->flags & EDGE_FALLTHRU)
3662 basic_block t = bb_false;
3663 bb_false = bb_true;
3664 bb_true = t;
3666 else if (! (bb->succ->succ_next->flags & EDGE_FALLTHRU))
3667 abort ();
3669 else
3671 /* This can happen with a conditional jump to the next insn. */
3672 if (JUMP_LABEL (bb->end) != bb_true->head)
3673 abort ();
3675 /* Simplest way to do nothing. */
3676 bb_false = bb_true;
3679 /* Extract the condition from the branch. */
3680 cond_true = XEXP (SET_SRC (PATTERN (bb->end)), 0);
3681 cond_false = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond_true)),
3682 GET_MODE (cond_true), XEXP (cond_true, 0),
3683 XEXP (cond_true, 1));
3684 if (GET_CODE (XEXP (SET_SRC (PATTERN (bb->end)), 1)) == PC)
3686 rtx t = cond_false;
3687 cond_false = cond_true;
3688 cond_true = t;
3691 /* Compute which registers lead different lives in the successors. */
3692 if (bitmap_operation (diff, bb_true->global_live_at_start,
3693 bb_false->global_live_at_start, BITMAP_XOR))
3695 if (GET_CODE (XEXP (cond_true, 0)) != REG)
3696 abort ();
3697 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond_true, 0)));
3699 /* For each such register, mark it conditionally dead. */
3700 EXECUTE_IF_SET_IN_REG_SET
3701 (diff, 0, i,
3703 struct reg_cond_life_info *rcli;
3704 rtx cond;
3706 rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
3708 if (REGNO_REG_SET_P (bb_true->global_live_at_start, i))
3709 cond = cond_false;
3710 else
3711 cond = cond_true;
3712 rcli->condition = alloc_EXPR_LIST (0, cond, NULL_RTX);
3714 splay_tree_insert (pbi->reg_cond_dead, i,
3715 (splay_tree_value) rcli);
3719 FREE_REG_SET (diff);
3721 #endif
3723 /* If this block has no successors, any stores to the frame that aren't
3724 used later in the block are dead. So make a pass over the block
3725 recording any such that are made and show them dead at the end. We do
3726 a very conservative and simple job here. */
3727 if ((flags & PROP_SCAN_DEAD_CODE)
3728 && (bb->succ == NULL
3729 || (bb->succ->succ_next == NULL
3730 && bb->succ->dest == EXIT_BLOCK_PTR)))
3732 rtx insn;
3733 for (insn = bb->end; insn != bb->head; insn = PREV_INSN (insn))
3734 if (GET_CODE (insn) == INSN
3735 && GET_CODE (PATTERN (insn)) == SET
3736 && GET_CODE (SET_DEST (PATTERN (insn))) == MEM)
3738 rtx mem = SET_DEST (PATTERN (insn));
3740 if (XEXP (mem, 0) == frame_pointer_rtx
3741 || (GET_CODE (XEXP (mem, 0)) == PLUS
3742 && XEXP (XEXP (mem, 0), 0) == frame_pointer_rtx
3743 && GET_CODE (XEXP (XEXP (mem, 0), 1)) == CONST_INT))
3744 pbi->mem_set_list = alloc_EXPR_LIST (0, mem, pbi->mem_set_list);
3748 return pbi;
3751 /* Release a propagate_block_info struct. */
3753 void
3754 free_propagate_block_info (pbi)
3755 struct propagate_block_info *pbi;
3757 free_EXPR_LIST_list (&pbi->mem_set_list);
3759 BITMAP_XFREE (pbi->new_set);
3761 #ifdef HAVE_conditional_execution
3762 splay_tree_delete (pbi->reg_cond_dead);
3763 BITMAP_XFREE (pbi->reg_cond_reg);
3764 #endif
3766 if (pbi->reg_next_use)
3767 free (pbi->reg_next_use);
3769 free (pbi);
3772 /* Compute the registers live at the beginning of a basic block BB from
3773 those live at the end.
3775 When called, REG_LIVE contains those live at the end. On return, it
3776 contains those live at the beginning.
3778 LOCAL_SET, if non-null, will be set with all registers killed by
3779 this basic block. */
3781 void
3782 propagate_block (bb, live, local_set, flags)
3783 basic_block bb;
3784 regset live;
3785 regset local_set;
3786 int flags;
3788 struct propagate_block_info *pbi;
3789 rtx insn, prev;
3791 pbi = init_propagate_block_info (bb, live, local_set, flags);
3793 if (flags & PROP_REG_INFO)
3795 register int i;
3797 /* Process the regs live at the end of the block.
3798 Mark them as not local to any one basic block. */
3799 EXECUTE_IF_SET_IN_REG_SET (live, 0, i,
3800 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
3803 /* Scan the block an insn at a time from end to beginning. */
3805 for (insn = bb->end; ; insn = prev)
3807 /* If this is a call to `setjmp' et al, warn if any
3808 non-volatile datum is live. */
3809 if ((flags & PROP_REG_INFO)
3810 && GET_CODE (insn) == NOTE
3811 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
3812 IOR_REG_SET (regs_live_at_setjmp, pbi->reg_live);
3814 prev = propagate_one_insn (pbi, insn);
3816 if (insn == bb->head)
3817 break;
3820 free_propagate_block_info (pbi);
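/* Editorial sketch (invented names, fenced off from compilation):
   the transfer function conceptually applied at each step of the
   backward scan above, shown on a toy register file small enough
   to fit in one machine word.  */
#if 0
/* Regs live before an insn = (regs live after it, minus those the
   insn sets) plus those the insn uses.  */
static unsigned int
live_before (unsigned int live_after, unsigned int def, unsigned int use)
{
  return (live_after & ~def) | use;
}
#endif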
3823 /* Return 1 if X (the body of an insn, or part of it) is just dead stores
3824 (SET expressions whose destinations are registers dead after the insn).
3825 PBI->REG_LIVE is the regset that says which regs are alive after the insn.
3827 Unless CALL_OK is non-zero, an insn is needed if it contains a CALL.
3829 If X is the entire body of an insn, NOTES contains the reg notes
3830 pertaining to the insn. */
3832 static int
3833 insn_dead_p (pbi, x, call_ok, notes)
3834 struct propagate_block_info *pbi;
3835 rtx x;
3836 int call_ok;
3837 rtx notes ATTRIBUTE_UNUSED;
3839 enum rtx_code code = GET_CODE (x);
3841 #ifdef AUTO_INC_DEC
3842 /* If flow is invoked after reload, we must take existing AUTO_INC
3843 expressions into account. */
3844 if (reload_completed)
3846 for ( ; notes; notes = XEXP (notes, 1))
3848 if (REG_NOTE_KIND (notes) == REG_INC)
3850 int regno = REGNO (XEXP (notes, 0));
3852 /* Don't delete insns to set global regs. */
3853 if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
3854 || REGNO_REG_SET_P (pbi->reg_live, regno))
3855 return 0;
3859 #endif
3861 /* If setting something that's a reg or part of one,
3862 see if that register's altered value will be live. */
3864 if (code == SET)
3866 rtx r = SET_DEST (x);
3868 #ifdef HAVE_cc0
3869 if (GET_CODE (r) == CC0)
3870 return ! pbi->cc0_live;
3871 #endif
3873 /* A SET that is a subroutine call cannot be dead. */
3874 if (GET_CODE (SET_SRC (x)) == CALL)
3876 if (! call_ok)
3877 return 0;
3880 /* Don't eliminate loads from volatile memory or volatile asms. */
3881 else if (volatile_refs_p (SET_SRC (x)))
3882 return 0;
3884 if (GET_CODE (r) == MEM)
3886 rtx temp;
3888 if (MEM_VOLATILE_P (r))
3889 return 0;
3891 /* Walk the set of memory locations we are currently tracking
3892 and see if one is an identical match to this memory location.
3893 If so, this memory write is dead (remember, we're walking
3894 backwards from the end of the block to the start). */
3895 temp = pbi->mem_set_list;
3896 while (temp)
3898 if (rtx_equal_p (XEXP (temp, 0), r))
3899 return 1;
3900 temp = XEXP (temp, 1);
3903 else
3905 while (GET_CODE (r) == SUBREG
3906 || GET_CODE (r) == STRICT_LOW_PART
3907 || GET_CODE (r) == ZERO_EXTRACT)
3908 r = XEXP (r, 0);
3910 if (GET_CODE (r) == REG)
3912 int regno = REGNO (r);
3914 /* Obvious. */
3915 if (REGNO_REG_SET_P (pbi->reg_live, regno))
3916 return 0;
3918 /* If this is a hard register, verify that subsequent
3919 words are not needed. */
3920 if (regno < FIRST_PSEUDO_REGISTER)
3922 int n = HARD_REGNO_NREGS (regno, GET_MODE (r));
3924 while (--n > 0)
3925 if (REGNO_REG_SET_P (pbi->reg_live, regno+n))
3926 return 0;
3929 /* Don't delete insns to set global regs. */
3930 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
3931 return 0;
3933 /* Make sure insns to set the stack pointer aren't deleted. */
3934 if (regno == STACK_POINTER_REGNUM)
3935 return 0;
3937 /* Make sure insns to set the frame pointer aren't deleted. */
3938 if (regno == FRAME_POINTER_REGNUM
3939 && (! reload_completed || frame_pointer_needed))
3940 return 0;
3941 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3942 if (regno == HARD_FRAME_POINTER_REGNUM
3943 && (! reload_completed || frame_pointer_needed))
3944 return 0;
3945 #endif
3947 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3948 /* Make sure insns to set arg pointer are never deleted
3949 (if the arg pointer isn't fixed, there will be a USE
3950 for it, so we can treat it normally). */
3951 if (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
3952 return 0;
3953 #endif
3955 /* Otherwise, the set is dead. */
3956 return 1;
3961 /* If performing several activities, insn is dead if each activity
3962 is individually dead. Also, CLOBBERs and USEs can be ignored; a
3963 CLOBBER or USE that's inside a PARALLEL doesn't make the insn
3964 worth keeping. */
3965 else if (code == PARALLEL)
3967 int i = XVECLEN (x, 0);
3969 for (i--; i >= 0; i--)
3970 if (GET_CODE (XVECEXP (x, 0, i)) != CLOBBER
3971 && GET_CODE (XVECEXP (x, 0, i)) != USE
3972 && ! insn_dead_p (pbi, XVECEXP (x, 0, i), call_ok, NULL_RTX))
3973 return 0;
3975 return 1;
3978 /* A CLOBBER of a pseudo-register that is dead serves no purpose. That
3979 is not necessarily true for hard registers. */
3980 else if (code == CLOBBER && GET_CODE (XEXP (x, 0)) == REG
3981 && REGNO (XEXP (x, 0)) >= FIRST_PSEUDO_REGISTER
3982 && ! REGNO_REG_SET_P (pbi->reg_live, REGNO (XEXP (x, 0))))
3983 return 1;
3985 /* We do not check other CLOBBER or USE here. An insn consisting of just
3986 a CLOBBER or just a USE should not be deleted. */
3987 return 0;
3990 /* If X is the pattern of the last insn in a libcall, and assuming X is dead,
3991 return 1 if the entire library call is dead.
3992 This is true if X copies a register (hard or pseudo)
3993 and if the hard return reg of the call insn is dead.
3994 (The caller should have tested the destination of X already for death.)
3996 If this insn doesn't just copy a register, then we don't
3997 have an ordinary libcall. In that case, cse could not have
3998 managed to substitute the source for the dest later on,
3999 so we can assume the libcall is dead.
4001 PBI->REG_LIVE is the bit vector of pseudoregs live before this insn.
4002 NOTE is the REG_RETVAL note of the insn. INSN is the insn itself. */
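/* For illustration, a hypothetical libcall sequence (insn forms are
   sketched, not exact):

       (insn 10 ... (set (reg:SI 100) ...))                [REG_LIBCALL -> 14]
       (call_insn 12 ... (set (reg:SI 0) (call ...)))
       (insn 14 ... (set (reg:SI 101) (reg:SI 0)))         [REG_RETVAL -> 10]

   If insn 14 is dead, the whole sequence is dead provided the hard
   return register ((reg:SI 0) here) is not live afterward; that is
   what the walk below checks.  */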
4004 static int
4005 libcall_dead_p (pbi, x, note, insn)
4006 struct propagate_block_info *pbi;
4007 rtx x;
4008 rtx note;
4009 rtx insn;
4011 register RTX_CODE code = GET_CODE (x);
4013 if (code == SET)
4015 register rtx r = SET_SRC (x);
4016 if (GET_CODE (r) == REG)
4018 rtx call = XEXP (note, 0);
4019 rtx call_pat;
4020 register int i;
4022 /* Find the call insn. */
4023 while (call != insn && GET_CODE (call) != CALL_INSN)
4024 call = NEXT_INSN (call);
4026 /* If there is none, do nothing special,
4027 since ordinary death handling can understand these insns. */
4028 if (call == insn)
4029 return 0;
4031 /* See if the hard reg holding the value is dead.
4032 If this is a PARALLEL, find the call within it. */
4033 call_pat = PATTERN (call);
4034 if (GET_CODE (call_pat) == PARALLEL)
4036 for (i = XVECLEN (call_pat, 0) - 1; i >= 0; i--)
4037 if (GET_CODE (XVECEXP (call_pat, 0, i)) == SET
4038 && GET_CODE (SET_SRC (XVECEXP (call_pat, 0, i))) == CALL)
4039 break;
4041 /* This may be a library call that is returning a value
4042 via invisible pointer. Do nothing special, since
4043 ordinary death handling can understand these insns. */
4044 if (i < 0)
4045 return 0;
4047 call_pat = XVECEXP (call_pat, 0, i);
4050 return insn_dead_p (pbi, call_pat, 1, REG_NOTES (call));
4053 return 1;
4056 /* Return 1 if register REGNO was used before it was set, i.e. if it is
4057 live at function entry. Don't count global register variables, variables
4058 in registers that can be used for function arg passing, or variables in
4059 fixed hard registers. */
4061 int
4062 regno_uninitialized (regno)
4063 int regno;
4065 if (n_basic_blocks == 0
4066 || (regno < FIRST_PSEUDO_REGISTER
4067 && (global_regs[regno]
4068 || fixed_regs[regno]
4069 || FUNCTION_ARG_REGNO_P (regno))))
4070 return 0;
4072 return REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno);
4075 /* 1 if register REGNO was alive at a place where `setjmp' was called
4076 and was set more than once or is an argument.
4077 Such regs may be clobbered by `longjmp'. */
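/* A minimal C illustration: in

       jmp_buf buf;
       int f (void)
       {
         int i = 1;
         if (setjmp (buf))
           return i;
         i = 2;
         g ();   -- may call longjmp (buf, 1)
         return 0;
       }

   i is set more than once and is live where setjmp is called, so if i
   lives in a register, longjmp may hand back the earlier register
   contents; this predicate flags such registers.  */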
4079 int
4080 regno_clobbered_at_setjmp (regno)
4081 int regno;
4083 if (n_basic_blocks == 0)
4084 return 0;
4086 return ((REG_N_SETS (regno) > 1
4087 || REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno))
4088 && REGNO_REG_SET_P (regs_live_at_setjmp, regno));
4091 /* INSN references memory, possibly using autoincrement addressing modes.
4092 Find any entries on the mem_set_list that need to be invalidated due
4093 to an address change. */
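/* Sketch of the problem (hypothetical RTL): if pbi->mem_set_list
   contains (mem:SI (reg:SI 100)) and INSN carries a REG_INC note for
   (reg:SI 100) -- e.g. it used (post_inc:SI (reg:SI 100)) -- then the
   recorded address no longer refers to the same location, so the entry
   must be dropped rather than used for dead store elimination.  */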
4095 static void
4096 invalidate_mems_from_autoinc (pbi, insn)
4097 struct propagate_block_info *pbi;
4098 rtx insn;
4100 rtx note;
4101 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
4103 if (REG_NOTE_KIND (note) == REG_INC)
4105 rtx temp = pbi->mem_set_list;
4106 rtx prev = NULL_RTX;
4107 rtx next;
4109 while (temp)
4111 next = XEXP (temp, 1);
4112 if (reg_overlap_mentioned_p (XEXP (note, 0), XEXP (temp, 0)))
4114 /* Splice temp out of list. */
4115 if (prev)
4116 XEXP (prev, 1) = next;
4117 else
4118 pbi->mem_set_list = next;
4119 free_EXPR_LIST_node (temp);
4121 else
4122 prev = temp;
4123 temp = next;
4129 /* Process the registers that are set within X; record them in PBI
4130 as dead prior to this insn.
4132 If INSN is nonzero, it is the insn being processed.
4134 The set of operations to perform is taken from PBI->FLAGS. */
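/* The dispatch below handles, for example (illustrative only):

       (set (reg) ...)                          -- plain set
       (clobber (reg))                          -- clobber
       (cond_exec (ne (reg ...) (const_int 0))
                  (set (reg) ...))              -- conditional set
       (parallel [(set ...) (clobber ...)])     -- several at once

   COND_EXEC re-dispatches on its body with the test recorded in COND;
   PARALLEL simply iterates over its elements.  */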
4136 static void
4137 mark_set_regs (pbi, x, insn)
4138 struct propagate_block_info *pbi;
4139 rtx x, insn;
4141 rtx cond = NULL_RTX;
4142 enum rtx_code code;
4144 retry:
4145 switch (code = GET_CODE (x))
4147 case SET:
4148 case CLOBBER:
4149 mark_set_1 (pbi, code, SET_DEST (x), cond, insn, pbi->flags);
4150 return;
4152 case COND_EXEC:
4153 cond = COND_EXEC_TEST (x);
4154 x = COND_EXEC_CODE (x);
4155 goto retry;
4157 case PARALLEL:
4159 register int i;
4160 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
4162 rtx sub = XVECEXP (x, 0, i);
4163 switch (code = GET_CODE (sub))
4165 case COND_EXEC:
4166 if (cond != NULL_RTX)
4167 abort ();
4169 cond = COND_EXEC_TEST (sub);
4170 sub = COND_EXEC_CODE (sub);
4171 if (GET_CODE (sub) != SET && GET_CODE (sub) != CLOBBER)
4172 break;
4173 /* FALLTHRU */
4175 case SET:
4176 case CLOBBER:
4177 mark_set_1 (pbi, code, SET_DEST (sub), cond, insn, pbi->flags);
4178 break;
4180 default:
4181 break;
4184 break;
4187 default:
4188 break;
4192 /* Process a single SET rtx, X. */
4194 static void
4195 mark_set_1 (pbi, code, reg, cond, insn, flags)
4196 struct propagate_block_info *pbi;
4197 enum rtx_code code;
4198 rtx reg, cond, insn;
4199 int flags;
4201 int regno_first = -1, regno_last = -1;
4202 int not_dead = 0;
4203 int i;
4205 /* Some targets place small structures in registers for
4206 return values of functions. We have to detect this
4207 case specially here to get correct flow information. */
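/* Illustrative case: a target returning a small structure might
   describe the value as a BLKmode PARALLEL whose elements pair a hard
   register with a byte offset, e.g. (reg:SI 0) at offset 0 and
   (reg:SI 1) at offset 4 (hypothetical registers).  Each element is
   handed back to mark_set_1 individually below.  */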
4208 if (GET_CODE (reg) == PARALLEL
4209 && GET_MODE (reg) == BLKmode)
4211 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
4212 mark_set_1 (pbi, code, XVECEXP (reg, 0, i), cond, insn, flags);
4213 return;
4216 /* Modifying just one hardware register of a multi-reg value or just a
4217 byte field of a register does not mean the value from before this insn
4218 is now dead. Of course, if it was dead after, it's unused now. */
4220 switch (GET_CODE (reg))
4222 case ZERO_EXTRACT:
4223 case SIGN_EXTRACT:
4224 case STRICT_LOW_PART:
4225 /* ??? Assumes STRICT_LOW_PART not used on multi-word registers. */
4226 do
4227 reg = XEXP (reg, 0);
4228 while (GET_CODE (reg) == SUBREG
4229 || GET_CODE (reg) == ZERO_EXTRACT
4230 || GET_CODE (reg) == SIGN_EXTRACT
4231 || GET_CODE (reg) == STRICT_LOW_PART);
4232 if (GET_CODE (reg) == MEM)
4233 break;
4234 not_dead = REGNO_REG_SET_P (pbi->reg_live, REGNO (reg));
4235 /* FALLTHRU */
4237 case REG:
4238 regno_last = regno_first = REGNO (reg);
4239 if (regno_first < FIRST_PSEUDO_REGISTER)
4240 regno_last += HARD_REGNO_NREGS (regno_first, GET_MODE (reg)) - 1;
4241 break;
4243 case SUBREG:
4244 if (GET_CODE (SUBREG_REG (reg)) == REG)
4246 enum machine_mode outer_mode = GET_MODE (reg);
4247 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (reg));
4249 /* Identify the range of registers affected. This is moderately
4250 tricky for hard registers. See alter_subreg. */
4252 regno_last = regno_first = REGNO (SUBREG_REG (reg));
4253 if (regno_first < FIRST_PSEUDO_REGISTER)
4255 #ifdef ALTER_HARD_SUBREG
4256 regno_first = ALTER_HARD_SUBREG (outer_mode, SUBREG_WORD (reg),
4257 inner_mode, regno_first);
4258 #else
4259 regno_first += SUBREG_WORD (reg);
4260 #endif
4261 regno_last = (regno_first
4262 + HARD_REGNO_NREGS (regno_first, outer_mode) - 1);
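/* Worked example (hypothetical 32-bit target with word-sized hard
   regs): for (subreg:SI (reg:DI 2) 1), SUBREG_WORD is 1, so
   regno_first becomes 2 + 1 = 3 and regno_last is
   3 + HARD_REGNO_NREGS (3, SImode) - 1 = 3: only the hard register
   holding the high word is being set.  */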
4264 /* Since we've just adjusted the register number ranges, make
4265 sure REG matches. Otherwise some_was_live will be clear
4266 when it shouldn't have been, and we'll create incorrect
4267 REG_UNUSED notes. */
4268 reg = gen_rtx_REG (outer_mode, regno_first);
4270 else
4272 /* If the number of words in the subreg is less than the number
4273 of words in the full register, we have a well-defined partial
4274 set. Otherwise the high bits are undefined.
4276 This is only really applicable to pseudos, since we just took
4277 care of multi-word hard registers. */
4278 if (((GET_MODE_SIZE (outer_mode)
4279 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
4280 < ((GET_MODE_SIZE (inner_mode)
4281 + UNITS_PER_WORD - 1) / UNITS_PER_WORD))
4282 not_dead = REGNO_REG_SET_P (pbi->reg_live, regno_first);
4284 reg = SUBREG_REG (reg);
4287 else
4288 reg = SUBREG_REG (reg);
4289 break;
4291 default:
4292 break;
4295 /* If this set is a MEM, then it kills any aliased writes.
4296 If this set is a REG, then it kills any MEMs which use the reg. */
4297 if (flags & PROP_SCAN_DEAD_CODE)
4299 if (GET_CODE (reg) == MEM || GET_CODE (reg) == REG)
4301 rtx temp = pbi->mem_set_list;
4302 rtx prev = NULL_RTX;
4303 rtx next;
4305 while (temp)
4307 next = XEXP (temp, 1);
4308 if ((GET_CODE (reg) == MEM
4309 && output_dependence (XEXP (temp, 0), reg))
4310 || (GET_CODE (reg) == REG
4311 && reg_overlap_mentioned_p (reg, XEXP (temp, 0))))
4313 /* Splice this entry out of the list. */
4314 if (prev)
4315 XEXP (prev, 1) = next;
4316 else
4317 pbi->mem_set_list = next;
4318 free_EXPR_LIST_node (temp);
4320 else
4321 prev = temp;
4322 temp = next;
4326 /* If the memory reference had embedded side effects (autoincrement
4327 address modes), then we may need to kill some entries on the
4328 memory set list. */
4329 if (insn && GET_CODE (reg) == MEM)
4330 invalidate_mems_from_autoinc (pbi, insn);
4332 if (GET_CODE (reg) == MEM && ! side_effects_p (reg)
4333 /* ??? With more effort we could track conditional memory life. */
4334 && ! cond
4335 /* We do not know the size of a BLKmode store, so we do not track
4336 them for redundant store elimination. */
4337 && GET_MODE (reg) != BLKmode
4338 /* There are no REG_INC notes for SP, so we can't assume we'll see
4339 everything that invalidates it. To be safe, don't eliminate any
4340 stores through SP; none of them should be redundant anyway. */
4341 && ! reg_mentioned_p (stack_pointer_rtx, reg))
4342 pbi->mem_set_list = alloc_EXPR_LIST (0, reg, pbi->mem_set_list);
4345 if (GET_CODE (reg) == REG
4346 && ! (regno_first == FRAME_POINTER_REGNUM
4347 && (! reload_completed || frame_pointer_needed))
4348 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4349 && ! (regno_first == HARD_FRAME_POINTER_REGNUM
4350 && (! reload_completed || frame_pointer_needed))
4351 #endif
4352 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4353 && ! (regno_first == ARG_POINTER_REGNUM && fixed_regs[regno_first])
4354 #endif
4357 int some_was_live = 0, some_was_dead = 0;
4359 for (i = regno_first; i <= regno_last; ++i)
4361 int needed_regno = REGNO_REG_SET_P (pbi->reg_live, i);
4362 if (pbi->local_set)
4363 SET_REGNO_REG_SET (pbi->local_set, i);
4364 if (code != CLOBBER)
4365 SET_REGNO_REG_SET (pbi->new_set, i);
4367 some_was_live |= needed_regno;
4368 some_was_dead |= ! needed_regno;
4371 #ifdef HAVE_conditional_execution
4372 /* Consider conditional death in deciding that the register needs
4373 a death note. */
4374 if (some_was_live && ! not_dead
4375 /* The stack pointer is never dead. Well, not strictly true,
4376 but it's very difficult to tell from here. Hopefully
4377 combine_stack_adjustments will fix up the most egregious
4378 errors. */
4379 && regno_first != STACK_POINTER_REGNUM)
4381 for (i = regno_first; i <= regno_last; ++i)
4382 if (! mark_regno_cond_dead (pbi, i, cond))
4383 not_dead = 1;
4385 #endif
4387 /* Additional data to record if this is the final pass. */
4388 if (flags & (PROP_LOG_LINKS | PROP_REG_INFO
4389 | PROP_DEATH_NOTES | PROP_AUTOINC))
4391 register rtx y;
4392 register int blocknum = pbi->bb->index;
4394 y = NULL_RTX;
4395 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4397 y = pbi->reg_next_use[regno_first];
4399 /* The next use is no longer next, since a store intervenes. */
4400 for (i = regno_first; i <= regno_last; ++i)
4401 pbi->reg_next_use[i] = 0;
4404 if (flags & PROP_REG_INFO)
4406 for (i = regno_first; i <= regno_last; ++i)
4408 /* Count (weighted) references, stores, etc. This counts a
4409 register twice if it is modified, but that is correct. */
4410 REG_N_SETS (i) += 1;
4411 REG_N_REFS (i) += (optimize_size ? 1
4412 : pbi->bb->loop_depth + 1);
4414 /* The insns where a reg is live are normally counted
4415 elsewhere, but we want the count to include the insn
4416 where the reg is set, and the normal counting mechanism
4417 would not count it. */
4418 REG_LIVE_LENGTH (i) += 1;
4421 /* If this is a hard reg, record this function uses the reg. */
4422 if (regno_first < FIRST_PSEUDO_REGISTER)
4424 for (i = regno_first; i <= regno_last; i++)
4425 regs_ever_live[i] = 1;
4427 else
4429 /* Keep track of which basic blocks each reg appears in. */
4430 if (REG_BASIC_BLOCK (regno_first) == REG_BLOCK_UNKNOWN)
4431 REG_BASIC_BLOCK (regno_first) = blocknum;
4432 else if (REG_BASIC_BLOCK (regno_first) != blocknum)
4433 REG_BASIC_BLOCK (regno_first) = REG_BLOCK_GLOBAL;
4437 if (! some_was_dead)
4439 if (flags & PROP_LOG_LINKS)
4441 /* Make a logical link from the next following insn
4442 that uses this register, back to this insn.
4443 The following insns have already been processed.
4445 We don't build a LOG_LINK for hard registers contained
4446 in ASM_OPERANDs. If these registers get replaced,
4447 we might wind up changing the semantics of the insn,
4448 even if reload can make what appear to be valid
4449 assignments later. */
4450 if (y && (BLOCK_NUM (y) == blocknum)
4451 && (regno_first >= FIRST_PSEUDO_REGISTER
4452 || asm_noperands (PATTERN (y)) < 0))
4453 LOG_LINKS (y) = alloc_INSN_LIST (insn, LOG_LINKS (y));
4456 else if (not_dead)
4457 ;
4458 else if (! some_was_live)
4460 if (flags & PROP_REG_INFO)
4461 REG_N_DEATHS (regno_first) += 1;
4463 if (flags & PROP_DEATH_NOTES)
4465 /* Note that dead stores have already been deleted
4466 when possible. If we get here, we have found a
4467 dead store that cannot be eliminated (because the
4468 same insn does something useful). Indicate this
4469 by marking the reg being set as dying here. */
4470 REG_NOTES (insn)
4471 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
4474 else
4476 if (flags & PROP_DEATH_NOTES)
4478 /* This is a case where we have a multi-word hard register
4479 and some, but not all, of the words of the register are
4480 needed in subsequent insns. Write REG_UNUSED notes
4481 for those parts that were not needed. This case should
4482 be rare. */
4484 for (i = regno_first; i <= regno_last; ++i)
4485 if (! REGNO_REG_SET_P (pbi->reg_live, i))
4486 REG_NOTES (insn)
4487 = alloc_EXPR_LIST (REG_UNUSED,
4488 gen_rtx_REG (reg_raw_mode[i], i),
4489 REG_NOTES (insn));
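/* E.g. (hypothetical): if (reg:DI 2) occupies hard regs 2 and 3 and
   only reg 3 is needed in later insns, the loop above adds a
   REG_UNUSED note for (reg:SI 2) alone.  */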
4494 /* Mark the register as being dead. */
4495 if (some_was_live
4496 && ! not_dead
4497 /* The stack pointer is never dead. Well, not strictly true,
4498 but it's very difficult to tell from here. Hopefully
4499 combine_stack_adjustments will fix up the most egregious
4500 errors. */
4501 && regno_first != STACK_POINTER_REGNUM)
4503 for (i = regno_first; i <= regno_last; ++i)
4504 CLEAR_REGNO_REG_SET (pbi->reg_live, i);
4507 else if (GET_CODE (reg) == REG)
4509 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4510 pbi->reg_next_use[regno_first] = 0;
4513 /* If this is the last pass and this is a SCRATCH, show it will be dying
4514 here and count it. */
4515 else if (GET_CODE (reg) == SCRATCH)
4517 if (flags & PROP_DEATH_NOTES)
4518 REG_NOTES (insn)
4519 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
4523 #ifdef HAVE_conditional_execution
4524 /* Mark REGNO conditionally dead. Return true if the register is
4525 now unconditionally dead. */
4527 static int
4528 mark_regno_cond_dead (pbi, regno, cond)
4529 struct propagate_block_info *pbi;
4530 int regno;
4531 rtx cond;
4533 /* If this is a store to a predicate register, the value of the
4534 predicate is changing; we don't know that the predicate as seen
4535 before is the same as that seen after. Flush all dependent
4536 conditions from reg_cond_dead. This will make all such
4537 conditionally live registers unconditionally live. */
4538 if (REGNO_REG_SET_P (pbi->reg_cond_reg, regno))
4539 flush_reg_cond_reg (pbi, regno);
4541 /* If this is an unconditional store, remove any conditional
4542 life that may have existed. */
4543 if (cond == NULL_RTX)
4544 splay_tree_remove (pbi->reg_cond_dead, regno);
4545 else
4547 splay_tree_node node;
4548 struct reg_cond_life_info *rcli;
4549 rtx ncond;
4551 /* Otherwise this is a conditional set. Record that fact.
4552 It may have been conditionally used, or there may be a
4553 subsequent set with a complementary condition. */
4555 node = splay_tree_lookup (pbi->reg_cond_dead, regno);
4556 if (node == NULL)
4558 /* The register was unconditionally live previously.
4559 Record the current condition as the condition under
4560 which it is dead. */
4561 rcli = (struct reg_cond_life_info *)
4562 xmalloc (sizeof (*rcli));
4563 rcli->condition = alloc_EXPR_LIST (0, cond, NULL_RTX);
4564 splay_tree_insert (pbi->reg_cond_dead, regno,
4565 (splay_tree_value) rcli);
4567 SET_REGNO_REG_SET (pbi->reg_cond_reg,
4568 REGNO (XEXP (cond, 0)));
4570 /* Not unconditionally dead. */
4571 return 0;
4573 else
4575 /* The register was conditionally live previously.
4576 Add the new condition to the old. */
4577 rcli = (struct reg_cond_life_info *) node->value;
4578 ncond = rcli->condition;
4579 ncond = ior_reg_cond (ncond, cond);
4581 /* If the register is now unconditionally dead,
4582 remove the entry in the splay_tree. */
4583 if (ncond == const1_rtx)
4584 splay_tree_remove (pbi->reg_cond_dead, regno);
4585 else
4587 rcli->condition = ncond;
4589 SET_REGNO_REG_SET (pbi->reg_cond_reg,
4590 REGNO (XEXP (cond, 0)));
4592 /* Not unconditionally dead. */
4593 return 0;
4598 return 1;
4601 /* Called from splay_tree_delete for pbi->reg_cond_dead. */
4603 static void
4604 free_reg_cond_life_info (value)
4605 splay_tree_value value;
4607 struct reg_cond_life_info *rcli = (struct reg_cond_life_info *) value;
4608 free_EXPR_LIST_list (&rcli->condition);
4609 free (rcli);
4612 /* Helper function for flush_reg_cond_reg. */
4614 static int
4615 flush_reg_cond_reg_1 (node, data)
4616 splay_tree_node node;
4617 void *data;
4619 struct reg_cond_life_info *rcli;
4620 int *xdata = (int *) data;
4621 unsigned int regno = xdata[0];
4622 rtx c, *prev;
4624 /* Don't need to search if last flushed value was farther on in
4625 the in-order traversal. */
4626 if (xdata[1] >= (int) node->key)
4627 return 0;
4629 /* Splice out portions of the expression that refer to regno. */
4630 rcli = (struct reg_cond_life_info *) node->value;
4631 c = *(prev = &rcli->condition);
4632 while (c)
4634 if (regno == REGNO (XEXP (XEXP (c, 0), 0)))
4636 rtx next = XEXP (c, 1);
4637 free_EXPR_LIST_node (c);
4638 c = *prev = next;
4640 else
4641 c = *(prev = &XEXP (c, 1));
4644 /* If the entire condition is now NULL, signal the node to be removed. */
4645 if (! rcli->condition)
4647 xdata[1] = node->key;
4648 return -1;
4650 else
4651 return 0;
4654 /* Flush all (sub) expressions referring to REGNO from REG_COND_LIVE. */
4656 static void
4657 flush_reg_cond_reg (pbi, regno)
4658 struct propagate_block_info *pbi;
4659 int regno;
4661 int pair[2];
4663 pair[0] = regno;
4664 pair[1] = -1;
4665 while (splay_tree_foreach (pbi->reg_cond_dead,
4666 flush_reg_cond_reg_1, pair) == -1)
4667 splay_tree_remove (pbi->reg_cond_dead, pair[1]);
4669 CLEAR_REGNO_REG_SET (pbi->reg_cond_reg, regno);
4672 /* Logical arithmetic on predicate conditions. IOR, NOT and NAND.
4673 We actually use EXPR_LIST to chain the sub-expressions together
4674 instead of IOR because it's easier to manipulate and we have
4675 the lists.c functions to reuse nodes.
4677 Return a new rtl expression as appropriate. */
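/* Sketch (hypothetical predicate register p64): the death condition
   "(eq p64 0) or (ne p64 0)" is kept as the EXPR_LIST chain

       (expr_list (eq (reg p64) (const_int 0))
                  (expr_list (ne (reg p64) (const_int 0)) nil))

   and since the two elements are complementary, ior_reg_cond folds
   the whole condition to const1_rtx (always true).  */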
4679 static rtx
4680 ior_reg_cond (old, x)
4681 rtx old, x;
4683 enum rtx_code x_code;
4684 rtx x_reg;
4685 rtx c;
4687 /* We expect these conditions to be of the form (eq reg 0). */
4688 x_code = GET_CODE (x);
4689 if (GET_RTX_CLASS (x_code) != '<'
4690 || GET_CODE (x_reg = XEXP (x, 0)) != REG
4691 || XEXP (x, 1) != const0_rtx)
4692 abort ();
4694 /* Search the expression for an existing sub-expression of X_REG. */
4695 for (c = old; c ; c = XEXP (c, 1))
4697 rtx y = XEXP (c, 0);
4698 if (REGNO (XEXP (y, 0)) == REGNO (x_reg))
4700 /* If we find X already present in OLD, we need do nothing. */
4701 if (GET_CODE (y) == x_code)
4702 return old;
4704 /* If we find X being a complement of a condition in OLD,
4705 then the entire condition is true. */
4706 if (GET_CODE (y) == reverse_condition (x_code))
4707 return const1_rtx;
4711 /* Otherwise just add to the chain. */
4712 return alloc_EXPR_LIST (0, x, old);
4715 static rtx
4716 not_reg_cond (x)
4717 rtx x;
4719 enum rtx_code x_code;
4720 rtx x_reg;
4722 /* We expect these conditions to be of the form (eq reg 0). */
4723 x_code = GET_CODE (x);
4724 if (GET_RTX_CLASS (x_code) != '<'
4725 || GET_CODE (x_reg = XEXP (x, 0)) != REG
4726 || XEXP (x, 1) != const0_rtx)
4727 abort ();
4729 return alloc_EXPR_LIST (0, gen_rtx_fmt_ee (reverse_condition (x_code),
4730 VOIDmode, x_reg, const0_rtx),
4731 NULL_RTX);
4734 static rtx
4735 nand_reg_cond (old, x)
4736 rtx old, x;
4738 enum rtx_code x_code;
4739 rtx x_reg;
4740 rtx c, *prev;
4742 /* We expect these conditions to be of the form (eq reg 0). */
4743 x_code = GET_CODE (x);
4744 if (GET_RTX_CLASS (x_code) != '<'
4745 || GET_CODE (x_reg = XEXP (x, 0)) != REG
4746 || XEXP (x, 1) != const0_rtx)
4747 abort ();
4749 /* Search the expression for an existing sub-expression of X_REG. */
4751 for (c = *(prev = &old); c ; c = *(prev = &XEXP (c, 1)))
4753 rtx y = XEXP (c, 0);
4754 if (REGNO (XEXP (y, 0)) == REGNO (x_reg))
4756 /* If we find X already present in OLD, then we need to
4757 splice it out. */
4758 if (GET_CODE (y) == x_code)
4760 *prev = XEXP (c, 1);
4761 free_EXPR_LIST_node (c);
4762 return old ? old : const0_rtx;
4765 /* If we find X being a complement of a condition in OLD,
4766 then we need do nothing. */
4767 if (GET_CODE (y) == reverse_condition (x_code))
4768 return old;
4772 /* Otherwise, by implication, the register in question is now live for
4773 the inverse of the condition X. */
4774 return alloc_EXPR_LIST (0, gen_rtx_fmt_ee (reverse_condition (x_code),
4775 VOIDmode, x_reg, const0_rtx),
4776 old);
4778 #endif /* HAVE_conditional_execution */
4780 #ifdef AUTO_INC_DEC
4782 /* X is a MEM found in INSN. See if we can convert it into an auto-increment
4783 reference. */
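/* The transformation aimed for, in sketch form (hypothetical RTL on a
   target with HAVE_POST_INCREMENT):

       (set ... (mem:SI (reg:SI 100)))                     -- *p
       ...
       (set (reg:SI 100) (plus:SI (reg:SI 100) (const_int 4)))

   becomes

       (set ... (mem:SI (post_inc:SI (reg:SI 100))))       -- *p++

   with the increment insn deleted (or turned into a plain copy when
   the increment targets a different register).  */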
4785 static void
4786 find_auto_inc (pbi, x, insn)
4787 struct propagate_block_info *pbi;
4788 rtx x;
4789 rtx insn;
4791 rtx addr = XEXP (x, 0);
4792 HOST_WIDE_INT offset = 0;
4793 rtx set;
4795 /* Here we detect use of an index register which might be good for
4796 postincrement, postdecrement, preincrement, or predecrement. */
4798 if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
4799 offset = INTVAL (XEXP (addr, 1)), addr = XEXP (addr, 0);
4801 if (GET_CODE (addr) == REG)
4803 register rtx y;
4804 register int size = GET_MODE_SIZE (GET_MODE (x));
4805 rtx use;
4806 rtx incr;
4807 int regno = REGNO (addr);
4809 /* Is the next use an increment that might allow an auto-increment? */
4810 if ((incr = pbi->reg_next_use[regno]) != 0
4811 && (set = single_set (incr)) != 0
4812 && GET_CODE (set) == SET
4813 && BLOCK_NUM (incr) == BLOCK_NUM (insn)
4814 /* Can't add side effects to jumps; if reg is spilled and
4815 reloaded, there's no way to store back the altered value. */
4816 && GET_CODE (insn) != JUMP_INSN
4817 && (y = SET_SRC (set), GET_CODE (y) == PLUS)
4818 && XEXP (y, 0) == addr
4819 && GET_CODE (XEXP (y, 1)) == CONST_INT
4820 && ((HAVE_POST_INCREMENT
4821 && (INTVAL (XEXP (y, 1)) == size && offset == 0))
4822 || (HAVE_POST_DECREMENT
4823 && (INTVAL (XEXP (y, 1)) == - size && offset == 0))
4824 || (HAVE_PRE_INCREMENT
4825 && (INTVAL (XEXP (y, 1)) == size && offset == size))
4826 || (HAVE_PRE_DECREMENT
4827 && (INTVAL (XEXP (y, 1)) == - size && offset == - size)))
4828 /* Make sure this reg appears only once in this insn. */
4829 && (use = find_use_as_address (PATTERN (insn), addr, offset),
4830 use != 0 && use != (rtx) 1))
4832 rtx q = SET_DEST (set);
4833 enum rtx_code inc_code = (INTVAL (XEXP (y, 1)) == size
4834 ? (offset ? PRE_INC : POST_INC)
4835 : (offset ? PRE_DEC : POST_DEC));
4837 if (dead_or_set_p (incr, addr)
4838 /* Mustn't autoinc an eliminable register. */
4839 && (regno >= FIRST_PSEUDO_REGISTER
4840 || ! TEST_HARD_REG_BIT (elim_reg_set, regno)))
4842 /* This is the simple case. Try to make the auto-inc. If
4843 we can't, we are done. Otherwise, we will do any
4844 needed updates below. */
4845 if (! validate_change (insn, &XEXP (x, 0),
4846 gen_rtx_fmt_e (inc_code, Pmode, addr),
4847 0))
4848 return;
4850 else if (GET_CODE (q) == REG
4851 /* PREV_INSN used here to check the semi-open interval
4852 [insn,incr). */
4853 && ! reg_used_between_p (q, PREV_INSN (insn), incr)
4854 /* We must also check for sets of q as q may be
4855 a call clobbered hard register and there may
4856 be a call between PREV_INSN (insn) and incr. */
4857 && ! reg_set_between_p (q, PREV_INSN (insn), incr))
4859 /* We have *p followed sometime later by q = p+size.
4860 Both p and q must be live afterward,
4861 and q is not used between INSN and its assignment.
4862 Change it to q = p, ...*q..., q = q+size.
4863 Then fall into the usual case. */
4864 rtx insns, temp;
4866 start_sequence ();
4867 emit_move_insn (q, addr);
4868 insns = get_insns ();
4869 end_sequence ();
4871 if (basic_block_for_insn)
4872 for (temp = insns; temp; temp = NEXT_INSN (temp))
4873 set_block_for_insn (temp, pbi->bb);
4875 /* If we can't make the auto-inc, or can't make the
4876 replacement into Y, exit. There's no point in making
4877 the change below if we can't do the auto-inc and doing
4878 so is not correct in the pre-inc case. */
4880 validate_change (insn, &XEXP (x, 0),
4881 gen_rtx_fmt_e (inc_code, Pmode, q),
4882 1);
4883 validate_change (incr, &XEXP (y, 0), q, 1);
4884 if (! apply_change_group ())
4885 return;
4887 /* We now know we'll be doing this change, so emit the
4888 new insn(s) and do the updates. */
4889 emit_insns_before (insns, insn);
4891 if (pbi->bb->head == insn)
4892 pbi->bb->head = insns;
4894 /* INCR will become a NOTE and INSN won't contain a
4895 use of ADDR. If a use of ADDR was just placed in
4896 the insn before INSN, make that the next use.
4897 Otherwise, invalidate it. */
4898 if (GET_CODE (PREV_INSN (insn)) == INSN
4899 && GET_CODE (PATTERN (PREV_INSN (insn))) == SET
4900 && SET_SRC (PATTERN (PREV_INSN (insn))) == addr)
4901 pbi->reg_next_use[regno] = PREV_INSN (insn);
4902 else
4903 pbi->reg_next_use[regno] = 0;
4905 addr = q;
4906 regno = REGNO (q);
4908 /* REGNO is now used in INCR which is below INSN, but it
4909 previously wasn't live here. If we don't mark it as
4910 live, we'll put a REG_DEAD note for it on this insn,
4911 which is incorrect. */
4912 SET_REGNO_REG_SET (pbi->reg_live, regno);
4914 /* If there are any calls between INSN and INCR, show
4915 that REGNO now crosses them. */
4916 for (temp = insn; temp != incr; temp = NEXT_INSN (temp))
4917 if (GET_CODE (temp) == CALL_INSN)
4918 REG_N_CALLS_CROSSED (regno)++;
4920 else
4921 return;
4923 /* If we haven't returned, it means we were able to make the
4924 auto-inc, so update the status. First, record that this insn
4925 has an implicit side effect. */
4927 REG_NOTES (insn)
4928 = alloc_EXPR_LIST (REG_INC, addr, REG_NOTES (insn));
4930 /* Modify the old increment-insn to simply copy
4931 the already-incremented value of our register. */
4932 if (! validate_change (incr, &SET_SRC (set), addr, 0))
4933 abort ();
4935 /* If that makes it a no-op (copying the register into itself), delete
4936 it so it won't appear to be a "use" and a "set" of this
4937 register. */
4938 if (SET_DEST (set) == addr)
4940 /* If the original source was dead, it's dead now. */
4941 rtx note = find_reg_note (incr, REG_DEAD, NULL_RTX);
4942 if (note && XEXP (note, 0) != addr)
4943 CLEAR_REGNO_REG_SET (pbi->reg_live, REGNO (XEXP (note, 0)));
4945 PUT_CODE (incr, NOTE);
4946 NOTE_LINE_NUMBER (incr) = NOTE_INSN_DELETED;
4947 NOTE_SOURCE_FILE (incr) = 0;
4950 if (regno >= FIRST_PSEUDO_REGISTER)
4952 /* Count an extra reference to the reg. When a reg is
4953 incremented, spilling it is worse, so we want to make
4954 that less likely. */
4955 REG_N_REFS (regno) += (optimize_size ? 1
4956 : pbi->bb->loop_depth + 1);
4958 /* Count the increment as a setting of the register,
4959 even though it isn't a SET in rtl. */
4960 REG_N_SETS (regno)++;
4965 #endif /* AUTO_INC_DEC */
4967 static void
4968 mark_used_reg (pbi, reg, cond, insn)
4969 struct propagate_block_info *pbi;
4970 rtx reg;
4971 rtx cond ATTRIBUTE_UNUSED;
4972 rtx insn;
4974 int regno = REGNO (reg);
4975 int some_was_live = REGNO_REG_SET_P (pbi->reg_live, regno);
4976 int some_was_dead = ! some_was_live;
4977 int some_not_set;
4978 int n;
4980 /* A hard reg in a wide mode may really be multiple registers.
4981 If so, mark all of them just like the first. */
4982 if (regno < FIRST_PSEUDO_REGISTER)
4984 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
4985 while (--n > 0)
4987 int needed_regno = REGNO_REG_SET_P (pbi->reg_live, regno + n);
4988 some_was_live |= needed_regno;
4989 some_was_dead |= ! needed_regno;
4993 if (pbi->flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4995 /* Record where each reg is used, so when the reg is set we know
4996 the next insn that uses it. */
4997 pbi->reg_next_use[regno] = insn;
5000 if (pbi->flags & PROP_REG_INFO)
5002 if (regno < FIRST_PSEUDO_REGISTER)
5004 /* If this is a register we are going to try to eliminate,
5005 don't mark it live here. If we are successful in
5006 eliminating it, it need not be live unless it is used for
5007 pseudos, in which case it will have been set live when it
5008 was allocated to the pseudos. If the register will not
5009 be eliminated, reload will set it live at that point.
5011 Otherwise, record that this function uses this register. */
5012 /* ??? The PPC backend tries to "eliminate" on the pic
5013 register to itself. This should be fixed. In the mean
5014 time, hack around it. */
5016 if (! (TEST_HARD_REG_BIT (elim_reg_set, regno)
5017 && (regno == FRAME_POINTER_REGNUM
5018 || regno == ARG_POINTER_REGNUM)))
5020 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
5021 do
5022 regs_ever_live[regno + --n] = 1;
5023 while (n > 0);
5026 else
5028 /* Keep track of which basic block each reg appears in. */
5030 register int blocknum = pbi->bb->index;
5031 if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
5032 REG_BASIC_BLOCK (regno) = blocknum;
5033 else if (REG_BASIC_BLOCK (regno) != blocknum)
5034 REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
5036 /* Count (weighted) number of uses of each reg. */
5037 REG_N_REFS (regno) += (optimize_size ? 1
5038 : pbi->bb->loop_depth + 1);
5042 /* Find out if any part of the register was set in this insn. */
5043 some_not_set = ! REGNO_REG_SET_P (pbi->new_set, regno);
5044 if (regno < FIRST_PSEUDO_REGISTER)
5046 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
5047 while (--n > 0)
5048 some_not_set |= ! REGNO_REG_SET_P (pbi->new_set, regno + n);
5051 /* Record and count the insns in which a reg dies. If it is used in
5052 this insn and was dead below the insn then it dies in this insn.
5053 If it was set in this insn, we do not make a REG_DEAD note;
5054 likewise if we already made such a note. */
5055 if ((pbi->flags & (PROP_DEATH_NOTES | PROP_REG_INFO))
5056 && some_was_dead
5057 && some_not_set)
5059 /* Check for the case where the register dying partially
5060 overlaps the register set by this insn. */
5061 if (regno < FIRST_PSEUDO_REGISTER
5062 && HARD_REGNO_NREGS (regno, GET_MODE (reg)) > 1)
5064 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
5065 while (--n >= 0)
5066 some_was_live |= REGNO_REG_SET_P (pbi->new_set, regno + n);
5069 /* If none of the words in X is needed, make a REG_DEAD note.
5070 Otherwise, we must make partial REG_DEAD notes. */
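/* E.g. (hypothetical): if (reg:DI 4) spans hard regs 4 and 5, this
   insn uses it, reg 4 dies here but reg 5 is set by this same insn,
   then a partial REG_DEAD note is made for (reg:SI 4) only.  */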
5071 if (! some_was_live)
5073 if ((pbi->flags & PROP_DEATH_NOTES)
5074 && ! find_regno_note (insn, REG_DEAD, regno))
5075 REG_NOTES (insn)
5076 = alloc_EXPR_LIST (REG_DEAD, reg, REG_NOTES (insn));
5078 if (pbi->flags & PROP_REG_INFO)
5079 REG_N_DEATHS (regno)++;
5081 else
5083 /* Don't make a REG_DEAD note for a part of a register
5084 that is set in the insn. */
5086 n = regno + HARD_REGNO_NREGS (regno, GET_MODE (reg)) - 1;
5087 for (; n >= regno; n--)
5088 if (! REGNO_REG_SET_P (pbi->reg_live, n)
5089 && ! dead_or_set_regno_p (insn, n))
5090 REG_NOTES (insn)
5091 = alloc_EXPR_LIST (REG_DEAD,
5092 gen_rtx_REG (reg_raw_mode[n], n),
5093 REG_NOTES (insn));
5097 SET_REGNO_REG_SET (pbi->reg_live, regno);
5098 if (regno < FIRST_PSEUDO_REGISTER)
5100 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
5101 while (--n > 0)
5102 SET_REGNO_REG_SET (pbi->reg_live, regno + n);
5105 #ifdef HAVE_conditional_execution
5106 /* If this is a conditional use, record that fact. If it is later
5107 conditionally set, we'll know to kill the register. */
5108 if (cond != NULL_RTX)
5110 splay_tree_node node;
5111 struct reg_cond_life_info *rcli;
5112 rtx ncond;
5114 if (some_was_live)
5116 node = splay_tree_lookup (pbi->reg_cond_dead, regno);
5117 if (node == NULL)
5119 /* The register was unconditionally live previously.
5120 No need to do anything. */
5122 else
5124 /* The register was conditionally live previously.
5125 Subtract the new life cond from the old death cond. */
5126 rcli = (struct reg_cond_life_info *) node->value;
5127 ncond = rcli->condition;
5128 ncond = nand_reg_cond (ncond, cond);
5130 /* If the register is now unconditionally live, remove the
5131 entry in the splay_tree. */
5132 if (ncond == const0_rtx)
5134 rcli->condition = NULL_RTX;
5135 splay_tree_remove (pbi->reg_cond_dead, regno);
5137 else
5138 rcli->condition = ncond;
5141 else
5143 /* The register was not previously live at all. Record
5144 the condition under which it is still dead. */
5145 rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
5146 rcli->condition = not_reg_cond (cond);
5147 splay_tree_insert (pbi->reg_cond_dead, regno,
5148 (splay_tree_value) rcli);
5151 else if (some_was_live)
5153 splay_tree_node node;
5154 struct reg_cond_life_info *rcli;
5156 node = splay_tree_lookup (pbi->reg_cond_dead, regno);
5157 if (node != NULL)
5159 /* The register was conditionally live previously, but is now
5160 unconditionally so. Remove it from the conditionally dead
5161 list, so that a conditional set won't cause us to think
5162 it dead. */
5163 rcli = (struct reg_cond_life_info *) node->value;
5164 rcli->condition = NULL_RTX;
5165 splay_tree_remove (pbi->reg_cond_dead, regno);
5169 #endif
5172 /* Scan expression X and store a 1-bit in PBI->REG_LIVE for each reg it uses.
5173 This is done assuming the registers needed from X are those that
5174 have 1-bits in PBI->REG_LIVE.
5176 INSN is the containing instruction. If INSN is dead, this function
5177 is not called. */
5179 static void
5180 mark_used_regs (pbi, x, cond, insn)
5181 struct propagate_block_info *pbi;
5182 rtx x, cond, insn;
5184 register RTX_CODE code;
5185 register int regno;
5186 int flags = pbi->flags;
5188 retry:
5189 code = GET_CODE (x);
5190 switch (code)
5192 case LABEL_REF:
5193 case SYMBOL_REF:
5194 case CONST_INT:
5195 case CONST:
5196 case CONST_DOUBLE:
5197 case PC:
5198 case ADDR_VEC:
5199 case ADDR_DIFF_VEC:
5200 return;
5202 #ifdef HAVE_cc0
5203 case CC0:
5204 pbi->cc0_live = 1;
5205 return;
5206 #endif
5208 case CLOBBER:
5209 /* If we are clobbering a MEM, mark any registers inside the address
5210 as being used. */
5211 if (GET_CODE (XEXP (x, 0)) == MEM)
5212 mark_used_regs (pbi, XEXP (XEXP (x, 0), 0), cond, insn);
5213 return;
5215 case MEM:
5216 /* Don't bother watching stores to mems if this is not the
5217 final pass. We'll not be deleting dead stores this round. */
5218 if (flags & PROP_SCAN_DEAD_CODE)
5220 /* Invalidate the data for the last MEM stored, but only if MEM is
5221 something that can be stored into. */
5222 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
5223 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
5224 ; /* needn't clear the memory set list */
5225 else
5227 rtx temp = pbi->mem_set_list;
5228 rtx prev = NULL_RTX;
5229 rtx next;
5231 while (temp)
5233 next = XEXP (temp, 1);
5234 if (anti_dependence (XEXP (temp, 0), x))
5236 /* Splice temp out of the list. */
5237 if (prev)
5238 XEXP (prev, 1) = next;
5239 else
5240 pbi->mem_set_list = next;
5241 free_EXPR_LIST_node (temp);
5243 else
5244 prev = temp;
5245 temp = next;
5249 /* If the memory reference had embedded side effects (autoincrement
5250 address modes), then we may need to kill some entries on the
5251 memory set list. */
5252 if (insn)
5253 invalidate_mems_from_autoinc (pbi, insn);
5256 #ifdef AUTO_INC_DEC
5257 if (flags & PROP_AUTOINC)
5258 find_auto_inc (pbi, x, insn);
5259 #endif
5260 break;
5262 case SUBREG:
5263 if (GET_CODE (SUBREG_REG (x)) == REG
5264 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER
5265 && (GET_MODE_SIZE (GET_MODE (x))
5266 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
5267 REG_CHANGES_SIZE (REGNO (SUBREG_REG (x))) = 1;
5269 /* While we're here, optimize this case. */
5270 x = SUBREG_REG (x);
5271 if (GET_CODE (x) != REG)
5272 goto retry;
5273 /* FALLTHRU */
5275 case REG:
5276 /* A register used in any way other than being set => mark it as needed. */
5277 mark_used_reg (pbi, x, cond, insn);
5278 return;
5280 case SET:
5282 register rtx testreg = SET_DEST (x);
5283 int mark_dest = 0;
5285 /* If storing into MEM, don't show it as being used. But do
5286 show the address as being used. */
5287 if (GET_CODE (testreg) == MEM)
5289 #ifdef AUTO_INC_DEC
5290 if (flags & PROP_AUTOINC)
5291 find_auto_inc (pbi, testreg, insn);
5292 #endif
5293 mark_used_regs (pbi, XEXP (testreg, 0), cond, insn);
5294 mark_used_regs (pbi, SET_SRC (x), cond, insn);
5295 return;
5298 /* Storing in STRICT_LOW_PART is like storing in a reg
5299 in that this SET might be dead, so ignore it in TESTREG,
5300 but in some other ways it is like using the reg.
5302 Storing in a SUBREG or a bit field is like storing the entire
5303 register in that if the register's value is not used
5304 then this SET is not needed. */
5305 while (GET_CODE (testreg) == STRICT_LOW_PART
5306 || GET_CODE (testreg) == ZERO_EXTRACT
5307 || GET_CODE (testreg) == SIGN_EXTRACT
5308 || GET_CODE (testreg) == SUBREG)
5310 if (GET_CODE (testreg) == SUBREG
5311 && GET_CODE (SUBREG_REG (testreg)) == REG
5312 && REGNO (SUBREG_REG (testreg)) >= FIRST_PSEUDO_REGISTER
5313 && (GET_MODE_SIZE (GET_MODE (testreg))
5314 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (testreg)))))
5315 REG_CHANGES_SIZE (REGNO (SUBREG_REG (testreg))) = 1;
5317 /* Modifying a single register in an alternate mode
5318 does not use any of the old value. But these other
5319 ways of storing in a register do use the old value. */
5320 if (GET_CODE (testreg) == SUBREG
5321 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
5323 else
5324 mark_dest = 1;
5326 testreg = XEXP (testreg, 0);
5329 /* If this is a store into a register, recursively scan the
5330 value being stored. */
5332 if ((GET_CODE (testreg) == PARALLEL
5333 && GET_MODE (testreg) == BLKmode)
5334 || (GET_CODE (testreg) == REG
5335 && (regno = REGNO (testreg),
5336 ! (regno == FRAME_POINTER_REGNUM
5337 && (! reload_completed || frame_pointer_needed)))
5338 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
5339 && ! (regno == HARD_FRAME_POINTER_REGNUM
5340 && (! reload_completed || frame_pointer_needed))
5341 #endif
5342 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
5343 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
5344 #endif
5347 if (mark_dest)
5348 mark_used_regs (pbi, SET_DEST (x), cond, insn);
5349 mark_used_regs (pbi, SET_SRC (x), cond, insn);
5350 return;
5353 break;
5355 case ASM_OPERANDS:
5356 case UNSPEC_VOLATILE:
5357 case TRAP_IF:
5358 case ASM_INPUT:
5360 /* Traditional and volatile asm instructions must be considered to use
5361 and clobber all hard registers, all pseudo-registers and all of
5362 memory. So must TRAP_IF and UNSPEC_VOLATILE operations.
5364 Consider for instance a volatile asm that changes the fpu rounding
5365 mode. An insn should not be moved across this even if it only uses
5366 pseudo-regs because it might give an incorrectly rounded result.
5368 ?!? Unfortunately, marking all hard registers as live causes massive
5369 problems for the register allocator and marking all pseudos as live
5370 creates mountains of uninitialized variable warnings.
5372 So for now, just clear the memory set list and mark any regs
5373 we can find in ASM_OPERANDS as used. */
5374 if (code != ASM_OPERANDS || MEM_VOLATILE_P (x))
5375 free_EXPR_LIST_list (&pbi->mem_set_list);
5377 /* For all ASM_OPERANDS, we must traverse the vector of input operands.
5378 We cannot just fall through here since then we would be confused
5379 by the ASM_INPUT rtx inside ASM_OPERANDS, which does not indicate
5380 a traditional asm, unlike its normal usage. */
5381 if (code == ASM_OPERANDS)
5383 int j;
5385 for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
5386 mark_used_regs (pbi, ASM_OPERANDS_INPUT (x, j), cond, insn);
5388 break;
5391 case COND_EXEC:
5392 if (cond != NULL_RTX)
5393 abort ();
5395 mark_used_regs (pbi, COND_EXEC_TEST (x), NULL_RTX, insn);
5397 cond = COND_EXEC_TEST (x);
5398 x = COND_EXEC_CODE (x);
5399 goto retry;
5401 case PHI:
5402 /* We _do_not_ want to scan operands of phi nodes. Operands of
5403 a phi function are evaluated only when control reaches this
5404 block along a particular edge. Therefore, regs that appear
5405 as arguments to phi should not be added to the global live at
5406 start. */
5407 return;
5409 default:
5410 break;
5413 /* Recursively scan the operands of this expression. */
5416 register const char *fmt = GET_RTX_FORMAT (code);
5417 register int i;
5419 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5421 if (fmt[i] == 'e')
5423 /* Tail recursive case: save a function call level. */
5424 if (i == 0)
5426 x = XEXP (x, 0);
5427 goto retry;
5429 mark_used_regs (pbi, XEXP (x, i), cond, insn);
5431 else if (fmt[i] == 'E')
5433 register int j;
5434 for (j = 0; j < XVECLEN (x, i); j++)
5435 mark_used_regs (pbi, XVECEXP (x, i, j), cond, insn);
5441 #ifdef AUTO_INC_DEC
5443 static int
5444 try_pre_increment_1 (pbi, insn)
5445 struct propagate_block_info *pbi;
5446 rtx insn;
5448 /* Find the next use of this reg. If in same basic block,
5449 make it do pre-increment or pre-decrement if appropriate. */
5450 rtx x = single_set (insn);
5451 HOST_WIDE_INT amount = ((GET_CODE (SET_SRC (x)) == PLUS ? 1 : -1)
5452 * INTVAL (XEXP (SET_SRC (x), 1)));
5453 int regno = REGNO (SET_DEST (x));
5454 rtx y = pbi->reg_next_use[regno];
5455 if (y != 0
5456 && BLOCK_NUM (y) == BLOCK_NUM (insn)
5457 /* Don't do this if the reg dies, or gets set in y; a standard addressing
5458 mode would be better. */
5459 && ! dead_or_set_p (y, SET_DEST (x))
5460 && try_pre_increment (y, SET_DEST (x), amount))
5462 /* We have found a suitable auto-increment
5463 and already changed insn Y to do it.
5464 So flush this increment-instruction. */
5465 PUT_CODE (insn, NOTE);
5466 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
5467 NOTE_SOURCE_FILE (insn) = 0;
5468 /* Count a reference to this reg for the increment
5469 insn we are deleting. When a reg is incremented,
5470 spilling it is worse, so we want to make that
5471 less likely. */
5472 if (regno >= FIRST_PSEUDO_REGISTER)
5474 REG_N_REFS (regno) += (optimize_size ? 1
5475 : pbi->bb->loop_depth + 1);
5476 REG_N_SETS (regno)++;
5478 return 1;
5480 return 0;
5483 /* Try to change INSN so that it does pre-increment or pre-decrement
5484 addressing on register REG in order to add AMOUNT to REG.
5485 AMOUNT is negative for pre-decrement.
5486 Returns 1 if the change could be made.
5487 This checks all aspects of the validity of the result of modifying INSN. */
5489 static int
5490 try_pre_increment (insn, reg, amount)
5491 rtx insn, reg;
5492 HOST_WIDE_INT amount;
5494 register rtx use;
5496 /* Nonzero if we can try to make a pre-increment or pre-decrement.
5497 For example, addl $4,r1; movl (r1),... can become movl +(r1),... */
5498 int pre_ok = 0;
5499 /* Nonzero if we can try to make a post-increment or post-decrement.
5500 For example, addl $4,r1; movl -4(r1),... can become movl (r1)+,...
5501 It is possible for both PRE_OK and POST_OK to be nonzero if the machine
5502 supports both pre-inc and post-inc, or both pre-dec and post-dec. */
5503 int post_ok = 0;
5505 /* Nonzero if the opportunity actually requires post-inc or post-dec. */
5506 int do_post = 0;
5508 /* From the sign of increment, see which possibilities are conceivable
5509 on this target machine. */
5510 if (HAVE_PRE_INCREMENT && amount > 0)
5511 pre_ok = 1;
5512 if (HAVE_POST_INCREMENT && amount > 0)
5513 post_ok = 1;
5515 if (HAVE_PRE_DECREMENT && amount < 0)
5516 pre_ok = 1;
5517 if (HAVE_POST_DECREMENT && amount < 0)
5518 post_ok = 1;
5520 if (! (pre_ok || post_ok))
5521 return 0;
5523 /* It is not safe to add a side effect to a jump insn
5524 because if the incremented register is spilled and must be reloaded
5525 there would be no way to store the incremented value back in memory. */
5527 if (GET_CODE (insn) == JUMP_INSN)
5528 return 0;
5530 use = 0;
5531 if (pre_ok)
5532 use = find_use_as_address (PATTERN (insn), reg, 0);
5533 if (post_ok && (use == 0 || use == (rtx) 1))
5535 use = find_use_as_address (PATTERN (insn), reg, -amount);
5536 do_post = 1;
5539 if (use == 0 || use == (rtx) 1)
5540 return 0;
5542 if (GET_MODE_SIZE (GET_MODE (use)) != (amount > 0 ? amount : - amount))
5543 return 0;
5545 /* See if this combination of instruction and addressing mode exists. */
5546 if (! validate_change (insn, &XEXP (use, 0),
5547 gen_rtx_fmt_e (amount > 0
5548 ? (do_post ? POST_INC : PRE_INC)
5549 : (do_post ? POST_DEC : PRE_DEC),
5550 Pmode, reg), 0))
5551 return 0;
5553 /* Record that this insn now has an implicit side effect on REG. */
5554 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, reg, REG_NOTES (insn));
5555 return 1;
5558 #endif /* AUTO_INC_DEC */
5560 /* Find the place in the rtx X where REG is used as a memory address.
5561 Return the MEM rtx that so uses it.
5562 If PLUSCONST is nonzero, search instead for a memory address equivalent to
5563 (plus REG (const_int PLUSCONST)).
5565 If such an address does not appear, return 0.
5566 If REG appears more than once, or is used other than in such an address,
5567 return (rtx)1. */
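/* Usage sketch (hypothetical): with REG = (reg:SI 100) and
   PLUSCONST = 4, scanning

       (set (reg:SI 101) (mem:SI (plus:SI (reg:SI 100) (const_int 4))))

   returns the MEM; had the same insn mentioned (reg:SI 100) anywhere
   else as well, the result would be (rtx) 1 instead.  */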
5569 rtx
5570 find_use_as_address (x, reg, plusconst)
5571 register rtx x;
5572 rtx reg;
5573 HOST_WIDE_INT plusconst;
5575 enum rtx_code code = GET_CODE (x);
5576 const char *fmt = GET_RTX_FORMAT (code);
5577 register int i;
5578 register rtx value = 0;
5579 register rtx tem;
5581 if (code == MEM && XEXP (x, 0) == reg && plusconst == 0)
5582 return x;
5584 if (code == MEM && GET_CODE (XEXP (x, 0)) == PLUS
5585 && XEXP (XEXP (x, 0), 0) == reg
5586 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
5587 && INTVAL (XEXP (XEXP (x, 0), 1)) == plusconst)
5588 return x;
5590 if (code == SIGN_EXTRACT || code == ZERO_EXTRACT)
5592 /* If REG occurs inside a MEM used in a bit-field reference,
5593 that is unacceptable. */
5594 if (find_use_as_address (XEXP (x, 0), reg, 0) != 0)
5595 return (rtx) (HOST_WIDE_INT) 1;
5598 if (x == reg)
5599 return (rtx) (HOST_WIDE_INT) 1;
5601 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5603 if (fmt[i] == 'e')
5605 tem = find_use_as_address (XEXP (x, i), reg, plusconst);
5606 if (value == 0)
5607 value = tem;
5608 else if (tem != 0)
5609 return (rtx) (HOST_WIDE_INT) 1;
5611 else if (fmt[i] == 'E')
5613 register int j;
5614 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
5616 tem = find_use_as_address (XVECEXP (x, i, j), reg, plusconst);
5617 if (value == 0)
5618 value = tem;
5619 else if (tem != 0)
5620 return (rtx) (HOST_WIDE_INT) 1;
5625 return value;
5628 /* Write information about registers and basic blocks into FILE.
5629 This is part of making a debugging dump. */
5631 void
5632 dump_regset (r, outf)
5633 regset r;
5634 FILE *outf;
5636 int i;
5637 if (r == NULL)
5639 fputs (" (nil)", outf);
5640 return;
5643 EXECUTE_IF_SET_IN_REG_SET (r, 0, i,
5645 fprintf (outf, " %d", i);
5646 if (i < FIRST_PSEUDO_REGISTER)
5647 fprintf (outf, " [%s]",
5648 reg_names[i]);
5652 void
5653 debug_regset (r)
5654 regset r;
5656 dump_regset (r, stderr);
5657 putc ('\n', stderr);
5660 void
5661 dump_flow_info (file)
5662 FILE *file;
5664 register int i;
5665 static const char * const reg_class_names[] = REG_CLASS_NAMES;
5667 fprintf (file, "%d registers.\n", max_regno);
5668 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
5669 if (REG_N_REFS (i))
5671 enum reg_class class, altclass;
5672 fprintf (file, "\nRegister %d used %d times across %d insns",
5673 i, REG_N_REFS (i), REG_LIVE_LENGTH (i));
5674 if (REG_BASIC_BLOCK (i) >= 0)
5675 fprintf (file, " in block %d", REG_BASIC_BLOCK (i));
5676 if (REG_N_SETS (i))
5677 fprintf (file, "; set %d time%s", REG_N_SETS (i),
5678 (REG_N_SETS (i) == 1) ? "" : "s");
5679 if (REG_USERVAR_P (regno_reg_rtx[i]))
5680 fprintf (file, "; user var");
5681 if (REG_N_DEATHS (i) != 1)
5682 fprintf (file, "; dies in %d places", REG_N_DEATHS (i));
5683 if (REG_N_CALLS_CROSSED (i) == 1)
5684 fprintf (file, "; crosses 1 call");
5685 else if (REG_N_CALLS_CROSSED (i))
5686 fprintf (file, "; crosses %d calls", REG_N_CALLS_CROSSED (i));
5687 if (PSEUDO_REGNO_BYTES (i) != UNITS_PER_WORD)
5688 fprintf (file, "; %d bytes", PSEUDO_REGNO_BYTES (i));
5689 class = reg_preferred_class (i);
5690 altclass = reg_alternate_class (i);
5691 if (class != GENERAL_REGS || altclass != ALL_REGS)
5693 if (altclass == ALL_REGS || class == ALL_REGS)
5694 fprintf (file, "; pref %s", reg_class_names[(int) class]);
5695 else if (altclass == NO_REGS)
5696 fprintf (file, "; %s or none", reg_class_names[(int) class]);
5697 else
5698 fprintf (file, "; pref %s, else %s",
5699 reg_class_names[(int) class],
5700 reg_class_names[(int) altclass]);
5702 if (REGNO_POINTER_FLAG (i))
5703 fprintf (file, "; pointer");
5704 fprintf (file, ".\n");
5707 fprintf (file, "\n%d basic blocks, %d edges.\n", n_basic_blocks, n_edges);
5708 for (i = 0; i < n_basic_blocks; i++)
5710 register basic_block bb = BASIC_BLOCK (i);
5711 register edge e;
5713 fprintf (file, "\nBasic block %d: first insn %d, last %d, loop_depth %d, count %d.\n",
5714 i, INSN_UID (bb->head), INSN_UID (bb->end), bb->loop_depth, bb->count);
5716 fprintf (file, "Predecessors: ");
5717 for (e = bb->pred; e ; e = e->pred_next)
5718 dump_edge_info (file, e, 0);
5720 fprintf (file, "\nSuccessors: ");
5721 for (e = bb->succ; e ; e = e->succ_next)
5722 dump_edge_info (file, e, 1);
5724 fprintf (file, "\nRegisters live at start:");
5725 dump_regset (bb->global_live_at_start, file);
5727 fprintf (file, "\nRegisters live at end:");
5728 dump_regset (bb->global_live_at_end, file);
5730 putc('\n', file);
5733 putc('\n', file);
5736 void
5737 debug_flow_info ()
5739 dump_flow_info (stderr);
5742 static void
5743 dump_edge_info (file, e, do_succ)
5744 FILE *file;
5745 edge e;
5746 int do_succ;
5748 basic_block side = (do_succ ? e->dest : e->src);
5750 if (side == ENTRY_BLOCK_PTR)
5751 fputs (" ENTRY", file);
5752 else if (side == EXIT_BLOCK_PTR)
5753 fputs (" EXIT", file);
5754 else
5755 fprintf (file, " %d", side->index);
5757 if (e->count)
5758 fprintf (file, " count:%d", e->count);
5760 if (e->flags)
5762 static const char * const bitnames[] = {
5763 "fallthru", "crit", "ab", "abcall", "eh", "fake"
5765 int comma = 0;
5766 int i, flags = e->flags;
5768 fputc (' ', file);
5769 fputc ('(', file);
5770 for (i = 0; flags; i++)
5771 if (flags & (1 << i))
5773 flags &= ~(1 << i);
5775 if (comma)
5776 fputc (',', file);
5777 if (i < (int)(sizeof (bitnames) / sizeof (*bitnames)))
5778 fputs (bitnames[i], file);
5779 else
5780 fprintf (file, "%d", i);
5781 comma = 1;
5783 fputc (')', file);
5788 /* Print out one basic block with live information at start and end. */
5789 void
5790 dump_bb (bb, outf)
5791 basic_block bb;
5792 FILE *outf;
5794 rtx insn;
5795 rtx last;
5796 edge e;
5798 fprintf (outf, ";; Basic block %d, loop depth %d, count %d",
5799 bb->index, bb->loop_depth, bb->count);
5800 if (bb->eh_beg != -1 || bb->eh_end != -1)
5801 fprintf (outf, ", eh regions %d/%d", bb->eh_beg, bb->eh_end);
5802 putc ('\n', outf);
5804 fputs (";; Predecessors: ", outf);
5805 for (e = bb->pred; e ; e = e->pred_next)
5806 dump_edge_info (outf, e, 0);
5807 putc ('\n', outf);
5809 fputs (";; Registers live at start:", outf);
5810 dump_regset (bb->global_live_at_start, outf);
5811 putc ('\n', outf);
5813 for (insn = bb->head, last = NEXT_INSN (bb->end);
5814 insn != last;
5815 insn = NEXT_INSN (insn))
5816 print_rtl_single (outf, insn);
5818 fputs (";; Registers live at end:", outf);
5819 dump_regset (bb->global_live_at_end, outf);
5820 putc ('\n', outf);
5822 fputs (";; Successors: ", outf);
5823 for (e = bb->succ; e; e = e->succ_next)
5824 dump_edge_info (outf, e, 1);
5825 putc ('\n', outf);
5828 void
5829 debug_bb (bb)
5830 basic_block bb;
5832 dump_bb (bb, stderr);
5835 void
5836 debug_bb_n (n)
5837 int n;
5839 dump_bb (BASIC_BLOCK(n), stderr);
5842 /* Like print_rtl, but also print out live information for the start of each
5843 basic block. */
5845 void
5846 print_rtl_with_bb (outf, rtx_first)
5847 FILE *outf;
5848 rtx rtx_first;
5850 register rtx tmp_rtx;
5852 if (rtx_first == 0)
5853 fprintf (outf, "(nil)\n");
5854 else
5856 int i;
5857 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
5858 int max_uid = get_max_uid ();
5859 basic_block *start = (basic_block *)
5860 xcalloc (max_uid, sizeof (basic_block));
5861 basic_block *end = (basic_block *)
5862 xcalloc (max_uid, sizeof (basic_block));
5863 enum bb_state *in_bb_p = (enum bb_state *)
5864 xcalloc (max_uid, sizeof (enum bb_state));
5866 for (i = n_basic_blocks - 1; i >= 0; i--)
5868 basic_block bb = BASIC_BLOCK (i);
5869 rtx x;
5871 start[INSN_UID (bb->head)] = bb;
5872 end[INSN_UID (bb->end)] = bb;
5873 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
5875 enum bb_state state = IN_MULTIPLE_BB;
5876 if (in_bb_p[INSN_UID(x)] == NOT_IN_BB)
5877 state = IN_ONE_BB;
5878 in_bb_p[INSN_UID(x)] = state;
5880 if (x == bb->end)
5881 break;
5885 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
5887 int did_output;
5888 basic_block bb;
5890 if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
5892 fprintf (outf, ";; Start of basic block %d, registers live:",
5893 bb->index);
5894 dump_regset (bb->global_live_at_start, outf);
5895 putc ('\n', outf);
5898 if (in_bb_p[INSN_UID(tmp_rtx)] == NOT_IN_BB
5899 && GET_CODE (tmp_rtx) != NOTE
5900 && GET_CODE (tmp_rtx) != BARRIER)
5901 fprintf (outf, ";; Insn is not within a basic block\n");
5902 else if (in_bb_p[INSN_UID(tmp_rtx)] == IN_MULTIPLE_BB)
5903 fprintf (outf, ";; Insn is in multiple basic blocks\n");
5905 did_output = print_rtl_single (outf, tmp_rtx);
5907 if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
5909 fprintf (outf, ";; End of basic block %d, registers live:\n",
5910 bb->index);
5911 dump_regset (bb->global_live_at_end, outf);
5912 putc ('\n', outf);
5915 if (did_output)
5916 putc ('\n', outf);
5919 free (start);
5920 free (end);
5921 free (in_bb_p);
5924 if (current_function_epilogue_delay_list != 0)
5926 fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
5927 for (tmp_rtx = current_function_epilogue_delay_list; tmp_rtx != 0;
5928 tmp_rtx = XEXP (tmp_rtx, 1))
5929 print_rtl_single (outf, XEXP (tmp_rtx, 0));
5933 /* Compute dominator relationships using new flow graph structures. */
5934 void
5935 compute_flow_dominators (dominators, post_dominators)
5936 sbitmap *dominators;
5937 sbitmap *post_dominators;
5939 int bb;
5940 sbitmap *temp_bitmap;
5941 edge e;
5942 basic_block *worklist, *workend, *qin, *qout;
5943 int qlen;
5945 /* Allocate a worklist array/queue. Entries are only added to the
5946 list if they were not already on the list. So the size is
5947 bounded by the number of basic blocks. */
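/* Illustration (hypothetical four-block CFG): with edges 0->1, 0->2,
   1->3 and 2->3, the iteration below converges to

       dom(0) = {0}   dom(1) = {0,1}   dom(2) = {0,2}   dom(3) = {0,3}

   since neither 1 nor 2 appears in the dominator set of the other,
   their intersection at block 3 leaves only block 0 (plus 3 itself).  */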
5948 worklist = (basic_block *) xmalloc (sizeof (basic_block) * n_basic_blocks);
5949 workend = &worklist[n_basic_blocks];
5951 temp_bitmap = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
5952 sbitmap_vector_zero (temp_bitmap, n_basic_blocks);
5954 if (dominators)
5956 /* The optimistic setting of dominators requires us to put every
5957 block on the work list initially. */
5958 qin = qout = worklist;
5959 for (bb = 0; bb < n_basic_blocks; bb++)
5961 *qin++ = BASIC_BLOCK (bb);
5962 BASIC_BLOCK (bb)->aux = BASIC_BLOCK (bb);
5964 qlen = n_basic_blocks;
5965 qin = worklist;
5967 /* We want a maximal solution, so initially assume everything dominates
5968 everything else. */
5969 sbitmap_vector_ones (dominators, n_basic_blocks);
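/* The iteration below solves the standard dominator dataflow equation

       dom (b) = { b } U ( intersection over p in pred (b) of dom (p) )

   to a fixed point, starting from the optimistic all-ones solution
   initialized above.  */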
5971 /* Mark successors of the entry block so we can identify them below. */
5972 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
5973 e->dest->aux = ENTRY_BLOCK_PTR;
5975 /* Iterate until the worklist is empty. */
5976 while (qlen)
5978 /* Take the first entry off the worklist. */
5979 basic_block b = *qout++;
5980 if (qout >= workend)
5981 qout = worklist;
5982 qlen--;
5984 bb = b->index;
5986 /* Compute the intersection of the dominators of all the
5987 predecessor blocks.
5989 If one of the predecessor blocks is the ENTRY block, then the
5990 intersection of the dominators of the predecessor blocks is
5991 defined as the null set. We can identify such blocks by the
5992 special value in the AUX field in the block structure. */
5993 if (b->aux == ENTRY_BLOCK_PTR)
5995 /* Do not clear the aux field for blocks which are
5996 successors of the ENTRY block. That way we never
5997 add them to the worklist again.
5999 The intersection of dominators of the preds of this block is
6000 defined as the null set. */
6001 sbitmap_zero (temp_bitmap[bb]);
6003 else
6005 /* Clear the aux field of this block so it can be added to
6006 the worklist again if necessary. */
6007 b->aux = NULL;
6008 sbitmap_intersection_of_preds (temp_bitmap[bb], dominators, bb);
6011 /* Make sure each block always dominates itself. */
6012 SET_BIT (temp_bitmap[bb], bb);
6014 /* If the out state of this block changed, then we need to
6015 add the successors of this block to the worklist if they
6016 are not already on the worklist. */
6017 if (sbitmap_a_and_b (dominators[bb], dominators[bb], temp_bitmap[bb]))
6019 for (e = b->succ; e; e = e->succ_next)
6021 if (!e->dest->aux && e->dest != EXIT_BLOCK_PTR)
6023 *qin++ = e->dest;
6024 if (qin >= workend)
6025 qin = worklist;
6026 qlen++;
6028 e->dest->aux = e;
6035 if (post_dominators)
6037 /* The optimistic setting of dominators requires us to put every
6038 block on the work list initially. */
6039 qin = qout = worklist;
6040 for (bb = 0; bb < n_basic_blocks; bb++)
6042 *qin++ = BASIC_BLOCK (bb);
6043 BASIC_BLOCK (bb)->aux = BASIC_BLOCK (bb);
6045 qlen = n_basic_blocks;
6046 qin = worklist;
6048 /* We want a maximal solution, so initially assume everything post
6049 dominates everything else. */
6050 sbitmap_vector_ones (post_dominators, n_basic_blocks);
6052 /* Mark predecessors of the exit block so we can identify them below. */
6053 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
6054 e->src->aux = EXIT_BLOCK_PTR;
6056 /* Iterate until the worklist is empty. */
6057 while (qlen)
6059 /* Take the first entry off the worklist. */
6060 basic_block b = *qout++;
6061 if (qout >= workend)
6062 qout = worklist;
6063 qlen--;
6065 bb = b->index;
6067 /* Compute the intersection of the post dominators of all the
6068 successor blocks.
6070 If one of the successor blocks is the EXIT block, then the
6071 intersection of the dominators of the successor blocks is
6072 defined as the null set. We can identify such blocks by the
6073 special value in the AUX field in the block structure. */
6074 if (b->aux == EXIT_BLOCK_PTR)
6076 /* Do not clear the aux field for blocks which are
6077 predecessors of the EXIT block. That way we never
6078 add them to the worklist again.
6080 The intersection of dominators of the succs of this block is
6081 defined as the null set. */
6082 sbitmap_zero (temp_bitmap[bb]);
6084 else
6086 /* Clear the aux field of this block so it can be added to
6087 the worklist again if necessary. */
6088 b->aux = NULL;
6089 sbitmap_intersection_of_succs (temp_bitmap[bb],
6090 post_dominators, bb);
6093 /* Make sure each block always post dominates itself. */
6094 SET_BIT (temp_bitmap[bb], bb);
6096 /* If the out state of this block changed, then we need to
6097 add the predecessors of this block to the worklist if they
6098 are not already on the worklist. */
6099 if (sbitmap_a_and_b (post_dominators[bb],
6100 post_dominators[bb],
6101 temp_bitmap[bb]))
6103 for (e = b->pred; e; e = e->pred_next)
6105 if (!e->src->aux && e->src != ENTRY_BLOCK_PTR)
6107 *qin++ = e->src;
6108 if (qin >= workend)
6109 qin = worklist;
6110 qlen++;
6112 e->src->aux = e;
6119 free (worklist);
6120 free (temp_bitmap);
6123 /* Given DOMINATORS, compute the immediate dominators into IDOM. */
6125 void
6126 compute_immediate_dominators (idom, dominators)
6127 int *idom;
6128 sbitmap *dominators;
6130 sbitmap *tmp;
6131 int b;
6133 tmp = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
6135 /* Begin with tmp(n) = dom(n) - { n }. */
6136 for (b = n_basic_blocks; --b >= 0; )
6138 sbitmap_copy (tmp[b], dominators[b]);
6139 RESET_BIT (tmp[b], b);
6142 /* Subtract out all of our dominator's dominators. */
6143 for (b = n_basic_blocks; --b >= 0; )
6145 sbitmap tmp_b = tmp[b];
6146 int s;
6148 for (s = n_basic_blocks; --s >= 0; )
6149 if (TEST_BIT (tmp_b, s))
6150 sbitmap_difference (tmp_b, tmp_b, tmp[s]);
6153 /* Find the one bit set in the bitmap and put it in the output array. */
6154 for (b = n_basic_blocks; --b >= 0; )
6156 int t;
6157 EXECUTE_IF_SET_IN_SBITMAP (tmp[b], 0, t, { idom[b] = t; });
6160 sbitmap_vector_free (tmp);
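/* A small worked example (illustrative, not from the original source):
   for the diamond CFG 0 -> {1, 2} -> 3 we have

       dom (0) = {0}     dom (1) = {0,1}
       dom (2) = {0,2}   dom (3) = {0,3}

   so tmp (3) starts as {0}; tmp (0) is empty, nothing is subtracted,
   and the single remaining bit yields idom[3] = 0.  Likewise
   idom[1] = idom[2] = 0.  */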
6163 /* Recompute register set/reference counts immediately prior to register
6164 allocation.
6166 This avoids problems with set/reference counts changing to/from values
6167 which have special meanings to the register allocators.
6169 Additionally, the reference counts are the primary component used by the
6170 register allocators to prioritize pseudos for allocation to hard regs.
6171 More accurate reference counts generally lead to better register allocation.
6173 F is the first insn to be scanned.
6175 LOOP_STEP denotes how much loop_depth should be incremented per
6176 loop nesting level in order to increase the ref count more for
6177 references in a loop.
6179 It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and
6180 possibly other information which is used by the register allocators. */
6182 void
6183 recompute_reg_usage (f, loop_step)
6184 rtx f ATTRIBUTE_UNUSED;
6185 int loop_step ATTRIBUTE_UNUSED;
6187 allocate_reg_life_data ();
6188 update_life_info (NULL, UPDATE_LIFE_LOCAL, PROP_REG_INFO);
6191 /* Optionally removes all the REG_DEAD and REG_UNUSED notes from a set of
6192 blocks. If BLOCKS is NULL, assume the universal set. Returns a count
6193 of the number of registers that died. */
6195 int
6196 count_or_remove_death_notes (blocks, kill)
6197 sbitmap blocks;
6198 int kill;
6200 int i, count = 0;
6202 for (i = n_basic_blocks - 1; i >= 0; --i)
6204 basic_block bb;
6205 rtx insn;
6207 if (blocks && ! TEST_BIT (blocks, i))
6208 continue;
6210 bb = BASIC_BLOCK (i);
6212 for (insn = bb->head; ; insn = NEXT_INSN (insn))
6214 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
6216 rtx *pprev = &REG_NOTES (insn);
6217 rtx link = *pprev;
6219 while (link)
6221 switch (REG_NOTE_KIND (link))
6223 case REG_DEAD:
6224 if (GET_CODE (XEXP (link, 0)) == REG)
6226 rtx reg = XEXP (link, 0);
6227 int n;
6229 if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
6230 n = 1;
6231 else
6232 n = HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg));
6233 count += n;
6235 /* FALLTHRU */
6237 case REG_UNUSED:
6238 if (kill)
6240 rtx next = XEXP (link, 1);
6241 free_EXPR_LIST_node (link);
6242 *pprev = link = next;
6243 break;
6245 /* FALLTHRU */
6247 default:
6248 pprev = &XEXP (link, 1);
6249 link = *pprev;
6250 break;
6255 if (insn == bb->end)
6256 break;
6260 return count;
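/* Illustrative usage (hypothetical): a pass that rewrites a region of
   blocks might strip the stale notes first and recompute them later:

       int ndead = count_or_remove_death_notes (blocks, 1);

   Passing KILL as zero only counts the registers mentioned in
   REG_DEAD notes without deleting anything.  */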
6263 /* Record INSN's block as BB. */
6265 void
6266 set_block_for_insn (insn, bb)
6267 rtx insn;
6268 basic_block bb;
6270 size_t uid = INSN_UID (insn);
6271 if (uid >= basic_block_for_insn->num_elements)
6273 int new_size;
6275 /* Add one-eighth the size so we don't keep calling xrealloc. */
6276 new_size = uid + (uid + 7) / 8;
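/* For example, a uid of 800 yields a new size of 900, i.e. roughly
   one-eighth headroom, so the array grows geometrically rather than
   once per insn.  */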
6278 VARRAY_GROW (basic_block_for_insn, new_size);
6280 VARRAY_BB (basic_block_for_insn, uid) = bb;
6283 /* Record INSN's block number as BB. */
6284 /* ??? This has got to go. */
6286 void
6287 set_block_num (insn, bb)
6288 rtx insn;
6289 int bb;
6291 set_block_for_insn (insn, BASIC_BLOCK (bb));
6294 /* Verify the CFG consistency. This function checks some CFG invariants and
6295 aborts when something is wrong. Hopefully this function will help to
6296 convert many optimization passes to preserve CFG consistency.
6298 Currently it does the following checks:
6300 - test head/end pointers
6301 - overlapping of basic blocks
6302 - edge list correctness
6303 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
6304 - tails of basic blocks (ensure that the boundary is necessary)
6305 - scans body of the basic block for JUMP_INSN, CODE_LABEL
6306 and NOTE_INSN_BASIC_BLOCK
6307 - check that all insns are in the basic blocks
6308 (except the switch handling code, barriers and notes)
6309 - check that all returns are followed by barriers
6311 In the future it can be extended to check a lot of other stuff as well
6312 (reachability of basic blocks, life information, etc.). */
6314 void
6315 verify_flow_info ()
6317 const int max_uid = get_max_uid ();
6318 const rtx rtx_first = get_insns ();
6319 basic_block *bb_info;
6320 rtx x;
6321 int i, last_bb_num_seen, num_bb_notes, err = 0;
6323 bb_info = (basic_block *) xcalloc (max_uid, sizeof (basic_block));
6325 /* First pass check head/end pointers and set bb_info array used by
6326 later passes. */
6327 for (i = n_basic_blocks - 1; i >= 0; i--)
6329 basic_block bb = BASIC_BLOCK (i);
6331 /* Check the head pointer and make sure that it is pointing into
6332 the insn list. */
6333 for (x = rtx_first; x != NULL_RTX; x = NEXT_INSN (x))
6334 if (x == bb->head)
6335 break;
6336 if (!x)
6338 error ("Head insn %d for block %d not found in the insn stream.",
6339 INSN_UID (bb->head), bb->index);
6340 err = 1;
6343 /* Check the end pointer and make sure that it is pointing into
6344 the insn list. */
6345 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
6347 if (bb_info[INSN_UID (x)] != NULL)
6349 error ("Insn %d is in multiple basic blocks (%d and %d)",
6350 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
6351 err = 1;
6353 bb_info[INSN_UID (x)] = bb;
6355 if (x == bb->end)
6356 break;
6358 if (!x)
6360 error ("End insn %d for block %d not found in the insn stream.",
6361 INSN_UID (bb->end), bb->index);
6362 err = 1;
6366 /* Now check the basic blocks (boundaries etc.) */
6367 for (i = n_basic_blocks - 1; i >= 0; i--)
6369 basic_block bb = BASIC_BLOCK (i);
6370 /* Check correctness of edge lists. */
6371 edge e;
6373 e = bb->succ;
6374 while (e)
6376 if (e->src != bb)
6378 fprintf (stderr, "verify_flow_info: Basic block %d succ edge is corrupted\n",
6379 bb->index);
6380 fprintf (stderr, "Predecessor: ");
6381 dump_edge_info (stderr, e, 0);
6382 fprintf (stderr, "\nSuccessor: ");
6383 dump_edge_info (stderr, e, 1);
6384 fflush (stderr);
6385 err = 1;
6387 if (e->dest != EXIT_BLOCK_PTR)
6389 edge e2 = e->dest->pred;
6390 while (e2 && e2 != e)
6391 e2 = e2->pred_next;
6392 if (!e2)
6394 error ("Basic block %i edge lists are corrupted", bb->index);
6395 err = 1;
6398 e = e->succ_next;
6401 e = bb->pred;
6402 while (e)
6404 if (e->dest != bb)
6406 error ("Basic block %d pred edge is corrupted", bb->index);
6407 fputs ("Predecessor: ", stderr);
6408 dump_edge_info (stderr, e, 0);
6409 fputs ("\nSuccessor: ", stderr);
6410 dump_edge_info (stderr, e, 1);
6411 fputc ('\n', stderr);
6412 err = 1;
6414 if (e->src != ENTRY_BLOCK_PTR)
6416 edge e2 = e->src->succ;
6417 while (e2 && e2 != e)
6418 e2 = e2->succ_next;
6419 if (!e2)
6421 error ("Basic block %i edge lists are corrupted", bb->index);
6422 err = 1;
6425 e = e->pred_next;
6428 /* OK, pointers are correct. Now check the header of the basic
6429 block. It ought to contain an optional CODE_LABEL followed
6430 by a NOTE_BASIC_BLOCK note. */
6431 x = bb->head;
6432 if (GET_CODE (x) == CODE_LABEL)
6434 if (bb->end == x)
6436 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
6437 bb->index);
6438 err = 1;
6440 x = NEXT_INSN (x);
6442 if (GET_CODE (x) != NOTE
6443 || NOTE_LINE_NUMBER (x) != NOTE_INSN_BASIC_BLOCK
6444 || NOTE_BASIC_BLOCK (x) != bb)
6446 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d\n",
6447 bb->index);
6448 err = 1;
6451 if (bb->end == x)
6453 /* Do checks for empty blocks here */
6455 else
6457 x = NEXT_INSN (x);
6458 while (x)
6460 if (GET_CODE (x) == NOTE
6461 && NOTE_LINE_NUMBER (x) == NOTE_INSN_BASIC_BLOCK)
6463 error ("NOTE_INSN_BASIC_BLOCK %d in the middle of basic block %d",
6464 INSN_UID (x), bb->index);
6465 err = 1;
6468 if (x == bb->end)
6469 break;
6471 if (GET_CODE (x) == JUMP_INSN
6472 || GET_CODE (x) == CODE_LABEL
6473 || GET_CODE (x) == BARRIER)
6475 error ("In basic block %d:", bb->index);
6476 fatal_insn ("Flow control insn inside a basic block", x);
6479 x = NEXT_INSN (x);
6484 last_bb_num_seen = -1;
6485 num_bb_notes = 0;
6486 x = rtx_first;
6487 while (x)
6489 if (GET_CODE (x) == NOTE
6490 && NOTE_LINE_NUMBER (x) == NOTE_INSN_BASIC_BLOCK)
6492 basic_block bb = NOTE_BASIC_BLOCK (x);
6493 num_bb_notes++;
6494 if (bb->index != last_bb_num_seen + 1)
6495 fatal ("Basic blocks not numbered consecutively");
6496 last_bb_num_seen = bb->index;
6499 if (!bb_info[INSN_UID (x)])
6501 switch (GET_CODE (x))
6503 case BARRIER:
6504 case NOTE:
6505 break;
6507 case CODE_LABEL:
6508 /* An addr_vec is placed outside any basic block. */
6509 if (NEXT_INSN (x)
6510 && GET_CODE (NEXT_INSN (x)) == JUMP_INSN
6511 && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
6512 || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC))
6514 x = NEXT_INSN (x);
6517 /* But in any case, non-deletable labels can appear anywhere. */
6518 break;
6520 default:
6521 fatal_insn ("Insn outside basic block", x);
6525 if (GET_RTX_CLASS (GET_CODE (x)) == 'i'
6526 && GET_CODE (x) == JUMP_INSN
6527 && returnjump_p (x) && ! condjump_p (x)
6528 && ! (NEXT_INSN (x) && GET_CODE (NEXT_INSN (x)) == BARRIER))
6529 fatal_insn ("Return not followed by barrier", x);
6531 x = NEXT_INSN (x);
6534 if (num_bb_notes != n_basic_blocks)
6535 fatal ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
6536 num_bb_notes, n_basic_blocks);
6538 if (err)
6539 abort ();
6541 /* Clean up. */
6542 free (bb_info);
6545 /* Functions to access an edge list with a vector representation.
6546 Enough data is kept such that given an index number, the
6547 pred and succ that edge represents can be determined, or,
6548 given a pred and a succ, its index number can be returned.
6549 This allows algorithms which consume a lot of memory to
6550 represent the normally full matrix of edge (pred,succ) with a
6551 single indexed vector, edge (EDGE_INDEX (pred, succ)), with no
6552 wasted space in the client code due to sparse flow graphs. */
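/* A typical use, given some edge E, might look like this (an
   illustrative sketch, not from the original source):

       struct edge_list *elist = create_edge_list ();
       sbitmap visited = sbitmap_alloc (NUM_EDGES (elist));
       int ix;

       sbitmap_zero (visited);
       ix = EDGE_INDEX (elist, e->src, e->dest);
       if (ix != EDGE_INDEX_NO_EDGE)
         SET_BIT (visited, ix);
       ...
       sbitmap_free (visited);
       free_edge_list (elist);

   A client can thus key per-edge data off a dense integer index
   instead of a sparse (pred, succ) matrix.  */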
6554 /* This function initializes the edge list. Basically the entire
6555 flowgraph is processed, and all edges are assigned a number,
6556 and the data structure is filled in. */
6557 struct edge_list *
6558 create_edge_list ()
6560 struct edge_list *elist;
6561 edge e;
6562 int num_edges;
6563 int x;
6564 int block_count;
6566 block_count = n_basic_blocks + 2; /* Include the entry and exit blocks. */
6568 num_edges = 0;
6570 /* Determine the number of edges in the flow graph by counting successor
6571 edges on each basic block. */
6572 for (x = 0; x < n_basic_blocks; x++)
6574 basic_block bb = BASIC_BLOCK (x);
6576 for (e = bb->succ; e; e = e->succ_next)
6577 num_edges++;
6579 /* Don't forget successors of the entry block. */
6580 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6581 num_edges++;
6583 elist = (struct edge_list *) xmalloc (sizeof (struct edge_list));
6584 elist->num_blocks = block_count;
6585 elist->num_edges = num_edges;
6586 elist->index_to_edge = (edge *) xmalloc (sizeof (edge) * num_edges);
6588 num_edges = 0;
6590 /* Follow successors of the entry block, and register these edges. */
6591 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6593 elist->index_to_edge[num_edges] = e;
6594 num_edges++;
6597 for (x = 0; x < n_basic_blocks; x++)
6599 basic_block bb = BASIC_BLOCK (x);
6601 /* Follow all successors of blocks, and register these edges. */
6602 for (e = bb->succ; e; e = e->succ_next)
6604 elist->index_to_edge[num_edges] = e;
6605 num_edges++;
6608 return elist;
6611 /* This function frees the memory associated with an edge list. */
6612 void
6613 free_edge_list (elist)
6614 struct edge_list *elist;
6616 if (elist)
6618 free (elist->index_to_edge);
6619 free (elist);
6623 /* This function provides debug output showing an edge list. */
6624 void
6625 print_edge_list (f, elist)
6626 FILE *f;
6627 struct edge_list *elist;
6629 int x;
6630 fprintf (f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
6631 elist->num_blocks - 2, elist->num_edges);
6633 for (x = 0; x < elist->num_edges; x++)
6635 fprintf (f, " %-4d - edge(", x);
6636 if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR)
6637 fprintf (f,"entry,");
6638 else
6639 fprintf (f,"%d,", INDEX_EDGE_PRED_BB (elist, x)->index);
6641 if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR)
6642 fprintf (f,"exit)\n");
6643 else
6644 fprintf (f,"%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
6648 /* This function provides an internal consistency check of an edge list,
6649 verifying that all edges are present, and that there are no
6650 extra edges. */
6651 void
6652 verify_edge_list (f, elist)
6653 FILE *f;
6654 struct edge_list *elist;
6656 int x, pred, succ, index;
6657 edge e;
6659 for (x = 0; x < n_basic_blocks; x++)
6661 basic_block bb = BASIC_BLOCK (x);
6663 for (e = bb->succ; e; e = e->succ_next)
6665 pred = e->src->index;
6666 succ = e->dest->index;
6667 index = EDGE_INDEX (elist, e->src, e->dest);
6668 if (index == EDGE_INDEX_NO_EDGE)
6670 fprintf (f, "*p* No index for edge from %d to %d\n",pred, succ);
6671 continue;
6673 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
6674 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
6675 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
6676 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
6677 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
6678 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
6681 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6683 pred = e->src->index;
6684 succ = e->dest->index;
6685 index = EDGE_INDEX (elist, e->src, e->dest);
6686 if (index == EDGE_INDEX_NO_EDGE)
6688 fprintf (f, "*p* No index for edge from %d to %d\n",pred, succ);
6689 continue;
6691 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
6692 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
6693 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
6694 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
6695 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
6696 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
6698 /* We've verified that all the edges are in the list; now let's make sure
6699 there are no spurious edges in the list. */
6701 for (pred = 0 ; pred < n_basic_blocks; pred++)
6702 for (succ = 0 ; succ < n_basic_blocks; succ++)
6704 basic_block p = BASIC_BLOCK (pred);
6705 basic_block s = BASIC_BLOCK (succ);
6707 int found_edge = 0;
6709 for (e = p->succ; e; e = e->succ_next)
6710 if (e->dest == s)
6712 found_edge = 1;
6713 break;
6715 for (e = s->pred; e; e = e->pred_next)
6716 if (e->src == p)
6718 found_edge = 1;
6719 break;
6721 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
6722 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6723 fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
6724 pred, succ);
6725 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
6726 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6727 fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
6728 pred, succ, EDGE_INDEX (elist, BASIC_BLOCK (pred),
6729 BASIC_BLOCK (succ)));
6731 for (succ = 0 ; succ < n_basic_blocks; succ++)
6733 basic_block p = ENTRY_BLOCK_PTR;
6734 basic_block s = BASIC_BLOCK (succ);
6736 int found_edge = 0;
6738 for (e = p->succ; e; e = e->succ_next)
6739 if (e->dest == s)
6741 found_edge = 1;
6742 break;
6744 for (e = s->pred; e; e = e->pred_next)
6745 if (e->src == p)
6747 found_edge = 1;
6748 break;
6750 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
6751 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6752 fprintf (f, "*** Edge (entry, %d) appears to not have an index\n",
6753 succ);
6754 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
6755 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6756 fprintf (f, "*** Edge (entry, %d) has index %d, but no edge exists\n",
6757 succ, EDGE_INDEX (elist, ENTRY_BLOCK_PTR,
6758 BASIC_BLOCK (succ)));
6760 for (pred = 0 ; pred < n_basic_blocks; pred++)
6762 basic_block p = BASIC_BLOCK (pred);
6763 basic_block s = EXIT_BLOCK_PTR;
6765 int found_edge = 0;
6767 for (e = p->succ; e; e = e->succ_next)
6768 if (e->dest == s)
6770 found_edge = 1;
6771 break;
6773 for (e = s->pred; e; e = e->pred_next)
6774 if (e->src == p)
6776 found_edge = 1;
6777 break;
6779 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
6780 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6781 fprintf (f, "*** Edge (%d, exit) appears to not have an index\n",
6782 pred);
6783 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
6784 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6785 fprintf (f, "*** Edge (%d, exit) has index %d, but no edge exists\n",
6786 pred, EDGE_INDEX (elist, BASIC_BLOCK (pred),
6787 EXIT_BLOCK_PTR));
6791 /* This routine will determine what, if any, edge there is between
6792 a specified predecessor and successor. */
6794 int
6795 find_edge_index (edge_list, pred, succ)
6796 struct edge_list *edge_list;
6797 basic_block pred, succ;
6799 int x;
6800 for (x = 0; x < NUM_EDGES (edge_list); x++)
6802 if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
6803 && INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
6804 return x;
6806 return (EDGE_INDEX_NO_EDGE);
6809 /* This function will remove an edge from the flow graph. */
6810 void
6811 remove_edge (e)
6812 edge e;
6814 edge last_pred = NULL;
6815 edge last_succ = NULL;
6816 edge tmp;
6817 basic_block src, dest;
6818 src = e->src;
6819 dest = e->dest;
6820 for (tmp = src->succ; tmp && tmp != e; tmp = tmp->succ_next)
6821 last_succ = tmp;
6823 if (!tmp)
6824 abort ();
6825 if (last_succ)
6826 last_succ->succ_next = e->succ_next;
6827 else
6828 src->succ = e->succ_next;
6830 for (tmp = dest->pred; tmp && tmp != e; tmp = tmp->pred_next)
6831 last_pred = tmp;
6833 if (!tmp)
6834 abort ();
6835 if (last_pred)
6836 last_pred->pred_next = e->pred_next;
6837 else
6838 dest->pred = e->pred_next;
6840 n_edges--;
6841 free (e);
6844 /* This routine will remove any fake successor edges for a basic block.
6845 When the edge is removed, it is also removed from whatever predecessor
6846 list it is in. */
6847 static void
6848 remove_fake_successors (bb)
6849 basic_block bb;
6851 edge e;
6852 for (e = bb->succ; e ; )
6854 edge tmp = e;
6855 e = e->succ_next;
6856 if ((tmp->flags & EDGE_FAKE) == EDGE_FAKE)
6857 remove_edge (tmp);
6861 /* This routine will remove all fake edges from the flow graph. If
6862 we remove all fake successors, it will automatically remove all
6863 fake predecessors. */
6864 void
6865 remove_fake_edges ()
6867 int x;
6869 for (x = 0; x < n_basic_blocks; x++)
6870 remove_fake_successors (BASIC_BLOCK (x));
6872 /* We've handled all successors except the entry block's. */
6873 remove_fake_successors (ENTRY_BLOCK_PTR);
6876 /* This function will add a fake edge between any block which has no
6877 successors and the exit block. Some data flow equations require these
6878 edges to exist. */
6879 void
6880 add_noreturn_fake_exit_edges ()
6882 int x;
6884 for (x = 0; x < n_basic_blocks; x++)
6885 if (BASIC_BLOCK (x)->succ == NULL)
6886 make_edge (NULL, BASIC_BLOCK (x), EXIT_BLOCK_PTR, EDGE_FAKE);
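/* For instance (illustrative): a block ending in a call to a noreturn
   function has no successors, so without a fake edge to EXIT_BLOCK_PTR
   a backward dataflow problem such as post-dominators would never
   reach it.  Passes typically bracket such computations with

       add_noreturn_fake_exit_edges ();
       ... run the dataflow problem ...
       remove_fake_edges ();
*/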
6889 /* Redirect an edge's successor from one block to another. */
6891 void
6892 redirect_edge_succ (e, new_succ)
6893 edge e;
6894 basic_block new_succ;
6896 edge *pe;
6898 /* Disconnect the edge from the old successor block. */
6899 for (pe = &e->dest->pred; *pe != e ; pe = &(*pe)->pred_next)
6900 continue;
6901 *pe = (*pe)->pred_next;
6903 /* Reconnect the edge to the new successor block. */
6904 e->pred_next = new_succ->pred;
6905 new_succ->pred = e;
6906 e->dest = new_succ;
6909 /* Redirect an edge's predecessor from one block to another. */
6911 void
6912 redirect_edge_pred (e, new_pred)
6913 edge e;
6914 basic_block new_pred;
6916 edge *pe;
6918 /* Disconnect the edge from the old predecessor block. */
6919 for (pe = &e->src->succ; *pe != e ; pe = &(*pe)->succ_next)
6920 continue;
6921 *pe = (*pe)->succ_next;
6923 /* Reconnect the edge to the new predecessor block. */
6924 e->succ_next = new_pred->succ;
6925 new_pred->succ = e;
6926 e->src = new_pred;
6929 /* Dump the list of basic blocks in the bitmap NODES. */
6930 static void
6931 flow_nodes_print (str, nodes, file)
6932 const char *str;
6933 const sbitmap nodes;
6934 FILE *file;
6936 int node;
6938 fprintf (file, "%s { ", str);
6939 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {fprintf (file, "%d ", node);});
6940 fputs ("}\n", file);
6944 /* Dump the list of exiting edges in the array EDGES. */
6945 static void
6946 flow_exits_print (str, edges, num_edges, file)
6947 const char *str;
6948 const edge *edges;
6949 int num_edges;
6950 FILE *file;
6952 int i;
6954 fprintf (file, "%s { ", str);
6955 for (i = 0; i < num_edges; i++)
6956 fprintf (file, "%d->%d ", edges[i]->src->index, edges[i]->dest->index);
6957 fputs ("}\n", file);
6961 /* Dump loop related CFG information. */
6962 static void
6963 flow_loops_cfg_dump (loops, file)
6964 const struct loops *loops;
6965 FILE *file;
6967 int i;
6969 if (! loops->num || ! file || ! loops->cfg.dom)
6970 return;
6972 for (i = 0; i < n_basic_blocks; i++)
6974 edge succ;
6976 fprintf (file, ";; %d succs { ", i);
6977 for (succ = BASIC_BLOCK (i)->succ; succ; succ = succ->succ_next)
6978 fprintf (file, "%d ", succ->dest->index);
6979 flow_nodes_print ("} dom", loops->cfg.dom[i], file);
6983 /* Dump the DFS node order. */
6984 if (loops->cfg.dfs_order)
6986 fputs (";; DFS order: ", file);
6987 for (i = 0; i < n_basic_blocks; i++)
6988 fprintf (file, "%d ", loops->cfg.dfs_order[i]);
6989 fputs ("\n", file);
6994 /* Return non-zero if the nodes of LOOP are a subset of OUTER. */
6995 static int
6996 flow_loop_nested_p (outer, loop)
6997 struct loop *outer;
6998 struct loop *loop;
7000 return sbitmap_a_subset_b_p (loop->nodes, outer->nodes);
7004 /* Dump the loop information specified by LOOPS to the stream FILE. */
7005 void
7006 flow_loops_dump (loops, file, verbose)
7007 const struct loops *loops;
7008 FILE *file;
7009 int verbose;
7011 int i;
7012 int num_loops;
7014 num_loops = loops->num;
7015 if (! num_loops || ! file)
7016 return;
7018 fprintf (file, ";; %d loops found, %d levels\n",
7019 num_loops, loops->levels);
7021 for (i = 0; i < num_loops; i++)
7023 struct loop *loop = &loops->array[i];
7025 fprintf (file, ";; loop %d (%d to %d):\n;; header %d, latch %d, pre-header %d, depth %d, level %d, outer %ld\n",
7026 i, INSN_UID (loop->header->head), INSN_UID (loop->latch->end),
7027 loop->header->index, loop->latch->index,
7028 loop->pre_header ? loop->pre_header->index : -1,
7029 loop->depth, loop->level,
7030 (long) (loop->outer ? (loop->outer - loops->array) : -1));
7031 fprintf (file, ";; %d", loop->num_nodes);
7032 flow_nodes_print (" nodes", loop->nodes, file);
7033 fprintf (file, ";; %d", loop->num_exits);
7034 flow_exits_print (" exits", loop->exits, loop->num_exits, file);
7036 if (loop->shared)
7038 int j;
7040 for (j = 0; j < i; j++)
7042 struct loop *oloop = &loops->array[j];
7044 if (loop->header == oloop->header)
7046 int disjoint;
7047 int smaller;
7049 smaller = loop->num_nodes < oloop->num_nodes;
7051 /* If the union of LOOP and OLOOP is different from
7052 the larger of LOOP and OLOOP then LOOP and OLOOP
7053 must be disjoint. */
7054 disjoint = ! flow_loop_nested_p (smaller ? loop : oloop,
7055 smaller ? oloop : loop);
7056 fprintf (file, ";; loop header %d shared by loops %d, %d %s\n",
7057 loop->header->index, i, j,
7058 disjoint ? "disjoint" : "nested");
7063 if (verbose)
7065 /* Print diagnostics to compare our concept of a loop with
7066 what the loop notes say. */
7067 if (GET_CODE (PREV_INSN (loop->first->head)) != NOTE
7068 || NOTE_LINE_NUMBER (PREV_INSN (loop->first->head))
7069 != NOTE_INSN_LOOP_BEG)
7070 fprintf (file, ";; No NOTE_INSN_LOOP_BEG at %d\n",
7071 INSN_UID (PREV_INSN (loop->first->head)));
7072 if (GET_CODE (NEXT_INSN (loop->last->end)) != NOTE
7073 || NOTE_LINE_NUMBER (NEXT_INSN (loop->last->end))
7074 != NOTE_INSN_LOOP_END)
7075 fprintf (file, ";; No NOTE_INSN_LOOP_END at %d\n",
7076 INSN_UID (NEXT_INSN (loop->last->end)));
7080 if (verbose)
7081 flow_loops_cfg_dump (loops, file);
7085 /* Free all the memory allocated for LOOPS. */
7086 void
7087 flow_loops_free (loops)
7088 struct loops *loops;
7090 if (loops->array)
7092 int i;
7094 if (! loops->num)
7095 abort ();
7097 /* Free the loop descriptors. */
7098 for (i = 0; i < loops->num; i++)
7100 struct loop *loop = &loops->array[i];
7102 if (loop->nodes)
7103 sbitmap_free (loop->nodes);
7104 if (loop->exits)
7105 free (loop->exits);
7107 free (loops->array);
7108 loops->array = NULL;
7110 if (loops->cfg.dom)
7111 sbitmap_vector_free (loops->cfg.dom);
7112 if (loops->cfg.dfs_order)
7113 free (loops->cfg.dfs_order);
7115 sbitmap_free (loops->shared_headers);
7120 /* Find the exits from the loop using the bitmap of loop nodes NODES
7121 and store them in the EXITS array. Return the number of exits from the
7122 loop. */
7123 static int
7124 flow_loop_exits_find (nodes, exits)
7125 const sbitmap nodes;
7126 edge **exits;
7128 edge e;
7129 int node;
7130 int num_exits;
7132 *exits = NULL;
7134 /* Check all nodes within the loop to see if there are any
7135 successors not in the loop. Note that a node may have multiple
7136 exiting edges. */
7137 num_exits = 0;
7138 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
7139 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
7141 basic_block dest = e->dest;
7143 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
7144 num_exits++;
7148 if (! num_exits)
7149 return 0;
7151 *exits = (edge *) xmalloc (num_exits * sizeof (edge));
7153 /* Store all exiting edges into an array. */
7154 num_exits = 0;
7155 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
7156 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
7158 basic_block dest = e->dest;
7160 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
7161 (*exits)[num_exits++] = e;
7165 return num_exits;
7169 /* Find the nodes contained within the loop with header HEADER and
7170 latch LATCH and store in NODES. Return the number of nodes within
7171 the loop. */
7172 static int
7173 flow_loop_nodes_find (header, latch, nodes)
7174 basic_block header;
7175 basic_block latch;
7176 sbitmap nodes;
7178 basic_block *stack;
7179 int sp;
7180 int num_nodes = 0;
7182 stack = (basic_block *) xmalloc (n_basic_blocks * sizeof (basic_block));
7183 sp = 0;
7185 /* Start with only the loop header in the set of loop nodes. */
7186 sbitmap_zero (nodes);
7187 SET_BIT (nodes, header->index);
7188 num_nodes++;
7189 header->loop_depth++;
7191 /* Push the loop latch on to the stack. */
7192 if (! TEST_BIT (nodes, latch->index))
7194 SET_BIT (nodes, latch->index);
7195 latch->loop_depth++;
7196 num_nodes++;
7197 stack[sp++] = latch;
7200 while (sp)
7202 basic_block node;
7203 edge e;
7205 node = stack[--sp];
7206 for (e = node->pred; e; e = e->pred_next)
7208 basic_block ancestor = e->src;
7210 /* If this ancestor is not marked as part of the loop, add it
7211 to the set of loop nodes and push it onto the stack. */
7212 if (ancestor != ENTRY_BLOCK_PTR
7213 && ! TEST_BIT (nodes, ancestor->index))
7215 SET_BIT (nodes, ancestor->index);
7216 ancestor->loop_depth++;
7217 num_nodes++;
7218 stack[sp++] = ancestor;
7222 free (stack);
7223 return num_nodes;
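/* Illustrative example (not from the original source): for the CFG

       0 -> 1,  1 -> 2,  2 -> 1,  2 -> 3

   with header 1 and latch 2, the walk starts at the latch and follows
   predecessors until the header is reached, so NODES ends up as
   {1, 2}; block 0 lies outside the loop and block 3 is reached only
   by an exit edge.  */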
7227 /* Compute the depth first search order and store in the array
7228 DFS_ORDER. Returns the number of nodes visited. */
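/* (Note: dfs_order[] is indexed by block number and receives
   n_basic_blocks minus the node's completion number, i.e. a reverse
   completion -- reverse postorder -- numbering, so nodes that finish
   later in the search get lower numbers.)  */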
7230 static int
7231 flow_depth_first_order_compute (dfs_order)
7232 int *dfs_order;
7234 edge e;
7235 edge *stack;
7236 int sp;
7237 int dfsnum = 0;
7238 sbitmap visited;
7240 /* Allocate stack for back-tracking up CFG. */
7241 stack = (edge *) xmalloc (n_basic_blocks * sizeof (edge));
7242 sp = 0;
7244 /* Allocate bitmap to track nodes that have been visited. */
7245 visited = sbitmap_alloc (n_basic_blocks);
7247 /* None of the nodes in the CFG have been visited yet. */
7248 sbitmap_zero (visited);
7250 /* Start with the first successor edge from the entry block. */
7251 e = ENTRY_BLOCK_PTR->succ;
7252 while (e)
7254 basic_block src = e->src;
7255 basic_block dest = e->dest;
7257 /* Mark that we have visited this node. */
7258 if (src != ENTRY_BLOCK_PTR)
7259 SET_BIT (visited, src->index);
7261 /* If this node has not been visited before, push the current
7262 edge on to the stack and proceed with the first successor
7263 edge of this node. */
7264 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index)
7265 && dest->succ)
7267 stack[sp++] = e;
7268 e = dest->succ;
7270 else
7272 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index)
7273 && ! dest->succ)
7275 /* DEST has no successors (for example, a non-returning
7276 function is called) so do not push the current edge
7277 but carry on with its next successor. */
7278 dfs_order[dest->index] = n_basic_blocks - ++dfsnum;
7279 SET_BIT (visited, dest->index);
7282 while (! e->succ_next && src != ENTRY_BLOCK_PTR)
7284 dfs_order[src->index] = n_basic_blocks - ++dfsnum;
7286 /* Pop edge off stack. */
7287 e = stack[--sp];
7288 src = e->src;
7290 e = e->succ_next;
7293 free (stack);
7294 sbitmap_free (visited);
7296 /* The number of nodes visited should not be greater than
7297 n_basic_blocks. */
7298 if (dfsnum > n_basic_blocks)
7299 abort ();
7301 /* If the search did not visit every node, the CFG contains unreachable blocks. */
7302 if (dfsnum < n_basic_blocks)
7303 abort ();
7304 return dfsnum;
7308 /* Return the block for the pre-header of the loop with header
7309 HEADER where DOM specifies the dominator information. Return NULL if
7310 there is no pre-header. */
7311 static basic_block
7312 flow_loop_pre_header_find (header, dom)
7313 basic_block header;
7314 const sbitmap *dom;
7316 basic_block pre_header;
7317 edge e;
7319 /* If block P is the only predecessor of the header that the
7320 header does not dominate, then it is the pre-header. */
7321 pre_header = NULL;
7322 for (e = header->pred; e; e = e->pred_next)
7324 basic_block node = e->src;
7326 if (node != ENTRY_BLOCK_PTR
7327 && ! TEST_BIT (dom[node->index], header->index))
7329 if (pre_header == NULL)
7330 pre_header = node;
7331 else
7333 /* There are multiple edges into the header from outside
7334 the loop so there is no pre-header block. */
7335 pre_header = NULL;
7336 break;
7340 return pre_header;
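/* Illustrative example: in the CFG

       0 -> 1,  1 -> 2,  2 -> 1

   with loop header 1 and latch 2, block 0 is the only predecessor of
   the header that the header does not dominate, so it is returned as
   the pre-header.  Were there a second entry edge from outside the
   loop, NULL would be returned instead.  */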
7344 /* Add LOOP to the loop hierarchy tree where PREVLOOP was the loop
7345 previously added. The insertion algorithm assumes that the loops
7346 are added in the order found by a depth first search of the CFG. */
7347 static void
7348 flow_loop_tree_node_add (prevloop, loop)
7349 struct loop *prevloop;
7350 struct loop *loop;
7353 if (flow_loop_nested_p (prevloop, loop))
7355 prevloop->inner = loop;
7356 loop->outer = prevloop;
7357 return;
7360 while (prevloop->outer)
7362 if (flow_loop_nested_p (prevloop->outer, loop))
7364 prevloop->next = loop;
7365 loop->outer = prevloop->outer;
7366 return;
7368 prevloop = prevloop->outer;
7371 prevloop->next = loop;
7372 loop->outer = NULL;
7376 /* Build the loop hierarchy tree for LOOPS. */
7377 static void
7378 flow_loops_tree_build (loops)
7379 struct loops *loops;
7381 int i;
7382 int num_loops;
7384 num_loops = loops->num;
7385 if (! num_loops)
7386 return;
7388 /* Root the loop hierarchy tree with the first loop found.
7389 Since we used a depth first search this should be the
7390 outermost loop. */
7391 loops->tree = &loops->array[0];
7392 loops->tree->outer = loops->tree->inner = loops->tree->next = NULL;
7394 /* Add the remaining loops to the tree. */
7395 for (i = 1; i < num_loops; i++)
7396 flow_loop_tree_node_add (&loops->array[i - 1], &loops->array[i]);
7400 /* Helper function to compute loop nesting depth and enclosed loop level
7401 for the natural loop specified by LOOP at the loop depth DEPTH.
7402 Returns the loop level. */
7403 static int
7404 flow_loop_level_compute (loop, depth)
7405 struct loop *loop;
7406 int depth;
7408 struct loop *inner;
7409 int level = 1;
7411 if (! loop)
7412 return 0;
7414 /* Traverse loop tree assigning depth and computing level as the
7415 maximum level of all the inner loops of this loop. The loop
7416 level is equivalent to the height of the loop in the loop tree
7417 and corresponds to the number of enclosed loop levels (including
7418 itself). */
7419 for (inner = loop->inner; inner; inner = inner->next)
7421 int ilevel;
7423 ilevel = flow_loop_level_compute (inner, depth + 1) + 1;
7425 if (ilevel > level)
7426 level = ilevel;
7428 loop->level = level;
7429 loop->depth = depth;
7430 return level;
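/* For example (illustrative): a loop nest where loop A contains B and
   B contains C yields depth 1/2/3 and level 3/2/1 for A/B/C
   respectively; the level counts the enclosed loop levels, including
   the loop itself.  */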
7434 /* Compute the loop nesting depth and enclosed loop level for the loop
7435 hierarchy tree specified by LOOPS. Return the maximum enclosed loop
7436 level. */
7438 static int
7439 flow_loops_level_compute (loops)
7440 struct loops *loops;
7442 struct loop *loop;
7443 int level;
7444 int levels = 0;
7446 /* Traverse all the outer level loops. */
7447 for (loop = loops->tree; loop; loop = loop->next)
7449 level = flow_loop_level_compute (loop, 1);
7450 if (level > levels)
7451 levels = level;
7453 return levels;
7457 /* Find all the natural loops in the function, save them in the LOOPS
7458 structure, and recalculate loop_depth information in the basic block structures.
7459 Return the number of natural loops found. */
7461 int
7462 flow_loops_find (loops)
7463 struct loops *loops;
7465 int i;
7466 int b;
7467 int num_loops;
7468 edge e;
7469 sbitmap headers;
7470 sbitmap *dom;
7471 int *dfs_order;
7473 loops->num = 0;
7474 loops->array = NULL;
7475 loops->tree = NULL;
7476 dfs_order = NULL;
7478 /* Taking care of this degenerate case makes the rest of
7479 this code simpler. */
7480 if (n_basic_blocks == 0)
7481 return 0;
7483 /* Compute the dominators. */
7484 dom = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
7485 compute_flow_dominators (dom, NULL);
7487 /* Count the number of loop edges (back edges). This should be the
7488 same as the number of natural loops. Also clear the loop_depth
7489 fields; as we work from inner to outer in a loop nest, we call
7490 flow_loop_nodes_find, which will increment loop_depth for nodes
7491 within the current loop, which happens to enclose inner loops. */
7493 num_loops = 0;
7494 for (b = 0; b < n_basic_blocks; b++)
7496 BASIC_BLOCK (b)->loop_depth = 0;
7497 for (e = BASIC_BLOCK (b)->pred; e; e = e->pred_next)
7499 basic_block latch = e->src;
7501 /* Look for back edges where a predecessor is dominated
7502 by this block. A natural loop has a single entry
7503 node (header) that dominates all the nodes in the
7504 loop. It also has a single back edge to the header
7505 from a latch node. Note that multiple natural loops
7506 may share the same header. */
7507 if (latch != ENTRY_BLOCK_PTR && TEST_BIT (dom[latch->index], b))
7508 num_loops++;
7512 if (num_loops)
7514 /* Compute depth first search order of the CFG so that outer
7515 natural loops will be found before inner natural loops. */
7516 dfs_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
7517 flow_depth_first_order_compute (dfs_order);
7519 /* Allocate loop structures. */
7520 loops->array
7521 = (struct loop *) xcalloc (num_loops, sizeof (struct loop));
7523 headers = sbitmap_alloc (n_basic_blocks);
7524 sbitmap_zero (headers);
7526 loops->shared_headers = sbitmap_alloc (n_basic_blocks);
7527 sbitmap_zero (loops->shared_headers);
7529 /* Find and record information about all the natural loops
7530 in the CFG. */
7531 num_loops = 0;
7532 for (b = 0; b < n_basic_blocks; b++)
7534 basic_block header;
7536 /* Search the nodes of the CFG in DFS order so that we can find
7537 outer loops first. */
7538 header = BASIC_BLOCK (dfs_order[b]);
7540 /* Look for all the possible latch blocks for this header. */
7541 for (e = header->pred; e; e = e->pred_next)
7543 basic_block latch = e->src;
7545 /* Look for back edges where a predecessor is dominated
7546 by this block. A natural loop has a single entry
7547 node (header) that dominates all the nodes in the
7548 loop. It also has a single back edge to the header
7549 from a latch node. Note that multiple natural loops
7550 may share the same header. */
7551 if (latch != ENTRY_BLOCK_PTR
7552 && TEST_BIT (dom[latch->index], header->index))
7554 struct loop *loop;
7556 loop = loops->array + num_loops;
7558 loop->header = header;
7559 loop->latch = latch;
7561 /* Keep track of blocks that are loop headers so
7562 that we can tell which loops should be merged. */
7563 if (TEST_BIT (headers, header->index))
7564 SET_BIT (loops->shared_headers, header->index);
7565 SET_BIT (headers, header->index);
7567 /* Find nodes contained within the loop. */
7568 loop->nodes = sbitmap_alloc (n_basic_blocks);
7569 loop->num_nodes
7570 = flow_loop_nodes_find (header, latch, loop->nodes);
7572 /* Compute first and last blocks within the loop.
7573 These are often the same as the loop header and
7574 loop latch respectively, but this is not always
7575 the case. */
7576 loop->first
7577 = BASIC_BLOCK (sbitmap_first_set_bit (loop->nodes));
7578 loop->last
7579 = BASIC_BLOCK (sbitmap_last_set_bit (loop->nodes));
7581 /* Find edges which exit the loop. Note that a node
7582 may have several exit edges. */
7583 loop->num_exits
7584 = flow_loop_exits_find (loop->nodes, &loop->exits);
7586 /* Look to see if the loop has a pre-header node. */
7587 loop->pre_header
7588 = flow_loop_pre_header_find (header, dom);
7590 num_loops++;
7595 /* Natural loops with shared headers may either be disjoint or
7596 nested. Disjoint loops with shared headers cannot be inner
7597 loops and should be merged. For now just mark loops that share
7598 headers. */
7599 for (i = 0; i < num_loops; i++)
7600 if (TEST_BIT (loops->shared_headers, loops->array[i].header->index))
7601 loops->array[i].shared = 1;
7603 sbitmap_free (headers);
7606 loops->num = num_loops;
7608 /* Save CFG derived information to avoid recomputing it. */
7609 loops->cfg.dom = dom;
7610 loops->cfg.dfs_order = dfs_order;
7612 /* Build the loop hierarchy tree. */
7613 flow_loops_tree_build (loops);
7615 /* Assign the loop nesting depth and enclosed loop level for each
7616 loop. */
7617 loops->levels = flow_loops_level_compute (loops);
7619 return num_loops;
7623 /* Return non-zero if edge E enters header of LOOP from outside of LOOP. */
7625 int
7626 flow_loop_outside_edge_p (loop, e)
7627 const struct loop *loop;
7628 edge e;
7630 if (e->dest != loop->header)
7631 abort ();
7632 return (e->src == ENTRY_BLOCK_PTR)
7633 || ! TEST_BIT (loop->nodes, e->src->index);
7637 /* Clear LOG_LINKS fields of insns in a chain. */
7639 void
7640 clear_log_links (insns)
7641 rtx insns;
7643 rtx i;
7644 for (i = insns; i; i = NEXT_INSN (i))
7645 if (GET_RTX_CLASS (GET_CODE (i)) == 'i')
7646 LOG_LINKS (i) = 0;
7649 /* Given a register bitmap, turn on the bits in a HARD_REG_SET that
7650 correspond to the hard registers, if any, set in that map. This
7651 could be done far more efficiently by having all sorts of special-cases
7652 with moving single words, but probably isn't worth the trouble. */
7654 void
7655 reg_set_to_hard_reg_set (to, from)
7656 HARD_REG_SET *to;
7657 bitmap from;
7659 int i;
7661 EXECUTE_IF_SET_IN_BITMAP
7662 (from, 0, i,
7664 if (i >= FIRST_PSEUDO_REGISTER)
7665 return;
7666 SET_HARD_REG_BIT (*to, i);