/* Data flow analysis for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

/* This file contains the data flow analysis pass of the compiler.  It
   computes data flow information which tells combine_instructions
   which insns to consider combining and controls register allocation.

   Additional data flow information that is too bulky to record is
   generated during the analysis, and is used at that time to create
   autoincrement and autodecrement addressing.

   The first step is dividing the function into basic blocks.
   find_basic_blocks does this.  Then life_analysis determines
   where each register is live and where it is dead.

   ** find_basic_blocks **

   find_basic_blocks divides the current function's rtl into basic
   blocks and constructs the CFG.  The blocks are recorded in the
   basic_block_info array; the CFG exists in the edge structures
   referenced by the blocks.

   find_basic_blocks also finds any unreachable loops and deletes them.

   ** life_analysis **

   life_analysis is called immediately after find_basic_blocks.
   It uses the basic block information to determine where each
   hard or pseudo register is live.

   ** live-register info **

   The information about where each register is live is in two parts:
   the REG_NOTES of insns, and the vector basic_block->global_live_at_start.

   basic_block->global_live_at_start has an element for each basic
   block, and the element is a bit-vector with a bit for each hard or
   pseudo register.  The bit is 1 if the register is live at the
   beginning of the basic block.
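
   For example (a sketch, not code in this file), one can test whether
   register REGNO is live on entry to basic block I with
   REGNO_REG_SET_P (BASIC_BLOCK (i)->global_live_at_start, regno).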

   Two types of elements can be added to an insn's REG_NOTES.
   A REG_DEAD note is added to an insn's REG_NOTES for any register
   that meets both of two conditions:  The value in the register is not
   needed in subsequent insns and the insn does not replace the value in
   the register (in the case of multi-word hard registers, the value in
   each register must be replaced by the insn to avoid a REG_DEAD note).

   In the vast majority of cases, an object in a REG_DEAD note will be
   used somewhere in the insn.  The (rare) exception to this is if an
   insn uses a multi-word hard register and only some of the registers are
   needed in subsequent insns.  In that case, REG_DEAD notes will be
   provided for those hard registers that are not subsequently needed.
   Partial REG_DEAD notes of this type do not occur when an insn sets
   only some of the hard registers used in such a multi-word operand;
   omitting REG_DEAD notes for objects stored in an insn is optional and
   the desire to do so does not justify the complexity of the partial
   REG_DEAD notes.

   REG_UNUSED notes are added for each register that is set by the insn
   but is unused subsequently (if every register set by the insn is unused
   and the insn does not reference memory or have some other side-effect,
   the insn is deleted instead).  If only part of a multi-word hard
   register is used in a subsequent insn, REG_UNUSED notes are made for
   the parts that will not be used.
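
   As an illustration (the RTL below is schematic; insn uids and register
   numbers are arbitrary), an add whose second operand dies would carry:

	(insn 42 41 43 (set (reg 100) (plus (reg 101) (reg 102)))
	     (expr_list:REG_DEAD (reg 102) (nil)))

   meaning the value in (reg 102) is not needed after this insn.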

   To determine which registers are live after any insn, one can
   start from the beginning of the basic block and scan insns, noting
   which registers are set by each insn and which die there.

   ** Other actions of life_analysis **

   life_analysis sets up the LOG_LINKS fields of insns because the
   information needed to do so is readily available.

   life_analysis deletes insns whose only effect is to store a value
   that is never used.

   life_analysis notices cases where a reference to a register as
   a memory address can be combined with a preceding or following
   incrementation or decrementation of the register.  The separate
   instruction to increment or decrement is deleted and the address
   is changed to a POST_INC or similar rtx.

   Each time an incrementing or decrementing address is created,
   a REG_INC element is added to the insn's REG_NOTES list.
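
   For example (schematic RTL; the exact form depends on the target's
   addressing modes), the pair

	(set (mem (reg 99)) (reg 100))
	(set (reg 99) (plus (reg 99) (const_int 4)))

   can be rewritten as a single store through (post_inc (reg 99)),
   with a REG_INC note for (reg 99) attached to the surviving insn.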

   life_analysis fills in certain vectors containing information about
   register usage: REG_N_REFS, REG_N_DEATHS, REG_N_SETS, REG_LIVE_LENGTH,
   REG_N_CALLS_CROSSED and REG_BASIC_BLOCK.

   life_analysis sets current_function_sp_is_unchanging if the function
   doesn't modify the stack pointer.  */

/* TODO:

   Split out from life_analysis:
	- local property discovery (bb->local_live, bb->local_set)
	- global property computation
	- log links creation
	- pre/post modify transformation
*/

#include "config.h"
#include "system.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-config.h"
#include "regs.h"
#include "flags.h"
#include "output.h"
#include "function.h"
#include "except.h"
#include "toplev.h"
#include "recog.h"
#include "insn-flags.h"
#include "expr.h"
#include "ssa.h"

#include "obstack.h"
#include "splay-tree.h"

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free

/* EXIT_IGNORE_STACK should be nonzero if, when returning from a function,
   the stack pointer does not matter.  The value is tested only in
   functions that have frame pointers.
   No definition is equivalent to always zero.  */
#ifndef EXIT_IGNORE_STACK
#define EXIT_IGNORE_STACK 0
#endif

#ifndef HAVE_epilogue
#define HAVE_epilogue 0
#endif
#ifndef HAVE_prologue
#define HAVE_prologue 0
#endif
#ifndef HAVE_sibcall_epilogue
#define HAVE_sibcall_epilogue 0
#endif

#ifndef LOCAL_REGNO
#define LOCAL_REGNO(REGNO)  0
#endif
#ifndef EPILOGUE_USES
#define EPILOGUE_USES(REGNO)  0
#endif

/* The contents of the current function definition are allocated
   in this obstack, and all are freed at the end of the function.
   For top-level functions, this is temporary_obstack.
   Separate obstacks are made for nested functions.  */

extern struct obstack *function_obstack;

/* Number of basic blocks in the current function.  */

int n_basic_blocks;

/* Number of edges in the current function.  */

int n_edges;

/* The basic block array.  */

varray_type basic_block_info;

/* The special entry and exit blocks.  */

struct basic_block_def entry_exit_blocks[2]
= {{NULL,			/* head */
    NULL,			/* end */
    NULL,			/* pred */
    NULL,			/* succ */
    NULL,			/* local_set */
    NULL,			/* global_live_at_start */
    NULL,			/* global_live_at_end */
    NULL,			/* aux */
    ENTRY_BLOCK,		/* index */
    0,				/* loop_depth */
    -1, -1,			/* eh_beg, eh_end */
    0				/* count */
   },
   {NULL,			/* head */
    NULL,			/* end */
    NULL,			/* pred */
    NULL,			/* succ */
    NULL,			/* local_set */
    NULL,			/* global_live_at_start */
    NULL,			/* global_live_at_end */
    NULL,			/* aux */
    EXIT_BLOCK,			/* index */
    0,				/* loop_depth */
    -1, -1,			/* eh_beg, eh_end */
    0				/* count */
   }
};
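
/* ENTRY_BLOCK_PTR and EXIT_BLOCK_PTR (see basic-block.h) point at these
   two structures; the code below normally goes through those macros
   rather than indexing entry_exit_blocks directly.  */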

/* Nonzero if the second flow pass has completed.  */
int flow2_completed;

/* Maximum register number used in this function, plus one.  */

int max_regno;

/* Indexed by n, giving various register information */

varray_type reg_n_info;

/* Size of a regset for the current function,
   in (1) bytes and (2) elements.  */

int regset_bytes;
int regset_size;

/* Regset of regs live when calls to `setjmp'-like functions happen.  */
/* ??? Does this exist only for the setjmp-clobbered warning message?  */

regset regs_live_at_setjmp;

/* List made of EXPR_LIST rtx's which gives pairs of pseudo registers
   that have to go in the same hard reg.
   The first two regs in the list are a pair, and the next two
   are another pair, etc.  */
rtx regs_may_share;

/* Set of registers that may be eliminable.  These are handled specially
   in updating regs_ever_live.  */

static HARD_REG_SET elim_reg_set;

/* The basic block structure for every insn, indexed by uid.  */

varray_type basic_block_for_insn;

/* The labels mentioned in non-jump rtl.  Valid during find_basic_blocks.  */
/* ??? Should probably be using LABEL_NUSES instead.  It would take a
   bit of surgery to be able to use or co-opt the routines in jump.  */

static rtx label_value_list;
static rtx tail_recursion_label_list;

/* Holds information for tracking conditional register life information.  */
struct reg_cond_life_info
{
  /* An EXPR_LIST of conditions under which a register is dead.  */
  rtx condition;

  /* ??? Could store mask of bytes that are dead, so that we could finally
     track lifetimes of multi-word registers accessed via subregs.  */
};

/* For use in communicating between propagate_block and its subroutines.
   Holds all information needed to compute life and def-use information.  */

struct propagate_block_info
{
  /* The basic block we're considering.  */
  basic_block bb;

  /* Bit N is set if register N is conditionally or unconditionally live.  */
  regset reg_live;

  /* Bit N is set if register N is set this insn.  */
  regset new_set;

  /* Element N is the next insn that uses (hard or pseudo) register N
     within the current basic block; or zero, if there is no such insn.  */
  rtx *reg_next_use;

  /* Contains a list of all the MEMs we are tracking for dead store
     elimination.  */
  rtx mem_set_list;

  /* If non-null, record the set of registers set in the basic block.  */
  regset local_set;

#ifdef HAVE_conditional_execution
  /* Indexed by register number, holds a reg_cond_life_info for each
     register that is not unconditionally live or dead.  */
  splay_tree reg_cond_dead;

  /* Bit N is set if register N is in an expression in reg_cond_dead.  */
  regset reg_cond_reg;
#endif

  /* Non-zero if the value of CC0 is live.  */
  int cc0_live;

  /* Flags controlling the set of information propagate_block collects.  */
  int flags;
};

/* Store the data structures necessary for depth-first search.  */
struct depth_first_search_dsS {
  /* stack for backtracking during the algorithm */
  basic_block *stack;

  /* number of edges in the stack.  That is, positions 0, ..., sp-1
     have edges.  */
  unsigned int sp;

  /* record of basic blocks already seen by depth-first search */
  sbitmap visited_blocks;
};
typedef struct depth_first_search_dsS *depth_first_search_ds;

/* Forward declarations */
static int count_basic_blocks		PARAMS ((rtx));
static void find_basic_blocks_1		PARAMS ((rtx));
static rtx find_label_refs		PARAMS ((rtx, rtx));
static void clear_edges			PARAMS ((void));
static void make_edges			PARAMS ((rtx));
static void make_label_edge		PARAMS ((sbitmap *, basic_block,
						 rtx, int));
static void make_eh_edge		PARAMS ((sbitmap *, eh_nesting_info *,
						 basic_block, rtx, int));
static void mark_critical_edges		PARAMS ((void));
static void move_stray_eh_region_notes	PARAMS ((void));
static void record_active_eh_regions	PARAMS ((rtx));

static void commit_one_edge_insertion	PARAMS ((edge));

static void delete_unreachable_blocks	PARAMS ((void));
static void delete_eh_regions		PARAMS ((void));
static int can_delete_note_p		PARAMS ((rtx));
static void expunge_block		PARAMS ((basic_block));
static int can_delete_label_p		PARAMS ((rtx));
static int tail_recursion_label_p	PARAMS ((rtx));
static int merge_blocks_move_predecessor_nojumps PARAMS ((basic_block,
							  basic_block));
static int merge_blocks_move_successor_nojumps PARAMS ((basic_block,
							basic_block));
static int merge_blocks			PARAMS ((edge, basic_block,
						 basic_block));
static void try_merge_blocks		PARAMS ((void));
static void tidy_fallthru_edges		PARAMS ((void));
static int verify_wide_reg_1		PARAMS ((rtx *, void *));
static void verify_wide_reg		PARAMS ((int, rtx, rtx));
static void verify_local_live_at_start	PARAMS ((regset, basic_block));
static int set_noop_p			PARAMS ((rtx));
static int noop_move_p			PARAMS ((rtx));
static void delete_noop_moves		PARAMS ((rtx));
static void notice_stack_pointer_modification_1 PARAMS ((rtx, rtx, void *));
static void notice_stack_pointer_modification PARAMS ((rtx));
static void mark_reg			PARAMS ((rtx, void *));
static void mark_regs_live_at_end	PARAMS ((regset));
static int set_phi_alternative_reg	PARAMS ((rtx, int, int, void *));
static void calculate_global_regs_live	PARAMS ((sbitmap, sbitmap, int));
static void propagate_block_delete_insn PARAMS ((basic_block, rtx));
static rtx propagate_block_delete_libcall PARAMS ((basic_block, rtx, rtx));
static int insn_dead_p			PARAMS ((struct propagate_block_info *,
						 rtx, int, rtx));
static int libcall_dead_p		PARAMS ((struct propagate_block_info *,
						 rtx, rtx));
static void mark_set_regs		PARAMS ((struct propagate_block_info *,
						 rtx, rtx));
static void mark_set_1			PARAMS ((struct propagate_block_info *,
						 enum rtx_code, rtx, rtx,
						 rtx, int));
#ifdef HAVE_conditional_execution
static int mark_regno_cond_dead		PARAMS ((struct propagate_block_info *,
						 int, rtx));
static void free_reg_cond_life_info	PARAMS ((splay_tree_value));
static int flush_reg_cond_reg_1		PARAMS ((splay_tree_node, void *));
static void flush_reg_cond_reg		PARAMS ((struct propagate_block_info *,
						 int));
static rtx ior_reg_cond			PARAMS ((rtx, rtx));
static rtx not_reg_cond			PARAMS ((rtx));
static rtx nand_reg_cond		PARAMS ((rtx, rtx));
#endif
#ifdef AUTO_INC_DEC
static void attempt_auto_inc		PARAMS ((struct propagate_block_info *,
						 rtx, rtx, rtx, rtx, rtx));
static void find_auto_inc		PARAMS ((struct propagate_block_info *,
						 rtx, rtx));
static int try_pre_increment_1		PARAMS ((struct propagate_block_info *,
						 rtx));
static int try_pre_increment		PARAMS ((rtx, rtx, HOST_WIDE_INT));
#endif
static void mark_used_reg		PARAMS ((struct propagate_block_info *,
						 rtx, rtx, rtx));
static void mark_used_regs		PARAMS ((struct propagate_block_info *,
						 rtx, rtx, rtx));
void dump_flow_info			PARAMS ((FILE *));
void debug_flow_info			PARAMS ((void));
static void dump_edge_info		PARAMS ((FILE *, edge, int));

static void invalidate_mems_from_autoinc PARAMS ((struct propagate_block_info *,
						  rtx));
static void remove_fake_successors	PARAMS ((basic_block));
static void flow_nodes_print	PARAMS ((const char *, const sbitmap, FILE *));
static void flow_exits_print	PARAMS ((const char *, const edge *, int, FILE *));
static void flow_loops_cfg_dump		PARAMS ((const struct loops *, FILE *));
static int flow_loop_nested_p		PARAMS ((struct loop *, struct loop *));
static int flow_loop_exits_find		PARAMS ((const sbitmap, edge **));
static int flow_loop_nodes_find	PARAMS ((basic_block, basic_block, sbitmap));
static int flow_depth_first_order_compute PARAMS ((int *, int *));
static void flow_dfs_compute_reverse_init
  PARAMS ((depth_first_search_ds));
static void flow_dfs_compute_reverse_add_bb
  PARAMS ((depth_first_search_ds, basic_block));
static basic_block flow_dfs_compute_reverse_execute
  PARAMS ((depth_first_search_ds));
static void flow_dfs_compute_reverse_finish
  PARAMS ((depth_first_search_ds));
static basic_block flow_loop_pre_header_find PARAMS ((basic_block, const sbitmap *));
static void flow_loop_tree_node_add	PARAMS ((struct loop *, struct loop *));
static void flow_loops_tree_build	PARAMS ((struct loops *));
static int flow_loop_level_compute	PARAMS ((struct loop *, int));
static int flow_loops_level_compute	PARAMS ((struct loops *));

/* Find basic blocks of the current function.
   F is the first insn of the function and NREGS the number of register
   numbers in use.  */

void
find_basic_blocks (f, nregs, file)
     rtx f;
     int nregs ATTRIBUTE_UNUSED;
     FILE *file ATTRIBUTE_UNUSED;
{
  int max_uid;

  /* Flush out existing data.  */
  if (basic_block_info != NULL)
    {
      int i;

      clear_edges ();

      /* Clear bb->aux on all extant basic blocks.  We'll use this as a
	 tag for reuse during create_basic_block, just in case some pass
	 copies around basic block notes improperly.  */
      for (i = 0; i < n_basic_blocks; ++i)
	BASIC_BLOCK (i)->aux = NULL;

      VARRAY_FREE (basic_block_info);
    }

  n_basic_blocks = count_basic_blocks (f);

  /* Size the basic block table.  The actual structures will be allocated
     by find_basic_blocks_1, since we want to keep the structure pointers
     stable across calls to find_basic_blocks.  */
  /* ??? This whole issue would be much simpler if we called find_basic_blocks
     exactly once, and thereafter we don't have a single long chain of
     instructions at all until close to the end of compilation when we
     actually lay them out.  */

  VARRAY_BB_INIT (basic_block_info, n_basic_blocks, "basic_block_info");

  find_basic_blocks_1 (f);

  /* Record the block to which an insn belongs.  */
  /* ??? This should be done another way, by which (perhaps) a label is
     tagged directly with the basic block that it starts.  It is used for
     more than that currently, but IMO that is the only valid use.  */

  max_uid = get_max_uid ();
#ifdef AUTO_INC_DEC
  /* Leave space for insns life_analysis makes in some cases for auto-inc.
     These cases are rare, so we don't need too much space.  */
  max_uid += max_uid / 10;
#endif

  compute_bb_for_insn (max_uid);

  /* Discover the edges of our cfg.  */
  record_active_eh_regions (f);
  make_edges (label_value_list);

  /* Do very simple cleanup now, for the benefit of code that runs between
     here and cleanup_cfg, e.g. thread_prologue_and_epilogue_insns.  */
  tidy_fallthru_edges ();

  mark_critical_edges ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
}

/* Count the basic blocks of the function.  */

static int
count_basic_blocks (f)
     rtx f;
{
  register rtx insn;
  register RTX_CODE prev_code;
  register int count = 0;
  int eh_region = 0;
  int call_had_abnormal_edge = 0;

  prev_code = JUMP_INSN;
  for (insn = f; insn; insn = NEXT_INSN (insn))
    {
      register RTX_CODE code = GET_CODE (insn);

      if (code == CODE_LABEL
	  || (GET_RTX_CLASS (code) == 'i'
	      && (prev_code == JUMP_INSN
		  || prev_code == BARRIER
		  || (prev_code == CALL_INSN && call_had_abnormal_edge))))
	count++;

      /* Record whether this call created an edge.  */
      if (code == CALL_INSN)
	{
	  rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
	  int region = (note ? INTVAL (XEXP (note, 0)) : 1);

	  call_had_abnormal_edge = 0;

	  /* If there is an EH region or rethrow, we have an edge.  */
	  if ((eh_region && region > 0)
	      || find_reg_note (insn, REG_EH_RETHROW, NULL_RTX))
	    call_had_abnormal_edge = 1;
	  else if (nonlocal_goto_handler_labels && region >= 0)
	    /* If there is a nonlocal goto label and the specified
	       region number isn't -1, we have an edge.  (0 means
	       no throw, but might have a nonlocal goto).  */
	    call_had_abnormal_edge = 1;
	}

      if (code != NOTE)
	prev_code = code;
      else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
	++eh_region;
      else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
	--eh_region;
    }

  /* The rest of the compiler works a bit more smoothly when we don't have
     to check for the edge case of do-nothing functions with no basic
     blocks.  */
  if (count == 0)
    {
      emit_insn (gen_rtx_USE (VOIDmode, const0_rtx));
      count = 1;
    }

  return count;
}

/* Scan a list of insns for labels referred to other than by jumps.
   This is used to scan the alternatives of a call placeholder.  */
static rtx
find_label_refs (f, lvl)
     rtx f;
     rtx lvl;
{
  rtx insn;

  for (insn = f; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
	rtx note;

	/* Make a list of all labels referred to other than by jumps
	   (which just don't have the REG_LABEL notes).

	   Make a special exception for labels followed by an ADDR*VEC,
	   as this would be a part of the tablejump setup code.

	   Make a special exception for the eh_return_stub_label, which
	   we know isn't part of any otherwise visible control flow.  */

	for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	  if (REG_NOTE_KIND (note) == REG_LABEL)
	    {
	      rtx lab = XEXP (note, 0), next;

	      if (lab == eh_return_stub_label)
		;
	      else if ((next = next_nonnote_insn (lab)) != NULL
		       && GET_CODE (next) == JUMP_INSN
		       && (GET_CODE (PATTERN (next)) == ADDR_VEC
			   || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
		;
	      else if (GET_CODE (lab) == NOTE)
		;
	      else
		lvl = alloc_EXPR_LIST (0, XEXP (note, 0), lvl);
	    }
      }

  return lvl;
}

/* Find all basic blocks of the function whose first insn is F.

   Collect and return a list of labels whose addresses are taken.  This
   will be used in make_edges for use with computed gotos.  */

static void
find_basic_blocks_1 (f)
     rtx f;
{
  register rtx insn, next;
  int i = 0;
  rtx bb_note = NULL_RTX;
  rtx eh_list = NULL_RTX;
  rtx lvl = NULL_RTX;
  rtx trll = NULL_RTX;
  rtx head = NULL_RTX;
  rtx end = NULL_RTX;

  /* We process the instructions in a slightly different way than we did
     previously.  This is so that we see a NOTE_BASIC_BLOCK after we have
     closed out the previous block, so that it gets attached at the proper
     place.  Since this form should be equivalent to the previous,
     count_basic_blocks continues to use the old form as a check.  */

  for (insn = f; insn; insn = next)
    {
      enum rtx_code code = GET_CODE (insn);

      next = NEXT_INSN (insn);

      switch (code)
	{
	case NOTE:
	  {
	    int kind = NOTE_LINE_NUMBER (insn);

	    /* Keep a LIFO list of the currently active exception notes.  */
	    if (kind == NOTE_INSN_EH_REGION_BEG)
	      eh_list = alloc_INSN_LIST (insn, eh_list);
	    else if (kind == NOTE_INSN_EH_REGION_END)
	      {
		rtx t = eh_list;

		eh_list = XEXP (eh_list, 1);
		free_INSN_LIST_node (t);
	      }

	    /* Look for basic block notes with which to keep the
	       basic_block_info pointers stable.  Unthread the note now;
	       we'll put it back at the right place in create_basic_block.
	       Or not at all if we've already found a note in this block.  */
	    else if (kind == NOTE_INSN_BASIC_BLOCK)
	      {
		if (bb_note == NULL_RTX)
		  bb_note = insn;
		else
		  next = flow_delete_insn (insn);
	      }
	    break;
	  }

	case CODE_LABEL:
	  /* A basic block starts at a label.  If we've closed one off due
	     to a barrier or some such, no need to do it again.  */
	  if (head != NULL_RTX)
	    {
	      /* While we now have edge lists with which other portions of
		 the compiler might determine a call ending a basic block
		 does not imply an abnormal edge, it will be a bit before
		 everything can be updated.  So continue to emit a noop at
		 the end of such a block.  */
	      if (GET_CODE (end) == CALL_INSN && ! SIBLING_CALL_P (end))
		{
		  rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
		  end = emit_insn_after (nop, end);
		}

	      create_basic_block (i++, head, end, bb_note);
	      bb_note = NULL_RTX;
	    }

	  head = end = insn;
	  break;

	case JUMP_INSN:
	  /* A basic block ends at a jump.  */
	  if (head == NULL_RTX)
	    head = insn;
	  else
	    {
	      /* ??? Make a special check for table jumps.  The way this
		 happens is truly and amazingly gross.  We are about to
		 create a basic block that contains just a code label and
		 an addr*vec jump insn.  Worse, an addr_diff_vec creates
		 its own natural loop.

		 Prevent this bit of brain damage, pasting things together
		 correctly in make_edges.

		 The correct solution involves emitting the table directly
		 on the tablejump instruction as a note, or JUMP_LABEL.  */

	      if (GET_CODE (PATTERN (insn)) == ADDR_VEC
		  || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
		{
		  head = end = NULL;
		  n_basic_blocks--;
		  break;
		}
	    }
	  end = insn;
	  goto new_bb_inclusive;

	case BARRIER:
	  /* A basic block ends at a barrier.  It may be that an unconditional
	     jump already closed the basic block -- no need to do it again.  */
	  if (head == NULL_RTX)
	    break;

	  /* While we now have edge lists with which other portions of the
	     compiler might determine a call ending a basic block does not
	     imply an abnormal edge, it will be a bit before everything can
	     be updated.  So continue to emit a noop at the end of such a
	     block.  */
	  if (GET_CODE (end) == CALL_INSN && ! SIBLING_CALL_P (end))
	    {
	      rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
	      end = emit_insn_after (nop, end);
	    }
	  goto new_bb_exclusive;

	case CALL_INSN:
	  {
	    /* Record whether this call created an edge.  */
	    rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
	    int region = (note ? INTVAL (XEXP (note, 0)) : 1);
	    int call_has_abnormal_edge = 0;

	    if (GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
	      {
		/* Scan each of the alternatives for label refs.  */
		lvl = find_label_refs (XEXP (PATTERN (insn), 0), lvl);
		lvl = find_label_refs (XEXP (PATTERN (insn), 1), lvl);
		lvl = find_label_refs (XEXP (PATTERN (insn), 2), lvl);
		/* Record its tail recursion label, if any.  */
		if (XEXP (PATTERN (insn), 3) != NULL_RTX)
		  trll = alloc_EXPR_LIST (0, XEXP (PATTERN (insn), 3), trll);
	      }

	    /* If there is an EH region or rethrow, we have an edge.  */
	    if ((eh_list && region > 0)
		|| find_reg_note (insn, REG_EH_RETHROW, NULL_RTX))
	      call_has_abnormal_edge = 1;
	    else if (nonlocal_goto_handler_labels && region >= 0)
	      /* If there is a nonlocal goto label and the specified
		 region number isn't -1, we have an edge.  (0 means
		 no throw, but might have a nonlocal goto).  */
	      call_has_abnormal_edge = 1;

	    /* A basic block ends at a call that can either throw or
	       do a non-local goto.  */
	    if (call_has_abnormal_edge)
	      {
	      new_bb_inclusive:
		if (head == NULL_RTX)
		  head = insn;
		end = insn;

	      new_bb_exclusive:
		create_basic_block (i++, head, end, bb_note);
		head = end = NULL_RTX;
		bb_note = NULL_RTX;
		break;
	      }
	  }
	  /* Fall through.  */

	default:
	  if (GET_RTX_CLASS (code) == 'i')
	    {
	      if (head == NULL_RTX)
		head = insn;
	      end = insn;
	    }
	  break;
	}

      if (GET_RTX_CLASS (code) == 'i')
	{
	  rtx note;

	  /* Make a list of all labels referred to other than by jumps
	     (which just don't have the REG_LABEL notes).

	     Make a special exception for labels followed by an ADDR*VEC,
	     as this would be a part of the tablejump setup code.

	     Make a special exception for the eh_return_stub_label, which
	     we know isn't part of any otherwise visible control flow.  */

	  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	    if (REG_NOTE_KIND (note) == REG_LABEL)
	      {
		rtx lab = XEXP (note, 0), next;

		if (lab == eh_return_stub_label)
		  ;
		else if ((next = next_nonnote_insn (lab)) != NULL
			 && GET_CODE (next) == JUMP_INSN
			 && (GET_CODE (PATTERN (next)) == ADDR_VEC
			     || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
		  ;
		else if (GET_CODE (lab) == NOTE)
		  ;
		else
		  lvl = alloc_EXPR_LIST (0, XEXP (note, 0), lvl);
	      }
	}
    }

  if (head != NULL_RTX)
    create_basic_block (i++, head, end, bb_note);
  else if (bb_note)
    flow_delete_insn (bb_note);

  if (i != n_basic_blocks)
    abort ();

  label_value_list = lvl;
  tail_recursion_label_list = trll;
}

/* Tidy the CFG by deleting unreachable code and whatnot.  */

void
cleanup_cfg (f)
     rtx f;
{
  delete_unreachable_blocks ();
  move_stray_eh_region_notes ();
  record_active_eh_regions (f);
  try_merge_blocks ();
  mark_critical_edges ();

  /* Kill the data we won't maintain.  */
  free_EXPR_LIST_list (&label_value_list);
  free_EXPR_LIST_list (&tail_recursion_label_list);
}

/* Create a new basic block consisting of the instructions between
   HEAD and END inclusive.  Reuses the note and basic block struct
   in BB_NOTE, if any.  */

void
create_basic_block (index, head, end, bb_note)
     int index;
     rtx head, end, bb_note;
{
  basic_block bb;

  if (bb_note
      && ! RTX_INTEGRATED_P (bb_note)
      && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
      && bb->aux == NULL)
    {
      /* If we found an existing note, thread it back onto the chain.  */

      rtx after;

      if (GET_CODE (head) == CODE_LABEL)
	after = head;
      else
	{
	  after = PREV_INSN (head);
	  head = bb_note;
	}

      if (after != bb_note && NEXT_INSN (after) != bb_note)
	reorder_insns (bb_note, bb_note, after);
    }
  else
    {
      /* Otherwise we must create a note and a basic block structure.
	 Since we allow basic block structs in rtl, give the struct
	 the same lifetime by allocating it off the function obstack
	 rather than using malloc.  */

      bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
      memset (bb, 0, sizeof (*bb));

      if (GET_CODE (head) == CODE_LABEL)
	bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
      else
	{
	  bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
	  head = bb_note;
	}
      NOTE_BASIC_BLOCK (bb_note) = bb;
    }

  /* Always include the bb note in the block.  */
  if (NEXT_INSN (end) == bb_note)
    end = bb_note;

  bb->head = head;
  bb->end = end;
  bb->index = index;
  BASIC_BLOCK (index) = bb;

  /* Tag the block so that we know it has been used when considering
     other basic block notes.  */
  bb->aux = bb;
}

/* Records the basic block struct in BB_FOR_INSN, for every instruction
   indexed by INSN_UID.  MAX is the size of the array.  */

void
compute_bb_for_insn (max)
     int max;
{
  int i;

  if (basic_block_for_insn)
    VARRAY_FREE (basic_block_for_insn);
  VARRAY_BB_INIT (basic_block_for_insn, max, "basic_block_for_insn");

  for (i = 0; i < n_basic_blocks; ++i)
    {
      basic_block bb = BASIC_BLOCK (i);
      rtx insn, end;

      end = bb->end;
      insn = bb->head;
      while (1)
	{
	  int uid = INSN_UID (insn);
	  if (uid < max)
	    VARRAY_BB (basic_block_for_insn, uid) = bb;
	  if (insn == end)
	    break;
	  insn = NEXT_INSN (insn);
	}
    }
}
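
/* Once this has run, the mapping is normally consulted through the
   BLOCK_FOR_INSN macro from basic-block.h, e.g. BLOCK_FOR_INSN (insn)
   yields the basic_block containing INSN (a sketch of typical use).  */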

/* Free the memory associated with the edge structures.  */

static void
clear_edges ()
{
  int i;
  edge n, e;

  for (i = 0; i < n_basic_blocks; ++i)
    {
      basic_block bb = BASIC_BLOCK (i);

      for (e = bb->succ; e; e = n)
	{
	  n = e->succ_next;
	  free (e);
	}

      bb->succ = 0;
      bb->pred = 0;
    }

  for (e = ENTRY_BLOCK_PTR->succ; e; e = n)
    {
      n = e->succ_next;
      free (e);
    }

  ENTRY_BLOCK_PTR->succ = 0;
  EXIT_BLOCK_PTR->pred = 0;

  n_edges = 0;
}

/* Identify the edges between basic blocks.

   NONLOCAL_LABEL_LIST is a list of non-local labels in the function.  Blocks
   that are otherwise unreachable may be reachable with a non-local goto.

   BB_EH_END is an array indexed by basic block number in which we record
   the list of exception regions active at the end of the basic block.  */

static void
make_edges (label_value_list)
     rtx label_value_list;
{
  int i;
  eh_nesting_info *eh_nest_info = init_eh_nesting_info ();
  sbitmap *edge_cache = NULL;

  /* Assume no computed jump; revise as we create edges.  */
  current_function_has_computed_jump = 0;

  /* Heavy use of computed goto in machine-generated code can lead to
     nearly fully-connected CFGs.  In that case we spend a significant
     amount of time searching the edge lists for duplicates.  */
  if (forced_labels || label_value_list)
    {
      edge_cache = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
      sbitmap_vector_zero (edge_cache, n_basic_blocks);
    }

  /* By nature of the way these get numbered, block 0 is always the entry.  */
  make_edge (edge_cache, ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU);

  for (i = 0; i < n_basic_blocks; ++i)
    {
      basic_block bb = BASIC_BLOCK (i);
      rtx insn, x;
      enum rtx_code code;
      int force_fallthru = 0;

      /* Examine the last instruction of the block, and discover the
	 ways we can leave the block.  */

      insn = bb->end;
      code = GET_CODE (insn);

      /* A branch.  */
      if (code == JUMP_INSN)
	{
	  rtx tmp;

	  /* ??? Recognize a tablejump and do the right thing.  */
	  if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
	      && (tmp = NEXT_INSN (tmp)) != NULL_RTX
	      && GET_CODE (tmp) == JUMP_INSN
	      && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
		  || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
	    {
	      rtvec vec;
	      int j;

	      if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
		vec = XVEC (PATTERN (tmp), 0);
	      else
		vec = XVEC (PATTERN (tmp), 1);

	      for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
		make_label_edge (edge_cache, bb,
				 XEXP (RTVEC_ELT (vec, j), 0), 0);

	      /* Some targets (eg, ARM) emit a conditional jump that also
		 contains the out-of-range target.  Scan for these and
		 add an edge if necessary.  */
	      if ((tmp = single_set (insn)) != NULL
		  && SET_DEST (tmp) == pc_rtx
		  && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
		  && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
		make_label_edge (edge_cache, bb,
				 XEXP (XEXP (SET_SRC (tmp), 2), 0), 0);

#ifdef CASE_DROPS_THROUGH
	      /* Silly VAXen.  The ADDR_VEC is going to be in the way of
		 us naturally detecting fallthru into the next block.  */
	      force_fallthru = 1;
#endif
	    }

	  /* If this is a computed jump, then mark it as reaching
	     everything on the label_value_list and forced_labels list.  */
	  else if (computed_jump_p (insn))
	    {
	      current_function_has_computed_jump = 1;

	      for (x = label_value_list; x; x = XEXP (x, 1))
		make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);

	      for (x = forced_labels; x; x = XEXP (x, 1))
		make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
	    }

	  /* A return insn creates an edge to the exit block.  */
	  else if (returnjump_p (insn))
	    make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);

	  /* Otherwise, we have a plain conditional or unconditional jump.  */
	  else
	    {
	      if (! JUMP_LABEL (insn))
		abort ();
	      make_label_edge (edge_cache, bb, JUMP_LABEL (insn), 0);
	    }
	}

      /* If this is a sibling call insn, then this is in effect a
	 combined call and return, and so we need an edge to the
	 exit block.  No need to worry about EH edges, since we
	 wouldn't have created the sibling call in the first place.  */

      if (code == CALL_INSN && SIBLING_CALL_P (insn))
	make_edge (edge_cache, bb, EXIT_BLOCK_PTR,
		   EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
      else
	{
	  /* If this is a CALL_INSN, then mark it as reaching the active EH
	     handler for this CALL_INSN.  If we're handling asynchronous
	     exceptions then any insn can reach any of the active handlers.

	     Also mark the CALL_INSN as reaching any nonlocal goto handler.  */

	  if (code == CALL_INSN || asynchronous_exceptions)
	    {
	      /* Add any appropriate EH edges.  We do this unconditionally
		 since there may be a REG_EH_REGION or REG_EH_RETHROW note
		 on the call, and this needn't be within an EH region.  */
	      make_eh_edge (edge_cache, eh_nest_info, bb, insn, bb->eh_end);

	      /* If we have asynchronous exceptions, do the same for *all*
		 exception regions active in the block.  */
	      if (asynchronous_exceptions
		  && bb->eh_beg != bb->eh_end)
		{
		  if (bb->eh_beg >= 0)
		    make_eh_edge (edge_cache, eh_nest_info, bb,
				  NULL_RTX, bb->eh_beg);

		  for (x = bb->head; x != bb->end; x = NEXT_INSN (x))
		    if (GET_CODE (x) == NOTE
			&& (NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_BEG
			    || NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_END))
		      {
			int region = NOTE_EH_HANDLER (x);
			make_eh_edge (edge_cache, eh_nest_info, bb,
				      NULL_RTX, region);
		      }
		}

	      if (code == CALL_INSN && nonlocal_goto_handler_labels)
		{
		  /* ??? This could be made smarter: in some cases it's
		     possible to tell that certain calls will not do a
		     nonlocal goto.

		     For example, if the nested functions that do the
		     nonlocal gotos do not have their addresses taken, then
		     only calls to those functions or to other nested
		     functions that use them could possibly do nonlocal
		     gotos.  */
		  /* We do know that a REG_EH_REGION note with a value less
		     than 0 is guaranteed not to perform a non-local goto.  */
		  rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
		  if (!note || INTVAL (XEXP (note, 0)) >= 0)
		    for (x = nonlocal_goto_handler_labels; x; x = XEXP (x, 1))
		      make_label_edge (edge_cache, bb, XEXP (x, 0),
				       EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
		}
	    }
	}

      /* We know something about the structure of the function __throw in
	 libgcc2.c.  It is the only function that ever contains eh_stub
	 labels.  It modifies its return address so that the last block
	 returns to one of the eh_stub labels within it.  So we have to
	 make additional edges in the flow graph.  */
      if (i + 1 == n_basic_blocks && eh_return_stub_label != 0)
	make_label_edge (edge_cache, bb, eh_return_stub_label, EDGE_EH);

      /* Find out if we can drop through to the next block.  */
      insn = next_nonnote_insn (insn);
      if (!insn || (i + 1 == n_basic_blocks && force_fallthru))
	make_edge (edge_cache, bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
      else if (i + 1 < n_basic_blocks)
	{
	  rtx tmp = BLOCK_HEAD (i + 1);
	  if (GET_CODE (tmp) == NOTE)
	    tmp = next_nonnote_insn (tmp);
	  if (force_fallthru || insn == tmp)
	    make_edge (edge_cache, bb, BASIC_BLOCK (i + 1), EDGE_FALLTHRU);
	}
    }

  free_eh_nesting_info (eh_nest_info);
  if (edge_cache)
    sbitmap_vector_free (edge_cache);
}

/* Create an edge between two basic blocks.  FLAGS are auxiliary information
   about the edge that is accumulated between calls.  */

void
make_edge (edge_cache, src, dst, flags)
     sbitmap *edge_cache;
     basic_block src, dst;
     int flags;
{
  int use_edge_cache;
  edge e;

  /* Don't bother with edge cache for ENTRY or EXIT; there aren't that
     many edges to them, and we didn't allocate memory for it.  */
  use_edge_cache = (edge_cache
		    && src != ENTRY_BLOCK_PTR
		    && dst != EXIT_BLOCK_PTR);

  /* Make sure we don't add duplicate edges.  */
  if (! use_edge_cache || TEST_BIT (edge_cache[src->index], dst->index))
    for (e = src->succ; e; e = e->succ_next)
      if (e->dest == dst)
	{
	  e->flags |= flags;
	  return;
	}

  e = (edge) xcalloc (1, sizeof (*e));
  n_edges++;

  e->succ_next = src->succ;
  e->pred_next = dst->pred;
  e->src = src;
  e->dest = dst;
  e->flags = flags;

  src->succ = e;
  dst->pred = e;

  if (use_edge_cache)
    SET_BIT (edge_cache[src->index], dst->index);
}

/* Create an edge from a basic block to a label.  */

static void
make_label_edge (edge_cache, src, label, flags)
     sbitmap *edge_cache;
     basic_block src;
     rtx label;
     int flags;
{
  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If the label was never emitted, this insn is junk, but avoid a
     crash trying to refer to BLOCK_FOR_INSN (label).  This can happen
     as a result of a syntax error and a diagnostic has already been
     printed.  */

  if (INSN_UID (label) == 0)
    return;

  make_edge (edge_cache, src, BLOCK_FOR_INSN (label), flags);
}

/* Create the edges generated by INSN in REGION.  */

static void
make_eh_edge (edge_cache, eh_nest_info, src, insn, region)
     sbitmap *edge_cache;
     eh_nesting_info *eh_nest_info;
     basic_block src;
     rtx insn;
     int region;
{
  handler_info **handler_list;
  int num, is_call;

  is_call = (insn && GET_CODE (insn) == CALL_INSN ? EDGE_ABNORMAL_CALL : 0);
  num = reachable_handlers (region, eh_nest_info, insn, &handler_list);
  while (--num >= 0)
    {
      make_label_edge (edge_cache, src, handler_list[num]->handler_label,
		       EDGE_ABNORMAL | EDGE_EH | is_call);
    }
}

/* EH_REGION notes appearing between basic blocks are ambiguous, and even
   dangerous if we intend to move basic blocks around.  Move such notes
   into the following block.  */

static void
move_stray_eh_region_notes ()
{
  int i;
  basic_block b1, b2;

  if (n_basic_blocks < 2)
    return;

  b2 = BASIC_BLOCK (n_basic_blocks - 1);
  for (i = n_basic_blocks - 2; i >= 0; --i, b2 = b1)
    {
      rtx insn, next, list = NULL_RTX;

      b1 = BASIC_BLOCK (i);
      for (insn = NEXT_INSN (b1->end); insn != b2->head; insn = next)
	{
	  next = NEXT_INSN (insn);
	  if (GET_CODE (insn) == NOTE
	      && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG
		  || NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
	    {
	      /* Unlink from the insn chain.  */
	      NEXT_INSN (PREV_INSN (insn)) = next;
	      PREV_INSN (next) = PREV_INSN (insn);

	      /* Queue it.  */
	      NEXT_INSN (insn) = list;
	      list = insn;
	    }
	}

      if (list == NULL_RTX)
	continue;

      /* Find where to insert these things.  */
      insn = b2->head;
      if (GET_CODE (insn) == CODE_LABEL)
	insn = NEXT_INSN (insn);

      while (list)
	{
	  next = NEXT_INSN (list);
	  add_insn_after (list, insn);
	  list = next;
	}
    }
}

/* Recompute eh_beg/eh_end for each basic block.  */

static void
record_active_eh_regions (f)
     rtx f;
{
  rtx insn, eh_list = NULL_RTX;
  int i = 0;
  basic_block bb = BASIC_BLOCK (0);

  for (insn = f; insn; insn = NEXT_INSN (insn))
    {
      if (bb->head == insn)
	bb->eh_beg = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);

      if (GET_CODE (insn) == NOTE)
	{
	  int kind = NOTE_LINE_NUMBER (insn);
	  if (kind == NOTE_INSN_EH_REGION_BEG)
	    eh_list = alloc_INSN_LIST (insn, eh_list);
	  else if (kind == NOTE_INSN_EH_REGION_END)
	    {
	      rtx t = XEXP (eh_list, 1);
	      free_INSN_LIST_node (eh_list);
	      eh_list = t;
	    }
	}

      if (bb->end == insn)
	{
	  bb->eh_end = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);
	  i += 1;
	  if (i == n_basic_blocks)
	    break;
	  bb = BASIC_BLOCK (i);
	}
    }
}

/* Identify critical edges and set the bits appropriately.  */

static void
mark_critical_edges ()
{
  int i, n = n_basic_blocks;
  basic_block bb;

  /* We begin with the entry block.  This is not terribly important now,
     but could be if a front end (Fortran) implemented alternate entry
     points.  */
  bb = ENTRY_BLOCK_PTR;
  i = -1;

  while (1)
    {
      edge e;

      /* (1) Critical edges must have a source with multiple successors.  */
      if (bb->succ && bb->succ->succ_next)
	{
	  for (e = bb->succ; e; e = e->succ_next)
	    {
	      /* (2) Critical edges must have a destination with multiple
		 predecessors.  Note that we know there is at least one
		 predecessor -- the edge we followed to get here.  */
	      if (e->dest->pred->pred_next)
		e->flags |= EDGE_CRITICAL;
	      else
		e->flags &= ~EDGE_CRITICAL;
	    }
	}
      else
	{
	  for (e = bb->succ; e; e = e->succ_next)
	    e->flags &= ~EDGE_CRITICAL;
	}

      if (++i >= n)
	break;
      bb = BASIC_BLOCK (i);
    }
}

/* Split a (typically critical) edge.  Return the new block.
   Abort on abnormal edges.

   ??? The code generally expects to be called on critical edges.
   The case of a block ending in an unconditional jump to a
   block with multiple predecessors is not handled optimally.  */

basic_block
split_edge (edge_in)
     edge edge_in;
{
  basic_block old_pred, bb, old_succ;
  edge edge_out;
  rtx bb_note;
  int i, j;

  /* Abnormal edges cannot be split.  */
  if ((edge_in->flags & EDGE_ABNORMAL) != 0)
    abort ();

  old_pred = edge_in->src;
  old_succ = edge_in->dest;

  /* Remove the existing edge from the destination's pred list.  */
  {
    edge *pp;
    for (pp = &old_succ->pred; *pp != edge_in; pp = &(*pp)->pred_next)
      continue;
    *pp = edge_in->pred_next;
    edge_in->pred_next = NULL;
  }

  /* Create the new structures.  */
  bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
  edge_out = (edge) xcalloc (1, sizeof (*edge_out));
  n_edges++;

  memset (bb, 0, sizeof (*bb));

  /* ??? This info is likely going to be out of date very soon.  */
  if (old_succ->global_live_at_start)
    {
      bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
      bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);
      COPY_REG_SET (bb->global_live_at_start, old_succ->global_live_at_start);
      COPY_REG_SET (bb->global_live_at_end, old_succ->global_live_at_start);
    }

  /* Wire them up.  */
  bb->pred = edge_in;
  bb->succ = edge_out;
  bb->count = edge_in->count;

  edge_in->dest = bb;
  edge_in->flags &= ~EDGE_CRITICAL;

  edge_out->pred_next = old_succ->pred;
  edge_out->succ_next = NULL;
  edge_out->src = bb;
  edge_out->dest = old_succ;
  edge_out->flags = EDGE_FALLTHRU;
  edge_out->probability = REG_BR_PROB_BASE;
  edge_out->count = edge_in->count;

  old_succ->pred = edge_out;

  /* Tricky case -- if there existed a fallthru into the successor
     (and we're not it) we must add a new unconditional jump around
     the new block we're actually interested in.

     Further, if that edge is critical, this means a second new basic
     block must be created to hold it.  In order to simplify correct
     insn placement, do this before we touch the existing basic block
     ordering for the block we were really wanting.  */
  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
    {
      edge e;
      for (e = edge_out->pred_next; e; e = e->pred_next)
	if (e->flags & EDGE_FALLTHRU)
	  break;

      if (e)
	{
	  basic_block jump_block;
	  rtx pos;

	  if ((e->flags & EDGE_CRITICAL) == 0
	      && e->src != ENTRY_BLOCK_PTR)
	    {
	      /* Non critical -- we can simply add a jump to the end
		 of the existing predecessor.  */
	      jump_block = e->src;
	    }
	  else
	    {
	      /* We need a new block to hold the jump.  The simplest
		 way to do the bulk of the work here is to recursively
		 call ourselves.  */
	      jump_block = split_edge (e);
	      e = jump_block->succ;
	    }

	  /* Now add the jump insn ...  */
	  pos = emit_jump_insn_after (gen_jump (old_succ->head),
				      jump_block->end);
	  jump_block->end = pos;
	  if (basic_block_for_insn)
	    set_block_for_insn (pos, jump_block);
	  emit_barrier_after (pos);

	  /* ... let jump know that label is in use, ...  */
	  JUMP_LABEL (pos) = old_succ->head;
	  ++LABEL_NUSES (old_succ->head);

	  /* ... and clear fallthru on the outgoing edge.  */
	  e->flags &= ~EDGE_FALLTHRU;

	  /* Continue splitting the interesting edge.  */
	}
    }

  /* Place the new block just in front of the successor.  */
  VARRAY_GROW (basic_block_info, ++n_basic_blocks);
  if (old_succ == EXIT_BLOCK_PTR)
    j = n_basic_blocks - 1;
  else
    j = old_succ->index;
  for (i = n_basic_blocks - 1; i > j; --i)
    {
      basic_block tmp = BASIC_BLOCK (i - 1);
      BASIC_BLOCK (i) = tmp;
      tmp->index = i;
    }
  BASIC_BLOCK (i) = bb;
  bb->index = i;

  /* Create the basic block note.

     Where we place the note can have a noticeable impact on the generated
     code.  Consider this cfg:

			E
			|
			0
		       / \
		   +->1-->2--->E
		   |  |
		   +--+

     If we need to insert an insn on the edge from block 0 to block 1,
     we want to ensure the instructions we insert are outside of any
     loop notes that physically sit between block 0 and block 1.  Otherwise
     we confuse the loop optimizer into thinking the loop is a phony.  */
  if (old_succ != EXIT_BLOCK_PTR
      && PREV_INSN (old_succ->head)
      && GET_CODE (PREV_INSN (old_succ->head)) == NOTE
      && NOTE_LINE_NUMBER (PREV_INSN (old_succ->head)) == NOTE_INSN_LOOP_BEG)
    bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK,
				PREV_INSN (old_succ->head));
  else if (old_succ != EXIT_BLOCK_PTR)
    bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, old_succ->head);
  else
    bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
  NOTE_BASIC_BLOCK (bb_note) = bb;
  bb->head = bb->end = bb_note;

  /* Not quite simple -- for non-fallthru edges, we must adjust the
     predecessor's jump instruction to target our new block.  */
  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
    {
      rtx tmp, insn = old_pred->end;
      rtx old_label = old_succ->head;
      rtx new_label = gen_label_rtx ();

      if (GET_CODE (insn) != JUMP_INSN)
	abort ();

      /* ??? Recognize a tablejump and adjust all matching cases.  */
      if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
	  && (tmp = NEXT_INSN (tmp)) != NULL_RTX
	  && GET_CODE (tmp) == JUMP_INSN
	  && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
	      || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
	{
	  rtvec vec;
	  int j;

	  if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
	    vec = XVEC (PATTERN (tmp), 0);
	  else
	    vec = XVEC (PATTERN (tmp), 1);

	  for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
	    if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
	      {
		RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (VOIDmode, new_label);
		--LABEL_NUSES (old_label);
		++LABEL_NUSES (new_label);
	      }

	  /* Handle casesi dispatch insns */
	  if ((tmp = single_set (insn)) != NULL
	      && SET_DEST (tmp) == pc_rtx
	      && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
	      && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
	      && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
	    {
	      XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (VOIDmode,
							   new_label);
	      --LABEL_NUSES (old_label);
	      ++LABEL_NUSES (new_label);
	    }
	}
      else
	{
	  /* This would have indicated an abnormal edge.  */
	  if (computed_jump_p (insn))
	    abort ();

	  /* A return instruction can't be redirected.  */
	  if (returnjump_p (insn))
	    abort ();

	  /* If the insn doesn't go where we think, we're confused.  */
	  if (JUMP_LABEL (insn) != old_label)
	    abort ();

	  redirect_jump (insn, new_label, 0);
	}

      emit_label_before (new_label, bb_note);
      bb->head = new_label;
    }

  return bb;
}

/* Queue instructions for insertion on an edge between two basic blocks.
   The new instructions and basic blocks (if any) will not appear in the
   CFG until commit_edge_insertions is called.  */

void
insert_insn_on_edge (pattern, e)
     rtx pattern;
     edge e;
{
  /* We cannot insert instructions on an abnormal critical edge.
     It will be easier to find the culprit if we die now.  */
  if ((e->flags & (EDGE_ABNORMAL|EDGE_CRITICAL))
      == (EDGE_ABNORMAL|EDGE_CRITICAL))
    abort ();

  if (e->insns == NULL_RTX)
    start_sequence ();
  else
    push_to_sequence (e->insns);

  emit_insn (pattern);

  e->insns = get_insns ();
  end_sequence ();
}
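
/* A minimal usage sketch (illustrative; REG and E stand for a pseudo
   register and an edge chosen by the caller):

	insert_insn_on_edge (gen_move_insn (reg, const0_rtx), e);
	...
	commit_edge_insertions ();

   The queued sequence is spliced into the insn stream, and the CFG
   updated, only when commit_edge_insertions runs.  */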

/* Update the CFG for the instructions queued on edge E.  */

static void
commit_one_edge_insertion (e)
     edge e;
{
  rtx before = NULL_RTX, after = NULL_RTX, insns, tmp, last;
  basic_block bb;

  /* Pull the insns off the edge now since the edge might go away.  */
  insns = e->insns;
  e->insns = NULL_RTX;

  /* Figure out where to put these things.  If the destination has
     one predecessor, insert there.  Except for the exit block.  */
  if (e->dest->pred->pred_next == NULL
      && e->dest != EXIT_BLOCK_PTR)
    {
      bb = e->dest;

      /* Get the location correct wrt a code label, and "nice" wrt
	 a basic block note, and before everything else.  */
      tmp = bb->head;
      if (GET_CODE (tmp) == CODE_LABEL)
	tmp = NEXT_INSN (tmp);
      if (NOTE_INSN_BASIC_BLOCK_P (tmp))
	tmp = NEXT_INSN (tmp);
      if (tmp == bb->head)
	before = tmp;
      else
	after = PREV_INSN (tmp);
    }

  /* If the source has one successor and the edge is not abnormal,
     insert there.  Except for the entry block.  */
  else if ((e->flags & EDGE_ABNORMAL) == 0
	   && e->src->succ->succ_next == NULL
	   && e->src != ENTRY_BLOCK_PTR)
    {
      bb = e->src;
      /* It is possible to have a non-simple jump here.  Consider a target
	 where some forms of unconditional jumps clobber a register.  This
	 happens on the fr30 for example.

	 We know this block has a single successor, so we can just emit
	 the queued insns before the jump.  */
      if (GET_CODE (bb->end) == JUMP_INSN)
	{
	  before = bb->end;
	}
      else
	{
	  /* We'd better be fallthru, or we've lost track of what's what.  */
	  if ((e->flags & EDGE_FALLTHRU) == 0)
	    abort ();

	  after = bb->end;
	}
    }

  /* Otherwise we must split the edge.  */
  else
    {
      bb = split_edge (e);
      after = bb->end;
    }

  /* Now that we've found the spot, do the insertion.  */

  /* Set the new block number for these insns, if structure is allocated.  */
  if (basic_block_for_insn)
    {
      rtx i;
      for (i = insns; i != NULL_RTX; i = NEXT_INSN (i))
	set_block_for_insn (i, bb);
    }

  if (before)
    {
      emit_insns_before (insns, before);
      if (before == bb->head)
	bb->head = insns;

      last = prev_nonnote_insn (before);
    }
  else
    {
      last = emit_insns_after (insns, after);
      if (after == bb->end)
	bb->end = last;
    }

  if (returnjump_p (last))
    {
      /* ??? Remove all outgoing edges from BB and add one for EXIT.
	 This is not currently a problem because this only happens
	 for the (single) epilogue, which already has a fallthru edge
	 to EXIT.  */

      e = bb->succ;
      if (e->dest != EXIT_BLOCK_PTR
	  || e->succ_next != NULL
	  || (e->flags & EDGE_FALLTHRU) == 0)
	abort ();
      e->flags &= ~EDGE_FALLTHRU;

      emit_barrier_after (last);
      bb->end = last;

      if (before)
	flow_delete_insn (before);
    }
  else if (GET_CODE (last) == JUMP_INSN)
    abort ();
}

/* Update the CFG for all queued instructions.  */

void
commit_edge_insertions ()
{
  int i;
  basic_block bb;

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  i = -1;
  bb = ENTRY_BLOCK_PTR;
  while (1)
    {
      edge e, next;

      for (e = bb->succ; e; e = next)
	{
	  next = e->succ_next;
	  if (e->insns)
	    commit_one_edge_insertion (e);
	}

      if (++i >= n_basic_blocks)
	break;
      bb = BASIC_BLOCK (i);
    }
}

/* Delete all unreachable basic blocks.  */

static void
delete_unreachable_blocks ()
{
  basic_block *worklist, *tos;
  int deleted_handler;
  edge e;
  int i, n;

  n = n_basic_blocks;
  tos = worklist = (basic_block *) xmalloc (sizeof (basic_block) * n);

  /* Use basic_block->aux as a marker.  Clear them all.  */

  for (i = 0; i < n; ++i)
    BASIC_BLOCK (i)->aux = NULL;

  /* Add our starting points to the worklist.  Almost always there will
     be only one.  It isn't inconceivable that we might one day directly
     support Fortran alternate entry points.  */

  for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
    {
      *tos++ = e->dest;

      /* Mark the block with a handy non-null value.  */
      e->dest->aux = e;
    }

  /* Iterate: find everything reachable from what we've already seen.  */

  while (tos != worklist)
    {
      basic_block b = *--tos;

      for (e = b->succ; e; e = e->succ_next)
	if (!e->dest->aux)
	  {
	    *tos++ = e->dest;
	    e->dest->aux = e;
	  }
    }

  /* Delete all unreachable basic blocks.  Count down so that we don't
     interfere with the block renumbering that happens in
     flow_delete_block.  */

  deleted_handler = 0;

  for (i = n - 1; i >= 0; --i)
    {
      basic_block b = BASIC_BLOCK (i);

      if (b->aux != NULL)
	/* This block was found.  Tidy up the mark.  */
	b->aux = NULL;
      else
	deleted_handler |= flow_delete_block (b);
    }

  tidy_fallthru_edges ();

  /* If we deleted an exception handler, we may have EH region begin/end
     blocks to remove as well.  */
  if (deleted_handler)
    delete_eh_regions ();

  free (worklist);
}

/* Find EH regions for which there is no longer a handler, and delete them.  */

static void
delete_eh_regions ()
{
  rtx insn;

  update_rethrow_references ();

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == NOTE)
      {
	if ((NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
	    || (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
	  {
	    int num = NOTE_EH_HANDLER (insn);
	    /* A NULL handler indicates a region is no longer needed,
	       as long as its rethrow label isn't used.  */
	    if (get_first_handler (num) == NULL && ! rethrow_used (num))
	      {
		NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
		NOTE_SOURCE_FILE (insn) = 0;
	      }
	  }
      }
}

/* Return true if NOTE is not one of the ones that must be kept paired,
   so that we may simply delete them.  */

static int
can_delete_note_p (note)
     rtx note;
{
  return (NOTE_LINE_NUMBER (note) == NOTE_INSN_DELETED
	  || NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK);
}
1935 /* Unlink a chain of insns between START and FINISH, leaving notes
1936 that must be paired. */
1938 void
1939 flow_delete_insn_chain (start, finish)
1940 rtx start, finish;
1942 /* Unchain the insns one by one. It would be quicker to delete all
1943 of these with a single unchaining, rather than one at a time, but
1944 we need to keep the NOTEs. */
1946 rtx next;
1948 while (1)
1950 next = NEXT_INSN (start);
1951 if (GET_CODE (start) == NOTE && !can_delete_note_p (start))
1953 else if (GET_CODE (start) == CODE_LABEL
1954 && ! can_delete_label_p (start))
1956 const char *name = LABEL_NAME (start);
1957 PUT_CODE (start, NOTE);
1958 NOTE_LINE_NUMBER (start) = NOTE_INSN_DELETED_LABEL;
1959 NOTE_SOURCE_FILE (start) = name;
1961 else
1962 next = flow_delete_insn (start);
1964 if (start == finish)
1965 break;
1966 start = next;
1970 /* Delete the insns in a (non-live) block. We physically delete every
1971 non-deleted-note insn, and update the flow graph appropriately.
1973 Return nonzero if we deleted an exception handler. */
1975 /* ??? Preserving all such notes strikes me as wrong. It would be nice
1976 to post-process the stream to remove empty blocks, loops, ranges, etc. */
1979 flow_delete_block (b)
1980 basic_block b;
1982 int deleted_handler = 0;
1983 rtx insn, end, tmp;
1985 /* If the head of this block is a CODE_LABEL, then it might be the
1986 label for an exception handler which can't be reached.
1988 We need to remove the label from the exception_handler_label list
1989 and remove the associated NOTE_INSN_EH_REGION_BEG and
1990 NOTE_INSN_EH_REGION_END notes. */
1992 insn = b->head;
1994 never_reached_warning (insn);
1996 if (GET_CODE (insn) == CODE_LABEL)
1998 rtx x, *prev = &exception_handler_labels;
2000 for (x = exception_handler_labels; x; x = XEXP (x, 1))
2002 if (XEXP (x, 0) == insn)
2004 /* Found a match, splice this label out of the EH label list. */
2005 *prev = XEXP (x, 1);
2006 XEXP (x, 1) = NULL_RTX;
2007 XEXP (x, 0) = NULL_RTX;
2009 /* Remove the handler from all regions. */
2010 remove_handler (insn);
2011 deleted_handler = 1;
2012 break;
2014 prev = &XEXP (x, 1);
2018 /* Include any jump table following the basic block. */
2019 end = b->end;
2020 if (GET_CODE (end) == JUMP_INSN
2021 && (tmp = JUMP_LABEL (end)) != NULL_RTX
2022 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
2023 && GET_CODE (tmp) == JUMP_INSN
2024 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
2025 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
2026 end = tmp;
2028 /* Include any barrier that may follow the basic block. */
2029 tmp = next_nonnote_insn (end);
2030 if (tmp && GET_CODE (tmp) == BARRIER)
2031 end = tmp;
2033 /* Selectively delete the entire chain. */
2034 flow_delete_insn_chain (insn, end);
2036 /* Remove the edges into and out of this block. Note that there may
2037 indeed be edges in, if we are removing an unreachable loop. */
2039 edge e, next, *q;
2041 for (e = b->pred; e; e = next)
2043 for (q = &e->src->succ; *q != e; q = &(*q)->succ_next)
2044 continue;
2045 *q = e->succ_next;
2046 next = e->pred_next;
2047 n_edges--;
2048 free (e);
2050 for (e = b->succ; e; e = next)
2052 for (q = &e->dest->pred; *q != e; q = &(*q)->pred_next)
2053 continue;
2054 *q = e->pred_next;
2055 next = e->succ_next;
2056 n_edges--;
2057 free (e);
2060 b->pred = NULL;
2061 b->succ = NULL;
2064 /* Remove the basic block from the array, and compact behind it. */
2065 expunge_block (b);
2067 return deleted_handler;
2070 /* Remove block B from the basic block array and compact behind it. */
2072 static void
2073 expunge_block (b)
2074 basic_block b;
2076 int i, n = n_basic_blocks;
2078 for (i = b->index; i + 1 < n; ++i)
2080 basic_block x = BASIC_BLOCK (i + 1);
2081 BASIC_BLOCK (i) = x;
2082 x->index = i;
2085 basic_block_info->num_elements--;
2086 n_basic_blocks--;
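/* For example (editorial illustration): with six blocks numbered 0-5,
   expunging block 3 copies blocks 4 and 5 down to indices 3 and 4 and
   shrinks n_basic_blocks to 5, so block indices stay dense.  */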
2089 /* Delete INSN by patching it out. Return the next insn. */
2092 flow_delete_insn (insn)
2093 rtx insn;
2095 rtx prev = PREV_INSN (insn);
2096 rtx next = NEXT_INSN (insn);
2097 rtx note;
2099 PREV_INSN (insn) = NULL_RTX;
2100 NEXT_INSN (insn) = NULL_RTX;
2101 INSN_DELETED_P (insn) = 1;
2103 if (prev)
2104 NEXT_INSN (prev) = next;
2105 if (next)
2106 PREV_INSN (next) = prev;
2107 else
2108 set_last_insn (prev);
2110 if (GET_CODE (insn) == CODE_LABEL)
2111 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
2113 /* If deleting a jump, decrement the use count of the label. Deleting
2114 the label itself should happen in the normal course of block merging. */
2115 if (GET_CODE (insn) == JUMP_INSN
2116 && JUMP_LABEL (insn)
2117 && GET_CODE (JUMP_LABEL (insn)) == CODE_LABEL)
2118 LABEL_NUSES (JUMP_LABEL (insn))--;
2120 /* Also if deleting an insn that references a label. */
2121 else if ((note = find_reg_note (insn, REG_LABEL, NULL_RTX)) != NULL_RTX
2122 && GET_CODE (XEXP (note, 0)) == CODE_LABEL)
2123 LABEL_NUSES (XEXP (note, 0))--;
2125 return next;
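/* Illustrative caller pattern (editorial addition): because the next
   insn is returned, a deleting scan can be written as

	for (insn = get_insns (); insn; )
	  {
	    if (should_delete_p (insn))
	      insn = flow_delete_insn (insn);
	    else
	      insn = NEXT_INSN (insn);
	  }

   where should_delete_p stands in for the caller's own predicate.  */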
2128 /* True if a given label can be deleted. */
2130 static int
2131 can_delete_label_p (label)
2132 rtx label;
2134 rtx x;
2136 if (LABEL_PRESERVE_P (label))
2137 return 0;
2139 for (x = forced_labels; x; x = XEXP (x, 1))
2140 if (label == XEXP (x, 0))
2141 return 0;
2142 for (x = label_value_list; x; x = XEXP (x, 1))
2143 if (label == XEXP (x, 0))
2144 return 0;
2145 for (x = exception_handler_labels; x; x = XEXP (x, 1))
2146 if (label == XEXP (x, 0))
2147 return 0;
2149 /* User declared labels must be preserved. */
2150 if (LABEL_NAME (label) != 0)
2151 return 0;
2153 return 1;
2156 static int
2157 tail_recursion_label_p (label)
2158 rtx label;
2160 rtx x;
2162 for (x = tail_recursion_label_list; x; x = XEXP (x, 1))
2163 if (label == XEXP (x, 0))
2164 return 1;
2166 return 0;
2169 /* Blocks A and B are to be merged into a single block A. The insns
2170 are already contiguous, hence `nomove'. */
2172 void
2173 merge_blocks_nomove (a, b)
2174 basic_block a, b;
2176 edge e;
2177 rtx b_head, b_end, a_end;
2178 rtx del_first = NULL_RTX, del_last = NULL_RTX;
2179 int b_empty = 0;
2181 /* If there was a CODE_LABEL beginning B, delete it. */
2182 b_head = b->head;
2183 b_end = b->end;
2184 if (GET_CODE (b_head) == CODE_LABEL)
2186 /* Detect basic blocks with nothing but a label. This can happen
2187 in particular at the end of a function. */
2188 if (b_head == b_end)
2189 b_empty = 1;
2190 del_first = del_last = b_head;
2191 b_head = NEXT_INSN (b_head);
2194 /* Delete the basic block note. */
2195 if (NOTE_INSN_BASIC_BLOCK_P (b_head))
2197 if (b_head == b_end)
2198 b_empty = 1;
2199 if (! del_last)
2200 del_first = b_head;
2201 del_last = b_head;
2202 b_head = NEXT_INSN (b_head);
2205 /* If there was a jump out of A, delete it. */
2206 a_end = a->end;
2207 if (GET_CODE (a_end) == JUMP_INSN)
2209 rtx prev;
2211 for (prev = PREV_INSN (a_end); ; prev = PREV_INSN (prev))
2212 if (GET_CODE (prev) != NOTE || prev == a->head)
2213 break;
2215 del_first = a_end;
2217 #ifdef HAVE_cc0
2218 /* If this was a conditional jump, we need to also delete
2219 the insn that set cc0. */
2220 if (prev && sets_cc0_p (prev))
2222 rtx tmp = prev;
2223 prev = prev_nonnote_insn (prev);
2224 if (!prev)
2225 prev = a->head;
2226 del_first = tmp;
2228 #endif
2230 a_end = prev;
2232 else if (GET_CODE (NEXT_INSN (a_end)) == BARRIER)
2233 del_first = NEXT_INSN (a_end);
2235 /* Delete everything marked above as well as crap that might be
2236 hanging out between the two blocks. */
2237 flow_delete_insn_chain (del_first, del_last);
2239 /* Normally there should only be one successor of A and that is B, but
2240 partway through the merge of blocks for conditional_execution we'll
2241 be merging a TEST block with THEN and ELSE successors. Free the
2242 whole lot of them and hope the caller knows what they're doing. */
2243 while (a->succ)
2244 remove_edge (a->succ);
2246 /* Adjust the edges out of B for the new owner. */
2247 for (e = b->succ; e; e = e->succ_next)
2248 e->src = a;
2249 a->succ = b->succ;
2251 /* B hasn't quite yet ceased to exist. Attempt to prevent mishap. */
2252 b->pred = b->succ = NULL;
2254 /* Reassociate the insns of B with A. */
2255 if (!b_empty)
2257 if (basic_block_for_insn)
2259 BLOCK_FOR_INSN (b_head) = a;
2260 while (b_head != b_end)
2262 b_head = NEXT_INSN (b_head);
2263 BLOCK_FOR_INSN (b_head) = a;
2266 a_end = b_end;
2268 a->end = a_end;
2270 expunge_block (b);
2273 /* Blocks A and B are to be merged into a single block. A has no incoming
2274 fallthru edge, so it can be moved before B without adding or modifying
2275 any jumps (aside from the jump from A to B). */
2277 static int
2278 merge_blocks_move_predecessor_nojumps (a, b)
2279 basic_block a, b;
2281 rtx start, end, barrier;
2282 int index;
2284 start = a->head;
2285 end = a->end;
2287 barrier = next_nonnote_insn (end);
2288 if (GET_CODE (barrier) != BARRIER)
2289 abort ();
2290 flow_delete_insn (barrier);
2292 /* Move block and loop notes out of the chain so that we do not
2293 disturb their order.
2295 ??? A better solution would be to squeeze out all the non-nested notes
2296 and adjust the block trees appropriately. Even better would be to have
2297 a tighter connection between block trees and rtl so that this is not
2298 necessary. */
2299 start = squeeze_notes (start, end);
2301 /* Scramble the insn chain. */
2302 if (end != PREV_INSN (b->head))
2303 reorder_insns (start, end, PREV_INSN (b->head));
2305 if (rtl_dump_file)
2307 fprintf (rtl_dump_file, "Moved block %d before %d and merged.\n",
2308 a->index, b->index);
2311 /* Swap the records for the two blocks around. Although we are deleting B,
2312 A is now where B was and we want to compact the BB array from where
2313 A used to be. */
2314 BASIC_BLOCK (a->index) = b;
2315 BASIC_BLOCK (b->index) = a;
2316 index = a->index;
2317 a->index = b->index;
2318 b->index = index;
2320 /* Now blocks A and B are contiguous. Merge them. */
2321 merge_blocks_nomove (a, b);
2323 return 1;
2326 /* Blocks A and B are to be merged into a single block. B has no outgoing
2327 fallthru edge, so it can be moved after A without adding or modifying
2328 any jumps (aside from the jump from A to B). */
2330 static int
2331 merge_blocks_move_successor_nojumps (a, b)
2332 basic_block a, b;
2334 rtx start, end, barrier;
2336 start = b->head;
2337 end = b->end;
2338 barrier = NEXT_INSN (end);
2340 /* Recognize a jump table following block B. */
2341 if (GET_CODE (barrier) == CODE_LABEL
2342 && NEXT_INSN (barrier)
2343 && GET_CODE (NEXT_INSN (barrier)) == JUMP_INSN
2344 && (GET_CODE (PATTERN (NEXT_INSN (barrier))) == ADDR_VEC
2345 || GET_CODE (PATTERN (NEXT_INSN (barrier))) == ADDR_DIFF_VEC))
2347 end = NEXT_INSN (barrier);
2348 barrier = NEXT_INSN (end);
2351 /* There had better have been a barrier there. Delete it. */
2352 if (GET_CODE (barrier) != BARRIER)
2353 abort ();
2354 flow_delete_insn (barrier);
2356 /* Move block and loop notes out of the chain so that we do not
2357 disturb their order.
2359 ??? A better solution would be to squeeze out all the non-nested notes
2360 and adjust the block trees appropriately. Even better would be to have
2361 a tighter connection between block trees and rtl so that this is not
2362 necessary. */
2363 start = squeeze_notes (start, end);
2365 /* Scramble the insn chain. */
2366 reorder_insns (start, end, a->end);
2368 /* Now blocks A and B are contiguous. Merge them. */
2369 merge_blocks_nomove (a, b);
2371 if (rtl_dump_file)
2373 fprintf (rtl_dump_file, "Moved block %d after %d and merged.\n",
2374 b->index, a->index);
2377 return 1;
2380 /* Attempt to merge basic blocks that are potentially non-adjacent.
2381 Return true iff the attempt succeeded. */
2383 static int
2384 merge_blocks (e, b, c)
2385 edge e;
2386 basic_block b, c;
2388 /* If C has a tail recursion label, do not merge. There is no
2389 edge recorded from the call_placeholder back to this label, as
2390 that would make optimize_sibling_and_tail_recursive_calls more
2391 complex for no gain. */
2392 if (GET_CODE (c->head) == CODE_LABEL
2393 && tail_recursion_label_p (c->head))
2394 return 0;
2396 /* If B has a fallthru edge to C, no need to move anything. */
2397 if (e->flags & EDGE_FALLTHRU)
2399 merge_blocks_nomove (b, c);
2401 if (rtl_dump_file)
2403 fprintf (rtl_dump_file, "Merged %d and %d without moving.\n",
2404 b->index, c->index);
2407 return 1;
2409 else
2411 edge tmp_edge;
2412 basic_block d;
2413 int c_has_outgoing_fallthru;
2414 int b_has_incoming_fallthru;
2416 /* We must make sure to not munge nesting of exception regions,
2417 lexical blocks, and loop notes.
2419 The first is taken care of by requiring that the active eh
2420 region at the end of one block always matches the active eh
2421 region at the beginning of the next block.
2423 The latter two are taken care of by squeezing out all the notes. */
2425 /* ??? A throw/catch edge (or any abnormal edge) should be rarely
2426 executed and we may want to treat blocks which have two out
2427 edges, one normal, one abnormal as only having one edge for
2428 block merging purposes. */
2430 for (tmp_edge = c->succ; tmp_edge; tmp_edge = tmp_edge->succ_next)
2431 if (tmp_edge->flags & EDGE_FALLTHRU)
2432 break;
2433 c_has_outgoing_fallthru = (tmp_edge != NULL);
2435 for (tmp_edge = b->pred; tmp_edge; tmp_edge = tmp_edge->pred_next)
2436 if (tmp_edge->flags & EDGE_FALLTHRU)
2437 break;
2438 b_has_incoming_fallthru = (tmp_edge != NULL);
2440 /* If B does not have an incoming fallthru, and the exception regions
2441 match, then it can be moved immediately before C without introducing
2442 or modifying jumps.
2444 C cannot be the first block, so we do not have to worry about
2445 accessing a non-existent block. */
2446 d = BASIC_BLOCK (c->index - 1);
2447 if (! b_has_incoming_fallthru
2448 && d->eh_end == b->eh_beg
2449 && b->eh_end == c->eh_beg)
2450 return merge_blocks_move_predecessor_nojumps (b, c);
2452 /* Otherwise, we're going to try to move C after B. Make sure the
2453 exception regions match.
2455 If B is the last basic block, then we must not try to access the
2456 block structure for block B + 1. Luckily in that case we do not
2457 need to worry about matching exception regions. */
2458 d = (b->index + 1 < n_basic_blocks ? BASIC_BLOCK (b->index + 1) : NULL);
2459 if (b->eh_end == c->eh_beg
2460 && (d == NULL || c->eh_end == d->eh_beg))
2462 /* If C does not have an outgoing fallthru, then it can be moved
2463 immediately after B without introducing or modifying jumps. */
2464 if (! c_has_outgoing_fallthru)
2465 return merge_blocks_move_successor_nojumps (b, c);
2467 /* Otherwise, we'll need to insert an extra jump, and possibly
2468 a new block to contain it. */
2469 /* ??? Not implemented yet. */
2472 return 0;
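/* Recap (editorial addition): merge_blocks thus tries, in order,
   (1) merge_blocks_nomove when B already falls through to C,
   (2) moving B before C when B has no incoming fallthru edge, and
   (3) moving C after B when C has no outgoing fallthru edge; the
   remaining case, which would need a new jump, is the ??? above.  */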
2476 /* Top level driver for merge_blocks. */
2478 static void
2479 try_merge_blocks ()
2481 int i;
2483 /* Attempt to merge blocks as made possible by edge removal. If a block
2484 has only one successor, and the successor has only one predecessor,
2485 they may be combined. */
2487 for (i = 0; i < n_basic_blocks;)
2489 basic_block c, b = BASIC_BLOCK (i);
2490 edge s;
2492 /* A loop because chains of blocks might be combinable. */
2493 while ((s = b->succ) != NULL
2494 && s->succ_next == NULL
2495 && (s->flags & EDGE_EH) == 0
2496 && (c = s->dest) != EXIT_BLOCK_PTR
2497 && c->pred->pred_next == NULL
2498 /* If the jump insn has side effects, we can't kill the edge. */
2499 && (GET_CODE (b->end) != JUMP_INSN
2500 || onlyjump_p (b->end))
2501 && merge_blocks (s, b, c))
2502 continue;
2504 /* Don't get confused by the index shift caused by deleting blocks. */
2505 i = b->index + 1;
2509 /* The given edge should potentially be a fallthru edge. If that is in
2510 fact true, delete the jump and barriers that are in the way. */
2512 void
2513 tidy_fallthru_edge (e, b, c)
2514 edge e;
2515 basic_block b, c;
2517 rtx q;
2519 /* ??? In a late-running flow pass, other folks may have deleted basic
2520 blocks by nopping out blocks, leaving multiple BARRIERs between here
2521 and the target label. They ought to be chastised and fixed.
2523 We can also wind up with a sequence of undeletable labels between
2524 one block and the next.
2526 So search through a sequence of barriers, labels, and notes for
2527 the head of block C and assert that we really do fall through. */
2529 if (next_real_insn (b->end) != next_real_insn (PREV_INSN (c->head)))
2530 return;
2532 /* Remove what will soon cease being the jump insn from the source block.
2533 If block B consisted only of this single jump, turn it into a deleted
2534 note. */
2535 q = b->end;
2536 if (GET_CODE (q) == JUMP_INSN
2537 && onlyjump_p (q)
2538 && (any_uncondjump_p (q)
2539 || (b->succ == e && e->succ_next == NULL)))
2541 #ifdef HAVE_cc0
2542 /* If this was a conditional jump, we need to also delete
2543 the insn that set cc0. */
2544 if (any_condjump_p (q) && sets_cc0_p (PREV_INSN (q)))
2545 q = PREV_INSN (q);
2546 #endif
2548 if (b->head == q)
2550 PUT_CODE (q, NOTE);
2551 NOTE_LINE_NUMBER (q) = NOTE_INSN_DELETED;
2552 NOTE_SOURCE_FILE (q) = 0;
2554 else
2555 b->end = q = PREV_INSN (q);
2558 /* Selectively unlink the sequence. */
2559 if (q != PREV_INSN (c->head))
2560 flow_delete_insn_chain (NEXT_INSN (q), PREV_INSN (c->head));
2562 e->flags |= EDGE_FALLTHRU;
2565 /* Fix up edges that now fall through, or rather should now fall through
2566 but previously required a jump around now deleted blocks. Simplify
2567 the search by only examining blocks numerically adjacent, since this
2568 is how find_basic_blocks created them. */
2570 static void
2571 tidy_fallthru_edges ()
2573 int i;
2575 for (i = 1; i < n_basic_blocks; ++i)
2577 basic_block b = BASIC_BLOCK (i - 1);
2578 basic_block c = BASIC_BLOCK (i);
2579 edge s;
2581 /* We care about simple conditional or unconditional jumps with
2582 a single successor.
2584 If we had a conditional branch to the next instruction when
2585 find_basic_blocks was called, then there will only be one
2586 out edge for the block which ended with the conditional
2587 branch (since we do not create duplicate edges).
2589 Furthermore, the edge will be marked as a fallthru because we
2590 merge the flags for the duplicate edges. So we do not want to
2591 check that the edge is not a FALLTHRU edge. */
2592 if ((s = b->succ) != NULL
2593 && s->succ_next == NULL
2594 && s->dest == c
2595 /* If the jump insn has side effects, we can't tidy the edge. */
2596 && (GET_CODE (b->end) != JUMP_INSN
2597 || onlyjump_p (b->end)))
2598 tidy_fallthru_edge (s, b, c);
2602 /* Perform data flow analysis.
2603 F is the first insn of the function; FLAGS is a set of PROP_* flags
2604 to be used in accumulating flow info. */
2606 void
2607 life_analysis (f, file, flags)
2608 rtx f;
2609 FILE *file;
2610 int flags;
2612 #ifdef ELIMINABLE_REGS
2613 register int i;
2614 static struct {int from, to; } eliminables[] = ELIMINABLE_REGS;
2615 #endif
2617 /* Record which registers will be eliminated. We use this in
2618 mark_used_regs. */
2620 CLEAR_HARD_REG_SET (elim_reg_set);
2622 #ifdef ELIMINABLE_REGS
2623 for (i = 0; i < (int) (sizeof eliminables / sizeof eliminables[0]); i++)
2624 SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
2625 #else
2626 SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
2627 #endif
2629 if (! optimize)
2630 flags &= ~(PROP_LOG_LINKS | PROP_AUTOINC);
2632 /* The post-reload life analysis must have (on a global basis) the same
2633 registers live as were computed by reload itself; otherwise register
2634 elimination offsets and such may be incorrect.
2636 Reload will mark some registers live even though they do not
2637 appear in the rtl.
2639 We don't want to create new auto-incs after reload, since they
2640 are unlikely to be useful and can cause problems with shared
2641 stack slots. */
2642 if (reload_completed)
2643 flags &= ~(PROP_REG_INFO | PROP_AUTOINC);
2645 /* We want alias analysis information for local dead store elimination. */
2646 if (optimize && (flags & PROP_SCAN_DEAD_CODE))
2647 init_alias_analysis ();
2649 /* Always remove no-op moves. Do this before other processing so
2650 that we don't have to keep re-scanning them. */
2651 delete_noop_moves (f);
2653 /* Some targets can emit simpler epilogues if they know that sp was
2654 not ever modified during the function. After reload, of course,
2655 we've already emitted the epilogue so there's no sense searching. */
2656 if (! reload_completed)
2657 notice_stack_pointer_modification (f);
2659 /* Allocate and zero out data structures that will record the
2660 data from lifetime analysis. */
2661 allocate_reg_life_data ();
2662 allocate_bb_life_data ();
2664 /* Find the set of registers live on function exit. */
2665 mark_regs_live_at_end (EXIT_BLOCK_PTR->global_live_at_start);
2667 /* "Update" life info from zero. It'd be nice to begin the
2668 relaxation with just the exit and noreturn blocks, but that set
2669 is not immediately handy. */
2671 if (flags & PROP_REG_INFO)
2672 memset (regs_ever_live, 0, sizeof (regs_ever_live));
2673 update_life_info (NULL, UPDATE_LIFE_GLOBAL, flags);
2675 /* Clean up. */
2676 if (optimize && (flags & PROP_SCAN_DEAD_CODE))
2677 end_alias_analysis ();
2679 if (file)
2680 dump_flow_info (file);
2682 free_basic_block_vars (1);
2685 /* A subroutine of verify_wide_reg, called through for_each_rtx.
2686 Search for REGNO. If found, abort if it is not wider than word_mode. */
2688 static int
2689 verify_wide_reg_1 (px, pregno)
2690 rtx *px;
2691 void *pregno;
2693 rtx x = *px;
2694 unsigned int regno = *(int *) pregno;
2696 if (GET_CODE (x) == REG && REGNO (x) == regno)
2698 if (GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD)
2699 abort ();
2700 return 1;
2702 return 0;
2705 /* A subroutine of verify_local_live_at_start. Search through insns
2706 between HEAD and END looking for register REGNO. */
2708 static void
2709 verify_wide_reg (regno, head, end)
2710 int regno;
2711 rtx head, end;
2713 while (1)
2715 if (INSN_P (head)
2716 && for_each_rtx (&PATTERN (head), verify_wide_reg_1, &regno))
2717 return;
2718 if (head == end)
2719 break;
2720 head = NEXT_INSN (head);
2723 /* We didn't find the register at all. Something's way screwy. */
2724 abort ();
2727 /* A subroutine of update_life_info. Verify that there are no untoward
2728 changes in live_at_start during a local update. */
2730 static void
2731 verify_local_live_at_start (new_live_at_start, bb)
2732 regset new_live_at_start;
2733 basic_block bb;
2735 if (reload_completed)
2737 /* After reload, there are no pseudos, nor subregs of multi-word
2738 registers. The regsets should exactly match. */
2739 if (! REG_SET_EQUAL_P (new_live_at_start, bb->global_live_at_start))
2740 abort ();
2742 else
2744 int i;
2746 /* Find the set of changed registers. */
2747 XOR_REG_SET (new_live_at_start, bb->global_live_at_start);
2749 EXECUTE_IF_SET_IN_REG_SET (new_live_at_start, 0, i,
2751 /* No registers should die. */
2752 if (REGNO_REG_SET_P (bb->global_live_at_start, i))
2753 abort ();
2754 /* Verify that the now-live register is wider than word_mode. */
2755 verify_wide_reg (i, bb->head, bb->end);
2760 /* Updates life information starting with the basic blocks set in BLOCKS.
2761 If BLOCKS is null, consider it to be the universal set.
2763 If EXTENT is UPDATE_LIFE_LOCAL, such as after splitting or peepholeing,
2764 we are only expecting local modifications to basic blocks. If we find
2765 extra registers live at the beginning of a block, then we either killed
2766 useful data, or we have a broken split that wants data not provided.
2767 If we find registers removed from live_at_start, that means we have
2768 a broken peephole that is killing a register it shouldn't.
2770 ??? This is not true in one situation -- when a pre-reload splitter
2771 generates subregs of a multi-word pseudo, current life analysis will
2772 lose the kill. So we _can_ have a pseudo go live. How irritating.
2774 Including PROP_REG_INFO does not properly refresh regs_ever_live
2775 unless the caller resets it to zero. */
2777 void
2778 update_life_info (blocks, extent, prop_flags)
2779 sbitmap blocks;
2780 enum update_life_extent extent;
2781 int prop_flags;
2783 regset tmp;
2784 regset_head tmp_head;
2785 int i;
2787 tmp = INITIALIZE_REG_SET (tmp_head);
2789 /* For a global update, we go through the relaxation process again. */
2790 if (extent != UPDATE_LIFE_LOCAL)
2792 calculate_global_regs_live (blocks, blocks,
2793 prop_flags & PROP_SCAN_DEAD_CODE);
2795 /* If asked, remove notes from the blocks we'll update. */
2796 if (extent == UPDATE_LIFE_GLOBAL_RM_NOTES)
2797 count_or_remove_death_notes (blocks, 1);
2800 if (blocks)
2802 EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
2804 basic_block bb = BASIC_BLOCK (i);
2806 COPY_REG_SET (tmp, bb->global_live_at_end);
2807 propagate_block (bb, tmp, (regset) NULL, prop_flags);
2809 if (extent == UPDATE_LIFE_LOCAL)
2810 verify_local_live_at_start (tmp, bb);
2813 else
2815 for (i = n_basic_blocks - 1; i >= 0; --i)
2817 basic_block bb = BASIC_BLOCK (i);
2819 COPY_REG_SET (tmp, bb->global_live_at_end);
2820 propagate_block (bb, tmp, (regset) NULL, prop_flags);
2822 if (extent == UPDATE_LIFE_LOCAL)
2823 verify_local_live_at_start (tmp, bb);
2827 FREE_REG_SET (tmp);
2829 if (prop_flags & PROP_REG_INFO)
2831 /* The only pseudos that are live at the beginning of the function
2832 are those that were not set anywhere in the function. local-alloc
2833 doesn't know how to handle these correctly, so mark them as not
2834 local to any one basic block. */
2835 EXECUTE_IF_SET_IN_REG_SET (ENTRY_BLOCK_PTR->global_live_at_end,
2836 FIRST_PSEUDO_REGISTER, i,
2837 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
2839 /* We have a problem with any pseudoreg that lives across the setjmp.
2840 ANSI says that if a user variable does not change in value between
2841 the setjmp and the longjmp, then the longjmp preserves it. This
2842 includes longjmp from a place where the pseudo appears dead.
2843 (In principle, the value still exists if it is in scope.)
2844 If the pseudo goes in a hard reg, some other value may occupy
2845 that hard reg where this pseudo is dead, thus clobbering the pseudo.
2846 Conclusion: such a pseudo must not go in a hard reg. */
2847 EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
2848 FIRST_PSEUDO_REGISTER, i,
2850 if (regno_reg_rtx[i] != 0)
2852 REG_LIVE_LENGTH (i) = -1;
2853 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
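/* Illustrative usage (editorial addition, not in the original): a pass
   that has only split or peepholed insns inside blocks recorded in an
   sbitmap BLOCKS might refresh life data locally with

	update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);

   whereas passing NULL with UPDATE_LIFE_GLOBAL redoes the relaxation
   for the whole function, as life_analysis does above.  */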
2859 /* Free the variables allocated by find_basic_blocks.
2861 KEEP_HEAD_END_P is non-zero if basic_block_info is not to be freed. */
2863 void
2864 free_basic_block_vars (keep_head_end_p)
2865 int keep_head_end_p;
2867 if (basic_block_for_insn)
2869 VARRAY_FREE (basic_block_for_insn);
2870 basic_block_for_insn = NULL;
2873 if (! keep_head_end_p)
2875 clear_edges ();
2876 VARRAY_FREE (basic_block_info);
2877 n_basic_blocks = 0;
2879 ENTRY_BLOCK_PTR->aux = NULL;
2880 ENTRY_BLOCK_PTR->global_live_at_end = NULL;
2881 EXIT_BLOCK_PTR->aux = NULL;
2882 EXIT_BLOCK_PTR->global_live_at_start = NULL;
2886 /* Return nonzero if the destination of SET equals the source. */
2888 static int
2889 set_noop_p (set)
2890 rtx set;
2892 rtx src = SET_SRC (set);
2893 rtx dst = SET_DEST (set);
2895 if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
2897 if (SUBREG_WORD (src) != SUBREG_WORD (dst))
2898 return 0;
2899 src = SUBREG_REG (src);
2900 dst = SUBREG_REG (dst);
2903 return (GET_CODE (src) == REG && GET_CODE (dst) == REG
2904 && REGNO (src) == REGNO (dst));
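/* For example (editorial illustration): (set (reg:SI 5) (reg:SI 5)) is
   a no-op, as is a copy between identical words of the same register,
   (set (subreg:SI (reg:DI 10) 0) (subreg:SI (reg:DI 10) 0)).  */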
2907 /* Return nonzero if an insn consists only of SETs, each of which only sets a
2908 value to itself. */
2910 static int
2911 noop_move_p (insn)
2912 rtx insn;
2914 rtx pat = PATTERN (insn);
2916 /* Insns carrying these notes are useful later on. */
2917 if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
2918 return 0;
2920 if (GET_CODE (pat) == SET && set_noop_p (pat))
2921 return 1;
2923 if (GET_CODE (pat) == PARALLEL)
2925 int i;
2926 /* If nothing but SETs of registers to themselves,
2927 this insn can also be deleted. */
2928 for (i = 0; i < XVECLEN (pat, 0); i++)
2930 rtx tem = XVECEXP (pat, 0, i);
2932 if (GET_CODE (tem) == USE
2933 || GET_CODE (tem) == CLOBBER)
2934 continue;
2936 if (GET_CODE (tem) != SET || ! set_noop_p (tem))
2937 return 0;
2940 return 1;
2942 return 0;
2945 /* Delete any insns that copy a register to itself. */
2947 static void
2948 delete_noop_moves (f)
2949 rtx f;
2951 rtx insn;
2952 for (insn = f; insn; insn = NEXT_INSN (insn))
2954 if (GET_CODE (insn) == INSN && noop_move_p (insn))
2956 PUT_CODE (insn, NOTE);
2957 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2958 NOTE_SOURCE_FILE (insn) = 0;
2963 /* Determine if the stack pointer is constant over the life of the function.
2964 Only useful before prologues have been emitted. */
2966 static void
2967 notice_stack_pointer_modification_1 (x, pat, data)
2968 rtx x;
2969 rtx pat ATTRIBUTE_UNUSED;
2970 void *data ATTRIBUTE_UNUSED;
2972 if (x == stack_pointer_rtx
2973 /* The stack pointer is only modified indirectly as the result
2974 of a push until later in flow. See the comments in rtl.texi
2975 regarding Embedded Side-Effects on Addresses. */
2976 || (GET_CODE (x) == MEM
2977 && (GET_CODE (XEXP (x, 0)) == PRE_DEC
2978 || GET_CODE (XEXP (x, 0)) == PRE_INC
2979 || GET_CODE (XEXP (x, 0)) == POST_DEC
2980 || GET_CODE (XEXP (x, 0)) == POST_INC)
2981 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx))
2982 current_function_sp_is_unchanging = 0;
2985 static void
2986 notice_stack_pointer_modification (f)
2987 rtx f;
2989 rtx insn;
2991 /* Assume that the stack pointer is unchanging if alloca hasn't
2992 been used. */
2993 current_function_sp_is_unchanging = !current_function_calls_alloca;
2994 if (! current_function_sp_is_unchanging)
2995 return;
2997 for (insn = f; insn; insn = NEXT_INSN (insn))
2999 if (INSN_P (insn))
3001 /* Check if insn modifies the stack pointer. */
3002 note_stores (PATTERN (insn), notice_stack_pointer_modification_1,
3003 NULL);
3004 if (! current_function_sp_is_unchanging)
3005 return;
3010 /* Mark a register in SET. Hard registers in large modes get all
3011 of their component registers set as well. */
3013 static void
3014 mark_reg (reg, xset)
3015 rtx reg;
3016 void *xset;
3018 regset set = (regset) xset;
3019 int regno = REGNO (reg);
3021 if (GET_MODE (reg) == BLKmode)
3022 abort ();
3024 SET_REGNO_REG_SET (set, regno);
3025 if (regno < FIRST_PSEUDO_REGISTER)
3027 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
3028 while (--n > 0)
3029 SET_REGNO_REG_SET (set, regno + n);
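/* For example (editorial illustration): on a 32-bit target where
   HARD_REGNO_NREGS reports 2 for a DImode value starting at hard reg 0,
   mark_reg sets both bits 0 and 1 in the regset.  */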
3033 /* Mark those regs which are needed at the end of the function as live
3034 at the end of the last basic block. */
3036 static void
3037 mark_regs_live_at_end (set)
3038 regset set;
3040 int i;
3042 /* If exiting needs the right stack value, consider the stack pointer
3043 live at the end of the function. */
3044 if ((HAVE_epilogue && reload_completed)
3045 || ! EXIT_IGNORE_STACK
3046 || (! FRAME_POINTER_REQUIRED
3047 && ! current_function_calls_alloca
3048 && flag_omit_frame_pointer)
3049 || current_function_sp_is_unchanging)
3051 SET_REGNO_REG_SET (set, STACK_POINTER_REGNUM);
3054 /* Mark the frame pointer if needed at the end of the function. If
3055 we end up eliminating it, it will be removed from the live list
3056 of each basic block by reload. */
3058 if (! reload_completed || frame_pointer_needed)
3060 SET_REGNO_REG_SET (set, FRAME_POINTER_REGNUM);
3061 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3062 /* If they are different, also mark the hard frame pointer as live. */
3063 if (! LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
3064 SET_REGNO_REG_SET (set, HARD_FRAME_POINTER_REGNUM);
3065 #endif
3068 #ifdef PIC_OFFSET_TABLE_REGNUM
3069 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
3070 /* Many architectures have a GP register even without flag_pic.
3071 Assume the pic register is not in use, or will be handled by
3072 other means, if it is not fixed. */
3073 if (fixed_regs[PIC_OFFSET_TABLE_REGNUM])
3074 SET_REGNO_REG_SET (set, PIC_OFFSET_TABLE_REGNUM);
3075 #endif
3076 #endif
3078 /* Mark all global registers, and all registers used by the epilogue
3079 as being live at the end of the function since they may be
3080 referenced by our caller. */
3081 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3082 if (global_regs[i] || EPILOGUE_USES (i))
3083 SET_REGNO_REG_SET (set, i);
3085 /* Mark all call-saved registers that we actually used. */
3086 if (HAVE_epilogue && reload_completed)
3088 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3089 if (regs_ever_live[i] && ! call_used_regs[i] && ! LOCAL_REGNO (i))
3090 SET_REGNO_REG_SET (set, i);
3093 /* Mark function return value. */
3094 diddle_return_value (mark_reg, set);
3097 /* Callback function for for_each_successor_phi. DATA is a regset.
3098 Sets the SRC_REGNO, the regno of the phi alternative for phi node
3099 INSN, in the regset. */
3101 static int
3102 set_phi_alternative_reg (insn, dest_regno, src_regno, data)
3103 rtx insn ATTRIBUTE_UNUSED;
3104 int dest_regno ATTRIBUTE_UNUSED;
3105 int src_regno;
3106 void *data;
3108 regset live = (regset) data;
3109 SET_REGNO_REG_SET (live, src_regno);
3110 return 0;
3113 /* Propagate global life info around the graph of basic blocks. Begin
3114 considering blocks with their corresponding bit set in BLOCKS_IN.
3115 If BLOCKS_IN is null, consider it the universal set.
3117 BLOCKS_OUT is set for every block that was changed. */
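/* Restated for reference (editorial addition), the fixed point computed
   below is the standard backward liveness system:

	live_at_end (B)   = UNION over successors S of live_at_start (S)
	live_at_start (B) = (live_at_end (B) - local_set (B)) + uses (B)

   where local_set (B) holds the registers killed within B.  Blocks are
   re-queued until no live_at_start set changes.  */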
3119 static void
3120 calculate_global_regs_live (blocks_in, blocks_out, flags)
3121 sbitmap blocks_in, blocks_out;
3122 int flags;
3124 basic_block *queue, *qhead, *qtail, *qend;
3125 regset tmp, new_live_at_end;
3126 regset_head tmp_head;
3127 regset_head new_live_at_end_head;
3128 int i;
3130 tmp = INITIALIZE_REG_SET (tmp_head);
3131 new_live_at_end = INITIALIZE_REG_SET (new_live_at_end_head);
3133 /* Create a worklist. Allocate an extra slot for ENTRY_BLOCK, and one
3134 because the `head == tail' style test for an empty queue doesn't
3135 work with a full queue. */
3136 queue = (basic_block *) xmalloc ((n_basic_blocks + 2) * sizeof (*queue));
3137 qtail = queue;
3138 qhead = qend = queue + n_basic_blocks + 2;
3140 /* Clear out the garbage that might be hanging out in bb->aux. */
3141 for (i = n_basic_blocks - 1; i >= 0; --i)
3142 BASIC_BLOCK (i)->aux = NULL;
3144 /* Queue the blocks set in the initial mask. Do this in reverse block
3145 number order so that the first round is more likely to do
3146 useful work. We use AUX non-null to flag that the block is queued. */
3147 if (blocks_in)
3149 EXECUTE_IF_SET_IN_SBITMAP (blocks_in, 0, i,
3151 basic_block bb = BASIC_BLOCK (i);
3152 *--qhead = bb;
3153 bb->aux = bb;
3156 else
3158 for (i = 0; i < n_basic_blocks; ++i)
3160 basic_block bb = BASIC_BLOCK (i);
3161 *--qhead = bb;
3162 bb->aux = bb;
3166 if (blocks_out)
3167 sbitmap_zero (blocks_out);
3169 while (qhead != qtail)
3171 int rescan, changed;
3172 basic_block bb;
3173 edge e;
3175 bb = *qhead++;
3176 if (qhead == qend)
3177 qhead = queue;
3178 bb->aux = NULL;
3180 /* Begin by propagating live_at_start from the successor blocks. */
3181 CLEAR_REG_SET (new_live_at_end);
3182 for (e = bb->succ; e; e = e->succ_next)
3184 basic_block sb = e->dest;
3185 IOR_REG_SET (new_live_at_end, sb->global_live_at_start);
3188 /* Force the stack pointer to be live -- which might not already be
3189 the case for blocks within infinite loops. */
3190 SET_REGNO_REG_SET (new_live_at_end, STACK_POINTER_REGNUM);
3192 /* Regs used in phi nodes are not included in
3193 global_live_at_start, since they are live only along a
3194 particular edge. Set those regs that are live because of a
3195 phi node alternative corresponding to this particular block. */
3196 if (in_ssa_form)
3197 for_each_successor_phi (bb, &set_phi_alternative_reg,
3198 new_live_at_end);
3200 if (bb == ENTRY_BLOCK_PTR)
3202 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3203 continue;
3206 /* On our first pass through this block, we'll go ahead and continue.
3207 Recognize first pass by local_set NULL. On subsequent passes, we
3208 get to skip out early if live_at_end wouldn't have changed. */
3210 if (bb->local_set == NULL)
3212 bb->local_set = OBSTACK_ALLOC_REG_SET (function_obstack);
3213 rescan = 1;
3215 else
3217 /* If any bits were removed from live_at_end, we'll have to
3218 rescan the block. This wouldn't be necessary if we had
3219 precalculated local_live; however, with PROP_SCAN_DEAD_CODE
3220 local_live is really dependent on live_at_end. */
3221 CLEAR_REG_SET (tmp);
3222 rescan = bitmap_operation (tmp, bb->global_live_at_end,
3223 new_live_at_end, BITMAP_AND_COMPL);
3225 if (! rescan)
3227 /* Find the set of changed bits. Take this opportunity
3228 to notice that this set is empty and early out. */
3229 CLEAR_REG_SET (tmp);
3230 changed = bitmap_operation (tmp, bb->global_live_at_end,
3231 new_live_at_end, BITMAP_XOR);
3232 if (! changed)
3233 continue;
3235 /* If any of the changed bits overlap with local_set,
3236 we'll have to rescan the block. Detect overlap by
3237 the AND with ~local_set turning off bits. */
3238 rescan = bitmap_operation (tmp, tmp, bb->local_set,
3239 BITMAP_AND_COMPL);
3243 /* Let our caller know that BB changed enough to require its
3244 death notes updated. */
3245 if (blocks_out)
3246 SET_BIT (blocks_out, bb->index);
3248 if (! rescan)
3250 /* Add to live_at_start the set of all registers in
3251 new_live_at_end that aren't in the old live_at_end. */
3253 bitmap_operation (tmp, new_live_at_end, bb->global_live_at_end,
3254 BITMAP_AND_COMPL);
3255 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3257 changed = bitmap_operation (bb->global_live_at_start,
3258 bb->global_live_at_start,
3259 tmp, BITMAP_IOR);
3260 if (! changed)
3261 continue;
3263 else
3265 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3267 /* Rescan the block insn by insn to turn (a copy of) live_at_end
3268 into live_at_start. */
3269 propagate_block (bb, new_live_at_end, bb->local_set, flags);
3271 /* If live_at_start didn't change, no need to go farther. */
3272 if (REG_SET_EQUAL_P (bb->global_live_at_start, new_live_at_end))
3273 continue;
3275 COPY_REG_SET (bb->global_live_at_start, new_live_at_end);
3278 /* Queue all predecessors of BB so that we may re-examine
3279 their live_at_end. */
3280 for (e = bb->pred; e; e = e->pred_next)
3282 basic_block pb = e->src;
3283 if (pb->aux == NULL)
3285 *qtail++ = pb;
3286 if (qtail == qend)
3287 qtail = queue;
3288 pb->aux = pb;
3293 FREE_REG_SET (tmp);
3294 FREE_REG_SET (new_live_at_end);
3296 if (blocks_out)
3298 EXECUTE_IF_SET_IN_SBITMAP (blocks_out, 0, i,
3300 basic_block bb = BASIC_BLOCK (i);
3301 FREE_REG_SET (bb->local_set);
3304 else
3306 for (i = n_basic_blocks - 1; i >= 0; --i)
3308 basic_block bb = BASIC_BLOCK (i);
3309 FREE_REG_SET (bb->local_set);
3313 free (queue);
3316 /* Subroutines of life analysis. */
3318 /* Allocate the permanent data structures that represent the results
3319 of life analysis. Not static since used also for stupid life analysis. */
3321 void
3322 allocate_bb_life_data ()
3324 register int i;
3326 for (i = 0; i < n_basic_blocks; i++)
3328 basic_block bb = BASIC_BLOCK (i);
3330 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
3331 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);
3334 ENTRY_BLOCK_PTR->global_live_at_end
3335 = OBSTACK_ALLOC_REG_SET (function_obstack);
3336 EXIT_BLOCK_PTR->global_live_at_start
3337 = OBSTACK_ALLOC_REG_SET (function_obstack);
3339 regs_live_at_setjmp = OBSTACK_ALLOC_REG_SET (function_obstack);
3342 void
3343 allocate_reg_life_data ()
3345 int i;
3347 max_regno = max_reg_num ();
3349 /* Recalculate the register space, in case it has grown. Old style
3350 vector oriented regsets would set regset_{size,bytes} here also. */
3351 allocate_reg_info (max_regno, FALSE, FALSE);
3353 /* Reset all the data we'll collect in propagate_block and its
3354 subroutines. */
3355 for (i = 0; i < max_regno; i++)
3357 REG_N_SETS (i) = 0;
3358 REG_N_REFS (i) = 0;
3359 REG_N_DEATHS (i) = 0;
3360 REG_N_CALLS_CROSSED (i) = 0;
3361 REG_LIVE_LENGTH (i) = 0;
3362 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
3366 /* Delete dead instructions for propagate_block. */
3368 static void
3369 propagate_block_delete_insn (bb, insn)
3370 basic_block bb;
3371 rtx insn;
3373 rtx inote = find_reg_note (insn, REG_LABEL, NULL_RTX);
3375 /* If the insn referred to a label, and that label was attached to
3376 an ADDR_VEC, it's safe to delete the ADDR_VEC. In fact, it's
3377 pretty much mandatory to delete it, because the ADDR_VEC may be
3378 referencing labels that no longer exist. */
3380 if (inote)
3382 rtx label = XEXP (inote, 0);
3383 rtx next;
3385 if (LABEL_NUSES (label) == 1
3386 && (next = next_nonnote_insn (label)) != NULL
3387 && GET_CODE (next) == JUMP_INSN
3388 && (GET_CODE (PATTERN (next)) == ADDR_VEC
3389 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
3391 rtx pat = PATTERN (next);
3392 int diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
3393 int len = XVECLEN (pat, diff_vec_p);
3394 int i;
3396 for (i = 0; i < len; i++)
3397 LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0))--;
3399 flow_delete_insn (next);
3403 if (bb->end == insn)
3404 bb->end = PREV_INSN (insn);
3405 flow_delete_insn (insn);
3408 /* Delete dead libcalls for propagate_block. Return the insn
3409 before the libcall. */
3411 static rtx
3412 propagate_block_delete_libcall (bb, insn, note)
3413 basic_block bb;
3414 rtx insn, note;
3416 rtx first = XEXP (note, 0);
3417 rtx before = PREV_INSN (first);
3419 if (insn == bb->end)
3420 bb->end = before;
3422 flow_delete_insn_chain (first, insn);
3423 return before;
3426 /* Update the life-status of regs for one insn. Return the previous insn. */
3429 propagate_one_insn (pbi, insn)
3430 struct propagate_block_info *pbi;
3431 rtx insn;
3433 rtx prev = PREV_INSN (insn);
3434 int flags = pbi->flags;
3435 int insn_is_dead = 0;
3436 int libcall_is_dead = 0;
3437 rtx note;
3438 int i;
3440 if (! INSN_P (insn))
3441 return prev;
3443 note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
3444 if (flags & PROP_SCAN_DEAD_CODE)
3446 insn_is_dead = insn_dead_p (pbi, PATTERN (insn), 0,
3447 REG_NOTES (insn));
3448 libcall_is_dead = (insn_is_dead && note != 0
3449 && libcall_dead_p (pbi, note, insn));
3452 /* We almost certainly don't want to delete prologue or epilogue
3453 instructions. Warn about probable compiler lossage. */
3454 if (insn_is_dead
3455 && reload_completed
3456 && (((HAVE_epilogue || HAVE_prologue)
3457 && prologue_epilogue_contains (insn))
3458 || (HAVE_sibcall_epilogue
3459 && sibcall_epilogue_contains (insn)))
3460 && find_reg_note (insn, REG_MAYBE_DEAD, NULL_RTX) == 0)
3462 if (flags & PROP_KILL_DEAD_CODE)
3464 warning ("ICE: would have deleted prologue/epilogue insn");
3465 if (!inhibit_warnings)
3466 debug_rtx (insn);
3468 libcall_is_dead = insn_is_dead = 0;
3471 /* If an instruction consists of just dead store(s) on final pass,
3472 delete it. */
3473 if ((flags & PROP_KILL_DEAD_CODE) && insn_is_dead)
3475 /* Record sets. Do this even for dead instructions, since they
3476 would have killed the values if they hadn't been deleted. */
3477 mark_set_regs (pbi, PATTERN (insn), insn);
3479 /* CC0 is now known to be dead. Either this insn used it,
3480 in which case it doesn't anymore, or clobbered it,
3481 so the next insn can't use it. */
3482 pbi->cc0_live = 0;
3484 if (libcall_is_dead)
3486 prev = propagate_block_delete_libcall (pbi->bb, insn, note);
3487 insn = NEXT_INSN (prev);
3489 else
3490 propagate_block_delete_insn (pbi->bb, insn);
3492 return prev;
3495 /* See if this is an increment or decrement that can be merged into
3496 a following memory address. */
3497 #ifdef AUTO_INC_DEC
3499 register rtx x = single_set (insn);
3501 /* Does this instruction increment or decrement a register? */
3502 if ((flags & PROP_AUTOINC)
3503 && x != 0
3504 && GET_CODE (SET_DEST (x)) == REG
3505 && (GET_CODE (SET_SRC (x)) == PLUS
3506 || GET_CODE (SET_SRC (x)) == MINUS)
3507 && XEXP (SET_SRC (x), 0) == SET_DEST (x)
3508 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
3509 /* Ok, look for a following memory ref we can combine with.
3510 If one is found, change the memory ref to a PRE_INC
3511 or PRE_DEC, cancel this insn, and return 1.
3512 Return 0 if nothing has been done. */
3513 && try_pre_increment_1 (pbi, insn))
3514 return prev;
3516 #endif /* AUTO_INC_DEC */
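/* For instance (editorial illustration), given

	(set (reg 100) (plus (reg 100) (const_int 4)))
	... (mem (reg 100)) ...

   try_pre_increment_1 may rewrite the memory reference as
   (mem (pre_inc (reg 100))) and cancel the increment insn, assuming
   the constant 4 matches the access size.  */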
3518 CLEAR_REG_SET (pbi->new_set);
3520 /* If this is not the final pass, and this insn is copying the value of
3521 a library call and it's dead, don't scan the insns that perform the
3522 library call, so that the call's arguments are not marked live. */
3523 if (libcall_is_dead)
3525 /* Record the death of the dest reg. */
3526 mark_set_regs (pbi, PATTERN (insn), insn);
3528 insn = XEXP (note, 0);
3529 return PREV_INSN (insn);
3531 else if (GET_CODE (PATTERN (insn)) == SET
3532 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
3533 && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
3534 && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx
3535 && GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == CONST_INT)
3536 /* We have an insn to pop a constant amount off the stack.
3537 (Such insns use PLUS regardless of the direction of the stack,
3538 and any insn to adjust the stack by a constant is always a pop.)
3539 These insns, if not dead stores, have no effect on life. */
3541 else
3543 /* Any regs live at the time of a call instruction must not go
3544 in a register clobbered by calls. Find all regs now live and
3545 record this for them. */
3547 if (GET_CODE (insn) == CALL_INSN && (flags & PROP_REG_INFO))
3548 EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i,
3549 { REG_N_CALLS_CROSSED (i)++; });
3551 /* Record sets. Do this even for dead instructions, since they
3552 would have killed the values if they hadn't been deleted. */
3553 mark_set_regs (pbi, PATTERN (insn), insn);
3555 if (GET_CODE (insn) == CALL_INSN)
3557 register int i;
3558 rtx note, cond;
3560 cond = NULL_RTX;
3561 if (GET_CODE (PATTERN (insn)) == COND_EXEC)
3562 cond = COND_EXEC_TEST (PATTERN (insn));
3564 /* Non-constant calls clobber memory. */
3565 if (! CONST_CALL_P (insn))
3566 free_EXPR_LIST_list (&pbi->mem_set_list);
3568 /* There may be extra registers to be clobbered. */
3569 for (note = CALL_INSN_FUNCTION_USAGE (insn);
3570 note;
3571 note = XEXP (note, 1))
3572 if (GET_CODE (XEXP (note, 0)) == CLOBBER)
3573 mark_set_1 (pbi, CLOBBER, XEXP (XEXP (note, 0), 0),
3574 cond, insn, pbi->flags);
3576 /* Calls change all call-used and global registers. */
3577 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3578 if (call_used_regs[i] && ! global_regs[i]
3579 && ! fixed_regs[i])
3581 /* We do not want REG_UNUSED notes for these registers. */
3582 mark_set_1 (pbi, CLOBBER, gen_rtx_REG (reg_raw_mode[i], i),
3583 cond, insn,
3584 pbi->flags & ~(PROP_DEATH_NOTES | PROP_REG_INFO));
3588 /* If an insn doesn't use CC0, it becomes dead since we assume
3589 that every insn clobbers it. So show it dead here;
3590 mark_used_regs will set it live if it is referenced. */
3591 pbi->cc0_live = 0;
3593 /* Record uses. */
3594 if (! insn_is_dead)
3595 mark_used_regs (pbi, PATTERN (insn), NULL_RTX, insn);
3597 /* Sometimes we may have inserted something before INSN (such as a move)
3598 when we make an auto-inc. So ensure we will scan those insns. */
3599 #ifdef AUTO_INC_DEC
3600 prev = PREV_INSN (insn);
3601 #endif
3603 if (! insn_is_dead && GET_CODE (insn) == CALL_INSN)
3605 register int i;
3606 rtx note, cond;
3608 cond = NULL_RTX;
3609 if (GET_CODE (PATTERN (insn)) == COND_EXEC)
3610 cond = COND_EXEC_TEST (PATTERN (insn));
3612 /* Calls use their arguments. */
3613 for (note = CALL_INSN_FUNCTION_USAGE (insn);
3614 note;
3615 note = XEXP (note, 1))
3616 if (GET_CODE (XEXP (note, 0)) == USE)
3617 mark_used_regs (pbi, XEXP (XEXP (note, 0), 0),
3618 cond, insn);
3620 /* The stack ptr is used (honorarily) by a CALL insn. */
3621 SET_REGNO_REG_SET (pbi->reg_live, STACK_POINTER_REGNUM);
3623 /* Calls may also reference any of the global registers,
3624 so they are made live. */
3625 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3626 if (global_regs[i])
3627 mark_used_reg (pbi, gen_rtx_REG (reg_raw_mode[i], i),
3628 cond, insn);
3632 /* On final pass, update counts of how many insns in which each reg
3633 is live. */
3634 if (flags & PROP_REG_INFO)
3635 EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i,
3636 { REG_LIVE_LENGTH (i)++; });
3638 return prev;
3641 /* Initialize a propagate_block_info struct for public consumption.
3642 Note that the structure itself is opaque to this file, but that
3643 the user can use the regsets provided here. */
3645 struct propagate_block_info *
3646 init_propagate_block_info (bb, live, local_set, flags)
3647 basic_block bb;
3648 regset live;
3649 regset local_set;
3650 int flags;
3652 struct propagate_block_info *pbi = xmalloc (sizeof (*pbi));
3654 pbi->bb = bb;
3655 pbi->reg_live = live;
3656 pbi->mem_set_list = NULL_RTX;
3657 pbi->local_set = local_set;
3658 pbi->cc0_live = 0;
3659 pbi->flags = flags;
3661 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
3662 pbi->reg_next_use = (rtx *) xcalloc (max_reg_num (), sizeof (rtx));
3663 else
3664 pbi->reg_next_use = NULL;
3666 pbi->new_set = BITMAP_XMALLOC ();
3668 #ifdef HAVE_conditional_execution
3669 pbi->reg_cond_dead = splay_tree_new (splay_tree_compare_ints, NULL,
3670 free_reg_cond_life_info);
3671 pbi->reg_cond_reg = BITMAP_XMALLOC ();
3673 /* If this block ends in a conditional branch, for each register live
3674 from one side of the branch and not the other, record the register
3675 as conditionally dead. */
3676 if ((flags & (PROP_DEATH_NOTES | PROP_SCAN_DEAD_CODE))
3677 && GET_CODE (bb->end) == JUMP_INSN
3678 && any_condjump_p (bb->end))
3680 regset_head diff_head;
3681 regset diff = INITIALIZE_REG_SET (diff_head);
3682 basic_block bb_true, bb_false;
3683 rtx cond_true, cond_false, set_src;
3684 int i;
3686 /* Identify the successor blocks. */
3687 bb_true = bb->succ->dest;
3688 if (bb->succ->succ_next != NULL)
3690 bb_false = bb->succ->succ_next->dest;
3692 if (bb->succ->flags & EDGE_FALLTHRU)
3694 basic_block t = bb_false;
3695 bb_false = bb_true;
3696 bb_true = t;
3698 else if (! (bb->succ->succ_next->flags & EDGE_FALLTHRU))
3699 abort ();
3701 else
3703 /* This can happen with a conditional jump to the next insn. */
3704 if (JUMP_LABEL (bb->end) != bb_true->head)
3705 abort ();
3707 /* Simplest way to do nothing. */
3708 bb_false = bb_true;
3711 /* Extract the condition from the branch. */
3712 set_src = SET_SRC (pc_set (bb->end));
3713 cond_true = XEXP (set_src, 0);
3714 cond_false = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond_true)),
3715 GET_MODE (cond_true), XEXP (cond_true, 0),
3716 XEXP (cond_true, 1));
3717 if (GET_CODE (XEXP (set_src, 1)) == PC)
3719 rtx t = cond_false;
3720 cond_false = cond_true;
3721 cond_true = t;
3724 /* Compute which registers lead different lives in the successors. */
3725 if (bitmap_operation (diff, bb_true->global_live_at_start,
3726 bb_false->global_live_at_start, BITMAP_XOR))
3728 if (GET_CODE (XEXP (cond_true, 0)) != REG)
3729 abort ();
3730 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond_true, 0)));
3732 /* For each such register, mark it conditionally dead. */
3733 EXECUTE_IF_SET_IN_REG_SET
3734 (diff, 0, i,
3736 struct reg_cond_life_info *rcli;
3737 rtx cond;
3739 rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
3741 if (REGNO_REG_SET_P (bb_true->global_live_at_start, i))
3742 cond = cond_false;
3743 else
3744 cond = cond_true;
3745 rcli->condition = alloc_EXPR_LIST (0, cond, NULL_RTX);
3747 splay_tree_insert (pbi->reg_cond_dead, i,
3748 (splay_tree_value) rcli);
3752 FREE_REG_SET (diff);
3754 #endif
3756 /* If this block has no successors, any stores to the frame that aren't
3757 used later in the block are dead. So make a pass over the block
3758 recording any such that are made and show them dead at the end. We do
3759 a very conservative and simple job here. */
3760 if (optimize
3761 && (flags & PROP_SCAN_DEAD_CODE)
3762 && (bb->succ == NULL
3763 || (bb->succ->succ_next == NULL
3764 && bb->succ->dest == EXIT_BLOCK_PTR)))
3766 rtx insn;
3767 for (insn = bb->end; insn != bb->head; insn = PREV_INSN (insn))
3768 if (GET_CODE (insn) == INSN
3769 && GET_CODE (PATTERN (insn)) == SET
3770 && GET_CODE (SET_DEST (PATTERN (insn))) == MEM)
3772 rtx mem = SET_DEST (PATTERN (insn));
3774 if (XEXP (mem, 0) == frame_pointer_rtx
3775 || (GET_CODE (XEXP (mem, 0)) == PLUS
3776 && XEXP (XEXP (mem, 0), 0) == frame_pointer_rtx
3777 && GET_CODE (XEXP (XEXP (mem, 0), 1)) == CONST_INT))
3778 pbi->mem_set_list = alloc_EXPR_LIST (0, mem, pbi->mem_set_list);
3782 return pbi;
3785 /* Release a propagate_block_info struct. */
3787 void
3788 free_propagate_block_info (pbi)
3789 struct propagate_block_info *pbi;
3791 free_EXPR_LIST_list (&pbi->mem_set_list);
3793 BITMAP_XFREE (pbi->new_set);
3795 #ifdef HAVE_conditional_execution
3796 splay_tree_delete (pbi->reg_cond_dead);
3797 BITMAP_XFREE (pbi->reg_cond_reg);
3798 #endif
3800 if (pbi->reg_next_use)
3801 free (pbi->reg_next_use);
3803 free (pbi);
3806 /* Compute the registers live at the beginning of a basic block BB from
3807 those live at the end.
3809 When called, REG_LIVE contains those live at the end. On return, it
3810 contains those live at the beginning.
3812 LOCAL_SET, if non-null, will be set with all registers killed by
3813 this basic block. */
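/* Per insn (editorial restatement of the standard step), the backward
   update applied is live = (live - defs (insn)) + uses (insn), realized
   by mark_set_regs followed by mark_used_regs in propagate_one_insn.  */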
3815 void
3816 propagate_block (bb, live, local_set, flags)
3817 basic_block bb;
3818 regset live;
3819 regset local_set;
3820 int flags;
3822 struct propagate_block_info *pbi;
3823 rtx insn, prev;
3825 pbi = init_propagate_block_info (bb, live, local_set, flags);
3827 if (flags & PROP_REG_INFO)
3829 register int i;
3831 /* Process the regs live at the end of the block.
3832 Mark them as not local to any one basic block. */
3833 EXECUTE_IF_SET_IN_REG_SET (live, 0, i,
3834 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
3837 /* Scan the block an insn at a time from end to beginning. */
3839 for (insn = bb->end;; insn = prev)
3841 /* If this is a call to `setjmp' et al, warn if any
3842 non-volatile datum is live. */
3843 if ((flags & PROP_REG_INFO)
3844 && GET_CODE (insn) == NOTE
3845 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
3846 IOR_REG_SET (regs_live_at_setjmp, pbi->reg_live);
3848 prev = propagate_one_insn (pbi, insn);
3850 if (insn == bb->head)
3851 break;
3854 free_propagate_block_info (pbi);
3857 /* Return 1 if X (the body of an insn, or part of it) is just dead stores
3858 (SET expressions whose destinations are registers dead after the insn).
3859 NEEDED is the regset that says which regs are alive after the insn.
3861 Unless CALL_OK is non-zero, an insn is needed if it contains a CALL.
3863 If X is the entire body of an insn, NOTES contains the reg notes
3864 pertaining to the insn. */
3866 static int
3867 insn_dead_p (pbi, x, call_ok, notes)
3868 struct propagate_block_info *pbi;
3869 rtx x;
3870 int call_ok;
3871 rtx notes ATTRIBUTE_UNUSED;
3873 enum rtx_code code = GET_CODE (x);
3875 #ifdef AUTO_INC_DEC
3876 /* If flow is invoked after reload, we must take existing AUTO_INC
3877 expressions into account. */
3878 if (reload_completed)
3880 for (; notes; notes = XEXP (notes, 1))
3882 if (REG_NOTE_KIND (notes) == REG_INC)
3884 int regno = REGNO (XEXP (notes, 0));
3886 /* Don't delete insns to set global regs. */
3887 if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
3888 || REGNO_REG_SET_P (pbi->reg_live, regno))
3889 return 0;
3893 #endif
3895 /* If setting something that's a reg or part of one,
3896 see if that register's altered value will be live. */
3898 if (code == SET)
3900 rtx r = SET_DEST (x);
3902 #ifdef HAVE_cc0
3903 if (GET_CODE (r) == CC0)
3904 return ! pbi->cc0_live;
3905 #endif
3907 /* A SET that is a subroutine call cannot be dead. */
3908 if (GET_CODE (SET_SRC (x)) == CALL)
3910 if (! call_ok)
3911 return 0;
3914 /* Don't eliminate loads from volatile memory or volatile asms. */
3915 else if (volatile_refs_p (SET_SRC (x)))
3916 return 0;
3918 if (GET_CODE (r) == MEM)
3920 rtx temp;
3922 if (MEM_VOLATILE_P (r))
3923 return 0;
3925 /* Walk the set of memory locations we are currently tracking
3926 and see if one is an identical match to this memory location.
3927 If so, this memory write is dead (remember, we're walking
3928 backwards from the end of the block to the start). */
3929 temp = pbi->mem_set_list;
3930 while (temp)
3932 if (rtx_equal_p (XEXP (temp, 0), r))
3933 return 1;
3934 temp = XEXP (temp, 1);
3937 else
3939 while (GET_CODE (r) == SUBREG
3940 || GET_CODE (r) == STRICT_LOW_PART
3941 || GET_CODE (r) == ZERO_EXTRACT)
3942 r = XEXP (r, 0);
3944 if (GET_CODE (r) == REG)
3946 int regno = REGNO (r);
3948 /* Obvious. */
3949 if (REGNO_REG_SET_P (pbi->reg_live, regno))
3950 return 0;
3952 /* If this is a hard register, verify that subsequent
3953 words are not needed. */
3954 if (regno < FIRST_PSEUDO_REGISTER)
3956 int n = HARD_REGNO_NREGS (regno, GET_MODE (r));
3958 while (--n > 0)
3959 if (REGNO_REG_SET_P (pbi->reg_live, regno+n))
3960 return 0;
3963 /* Don't delete insns to set global regs. */
3964 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
3965 return 0;
3967 /* Make sure insns to set the stack pointer aren't deleted. */
3968 if (regno == STACK_POINTER_REGNUM)
3969 return 0;
3971 /* Make sure insns to set the frame pointer aren't deleted. */
3972 if (regno == FRAME_POINTER_REGNUM
3973 && (! reload_completed || frame_pointer_needed))
3974 return 0;
3975 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3976 if (regno == HARD_FRAME_POINTER_REGNUM
3977 && (! reload_completed || frame_pointer_needed))
3978 return 0;
3979 #endif
3981 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3982 /* Make sure insns to set arg pointer are never deleted
3983 (if the arg pointer isn't fixed, there will be a USE
3984 for it, so we can treat it normally). */
3985 if (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
3986 return 0;
3987 #endif
3989 #ifdef PIC_OFFSET_TABLE_REGNUM
3990 /* Before reload, do not allow sets of the pic register
3991 to be deleted. Reload can insert references to
3992 constant pool memory anywhere in the function, making
3993 the PIC register live where it wasn't before. */
3994 if (regno == PIC_OFFSET_TABLE_REGNUM && fixed_regs[regno]
3995 && ! reload_completed)
3996 return 0;
3997 #endif
3999 /* Otherwise, the set is dead. */
4000 return 1;
4005 /* If performing several activities, insn is dead if each activity
4006 is individually dead. Also, CLOBBERs and USEs can be ignored; a
4007 CLOBBER or USE that's inside a PARALLEL doesn't make the insn
4008 worth keeping. */
4009 else if (code == PARALLEL)
4011 int i = XVECLEN (x, 0);
4013 for (i--; i >= 0; i--)
4014 if (GET_CODE (XVECEXP (x, 0, i)) != CLOBBER
4015 && GET_CODE (XVECEXP (x, 0, i)) != USE
4016 && ! insn_dead_p (pbi, XVECEXP (x, 0, i), call_ok, NULL_RTX))
4017 return 0;
4019 return 1;
4022 /* A CLOBBER of a pseudo-register that is dead serves no purpose. That
4023 is not necessarily true for hard registers. */
4024 else if (code == CLOBBER && GET_CODE (XEXP (x, 0)) == REG
4025 && REGNO (XEXP (x, 0)) >= FIRST_PSEUDO_REGISTER
4026 && ! REGNO_REG_SET_P (pbi->reg_live, REGNO (XEXP (x, 0))))
4027 return 1;
4029 /* We do not check other CLOBBER or USE here. An insn consisting of just
4030 a CLOBBER or just a USE should not be deleted. */
4031 return 0;
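/* A schematic example of the SET case above (register numbers
   illustrative): for

	(set (reg 117) (plus (reg 118) (const_int 4)))

   if bit 117 is clear in PBI->reg_live when the backward scan reaches
   the insn, and reg 117 is a pseudo to which none of the special
   cases above apply, the store is dead and insn_dead_p returns 1. */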
4034 /* If INSN is the last insn in a libcall, and assuming INSN is dead,
4035 return 1 if the entire library call is dead.
4036 This is true if INSN copies a register (hard or pseudo)
4037 and if the hard return reg of the call insn is dead.
4038 (The caller should have tested the destination of the SET inside
4039 INSN already for death.)
4041 If this insn doesn't just copy a register, then we don't
4042 have an ordinary libcall. In that case, cse could not have
4043 managed to substitute the source for the dest later on,
4044 so we can assume the libcall is dead.
4046 PBI is the block info giving pseudoregs live before this insn.
4047 NOTE is the REG_RETVAL note of the insn. */
4049 static int
4050 libcall_dead_p (pbi, note, insn)
4051 struct propagate_block_info *pbi;
4052 rtx note;
4053 rtx insn;
4055 rtx x = single_set (insn);
4057 if (x)
4059 register rtx r = SET_SRC (x);
4060 if (GET_CODE (r) == REG)
4062 rtx call = XEXP (note, 0);
4063 rtx call_pat;
4064 register int i;
4066 /* Find the call insn. */
4067 while (call != insn && GET_CODE (call) != CALL_INSN)
4068 call = NEXT_INSN (call);
4070 /* If there is none, do nothing special,
4071 since ordinary death handling can understand these insns. */
4072 if (call == insn)
4073 return 0;
4075 /* See if the hard reg holding the value is dead.
4076 If this is a PARALLEL, find the call within it. */
4077 call_pat = PATTERN (call);
4078 if (GET_CODE (call_pat) == PARALLEL)
4080 for (i = XVECLEN (call_pat, 0) - 1; i >= 0; i--)
4081 if (GET_CODE (XVECEXP (call_pat, 0, i)) == SET
4082 && GET_CODE (SET_SRC (XVECEXP (call_pat, 0, i))) == CALL)
4083 break;
4085 /* This may be a library call that is returning a value
4086 via an invisible pointer. Do nothing special, since
4087 ordinary death handling can understand these insns. */
4088 if (i < 0)
4089 return 0;
4091 call_pat = XVECEXP (call_pat, 0, i);
4094 return insn_dead_p (pbi, call_pat, 1, REG_NOTES (call));
4097 return 1;
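/* Schematically, the kind of libcall block this handles looks like
   (insn and register numbers illustrative):

	(call_insn 10 ... (set (reg:SI 0) (call ...)) ...)
	(insn 11 (set (reg:SI 117) (reg:SI 0)))
		with a REG_RETVAL note pointing back into the block

   The caller has found the copy in insn 11 dead; what remains is to
   check, via insn_dead_p on the call pattern, that the hard return
   register (reg 0 here) is dead as well, so the whole block can go. */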
4100 /* Return 1 if register REGNO was used before it was set, i.e. if it is
4101 live at function entry. Don't count global register variables, variables
4102 in registers that can be used for function arg passing, or variables in
4103 fixed hard registers. */
4105 int
4106 regno_uninitialized (regno)
4107 int regno;
4109 if (n_basic_blocks == 0
4110 || (regno < FIRST_PSEUDO_REGISTER
4111 && (global_regs[regno]
4112 || fixed_regs[regno]
4113 || FUNCTION_ARG_REGNO_P (regno))))
4114 return 0;
4116 return REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno);
4119 /* 1 if register REGNO was alive at a place where `setjmp' was called
4120 and was set more than once or is an argument.
4121 Such regs may be clobbered by `longjmp'. */
4123 int
4124 regno_clobbered_at_setjmp (regno)
4125 int regno;
4127 if (n_basic_blocks == 0)
4128 return 0;
4130 return ((REG_N_SETS (regno) > 1
4131 || REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno))
4132 && REGNO_REG_SET_P (regs_live_at_setjmp, regno));
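/* The source-level situation the test above captures, sketched here
   for illustration (not from the original sources):

	jmp_buf buf;
	int f (void)
	{
	  int i = 1;
	  if (setjmp (buf))
	    return i;
	  i = 2;
	  longjmp (buf, 1);
	}

   Here I is live at the setjmp and set more than once; if it lives in
   a call-clobbered register, the store of 2 can be undone when
   longjmp restores the setjmp-time registers. */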
4135 /* INSN references memory, possibly using autoincrement addressing modes.
4136 Find any entries on the mem_set_list that need to be invalidated due
4137 to an address change. */
4139 static void
4140 invalidate_mems_from_autoinc (pbi, insn)
4141 struct propagate_block_info *pbi;
4142 rtx insn;
4144 rtx note;
4145 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
4147 if (REG_NOTE_KIND (note) == REG_INC)
4149 rtx temp = pbi->mem_set_list;
4150 rtx prev = NULL_RTX;
4151 rtx next;
4153 while (temp)
4155 next = XEXP (temp, 1);
4156 if (reg_overlap_mentioned_p (XEXP (note, 0), XEXP (temp, 0)))
4158 /* Splice temp out of list. */
4159 if (prev)
4160 XEXP (prev, 1) = next;
4161 else
4162 pbi->mem_set_list = next;
4163 free_EXPR_LIST_node (temp);
4165 else
4166 prev = temp;
4167 temp = next;
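/* Example (register number illustrative): if mem_set_list holds an
   entry for (mem (reg 117)) and INSN carries a REG_INC note for
   (reg 117) -- say it contains (mem (post_inc (reg 117))) -- then the
   tracked address no longer names the same location, and the entry is
   spliced out above. */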
4173 /* Process the registers that are set within X: record them in
4174 PBI->new_set (except for CLOBBERs) and, where appropriate, clear
4176 them from PBI->reg_live, since they are dead prior to this insn.
4178 If INSN is nonzero, it is the insn being processed. */
4180 static void
4181 mark_set_regs (pbi, x, insn)
4182 struct propagate_block_info *pbi;
4183 rtx x, insn;
4185 rtx cond = NULL_RTX;
4186 rtx link;
4187 enum rtx_code code;
4189 if (insn)
4190 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
4192 if (REG_NOTE_KIND (link) == REG_INC)
4193 mark_set_1 (pbi, SET, XEXP (link, 0),
4194 (GET_CODE (x) == COND_EXEC
4195 ? COND_EXEC_TEST (x) : NULL_RTX),
4196 insn, pbi->flags);
4198 retry:
4199 switch (code = GET_CODE (x))
4201 case SET:
4202 case CLOBBER:
4203 mark_set_1 (pbi, code, SET_DEST (x), cond, insn, pbi->flags);
4204 return;
4206 case COND_EXEC:
4207 cond = COND_EXEC_TEST (x);
4208 x = COND_EXEC_CODE (x);
4209 goto retry;
4211 case PARALLEL:
4213 register int i;
4214 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
4216 rtx sub = XVECEXP (x, 0, i);
4217 switch (code = GET_CODE (sub))
4219 case COND_EXEC:
4220 if (cond != NULL_RTX)
4221 abort ();
4223 cond = COND_EXEC_TEST (sub);
4224 sub = COND_EXEC_CODE (sub);
4225 if (GET_CODE (sub) != SET && GET_CODE (sub) != CLOBBER)
4226 break;
4227 /* Fall through. */
4229 case SET:
4230 case CLOBBER:
4231 mark_set_1 (pbi, code, SET_DEST (sub), cond, insn, pbi->flags);
4232 break;
4234 default:
4235 break;
4238 break;
4241 default:
4242 break;
4246 /* Process a single SET rtx, X. */
4248 static void
4249 mark_set_1 (pbi, code, reg, cond, insn, flags)
4250 struct propagate_block_info *pbi;
4251 enum rtx_code code;
4252 rtx reg, cond, insn;
4253 int flags;
4255 int regno_first = -1, regno_last = -1;
4256 int not_dead = 0;
4257 int i;
4259 /* Some targets place small structures in registers for
4260 return values of functions. We have to detect this
4261 case specially here to get correct flow information. */
4262 if (GET_CODE (reg) == PARALLEL
4263 && GET_MODE (reg) == BLKmode)
4265 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
4266 mark_set_1 (pbi, code, XVECEXP (reg, 0, i), cond, insn, flags);
4267 return;
4270 /* Modifying just one hardware register of a multi-reg value or just a
4271 byte field of a register does not mean the value from before this insn
4272 is now dead. Of course, if it was dead afterward, it is unused now. */
4274 switch (GET_CODE (reg))
4276 case ZERO_EXTRACT:
4277 case SIGN_EXTRACT:
4278 case STRICT_LOW_PART:
4279 /* ??? Assumes STRICT_LOW_PART not used on multi-word registers. */
4280 do
4281 reg = XEXP (reg, 0);
4282 while (GET_CODE (reg) == SUBREG
4283 || GET_CODE (reg) == ZERO_EXTRACT
4284 || GET_CODE (reg) == SIGN_EXTRACT
4285 || GET_CODE (reg) == STRICT_LOW_PART);
4286 if (GET_CODE (reg) == MEM)
4287 break;
4288 not_dead = REGNO_REG_SET_P (pbi->reg_live, REGNO (reg));
4289 /* Fall through. */
4291 case REG:
4292 regno_last = regno_first = REGNO (reg);
4293 if (regno_first < FIRST_PSEUDO_REGISTER)
4294 regno_last += HARD_REGNO_NREGS (regno_first, GET_MODE (reg)) - 1;
4295 break;
4297 case SUBREG:
4298 if (GET_CODE (SUBREG_REG (reg)) == REG)
4300 enum machine_mode outer_mode = GET_MODE (reg);
4301 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (reg));
4303 /* Identify the range of registers affected. This is moderately
4304 tricky for hard registers. See alter_subreg. */
4306 regno_last = regno_first = REGNO (SUBREG_REG (reg));
4307 if (regno_first < FIRST_PSEUDO_REGISTER)
4309 #ifdef ALTER_HARD_SUBREG
4310 regno_first = ALTER_HARD_SUBREG (outer_mode, SUBREG_WORD (reg),
4311 inner_mode, regno_first);
4312 #else
4313 regno_first += SUBREG_WORD (reg);
4314 #endif
4315 regno_last = (regno_first
4316 + HARD_REGNO_NREGS (regno_first, outer_mode) - 1);
4318 /* Since we've just adjusted the register number ranges, make
4319 sure REG matches. Otherwise some_was_live will be clear
4320 when it shouldn't have been, and we'll create incorrect
4321 REG_UNUSED notes. */
4322 reg = gen_rtx_REG (outer_mode, regno_first);
4324 else
4326 /* If the number of words in the subreg is less than the number
4327 of words in the full register, we have a well-defined partial
4328 set. Otherwise the high bits are undefined.
4330 This is only really applicable to pseudos, since we just took
4331 care of multi-word hard registers. */
4332 if (((GET_MODE_SIZE (outer_mode)
4333 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
4334 < ((GET_MODE_SIZE (inner_mode)
4335 + UNITS_PER_WORD - 1) / UNITS_PER_WORD))
4336 not_dead = REGNO_REG_SET_P (pbi->reg_live, regno_first);
4338 reg = SUBREG_REG (reg);
4341 else
4342 reg = SUBREG_REG (reg);
4343 break;
4345 default:
4346 break;
4349 /* If this set is a MEM, then it kills any aliased writes.
4350 If this set is a REG, then it kills any MEMs which use the reg. */
4351 if (optimize && (flags & PROP_SCAN_DEAD_CODE))
4353 if (GET_CODE (reg) == MEM || GET_CODE (reg) == REG)
4355 rtx temp = pbi->mem_set_list;
4356 rtx prev = NULL_RTX;
4357 rtx next;
4359 while (temp)
4361 next = XEXP (temp, 1);
4362 if ((GET_CODE (reg) == MEM
4363 && output_dependence (XEXP (temp, 0), reg))
4364 || (GET_CODE (reg) == REG
4365 && reg_overlap_mentioned_p (reg, XEXP (temp, 0))))
4367 /* Splice this entry out of the list. */
4368 if (prev)
4369 XEXP (prev, 1) = next;
4370 else
4371 pbi->mem_set_list = next;
4372 free_EXPR_LIST_node (temp);
4374 else
4375 prev = temp;
4376 temp = next;
4380 /* If the memory reference has embedded side effects (autoincrement
4381 address modes), then we may need to kill some entries on the
4382 memory set list. */
4383 if (insn && GET_CODE (reg) == MEM)
4384 invalidate_mems_from_autoinc (pbi, insn);
4386 if (GET_CODE (reg) == MEM && ! side_effects_p (reg)
4387 /* ??? With more effort we could track conditional memory life. */
4388 && ! cond
4389 /* We do not know the size of a BLKmode store, so we do not track
4390 them for redundant store elimination. */
4391 && GET_MODE (reg) != BLKmode
4392 /* There are no REG_INC notes for SP, so we can't assume we'll see
4393 everything that invalidates it. To be safe, don't eliminate any
4394 stores through SP; none of them should be redundant anyway. */
4395 && ! reg_mentioned_p (stack_pointer_rtx, reg))
4396 pbi->mem_set_list = alloc_EXPR_LIST (0, reg, pbi->mem_set_list);
4399 if (GET_CODE (reg) == REG
4400 && ! (regno_first == FRAME_POINTER_REGNUM
4401 && (! reload_completed || frame_pointer_needed))
4402 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4403 && ! (regno_first == HARD_FRAME_POINTER_REGNUM
4404 && (! reload_completed || frame_pointer_needed))
4405 #endif
4406 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4407 && ! (regno_first == ARG_POINTER_REGNUM && fixed_regs[regno_first])
4408 #endif
4411 int some_was_live = 0, some_was_dead = 0;
4413 for (i = regno_first; i <= regno_last; ++i)
4415 int needed_regno = REGNO_REG_SET_P (pbi->reg_live, i);
4416 if (pbi->local_set)
4417 SET_REGNO_REG_SET (pbi->local_set, i);
4418 if (code != CLOBBER)
4419 SET_REGNO_REG_SET (pbi->new_set, i);
4421 some_was_live |= needed_regno;
4422 some_was_dead |= ! needed_regno;
4425 #ifdef HAVE_conditional_execution
4426 /* Consider conditional death in deciding that the register needs
4427 a death note. */
4428 if (some_was_live && ! not_dead
4429 /* The stack pointer is never dead. Well, not strictly true,
4430 but it's very difficult to tell from here. Hopefully
4431 combine_stack_adjustments will fix up the most egregious
4432 errors. */
4433 && regno_first != STACK_POINTER_REGNUM)
4435 for (i = regno_first; i <= regno_last; ++i)
4436 if (! mark_regno_cond_dead (pbi, i, cond))
4437 not_dead = 1;
4439 #endif
4441 /* Additional data to record if this is the final pass. */
4442 if (flags & (PROP_LOG_LINKS | PROP_REG_INFO
4443 | PROP_DEATH_NOTES | PROP_AUTOINC))
4445 register rtx y;
4446 register int blocknum = pbi->bb->index;
4448 y = NULL_RTX;
4449 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4451 y = pbi->reg_next_use[regno_first];
4453 /* The next use is no longer next, since a store intervenes. */
4454 for (i = regno_first; i <= regno_last; ++i)
4455 pbi->reg_next_use[i] = 0;
4458 if (flags & PROP_REG_INFO)
4460 for (i = regno_first; i <= regno_last; ++i)
4462 /* Count (weighted) references, stores, etc. This counts a
4463 register twice if it is modified, but that is correct. */
4464 REG_N_SETS (i) += 1;
4465 REG_N_REFS (i) += (optimize_size ? 1
4466 : pbi->bb->loop_depth + 1);
4468 /* The insns where a reg is live are normally counted
4469 elsewhere, but we want the count to include the insn
4470 where the reg is set, and the normal counting mechanism
4471 would not count it. */
4472 REG_LIVE_LENGTH (i) += 1;
4475 /* If this is a hard reg, record that this function uses the reg. */
4476 if (regno_first < FIRST_PSEUDO_REGISTER)
4478 for (i = regno_first; i <= regno_last; i++)
4479 regs_ever_live[i] = 1;
4481 else
4483 /* Keep track of which basic blocks each reg appears in. */
4484 if (REG_BASIC_BLOCK (regno_first) == REG_BLOCK_UNKNOWN)
4485 REG_BASIC_BLOCK (regno_first) = blocknum;
4486 else if (REG_BASIC_BLOCK (regno_first) != blocknum)
4487 REG_BASIC_BLOCK (regno_first) = REG_BLOCK_GLOBAL;
4491 if (! some_was_dead)
4493 if (flags & PROP_LOG_LINKS)
4495 /* Make a logical link from the next following insn
4496 that uses this register, back to this insn.
4497 The following insns have already been processed.
4499 We don't build a LOG_LINK for hard registers contained
4500 in ASM_OPERANDs. If these registers get replaced,
4501 we might wind up changing the semantics of the insn,
4502 even if reload can make what appear to be valid
4503 assignments later. */
4504 if (y && (BLOCK_NUM (y) == blocknum)
4505 && (regno_first >= FIRST_PSEUDO_REGISTER
4506 || asm_noperands (PATTERN (y)) < 0))
4507 LOG_LINKS (y) = alloc_INSN_LIST (insn, LOG_LINKS (y));
4510 else if (not_dead)
4511 ;
4512 else if (! some_was_live)
4514 if (flags & PROP_REG_INFO)
4515 REG_N_DEATHS (regno_first) += 1;
4517 if (flags & PROP_DEATH_NOTES)
4519 /* Note that dead stores have already been deleted
4520 when possible. If we get here, we have found a
4521 dead store that cannot be eliminated (because the
4522 same insn does something useful). Indicate this
4523 by marking the reg being set as dying here. */
4524 REG_NOTES (insn)
4525 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
4528 else
4530 if (flags & PROP_DEATH_NOTES)
4532 /* This is a case where we have a multi-word hard register
4533 and some, but not all, of the words of the register are
4534 needed in subsequent insns. Write REG_UNUSED notes
4535 for those parts that were not needed. This case should
4536 be rare. */
4538 for (i = regno_first; i <= regno_last; ++i)
4539 if (! REGNO_REG_SET_P (pbi->reg_live, i))
4540 REG_NOTES (insn)
4541 = alloc_EXPR_LIST (REG_UNUSED,
4542 gen_rtx_REG (reg_raw_mode[i], i),
4543 REG_NOTES (insn));
4548 /* Mark the register as being dead. */
4549 if (some_was_live
4550 && ! not_dead
4551 /* The stack pointer is never dead. Well, not strictly true,
4552 but it's very difficult to tell from here. Hopefully
4553 combine_stack_adjustments will fix up the most egregious
4554 errors. */
4555 && regno_first != STACK_POINTER_REGNUM)
4557 for (i = regno_first; i <= regno_last; ++i)
4558 CLEAR_REGNO_REG_SET (pbi->reg_live, i);
4561 else if (GET_CODE (reg) == REG)
4563 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4564 pbi->reg_next_use[regno_first] = 0;
4567 /* If this is the last pass and this is a SCRATCH, show it will be dying
4568 here and count it. */
4569 else if (GET_CODE (reg) == SCRATCH)
4571 if (flags & PROP_DEATH_NOTES)
4572 REG_NOTES (insn)
4573 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
4577 #ifdef HAVE_conditional_execution
4578 /* Mark REGNO conditionally dead.
4579 Return true if the register is now unconditionally dead. */
4581 static int
4582 mark_regno_cond_dead (pbi, regno, cond)
4583 struct propagate_block_info *pbi;
4584 int regno;
4585 rtx cond;
4587 /* If this is a store to a predicate register, the value of the
4588 predicate is changing; we don't know that the predicate as seen
4589 before is the same as that seen after. Flush all dependent
4590 conditions from reg_cond_dead. This will make all such
4591 conditionally live registers unconditionally live. */
4592 if (REGNO_REG_SET_P (pbi->reg_cond_reg, regno))
4593 flush_reg_cond_reg (pbi, regno);
4595 /* If this is an unconditional store, remove any conditional
4596 life that may have existed. */
4597 if (cond == NULL_RTX)
4598 splay_tree_remove (pbi->reg_cond_dead, regno);
4599 else
4601 splay_tree_node node;
4602 struct reg_cond_life_info *rcli;
4603 rtx ncond;
4605 /* Otherwise this is a conditional set. Record that fact.
4606 It may have been conditionally used, or there may be a
4607 subsequent set with a complementary condition. */
4609 node = splay_tree_lookup (pbi->reg_cond_dead, regno);
4610 if (node == NULL)
4612 /* The register was unconditionally live previously.
4613 Record the current condition as the condition under
4614 which it is dead. */
4615 rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
4616 rcli->condition = alloc_EXPR_LIST (0, cond, NULL_RTX);
4617 splay_tree_insert (pbi->reg_cond_dead, regno,
4618 (splay_tree_value) rcli);
4620 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));
4622 /* Not unconditionally dead. */
4623 return 0;
4625 else
4627 /* The register was conditionally live previously.
4628 Add the new condition to the old. */
4629 rcli = (struct reg_cond_life_info *) node->value;
4630 ncond = rcli->condition;
4631 ncond = ior_reg_cond (ncond, cond);
4633 /* If the register is now unconditionally dead,
4634 remove the entry in the splay_tree. */
4635 if (ncond == const1_rtx)
4636 splay_tree_remove (pbi->reg_cond_dead, regno);
4637 else
4639 rcli->condition = ncond;
4641 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));
4643 /* Not unconditionally dead. */
4644 return 0;
4649 return 1;
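/* A worked example, with illustrative register numbers: seeing

	(cond_exec (ne (reg 96) (const_int 0)) (set (reg 117) ...))

   on the backward scan records reg 117 in reg_cond_dead as dead under
   (ne (reg 96) 0). If an earlier set under the complementary
   (eq (reg 96) 0) is seen next, ior_reg_cond folds the disjunction to
   const1_rtx and the register becomes unconditionally dead. */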
4652 /* Called from splay_tree_delete for pbi->reg_cond_life. */
4654 static void
4655 free_reg_cond_life_info (value)
4656 splay_tree_value value;
4658 struct reg_cond_life_info *rcli = (struct reg_cond_life_info *) value;
4659 free_EXPR_LIST_list (&rcli->condition);
4660 free (rcli);
4663 /* Helper function for flush_reg_cond_reg. */
4665 static int
4666 flush_reg_cond_reg_1 (node, data)
4667 splay_tree_node node;
4668 void *data;
4670 struct reg_cond_life_info *rcli;
4671 int *xdata = (int *) data;
4672 unsigned int regno = xdata[0];
4673 rtx c, *prev;
4675 /* Don't need to search if last flushed value was farther on in
4676 the in-order traversal. */
4677 if (xdata[1] >= (int) node->key)
4678 return 0;
4680 /* Splice out portions of the expression that refer to regno. */
4681 rcli = (struct reg_cond_life_info *) node->value;
4682 c = *(prev = &rcli->condition);
4683 while (c)
4685 if (regno == REGNO (XEXP (XEXP (c, 0), 0)))
4687 rtx next = XEXP (c, 1);
4688 free_EXPR_LIST_node (c);
4689 c = *prev = next;
4691 else
4692 c = *(prev = &XEXP (c, 1));
4695 /* If the entire condition is now NULL, signal the node to be removed. */
4696 if (! rcli->condition)
4698 xdata[1] = node->key;
4699 return -1;
4701 else
4702 return 0;
4705 /* Flush all (sub) expressions referring to REGNO from REG_COND_LIVE. */
4707 static void
4708 flush_reg_cond_reg (pbi, regno)
4709 struct propagate_block_info *pbi;
4710 int regno;
4712 int pair[2];
4714 pair[0] = regno;
4715 pair[1] = -1;
4716 while (splay_tree_foreach (pbi->reg_cond_dead,
4717 flush_reg_cond_reg_1, pair) == -1)
4718 splay_tree_remove (pbi->reg_cond_dead, pair[1]);
4720 CLEAR_REGNO_REG_SET (pbi->reg_cond_reg, regno);
4723 /* Logical arithmetic on predicate conditions. IOR, NOT and NAND.
4724 We actually use EXPR_LIST to chain the sub-expressions together
4725 instead of IOR because it's easier to manipulate and we have
4726 the lists.c functions to reuse nodes.
4728 Return a new rtl expression as appropriate. */
4730 static rtx
4731 ior_reg_cond (old, x)
4732 rtx old, x;
4734 enum rtx_code x_code;
4735 rtx x_reg;
4736 rtx c;
4738 /* We expect these conditions to be of the form (eq reg 0). */
4739 x_code = GET_CODE (x);
4740 if (GET_RTX_CLASS (x_code) != '<'
4741 || GET_CODE (x_reg = XEXP (x, 0)) != REG
4742 || XEXP (x, 1) != const0_rtx)
4743 abort ();
4745 /* Search the expression for an existing sub-expression of X_REG. */
4746 for (c = old; c; c = XEXP (c, 1))
4748 rtx y = XEXP (c, 0);
4749 if (REGNO (XEXP (y, 0)) == REGNO (x_reg))
4751 /* If we find X already present in OLD, we need do nothing. */
4752 if (GET_CODE (y) == x_code)
4753 return old;
4755 /* If we find X being the complement of a condition in OLD,
4756 then the entire condition is true. */
4757 if (GET_CODE (y) == reverse_condition (x_code))
4758 return const1_rtx;
4762 /* Otherwise just add to the chain. */
4763 return alloc_EXPR_LIST (0, x, old);
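/* For instance (illustrative registers), starting from a chain
   holding just (eq (reg 96) 0):

	ior_reg_cond (old, (eq (reg 96) 0)) => OLD unchanged
	ior_reg_cond (old, (ne (reg 96) 0)) => const1_rtx (always true)
	ior_reg_cond (old, (eq (reg 97) 0)) => (eq (reg 97) 0) chained
						onto OLD

   as the EXPR_LIST encoding of the disjunction described above. */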
4766 static rtx
4767 not_reg_cond (x)
4768 rtx x;
4770 enum rtx_code x_code;
4771 rtx x_reg;
4773 /* We expect these conditions to be of the form (eq reg 0). */
4774 x_code = GET_CODE (x);
4775 if (GET_RTX_CLASS (x_code) != '<'
4776 || GET_CODE (x_reg = XEXP (x, 0)) != REG
4777 || XEXP (x, 1) != const0_rtx)
4778 abort ();
4780 return alloc_EXPR_LIST (0, gen_rtx_fmt_ee (reverse_condition (x_code),
4781 VOIDmode, x_reg, const0_rtx),
4782 NULL_RTX);
4785 static rtx
4786 nand_reg_cond (old, x)
4787 rtx old, x;
4789 enum rtx_code x_code;
4790 rtx x_reg;
4791 rtx c, *prev;
4793 /* We expect these conditions to be of the form (eq reg 0). */
4794 x_code = GET_CODE (x);
4795 if (GET_RTX_CLASS (x_code) != '<'
4796 || GET_CODE (x_reg = XEXP (x, 0)) != REG
4797 || XEXP (x, 1) != const0_rtx)
4798 abort ();
4800 /* Search the expression for an existing sub-expression of X_REG. */
4802 for (c = *(prev = &old); c; c = *(prev = &XEXP (c, 1)))
4804 rtx y = XEXP (c, 0);
4805 if (REGNO (XEXP (y, 0)) == REGNO (x_reg))
4807 /* If we find X already present in OLD, then we need to
4808 splice it out. */
4809 if (GET_CODE (y) == x_code)
4811 *prev = XEXP (c, 1);
4812 free_EXPR_LIST_node (c);
4813 return old ? old : const0_rtx;
4816 /* If we find X being the complement of a condition in OLD,
4817 then we need do nothing. */
4818 if (GET_CODE (y) == reverse_condition (x_code))
4819 return old;
4823 /* Otherwise, by implication, the register in question is now live for
4824 the inverse of the condition X. */
4825 return alloc_EXPR_LIST (0, gen_rtx_fmt_ee (reverse_condition (x_code),
4826 VOIDmode, x_reg, const0_rtx),
4827 old);
4829 #endif /* HAVE_conditional_execution */
4831 #ifdef AUTO_INC_DEC
4833 /* Try to substitute the auto-inc expression INC as the address inside
4834 MEM which occurs in INSN. Currently, the address of MEM is an expression
4835 involving INCR_REG, and INCR is the next use of INCR_REG; it is an insn
4836 that has a single set whose source is a PLUS of INCR_REG and something
4837 else. */
4839 static void
4840 attempt_auto_inc (pbi, inc, insn, mem, incr, incr_reg)
4841 struct propagate_block_info *pbi;
4842 rtx inc, insn, mem, incr, incr_reg;
4844 int regno = REGNO (incr_reg);
4845 rtx set = single_set (incr);
4846 rtx q = SET_DEST (set);
4847 rtx y = SET_SRC (set);
4848 int opnum = XEXP (y, 0) == incr_reg ? 0 : 1;
4850 /* Make sure this reg appears only once in this insn. */
4851 if (count_occurrences (PATTERN (insn), incr_reg, 1) != 1)
4852 return;
4854 if (dead_or_set_p (incr, incr_reg)
4855 /* Mustn't autoinc an eliminable register. */
4856 && (regno >= FIRST_PSEUDO_REGISTER
4857 || ! TEST_HARD_REG_BIT (elim_reg_set, regno)))
4859 /* This is the simple case. Try to make the auto-inc. If
4860 we can't, we are done. Otherwise, we will do any
4861 needed updates below. */
4862 if (! validate_change (insn, &XEXP (mem, 0), inc, 0))
4863 return;
4865 else if (GET_CODE (q) == REG
4866 /* PREV_INSN used here to check the semi-open interval
4867 [insn,incr). */
4868 && ! reg_used_between_p (q, PREV_INSN (insn), incr)
4869 /* We must also check for sets of q as q may be
4870 a call clobbered hard register and there may
4871 be a call between PREV_INSN (insn) and incr. */
4872 && ! reg_set_between_p (q, PREV_INSN (insn), incr))
4874 /* We have *p followed sometime later by q = p+size.
4875 Both p and q must be live afterward,
4876 and q is not used between INSN and its assignment.
4877 Change it to q = p, ...*q..., q = q+size.
4878 Then fall into the usual case. */
4879 rtx insns, temp;
4881 start_sequence ();
4882 emit_move_insn (q, incr_reg);
4883 insns = get_insns ();
4884 end_sequence ();
4886 if (basic_block_for_insn)
4887 for (temp = insns; temp; temp = NEXT_INSN (temp))
4888 set_block_for_insn (temp, pbi->bb);
4890 /* If we can't make the auto-inc, or can't make the
4891 replacement into Y, exit. There's no point in making
4892 the change below if we can't do the auto-inc and doing
4893 so is not correct in the pre-inc case. */
4895 XEXP (inc, 0) = q;
4896 validate_change (insn, &XEXP (mem, 0), inc, 1);
4897 validate_change (incr, &XEXP (y, opnum), q, 1);
4898 if (! apply_change_group ())
4899 return;
4901 /* We now know we'll be doing this change, so emit the
4902 new insn(s) and do the updates. */
4903 emit_insns_before (insns, insn);
4905 if (pbi->bb->head == insn)
4906 pbi->bb->head = insns;
4908 /* INCR will become a NOTE and INSN won't contain a
4909 use of INCR_REG. If a use of INCR_REG was just placed in
4910 the insn before INSN, make that the next use.
4911 Otherwise, invalidate it. */
4912 if (GET_CODE (PREV_INSN (insn)) == INSN
4913 && GET_CODE (PATTERN (PREV_INSN (insn))) == SET
4914 && SET_SRC (PATTERN (PREV_INSN (insn))) == incr_reg)
4915 pbi->reg_next_use[regno] = PREV_INSN (insn);
4916 else
4917 pbi->reg_next_use[regno] = 0;
4919 incr_reg = q;
4920 regno = REGNO (q);
4922 /* REGNO is now used in INCR which is below INSN, but
4923 it previously wasn't live here. If we don't mark
4924 it as live, we'll put a REG_DEAD note for it
4925 on this insn, which is incorrect. */
4926 SET_REGNO_REG_SET (pbi->reg_live, regno);
4928 /* If there are any calls between INSN and INCR, show
4929 that REGNO now crosses them. */
4930 for (temp = insn; temp != incr; temp = NEXT_INSN (temp))
4931 if (GET_CODE (temp) == CALL_INSN)
4932 REG_N_CALLS_CROSSED (regno)++;
4934 else
4935 return;
4937 /* If we haven't returned, it means we were able to make the
4938 auto-inc, so update the status. First, record that this insn
4939 has an implicit side effect. */
4941 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, incr_reg, REG_NOTES (insn));
4943 /* Modify the old increment-insn to simply copy
4944 the already-incremented value of our register. */
4945 if (! validate_change (incr, &SET_SRC (set), incr_reg, 0))
4946 abort ();
4948 /* If that makes it a no-op (copying the register into itself) delete
4949 it so it won't appear to be a "use" and a "set" of this
4950 register. */
4951 if (REGNO (SET_DEST (set)) == REGNO (incr_reg))
4953 /* If the original source was dead, it's dead now. */
4954 rtx note;
4956 while ((note = find_reg_note (incr, REG_DEAD, NULL_RTX)) != NULL_RTX)
4958 remove_note (incr, note);
4959 if (XEXP (note, 0) != incr_reg)
4960 CLEAR_REGNO_REG_SET (pbi->reg_live, REGNO (XEXP (note, 0)));
4963 PUT_CODE (incr, NOTE);
4964 NOTE_LINE_NUMBER (incr) = NOTE_INSN_DELETED;
4965 NOTE_SOURCE_FILE (incr) = 0;
4968 if (regno >= FIRST_PSEUDO_REGISTER)
4970 /* Count an extra reference to the reg. When a reg is
4971 incremented, spilling it is worse, so we want to make
4972 that less likely. */
4973 REG_N_REFS (regno) += (optimize_size ? 1 : pbi->bb->loop_depth + 1);
4975 /* Count the increment as a setting of the register,
4976 even though it isn't a SET in rtl. */
4977 REG_N_SETS (regno)++;
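/* The net effect in the simple case, shown schematically with
   illustrative insn and register numbers:

	before:	(insn 10 (set ... (mem (reg 117))))
		(insn 20 (set (reg 117) (plus (reg 117) (const_int 4))))

	after:	(insn 10 (set ... (mem (post_inc (reg 117)))))
			 with a REG_INC note for (reg 117)
		(insn 20 deleted)

   In the Q-different-from-P case above, a copy q = p is emitted
   before insn 10 instead and the increment is rewritten in terms
   of Q. */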
4981 /* X is a MEM found in INSN. See if we can convert it into an auto-increment
4982 reference. */
4984 static void
4985 find_auto_inc (pbi, x, insn)
4986 struct propagate_block_info *pbi;
4987 rtx x;
4988 rtx insn;
4990 rtx addr = XEXP (x, 0);
4991 HOST_WIDE_INT offset = 0;
4992 rtx set, y, incr, inc_val;
4993 int regno;
4994 int size = GET_MODE_SIZE (GET_MODE (x));
4996 if (GET_CODE (insn) == JUMP_INSN)
4997 return;
4999 /* Here we detect use of an index register which might be good for
5000 postincrement, postdecrement, preincrement, or predecrement. */
5002 if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
5003 offset = INTVAL (XEXP (addr, 1)), addr = XEXP (addr, 0);
5005 if (GET_CODE (addr) != REG)
5006 return;
5008 regno = REGNO (addr);
5010 /* Is the next use an increment that might make auto-increment? */
5011 incr = pbi->reg_next_use[regno];
5012 if (incr == 0 || BLOCK_NUM (incr) != BLOCK_NUM (insn))
5013 return;
5014 set = single_set (incr);
5015 if (set == 0 || GET_CODE (set) != SET)
5016 return;
5017 y = SET_SRC (set);
5019 if (GET_CODE (y) != PLUS)
5020 return;
5022 if (REG_P (XEXP (y, 0)) && REGNO (XEXP (y, 0)) == REGNO (addr))
5023 inc_val = XEXP (y, 1);
5024 else if (REG_P (XEXP (y, 1)) && REGNO (XEXP (y, 1)) == REGNO (addr))
5025 inc_val = XEXP (y, 0);
5026 else
5027 return;
5029 if (GET_CODE (inc_val) == CONST_INT)
5031 if (HAVE_POST_INCREMENT
5032 && (INTVAL (inc_val) == size && offset == 0))
5033 attempt_auto_inc (pbi, gen_rtx_POST_INC (Pmode, addr), insn, x,
5034 incr, addr);
5035 else if (HAVE_POST_DECREMENT
5036 && (INTVAL (inc_val) == -size && offset == 0))
5037 attempt_auto_inc (pbi, gen_rtx_POST_DEC (Pmode, addr), insn, x,
5038 incr, addr);
5039 else if (HAVE_PRE_INCREMENT
5040 && (INTVAL (inc_val) == size && offset == size))
5041 attempt_auto_inc (pbi, gen_rtx_PRE_INC (Pmode, addr), insn, x,
5042 incr, addr);
5043 else if (HAVE_PRE_DECREMENT
5044 && (INTVAL (inc_val) == -size && offset == -size))
5045 attempt_auto_inc (pbi, gen_rtx_PRE_DEC (Pmode, addr), insn, x,
5046 incr, addr);
5047 else if (HAVE_POST_MODIFY_DISP && offset == 0)
5048 attempt_auto_inc (pbi, gen_rtx_POST_MODIFY (Pmode, addr,
5049 gen_rtx_PLUS (Pmode,
5050 addr,
5051 inc_val)),
5052 insn, x, incr, addr);
5054 else if (GET_CODE (inc_val) == REG
5055 && ! reg_set_between_p (inc_val, PREV_INSN (insn),
5056 NEXT_INSN (incr)))
5059 if (HAVE_POST_MODIFY_REG && offset == 0)
5060 attempt_auto_inc (pbi, gen_rtx_POST_MODIFY (Pmode, addr,
5061 gen_rtx_PLUS (Pmode,
5062 addr,
5063 inc_val)),
5064 insn, x, incr, addr);
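/* Summary of the cases above, for a MEM of SIZE bytes addressed by
   (plus (reg) (const_int OFFSET)) -- or just (reg), OFFSET == 0 --
   whose register is next used in reg = reg + INC_VAL:

	INC_VAL == size, OFFSET == 0	  => POST_INC
	INC_VAL == -size, OFFSET == 0	  => POST_DEC
	INC_VAL == size, OFFSET == size	  => PRE_INC
	INC_VAL == -size, OFFSET == -size => PRE_DEC
	other constant or register INC_VAL,
	OFFSET == 0			  => POST_MODIFY

   each gated on the corresponding HAVE_* macro tested above. */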
5068 #endif /* AUTO_INC_DEC */
5070 static void
5071 mark_used_reg (pbi, reg, cond, insn)
5072 struct propagate_block_info *pbi;
5073 rtx reg;
5074 rtx cond ATTRIBUTE_UNUSED;
5075 rtx insn;
5077 int regno = REGNO (reg);
5078 int some_was_live = REGNO_REG_SET_P (pbi->reg_live, regno);
5079 int some_was_dead = ! some_was_live;
5080 int some_not_set;
5081 int n;
5083 /* A hard reg in a wide mode may really be multiple registers.
5084 If so, mark all of them just like the first. */
5085 if (regno < FIRST_PSEUDO_REGISTER)
5087 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
5088 while (--n > 0)
5090 int needed_regno = REGNO_REG_SET_P (pbi->reg_live, regno + n);
5091 some_was_live |= needed_regno;
5092 some_was_dead |= ! needed_regno;
5096 if (pbi->flags & (PROP_LOG_LINKS | PROP_AUTOINC))
5098 /* Record where each reg is used, so when the reg is set we know
5099 the next insn that uses it. */
5100 pbi->reg_next_use[regno] = insn;
5103 if (pbi->flags & PROP_REG_INFO)
5105 if (regno < FIRST_PSEUDO_REGISTER)
5107 /* If this is a register we are going to try to eliminate,
5108 don't mark it live here. If we are successful in
5109 eliminating it, it need not be live unless it is used for
5110 pseudos, in which case it will have been set live when it
5111 was allocated to the pseudos. If the register will not
5112 be eliminated, reload will set it live at that point.
5114 Otherwise, record that this function uses this register. */
5115 /* ??? The PPC backend tries to "eliminate" the pic
5116 register to itself. This should be fixed. In the
5117 meantime, hack around it. */
5119 if (! (TEST_HARD_REG_BIT (elim_reg_set, regno)
5120 && (regno == FRAME_POINTER_REGNUM
5121 || regno == ARG_POINTER_REGNUM)))
5123 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
5124 do
5125 regs_ever_live[regno + --n] = 1;
5126 while (n > 0);
5129 else
5131 /* Keep track of which basic block each reg appears in. */
5133 register int blocknum = pbi->bb->index;
5134 if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
5135 REG_BASIC_BLOCK (regno) = blocknum;
5136 else if (REG_BASIC_BLOCK (regno) != blocknum)
5137 REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
5139 /* Count (weighted) number of uses of each reg. */
5140 REG_N_REFS (regno) += (optimize_size ? 1
5141 : pbi->bb->loop_depth + 1);
5145 /* Find out whether any part of the register was set in this insn. */
5146 some_not_set = ! REGNO_REG_SET_P (pbi->new_set, regno);
5147 if (regno < FIRST_PSEUDO_REGISTER)
5149 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
5150 while (--n > 0)
5151 some_not_set |= ! REGNO_REG_SET_P (pbi->new_set, regno + n);
5154 /* Record and count the insns in which a reg dies. If it is used in
5155 this insn and was dead below the insn then it dies in this insn.
5156 If it was set in this insn, we do not make a REG_DEAD note;
5157 likewise if we already made such a note. */
5158 if ((pbi->flags & (PROP_DEATH_NOTES | PROP_REG_INFO))
5159 && some_was_dead
5160 && some_not_set)
5162 /* Check for the case where the register dying partially
5163 overlaps the register set by this insn. */
5164 if (regno < FIRST_PSEUDO_REGISTER
5165 && HARD_REGNO_NREGS (regno, GET_MODE (reg)) > 1)
5167 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
5168 while (--n >= 0)
5169 some_was_live |= REGNO_REG_SET_P (pbi->new_set, regno + n);
5172 /* If none of the words in X is needed, make a REG_DEAD note.
5173 Otherwise, we must make partial REG_DEAD notes. */
5174 if (! some_was_live)
5176 if ((pbi->flags & PROP_DEATH_NOTES)
5177 && ! find_regno_note (insn, REG_DEAD, regno))
5178 REG_NOTES (insn)
5179 = alloc_EXPR_LIST (REG_DEAD, reg, REG_NOTES (insn));
5181 if (pbi->flags & PROP_REG_INFO)
5182 REG_N_DEATHS (regno)++;
5184 else
5186 /* Don't make a REG_DEAD note for a part of a register
5187 that is set in the insn. */
5189 n = regno + HARD_REGNO_NREGS (regno, GET_MODE (reg)) - 1;
5190 for (; n >= regno; n--)
5191 if (! REGNO_REG_SET_P (pbi->reg_live, n)
5192 && ! dead_or_set_regno_p (insn, n))
5193 REG_NOTES (insn)
5194 = alloc_EXPR_LIST (REG_DEAD,
5195 gen_rtx_REG (reg_raw_mode[n], n),
5196 REG_NOTES (insn));
5200 SET_REGNO_REG_SET (pbi->reg_live, regno);
5201 if (regno < FIRST_PSEUDO_REGISTER)
5203 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
5204 while (--n > 0)
5205 SET_REGNO_REG_SET (pbi->reg_live, regno + n);
5208 #ifdef HAVE_conditional_execution
5209 /* If this is a conditional use, record that fact. If it is later
5210 conditionally set, we'll know to kill the register. */
5211 if (cond != NULL_RTX)
5213 splay_tree_node node;
5214 struct reg_cond_life_info *rcli;
5215 rtx ncond;
5217 if (some_was_live)
5219 node = splay_tree_lookup (pbi->reg_cond_dead, regno);
5220 if (node == NULL)
5222 /* The register was unconditionally live previously.
5223 No need to do anything. */
5224 ;
5225 else
5227 /* The register was conditionally live previously.
5228 Subtract the new life cond from the old death cond. */
5229 rcli = (struct reg_cond_life_info *) node->value;
5230 ncond = rcli->condition;
5231 ncond = nand_reg_cond (ncond, cond);
5233 /* If the register is now unconditionally live, remove the
5234 entry in the splay_tree. */
5235 if (ncond == const0_rtx)
5237 rcli->condition = NULL_RTX;
5238 splay_tree_remove (pbi->reg_cond_dead, regno);
5240 else
5242 rcli->condition = ncond;
5243 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));
5247 else
5249 /* The register was not previously live at all. Record
5250 the condition under which it is still dead. */
5251 rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
5252 rcli->condition = not_reg_cond (cond);
5253 splay_tree_insert (pbi->reg_cond_dead, regno,
5254 (splay_tree_value) rcli);
5256 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));
5259 else if (some_was_live)
5261 splay_tree_node node;
5262 struct reg_cond_life_info *rcli;
5264 node = splay_tree_lookup (pbi->reg_cond_dead, regno);
5265 if (node != NULL)
5267 /* The register was conditionally live previously, but is now
5268 unconditionally so. Remove it from the conditionally dead
5269 list, so that a conditional set won't cause us to think
5270 it dead. */
5271 rcli = (struct reg_cond_life_info *) node->value;
5272 rcli->condition = NULL_RTX;
5273 splay_tree_remove (pbi->reg_cond_dead, regno);
5277 #endif
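/* Example of the bookkeeping above (illustrative registers): if
   reg 117 is currently dead and is used under (eq (reg 96) 0), it
   becomes live, and reg_cond_dead records that it remains dead under
   the negation (ne (reg 96) 0) computed by not_reg_cond. A later
   unconditional use while live simply removes any reg_cond_dead
   entry. */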
5280 /* Scan expression X and store a 1-bit in NEW_LIVE for each reg it uses.
5281 This is done assuming the registers needed from X are those that
5282 have 1-bits in PBI->REG_LIVE.
5284 INSN is the containing instruction. If INSN is dead, this function
5285 is not called. */
5287 static void
5288 mark_used_regs (pbi, x, cond, insn)
5289 struct propagate_block_info *pbi;
5290 rtx x, cond, insn;
5292 register RTX_CODE code;
5293 register int regno;
5294 int flags = pbi->flags;
5296 retry:
5297 code = GET_CODE (x);
5298 switch (code)
5300 case LABEL_REF:
5301 case SYMBOL_REF:
5302 case CONST_INT:
5303 case CONST:
5304 case CONST_DOUBLE:
5305 case PC:
5306 case ADDR_VEC:
5307 case ADDR_DIFF_VEC:
5308 return;
5310 #ifdef HAVE_cc0
5311 case CC0:
5312 pbi->cc0_live = 1;
5313 return;
5314 #endif
5316 case CLOBBER:
5317 /* If we are clobbering a MEM, mark any registers inside the address
5318 as being used. */
5319 if (GET_CODE (XEXP (x, 0)) == MEM)
5320 mark_used_regs (pbi, XEXP (XEXP (x, 0), 0), cond, insn);
5321 return;
5323 case MEM:
5324 /* Don't bother watching stores to mems if this is not the
5325 final pass. We'll not be deleting dead stores this round. */
5326 if (optimize && (flags & PROP_SCAN_DEAD_CODE))
5328 /* Invalidate the data for the last MEM stored, but only if MEM is
5329 something that can be stored into. */
5330 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
5331 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
5332 /* Needn't clear the memory set list. */
5333 ;
5334 else
5336 rtx temp = pbi->mem_set_list;
5337 rtx prev = NULL_RTX;
5338 rtx next;
5340 while (temp)
5342 next = XEXP (temp, 1);
5343 if (anti_dependence (XEXP (temp, 0), x))
5345 /* Splice temp out of the list. */
5346 if (prev)
5347 XEXP (prev, 1) = next;
5348 else
5349 pbi->mem_set_list = next;
5350 free_EXPR_LIST_node (temp);
5352 else
5353 prev = temp;
5354 temp = next;
5358 /* If the memory reference has embedded side effects (autoincrement
5359 address modes), then we may need to kill some entries on the
5360 memory set list. */
5361 if (insn)
5362 invalidate_mems_from_autoinc (pbi, insn);
5365 #ifdef AUTO_INC_DEC
5366 if (flags & PROP_AUTOINC)
5367 find_auto_inc (pbi, x, insn);
5368 #endif
5369 break;
5371 case SUBREG:
5372 #ifdef CLASS_CANNOT_CHANGE_MODE
5373 if (GET_CODE (SUBREG_REG (x)) == REG
5374 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER
5375 && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (x),
5376 GET_MODE (SUBREG_REG (x))))
5377 REG_CHANGES_MODE (REGNO (SUBREG_REG (x))) = 1;
5378 #endif
5380 /* While we're here, optimize this case. */
5381 x = SUBREG_REG (x);
5382 if (GET_CODE (x) != REG)
5383 goto retry;
5384 /* Fall through. */
5386 case REG:
5387 /* See a register other than being set => mark it as needed. */
5388 mark_used_reg (pbi, x, cond, insn);
5389 return;
5391 case SET:
5393 register rtx testreg = SET_DEST (x);
5394 int mark_dest = 0;
5396 /* If storing into MEM, don't show it as being used. But do
5397 show the address as being used. */
5398 if (GET_CODE (testreg) == MEM)
5400 #ifdef AUTO_INC_DEC
5401 if (flags & PROP_AUTOINC)
5402 find_auto_inc (pbi, testreg, insn);
5403 #endif
5404 mark_used_regs (pbi, XEXP (testreg, 0), cond, insn);
5405 mark_used_regs (pbi, SET_SRC (x), cond, insn);
5406 return;
5409 /* Storing in STRICT_LOW_PART is like storing in a reg
5410 in that this SET might be dead, so ignore it in TESTREG,
5411 but in some other ways it is like using the reg.
5413 Storing in a SUBREG or a bit field is like storing the entire
5414 register in that if the register's value is not used
5415 then this SET is not needed. */
5416 while (GET_CODE (testreg) == STRICT_LOW_PART
5417 || GET_CODE (testreg) == ZERO_EXTRACT
5418 || GET_CODE (testreg) == SIGN_EXTRACT
5419 || GET_CODE (testreg) == SUBREG)
5421 #ifdef CLASS_CANNOT_CHANGE_MODE
5422 if (GET_CODE (testreg) == SUBREG
5423 && GET_CODE (SUBREG_REG (testreg)) == REG
5424 && REGNO (SUBREG_REG (testreg)) >= FIRST_PSEUDO_REGISTER
5425 && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (SUBREG_REG (testreg)),
5426 GET_MODE (testreg)))
5427 REG_CHANGES_MODE (REGNO (SUBREG_REG (testreg))) = 1;
5428 #endif
5430 /* Modifying a single register in an alternate mode
5431 does not use any of the old value. But these other
5432 ways of storing in a register do use the old value. */
5433 if (GET_CODE (testreg) == SUBREG
5434 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
5435 ;
5436 else
5437 mark_dest = 1;
5439 testreg = XEXP (testreg, 0);
5442 /* If this is a store into a register, recursively scan the
5443 value being stored. */
5445 if ((GET_CODE (testreg) == PARALLEL
5446 && GET_MODE (testreg) == BLKmode)
5447 || (GET_CODE (testreg) == REG
5448 && (regno = REGNO (testreg),
5449 ! (regno == FRAME_POINTER_REGNUM
5450 && (! reload_completed || frame_pointer_needed)))
5451 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
5452 && ! (regno == HARD_FRAME_POINTER_REGNUM
5453 && (! reload_completed || frame_pointer_needed))
5454 #endif
5455 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
5456 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
5457 #endif
5460 if (mark_dest)
5461 mark_used_regs (pbi, SET_DEST (x), cond, insn);
5462 mark_used_regs (pbi, SET_SRC (x), cond, insn);
5463 return;
5466 break;
5468 case ASM_OPERANDS:
5469 case UNSPEC_VOLATILE:
5470 case TRAP_IF:
5471 case ASM_INPUT:
5473 /* Traditional and volatile asm instructions must be considered to use
5474 and clobber all hard registers, all pseudo-registers and all of
5475 memory. So must TRAP_IF and UNSPEC_VOLATILE operations.
5477 Consider for instance a volatile asm that changes the fpu rounding
5478 mode. An insn should not be moved across this even if it only uses
5479 pseudo-regs because it might give an incorrectly rounded result.
5481 ?!? Unfortunately, marking all hard registers as live causes massive
5482 problems for the register allocator and marking all pseudos as live
5483 creates mountains of uninitialized variable warnings.
5485 So for now, just clear the memory set list and mark any regs
5486 we can find in ASM_OPERANDS as used. */
5487 if (code != ASM_OPERANDS || MEM_VOLATILE_P (x))
5488 free_EXPR_LIST_list (&pbi->mem_set_list);
5490 /* For all ASM_OPERANDS, we must traverse the vector of input operands.
5491 We cannot just fall through here since then we would be confused
5492 by the ASM_INPUT rtx inside ASM_OPERANDS, which does not indicate
5493 a traditional asm, unlike its normal usage. */
5494 if (code == ASM_OPERANDS)
5496 int j;
5498 for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
5499 mark_used_regs (pbi, ASM_OPERANDS_INPUT (x, j), cond, insn);
5501 break;
5504 case COND_EXEC:
5505 if (cond != NULL_RTX)
5506 abort ();
5508 mark_used_regs (pbi, COND_EXEC_TEST (x), NULL_RTX, insn);
5510 cond = COND_EXEC_TEST (x);
5511 x = COND_EXEC_CODE (x);
5512 goto retry;
5514 case PHI:
5515 /* We _do_not_ want to scan operands of phi nodes. Operands of
5516 a phi function are evaluated only when control reaches this
5517 block along a particular edge. Therefore, regs that appear
5518 as arguments to a phi should not be added to the global
5519 live-at-start set. */
5520 return;
5522 default:
5523 break;
5526 /* Recursively scan the operands of this expression. */
5529 register const char *fmt = GET_RTX_FORMAT (code);
5530 register int i;
5532 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5534 if (fmt[i] == 'e')
5536 /* Tail recursive case: save a function call level. */
5537 if (i == 0)
5539 x = XEXP (x, 0);
5540 goto retry;
5542 mark_used_regs (pbi, XEXP (x, i), cond, insn);
5544 else if (fmt[i] == 'E')
5546 register int j;
5547 for (j = 0; j < XVECLEN (x, i); j++)
5548 mark_used_regs (pbi, XVECEXP (x, i, j), cond, insn);
5554 #ifdef AUTO_INC_DEC
5556 static int
5557 try_pre_increment_1 (pbi, insn)
5558 struct propagate_block_info *pbi;
5559 rtx insn;
5561 /* Find the next use of this reg. If in same basic block,
5562 make it do pre-increment or pre-decrement if appropriate. */
5563 rtx x = single_set (insn);
5564 HOST_WIDE_INT amount = ((GET_CODE (SET_SRC (x)) == PLUS ? 1 : -1)
5565 * INTVAL (XEXP (SET_SRC (x), 1)));
5566 int regno = REGNO (SET_DEST (x));
5567 rtx y = pbi->reg_next_use[regno];
5568 if (y != 0
5569 && BLOCK_NUM (y) == BLOCK_NUM (insn)
5570 /* Don't do this if the reg dies, or gets set in y; a standard addressing
5571 mode would be better. */
5572 && ! dead_or_set_p (y, SET_DEST (x))
5573 && try_pre_increment (y, SET_DEST (x), amount))
5575 /* We have found a suitable auto-increment
5576 and already changed insn Y to do it.
5577 So flush this increment-instruction. */
5578 PUT_CODE (insn, NOTE);
5579 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
5580 NOTE_SOURCE_FILE (insn) = 0;
5581 /* Count a reference to this reg for the increment
5582 insn we are deleting. When a reg is incremented,
5583 spilling it is worse, so we want to make that
5584 less likely. */
5585 if (regno >= FIRST_PSEUDO_REGISTER)
5587 REG_N_REFS (regno) += (optimize_size ? 1
5588 : pbi->bb->loop_depth + 1);
5589 REG_N_SETS (regno)++;
5591 return 1;
5593 return 0;
5596 /* Try to change INSN so that it does pre-increment or pre-decrement
5597 addressing on register REG in order to add AMOUNT to REG.
5598 AMOUNT is negative for pre-decrement.
5599 Returns 1 if the change could be made.
5600 This checks all about the validity of the result of modifying INSN. */
5602 static int
5603 try_pre_increment (insn, reg, amount)
5604 rtx insn, reg;
5605 HOST_WIDE_INT amount;
5607 register rtx use;
5609 /* Nonzero if we can try to make a pre-increment or pre-decrement.
5610 For example, addl $4,r1; movl (r1),... can become movl +(r1),... */
5611 int pre_ok = 0;
5612 /* Nonzero if we can try to make a post-increment or post-decrement.
5613 For example, addl $4,r1; movl -4(r1),... can become movl (r1)+,...
5614 It is possible for both PRE_OK and POST_OK to be nonzero if the machine
5615 supports both pre-inc and post-inc, or both pre-dec and post-dec. */
5616 int post_ok = 0;
5618 /* Nonzero if the opportunity actually requires post-inc or post-dec. */
5619 int do_post = 0;
5621 /* From the sign of increment, see which possibilities are conceivable
5622 on this target machine. */
5623 if (HAVE_PRE_INCREMENT && amount > 0)
5624 pre_ok = 1;
5625 if (HAVE_POST_INCREMENT && amount > 0)
5626 post_ok = 1;
5628 if (HAVE_PRE_DECREMENT && amount < 0)
5629 pre_ok = 1;
5630 if (HAVE_POST_DECREMENT && amount < 0)
5631 post_ok = 1;
5633 if (! (pre_ok || post_ok))
5634 return 0;
5636 /* It is not safe to add a side effect to a jump insn
5637 because if the incremented register is spilled and must be reloaded
5638 there would be no way to store the incremented value back in memory. */
5640 if (GET_CODE (insn) == JUMP_INSN)
5641 return 0;
5643 use = 0;
5644 if (pre_ok)
5645 use = find_use_as_address (PATTERN (insn), reg, 0);
5646 if (post_ok && (use == 0 || use == (rtx) 1))
5648 use = find_use_as_address (PATTERN (insn), reg, -amount);
5649 do_post = 1;
5652 if (use == 0 || use == (rtx) 1)
5653 return 0;
5655 if (GET_MODE_SIZE (GET_MODE (use)) != (amount > 0 ? amount : - amount))
5656 return 0;
5658 /* See if this combination of instruction and addressing mode exists. */
5659 if (! validate_change (insn, &XEXP (use, 0),
5660 gen_rtx_fmt_e (amount > 0
5661 ? (do_post ? POST_INC : PRE_INC)
5662 : (do_post ? POST_DEC : PRE_DEC),
5663 Pmode, reg), 0))
5664 return 0;
5666 /* Record that this insn now has an implicit side effect on X. */
5667 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, reg, REG_NOTES (insn));
5668 return 1;
5671 #endif /* AUTO_INC_DEC */
5673 /* Find the place in the rtx X where REG is used as a memory address.
5674 Return the MEM rtx that so uses it.
5675 If PLUSCONST is nonzero, search instead for a memory address equivalent to
5676 (plus REG (const_int PLUSCONST)).
5678 If such an address does not appear, return 0.
5679 If REG appears more than once, or is used other than in such an address,
5680 return (rtx)1. */
5682 rtx
5683 find_use_as_address (x, reg, plusconst)
5684 register rtx x;
5685 rtx reg;
5686 HOST_WIDE_INT plusconst;
5688 enum rtx_code code = GET_CODE (x);
5689 const char *fmt = GET_RTX_FORMAT (code);
5690 register int i;
5691 register rtx value = 0;
5692 register rtx tem;
5694 if (code == MEM && XEXP (x, 0) == reg && plusconst == 0)
5695 return x;
5697 if (code == MEM && GET_CODE (XEXP (x, 0)) == PLUS
5698 && XEXP (XEXP (x, 0), 0) == reg
5699 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
5700 && INTVAL (XEXP (XEXP (x, 0), 1)) == plusconst)
5701 return x;
5703 if (code == SIGN_EXTRACT || code == ZERO_EXTRACT)
5705 /* If REG occurs inside a MEM used in a bit-field reference,
5706 that is unacceptable. */
5707 if (find_use_as_address (XEXP (x, 0), reg, 0) != 0)
5708 return (rtx) (HOST_WIDE_INT) 1;
5711 if (x == reg)
5712 return (rtx) (HOST_WIDE_INT) 1;
5714 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5716 if (fmt[i] == 'e')
5718 tem = find_use_as_address (XEXP (x, i), reg, plusconst);
5719 if (value == 0)
5720 value = tem;
5721 else if (tem != 0)
5722 return (rtx) (HOST_WIDE_INT) 1;
5724 else if (fmt[i] == 'E')
5726 register int j;
5727 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
5729 tem = find_use_as_address (XVECEXP (x, i, j), reg, plusconst);
5730 if (value == 0)
5731 value = tem;
5732 else if (tem != 0)
5733 return (rtx) (HOST_WIDE_INT) 1;
5738 return value;
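/* Usage sketch: try_pre_increment above calls

	use = find_use_as_address (PATTERN (insn), reg, 0);

   and must reject both failure encodings, 0 (no such address) and
   (rtx) 1 (REG used, but not purely as such an address), before
   attempting the replacement. */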
5741 /* Write information about registers and basic blocks into FILE.
5742 This is part of making a debugging dump. */
5744 void
5745 dump_regset (r, outf)
5746 regset r;
5747 FILE *outf;
5749 int i;
5750 if (r == NULL)
5752 fputs (" (nil)", outf);
5753 return;
5756 EXECUTE_IF_SET_IN_REG_SET (r, 0, i,
5758 fprintf (outf, " %d", i);
5759 if (i < FIRST_PSEUDO_REGISTER)
5760 fprintf (outf, " [%s]",
5761 reg_names[i]);
5765 void
5766 debug_regset (r)
5767 regset r;
5769 dump_regset (r, stderr);
5770 putc ('\n', stderr);
5773 void
5774 dump_flow_info (file)
5775 FILE *file;
5777 register int i;
5778 static const char * const reg_class_names[] = REG_CLASS_NAMES;
5780 fprintf (file, "%d registers.\n", max_regno);
5781 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
5782 if (REG_N_REFS (i))
5784 enum reg_class class, altclass;
5785 fprintf (file, "\nRegister %d used %d times across %d insns",
5786 i, REG_N_REFS (i), REG_LIVE_LENGTH (i));
5787 if (REG_BASIC_BLOCK (i) >= 0)
5788 fprintf (file, " in block %d", REG_BASIC_BLOCK (i));
5789 if (REG_N_SETS (i))
5790 fprintf (file, "; set %d time%s", REG_N_SETS (i),
5791 (REG_N_SETS (i) == 1) ? "" : "s");
5792 if (REG_USERVAR_P (regno_reg_rtx[i]))
5793 fprintf (file, "; user var");
5794 if (REG_N_DEATHS (i) != 1)
5795 fprintf (file, "; dies in %d places", REG_N_DEATHS (i));
5796 if (REG_N_CALLS_CROSSED (i) == 1)
5797 fprintf (file, "; crosses 1 call");
5798 else if (REG_N_CALLS_CROSSED (i))
5799 fprintf (file, "; crosses %d calls", REG_N_CALLS_CROSSED (i));
5800 if (PSEUDO_REGNO_BYTES (i) != UNITS_PER_WORD)
5801 fprintf (file, "; %d bytes", PSEUDO_REGNO_BYTES (i));
5802 class = reg_preferred_class (i);
5803 altclass = reg_alternate_class (i);
5804 if (class != GENERAL_REGS || altclass != ALL_REGS)
5806 if (altclass == ALL_REGS || class == ALL_REGS)
5807 fprintf (file, "; pref %s", reg_class_names[(int) class]);
5808 else if (altclass == NO_REGS)
5809 fprintf (file, "; %s or none", reg_class_names[(int) class]);
5810 else
5811 fprintf (file, "; pref %s, else %s",
5812 reg_class_names[(int) class],
5813 reg_class_names[(int) altclass]);
5815 if (REGNO_POINTER_FLAG (i))
5816 fprintf (file, "; pointer");
5817 fprintf (file, ".\n");
5820 fprintf (file, "\n%d basic blocks, %d edges.\n", n_basic_blocks, n_edges);
5821 for (i = 0; i < n_basic_blocks; i++)
5823 register basic_block bb = BASIC_BLOCK (i);
5824 register edge e;
5826 fprintf (file, "\nBasic block %d: first insn %d, last %d, loop_depth %d, count %d.\n",
5827 i, INSN_UID (bb->head), INSN_UID (bb->end), bb->loop_depth, bb->count);
5829 fprintf (file, "Predecessors: ");
5830 for (e = bb->pred; e; e = e->pred_next)
5831 dump_edge_info (file, e, 0);
5833 fprintf (file, "\nSuccessors: ");
5834 for (e = bb->succ; e; e = e->succ_next)
5835 dump_edge_info (file, e, 1);
5837 fprintf (file, "\nRegisters live at start:");
5838 dump_regset (bb->global_live_at_start, file);
5840 fprintf (file, "\nRegisters live at end:");
5841 dump_regset (bb->global_live_at_end, file);
5843 putc ('\n', file);
5846 putc ('\n', file);
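/* For reference, a typical per-register line of this dump looks
   roughly like (numbers invented for illustration):

	Register 117 used 3 times across 14 insns in block 2; set 2 times; user var.

   followed by the per-block listing of predecessors, successors, and
   the live register sets at block boundaries. */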
5849 void
5850 debug_flow_info ()
5852 dump_flow_info (stderr);
5855 static void
5856 dump_edge_info (file, e, do_succ)
5857 FILE *file;
5858 edge e;
5859 int do_succ;
5861 basic_block side = (do_succ ? e->dest : e->src);
5863 if (side == ENTRY_BLOCK_PTR)
5864 fputs (" ENTRY", file);
5865 else if (side == EXIT_BLOCK_PTR)
5866 fputs (" EXIT", file);
5867 else
5868 fprintf (file, " %d", side->index);
5870 if (e->count)
5871 fprintf (file, " count:%d", e->count);
5873 if (e->flags)
5875 static const char * const bitnames[] = {
5876 "fallthru", "crit", "ab", "abcall", "eh", "fake"
5878 int comma = 0;
5879 int i, flags = e->flags;
5881 fputc (' ', file);
5882 fputc ('(', file);
5883 for (i = 0; flags; i++)
5884 if (flags & (1 << i))
5886 flags &= ~(1 << i);
5888 if (comma)
5889 fputc (',', file);
5890 if (i < (int) (sizeof (bitnames) / sizeof (*bitnames)))
5891 fputs (bitnames[i], file);
5892 else
5893 fprintf (file, "%d", i);
5894 comma = 1;
5896 fputc (')', file);
5900 /* Print out one basic block with live information at start and end. */
5902 void
5903 dump_bb (bb, outf)
5904 basic_block bb;
5905 FILE *outf;
5907 rtx insn;
5908 rtx last;
5909 edge e;
5911 fprintf (outf, ";; Basic block %d, loop depth %d, count %d",
5912 bb->index, bb->loop_depth, bb->count);
5913 if (bb->eh_beg != -1 || bb->eh_end != -1)
5914 fprintf (outf, ", eh regions %d/%d", bb->eh_beg, bb->eh_end);
5915 putc ('\n', outf);
5917 fputs (";; Predecessors: ", outf);
5918 for (e = bb->pred; e; e = e->pred_next)
5919 dump_edge_info (outf, e, 0);
5920 putc ('\n', outf);
5922 fputs (";; Registers live at start:", outf);
5923 dump_regset (bb->global_live_at_start, outf);
5924 putc ('\n', outf);
5926 for (insn = bb->head, last = NEXT_INSN (bb->end);
5927 insn != last;
5928 insn = NEXT_INSN (insn))
5929 print_rtl_single (outf, insn);
5931 fputs (";; Registers live at end:", outf);
5932 dump_regset (bb->global_live_at_end, outf);
5933 putc ('\n', outf);
5935 fputs (";; Successors: ", outf);
5936 for (e = bb->succ; e; e = e->succ_next)
5937 dump_edge_info (outf, e, 1);
5938 putc ('\n', outf);
5941 void
5942 debug_bb (bb)
5943 basic_block bb;
5945 dump_bb (bb, stderr);
5948 void
5949 debug_bb_n (n)
5950 int n;
5952 dump_bb (BASIC_BLOCK (n), stderr);
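/* The debug_* entry points above exist so this information can be
   dumped from within a debugger, e.g. "call debug_flow_info ()" or
   "call debug_bb_n (2)" under gdb.  */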
5955 /* Like print_rtl, but also print out live information for the start of each
5956 basic block. */
5958 void
5959 print_rtl_with_bb (outf, rtx_first)
5960 FILE *outf;
5961 rtx rtx_first;
5963 register rtx tmp_rtx;
5965 if (rtx_first == 0)
5966 fprintf (outf, "(nil)\n");
5967 else
5969 int i;
5970 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
5971 int max_uid = get_max_uid ();
5972 basic_block *start = (basic_block *)
5973 xcalloc (max_uid, sizeof (basic_block));
5974 basic_block *end = (basic_block *)
5975 xcalloc (max_uid, sizeof (basic_block));
5976 enum bb_state *in_bb_p = (enum bb_state *)
5977 xcalloc (max_uid, sizeof (enum bb_state));
5979 for (i = n_basic_blocks - 1; i >= 0; i--)
5981 basic_block bb = BASIC_BLOCK (i);
5982 rtx x;
5984 start[INSN_UID (bb->head)] = bb;
5985 end[INSN_UID (bb->end)] = bb;
5986 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
5988 enum bb_state state = IN_MULTIPLE_BB;
5989 if (in_bb_p[INSN_UID (x)] == NOT_IN_BB)
5990 state = IN_ONE_BB;
5991 in_bb_p[INSN_UID (x)] = state;
5993 if (x == bb->end)
5994 break;
5998 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
6000 int did_output;
6001 basic_block bb;
6003 if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
6005 fprintf (outf, ";; Start of basic block %d, registers live:",
6006 bb->index);
6007 dump_regset (bb->global_live_at_start, outf);
6008 putc ('\n', outf);
6011 if (in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB
6012 && GET_CODE (tmp_rtx) != NOTE
6013 && GET_CODE (tmp_rtx) != BARRIER)
6014 fprintf (outf, ";; Insn is not within a basic block\n");
6015 else if (in_bb_p[INSN_UID (tmp_rtx)] == IN_MULTIPLE_BB)
6016 fprintf (outf, ";; Insn is in multiple basic blocks\n");
6018 did_output = print_rtl_single (outf, tmp_rtx);
6020 if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
6022 fprintf (outf, ";; End of basic block %d, registers live:\n",
6023 bb->index);
6024 dump_regset (bb->global_live_at_end, outf);
6025 putc ('\n', outf);
6028 if (did_output)
6029 putc ('\n', outf);
6032 free (start);
6033 free (end);
6034 free (in_bb_p);
6037 if (current_function_epilogue_delay_list != 0)
6039 fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
6040 for (tmp_rtx = current_function_epilogue_delay_list; tmp_rtx != 0;
6041 tmp_rtx = XEXP (tmp_rtx, 1))
6042 print_rtl_single (outf, XEXP (tmp_rtx, 0));
6046 /* Compute dominator relationships using new flow graph structures. */
6048 void
6049 compute_flow_dominators (dominators, post_dominators)
6050 sbitmap *dominators;
6051 sbitmap *post_dominators;
6053 int bb;
6054 sbitmap *temp_bitmap;
6055 edge e;
6056 basic_block *worklist, *workend, *qin, *qout;
6057 int qlen;
6059 /* Allocate a worklist array/queue. Entries are only added to the
6060 list if they were not already on the list. So the size is
6061 bounded by the number of basic blocks. */
6062 worklist = (basic_block *) xmalloc (sizeof (basic_block) * n_basic_blocks);
6063 workend = &worklist[n_basic_blocks];
6065 temp_bitmap = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
6066 sbitmap_vector_zero (temp_bitmap, n_basic_blocks);
6068 if (dominators)
6070 /* The optimistic setting of dominators requires us to put every
6071 block on the work list initially. */
6072 qin = qout = worklist;
6073 for (bb = 0; bb < n_basic_blocks; bb++)
6075 *qin++ = BASIC_BLOCK (bb);
6076 BASIC_BLOCK (bb)->aux = BASIC_BLOCK (bb);
6078 qlen = n_basic_blocks;
6079 qin = worklist;
6081 /* We want a maximal solution, so initially assume everything dominates
6082 everything else. */
6083 sbitmap_vector_ones (dominators, n_basic_blocks);
6085 /* Mark successors of the entry block so we can identify them below. */
6086 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6087 e->dest->aux = ENTRY_BLOCK_PTR;
6089 /* Iterate until the worklist is empty. */
6090 while (qlen)
6092 /* Take the first entry off the worklist. */
6093 basic_block b = *qout++;
6094 if (qout >= workend)
6095 qout = worklist;
6096 qlen--;
6098 bb = b->index;
6100 /* Compute the intersection of the dominators of all the
6101 predecessor blocks.
6103 If one of the predecessor blocks is the ENTRY block, then the
6104 intersection of the dominators of the predecessor blocks is
6105 defined as the null set. We can identify such blocks by the
6106 special value in the AUX field in the block structure. */
6107 if (b->aux == ENTRY_BLOCK_PTR)
6109 /* Do not clear the aux field for blocks which are
6110 successors of the ENTRY block. That way we never add
6111 them to the worklist again.
6113 The intersect of dominators of the preds of this block is
6114 defined as the null set. */
6115 sbitmap_zero (temp_bitmap[bb]);
6117 else
6119 /* Clear the aux field of this block so it can be added to
6120 the worklist again if necessary. */
6121 b->aux = NULL;
6122 sbitmap_intersection_of_preds (temp_bitmap[bb], dominators, bb);
6125 /* Make sure each block always dominates itself. */
6126 SET_BIT (temp_bitmap[bb], bb);
6128 /* If the out state of this block changed, then we need to
6129 add the successors of this block to the worklist if they
6130 are not already on the worklist. */
6131 if (sbitmap_a_and_b (dominators[bb], dominators[bb], temp_bitmap[bb]))
6133 for (e = b->succ; e; e = e->succ_next)
6135 if (!e->dest->aux && e->dest != EXIT_BLOCK_PTR)
6137 *qin++ = e->dest;
6138 if (qin >= workend)
6139 qin = worklist;
6140 qlen++;
6142 e->dest->aux = e;
6149 if (post_dominators)
6151 /* The optimistic setting of dominators requires us to put every
6152 block on the work list initially. */
6153 qin = qout = worklist;
6154 for (bb = 0; bb < n_basic_blocks; bb++)
6156 *qin++ = BASIC_BLOCK (bb);
6157 BASIC_BLOCK (bb)->aux = BASIC_BLOCK (bb);
6159 qlen = n_basic_blocks;
6160 qin = worklist;
6162 /* We want a maximal solution, so initially assume everything post
6163 dominates everything else. */
6164 sbitmap_vector_ones (post_dominators, n_basic_blocks);
6166 /* Mark predecessors of the exit block so we can identify them below. */
6167 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
6168 e->src->aux = EXIT_BLOCK_PTR;
6170 /* Iterate until the worklist is empty. */
6171 while (qlen)
6173 /* Take the first entry off the worklist. */
6174 basic_block b = *qout++;
6175 if (qout >= workend)
6176 qout = worklist;
6177 qlen--;
6179 bb = b->index;
6181 /* Compute the intersection of the post dominators of all the
6182 successor blocks.
6184 If one of the successor blocks is the EXIT block, then the
6185 intersection of the dominators of the successor blocks is
6186 defined as the null set. We can identify such blocks by the
6187 special value in the AUX field in the block structure. */
6188 if (b->aux == EXIT_BLOCK_PTR)
6190 /* Do not clear the aux field for blocks which are
6191 predecessors of the EXIT block. That way we never
6192 add them to the worklist again.
6194 The intersect of dominators of the succs of this block is
6195 defined as the null set. */
6196 sbitmap_zero (temp_bitmap[bb]);
6198 else
6200 /* Clear the aux field of this block so it can be added to
6201 the worklist again if necessary. */
6202 b->aux = NULL;
6203 sbitmap_intersection_of_succs (temp_bitmap[bb],
6204 post_dominators, bb);
6207 /* Make sure each block always post dominates itself. */
6208 SET_BIT (temp_bitmap[bb], bb);
6210 /* If the out state of this block changed, then we need to
6211 add the predecessors of this block to the worklist if they
6212 are not already on the worklist. */
6213 if (sbitmap_a_and_b (post_dominators[bb],
6214 post_dominators[bb],
6215 temp_bitmap[bb]))
6217 for (e = b->pred; e; e = e->pred_next)
6219 if (!e->src->aux && e->src != ENTRY_BLOCK_PTR)
6221 *qin++ = e->src;
6222 if (qin >= workend)
6223 qin = worklist;
6224 qlen++;
6226 e->src->aux = e;
6233 free (worklist);
6234 free (temp_bitmap);
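/* A minimal usage sketch of the solver above (flow_loops_find below
   follows the same pattern): allocate one bit-vector per block,
   compute the relation, query it, and free it.  */
#if 0
  sbitmap *dom = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);

  compute_flow_dominators (dom, NULL);
  /* Now TEST_BIT (dom[b], a) is non-zero iff block A dominates block B.  */
  sbitmap_vector_free (dom);
#endif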
6237 /* Given DOMINATORS, compute the immediate dominators into IDOM. If a
6238 block dominates only itself, its entry remains as INVALID_BLOCK. */
6240 void
6241 compute_immediate_dominators (idom, dominators)
6242 int *idom;
6243 sbitmap *dominators;
6245 sbitmap *tmp;
6246 int b;
6248 tmp = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
6250 /* Begin with tmp(n) = dom(n) - { n }. */
6251 for (b = n_basic_blocks; --b >= 0;)
6253 sbitmap_copy (tmp[b], dominators[b]);
6254 RESET_BIT (tmp[b], b);
6257 /* Subtract out all of our dominators' dominators. */
6258 for (b = n_basic_blocks; --b >= 0;)
6260 sbitmap tmp_b = tmp[b];
6261 int s;
6263 for (s = n_basic_blocks; --s >= 0;)
6264 if (TEST_BIT (tmp_b, s))
6265 sbitmap_difference (tmp_b, tmp_b, tmp[s]);
6268 /* Find the one bit set in the bitmap and put it in the output array. */
6269 for (b = n_basic_blocks; --b >= 0;)
6271 int t;
6272 EXECUTE_IF_SET_IN_SBITMAP (tmp[b], 0, t, { idom[b] = t; });
6275 sbitmap_vector_free (tmp);
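/* A worked example on an assumed diamond CFG (ENTRY->0, 0->1, 0->2,
   1->3, 2->3): dom(0) = {0}, dom(1) = {0,1}, dom(2) = {0,2} and
   dom(3) = {0,3}.  Subtracting each dominator's dominators leaves
   tmp(1) = tmp(2) = tmp(3) = {0}, so idom[1] = idom[2] = idom[3] = 0,
   while idom[0] keeps its INVALID_BLOCK value since block 0 dominates
   only itself.  */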
6278 /* Given POSTDOMINATORS, compute the immediate postdominators into
6279 IDOM. If a block is postdominated only by itself, its entry remains as
6280 INVALID_BLOCK. */
6282 void
6283 compute_immediate_postdominators (idom, postdominators)
6284 int *idom;
6285 sbitmap *postdominators;
6287 compute_immediate_dominators (idom, postdominators);
6290 /* Recompute register set/reference counts immediately prior to register
6291 allocation.
6293 This avoids problems with set/reference counts changing to/from values
6294 which have special meanings to the register allocators.
6296 Additionally, the reference counts are the primary component used by the
6297 register allocators to prioritize pseudos for allocation to hard regs.
6298 More accurate reference counts generally lead to better register allocation.
6300 F is the first insn to be scanned.
6302 LOOP_STEP denotes how much loop_depth should be incremented per
6303 loop nesting level in order to increase the ref count more for
6304 references in a loop.
6306 It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and
6307 possibly other information which is used by the register allocators. */
6309 void
6310 recompute_reg_usage (f, loop_step)
6311 rtx f ATTRIBUTE_UNUSED;
6312 int loop_step ATTRIBUTE_UNUSED;
6314 allocate_reg_life_data ();
6315 update_life_info (NULL, UPDATE_LIFE_LOCAL, PROP_REG_INFO);
6318 /* Optionally removes all the REG_DEAD and REG_UNUSED notes from a set of
6319 blocks. If BLOCKS is NULL, assume the universal set. Returns a count
6320 of the number of registers that died. */
6322 int
6323 count_or_remove_death_notes (blocks, kill)
6324 sbitmap blocks;
6325 int kill;
6327 int i, count = 0;
6329 for (i = n_basic_blocks - 1; i >= 0; --i)
6331 basic_block bb;
6332 rtx insn;
6334 if (blocks && ! TEST_BIT (blocks, i))
6335 continue;
6337 bb = BASIC_BLOCK (i);
6339 for (insn = bb->head;; insn = NEXT_INSN (insn))
6341 if (INSN_P (insn))
6343 rtx *pprev = &REG_NOTES (insn);
6344 rtx link = *pprev;
6346 while (link)
6348 switch (REG_NOTE_KIND (link))
6350 case REG_DEAD:
6351 if (GET_CODE (XEXP (link, 0)) == REG)
6353 rtx reg = XEXP (link, 0);
6354 int n;
6356 if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
6357 n = 1;
6358 else
6359 n = HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg));
6360 count += n;
6362 /* Fall through. */
6364 case REG_UNUSED:
6365 if (kill)
6367 rtx next = XEXP (link, 1);
6368 free_EXPR_LIST_node (link);
6369 *pprev = link = next;
6370 break;
6372 /* Fall through. */
6374 default:
6375 pprev = &XEXP (link, 1);
6376 link = *pprev;
6377 break;
6382 if (insn == bb->end)
6383 break;
6387 return count;
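/* For example, count_or_remove_death_notes (NULL, 0) merely counts
   the registers that die somewhere in the function, while
   count_or_remove_death_notes (blocks, 1) also deletes the REG_DEAD
   and REG_UNUSED notes in the blocks whose bits are set in BLOCKS.  */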
6390 /* Record INSN's block as BB. */
6392 void
6393 set_block_for_insn (insn, bb)
6394 rtx insn;
6395 basic_block bb;
6397 size_t uid = INSN_UID (insn);
6398 if (uid >= basic_block_for_insn->num_elements)
6400 int new_size;
6402 /* Add one-eighth the size so we don't keep calling xrealloc. */
6403 new_size = uid + (uid + 7) / 8;
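/* For example, an insn with uid 800 grows the array to
   800 + (800 + 7) / 8 == 900 elements.  */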
6405 VARRAY_GROW (basic_block_for_insn, new_size);
6407 VARRAY_BB (basic_block_for_insn, uid) = bb;
6410 /* Record INSN's block number as BB. */
6411 /* ??? This has got to go. */
6413 void
6414 set_block_num (insn, bb)
6415 rtx insn;
6416 int bb;
6418 set_block_for_insn (insn, BASIC_BLOCK (bb));
6421 /* Verify the CFG consistency. This function checks some CFG invariants and
6422 aborts when something is wrong. The hope is that it will help to convert
6423 more optimization passes to keep the CFG consistent.
6425 Currently it does the following checks:
6427 - test head/end pointers
6428 - overlapping of basic blocks
6429 - edge list correctness
6430 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
6431 - tails of basic blocks (ensure that the boundary is necessary)
6432 - scans body of the basic block for JUMP_INSN, CODE_LABEL
6433 and NOTE_INSN_BASIC_BLOCK
6434 - check that all insns are in the basic blocks
6435 (except the switch handling code, barriers and notes)
6436 - check that all returns are followed by barriers
6438 In the future it can be extended to check a lot of other things as well
6439 (reachability of basic blocks, life information, etc.). */
6441 void
6442 verify_flow_info ()
6444 const int max_uid = get_max_uid ();
6445 const rtx rtx_first = get_insns ();
6446 rtx last_head = get_last_insn ();
6447 basic_block *bb_info;
6448 rtx x;
6449 int i, last_bb_num_seen, num_bb_notes, err = 0;
6451 bb_info = (basic_block *) xcalloc (max_uid, sizeof (basic_block));
6453 for (i = n_basic_blocks - 1; i >= 0; i--)
6455 basic_block bb = BASIC_BLOCK (i);
6456 rtx head = bb->head;
6457 rtx end = bb->end;
6459 /* Verify the end of the basic block is in the INSN chain. */
6460 for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
6461 if (x == end)
6462 break;
6463 if (!x)
6465 error ("End insn %d for block %d not found in the insn stream.",
6466 INSN_UID (end), bb->index);
6467 err = 1;
6470 /* Work backwards from the end to the head of the basic block
6471 to verify the head is in the RTL chain. */
6472 for (; x != NULL_RTX; x = PREV_INSN (x))
6474 /* While walking over the insn chain, verify insns appear
6475 in only one basic block and initialize the BB_INFO array
6476 used by other passes. */
6477 if (bb_info[INSN_UID (x)] != NULL)
6479 error ("Insn %d is in multiple basic blocks (%d and %d)",
6480 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
6481 err = 1;
6483 bb_info[INSN_UID (x)] = bb;
6485 if (x == head)
6486 break;
6488 if (!x)
6490 error ("Head insn %d for block %d not found in the insn stream.",
6491 INSN_UID (head), bb->index);
6492 err = 1;
6495 last_head = x;
6498 /* Now check the basic blocks (boundaries etc.). */
6499 for (i = n_basic_blocks - 1; i >= 0; i--)
6501 basic_block bb = BASIC_BLOCK (i);
6502 /* Check correctness of edge lists. */
6503 edge e;
6505 e = bb->succ;
6506 while (e)
6508 if (e->src != bb)
6510 fprintf (stderr,
6511 "verify_flow_info: Basic block %d succ edge is corrupted\n",
6512 bb->index);
6513 fprintf (stderr, "Predecessor: ");
6514 dump_edge_info (stderr, e, 0);
6515 fprintf (stderr, "\nSuccessor: ");
6516 dump_edge_info (stderr, e, 1);
6517 fflush (stderr);
6518 err = 1;
6520 if (e->dest != EXIT_BLOCK_PTR)
6522 edge e2 = e->dest->pred;
6523 while (e2 && e2 != e)
6524 e2 = e2->pred_next;
6525 if (!e2)
6527 error ("Basic block %i edge lists are corrupted", bb->index);
6528 err = 1;
6531 e = e->succ_next;
6534 e = bb->pred;
6535 while (e)
6537 if (e->dest != bb)
6539 error ("Basic block %d pred edge is corrupted", bb->index);
6540 fputs ("Predecessor: ", stderr);
6541 dump_edge_info (stderr, e, 0);
6542 fputs ("\nSuccessor: ", stderr);
6543 dump_edge_info (stderr, e, 1);
6544 fputc ('\n', stderr);
6545 err = 1;
6547 if (e->src != ENTRY_BLOCK_PTR)
6549 edge e2 = e->src->succ;
6550 while (e2 && e2 != e)
6551 e2 = e2->succ_next;
6552 if (!e2)
6554 error ("Basic block %i edge lists are corrupted", bb->index);
6555 err = 1;
6558 e = e->pred_next;
6561 /* OK, the pointers are correct. Now check the header of the basic
6562 block. It ought to contain an optional CODE_LABEL followed
6563 by a NOTE_INSN_BASIC_BLOCK note. */
6564 x = bb->head;
6565 if (GET_CODE (x) == CODE_LABEL)
6567 if (bb->end == x)
6569 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
6570 bb->index);
6571 err = 1;
6573 x = NEXT_INSN (x);
6575 if (!NOTE_INSN_BASIC_BLOCK_P (x) || NOTE_BASIC_BLOCK (x) != bb)
6577 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d\n",
6578 bb->index);
6579 err = 1;
6582 if (bb->end == x)
6584 /* Do checks for empty blocks here. */
6586 else
6588 x = NEXT_INSN (x);
6589 while (x)
6591 if (NOTE_INSN_BASIC_BLOCK_P (x))
6593 error ("NOTE_INSN_BASIC_BLOCK %d in the middle of basic block %d",
6594 INSN_UID (x), bb->index);
6595 err = 1;
6598 if (x == bb->end)
6599 break;
6601 if (GET_CODE (x) == JUMP_INSN
6602 || GET_CODE (x) == CODE_LABEL
6603 || GET_CODE (x) == BARRIER)
6605 error ("In basic block %d:", bb->index);
6606 fatal_insn ("Flow control insn inside a basic block", x);
6609 x = NEXT_INSN (x);
6614 last_bb_num_seen = -1;
6615 num_bb_notes = 0;
6616 x = rtx_first;
6617 while (x)
6619 if (NOTE_INSN_BASIC_BLOCK_P (x))
6621 basic_block bb = NOTE_BASIC_BLOCK (x);
6622 num_bb_notes++;
6623 if (bb->index != last_bb_num_seen + 1)
6624 fatal ("Basic blocks not numbered consecutively");
6625 last_bb_num_seen = bb->index;
6628 if (!bb_info[INSN_UID (x)])
6630 switch (GET_CODE (x))
6632 case BARRIER:
6633 case NOTE:
6634 break;
6636 case CODE_LABEL:
6637 /* An addr_vec is placed outside any basic block. */
6638 if (NEXT_INSN (x)
6639 && GET_CODE (NEXT_INSN (x)) == JUMP_INSN
6640 && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
6641 || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC))
6643 x = NEXT_INSN (x);
6646 /* But in any case, non-deletable labels can appear anywhere. */
6647 break;
6649 default:
6650 fatal_insn ("Insn outside basic block", x);
6654 if (INSN_P (x)
6655 && GET_CODE (x) == JUMP_INSN
6656 && returnjump_p (x) && ! condjump_p (x)
6657 && ! (NEXT_INSN (x) && GET_CODE (NEXT_INSN (x)) == BARRIER))
6658 fatal_insn ("Return not followed by barrier", x);
6660 x = NEXT_INSN (x);
6663 if (num_bb_notes != n_basic_blocks)
6664 fatal ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
6665 num_bb_notes, n_basic_blocks);
6667 if (err)
6668 abort ();
6670 /* Clean up. */
6671 free (bb_info);
6674 /* Functions to access an edge list with a vector representation.
6675 Enough data is kept such that given an index number, the
6676 pred and succ that edge represents can be determined, or
6677 given a pred and a succ, its index number can be returned.
6678 This allows algorithms that consume a lot of memory to
6679 represent the normally full matrix of edges (pred, succ) with a
6680 single indexed vector, edge (EDGE_INDEX (pred, succ)), with no
6681 wasted space in the client code due to sparse flow graphs. */
6683 /* This function initializes the edge list. Basically the entire
6684 flowgraph is processed, and all edges are assigned a number,
6685 and the data structure is filled in. */
6687 struct edge_list *
6688 create_edge_list ()
6690 struct edge_list *elist;
6691 edge e;
6692 int num_edges;
6693 int x;
6694 int block_count;
6696 block_count = n_basic_blocks + 2; /* Include the entry and exit blocks. */
6698 num_edges = 0;
6700 /* Determine the number of edges in the flow graph by counting successor
6701 edges on each basic block. */
6702 for (x = 0; x < n_basic_blocks; x++)
6704 basic_block bb = BASIC_BLOCK (x);
6706 for (e = bb->succ; e; e = e->succ_next)
6707 num_edges++;
6709 /* Don't forget successors of the entry block. */
6710 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6711 num_edges++;
6713 elist = (struct edge_list *) xmalloc (sizeof (struct edge_list));
6714 elist->num_blocks = block_count;
6715 elist->num_edges = num_edges;
6716 elist->index_to_edge = (edge *) xmalloc (sizeof (edge) * num_edges);
6718 num_edges = 0;
6720 /* Follow successors of the entry block, and register these edges. */
6721 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6723 elist->index_to_edge[num_edges] = e;
6724 num_edges++;
6727 for (x = 0; x < n_basic_blocks; x++)
6729 basic_block bb = BASIC_BLOCK (x);
6731 /* Follow all successors of blocks, and register these edges. */
6732 for (e = bb->succ; e; e = e->succ_next)
6734 elist->index_to_edge[num_edges] = e;
6735 num_edges++;
6738 return elist;
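/* A minimal usage sketch (assuming the NUM_EDGES and INDEX_EDGE_*
   accessors from basic-block.h): build the list, walk every edge by
   its index, then release the list.  */
#if 0
  struct edge_list *elist = create_edge_list ();
  int x;

  for (x = 0; x < NUM_EDGES (elist); x++)
    fprintf (stderr, "edge %d: %d -> %d\n", x,
             INDEX_EDGE_PRED_BB (elist, x)->index,
             INDEX_EDGE_SUCC_BB (elist, x)->index);
  free_edge_list (elist);
#endif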
6741 /* This function frees the memory associated with an edge list. */
6743 void
6744 free_edge_list (elist)
6745 struct edge_list *elist;
6747 if (elist)
6749 free (elist->index_to_edge);
6750 free (elist);
6754 /* This function provides debug output showing an edge list. */
6756 void
6757 print_edge_list (f, elist)
6758 FILE *f;
6759 struct edge_list *elist;
6761 int x;
6762 fprintf (f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
6763 elist->num_blocks - 2, elist->num_edges);
6765 for (x = 0; x < elist->num_edges; x++)
6767 fprintf (f, " %-4d - edge(", x);
6768 if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR)
6769 fprintf (f, "entry,");
6770 else
6771 fprintf (f, "%d,", INDEX_EDGE_PRED_BB (elist, x)->index);
6773 if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR)
6774 fprintf (f, "exit)\n");
6775 else
6776 fprintf (f, "%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
6780 /* This function provides an internal consistency check of an edge list,
6781 verifying that all edges are present, and that there are no
6782 extra edges. */
6784 void
6785 verify_edge_list (f, elist)
6786 FILE *f;
6787 struct edge_list *elist;
6789 int x, pred, succ, index;
6790 edge e;
6792 for (x = 0; x < n_basic_blocks; x++)
6794 basic_block bb = BASIC_BLOCK (x);
6796 for (e = bb->succ; e; e = e->succ_next)
6798 pred = e->src->index;
6799 succ = e->dest->index;
6800 index = EDGE_INDEX (elist, e->src, e->dest);
6801 if (index == EDGE_INDEX_NO_EDGE)
6803 fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
6804 continue;
6806 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
6807 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
6808 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
6809 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
6810 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
6811 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
6814 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6816 pred = e->src->index;
6817 succ = e->dest->index;
6818 index = EDGE_INDEX (elist, e->src, e->dest);
6819 if (index == EDGE_INDEX_NO_EDGE)
6821 fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
6822 continue;
6824 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
6825 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
6826 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
6827 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
6828 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
6829 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
6831 /* We've verified that all the edges are in the list; now let's make sure
6832 there are no spurious edges in the list. */
6834 for (pred = 0; pred < n_basic_blocks; pred++)
6835 for (succ = 0; succ < n_basic_blocks; succ++)
6837 basic_block p = BASIC_BLOCK (pred);
6838 basic_block s = BASIC_BLOCK (succ);
6840 int found_edge = 0;
6842 for (e = p->succ; e; e = e->succ_next)
6843 if (e->dest == s)
6845 found_edge = 1;
6846 break;
6848 for (e = s->pred; e; e = e->pred_next)
6849 if (e->src == p)
6851 found_edge = 1;
6852 break;
6854 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
6855 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6856 fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
6857 pred, succ);
6858 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
6859 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6860 fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
6861 pred, succ, EDGE_INDEX (elist, BASIC_BLOCK (pred),
6862 BASIC_BLOCK (succ)));
6864 for (succ = 0; succ < n_basic_blocks; succ++)
6866 basic_block p = ENTRY_BLOCK_PTR;
6867 basic_block s = BASIC_BLOCK (succ);
6869 int found_edge = 0;
6871 for (e = p->succ; e; e = e->succ_next)
6872 if (e->dest == s)
6874 found_edge = 1;
6875 break;
6877 for (e = s->pred; e; e = e->pred_next)
6878 if (e->src == p)
6880 found_edge = 1;
6881 break;
6883 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
6884 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6885 fprintf (f, "*** Edge (entry, %d) appears to not have an index\n",
6886 succ);
6887 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
6888 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6889 fprintf (f, "*** Edge (entry, %d) has index %d, but no edge exists\n",
6890 succ, EDGE_INDEX (elist, ENTRY_BLOCK_PTR,
6891 BASIC_BLOCK (succ)));
6893 for (pred = 0; pred < n_basic_blocks; pred++)
6895 basic_block p = BASIC_BLOCK (pred);
6896 basic_block s = EXIT_BLOCK_PTR;
6898 int found_edge = 0;
6900 for (e = p->succ; e; e = e->succ_next)
6901 if (e->dest == s)
6903 found_edge = 1;
6904 break;
6906 for (e = s->pred; e; e = e->pred_next)
6907 if (e->src == p)
6909 found_edge = 1;
6910 break;
6912 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
6913 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6914 fprintf (f, "*** Edge (%d, exit) appears to not have an index\n",
6915 pred);
6916 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
6917 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6918 fprintf (f, "*** Edge (%d, exit) has index %d, but no edge exists\n",
6919 pred, EDGE_INDEX (elist, BASIC_BLOCK (pred),
6920 EXIT_BLOCK_PTR));
6924 /* This routine will determine what, if any, edge there is between
6925 a specified predecessor and successor. */
6927 int
6928 find_edge_index (edge_list, pred, succ)
6929 struct edge_list *edge_list;
6930 basic_block pred, succ;
6932 int x;
6933 for (x = 0; x < NUM_EDGES (edge_list); x++)
6935 if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
6936 && INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
6937 return x;
6939 return (EDGE_INDEX_NO_EDGE);
6942 /* This function will remove an edge from the flow graph. */
6944 void
6945 remove_edge (e)
6946 edge e;
6948 edge last_pred = NULL;
6949 edge last_succ = NULL;
6950 edge tmp;
6951 basic_block src, dest;
6952 src = e->src;
6953 dest = e->dest;
6954 for (tmp = src->succ; tmp && tmp != e; tmp = tmp->succ_next)
6955 last_succ = tmp;
6957 if (!tmp)
6958 abort ();
6959 if (last_succ)
6960 last_succ->succ_next = e->succ_next;
6961 else
6962 src->succ = e->succ_next;
6964 for (tmp = dest->pred; tmp && tmp != e; tmp = tmp->pred_next)
6965 last_pred = tmp;
6967 if (!tmp)
6968 abort ();
6969 if (last_pred)
6970 last_pred->pred_next = e->pred_next;
6971 else
6972 dest->pred = e->pred_next;
6974 n_edges--;
6975 free (e);
6978 /* This routine will remove any fake successor edges for a basic block.
6979 When the edge is removed, it is also removed from whatever predecessor
6980 list it is in. */
6982 static void
6983 remove_fake_successors (bb)
6984 basic_block bb;
6986 edge e;
6987 for (e = bb->succ; e;)
6989 edge tmp = e;
6990 e = e->succ_next;
6991 if ((tmp->flags & EDGE_FAKE) == EDGE_FAKE)
6992 remove_edge (tmp);
6996 /* This routine will remove all fake edges from the flow graph. If
6997 we remove all fake successors, it will automatically remove all
6998 fake predecessors. */
7000 void
7001 remove_fake_edges ()
7003 int x;
7005 for (x = 0; x < n_basic_blocks; x++)
7006 remove_fake_successors (BASIC_BLOCK (x));
7008 /* We've handled all successors except the entry block's. */
7009 remove_fake_successors (ENTRY_BLOCK_PTR);
7012 /* This function will add a fake edge between any block which has no
7013 successors and the exit block. Some data flow equations require these
7014 edges to exist. */
7016 void
7017 add_noreturn_fake_exit_edges ()
7019 int x;
7021 for (x = 0; x < n_basic_blocks; x++)
7022 if (BASIC_BLOCK (x)->succ == NULL)
7023 make_edge (NULL, BASIC_BLOCK (x), EXIT_BLOCK_PTR, EDGE_FAKE);
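/* A typical calling pattern (a sketch; dataflow clients of this file
   do the equivalent): add the fake edges so every block reaches the
   exit, solve the equations, then undo the change.  */
#if 0
  add_noreturn_fake_exit_edges ();
  /* ... solve the backward data flow problem ...  */
  remove_fake_edges ();
#endif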
7026 /* This function adds a fake edge from any infinite loop to the
7027 exit block. Some optimizations require a path from each node to
7028 the exit node.
7030 See also Morgan, Figure 3.10, pp. 82-83.
7032 The current implementation is ugly, not attempting to minimize the
7033 number of inserted fake edges. To reduce the number of fake edges
7034 to insert, add fake edges from _innermost_ loops containing only
7035 nodes not reachable from the exit block. */
7037 void
7038 connect_infinite_loops_to_exit ()
7040 basic_block unvisited_block;
7042 /* Perform depth-first search in the reverse graph to find nodes
7043 reachable from the exit block. */
7044 struct depth_first_search_dsS dfs_ds;
7046 flow_dfs_compute_reverse_init (&dfs_ds);
7047 flow_dfs_compute_reverse_add_bb (&dfs_ds, EXIT_BLOCK_PTR);
7049 /* Repeatedly add fake edges, updating the unreachable nodes. */
7050 while (1)
7052 unvisited_block = flow_dfs_compute_reverse_execute (&dfs_ds);
7053 if (!unvisited_block)
7054 break;
7055 make_edge (NULL, unvisited_block, EXIT_BLOCK_PTR, EDGE_FAKE);
7056 flow_dfs_compute_reverse_add_bb (&dfs_ds, unvisited_block);
7059 flow_dfs_compute_reverse_finish (&dfs_ds);
7061 return;
7064 /* Redirect an edge's successor from one block to another. */
7066 void
7067 redirect_edge_succ (e, new_succ)
7068 edge e;
7069 basic_block new_succ;
7071 edge *pe;
7073 /* Disconnect the edge from the old successor block. */
7074 for (pe = &e->dest->pred; *pe != e; pe = &(*pe)->pred_next)
7075 continue;
7076 *pe = (*pe)->pred_next;
7078 /* Reconnect the edge to the new successor block. */
7079 e->pred_next = new_succ->pred;
7080 new_succ->pred = e;
7081 e->dest = new_succ;
7084 /* Redirect an edge's predecessor from one block to another. */
7086 void
7087 redirect_edge_pred (e, new_pred)
7088 edge e;
7089 basic_block new_pred;
7091 edge *pe;
7093 /* Disconnect the edge from the old predecessor block. */
7094 for (pe = &e->src->succ; *pe != e; pe = &(*pe)->succ_next)
7095 continue;
7096 *pe = (*pe)->succ_next;
7098 /* Reconnect the edge to the new predecessor block. */
7099 e->succ_next = new_pred->succ;
7100 new_pred->succ = e;
7101 e->src = new_pred;
7104 /* Dump the list of basic blocks in the bitmap NODES. */
7106 static void
7107 flow_nodes_print (str, nodes, file)
7108 const char *str;
7109 const sbitmap nodes;
7110 FILE *file;
7112 int node;
7114 fprintf (file, "%s { ", str);
7115 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {fprintf (file, "%d ", node);});
7116 fputs ("}\n", file);
7119 /* Dump the list of exiting edges in the array EDGES. */
7121 static void
7122 flow_exits_print (str, edges, num_edges, file)
7123 const char *str;
7124 const edge *edges;
7125 int num_edges;
7126 FILE *file;
7128 int i;
7130 fprintf (file, "%s { ", str);
7131 for (i = 0; i < num_edges; i++)
7132 fprintf (file, "%d->%d ", edges[i]->src->index, edges[i]->dest->index);
7133 fputs ("}\n", file);
7136 /* Dump loop related CFG information. */
7138 static void
7139 flow_loops_cfg_dump (loops, file)
7140 const struct loops *loops;
7141 FILE *file;
7143 int i;
7145 if (! loops->num || ! file || ! loops->cfg.dom)
7146 return;
7148 for (i = 0; i < n_basic_blocks; i++)
7150 edge succ;
7152 fprintf (file, ";; %d succs { ", i);
7153 for (succ = BASIC_BLOCK (i)->succ; succ; succ = succ->succ_next)
7154 fprintf (file, "%d ", succ->dest->index);
7155 flow_nodes_print ("} dom", loops->cfg.dom[i], file);
7158 /* Dump the DFS node order. */
7159 if (loops->cfg.dfs_order)
7161 fputs (";; DFS order: ", file);
7162 for (i = 0; i < n_basic_blocks; i++)
7163 fprintf (file, "%d ", loops->cfg.dfs_order[i]);
7164 fputs ("\n", file);
7166 /* Dump the reverse completion node order. */
7167 if (loops->cfg.rc_order)
7169 fputs (";; RC order: ", file);
7170 for (i = 0; i < n_basic_blocks; i++)
7171 fprintf (file, "%d ", loops->cfg.rc_order[i]);
7172 fputs ("\n", file);
7176 /* Return non-zero if the nodes of LOOP are a subset of OUTER. */
7178 static int
7179 flow_loop_nested_p (outer, loop)
7180 struct loop *outer;
7181 struct loop *loop;
7183 return sbitmap_a_subset_b_p (loop->nodes, outer->nodes);
7186 /* Dump the loop information specified by LOOPS to the stream FILE. */
7188 void
7189 flow_loops_dump (loops, file, verbose)
7190 const struct loops *loops;
7191 FILE *file;
7192 int verbose;
7194 int i;
7195 int num_loops;
7197 num_loops = loops->num;
7198 if (! num_loops || ! file)
7199 return;
7201 fprintf (file, ";; %d loops found, %d levels\n",
7202 num_loops, loops->levels);
7204 for (i = 0; i < num_loops; i++)
7206 struct loop *loop = &loops->array[i];
7208 fprintf (file, ";; loop %d (%d to %d):\n;; header %d, latch %d, pre-header %d, depth %d, level %d, outer %ld\n",
7209 i, INSN_UID (loop->header->head), INSN_UID (loop->latch->end),
7210 loop->header->index, loop->latch->index,
7211 loop->pre_header ? loop->pre_header->index : -1,
7212 loop->depth, loop->level,
7213 (long) (loop->outer ? (loop->outer - loops->array) : -1));
7214 fprintf (file, ";; %d", loop->num_nodes);
7215 flow_nodes_print (" nodes", loop->nodes, file);
7216 fprintf (file, ";; %d", loop->num_exits);
7217 flow_exits_print (" exits", loop->exits, loop->num_exits, file);
7219 if (loop->shared)
7221 int j;
7223 for (j = 0; j < i; j++)
7225 struct loop *oloop = &loops->array[j];
7227 if (loop->header == oloop->header)
7229 int disjoint;
7230 int smaller;
7232 smaller = loop->num_nodes < oloop->num_nodes;
7234 /* If the union of LOOP and OLOOP is different from
7235 the larger of LOOP and OLOOP then LOOP and OLOOP
7236 must be disjoint. */
7237 disjoint = ! flow_loop_nested_p (smaller ? loop : oloop,
7238 smaller ? oloop : loop);
7239 fprintf (file,
7240 ";; loop header %d shared by loops %d, %d %s\n",
7241 loop->header->index, i, j,
7242 disjoint ? "disjoint" : "nested");
7247 if (verbose)
7249 /* Print diagnostics to compare our concept of a loop with
7250 what the loop notes say. */
7251 if (GET_CODE (PREV_INSN (loop->first->head)) != NOTE
7252 || NOTE_LINE_NUMBER (PREV_INSN (loop->first->head))
7253 != NOTE_INSN_LOOP_BEG)
7254 fprintf (file, ";; No NOTE_INSN_LOOP_BEG at %d\n",
7255 INSN_UID (PREV_INSN (loop->first->head)));
7256 if (GET_CODE (NEXT_INSN (loop->last->end)) != NOTE
7257 || NOTE_LINE_NUMBER (NEXT_INSN (loop->last->end))
7258 != NOTE_INSN_LOOP_END)
7259 fprintf (file, ";; No NOTE_INSN_LOOP_END at %d\n",
7260 INSN_UID (NEXT_INSN (loop->last->end)));
7264 if (verbose)
7265 flow_loops_cfg_dump (loops, file);
7268 /* Free all the memory allocated for LOOPS. */
7270 void
7271 flow_loops_free (loops)
7272 struct loops *loops;
7274 if (loops->array)
7276 int i;
7278 if (! loops->num)
7279 abort ();
7281 /* Free the loop descriptors. */
7282 for (i = 0; i < loops->num; i++)
7284 struct loop *loop = &loops->array[i];
7286 if (loop->nodes)
7287 sbitmap_free (loop->nodes);
7288 if (loop->exits)
7289 free (loop->exits);
7291 free (loops->array);
7292 loops->array = NULL;
7294 if (loops->cfg.dom)
7295 sbitmap_vector_free (loops->cfg.dom);
7296 if (loops->cfg.dfs_order)
7297 free (loops->cfg.dfs_order);
7299 sbitmap_free (loops->shared_headers);
7303 /* Find the exits from the loop using the bitmap of loop nodes NODES
7304 and store them in the EXITS array. Return the number of exits from the
7305 loop. */
7307 static int
7308 flow_loop_exits_find (nodes, exits)
7309 const sbitmap nodes;
7310 edge **exits;
7312 edge e;
7313 int node;
7314 int num_exits;
7316 *exits = NULL;
7318 /* Check all nodes within the loop to see if there are any
7319 successors not in the loop. Note that a node may have multiple
7320 exiting edges. */
7321 num_exits = 0;
7322 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
7323 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
7325 basic_block dest = e->dest;
7327 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
7328 num_exits++;
7332 if (! num_exits)
7333 return 0;
7335 *exits = (edge *) xmalloc (num_exits * sizeof (edge *));
7337 /* Store all exiting edges into an array. */
7338 num_exits = 0;
7339 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
7340 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
7342 basic_block dest = e->dest;
7344 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
7345 (*exits)[num_exits++] = e;
7349 return num_exits;
7352 /* Find the nodes contained within the loop with header HEADER and
7353 latch LATCH and store in NODES. Return the number of nodes within
7354 the loop. */
7356 static int
7357 flow_loop_nodes_find (header, latch, nodes)
7358 basic_block header;
7359 basic_block latch;
7360 sbitmap nodes;
7362 basic_block *stack;
7363 int sp;
7364 int num_nodes = 0;
7366 stack = (basic_block *) xmalloc (n_basic_blocks * sizeof (basic_block));
7367 sp = 0;
7369 /* Start with only the loop header in the set of loop nodes. */
7370 sbitmap_zero (nodes);
7371 SET_BIT (nodes, header->index);
7372 num_nodes++;
7373 header->loop_depth++;
7375 /* Push the loop latch onto the stack. */
7376 if (! TEST_BIT (nodes, latch->index))
7378 SET_BIT (nodes, latch->index);
7379 latch->loop_depth++;
7380 num_nodes++;
7381 stack[sp++] = latch;
7384 while (sp)
7386 basic_block node;
7387 edge e;
7389 node = stack[--sp];
7390 for (e = node->pred; e; e = e->pred_next)
7392 basic_block ancestor = e->src;
7394 /* If this ancestor has not already been marked as part of the
7395 loop, add it to the set of loop nodes and push it onto the stack. */
7396 if (ancestor != ENTRY_BLOCK_PTR
7397 && ! TEST_BIT (nodes, ancestor->index))
7399 SET_BIT (nodes, ancestor->index);
7400 ancestor->loop_depth++;
7401 num_nodes++;
7402 stack[sp++] = ancestor;
7406 free (stack);
7407 return num_nodes;
7410 /* Compute the depth first search order and store in the array
7411 DFS_ORDER if non-zero, marking the visited nodes in a local bitmap. If
7412 RC_ORDER is non-zero, return the reverse completion number for each
7413 node. Returns the number of nodes visited. A depth first search
7414 tries to get as far away from the starting point as quickly as
7415 possible. */
7417 static int
7418 flow_depth_first_order_compute (dfs_order, rc_order)
7419 int *dfs_order;
7420 int *rc_order;
7422 edge *stack;
7423 int sp;
7424 int dfsnum = 0;
7425 int rcnum = n_basic_blocks - 1;
7426 sbitmap visited;
7428 /* Allocate stack for back-tracking up CFG. */
7429 stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
7430 sp = 0;
7432 /* Allocate bitmap to track nodes that have been visited. */
7433 visited = sbitmap_alloc (n_basic_blocks);
7435 /* None of the nodes in the CFG have been visited yet. */
7436 sbitmap_zero (visited);
7438 /* Push the first edge on to the stack. */
7439 stack[sp++] = ENTRY_BLOCK_PTR->succ;
7441 while (sp)
7443 edge e;
7444 basic_block src;
7445 basic_block dest;
7447 /* Look at the edge on the top of the stack. */
7448 e = stack[sp - 1];
7449 src = e->src;
7450 dest = e->dest;
7452 /* Check if the edge destination has been visited yet. */
7453 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
7455 /* Mark that we have visited the destination. */
7456 SET_BIT (visited, dest->index);
7458 if (dfs_order)
7459 dfs_order[dfsnum++] = dest->index;
7461 if (dest->succ)
7463 /* Since the DEST node has been visited for the first
7464 time, check its successors. */
7465 stack[sp++] = dest->succ;
7467 else
7469 /* There are no successors for the DEST node so assign
7470 its reverse completion number. */
7471 if (rc_order)
7472 rc_order[rcnum--] = dest->index;
7475 else
7477 if (! e->succ_next && src != ENTRY_BLOCK_PTR)
7479 /* There are no more successors for the SRC node
7480 so assign its reverse completion number. */
7481 if (rc_order)
7482 rc_order[rcnum--] = src->index;
7485 if (e->succ_next)
7486 stack[sp - 1] = e->succ_next;
7487 else
7488 sp--;
7492 free (stack);
7493 sbitmap_free (visited);
7495 /* The number of nodes visited should not be greater than
7496 n_basic_blocks. */
7497 if (dfsnum > n_basic_blocks)
7498 abort ();
7500 /* If fewer nodes were visited, some nodes in the CFG are unreachable, which should not happen at this point. */
7501 if (dfsnum < n_basic_blocks)
7502 abort ();
7503 return dfsnum;
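/* A worked example on an assumed diamond CFG (ENTRY->0, 0->1, 0->2,
   1->3, 2->3, with the successor 0->1 listed before 0->2): the walk
   visits 0, 1, 3, 2, so DFS_ORDER becomes {0, 1, 3, 2}, while the
   completion numbers yield RC_ORDER {0, 2, 1, 3}, which for an
   acyclic CFG is a topological order.  */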
7506 /* Perform a depth first search on the _reverse_ graph, keeping
7507 track of which nodes have been visited so that the unvisited
7508 (i.e. unreachable) nodes can be identified.
7510 The computation is split into three pieces:
7512 flow_dfs_compute_reverse_init () creates the necessary data
7513 structures.
7515 flow_dfs_compute_reverse_add_bb () adds a basic block to the data
7516 structures. The block will start the search.
7518 flow_dfs_compute_reverse_execute () continues (or starts) the
7519 search using the block on the top of the stack, stopping when the
7520 stack is empty.
7522 flow_dfs_compute_reverse_finish () destroys the necessary data
7523 structures.
7525 Thus, the user will probably call ..._init(), call ..._add_bb() to
7526 add a beginning basic block to the stack, call ..._execute(),
7527 possibly add another bb to the stack and again call ..._execute(),
7528 ..., and finally call _finish(). */
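/* A minimal sketch of that calling sequence (essentially what
   connect_infinite_loops_to_exit above does, minus the fake edges):  */
#if 0
  struct depth_first_search_dsS dfs_ds;
  basic_block bb;

  flow_dfs_compute_reverse_init (&dfs_ds);
  flow_dfs_compute_reverse_add_bb (&dfs_ds, EXIT_BLOCK_PTR);
  while ((bb = flow_dfs_compute_reverse_execute (&dfs_ds)) != NULL)
    flow_dfs_compute_reverse_add_bb (&dfs_ds, bb);
  flow_dfs_compute_reverse_finish (&dfs_ds);
#endif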
7530 /* Initialize the data structures used for depth-first search on the
7531 reverse graph. DATA is the depth-first search context to be
7532 initialized; the block stack starts out empty and no nodes are
7533 marked as visited yet. */
7536 static void
7537 flow_dfs_compute_reverse_init (data)
7538 depth_first_search_ds data;
7540 /* Allocate stack for back-tracking up CFG. */
7541 data->stack =
7542 (basic_block *) xmalloc ((n_basic_blocks - (INVALID_BLOCK + 1))
7543 * sizeof (basic_block));
7544 data->sp = 0;
7546 /* Allocate bitmap to track nodes that have been visited. */
7547 data->visited_blocks = sbitmap_alloc (n_basic_blocks - (INVALID_BLOCK + 1));
7549 /* None of the nodes in the CFG have been visited yet. */
7550 sbitmap_zero (data->visited_blocks);
7552 return;
7555 /* Add the specified basic block to the top of the dfs data
7556 structures. When the search continues, it will start at the
7557 block. */
7559 static void
7560 flow_dfs_compute_reverse_add_bb (data, bb)
7561 depth_first_search_ds data;
7562 basic_block bb;
7564 data->stack[data->sp++] = bb;
7565 return;
7568 /* Continue the depth-first search through the reverse graph starting
7569 with the block at the stack's top and ending when the stack is
7570 empty. Visited nodes are marked. Returns an unvisited basic
7571 block, or NULL if there is none available. */
7573 static basic_block
7574 flow_dfs_compute_reverse_execute (data)
7575 depth_first_search_ds data;
7577 basic_block bb;
7578 edge e;
7579 int i;
7581 while (data->sp > 0)
7583 bb = data->stack[--data->sp];
7585 /* Mark that we have visited this node. */
7586 if (!TEST_BIT (data->visited_blocks, bb->index - (INVALID_BLOCK + 1)))
7588 SET_BIT (data->visited_blocks, bb->index - (INVALID_BLOCK + 1));
7590 /* Perform depth-first search on adjacent vertices. */
7591 for (e = bb->pred; e; e = e->pred_next)
7592 flow_dfs_compute_reverse_add_bb (data, e->src);
7596 /* Determine if there are unvisited basic blocks. */
7597 for (i = n_basic_blocks - (INVALID_BLOCK + 1); --i >= 0;)
7598 if (!TEST_BIT (data->visited_blocks, i))
7599 return BASIC_BLOCK (i + (INVALID_BLOCK + 1));
7600 return NULL;
7603 /* Destroy the data structures needed for depth-first search on the
7604 reverse graph. */
7606 static void
7607 flow_dfs_compute_reverse_finish (data)
7608 depth_first_search_ds data;
7610 free (data->stack);
7611 sbitmap_free (data->visited_blocks);
7612 return;
7615 /* Return the block for the pre-header of the loop with header
7616 HEADER where DOM specifies the dominator information. Return NULL if
7617 there is no pre-header. */
7619 static basic_block
7620 flow_loop_pre_header_find (header, dom)
7621 basic_block header;
7622 const sbitmap *dom;
7624 basic_block pre_header;
7625 edge e;
7627 /* If block P is a predecessor of the header and is the only
7628 predecessor that the header does not dominate, then it is the pre-header. */
7629 pre_header = NULL;
7630 for (e = header->pred; e; e = e->pred_next)
7632 basic_block node = e->src;
7634 if (node != ENTRY_BLOCK_PTR
7635 && ! TEST_BIT (dom[node->index], header->index))
7637 if (pre_header == NULL)
7638 pre_header = node;
7639 else
7641 /* There are multiple edges into the header from outside
7642 the loop so there is no pre-header block. */
7643 pre_header = NULL;
7644 break;
7648 return pre_header;
7651 /* Add LOOP to the loop hierarchy tree where PREVLOOP was the loop
7652 previously added. The insertion algorithm assumes that the loops
7653 are added in the order found by a depth first search of the CFG. */
7655 static void
7656 flow_loop_tree_node_add (prevloop, loop)
7657 struct loop *prevloop;
7658 struct loop *loop;
7661 if (flow_loop_nested_p (prevloop, loop))
7663 prevloop->inner = loop;
7664 loop->outer = prevloop;
7665 return;
7668 while (prevloop->outer)
7670 if (flow_loop_nested_p (prevloop->outer, loop))
7672 prevloop->next = loop;
7673 loop->outer = prevloop->outer;
7674 return;
7676 prevloop = prevloop->outer;
7679 prevloop->next = loop;
7680 loop->outer = NULL;
7683 /* Build the loop hierarchy tree for LOOPS. */
7685 static void
7686 flow_loops_tree_build (loops)
7687 struct loops *loops;
7689 int i;
7690 int num_loops;
7692 num_loops = loops->num;
7693 if (! num_loops)
7694 return;
7696 /* Root the loop hierarchy tree with the first loop found.
7697 Since we used a depth first search this should be the
7698 outermost loop. */
7699 loops->tree = &loops->array[0];
7700 loops->tree->outer = loops->tree->inner = loops->tree->next = NULL;
7702 /* Add the remaining loops to the tree. */
7703 for (i = 1; i < num_loops; i++)
7704 flow_loop_tree_node_add (&loops->array[i - 1], &loops->array[i]);
7707 /* Helper function to compute loop nesting depth and enclosed loop level
7708 for the natural loop specified by LOOP at the loop depth DEPTH.
7709 Returns the loop level. */
7711 static int
7712 flow_loop_level_compute (loop, depth)
7713 struct loop *loop;
7714 int depth;
7716 struct loop *inner;
7717 int level = 1;
7719 if (! loop)
7720 return 0;
7722 /* Traverse loop tree assigning depth and computing level as the
7723 maximum level of all the inner loops of this loop. The loop
7724 level is equivalent to the height of the loop in the loop tree
7725 and corresponds to the number of enclosed loop levels (including
7726 itself). */
7727 for (inner = loop->inner; inner; inner = inner->next)
7729 int ilevel;
7731 ilevel = flow_loop_level_compute (inner, depth + 1) + 1;
7733 if (ilevel > level)
7734 level = ilevel;
7736 loop->level = level;
7737 loop->depth = depth;
7738 return level;
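/* For example, in an assumed nest of two loops the inner loop ends
   up with depth 2 and level 1, and the outer loop with depth 1 and
   level 2, since a loop's level is one more than the maximum level
   of the loops it encloses.  */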
7741 /* Compute the loop nesting depth and enclosed loop level for the loop
7742 hierarchy tree specified by LOOPS. Return the maximum enclosed loop
7743 level. */
7745 static int
7746 flow_loops_level_compute (loops)
7747 struct loops *loops;
7749 struct loop *loop;
7750 int level;
7751 int levels = 0;
7753 /* Traverse all the outer level loops. */
7754 for (loop = loops->tree; loop; loop = loop->next)
7756 level = flow_loop_level_compute (loop, 1);
7757 if (level > levels)
7758 levels = level;
7760 return levels;
7763 /* Find all the natural loops in the function, save them in the LOOPS
7764 structure, and recalculate the loop_depth information in the basic
7765 block structures. Return the number of natural loops found. */
7767 int
7768 flow_loops_find (loops)
7769 struct loops *loops;
7771 int i;
7772 int b;
7773 int num_loops;
7774 edge e;
7775 sbitmap headers;
7776 sbitmap *dom;
7777 int *dfs_order;
7778 int *rc_order;
7780 loops->num = 0;
7781 loops->array = NULL;
7782 loops->tree = NULL;
7783 dfs_order = NULL;
7784 rc_order = NULL;
7786 /* Taking care of this degenerate case makes the rest of
7787 this code simpler. */
7788 if (n_basic_blocks == 0)
7789 return 0;
7791 /* Compute the dominators. */
7792 dom = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
7793 compute_flow_dominators (dom, NULL);
7795 /* Count the number of loop edges (back edges). This should be the
7796 same as the number of natural loops. Also clear the loop_depth
7797 fields; as we work from inner to outer loops in a nest we call
7798 flow_loop_nodes_find, which will increment loop_depth for nodes
7799 within the current loop, which happens to enclose inner loops. */
7801 num_loops = 0;
7802 for (b = 0; b < n_basic_blocks; b++)
7804 BASIC_BLOCK (b)->loop_depth = 0;
7805 for (e = BASIC_BLOCK (b)->pred; e; e = e->pred_next)
7807 basic_block latch = e->src;
7809 /* Look for back edges where a predecessor is dominated
7810 by this block. A natural loop has a single entry
7811 node (header) that dominates all the nodes in the
7812 loop. It also has a single back edge to the header
7813 from a latch node. Note that multiple natural loops
7814 may share the same header. */
7815 if (latch != ENTRY_BLOCK_PTR && TEST_BIT (dom[latch->index], b))
7816 num_loops++;
7820 if (num_loops)
7822 /* Compute depth first search order of the CFG so that outer
7823 natural loops will be found before inner natural loops. */
7824 dfs_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
7825 rc_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
7826 flow_depth_first_order_compute (dfs_order, rc_order);
7828 /* Allocate loop structures. */
7829 loops->array
7830 = (struct loop *) xcalloc (num_loops, sizeof (struct loop));
7832 headers = sbitmap_alloc (n_basic_blocks);
7833 sbitmap_zero (headers);
7835 loops->shared_headers = sbitmap_alloc (n_basic_blocks);
7836 sbitmap_zero (loops->shared_headers);
7838 /* Find and record information about all the natural loops
7839 in the CFG. */
7840 num_loops = 0;
7841 for (b = 0; b < n_basic_blocks; b++)
7843 basic_block header;
7845 /* Search the nodes of the CFG in reverse completion order so that
7846 we can find outer loops first. */
7847 header = BASIC_BLOCK (rc_order[b]);
7849 /* Look for all the possible latch blocks for this header. */
7850 for (e = header->pred; e; e = e->pred_next)
7852 basic_block latch = e->src;
7854 /* Look for back edges where a predecessor is dominated
7855 by this block. A natural loop has a single entry
7856 node (header) that dominates all the nodes in the
7857 loop. It also has a single back edge to the header
7858 from a latch node. Note that multiple natural loops
7859 may share the same header. */
7860 if (latch != ENTRY_BLOCK_PTR
7861 && TEST_BIT (dom[latch->index], header->index))
7863 struct loop *loop;
7865 loop = loops->array + num_loops;
7867 loop->header = header;
7868 loop->latch = latch;
7869 loop->num = num_loops;
7871 /* Keep track of blocks that are loop headers so
7872 that we can tell which loops should be merged. */
7873 if (TEST_BIT (headers, header->index))
7874 SET_BIT (loops->shared_headers, header->index);
7875 SET_BIT (headers, header->index);
7877 /* Find nodes contained within the loop. */
7878 loop->nodes = sbitmap_alloc (n_basic_blocks);
7879 loop->num_nodes
7880 = flow_loop_nodes_find (header, latch, loop->nodes);
7882 /* Compute first and last blocks within the loop.
7883 These are often the same as the loop header and
7884 loop latch respectively, but this is not always
7885 the case. */
7886 loop->first
7887 = BASIC_BLOCK (sbitmap_first_set_bit (loop->nodes));
7888 loop->last
7889 = BASIC_BLOCK (sbitmap_last_set_bit (loop->nodes));
7891 /* Find edges which exit the loop. Note that a node
7892 may have several exit edges. */
7893 loop->num_exits
7894 = flow_loop_exits_find (loop->nodes, &loop->exits);
7896 /* Look to see if the loop has a pre-header node. */
7897 loop->pre_header = flow_loop_pre_header_find (header, dom);
7899 num_loops++;
7904 /* Natural loops with shared headers may either be disjoint or
7905 nested. Disjoint loops with shared headers cannot be inner
7906 loops and should be merged. For now just mark loops that share
7907 headers. */
7908 for (i = 0; i < num_loops; i++)
7909 if (TEST_BIT (loops->shared_headers, loops->array[i].header->index))
7910 loops->array[i].shared = 1;
7912 sbitmap_free (headers);
7915 loops->num = num_loops;
7917 /* Save CFG derived information to avoid recomputing it. */
7918 loops->cfg.dom = dom;
7919 loops->cfg.dfs_order = dfs_order;
7920 loops->cfg.rc_order = rc_order;
7922 /* Build the loop hierarchy tree. */
7923 flow_loops_tree_build (loops);
7925 /* Assign the loop nesting depth and enclosed loop level for each
7926 loop. */
7927 loops->levels = flow_loops_level_compute (loops);
7929 return num_loops;
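/* A minimal usage sketch: discover the loops, optionally dump them,
   and release everything when done.  */
#if 0
  struct loops loops;

  if (flow_loops_find (&loops) > 0)
    {
      flow_loops_dump (&loops, stderr, 1);
      flow_loops_free (&loops);
    }
#endif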
7932 /* Return non-zero if edge E enters the header of LOOP from outside of LOOP. */
7934 int
7935 flow_loop_outside_edge_p (loop, e)
7936 const struct loop *loop;
7937 edge e;
7939 if (e->dest != loop->header)
7940 abort ();
7941 return (e->src == ENTRY_BLOCK_PTR)
7942 || ! TEST_BIT (loop->nodes, e->src->index);
7945 /* Clear LOG_LINKS fields of insns in a chain.
7946 Also clear the global_live_at_{start,end} fields of the basic block
7947 structures. */
7949 void
7950 clear_log_links (insns)
7951 rtx insns;
7953 rtx i;
7954 int b;
7956 for (i = insns; i; i = NEXT_INSN (i))
7957 if (INSN_P (i))
7958 LOG_LINKS (i) = 0;
7960 for (b = 0; b < n_basic_blocks; b++)
7962 basic_block bb = BASIC_BLOCK (b);
7964 bb->global_live_at_start = NULL;
7965 bb->global_live_at_end = NULL;
7968 ENTRY_BLOCK_PTR->global_live_at_end = NULL;
7969 EXIT_BLOCK_PTR->global_live_at_start = NULL;
7972 /* Given a register bitmap, turn on the bits in a HARD_REG_SET that
7973 correspond to the hard registers, if any, set in that map. This
7974 could be done far more efficiently by having all sorts of special-cases
7975 with moving single words, but probably isn't worth the trouble. */
7977 void
7978 reg_set_to_hard_reg_set (to, from)
7979 HARD_REG_SET *to;
7980 bitmap from;
7982 int i;
7984 EXECUTE_IF_SET_IN_BITMAP
7985 (from, 0, i,
7987 if (i >= FIRST_PSEUDO_REGISTER)
7988 return;
7989 SET_HARD_REG_BIT (*to, i);