/* Data flow analysis for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
/* This file contains the data flow analysis pass of the compiler.  It
   computes data flow information which tells combine_instructions
   which insns to consider combining and controls register allocation.

   Additional data flow information that is too bulky to record is
   generated during the analysis, and is used at that time to create
   autoincrement and autodecrement addressing.

   The first step is dividing the function into basic blocks.
   find_basic_blocks does this.  Then life_analysis determines
   where each register is live and where it is dead.

   ** find_basic_blocks **

   find_basic_blocks divides the current function's rtl into basic
   blocks and constructs the CFG.  The blocks are recorded in the
   basic_block_info array; the CFG exists in the edge structures
   referenced by the blocks.

   find_basic_blocks also finds any unreachable loops and deletes them.

   ** life_analysis **

   life_analysis is called immediately after find_basic_blocks.
   It uses the basic block information to determine where each
   hard or pseudo register is live.

   ** live-register info **

   The information about where each register is live is in two parts:
   the REG_NOTES of insns, and the vector basic_block->global_live_at_start.

   basic_block->global_live_at_start has an element for each basic
   block, and the element is a bit-vector with a bit for each hard or
   pseudo register.  The bit is 1 if the register is live at the
   beginning of the basic block.

   Two types of elements can be added to an insn's REG_NOTES.
   A REG_DEAD note is added to an insn's REG_NOTES for any register
   that meets both of two conditions:  The value in the register is not
   needed in subsequent insns and the insn does not replace the value in
   the register (in the case of multi-word hard registers, the value in
   each register must be replaced by the insn to avoid a REG_DEAD note).

   In the vast majority of cases, an object in a REG_DEAD note will be
   used somewhere in the insn.  The (rare) exception to this is if an
   insn uses a multi-word hard register and only some of the registers are
   needed in subsequent insns.  In that case, REG_DEAD notes will be
   provided for those hard registers that are not subsequently needed.
   Partial REG_DEAD notes of this type do not occur when an insn sets
   only some of the hard registers used in such a multi-word operand;
   omitting REG_DEAD notes for objects stored in an insn is optional and
   the desire to do so does not justify the complexity of the partial
   REG_DEAD notes.

   REG_UNUSED notes are added for each register that is set by the insn
   but is unused subsequently (if every register set by the insn is unused
   and the insn does not reference memory or have some other side-effect,
   the insn is deleted instead).  If only part of a multi-word hard
   register is used in a subsequent insn, REG_UNUSED notes are made for
   the parts that will not be used.

   To determine which registers are live after any insn, one can
   start from the beginning of the basic block and scan insns, noting
   which registers are set by each insn and which die there.

   ** Other actions of life_analysis **

   life_analysis sets up the LOG_LINKS fields of insns because the
   information needed to do so is readily available.

   life_analysis deletes insns whose only effect is to store a value
   that is never used.

   life_analysis notices cases where a reference to a register as
   a memory address can be combined with a preceding or following
   incrementation or decrementation of the register.  The separate
   instruction to increment or decrement is deleted and the address
   is changed to a POST_INC or similar rtx.

   Each time an incrementing or decrementing address is created,
   a REG_INC element is added to the insn's REG_NOTES list.

   life_analysis fills in certain vectors containing information about
   register usage: REG_N_REFS, REG_N_DEATHS, REG_N_SETS, REG_LIVE_LENGTH,
   REG_N_CALLS_CROSSED and REG_BASIC_BLOCK.

   life_analysis sets current_function_sp_is_unchanging if the function
   doesn't modify the stack pointer.  */
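
/* An illustrative sketch (not part of the pass, compiled out): given the
   REG_DEAD and REG_UNUSED notes described above, a consumer can maintain
   the set of registers live after INSN while scanning a block from its
   head.  LIVE is assumed to start as a copy of bb->global_live_at_start;
   the helper names here are hypothetical and shown only as an example.  */
#if 0
static void
mark_one_set (dest, set, data)
     rtx dest;
     rtx set ATTRIBUTE_UNUSED;
     void *data;
{
  /* Registers stored by the insn become live.  */
  if (GET_CODE (dest) == SUBREG)
    dest = SUBREG_REG (dest);
  if (GET_CODE (dest) == REG)
    SET_REGNO_REG_SET ((regset) data, REGNO (dest));
}

static void
update_live_after_insn (insn, live)
     rtx insn;
     regset live;
{
  rtx note;

  /* Registers mentioned in REG_DEAD notes die at INSN.  */
  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_DEAD
	&& GET_CODE (XEXP (note, 0)) == REG)
      CLEAR_REGNO_REG_SET (live, REGNO (XEXP (note, 0)));

  /* Registers set by INSN become live ...  */
  note_stores (PATTERN (insn), mark_one_set, (void *) live);

  /* ... except those whose value a REG_UNUSED note says is never needed.  */
  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_UNUSED
	&& GET_CODE (XEXP (note, 0)) == REG)
      CLEAR_REGNO_REG_SET (live, REGNO (XEXP (note, 0)));
}
#endif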
/* TODO:

   Split out from life_analysis:
	- local property discovery (bb->local_live, bb->local_set)
	- global property computation
	- log links creation
	- pre/post modify transformation
*/
#include "config.h"
#include "system.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-config.h"
#include "regs.h"
#include "flags.h"
#include "output.h"
#include "function.h"
#include "except.h"
#include "toplev.h"
#include "recog.h"
#include "insn-flags.h"
#include "expr.h"
#include "ssa.h"

#include "obstack.h"
#include "splay-tree.h"

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free
/* EXIT_IGNORE_STACK should be nonzero if, when returning from a function,
   the stack pointer does not matter.  The value is tested only in
   functions that have frame pointers.
   No definition is equivalent to always zero.  */
#ifndef EXIT_IGNORE_STACK
#define EXIT_IGNORE_STACK 0
#endif

#ifndef HAVE_epilogue
#define HAVE_epilogue 0
#endif
#ifndef HAVE_prologue
#define HAVE_prologue 0
#endif
#ifndef HAVE_sibcall_epilogue
#define HAVE_sibcall_epilogue 0
#endif

#ifndef LOCAL_REGNO
#define LOCAL_REGNO(REGNO)  0
#endif
#ifndef EPILOGUE_USES
#define EPILOGUE_USES(REGNO)  0
#endif
/* The contents of the current function definition are allocated
   in this obstack, and all are freed at the end of the function.
   For top-level functions, this is temporary_obstack.
   Separate obstacks are made for nested functions.  */

extern struct obstack *function_obstack;

/* Number of basic blocks in the current function.  */

int n_basic_blocks;

/* Number of edges in the current function.  */

int n_edges;

/* The basic block array.  */

varray_type basic_block_info;

/* The special entry and exit blocks.  */
struct basic_block_def entry_exit_blocks[2]
= {{NULL,			/* head */
    NULL,			/* end */
    NULL,			/* pred */
    NULL,			/* succ */
    NULL,			/* local_set */
    NULL,			/* global_live_at_start */
    NULL,			/* global_live_at_end */
    NULL,			/* aux */
    ENTRY_BLOCK,		/* index */
    0,				/* loop_depth */
    -1, -1,			/* eh_beg, eh_end */
    0				/* count */
   },
   {
    NULL,			/* head */
    NULL,			/* end */
    NULL,			/* pred */
    NULL,			/* succ */
    NULL,			/* local_set */
    NULL,			/* global_live_at_start */
    NULL,			/* global_live_at_end */
    NULL,			/* aux */
    EXIT_BLOCK,			/* index */
    0,				/* loop_depth */
    -1, -1,			/* eh_beg, eh_end */
    0				/* count */
   }
};
/* Nonzero if the second flow pass has completed.  */
int flow2_completed;

/* Maximum register number used in this function, plus one.  */

int max_regno;

/* Indexed by n, giving various register information */

varray_type reg_n_info;

/* Size of a regset for the current function,
   in (1) bytes and (2) elements.  */

int regset_bytes;
int regset_size;

/* Regset of regs live when calls to `setjmp'-like functions happen.  */
/* ??? Does this exist only for the setjmp-clobbered warning message?  */

regset regs_live_at_setjmp;

/* List made of EXPR_LIST rtx's which gives pairs of pseudo registers
   that have to go in the same hard reg.
   The first two regs in the list are a pair, and the next two
   are another pair, etc.  */
rtx regs_may_share;

/* Set of registers that may be eliminable.  These are handled specially
   in updating regs_ever_live.  */

static HARD_REG_SET elim_reg_set;

/* The basic block structure for every insn, indexed by uid.  */

varray_type basic_block_for_insn;

/* The labels mentioned in non-jump rtl.  Valid during find_basic_blocks.  */
/* ??? Should probably be using LABEL_NUSES instead.  It would take a
   bit of surgery to be able to use or co-opt the routines in jump.  */

static rtx label_value_list;
static rtx tail_recursion_label_list;
/* Holds information for tracking conditional register life information.  */
struct reg_cond_life_info
{
  /* An EXPR_LIST of conditions under which a register is dead.  */
  rtx condition;

  /* ??? Could store mask of bytes that are dead, so that we could finally
     track lifetimes of multi-word registers accessed via subregs.  */
};
/* For use in communicating between propagate_block and its subroutines.
   Holds all information needed to compute life and def-use information.  */

struct propagate_block_info
{
  /* The basic block we're considering.  */
  basic_block bb;

  /* Bit N is set if register N is conditionally or unconditionally live.  */
  regset reg_live;

  /* Bit N is set if register N is set this insn.  */
  regset new_set;

  /* Element N is the next insn that uses (hard or pseudo) register N
     within the current basic block; or zero, if there is no such insn.  */
  rtx *reg_next_use;

  /* Contains a list of all the MEMs we are tracking for dead store
     elimination.  */
  rtx mem_set_list;

  /* If non-null, record the set of registers set in the basic block.  */
  regset local_set;

#ifdef HAVE_conditional_execution
  /* Indexed by register number, holds a reg_cond_life_info for each
     register that is not unconditionally live or dead.  */
  splay_tree reg_cond_dead;

  /* Bit N is set if register N is in an expression in reg_cond_dead.  */
  regset reg_cond_reg;
#endif

  /* Non-zero if the value of CC0 is live.  */
  int cc0_live;

  /* Flags controlling the set of information propagate_block collects.  */
  int flags;
};
/* Store the data structures necessary for depth-first search.  */
struct depth_first_search_dsS {
  /* stack for backtracking during the algorithm */
  basic_block *stack;

  /* number of edges in the stack.  That is, positions 0, ..., sp-1
     have edges.  */
  unsigned int sp;

  /* record of basic blocks already seen by depth-first search */
  sbitmap visited_blocks;
};
typedef struct depth_first_search_dsS *depth_first_search_ds;
/* Forward declarations */
static int count_basic_blocks		PARAMS ((rtx));
static void find_basic_blocks_1		PARAMS ((rtx));
static rtx find_label_refs		PARAMS ((rtx, rtx));
static void clear_edges			PARAMS ((void));
static void make_edges			PARAMS ((rtx));
static void make_label_edge		PARAMS ((sbitmap *, basic_block,
						 rtx, int));
static void make_eh_edge		PARAMS ((sbitmap *, eh_nesting_info *,
						 basic_block, rtx, int));
static void mark_critical_edges		PARAMS ((void));
static void move_stray_eh_region_notes	PARAMS ((void));
static void record_active_eh_regions	PARAMS ((rtx));

static void commit_one_edge_insertion	PARAMS ((edge));

static void delete_unreachable_blocks	PARAMS ((void));
static void delete_eh_regions		PARAMS ((void));
static int can_delete_note_p		PARAMS ((rtx));
static void expunge_block		PARAMS ((basic_block));
static int can_delete_label_p		PARAMS ((rtx));
static int tail_recursion_label_p	PARAMS ((rtx));
static int merge_blocks_move_predecessor_nojumps PARAMS ((basic_block,
							  basic_block));
static int merge_blocks_move_successor_nojumps PARAMS ((basic_block,
							basic_block));
static int merge_blocks			PARAMS ((edge, basic_block, basic_block));
static void try_merge_blocks		PARAMS ((void));
static void tidy_fallthru_edges		PARAMS ((void));
static int verify_wide_reg_1		PARAMS ((rtx *, void *));
static void verify_wide_reg		PARAMS ((int, rtx, rtx));
static void verify_local_live_at_start	PARAMS ((regset, basic_block));
static int set_noop_p			PARAMS ((rtx));
static int noop_move_p			PARAMS ((rtx));
static void delete_noop_moves		PARAMS ((rtx));
static void notice_stack_pointer_modification_1 PARAMS ((rtx, rtx, void *));
static void notice_stack_pointer_modification PARAMS ((rtx));
static void mark_reg			PARAMS ((rtx, void *));
static void mark_regs_live_at_end	PARAMS ((regset));
static int set_phi_alternative_reg	PARAMS ((rtx, int, int, void *));
static void calculate_global_regs_live	PARAMS ((sbitmap, sbitmap, int));
static void propagate_block_delete_insn PARAMS ((basic_block, rtx));
static rtx propagate_block_delete_libcall PARAMS ((basic_block, rtx, rtx));
static int insn_dead_p			PARAMS ((struct propagate_block_info *,
						 rtx, int, rtx));
static int libcall_dead_p		PARAMS ((struct propagate_block_info *,
						 rtx, rtx));
static void mark_set_regs		PARAMS ((struct propagate_block_info *,
						 rtx, rtx));
static void mark_set_1			PARAMS ((struct propagate_block_info *,
						 enum rtx_code, rtx, rtx,
						 rtx, int));
#ifdef HAVE_conditional_execution
static int mark_regno_cond_dead		PARAMS ((struct propagate_block_info *,
						 int, rtx));
static void free_reg_cond_life_info	PARAMS ((splay_tree_value));
static int flush_reg_cond_reg_1		PARAMS ((splay_tree_node, void *));
static void flush_reg_cond_reg		PARAMS ((struct propagate_block_info *,
						 int));
static rtx ior_reg_cond			PARAMS ((rtx, rtx));
static rtx not_reg_cond			PARAMS ((rtx));
static rtx nand_reg_cond		PARAMS ((rtx, rtx));
#endif
#ifdef AUTO_INC_DEC
static void attempt_auto_inc		PARAMS ((struct propagate_block_info *,
						 rtx, rtx, rtx, rtx, rtx));
static void find_auto_inc		PARAMS ((struct propagate_block_info *,
						 rtx, rtx));
static int try_pre_increment_1		PARAMS ((struct propagate_block_info *,
						 rtx));
static int try_pre_increment		PARAMS ((rtx, rtx, HOST_WIDE_INT));
#endif
static void mark_used_reg		PARAMS ((struct propagate_block_info *,
						 rtx, rtx, rtx));
static void mark_used_regs		PARAMS ((struct propagate_block_info *,
						 rtx, rtx, rtx));
void dump_flow_info			PARAMS ((FILE *));
void debug_flow_info			PARAMS ((void));
static void dump_edge_info		PARAMS ((FILE *, edge, int));

static void invalidate_mems_from_autoinc PARAMS ((struct propagate_block_info *,
						  rtx));
static void remove_fake_successors	PARAMS ((basic_block));
static void flow_nodes_print	PARAMS ((const char *, const sbitmap, FILE *));
static void flow_exits_print PARAMS ((const char *, const edge *, int, FILE *));
static void flow_loops_cfg_dump		PARAMS ((const struct loops *, FILE *));
static int flow_loop_nested_p		PARAMS ((struct loop *, struct loop *));
static int flow_loop_exits_find		PARAMS ((const sbitmap, edge **));
static int flow_loop_nodes_find	PARAMS ((basic_block, basic_block, sbitmap));
static int flow_depth_first_order_compute PARAMS ((int *, int *));
static void flow_dfs_compute_reverse_init
  PARAMS ((depth_first_search_ds));
static void flow_dfs_compute_reverse_add_bb
  PARAMS ((depth_first_search_ds, basic_block));
static basic_block flow_dfs_compute_reverse_execute
  PARAMS ((depth_first_search_ds));
static void flow_dfs_compute_reverse_finish
  PARAMS ((depth_first_search_ds));
static basic_block flow_loop_pre_header_find PARAMS ((basic_block, const sbitmap *));
static void flow_loop_tree_node_add	PARAMS ((struct loop *, struct loop *));
static void flow_loops_tree_build	PARAMS ((struct loops *));
static int flow_loop_level_compute	PARAMS ((struct loop *, int));
static int flow_loops_level_compute	PARAMS ((struct loops *));
/* Find basic blocks of the current function.
   F is the first insn of the function and NREGS the number of register
   numbers in use.  */

void
find_basic_blocks (f, nregs, file)
     rtx f;
     int nregs ATTRIBUTE_UNUSED;
     FILE *file ATTRIBUTE_UNUSED;
{
  int max_uid;

  /* Flush out existing data.  */
  if (basic_block_info != NULL)
    {
      int i;

      clear_edges ();

      /* Clear bb->aux on all extant basic blocks.  We'll use this as a
	 tag for reuse during create_basic_block, just in case some pass
	 copies around basic block notes improperly.  */
      for (i = 0; i < n_basic_blocks; ++i)
	BASIC_BLOCK (i)->aux = NULL;

      VARRAY_FREE (basic_block_info);
    }

  n_basic_blocks = count_basic_blocks (f);

  /* Size the basic block table.  The actual structures will be allocated
     by find_basic_blocks_1, since we want to keep the structure pointers
     stable across calls to find_basic_blocks.  */
  /* ??? This whole issue would be much simpler if we called find_basic_blocks
     exactly once, and thereafter we don't have a single long chain of
     instructions at all until close to the end of compilation when we
     actually lay them out.  */

  VARRAY_BB_INIT (basic_block_info, n_basic_blocks, "basic_block_info");

  find_basic_blocks_1 (f);

  /* Record the block to which an insn belongs.  */
  /* ??? This should be done another way, by which (perhaps) a label is
     tagged directly with the basic block that it starts.  It is used for
     more than that currently, but IMO that is the only valid use.  */

  max_uid = get_max_uid ();
#ifdef AUTO_INC_DEC
  /* Leave space for insns life_analysis makes in some cases for auto-inc.
     These cases are rare, so we don't need too much space.  */
  max_uid += max_uid / 10;
#endif

  compute_bb_for_insn (max_uid);

  /* Discover the edges of our cfg.  */
  record_active_eh_regions (f);
  make_edges (label_value_list);

  /* Do very simple cleanup now, for the benefit of code that runs between
     here and cleanup_cfg, e.g. thread_prologue_and_epilogue_insns.  */
  tidy_fallthru_edges ();

  mark_critical_edges ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
}
/* Count the basic blocks of the function.  */

static int
count_basic_blocks (f)
     rtx f;
{
  register rtx insn;
  register RTX_CODE prev_code;
  register int count = 0;
  int eh_region = 0;
  int call_had_abnormal_edge = 0;

  prev_code = JUMP_INSN;
  for (insn = f; insn; insn = NEXT_INSN (insn))
    {
      register RTX_CODE code = GET_CODE (insn);

      if (code == CODE_LABEL
	  || (GET_RTX_CLASS (code) == 'i'
	      && (prev_code == JUMP_INSN
		  || prev_code == BARRIER
		  || (prev_code == CALL_INSN && call_had_abnormal_edge))))
	count++;

      /* Record whether this call created an edge.  */
      if (code == CALL_INSN)
	{
	  rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
	  int region = (note ? INTVAL (XEXP (note, 0)) : 1);

	  call_had_abnormal_edge = 0;

	  /* If there is an EH region or rethrow, we have an edge.  */
	  if ((eh_region && region > 0)
	      || find_reg_note (insn, REG_EH_RETHROW, NULL_RTX))
	    call_had_abnormal_edge = 1;
	  else if (nonlocal_goto_handler_labels && region >= 0)
	    /* If there is a nonlocal goto label and the specified
	       region number isn't -1, we have an edge.  (0 means
	       no throw, but might have a nonlocal goto).  */
	    call_had_abnormal_edge = 1;
	}

      if (code != NOTE)
	prev_code = code;
      else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
	++eh_region;
      else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
	--eh_region;
    }

  /* The rest of the compiler works a bit smoother when we don't have to
     check for the edge case of do-nothing functions with no basic blocks.  */
  if (count == 0)
    {
      emit_insn (gen_rtx_USE (VOIDmode, const0_rtx));
      count = 1;
    }

  return count;
}
/* Scan a list of insns for labels referred to other than by jumps.
   This is used to scan the alternatives of a call placeholder.  */
static rtx
find_label_refs (f, lvl)
     rtx f;
     rtx lvl;
{
  rtx insn;

  for (insn = f; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
	rtx note;

	/* Make a list of all labels referred to other than by jumps
	   (which just don't have the REG_LABEL notes).

	   Make a special exception for labels followed by an ADDR*VEC,
	   as this would be a part of the tablejump setup code.

	   Make a special exception for the eh_return_stub_label, which
	   we know isn't part of any otherwise visible control flow.  */

	for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	  if (REG_NOTE_KIND (note) == REG_LABEL)
	    {
	      rtx lab = XEXP (note, 0), next;

	      if (lab == eh_return_stub_label)
		;
	      else if ((next = next_nonnote_insn (lab)) != NULL
		       && GET_CODE (next) == JUMP_INSN
		       && (GET_CODE (PATTERN (next)) == ADDR_VEC
			   || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
		;
	      else if (GET_CODE (lab) == NOTE)
		;
	      else
		lvl = alloc_EXPR_LIST (0, XEXP (note, 0), lvl);
	    }
      }

  return lvl;
}
/* Find all basic blocks of the function whose first insn is F.

   Collect and return a list of labels whose addresses are taken.  This
   will be used in make_edges for use with computed gotos.  */

static void
find_basic_blocks_1 (f)
     rtx f;
{
  register rtx insn, next;
  int i = 0;
  rtx bb_note = NULL_RTX;
  rtx eh_list = NULL_RTX;
  rtx lvl = NULL_RTX;
  rtx trll = NULL_RTX;
  rtx head = NULL_RTX;
  rtx end = NULL_RTX;

  /* We process the instructions in a slightly different way than we did
     previously.  This is so that we see a NOTE_BASIC_BLOCK after we have
     closed out the previous block, so that it gets attached at the proper
     place.  Since this form should be equivalent to the previous,
     count_basic_blocks continues to use the old form as a check.  */

  for (insn = f; insn; insn = next)
    {
      enum rtx_code code = GET_CODE (insn);

      next = NEXT_INSN (insn);

      switch (code)
	{
	case NOTE:
	  {
	    int kind = NOTE_LINE_NUMBER (insn);

	    /* Keep a LIFO list of the currently active exception notes.  */
	    if (kind == NOTE_INSN_EH_REGION_BEG)
	      eh_list = alloc_INSN_LIST (insn, eh_list);
	    else if (kind == NOTE_INSN_EH_REGION_END)
	      {
		rtx t = eh_list;

		eh_list = XEXP (eh_list, 1);
		free_INSN_LIST_node (t);
	      }

	    /* Look for basic block notes with which to keep the
	       basic_block_info pointers stable.  Unthread the note now;
	       we'll put it back at the right place in create_basic_block.
	       Or not at all if we've already found a note in this block.  */
	    else if (kind == NOTE_INSN_BASIC_BLOCK)
	      {
		if (bb_note == NULL_RTX)
		  bb_note = insn;
		else
		  next = flow_delete_insn (insn);
	      }
	    break;
	  }

	case CODE_LABEL:
	  /* A basic block starts at a label.  If we've closed one off due
	     to a barrier or some such, no need to do it again.  */
	  if (head != NULL_RTX)
	    {
	      /* While we now have edge lists with which other portions of
		 the compiler might determine a call ending a basic block
		 does not imply an abnormal edge, it will be a bit before
		 everything can be updated.  So continue to emit a noop at
		 the end of such a block.  */
	      if (GET_CODE (end) == CALL_INSN && ! SIBLING_CALL_P (end))
		{
		  rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
		  end = emit_insn_after (nop, end);
		}

	      create_basic_block (i++, head, end, bb_note);
	      bb_note = NULL_RTX;
	    }

	  head = end = insn;
	  break;

	case JUMP_INSN:
	  /* A basic block ends at a jump.  */
	  if (head == NULL_RTX)
	    head = insn;
	  else
	    {
	      /* ??? Make a special check for table jumps.  The way this
		 happens is truly and amazingly gross.  We are about to
		 create a basic block that contains just a code label and
		 an addr*vec jump insn.  Worse, an addr_diff_vec creates
		 its own natural loop.

		 Prevent this bit of brain damage, pasting things together
		 correctly in make_edges.

		 The correct solution involves emitting the table directly
		 on the tablejump instruction as a note, or JUMP_LABEL.  */

	      if (GET_CODE (PATTERN (insn)) == ADDR_VEC
		  || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
		{
		  head = end = NULL;
		  n_basic_blocks--;
		  break;
		}
	    }
	  end = insn;
	  goto new_bb_inclusive;

	case BARRIER:
	  /* A basic block ends at a barrier.  It may be that an unconditional
	     jump already closed the basic block -- no need to do it again.  */
	  if (head == NULL_RTX)
	    break;

	  /* While we now have edge lists with which other portions of the
	     compiler might determine a call ending a basic block does not
	     imply an abnormal edge, it will be a bit before everything can
	     be updated.  So continue to emit a noop at the end of such a
	     block.  */
	  if (GET_CODE (end) == CALL_INSN && ! SIBLING_CALL_P (end))
	    {
	      rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
	      end = emit_insn_after (nop, end);
	    }
	  goto new_bb_exclusive;

	case CALL_INSN:
	  {
	    /* Record whether this call created an edge.  */
	    rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
	    int region = (note ? INTVAL (XEXP (note, 0)) : 1);
	    int call_has_abnormal_edge = 0;

	    if (GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
	      {
		/* Scan each of the alternatives for label refs.  */
		lvl = find_label_refs (XEXP (PATTERN (insn), 0), lvl);
		lvl = find_label_refs (XEXP (PATTERN (insn), 1), lvl);
		lvl = find_label_refs (XEXP (PATTERN (insn), 2), lvl);
		/* Record its tail recursion label, if any.  */
		if (XEXP (PATTERN (insn), 3) != NULL_RTX)
		  trll = alloc_EXPR_LIST (0, XEXP (PATTERN (insn), 3), trll);
	      }

	    /* If there is an EH region or rethrow, we have an edge.  */
	    if ((eh_list && region > 0)
		|| find_reg_note (insn, REG_EH_RETHROW, NULL_RTX))
	      call_has_abnormal_edge = 1;
	    else if (nonlocal_goto_handler_labels && region >= 0)
	      /* If there is a nonlocal goto label and the specified
		 region number isn't -1, we have an edge.  (0 means
		 no throw, but might have a nonlocal goto).  */
	      call_has_abnormal_edge = 1;

	    /* A basic block ends at a call that can either throw or
	       do a non-local goto.  */
	    if (call_has_abnormal_edge)
	      {
	      new_bb_inclusive:
		if (head == NULL_RTX)
		  head = insn;
		end = insn;

	      new_bb_exclusive:
		create_basic_block (i++, head, end, bb_note);
		head = end = NULL_RTX;
		bb_note = NULL_RTX;
		break;
	      }
	  }
	  /* Fall through.  */

	default:
	  if (GET_RTX_CLASS (code) == 'i')
	    {
	      if (head == NULL_RTX)
		head = insn;
	      end = insn;
	    }
	  break;
	}

      if (GET_RTX_CLASS (code) == 'i')
	{
	  rtx note;

	  /* Make a list of all labels referred to other than by jumps
	     (which just don't have the REG_LABEL notes).

	     Make a special exception for labels followed by an ADDR*VEC,
	     as this would be a part of the tablejump setup code.

	     Make a special exception for the eh_return_stub_label, which
	     we know isn't part of any otherwise visible control flow.  */

	  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	    if (REG_NOTE_KIND (note) == REG_LABEL)
	      {
		rtx lab = XEXP (note, 0), next;

		if (lab == eh_return_stub_label)
		  ;
		else if ((next = next_nonnote_insn (lab)) != NULL
			 && GET_CODE (next) == JUMP_INSN
			 && (GET_CODE (PATTERN (next)) == ADDR_VEC
			     || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
		  ;
		else if (GET_CODE (lab) == NOTE)
		  ;
		else
		  lvl = alloc_EXPR_LIST (0, XEXP (note, 0), lvl);
	      }
	}
    }

  if (head != NULL_RTX)
    create_basic_block (i++, head, end, bb_note);
  else if (bb_note)
    flow_delete_insn (bb_note);

  if (i != n_basic_blocks)
    abort ();

  label_value_list = lvl;
  tail_recursion_label_list = trll;
}
/* Tidy the CFG by deleting unreachable code and whatnot.  */

void
cleanup_cfg (f)
     rtx f;
{
  delete_unreachable_blocks ();
  move_stray_eh_region_notes ();
  record_active_eh_regions (f);
  try_merge_blocks ();
  mark_critical_edges ();

  /* Kill the data we won't maintain.  */
  free_EXPR_LIST_list (&label_value_list);
  free_EXPR_LIST_list (&tail_recursion_label_list);
}
/* Create a new basic block consisting of the instructions between
   HEAD and END inclusive.  Reuses the note and basic block struct
   in BB_NOTE, if any.  */

void
create_basic_block (index, head, end, bb_note)
     int index;
     rtx head, end, bb_note;
{
  basic_block bb;

  if (bb_note
      && ! RTX_INTEGRATED_P (bb_note)
      && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
      && bb->aux == NULL)
    {
      /* If we found an existing note, thread it back onto the chain.  */

      rtx after;

      if (GET_CODE (head) == CODE_LABEL)
	after = head;
      else
	{
	  after = PREV_INSN (head);
	  head = bb_note;
	}

      if (after != bb_note && NEXT_INSN (after) != bb_note)
	reorder_insns (bb_note, bb_note, after);
    }
  else
    {
      /* Otherwise we must create a note and a basic block structure.
	 Since we allow basic block structs in rtl, give the struct
	 the same lifetime by allocating it off the function obstack
	 rather than using malloc.  */

      bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
      memset (bb, 0, sizeof (*bb));

      if (GET_CODE (head) == CODE_LABEL)
	bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
      else
	{
	  bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
	  head = bb_note;
	}
      NOTE_BASIC_BLOCK (bb_note) = bb;
    }

  /* Always include the bb note in the block.  */
  if (NEXT_INSN (end) == bb_note)
    end = bb_note;

  bb->head = head;
  bb->end = end;
  bb->index = index;
  BASIC_BLOCK (index) = bb;

  /* Tag the block so that we know it has been used when considering
     other basic block notes.  */
  bb->aux = bb;
}
/* Records the basic block struct in BB_FOR_INSN, for every instruction
   indexed by INSN_UID.  MAX is the size of the array.  */

void
compute_bb_for_insn (max)
     int max;
{
  int i;

  if (basic_block_for_insn)
    VARRAY_FREE (basic_block_for_insn);
  VARRAY_BB_INIT (basic_block_for_insn, max, "basic_block_for_insn");

  for (i = 0; i < n_basic_blocks; ++i)
    {
      basic_block bb = BASIC_BLOCK (i);
      rtx insn, end;

      end = bb->end;
      insn = bb->head;
      while (1)
	{
	  int uid = INSN_UID (insn);
	  if (uid < max)
	    VARRAY_BB (basic_block_for_insn, uid) = bb;
	  if (insn == end)
	    break;
	  insn = NEXT_INSN (insn);
	}
    }
}
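
/* An illustrative sketch (not part of the pass, compiled out): once
   compute_bb_for_insn has run, BLOCK_FOR_INSN gives the containing block
   of any insn in constant time.  The helper name below is hypothetical
   and shown only as an example consumer.  */
#if 0
static int
insns_in_same_block_p (a, b)
     rtx a, b;
{
  return BLOCK_FOR_INSN (a) == BLOCK_FOR_INSN (b);
}
#endif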
/* Free the memory associated with the edge structures.  */

static void
clear_edges ()
{
  int i;
  edge n, e;

  for (i = 0; i < n_basic_blocks; ++i)
    {
      basic_block bb = BASIC_BLOCK (i);

      for (e = bb->succ; e; e = n)
	{
	  n = e->succ_next;
	  free (e);
	}

      bb->succ = 0;
      bb->pred = 0;
    }

  for (e = ENTRY_BLOCK_PTR->succ; e; e = n)
    {
      n = e->succ_next;
      free (e);
    }

  ENTRY_BLOCK_PTR->succ = 0;
  EXIT_BLOCK_PTR->pred = 0;

  n_edges = 0;
}
/* Identify the edges between basic blocks.

   LABEL_VALUE_LIST is a list of labels mentioned in non-jump rtl.
   Blocks that are otherwise unreachable may be reachable through
   computed gotos to these labels, or with a non-local goto.  */

static void
make_edges (label_value_list)
     rtx label_value_list;
{
  int i;
  eh_nesting_info *eh_nest_info = init_eh_nesting_info ();
  sbitmap *edge_cache = NULL;

  /* Assume no computed jump; revise as we create edges.  */
  current_function_has_computed_jump = 0;

  /* Heavy use of computed goto in machine-generated code can lead to
     nearly fully-connected CFGs.  In that case we spend a significant
     amount of time searching the edge lists for duplicates.  */
  if (forced_labels || label_value_list)
    {
      edge_cache = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
      sbitmap_vector_zero (edge_cache, n_basic_blocks);
    }

  /* By nature of the way these get numbered, block 0 is always the entry.  */
  make_edge (edge_cache, ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU);

  for (i = 0; i < n_basic_blocks; ++i)
    {
      basic_block bb = BASIC_BLOCK (i);
      rtx insn, x;
      enum rtx_code code;
      int force_fallthru = 0;

      /* Examine the last instruction of the block, and discover the
	 ways we can leave the block.  */

      insn = bb->end;
      code = GET_CODE (insn);

      /* A branch.  */
      if (code == JUMP_INSN)
	{
	  rtx tmp;

	  /* ??? Recognize a tablejump and do the right thing.  */
	  if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
	      && (tmp = NEXT_INSN (tmp)) != NULL_RTX
	      && GET_CODE (tmp) == JUMP_INSN
	      && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
		  || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
	    {
	      rtvec vec;
	      int j;

	      if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
		vec = XVEC (PATTERN (tmp), 0);
	      else
		vec = XVEC (PATTERN (tmp), 1);

	      for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
		make_label_edge (edge_cache, bb,
				 XEXP (RTVEC_ELT (vec, j), 0), 0);

	      /* Some targets (eg, ARM) emit a conditional jump that also
		 contains the out-of-range target.  Scan for these and
		 add an edge if necessary.  */
	      if ((tmp = single_set (insn)) != NULL
		  && SET_DEST (tmp) == pc_rtx
		  && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
		  && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
		make_label_edge (edge_cache, bb,
				 XEXP (XEXP (SET_SRC (tmp), 2), 0), 0);

#ifdef CASE_DROPS_THROUGH
	      /* Silly VAXen.  The ADDR_VEC is going to be in the way of
		 us naturally detecting fallthru into the next block.  */
	      force_fallthru = 1;
#endif
	    }

	  /* If this is a computed jump, then mark it as reaching
	     everything on the label_value_list and forced_labels list.  */
	  else if (computed_jump_p (insn))
	    {
	      current_function_has_computed_jump = 1;

	      for (x = label_value_list; x; x = XEXP (x, 1))
		make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);

	      for (x = forced_labels; x; x = XEXP (x, 1))
		make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
	    }

	  /* A return insn creates an edge to the exit block.  */
	  else if (returnjump_p (insn))
	    make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);

	  /* Otherwise, we have a plain conditional or unconditional jump.  */
	  else
	    {
	      if (! JUMP_LABEL (insn))
		abort ();
	      make_label_edge (edge_cache, bb, JUMP_LABEL (insn), 0);
	    }
	}

      /* If this is a sibling call insn, then this is in effect a
	 combined call and return, and so we need an edge to the
	 exit block.  No need to worry about EH edges, since we
	 wouldn't have created the sibling call in the first place.  */

      if (code == CALL_INSN && SIBLING_CALL_P (insn))
	make_edge (edge_cache, bb, EXIT_BLOCK_PTR,
		   EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
      else
	{
	  /* If this is a CALL_INSN, then mark it as reaching the active EH
	     handler for this CALL_INSN.  If we're handling asynchronous
	     exceptions then any insn can reach any of the active handlers.

	     Also mark the CALL_INSN as reaching any nonlocal goto handler.  */

	  if (code == CALL_INSN || asynchronous_exceptions)
	    {
	      /* Add any appropriate EH edges.  We do this unconditionally
		 since there may be a REG_EH_REGION or REG_EH_RETHROW note
		 on the call, and this needn't be within an EH region.  */
	      make_eh_edge (edge_cache, eh_nest_info, bb, insn, bb->eh_end);

	      /* If we have asynchronous exceptions, do the same for *all*
		 exception regions active in the block.  */
	      if (asynchronous_exceptions
		  && bb->eh_beg != bb->eh_end)
		{
		  if (bb->eh_beg >= 0)
		    make_eh_edge (edge_cache, eh_nest_info, bb,
				  NULL_RTX, bb->eh_beg);

		  for (x = bb->head; x != bb->end; x = NEXT_INSN (x))
		    if (GET_CODE (x) == NOTE
			&& (NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_BEG
			    || NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_END))
		      {
			int region = NOTE_EH_HANDLER (x);
			make_eh_edge (edge_cache, eh_nest_info, bb,
				      NULL_RTX, region);
		      }
		}

	      if (code == CALL_INSN && nonlocal_goto_handler_labels)
		{
		  /* ??? This could be made smarter: in some cases it's
		     possible to tell that certain calls will not do a
		     nonlocal goto.

		     For example, if the nested functions that do the
		     nonlocal gotos do not have their addresses taken, then
		     only calls to those functions or to other nested
		     functions that use them could possibly do nonlocal
		     gotos.  */
		  /* We do know that a REG_EH_REGION note with a value less
		     than 0 is guaranteed not to perform a non-local goto.  */
		  rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
		  if (!note || INTVAL (XEXP (note, 0)) >= 0)
		    for (x = nonlocal_goto_handler_labels; x; x = XEXP (x, 1))
		      make_label_edge (edge_cache, bb, XEXP (x, 0),
				       EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
		}
	    }
	}

      /* We know something about the structure of the function __throw in
	 libgcc2.c.  It is the only function that ever contains eh_stub
	 labels.  It modifies its return address so that the last block
	 returns to one of the eh_stub labels within it.  So we have to
	 make additional edges in the flow graph.  */
      if (i + 1 == n_basic_blocks && eh_return_stub_label != 0)
	make_label_edge (edge_cache, bb, eh_return_stub_label, EDGE_EH);

      /* Find out if we can drop through to the next block.  */
      insn = next_nonnote_insn (insn);
      if (!insn || (i + 1 == n_basic_blocks && force_fallthru))
	make_edge (edge_cache, bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
      else if (i + 1 < n_basic_blocks)
	{
	  rtx tmp = BLOCK_HEAD (i + 1);
	  if (GET_CODE (tmp) == NOTE)
	    tmp = next_nonnote_insn (tmp);
	  if (force_fallthru || insn == tmp)
	    make_edge (edge_cache, bb, BASIC_BLOCK (i + 1), EDGE_FALLTHRU);
	}
    }

  free_eh_nesting_info (eh_nest_info);
  if (edge_cache)
    sbitmap_vector_free (edge_cache);
}
/* Create an edge between two basic blocks.  FLAGS are auxiliary information
   about the edge that is accumulated between calls.  */

void
make_edge (edge_cache, src, dst, flags)
     sbitmap *edge_cache;
     basic_block src, dst;
     int flags;
{
  int use_edge_cache;
  edge e;

  /* Don't bother with edge cache for ENTRY or EXIT; there aren't that
     many edges to them, and we didn't allocate memory for it.  */
  use_edge_cache = (edge_cache
		    && src != ENTRY_BLOCK_PTR
		    && dst != EXIT_BLOCK_PTR);

  /* Make sure we don't add duplicate edges.  */
  if (! use_edge_cache || TEST_BIT (edge_cache[src->index], dst->index))
    for (e = src->succ; e; e = e->succ_next)
      if (e->dest == dst)
	{
	  e->flags |= flags;
	  return;
	}

  e = (edge) xcalloc (1, sizeof (*e));
  n_edges++;

  e->succ_next = src->succ;
  e->pred_next = dst->pred;
  e->src = src;
  e->dest = dst;
  e->flags = flags;

  src->succ = e;
  dst->pred = e;

  if (use_edge_cache)
    SET_BIT (edge_cache[src->index], dst->index);
}
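
/* An illustrative sketch (not part of the pass, compiled out): outside of
   make_edges there is usually no edge cache, so callers simply pass a null
   cache.  E.g., a pass wiring block I to block J by hand, assuming it has
   already established that the fallthru relationship holds:  */
#if 0
  make_edge (NULL, BASIC_BLOCK (i), BASIC_BLOCK (j), EDGE_FALLTHRU);
#endif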
/* Create an edge from a basic block to a label.  */

static void
make_label_edge (edge_cache, src, label, flags)
     sbitmap *edge_cache;
     basic_block src;
     rtx label;
     int flags;
{
  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If the label was never emitted, this insn is junk, but avoid a
     crash trying to refer to BLOCK_FOR_INSN (label).  This can happen
     as a result of a syntax error and a diagnostic has already been
     printed.  */

  if (INSN_UID (label) == 0)
    return;

  make_edge (edge_cache, src, BLOCK_FOR_INSN (label), flags);
}
/* Create the edges generated by INSN in REGION.  */

static void
make_eh_edge (edge_cache, eh_nest_info, src, insn, region)
     sbitmap *edge_cache;
     eh_nesting_info *eh_nest_info;
     basic_block src;
     rtx insn;
     int region;
{
  handler_info **handler_list;
  int num, is_call;

  is_call = (insn && GET_CODE (insn) == CALL_INSN ? EDGE_ABNORMAL_CALL : 0);
  num = reachable_handlers (region, eh_nest_info, insn, &handler_list);
  while (--num >= 0)
    {
      make_label_edge (edge_cache, src, handler_list[num]->handler_label,
		       EDGE_ABNORMAL | EDGE_EH | is_call);
    }
}
/* EH_REGION notes appearing between basic blocks are ambiguous, and even
   dangerous if we intend to move basic blocks around.  Move such notes
   into the following block.  */

static void
move_stray_eh_region_notes ()
{
  int i;
  basic_block b1, b2;

  if (n_basic_blocks < 2)
    return;

  b2 = BASIC_BLOCK (n_basic_blocks - 1);
  for (i = n_basic_blocks - 2; i >= 0; --i, b2 = b1)
    {
      rtx insn, next, list = NULL_RTX;

      b1 = BASIC_BLOCK (i);
      for (insn = NEXT_INSN (b1->end); insn != b2->head; insn = next)
	{
	  next = NEXT_INSN (insn);
	  if (GET_CODE (insn) == NOTE
	      && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG
		  || NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
	    {
	      /* Unlink from the insn chain.  */
	      NEXT_INSN (PREV_INSN (insn)) = next;
	      PREV_INSN (next) = PREV_INSN (insn);

	      /* Queue it.  */
	      NEXT_INSN (insn) = list;
	      list = insn;
	    }
	}

      if (list == NULL_RTX)
	continue;

      /* Find where to insert these things.  */
      insn = b2->head;
      if (GET_CODE (insn) == CODE_LABEL)
	insn = NEXT_INSN (insn);

      while (list)
	{
	  next = NEXT_INSN (list);
	  add_insn_after (list, insn);
	  list = next;
	}
    }
}
/* Recompute eh_beg/eh_end for each basic block.  */

static void
record_active_eh_regions (f)
     rtx f;
{
  rtx insn, eh_list = NULL_RTX;
  int i = 0;
  basic_block bb = BASIC_BLOCK (0);

  for (insn = f; insn; insn = NEXT_INSN (insn))
    {
      if (bb->head == insn)
	bb->eh_beg = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);

      if (GET_CODE (insn) == NOTE)
	{
	  int kind = NOTE_LINE_NUMBER (insn);
	  if (kind == NOTE_INSN_EH_REGION_BEG)
	    eh_list = alloc_INSN_LIST (insn, eh_list);
	  else if (kind == NOTE_INSN_EH_REGION_END)
	    {
	      rtx t = XEXP (eh_list, 1);
	      free_INSN_LIST_node (eh_list);
	      eh_list = t;
	    }
	}

      if (bb->end == insn)
	{
	  bb->eh_end = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);
	  i += 1;
	  if (i == n_basic_blocks)
	    break;
	  bb = BASIC_BLOCK (i);
	}
    }
}
/* Identify critical edges and set the bits appropriately.  */

static void
mark_critical_edges ()
{
  int i, n = n_basic_blocks;
  basic_block bb;

  /* We begin with the entry block.  This is not terribly important now,
     but could be if a front end (Fortran) implemented alternate entry
     points.  */
  bb = ENTRY_BLOCK_PTR;
  i = -1;

  while (1)
    {
      edge e;

      /* (1) Critical edges must have a source with multiple successors.  */
      if (bb->succ && bb->succ->succ_next)
	{
	  for (e = bb->succ; e; e = e->succ_next)
	    {
	      /* (2) Critical edges must have a destination with multiple
		 predecessors.  Note that we know there is at least one
		 predecessor -- the edge we followed to get here.  */
	      if (e->dest->pred->pred_next)
		e->flags |= EDGE_CRITICAL;
	      else
		e->flags &= ~EDGE_CRITICAL;
	    }
	}
      else
	{
	  for (e = bb->succ; e; e = e->succ_next)
	    e->flags &= ~EDGE_CRITICAL;
	}

      if (++i >= n)
	break;
      bb = BASIC_BLOCK (i);
    }
}
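
/* An illustrative sketch (not part of the pass, compiled out): criteria
   (1) and (2) above amount to this hypothetical predicate, whose result
   mark_critical_edges caches in EDGE_CRITICAL so that passes need not
   recompute it.  */
#if 0
static int
edge_is_critical_p (e)
     edge e;
{
  return (e->src->succ && e->src->succ->succ_next
	  && e->dest->pred && e->dest->pred->pred_next);
}
#endif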
/* Split a (typically critical) edge.  Return the new block.
   Abort on abnormal edges.

   ??? The code generally expects to be called on critical edges.
   The case of a block ending in an unconditional jump to a
   block with multiple predecessors is not handled optimally.  */

basic_block
split_edge (edge_in)
     edge edge_in;
{
  basic_block old_pred, bb, old_succ;
  edge edge_out;
  rtx bb_note;
  int i, j;

  /* Abnormal edges cannot be split.  */
  if ((edge_in->flags & EDGE_ABNORMAL) != 0)
    abort ();

  old_pred = edge_in->src;
  old_succ = edge_in->dest;

  /* Remove the existing edge from the destination's pred list.  */
  {
    edge *pp;
    for (pp = &old_succ->pred; *pp != edge_in; pp = &(*pp)->pred_next)
      continue;
    *pp = edge_in->pred_next;
    edge_in->pred_next = NULL;
  }

  /* Create the new structures.  */
  bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
  edge_out = (edge) xcalloc (1, sizeof (*edge_out));
  n_edges++;

  memset (bb, 0, sizeof (*bb));

  /* ??? This info is likely going to be out of date very soon.  */
  if (old_succ->global_live_at_start)
    {
      bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
      bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);
      COPY_REG_SET (bb->global_live_at_start, old_succ->global_live_at_start);
      COPY_REG_SET (bb->global_live_at_end, old_succ->global_live_at_start);
    }

  /* Wire them up.  */
  bb->pred = edge_in;
  bb->succ = edge_out;
  bb->count = edge_in->count;

  edge_in->dest = bb;
  edge_in->flags &= ~EDGE_CRITICAL;

  edge_out->pred_next = old_succ->pred;
  edge_out->succ_next = NULL;
  edge_out->src = bb;
  edge_out->dest = old_succ;
  edge_out->flags = EDGE_FALLTHRU;
  edge_out->probability = REG_BR_PROB_BASE;
  edge_out->count = edge_in->count;

  old_succ->pred = edge_out;

  /* Tricky case -- if there existed a fallthru into the successor
     (and we're not it) we must add a new unconditional jump around
     the new block we're actually interested in.

     Further, if that edge is critical, this means a second new basic
     block must be created to hold it.  In order to simplify correct
     insn placement, do this before we touch the existing basic block
     ordering for the block we were really wanting.  */
  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
    {
      edge e;
      for (e = edge_out->pred_next; e; e = e->pred_next)
	if (e->flags & EDGE_FALLTHRU)
	  break;

      if (e)
	{
	  basic_block jump_block;
	  rtx pos;

	  if ((e->flags & EDGE_CRITICAL) == 0
	      && e->src != ENTRY_BLOCK_PTR)
	    {
	      /* Non critical -- we can simply add a jump to the end
		 of the existing predecessor.  */
	      jump_block = e->src;
	    }
	  else
	    {
	      /* We need a new block to hold the jump.  The simplest
		 way to do the bulk of the work here is to recursively
		 call ourselves.  */
	      jump_block = split_edge (e);
	      e = jump_block->succ;
	    }

	  /* Now add the jump insn ...  */
	  pos = emit_jump_insn_after (gen_jump (old_succ->head),
				      jump_block->end);
	  jump_block->end = pos;
	  if (basic_block_for_insn)
	    set_block_for_insn (pos, jump_block);
	  emit_barrier_after (pos);

	  /* ... let jump know that label is in use, ...  */
	  JUMP_LABEL (pos) = old_succ->head;
	  ++LABEL_NUSES (old_succ->head);

	  /* ... and clear fallthru on the outgoing edge.  */
	  e->flags &= ~EDGE_FALLTHRU;

	  /* Continue splitting the interesting edge.  */
	}
    }

  /* Place the new block just in front of the successor.  */
  VARRAY_GROW (basic_block_info, ++n_basic_blocks);
  if (old_succ == EXIT_BLOCK_PTR)
    j = n_basic_blocks - 1;
  else
    j = old_succ->index;
  for (i = n_basic_blocks - 1; i > j; --i)
    {
      basic_block tmp = BASIC_BLOCK (i - 1);
      BASIC_BLOCK (i) = tmp;
      tmp->index = i;
    }
  BASIC_BLOCK (i) = bb;
  bb->index = i;

  /* Create the basic block note.

     Where we place the note can have a noticeable impact on the generated
     code.  Consider this cfg:

		        E
			|
			0
		       / \
		   +->1-->2--->E
		   |  |
		   +--+

     If we need to insert an insn on the edge from block 0 to block 1,
     we want to ensure the instructions we insert are outside of any
     loop notes that physically sit between block 0 and block 1.  Otherwise
     we confuse the loop optimizer into thinking the loop is a phony.  */
  if (old_succ != EXIT_BLOCK_PTR
      && PREV_INSN (old_succ->head)
      && GET_CODE (PREV_INSN (old_succ->head)) == NOTE
      && NOTE_LINE_NUMBER (PREV_INSN (old_succ->head)) == NOTE_INSN_LOOP_BEG)
    bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK,
				PREV_INSN (old_succ->head));
  else if (old_succ != EXIT_BLOCK_PTR)
    bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, old_succ->head);
  else
    bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
  NOTE_BASIC_BLOCK (bb_note) = bb;
  bb->head = bb->end = bb_note;

  /* Not quite simple -- for non-fallthru edges, we must adjust the
     predecessor's jump instruction to target our new block.  */
  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
    {
      rtx tmp, insn = old_pred->end;
      rtx old_label = old_succ->head;
      rtx new_label = gen_label_rtx ();

      if (GET_CODE (insn) != JUMP_INSN)
	abort ();

      /* ??? Recognize a tablejump and adjust all matching cases.  */
      if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
	  && (tmp = NEXT_INSN (tmp)) != NULL_RTX
	  && GET_CODE (tmp) == JUMP_INSN
	  && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
	      || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
	{
	  rtvec vec;
	  int j;

	  if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
	    vec = XVEC (PATTERN (tmp), 0);
	  else
	    vec = XVEC (PATTERN (tmp), 1);

	  for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
	    if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
	      {
		RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (VOIDmode, new_label);
		--LABEL_NUSES (old_label);
		++LABEL_NUSES (new_label);
	      }

	  /* Handle casesi dispatch insns */
	  if ((tmp = single_set (insn)) != NULL
	      && SET_DEST (tmp) == pc_rtx
	      && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
	      && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
	      && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
	    {
	      XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (VOIDmode,
							   new_label);
	      --LABEL_NUSES (old_label);
	      ++LABEL_NUSES (new_label);
	    }
	}
      else
	{
	  /* This would have indicated an abnormal edge.  */
	  if (computed_jump_p (insn))
	    abort ();

	  /* A return instruction can't be redirected.  */
	  if (returnjump_p (insn))
	    abort ();

	  /* If the insn doesn't go where we think, we're confused.  */
	  if (JUMP_LABEL (insn) != old_label)
	    abort ();

	  redirect_jump (insn, new_label, 0);
	}

      emit_label_before (new_label, bb_note);
      bb->head = new_label;
    }

  return bb;
}
/* Queue instructions for insertion on an edge between two basic blocks.
   The new instructions and basic blocks (if any) will not appear in the
   CFG until commit_edge_insertions is called.  */

void
insert_insn_on_edge (pattern, e)
     rtx pattern;
     edge e;
{
  /* We cannot insert instructions on an abnormal critical edge.
     It will be easier to find the culprit if we die now.  */
  if ((e->flags & (EDGE_ABNORMAL|EDGE_CRITICAL))
      == (EDGE_ABNORMAL|EDGE_CRITICAL))
    abort ();

  if (e->insns == NULL_RTX)
    start_sequence ();
  else
    push_to_sequence (e->insns);

  emit_insn (pattern);

  e->insns = get_insns ();
  end_sequence ();
}
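
/* An illustrative sketch (not part of the pass, compiled out): a caller
   typically queues a pattern on each edge of interest, then flushes all
   queued insns at once.  REG here is a hypothetical rtx used only for
   the example; queueing a (hypothetical) clearing of REG on every
   non-abnormal predecessor edge of BB might look like this.  */
#if 0
  for (e = bb->pred; e; e = e->pred_next)
    if (! (e->flags & EDGE_ABNORMAL))
      insert_insn_on_edge (gen_move_insn (reg, const0_rtx), e);
  commit_edge_insertions ();
#endif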
/* Update the CFG for the instructions queued on edge E.  */

static void
commit_one_edge_insertion (e)
     edge e;
{
  rtx before = NULL_RTX, after = NULL_RTX, insns, tmp, last;
  basic_block bb;

  /* Pull the insns off the edge now since the edge might go away.  */
  insns = e->insns;
  e->insns = NULL_RTX;

  /* Figure out where to put these things.  If the destination has
     one predecessor, insert there.  Except for the exit block.  */
  if (e->dest->pred->pred_next == NULL
      && e->dest != EXIT_BLOCK_PTR)
    {
      bb = e->dest;

      /* Get the location correct wrt a code label, and "nice" wrt
	 a basic block note, and before everything else.  */
      tmp = bb->head;
      if (GET_CODE (tmp) == CODE_LABEL)
	tmp = NEXT_INSN (tmp);
      if (NOTE_INSN_BASIC_BLOCK_P (tmp))
	tmp = NEXT_INSN (tmp);
      if (tmp == bb->head)
	before = tmp;
      else
	after = PREV_INSN (tmp);
    }

  /* If the source has one successor and the edge is not abnormal,
     insert there.  Except for the entry block.  */
  else if ((e->flags & EDGE_ABNORMAL) == 0
	   && e->src->succ->succ_next == NULL
	   && e->src != ENTRY_BLOCK_PTR)
    {
      bb = e->src;
      /* It is possible to have a non-simple jump here.  Consider a target
	 where some forms of unconditional jumps clobber a register.  This
	 happens on the fr30 for example.

	 We know this block has a single successor, so we can just emit
	 the queued insns before the jump.  */
      if (GET_CODE (bb->end) == JUMP_INSN)
	{
	  before = bb->end;
	}
      else
	{
	  /* We'd better be fallthru, or we've lost track of what's what.  */
	  if ((e->flags & EDGE_FALLTHRU) == 0)
	    abort ();

	  after = bb->end;
	}
    }

  /* Otherwise we must split the edge.  */
  else
    {
      bb = split_edge (e);
      after = bb->end;
    }

  /* Now that we've found the spot, do the insertion.  */

  /* Set the new block number for these insns, if structure is allocated.  */
  if (basic_block_for_insn)
    {
      rtx i;
      for (i = insns; i != NULL_RTX; i = NEXT_INSN (i))
	set_block_for_insn (i, bb);
    }

  if (before)
    {
      emit_insns_before (insns, before);
      if (before == bb->head)
	bb->head = insns;

      last = prev_nonnote_insn (before);
    }
  else
    {
      last = emit_insns_after (insns, after);
      if (after == bb->end)
	bb->end = last;
    }

  if (returnjump_p (last))
    {
      /* ??? Remove all outgoing edges from BB and add one for EXIT.
	 This is not currently a problem because this only happens
	 for the (single) epilogue, which already has a fallthru edge
	 to EXIT.  */

      e = bb->succ;
      if (e->dest != EXIT_BLOCK_PTR
	  || e->succ_next != NULL
	  || (e->flags & EDGE_FALLTHRU) == 0)
	abort ();
      e->flags &= ~EDGE_FALLTHRU;

      emit_barrier_after (last);
      bb->end = last;

      if (before)
	flow_delete_insn (before);
    }
  else if (GET_CODE (last) == JUMP_INSN)
    abort ();
}
/* Update the CFG for all queued instructions.  */

void
commit_edge_insertions ()
{
  int i;
  basic_block bb;

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  i = -1;
  bb = ENTRY_BLOCK_PTR;
  while (1)
    {
      edge e, next;

      for (e = bb->succ; e; e = next)
	{
	  next = e->succ_next;
	  if (e->insns)
	    commit_one_edge_insertion (e);
	}

      if (++i >= n_basic_blocks)
	break;
      bb = BASIC_BLOCK (i);
    }
}
/* Delete all unreachable basic blocks.  */

static void
delete_unreachable_blocks ()
{
  basic_block *worklist, *tos;
  int deleted_handler;
  edge e;
  int i, n;

  n = n_basic_blocks;
  tos = worklist = (basic_block *) xmalloc (sizeof (basic_block) * n);

  /* Use basic_block->aux as a marker.  Clear them all.  */

  for (i = 0; i < n; ++i)
    BASIC_BLOCK (i)->aux = NULL;

  /* Add our starting points to the worklist.  Almost always there will
     be only one.  It isn't inconceivable that we might one day directly
     support Fortran alternate entry points.  */

  for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
    {
      *tos++ = e->dest;

      /* Mark the block with a handy non-null value.  */
      e->dest->aux = e;
    }

  /* Iterate: find everything reachable from what we've already seen.  */

  while (tos != worklist)
    {
      basic_block b = *--tos;

      for (e = b->succ; e; e = e->succ_next)
	if (!e->dest->aux)
	  {
	    *tos++ = e->dest;
	    e->dest->aux = e;
	  }
    }

  /* Delete all unreachable basic blocks.  Count down so that we don't
     interfere with the block renumbering that happens in flow_delete_block.  */

  deleted_handler = 0;

  for (i = n - 1; i >= 0; --i)
    {
      basic_block b = BASIC_BLOCK (i);

      if (b->aux != NULL)
	/* This block was found.  Tidy up the mark.  */
	b->aux = NULL;
      else
	deleted_handler |= flow_delete_block (b);
    }

  tidy_fallthru_edges ();

  /* If we deleted an exception handler, we may have EH region begin/end
     blocks to remove as well.  */
  if (deleted_handler)
    delete_eh_regions ();

  free (worklist);
}
/* Find EH regions for which there is no longer a handler, and delete them.  */

static void
delete_eh_regions ()
{
  rtx insn;

  update_rethrow_references ();

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == NOTE)
      {
	if ((NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
	    || (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
	  {
	    int num = NOTE_EH_HANDLER (insn);
	    /* A NULL handler indicates a region is no longer needed,
	       as long as its rethrow label isn't used.  */
	    if (get_first_handler (num) == NULL && ! rethrow_used (num))
	      {
		NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
		NOTE_SOURCE_FILE (insn) = 0;
	      }
	  }
      }
}
/* Return true if NOTE is not one of the ones that must be kept paired,
   so that we may simply delete them.  */

static int
can_delete_note_p (note)
     rtx note;
{
  return (NOTE_LINE_NUMBER (note) == NOTE_INSN_DELETED
	  || NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK);
}
1935 /* Unlink a chain of insns between START and FINISH, leaving notes
1936 that must be paired. */
1938 void
1939 flow_delete_insn_chain (start, finish)
1940 rtx start, finish;
1942 /* Unchain the insns one by one. It would be quicker to delete all
1943 of these with a single unchaining, rather than one at a time, but
1944 we need to keep the NOTEs. */
1946 rtx next;
1948 while (1)
1950 next = NEXT_INSN (start);
1951 if (GET_CODE (start) == NOTE && !can_delete_note_p (start))
1953 else if (GET_CODE (start) == CODE_LABEL
1954 && ! can_delete_label_p (start))
1956 const char *name = LABEL_NAME (start);
1957 PUT_CODE (start, NOTE);
1958 NOTE_LINE_NUMBER (start) = NOTE_INSN_DELETED_LABEL;
1959 NOTE_SOURCE_FILE (start) = name;
1961 else
1962 next = flow_delete_insn (start);
1964 if (start == finish)
1965 break;
1966 start = next;
1970 /* Delete the insns in a (non-live) block. We physically delete every
1971 non-deleted-note insn, and update the flow graph appropriately.
1973 Return nonzero if we deleted an exception handler. */
1975 /* ??? Preserving all such notes strikes me as wrong. It would be nice
1976 to post-process the stream to remove empty blocks, loops, ranges, etc. */
1979 flow_delete_block (b)
1980 basic_block b;
1982 int deleted_handler = 0;
1983 rtx insn, end, tmp;
1985 /* If the head of this block is a CODE_LABEL, then it might be the
1986 label for an exception handler which can't be reached.
1988 We need to remove the label from the exception_handler_label list
1989 and remove the associated NOTE_INSN_EH_REGION_BEG and
1990 NOTE_INSN_EH_REGION_END notes. */
1992 insn = b->head;
1994 never_reached_warning (insn);
1996 if (GET_CODE (insn) == CODE_LABEL)
1998 rtx x, *prev = &exception_handler_labels;
2000 for (x = exception_handler_labels; x; x = XEXP (x, 1))
2002 if (XEXP (x, 0) == insn)
2004 /* Found a match, splice this label out of the EH label list. */
2005 *prev = XEXP (x, 1);
2006 XEXP (x, 1) = NULL_RTX;
2007 XEXP (x, 0) = NULL_RTX;
2009 /* Remove the handler from all regions. */
2010 remove_handler (insn);
2011 deleted_handler = 1;
2012 break;
2014 prev = &XEXP (x, 1);
2018 /* Include any jump table following the basic block. */
2019 end = b->end;
2020 if (GET_CODE (end) == JUMP_INSN
2021 && (tmp = JUMP_LABEL (end)) != NULL_RTX
2022 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
2023 && GET_CODE (tmp) == JUMP_INSN
2024 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
2025 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
2026 end = tmp;
2028 /* Include any barrier that may follow the basic block. */
2029 tmp = next_nonnote_insn (end);
2030 if (tmp && GET_CODE (tmp) == BARRIER)
2031 end = tmp;
2033 /* Selectively delete the entire chain. */
2034 flow_delete_insn_chain (insn, end);
2036 /* Remove the edges into and out of this block. Note that there may
2037 indeed be edges in, if we are removing an unreachable loop. */
2039 edge e, next, *q;
2041 for (e = b->pred; e; e = next)
2043 for (q = &e->src->succ; *q != e; q = &(*q)->succ_next)
2044 continue;
2045 *q = e->succ_next;
2046 next = e->pred_next;
2047 n_edges--;
2048 free (e);
2050 for (e = b->succ; e; e = next)
2052 for (q = &e->dest->pred; *q != e; q = &(*q)->pred_next)
2053 continue;
2054 *q = e->pred_next;
2055 next = e->succ_next;
2056 n_edges--;
2057 free (e);
2060 b->pred = NULL;
2061 b->succ = NULL;
2064 /* Remove the basic block from the array, and compact behind it. */
2065 expunge_block (b);
2067 return deleted_handler;
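/* Aside: the edge-removal loops above rely on the pointer-to-pointer
   idiom for deleting from a singly linked list with no special case for
   the list head.  A self-contained sketch with a hypothetical element
   type, fenced off with #if 0:  */
#if 0
struct elt
{
  int value;
  struct elt *next;
};

/* Unlink VICTIM from the list rooted at *HEAD.  Q always points at the
   link field that will have to be rewritten, so the head pointer and
   every interior next pointer are handled by the same code.  */
static void
unlink_elt (struct elt **head, struct elt *victim)
{
  struct elt **q;

  for (q = head; *q != victim; q = &(*q)->next)
    continue;
  *q = victim->next;
}
#endif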
2070 /* Remove block B from the basic block array and compact behind it. */
2072 static void
2073 expunge_block (b)
2074 basic_block b;
2076 int i, n = n_basic_blocks;
2078 for (i = b->index; i + 1 < n; ++i)
2080 basic_block x = BASIC_BLOCK (i + 1);
2081 BASIC_BLOCK (i) = x;
2082 x->index = i;
2085 basic_block_info->num_elements--;
2086 n_basic_blocks--;
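/* Aside: a reduced model of the compaction above -- remove one element
   from a dense array while keeping each element's recorded index in
   sync.  struct indexed is illustrative, not a GCC type; fenced off
   with #if 0.  */
#if 0
struct indexed
{
  int index;
};

/* Remove ARRAY[AT] from a dense array of N elements, shifting the tail
   down and renumbering it.  Returns the new element count.  */
static int
compact_array (struct indexed **array, int n, int at)
{
  int i;

  for (i = at; i + 1 < n; ++i)
    {
      array[i] = array[i + 1];
      array[i]->index = i;
    }
  return n - 1;
}
#endif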
2089 /* Delete INSN by patching it out. Return the next insn. */
2092 flow_delete_insn (insn)
2093 rtx insn;
2095 rtx prev = PREV_INSN (insn);
2096 rtx next = NEXT_INSN (insn);
2097 rtx note;
2099 PREV_INSN (insn) = NULL_RTX;
2100 NEXT_INSN (insn) = NULL_RTX;
2101 INSN_DELETED_P (insn) = 1;
2103 if (prev)
2104 NEXT_INSN (prev) = next;
2105 if (next)
2106 PREV_INSN (next) = prev;
2107 else
2108 set_last_insn (prev);
2110 if (GET_CODE (insn) == CODE_LABEL)
2111 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
2113 /* If deleting a jump, decrement the use count of the label. Deleting
2114 the label itself should happen in the normal course of block merging. */
2115 if (GET_CODE (insn) == JUMP_INSN
2116 && JUMP_LABEL (insn)
2117 && GET_CODE (JUMP_LABEL (insn)) == CODE_LABEL)
2118 LABEL_NUSES (JUMP_LABEL (insn))--;
2120 /* Also if deleting an insn that references a label. */
2121 else if ((note = find_reg_note (insn, REG_LABEL, NULL_RTX)) != NULL_RTX
2122 && GET_CODE (XEXP (note, 0)) == CODE_LABEL)
2123 LABEL_NUSES (XEXP (note, 0))--;
2125 return next;
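/* Aside: the PREV_INSN/NEXT_INSN patching above is ordinary
   doubly-linked-list deletion.  A self-contained sketch with a
   hypothetical node type; as in flow_delete_insn, only the list's tail
   pointer needs fixing up when the last node goes away.  Fenced off
   with #if 0.  */
#if 0
struct chain
{
  struct chain *prev, *next;
};

/* Splice NODE out of its chain.  *LAST is the list tail, updated when
   the final node is removed.  Returns the node that followed NODE.  */
static struct chain *
splice_out (struct chain *node, struct chain **last)
{
  struct chain *prev = node->prev;
  struct chain *next = node->next;

  node->prev = node->next = NULL;

  if (prev)
    prev->next = next;
  if (next)
    next->prev = prev;
  else
    *last = prev;

  return next;
}
#endif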
2128 /* True if a given label can be deleted. */
2130 static int
2131 can_delete_label_p (label)
2132 rtx label;
2134 rtx x;
2136 if (LABEL_PRESERVE_P (label))
2137 return 0;
2139 for (x = forced_labels; x; x = XEXP (x, 1))
2140 if (label == XEXP (x, 0))
2141 return 0;
2142 for (x = label_value_list; x; x = XEXP (x, 1))
2143 if (label == XEXP (x, 0))
2144 return 0;
2145 for (x = exception_handler_labels; x; x = XEXP (x, 1))
2146 if (label == XEXP (x, 0))
2147 return 0;
2149 /* User declared labels must be preserved. */
2150 if (LABEL_NAME (label) != 0)
2151 return 0;
2153 return 1;
2156 static int
2157 tail_recursion_label_p (label)
2158 rtx label;
2160 rtx x;
2162 for (x = tail_recursion_label_list; x; x = XEXP (x, 1))
2163 if (label == XEXP (x, 0))
2164 return 1;
2166 return 0;
2169 /* Blocks A and B are to be merged into a single block A. The insns
2170 are already contiguous, hence `nomove'. */
2172 void
2173 merge_blocks_nomove (a, b)
2174 basic_block a, b;
2176 edge e;
2177 rtx b_head, b_end, a_end;
2178 rtx del_first = NULL_RTX, del_last = NULL_RTX;
2179 int b_empty = 0;
2181 /* If there was a CODE_LABEL beginning B, delete it. */
2182 b_head = b->head;
2183 b_end = b->end;
2184 if (GET_CODE (b_head) == CODE_LABEL)
2186 /* Detect basic blocks with nothing but a label. This can happen
2187 in particular at the end of a function. */
2188 if (b_head == b_end)
2189 b_empty = 1;
2190 del_first = del_last = b_head;
2191 b_head = NEXT_INSN (b_head);
2194 /* Delete the basic block note. */
2195 if (NOTE_INSN_BASIC_BLOCK_P (b_head))
2197 if (b_head == b_end)
2198 b_empty = 1;
2199 if (! del_last)
2200 del_first = b_head;
2201 del_last = b_head;
2202 b_head = NEXT_INSN (b_head);
2205 /* If there was a jump out of A, delete it. */
2206 a_end = a->end;
2207 if (GET_CODE (a_end) == JUMP_INSN)
2209 rtx prev;
2211 for (prev = PREV_INSN (a_end); ; prev = PREV_INSN (prev))
2212 if (GET_CODE (prev) != NOTE
2213 || NOTE_LINE_NUMBER (prev) == NOTE_INSN_BASIC_BLOCK
2214 || prev == a->head)
2215 break;
2217 del_first = a_end;
2219 #ifdef HAVE_cc0
2220 /* If this was a conditional jump, we need to also delete
2221 the insn that set cc0. */
2222 if (prev && sets_cc0_p (prev))
2224 rtx tmp = prev;
2225 prev = prev_nonnote_insn (prev);
2226 if (!prev)
2227 prev = a->head;
2228 del_first = tmp;
2230 #endif
2232 a_end = prev;
2234 else if (GET_CODE (NEXT_INSN (a_end)) == BARRIER)
2235 del_first = NEXT_INSN (a_end);
2237 /* Delete everything marked above as well as crap that might be
2238 hanging out between the two blocks. */
2239 flow_delete_insn_chain (del_first, del_last);
2241 /* Normally there should only be one successor of A and that is B, but
2242 partway through the merge of blocks for conditional_execution we'll
2243 be merging a TEST block with THEN and ELSE successors. Free the
2244 whole lot of them and hope the caller knows what they're doing. */
2245 while (a->succ)
2246 remove_edge (a->succ);
2248 /* Adjust the edges out of B for the new owner. */
2249 for (e = b->succ; e; e = e->succ_next)
2250 e->src = a;
2251 a->succ = b->succ;
2253 /* B hasn't quite yet ceased to exist. Attempt to prevent mishap. */
2254 b->pred = b->succ = NULL;
2256 /* Reassociate the insns of B with A. */
2257 if (!b_empty)
2259 if (basic_block_for_insn)
2261 BLOCK_FOR_INSN (b_head) = a;
2262 while (b_head != b_end)
2264 b_head = NEXT_INSN (b_head);
2265 BLOCK_FOR_INSN (b_head) = a;
2268 a_end = b_end;
2270 a->end = a_end;
2272 expunge_block (b);
2275 /* Blocks A and B are to be merged into a single block. A has no incoming
2276 fallthru edge, so it can be moved before B without adding or modifying
2277 any jumps (aside from the jump from A to B). */
2279 static int
2280 merge_blocks_move_predecessor_nojumps (a, b)
2281 basic_block a, b;
2283 rtx start, end, barrier;
2284 int index;
2286 start = a->head;
2287 end = a->end;
2289 barrier = next_nonnote_insn (end);
2290 if (GET_CODE (barrier) != BARRIER)
2291 abort ();
2292 flow_delete_insn (barrier);
2294 /* Move block and loop notes out of the chain so that we do not
2295 disturb their order.
2297 ??? A better solution would be to squeeze out all the non-nested notes
2298 and adjust the block trees appropriately. Even better would be to have
2299 a tighter connection between block trees and rtl so that this is not
2300 necessary. */
2301 start = squeeze_notes (start, end);
2303 /* Scramble the insn chain. */
2304 if (end != PREV_INSN (b->head))
2305 reorder_insns (start, end, PREV_INSN (b->head));
2307 if (rtl_dump_file)
2309 fprintf (rtl_dump_file, "Moved block %d before %d and merged.\n",
2310 a->index, b->index);
2313 /* Swap the records for the two blocks around. Although we are deleting B,
2314 A is now where B was and we want to compact the BB array from where
2315 A used to be. */
2316 BASIC_BLOCK (a->index) = b;
2317 BASIC_BLOCK (b->index) = a;
2318 index = a->index;
2319 a->index = b->index;
2320 b->index = index;
2322 /* Now blocks A and B are contiguous. Merge them. */
2323 merge_blocks_nomove (a, b);
2325 return 1;
2328 /* Blocks A and B are to be merged into a single block. B has no outgoing
2329 fallthru edge, so it can be moved after A without adding or modifying
2330 any jumps (aside from the jump from A to B). */
2332 static int
2333 merge_blocks_move_successor_nojumps (a, b)
2334 basic_block a, b;
2336 rtx start, end, barrier;
2338 start = b->head;
2339 end = b->end;
2340 barrier = NEXT_INSN (end);
2342 /* Recognize a jump table following block B. */
2343 if (GET_CODE (barrier) == CODE_LABEL
2344 && NEXT_INSN (barrier)
2345 && GET_CODE (NEXT_INSN (barrier)) == JUMP_INSN
2346 && (GET_CODE (PATTERN (NEXT_INSN (barrier))) == ADDR_VEC
2347 || GET_CODE (PATTERN (NEXT_INSN (barrier))) == ADDR_DIFF_VEC))
2349 end = NEXT_INSN (barrier);
2350 barrier = NEXT_INSN (end);
2353 /* There had better have been a barrier there. Delete it. */
2354 if (GET_CODE (barrier) != BARRIER)
2355 abort ();
2356 flow_delete_insn (barrier);
2358 /* Move block and loop notes out of the chain so that we do not
2359 disturb their order.
2361 ??? A better solution would be to squeeze out all the non-nested notes
2362 and adjust the block trees appropriately. Even better would be to have
2363 a tighter connection between block trees and rtl so that this is not
2364 necessary. */
2365 start = squeeze_notes (start, end);
2367 /* Scramble the insn chain. */
2368 reorder_insns (start, end, a->end);
2370 /* Now blocks A and B are contiguous. Merge them. */
2371 merge_blocks_nomove (a, b);
2373 if (rtl_dump_file)
2375 fprintf (rtl_dump_file, "Moved block %d after %d and merged.\n",
2376 b->index, a->index);
2379 return 1;
2382 /* Attempt to merge basic blocks that are potentially non-adjacent.
2383 Return true iff the attempt succeeded. */
2385 static int
2386 merge_blocks (e, b, c)
2387 edge e;
2388 basic_block b, c;
2390 /* If C has a tail recursion label, do not merge. There is no
2391 edge recorded from the call_placeholder back to this label, as
2392 that would make optimize_sibling_and_tail_recursive_calls more
2393 complex for no gain. */
2394 if (GET_CODE (c->head) == CODE_LABEL
2395 && tail_recursion_label_p (c->head))
2396 return 0;
2398 /* If B has a fallthru edge to C, no need to move anything. */
2399 if (e->flags & EDGE_FALLTHRU)
2401 merge_blocks_nomove (b, c);
2403 if (rtl_dump_file)
2405 fprintf (rtl_dump_file, "Merged %d and %d without moving.\n",
2406 b->index, c->index);
2409 return 1;
2411 else
2413 edge tmp_edge;
2414 basic_block d;
2415 int c_has_outgoing_fallthru;
2416 int b_has_incoming_fallthru;
2418 /* We must make sure to not munge nesting of exception regions,
2419 lexical blocks, and loop notes.
2421 The first is taken care of by requiring that the active eh
2422 region at the end of one block always matches the active eh
2423 region at the beginning of the next block.
2425 The latter two are taken care of by squeezing out all the notes. */
2427 /* ??? A throw/catch edge (or any abnormal edge) should be rarely
2428 executed and we may want to treat blocks which have two out
2429 edges, one normal, one abnormal as only having one edge for
2430 block merging purposes. */
2432 for (tmp_edge = c->succ; tmp_edge; tmp_edge = tmp_edge->succ_next)
2433 if (tmp_edge->flags & EDGE_FALLTHRU)
2434 break;
2435 c_has_outgoing_fallthru = (tmp_edge != NULL);
2437 for (tmp_edge = b->pred; tmp_edge; tmp_edge = tmp_edge->pred_next)
2438 if (tmp_edge->flags & EDGE_FALLTHRU)
2439 break;
2440 b_has_incoming_fallthru = (tmp_edge != NULL);
2442 /* If B does not have an incoming fallthru, and the exception regions
2443 match, then it can be moved immediately before C without introducing
2444 or modifying jumps.
2446 C cannot be the first block, so we do not have to worry about
2447 accessing a non-existent block. */
2448 d = BASIC_BLOCK (c->index - 1);
2449 if (! b_has_incoming_fallthru
2450 && d->eh_end == b->eh_beg
2451 && b->eh_end == c->eh_beg)
2452 return merge_blocks_move_predecessor_nojumps (b, c);
2454 /* Otherwise, we're going to try to move C after B. Make sure the
2455 exception regions match.
2457 If B is the last basic block, then we must not try to access the
2458 block structure for block B + 1. Luckily in that case we do not
2459 need to worry about matching exception regions. */
2460 d = (b->index + 1 < n_basic_blocks ? BASIC_BLOCK (b->index + 1) : NULL);
2461 if (b->eh_end == c->eh_beg
2462 && (d == NULL || c->eh_end == d->eh_beg))
2464 /* If C does not have an outgoing fallthru, then it can be moved
2465 immediately after B without introducing or modifying jumps. */
2466 if (! c_has_outgoing_fallthru)
2467 return merge_blocks_move_successor_nojumps (b, c);
2469 /* Otherwise, we'll need to insert an extra jump, and possibly
2470 a new block to contain it. */
2471 /* ??? Not implemented yet. */
2474 return 0;
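/* Aside: stripped of the RTL details, merge_blocks chooses between
   three strategies.  The sketch below compresses the per-direction
   EH-region tests into a single flag per direction, so its predicates
   are illustrative stand-ins for the exact conditions above.  Fenced
   off with #if 0.  */
#if 0
enum merge_strategy
{
  MERGE_NOMOVE,			/* Blocks already contiguous.  */
  MERGE_MOVE_B_BEFORE_C,	/* Move the predecessor block.  */
  MERGE_MOVE_C_AFTER_B,		/* Move the successor block.  */
  MERGE_NONE			/* Would need a new jump; unimplemented.  */
};

static enum merge_strategy
choose_merge (int edge_is_fallthru, int b_has_incoming_fallthru,
              int c_has_outgoing_fallthru, int eh_regions_match_before,
              int eh_regions_match_after)
{
  if (edge_is_fallthru)
    return MERGE_NOMOVE;
  if (! b_has_incoming_fallthru && eh_regions_match_before)
    return MERGE_MOVE_B_BEFORE_C;
  if (! c_has_outgoing_fallthru && eh_regions_match_after)
    return MERGE_MOVE_C_AFTER_B;
  return MERGE_NONE;
}
#endif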
2478 /* Top level driver for merge_blocks. */
2480 static void
2481 try_merge_blocks ()
2483 int i;
2485 /* Attempt to merge blocks as made possible by edge removal. If a block
2486 has only one successor, and the successor has only one predecessor,
2487 they may be combined. */
2489 for (i = 0; i < n_basic_blocks;)
2491 basic_block c, b = BASIC_BLOCK (i);
2492 edge s;
2494 /* A loop because chains of blocks might be combinable. */
2495 while ((s = b->succ) != NULL
2496 && s->succ_next == NULL
2497 && (s->flags & EDGE_EH) == 0
2498 && (c = s->dest) != EXIT_BLOCK_PTR
2499 && c->pred->pred_next == NULL
2500 /* If the jump insn has side effects, we can't kill the edge. */
2501 && (GET_CODE (b->end) != JUMP_INSN
2502 || onlyjump_p (b->end))
2503 && merge_blocks (s, b, c))
2504 continue;
2506 /* Don't get confused by the index shift caused by deleting blocks. */
2507 i = b->index + 1;
2511 /* The given edge should potentially be a fallthru edge. If that is in
2512 fact true, delete the jump and barriers that are in the way. */
2514 void
2515 tidy_fallthru_edge (e, b, c)
2516 edge e;
2517 basic_block b, c;
2519 rtx q;
2521 /* ??? In a late-running flow pass, other folks may have deleted basic
2522 blocks by nopping out blocks, leaving multiple BARRIERs between here
2523 and the target label. They ought to be chastised and fixed.
2525 We can also wind up with a sequence of undeletable labels between
2526 one block and the next.
2528 So search through a sequence of barriers, labels, and notes for
2529 the head of block C and assert that we really do fall through. */
2531 if (next_real_insn (b->end) != next_real_insn (PREV_INSN (c->head)))
2532 return;
2534 /* Remove what will soon cease being the jump insn from the source block.
2535 If block B consisted only of this single jump, turn it into a deleted
2536 note. */
2537 q = b->end;
2538 if (GET_CODE (q) == JUMP_INSN
2539 && onlyjump_p (q)
2540 && (any_uncondjump_p (q)
2541 || (b->succ == e && e->succ_next == NULL)))
2543 #ifdef HAVE_cc0
2544 /* If this was a conditional jump, we need to also delete
2545 the insn that set cc0. */
2546 if (any_condjump_p (q) && sets_cc0_p (PREV_INSN (q)))
2547 q = PREV_INSN (q);
2548 #endif
2550 if (b->head == q)
2552 PUT_CODE (q, NOTE);
2553 NOTE_LINE_NUMBER (q) = NOTE_INSN_DELETED;
2554 NOTE_SOURCE_FILE (q) = 0;
2556 else
2557 q = PREV_INSN (q);
2559 b->end = q;
2562 /* Selectively unlink the sequence. */
2563 if (q != PREV_INSN (c->head))
2564 flow_delete_insn_chain (NEXT_INSN (q), PREV_INSN (c->head));
2566 e->flags |= EDGE_FALLTHRU;
2569 /* Fix up edges that now fall through, or rather should now fall through
2570 but previously required a jump around now deleted blocks. Simplify
2571 the search by only examining blocks numerically adjacent, since this
2572 is how find_basic_blocks created them. */
2574 static void
2575 tidy_fallthru_edges ()
2577 int i;
2579 for (i = 1; i < n_basic_blocks; ++i)
2581 basic_block b = BASIC_BLOCK (i - 1);
2582 basic_block c = BASIC_BLOCK (i);
2583 edge s;
2585 /* We care about simple conditional or unconditional jumps with
2586 a single successor.
2588 If we had a conditional branch to the next instruction when
2589 find_basic_blocks was called, then there will only be one
2590 out edge for the block which ended with the conditional
2591 branch (since we do not create duplicate edges).
2593 Furthermore, the edge will be marked as a fallthru because we
2594 merge the flags for the duplicate edges. So we do not want to
2595 check that the edge is not a FALLTHRU edge. */
2596 if ((s = b->succ) != NULL
2597 && s->succ_next == NULL
2598 && s->dest == c
2599 /* If the jump insn has side effects, we can't tidy the edge. */
2600 && (GET_CODE (b->end) != JUMP_INSN
2601 || onlyjump_p (b->end)))
2602 tidy_fallthru_edge (s, b, c);
2606 /* Perform data flow analysis.
2607 F is the first insn of the function; FLAGS is a set of PROP_* flags
2608 to be used in accumulating flow info. */
2610 void
2611 life_analysis (f, file, flags)
2612 rtx f;
2613 FILE *file;
2614 int flags;
2616 #ifdef ELIMINABLE_REGS
2617 register int i;
2618 static struct {int from, to; } eliminables[] = ELIMINABLE_REGS;
2619 #endif
2621 /* Record which registers will be eliminated. We use this in
2622 mark_used_regs. */
2624 CLEAR_HARD_REG_SET (elim_reg_set);
2626 #ifdef ELIMINABLE_REGS
2627 for (i = 0; i < (int) (sizeof eliminables / sizeof eliminables[0]); i++)
2628 SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
2629 #else
2630 SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
2631 #endif
2633 if (! optimize)
2634 flags &= ~(PROP_LOG_LINKS | PROP_AUTOINC);
2636 /* The post-reload life analysis has (on a global basis) the same
2637 registers live as were computed by reload itself; otherwise
2638 elimination offsets and such may be incorrect.
2640 Reload will mark some registers as live even though they do not
2641 appear in the rtl.
2643 We don't want to create new auto-incs after reload, since they
2644 are unlikely to be useful and can cause problems with shared
2645 stack slots. */
2646 if (reload_completed)
2647 flags &= ~(PROP_REG_INFO | PROP_AUTOINC);
2649 /* We want alias analysis information for local dead store elimination. */
2650 if (optimize && (flags & PROP_SCAN_DEAD_CODE))
2651 init_alias_analysis ();
2653 /* Always remove no-op moves. Do this before other processing so
2654 that we don't have to keep re-scanning them. */
2655 delete_noop_moves (f);
2657 /* Some targets can emit simpler epilogues if they know that sp was
2658 not ever modified during the function. After reload, of course,
2659 we've already emitted the epilogue so there's no sense searching. */
2660 if (! reload_completed)
2661 notice_stack_pointer_modification (f);
2663 /* Allocate and zero out data structures that will record the
2664 data from lifetime analysis. */
2665 allocate_reg_life_data ();
2666 allocate_bb_life_data ();
2668 /* Find the set of registers live on function exit. */
2669 mark_regs_live_at_end (EXIT_BLOCK_PTR->global_live_at_start);
2671 /* "Update" life info from zero. It'd be nice to begin the
2672 relaxation with just the exit and noreturn blocks, but that set
2673 is not immediately handy. */
2675 if (flags & PROP_REG_INFO)
2676 memset (regs_ever_live, 0, sizeof (regs_ever_live));
2677 update_life_info (NULL, UPDATE_LIFE_GLOBAL, flags);
2679 /* Clean up. */
2680 if (optimize && (flags & PROP_SCAN_DEAD_CODE))
2681 end_alias_analysis ();
2683 if (file)
2684 dump_flow_info (file);
2686 free_basic_block_vars (1);
2689 /* A subroutine of verify_wide_reg, called through for_each_rtx.
2690 Search for REGNO. If found, abort if it is not wider than word_mode. */
2692 static int
2693 verify_wide_reg_1 (px, pregno)
2694 rtx *px;
2695 void *pregno;
2697 rtx x = *px;
2698 unsigned int regno = *(int *) pregno;
2700 if (GET_CODE (x) == REG && REGNO (x) == regno)
2702 if (GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD)
2703 abort ();
2704 return 1;
2706 return 0;
2709 /* A subroutine of verify_local_live_at_start. Search through insns
2710 between HEAD and END looking for register REGNO. */
2712 static void
2713 verify_wide_reg (regno, head, end)
2714 int regno;
2715 rtx head, end;
2717 while (1)
2719 if (INSN_P (head)
2720 && for_each_rtx (&PATTERN (head), verify_wide_reg_1, &regno))
2721 return;
2722 if (head == end)
2723 break;
2724 head = NEXT_INSN (head);
2727 /* We didn't find the register at all. Something's way screwy. */
2728 abort ();
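/* Aside: verify_wide_reg/verify_wide_reg_1 follow the for_each_rtx
   convention -- a walker applies a callback to every subexpression and
   stops as soon as the callback returns nonzero.  A self-contained
   sketch of that convention on a hypothetical binary expression tree,
   fenced off with #if 0:  */
#if 0
struct expr
{
  int op;
  struct expr *kid[2];
};

/* Apply FN to X and every subexpression of X, passing DATA through.
   Stop and return 1 as soon as FN returns nonzero; return 0 if the
   whole tree was walked without a hit.  */
static int
walk_expr (struct expr *x, int (*fn) (struct expr *, void *), void *data)
{
  int i;

  if (x == NULL)
    return 0;
  if (fn (x, data))
    return 1;
  for (i = 0; i < 2; ++i)
    if (walk_expr (x->kid[i], fn, data))
      return 1;
  return 0;
}
#endif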
2731 /* A subroutine of update_life_info. Verify that there are no untoward
2732 changes in live_at_start during a local update. */
2734 static void
2735 verify_local_live_at_start (new_live_at_start, bb)
2736 regset new_live_at_start;
2737 basic_block bb;
2739 if (reload_completed)
2741 /* After reload, there are no pseudos, nor subregs of multi-word
2742 registers. The regsets should exactly match. */
2743 if (! REG_SET_EQUAL_P (new_live_at_start, bb->global_live_at_start))
2744 abort ();
2746 else
2748 int i;
2750 /* Find the set of changed registers. */
2751 XOR_REG_SET (new_live_at_start, bb->global_live_at_start);
2753 EXECUTE_IF_SET_IN_REG_SET (new_live_at_start, 0, i,
2755 /* No registers should die. */
2756 if (REGNO_REG_SET_P (bb->global_live_at_start, i))
2757 abort ();
2758 /* Verify that the now-live register is wider than word_mode. */
2759 verify_wide_reg (i, bb->head, bb->end);
2764 /* Updates life information starting with the basic blocks set in BLOCKS.
2765 If BLOCKS is null, consider it to be the universal set.
2767 If EXTENT is UPDATE_LIFE_LOCAL, such as after splitting or peepholeing,
2768 we are only expecting local modifications to basic blocks. If we find
2769 extra registers live at the beginning of a block, then we either killed
2770 useful data, or we have a broken split that wants data not provided.
2771 If we find registers removed from live_at_start, that means we have
2772 a broken peephole that is killing a register it shouldn't.
2774 ??? This is not true in one situation -- when a pre-reload splitter
2775 generates subregs of a multi-word pseudo, current life analysis will
2776 lose the kill. So we _can_ have a pseudo go live. How irritating.
2778 Including PROP_REG_INFO does not properly refresh regs_ever_live
2779 unless the caller resets it to zero. */
2781 void
2782 update_life_info (blocks, extent, prop_flags)
2783 sbitmap blocks;
2784 enum update_life_extent extent;
2785 int prop_flags;
2787 regset tmp;
2788 regset_head tmp_head;
2789 int i;
2791 tmp = INITIALIZE_REG_SET (tmp_head);
2793 /* For a global update, we go through the relaxation process again. */
2794 if (extent != UPDATE_LIFE_LOCAL)
2796 calculate_global_regs_live (blocks, blocks,
2797 prop_flags & PROP_SCAN_DEAD_CODE);
2799 /* If asked, remove notes from the blocks we'll update. */
2800 if (extent == UPDATE_LIFE_GLOBAL_RM_NOTES)
2801 count_or_remove_death_notes (blocks, 1);
2804 if (blocks)
2806 EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
2808 basic_block bb = BASIC_BLOCK (i);
2810 COPY_REG_SET (tmp, bb->global_live_at_end);
2811 propagate_block (bb, tmp, (regset) NULL, prop_flags);
2813 if (extent == UPDATE_LIFE_LOCAL)
2814 verify_local_live_at_start (tmp, bb);
2817 else
2819 for (i = n_basic_blocks - 1; i >= 0; --i)
2821 basic_block bb = BASIC_BLOCK (i);
2823 COPY_REG_SET (tmp, bb->global_live_at_end);
2824 propagate_block (bb, tmp, (regset) NULL, prop_flags);
2826 if (extent == UPDATE_LIFE_LOCAL)
2827 verify_local_live_at_start (tmp, bb);
2831 FREE_REG_SET (tmp);
2833 if (prop_flags & PROP_REG_INFO)
2835 /* The only pseudos that are live at the beginning of the function
2836 are those that were not set anywhere in the function. local-alloc
2837 doesn't know how to handle these correctly, so mark them as not
2838 local to any one basic block. */
2839 EXECUTE_IF_SET_IN_REG_SET (ENTRY_BLOCK_PTR->global_live_at_end,
2840 FIRST_PSEUDO_REGISTER, i,
2841 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
2843 /* We have a problem with any pseudoreg that lives across the setjmp.
2844 ANSI says that if a user variable does not change in value between
2845 the setjmp and the longjmp, then the longjmp preserves it. This
2846 includes longjmp from a place where the pseudo appears dead.
2847 (In principle, the value still exists if it is in scope.)
2848 If the pseudo goes in a hard reg, some other value may occupy
2849 that hard reg where this pseudo is dead, thus clobbering the pseudo.
2850 Conclusion: such a pseudo must not go in a hard reg. */
2851 EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
2852 FIRST_PSEUDO_REGISTER, i,
2854 if (regno_reg_rtx[i] != 0)
2856 REG_LIVE_LENGTH (i) = -1;
2857 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
2863 /* Free the variables allocated by find_basic_blocks.
2865 KEEP_HEAD_END_P is non-zero if basic_block_info is not to be freed. */
2867 void
2868 free_basic_block_vars (keep_head_end_p)
2869 int keep_head_end_p;
2871 if (basic_block_for_insn)
2873 VARRAY_FREE (basic_block_for_insn);
2874 basic_block_for_insn = NULL;
2877 if (! keep_head_end_p)
2879 clear_edges ();
2880 VARRAY_FREE (basic_block_info);
2881 n_basic_blocks = 0;
2883 ENTRY_BLOCK_PTR->aux = NULL;
2884 ENTRY_BLOCK_PTR->global_live_at_end = NULL;
2885 EXIT_BLOCK_PTR->aux = NULL;
2886 EXIT_BLOCK_PTR->global_live_at_start = NULL;
2890 /* Return nonzero if the destination of SET equals the source. */
2892 static int
2893 set_noop_p (set)
2894 rtx set;
2896 rtx src = SET_SRC (set);
2897 rtx dst = SET_DEST (set);
2899 if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
2901 if (SUBREG_WORD (src) != SUBREG_WORD (dst))
2902 return 0;
2903 src = SUBREG_REG (src);
2904 dst = SUBREG_REG (dst);
2907 return (GET_CODE (src) == REG && GET_CODE (dst) == REG
2908 && REGNO (src) == REGNO (dst));
2911 /* Return nonzero if an insn consists only of SETs, each of which only sets a
2912 value to itself. */
2914 static int
2915 noop_move_p (insn)
2916 rtx insn;
2918 rtx pat = PATTERN (insn);
2920 /* Insns carrying these notes are useful later on. */
2921 if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
2922 return 0;
2924 if (GET_CODE (pat) == SET && set_noop_p (pat))
2925 return 1;
2927 if (GET_CODE (pat) == PARALLEL)
2929 int i;
2930 /* If nothing but SETs of registers to themselves,
2931 this insn can also be deleted. */
2932 for (i = 0; i < XVECLEN (pat, 0); i++)
2934 rtx tem = XVECEXP (pat, 0, i);
2936 if (GET_CODE (tem) == USE
2937 || GET_CODE (tem) == CLOBBER)
2938 continue;
2940 if (GET_CODE (tem) != SET || ! set_noop_p (tem))
2941 return 0;
2944 return 1;
2946 return 0;
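/* Aside: a reduced model of the two tests above -- a move is a no-op
   when source and destination name the same register, and a parallel
   group is a no-op when every member is either such a move or an
   ignorable USE/CLOBBER.  The mini-IR below is hypothetical; fenced
   off with #if 0.  */
#if 0
enum mini_kind { MINI_SET, MINI_USE, MINI_CLOBBER };

struct mini_elt
{
  enum mini_kind kind;
  int src_reg, dst_reg;
};

/* Return nonzero if every SET in GROUP (of N elements) copies a
   register to itself.  */
static int
group_is_noop (const struct mini_elt *group, int n)
{
  int i;

  for (i = 0; i < n; ++i)
    {
      if (group[i].kind != MINI_SET)
        continue;
      if (group[i].src_reg != group[i].dst_reg)
        return 0;
    }
  return 1;
}
#endif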
2949 /* Delete any insns that copy a register to itself. */
2951 static void
2952 delete_noop_moves (f)
2953 rtx f;
2955 rtx insn;
2956 for (insn = f; insn; insn = NEXT_INSN (insn))
2958 if (GET_CODE (insn) == INSN && noop_move_p (insn))
2960 PUT_CODE (insn, NOTE);
2961 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2962 NOTE_SOURCE_FILE (insn) = 0;
2967 /* Determine if the stack pointer is constant over the life of the function.
2968 Only useful before prologues have been emitted. */
2970 static void
2971 notice_stack_pointer_modification_1 (x, pat, data)
2972 rtx x;
2973 rtx pat ATTRIBUTE_UNUSED;
2974 void *data ATTRIBUTE_UNUSED;
2976 if (x == stack_pointer_rtx
2977 /* The stack pointer is only modified indirectly as the result
2978 of a push until later in flow. See the comments in rtl.texi
2979 regarding Embedded Side-Effects on Addresses. */
2980 || (GET_CODE (x) == MEM
2981 && (GET_CODE (XEXP (x, 0)) == PRE_DEC
2982 || GET_CODE (XEXP (x, 0)) == PRE_INC
2983 || GET_CODE (XEXP (x, 0)) == POST_DEC
2984 || GET_CODE (XEXP (x, 0)) == POST_INC)
2985 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx))
2986 current_function_sp_is_unchanging = 0;
2989 static void
2990 notice_stack_pointer_modification (f)
2991 rtx f;
2993 rtx insn;
2995 /* Assume that the stack pointer is unchanging if alloca hasn't
2996 been used. */
2997 current_function_sp_is_unchanging = !current_function_calls_alloca;
2998 if (! current_function_sp_is_unchanging)
2999 return;
3001 for (insn = f; insn; insn = NEXT_INSN (insn))
3003 if (INSN_P (insn))
3005 /* Check if insn modifies the stack pointer. */
3006 note_stores (PATTERN (insn), notice_stack_pointer_modification_1,
3007 NULL);
3008 if (! current_function_sp_is_unchanging)
3009 return;
3014 /* Mark a register in SET. Hard registers in large modes get all
3015 of their component registers set as well. */
3017 static void
3018 mark_reg (reg, xset)
3019 rtx reg;
3020 void *xset;
3022 regset set = (regset) xset;
3023 int regno = REGNO (reg);
3025 if (GET_MODE (reg) == BLKmode)
3026 abort ();
3028 SET_REGNO_REG_SET (set, regno);
3029 if (regno < FIRST_PSEUDO_REGISTER)
3031 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
3032 while (--n > 0)
3033 SET_REGNO_REG_SET (set, regno + n);
3037 /* Mark those regs which are needed at the end of the function as live
3038 at the end of the last basic block. */
3040 static void
3041 mark_regs_live_at_end (set)
3042 regset set;
3044 int i;
3046 /* If exiting needs the right stack value, consider the stack pointer
3047 live at the end of the function. */
3048 if ((HAVE_epilogue && reload_completed)
3049 || ! EXIT_IGNORE_STACK
3050 || (! FRAME_POINTER_REQUIRED
3051 && ! current_function_calls_alloca
3052 && flag_omit_frame_pointer)
3053 || current_function_sp_is_unchanging)
3055 SET_REGNO_REG_SET (set, STACK_POINTER_REGNUM);
3058 /* Mark the frame pointer if needed at the end of the function. If
3059 we end up eliminating it, it will be removed from the live list
3060 of each basic block by reload. */
3062 if (! reload_completed || frame_pointer_needed)
3064 SET_REGNO_REG_SET (set, FRAME_POINTER_REGNUM);
3065 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3066 /* If they are different, also mark the hard frame pointer as live. */
3067 if (! LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
3068 SET_REGNO_REG_SET (set, HARD_FRAME_POINTER_REGNUM);
3069 #endif
3072 #ifdef PIC_OFFSET_TABLE_REGNUM
3073 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
3074 /* Many architectures have a GP register even without flag_pic.
3075 Assume the pic register is not in use, or will be handled by
3076 other means, if it is not fixed. */
3077 if (fixed_regs[PIC_OFFSET_TABLE_REGNUM])
3078 SET_REGNO_REG_SET (set, PIC_OFFSET_TABLE_REGNUM);
3079 #endif
3080 #endif
3082 /* Mark all global registers, and all registers used by the epilogue
3083 as being live at the end of the function since they may be
3084 referenced by our caller. */
3085 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3086 if (global_regs[i] || EPILOGUE_USES (i))
3087 SET_REGNO_REG_SET (set, i);
3089 /* Mark all call-saved registers that we actually used. */
3090 if (HAVE_epilogue && reload_completed)
3092 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3093 if (regs_ever_live[i] && ! call_used_regs[i] && ! LOCAL_REGNO (i))
3094 SET_REGNO_REG_SET (set, i);
3097 /* Mark function return value. */
3098 diddle_return_value (mark_reg, set);
3101 /* Callback function for for_each_successor_phi. DATA is a regset.
3102 Sets the SRC_REGNO, the regno of the phi alternative for phi node
3103 INSN, in the regset. */
3105 static int
3106 set_phi_alternative_reg (insn, dest_regno, src_regno, data)
3107 rtx insn ATTRIBUTE_UNUSED;
3108 int dest_regno ATTRIBUTE_UNUSED;
3109 int src_regno;
3110 void *data;
3112 regset live = (regset) data;
3113 SET_REGNO_REG_SET (live, src_regno);
3114 return 0;
3117 /* Propagate global life info around the graph of basic blocks. Begin
3118 considering blocks with their corresponding bit set in BLOCKS_IN.
3119 If BLOCKS_IN is null, consider it the universal set.
3121 BLOCKS_OUT is set for every block that was changed. */
3123 static void
3124 calculate_global_regs_live (blocks_in, blocks_out, flags)
3125 sbitmap blocks_in, blocks_out;
3126 int flags;
3128 basic_block *queue, *qhead, *qtail, *qend;
3129 regset tmp, new_live_at_end;
3130 regset_head tmp_head;
3131 regset_head new_live_at_end_head;
3132 int i;
3134 tmp = INITIALIZE_REG_SET (tmp_head);
3135 new_live_at_end = INITIALIZE_REG_SET (new_live_at_end_head);
3137 /* Create a worklist. Allocate an extra slot for ENTRY_BLOCK, and one
3138 because the `head == tail' style test for an empty queue doesn't
3139 work with a full queue. */
3140 queue = (basic_block *) xmalloc ((n_basic_blocks + 2) * sizeof (*queue));
3141 qtail = queue;
3142 qhead = qend = queue + n_basic_blocks + 2;
3144 /* Clear out the garbage that might be hanging out in bb->aux. */
3145 for (i = n_basic_blocks - 1; i >= 0; --i)
3146 BASIC_BLOCK (i)->aux = NULL;
3148 /* Queue the blocks set in the initial mask. Do this in reverse block
3149 number order so that we are more likely for the first round to do
3150 useful work. We use AUX non-null to flag that the block is queued. */
3151 if (blocks_in)
3153 EXECUTE_IF_SET_IN_SBITMAP (blocks_in, 0, i,
3155 basic_block bb = BASIC_BLOCK (i);
3156 *--qhead = bb;
3157 bb->aux = bb;
3160 else
3162 for (i = 0; i < n_basic_blocks; ++i)
3164 basic_block bb = BASIC_BLOCK (i);
3165 *--qhead = bb;
3166 bb->aux = bb;
3170 if (blocks_out)
3171 sbitmap_zero (blocks_out);
3173 while (qhead != qtail)
3175 int rescan, changed;
3176 basic_block bb;
3177 edge e;
3179 bb = *qhead++;
3180 if (qhead == qend)
3181 qhead = queue;
3182 bb->aux = NULL;
3184 /* Begin by propagating live_at_start from the successor blocks. */
3185 CLEAR_REG_SET (new_live_at_end);
3186 for (e = bb->succ; e; e = e->succ_next)
3188 basic_block sb = e->dest;
3189 IOR_REG_SET (new_live_at_end, sb->global_live_at_start);
3192 /* Force the stack pointer to be live -- which might not already be
3193 the case for blocks within infinite loops. */
3194 SET_REGNO_REG_SET (new_live_at_end, STACK_POINTER_REGNUM);
3196 /* Similarly for the frame pointer before reload. Any reference
3197 to any pseudo before reload is a potential reference of the
3198 frame pointer. */
3199 if (! reload_completed)
3200 SET_REGNO_REG_SET (new_live_at_end, FRAME_POINTER_REGNUM);
3202 /* Regs used in phi nodes are not included in
3203 global_live_at_start, since they are live only along a
3204 particular edge. Set those regs that are live because of a
3205 phi node alternative corresponding to this particular block. */
3206 if (in_ssa_form)
3207 for_each_successor_phi (bb, &set_phi_alternative_reg,
3208 new_live_at_end);
3210 if (bb == ENTRY_BLOCK_PTR)
3212 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3213 continue;
3216 /* On our first pass through this block, we'll go ahead and continue.
3217 Recognize the first pass by a NULL local_set. On subsequent passes,
3218 we get to skip out early if live_at_end wouldn't have changed. */
3220 if (bb->local_set == NULL)
3222 bb->local_set = OBSTACK_ALLOC_REG_SET (function_obstack);
3223 rescan = 1;
3225 else
3227 /* If any bits were removed from live_at_end, we'll have to
3228 rescan the block. This wouldn't be necessary if we had
3229 precalculated local_live, however with PROP_SCAN_DEAD_CODE
3230 local_live is really dependent on live_at_end. */
3231 CLEAR_REG_SET (tmp);
3232 rescan = bitmap_operation (tmp, bb->global_live_at_end,
3233 new_live_at_end, BITMAP_AND_COMPL);
3235 if (! rescan)
3237 /* Find the set of changed bits. Take this opportunity
3238 to notice that this set is empty and early out. */
3239 CLEAR_REG_SET (tmp);
3240 changed = bitmap_operation (tmp, bb->global_live_at_end,
3241 new_live_at_end, BITMAP_XOR);
3242 if (! changed)
3243 continue;
3245 /* If any of the changed bits overlap with local_set,
3246 we'll have to rescan the block. Detect overlap by
3247 the AND with ~local_set turning off bits. */
3248 rescan = bitmap_operation (tmp, tmp, bb->local_set,
3249 BITMAP_AND_COMPL);
3253 /* Let our caller know that BB changed enough to require its
3254 death notes updated. */
3255 if (blocks_out)
3256 SET_BIT (blocks_out, bb->index);
3258 if (! rescan)
3260 /* Add to live_at_start the set of all registers in
3261 new_live_at_end that aren't in the old live_at_end. */
3263 bitmap_operation (tmp, new_live_at_end, bb->global_live_at_end,
3264 BITMAP_AND_COMPL);
3265 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3267 changed = bitmap_operation (bb->global_live_at_start,
3268 bb->global_live_at_start,
3269 tmp, BITMAP_IOR);
3270 if (! changed)
3271 continue;
3273 else
3275 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3277 /* Rescan the block insn by insn to turn (a copy of) live_at_end
3278 into live_at_start. */
3279 propagate_block (bb, new_live_at_end, bb->local_set, flags);
3281 /* If live_at_start didn't change, no need to go farther. */
3282 if (REG_SET_EQUAL_P (bb->global_live_at_start, new_live_at_end))
3283 continue;
3285 COPY_REG_SET (bb->global_live_at_start, new_live_at_end);
3288 /* Queue all predecessors of BB so that we may re-examine
3289 their live_at_end. */
3290 for (e = bb->pred; e; e = e->pred_next)
3292 basic_block pb = e->src;
3293 if (pb->aux == NULL)
3295 *qtail++ = pb;
3296 if (qtail == qend)
3297 qtail = queue;
3298 pb->aux = pb;
3303 FREE_REG_SET (tmp);
3304 FREE_REG_SET (new_live_at_end);
3306 if (blocks_out)
3308 EXECUTE_IF_SET_IN_SBITMAP (blocks_out, 0, i,
3310 basic_block bb = BASIC_BLOCK (i);
3311 FREE_REG_SET (bb->local_set);
3314 else
3316 for (i = n_basic_blocks - 1; i >= 0; --i)
3318 basic_block bb = BASIC_BLOCK (i);
3319 FREE_REG_SET (bb->local_set);
3323 free (queue);
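/* Aside: the worklist above is a circular buffer with one slot left
   unused so that `qhead == qtail' can only ever mean "empty", never
   "full".  A self-contained integer version of the same discipline
   (the capacity is an arbitrary illustrative value, and callers must
   never enqueue more than CAPACITY items); fenced off with #if 0.  */
#if 0
#define CAPACITY 64			/* Hypothetical capacity.  */
#define QSIZE (CAPACITY + 1)		/* One extra, wasted slot.  */

static int queue[QSIZE];
static int qhead, qtail;

static int
queue_empty (void)
{
  return qhead == qtail;
}

static void
enqueue (int x)
{
  queue[qtail] = x;
  if (++qtail == QSIZE)
    qtail = 0;
}

static int
dequeue (void)
{
  int x = queue[qhead];
  if (++qhead == QSIZE)
    qhead = 0;
  return x;
}
#endif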
3326 /* Subroutines of life analysis. */
3328 /* Allocate the permanent data structures that represent the results
3329 of life analysis. Not static since used also for stupid life analysis. */
3331 void
3332 allocate_bb_life_data ()
3334 register int i;
3336 for (i = 0; i < n_basic_blocks; i++)
3338 basic_block bb = BASIC_BLOCK (i);
3340 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
3341 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);
3344 ENTRY_BLOCK_PTR->global_live_at_end
3345 = OBSTACK_ALLOC_REG_SET (function_obstack);
3346 EXIT_BLOCK_PTR->global_live_at_start
3347 = OBSTACK_ALLOC_REG_SET (function_obstack);
3349 regs_live_at_setjmp = OBSTACK_ALLOC_REG_SET (function_obstack);
3352 void
3353 allocate_reg_life_data ()
3355 int i;
3357 max_regno = max_reg_num ();
3359 /* Recalculate the register space, in case it has grown. Old style
3360 vector oriented regsets would set regset_{size,bytes} here also. */
3361 allocate_reg_info (max_regno, FALSE, FALSE);
3363 /* Reset all the data we'll collect in propagate_block and its
3364 subroutines. */
3365 for (i = 0; i < max_regno; i++)
3367 REG_N_SETS (i) = 0;
3368 REG_N_REFS (i) = 0;
3369 REG_N_DEATHS (i) = 0;
3370 REG_N_CALLS_CROSSED (i) = 0;
3371 REG_LIVE_LENGTH (i) = 0;
3372 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
3376 /* Delete dead instructions for propagate_block. */
3378 static void
3379 propagate_block_delete_insn (bb, insn)
3380 basic_block bb;
3381 rtx insn;
3383 rtx inote = find_reg_note (insn, REG_LABEL, NULL_RTX);
3385 /* If the insn referred to a label, and that label was attached to
3386 an ADDR_VEC, it's safe to delete the ADDR_VEC. In fact, it's
3387 pretty much mandatory to delete it, because the ADDR_VEC may be
3388 referencing labels that no longer exist. */
3390 if (inote)
3392 rtx label = XEXP (inote, 0);
3393 rtx next;
3395 if (LABEL_NUSES (label) == 1
3396 && (next = next_nonnote_insn (label)) != NULL
3397 && GET_CODE (next) == JUMP_INSN
3398 && (GET_CODE (PATTERN (next)) == ADDR_VEC
3399 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
3401 rtx pat = PATTERN (next);
3402 int diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
3403 int len = XVECLEN (pat, diff_vec_p);
3404 int i;
3406 for (i = 0; i < len; i++)
3407 LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0))--;
3409 flow_delete_insn (next);
3413 if (bb->end == insn)
3414 bb->end = PREV_INSN (insn);
3415 flow_delete_insn (insn);
3418 /* Delete dead libcalls for propagate_block. Return the insn
3419 before the libcall. */
3421 static rtx
3422 propagate_block_delete_libcall (bb, insn, note)
3423 basic_block bb;
3424 rtx insn, note;
3426 rtx first = XEXP (note, 0);
3427 rtx before = PREV_INSN (first);
3429 if (insn == bb->end)
3430 bb->end = before;
3432 flow_delete_insn_chain (first, insn);
3433 return before;
3436 /* Update the life-status of regs for one insn. Return the previous insn. */
3439 propagate_one_insn (pbi, insn)
3440 struct propagate_block_info *pbi;
3441 rtx insn;
3443 rtx prev = PREV_INSN (insn);
3444 int flags = pbi->flags;
3445 int insn_is_dead = 0;
3446 int libcall_is_dead = 0;
3447 rtx note;
3448 int i;
3450 if (! INSN_P (insn))
3451 return prev;
3453 note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
3454 if (flags & PROP_SCAN_DEAD_CODE)
3456 insn_is_dead = insn_dead_p (pbi, PATTERN (insn), 0,
3457 REG_NOTES (insn));
3458 libcall_is_dead = (insn_is_dead && note != 0
3459 && libcall_dead_p (pbi, note, insn));
3462 /* We almost certainly don't want to delete prologue or epilogue
3463 instructions. Warn about probable compiler lossage. */
3464 if (insn_is_dead
3465 && reload_completed
3466 && (((HAVE_epilogue || HAVE_prologue)
3467 && prologue_epilogue_contains (insn))
3468 || (HAVE_sibcall_epilogue
3469 && sibcall_epilogue_contains (insn)))
3470 && find_reg_note (insn, REG_MAYBE_DEAD, NULL_RTX) == 0)
3472 if (flags & PROP_KILL_DEAD_CODE)
3474 warning ("ICE: would have deleted prologue/epilogue insn");
3475 if (!inhibit_warnings)
3476 debug_rtx (insn);
3478 libcall_is_dead = insn_is_dead = 0;
3481 /* If an instruction consists of just dead store(s) on final pass,
3482 delete it. */
3483 if ((flags & PROP_KILL_DEAD_CODE) && insn_is_dead)
3485 /* Record sets. Do this even for dead instructions, since they
3486 would have killed the values if they hadn't been deleted. */
3487 mark_set_regs (pbi, PATTERN (insn), insn);
3489 /* CC0 is now known to be dead. Either this insn used it,
3490 in which case it doesn't anymore, or clobbered it,
3491 so the next insn can't use it. */
3492 pbi->cc0_live = 0;
3494 if (libcall_is_dead)
3496 prev = propagate_block_delete_libcall (pbi->bb, insn, note);
3497 insn = NEXT_INSN (prev);
3499 else
3500 propagate_block_delete_insn (pbi->bb, insn);
3502 return prev;
3505 /* See if this is an increment or decrement that can be merged into
3506 a following memory address. */
3507 #ifdef AUTO_INC_DEC
3509 register rtx x = single_set (insn);
3511 /* Does this instruction increment or decrement a register? */
3512 if ((flags & PROP_AUTOINC)
3513 && x != 0
3514 && GET_CODE (SET_DEST (x)) == REG
3515 && (GET_CODE (SET_SRC (x)) == PLUS
3516 || GET_CODE (SET_SRC (x)) == MINUS)
3517 && XEXP (SET_SRC (x), 0) == SET_DEST (x)
3518 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
3519 /* Ok, look for a following memory ref we can combine with.
3520 If one is found, change the memory ref to a PRE_INC
3521 or PRE_DEC, cancel this insn, and return 1.
3522 Return 0 if nothing has been done. */
3523 && try_pre_increment_1 (pbi, insn))
3524 return prev;
3526 #endif /* AUTO_INC_DEC */
3528 CLEAR_REG_SET (pbi->new_set);
3530 /* If this is not the final pass, and this insn is copying the value of
3531 a library call and it's dead, don't scan the insns that perform the
3532 library call, so that the call's arguments are not marked live. */
3533 if (libcall_is_dead)
3535 /* Record the death of the dest reg. */
3536 mark_set_regs (pbi, PATTERN (insn), insn);
3538 insn = XEXP (note, 0);
3539 return PREV_INSN (insn);
3541 else if (GET_CODE (PATTERN (insn)) == SET
3542 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
3543 && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
3544 && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx
3545 && GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == CONST_INT)
3546 /* We have an insn to pop a constant amount off the stack.
3547 (Such insns use PLUS regardless of the direction of the stack,
3548 and any insn to adjust the stack by a constant is always a pop.)
3549 These insns, if not dead stores, have no effect on life. */
3551 else
3553 /* Any regs live at the time of a call instruction must not go
3554 in a register clobbered by calls. Find all regs now live and
3555 record this for them. */
3557 if (GET_CODE (insn) == CALL_INSN && (flags & PROP_REG_INFO))
3558 EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i,
3559 { REG_N_CALLS_CROSSED (i)++; });
3561 /* Record sets. Do this even for dead instructions, since they
3562 would have killed the values if they hadn't been deleted. */
3563 mark_set_regs (pbi, PATTERN (insn), insn);
3565 if (GET_CODE (insn) == CALL_INSN)
3567 register int i;
3568 rtx note, cond;
3570 cond = NULL_RTX;
3571 if (GET_CODE (PATTERN (insn)) == COND_EXEC)
3572 cond = COND_EXEC_TEST (PATTERN (insn));
3574 /* Non-constant calls clobber memory. */
3575 if (! CONST_CALL_P (insn))
3576 free_EXPR_LIST_list (&pbi->mem_set_list);
3578 /* There may be extra registers to be clobbered. */
3579 for (note = CALL_INSN_FUNCTION_USAGE (insn);
3580 note;
3581 note = XEXP (note, 1))
3582 if (GET_CODE (XEXP (note, 0)) == CLOBBER)
3583 mark_set_1 (pbi, CLOBBER, XEXP (XEXP (note, 0), 0),
3584 cond, insn, pbi->flags);
3586 /* Calls change all call-used and global registers. */
3587 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3588 if (call_used_regs[i] && ! global_regs[i]
3589 && ! fixed_regs[i])
3591 /* We do not want REG_UNUSED notes for these registers. */
3592 mark_set_1 (pbi, CLOBBER, gen_rtx_REG (reg_raw_mode[i], i),
3593 cond, insn,
3594 pbi->flags & ~(PROP_DEATH_NOTES | PROP_REG_INFO));
3598 /* If an insn doesn't use CC0, it becomes dead since we assume
3599 that every insn clobbers it. So show it dead here;
3600 mark_used_regs will set it live if it is referenced. */
3601 pbi->cc0_live = 0;
3603 /* Record uses. */
3604 if (! insn_is_dead)
3605 mark_used_regs (pbi, PATTERN (insn), NULL_RTX, insn);
3607 /* Sometimes we may have inserted something before INSN (such as a move)
3608 when we make an auto-inc. So ensure we will scan those insns. */
3609 #ifdef AUTO_INC_DEC
3610 prev = PREV_INSN (insn);
3611 #endif
3613 if (! insn_is_dead && GET_CODE (insn) == CALL_INSN)
3615 register int i;
3616 rtx note, cond;
3618 cond = NULL_RTX;
3619 if (GET_CODE (PATTERN (insn)) == COND_EXEC)
3620 cond = COND_EXEC_TEST (PATTERN (insn));
3622 /* Calls use their arguments. */
3623 for (note = CALL_INSN_FUNCTION_USAGE (insn);
3624 note;
3625 note = XEXP (note, 1))
3626 if (GET_CODE (XEXP (note, 0)) == USE)
3627 mark_used_regs (pbi, XEXP (XEXP (note, 0), 0),
3628 cond, insn);
3630 /* The stack ptr is used (honorarily) by a CALL insn. */
3631 SET_REGNO_REG_SET (pbi->reg_live, STACK_POINTER_REGNUM);
3633 /* Calls may also reference any of the global registers,
3634 so they are made live. */
3635 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3636 if (global_regs[i])
3637 mark_used_reg (pbi, gen_rtx_REG (reg_raw_mode[i], i),
3638 cond, insn);
3642 /* On final pass, update counts of how many insns in which each reg
3643 is live. */
3644 if (flags & PROP_REG_INFO)
3645 EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i,
3646 { REG_LIVE_LENGTH (i)++; });
3648 return prev;
3651 /* Initialize a propagate_block_info struct for public consumption.
3652 Note that the structure itself is opaque to this file, but that
3653 the user can use the regsets provided here. */
3655 struct propagate_block_info *
3656 init_propagate_block_info (bb, live, local_set, flags)
3657 basic_block bb;
3658 regset live;
3659 regset local_set;
3660 int flags;
3662 struct propagate_block_info *pbi = xmalloc (sizeof (*pbi));
3664 pbi->bb = bb;
3665 pbi->reg_live = live;
3666 pbi->mem_set_list = NULL_RTX;
3667 pbi->local_set = local_set;
3668 pbi->cc0_live = 0;
3669 pbi->flags = flags;
3671 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
3672 pbi->reg_next_use = (rtx *) xcalloc (max_reg_num (), sizeof (rtx));
3673 else
3674 pbi->reg_next_use = NULL;
3676 pbi->new_set = BITMAP_XMALLOC ();
3678 #ifdef HAVE_conditional_execution
3679 pbi->reg_cond_dead = splay_tree_new (splay_tree_compare_ints, NULL,
3680 free_reg_cond_life_info);
3681 pbi->reg_cond_reg = BITMAP_XMALLOC ();
3683 /* If this block ends in a conditional branch, for each register live
3684 from one side of the branch and not the other, record the register
3685 as conditionally dead. */
3686 if ((flags & (PROP_DEATH_NOTES | PROP_SCAN_DEAD_CODE))
3687 && GET_CODE (bb->end) == JUMP_INSN
3688 && any_condjump_p (bb->end))
3690 regset_head diff_head;
3691 regset diff = INITIALIZE_REG_SET (diff_head);
3692 basic_block bb_true, bb_false;
3693 rtx cond_true, cond_false, set_src;
3694 int i;
3696 /* Identify the successor blocks. */
3697 bb_true = bb->succ->dest;
3698 if (bb->succ->succ_next != NULL)
3700 bb_false = bb->succ->succ_next->dest;
3702 if (bb->succ->flags & EDGE_FALLTHRU)
3704 basic_block t = bb_false;
3705 bb_false = bb_true;
3706 bb_true = t;
3708 else if (! (bb->succ->succ_next->flags & EDGE_FALLTHRU))
3709 abort ();
3711 else
3713 /* This can happen with a conditional jump to the next insn. */
3714 if (JUMP_LABEL (bb->end) != bb_true->head)
3715 abort ();
3717 /* Simplest way to do nothing. */
3718 bb_false = bb_true;
3721 /* Extract the condition from the branch. */
3722 set_src = SET_SRC (pc_set (bb->end));
3723 cond_true = XEXP (set_src, 0);
3724 cond_false = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond_true)),
3725 GET_MODE (cond_true), XEXP (cond_true, 0),
3726 XEXP (cond_true, 1));
3727 if (GET_CODE (XEXP (set_src, 1)) == PC)
3729 rtx t = cond_false;
3730 cond_false = cond_true;
3731 cond_true = t;
3734 /* Compute which registers lead different lives in the successors. */
3735 if (bitmap_operation (diff, bb_true->global_live_at_start,
3736 bb_false->global_live_at_start, BITMAP_XOR))
3738 rtx reg = XEXP (cond_true, 0);
3740 if (GET_CODE (reg) == SUBREG)
3741 reg = SUBREG_REG (reg);
3743 if (GET_CODE (reg) != REG)
3744 abort ();
3746 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (reg));
3748 /* For each such register, mark it conditionally dead. */
3749 EXECUTE_IF_SET_IN_REG_SET
3750 (diff, 0, i,
3752 struct reg_cond_life_info *rcli;
3753 rtx cond;
3755 rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
3757 if (REGNO_REG_SET_P (bb_true->global_live_at_start, i))
3758 cond = cond_false;
3759 else
3760 cond = cond_true;
3761 rcli->condition = alloc_EXPR_LIST (0, cond, NULL_RTX);
3763 splay_tree_insert (pbi->reg_cond_dead, i,
3764 (splay_tree_value) rcli);
3768 FREE_REG_SET (diff);
3770 #endif
3772 /* If this block has no successors, any stores to the frame that aren't
3773 used later in the block are dead. So make a pass over the block
3774 recording any such that are made and show them dead at the end. We do
3775 a very conservative and simple job here. */
3776 if (optimize
3777 && (flags & PROP_SCAN_DEAD_CODE)
3778 && (bb->succ == NULL
3779 || (bb->succ->succ_next == NULL
3780 && bb->succ->dest == EXIT_BLOCK_PTR)))
3782 rtx insn;
3783 for (insn = bb->end; insn != bb->head; insn = PREV_INSN (insn))
3784 if (GET_CODE (insn) == INSN
3785 && GET_CODE (PATTERN (insn)) == SET
3786 && GET_CODE (SET_DEST (PATTERN (insn))) == MEM)
3788 rtx mem = SET_DEST (PATTERN (insn));
3790 if (XEXP (mem, 0) == frame_pointer_rtx
3791 || (GET_CODE (XEXP (mem, 0)) == PLUS
3792 && XEXP (XEXP (mem, 0), 0) == frame_pointer_rtx
3793 && GET_CODE (XEXP (XEXP (mem, 0), 1)) == CONST_INT))
3794 pbi->mem_set_list = alloc_EXPR_LIST (0, mem, pbi->mem_set_list);
3798 return pbi;
3801 /* Release a propagate_block_info struct. */
3803 void
3804 free_propagate_block_info (pbi)
3805 struct propagate_block_info *pbi;
3807 free_EXPR_LIST_list (&pbi->mem_set_list);
3809 BITMAP_XFREE (pbi->new_set);
3811 #ifdef HAVE_conditional_execution
3812 splay_tree_delete (pbi->reg_cond_dead);
3813 BITMAP_XFREE (pbi->reg_cond_reg);
3814 #endif
3816 if (pbi->reg_next_use)
3817 free (pbi->reg_next_use);
3819 free (pbi);
3822 /* Compute the registers live at the beginning of a basic block BB from
3823 those live at the end.
3825 When called, REG_LIVE contains those live at the end. On return, it
3826 contains those live at the beginning.
3828 LOCAL_SET, if non-null, will be set with all registers killed by
3829 this basic block. */
3831 void
3832 propagate_block (bb, live, local_set, flags)
3833 basic_block bb;
3834 regset live;
3835 regset local_set;
3836 int flags;
3838 struct propagate_block_info *pbi;
3839 rtx insn, prev;
3841 pbi = init_propagate_block_info (bb, live, local_set, flags);
3843 if (flags & PROP_REG_INFO)
3845 register int i;
3847 /* Process the regs live at the end of the block.
3848 Mark them as not local to any one basic block. */
3849 EXECUTE_IF_SET_IN_REG_SET (live, 0, i,
3850 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
3853 /* Scan the block an insn at a time from end to beginning. */
3855 for (insn = bb->end;; insn = prev)
3857 /* If this is a call to `setjmp' et al, warn if any
3858 non-volatile datum is live. */
3859 if ((flags & PROP_REG_INFO)
3860 && GET_CODE (insn) == NOTE
3861 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
3862 IOR_REG_SET (regs_live_at_setjmp, pbi->reg_live);
3864 prev = propagate_one_insn (pbi, insn);
3866 if (insn == bb->head)
3867 break;
3870 free_propagate_block_info (pbi);
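/* Aside: propagate_block is the classic backward liveness scan.
   Starting from the live-at-end set, each insn from last to first
   applies  live = (live & ~DEF) | USE.  A self-contained sketch with
   registers modelled as bits of an unsigned long (the mini-insn type
   is hypothetical); fenced off with #if 0.  */
#if 0
struct mini_insn
{
  unsigned long def;		/* Registers this insn sets.  */
  unsigned long use;		/* Registers this insn reads.  */
};

/* Compute the registers live before INSNS[0], given those live after
   INSNS[N-1].  */
static unsigned long
live_at_start (const struct mini_insn *insns, int n,
               unsigned long live_at_end)
{
  unsigned long live = live_at_end;
  int i;

  for (i = n - 1; i >= 0; --i)
    live = (live & ~insns[i].def) | insns[i].use;

  return live;
}
#endif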
3873 /* Return 1 if X (the body of an insn, or part of it) is just dead stores
3874 (SET expressions whose destinations are registers dead after the insn).
3875 NEEDED is the regset that says which regs are alive after the insn.
3877 Unless CALL_OK is non-zero, an insn is needed if it contains a CALL.
3879 If X is the entire body of an insn, NOTES contains the reg notes
3880 pertaining to the insn. */
3882 static int
3883 insn_dead_p (pbi, x, call_ok, notes)
3884 struct propagate_block_info *pbi;
3885 rtx x;
3886 int call_ok;
3887 rtx notes ATTRIBUTE_UNUSED;
3889 enum rtx_code code = GET_CODE (x);
3891 #ifdef AUTO_INC_DEC
3892 /* If flow is invoked after reload, we must take existing AUTO_INC
3893 expressions into account. */
3894 if (reload_completed)
3896 for (; notes; notes = XEXP (notes, 1))
3898 if (REG_NOTE_KIND (notes) == REG_INC)
3900 int regno = REGNO (XEXP (notes, 0));
3902 /* Don't delete insns to set global regs. */
3903 if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
3904 || REGNO_REG_SET_P (pbi->reg_live, regno))
3905 return 0;
3909 #endif
3911 /* If setting something that's a reg or part of one,
3912 see if that register's altered value will be live. */
3914 if (code == SET)
3916 rtx r = SET_DEST (x);
3918 #ifdef HAVE_cc0
3919 if (GET_CODE (r) == CC0)
3920 return ! pbi->cc0_live;
3921 #endif
3923 /* A SET that is a subroutine call cannot be dead. */
3924 if (GET_CODE (SET_SRC (x)) == CALL)
3926 if (! call_ok)
3927 return 0;
3930 /* Don't eliminate loads from volatile memory or volatile asms. */
3931 else if (volatile_refs_p (SET_SRC (x)))
3932 return 0;
3934 if (GET_CODE (r) == MEM)
3936 rtx temp;
3938 if (MEM_VOLATILE_P (r))
3939 return 0;
3941 /* Walk the set of memory locations we are currently tracking
3942 and see if one is an identical match to this memory location.
3943 If so, this memory write is dead (remember, we're walking
3944 backwards from the end of the block to the start). */
3945 temp = pbi->mem_set_list;
3946 while (temp)
3948 if (rtx_equal_p (XEXP (temp, 0), r))
3949 return 1;
3950 temp = XEXP (temp, 1);
3953 else
3955 while (GET_CODE (r) == SUBREG
3956 || GET_CODE (r) == STRICT_LOW_PART
3957 || GET_CODE (r) == ZERO_EXTRACT)
3958 r = XEXP (r, 0);
3960 if (GET_CODE (r) == REG)
3962 int regno = REGNO (r);
3964 /* Obvious. */
3965 if (REGNO_REG_SET_P (pbi->reg_live, regno))
3966 return 0;
3968 /* If this is a hard register, verify that subsequent
3969 words are not needed. */
3970 if (regno < FIRST_PSEUDO_REGISTER)
3972 int n = HARD_REGNO_NREGS (regno, GET_MODE (r));
3974 while (--n > 0)
3975 if (REGNO_REG_SET_P (pbi->reg_live, regno+n))
3976 return 0;
3979 /* Don't delete insns to set global regs. */
3980 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
3981 return 0;
3983 /* Make sure insns to set the stack pointer aren't deleted. */
3984 if (regno == STACK_POINTER_REGNUM)
3985 return 0;
3987 /* Make sure insns to set the frame pointer aren't deleted. */
3988 if (regno == FRAME_POINTER_REGNUM
3989 && (! reload_completed || frame_pointer_needed))
3990 return 0;
3991 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3992 if (regno == HARD_FRAME_POINTER_REGNUM
3993 && (! reload_completed || frame_pointer_needed))
3994 return 0;
3995 #endif
3997 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3998 /* Make sure insns to set arg pointer are never deleted
3999 (if the arg pointer isn't fixed, there will be a USE
4000 for it, so we can treat it normally). */
4001 if (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
4002 return 0;
4003 #endif
4005 #ifdef PIC_OFFSET_TABLE_REGNUM
4006 /* Before reload, do not allow sets of the pic register
4007 to be deleted. Reload can insert references to
4008 constant pool memory anywhere in the function, making
4009 the PIC register live where it wasn't before. */
4010 if (regno == PIC_OFFSET_TABLE_REGNUM && fixed_regs[regno]
4011 && ! reload_completed)
4012 return 0;
4013 #endif
4015 /* Otherwise, the set is dead. */
4016 return 1;
4021 /* If performing several activities, insn is dead if each activity
4022 is individually dead. Also, CLOBBERs and USEs can be ignored; a
4023 CLOBBER or USE that's inside a PARALLEL doesn't make the insn
4024 worth keeping. */
4025 else if (code == PARALLEL)
4027 int i = XVECLEN (x, 0);
4029 for (i--; i >= 0; i--)
4030 if (GET_CODE (XVECEXP (x, 0, i)) != CLOBBER
4031 && GET_CODE (XVECEXP (x, 0, i)) != USE
4032 && ! insn_dead_p (pbi, XVECEXP (x, 0, i), call_ok, NULL_RTX))
4033 return 0;
4035 return 1;
4038 /* A CLOBBER of a pseudo-register that is dead serves no purpose. That
4039 is not necessarily true for hard registers. */
4040 else if (code == CLOBBER && GET_CODE (XEXP (x, 0)) == REG
4041 && REGNO (XEXP (x, 0)) >= FIRST_PSEUDO_REGISTER
4042 && ! REGNO_REG_SET_P (pbi->reg_live, REGNO (XEXP (x, 0))))
4043 return 1;
4045 /* We do not check other CLOBBER or USE here. An insn consisting of just
4046 a CLOBBER or just a USE should not be deleted. */
4047 return 0;
4050 /* If INSN is the last insn in a libcall, and assuming INSN is dead,
4051 return 1 if the entire library call is dead.
4052 This is true if INSN copies a register (hard or pseudo)
4053 and if the hard return reg of the call insn is dead.
4054 (The caller should have tested the destination of the SET inside
4055 INSN already for death.)
4057 If this insn doesn't just copy a register, then we don't
4058 have an ordinary libcall. In that case, cse could not have
4059 managed to substitute the source for the dest later on,
4060 so we can assume the libcall is dead.
4062 PBI is the block info giving pseudoregs live before this insn.
4063 NOTE is the REG_RETVAL note of the insn. */
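/* Schematically, and with hypothetical register numbers, a libcall
block looks like

(insn ...) ;; carries REG_LIBCALL
(call_insn ... (set (reg 0) (call ...)) ...)
(insn (set (reg 100) (reg 0)) ...) ;; carries REG_RETVAL; this is INSN

so if INSN merely copies the hard return register and that register is
dead, the entire sequence can be deleted. */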
4065 static int
4066 libcall_dead_p (pbi, note, insn)
4067 struct propagate_block_info *pbi;
4068 rtx note;
4069 rtx insn;
4071 rtx x = single_set (insn);
4073 if (x)
4075 register rtx r = SET_SRC (x);
4076 if (GET_CODE (r) == REG)
4078 rtx call = XEXP (note, 0);
4079 rtx call_pat;
4080 register int i;
4082 /* Find the call insn. */
4083 while (call != insn && GET_CODE (call) != CALL_INSN)
4084 call = NEXT_INSN (call);
4086 /* If there is none, do nothing special,
4087 since ordinary death handling can understand these insns. */
4088 if (call == insn)
4089 return 0;
4091 /* See if the hard reg holding the value is dead.
4092 If this is a PARALLEL, find the call within it. */
4093 call_pat = PATTERN (call);
4094 if (GET_CODE (call_pat) == PARALLEL)
4096 for (i = XVECLEN (call_pat, 0) - 1; i >= 0; i--)
4097 if (GET_CODE (XVECEXP (call_pat, 0, i)) == SET
4098 && GET_CODE (SET_SRC (XVECEXP (call_pat, 0, i))) == CALL)
4099 break;
4101 /* This may be a library call that is returning a value
4102 via invisible pointer. Do nothing special, since
4103 ordinary death handling can understand these insns. */
4104 if (i < 0)
4105 return 0;
4107 call_pat = XVECEXP (call_pat, 0, i);
4110 return insn_dead_p (pbi, call_pat, 1, REG_NOTES (call));
4113 return 1;
4116 /* Return 1 if register REGNO was used before it was set, i.e. if it is
4117 live at function entry. Don't count global register variables, variables
4118 in registers that can be used for function arg passing, or variables in
4119 fixed hard registers. */
int
regno_uninitialized (regno)
4123 int regno;
4125 if (n_basic_blocks == 0
4126 || (regno < FIRST_PSEUDO_REGISTER
4127 && (global_regs[regno]
4128 || fixed_regs[regno]
4129 || FUNCTION_ARG_REGNO_P (regno))))
4130 return 0;
4132 return REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno);
4135 /* 1 if register REGNO was alive at a place where `setjmp' was called
4136 and was set more than once or is an argument.
4137 Such regs may be clobbered by `longjmp'. */
int
regno_clobbered_at_setjmp (regno)
4141 int regno;
4143 if (n_basic_blocks == 0)
4144 return 0;
4146 return ((REG_N_SETS (regno) > 1
4147 || REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno))
4148 && REGNO_REG_SET_P (regs_live_at_setjmp, regno));
4151 /* INSN references memory, possibly using autoincrement addressing modes.
4152 Find any entries on the mem_set_list that need to be invalidated due
4153 to an address change. */
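/* For instance (hypothetical rtl): if mem_set_list records a store to
(mem (reg 100)) and INSN has a REG_INC note for r100 -- say from an
address like (mem (post_inc (reg 100))) -- then the recorded address is
stale and that entry must be removed from the list. */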
4155 static void
4156 invalidate_mems_from_autoinc (pbi, insn)
4157 struct propagate_block_info *pbi;
4158 rtx insn;
rtx note;
4161 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
4163 if (REG_NOTE_KIND (note) == REG_INC)
4165 rtx temp = pbi->mem_set_list;
4166 rtx prev = NULL_RTX;
4167 rtx next;
4169 while (temp)
4171 next = XEXP (temp, 1);
4172 if (reg_overlap_mentioned_p (XEXP (note, 0), XEXP (temp, 0)))
4174 /* Splice temp out of list. */
4175 if (prev)
4176 XEXP (prev, 1) = next;
4177 else
4178 pbi->mem_set_list = next;
4179 free_EXPR_LIST_node (temp);
4181 else
4182 prev = temp;
4183 temp = next;
/* Process the registers that are set within X. They are recorded as
dead prior to this insn: mark_set_1 clears their bits in PBI->REG_LIVE
and sets them in PBI->NEW_SET.
If INSN is nonzero, it is the insn being processed.
PBI->FLAGS is the set of operations to perform. */
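/* The shapes handled below are, schematically,

(set (reg ...) ...)
(clobber (reg ...))
(cond_exec (eq (reg 96) (const_int 0)) (set ...))
(parallel [(set ...) (clobber ...) ...])

where the predicate register number is hypothetical; a COND_EXEC test
is passed down to mark_set_1 as COND. */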
4196 static void
4197 mark_set_regs (pbi, x, insn)
4198 struct propagate_block_info *pbi;
4199 rtx x, insn;
4201 rtx cond = NULL_RTX;
4202 rtx link;
4203 enum rtx_code code;
4205 if (insn)
4206 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
4208 if (REG_NOTE_KIND (link) == REG_INC)
4209 mark_set_1 (pbi, SET, XEXP (link, 0),
4210 (GET_CODE (x) == COND_EXEC
4211 ? COND_EXEC_TEST (x) : NULL_RTX),
4212 insn, pbi->flags);
4214 retry:
4215 switch (code = GET_CODE (x))
4217 case SET:
4218 case CLOBBER:
4219 mark_set_1 (pbi, code, SET_DEST (x), cond, insn, pbi->flags);
4220 return;
4222 case COND_EXEC:
4223 cond = COND_EXEC_TEST (x);
4224 x = COND_EXEC_CODE (x);
4225 goto retry;
4227 case PARALLEL:
4229 register int i;
4230 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
4232 rtx sub = XVECEXP (x, 0, i);
4233 switch (code = GET_CODE (sub))
4235 case COND_EXEC:
4236 if (cond != NULL_RTX)
4237 abort ();
4239 cond = COND_EXEC_TEST (sub);
4240 sub = COND_EXEC_CODE (sub);
4241 if (GET_CODE (sub) != SET && GET_CODE (sub) != CLOBBER)
4242 break;
4243 /* Fall through. */
4245 case SET:
4246 case CLOBBER:
4247 mark_set_1 (pbi, code, SET_DEST (sub), cond, insn, pbi->flags);
4248 break;
4250 default:
4251 break;
4254 break;
4257 default:
4258 break;
4262 /* Process a single SET rtx, X. */
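/* As an illustration (hypothetical mode and pseudo number), a store
such as

(set (subreg:SI (reg:DI 100) 0) ...)

writes only part of r100, so the old value is not necessarily dead;
NOT_DEAD below tracks exactly this situation. */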
4264 static void
4265 mark_set_1 (pbi, code, reg, cond, insn, flags)
4266 struct propagate_block_info *pbi;
4267 enum rtx_code code;
4268 rtx reg, cond, insn;
4269 int flags;
4271 int regno_first = -1, regno_last = -1;
4272 int not_dead = 0;
4273 int i;
4275 /* Some targets place small structures in registers for
4276 return values of functions. We have to detect this
4277 case specially here to get correct flow information. */
4278 if (GET_CODE (reg) == PARALLEL
4279 && GET_MODE (reg) == BLKmode)
4281 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
4282 mark_set_1 (pbi, code, XVECEXP (reg, 0, i), cond, insn, flags);
4283 return;
4286 /* Modifying just one hardware register of a multi-reg value or just a
4287 byte field of a register does not mean the value from before this insn
is now dead. Of course, if it was dead afterward, it's unused now. */
4290 switch (GET_CODE (reg))
4292 case ZERO_EXTRACT:
4293 case SIGN_EXTRACT:
4294 case STRICT_LOW_PART:
4295 /* ??? Assumes STRICT_LOW_PART not used on multi-word registers. */
do
reg = XEXP (reg, 0);
while (GET_CODE (reg) == SUBREG
|| GET_CODE (reg) == ZERO_EXTRACT
|| GET_CODE (reg) == SIGN_EXTRACT
|| GET_CODE (reg) == STRICT_LOW_PART);
4302 if (GET_CODE (reg) == MEM)
4303 break;
4304 not_dead = REGNO_REG_SET_P (pbi->reg_live, REGNO (reg));
4305 /* Fall through. */
4307 case REG:
4308 regno_last = regno_first = REGNO (reg);
4309 if (regno_first < FIRST_PSEUDO_REGISTER)
4310 regno_last += HARD_REGNO_NREGS (regno_first, GET_MODE (reg)) - 1;
4311 break;
4313 case SUBREG:
4314 if (GET_CODE (SUBREG_REG (reg)) == REG)
4316 enum machine_mode outer_mode = GET_MODE (reg);
4317 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (reg));
4319 /* Identify the range of registers affected. This is moderately
4320 tricky for hard registers. See alter_subreg. */
4322 regno_last = regno_first = REGNO (SUBREG_REG (reg));
4323 if (regno_first < FIRST_PSEUDO_REGISTER)
4325 #ifdef ALTER_HARD_SUBREG
4326 regno_first = ALTER_HARD_SUBREG (outer_mode, SUBREG_WORD (reg),
4327 inner_mode, regno_first);
4328 #else
4329 regno_first += SUBREG_WORD (reg);
4330 #endif
4331 regno_last = (regno_first
4332 + HARD_REGNO_NREGS (regno_first, outer_mode) - 1);
4334 /* Since we've just adjusted the register number ranges, make
4335 sure REG matches. Otherwise some_was_live will be clear
4336 when it shouldn't have been, and we'll create incorrect
4337 REG_UNUSED notes. */
4338 reg = gen_rtx_REG (outer_mode, regno_first);
4340 else
4342 /* If the number of words in the subreg is less than the number
4343 of words in the full register, we have a well-defined partial
4344 set. Otherwise the high bits are undefined.
4346 This is only really applicable to pseudos, since we just took
4347 care of multi-word hard registers. */
4348 if (((GET_MODE_SIZE (outer_mode)
4349 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
4350 < ((GET_MODE_SIZE (inner_mode)
4351 + UNITS_PER_WORD - 1) / UNITS_PER_WORD))
4352 not_dead = REGNO_REG_SET_P (pbi->reg_live, regno_first);
4354 reg = SUBREG_REG (reg);
4357 else
4358 reg = SUBREG_REG (reg);
4359 break;
4361 default:
4362 break;
4365 /* If this set is a MEM, then it kills any aliased writes.
4366 If this set is a REG, then it kills any MEMs which use the reg. */
4367 if (optimize && (flags & PROP_SCAN_DEAD_CODE))
4369 if (GET_CODE (reg) == MEM || GET_CODE (reg) == REG)
4371 rtx temp = pbi->mem_set_list;
4372 rtx prev = NULL_RTX;
4373 rtx next;
4375 while (temp)
4377 next = XEXP (temp, 1);
4378 if ((GET_CODE (reg) == MEM
4379 && output_dependence (XEXP (temp, 0), reg))
4380 || (GET_CODE (reg) == REG
4381 && reg_overlap_mentioned_p (reg, XEXP (temp, 0))))
4383 /* Splice this entry out of the list. */
4384 if (prev)
4385 XEXP (prev, 1) = next;
4386 else
4387 pbi->mem_set_list = next;
4388 free_EXPR_LIST_node (temp);
4390 else
4391 prev = temp;
4392 temp = next;
/* If the memory reference had embedded side effects (autoincrement
address modes), then we may need to kill some entries on the
memory set list. */
4399 if (insn && GET_CODE (reg) == MEM)
4400 invalidate_mems_from_autoinc (pbi, insn);
4402 if (GET_CODE (reg) == MEM && ! side_effects_p (reg)
4403 /* ??? With more effort we could track conditional memory life. */
4404 && ! cond
4405 /* We do not know the size of a BLKmode store, so we do not track
4406 them for redundant store elimination. */
4407 && GET_MODE (reg) != BLKmode
4408 /* There are no REG_INC notes for SP, so we can't assume we'll see
4409 everything that invalidates it. To be safe, don't eliminate any
stores through SP; none of them should be redundant anyway. */
4411 && ! reg_mentioned_p (stack_pointer_rtx, reg))
4412 pbi->mem_set_list = alloc_EXPR_LIST (0, reg, pbi->mem_set_list);
4415 if (GET_CODE (reg) == REG
4416 && ! (regno_first == FRAME_POINTER_REGNUM
4417 && (! reload_completed || frame_pointer_needed))
4418 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4419 && ! (regno_first == HARD_FRAME_POINTER_REGNUM
4420 && (! reload_completed || frame_pointer_needed))
4421 #endif
4422 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4423 && ! (regno_first == ARG_POINTER_REGNUM && fixed_regs[regno_first])
4424 #endif
4427 int some_was_live = 0, some_was_dead = 0;
4429 for (i = regno_first; i <= regno_last; ++i)
4431 int needed_regno = REGNO_REG_SET_P (pbi->reg_live, i);
4432 if (pbi->local_set)
4433 SET_REGNO_REG_SET (pbi->local_set, i);
4434 if (code != CLOBBER)
4435 SET_REGNO_REG_SET (pbi->new_set, i);
4437 some_was_live |= needed_regno;
4438 some_was_dead |= ! needed_regno;
4441 #ifdef HAVE_conditional_execution
4442 /* Consider conditional death in deciding that the register needs
4443 a death note. */
4444 if (some_was_live && ! not_dead
4445 /* The stack pointer is never dead. Well, not strictly true,
4446 but it's very difficult to tell from here. Hopefully
4447 combine_stack_adjustments will fix up the most egregious
4448 errors. */
4449 && regno_first != STACK_POINTER_REGNUM)
4451 for (i = regno_first; i <= regno_last; ++i)
4452 if (! mark_regno_cond_dead (pbi, i, cond))
4453 not_dead = 1;
4455 #endif
4457 /* Additional data to record if this is the final pass. */
4458 if (flags & (PROP_LOG_LINKS | PROP_REG_INFO
4459 | PROP_DEATH_NOTES | PROP_AUTOINC))
4461 register rtx y;
4462 register int blocknum = pbi->bb->index;
4464 y = NULL_RTX;
4465 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4467 y = pbi->reg_next_use[regno_first];
4469 /* The next use is no longer next, since a store intervenes. */
4470 for (i = regno_first; i <= regno_last; ++i)
4471 pbi->reg_next_use[i] = 0;
4474 if (flags & PROP_REG_INFO)
4476 for (i = regno_first; i <= regno_last; ++i)
4478 /* Count (weighted) references, stores, etc. This counts a
4479 register twice if it is modified, but that is correct. */
4480 REG_N_SETS (i) += 1;
4481 REG_N_REFS (i) += (optimize_size ? 1
4482 : pbi->bb->loop_depth + 1);
4484 /* The insns where a reg is live are normally counted
4485 elsewhere, but we want the count to include the insn
4486 where the reg is set, and the normal counting mechanism
4487 would not count it. */
4488 REG_LIVE_LENGTH (i) += 1;
4491 /* If this is a hard reg, record this function uses the reg. */
4492 if (regno_first < FIRST_PSEUDO_REGISTER)
4494 for (i = regno_first; i <= regno_last; i++)
4495 regs_ever_live[i] = 1;
4497 else
4499 /* Keep track of which basic blocks each reg appears in. */
4500 if (REG_BASIC_BLOCK (regno_first) == REG_BLOCK_UNKNOWN)
4501 REG_BASIC_BLOCK (regno_first) = blocknum;
4502 else if (REG_BASIC_BLOCK (regno_first) != blocknum)
4503 REG_BASIC_BLOCK (regno_first) = REG_BLOCK_GLOBAL;
4507 if (! some_was_dead)
4509 if (flags & PROP_LOG_LINKS)
4511 /* Make a logical link from the next following insn
4512 that uses this register, back to this insn.
4513 The following insns have already been processed.
We don't build a LOG_LINK for hard registers contained
in ASM_OPERANDs. If these registers get replaced,
4517 we might wind up changing the semantics of the insn,
4518 even if reload can make what appear to be valid
4519 assignments later. */
4520 if (y && (BLOCK_NUM (y) == blocknum)
4521 && (regno_first >= FIRST_PSEUDO_REGISTER
4522 || asm_noperands (PATTERN (y)) < 0))
4523 LOG_LINKS (y) = alloc_INSN_LIST (insn, LOG_LINKS (y));
4526 else if (not_dead)
4528 else if (! some_was_live)
4530 if (flags & PROP_REG_INFO)
4531 REG_N_DEATHS (regno_first) += 1;
4533 if (flags & PROP_DEATH_NOTES)
4535 /* Note that dead stores have already been deleted
4536 when possible. If we get here, we have found a
4537 dead store that cannot be eliminated (because the
4538 same insn does something useful). Indicate this
4539 by marking the reg being set as dying here. */
4540 REG_NOTES (insn)
4541 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
4544 else
4546 if (flags & PROP_DEATH_NOTES)
4548 /* This is a case where we have a multi-word hard register
4549 and some, but not all, of the words of the register are
4550 needed in subsequent insns. Write REG_UNUSED notes
4551 for those parts that were not needed. This case should
4552 be rare. */
4554 for (i = regno_first; i <= regno_last; ++i)
4555 if (! REGNO_REG_SET_P (pbi->reg_live, i))
4556 REG_NOTES (insn)
4557 = alloc_EXPR_LIST (REG_UNUSED,
4558 gen_rtx_REG (reg_raw_mode[i], i),
4559 REG_NOTES (insn));
4564 /* Mark the register as being dead. */
4565 if (some_was_live
4566 && ! not_dead
4567 /* The stack pointer is never dead. Well, not strictly true,
4568 but it's very difficult to tell from here. Hopefully
4569 combine_stack_adjustments will fix up the most egregious
4570 errors. */
4571 && regno_first != STACK_POINTER_REGNUM)
4573 for (i = regno_first; i <= regno_last; ++i)
4574 CLEAR_REGNO_REG_SET (pbi->reg_live, i);
4577 else if (GET_CODE (reg) == REG)
4579 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4580 pbi->reg_next_use[regno_first] = 0;
4583 /* If this is the last pass and this is a SCRATCH, show it will be dying
4584 here and count it. */
4585 else if (GET_CODE (reg) == SCRATCH)
4587 if (flags & PROP_DEATH_NOTES)
4588 REG_NOTES (insn)
4589 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
4593 #ifdef HAVE_conditional_execution
4594 /* Mark REGNO conditionally dead.
4595 Return true if the register is now unconditionally dead. */
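/* For example, with a hypothetical predicate register r96: a set of
r100 under (eq (reg 96) (const_int 0)) makes r100 dead only under that
condition. If, scanning further, we meet a set of r100 under the
complementary (ne (reg 96) (const_int 0)), the two conditions IOR to
const1_rtx and r100 becomes unconditionally dead. */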
4597 static int
4598 mark_regno_cond_dead (pbi, regno, cond)
4599 struct propagate_block_info *pbi;
4600 int regno;
4601 rtx cond;
/* If this is a store to a predicate register, the value of the
predicate is changing; we don't know that the predicate as seen
before is the same as that seen after. Flush all dependent
4606 conditions from reg_cond_dead. This will make all such
4607 conditionally live registers unconditionally live. */
4608 if (REGNO_REG_SET_P (pbi->reg_cond_reg, regno))
4609 flush_reg_cond_reg (pbi, regno);
4611 /* If this is an unconditional store, remove any conditional
4612 life that may have existed. */
4613 if (cond == NULL_RTX)
4614 splay_tree_remove (pbi->reg_cond_dead, regno);
4615 else
4617 splay_tree_node node;
4618 struct reg_cond_life_info *rcli;
4619 rtx ncond;
4621 /* Otherwise this is a conditional set. Record that fact.
4622 It may have been conditionally used, or there may be a
subsequent set with a complementary condition. */
4625 node = splay_tree_lookup (pbi->reg_cond_dead, regno);
4626 if (node == NULL)
4628 /* The register was unconditionally live previously.
4629 Record the current condition as the condition under
4630 which it is dead. */
4631 rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
4632 rcli->condition = alloc_EXPR_LIST (0, cond, NULL_RTX);
4633 splay_tree_insert (pbi->reg_cond_dead, regno,
4634 (splay_tree_value) rcli);
4636 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));
/* Not unconditionally dead. */
4639 return 0;
4641 else
4643 /* The register was conditionally live previously.
4644 Add the new condition to the old. */
4645 rcli = (struct reg_cond_life_info *) node->value;
4646 ncond = rcli->condition;
4647 ncond = ior_reg_cond (ncond, cond);
4649 /* If the register is now unconditionally dead,
4650 remove the entry in the splay_tree. */
4651 if (ncond == const1_rtx)
4652 splay_tree_remove (pbi->reg_cond_dead, regno);
4653 else
4655 rcli->condition = ncond;
4657 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));
/* Not unconditionally dead. */
4660 return 0;
4665 return 1;
/* Called from splay_tree_delete for pbi->reg_cond_dead. */
4670 static void
4671 free_reg_cond_life_info (value)
4672 splay_tree_value value;
4674 struct reg_cond_life_info *rcli = (struct reg_cond_life_info *) value;
4675 free_EXPR_LIST_list (&rcli->condition);
4676 free (rcli);
4679 /* Helper function for flush_reg_cond_reg. */
4681 static int
4682 flush_reg_cond_reg_1 (node, data)
4683 splay_tree_node node;
4684 void *data;
4686 struct reg_cond_life_info *rcli;
4687 int *xdata = (int *) data;
4688 unsigned int regno = xdata[0];
4689 rtx c, *prev;
4691 /* Don't need to search if last flushed value was farther on in
4692 the in-order traversal. */
4693 if (xdata[1] >= (int) node->key)
4694 return 0;
4696 /* Splice out portions of the expression that refer to regno. */
4697 rcli = (struct reg_cond_life_info *) node->value;
4698 c = *(prev = &rcli->condition);
4699 while (c)
4701 if (regno == REGNO (XEXP (XEXP (c, 0), 0)))
4703 rtx next = XEXP (c, 1);
4704 free_EXPR_LIST_node (c);
4705 c = *prev = next;
4707 else
4708 c = *(prev = &XEXP (c, 1));
4711 /* If the entire condition is now NULL, signal the node to be removed. */
4712 if (! rcli->condition)
4714 xdata[1] = node->key;
4715 return -1;
4717 else
4718 return 0;
/* Flush all (sub) expressions referring to REGNO from REG_COND_DEAD. */
4723 static void
4724 flush_reg_cond_reg (pbi, regno)
4725 struct propagate_block_info *pbi;
4726 int regno;
4728 int pair[2];
4730 pair[0] = regno;
4731 pair[1] = -1;
4732 while (splay_tree_foreach (pbi->reg_cond_dead,
4733 flush_reg_cond_reg_1, pair) == -1)
4734 splay_tree_remove (pbi->reg_cond_dead, pair[1]);
4736 CLEAR_REGNO_REG_SET (pbi->reg_cond_reg, regno);
4739 /* Logical arithmetic on predicate conditions. IOR, NOT and NAND.
4740 We actually use EXPR_LIST to chain the sub-expressions together
4741 instead of IOR because it's easier to manipulate and we have
4742 the lists.c functions to reuse nodes.
4744 Return a new rtl expression as appropriate. */
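/* Concretely, a condition such as "dead when r96 == 0 or r97 == 0"
(hypothetical registers) is kept as the chain

(expr_list (eq (reg 96) (const_int 0))
(expr_list (eq (reg 97) (const_int 0)) nil))

and IORing in the complement of any member collapses the whole
condition to const1_rtx. */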
4746 static rtx
4747 ior_reg_cond (old, x)
4748 rtx old, x;
4750 enum rtx_code x_code;
4751 rtx x_reg;
4752 rtx c;
4754 /* We expect these conditions to be of the form (eq reg 0). */
4755 x_code = GET_CODE (x);
4756 if (GET_RTX_CLASS (x_code) != '<'
4757 || GET_CODE (x_reg = XEXP (x, 0)) != REG
4758 || XEXP (x, 1) != const0_rtx)
4759 abort ();
4761 /* Search the expression for an existing sub-expression of X_REG. */
4762 for (c = old; c; c = XEXP (c, 1))
4764 rtx y = XEXP (c, 0);
4765 if (REGNO (XEXP (y, 0)) == REGNO (x_reg))
4767 /* If we find X already present in OLD, we need do nothing. */
4768 if (GET_CODE (y) == x_code)
4769 return old;
/* If we find X to be the complement of a condition in OLD,
then the entire condition is true. */
4773 if (GET_CODE (y) == reverse_condition (x_code))
4774 return const1_rtx;
4778 /* Otherwise just add to the chain. */
4779 return alloc_EXPR_LIST (0, x, old);
4782 static rtx
4783 not_reg_cond (x)
4784 rtx x;
4786 enum rtx_code x_code;
4787 rtx x_reg;
4789 /* We expect these conditions to be of the form (eq reg 0). */
4790 x_code = GET_CODE (x);
4791 if (GET_RTX_CLASS (x_code) != '<'
4792 || GET_CODE (x_reg = XEXP (x, 0)) != REG
4793 || XEXP (x, 1) != const0_rtx)
4794 abort ();
4796 return alloc_EXPR_LIST (0, gen_rtx_fmt_ee (reverse_condition (x_code),
4797 VOIDmode, x_reg, const0_rtx),
4798 NULL_RTX);
4801 static rtx
4802 nand_reg_cond (old, x)
4803 rtx old, x;
4805 enum rtx_code x_code;
4806 rtx x_reg;
4807 rtx c, *prev;
4809 /* We expect these conditions to be of the form (eq reg 0). */
4810 x_code = GET_CODE (x);
4811 if (GET_RTX_CLASS (x_code) != '<'
4812 || GET_CODE (x_reg = XEXP (x, 0)) != REG
4813 || XEXP (x, 1) != const0_rtx)
4814 abort ();
4816 /* Search the expression for an existing sub-expression of X_REG. */
4818 for (c = *(prev = &old); c; c = *(prev = &XEXP (c, 1)))
4820 rtx y = XEXP (c, 0);
4821 if (REGNO (XEXP (y, 0)) == REGNO (x_reg))
4823 /* If we find X already present in OLD, then we need to
4824 splice it out. */
4825 if (GET_CODE (y) == x_code)
4827 *prev = XEXP (c, 1);
4828 free_EXPR_LIST_node (c);
4829 return old ? old : const0_rtx;
/* If we find X to be the complement of a condition in OLD,
then we need do nothing. */
4834 if (GET_CODE (y) == reverse_condition (x_code))
4835 return old;
4839 /* Otherwise, by implication, the register in question is now live for
4840 the inverse of the condition X. */
4841 return alloc_EXPR_LIST (0, gen_rtx_fmt_ee (reverse_condition (x_code),
4842 VOIDmode, x_reg, const0_rtx),
4843 old);
4845 #endif /* HAVE_conditional_execution */
4847 #ifdef AUTO_INC_DEC
4849 /* Try to substitute the auto-inc expression INC as the address inside
4850 MEM which occurs in INSN. Currently, the address of MEM is an expression
4851 involving INCR_REG, and INCR is the next use of INCR_REG; it is an insn
4852 that has a single set whose source is a PLUS of INCR_REG and something
4853 else. */
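/* Schematically (hypothetical rtl), the simple case below rewrites

INSN: ... (mem (reg 100)) ...
INCR: (set (reg 100) (plus (reg 100) (const_int 4)))

into

INSN: ... (mem (post_inc (reg 100))) ...
INCR: (set (reg 100) (reg 100)) ;; then deleted as a no-op

and adds a REG_INC note for r100 to INSN. */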
4855 static void
4856 attempt_auto_inc (pbi, inc, insn, mem, incr, incr_reg)
4857 struct propagate_block_info *pbi;
4858 rtx inc, insn, mem, incr, incr_reg;
4860 int regno = REGNO (incr_reg);
4861 rtx set = single_set (incr);
4862 rtx q = SET_DEST (set);
4863 rtx y = SET_SRC (set);
4864 int opnum = XEXP (y, 0) == incr_reg ? 0 : 1;
4866 /* Make sure this reg appears only once in this insn. */
4867 if (count_occurrences (PATTERN (insn), incr_reg, 1) != 1)
4868 return;
4870 if (dead_or_set_p (incr, incr_reg)
4871 /* Mustn't autoinc an eliminable register. */
4872 && (regno >= FIRST_PSEUDO_REGISTER
4873 || ! TEST_HARD_REG_BIT (elim_reg_set, regno)))
4875 /* This is the simple case. Try to make the auto-inc. If
4876 we can't, we are done. Otherwise, we will do any
4877 needed updates below. */
4878 if (! validate_change (insn, &XEXP (mem, 0), inc, 0))
4879 return;
4881 else if (GET_CODE (q) == REG
4882 /* PREV_INSN used here to check the semi-open interval
4883 [insn,incr). */
4884 && ! reg_used_between_p (q, PREV_INSN (insn), incr)
4885 /* We must also check for sets of q as q may be
4886 a call clobbered hard register and there may
4887 be a call between PREV_INSN (insn) and incr. */
4888 && ! reg_set_between_p (q, PREV_INSN (insn), incr))
4890 /* We have *p followed sometime later by q = p+size.
4891 Both p and q must be live afterward,
4892 and q is not used between INSN and its assignment.
4893 Change it to q = p, ...*q..., q = q+size.
4894 Then fall into the usual case. */
4895 rtx insns, temp;
4897 start_sequence ();
4898 emit_move_insn (q, incr_reg);
4899 insns = get_insns ();
4900 end_sequence ();
4902 if (basic_block_for_insn)
4903 for (temp = insns; temp; temp = NEXT_INSN (temp))
4904 set_block_for_insn (temp, pbi->bb);
4906 /* If we can't make the auto-inc, or can't make the
4907 replacement into Y, exit. There's no point in making
4908 the change below if we can't do the auto-inc and doing
4909 so is not correct in the pre-inc case. */
4911 XEXP (inc, 0) = q;
4912 validate_change (insn, &XEXP (mem, 0), inc, 1);
4913 validate_change (incr, &XEXP (y, opnum), q, 1);
4914 if (! apply_change_group ())
4915 return;
4917 /* We now know we'll be doing this change, so emit the
4918 new insn(s) and do the updates. */
4919 emit_insns_before (insns, insn);
4921 if (pbi->bb->head == insn)
4922 pbi->bb->head = insns;
4924 /* INCR will become a NOTE and INSN won't contain a
4925 use of INCR_REG. If a use of INCR_REG was just placed in
4926 the insn before INSN, make that the next use.
4927 Otherwise, invalidate it. */
4928 if (GET_CODE (PREV_INSN (insn)) == INSN
4929 && GET_CODE (PATTERN (PREV_INSN (insn))) == SET
4930 && SET_SRC (PATTERN (PREV_INSN (insn))) == incr_reg)
4931 pbi->reg_next_use[regno] = PREV_INSN (insn);
4932 else
4933 pbi->reg_next_use[regno] = 0;
4935 incr_reg = q;
4936 regno = REGNO (q);
4938 /* REGNO is now used in INCR which is below INSN, but
4939 it previously wasn't live here. If we don't mark
4940 it as live, we'll put a REG_DEAD note for it
4941 on this insn, which is incorrect. */
4942 SET_REGNO_REG_SET (pbi->reg_live, regno);
4944 /* If there are any calls between INSN and INCR, show
4945 that REGNO now crosses them. */
4946 for (temp = insn; temp != incr; temp = NEXT_INSN (temp))
4947 if (GET_CODE (temp) == CALL_INSN)
4948 REG_N_CALLS_CROSSED (regno)++;
4950 else
4951 return;
4953 /* If we haven't returned, it means we were able to make the
4954 auto-inc, so update the status. First, record that this insn
4955 has an implicit side effect. */
4957 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, incr_reg, REG_NOTES (insn));
4959 /* Modify the old increment-insn to simply copy
4960 the already-incremented value of our register. */
4961 if (! validate_change (incr, &SET_SRC (set), incr_reg, 0))
4962 abort ();
4964 /* If that makes it a no-op (copying the register into itself) delete
4965 it so it won't appear to be a "use" and a "set" of this
4966 register. */
4967 if (REGNO (SET_DEST (set)) == REGNO (incr_reg))
4969 /* If the original source was dead, it's dead now. */
4970 rtx note;
4972 while ((note = find_reg_note (incr, REG_DEAD, NULL_RTX)) != NULL_RTX)
4974 remove_note (incr, note);
4975 if (XEXP (note, 0) != incr_reg)
4976 CLEAR_REGNO_REG_SET (pbi->reg_live, REGNO (XEXP (note, 0)));
4979 PUT_CODE (incr, NOTE);
4980 NOTE_LINE_NUMBER (incr) = NOTE_INSN_DELETED;
4981 NOTE_SOURCE_FILE (incr) = 0;
4984 if (regno >= FIRST_PSEUDO_REGISTER)
4986 /* Count an extra reference to the reg. When a reg is
4987 incremented, spilling it is worse, so we want to make
4988 that less likely. */
4989 REG_N_REFS (regno) += (optimize_size ? 1 : pbi->bb->loop_depth + 1);
4991 /* Count the increment as a setting of the register,
4992 even though it isn't a SET in rtl. */
4993 REG_N_SETS (regno)++;
4997 /* X is a MEM found in INSN. See if we can convert it into an auto-increment
4998 reference. */
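/* The addresses recognized are, schematically, (reg 100) with offset 0
and (plus (reg 100) (const_int N)) with offset N (register number
hypothetical); comparing the offset and the increment against the mode
size of X selects among POST_INC, POST_DEC, PRE_INC, PRE_DEC and the
POST_MODIFY forms below. */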
5000 static void
5001 find_auto_inc (pbi, x, insn)
5002 struct propagate_block_info *pbi;
5003 rtx x;
5004 rtx insn;
5006 rtx addr = XEXP (x, 0);
5007 HOST_WIDE_INT offset = 0;
5008 rtx set, y, incr, inc_val;
5009 int regno;
5010 int size = GET_MODE_SIZE (GET_MODE (x));
5012 if (GET_CODE (insn) == JUMP_INSN)
5013 return;
5015 /* Here we detect use of an index register which might be good for
5016 postincrement, postdecrement, preincrement, or predecrement. */
5018 if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
5019 offset = INTVAL (XEXP (addr, 1)), addr = XEXP (addr, 0);
5021 if (GET_CODE (addr) != REG)
5022 return;
5024 regno = REGNO (addr);
5026 /* Is the next use an increment that might make auto-increment? */
5027 incr = pbi->reg_next_use[regno];
5028 if (incr == 0 || BLOCK_NUM (incr) != BLOCK_NUM (insn))
5029 return;
5030 set = single_set (incr);
5031 if (set == 0 || GET_CODE (set) != SET)
5032 return;
5033 y = SET_SRC (set);
5035 if (GET_CODE (y) != PLUS)
5036 return;
5038 if (REG_P (XEXP (y, 0)) && REGNO (XEXP (y, 0)) == REGNO (addr))
5039 inc_val = XEXP (y, 1);
5040 else if (REG_P (XEXP (y, 1)) && REGNO (XEXP (y, 1)) == REGNO (addr))
5041 inc_val = XEXP (y, 0);
5042 else
5043 return;
5045 if (GET_CODE (inc_val) == CONST_INT)
5047 if (HAVE_POST_INCREMENT
5048 && (INTVAL (inc_val) == size && offset == 0))
5049 attempt_auto_inc (pbi, gen_rtx_POST_INC (Pmode, addr), insn, x,
5050 incr, addr);
5051 else if (HAVE_POST_DECREMENT
5052 && (INTVAL (inc_val) == -size && offset == 0))
5053 attempt_auto_inc (pbi, gen_rtx_POST_DEC (Pmode, addr), insn, x,
5054 incr, addr);
5055 else if (HAVE_PRE_INCREMENT
5056 && (INTVAL (inc_val) == size && offset == size))
5057 attempt_auto_inc (pbi, gen_rtx_PRE_INC (Pmode, addr), insn, x,
5058 incr, addr);
5059 else if (HAVE_PRE_DECREMENT
5060 && (INTVAL (inc_val) == -size && offset == -size))
5061 attempt_auto_inc (pbi, gen_rtx_PRE_DEC (Pmode, addr), insn, x,
5062 incr, addr);
5063 else if (HAVE_POST_MODIFY_DISP && offset == 0)
5064 attempt_auto_inc (pbi, gen_rtx_POST_MODIFY (Pmode, addr,
5065 gen_rtx_PLUS (Pmode,
5066 addr,
5067 inc_val)),
5068 insn, x, incr, addr);
5070 else if (GET_CODE (inc_val) == REG
5071 && ! reg_set_between_p (inc_val, PREV_INSN (insn),
5072 NEXT_INSN (incr)))
5075 if (HAVE_POST_MODIFY_REG && offset == 0)
5076 attempt_auto_inc (pbi, gen_rtx_POST_MODIFY (Pmode, addr,
5077 gen_rtx_PLUS (Pmode,
5078 addr,
5079 inc_val)),
5080 insn, x, incr, addr);
5084 #endif /* AUTO_INC_DEC */
5086 static void
5087 mark_used_reg (pbi, reg, cond, insn)
5088 struct propagate_block_info *pbi;
5089 rtx reg;
5090 rtx cond ATTRIBUTE_UNUSED;
5091 rtx insn;
5093 int regno = REGNO (reg);
5094 int some_was_live = REGNO_REG_SET_P (pbi->reg_live, regno);
5095 int some_was_dead = ! some_was_live;
5096 int some_not_set;
5097 int n;
5099 /* A hard reg in a wide mode may really be multiple registers.
5100 If so, mark all of them just like the first. */
5101 if (regno < FIRST_PSEUDO_REGISTER)
5103 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
5104 while (--n > 0)
5106 int needed_regno = REGNO_REG_SET_P (pbi->reg_live, regno + n);
5107 some_was_live |= needed_regno;
5108 some_was_dead |= ! needed_regno;
5112 if (pbi->flags & (PROP_LOG_LINKS | PROP_AUTOINC))
5114 /* Record where each reg is used, so when the reg is set we know
5115 the next insn that uses it. */
5116 pbi->reg_next_use[regno] = insn;
5119 if (pbi->flags & PROP_REG_INFO)
5121 if (regno < FIRST_PSEUDO_REGISTER)
5123 /* If this is a register we are going to try to eliminate,
5124 don't mark it live here. If we are successful in
5125 eliminating it, it need not be live unless it is used for
5126 pseudos, in which case it will have been set live when it
5127 was allocated to the pseudos. If the register will not
5128 be eliminated, reload will set it live at that point.
5130 Otherwise, record that this function uses this register. */
/* ??? The PPC backend tries to "eliminate" the pic
register to itself. This should be fixed. In the
meantime, hack around it. */
5135 if (! (TEST_HARD_REG_BIT (elim_reg_set, regno)
5136 && (regno == FRAME_POINTER_REGNUM
5137 || regno == ARG_POINTER_REGNUM)))
int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
do
regs_ever_live[regno + --n] = 1;
while (n > 0);
5145 else
5147 /* Keep track of which basic block each reg appears in. */
5149 register int blocknum = pbi->bb->index;
5150 if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
5151 REG_BASIC_BLOCK (regno) = blocknum;
5152 else if (REG_BASIC_BLOCK (regno) != blocknum)
5153 REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
5155 /* Count (weighted) number of uses of each reg. */
5156 REG_N_REFS (regno) += (optimize_size ? 1
5157 : pbi->bb->loop_depth + 1);
5161 /* Find out if any of the register was set this insn. */
5162 some_not_set = ! REGNO_REG_SET_P (pbi->new_set, regno);
5163 if (regno < FIRST_PSEUDO_REGISTER)
5165 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
5166 while (--n > 0)
5167 some_not_set |= ! REGNO_REG_SET_P (pbi->new_set, regno + n);
5170 /* Record and count the insns in which a reg dies. If it is used in
5171 this insn and was dead below the insn then it dies in this insn.
5172 If it was set in this insn, we do not make a REG_DEAD note;
5173 likewise if we already made such a note. */
5174 if ((pbi->flags & (PROP_DEATH_NOTES | PROP_REG_INFO))
5175 && some_was_dead
5176 && some_not_set)
5178 /* Check for the case where the register dying partially
5179 overlaps the register set by this insn. */
5180 if (regno < FIRST_PSEUDO_REGISTER
5181 && HARD_REGNO_NREGS (regno, GET_MODE (reg)) > 1)
5183 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
5184 while (--n >= 0)
5185 some_was_live |= REGNO_REG_SET_P (pbi->new_set, regno + n);
5188 /* If none of the words in X is needed, make a REG_DEAD note.
5189 Otherwise, we must make partial REG_DEAD notes. */
5190 if (! some_was_live)
5192 if ((pbi->flags & PROP_DEATH_NOTES)
5193 && ! find_regno_note (insn, REG_DEAD, regno))
5194 REG_NOTES (insn)
5195 = alloc_EXPR_LIST (REG_DEAD, reg, REG_NOTES (insn));
5197 if (pbi->flags & PROP_REG_INFO)
5198 REG_N_DEATHS (regno)++;
5200 else
5202 /* Don't make a REG_DEAD note for a part of a register
5203 that is set in the insn. */
5205 n = regno + HARD_REGNO_NREGS (regno, GET_MODE (reg)) - 1;
5206 for (; n >= regno; n--)
5207 if (! REGNO_REG_SET_P (pbi->reg_live, n)
5208 && ! dead_or_set_regno_p (insn, n))
5209 REG_NOTES (insn)
5210 = alloc_EXPR_LIST (REG_DEAD,
5211 gen_rtx_REG (reg_raw_mode[n], n),
5212 REG_NOTES (insn));
5216 SET_REGNO_REG_SET (pbi->reg_live, regno);
5217 if (regno < FIRST_PSEUDO_REGISTER)
5219 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
5220 while (--n > 0)
5221 SET_REGNO_REG_SET (pbi->reg_live, regno + n);
5224 #ifdef HAVE_conditional_execution
5225 /* If this is a conditional use, record that fact. If it is later
5226 conditionally set, we'll know to kill the register. */
5227 if (cond != NULL_RTX)
5229 splay_tree_node node;
5230 struct reg_cond_life_info *rcli;
5231 rtx ncond;
5233 if (some_was_live)
5235 node = splay_tree_lookup (pbi->reg_cond_dead, regno);
5236 if (node == NULL)
5238 /* The register was unconditionally live previously.
5239 No need to do anything. */
5241 else
5243 /* The register was conditionally live previously.
5244 Subtract the new life cond from the old death cond. */
5245 rcli = (struct reg_cond_life_info *) node->value;
5246 ncond = rcli->condition;
5247 ncond = nand_reg_cond (ncond, cond);
5249 /* If the register is now unconditionally live, remove the
5250 entry in the splay_tree. */
5251 if (ncond == const0_rtx)
5253 rcli->condition = NULL_RTX;
5254 splay_tree_remove (pbi->reg_cond_dead, regno);
5256 else
5258 rcli->condition = ncond;
5259 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));
5263 else
5265 /* The register was not previously live at all. Record
5266 the condition under which it is still dead. */
5267 rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
5268 rcli->condition = not_reg_cond (cond);
5269 splay_tree_insert (pbi->reg_cond_dead, regno,
5270 (splay_tree_value) rcli);
5272 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));
5275 else if (some_was_live)
5277 splay_tree_node node;
5278 struct reg_cond_life_info *rcli;
5280 node = splay_tree_lookup (pbi->reg_cond_dead, regno);
5281 if (node != NULL)
5283 /* The register was conditionally live previously, but is now
5284 unconditionally so. Remove it from the conditionally dead
5285 list, so that a conditional set won't cause us to think
5286 it dead. */
5287 rcli = (struct reg_cond_life_info *) node->value;
5288 rcli->condition = NULL_RTX;
5289 splay_tree_remove (pbi->reg_cond_dead, regno);
5293 #endif
/* Scan expression X and mark each register it uses as live by setting
the corresponding bit in PBI->REG_LIVE.
INSN is the containing instruction. If INSN is dead, this function
is not called. */
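/* For instance (hypothetical rtl), in

(set (mem (reg 100)) (reg 101))

both r100 (used inside the address) and r101 (the stored value) are
marked as used, but the MEM destination itself is not a use. */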
5303 static void
5304 mark_used_regs (pbi, x, cond, insn)
5305 struct propagate_block_info *pbi;
5306 rtx x, cond, insn;
5308 register RTX_CODE code;
5309 register int regno;
5310 int flags = pbi->flags;
5312 retry:
5313 code = GET_CODE (x);
5314 switch (code)
5316 case LABEL_REF:
5317 case SYMBOL_REF:
5318 case CONST_INT:
5319 case CONST:
5320 case CONST_DOUBLE:
5321 case PC:
5322 case ADDR_VEC:
5323 case ADDR_DIFF_VEC:
5324 return;
5326 #ifdef HAVE_cc0
5327 case CC0:
5328 pbi->cc0_live = 1;
5329 return;
5330 #endif
5332 case CLOBBER:
5333 /* If we are clobbering a MEM, mark any registers inside the address
5334 as being used. */
5335 if (GET_CODE (XEXP (x, 0)) == MEM)
5336 mark_used_regs (pbi, XEXP (XEXP (x, 0), 0), cond, insn);
5337 return;
5339 case MEM:
5340 /* Don't bother watching stores to mems if this is not the
5341 final pass. We'll not be deleting dead stores this round. */
5342 if (optimize && (flags & PROP_SCAN_DEAD_CODE))
5344 /* Invalidate the data for the last MEM stored, but only if MEM is
5345 something that can be stored into. */
5346 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
5347 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
5348 /* Needn't clear the memory set list. */
5350 else
5352 rtx temp = pbi->mem_set_list;
5353 rtx prev = NULL_RTX;
5354 rtx next;
5356 while (temp)
5358 next = XEXP (temp, 1);
5359 if (anti_dependence (XEXP (temp, 0), x))
5361 /* Splice temp out of the list. */
5362 if (prev)
5363 XEXP (prev, 1) = next;
5364 else
5365 pbi->mem_set_list = next;
5366 free_EXPR_LIST_node (temp);
5368 else
5369 prev = temp;
5370 temp = next;
/* If the memory reference had embedded side effects (autoincrement
address modes), then we may need to kill some entries on the
memory set list. */
5377 if (insn)
5378 invalidate_mems_from_autoinc (pbi, insn);
5381 #ifdef AUTO_INC_DEC
5382 if (flags & PROP_AUTOINC)
5383 find_auto_inc (pbi, x, insn);
5384 #endif
5385 break;
5387 case SUBREG:
5388 #ifdef CLASS_CANNOT_CHANGE_MODE
5389 if (GET_CODE (SUBREG_REG (x)) == REG
5390 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER
5391 && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (x),
5392 GET_MODE (SUBREG_REG (x))))
5393 REG_CHANGES_MODE (REGNO (SUBREG_REG (x))) = 1;
5394 #endif
5396 /* While we're here, optimize this case. */
5397 x = SUBREG_REG (x);
5398 if (GET_CODE (x) != REG)
5399 goto retry;
5400 /* Fall through. */
5402 case REG:
/* A register referenced other than being set => mark it as needed. */
5404 mark_used_reg (pbi, x, cond, insn);
5405 return;
5407 case SET:
5409 register rtx testreg = SET_DEST (x);
5410 int mark_dest = 0;
5412 /* If storing into MEM, don't show it as being used. But do
5413 show the address as being used. */
5414 if (GET_CODE (testreg) == MEM)
5416 #ifdef AUTO_INC_DEC
5417 if (flags & PROP_AUTOINC)
5418 find_auto_inc (pbi, testreg, insn);
5419 #endif
5420 mark_used_regs (pbi, XEXP (testreg, 0), cond, insn);
5421 mark_used_regs (pbi, SET_SRC (x), cond, insn);
5422 return;
5425 /* Storing in STRICT_LOW_PART is like storing in a reg
in that this SET might be dead, so ignore it in TESTREG,
but in some other ways it is like using the reg.
5429 Storing in a SUBREG or a bit field is like storing the entire
5430 register in that if the register's value is not used
5431 then this SET is not needed. */
5432 while (GET_CODE (testreg) == STRICT_LOW_PART
5433 || GET_CODE (testreg) == ZERO_EXTRACT
5434 || GET_CODE (testreg) == SIGN_EXTRACT
5435 || GET_CODE (testreg) == SUBREG)
5437 #ifdef CLASS_CANNOT_CHANGE_MODE
5438 if (GET_CODE (testreg) == SUBREG
5439 && GET_CODE (SUBREG_REG (testreg)) == REG
5440 && REGNO (SUBREG_REG (testreg)) >= FIRST_PSEUDO_REGISTER
5441 && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (SUBREG_REG (testreg)),
5442 GET_MODE (testreg)))
5443 REG_CHANGES_MODE (REGNO (SUBREG_REG (testreg))) = 1;
5444 #endif
5446 /* Modifying a single register in an alternate mode
5447 does not use any of the old value. But these other
5448 ways of storing in a register do use the old value. */
5449 if (GET_CODE (testreg) == SUBREG
5450 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
5452 else
5453 mark_dest = 1;
5455 testreg = XEXP (testreg, 0);
5458 /* If this is a store into a register, recursively scan the
5459 value being stored. */
5461 if ((GET_CODE (testreg) == PARALLEL
5462 && GET_MODE (testreg) == BLKmode)
5463 || (GET_CODE (testreg) == REG
5464 && (regno = REGNO (testreg),
5465 ! (regno == FRAME_POINTER_REGNUM
5466 && (! reload_completed || frame_pointer_needed)))
5467 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
5468 && ! (regno == HARD_FRAME_POINTER_REGNUM
5469 && (! reload_completed || frame_pointer_needed))
5470 #endif
5471 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
5472 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
5473 #endif
5476 if (mark_dest)
5477 mark_used_regs (pbi, SET_DEST (x), cond, insn);
5478 mark_used_regs (pbi, SET_SRC (x), cond, insn);
5479 return;
5482 break;
5484 case ASM_OPERANDS:
5485 case UNSPEC_VOLATILE:
5486 case TRAP_IF:
5487 case ASM_INPUT:
5489 /* Traditional and volatile asm instructions must be considered to use
5490 and clobber all hard registers, all pseudo-registers and all of
5491 memory. So must TRAP_IF and UNSPEC_VOLATILE operations.
5493 Consider for instance a volatile asm that changes the fpu rounding
5494 mode. An insn should not be moved across this even if it only uses
5495 pseudo-regs because it might give an incorrectly rounded result.
5497 ?!? Unfortunately, marking all hard registers as live causes massive
5498 problems for the register allocator and marking all pseudos as live
5499 creates mountains of uninitialized variable warnings.
5501 So for now, just clear the memory set list and mark any regs
5502 we can find in ASM_OPERANDS as used. */
5503 if (code != ASM_OPERANDS || MEM_VOLATILE_P (x))
5504 free_EXPR_LIST_list (&pbi->mem_set_list);
5506 /* For all ASM_OPERANDS, we must traverse the vector of input operands.
We cannot just fall through here, since then we would be confused
by the ASM_INPUT rtx inside ASM_OPERANDS, which does not indicate
a traditional asm, unlike its normal usage. */
5510 if (code == ASM_OPERANDS)
5512 int j;
5514 for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
5515 mark_used_regs (pbi, ASM_OPERANDS_INPUT (x, j), cond, insn);
5517 break;
5520 case COND_EXEC:
5521 if (cond != NULL_RTX)
5522 abort ();
5524 mark_used_regs (pbi, COND_EXEC_TEST (x), NULL_RTX, insn);
5526 cond = COND_EXEC_TEST (x);
5527 x = COND_EXEC_CODE (x);
5528 goto retry;
5530 case PHI:
5531 /* We _do_not_ want to scan operands of phi nodes. Operands of
5532 a phi function are evaluated only when control reaches this
5533 block along a particular edge. Therefore, regs that appear
5534 as arguments to phi should not be added to the global live at
5535 start. */
5536 return;
5538 default:
5539 break;
5542 /* Recursively scan the operands of this expression. */
5545 register const char *fmt = GET_RTX_FORMAT (code);
5546 register int i;
5548 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5550 if (fmt[i] == 'e')
5552 /* Tail recursive case: save a function call level. */
5553 if (i == 0)
5555 x = XEXP (x, 0);
5556 goto retry;
5558 mark_used_regs (pbi, XEXP (x, i), cond, insn);
5560 else if (fmt[i] == 'E')
5562 register int j;
5563 for (j = 0; j < XVECLEN (x, i); j++)
5564 mark_used_regs (pbi, XVECEXP (x, i, j), cond, insn);
5570 #ifdef AUTO_INC_DEC
5572 static int
5573 try_pre_increment_1 (pbi, insn)
5574 struct propagate_block_info *pbi;
5575 rtx insn;
5577 /* Find the next use of this reg. If in same basic block,
5578 make it do pre-increment or pre-decrement if appropriate. */
5579 rtx x = single_set (insn);
5580 HOST_WIDE_INT amount = ((GET_CODE (SET_SRC (x)) == PLUS ? 1 : -1)
5581 * INTVAL (XEXP (SET_SRC (x), 1)));
5582 int regno = REGNO (SET_DEST (x));
5583 rtx y = pbi->reg_next_use[regno];
5584 if (y != 0
5585 && BLOCK_NUM (y) == BLOCK_NUM (insn)
5586 /* Don't do this if the reg dies, or gets set in y; a standard addressing
5587 mode would be better. */
5588 && ! dead_or_set_p (y, SET_DEST (x))
5589 && try_pre_increment (y, SET_DEST (x), amount))
5591 /* We have found a suitable auto-increment
5592 and already changed insn Y to do it.
5593 So flush this increment-instruction. */
5594 PUT_CODE (insn, NOTE);
5595 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
5596 NOTE_SOURCE_FILE (insn) = 0;
5597 /* Count a reference to this reg for the increment
insn we are deleting. When a reg is incremented,
5599 spilling it is worse, so we want to make that
5600 less likely. */
5601 if (regno >= FIRST_PSEUDO_REGISTER)
5603 REG_N_REFS (regno) += (optimize_size ? 1
5604 : pbi->bb->loop_depth + 1);
5605 REG_N_SETS (regno)++;
5607 return 1;
5609 return 0;
5612 /* Try to change INSN so that it does pre-increment or pre-decrement
5613 addressing on register REG in order to add AMOUNT to REG.
5614 AMOUNT is negative for pre-decrement.
5615 Returns 1 if the change could be made.
5616 This checks all about the validity of the result of modifying INSN. */
5618 static int
5619 try_pre_increment (insn, reg, amount)
5620 rtx insn, reg;
5621 HOST_WIDE_INT amount;
5623 register rtx use;
5625 /* Nonzero if we can try to make a pre-increment or pre-decrement.
5626 For example, addl $4,r1; movl (r1),... can become movl +(r1),... */
5627 int pre_ok = 0;
5628 /* Nonzero if we can try to make a post-increment or post-decrement.
5629 For example, addl $4,r1; movl -4(r1),... can become movl (r1)+,...
5630 It is possible for both PRE_OK and POST_OK to be nonzero if the machine
5631 supports both pre-inc and post-inc, or both pre-dec and post-dec. */
5632 int post_ok = 0;
5634 /* Nonzero if the opportunity actually requires post-inc or post-dec. */
5635 int do_post = 0;
5637 /* From the sign of increment, see which possibilities are conceivable
5638 on this target machine. */
5639 if (HAVE_PRE_INCREMENT && amount > 0)
5640 pre_ok = 1;
5641 if (HAVE_POST_INCREMENT && amount > 0)
5642 post_ok = 1;
5644 if (HAVE_PRE_DECREMENT && amount < 0)
5645 pre_ok = 1;
5646 if (HAVE_POST_DECREMENT && amount < 0)
5647 post_ok = 1;
5649 if (! (pre_ok || post_ok))
5650 return 0;
5652 /* It is not safe to add a side effect to a jump insn
5653 because if the incremented register is spilled and must be reloaded
5654 there would be no way to store the incremented value back in memory. */
5656 if (GET_CODE (insn) == JUMP_INSN)
5657 return 0;
5659 use = 0;
5660 if (pre_ok)
5661 use = find_use_as_address (PATTERN (insn), reg, 0);
5662 if (post_ok && (use == 0 || use == (rtx) 1))
5664 use = find_use_as_address (PATTERN (insn), reg, -amount);
5665 do_post = 1;
5668 if (use == 0 || use == (rtx) 1)
5669 return 0;
5671 if (GET_MODE_SIZE (GET_MODE (use)) != (amount > 0 ? amount : - amount))
5672 return 0;
5674 /* See if this combination of instruction and addressing mode exists. */
5675 if (! validate_change (insn, &XEXP (use, 0),
5676 gen_rtx_fmt_e (amount > 0
5677 ? (do_post ? POST_INC : PRE_INC)
5678 : (do_post ? POST_DEC : PRE_DEC),
5679 Pmode, reg), 0))
5680 return 0;
5682 /* Record that this insn now has an implicit side effect on X. */
5683 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, reg, REG_NOTES (insn));
5684 return 1;
5687 #endif /* AUTO_INC_DEC */
5689 /* Find the place in the rtx X where REG is used as a memory address.
5690 Return the MEM rtx that so uses it.
5691 If PLUSCONST is nonzero, search instead for a memory address equivalent to
5692 (plus REG (const_int PLUSCONST)).
5694 If such an address does not appear, return 0.
5695 If REG appears more than once, or is used other than in such an address,
5696 return (rtx)1. */
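/* For example (hypothetical rtl), with PLUSCONST == 4,

(set (reg 101) (mem (plus (reg 100) (const_int 4))))

returns the inner MEM; if REG also appeared elsewhere in X, we would
return (rtx) 1 instead. */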
rtx
find_use_as_address (x, reg, plusconst)
5700 register rtx x;
5701 rtx reg;
5702 HOST_WIDE_INT plusconst;
5704 enum rtx_code code = GET_CODE (x);
5705 const char *fmt = GET_RTX_FORMAT (code);
5706 register int i;
5707 register rtx value = 0;
5708 register rtx tem;
5710 if (code == MEM && XEXP (x, 0) == reg && plusconst == 0)
5711 return x;
5713 if (code == MEM && GET_CODE (XEXP (x, 0)) == PLUS
5714 && XEXP (XEXP (x, 0), 0) == reg
5715 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
5716 && INTVAL (XEXP (XEXP (x, 0), 1)) == plusconst)
5717 return x;
5719 if (code == SIGN_EXTRACT || code == ZERO_EXTRACT)
5721 /* If REG occurs inside a MEM used in a bit-field reference,
5722 that is unacceptable. */
5723 if (find_use_as_address (XEXP (x, 0), reg, 0) != 0)
5724 return (rtx) (HOST_WIDE_INT) 1;
5727 if (x == reg)
5728 return (rtx) (HOST_WIDE_INT) 1;
5730 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5732 if (fmt[i] == 'e')
5734 tem = find_use_as_address (XEXP (x, i), reg, plusconst);
5735 if (value == 0)
5736 value = tem;
5737 else if (tem != 0)
5738 return (rtx) (HOST_WIDE_INT) 1;
5740 else if (fmt[i] == 'E')
5742 register int j;
5743 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
5745 tem = find_use_as_address (XVECEXP (x, i, j), reg, plusconst);
5746 if (value == 0)
5747 value = tem;
5748 else if (tem != 0)
5749 return (rtx) (HOST_WIDE_INT) 1;
5754 return value;
5757 /* Write information about registers and basic blocks into FILE.
5758 This is part of making a debugging dump. */
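/* Illustrative output only: a regset might be dumped as

6 [bp] 109 111

i.e. each live register number in turn, with the target's name for any
hard register appended in brackets. */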
5760 void
5761 dump_regset (r, outf)
5762 regset r;
5763 FILE *outf;
5765 int i;
5766 if (r == NULL)
5768 fputs (" (nil)", outf);
5769 return;
5772 EXECUTE_IF_SET_IN_REG_SET (r, 0, i,
5774 fprintf (outf, " %d", i);
5775 if (i < FIRST_PSEUDO_REGISTER)
5776 fprintf (outf, " [%s]",
5777 reg_names[i]);
5781 void
5782 debug_regset (r)
5783 regset r;
5785 dump_regset (r, stderr);
5786 putc ('\n', stderr);
5789 void
5790 dump_flow_info (file)
5791 FILE *file;
5793 register int i;
5794 static const char * const reg_class_names[] = REG_CLASS_NAMES;
5796 fprintf (file, "%d registers.\n", max_regno);
5797 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
5798 if (REG_N_REFS (i))
5800 enum reg_class class, altclass;
5801 fprintf (file, "\nRegister %d used %d times across %d insns",
5802 i, REG_N_REFS (i), REG_LIVE_LENGTH (i));
5803 if (REG_BASIC_BLOCK (i) >= 0)
5804 fprintf (file, " in block %d", REG_BASIC_BLOCK (i));
5805 if (REG_N_SETS (i))
5806 fprintf (file, "; set %d time%s", REG_N_SETS (i),
5807 (REG_N_SETS (i) == 1) ? "" : "s");
5808 if (REG_USERVAR_P (regno_reg_rtx[i]))
5809 fprintf (file, "; user var");
5810 if (REG_N_DEATHS (i) != 1)
5811 fprintf (file, "; dies in %d places", REG_N_DEATHS (i));
5812 if (REG_N_CALLS_CROSSED (i) == 1)
5813 fprintf (file, "; crosses 1 call");
5814 else if (REG_N_CALLS_CROSSED (i))
5815 fprintf (file, "; crosses %d calls", REG_N_CALLS_CROSSED (i));
5816 if (PSEUDO_REGNO_BYTES (i) != UNITS_PER_WORD)
5817 fprintf (file, "; %d bytes", PSEUDO_REGNO_BYTES (i));
5818 class = reg_preferred_class (i);
5819 altclass = reg_alternate_class (i);
5820 if (class != GENERAL_REGS || altclass != ALL_REGS)
5822 if (altclass == ALL_REGS || class == ALL_REGS)
5823 fprintf (file, "; pref %s", reg_class_names[(int) class]);
5824 else if (altclass == NO_REGS)
5825 fprintf (file, "; %s or none", reg_class_names[(int) class]);
5826 else
5827 fprintf (file, "; pref %s, else %s",
5828 reg_class_names[(int) class],
5829 reg_class_names[(int) altclass]);
5831 if (REGNO_POINTER_FLAG (i))
5832 fprintf (file, "; pointer");
5833 fprintf (file, ".\n");
5836 fprintf (file, "\n%d basic blocks, %d edges.\n", n_basic_blocks, n_edges);
5837 for (i = 0; i < n_basic_blocks; i++)
5839 register basic_block bb = BASIC_BLOCK (i);
5840 register edge e;
5842 fprintf (file, "\nBasic block %d: first insn %d, last %d, loop_depth %d, count %d.\n",
5843 i, INSN_UID (bb->head), INSN_UID (bb->end), bb->loop_depth, bb->count);
5845 fprintf (file, "Predecessors: ");
5846 for (e = bb->pred; e; e = e->pred_next)
5847 dump_edge_info (file, e, 0);
5849 fprintf (file, "\nSuccessors: ");
5850 for (e = bb->succ; e; e = e->succ_next)
5851 dump_edge_info (file, e, 1);
5853 fprintf (file, "\nRegisters live at start:");
5854 dump_regset (bb->global_live_at_start, file);
5856 fprintf (file, "\nRegisters live at end:");
5857 dump_regset (bb->global_live_at_end, file);
5859 putc ('\n', file);
5862 putc ('\n', file);
5865 void
5866 debug_flow_info ()
5868 dump_flow_info (stderr);
5871 static void
5872 dump_edge_info (file, e, do_succ)
5873 FILE *file;
5874 edge e;
5875 int do_succ;
5877 basic_block side = (do_succ ? e->dest : e->src);
5879 if (side == ENTRY_BLOCK_PTR)
5880 fputs (" ENTRY", file);
5881 else if (side == EXIT_BLOCK_PTR)
5882 fputs (" EXIT", file);
5883 else
5884 fprintf (file, " %d", side->index);
5886 if (e->count)
5887 fprintf (file, " count:%d", e->count);
5889 if (e->flags)
5891 static const char * const bitnames[] = {
5892 "fallthru", "crit", "ab", "abcall", "eh", "fake"
5894 int comma = 0;
5895 int i, flags = e->flags;
5897 fputc (' ', file);
5898 fputc ('(', file);
5899 for (i = 0; flags; i++)
5900 if (flags & (1 << i))
5902 flags &= ~(1 << i);
5904 if (comma)
5905 fputc (',', file);
5906 if (i < (int) (sizeof (bitnames) / sizeof (*bitnames)))
5907 fputs (bitnames[i], file);
5908 else
5909 fprintf (file, "%d", i);
5910 comma = 1;
5912 fputc (')', file);
5916 /* Print out one basic block with live information at start and end. */
5918 void
5919 dump_bb (bb, outf)
5920 basic_block bb;
5921 FILE *outf;
5923 rtx insn;
5924 rtx last;
5925 edge e;
5927 fprintf (outf, ";; Basic block %d, loop depth %d, count %d",
5928 bb->index, bb->loop_depth, bb->count);
5929 if (bb->eh_beg != -1 || bb->eh_end != -1)
5930 fprintf (outf, ", eh regions %d/%d", bb->eh_beg, bb->eh_end);
5931 putc ('\n', outf);
5933 fputs (";; Predecessors: ", outf);
5934 for (e = bb->pred; e; e = e->pred_next)
5935 dump_edge_info (outf, e, 0);
5936 putc ('\n', outf);
5938 fputs (";; Registers live at start:", outf);
5939 dump_regset (bb->global_live_at_start, outf);
5940 putc ('\n', outf);
5942 for (insn = bb->head, last = NEXT_INSN (bb->end);
5943 insn != last;
5944 insn = NEXT_INSN (insn))
5945 print_rtl_single (outf, insn);
5947 fputs (";; Registers live at end:", outf);
5948 dump_regset (bb->global_live_at_end, outf);
5949 putc ('\n', outf);
5951 fputs (";; Successors: ", outf);
5952 for (e = bb->succ; e; e = e->succ_next)
5953 dump_edge_info (outf, e, 1);
5954 putc ('\n', outf);
5957 void
5958 debug_bb (bb)
5959 basic_block bb;
5961 dump_bb (bb, stderr);
5964 void
5965 debug_bb_n (n)
5966 int n;
5968 dump_bb (BASIC_BLOCK (n), stderr);
5971 /* Like print_rtl, but also print out live information for the start and
5972 end of each basic block. */
5974 void
5975 print_rtl_with_bb (outf, rtx_first)
5976 FILE *outf;
5977 rtx rtx_first;
5979 register rtx tmp_rtx;
5981 if (rtx_first == 0)
5982 fprintf (outf, "(nil)\n");
5983 else
5985 int i;
5986 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
5987 int max_uid = get_max_uid ();
5988 basic_block *start = (basic_block *)
5989 xcalloc (max_uid, sizeof (basic_block));
5990 basic_block *end = (basic_block *)
5991 xcalloc (max_uid, sizeof (basic_block));
5992 enum bb_state *in_bb_p = (enum bb_state *)
5993 xcalloc (max_uid, sizeof (enum bb_state));
5995 for (i = n_basic_blocks - 1; i >= 0; i--)
5997 basic_block bb = BASIC_BLOCK (i);
5998 rtx x;
6000 start[INSN_UID (bb->head)] = bb;
6001 end[INSN_UID (bb->end)] = bb;
6002 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
6004 enum bb_state state = IN_MULTIPLE_BB;
6005 if (in_bb_p[INSN_UID (x)] == NOT_IN_BB)
6006 state = IN_ONE_BB;
6007 in_bb_p[INSN_UID (x)] = state;
6009 if (x == bb->end)
6010 break;
6014 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
6016 int did_output;
6017 basic_block bb;
6019 if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
6021 fprintf (outf, ";; Start of basic block %d, registers live:",
6022 bb->index);
6023 dump_regset (bb->global_live_at_start, outf);
6024 putc ('\n', outf);
6027 if (in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB
6028 && GET_CODE (tmp_rtx) != NOTE
6029 && GET_CODE (tmp_rtx) != BARRIER)
6030 fprintf (outf, ";; Insn is not within a basic block\n");
6031 else if (in_bb_p[INSN_UID (tmp_rtx)] == IN_MULTIPLE_BB)
6032 fprintf (outf, ";; Insn is in multiple basic blocks\n");
6034 did_output = print_rtl_single (outf, tmp_rtx);
6036 if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
6038 fprintf (outf, ";; End of basic block %d, registers live:\n",
6039 bb->index);
6040 dump_regset (bb->global_live_at_end, outf);
6041 putc ('\n', outf);
6044 if (did_output)
6045 putc ('\n', outf);
6048 free (start);
6049 free (end);
6050 free (in_bb_p);
6053 if (current_function_epilogue_delay_list != 0)
6055 fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
6056 for (tmp_rtx = current_function_epilogue_delay_list; tmp_rtx != 0;
6057 tmp_rtx = XEXP (tmp_rtx, 1))
6058 print_rtl_single (outf, XEXP (tmp_rtx, 0));
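/* Usage sketch (not part of the original file): a convenient way to get
   this dump from a debugger is a tiny wrapper, analogous to
   debug_flow_info above.  The wrapper name is hypothetical.  */
#if 0
static void
debug_rtl_with_bb ()
{
  print_rtl_with_bb (stderr, get_insns ());
}
#endif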
6062 /* Compute dominator relationships using new flow graph structures. */
6064 void
6065 compute_flow_dominators (dominators, post_dominators)
6066 sbitmap *dominators;
6067 sbitmap *post_dominators;
6069 int bb;
6070 sbitmap *temp_bitmap;
6071 edge e;
6072 basic_block *worklist, *workend, *qin, *qout;
6073 int qlen;
6075 /* Allocate a worklist array/queue. Entries are only added to the
6076 list if they were not already on the list. So the size is
6077 bounded by the number of basic blocks. */
6078 worklist = (basic_block *) xmalloc (sizeof (basic_block) * n_basic_blocks);
6079 workend = &worklist[n_basic_blocks];
6081 temp_bitmap = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
6082 sbitmap_vector_zero (temp_bitmap, n_basic_blocks);
6084 if (dominators)
6086 /* The optimistic setting of dominators requires us to put every
6087 block on the work list initially. */
6088 qin = qout = worklist;
6089 for (bb = 0; bb < n_basic_blocks; bb++)
6091 *qin++ = BASIC_BLOCK (bb);
6092 BASIC_BLOCK (bb)->aux = BASIC_BLOCK (bb);
6094 qlen = n_basic_blocks;
6095 qin = worklist;
6097 /* We want a maximal solution, so initially assume everything dominates
6098 everything else. */
6099 sbitmap_vector_ones (dominators, n_basic_blocks);
6101 /* Mark successors of the entry block so we can identify them below. */
6102 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6103 e->dest->aux = ENTRY_BLOCK_PTR;
6105 /* Iterate until the worklist is empty. */
6106 while (qlen)
6108 /* Take the first entry off the worklist. */
6109 basic_block b = *qout++;
6110 if (qout >= workend)
6111 qout = worklist;
6112 qlen--;
6114 bb = b->index;
6116 /* Compute the intersection of the dominators of all the
6117 predecessor blocks.
6119 If one of the predecessor blocks is the ENTRY block, then the
6120 intersection of the dominators of the predecessor blocks is
6121 defined as the null set. We can identify such blocks by the
6122 special value in the AUX field in the block structure. */
6123 if (b->aux == ENTRY_BLOCK_PTR)
6125 /* Do not clear the aux field for blocks which are
6126 successors of the ENTRY block. That way we never add
6127 them to the worklist again.
6129 The intersection of the dominators of the preds of this block
6130 is defined as the null set. */
6131 sbitmap_zero (temp_bitmap[bb]);
6133 else
6135 /* Clear the aux field of this block so it can be added to
6136 the worklist again if necessary. */
6137 b->aux = NULL;
6138 sbitmap_intersection_of_preds (temp_bitmap[bb], dominators, bb);
6141 /* Make sure each block always dominates itself. */
6142 SET_BIT (temp_bitmap[bb], bb);
6144 /* If the out state of this block changed, then we need to
6145 add the successors of this block to the worklist if they
6146 are not already on the worklist. */
6147 if (sbitmap_a_and_b (dominators[bb], dominators[bb], temp_bitmap[bb]))
6149 for (e = b->succ; e; e = e->succ_next)
6151 if (!e->dest->aux && e->dest != EXIT_BLOCK_PTR)
6153 *qin++ = e->dest;
6154 if (qin >= workend)
6155 qin = worklist;
6156 qlen++;
6158 e->dest->aux = e;
6165 if (post_dominators)
6167 /* The optimistic setting of post dominators requires us to put every
6168 block on the work list initially. */
6169 qin = qout = worklist;
6170 for (bb = 0; bb < n_basic_blocks; bb++)
6172 *qin++ = BASIC_BLOCK (bb);
6173 BASIC_BLOCK (bb)->aux = BASIC_BLOCK (bb);
6175 qlen = n_basic_blocks;
6176 qin = worklist;
6178 /* We want a maximal solution, so initially assume everything post
6179 dominates everything else. */
6180 sbitmap_vector_ones (post_dominators, n_basic_blocks);
6182 /* Mark predecessors of the exit block so we can identify them below. */
6183 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
6184 e->src->aux = EXIT_BLOCK_PTR;
6186 /* Iterate until the worklist is empty. */
6187 while (qlen)
6189 /* Take the first entry off the worklist. */
6190 basic_block b = *qout++;
6191 if (qout >= workend)
6192 qout = worklist;
6193 qlen--;
6195 bb = b->index;
6197 /* Compute the intersection of the post dominators of all the
6198 successor blocks.
6200 If one of the successor blocks is the EXIT block, then the
6201 intersection of the post dominators of the successor blocks is
6202 defined as the null set. We can identify such blocks by the
6203 special value in the AUX field in the block structure. */
6204 if (b->aux == EXIT_BLOCK_PTR)
6206 /* Do not clear the aux field for blocks which are
6207 predecessors of the EXIT block. That way we never
6208 add them to the worklist again.
6210 The intersection of the post dominators of the succs of this
6211 block is defined as the null set. */
6212 sbitmap_zero (temp_bitmap[bb]);
6214 else
6216 /* Clear the aux field of this block so it can be added to
6217 the worklist again if necessary. */
6218 b->aux = NULL;
6219 sbitmap_intersection_of_succs (temp_bitmap[bb],
6220 post_dominators, bb);
6223 /* Make sure each block always post dominates itself. */
6224 SET_BIT (temp_bitmap[bb], bb);
6226 /* If the out state of this block changed, then we need to
6227 add the predecessors of this block to the worklist if they
6228 are not already on the worklist. */
6229 if (sbitmap_a_and_b (post_dominators[bb],
6230 post_dominators[bb],
6231 temp_bitmap[bb]))
6233 for (e = b->pred; e; e = e->pred_next)
6235 if (!e->src->aux && e->src != ENTRY_BLOCK_PTR)
6237 *qin++ = e->src;
6238 if (qin >= workend)
6239 qin = worklist;
6240 qlen++;
6242 e->src->aux = e;
6249 free (worklist);
6250 free (temp_bitmap);
6253 /* Given DOMINATORS, compute the immediate dominators into IDOM. If a
6254 block is dominated only by itself, its entry remains as INVALID_BLOCK. */
6256 void
6257 compute_immediate_dominators (idom, dominators)
6258 int *idom;
6259 sbitmap *dominators;
6261 sbitmap *tmp;
6262 int b;
6264 tmp = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
6266 /* Begin with tmp(n) = dom(n) - { n }. */
6267 for (b = n_basic_blocks; --b >= 0;)
6269 sbitmap_copy (tmp[b], dominators[b]);
6270 RESET_BIT (tmp[b], b);
6273 /* Subtract out all of our dominators' dominators. */
6274 for (b = n_basic_blocks; --b >= 0;)
6276 sbitmap tmp_b = tmp[b];
6277 int s;
6279 for (s = n_basic_blocks; --s >= 0;)
6280 if (TEST_BIT (tmp_b, s))
6281 sbitmap_difference (tmp_b, tmp_b, tmp[s]);
6284 /* Find the one bit set in the bitmap and put it in the output array. */
6285 for (b = n_basic_blocks; --b >= 0;)
6287 int t;
6288 EXECUTE_IF_SET_IN_SBITMAP (tmp[b], 0, t, { idom[b] = t; });
6291 sbitmap_vector_free (tmp);
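/* Usage sketch (not part of the original file), assuming a client pass
   that wants immediate dominators: it allocates the bitmap vector and
   the IDOM array, seeds the array with INVALID_BLOCK as described
   above, and frees everything afterwards.  The function name is
   hypothetical.  */
#if 0
static void
example_compute_idoms ()
{
  sbitmap *dom = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
  int *idom = (int *) xmalloc (n_basic_blocks * sizeof (int));
  int i;

  /* Entries for blocks dominated only by themselves stay INVALID_BLOCK.  */
  for (i = 0; i < n_basic_blocks; i++)
    idom[i] = INVALID_BLOCK;

  compute_flow_dominators (dom, NULL);
  compute_immediate_dominators (idom, dom);

  /* ... consume idom[] here ...  */

  free (idom);
  sbitmap_vector_free (dom);
}
#endif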
6294 /* Given POSTDOMINATORS, compute the immediate postdominators into
6295 IDOM. If a block is only dominated by itself, its entry remains as
6296 INVALID_BLOCK. */
6298 void
6299 compute_immediate_postdominators (idom, postdominators)
6300 int *idom;
6301 sbitmap *postdominators;
6303 compute_immediate_dominators (idom, postdominators);
6306 /* Recompute register set/reference counts immediately prior to register
6307 allocation.
6309 This avoids problems with set/reference counts changing to/from values
6310 which have special meanings to the register allocators.
6312 Additionally, the reference counts are the primary component used by the
6313 register allocators to prioritize pseudos for allocation to hard regs.
6314 More accurate reference counts generally lead to better register allocation.
6316 F is the first insn to be scanned (currently unused).
6318 LOOP_STEP denotes how much loop_depth should be incremented per
6319 loop nesting level in order to increase the ref count more for
6320 references in a loop (also currently unused).
6322 It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and
6323 possibly other information which is used by the register allocators. */
6325 void
6326 recompute_reg_usage (f, loop_step)
6327 rtx f ATTRIBUTE_UNUSED;
6328 int loop_step ATTRIBUTE_UNUSED;
6330 allocate_reg_life_data ();
6331 update_life_info (NULL, UPDATE_LIFE_LOCAL, PROP_REG_INFO);
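/* Usage sketch (not part of the original file): a hypothetical caller
   would invoke this just before register allocation.  With the current
   implementation both arguments are ignored, so any values serve.  */
#if 0
static void
example_refresh_reg_usage ()
{
  recompute_reg_usage (get_insns (), 2);
}
#endif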
6334 /* Optionally removes all the REG_DEAD and REG_UNUSED notes from a set of
6335 blocks. If BLOCKS is NULL, assume the universal set. Returns the
6336 number of registers that died. */
6338 int
6339 count_or_remove_death_notes (blocks, kill)
6340 sbitmap blocks;
6341 int kill;
6343 int i, count = 0;
6345 for (i = n_basic_blocks - 1; i >= 0; --i)
6347 basic_block bb;
6348 rtx insn;
6350 if (blocks && ! TEST_BIT (blocks, i))
6351 continue;
6353 bb = BASIC_BLOCK (i);
6355 for (insn = bb->head;; insn = NEXT_INSN (insn))
6357 if (INSN_P (insn))
6359 rtx *pprev = &REG_NOTES (insn);
6360 rtx link = *pprev;
6362 while (link)
6364 switch (REG_NOTE_KIND (link))
6366 case REG_DEAD:
6367 if (GET_CODE (XEXP (link, 0)) == REG)
6369 rtx reg = XEXP (link, 0);
6370 int n;
6372 if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
6373 n = 1;
6374 else
6375 n = HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg));
6376 count += n;
6378 /* Fall through. */
6380 case REG_UNUSED:
6381 if (kill)
6383 rtx next = XEXP (link, 1);
6384 free_EXPR_LIST_node (link);
6385 *pprev = link = next;
6386 break;
6388 /* Fall through. */
6390 default:
6391 pprev = &XEXP (link, 1);
6392 link = *pprev;
6393 break;
6398 if (insn == bb->end)
6399 break;
6403 return count;
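/* Usage sketch (not part of the original file): count the death notes
   over the whole function, then strip them from block 0 only.  The
   variable names are hypothetical.  */
#if 0
static void
example_death_notes ()
{
  sbitmap blocks;
  int deaths;

  /* A NULL block set means the universal set; KILL == 0 only counts.  */
  deaths = count_or_remove_death_notes (NULL, 0);
  fprintf (stderr, "%d deaths\n", deaths);

  /* Remove the notes from block 0 alone.  */
  blocks = sbitmap_alloc (n_basic_blocks);
  sbitmap_zero (blocks);
  SET_BIT (blocks, 0);
  count_or_remove_death_notes (blocks, 1);
  sbitmap_free (blocks);
}
#endif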
6406 /* Record INSN's block as BB. */
6408 void
6409 set_block_for_insn (insn, bb)
6410 rtx insn;
6411 basic_block bb;
6413 size_t uid = INSN_UID (insn);
6414 if (uid >= basic_block_for_insn->num_elements)
6416 int new_size;
6418 /* Add one-eighth the size so we don't keep calling xrealloc. */
6419 new_size = uid + (uid + 7) / 8;
6421 VARRAY_GROW (basic_block_for_insn, new_size);
6423 VARRAY_BB (basic_block_for_insn, uid) = bb;
6426 /* Record INSN's block number as BB. */
6427 /* ??? This has got to go. */
6429 void
6430 set_block_num (insn, bb)
6431 rtx insn;
6432 int bb;
6434 set_block_for_insn (insn, BASIC_BLOCK (bb));
6437 /* Verify CFG consistency. This function checks some CFG invariants and
6438 aborts when something is wrong. The hope is that it will help to
6439 convert many optimization passes so they keep the CFG consistent.
6441 Currently it performs the following checks:
6443 - head/end pointers of each basic block
6444 - overlapping of basic blocks
6445 - edge list correctness
6446 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
6447 - tails of basic blocks (ensure that the boundary is necessary)
6448 - scans the body of each basic block for JUMP_INSN, CODE_LABEL
6449 and NOTE_INSN_BASIC_BLOCK
6450 - checks that all insns are within basic blocks
6451 (except the switch handling code, barriers and notes)
6452 - checks that all returns are followed by barriers
6454 In the future it can be extended to check a lot of other things as well
6455 (reachability of basic blocks, life information, etc.). */
6457 void
6458 verify_flow_info ()
6460 const int max_uid = get_max_uid ();
6461 const rtx rtx_first = get_insns ();
6462 rtx last_head = get_last_insn ();
6463 basic_block *bb_info;
6464 rtx x;
6465 int i, last_bb_num_seen, num_bb_notes, err = 0;
6467 bb_info = (basic_block *) xcalloc (max_uid, sizeof (basic_block));
6469 for (i = n_basic_blocks - 1; i >= 0; i--)
6471 basic_block bb = BASIC_BLOCK (i);
6472 rtx head = bb->head;
6473 rtx end = bb->end;
6475 /* Verify the end of the basic block is in the INSN chain. */
6476 for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
6477 if (x == end)
6478 break;
6479 if (!x)
6481 error ("End insn %d for block %d not found in the insn stream.",
6482 INSN_UID (end), bb->index);
6483 err = 1;
6486 /* Work backwards from the end to the head of the basic block
6487 to verify the head is in the RTL chain. */
6488 for (; x != NULL_RTX; x = PREV_INSN (x))
6490 /* While walking over the insn chain, verify insns appear
6491 in only one basic block and initialize the BB_INFO array
6492 used by other passes. */
6493 if (bb_info[INSN_UID (x)] != NULL)
6495 error ("Insn %d is in multiple basic blocks (%d and %d)",
6496 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
6497 err = 1;
6499 bb_info[INSN_UID (x)] = bb;
6501 if (x == head)
6502 break;
6504 if (!x)
6506 error ("Head insn %d for block %d not found in the insn stream.",
6507 INSN_UID (head), bb->index);
6508 err = 1;
6511 last_head = x;
6514 /* Now check the basic blocks (boundaries etc.) */
6515 for (i = n_basic_blocks - 1; i >= 0; i--)
6517 basic_block bb = BASIC_BLOCK (i);
6518 /* Check correctness of edge lists. */
6519 edge e;
6521 e = bb->succ;
6522 while (e)
6524 if (e->src != bb)
6526 fprintf (stderr,
6527 "verify_flow_info: Basic block %d succ edge is corrupted\n",
6528 bb->index);
6529 fprintf (stderr, "Predecessor: ");
6530 dump_edge_info (stderr, e, 0);
6531 fprintf (stderr, "\nSuccessor: ");
6532 dump_edge_info (stderr, e, 1);
6533 fflush (stderr);
6534 err = 1;
6536 if (e->dest != EXIT_BLOCK_PTR)
6538 edge e2 = e->dest->pred;
6539 while (e2 && e2 != e)
6540 e2 = e2->pred_next;
6541 if (!e2)
6543 error ("Basic block %i edge lists are corrupted", bb->index);
6544 err = 1;
6547 e = e->succ_next;
6550 e = bb->pred;
6551 while (e)
6553 if (e->dest != bb)
6555 error ("Basic block %d pred edge is corrupted", bb->index);
6556 fputs ("Predecessor: ", stderr);
6557 dump_edge_info (stderr, e, 0);
6558 fputs ("\nSuccessor: ", stderr);
6559 dump_edge_info (stderr, e, 1);
6560 fputc ('\n', stderr);
6561 err = 1;
6563 if (e->src != ENTRY_BLOCK_PTR)
6565 edge e2 = e->src->succ;
6566 while (e2 && e2 != e)
6567 e2 = e2->succ_next;
6568 if (!e2)
6570 error ("Basic block %i edge lists are corrupted", bb->index);
6571 err = 1;
6574 e = e->pred_next;
6577 /* OK, the pointers are correct. Now check the header of the
6578 basic block. It ought to contain an optional CODE_LABEL
6579 followed by a NOTE_INSN_BASIC_BLOCK note. */
6580 x = bb->head;
6581 if (GET_CODE (x) == CODE_LABEL)
6583 if (bb->end == x)
6585 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
6586 bb->index);
6587 err = 1;
6589 x = NEXT_INSN (x);
6591 if (!NOTE_INSN_BASIC_BLOCK_P (x) || NOTE_BASIC_BLOCK (x) != bb)
6593 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d\n",
6594 bb->index);
6595 err = 1;
6598 if (bb->end == x)
6600 /* Do checks for empty blocks here */
6602 else
6604 x = NEXT_INSN (x);
6605 while (x)
6607 if (NOTE_INSN_BASIC_BLOCK_P (x))
6609 error ("NOTE_INSN_BASIC_BLOCK %d in the middle of basic block %d",
6610 INSN_UID (x), bb->index);
6611 err = 1;
6614 if (x == bb->end)
6615 break;
6617 if (GET_CODE (x) == JUMP_INSN
6618 || GET_CODE (x) == CODE_LABEL
6619 || GET_CODE (x) == BARRIER)
6621 error ("In basic block %d:", bb->index);
6622 fatal_insn ("Flow control insn inside a basic block", x);
6625 x = NEXT_INSN (x);
6630 last_bb_num_seen = -1;
6631 num_bb_notes = 0;
6632 x = rtx_first;
6633 while (x)
6635 if (NOTE_INSN_BASIC_BLOCK_P (x))
6637 basic_block bb = NOTE_BASIC_BLOCK (x);
6638 num_bb_notes++;
6639 if (bb->index != last_bb_num_seen + 1)
6640 fatal ("Basic blocks not numbered consecutively");
6641 last_bb_num_seen = bb->index;
6644 if (!bb_info[INSN_UID (x)])
6646 switch (GET_CODE (x))
6648 case BARRIER:
6649 case NOTE:
6650 break;
6652 case CODE_LABEL:
6653 /* An addr_vec is placed outside any basic block. */
6654 if (NEXT_INSN (x)
6655 && GET_CODE (NEXT_INSN (x)) == JUMP_INSN
6656 && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
6657 || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC))
6659 x = NEXT_INSN (x);
6662 /* But in any case, non-deletable labels can appear anywhere. */
6663 break;
6665 default:
6666 fatal_insn ("Insn outside basic block", x);
6670 if (INSN_P (x)
6671 && GET_CODE (x) == JUMP_INSN
6672 && returnjump_p (x) && ! condjump_p (x)
6673 && ! (NEXT_INSN (x) && GET_CODE (NEXT_INSN (x)) == BARRIER))
6674 fatal_insn ("Return not followed by barrier", x);
6676 x = NEXT_INSN (x);
6679 if (num_bb_notes != n_basic_blocks)
6680 fatal ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
6681 num_bb_notes, n_basic_blocks);
6683 if (err)
6684 abort ();
6686 /* Clean up. */
6687 free (bb_info);
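/* Usage sketch (not part of the original file): a pass that rewrites
   the CFG would run the verifier when checking is enabled, in the
   style used elsewhere in GCC.  The transformation itself is
   hypothetical.  */
#if 0
static void
example_transform_cfg ()
{
  /* ... split, merge or redirect blocks and edges here ...  */

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
}
#endif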
6690 /* Functions to access an edge list with a vector representation.
6691 Enough data is kept such that given an index number, the
6692 pred and succ that edge represents can be determined, or
6693 given a pred and a succ, its index number can be returned.
6694 This allows memory-hungry algorithms to represent the normally
6695 full matrix of edge (pred,succ) with a single indexed vector,
6696 edge (EDGE_INDEX (pred, succ)), with no space wasted in the
6697 client code on sparse flow graphs. */
6699 /* This function initializes the edge list. The entire flow graph
6700 is processed: all edges are assigned a number, and the data
6701 structure is filled in. */
6703 struct edge_list *
6704 create_edge_list ()
6706 struct edge_list *elist;
6707 edge e;
6708 int num_edges;
6709 int x;
6710 int block_count;
6712 block_count = n_basic_blocks + 2; /* Include the entry and exit blocks. */
6714 num_edges = 0;
6716 /* Determine the number of edges in the flow graph by counting successor
6717 edges on each basic block. */
6718 for (x = 0; x < n_basic_blocks; x++)
6720 basic_block bb = BASIC_BLOCK (x);
6722 for (e = bb->succ; e; e = e->succ_next)
6723 num_edges++;
6725 /* Don't forget successors of the entry block. */
6726 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6727 num_edges++;
6729 elist = (struct edge_list *) xmalloc (sizeof (struct edge_list));
6730 elist->num_blocks = block_count;
6731 elist->num_edges = num_edges;
6732 elist->index_to_edge = (edge *) xmalloc (sizeof (edge) * num_edges);
6734 num_edges = 0;
6736 /* Follow successors of the entry block, and register these edges. */
6737 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6739 elist->index_to_edge[num_edges] = e;
6740 num_edges++;
6743 for (x = 0; x < n_basic_blocks; x++)
6745 basic_block bb = BASIC_BLOCK (x);
6747 /* Follow all successors of blocks, and register these edges. */
6748 for (e = bb->succ; e; e = e->succ_next)
6750 elist->index_to_edge[num_edges] = e;
6751 num_edges++;
6754 return elist;
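/* Usage sketch (not part of the original file): build the compressed
   vector, walk every edge by index, look one edge up by its endpoints,
   and free the list.  NUM_EDGES, INDEX_EDGE and EDGE_INDEX are the
   accessor macros declared in basic-block.h.  */
#if 0
static void
example_edge_list (f)
     FILE *f;
{
  struct edge_list *elist = create_edge_list ();
  int x;

  for (x = 0; x < NUM_EDGES (elist); x++)
    dump_edge_info (f, INDEX_EDGE (elist, x), 1);

  /* EDGE_INDEX returns EDGE_INDEX_NO_EDGE when no such edge exists.  */
  if (n_basic_blocks > 0
      && EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (0))
	 != EDGE_INDEX_NO_EDGE)
    fprintf (f, "the entry block has an edge to block 0\n");

  free_edge_list (elist);
}
#endif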
6757 /* This function frees the memory associated with an edge list. */
6759 void
6760 free_edge_list (elist)
6761 struct edge_list *elist;
6763 if (elist)
6765 free (elist->index_to_edge);
6766 free (elist);
6770 /* This function provides debug output showing an edge list. */
6772 void
6773 print_edge_list (f, elist)
6774 FILE *f;
6775 struct edge_list *elist;
6777 int x;
6778 fprintf (f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
6779 elist->num_blocks - 2, elist->num_edges);
6781 for (x = 0; x < elist->num_edges; x++)
6783 fprintf (f, " %-4d - edge(", x);
6784 if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR)
6785 fprintf (f, "entry,");
6786 else
6787 fprintf (f, "%d,", INDEX_EDGE_PRED_BB (elist, x)->index);
6789 if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR)
6790 fprintf (f, "exit)\n");
6791 else
6792 fprintf (f, "%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
6796 /* This function provides an internal consistency check of an edge list,
6797 verifying that all edges are present, and that there are no
6798 extra edges. */
6800 void
6801 verify_edge_list (f, elist)
6802 FILE *f;
6803 struct edge_list *elist;
6805 int x, pred, succ, index;
6806 edge e;
6808 for (x = 0; x < n_basic_blocks; x++)
6810 basic_block bb = BASIC_BLOCK (x);
6812 for (e = bb->succ; e; e = e->succ_next)
6814 pred = e->src->index;
6815 succ = e->dest->index;
6816 index = EDGE_INDEX (elist, e->src, e->dest);
6817 if (index == EDGE_INDEX_NO_EDGE)
6819 fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
6820 continue;
6822 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
6823 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
6824 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
6825 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
6826 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
6827 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
6830 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6832 pred = e->src->index;
6833 succ = e->dest->index;
6834 index = EDGE_INDEX (elist, e->src, e->dest);
6835 if (index == EDGE_INDEX_NO_EDGE)
6837 fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
6838 continue;
6840 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
6841 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
6842 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
6843 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
6844 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
6845 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
6847 /* We've verified that all the edges are in the list; now let's make
6848 sure there are no spurious edges in the list. */
6850 for (pred = 0; pred < n_basic_blocks; pred++)
6851 for (succ = 0; succ < n_basic_blocks; succ++)
6853 basic_block p = BASIC_BLOCK (pred);
6854 basic_block s = BASIC_BLOCK (succ);
6856 int found_edge = 0;
6858 for (e = p->succ; e; e = e->succ_next)
6859 if (e->dest == s)
6861 found_edge = 1;
6862 break;
6864 for (e = s->pred; e; e = e->pred_next)
6865 if (e->src == p)
6867 found_edge = 1;
6868 break;
6870 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
6871 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6872 fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
6873 pred, succ);
6874 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
6875 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6876 fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
6877 pred, succ, EDGE_INDEX (elist, BASIC_BLOCK (pred),
6878 BASIC_BLOCK (succ)));
6880 for (succ = 0; succ < n_basic_blocks; succ++)
6882 basic_block p = ENTRY_BLOCK_PTR;
6883 basic_block s = BASIC_BLOCK (succ);
6885 int found_edge = 0;
6887 for (e = p->succ; e; e = e->succ_next)
6888 if (e->dest == s)
6890 found_edge = 1;
6891 break;
6893 for (e = s->pred; e; e = e->pred_next)
6894 if (e->src == p)
6896 found_edge = 1;
6897 break;
6899 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
6900 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6901 fprintf (f, "*** Edge (entry, %d) appears to not have an index\n",
6902 succ);
6903 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
6904 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6905 fprintf (f, "*** Edge (entry, %d) has index %d, but no edge exists\n",
6906 succ, EDGE_INDEX (elist, ENTRY_BLOCK_PTR,
6907 BASIC_BLOCK (succ)));
6909 for (pred = 0; pred < n_basic_blocks; pred++)
6911 basic_block p = BASIC_BLOCK (pred);
6912 basic_block s = EXIT_BLOCK_PTR;
6914 int found_edge = 0;
6916 for (e = p->succ; e; e = e->succ_next)
6917 if (e->dest == s)
6919 found_edge = 1;
6920 break;
6922 for (e = s->pred; e; e = e->pred_next)
6923 if (e->src == p)
6925 found_edge = 1;
6926 break;
6928 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
6929 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6930 fprintf (f, "*** Edge (%d, exit) appears to not have an index\n",
6931 pred);
6932 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
6933 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6934 fprintf (f, "*** Edge (%d, exit) has index %d, but no edge exists\n",
6935 pred, EDGE_INDEX (elist, BASIC_BLOCK (pred),
6936 EXIT_BLOCK_PTR));
6940 /* This routine will determine what, if any, edge there is between
6941 a specified predecessor and successor. */
6943 int
6944 find_edge_index (edge_list, pred, succ)
6945 struct edge_list *edge_list;
6946 basic_block pred, succ;
6948 int x;
6949 for (x = 0; x < NUM_EDGES (edge_list); x++)
6951 if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
6952 && INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
6953 return x;
6955 return (EDGE_INDEX_NO_EDGE);
6958 /* This function will remove an edge from the flow graph. */
6960 void
6961 remove_edge (e)
6962 edge e;
6964 edge last_pred = NULL;
6965 edge last_succ = NULL;
6966 edge tmp;
6967 basic_block src, dest;
6968 src = e->src;
6969 dest = e->dest;
6970 for (tmp = src->succ; tmp && tmp != e; tmp = tmp->succ_next)
6971 last_succ = tmp;
6973 if (!tmp)
6974 abort ();
6975 if (last_succ)
6976 last_succ->succ_next = e->succ_next;
6977 else
6978 src->succ = e->succ_next;
6980 for (tmp = dest->pred; tmp && tmp != e; tmp = tmp->pred_next)
6981 last_pred = tmp;
6983 if (!tmp)
6984 abort ();
6985 if (last_pred)
6986 last_pred->pred_next = e->pred_next;
6987 else
6988 dest->pred = e->pred_next;
6990 n_edges--;
6991 free (e);
6994 /* This routine will remove any fake successor edges for a basic block.
6995 When the edge is removed, it is also removed from whatever predecessor
6996 list it is in. */
6998 static void
6999 remove_fake_successors (bb)
7000 basic_block bb;
7002 edge e;
7003 for (e = bb->succ; e;)
7005 edge tmp = e;
7006 e = e->succ_next;
7007 if ((tmp->flags & EDGE_FAKE) == EDGE_FAKE)
7008 remove_edge (tmp);
7012 /* This routine will remove all fake edges from the flow graph. If
7013 we remove all fake successors, it will automatically remove all
7014 fake predecessors. */
7016 void
7017 remove_fake_edges ()
7019 int x;
7021 for (x = 0; x < n_basic_blocks; x++)
7022 remove_fake_successors (BASIC_BLOCK (x));
7024 /* We've handled all successors except the entry block's. */
7025 remove_fake_successors (ENTRY_BLOCK_PTR);
7028 /* This function will add a fake edge between any block which has no
7029 successors, and the exit block. Some data flow equations require these
7030 edges to exist. */
7032 void
7033 add_noreturn_fake_exit_edges ()
7035 int x;
7037 for (x = 0; x < n_basic_blocks; x++)
7038 if (BASIC_BLOCK (x)->succ == NULL)
7039 make_edge (NULL, BASIC_BLOCK (x), EXIT_BLOCK_PTR, EDGE_FAKE);
7042 /* This function adds a fake edge from each infinite loop to the
7043 exit block. Some optimizations require a path from each node to
7044 the exit node.
7046 See also Morgan, Figure 3.10, pp. 82-83.
7048 The current implementation is ugly, not attempting to minimize the
7049 number of inserted fake edges. To reduce the number of fake edges
7050 to insert, add fake edges from _innermost_ loops containing only
7051 nodes not reachable from the exit block. */
7053 void
7054 connect_infinite_loops_to_exit ()
7056 basic_block unvisited_block;
7058 /* Perform depth-first search in the reverse graph to find nodes
7059 reachable from the exit block. */
7060 struct depth_first_search_dsS dfs_ds;
7062 flow_dfs_compute_reverse_init (&dfs_ds);
7063 flow_dfs_compute_reverse_add_bb (&dfs_ds, EXIT_BLOCK_PTR);
7065 /* Repeatedly add fake edges, updating the unreachable nodes. */
7066 while (1)
7068 unvisited_block = flow_dfs_compute_reverse_execute (&dfs_ds);
7069 if (!unvisited_block)
7070 break;
7071 make_edge (NULL, unvisited_block, EXIT_BLOCK_PTR, EDGE_FAKE);
7072 flow_dfs_compute_reverse_add_bb (&dfs_ds, unvisited_block);
7075 flow_dfs_compute_reverse_finish (&dfs_ds);
7077 return;
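/* Usage sketch (not part of the original file): the usual pairing for
   a backward dataflow problem that needs every node to reach the exit
   block.  The analysis in the middle is hypothetical.  */
#if 0
static void
example_with_fake_edges ()
{
  /* Give successor-less blocks and infinite loops a path to the exit.  */
  add_noreturn_fake_exit_edges ();
  connect_infinite_loops_to_exit ();

  /* ... solve the dataflow problem here ...  */

  /* Restore the real CFG.  */
  remove_fake_edges ();
}
#endif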
7080 /* Redirect an edge's successor from one block to another. */
7082 void
7083 redirect_edge_succ (e, new_succ)
7084 edge e;
7085 basic_block new_succ;
7087 edge *pe;
7089 /* Disconnect the edge from the old successor block. */
7090 for (pe = &e->dest->pred; *pe != e; pe = &(*pe)->pred_next)
7091 continue;
7092 *pe = (*pe)->pred_next;
7094 /* Reconnect the edge to the new successor block. */
7095 e->pred_next = new_succ->pred;
7096 new_succ->pred = e;
7097 e->dest = new_succ;
7100 /* Redirect an edge's predecessor from one block to another. */
7102 void
7103 redirect_edge_pred (e, new_pred)
7104 edge e;
7105 basic_block new_pred;
7107 edge *pe;
7109 /* Disconnect the edge from the old predecessor block. */
7110 for (pe = &e->src->succ; *pe != e; pe = &(*pe)->succ_next)
7111 continue;
7112 *pe = (*pe)->succ_next;
7114 /* Reconnect the edge to the new predecessor block. */
7115 e->succ_next = new_pred->succ;
7116 new_pred->succ = e;
7117 e->src = new_pred;
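/* Usage sketch (not part of the original file): retarget every edge
   entering OLD_BB so that it enters NEW_BB instead, e.g. when OLD_BB
   is about to disappear.  Each call unlinks the edge from OLD_BB's
   predecessor list, so the loop terminates.  The parameter names are
   hypothetical.  */
#if 0
static void
example_retarget_preds (old_bb, new_bb)
     basic_block old_bb, new_bb;
{
  while (old_bb->pred)
    redirect_edge_succ (old_bb->pred, new_bb);
}
#endif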
7120 /* Dump the list of basic blocks in the bitmap NODES. */
7122 static void
7123 flow_nodes_print (str, nodes, file)
7124 const char *str;
7125 const sbitmap nodes;
7126 FILE *file;
7128 int node;
7130 fprintf (file, "%s { ", str);
7131 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {fprintf (file, "%d ", node);});
7132 fputs ("}\n", file);
7135 /* Dump the list of exiting edges in the array EDGES. */
7137 static void
7138 flow_exits_print (str, edges, num_edges, file)
7139 const char *str;
7140 const edge *edges;
7141 int num_edges;
7142 FILE *file;
7144 int i;
7146 fprintf (file, "%s { ", str);
7147 for (i = 0; i < num_edges; i++)
7148 fprintf (file, "%d->%d ", edges[i]->src->index, edges[i]->dest->index);
7149 fputs ("}\n", file);
7152 /* Dump loop related CFG information. */
7154 static void
7155 flow_loops_cfg_dump (loops, file)
7156 const struct loops *loops;
7157 FILE *file;
7159 int i;
7161 if (! loops->num || ! file || ! loops->cfg.dom)
7162 return;
7164 for (i = 0; i < n_basic_blocks; i++)
7166 edge succ;
7168 fprintf (file, ";; %d succs { ", i);
7169 for (succ = BASIC_BLOCK (i)->succ; succ; succ = succ->succ_next)
7170 fprintf (file, "%d ", succ->dest->index);
7171 flow_nodes_print ("} dom", loops->cfg.dom[i], file);
7174 /* Dump the DFS node order. */
7175 if (loops->cfg.dfs_order)
7177 fputs (";; DFS order: ", file);
7178 for (i = 0; i < n_basic_blocks; i++)
7179 fprintf (file, "%d ", loops->cfg.dfs_order[i]);
7180 fputs ("\n", file);
7182 /* Dump the reverse completion node order. */
7183 if (loops->cfg.rc_order)
7185 fputs (";; RC order: ", file);
7186 for (i = 0; i < n_basic_blocks; i++)
7187 fprintf (file, "%d ", loops->cfg.rc_order[i]);
7188 fputs ("\n", file);
7192 /* Return non-zero if the nodes of LOOP are a subset of OUTER. */
7194 static int
7195 flow_loop_nested_p (outer, loop)
7196 struct loop *outer;
7197 struct loop *loop;
7199 return sbitmap_a_subset_b_p (loop->nodes, outer->nodes);
7202 /* Dump the loop information specified by LOOPS to the stream FILE. */
7204 void
7205 flow_loops_dump (loops, file, verbose)
7206 const struct loops *loops;
7207 FILE *file;
7208 int verbose;
7210 int i;
7211 int num_loops;
7213 num_loops = loops->num;
7214 if (! num_loops || ! file)
7215 return;
7217 fprintf (file, ";; %d loops found, %d levels\n",
7218 num_loops, loops->levels);
7220 for (i = 0; i < num_loops; i++)
7222 struct loop *loop = &loops->array[i];
7224 fprintf (file, ";; loop %d (%d to %d):\n;; header %d, latch %d, pre-header %d, depth %d, level %d, outer %ld\n",
7225 i, INSN_UID (loop->header->head), INSN_UID (loop->latch->end),
7226 loop->header->index, loop->latch->index,
7227 loop->pre_header ? loop->pre_header->index : -1,
7228 loop->depth, loop->level,
7229 (long) (loop->outer ? (loop->outer - loops->array) : -1));
7230 fprintf (file, ";; %d", loop->num_nodes);
7231 flow_nodes_print (" nodes", loop->nodes, file);
7232 fprintf (file, ";; %d", loop->num_exits);
7233 flow_exits_print (" exits", loop->exits, loop->num_exits, file);
7235 if (loop->shared)
7237 int j;
7239 for (j = 0; j < i; j++)
7241 struct loop *oloop = &loops->array[j];
7243 if (loop->header == oloop->header)
7245 int disjoint;
7246 int smaller;
7248 smaller = loop->num_nodes < oloop->num_nodes;
7250 /* If the union of LOOP and OLOOP is different from
7251 the larger of LOOP and OLOOP, then LOOP and OLOOP
7252 must be disjoint. */
7253 disjoint = ! flow_loop_nested_p (smaller ? loop : oloop,
7254 smaller ? oloop : loop);
7255 fprintf (file,
7256 ";; loop header %d shared by loops %d, %d %s\n",
7257 loop->header->index, i, j,
7258 disjoint ? "disjoint" : "nested");
7263 if (verbose)
7265 /* Print diagnostics to compare our concept of a loop with
7266 what the loop notes say. */
7267 if (GET_CODE (PREV_INSN (loop->first->head)) != NOTE
7268 || NOTE_LINE_NUMBER (PREV_INSN (loop->first->head))
7269 != NOTE_INSN_LOOP_BEG)
7270 fprintf (file, ";; No NOTE_INSN_LOOP_BEG at %d\n",
7271 INSN_UID (PREV_INSN (loop->first->head)));
7272 if (GET_CODE (NEXT_INSN (loop->last->end)) != NOTE
7273 || NOTE_LINE_NUMBER (NEXT_INSN (loop->last->end))
7274 != NOTE_INSN_LOOP_END)
7275 fprintf (file, ";; No NOTE_INSN_LOOP_END at %d\n",
7276 INSN_UID (NEXT_INSN (loop->last->end)));
7280 if (verbose)
7281 flow_loops_cfg_dump (loops, file);
7284 /* Free all the memory allocated for LOOPS. */
7286 void
7287 flow_loops_free (loops)
7288 struct loops *loops;
7290 if (loops->array)
7292 int i;
7294 if (! loops->num)
7295 abort ();
7297 /* Free the loop descriptors. */
7298 for (i = 0; i < loops->num; i++)
7300 struct loop *loop = &loops->array[i];
7302 if (loop->nodes)
7303 sbitmap_free (loop->nodes);
7304 if (loop->exits)
7305 free (loop->exits);
7307 free (loops->array);
7308 loops->array = NULL;
7310 if (loops->cfg.dom)
7311 sbitmap_vector_free (loops->cfg.dom);
7312 if (loops->cfg.dfs_order)
7313 free (loops->cfg.dfs_order);
7315 sbitmap_free (loops->shared_headers);
7319 /* Find the exits from the loop using the bitmap of loop nodes NODES
7320 and store them in the EXITS array. Return the number of exits from the
7321 loop. */
7323 static int
7324 flow_loop_exits_find (nodes, exits)
7325 const sbitmap nodes;
7326 edge **exits;
7328 edge e;
7329 int node;
7330 int num_exits;
7332 *exits = NULL;
7334 /* Check all nodes within the loop to see if there are any
7335 successors not in the loop. Note that a node may have multiple
7336 exiting edges. */
7337 num_exits = 0;
7338 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
7339 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
7341 basic_block dest = e->dest;
7343 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
7344 num_exits++;
7348 if (! num_exits)
7349 return 0;
7351 *exits = (edge *) xmalloc (num_exits * sizeof (edge));
7353 /* Store all exiting edges into an array. */
7354 num_exits = 0;
7355 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
7356 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
7358 basic_block dest = e->dest;
7360 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
7361 (*exits)[num_exits++] = e;
7365 return num_exits;
7368 /* Find the nodes contained within the loop with header HEADER and
7369 latch LATCH and store in NODES. Return the number of nodes within
7370 the loop. */
7372 static int
7373 flow_loop_nodes_find (header, latch, nodes)
7374 basic_block header;
7375 basic_block latch;
7376 sbitmap nodes;
7378 basic_block *stack;
7379 int sp;
7380 int num_nodes = 0;
7382 stack = (basic_block *) xmalloc (n_basic_blocks * sizeof (basic_block));
7383 sp = 0;
7385 /* Start with only the loop header in the set of loop nodes. */
7386 sbitmap_zero (nodes);
7387 SET_BIT (nodes, header->index);
7388 num_nodes++;
7389 header->loop_depth++;
7391 /* Push the loop latch on to the stack. */
7392 if (! TEST_BIT (nodes, latch->index))
7394 SET_BIT (nodes, latch->index);
7395 latch->loop_depth++;
7396 num_nodes++;
7397 stack[sp++] = latch;
7400 while (sp)
7402 basic_block node;
7403 edge e;
7405 node = stack[--sp];
7406 for (e = node->pred; e; e = e->pred_next)
7408 basic_block ancestor = e->src;
7410 /* If the ancestor is not yet marked as part of the loop, add
7411 it to the set of loop nodes and push it onto the stack. */
7412 if (ancestor != ENTRY_BLOCK_PTR
7413 && ! TEST_BIT (nodes, ancestor->index))
7415 SET_BIT (nodes, ancestor->index);
7416 ancestor->loop_depth++;
7417 num_nodes++;
7418 stack[sp++] = ancestor;
7422 free (stack);
7423 return num_nodes;
7426 /* Compute the depth first search order and store it in the array
7427 DFS_ORDER if that is non-zero. If RC_ORDER is non-zero, store the
7428 reverse completion number for each node there. Returns the number
7429 of nodes visited. A depth first search tries to get as far away
7430 from the starting point as quickly as possible. */
7433 static int
7434 flow_depth_first_order_compute (dfs_order, rc_order)
7435 int *dfs_order;
7436 int *rc_order;
7438 edge *stack;
7439 int sp;
7440 int dfsnum = 0;
7441 int rcnum = n_basic_blocks - 1;
7442 sbitmap visited;
7444 /* Allocate stack for back-tracking up CFG. */
7445 stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
7446 sp = 0;
7448 /* Allocate bitmap to track nodes that have been visited. */
7449 visited = sbitmap_alloc (n_basic_blocks);
7451 /* None of the nodes in the CFG have been visited yet. */
7452 sbitmap_zero (visited);
7454 /* Push the first edge on to the stack. */
7455 stack[sp++] = ENTRY_BLOCK_PTR->succ;
7457 while (sp)
7459 edge e;
7460 basic_block src;
7461 basic_block dest;
7463 /* Look at the edge on the top of the stack. */
7464 e = stack[sp - 1];
7465 src = e->src;
7466 dest = e->dest;
7468 /* Check if the edge destination has been visited yet. */
7469 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
7471 /* Mark that we have visited the destination. */
7472 SET_BIT (visited, dest->index);
7474 if (dfs_order)
7475 dfs_order[dfsnum++] = dest->index;
7477 if (dest->succ)
7479 /* Since the DEST node has been visited for the first
7480 time, check its successors. */
7481 stack[sp++] = dest->succ;
7483 else
7485 /* There are no successors for the DEST node so assign
7486 its reverse completion number. */
7487 if (rc_order)
7488 rc_order[rcnum--] = dest->index;
7491 else
7493 if (! e->succ_next && src != ENTRY_BLOCK_PTR)
7495 /* There are no more successors for the SRC node
7496 so assign its reverse completion number. */
7497 if (rc_order)
7498 rc_order[rcnum--] = src->index;
7501 if (e->succ_next)
7502 stack[sp - 1] = e->succ_next;
7503 else
7504 sp--;
7508 free (stack);
7509 sbitmap_free (visited);
7511 /* The number of nodes visited should not be greater than
7512 n_basic_blocks. */
7513 if (dfsnum > n_basic_blocks)
7514 abort ();
7516 /* There are some nodes left in the CFG that are unreachable. */
7517 if (dfsnum < n_basic_blocks)
7518 abort ();
7519 return dfsnum;
7522 /* Perform a depth first search on the _reverse_ graph, marking
7523 the nodes visited as it goes.
7526 The computation is split into four pieces:
7528 flow_dfs_compute_reverse_init () creates the necessary data
7529 structures.
7531 flow_dfs_compute_reverse_add_bb () adds a basic block to the data
7532 structures. The block will start the search.
7534 flow_dfs_compute_reverse_execute () continues (or starts) the
7535 search using the block on the top of the stack, stopping when the
7536 stack is empty.
7538 flow_dfs_compute_reverse_finish () destroys the necessary data
7539 structures.
7541 Thus, the user will probably call ..._init(), call ..._add_bb() to
7542 add a beginning basic block to the stack, call ..._execute(),
7543 possibly add another bb to the stack and again call ..._execute(),
7544 ..., and finally call _finish(). */
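/* A minimal sketch of that protocol (not part of the original file):
   find one block, if any, from which the exit block cannot be reached.
   This mirrors the first iteration of connect_infinite_loops_to_exit
   above.  */
#if 0
static basic_block
example_find_block_not_reaching_exit ()
{
  struct depth_first_search_dsS dfs_ds;
  basic_block bb;

  flow_dfs_compute_reverse_init (&dfs_ds);
  flow_dfs_compute_reverse_add_bb (&dfs_ds, EXIT_BLOCK_PTR);
  bb = flow_dfs_compute_reverse_execute (&dfs_ds);
  flow_dfs_compute_reverse_finish (&dfs_ds);

  return bb;	/* NULL when every block reaches the exit.  */
}
#endif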
7546 /* Initialize the data structures used for depth-first search on the
7547 reverse graph. DATA is the current depth-first search context. */
7552 static void
7553 flow_dfs_compute_reverse_init (data)
7554 depth_first_search_ds data;
7556 /* Allocate stack for back-tracking up CFG. */
7557 data->stack =
7558 (basic_block *) xmalloc ((n_basic_blocks - (INVALID_BLOCK + 1))
7559 * sizeof (basic_block));
7560 data->sp = 0;
7562 /* Allocate bitmap to track nodes that have been visited. */
7563 data->visited_blocks = sbitmap_alloc (n_basic_blocks - (INVALID_BLOCK + 1));
7565 /* None of the nodes in the CFG have been visited yet. */
7566 sbitmap_zero (data->visited_blocks);
7568 return;
7571 /* Add the specified basic block to the top of the dfs data
7572 structures. When the search continues, it will start at the
7573 block. */
7575 static void
7576 flow_dfs_compute_reverse_add_bb (data, bb)
7577 depth_first_search_ds data;
7578 basic_block bb;
7580 data->stack[data->sp++] = bb;
7581 return;
7584 /* Continue the depth-first search through the reverse graph starting
7585 with the block at the stack's top and ending when the stack is
7586 empty. Visited nodes are marked. Returns an unvisited basic
7587 block, or NULL if there is none available. */
7589 static basic_block
7590 flow_dfs_compute_reverse_execute (data)
7591 depth_first_search_ds data;
7593 basic_block bb;
7594 edge e;
7595 int i;
7597 while (data->sp > 0)
7599 bb = data->stack[--data->sp];
7601 /* Mark that we have visited this node. */
7602 if (!TEST_BIT (data->visited_blocks, bb->index - (INVALID_BLOCK + 1)))
7604 SET_BIT (data->visited_blocks, bb->index - (INVALID_BLOCK + 1));
7606 /* Perform depth-first search on adjacent vertices. */
7607 for (e = bb->pred; e; e = e->pred_next)
7608 flow_dfs_compute_reverse_add_bb (data, e->src);
7612 /* Determine if there are unvisited basic blocks. */
7613 for (i = n_basic_blocks - (INVALID_BLOCK + 1); --i >= 0;)
7614 if (!TEST_BIT (data->visited_blocks, i))
7615 return BASIC_BLOCK (i + (INVALID_BLOCK + 1));
7616 return NULL;
7619 /* Destroy the data structures needed for depth-first search on the
7620 reverse graph. */
7622 static void
7623 flow_dfs_compute_reverse_finish (data)
7624 depth_first_search_ds data;
7626 free (data->stack);
7627 sbitmap_free (data->visited_blocks);
7628 return;
7631 /* Return the block for the pre-header of the loop with header
7632 HEADER where DOM specifies the dominator information. Return NULL if
7633 there is no pre-header. */
7635 static basic_block
7636 flow_loop_pre_header_find (header, dom)
7637 basic_block header;
7638 const sbitmap *dom;
7640 basic_block pre_header;
7641 edge e;
7643 /* If the header has exactly one predecessor that it does not
7644 dominate, that predecessor is the pre-header. */
7645 pre_header = NULL;
7646 for (e = header->pred; e; e = e->pred_next)
7648 basic_block node = e->src;
7650 if (node != ENTRY_BLOCK_PTR
7651 && ! TEST_BIT (dom[node->index], header->index))
7653 if (pre_header == NULL)
7654 pre_header = node;
7655 else
7657 /* There are multiple edges into the header from outside
7658 the loop so there is no pre-header block. */
7659 pre_header = NULL;
7660 break;
7664 return pre_header;
7667 /* Add LOOP to the loop hierarchy tree where PREVLOOP was the loop
7668 previously added. The insertion algorithm assumes that the loops
7669 are added in the order found by a depth first search of the CFG. */
7671 static void
7672 flow_loop_tree_node_add (prevloop, loop)
7673 struct loop *prevloop;
7674 struct loop *loop;
7677 if (flow_loop_nested_p (prevloop, loop))
7679 prevloop->inner = loop;
7680 loop->outer = prevloop;
7681 return;
7684 while (prevloop->outer)
7686 if (flow_loop_nested_p (prevloop->outer, loop))
7688 prevloop->next = loop;
7689 loop->outer = prevloop->outer;
7690 return;
7692 prevloop = prevloop->outer;
7695 prevloop->next = loop;
7696 loop->outer = NULL;
7699 /* Build the loop hierarchy tree for LOOPS. */
7701 static void
7702 flow_loops_tree_build (loops)
7703 struct loops *loops;
7705 int i;
7706 int num_loops;
7708 num_loops = loops->num;
7709 if (! num_loops)
7710 return;
7712 /* Root the loop hierarchy tree with the first loop found.
7713 Since we used a depth first search this should be the
7714 outermost loop. */
7715 loops->tree = &loops->array[0];
7716 loops->tree->outer = loops->tree->inner = loops->tree->next = NULL;
7718 /* Add the remaining loops to the tree. */
7719 for (i = 1; i < num_loops; i++)
7720 flow_loop_tree_node_add (&loops->array[i - 1], &loops->array[i]);
7723 /* Helper function to compute loop nesting depth and enclosed loop level
7724 for the natural loop specified by LOOP at the loop depth DEPTH.
7725 Returns the loop level. */
7727 static int
7728 flow_loop_level_compute (loop, depth)
7729 struct loop *loop;
7730 int depth;
7732 struct loop *inner;
7733 int level = 1;
7735 if (! loop)
7736 return 0;
7738 /* Traverse loop tree assigning depth and computing level as the
7739 maximum level of all the inner loops of this loop. The loop
7740 level is equivalent to the height of the loop in the loop tree
7741 and corresponds to the number of enclosed loop levels (including
7742 itself). */
7743 for (inner = loop->inner; inner; inner = inner->next)
7745 int ilevel;
7747 ilevel = flow_loop_level_compute (inner, depth + 1) + 1;
7749 if (ilevel > level)
7750 level = ilevel;
7752 loop->level = level;
7753 loop->depth = depth;
7754 return level;
7757 /* Compute the loop nesting depth and enclosed loop level for the loop
7758 hierarchy tree specified by LOOPS. Return the maximum enclosed loop
7759 level. */
7761 static int
7762 flow_loops_level_compute (loops)
7763 struct loops *loops;
7765 struct loop *loop;
7766 int level;
7767 int levels = 0;
7769 /* Traverse all the outer level loops. */
7770 for (loop = loops->tree; loop; loop = loop->next)
7772 level = flow_loop_level_compute (loop, 1);
7773 if (level > levels)
7774 levels = level;
7776 return levels;
7779 /* Find all the natural loops in the function, save them in the LOOPS
7780 structure, and recalculate the loop_depth information in the basic
7781 block structures. Return the number of natural loops found. */
7783 int
7784 flow_loops_find (loops)
7785 struct loops *loops;
7787 int i;
7788 int b;
7789 int num_loops;
7790 edge e;
7791 sbitmap headers;
7792 sbitmap *dom;
7793 int *dfs_order;
7794 int *rc_order;
7796 loops->num = 0;
7797 loops->array = NULL;
7798 loops->tree = NULL;
7799 dfs_order = NULL;
7800 rc_order = NULL;
7802 /* Taking care of this degenerate case makes the rest of
7803 this code simpler. */
7804 if (n_basic_blocks == 0)
7805 return 0;
7807 /* Compute the dominators. */
7808 dom = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
7809 compute_flow_dominators (dom, NULL);
7811 /* Count the number of loop edges (back edges). This should be the
7812 same as the number of natural loops. Also clear loop_depth; as we
7813 work from inner to outer loops in a nest, we call
7814 flow_loop_nodes_find, which will increment loop_depth for nodes
7815 within the current loop, which happens to enclose inner loops. */
7817 num_loops = 0;
7818 for (b = 0; b < n_basic_blocks; b++)
7820 BASIC_BLOCK (b)->loop_depth = 0;
7821 for (e = BASIC_BLOCK (b)->pred; e; e = e->pred_next)
7823 basic_block latch = e->src;
7825 /* Look for back edges where a predecessor is dominated
7826 by this block. A natural loop has a single entry
7827 node (header) that dominates all the nodes in the
7828 loop. It also has a single back edge to the header
7829 from a latch node. Note that multiple natural loops
7830 may share the same header. */
7831 if (latch != ENTRY_BLOCK_PTR && TEST_BIT (dom[latch->index], b))
7832 num_loops++;
7836 if (num_loops)
7838 /* Compute depth first search order of the CFG so that outer
7839 natural loops will be found before inner natural loops. */
7840 dfs_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
7841 rc_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
7842 flow_depth_first_order_compute (dfs_order, rc_order);
7844 /* Allocate loop structures. */
7845 loops->array
7846 = (struct loop *) xcalloc (num_loops, sizeof (struct loop));
7848 headers = sbitmap_alloc (n_basic_blocks);
7849 sbitmap_zero (headers);
7851 loops->shared_headers = sbitmap_alloc (n_basic_blocks);
7852 sbitmap_zero (loops->shared_headers);
7854 /* Find and record information about all the natural loops
7855 in the CFG. */
7856 num_loops = 0;
7857 for (b = 0; b < n_basic_blocks; b++)
7859 basic_block header;
7861 /* Search the nodes of the CFG in reverse completion order so
7862 that we find outer loops first. */
7863 header = BASIC_BLOCK (rc_order[b]);
7865 /* Look for all the possible latch blocks for this header. */
7866 for (e = header->pred; e; e = e->pred_next)
7868 basic_block latch = e->src;
7870 /* Look for back edges where a predecessor is dominated
7871 by this block. A natural loop has a single entry
7872 node (header) that dominates all the nodes in the
7873 loop. It also has a single back edge to the header
7874 from a latch node. Note that multiple natural loops
7875 may share the same header. */
7876 if (latch != ENTRY_BLOCK_PTR
7877 && TEST_BIT (dom[latch->index], header->index))
7879 struct loop *loop;
7881 loop = loops->array + num_loops;
7883 loop->header = header;
7884 loop->latch = latch;
7885 loop->num = num_loops;
7887 /* Keep track of blocks that are loop headers so
7888 that we can tell which loops should be merged. */
7889 if (TEST_BIT (headers, header->index))
7890 SET_BIT (loops->shared_headers, header->index);
7891 SET_BIT (headers, header->index);
7893 /* Find nodes contained within the loop. */
7894 loop->nodes = sbitmap_alloc (n_basic_blocks);
7895 loop->num_nodes
7896 = flow_loop_nodes_find (header, latch, loop->nodes);
7898 /* Compute first and last blocks within the loop.
7899 These are often the same as the loop header and
7900 loop latch respectively, but this is not always
7901 the case. */
7902 loop->first
7903 = BASIC_BLOCK (sbitmap_first_set_bit (loop->nodes));
7904 loop->last
7905 = BASIC_BLOCK (sbitmap_last_set_bit (loop->nodes));
7907 /* Find edges which exit the loop. Note that a node
7908 may have several exit edges. */
7909 loop->num_exits
7910 = flow_loop_exits_find (loop->nodes, &loop->exits);
7912 /* Look to see if the loop has a pre-header node. */
7913 loop->pre_header = flow_loop_pre_header_find (header, dom);
7915 num_loops++;
7920 /* Natural loops with shared headers may either be disjoint or
7921 nested. Disjoint loops with shared headers cannot be inner
7922 loops and should be merged. For now just mark loops that share
7923 headers. */
7924 for (i = 0; i < num_loops; i++)
7925 if (TEST_BIT (loops->shared_headers, loops->array[i].header->index))
7926 loops->array[i].shared = 1;
7928 sbitmap_free (headers);
7931 loops->num = num_loops;
7933 /* Save CFG derived information to avoid recomputing it. */
7934 loops->cfg.dom = dom;
7935 loops->cfg.dfs_order = dfs_order;
7936 loops->cfg.rc_order = rc_order;
7938 /* Build the loop hierarchy tree. */
7939 flow_loops_tree_build (loops);
7941 /* Assign the loop nesting depth and enclosed loop level for each
7942 loop. */
7943 loops->levels = flow_loops_level_compute (loops);
7945 return num_loops;
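/* Usage sketch (not part of the original file): discover the natural
   loops, dump them for debugging, and release everything.  The FILE
   argument is hypothetical.  */
#if 0
static void
example_loops (file)
     FILE *file;
{
  struct loops loops;

  if (flow_loops_find (&loops) > 0)
    flow_loops_dump (&loops, file, /* verbose */ 1);

  flow_loops_free (&loops);
}
#endif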
7948 /* Return non-zero if edge E enters the header of LOOP from outside of LOOP. */
7950 int
7951 flow_loop_outside_edge_p (loop, e)
7952 const struct loop *loop;
7953 edge e;
7955 if (e->dest != loop->header)
7956 abort ();
7957 return (e->src == ENTRY_BLOCK_PTR)
7958 || ! TEST_BIT (loop->nodes, e->src->index);
7961 /* Clear LOG_LINKS fields of insns in a chain.
7962 Also clear the global_live_at_{start,end} fields of the basic block
7963 structures. */
7965 void
7966 clear_log_links (insns)
7967 rtx insns;
7969 rtx i;
7970 int b;
7972 for (i = insns; i; i = NEXT_INSN (i))
7973 if (INSN_P (i))
7974 LOG_LINKS (i) = 0;
7976 for (b = 0; b < n_basic_blocks; b++)
7978 basic_block bb = BASIC_BLOCK (b);
7980 bb->global_live_at_start = NULL;
7981 bb->global_live_at_end = NULL;
7984 ENTRY_BLOCK_PTR->global_live_at_end = NULL;
7985 EXIT_BLOCK_PTR->global_live_at_start = NULL;
7988 /* Given a register bitmap, turn on the bits in a HARD_REG_SET that
7989 correspond to the hard registers, if any, set in that map. This
7990 could be done far more efficiently by having all sorts of special cases
7991 for moving single words, but it probably isn't worth the trouble. */
7993 void
7994 reg_set_to_hard_reg_set (to, from)
7995 HARD_REG_SET *to;
7996 bitmap from;
7998 int i;
8000 EXECUTE_IF_SET_IN_BITMAP
8001 (from, 0, i,
8003 if (i >= FIRST_PSEUDO_REGISTER)
8004 return;
8005 SET_HARD_REG_BIT (*to, i);