[official-gcc.git] / gcc / flow.c
1 /* Data flow analysis for GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
23 /* This file contains the data flow analysis pass of the compiler. It
24 computes data flow information which tells combine_instructions
25 which insns to consider combining and controls register allocation.
27 Additional data flow information that is too bulky to record is
28 generated during the analysis, and is used at that time to create
29 autoincrement and autodecrement addressing.
31 The first step is dividing the function into basic blocks.
32 find_basic_blocks does this. Then life_analysis determines
33 where each register is live and where it is dead.
35 ** find_basic_blocks **
37 find_basic_blocks divides the current function's rtl into basic
38 blocks and constructs the CFG. The blocks are recorded in the
39 basic_block_info array; the CFG exists in the edge structures
40 referenced by the blocks.
42 find_basic_blocks also finds any unreachable loops and deletes them.
44 ** life_analysis **
46 life_analysis is called immediately after find_basic_blocks.
47 It uses the basic block information to determine where each
48 hard or pseudo register is live.
50 ** live-register info **
52 The information about where each register is live is in two parts:
53 the REG_NOTES of insns, and the vector basic_block->global_live_at_start.
55 basic_block->global_live_at_start has an element for each basic
56 block, and the element is a bit-vector with a bit for each hard or
57 pseudo register. The bit is 1 if the register is live at the
58 beginning of the basic block.
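
   For example, a pass that needs to know whether register REGNO is
   live on entry to block BB can use the regset accessor macros
   directly (a sketch):

	if (REGNO_REG_SET_P (bb->global_live_at_start, regno))
	  ... REGNO is live at the start of BB ...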
60 Two types of elements can be added to an insn's REG_NOTES.
61 A REG_DEAD note is added to an insn's REG_NOTES for any register
62 that meets both of two conditions: The value in the register is not
63 needed in subsequent insns and the insn does not replace the value in
64 the register (in the case of multi-word hard registers, the value in
65 each register must be replaced by the insn to avoid a REG_DEAD note).
67 In the vast majority of cases, an object in a REG_DEAD note will be
68 used somewhere in the insn. The (rare) exception to this is if an
69 insn uses a multi-word hard register and only some of the registers are
70 needed in subsequent insns. In that case, REG_DEAD notes will be
71 provided for those hard registers that are not subsequently needed.
72 Partial REG_DEAD notes of this type do not occur when an insn sets
73 only some of the hard registers used in such a multi-word operand;
74 omitting REG_DEAD notes for objects stored in an insn is optional and
75 the desire to do so does not justify the complexity of the partial
76 REG_DEAD notes.
78 REG_UNUSED notes are added for each register that is set by the insn
79 but is unused subsequently (if every register set by the insn is unused
80 and the insn does not reference memory or have some other side-effect,
81 the insn is deleted instead). If only part of a multi-word hard
82 register is used in a subsequent insn, REG_UNUSED notes are made for
83 the parts that will not be used.
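
   For example, whether REG dies in INSN, or is set there but unused
   afterward, can be tested through the note accessors (a sketch;
   find_regno_note is the usual entry point):

	if (find_regno_note (insn, REG_DEAD, REGNO (reg)))
	  ... REG dies in INSN ...
	if (find_regno_note (insn, REG_UNUSED, REGNO (reg)))
	  ... INSN sets REG but the value is never used ...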
85 To determine which registers are live after any insn, one can
86 start from the beginning of the basic block and scan insns, noting
87 which registers are set by each insn and which die there.
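
   A sketch of such a scan, where LIVE is a regset being updated
   (interpreting the sets and deaths of each insn is the job of
   propagate_block and its subroutines below):

	COPY_REG_SET (live, bb->global_live_at_start);
	for (insn = bb->head; ; insn = NEXT_INSN (insn))
	  {
	    ... update LIVE from the stores and REG_DEAD notes of INSN ...
	    if (insn == bb->end)
	      break;
	  }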
89 ** Other actions of life_analysis **
91 life_analysis sets up the LOG_LINKS fields of insns because the
92 information needed to do so is readily available.
94 life_analysis deletes insns whose only effect is to store a value
95 that is never used.
97 life_analysis notices cases where a reference to a register as
98 a memory address can be combined with a preceding or following
99 incrementation or decrementation of the register. The separate
100 instruction to increment or decrement is deleted and the address
101 is changed to a POST_INC or similar rtx.
103 Each time an incrementing or decrementing address is created,
104 a REG_INC element is added to the insn's REG_NOTES list.
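
   A pass that must know about autoincrement addresses can therefore
   walk the notes (a sketch):

	for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	  if (REG_NOTE_KIND (note) == REG_INC)
	    ... XEXP (note, 0) is a register incremented or
	        decremented by INSN ...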
106 life_analysis fills in certain vectors containing information about
107 register usage: REG_N_REFS, REG_N_DEATHS, REG_N_SETS, REG_LIVE_LENGTH,
108 REG_N_CALLS_CROSSED and REG_BASIC_BLOCK.
110 life_analysis sets current_function_sp_is_unchanging if the function
111 doesn't modify the stack pointer. */
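
/* In outline, a caller uses this file roughly as follows.  This is a
   sketch only; the exact arguments of life_analysis vary between
   revisions, and rtl_dump_file may be NULL.

	find_basic_blocks (get_insns (), max_reg_num (), rtl_dump_file);
	cleanup_cfg (get_insns ());
	life_analysis (get_insns (), max_reg_num (), rtl_dump_file, ...);  */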
113 /* TODO:
115 Split out from life_analysis:
116 - local property discovery (bb->local_live, bb->local_set)
117 - global property computation
118 - log links creation
119 - pre/post modify transformation
120 */
122 #include "config.h"
123 #include "system.h"
124 #include "tree.h"
125 #include "rtl.h"
126 #include "tm_p.h"
127 #include "basic-block.h"
128 #include "insn-config.h"
129 #include "regs.h"
130 #include "hard-reg-set.h"
131 #include "flags.h"
132 #include "output.h"
133 #include "function.h"
134 #include "except.h"
135 #include "toplev.h"
136 #include "recog.h"
137 #include "insn-flags.h"
138 #include "expr.h"
140 #include "obstack.h"
142 #define obstack_chunk_alloc xmalloc
143 #define obstack_chunk_free free
146 /* EXIT_IGNORE_STACK should be nonzero if, when returning from a function,
147 the stack pointer does not matter. The value is tested only in
148 functions that have frame pointers.
149 No definition is equivalent to always zero. */
150 #ifndef EXIT_IGNORE_STACK
151 #define EXIT_IGNORE_STACK 0
152 #endif
154 #ifndef HAVE_epilogue
155 #define HAVE_epilogue 0
156 #endif
157 #ifndef HAVE_prologue
158 #define HAVE_prologue 0
159 #endif
160 #ifndef HAVE_sibcall_epilogue
161 #define HAVE_sibcall_epilogue 0
162 #endif
164 /* The contents of the current function definition are allocated
165 in this obstack, and all are freed at the end of the function.
166 For top-level functions, this is temporary_obstack.
167 Separate obstacks are made for nested functions. */
169 extern struct obstack *function_obstack;
171 /* Number of basic blocks in the current function. */
173 int n_basic_blocks;
175 /* Number of edges in the current function. */
177 int n_edges;
179 /* The basic block array. */
181 varray_type basic_block_info;
183 /* The special entry and exit blocks. */
185 struct basic_block_def entry_exit_blocks[2]
186 = {{NULL, /* head */
187 NULL, /* end */
188 NULL, /* pred */
189 NULL, /* succ */
190 NULL, /* local_set */
191 NULL, /* global_live_at_start */
192 NULL, /* global_live_at_end */
193 NULL, /* aux */
194 ENTRY_BLOCK, /* index */
195 0, /* loop_depth */
196 -1, -1 /* eh_beg, eh_end */
197 },
198 {
199 NULL, /* head */
200 NULL, /* end */
201 NULL, /* pred */
202 NULL, /* succ */
203 NULL, /* local_set */
204 NULL, /* global_live_at_start */
205 NULL, /* global_live_at_end */
206 NULL, /* aux */
207 EXIT_BLOCK, /* index */
208 0, /* loop_depth */
209 -1, -1 /* eh_beg, eh_end */
210 }
211 };
213 /* Nonzero if the second flow pass has completed. */
214 int flow2_completed;
216 /* Maximum register number used in this function, plus one. */
218 int max_regno;
220 /* Indexed by n, giving various register information */
222 varray_type reg_n_info;
224 /* Size of the reg_n_info table. */
226 unsigned int reg_n_max;
228 /* Size of a regset for the current function,
229 in (1) bytes and (2) elements. */
231 int regset_bytes;
232 int regset_size;
234 /* Regset of regs live when calls to `setjmp'-like functions happen. */
235 /* ??? Does this exist only for the setjmp-clobbered warning message? */
237 regset regs_live_at_setjmp;
239 /* List made of EXPR_LIST rtx's which gives pairs of pseudo registers
240 that have to go in the same hard reg.
241 The first two regs in the list are a pair, and the next two
242 are another pair, etc. */
243 rtx regs_may_share;
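
/* A sketch of walking those pairs: each EXPR_LIST node carries one
   register in XEXP (x, 0), and consecutive nodes form a pair.

	for (x = regs_may_share; x; x = XEXP (XEXP (x, 1), 1))
	  {
	    rtx r1 = XEXP (x, 0);
	    rtx r2 = XEXP (XEXP (x, 1), 0);
	    ... R1 and R2 want the same hard register ...
	  }  */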
245 /* Set of registers that may be eliminable. These are handled specially
246 in updating regs_ever_live. */
248 static HARD_REG_SET elim_reg_set;
250 /* The basic block structure for every insn, indexed by uid. */
252 varray_type basic_block_for_insn;
254 /* The labels mentioned in non-jump rtl. Valid during find_basic_blocks. */
255 /* ??? Should probably be using LABEL_NUSES instead. It would take a
256 bit of surgery to be able to use or co-opt the routines in jump. */
258 static rtx label_value_list;
260 /* For use in communicating between propagate_block and its subroutines.
261 Holds all information needed to compute life and def-use information. */
263 struct propagate_block_info
264 {
265 /* The basic block we're considering. */
266 basic_block bb;
268 /* Bit N is set if register N is conditionally or unconditionally live. */
269 regset reg_live;
271 /* Bit N is set if register N is unconditionally dead this insn. */
272 regset new_dead;
274 /* Bit N is set if register N is live this insn. */
275 regset new_live;
277 /* Element N is the next insn that uses (hard or pseudo) register N
278 within the current basic block; or zero, if there is no such insn. */
279 rtx *reg_next_use;
281 /* Contains a list of all the MEMs we are tracking for dead store
282 elimination. */
283 rtx mem_set_list;
285 /* If non-null, record the set of registers set in the basic block. */
286 regset local_set;
288 /* Non-zero if the value of CC0 is live. */
289 int cc0_live;
291 /* Flags controlling the set of information propagate_block collects. */
292 int flags;
293 };
295 /* Forward declarations */
296 static int count_basic_blocks PARAMS ((rtx));
297 static rtx find_basic_blocks_1 PARAMS ((rtx));
298 static void clear_edges PARAMS ((void));
299 static void make_edges PARAMS ((rtx));
300 static void make_label_edge PARAMS ((sbitmap *, basic_block,
301 rtx, int));
302 static void make_eh_edge PARAMS ((sbitmap *, eh_nesting_info *,
303 basic_block, rtx, int));
304 static void mark_critical_edges PARAMS ((void));
305 static void move_stray_eh_region_notes PARAMS ((void));
306 static void record_active_eh_regions PARAMS ((rtx));
308 static void commit_one_edge_insertion PARAMS ((edge));
310 static void delete_unreachable_blocks PARAMS ((void));
311 static void delete_eh_regions PARAMS ((void));
312 static int can_delete_note_p PARAMS ((rtx));
313 static int delete_block PARAMS ((basic_block));
314 static void expunge_block PARAMS ((basic_block));
315 static int can_delete_label_p PARAMS ((rtx));
316 static int merge_blocks_move_predecessor_nojumps PARAMS ((basic_block,
317 basic_block));
318 static int merge_blocks_move_successor_nojumps PARAMS ((basic_block,
319 basic_block));
320 static int merge_blocks PARAMS ((edge,basic_block,basic_block));
321 static void try_merge_blocks PARAMS ((void));
322 static void tidy_fallthru_edges PARAMS ((void));
323 static int verify_wide_reg_1 PARAMS ((rtx *, void *));
324 static void verify_wide_reg PARAMS ((int, rtx, rtx));
325 static void verify_local_live_at_start PARAMS ((regset, basic_block));
326 static int set_noop_p PARAMS ((rtx));
327 static int noop_move_p PARAMS ((rtx));
328 static void delete_noop_moves PARAMS ((rtx));
329 static void notice_stack_pointer_modification_1 PARAMS ((rtx, rtx, void *));
330 static void notice_stack_pointer_modification PARAMS ((rtx));
331 static void mark_reg PARAMS ((rtx, void *));
332 static void mark_regs_live_at_end PARAMS ((regset));
333 static int set_phi_alternative_reg PARAMS ((rtx, int, int, void *));
334 static void calculate_global_regs_live PARAMS ((sbitmap, sbitmap, int));
335 static void propagate_block_delete_insn PARAMS ((basic_block, rtx));
336 static rtx propagate_block_delete_libcall PARAMS ((basic_block, rtx, rtx));
337 static void propagate_block PARAMS ((basic_block, regset,
338 regset, int));
339 static int insn_dead_p PARAMS ((struct propagate_block_info *,
340 rtx, int, rtx));
341 static int libcall_dead_p PARAMS ((struct propagate_block_info *,
342 rtx, rtx, rtx));
343 static void mark_set_regs PARAMS ((struct propagate_block_info *,
344 rtx, rtx));
345 static void mark_set_1 PARAMS ((struct propagate_block_info *,
346 rtx, rtx, rtx));
347 static int mark_set_reg PARAMS ((struct propagate_block_info *,
348 rtx, rtx, int *, int *));
349 #ifdef AUTO_INC_DEC
350 static void find_auto_inc PARAMS ((struct propagate_block_info *,
351 rtx, rtx));
352 static int try_pre_increment_1 PARAMS ((struct propagate_block_info *,
353 rtx));
354 static int try_pre_increment PARAMS ((rtx, rtx, HOST_WIDE_INT));
355 #endif
356 static void mark_used_reg PARAMS ((struct propagate_block_info *,
357 rtx, rtx, rtx));
358 static void mark_used_regs PARAMS ((struct propagate_block_info *,
359 rtx, rtx, rtx));
360 void dump_flow_info PARAMS ((FILE *));
361 void debug_flow_info PARAMS ((void));
362 static void dump_edge_info PARAMS ((FILE *, edge, int));
364 static void count_reg_sets_1 PARAMS ((rtx, int));
365 static void count_reg_sets PARAMS ((rtx, int));
366 static void count_reg_references PARAMS ((rtx, int));
367 static void invalidate_mems_from_autoinc PARAMS ((struct propagate_block_info *,
368 rtx));
369 static void remove_fake_successors PARAMS ((basic_block));
370 static void flow_nodes_print PARAMS ((const char *, const sbitmap, FILE *));
371 static void flow_exits_print PARAMS ((const char *, const edge *, int, FILE *));
372 static void flow_loops_cfg_dump PARAMS ((const struct loops *, FILE *));
373 static int flow_loop_nested_p PARAMS ((struct loop *, struct loop *));
374 static int flow_loop_exits_find PARAMS ((const sbitmap, edge **));
375 static int flow_loop_nodes_find PARAMS ((basic_block, basic_block, sbitmap));
376 static int flow_depth_first_order_compute PARAMS ((int *));
377 static basic_block flow_loop_pre_header_find PARAMS ((basic_block, const sbitmap *));
378 static void flow_loop_tree_node_add PARAMS ((struct loop *, struct loop *));
379 static void flow_loops_tree_build PARAMS ((struct loops *));
380 static int flow_loop_level_compute PARAMS ((struct loop *, int));
381 static int flow_loops_level_compute PARAMS ((struct loops *));
383 /* Find basic blocks of the current function.
384 F is the first insn of the function and NREGS the number of register
385 numbers in use. */
387 void
388 find_basic_blocks (f, nregs, file)
389 rtx f;
390 int nregs ATTRIBUTE_UNUSED;
391 FILE *file ATTRIBUTE_UNUSED;
392 {
393 int max_uid;
395 /* Flush out existing data. */
396 if (basic_block_info != NULL)
398 int i;
400 clear_edges ();
402 /* Clear bb->aux on all extant basic blocks. We'll use this as a
403 tag for reuse during create_basic_block, just in case some pass
404 copies around basic block notes improperly. */
405 for (i = 0; i < n_basic_blocks; ++i)
406 BASIC_BLOCK (i)->aux = NULL;
408 VARRAY_FREE (basic_block_info);
411 n_basic_blocks = count_basic_blocks (f);
413 /* Size the basic block table. The actual structures will be allocated
414 by find_basic_blocks_1, since we want to keep the structure pointers
415 stable across calls to find_basic_blocks. */
416 /* ??? This whole issue would be much simpler if we called find_basic_blocks
417 exactly once, and thereafter we don't have a single long chain of
418 instructions at all until close to the end of compilation when we
419 actually lay them out. */
421 VARRAY_BB_INIT (basic_block_info, n_basic_blocks, "basic_block_info");
423 label_value_list = find_basic_blocks_1 (f);
425 /* Record the block to which an insn belongs. */
426 /* ??? This should be done another way, by which (perhaps) a label is
427 tagged directly with the basic block that it starts. It is used for
428 more than that currently, but IMO that is the only valid use. */
430 max_uid = get_max_uid ();
431 #ifdef AUTO_INC_DEC
432 /* Leave space for insns life_analysis makes in some cases for auto-inc.
433 These cases are rare, so we don't need too much space. */
434 max_uid += max_uid / 10;
435 #endif
437 compute_bb_for_insn (max_uid);
439 /* Discover the edges of our cfg. */
440 record_active_eh_regions (f);
441 make_edges (label_value_list);
443 /* Do very simple cleanup now, for the benefit of code that runs between
444 here and cleanup_cfg, e.g. thread_prologue_and_epilogue_insns. */
445 tidy_fallthru_edges ();
447 mark_critical_edges ();
449 #ifdef ENABLE_CHECKING
450 verify_flow_info ();
451 #endif
452 }
454 /* Count the basic blocks of the function. */
456 static int
457 count_basic_blocks (f)
458 rtx f;
459 {
460 register rtx insn;
461 register RTX_CODE prev_code;
462 register int count = 0;
463 int eh_region = 0;
464 int call_had_abnormal_edge = 0;
465 rtx prev_call = NULL_RTX;
467 prev_code = JUMP_INSN;
468 for (insn = f; insn; insn = NEXT_INSN (insn))
470 register RTX_CODE code = GET_CODE (insn);
472 if (code == CODE_LABEL
473 || (GET_RTX_CLASS (code) == 'i'
474 && (prev_code == JUMP_INSN
475 || prev_code == BARRIER
476 || (prev_code == CALL_INSN && call_had_abnormal_edge))))
478 count++;
481 /* Record whether this call created an edge. */
482 if (code == CALL_INSN)
484 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
485 int region = (note ? INTVAL (XEXP (note, 0)) : 1);
486 prev_call = insn;
487 call_had_abnormal_edge = 0;
489 /* If there is an EH region or rethrow, we have an edge. */
490 if ((eh_region && region > 0)
491 || find_reg_note (insn, REG_EH_RETHROW, NULL_RTX))
492 call_had_abnormal_edge = 1;
493 else
495 /* If there is a nonlocal goto label and the specified
496 region number isn't -1, we have an edge. (0 means
497 no throw, but might have a nonlocal goto). */
498 if (nonlocal_goto_handler_labels && region >= 0)
499 call_had_abnormal_edge = 1;
502 else if (code != NOTE)
503 prev_call = NULL_RTX;
505 if (code != NOTE)
506 prev_code = code;
507 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
508 ++eh_region;
509 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
510 --eh_region;
514 /* The rest of the compiler works a bit more smoothly when we don't have to
515 check for the edge case of do-nothing functions with no basic blocks. */
516 if (count == 0)
518 emit_insn (gen_rtx_USE (VOIDmode, const0_rtx));
519 count = 1;
522 return count;
525 /* Find all basic blocks of the function whose first insn is F.
527 Collect and return a list of labels whose addresses are taken. This
528 will be used in make_edges for use with computed gotos. */
530 static rtx
531 find_basic_blocks_1 (f)
532 rtx f;
533 {
534 register rtx insn, next;
535 int call_has_abnormal_edge = 0;
536 int i = 0;
537 rtx bb_note = NULL_RTX;
538 rtx eh_list = NULL_RTX;
539 rtx label_value_list = NULL_RTX;
540 rtx head = NULL_RTX;
541 rtx end = NULL_RTX;
543 /* We process the instructions in a slightly different way than we did
544 previously. This is so that we see a NOTE_BASIC_BLOCK after we have
545 closed out the previous block, so that it gets attached at the proper
546 place. Since this form should be equivalent to the previous,
547 count_basic_blocks continues to use the old form as a check. */
549 for (insn = f; insn; insn = next)
551 enum rtx_code code = GET_CODE (insn);
553 next = NEXT_INSN (insn);
555 if (code == CALL_INSN)
557 /* Record whether this call created an edge. */
558 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
559 int region = (note ? INTVAL (XEXP (note, 0)) : 1);
560 call_has_abnormal_edge = 0;
562 /* If there is an EH region or rethrow, we have an edge. */
563 if ((eh_list && region > 0)
564 || find_reg_note (insn, REG_EH_RETHROW, NULL_RTX))
565 call_has_abnormal_edge = 1;
566 else
568 /* If there is a nonlocal goto label and the specified
569 region number isn't -1, we have an edge. (0 means
570 no throw, but might have a nonlocal goto). */
571 if (nonlocal_goto_handler_labels && region >= 0)
572 call_has_abnormal_edge = 1;
576 switch (code)
578 case NOTE:
580 int kind = NOTE_LINE_NUMBER (insn);
582 /* Keep a LIFO list of the currently active exception notes. */
583 if (kind == NOTE_INSN_EH_REGION_BEG)
584 eh_list = alloc_INSN_LIST (insn, eh_list);
585 else if (kind == NOTE_INSN_EH_REGION_END)
587 rtx t = eh_list;
588 eh_list = XEXP (eh_list, 1);
589 free_INSN_LIST_node (t);
592 /* Look for basic block notes with which to keep the
593 basic_block_info pointers stable. Unthread the note now;
594 we'll put it back at the right place in create_basic_block.
595 Or not at all if we've already found a note in this block. */
596 else if (kind == NOTE_INSN_BASIC_BLOCK)
598 if (bb_note == NULL_RTX)
599 bb_note = insn;
600 next = flow_delete_insn (insn);
603 break;
606 case CODE_LABEL:
607 /* A basic block starts at a label. If we've closed one off due
608 to a barrier or some such, no need to do it again. */
609 if (head != NULL_RTX)
611 /* While we now have edge lists with which other portions of
612 the compiler might determine a call ending a basic block
613 does not imply an abnormal edge, it will be a bit before
614 everything can be updated. So continue to emit a noop at
615 the end of such a block. */
616 if (GET_CODE (end) == CALL_INSN
617 && ! SIBLING_CALL_P (end))
619 rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
620 end = emit_insn_after (nop, end);
623 create_basic_block (i++, head, end, bb_note);
624 bb_note = NULL_RTX;
626 head = end = insn;
627 break;
629 case JUMP_INSN:
630 /* A basic block ends at a jump. */
631 if (head == NULL_RTX)
632 head = insn;
633 else
635 /* ??? Make a special check for table jumps. The way this
636 happens is truly and amazingly gross. We are about to
637 create a basic block that contains just a code label and
638 an addr*vec jump insn. Worse, an addr_diff_vec creates
639 its own natural loop.
641 Prevent this bit of brain damage, pasting things together
642 correctly in make_edges.
644 The correct solution involves emitting the table directly
645 on the tablejump instruction as a note, or JUMP_LABEL. */
647 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
648 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
650 head = end = NULL;
651 n_basic_blocks--;
652 break;
655 end = insn;
656 goto new_bb_inclusive;
658 case BARRIER:
659 /* A basic block ends at a barrier. It may be that an unconditional
660 jump already closed the basic block -- no need to do it again. */
661 if (head == NULL_RTX)
662 break;
664 /* While we now have edge lists with which other portions of the
665 compiler might determine a call ending a basic block does not
666 imply an abnormal edge, it will be a bit before everything can
667 be updated. So continue to emit a noop at the end of such a
668 block. */
669 if (GET_CODE (end) == CALL_INSN
670 && ! SIBLING_CALL_P (end))
672 rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
673 end = emit_insn_after (nop, end);
675 goto new_bb_exclusive;
677 case CALL_INSN:
678 /* A basic block ends at a call that can either throw or
679 do a non-local goto. */
680 if (call_has_abnormal_edge)
682 new_bb_inclusive:
683 if (head == NULL_RTX)
684 head = insn;
685 end = insn;
687 new_bb_exclusive:
688 create_basic_block (i++, head, end, bb_note);
689 head = end = NULL_RTX;
690 bb_note = NULL_RTX;
691 break;
693 /* FALLTHRU */
695 default:
696 if (GET_RTX_CLASS (code) == 'i')
698 if (head == NULL_RTX)
699 head = insn;
700 end = insn;
702 break;
705 if (GET_RTX_CLASS (code) == 'i')
707 rtx note;
709 /* Make a list of all labels referred to other than by jumps
710 (which just don't have the REG_LABEL notes).
712 Make a special exception for labels followed by an ADDR*VEC,
713 as this would be a part of the tablejump setup code.
715 Make a special exception for the eh_return_stub_label, which
716 we know isn't part of any otherwise visible control flow. */
718 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
719 if (REG_NOTE_KIND (note) == REG_LABEL)
721 rtx lab = XEXP (note, 0), next;
723 if (lab == eh_return_stub_label)
724 ;
725 else if ((next = next_nonnote_insn (lab)) != NULL
726 && GET_CODE (next) == JUMP_INSN
727 && (GET_CODE (PATTERN (next)) == ADDR_VEC
728 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
729 ;
730 else
731 label_value_list
732 = alloc_EXPR_LIST (0, XEXP (note, 0), label_value_list);
737 if (head != NULL_RTX)
738 create_basic_block (i++, head, end, bb_note);
740 if (i != n_basic_blocks)
741 abort ();
743 return label_value_list;
746 /* Tidy the CFG by deleting unreachable code and whatnot. */
748 void
749 cleanup_cfg (f)
750 rtx f;
751 {
752 delete_unreachable_blocks ();
753 move_stray_eh_region_notes ();
754 record_active_eh_regions (f);
755 try_merge_blocks ();
756 mark_critical_edges ();
758 /* Kill the data we won't maintain. */
759 label_value_list = NULL_RTX;
762 /* Create a new basic block consisting of the instructions between
763 HEAD and END inclusive. Reuses the note and basic block struct
764 in BB_NOTE, if any. */
766 void
767 create_basic_block (index, head, end, bb_note)
768 int index;
769 rtx head, end, bb_note;
770 {
771 basic_block bb;
773 if (bb_note
774 && ! RTX_INTEGRATED_P (bb_note)
775 && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
776 && bb->aux == NULL)
778 /* If we found an existing note, thread it back onto the chain. */
780 if (GET_CODE (head) == CODE_LABEL)
781 add_insn_after (bb_note, head);
782 else
784 add_insn_before (bb_note, head);
785 head = bb_note;
788 else
790 /* Otherwise we must create a note and a basic block structure.
791 Since we allow basic block structs in rtl, give the struct
792 the same lifetime by allocating it off the function obstack
793 rather than using malloc. */
795 bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
796 memset (bb, 0, sizeof (*bb));
798 if (GET_CODE (head) == CODE_LABEL)
799 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
800 else
802 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
803 head = bb_note;
805 NOTE_BASIC_BLOCK (bb_note) = bb;
808 /* Always include the bb note in the block. */
809 if (NEXT_INSN (end) == bb_note)
810 end = bb_note;
812 bb->head = head;
813 bb->end = end;
814 bb->index = index;
815 BASIC_BLOCK (index) = bb;
817 /* Tag the block so that we know it has been used when considering
818 other basic block notes. */
819 bb->aux = bb;
822 /* Records the basic block struct in BB_FOR_INSN, for every instruction
823 indexed by INSN_UID. MAX is the size of the array. */
825 void
826 compute_bb_for_insn (max)
827 int max;
828 {
829 int i;
831 if (basic_block_for_insn)
832 VARRAY_FREE (basic_block_for_insn);
833 VARRAY_BB_INIT (basic_block_for_insn, max, "basic_block_for_insn");
835 for (i = 0; i < n_basic_blocks; ++i)
837 basic_block bb = BASIC_BLOCK (i);
838 rtx insn, end;
840 end = bb->end;
841 insn = bb->head;
842 while (1)
844 int uid = INSN_UID (insn);
845 if (uid < max)
846 VARRAY_BB (basic_block_for_insn, uid) = bb;
847 if (insn == end)
848 break;
849 insn = NEXT_INSN (insn);
850 }
851 }
852 }
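
/* Once the map is built, the usual query goes through the
   BLOCK_FOR_INSN macro, which indexes basic_block_for_insn by
   INSN_UID (a sketch):

	basic_block bb = BLOCK_FOR_INSN (insn);  */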
854 /* Free the memory associated with the edge structures. */
856 static void
857 clear_edges ()
858 {
859 int i;
860 edge n, e;
862 for (i = 0; i < n_basic_blocks; ++i)
864 basic_block bb = BASIC_BLOCK (i);
866 for (e = bb->succ; e ; e = n)
868 n = e->succ_next;
869 free (e);
872 bb->succ = 0;
873 bb->pred = 0;
876 for (e = ENTRY_BLOCK_PTR->succ; e ; e = n)
878 n = e->succ_next;
879 free (e);
882 ENTRY_BLOCK_PTR->succ = 0;
883 EXIT_BLOCK_PTR->pred = 0;
885 n_edges = 0;
888 /* Identify the edges between basic blocks.
890 NONLOCAL_LABEL_LIST is a list of non-local labels in the function. Blocks
891 that are otherwise unreachable may be reachable with a non-local goto.
893 BB_EH_END is an array indexed by basic block number in which we record
894 the list of exception regions active at the end of the basic block. */
896 static void
897 make_edges (label_value_list)
898 rtx label_value_list;
899 {
900 int i;
901 eh_nesting_info *eh_nest_info = init_eh_nesting_info ();
902 sbitmap *edge_cache = NULL;
904 /* Assume no computed jump; revise as we create edges. */
905 current_function_has_computed_jump = 0;
907 /* Heavy use of computed goto in machine-generated code can lead to
908 nearly fully-connected CFGs. In that case we spend a significant
909 amount of time searching the edge lists for duplicates. */
910 if (forced_labels || label_value_list)
912 edge_cache = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
913 sbitmap_vector_zero (edge_cache, n_basic_blocks);
916 /* By nature of the way these get numbered, block 0 is always the entry. */
917 make_edge (edge_cache, ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU);
919 for (i = 0; i < n_basic_blocks; ++i)
921 basic_block bb = BASIC_BLOCK (i);
922 rtx insn, x;
923 enum rtx_code code;
924 int force_fallthru = 0;
926 /* Examine the last instruction of the block, and discover the
927 ways we can leave the block. */
929 insn = bb->end;
930 code = GET_CODE (insn);
932 /* A branch. */
933 if (code == JUMP_INSN)
935 rtx tmp;
937 /* ??? Recognize a tablejump and do the right thing. */
938 if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
939 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
940 && GET_CODE (tmp) == JUMP_INSN
941 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
942 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
944 rtvec vec;
945 int j;
947 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
948 vec = XVEC (PATTERN (tmp), 0);
949 else
950 vec = XVEC (PATTERN (tmp), 1);
952 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
953 make_label_edge (edge_cache, bb,
954 XEXP (RTVEC_ELT (vec, j), 0), 0);
956 /* Some targets (e.g., ARM) emit a conditional jump that also
957 contains the out-of-range target. Scan for these and
958 add an edge if necessary. */
959 if ((tmp = single_set (insn)) != NULL
960 && SET_DEST (tmp) == pc_rtx
961 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
962 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
963 make_label_edge (edge_cache, bb,
964 XEXP (XEXP (SET_SRC (tmp), 2), 0), 0);
966 #ifdef CASE_DROPS_THROUGH
967 /* Silly VAXen. The ADDR_VEC is going to be in the way of
968 us naturally detecting fallthru into the next block. */
969 force_fallthru = 1;
970 #endif
973 /* If this is a computed jump, then mark it as reaching
974 everything on the label_value_list and forced_labels list. */
975 else if (computed_jump_p (insn))
977 current_function_has_computed_jump = 1;
979 for (x = label_value_list; x; x = XEXP (x, 1))
980 make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
982 for (x = forced_labels; x; x = XEXP (x, 1))
983 make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
986 /* Return insns create an edge to the exit block. */
987 else if (returnjump_p (insn))
988 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);
990 /* Otherwise, we have a plain conditional or unconditional jump. */
991 else
993 if (! JUMP_LABEL (insn))
994 abort ();
995 make_label_edge (edge_cache, bb, JUMP_LABEL (insn), 0);
999 /* If this is a sibling call insn, then this is in effect a
1000 combined call and return, and so we need an edge to the
1001 exit block. No need to worry about EH edges, since we
1002 wouldn't have created the sibling call in the first place. */
1004 if (code == CALL_INSN && SIBLING_CALL_P (insn))
1005 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);
1006 else
1008 /* If this is a CALL_INSN, then mark it as reaching the active EH
1009 handler for this CALL_INSN. If we're handling asynchronous
1010 exceptions then any insn can reach any of the active handlers.
1012 Also mark the CALL_INSN as reaching any nonlocal goto handler. */
1014 if (code == CALL_INSN || asynchronous_exceptions)
1016 /* Add any appropriate EH edges. We do this unconditionally
1017 since there may be a REG_EH_REGION or REG_EH_RETHROW note
1018 on the call, and this needn't be within an EH region. */
1019 make_eh_edge (edge_cache, eh_nest_info, bb, insn, bb->eh_end);
1021 /* If we have asynchronous exceptions, do the same for *all*
1022 exception regions active in the block. */
1023 if (asynchronous_exceptions
1024 && bb->eh_beg != bb->eh_end)
1026 if (bb->eh_beg >= 0)
1027 make_eh_edge (edge_cache, eh_nest_info, bb,
1028 NULL_RTX, bb->eh_beg);
1030 for (x = bb->head; x != bb->end; x = NEXT_INSN (x))
1031 if (GET_CODE (x) == NOTE
1032 && (NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_BEG
1033 || NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_END))
1035 int region = NOTE_EH_HANDLER (x);
1036 make_eh_edge (edge_cache, eh_nest_info, bb,
1037 NULL_RTX, region);
1041 if (code == CALL_INSN && nonlocal_goto_handler_labels)
1043 /* ??? This could be made smarter: in some cases it's possible
1044 to tell that certain calls will not do a nonlocal goto.
1046 For example, if the nested functions that do the nonlocal
1047 gotos do not have their addresses taken, then only calls to
1048 those functions or to other nested functions that use them
1049 could possibly do nonlocal gotos. */
1050 /* We do know that a REG_EH_REGION note with a value less
1051 than 0 is guaranteed not to perform a non-local goto. */
1052 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1053 if (!note || INTVAL (XEXP (note, 0)) >= 0)
1054 for (x = nonlocal_goto_handler_labels; x ; x = XEXP (x, 1))
1055 make_label_edge (edge_cache, bb, XEXP (x, 0),
1056 EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
1060 /* We know something about the structure of the function __throw in
1061 libgcc2.c. It is the only function that ever contains eh_stub
1062 labels. It modifies its return address so that the last block
1063 returns to one of the eh_stub labels within it. So we have to
1064 make additional edges in the flow graph. */
1065 if (i + 1 == n_basic_blocks && eh_return_stub_label != 0)
1066 make_label_edge (edge_cache, bb, eh_return_stub_label, EDGE_EH);
1068 /* Find out if we can drop through to the next block. */
1069 insn = next_nonnote_insn (insn);
1070 if (!insn || (i + 1 == n_basic_blocks && force_fallthru))
1071 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
1072 else if (i + 1 < n_basic_blocks)
1074 rtx tmp = BLOCK_HEAD (i + 1);
1075 if (GET_CODE (tmp) == NOTE)
1076 tmp = next_nonnote_insn (tmp);
1077 if (force_fallthru || insn == tmp)
1078 make_edge (edge_cache, bb, BASIC_BLOCK (i + 1), EDGE_FALLTHRU);
1082 free_eh_nesting_info (eh_nest_info);
1083 if (edge_cache)
1084 sbitmap_vector_free (edge_cache);
1087 /* Create an edge between two basic blocks. FLAGS are auxiliary information
1088 about the edge that is accumulated between calls. */
1090 void
1091 make_edge (edge_cache, src, dst, flags)
1092 sbitmap *edge_cache;
1093 basic_block src, dst;
1094 int flags;
1095 {
1096 int use_edge_cache;
1097 edge e;
1099 /* Don't bother with edge cache for ENTRY or EXIT; there aren't that
1100 many edges to them, and we didn't allocate memory for it. */
1101 use_edge_cache = (edge_cache
1102 && src != ENTRY_BLOCK_PTR
1103 && dst != EXIT_BLOCK_PTR);
1105 /* Make sure we don't add duplicate edges. */
1106 if (! use_edge_cache || TEST_BIT (edge_cache[src->index], dst->index))
1107 for (e = src->succ; e ; e = e->succ_next)
1108 if (e->dest == dst)
1110 e->flags |= flags;
1111 return;
1114 e = (edge) xcalloc (1, sizeof (*e));
1115 n_edges++;
1117 e->succ_next = src->succ;
1118 e->pred_next = dst->pred;
1119 e->src = src;
1120 e->dest = dst;
1121 e->flags = flags;
1123 src->succ = e;
1124 dst->pred = e;
1126 if (use_edge_cache)
1127 SET_BIT (edge_cache[src->index], dst->index);
1128 }
1130 /* Create an edge from a basic block to a label. */
1132 static void
1133 make_label_edge (edge_cache, src, label, flags)
1134 sbitmap *edge_cache;
1135 basic_block src;
1136 rtx label;
1137 int flags;
1138 {
1139 if (GET_CODE (label) != CODE_LABEL)
1140 abort ();
1142 /* If the label was never emitted, this insn is junk, but avoid a
1143 crash trying to refer to BLOCK_FOR_INSN (label). This can happen
1144 as a result of a syntax error and a diagnostic has already been
1145 printed. */
1147 if (INSN_UID (label) == 0)
1148 return;
1150 make_edge (edge_cache, src, BLOCK_FOR_INSN (label), flags);
1153 /* Create the edges generated by INSN in REGION. */
1155 static void
1156 make_eh_edge (edge_cache, eh_nest_info, src, insn, region)
1157 sbitmap *edge_cache;
1158 eh_nesting_info *eh_nest_info;
1159 basic_block src;
1160 rtx insn;
1161 int region;
1162 {
1163 handler_info **handler_list;
1164 int num, is_call;
1166 is_call = (insn && GET_CODE (insn) == CALL_INSN ? EDGE_ABNORMAL_CALL : 0);
1167 num = reachable_handlers (region, eh_nest_info, insn, &handler_list);
1168 while (--num >= 0)
1170 make_label_edge (edge_cache, src, handler_list[num]->handler_label,
1171 EDGE_ABNORMAL | EDGE_EH | is_call);
1175 /* EH_REGION notes appearing between basic blocks are ambiguous, and even
1176 dangerous if we intend to move basic blocks around. Move such notes
1177 into the following block. */
1179 static void
1180 move_stray_eh_region_notes ()
1181 {
1182 int i;
1183 basic_block b1, b2;
1185 if (n_basic_blocks < 2)
1186 return;
1188 b2 = BASIC_BLOCK (n_basic_blocks - 1);
1189 for (i = n_basic_blocks - 2; i >= 0; --i, b2 = b1)
1191 rtx insn, next, list = NULL_RTX;
1193 b1 = BASIC_BLOCK (i);
1194 for (insn = NEXT_INSN (b1->end); insn != b2->head; insn = next)
1196 next = NEXT_INSN (insn);
1197 if (GET_CODE (insn) == NOTE
1198 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG
1199 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
1201 /* Unlink from the insn chain. */
1202 NEXT_INSN (PREV_INSN (insn)) = next;
1203 PREV_INSN (next) = PREV_INSN (insn);
1205 /* Queue it. */
1206 NEXT_INSN (insn) = list;
1207 list = insn;
1211 if (list == NULL_RTX)
1212 continue;
1214 /* Find where to insert these things. */
1215 insn = b2->head;
1216 if (GET_CODE (insn) == CODE_LABEL)
1217 insn = NEXT_INSN (insn);
1219 while (list)
1221 next = NEXT_INSN (list);
1222 add_insn_after (list, insn);
1223 list = next;
1228 /* Recompute eh_beg/eh_end for each basic block. */
1230 static void
1231 record_active_eh_regions (f)
1232 rtx f;
1233 {
1234 rtx insn, eh_list = NULL_RTX;
1235 int i = 0;
1236 basic_block bb = BASIC_BLOCK (0);
1238 for (insn = f; insn ; insn = NEXT_INSN (insn))
1240 if (bb->head == insn)
1241 bb->eh_beg = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);
1243 if (GET_CODE (insn) == NOTE)
1245 int kind = NOTE_LINE_NUMBER (insn);
1246 if (kind == NOTE_INSN_EH_REGION_BEG)
1247 eh_list = alloc_INSN_LIST (insn, eh_list);
1248 else if (kind == NOTE_INSN_EH_REGION_END)
1250 rtx t = XEXP (eh_list, 1);
1251 free_INSN_LIST_node (eh_list);
1252 eh_list = t;
1256 if (bb->end == insn)
1258 bb->eh_end = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);
1259 i += 1;
1260 if (i == n_basic_blocks)
1261 break;
1262 bb = BASIC_BLOCK (i);
1267 /* Identify critical edges and set the bits appropriately. */
1269 static void
1270 mark_critical_edges ()
1271 {
1272 int i, n = n_basic_blocks;
1273 basic_block bb;
1275 /* We begin with the entry block. This is not terribly important now,
1276 but could be if a front end (Fortran) implemented alternate entry
1277 points. */
1278 bb = ENTRY_BLOCK_PTR;
1279 i = -1;
1281 while (1)
1283 edge e;
1285 /* (1) Critical edges must have a source with multiple successors. */
1286 if (bb->succ && bb->succ->succ_next)
1288 for (e = bb->succ; e ; e = e->succ_next)
1290 /* (2) Critical edges must have a destination with multiple
1291 predecessors. Note that we know there is at least one
1292 predecessor -- the edge we followed to get here. */
1293 if (e->dest->pred->pred_next)
1294 e->flags |= EDGE_CRITICAL;
1295 else
1296 e->flags &= ~EDGE_CRITICAL;
1299 else
1301 for (e = bb->succ; e ; e = e->succ_next)
1302 e->flags &= ~EDGE_CRITICAL;
1305 if (++i >= n)
1306 break;
1307 bb = BASIC_BLOCK (i);
1308 }
1309 }
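
/* Equivalently, a single edge E is critical exactly when its source
   has more than one successor and its destination more than one
   predecessor.  As a hypothetical predicate, not used elsewhere in
   this file (a sketch):

	static int
	edge_critical_p (e)
	     edge e;
	{
	  return (e->src->succ->succ_next != NULL
		  && e->dest->pred->pred_next != NULL);
	}  */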
1311 /* Split a (typically critical) edge. Return the new block.
1312 Abort on abnormal edges.
1314 ??? The code generally expects to be called on critical edges.
1315 The case of a block ending in an unconditional jump to a
1316 block with multiple predecessors is not handled optimally. */
1318 basic_block
1319 split_edge (edge_in)
1320 edge edge_in;
1321 {
1322 basic_block old_pred, bb, old_succ;
1323 edge edge_out;
1324 rtx bb_note;
1325 int i, j;
1327 /* Abnormal edges cannot be split. */
1328 if ((edge_in->flags & EDGE_ABNORMAL) != 0)
1329 abort ();
1331 old_pred = edge_in->src;
1332 old_succ = edge_in->dest;
1334 /* Remove the existing edge from the destination's pred list. */
1336 edge *pp;
1337 for (pp = &old_succ->pred; *pp != edge_in; pp = &(*pp)->pred_next)
1338 continue;
1339 *pp = edge_in->pred_next;
1340 edge_in->pred_next = NULL;
1343 /* Create the new structures. */
1344 bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
1345 edge_out = (edge) xcalloc (1, sizeof (*edge_out));
1346 n_edges++;
1348 memset (bb, 0, sizeof (*bb));
1349 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
1350 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);
1352 /* ??? This info is likely going to be out of date very soon. */
1353 if (old_succ->global_live_at_start)
1355 COPY_REG_SET (bb->global_live_at_start, old_succ->global_live_at_start);
1356 COPY_REG_SET (bb->global_live_at_end, old_succ->global_live_at_start);
1358 else
1360 CLEAR_REG_SET (bb->global_live_at_start);
1361 CLEAR_REG_SET (bb->global_live_at_end);
1364 /* Wire them up. */
1365 bb->pred = edge_in;
1366 bb->succ = edge_out;
1368 edge_in->dest = bb;
1369 edge_in->flags &= ~EDGE_CRITICAL;
1371 edge_out->pred_next = old_succ->pred;
1372 edge_out->succ_next = NULL;
1373 edge_out->src = bb;
1374 edge_out->dest = old_succ;
1375 edge_out->flags = EDGE_FALLTHRU;
1376 edge_out->probability = REG_BR_PROB_BASE;
1378 old_succ->pred = edge_out;
1380 /* Tricky case -- if there existed a fallthru into the successor
1381 (and we're not it) we must add a new unconditional jump around
1382 the new block we're actually interested in.
1384 Further, if that edge is critical, this means a second new basic
1385 block must be created to hold it. In order to simplify correct
1386 insn placement, do this before we touch the existing basic block
1387 ordering for the block we were really wanting. */
1388 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1390 edge e;
1391 for (e = edge_out->pred_next; e ; e = e->pred_next)
1392 if (e->flags & EDGE_FALLTHRU)
1393 break;
1395 if (e)
1397 basic_block jump_block;
1398 rtx pos;
1400 if ((e->flags & EDGE_CRITICAL) == 0
1401 && e->src != ENTRY_BLOCK_PTR)
1403 /* Non critical -- we can simply add a jump to the end
1404 of the existing predecessor. */
1405 jump_block = e->src;
1407 else
1409 /* We need a new block to hold the jump. The simplest
1410 way to do the bulk of the work here is to recursively
1411 call ourselves. */
1412 jump_block = split_edge (e);
1413 e = jump_block->succ;
1416 /* Now add the jump insn ... */
1417 pos = emit_jump_insn_after (gen_jump (old_succ->head),
1418 jump_block->end);
1419 jump_block->end = pos;
1420 if (basic_block_for_insn)
1421 set_block_for_insn (pos, jump_block);
1422 emit_barrier_after (pos);
1424 /* ... let jump know that label is in use, ... */
1425 JUMP_LABEL (pos) = old_succ->head;
1426 ++LABEL_NUSES (old_succ->head);
1428 /* ... and clear fallthru on the outgoing edge. */
1429 e->flags &= ~EDGE_FALLTHRU;
1431 /* Continue splitting the interesting edge. */
1435 /* Place the new block just in front of the successor. */
1436 VARRAY_GROW (basic_block_info, ++n_basic_blocks);
1437 if (old_succ == EXIT_BLOCK_PTR)
1438 j = n_basic_blocks - 1;
1439 else
1440 j = old_succ->index;
1441 for (i = n_basic_blocks - 1; i > j; --i)
1443 basic_block tmp = BASIC_BLOCK (i - 1);
1444 BASIC_BLOCK (i) = tmp;
1445 tmp->index = i;
1447 BASIC_BLOCK (i) = bb;
1448 bb->index = i;
1450 /* Create the basic block note.
1452 Where we place the note can have a noticeable impact on the generated
1453 code. Consider this cfg:

		E
		|
		0
		|
	     +->1-->2--->E
	     |  |
	     +--+
1464 If we need to insert an insn on the edge from block 0 to block 1,
1465 we want to ensure the instructions we insert are outside of any
1466 loop notes that physically sit between block 0 and block 1. Otherwise
1467 we confuse the loop optimizer into thinking the loop is a phony. */
1468 if (old_succ != EXIT_BLOCK_PTR
1469 && PREV_INSN (old_succ->head)
1470 && GET_CODE (PREV_INSN (old_succ->head)) == NOTE
1471 && NOTE_LINE_NUMBER (PREV_INSN (old_succ->head)) == NOTE_INSN_LOOP_BEG)
1472 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK,
1473 PREV_INSN (old_succ->head));
1474 else if (old_succ != EXIT_BLOCK_PTR)
1475 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, old_succ->head);
1476 else
1477 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
1478 NOTE_BASIC_BLOCK (bb_note) = bb;
1479 bb->head = bb->end = bb_note;
1481 /* Not quite simple -- for non-fallthru edges, we must adjust the
1482 predecessor's jump instruction to target our new block. */
1483 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1485 rtx tmp, insn = old_pred->end;
1486 rtx old_label = old_succ->head;
1487 rtx new_label = gen_label_rtx ();
1489 if (GET_CODE (insn) != JUMP_INSN)
1490 abort ();
1492 /* ??? Recognize a tablejump and adjust all matching cases. */
1493 if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
1494 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
1495 && GET_CODE (tmp) == JUMP_INSN
1496 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
1497 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
1499 rtvec vec;
1500 int j;
1502 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
1503 vec = XVEC (PATTERN (tmp), 0);
1504 else
1505 vec = XVEC (PATTERN (tmp), 1);
1507 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
1508 if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
1510 RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (VOIDmode, new_label);
1511 --LABEL_NUSES (old_label);
1512 ++LABEL_NUSES (new_label);
1515 /* Handle casesi dispatch insns */
1516 if ((tmp = single_set (insn)) != NULL
1517 && SET_DEST (tmp) == pc_rtx
1518 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
1519 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
1520 && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
1522 XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (VOIDmode,
1523 new_label);
1524 --LABEL_NUSES (old_label);
1525 ++LABEL_NUSES (new_label);
1528 else
1530 /* This would have indicated an abnormal edge. */
1531 if (computed_jump_p (insn))
1532 abort ();
1534 /* A return instruction can't be redirected. */
1535 if (returnjump_p (insn))
1536 abort ();
1538 /* If the insn doesn't go where we think, we're confused. */
1539 if (JUMP_LABEL (insn) != old_label)
1540 abort ();
1542 redirect_jump (insn, new_label);
1545 emit_label_before (new_label, bb_note);
1546 bb->head = new_label;
1549 return bb;
1552 /* Queue instructions for insertion on an edge between two basic blocks.
1553 The new instructions and basic blocks (if any) will not appear in the
1554 CFG until commit_edge_insertions is called. */
1556 void
1557 insert_insn_on_edge (pattern, e)
1558 rtx pattern;
1559 edge e;
1560 {
1561 /* We cannot insert instructions on an abnormal critical edge.
1562 It will be easier to find the culprit if we die now. */
1563 if ((e->flags & (EDGE_ABNORMAL|EDGE_CRITICAL))
1564 == (EDGE_ABNORMAL|EDGE_CRITICAL))
1565 abort ();
1567 if (e->insns == NULL_RTX)
1568 start_sequence ();
1569 else
1570 push_to_sequence (e->insns);
1572 emit_insn (pattern);
1574 e->insns = get_insns ();
1575 end_sequence ();
1576 }
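
/* A typical use queues a pattern on an edge and commits all pending
   insertions at the end of the pass.  A sketch, where DEST and SRC
   are whatever registers the pass is shuffling:

	insert_insn_on_edge (gen_move_insn (dest, src), e);
	...
	commit_edge_insertions ();  */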
1578 /* Update the CFG for the instructions queued on edge E. */
1580 static void
1581 commit_one_edge_insertion (e)
1582 edge e;
1583 {
1584 rtx before = NULL_RTX, after = NULL_RTX, insns, tmp;
1585 basic_block bb;
1587 /* Pull the insns off the edge now since the edge might go away. */
1588 insns = e->insns;
1589 e->insns = NULL_RTX;
1591 /* Figure out where to put these things. If the destination has
1592 one predecessor, insert there. Except for the exit block. */
1593 if (e->dest->pred->pred_next == NULL
1594 && e->dest != EXIT_BLOCK_PTR)
1596 bb = e->dest;
1598 /* Get the location correct wrt a code label, and "nice" wrt
1599 a basic block note, and before everything else. */
1600 tmp = bb->head;
1601 if (GET_CODE (tmp) == CODE_LABEL)
1602 tmp = NEXT_INSN (tmp);
1603 if (GET_CODE (tmp) == NOTE
1604 && NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BASIC_BLOCK)
1605 tmp = NEXT_INSN (tmp);
1606 if (tmp == bb->head)
1607 before = tmp;
1608 else
1609 after = PREV_INSN (tmp);
1612 /* If the source has one successor and the edge is not abnormal,
1613 insert there. Except for the entry block. */
1614 else if ((e->flags & EDGE_ABNORMAL) == 0
1615 && e->src->succ->succ_next == NULL
1616 && e->src != ENTRY_BLOCK_PTR)
1618 bb = e->src;
1619 /* It is possible to have a non-simple jump here. Consider a target
1620 where some forms of unconditional jumps clobber a register. This
1621 happens on the fr30 for example.
1623 We know this block has a single successor, so we can just emit
1624 the queued insns before the jump. */
1625 if (GET_CODE (bb->end) == JUMP_INSN)
1627 before = bb->end;
1629 else
1631 /* We'd better be fallthru, or we've lost track of what's what. */
1632 if ((e->flags & EDGE_FALLTHRU) == 0)
1633 abort ();
1635 after = bb->end;
1639 /* Otherwise we must split the edge. */
1640 else
1642 bb = split_edge (e);
1643 after = bb->end;
1646 /* Now that we've found the spot, do the insertion. */
1648 /* Set the new block number for these insns, if structure is allocated. */
1649 if (basic_block_for_insn)
1651 rtx i;
1652 for (i = insns; i != NULL_RTX; i = NEXT_INSN (i))
1653 set_block_for_insn (i, bb);
1656 if (before)
1658 emit_insns_before (insns, before);
1659 if (before == bb->head)
1660 bb->head = insns;
1662 else
1664 rtx last = emit_insns_after (insns, after);
1665 if (after == bb->end)
1667 bb->end = last;
1669 if (GET_CODE (last) == JUMP_INSN)
1671 if (returnjump_p (last))
1673 /* ??? Remove all outgoing edges from BB and add one
1674 for EXIT. This is not currently a problem because
1675 this only happens for the (single) epilogue, which
1676 already has a fallthru edge to EXIT. */
1678 e = bb->succ;
1679 if (e->dest != EXIT_BLOCK_PTR
1680 || e->succ_next != NULL
1681 || (e->flags & EDGE_FALLTHRU) == 0)
1682 abort ();
1683 e->flags &= ~EDGE_FALLTHRU;
1685 emit_barrier_after (last);
1687 else
1688 abort ();
1694 /* Update the CFG for all queued instructions. */
1696 void
1697 commit_edge_insertions ()
1698 {
1699 int i;
1700 basic_block bb;
1702 #ifdef ENABLE_CHECKING
1703 verify_flow_info ();
1704 #endif
1706 i = -1;
1707 bb = ENTRY_BLOCK_PTR;
1708 while (1)
1710 edge e, next;
1712 for (e = bb->succ; e ; e = next)
1714 next = e->succ_next;
1715 if (e->insns)
1716 commit_one_edge_insertion (e);
1719 if (++i >= n_basic_blocks)
1720 break;
1721 bb = BASIC_BLOCK (i);
1725 /* Delete all unreachable basic blocks. */
1727 static void
1728 delete_unreachable_blocks ()
1729 {
1730 basic_block *worklist, *tos;
1731 int deleted_handler;
1732 edge e;
1733 int i, n;
1735 n = n_basic_blocks;
1736 tos = worklist = (basic_block *) xmalloc (sizeof (basic_block) * n);
1738 /* Use basic_block->aux as a marker. Clear them all. */
1740 for (i = 0; i < n; ++i)
1741 BASIC_BLOCK (i)->aux = NULL;
1743 /* Add our starting points to the worklist. Almost always there will
1744 be only one. It isn't inconceivable that we might one day directly
1745 support Fortran alternate entry points. */
1747 for (e = ENTRY_BLOCK_PTR->succ; e ; e = e->succ_next)
1749 *tos++ = e->dest;
1751 /* Mark the block with a handy non-null value. */
1752 e->dest->aux = e;
1755 /* Iterate: find everything reachable from what we've already seen. */
1757 while (tos != worklist)
1759 basic_block b = *--tos;
1761 for (e = b->succ; e ; e = e->succ_next)
1762 if (!e->dest->aux)
1764 *tos++ = e->dest;
1765 e->dest->aux = e;
1769 /* Delete all unreachable basic blocks. Count down so that we don't
1770 interfere with the block renumbering that happens in delete_block. */
1772 deleted_handler = 0;
1774 for (i = n - 1; i >= 0; --i)
1776 basic_block b = BASIC_BLOCK (i);
1778 if (b->aux != NULL)
1779 /* This block was found. Tidy up the mark. */
1780 b->aux = NULL;
1781 else
1782 deleted_handler |= delete_block (b);
1785 tidy_fallthru_edges ();
1787 /* If we deleted an exception handler, we may have EH region begin/end
1788 blocks to remove as well. */
1789 if (deleted_handler)
1790 delete_eh_regions ();
1792 free (worklist);
1795 /* Find EH regions for which there is no longer a handler, and delete them. */
1797 static void
1798 delete_eh_regions ()
1799 {
1800 rtx insn;
1802 update_rethrow_references ();
1804 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1805 if (GET_CODE (insn) == NOTE)
1807 if ((NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG) ||
1808 (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
1810 int num = NOTE_EH_HANDLER (insn);
1811 /* A NULL handler indicates a region is no longer needed,
1812 as long as its rethrow label isn't used. */
1813 if (get_first_handler (num) == NULL && ! rethrow_used (num))
1815 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1816 NOTE_SOURCE_FILE (insn) = 0;
1822 /* Return true if NOTE is not one of the ones that must be kept paired,
1823 so that we may simply delete them. */
1825 static int
1826 can_delete_note_p (note)
1827 rtx note;
1828 {
1829 return (NOTE_LINE_NUMBER (note) == NOTE_INSN_DELETED
1830 || NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK);
1833 /* Unlink a chain of insns between START and FINISH, leaving notes
1834 that must be paired. */
1836 void
1837 flow_delete_insn_chain (start, finish)
1838 rtx start, finish;
1839 {
1840 /* Unchain the insns one by one. It would be quicker to delete all
1841 of these with a single unchaining, rather than one at a time, but
1842 we need to keep the NOTE's. */
1844 rtx next;
1846 while (1)
1848 next = NEXT_INSN (start);
1849 if (GET_CODE (start) == NOTE && !can_delete_note_p (start))
1850 ;
1851 else if (GET_CODE (start) == CODE_LABEL && !can_delete_label_p (start))
1852 ;
1853 else
1854 next = flow_delete_insn (start);
1856 if (start == finish)
1857 break;
1858 start = next;
1862 /* Delete the insns in a (non-live) block. We physically delete every
1863 non-deleted-note insn, and update the flow graph appropriately.
1865 Return nonzero if we deleted an exception handler. */
1867 /* ??? Preserving all such notes strikes me as wrong. It would be nice
1868 to post-process the stream to remove empty blocks, loops, ranges, etc. */
1870 static int
1871 delete_block (b)
1872 basic_block b;
1873 {
1874 int deleted_handler = 0;
1875 rtx insn, end, tmp;
1877 /* If the head of this block is a CODE_LABEL, then it might be the
1878 label for an exception handler which can't be reached.
1880 We need to remove the label from the exception_handler_label list
1881 and remove the associated NOTE_INSN_EH_REGION_BEG and
1882 NOTE_INSN_EH_REGION_END notes. */
1884 insn = b->head;
1886 never_reached_warning (insn);
1888 if (GET_CODE (insn) == CODE_LABEL)
1890 rtx x, *prev = &exception_handler_labels;
1892 for (x = exception_handler_labels; x; x = XEXP (x, 1))
1894 if (XEXP (x, 0) == insn)
1896 /* Found a match, splice this label out of the EH label list. */
1897 *prev = XEXP (x, 1);
1898 XEXP (x, 1) = NULL_RTX;
1899 XEXP (x, 0) = NULL_RTX;
1901 /* Remove the handler from all regions */
1902 remove_handler (insn);
1903 deleted_handler = 1;
1904 break;
1906 prev = &XEXP (x, 1);
1909 /* This label may be referenced by code solely for its value, or
1910 referenced by static data, or something. We have determined
1911 that it is not reachable, but cannot delete the label itself.
1912 Save code space and continue to delete the balance of the block,
1913 along with properly updating the cfg. */
1914 if (!can_delete_label_p (insn))
1916 /* If we've only got one of these, skip the whole deleting
1917 insns thing. */
1918 if (insn == b->end)
1919 goto no_delete_insns;
1920 insn = NEXT_INSN (insn);
1924 /* Include any jump table following the basic block. */
1925 end = b->end;
1926 if (GET_CODE (end) == JUMP_INSN
1927 && (tmp = JUMP_LABEL (end)) != NULL_RTX
1928 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
1929 && GET_CODE (tmp) == JUMP_INSN
1930 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
1931 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
1932 end = tmp;
1934 /* Include any barrier that may follow the basic block. */
1935 tmp = next_nonnote_insn (end);
1936 if (tmp && GET_CODE (tmp) == BARRIER)
1937 end = tmp;
1939 /* Selectively delete the entire chain. */
1940 flow_delete_insn_chain (insn, end);
1942 no_delete_insns:
1944 /* Remove the edges into and out of this block. Note that there may
1945 indeed be edges in, if we are removing an unreachable loop. */
1947 edge e, next, *q;
1949 for (e = b->pred; e ; e = next)
1951 for (q = &e->src->succ; *q != e; q = &(*q)->succ_next)
1952 continue;
1953 *q = e->succ_next;
1954 next = e->pred_next;
1955 n_edges--;
1956 free (e);
1958 for (e = b->succ; e ; e = next)
1960 for (q = &e->dest->pred; *q != e; q = &(*q)->pred_next)
1961 continue;
1962 *q = e->pred_next;
1963 next = e->succ_next;
1964 n_edges--;
1965 free (e);
1968 b->pred = NULL;
1969 b->succ = NULL;
1972 /* Remove the basic block from the array, and compact behind it. */
1973 expunge_block (b);
1975 return deleted_handler;
1978 /* Remove block B from the basic block array and compact behind it. */
1980 static void
1981 expunge_block (b)
1982 basic_block b;
1984 int i, n = n_basic_blocks;
1986 for (i = b->index; i + 1 < n; ++i)
1988 basic_block x = BASIC_BLOCK (i + 1);
1989 BASIC_BLOCK (i) = x;
1990 x->index = i;
1993 basic_block_info->num_elements--;
1994 n_basic_blocks--;
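/* Editorial aside -- an illustrative, self-contained model (not part of
   flow.c) of the compaction idiom used by expunge_block above: remove
   slot K from a vector of pointers whose elements record their own
   index, shifting the tail down and renumbering the survivors.  */

struct toy_block { int index; };

static void
toy_expunge (struct toy_block **vec, int *n, int k)
{
  int i;

  for (i = k; i + 1 < *n; ++i)
    {
      vec[i] = vec[i + 1];
      vec[i]->index = i;
    }
  (*n)--;
}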
1997 /* Delete INSN by patching it out. Return the next insn. */
1999 rtx
2000 flow_delete_insn (insn)
2001 rtx insn;
2003 rtx prev = PREV_INSN (insn);
2004 rtx next = NEXT_INSN (insn);
2005 rtx note;
2007 PREV_INSN (insn) = NULL_RTX;
2008 NEXT_INSN (insn) = NULL_RTX;
2010 if (prev)
2011 NEXT_INSN (prev) = next;
2012 if (next)
2013 PREV_INSN (next) = prev;
2014 else
2015 set_last_insn (prev);
2017 if (GET_CODE (insn) == CODE_LABEL)
2018 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
2020 /* If deleting a jump, decrement the use count of the label. Deleting
2021 the label itself should happen in the normal course of block merging. */
2022 if (GET_CODE (insn) == JUMP_INSN && JUMP_LABEL (insn))
2023 LABEL_NUSES (JUMP_LABEL (insn))--;
2025 /* Also if deleting an insn that references a label. */
2026 else if ((note = find_reg_note (insn, REG_LABEL, NULL_RTX)) != NULL_RTX)
2027 LABEL_NUSES (XEXP (note, 0))--;
2029 return next;
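/* Editorial aside -- a toy model (not part of flow.c) of the unlink
   performed by flow_delete_insn: the standard doubly-linked splice,
   with the list's tail pointer updated when the last node goes away,
   mirroring the set_last_insn call above.  */

struct toy_insn_node { struct toy_insn_node *prev, *next; };
static struct toy_insn_node *toy_last;   /* stand-in for the chain tail */

static struct toy_insn_node *
toy_unlink (struct toy_insn_node *n)
{
  struct toy_insn_node *prev = n->prev, *next = n->next;

  n->prev = n->next = 0;
  if (prev)
    prev->next = next;
  if (next)
    next->prev = prev;
  else
    toy_last = prev;                     /* removed the tail node */
  return next;
}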
2032 /* True if a given label can be deleted. */
2034 static int
2035 can_delete_label_p (label)
2036 rtx label;
2038 rtx x;
2040 if (LABEL_PRESERVE_P (label))
2041 return 0;
2043 for (x = forced_labels; x ; x = XEXP (x, 1))
2044 if (label == XEXP (x, 0))
2045 return 0;
2046 for (x = label_value_list; x ; x = XEXP (x, 1))
2047 if (label == XEXP (x, 0))
2048 return 0;
2049 for (x = exception_handler_labels; x ; x = XEXP (x, 1))
2050 if (label == XEXP (x, 0))
2051 return 0;
2053 /* User declared labels must be preserved. */
2054 if (LABEL_NAME (label) != 0)
2055 return 0;
2057 return 1;
2060 /* Blocks A and B are to be merged into a single block A. The insns
2061 are already contiguous, hence `nomove'. */
2063 void
2064 merge_blocks_nomove (a, b)
2065 basic_block a, b;
2067 edge e;
2068 rtx b_head, b_end, a_end;
2069 rtx del_first = NULL_RTX, del_last = NULL_RTX;
2070 int b_empty = 0;
2072 /* If there was a CODE_LABEL beginning B, delete it. */
2073 b_head = b->head;
2074 b_end = b->end;
2075 if (GET_CODE (b_head) == CODE_LABEL)
2077 /* Detect basic blocks with nothing but a label. This can happen
2078 in particular at the end of a function. */
2079 if (b_head == b_end)
2080 b_empty = 1;
2081 del_first = del_last = b_head;
2082 b_head = NEXT_INSN (b_head);
2085 /* Delete the basic block note. */
2086 if (GET_CODE (b_head) == NOTE
2087 && NOTE_LINE_NUMBER (b_head) == NOTE_INSN_BASIC_BLOCK)
2089 if (b_head == b_end)
2090 b_empty = 1;
2091 if (! del_last)
2092 del_first = b_head;
2093 del_last = b_head;
2094 b_head = NEXT_INSN (b_head);
2097 /* If there was a jump out of A, delete it. */
2098 a_end = a->end;
2099 if (GET_CODE (a_end) == JUMP_INSN)
2101 rtx prev;
2103 prev = prev_nonnote_insn (a_end);
2104 if (!prev)
2105 prev = a->head;
2107 del_first = a_end;
2109 #ifdef HAVE_cc0
2110 /* If this was a conditional jump, we need to also delete
2111 the insn that set cc0. */
2112 if (prev && sets_cc0_p (prev))
2114 rtx tmp = prev;
2115 prev = prev_nonnote_insn (prev);
2116 if (!prev)
2117 prev = a->head;
2118 del_first = tmp;
2120 #endif
2122 a_end = prev;
2125 /* Delete everything marked above as well as crap that might be
2126 hanging out between the two blocks. */
2127 flow_delete_insn_chain (del_first, del_last);
2129 /* Normally there should only be one successor of A and that is B, but
2130 partway through the merge of blocks for conditional_execution we'll
2131 be merging a TEST block with THEN and ELSE successors. Free the
2132 whole lot of them and hope the caller knows what they're doing. */
2133 while (a->succ)
2134 remove_edge (a->succ);
2136 /* Adjust the edges out of B for the new owner. */
2137 for (e = b->succ; e ; e = e->succ_next)
2138 e->src = a;
2139 a->succ = b->succ;
2141 /* B hasn't quite yet ceased to exist. Attempt to prevent mishap. */
2142 b->pred = b->succ = NULL;
2144 /* Reassociate the insns of B with A. */
2145 if (!b_empty)
2147 if (basic_block_for_insn)
2149 BLOCK_FOR_INSN (b_head) = a;
2150 while (b_head != b_end)
2152 b_head = NEXT_INSN (b_head);
2153 BLOCK_FOR_INSN (b_head) = a;
2156 a_end = b_end;
2158 a->end = a_end;
2160 expunge_block (b);
2163 /* Blocks A and B are to be merged into a single block. A has no incoming
2164 fallthru edge, so it can be moved before B without adding or modifying
2165 any jumps (aside from the jump from A to B). */
2167 static int
2168 merge_blocks_move_predecessor_nojumps (a, b)
2169 basic_block a, b;
2171 rtx start, end, barrier;
2172 int index;
2174 start = a->head;
2175 end = a->end;
2177 /* We want to delete the BARRIER after the end of the insns we are
2178 going to move. If we don't find a BARRIER, then do nothing. This
2179 can happen in some cases if we have labels we can not delete.
2181 Similarly, do nothing if we can not delete the label at the start
2182 of the target block. */
2183 barrier = next_nonnote_insn (end);
2184 if (GET_CODE (barrier) != BARRIER
2185 || (GET_CODE (b->head) == CODE_LABEL
2186 && ! can_delete_label_p (b->head)))
2187 return 0;
2188 else
2189 flow_delete_insn (barrier);
2191 /* Move block and loop notes out of the chain so that we do not
2192 disturb their order.
2194 ??? A better solution would be to squeeze out all the non-nested notes
2195 and adjust the block trees appropriately. Even better would be to have
2196 a tighter connection between block trees and rtl so that this is not
2197 necessary. */
2198 start = squeeze_notes (start, end);
2200 /* Scramble the insn chain. */
2201 if (end != PREV_INSN (b->head))
2202 reorder_insns (start, end, PREV_INSN (b->head));
2204 if (rtl_dump_file)
2206 fprintf (rtl_dump_file, "Moved block %d before %d and merged.\n",
2207 a->index, b->index);
2210 /* Swap the records for the two blocks around. Although we are deleting B,
2211 A is now where B was and we want to compact the BB array from where
2212 A used to be. */
2213 BASIC_BLOCK (a->index) = b;
2214 BASIC_BLOCK (b->index) = a;
2215 index = a->index;
2216 a->index = b->index;
2217 b->index = index;
2219 /* Now blocks A and B are contiguous. Merge them. */
2220 merge_blocks_nomove (a, b);
2222 return 1;
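/* Editorial worked example (hypothetical indices): let A be block 5 and
   B block 2.  After reorder_insns, A's insns sit immediately before
   B's, i.e. at B's old position in the stream.  Swapping the two array
   slots and index fields gives A index 2 and B index 5, so when
   merge_blocks_nomove expunges B the compaction happens at slot 5 --
   A's old home -- and blocks 3 and 4 keep their numbers.  */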
2225 /* Blocks A and B are to be merged into a single block. B has no outgoing
2226 fallthru edge, so it can be moved after A without adding or modifying
2227 any jumps (aside from the jump from A to B). */
2229 static int
2230 merge_blocks_move_successor_nojumps (a, b)
2231 basic_block a, b;
2233 rtx start, end, barrier;
2235 start = b->head;
2236 end = b->end;
2238 /* We want to delete the BARRIER after the end of the insns we are
2239 going to move. If we don't find a BARRIER, then do nothing. This
2240 can happen in some cases if we have labels we can not delete.
2242 Similarly, do nothing if we can not delete the label at the start
2243 of the target block. */
2244 barrier = next_nonnote_insn (end);
2245 if (GET_CODE (barrier) != BARRIER
2246 || (GET_CODE (b->head) == CODE_LABEL
2247 && ! can_delete_label_p (b->head)))
2248 return 0;
2249 else
2250 flow_delete_insn (barrier);
2252 /* Move block and loop notes out of the chain so that we do not
2253 disturb their order.
2255 ??? A better solution would be to squeeze out all the non-nested notes
2256 and adjust the block trees appropriately. Even better would be to have
2257 a tighter connection between block trees and rtl so that this is not
2258 necessary. */
2259 start = squeeze_notes (start, end);
2261 /* Scramble the insn chain. */
2262 reorder_insns (start, end, a->end);
2264 /* Now blocks A and B are contiguous. Merge them. */
2265 merge_blocks_nomove (a, b);
2267 if (rtl_dump_file)
2269 fprintf (rtl_dump_file, "Moved block %d after %d and merged.\n",
2270 b->index, a->index);
2273 return 1;
2276 /* Attempt to merge basic blocks that are potentially non-adjacent.
2277 Return true iff the attempt succeeded. */
2279 static int
2280 merge_blocks (e, b, c)
2281 edge e;
2282 basic_block b, c;
2284 /* If B has a fallthru edge to C, no need to move anything. */
2285 if (e->flags & EDGE_FALLTHRU)
2287 /* If a label still appears somewhere and we cannot delete the label,
2288 then we cannot merge the blocks. The edge was tidied already. */
2290 rtx insn, stop = NEXT_INSN (c->head);
2291 for (insn = NEXT_INSN (b->end); insn != stop; insn = NEXT_INSN (insn))
2292 if (GET_CODE (insn) == CODE_LABEL && !can_delete_label_p (insn))
2293 return 0;
2295 merge_blocks_nomove (b, c);
2297 if (rtl_dump_file)
2299 fprintf (rtl_dump_file, "Merged %d and %d without moving.\n",
2300 b->index, c->index);
2303 return 1;
2305 else
2307 edge tmp_edge;
2308 basic_block d;
2309 int c_has_outgoing_fallthru;
2310 int b_has_incoming_fallthru;
2312 /* We must make sure to not munge nesting of exception regions,
2313 lexical blocks, and loop notes.
2315 The first is taken care of by requiring that the active eh
2316 region at the end of one block always matches the active eh
2317 region at the beginning of the next block.
2319 The latter two are taken care of by squeezing out all the notes. */
2321 /* ??? A throw/catch edge (or any abnormal edge) should be rarely
2322 executed and we may want to treat blocks which have two out
2323 edges, one normal, one abnormal as only having one edge for
2324 block merging purposes. */
2326 for (tmp_edge = c->succ; tmp_edge ; tmp_edge = tmp_edge->succ_next)
2327 if (tmp_edge->flags & EDGE_FALLTHRU)
2328 break;
2329 c_has_outgoing_fallthru = (tmp_edge != NULL);
2331 for (tmp_edge = b->pred; tmp_edge ; tmp_edge = tmp_edge->pred_next)
2332 if (tmp_edge->flags & EDGE_FALLTHRU)
2333 break;
2334 b_has_incoming_fallthru = (tmp_edge != NULL);
2336 /* If B does not have an incoming fallthru, and the exception regions
2337 match, then it can be moved immediately before C without introducing
2338 or modifying jumps.
2340 C can not be the first block, so we do not have to worry about
2341 accessing a non-existent block. */
2342 d = BASIC_BLOCK (c->index - 1);
2343 if (! b_has_incoming_fallthru
2344 && d->eh_end == b->eh_beg
2345 && b->eh_end == c->eh_beg)
2346 return merge_blocks_move_predecessor_nojumps (b, c);
2348 /* Otherwise, we're going to try to move C after B. Make sure the
2349 exception regions match.
2351 If B is the last basic block, then we must not try to access the
2352 block structure for block B + 1. Luckily in that case we do not
2353 need to worry about matching exception regions. */
2354 d = (b->index + 1 < n_basic_blocks ? BASIC_BLOCK (b->index + 1) : NULL);
2355 if (b->eh_end == c->eh_beg
2356 && (d == NULL || c->eh_end == d->eh_beg))
2358 /* If C does not have an outgoing fallthru, then it can be moved
2359 immediately after B without introducing or modifying jumps. */
2360 if (! c_has_outgoing_fallthru)
2361 return merge_blocks_move_successor_nojumps (b, c);
2363 /* Otherwise, we'll need to insert an extra jump, and possibly
2364 a new block to contain it. */
2365 /* ??? Not implemented yet. */
2368 return 0;
2372 /* Top level driver for merge_blocks. */
2374 static void
2375 try_merge_blocks ()
2377 int i;
2379 /* Attempt to merge blocks as made possible by edge removal. If a block
2380 has only one successor, and the successor has only one predecessor,
2381 they may be combined. */
2383 for (i = 0; i < n_basic_blocks; )
2385 basic_block c, b = BASIC_BLOCK (i);
2386 edge s;
2388 /* A loop because chains of blocks might be combinable. */
2389 while ((s = b->succ) != NULL
2390 && s->succ_next == NULL
2391 && (s->flags & EDGE_EH) == 0
2392 && (c = s->dest) != EXIT_BLOCK_PTR
2393 && c->pred->pred_next == NULL
2394 /* If the jump insn has side effects, we can't kill the edge. */
2395 && (GET_CODE (b->end) != JUMP_INSN
2396 || onlyjump_p (b->end))
2397 && merge_blocks (s, b, c))
2398 continue;
2400 /* Don't get confused by the index shift caused by deleting blocks. */
2401 i = b->index + 1;
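/* Editorial worked example: with a chain b -> c -> d in which every
   link is the sole out-edge of its source and the sole in-edge of its
   destination, the while loop above first merges c into b; b's lone
   successor is then d, so the next iteration merges d as well, all
   before the outer index advances past the combined block.  */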
2405 /* The given edge should potentially be a fallthru edge. If that is in
2406 fact true, delete the jump and barriers that are in the way. */
2408 void
2409 tidy_fallthru_edge (e, b, c)
2410 edge e;
2411 basic_block b, c;
2413 rtx q;
2415 /* ??? In a late-running flow pass, other folks may have deleted basic
2416 blocks by nopping out blocks, leaving multiple BARRIERs between here
2417 and the target label. They ought to be chastised and fixed.
2419 We can also wind up with a sequence of undeletable labels between
2420 one block and the next.
2422 So search through a sequence of barriers, labels, and notes for
2423 the head of block C and assert that we really do fall through. */
2425 if (next_real_insn (b->end) != next_real_insn (PREV_INSN (c->head)))
2426 return;
2428 /* Remove what will soon cease being the jump insn from the source block.
2429 If block B consisted only of this single jump, turn it into a deleted
2430 note. */
2431 q = b->end;
2432 if (GET_CODE (q) == JUMP_INSN)
2434 #ifdef HAVE_cc0
2435 /* If this was a conditional jump, we need to also delete
2436 the insn that set cc0. */
2437 if (! simplejump_p (q) && condjump_p (q) && sets_cc0_p (PREV_INSN (q)))
2438 q = PREV_INSN (q);
2439 #endif
2441 if (b->head == q)
2443 PUT_CODE (q, NOTE);
2444 NOTE_LINE_NUMBER (q) = NOTE_INSN_DELETED;
2445 NOTE_SOURCE_FILE (q) = 0;
2447 else
2448 b->end = q = PREV_INSN (q);
2451 /* Selectively unlink the sequence. */
2452 if (q != PREV_INSN (c->head))
2453 flow_delete_insn_chain (NEXT_INSN (q), PREV_INSN (c->head));
2455 e->flags |= EDGE_FALLTHRU;
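/* Editorial example of the transformation above: if block B ends in
   "jump L" and block C starts at label L with only barriers, notes,
   and undeletable labels in between, the jump (plus, on cc0 targets,
   the insn that set cc0 for it) is trimmed or turned into a
   NOTE_INSN_DELETED, the dead stretch before C's head is unlinked,
   and the edge is re-marked EDGE_FALLTHRU.  */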
2458 /* Fix up edges that now fall through, or rather should now fall through
2459 but previously required a jump around now deleted blocks. Simplify
2460 the search by only examining blocks numerically adjacent, since this
2461 is how find_basic_blocks created them. */
2463 static void
2464 tidy_fallthru_edges ()
2466 int i;
2468 for (i = 1; i < n_basic_blocks; ++i)
2470 basic_block b = BASIC_BLOCK (i - 1);
2471 basic_block c = BASIC_BLOCK (i);
2472 edge s;
2474 /* We care about simple conditional or unconditional jumps with
2475 a single successor.
2477 If we had a conditional branch to the next instruction when
2478 find_basic_blocks was called, then there will only be one
2479 out edge for the block which ended with the conditional
2480 branch (since we do not create duplicate edges).
2482 Furthermore, the edge will be marked as a fallthru because we
2483 merge the flags for the duplicate edges. So we do not want to
2484 check that the edge is not a FALLTHRU edge. */
2485 if ((s = b->succ) != NULL
2486 && s->succ_next == NULL
2487 && s->dest == c
2488 /* If the jump insn has side effects, we can't tidy the edge. */
2489 && (GET_CODE (b->end) != JUMP_INSN
2490 || onlyjump_p (b->end)))
2491 tidy_fallthru_edge (s, b, c);
2495 /* Discover and record the loop depth at the head of each basic block. */
2497 void
2498 calculate_loop_depth (dump)
2499 FILE *dump;
2501 struct loops loops;
2503 /* The loop infrastructure does the real job for us. */
2504 flow_loops_find (&loops);
2506 if (dump)
2507 flow_loops_dump (&loops, dump, 0);
2509 flow_loops_free (&loops);
2512 /* Perform data flow analysis.
2513 F is the first insn of the function and NREGS the number of register numbers
2514 in use. */
2516 void
2517 life_analysis (f, nregs, file, remove_dead_code)
2518 rtx f;
2519 int nregs;
2520 FILE *file;
2521 int remove_dead_code;
2523 #ifdef ELIMINABLE_REGS
2524 register int i;
2525 static struct {int from, to; } eliminables[] = ELIMINABLE_REGS;
2526 #endif
2527 int flags;
2528 sbitmap all_blocks;
2530 /* Dead code elimination changes basic block structure and therefore
2531 breaks the SSA phi representation. Particularly, a phi node
2532 can have an alternative value for each incoming block, referenced
2533 by the block number. Removing dead code can bump entire blocks
2534 and therefore cause blocks to be renumbered, invalidating the
2535 numbering of phi alternatives. */
2536 if (remove_dead_code && in_ssa_form)
2537 abort ();
2539 /* Record which registers will be eliminated. We use this in
2540 mark_used_regs. */
2542 CLEAR_HARD_REG_SET (elim_reg_set);
2544 #ifdef ELIMINABLE_REGS
2545 for (i = 0; i < (int) (sizeof eliminables / sizeof eliminables[0]); i++)
2546 SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
2547 #else
2548 SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
2549 #endif
2551 /* We want alias analysis information for local dead store elimination. */
2552 init_alias_analysis ();
2554 if (! optimize)
2555 flags = PROP_DEATH_NOTES | PROP_REG_INFO;
2556 else
2558 flags = PROP_FINAL;
2559 if (! remove_dead_code)
2560 flags &= ~(PROP_SCAN_DEAD_CODE | PROP_KILL_DEAD_CODE);
2563 /* The post-reload life analysis must have (on a global basis) the same
2564 registers live as were computed by reload itself. Otherwise elimination
2565 offsets and such may be incorrect.
2567 Reload will mark some registers as live even though they do not
2568 appear in the rtl. */
2569 if (reload_completed)
2570 flags &= ~PROP_REG_INFO;
2572 max_regno = nregs;
2574 /* Always remove no-op moves. Do this before other processing so
2575 that we don't have to keep re-scanning them. */
2576 delete_noop_moves (f);
2578 /* Some targets can emit simpler epilogues if they know that sp was
2579 not ever modified during the function. After reload, of course,
2580 we've already emitted the epilogue so there's no sense searching. */
2581 if (! reload_completed)
2582 notice_stack_pointer_modification (f);
2584 /* Allocate and zero out data structures that will record the
2585 data from lifetime analysis. */
2586 allocate_reg_life_data ();
2587 allocate_bb_life_data ();
2588 all_blocks = sbitmap_alloc (n_basic_blocks);
2589 sbitmap_ones (all_blocks);
2591 /* Find the set of registers live on function exit. */
2592 mark_regs_live_at_end (EXIT_BLOCK_PTR->global_live_at_start);
2594 /* "Update" life info from zero. It'd be nice to begin the
2595 relaxation with just the exit and noreturn blocks, but that set
2596 is not immediately handy. */
2598 if (flags & PROP_REG_INFO)
2599 memset (regs_ever_live, 0, sizeof (regs_ever_live));
2600 update_life_info (all_blocks, UPDATE_LIFE_GLOBAL, flags);
2602 /* Clean up. */
2603 sbitmap_free (all_blocks);
2604 end_alias_analysis ();
2606 if (file)
2607 dump_flow_info (file);
2609 free_basic_block_vars (1);
2612 /* A subroutine of verify_wide_reg, called through for_each_rtx.
2613 Search for REGNO. If found, abort if it is not wider than word_mode. */
2615 static int
2616 verify_wide_reg_1 (px, pregno)
2617 rtx *px;
2618 void *pregno;
2620 rtx x = *px;
2621 unsigned int regno = *(int *) pregno;
2623 if (GET_CODE (x) == REG && REGNO (x) == regno)
2625 if (GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD)
2626 abort ();
2627 return 1;
2629 return 0;
2632 /* A subroutine of verify_local_live_at_start. Search through insns
2633 between HEAD and END looking for register REGNO. */
2635 static void
2636 verify_wide_reg (regno, head, end)
2637 int regno;
2638 rtx head, end;
2640 while (1)
2642 if (GET_RTX_CLASS (GET_CODE (head)) == 'i'
2643 && for_each_rtx (&PATTERN (head), verify_wide_reg_1, &regno))
2644 return;
2645 if (head == end)
2646 break;
2647 head = NEXT_INSN (head);
2650 /* We didn't find the register at all. Something's way screwy. */
2651 abort ();
2654 /* A subroutine of update_life_info. Verify that there are no untoward
2655 changes in live_at_start during a local update. */
2657 static void
2658 verify_local_live_at_start (new_live_at_start, bb)
2659 regset new_live_at_start;
2660 basic_block bb;
2662 if (reload_completed)
2664 /* After reload, there are no pseudos, nor subregs of multi-word
2665 registers. The regsets should exactly match. */
2666 if (! REG_SET_EQUAL_P (new_live_at_start, bb->global_live_at_start))
2667 abort ();
2669 else
2671 int i;
2673 /* Find the set of changed registers. */
2674 XOR_REG_SET (new_live_at_start, bb->global_live_at_start);
2676 EXECUTE_IF_SET_IN_REG_SET (new_live_at_start, 0, i,
2678 /* No registers should die. */
2679 if (REGNO_REG_SET_P (bb->global_live_at_start, i))
2680 abort ();
2681 /* Verify that the now-live register is wider than word_mode. */
2682 verify_wide_reg (i, bb->head, bb->end);
2687 /* Updates life information starting with the basic blocks set in BLOCKS.
2689 If LOCAL_ONLY, such as after splitting or peepholeing, we are only
2690 expecting local modifications to basic blocks. If we find extra
2691 registers live at the beginning of a block, then we either killed
2692 useful data, or we have a broken split that wants data not provided.
2693 If we find registers removed from live_at_start, that means we have
2694 a broken peephole that is killing a register it shouldn't.
2696 ??? This is not true in one situation -- when a pre-reload splitter
2697 generates subregs of a multi-word pseudo, current life analysis will
2698 lose the kill. So we _can_ have a pseudo go live. How irritating.
2700 BLOCK_FOR_INSN is assumed to be correct.
2702 Including PROP_REG_INFO does not properly refresh regs_ever_live
2703 unless the caller resets it to zero. */
2705 void
2706 update_life_info (blocks, extent, prop_flags)
2707 sbitmap blocks;
2708 enum update_life_extent extent;
2709 int prop_flags;
2711 regset tmp;
2712 regset_head tmp_head;
2713 int i;
2715 tmp = INITIALIZE_REG_SET (tmp_head);
2717 /* For a global update, we go through the relaxation process again. */
2718 if (extent != UPDATE_LIFE_LOCAL)
2720 calculate_global_regs_live (blocks, blocks,
2721 prop_flags & PROP_SCAN_DEAD_CODE);
2723 /* If asked, remove notes from the blocks we'll update. */
2724 if (extent == UPDATE_LIFE_GLOBAL_RM_NOTES)
2725 count_or_remove_death_notes (blocks, 1);
2728 EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
2730 basic_block bb = BASIC_BLOCK (i);
2732 COPY_REG_SET (tmp, bb->global_live_at_end);
2733 propagate_block (bb, tmp, (regset) NULL, prop_flags);
2735 if (extent == UPDATE_LIFE_LOCAL)
2736 verify_local_live_at_start (tmp, bb);
2739 FREE_REG_SET (tmp);
2741 if (prop_flags & PROP_REG_INFO)
2743 /* The only pseudos that are live at the beginning of the function
2744 are those that were not set anywhere in the function. local-alloc
2745 doesn't know how to handle these correctly, so mark them as not
2746 local to any one basic block. */
2747 EXECUTE_IF_SET_IN_REG_SET (ENTRY_BLOCK_PTR->global_live_at_end,
2748 FIRST_PSEUDO_REGISTER, i,
2749 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
2751 /* We have a problem with any pseudoreg that lives across the setjmp.
2752 ANSI says that if a user variable does not change in value between
2753 the setjmp and the longjmp, then the longjmp preserves it. This
2754 includes longjmp from a place where the pseudo appears dead.
2755 (In principle, the value still exists if it is in scope.)
2756 If the pseudo goes in a hard reg, some other value may occupy
2757 that hard reg where this pseudo is dead, thus clobbering the pseudo.
2758 Conclusion: such a pseudo must not go in a hard reg. */
2759 EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
2760 FIRST_PSEUDO_REGISTER, i,
2762 if (regno_reg_rtx[i] != 0)
2764 REG_LIVE_LENGTH (i) = -1;
2765 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
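/* Editorial usage sketch, using only calls that appear elsewhere in
   this file.  A pass that made purely local changes to one block
   might refresh its life information like so:

     sbitmap blocks = sbitmap_alloc (n_basic_blocks);
     sbitmap_zero (blocks);
     SET_BIT (blocks, bb->index);
     update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
     sbitmap_free (blocks);

   With UPDATE_LIFE_LOCAL, live_at_start must come out unchanged;
   verify_local_live_at_start above enforces exactly that.  */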
2771 /* Free the variables allocated by find_basic_blocks.
2773 KEEP_HEAD_END_P is non-zero if basic_block_info is not to be freed. */
2775 void
2776 free_basic_block_vars (keep_head_end_p)
2777 int keep_head_end_p;
2779 if (basic_block_for_insn)
2781 VARRAY_FREE (basic_block_for_insn);
2782 basic_block_for_insn = NULL;
2785 if (! keep_head_end_p)
2787 clear_edges ();
2788 VARRAY_FREE (basic_block_info);
2789 n_basic_blocks = 0;
2791 ENTRY_BLOCK_PTR->aux = NULL;
2792 ENTRY_BLOCK_PTR->global_live_at_end = NULL;
2793 EXIT_BLOCK_PTR->aux = NULL;
2794 EXIT_BLOCK_PTR->global_live_at_start = NULL;
2798 /* Return nonzero if the destination of SET equals the source. */
2799 static int
2800 set_noop_p (set)
2801 rtx set;
2803 rtx src = SET_SRC (set);
2804 rtx dst = SET_DEST (set);
2806 if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
2808 if (SUBREG_WORD (src) != SUBREG_WORD (dst))
2809 return 0;
2810 src = SUBREG_REG (src);
2811 dst = SUBREG_REG (dst);
2814 return (GET_CODE (src) == REG && GET_CODE (dst) == REG
2815 && REGNO (src) == REGNO (dst));
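/* Editorial examples: set_noop_p answers 1 for
     (set (reg 5) (reg 5))
   and for subregs selecting the same word of the same register, e.g.
     (set (subreg:SI (reg:DI 100) 0) (subreg:SI (reg:DI 100) 0))
   but 0 when the SUBREG_WORDs differ, since that genuinely moves data
   between words.  */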
2818 /* Return nonzero if an insn consists only of SETs, each of which only sets a
2819 value to itself. */
2820 static int
2821 noop_move_p (insn)
2822 rtx insn;
2824 rtx pat = PATTERN (insn);
2826 /* Insns carrying these notes are useful later on. */
2827 if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
2828 return 0;
2830 if (GET_CODE (pat) == SET && set_noop_p (pat))
2831 return 1;
2833 if (GET_CODE (pat) == PARALLEL)
2835 int i;
2836 /* If nothing but SETs of registers to themselves,
2837 this insn can also be deleted. */
2838 for (i = 0; i < XVECLEN (pat, 0); i++)
2840 rtx tem = XVECEXP (pat, 0, i);
2842 if (GET_CODE (tem) == USE
2843 || GET_CODE (tem) == CLOBBER)
2844 continue;
2846 if (GET_CODE (tem) != SET || ! set_noop_p (tem))
2847 return 0;
2850 return 1;
2852 return 0;
2855 /* Delete any insns that copy a register to itself. */
2857 static void
2858 delete_noop_moves (f)
2859 rtx f;
2861 rtx insn;
2862 for (insn = f; insn; insn = NEXT_INSN (insn))
2864 if (GET_CODE (insn) == INSN && noop_move_p (insn))
2866 PUT_CODE (insn, NOTE);
2867 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2868 NOTE_SOURCE_FILE (insn) = 0;
2873 /* Determine if the stack pointer is constant over the life of the function.
2874 Only useful before prologues have been emitted. */
2876 static void
2877 notice_stack_pointer_modification_1 (x, pat, data)
2878 rtx x;
2879 rtx pat ATTRIBUTE_UNUSED;
2880 void *data ATTRIBUTE_UNUSED;
2882 if (x == stack_pointer_rtx
2883 /* The stack pointer is only modified indirectly as the result
2884 of a push until later in flow. See the comments in rtl.texi
2885 regarding Embedded Side-Effects on Addresses. */
2886 || (GET_CODE (x) == MEM
2887 && (GET_CODE (XEXP (x, 0)) == PRE_DEC
2888 || GET_CODE (XEXP (x, 0)) == PRE_INC
2889 || GET_CODE (XEXP (x, 0)) == POST_DEC
2890 || GET_CODE (XEXP (x, 0)) == POST_INC)
2891 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx))
2892 current_function_sp_is_unchanging = 0;
2895 static void
2896 notice_stack_pointer_modification (f)
2897 rtx f;
2899 rtx insn;
2901 /* Assume that the stack pointer is unchanging if alloca hasn't
2902 been used. */
2903 current_function_sp_is_unchanging = !current_function_calls_alloca;
2904 if (! current_function_sp_is_unchanging)
2905 return;
2907 for (insn = f; insn; insn = NEXT_INSN (insn))
2909 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2911 /* Check if insn modifies the stack pointer. */
2912 note_stores (PATTERN (insn), notice_stack_pointer_modification_1,
2913 NULL);
2914 if (! current_function_sp_is_unchanging)
2915 return;
2920 /* Mark a register in SET. Hard registers in large modes get all
2921 of their component registers set as well. */
2922 static void
2923 mark_reg (reg, xset)
2924 rtx reg;
2925 void *xset;
2927 regset set = (regset) xset;
2928 int regno = REGNO (reg);
2930 if (GET_MODE (reg) == BLKmode)
2931 abort ();
2933 SET_REGNO_REG_SET (set, regno);
2934 if (regno < FIRST_PSEUDO_REGISTER)
2936 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
2937 while (--n > 0)
2938 SET_REGNO_REG_SET (set, regno + n);
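/* Editorial sketch (not part of flow.c): the multi-word idiom above on
   a toy 32-bit-word bitmap.  For a hard register in a wide mode, every
   component register is marked; NREGS plays the role of
   HARD_REGNO_NREGS.  */

static void
toy_mark_hard_reg (unsigned *set, int regno, int nregs)
{
  set[regno / 32] |= 1u << (regno % 32);
  while (--nregs > 0)
    set[(regno + nregs) / 32] |= 1u << ((regno + nregs) % 32);
}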
2942 /* Mark those regs which are needed at the end of the function as live
2943 at the end of the last basic block. */
2944 static void
2945 mark_regs_live_at_end (set)
2946 regset set;
2948 int i;
2950 /* If exiting needs the right stack value, consider the stack pointer
2951 live at the end of the function. */
2952 if ((HAVE_epilogue && reload_completed)
2953 || ! EXIT_IGNORE_STACK
2954 || (! FRAME_POINTER_REQUIRED
2955 && ! current_function_calls_alloca
2956 && flag_omit_frame_pointer)
2957 || current_function_sp_is_unchanging)
2959 SET_REGNO_REG_SET (set, STACK_POINTER_REGNUM);
2962 /* Mark the frame pointer if needed at the end of the function. If
2963 we end up eliminating it, it will be removed from the live list
2964 of each basic block by reload. */
2966 if (! reload_completed || frame_pointer_needed)
2968 SET_REGNO_REG_SET (set, FRAME_POINTER_REGNUM);
2969 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2970 /* If they are different, also mark the hard frame pointer as live. */
2971 SET_REGNO_REG_SET (set, HARD_FRAME_POINTER_REGNUM);
2972 #endif
2975 #ifdef PIC_OFFSET_TABLE_REGNUM
2976 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
2977 /* Many architectures have a GP register even without flag_pic.
2978 Assume the pic register is not in use, or will be handled by
2979 other means, if it is not fixed. */
2980 if (fixed_regs[PIC_OFFSET_TABLE_REGNUM])
2981 SET_REGNO_REG_SET (set, PIC_OFFSET_TABLE_REGNUM);
2982 #endif
2983 #endif
2985 /* Mark all global registers, and all registers used by the epilogue
2986 as being live at the end of the function since they may be
2987 referenced by our caller. */
2988 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2989 if (global_regs[i]
2990 #ifdef EPILOGUE_USES
2991 || EPILOGUE_USES (i)
2992 #endif
2994 SET_REGNO_REG_SET (set, i);
2996 /* Mark all call-saved registers that we actually used. */
2997 if (HAVE_epilogue && reload_completed)
2999 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3000 if (! call_used_regs[i] && regs_ever_live[i])
3001 SET_REGNO_REG_SET (set, i);
3004 /* Mark function return value. */
3005 diddle_return_value (mark_reg, set);
3008 /* Callback function for for_each_successor_phi. DATA is a regset.
3009 Sets the SRC_REGNO, the regno of the phi alternative for phi node
3010 INSN, in the regset. */
3012 static int
3013 set_phi_alternative_reg (insn, dest_regno, src_regno, data)
3014 rtx insn ATTRIBUTE_UNUSED;
3015 int dest_regno ATTRIBUTE_UNUSED;
3016 int src_regno;
3017 void *data;
3019 regset live = (regset) data;
3020 SET_REGNO_REG_SET (live, src_regno);
3021 return 0;
3024 /* Propagate global life info around the graph of basic blocks. Begin
3025 considering blocks with their corresponding bit set in BLOCKS_IN.
3026 BLOCKS_OUT is set for every block that was changed. */
3028 static void
3029 calculate_global_regs_live (blocks_in, blocks_out, flags)
3030 sbitmap blocks_in, blocks_out;
3031 int flags;
3033 basic_block *queue, *qhead, *qtail, *qend;
3034 regset tmp, new_live_at_end;
3035 regset_head tmp_head;
3036 regset_head new_live_at_end_head;
3037 int i;
3039 tmp = INITIALIZE_REG_SET (tmp_head);
3040 new_live_at_end = INITIALIZE_REG_SET (new_live_at_end_head);
3042 /* Create a worklist. Allocate an extra slot for ENTRY_BLOCK, and one
3043 because the `head == tail' style test for an empty queue doesn't
3044 work with a full queue. */
3045 queue = (basic_block *) xmalloc ((n_basic_blocks + 2) * sizeof (*queue));
3046 qtail = queue;
3047 qhead = qend = queue + n_basic_blocks + 2;
3049 /* Clear out the garbage that might be hanging out in bb->aux. */
3050 for (i = n_basic_blocks - 1; i >= 0; --i)
3051 BASIC_BLOCK (i)->aux = NULL;
3053 /* Queue the blocks set in the initial mask. Do this in reverse block
3054 number order so that the first round is more likely to do
3055 useful work. We use AUX non-null to flag that the block is queued. */
3056 EXECUTE_IF_SET_IN_SBITMAP (blocks_in, 0, i,
3058 basic_block bb = BASIC_BLOCK (i);
3059 *--qhead = bb;
3060 bb->aux = bb;
3063 sbitmap_zero (blocks_out);
3065 while (qhead != qtail)
3067 int rescan, changed;
3068 basic_block bb;
3069 edge e;
3071 bb = *qhead++;
3072 if (qhead == qend)
3073 qhead = queue;
3074 bb->aux = NULL;
3076 /* Begin by propagating live_at_start from the successor blocks. */
3077 CLEAR_REG_SET (new_live_at_end);
3078 for (e = bb->succ; e ; e = e->succ_next)
3080 basic_block sb = e->dest;
3081 IOR_REG_SET (new_live_at_end, sb->global_live_at_start);
3084 /* Regs used in phi nodes are not included in
3085 global_live_at_start, since they are live only along a
3086 particular edge. Set those regs that are live because of a
3087 phi node alternative corresponding to this particular block. */
3088 for_each_successor_phi (bb, &set_phi_alternative_reg,
3089 new_live_at_end);
3091 if (bb == ENTRY_BLOCK_PTR)
3093 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3094 continue;
3097 /* On our first pass through this block, we'll go ahead and continue.
3098 Recognize first pass by local_set NULL. On subsequent passes, we
3099 get to skip out early if live_at_end wouldn't have changed. */
3101 if (bb->local_set == NULL)
3103 bb->local_set = OBSTACK_ALLOC_REG_SET (function_obstack);
3104 rescan = 1;
3106 else
3108 /* If any bits were removed from live_at_end, we'll have to
3109 rescan the block. This wouldn't be necessary if we had
3110 precalculated local_live, however with PROP_SCAN_DEAD_CODE
3111 local_live is really dependent on live_at_end. */
3112 CLEAR_REG_SET (tmp);
3113 rescan = bitmap_operation (tmp, bb->global_live_at_end,
3114 new_live_at_end, BITMAP_AND_COMPL);
3116 if (! rescan)
3118 /* Find the set of changed bits. Take this opportunity
3119 to notice whether this set is empty and exit early. */
3120 CLEAR_REG_SET (tmp);
3121 changed = bitmap_operation (tmp, bb->global_live_at_end,
3122 new_live_at_end, BITMAP_XOR);
3123 if (! changed)
3124 continue;
3126 /* If any of the changed bits overlap with local_set,
3127 we'll have to rescan the block. Detect overlap by
3128 the AND with ~local_set turning off bits. */
3129 rescan = bitmap_operation (tmp, tmp, bb->local_set,
3130 BITMAP_AND_COMPL);
3134 /* Let our caller know that BB changed enough to require its
3135 death notes be updated. */
3136 SET_BIT (blocks_out, bb->index);
3138 if (! rescan)
3140 /* Add to live_at_start the set of all registers in
3141 new_live_at_end that aren't in the old live_at_end. */
3143 bitmap_operation (tmp, new_live_at_end, bb->global_live_at_end,
3144 BITMAP_AND_COMPL);
3145 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3147 changed = bitmap_operation (bb->global_live_at_start,
3148 bb->global_live_at_start,
3149 tmp, BITMAP_IOR);
3150 if (! changed)
3151 continue;
3153 else
3155 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3157 /* Rescan the block insn by insn to turn (a copy of) live_at_end
3158 into live_at_start. */
3159 propagate_block (bb, new_live_at_end, bb->local_set, flags);
3161 /* If live_at_start didn't change, no need to go farther. */
3162 if (REG_SET_EQUAL_P (bb->global_live_at_start, new_live_at_end))
3163 continue;
3165 COPY_REG_SET (bb->global_live_at_start, new_live_at_end);
3168 /* Queue all predecessors of BB so that we may re-examine
3169 their live_at_end. */
3170 for (e = bb->pred; e ; e = e->pred_next)
3172 basic_block pb = e->src;
3173 if (pb->aux == NULL)
3175 *qtail++ = pb;
3176 if (qtail == qend)
3177 qtail = queue;
3178 pb->aux = pb;
3183 FREE_REG_SET (tmp);
3184 FREE_REG_SET (new_live_at_end);
3186 EXECUTE_IF_SET_IN_SBITMAP (blocks_out, 0, i,
3188 basic_block bb = BASIC_BLOCK (i);
3189 FREE_REG_SET (bb->local_set);
3192 free (queue);
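/* Editorial model (not part of flow.c): the relaxation above, reduced
   to a self-contained toy.  Register sets are single 32-bit words,
   USE/DEF summaries stand in for rescanning the insns, and the same
   circular-queue discipline (one extra slot so head == tail means
   empty) drives the fixed point:

       live_in (b)  = use (b) | (live_out (b) & ~def (b))
       live_out (b) = union of live_in over successors of b

   Predecessors are requeued whenever a block's live_in grows.  Assumes
   n <= TOY_NBLOCKS and live_in/live_out zeroed by the caller.  */

#define TOY_NBLOCKS 8

struct toy_bb
{
  unsigned use, def;              /* upward-exposed uses; kills */
  unsigned live_in, live_out;
  int nsucc, succ[TOY_NBLOCKS];
  int npred, pred[TOY_NBLOCKS];
  int queued;
};

static void
toy_global_live (struct toy_bb *bb, int n)
{
  int queue[TOY_NBLOCKS + 1];
  int qhead = 0, qtail = 0, i, j;

  for (i = n - 1; i >= 0; --i)    /* reverse order, as above */
    {
      queue[qtail++] = i;
      bb[i].queued = 1;
    }

  while (qhead != qtail)
    {
      struct toy_bb *b;
      unsigned out, in;

      b = &bb[queue[qhead++]];
      if (qhead == TOY_NBLOCKS + 1)
        qhead = 0;
      b->queued = 0;

      out = 0;
      for (j = 0; j < b->nsucc; ++j)
        out |= bb[b->succ[j]].live_in;
      b->live_out = out;

      in = b->use | (out & ~b->def);
      if (in == b->live_in)
        continue;
      b->live_in = in;

      for (j = 0; j < b->npred; ++j)  /* requeue changed preds */
        if (! bb[b->pred[j]].queued)
          {
            bb[b->pred[j]].queued = 1;
            queue[qtail++] = b->pred[j];
            if (qtail == TOY_NBLOCKS + 1)
              qtail = 0;
          }
    }
}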
3195 /* Subroutines of life analysis. */
3197 /* Allocate the permanent data structures that represent the results
3198 of life analysis. Not static since used also for stupid life analysis. */
3200 void
3201 allocate_bb_life_data ()
3203 register int i;
3205 for (i = 0; i < n_basic_blocks; i++)
3207 basic_block bb = BASIC_BLOCK (i);
3209 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
3210 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);
3213 ENTRY_BLOCK_PTR->global_live_at_end
3214 = OBSTACK_ALLOC_REG_SET (function_obstack);
3215 EXIT_BLOCK_PTR->global_live_at_start
3216 = OBSTACK_ALLOC_REG_SET (function_obstack);
3218 regs_live_at_setjmp = OBSTACK_ALLOC_REG_SET (function_obstack);
3221 void
3222 allocate_reg_life_data ()
3224 int i;
3226 /* Recalculate the register space, in case it has grown. Old style
3227 vector oriented regsets would set regset_{size,bytes} here also. */
3228 allocate_reg_info (max_regno, FALSE, FALSE);
3230 /* Reset all the data we'll collect in propagate_block and its
3231 subroutines. */
3232 for (i = 0; i < max_regno; i++)
3234 REG_N_SETS (i) = 0;
3235 REG_N_REFS (i) = 0;
3236 REG_N_DEATHS (i) = 0;
3237 REG_N_CALLS_CROSSED (i) = 0;
3238 REG_LIVE_LENGTH (i) = 0;
3239 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
3243 /* Delete dead instructions for propagate_block. */
3245 static void
3246 propagate_block_delete_insn (bb, insn)
3247 basic_block bb;
3248 rtx insn;
3250 rtx inote = find_reg_note (insn, REG_LABEL, NULL_RTX);
3252 /* If the insn referred to a label, and that label was attached to
3253 an ADDR_VEC, it's safe to delete the ADDR_VEC. In fact, it's
3254 pretty much mandatory to delete it, because the ADDR_VEC may be
3255 referencing labels that no longer exist. */
3257 if (inote)
3259 rtx label = XEXP (inote, 0);
3260 rtx next;
3262 if (LABEL_NUSES (label) == 1
3263 && (next = next_nonnote_insn (label)) != NULL
3264 && GET_CODE (next) == JUMP_INSN
3265 && (GET_CODE (PATTERN (next)) == ADDR_VEC
3266 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
3268 rtx pat = PATTERN (next);
3269 int diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
3270 int len = XVECLEN (pat, diff_vec_p);
3271 int i;
3273 for (i = 0; i < len; i++)
3274 LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0))--;
3276 flow_delete_insn (next);
3280 if (bb->end == insn)
3281 bb->end = PREV_INSN (insn);
3282 flow_delete_insn (insn);
3285 /* Delete dead libcalls for propagate_block. Return the insn
3286 before the libcall. */
3288 static rtx
3289 propagate_block_delete_libcall (bb, insn, note)
3290 basic_block bb;
3291 rtx insn, note;
3293 rtx first = XEXP (note, 0);
3294 rtx before = PREV_INSN (first);
3296 if (insn == bb->end)
3297 bb->end = before;
3299 flow_delete_insn_chain (first, insn);
3300 return before;
3303 /* Compute the registers live at the beginning of a basic block BB from
3304 those live at the end.
3306 When called, REG_LIVE contains those live at the end. On return, it
3307 contains those live at the beginning.
3309 LOCAL_SET, if non-null, will be set with all registers killed by
3310 this basic block. */
3312 static void
3313 propagate_block (bb, live, local_set, flags)
3314 basic_block bb;
3315 regset live;
3316 regset local_set;
3317 int flags;
3319 struct propagate_block_info pbi;
3320 rtx insn, prev;
3321 regset_head new_live_head, new_dead_head;
3323 pbi.bb = bb;
3324 pbi.reg_live = live;
3325 pbi.mem_set_list = NULL_RTX;
3326 pbi.local_set = local_set;
3327 pbi.cc0_live = 0;
3328 pbi.flags = flags;
3330 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
3331 pbi.reg_next_use = (rtx *) xcalloc (max_reg_num (), sizeof (rtx));
3332 else
3333 pbi.reg_next_use = NULL;
3335 pbi.new_live = INITIALIZE_REG_SET (new_live_head);
3336 pbi.new_dead = INITIALIZE_REG_SET (new_dead_head);
3338 if (flags & PROP_REG_INFO)
3340 register int i;
3342 /* Process the regs live at the end of the block.
3343 Mark them as not local to any one basic block. */
3344 EXECUTE_IF_SET_IN_REG_SET (live, 0, i,
3346 REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL;
3350 /* Scan the block an insn at a time from end to beginning. */
3352 for (insn = bb->end; ; insn = prev)
3354 prev = PREV_INSN (insn);
3356 if (GET_CODE (insn) == NOTE)
3358 /* If this is a call to `setjmp' et al,
3359 warn if any non-volatile datum is live. */
3361 if ((flags & PROP_REG_INFO)
3362 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
3363 IOR_REG_SET (regs_live_at_setjmp, pbi.reg_live);
3366 /* Update the life-status of regs for this insn.
3367 First DEAD gets which regs are set in this insn,
3368 then LIVE gets which regs are used in this insn.
3369 Then the regs live before the insn
3370 are those live after, with DEAD regs turned off,
3371 and then LIVE regs turned on. */
3373 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
3375 register int i;
3376 rtx note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
3377 int insn_is_dead = 0;
3378 int libcall_is_dead = 0;
3380 if (flags & PROP_SCAN_DEAD_CODE)
3382 insn_is_dead = insn_dead_p (&pbi, PATTERN (insn), 0,
3383 REG_NOTES (insn));
3384 libcall_is_dead = (insn_is_dead && note != 0
3385 && libcall_dead_p (&pbi, PATTERN (insn),
3386 note, insn));
3389 /* We almost certainly don't want to delete prologue or epilogue
3390 instructions. Warn about probable compiler lossage. */
3391 if (insn_is_dead
3392 && reload_completed
3393 && (((HAVE_epilogue || HAVE_prologue)
3394 && prologue_epilogue_contains (insn))
3395 || (HAVE_sibcall_epilogue
3396 && sibcall_epilogue_contains (insn))))
3398 if (flags & PROP_KILL_DEAD_CODE)
3400 warning ("ICE: would have deleted prologue/epilogue insn");
3401 if (!inhibit_warnings)
3402 debug_rtx (insn);
3404 libcall_is_dead = insn_is_dead = 0;
3407 /* If an instruction consists of just dead store(s) on final pass,
3408 delete it. */
3409 if ((flags & PROP_KILL_DEAD_CODE) && insn_is_dead)
3411 if (libcall_is_dead)
3413 prev = propagate_block_delete_libcall (bb, insn, note);
3414 insn = NEXT_INSN (prev);
3416 else
3417 propagate_block_delete_insn (bb, insn);
3419 /* CC0 is now known to be dead. Either this insn used it,
3420 in which case it doesn't anymore, or clobbered it,
3421 so the next insn can't use it. */
3422 pbi.cc0_live = 0;
3424 goto flushed;
3427 /* See if this is an increment or decrement that can be
3428 merged into a following memory address. */
3429 #ifdef AUTO_INC_DEC
3431 register rtx x = single_set (insn);
3433 /* Does this instruction increment or decrement a register? */
3434 if (!reload_completed
3435 && (flags & PROP_AUTOINC)
3436 && x != 0
3437 && GET_CODE (SET_DEST (x)) == REG
3438 && (GET_CODE (SET_SRC (x)) == PLUS
3439 || GET_CODE (SET_SRC (x)) == MINUS)
3440 && XEXP (SET_SRC (x), 0) == SET_DEST (x)
3441 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
3442 /* Ok, look for a following memory ref we can combine with.
3443 If one is found, change the memory ref to a PRE_INC
3444 or PRE_DEC, cancel this insn, and return 1.
3445 Return 0 if nothing has been done. */
3446 && try_pre_increment_1 (&pbi, insn))
3447 goto flushed;
3449 #endif /* AUTO_INC_DEC */
3451 CLEAR_REG_SET (pbi.new_live);
3452 CLEAR_REG_SET (pbi.new_dead);
3454 /* If this is not the final pass, and this insn is copying the
3455 value of a library call and it's dead, don't scan the
3456 insns that perform the library call, so that the call's
3457 arguments are not marked live. */
3458 if (libcall_is_dead)
3460 /* Record the death of the dest reg. */
3461 mark_set_regs (&pbi, PATTERN (insn), insn);
3463 insn = XEXP (note, 0);
3464 prev = PREV_INSN (insn);
3466 else if (GET_CODE (PATTERN (insn)) == SET
3467 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
3468 && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
3469 && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx
3470 && GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == CONST_INT)
3471 /* We have an insn to pop a constant amount off the stack.
3472 (Such insns use PLUS regardless of the direction of the stack,
3473 and any insn to adjust the stack by a constant is always a pop.)
3474 These insns, if not dead stores, have no effect on life. */
3476 else
3478 /* Any regs live at the time of a call instruction
3479 must not go in a register clobbered by calls.
3480 Find all regs now live and record this for them. */
3482 if (GET_CODE (insn) == CALL_INSN
3483 && (flags & PROP_REG_INFO))
3484 EXECUTE_IF_SET_IN_REG_SET (pbi.reg_live, 0, i,
3485 { REG_N_CALLS_CROSSED (i)++; });
3487 /* Record sets. Do this even for dead instructions,
3488 since they would have killed the values if they hadn't
3489 been deleted. */
3490 mark_set_regs (&pbi, PATTERN (insn), insn);
3492 /* If an insn doesn't use CC0, it becomes dead since we
3493 assume that every insn clobbers it. So show it dead here;
3494 mark_used_regs will set it live if it is referenced. */
3495 pbi.cc0_live = 0;
3497 /* Record uses. */
3498 if (! insn_is_dead)
3499 mark_used_regs (&pbi, PATTERN (insn), NULL_RTX, insn);
3501 /* Sometimes we may have inserted something before INSN
3502 (such as a move) when we make an auto-inc. So ensure
3503 we will scan those insns. */
3504 #ifdef AUTO_INC_DEC
3505 prev = PREV_INSN (insn);
3506 #endif
3508 if (! insn_is_dead && GET_CODE (insn) == CALL_INSN)
3510 register int i;
3511 rtx note, cond;
3513 cond = NULL_RTX;
3514 if (GET_CODE (PATTERN (insn)) == COND_EXEC)
3515 cond = COND_EXEC_TEST (PATTERN (insn));
3517 /* Non-constant calls clobber memory. */
3518 if (! CONST_CALL_P (insn))
3519 free_EXPR_LIST_list (&pbi.mem_set_list);
3521 /* There may be extra registers to be clobbered. */
3522 for (note = CALL_INSN_FUNCTION_USAGE (insn);
3523 note;
3524 note = XEXP (note, 1))
3525 if (GET_CODE (XEXP (note, 0)) == CLOBBER)
3526 mark_set_1 (&pbi, XEXP (XEXP (note, 0), 0),
3527 cond, insn);
3529 /* Calls change all call-used and global registers. */
3530 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3531 if (call_used_regs[i] && ! global_regs[i]
3532 && ! fixed_regs[i])
3534 int dummy;
3535 mark_set_reg (&pbi, gen_rtx_REG (reg_raw_mode[i], i),
3536 cond, &dummy, &dummy);
3539 /* Calls use their arguments. */
3540 for (note = CALL_INSN_FUNCTION_USAGE (insn);
3541 note;
3542 note = XEXP (note, 1))
3543 if (GET_CODE (XEXP (note, 0)) == USE)
3544 mark_used_regs (&pbi, XEXP (XEXP (note, 0), 0),
3545 cond, insn);
3547 /* The stack ptr is used (honorarily) by a CALL insn. */
3548 SET_REGNO_REG_SET (pbi.new_live, STACK_POINTER_REGNUM);
3550 /* Calls may also reference any of the global registers,
3551 so they are made live. */
3552 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3553 if (global_regs[i])
3554 mark_used_reg (&pbi, gen_rtx_REG (reg_raw_mode[i], i),
3555 cond, insn);
3559 /* Update reg_live for the registers killed and used. */
3560 AND_COMPL_REG_SET (pbi.reg_live, pbi.new_dead);
3561 IOR_REG_SET (pbi.reg_live, pbi.new_live);
3563 /* On the final pass, update the count of insns in which
3564 each reg is live. */
3565 if (flags & PROP_REG_INFO)
3566 EXECUTE_IF_SET_IN_REG_SET (pbi.reg_live, 0, i,
3567 { REG_LIVE_LENGTH (i)++; });
3569 flushed:
3570 if (insn == bb->head)
3571 break;
3574 FREE_REG_SET (pbi.new_live);
3575 FREE_REG_SET (pbi.new_dead);
3576 free_EXPR_LIST_list (&pbi.mem_set_list);
3578 if (pbi.reg_next_use)
3579 free (pbi.reg_next_use);
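/* Editorial model (not part of flow.c): the per-insn transfer that the
   backward scan above applies.  Each toy insn carries USE/DEF masks;
   walking from the block end, definitions leave the live set and uses
   enter it, and a store whose definitions are all dead at that point
   is a candidate for deletion (cf. PROP_KILL_DEAD_CODE).  */

struct toy_step { unsigned use, def; };

static unsigned
toy_propagate_block (const struct toy_step *insn, int n, unsigned live_at_end)
{
  unsigned live = live_at_end;
  int i;

  for (i = n - 1; i >= 0; --i)
    {
      if (insn[i].def != 0 && (insn[i].def & live) == 0)
        continue;                 /* dead store: its uses are not marked */
      live = (live & ~insn[i].def) | insn[i].use;
    }
  return live;                    /* live at the block head */
}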
3583 /* Return 1 if X (the body of an insn, or part of it) is just dead stores
3584 (SET expressions whose destinations are registers dead after the insn).
3585 NEEDED is the regset that says which regs are alive after the insn.
3587 Unless CALL_OK is non-zero, an insn is needed if it contains a CALL.
3589 If X is the entire body of an insn, NOTES contains the reg notes
3590 pertaining to the insn. */
3592 static int
3593 insn_dead_p (pbi, x, call_ok, notes)
3594 struct propagate_block_info *pbi;
3595 rtx x;
3596 int call_ok;
3597 rtx notes ATTRIBUTE_UNUSED;
3599 enum rtx_code code = GET_CODE (x);
3601 #ifdef AUTO_INC_DEC
3602 /* If flow is invoked after reload, we must take existing AUTO_INC
3603 expressions into account. */
3604 if (reload_completed)
3606 for ( ; notes; notes = XEXP (notes, 1))
3608 if (REG_NOTE_KIND (notes) == REG_INC)
3610 int regno = REGNO (XEXP (notes, 0));
3612 /* Don't delete insns to set global regs. */
3613 if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
3614 || REGNO_REG_SET_P (pbi->reg_live, regno))
3615 return 0;
3619 #endif
3621 /* If setting something that's a reg or part of one,
3622 see if that register's altered value will be live. */
3624 if (code == SET)
3626 rtx r = SET_DEST (x);
3628 #ifdef HAVE_cc0
3629 if (GET_CODE (r) == CC0)
3630 return ! pbi->cc0_live;
3631 #endif
3633 /* A SET that is a subroutine call cannot be dead. */
3634 if (GET_CODE (SET_SRC (x)) == CALL)
3636 if (! call_ok)
3637 return 0;
3640 /* Don't eliminate loads from volatile memory or volatile asms. */
3641 else if (volatile_refs_p (SET_SRC (x)))
3642 return 0;
3644 if (GET_CODE (r) == MEM)
3646 rtx temp;
3648 if (MEM_VOLATILE_P (r))
3649 return 0;
3651 /* Walk the set of memory locations we are currently tracking
3652 and see if one is an identical match to this memory location.
3653 If so, this memory write is dead (remember, we're walking
3654 backwards from the end of the block to the start.) */
3655 temp = pbi->mem_set_list;
3656 while (temp)
3658 if (rtx_equal_p (XEXP (temp, 0), r))
3659 return 1;
3660 temp = XEXP (temp, 1);
3663 else
3665 while (GET_CODE (r) == SUBREG
3666 || GET_CODE (r) == STRICT_LOW_PART
3667 || GET_CODE (r) == ZERO_EXTRACT)
3668 r = XEXP (r, 0);
3670 if (GET_CODE (r) == REG)
3672 int regno = REGNO (r);
3674 /* Obvious. */
3675 if (REGNO_REG_SET_P (pbi->reg_live, regno))
3676 return 0;
3678 /* If this is a hard register, verify that subsequent
3679 words are not needed. */
3680 if (regno < FIRST_PSEUDO_REGISTER)
3682 int n = HARD_REGNO_NREGS (regno, GET_MODE (r));
3684 while (--n > 0)
3685 if (REGNO_REG_SET_P (pbi->reg_live, regno+n))
3686 return 0;
3689 /* Don't delete insns to set global regs. */
3690 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
3691 return 0;
3693 /* Make sure insns to set the stack pointer aren't deleted. */
3694 if (regno == STACK_POINTER_REGNUM)
3695 return 0;
3697 /* Make sure insns to set the frame pointer aren't deleted. */
3698 if (regno == FRAME_POINTER_REGNUM
3699 && (! reload_completed || frame_pointer_needed))
3700 return 0;
3701 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3702 if (regno == HARD_FRAME_POINTER_REGNUM
3703 && (! reload_completed || frame_pointer_needed))
3704 return 0;
3705 #endif
3707 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3708 /* Make sure insns to set arg pointer are never deleted
3709 (if the arg pointer isn't fixed, there will be a USE
3710 for it, so we can treat it normally). */
3711 if (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
3712 return 0;
3713 #endif
3715 /* Otherwise, the set is dead. */
3716 return 1;
3721 /* If performing several activities, insn is dead if each activity
3722 is individually dead. Also, CLOBBERs and USEs can be ignored; a
3723 CLOBBER or USE that's inside a PARALLEL doesn't make the insn
3724 worth keeping. */
3725 else if (code == PARALLEL)
3727 int i = XVECLEN (x, 0);
3729 for (i--; i >= 0; i--)
3730 if (GET_CODE (XVECEXP (x, 0, i)) != CLOBBER
3731 && GET_CODE (XVECEXP (x, 0, i)) != USE
3732 && ! insn_dead_p (pbi, XVECEXP (x, 0, i), call_ok, NULL_RTX))
3733 return 0;
3735 return 1;
3738 /* A CLOBBER of a pseudo-register that is dead serves no purpose. That
3739 is not necessarily true for hard registers. */
3740 else if (code == CLOBBER && GET_CODE (XEXP (x, 0)) == REG
3741 && REGNO (XEXP (x, 0)) >= FIRST_PSEUDO_REGISTER
3742 && ! REGNO_REG_SET_P (pbi->reg_live, REGNO (XEXP (x, 0))))
3743 return 1;
3745 /* We do not check other CLOBBERs or USEs here. An insn consisting of just
3746 a CLOBBER or just a USE should not be deleted. */
3747 return 0;
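/* Editorial example: with pseudo 100 absent from pbi->reg_live, a
   pattern (set (reg 100) (plus (reg 101) (const_int 4))) is judged
   dead; a MEM destination is dead only when an identical store is
   already on mem_set_list; a SET whose source is volatile is never
   dead, and one whose source is a CALL is kept unless CALL_OK.  */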
3750 /* If X is the pattern of the last insn in a libcall, and assuming X is dead,
3751 return 1 if the entire library call is dead.
3752 This is true if X copies a register (hard or pseudo)
3753 and if the hard return reg of the call insn is dead.
3754 (The caller should have tested the destination of X already for death.)
3756 If this insn doesn't just copy a register, then we don't
3757 have an ordinary libcall. In that case, cse could not have
3758 managed to substitute the source for the dest later on,
3759 so we can assume the libcall is dead.
3761 NEEDED is the bit vector of pseudoregs live before this insn.
3762 NOTE is the REG_RETVAL note of the insn. INSN is the insn itself. */
3764 static int
3765 libcall_dead_p (pbi, x, note, insn)
3766 struct propagate_block_info *pbi;
3767 rtx x;
3768 rtx note;
3769 rtx insn;
3771 register RTX_CODE code = GET_CODE (x);
3773 if (code == SET)
3775 register rtx r = SET_SRC (x);
3776 if (GET_CODE (r) == REG)
3778 rtx call = XEXP (note, 0);
3779 rtx call_pat;
3780 register int i;
3782 /* Find the call insn. */
3783 while (call != insn && GET_CODE (call) != CALL_INSN)
3784 call = NEXT_INSN (call);
3786 /* If there is none, do nothing special,
3787 since ordinary death handling can understand these insns. */
3788 if (call == insn)
3789 return 0;
3791 /* See if the hard reg holding the value is dead.
3792 If this is a PARALLEL, find the call within it. */
3793 call_pat = PATTERN (call);
3794 if (GET_CODE (call_pat) == PARALLEL)
3796 for (i = XVECLEN (call_pat, 0) - 1; i >= 0; i--)
3797 if (GET_CODE (XVECEXP (call_pat, 0, i)) == SET
3798 && GET_CODE (SET_SRC (XVECEXP (call_pat, 0, i))) == CALL)
3799 break;
3801 /* This may be a library call that is returning a value
3802 via invisible pointer. Do nothing special, since
3803 ordinary death handling can understand these insns. */
3804 if (i < 0)
3805 return 0;
3807 call_pat = XVECEXP (call_pat, 0, i);
3810 return insn_dead_p (pbi, call_pat, 1, REG_NOTES (call));
3813 return 1;
3816 /* Return 1 if register REGNO was used before it was set, i.e. if it is
3817 live at function entry. Don't count global register variables, variables
3818 in registers that can be used for function arg passing, or variables in
3819 fixed hard registers. */
3821 int
3822 regno_uninitialized (regno)
3823 int regno;
3825 if (n_basic_blocks == 0
3826 || (regno < FIRST_PSEUDO_REGISTER
3827 && (global_regs[regno]
3828 || fixed_regs[regno]
3829 || FUNCTION_ARG_REGNO_P (regno))))
3830 return 0;
3832 return REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno);
3835 /* 1 if register REGNO was alive at a place where `setjmp' was called
3836 and was set more than once or is an argument.
3837 Such regs may be clobbered by `longjmp'. */
3839 int
3840 regno_clobbered_at_setjmp (regno)
3841 int regno;
3843 if (n_basic_blocks == 0)
3844 return 0;
3846 return ((REG_N_SETS (regno) > 1
3847 || REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno))
3848 && REGNO_REG_SET_P (regs_live_at_setjmp, regno));
3851 /* INSN references memory, possibly using autoincrement addressing modes.
3852 Find any entries on the mem_set_list that need to be invalidated due
3853 to an address change. */
3854 static void
3855 invalidate_mems_from_autoinc (pbi, insn)
3856 struct propagate_block_info *pbi;
3857 rtx insn;
3859 rtx note;
3860 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
3862 if (REG_NOTE_KIND (note) == REG_INC)
3864 rtx temp = pbi->mem_set_list;
3865 rtx prev = NULL_RTX;
3866 rtx next;
3868 while (temp)
3870 next = XEXP (temp, 1);
3871 if (reg_overlap_mentioned_p (XEXP (note, 0), XEXP (temp, 0)))
3873 /* Splice temp out of list. */
3874 if (prev)
3875 XEXP (prev, 1) = next;
3876 else
3877 pbi->mem_set_list = next;
3878 free_EXPR_LIST_node (temp);
3880 else
3881 prev = temp;
3882 temp = next;
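/* Editorial sketch (not part of flow.c): the list surgery above is the
   usual trailing-pointer splice on a singly linked list; mark_set_1
   below repeats the same idiom for its mem_set_list scan.  Toy node
   type; the spot where free_EXPR_LIST_node would run is marked.  */

struct toy_list { int key; struct toy_list *next; };

static void
toy_remove_matching (struct toy_list **head, int key)
{
  struct toy_list *temp = *head, *prev = 0, *next;

  while (temp)
    {
      next = temp->next;
      if (temp->key == key)
        {
          if (prev)
            prev->next = next;
          else
            *head = next;
          /* release temp here (free_EXPR_LIST_node analog) */
        }
      else
        prev = temp;
      temp = next;
    }
}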
3888 /* Process the registers that are set within X. Their bits are set to
3889 1 in pbi->new_dead, because they are dead prior to this insn.
3891 If INSN is nonzero, it is the insn being processed.
3893 FLAGS is the set of operations to perform. */
3895 static void
3896 mark_set_regs (pbi, x, insn)
3897 struct propagate_block_info *pbi;
3898 rtx x, insn;
3900 rtx cond = NULL_RTX;
3902 retry:
3903 switch (GET_CODE (x))
3905 case SET:
3906 case CLOBBER:
3907 mark_set_1 (pbi, SET_DEST (x), cond, insn);
3908 return;
3910 case COND_EXEC:
3911 cond = COND_EXEC_TEST (x);
3912 x = COND_EXEC_CODE (x);
3913 goto retry;
3915 case PARALLEL:
3917 register int i;
3918 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
3920 rtx sub = XVECEXP (x, 0, i);
3921 switch (GET_CODE (sub))
3923 case COND_EXEC:
3924 if (cond != NULL_RTX)
3925 abort ();
3927 cond = COND_EXEC_TEST (sub);
3928 sub = COND_EXEC_CODE (sub);
3929 if (GET_CODE (sub) != SET && GET_CODE (sub) != CLOBBER)
3930 break;
3931 /* FALLTHRU */
3933 case SET:
3934 case CLOBBER:
3935 mark_set_1 (pbi, SET_DEST (sub), cond, insn);
3936 break;
3938 default:
3939 break;
3942 break;
3945 default:
3946 break;
3950 /* Process a single SET rtx, X. */
3952 static void
3953 mark_set_1 (pbi, reg, cond, insn)
3954 struct propagate_block_info *pbi;
3955 rtx reg, cond, insn;
3957 register int regno = -1;
3958 int flags = pbi->flags;
3960 /* Some targets place small structures in registers for
3961 return values of functions. We have to detect this
3962 case specially here to get correct flow information. */
3963 if (GET_CODE (reg) == PARALLEL
3964 && GET_MODE (reg) == BLKmode)
3966 register int i;
3968 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
3969 mark_set_1 (pbi, XVECEXP (reg, 0, i), cond, insn);
3970 return;
3973 /* Modifying just one hardware register of a multi-reg value or just a
3974 byte field of a register does not mean the value from before this insn
3975 is now dead. But it does mean liveness of that register at the end of
3976 the block is significant.
3978 Within mark_set_1, however, we treat it as if the register is indeed
3979 modified. mark_used_regs will, however, also treat this register as
3980 being used. Thus, we treat these insns as setting a new value for the
3981 register as a function of its old value. This causes LOG_LINKS to be
3982 made appropriately and this will help combine.
3984 ??? This is all done incorrectly. We should not be setting bits in
3985 new_dead for these registers, since, as we just explained, they are
3986 not dead. We should be setting bits in local_set, and updating
3987 LOG_LINKS, but that is different. */
3989 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
3990 || GET_CODE (reg) == SIGN_EXTRACT
3991 || GET_CODE (reg) == STRICT_LOW_PART)
3992 reg = XEXP (reg, 0);
3994 /* If this set is a MEM, then it kills any aliased writes.
3995 If this set is a REG, then it kills any MEMs which use the reg. */
3996 if (flags & PROP_SCAN_DEAD_CODE)
3998 if (GET_CODE (reg) == MEM || GET_CODE (reg) == REG)
4000 rtx temp = pbi->mem_set_list;
4001 rtx prev = NULL_RTX;
4002 rtx next;
4004 while (temp)
4006 next = XEXP (temp, 1);
4007 if ((GET_CODE (reg) == MEM
4008 && output_dependence (XEXP (temp, 0), reg))
4009 || (GET_CODE (reg) == REG
4010 && reg_overlap_mentioned_p (reg, XEXP (temp, 0))))
4012 /* Splice this entry out of the list. */
4013 if (prev)
4014 XEXP (prev, 1) = next;
4015 else
4016 pbi->mem_set_list = next;
4017 free_EXPR_LIST_node (temp);
4019 else
4020 prev = temp;
4021 temp = next;
4025 /* If the memory reference had embedded side effects (autoincrement
4026 address modes), then we may need to kill some entries on the
4027 memory set list. */
4028 if (insn && GET_CODE (reg) == MEM)
4029 invalidate_mems_from_autoinc (pbi, insn);
4031 if (GET_CODE (reg) == MEM && ! side_effects_p (reg)
4032 /* We do not know the size of a BLKmode store, so we do not track
4033 them for redundant store elimination. */
4034 && GET_MODE (reg) != BLKmode
4035 /* There are no REG_INC notes for SP, so we can't assume we'll see
4036 everything that invalidates it. To be safe, don't eliminate any
4037 stores through SP; none of them should be redundant anyway. */
4038 && ! reg_mentioned_p (stack_pointer_rtx, reg))
4039 pbi->mem_set_list = alloc_EXPR_LIST (0, reg, pbi->mem_set_list);
4042 if (GET_CODE (reg) == REG
4043 && (regno = REGNO (reg),
4044 ! (regno == FRAME_POINTER_REGNUM
4045 && (! reload_completed || frame_pointer_needed)))
4046 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4047 && ! (regno == HARD_FRAME_POINTER_REGNUM
4048 && (! reload_completed || frame_pointer_needed))
4049 #endif
4050 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4051 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
4052 #endif
4055 int some_was_live, some_was_dead;
4057 /* Perform the pbi data structure update. */
4058 if (! mark_set_reg (pbi, reg, cond, &some_was_live, &some_was_dead))
4059 return;
4061 /* Additional data to record if this is the final pass. */
4062 if (flags & (PROP_LOG_LINKS | PROP_REG_INFO
4063 | PROP_DEATH_NOTES | PROP_AUTOINC))
4065 register rtx y;
4066 register int blocknum = pbi->bb->index;
4068 y = NULL_RTX;
4069 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4070 y = pbi->reg_next_use[regno];
4072 /* If this is a hard reg, record that this function uses the reg. */
4074 if (regno < FIRST_PSEUDO_REGISTER)
4076 register int i;
4077 int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (reg));
4079 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4080 for (i = regno; i < endregno; i++)
4082 /* The next use is no longer "next", since a store
4083 intervenes. */
4084 pbi->reg_next_use[i] = 0;
4087 if (flags & PROP_REG_INFO)
4088 for (i = regno; i < endregno; i++)
4090 regs_ever_live[i] = 1;
4091 REG_N_SETS (i)++;
4094 else
4096 /* The next use is no longer "next", since a store
4097 intervenes. */
4098 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4099 pbi->reg_next_use[regno] = 0;
4101 /* Keep track of which basic blocks each reg appears in. */
4103 if (flags & PROP_REG_INFO)
4105 if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
4106 REG_BASIC_BLOCK (regno) = blocknum;
4107 else if (REG_BASIC_BLOCK (regno) != blocknum)
4108 REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
4110 /* Count (weighted) references, stores, etc. This counts a
4111 register twice if it is modified, but that is correct. */
4112 REG_N_SETS (regno)++;
4113 REG_N_REFS (regno) += pbi->bb->loop_depth + 1;
4115 /* The insns where a reg is live are normally counted
4116 elsewhere, but we want the count to include the insn
4117 where the reg is set, and the normal counting mechanism
4118 would not count it. */
4119 REG_LIVE_LENGTH (regno)++;
4123 if (! some_was_dead)
4125 if (flags & PROP_LOG_LINKS)
4127 /* Make a logical link from the next following insn
4128 that uses this register, back to this insn.
4129 The following insns have already been processed.
4131 We don't build a LOG_LINK for hard registers contained
4132 in ASM_OPERANDs. If these registers get replaced,
4133 we might wind up changing the semantics of the insn,
4134 even if reload can make what appear to be valid
4135 assignments later. */
4136 if (y && (BLOCK_NUM (y) == blocknum)
4137 && (regno >= FIRST_PSEUDO_REGISTER
4138 || asm_noperands (PATTERN (y)) < 0))
4139 LOG_LINKS (y) = alloc_INSN_LIST (insn, LOG_LINKS (y));
4142 else if (! some_was_live)
4144 if (flags & PROP_REG_INFO)
4145 REG_N_DEATHS (REGNO (reg))++;
4147 if (flags & PROP_DEATH_NOTES)
4149 /* Note that dead stores have already been deleted
4150 when possible. If we get here, we have found a
4151 dead store that cannot be eliminated (because the
4152 same insn does something useful). Indicate this
4153 by marking the reg being set as dying here. */
4154 REG_NOTES (insn)
4155 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
4158 else
4160 if (flags & PROP_DEATH_NOTES)
4162 /* This is a case where we have a multi-word hard register
4163 and some, but not all, of the words of the register are
4164 needed in subsequent insns. Write REG_UNUSED notes
4165 for those parts that were not needed. This case should
4166 be rare. */
4168 int i;
4170 for (i = HARD_REGNO_NREGS (regno, GET_MODE (reg)) - 1;
4171 i >= 0; i--)
4172 if (! REGNO_REG_SET_P (pbi->reg_live, regno + i))
4173 REG_NOTES (insn)
4174 = (alloc_EXPR_LIST
4175 (REG_UNUSED,
4176 gen_rtx_REG (reg_raw_mode[regno + i], regno + i),
4177 REG_NOTES (insn)));
4182 else if (GET_CODE (reg) == REG)
4184 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4185 pbi->reg_next_use[regno] = 0;
4188 /* If this is the last pass and this is a SCRATCH, show it will be dying
4189 here and count it. */
4190 else if (GET_CODE (reg) == SCRATCH)
4192 if (flags & PROP_DEATH_NOTES)
4193 REG_NOTES (insn)
4194 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
4198 /* Update data structures for a (possibly conditional) store into REG.
4199 Return true if REG is now unconditionally dead. */
4201 static int
4202 mark_set_reg (pbi, reg, cond, p_some_was_live, p_some_was_dead)
4203 struct propagate_block_info *pbi;
4204 rtx reg;
4205 rtx cond ATTRIBUTE_UNUSED;
4206 int *p_some_was_live, *p_some_was_dead;
4208 int regno = REGNO (reg);
4209 int some_was_live = REGNO_REG_SET_P (pbi->reg_live, regno);
4210 int some_was_dead = ! some_was_live;
4212 /* Mark it as a significant register for this basic block. */
4213 if (pbi->local_set)
4214 SET_REGNO_REG_SET (pbi->local_set, regno);
4216 /* A hard reg in a wide mode may really be multiple registers.
4217 If so, mark all of them just like the first. */
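/* E.g. on a hypothetical 32-bit target, a DImode value in hard register
   0 also occupies hard register 1 -- HARD_REGNO_NREGS (0, DImode) == 2
   -- so liveness must be updated for both register numbers.  */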
4218 if (regno < FIRST_PSEUDO_REGISTER)
4220 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
4221 while (--n > 0)
4223 int regno_n = regno + n;
4224 int needed_regno = REGNO_REG_SET_P (pbi->reg_live, regno_n);
4225 if (pbi->local_set)
4226 SET_REGNO_REG_SET (pbi->local_set, regno_n);
4228 some_was_live |= needed_regno;
4229 some_was_dead |= ! needed_regno;
4233 *p_some_was_live = some_was_live;
4234 *p_some_was_dead = some_was_dead;
4236 /* The stack pointer is never dead. Well, not strictly true, but it's
4237 very difficult to tell from here. Hopefully combine_stack_adjustments
4238 will fix up the most egregious errors. */
4239 if (regno == STACK_POINTER_REGNUM)
4240 return 0;
4242 /* Mark it as dead before this insn. */
4243 SET_REGNO_REG_SET (pbi->new_dead, regno);
4244 if (regno < FIRST_PSEUDO_REGISTER)
4246 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
4247 while (--n > 0)
4248 SET_REGNO_REG_SET (pbi->new_dead, regno + n);
4251 /* Unconditionally dead. */
4252 return 1;
4255 #ifdef AUTO_INC_DEC
4257 /* X is a MEM found in INSN. See if we can convert it into an auto-increment
4258 reference. */
4260 static void
4261 find_auto_inc (pbi, x, insn)
4262 struct propagate_block_info *pbi;
4263 rtx x;
4264 rtx insn;
4266 rtx addr = XEXP (x, 0);
4267 HOST_WIDE_INT offset = 0;
4268 rtx set;
4270 /* Here we detect use of an index register which might be good for
4271 postincrement, postdecrement, preincrement, or predecrement. */
4273 if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
4274 offset = INTVAL (XEXP (addr, 1)), addr = XEXP (addr, 0);
4276 if (GET_CODE (addr) == REG)
4278 register rtx y;
4279 register int size = GET_MODE_SIZE (GET_MODE (x));
4280 rtx use;
4281 rtx incr;
4282 int regno = REGNO (addr);
4284 /* Is the next use an increment that might make auto-increment? */
4285 if ((incr = pbi->reg_next_use[regno]) != 0
4286 && (set = single_set (incr)) != 0
4287 && GET_CODE (set) == SET
4288 && BLOCK_NUM (incr) == BLOCK_NUM (insn)
4289 /* Can't add side effects to jumps; if reg is spilled and
4290 reloaded, there's no way to store back the altered value. */
4291 && GET_CODE (insn) != JUMP_INSN
4292 && (y = SET_SRC (set), GET_CODE (y) == PLUS)
4293 && XEXP (y, 0) == addr
4294 && GET_CODE (XEXP (y, 1)) == CONST_INT
4295 && ((HAVE_POST_INCREMENT
4296 && (INTVAL (XEXP (y, 1)) == size && offset == 0))
4297 || (HAVE_POST_DECREMENT
4298 && (INTVAL (XEXP (y, 1)) == - size && offset == 0))
4299 || (HAVE_PRE_INCREMENT
4300 && (INTVAL (XEXP (y, 1)) == size && offset == size))
4301 || (HAVE_PRE_DECREMENT
4302 && (INTVAL (XEXP (y, 1)) == - size && offset == - size)))
4303 /* Make sure this reg appears only once in this insn. */
4304 && (use = find_use_as_address (PATTERN (insn), addr, offset),
4305 use != 0 && use != (rtx) 1))
4307 rtx q = SET_DEST (set);
4308 enum rtx_code inc_code = (INTVAL (XEXP (y, 1)) == size
4309 ? (offset ? PRE_INC : POST_INC)
4310 : (offset ? PRE_DEC : POST_DEC));
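/* The tests above and this selection of INC_CODE reduce to a small
   table, keyed on the increment INTVAL (XEXP (y, 1)) and the OFFSET
   folded out of the original address:

       increment   offset   addressing mode
        +size        0        POST_INC
        -size        0        POST_DEC
        +size       +size     PRE_INC
        -size       -size     PRE_DEC  */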
4312 if (dead_or_set_p (incr, addr))
4314 /* This is the simple case. Try to make the auto-inc. If
4315 we can't, we are done. Otherwise, we will do any
4316 needed updates below. */
4317 if (! validate_change (insn, &XEXP (x, 0),
4318 gen_rtx_fmt_e (inc_code, Pmode, addr),
4319 0))
4320 return;
4322 else if (GET_CODE (q) == REG
4323 /* PREV_INSN used here to check the semi-open interval
4324 [insn,incr). */
4325 && ! reg_used_between_p (q, PREV_INSN (insn), incr)
4326 /* We must also check for sets of q as q may be
4327 a call clobbered hard register and there may
4328 be a call between PREV_INSN (insn) and incr. */
4329 && ! reg_set_between_p (q, PREV_INSN (insn), incr))
4331 /* We have *p followed sometime later by q = p+size.
4332 Both p and q must be live afterward,
4333 and q is not used between INSN and its assignment.
4334 Change it to q = p, ...*q..., q = q+size.
4335 Then fall into the usual case. */
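/* Concretely, with size == 4 and hypothetical pseudos p and q:

       before:  ... (mem:SI (reg p)) ...
                (set (reg q) (plus (reg p) (const_int 4)))

       after:   (set (reg q) (reg p))              ;; emitted before INSN
                ... (mem:SI (post_inc (reg q))) ...

   The old add is then rewritten into a q = q copy and deleted as a
   no-op by the code at the end of this function.  */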
4336 rtx insns, temp;
4337 basic_block bb;
4339 start_sequence ();
4340 emit_move_insn (q, addr);
4341 insns = get_insns ();
4342 end_sequence ();
4344 bb = BLOCK_FOR_INSN (insn);
4345 for (temp = insns; temp; temp = NEXT_INSN (temp))
4346 set_block_for_insn (temp, bb);
4348 /* If we can't make the auto-inc, or can't make the
4349 replacement into Y, exit. There's no point in making
4350 the change below if we can't do the auto-inc and doing
4351 so is not correct in the pre-inc case. */
4353 validate_change (insn, &XEXP (x, 0),
4354 gen_rtx_fmt_e (inc_code, Pmode, q),
4355 1);
4356 validate_change (incr, &XEXP (y, 0), q, 1);
4357 if (! apply_change_group ())
4358 return;
4360 /* We now know we'll be doing this change, so emit the
4361 new insn(s) and do the updates. */
4362 emit_insns_before (insns, insn);
4364 if (BLOCK_FOR_INSN (insn)->head == insn)
4365 BLOCK_FOR_INSN (insn)->head = insns;
4367 /* INCR will become a NOTE and INSN won't contain a
4368 use of ADDR. If a use of ADDR was just placed in
4369 the insn before INSN, make that the next use.
4370 Otherwise, invalidate it. */
4371 if (GET_CODE (PREV_INSN (insn)) == INSN
4372 && GET_CODE (PATTERN (PREV_INSN (insn))) == SET
4373 && SET_SRC (PATTERN (PREV_INSN (insn))) == addr)
4374 pbi->reg_next_use[regno] = PREV_INSN (insn);
4375 else
4376 pbi->reg_next_use[regno] = 0;
4378 addr = q;
4379 regno = REGNO (q);
4381 /* REGNO is now used in INCR which is below INSN, but it
4382 previously wasn't live here. If we don't mark it as
4383 live, we'll put a REG_DEAD note for it on this insn,
4384 which is incorrect. */
4385 SET_REGNO_REG_SET (pbi->reg_live, regno);
4387 /* If there are any calls between INSN and INCR, show
4388 that REGNO now crosses them. */
4389 for (temp = insn; temp != incr; temp = NEXT_INSN (temp))
4390 if (GET_CODE (temp) == CALL_INSN)
4391 REG_N_CALLS_CROSSED (regno)++;
4393 else
4394 return;
4396 /* If we haven't returned, it means we were able to make the
4397 auto-inc, so update the status. First, record that this insn
4398 has an implicit side effect. */
4400 REG_NOTES (insn)
4401 = alloc_EXPR_LIST (REG_INC, addr, REG_NOTES (insn));
4403 /* Modify the old increment-insn to simply copy
4404 the already-incremented value of our register. */
4405 if (! validate_change (incr, &SET_SRC (set), addr, 0))
4406 abort ();
4408 /* If that makes it a no-op (copying the register into itself) delete
4409 it so it won't appear to be a "use" and a "set" of this
4410 register. */
4411 if (SET_DEST (set) == addr)
4413 PUT_CODE (incr, NOTE);
4414 NOTE_LINE_NUMBER (incr) = NOTE_INSN_DELETED;
4415 NOTE_SOURCE_FILE (incr) = 0;
4418 if (regno >= FIRST_PSEUDO_REGISTER)
4420 /* Count an extra reference to the reg. When a reg is
4421 incremented, spilling it is worse, so we want to make
4422 that less likely. */
4423 REG_N_REFS (regno) += pbi->bb->loop_depth + 1;
4425 /* Count the increment as a setting of the register,
4426 even though it isn't a SET in rtl. */
4427 REG_N_SETS (regno)++;
4432 #endif /* AUTO_INC_DEC */
4434 static void
4435 mark_used_reg (pbi, reg, cond, insn)
4436 struct propagate_block_info *pbi;
4437 rtx reg;
4438 rtx cond ATTRIBUTE_UNUSED;
4439 rtx insn;
4441 int regno = REGNO (reg);
4442 int some_was_live = REGNO_REG_SET_P (pbi->reg_live, regno);
4443 int some_was_dead = ! some_was_live;
4445 SET_REGNO_REG_SET (pbi->new_live, regno);
4447 /* A hard reg in a wide mode may really be multiple registers.
4448 If so, mark all of them just like the first. */
4449 if (regno < FIRST_PSEUDO_REGISTER)
4451 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
4452 while (--n > 0)
4454 int regno_n = regno + n;
4455 int needed_regno = REGNO_REG_SET_P (pbi->reg_live, regno_n);
4457 SET_REGNO_REG_SET (pbi->new_live, regno_n);
4458 some_was_live |= needed_regno;
4459 some_was_dead |= ! needed_regno;
4463 if (pbi->flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4465 /* Record where each reg is used, so when the reg is set we know
4466 the next insn that uses it. */
4467 pbi->reg_next_use[regno] = insn;
4470 if (pbi->flags & PROP_REG_INFO)
4472 if (regno < FIRST_PSEUDO_REGISTER)
4474 /* If this is a register we are going to try to eliminate,
4475 don't mark it live here. If we are successful in
4476 eliminating it, it need not be live unless it is used for
4477 pseudos, in which case it will have been set live when it
4478 was allocated to the pseudos. If the register will not
4479 be eliminated, reload will set it live at that point.
4481 Otherwise, record that this function uses this register. */
4483 if (! TEST_HARD_REG_BIT (elim_reg_set, regno))
4485 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
4486 if (n == 0)
4487 n = 1;
4488 do
4489 regs_ever_live[regno + --n] = 1;
4490 while (n > 0);
4493 else
4495 /* Keep track of which basic block each reg appears in. */
4497 register int blocknum = pbi->bb->index;
4498 if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
4499 REG_BASIC_BLOCK (regno) = blocknum;
4500 else if (REG_BASIC_BLOCK (regno) != blocknum)
4501 REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
4503 /* Count (weighted) number of uses of each reg. */
4504 REG_N_REFS (regno) += pbi->bb->loop_depth + 1;
4508 /* Record and count the insns in which a reg dies. If it is used in
4509 this insn and was dead below the insn then it dies in this insn.
4510 If it was set in this insn, we do not make a REG_DEAD note;
4511 likewise if we already made such a note.
4513 ??? This could be done better. In new_dead we have a record of
4514 which registers are set or clobbered this insn (which in itself is
4515 slightly incorrect, see the commentary near strict_low_part in
4516 mark_set_1), which should be the set of registers that we do not
4517 wish to create death notes for under the above rule. Note that
4518 we have not yet processed the call-clobbered/call-used registers,
4519 and they do not quite follow the above rule, since we do want death
4520 notes for call-clobbered register arguments. Which begs the whole
4521 question of whether we should in fact have death notes for registers
4522 used and clobbered (but not set) in the same insn. The only useful
4523 thing we ought to be getting from dead_or_set_p is detection of
4524 duplicate death notes. */
4526 if ((pbi->flags & PROP_DEATH_NOTES)
4527 && some_was_dead
4528 && ! dead_or_set_p (insn, reg))
4530 int n;
4532 /* Check for the case where the register dying partially
4533 overlaps the register set by this insn. */
4534 if (regno < FIRST_PSEUDO_REGISTER
4535 && HARD_REGNO_NREGS (regno, GET_MODE (reg)) > 1)
4537 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
4538 while (--n >= 0)
4539 some_was_live |= dead_or_set_regno_p (insn, regno + n);
4542 /* If none of the words in X is needed, make a REG_DEAD note.
4543 Otherwise, we must make partial REG_DEAD notes. */
4544 if (! some_was_live)
4546 REG_NOTES (insn)
4547 = alloc_EXPR_LIST (REG_DEAD, reg, REG_NOTES (insn));
4548 REG_N_DEATHS (regno)++;
4550 else
4552 /* Don't make a REG_DEAD note for a part of a register
4553 that is set in the insn. */
4555 n = regno + HARD_REGNO_NREGS (regno, GET_MODE (reg)) - 1;
4556 for (; n >= regno; n--)
4557 if (!REGNO_REG_SET_P (pbi->reg_live, n)
4558 && ! dead_or_set_regno_p (insn, n))
4559 REG_NOTES (insn)
4560 = alloc_EXPR_LIST (REG_DEAD,
4561 gen_rtx_REG (reg_raw_mode[n], n),
4562 REG_NOTES (insn));
4567 /* Scan expression X and store a 1-bit in NEW_LIVE for each reg it uses.
4568 This is done assuming the registers needed from X are those that
4569 have 1-bits in PBI->REG_LIVE.
4571 INSN is the containing instruction. If INSN is dead, this function
4572 is not called. */
4574 static void
4575 mark_used_regs (pbi, x, cond, insn)
4576 struct propagate_block_info *pbi;
4577 rtx x, cond, insn;
4579 register RTX_CODE code;
4580 register int regno;
4581 int flags = pbi->flags;
4583 retry:
4584 code = GET_CODE (x);
4585 switch (code)
4587 case LABEL_REF:
4588 case SYMBOL_REF:
4589 case CONST_INT:
4590 case CONST:
4591 case CONST_DOUBLE:
4592 case PC:
4593 case ADDR_VEC:
4594 case ADDR_DIFF_VEC:
4595 return;
4597 #ifdef HAVE_cc0
4598 case CC0:
4599 pbi->cc0_live = 1;
4600 return;
4601 #endif
4603 case CLOBBER:
4604 /* If we are clobbering a MEM, mark any registers inside the address
4605 as being used. */
4606 if (GET_CODE (XEXP (x, 0)) == MEM)
4607 mark_used_regs (pbi, XEXP (XEXP (x, 0), 0), cond, insn);
4608 return;
4610 case MEM:
4611 /* Don't bother watching stores to mems if this is not the
4612 final pass. We'll not be deleting dead stores this round. */
4613 if (flags & PROP_SCAN_DEAD_CODE)
4615 /* Invalidate the data for the last MEM stored, but only if MEM is
4616 something that can be stored into. */
4617 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
4618 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
4619 ; /* needn't clear the memory set list */
4620 else
4622 rtx temp = pbi->mem_set_list;
4623 rtx prev = NULL_RTX;
4624 rtx next;
4626 while (temp)
4628 next = XEXP (temp, 1);
4629 if (anti_dependence (XEXP (temp, 0), x))
4631 /* Splice temp out of the list. */
4632 if (prev)
4633 XEXP (prev, 1) = next;
4634 else
4635 pbi->mem_set_list = next;
4636 free_EXPR_LIST_node (temp);
4638 else
4639 prev = temp;
4640 temp = next;
4644 /* If the memory reference had embedded side effects (autoincrement
4645 address modes), then we may need to kill some entries on the
4646 memory set list. */
4647 if (insn)
4648 invalidate_mems_from_autoinc (pbi, insn);
4651 #ifdef AUTO_INC_DEC
4652 if (flags & PROP_AUTOINC)
4653 find_auto_inc (pbi, x, insn);
4654 #endif
4655 break;
4657 case SUBREG:
4658 if (GET_CODE (SUBREG_REG (x)) == REG
4659 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER
4660 && (GET_MODE_SIZE (GET_MODE (x))
4661 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
4662 REG_CHANGES_SIZE (REGNO (SUBREG_REG (x))) = 1;
4664 /* While we're here, optimize this case. */
4665 x = SUBREG_REG (x);
4666 if (GET_CODE (x) != REG)
4667 goto retry;
4668 /* FALLTHRU */
4670 case REG:
4671 /* A register used rather than set => mark it as needed. */
4672 mark_used_reg (pbi, x, cond, insn);
4673 return;
4675 case SET:
4677 register rtx testreg = SET_DEST (x);
4678 int mark_dest = 0;
4680 /* If storing into MEM, don't show it as being used. But do
4681 show the address as being used. */
4682 if (GET_CODE (testreg) == MEM)
4684 #ifdef AUTO_INC_DEC
4685 if (flags & PROP_AUTOINC)
4686 find_auto_inc (pbi, testreg, insn);
4687 #endif
4688 mark_used_regs (pbi, XEXP (testreg, 0), cond, insn);
4689 mark_used_regs (pbi, SET_SRC (x), cond, insn);
4690 return;
4693 /* Storing in STRICT_LOW_PART is like storing in a reg
4694 in that this SET might be dead, so ignore it in TESTREG, but
4695 but in some other ways it is like using the reg.
4697 Storing in a SUBREG or a bit field is like storing the entire
4698 register in that if the register's value is not used
4699 then this SET is not needed. */
4700 while (GET_CODE (testreg) == STRICT_LOW_PART
4701 || GET_CODE (testreg) == ZERO_EXTRACT
4702 || GET_CODE (testreg) == SIGN_EXTRACT
4703 || GET_CODE (testreg) == SUBREG)
4705 if (GET_CODE (testreg) == SUBREG
4706 && GET_CODE (SUBREG_REG (testreg)) == REG
4707 && REGNO (SUBREG_REG (testreg)) >= FIRST_PSEUDO_REGISTER
4708 && (GET_MODE_SIZE (GET_MODE (testreg))
4709 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (testreg)))))
4710 REG_CHANGES_SIZE (REGNO (SUBREG_REG (testreg))) = 1;
4712 /* Modifying a single register in an alternate mode
4713 does not use any of the old value. But these other
4714 ways of storing in a register do use the old value. */
4715 if (GET_CODE (testreg) == SUBREG
4716 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
4718 else
4719 mark_dest = 1;
4721 testreg = XEXP (testreg, 0);
4724 /* If this is a store into a register, recursively scan the
4725 value being stored. */
4727 if ((GET_CODE (testreg) == PARALLEL
4728 && GET_MODE (testreg) == BLKmode)
4729 || (GET_CODE (testreg) == REG
4730 && (regno = REGNO (testreg),
4731 ! (regno == FRAME_POINTER_REGNUM
4732 && (! reload_completed || frame_pointer_needed)))
4733 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4734 && ! (regno == HARD_FRAME_POINTER_REGNUM
4735 && (! reload_completed || frame_pointer_needed))
4736 #endif
4737 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4738 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
4739 #endif
4742 if (mark_dest)
4743 mark_used_regs (pbi, SET_DEST (x), cond, insn);
4744 mark_used_regs (pbi, SET_SRC (x), cond, insn);
4745 return;
4748 break;
4750 case ASM_OPERANDS:
4751 case UNSPEC_VOLATILE:
4752 case TRAP_IF:
4753 case ASM_INPUT:
4755 /* Traditional and volatile asm instructions must be considered to use
4756 and clobber all hard registers, all pseudo-registers and all of
4757 memory. So must TRAP_IF and UNSPEC_VOLATILE operations.
4759 Consider for instance a volatile asm that changes the fpu rounding
4760 mode. An insn should not be moved across this even if it only uses
4761 pseudo-regs because it might give an incorrectly rounded result.
4763 ?!? Unfortunately, marking all hard registers as live causes massive
4764 problems for the register allocator and marking all pseudos as live
4765 creates mountains of uninitialized variable warnings.
4767 So for now, just clear the memory set list and mark any regs
4768 we can find in ASM_OPERANDS as used. */
4769 if (code != ASM_OPERANDS || MEM_VOLATILE_P (x))
4770 free_EXPR_LIST_list (&pbi->mem_set_list);
4772 /* For all ASM_OPERANDS, we must traverse the vector of input operands.
4773 We can not just fall through here since then we would be confused
4774 by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate
4775 traditional asms unlike their normal usage. */
4776 if (code == ASM_OPERANDS)
4778 int j;
4780 for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
4781 mark_used_regs (pbi, ASM_OPERANDS_INPUT (x, j), cond, insn);
4783 break;
4786 case COND_EXEC:
4787 if (cond != NULL_RTX)
4788 abort ();
4790 mark_used_regs (pbi, COND_EXEC_TEST (x), NULL_RTX, insn);
4792 cond = COND_EXEC_TEST (x);
4793 x = COND_EXEC_CODE (x);
4794 goto retry;
4796 case PHI:
4797 /* We _do_not_ want to scan operands of phi nodes. Operands of
4798 a phi function are evaluated only when control reaches this
4799 block along a particular edge. Therefore, regs that appear
4800 as arguments to phi should not be added to the global live at
4801 start. */
4802 return;
4804 default:
4805 break;
4808 /* Recursively scan the operands of this expression. */
4811 register const char *fmt = GET_RTX_FORMAT (code);
4812 register int i;
4814 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4816 if (fmt[i] == 'e')
4818 /* Tail recursive case: save a function call level. */
4819 if (i == 0)
4821 x = XEXP (x, 0);
4822 goto retry;
4824 mark_used_regs (pbi, XEXP (x, i), cond, insn);
4826 else if (fmt[i] == 'E')
4828 register int j;
4829 for (j = 0; j < XVECLEN (x, i); j++)
4830 mark_used_regs (pbi, XVECEXP (x, i, j), cond, insn);
4836 #ifdef AUTO_INC_DEC
4838 static int
4839 try_pre_increment_1 (pbi, insn)
4840 struct propagate_block_info *pbi;
4841 rtx insn;
4843 /* Find the next use of this reg. If in same basic block,
4844 make it do pre-increment or pre-decrement if appropriate. */
4845 rtx x = single_set (insn);
4846 HOST_WIDE_INT amount = ((GET_CODE (SET_SRC (x)) == PLUS ? 1 : -1)
4847 * INTVAL (XEXP (SET_SRC (x), 1)));
4848 int regno = REGNO (SET_DEST (x));
4849 rtx y = pbi->reg_next_use[regno];
4850 if (y != 0
4851 && BLOCK_NUM (y) == BLOCK_NUM (insn)
4852 /* Don't do this if the reg dies, or gets set in y; a standard addressing
4853 mode would be better. */
4854 && ! dead_or_set_p (y, SET_DEST (x))
4855 && try_pre_increment (y, SET_DEST (x), amount))
4857 /* We have found a suitable auto-increment
4858 and already changed insn Y to do it.
4859 So flush this increment-instruction. */
4860 PUT_CODE (insn, NOTE);
4861 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
4862 NOTE_SOURCE_FILE (insn) = 0;
4863 /* Count a reference to this reg for the increment
4864 insn we are deleting. When a reg is incremented,
4865 spilling it is worse, so we want to make that
4866 less likely. */
4867 if (regno >= FIRST_PSEUDO_REGISTER)
4869 REG_N_REFS (regno) += pbi->bb->loop_depth + 1;
4870 REG_N_SETS (regno)++;
4872 return 1;
4874 return 0;
4877 /* Try to change INSN so that it does pre-increment or pre-decrement
4878 addressing on register REG in order to add AMOUNT to REG.
4879 AMOUNT is negative for pre-decrement.
4880 Returns 1 if the change could be made.
4881 This checks all about the validity of the result of modifying INSN. */
4883 static int
4884 try_pre_increment (insn, reg, amount)
4885 rtx insn, reg;
4886 HOST_WIDE_INT amount;
4888 register rtx use;
4890 /* Nonzero if we can try to make a pre-increment or pre-decrement.
4891 For example, addl $4,r1; movl (r1),... can become movl +(r1),... */
4892 int pre_ok = 0;
4893 /* Nonzero if we can try to make a post-increment or post-decrement.
4894 For example, addl $4,r1; movl -4(r1),... can become movl (r1)+,...
4895 It is possible for both PRE_OK and POST_OK to be nonzero if the machine
4896 supports both pre-inc and post-inc, or both pre-dec and post-dec. */
4897 int post_ok = 0;
4899 /* Nonzero if the opportunity actually requires post-inc or post-dec. */
4900 int do_post = 0;
4902 /* From the sign of increment, see which possibilities are conceivable
4903 on this target machine. */
4904 if (HAVE_PRE_INCREMENT && amount > 0)
4905 pre_ok = 1;
4906 if (HAVE_POST_INCREMENT && amount > 0)
4907 post_ok = 1;
4909 if (HAVE_PRE_DECREMENT && amount < 0)
4910 pre_ok = 1;
4911 if (HAVE_POST_DECREMENT && amount < 0)
4912 post_ok = 1;
4914 if (! (pre_ok || post_ok))
4915 return 0;
4917 /* It is not safe to add a side effect to a jump insn
4918 because if the incremented register is spilled and must be reloaded
4919 there would be no way to store the incremented value back in memory. */
4921 if (GET_CODE (insn) == JUMP_INSN)
4922 return 0;
4924 use = 0;
4925 if (pre_ok)
4926 use = find_use_as_address (PATTERN (insn), reg, 0);
4927 if (post_ok && (use == 0 || use == (rtx) 1))
4929 use = find_use_as_address (PATTERN (insn), reg, -amount);
4930 do_post = 1;
4933 if (use == 0 || use == (rtx) 1)
4934 return 0;
4936 if (GET_MODE_SIZE (GET_MODE (use)) != (amount > 0 ? amount : - amount))
4937 return 0;
4939 /* See if this combination of instruction and addressing mode exists. */
4940 if (! validate_change (insn, &XEXP (use, 0),
4941 gen_rtx_fmt_e (amount > 0
4942 ? (do_post ? POST_INC : PRE_INC)
4943 : (do_post ? POST_DEC : PRE_DEC),
4944 Pmode, reg), 0))
4945 return 0;
4947 /* Record that this insn now has an implicit side effect on X. */
4948 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, reg, REG_NOTES (insn));
4949 return 1;
4952 #endif /* AUTO_INC_DEC */
4954 /* Find the place in the rtx X where REG is used as a memory address.
4955 Return the MEM rtx that so uses it.
4956 If PLUSCONST is nonzero, search instead for a memory address equivalent to
4957 (plus REG (const_int PLUSCONST)).
4959 If such an address does not appear, return 0.
4960 If REG appears more than once, or is used other than in such an address,
4961 return (rtx)1. */
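/* For example, given the hypothetical insn body

       (set (reg q) (mem:SI (plus (reg r) (const_int 4))))

   find_use_as_address (body, r, 4) returns the MEM, while
   find_use_as_address (body, r, 0) returns (rtx) 1, because r then
   occurs only inside an address of the wrong form.  */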
4963 rtx
4964 find_use_as_address (x, reg, plusconst)
4965 register rtx x;
4966 rtx reg;
4967 HOST_WIDE_INT plusconst;
4969 enum rtx_code code = GET_CODE (x);
4970 const char *fmt = GET_RTX_FORMAT (code);
4971 register int i;
4972 register rtx value = 0;
4973 register rtx tem;
4975 if (code == MEM && XEXP (x, 0) == reg && plusconst == 0)
4976 return x;
4978 if (code == MEM && GET_CODE (XEXP (x, 0)) == PLUS
4979 && XEXP (XEXP (x, 0), 0) == reg
4980 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4981 && INTVAL (XEXP (XEXP (x, 0), 1)) == plusconst)
4982 return x;
4984 if (code == SIGN_EXTRACT || code == ZERO_EXTRACT)
4986 /* If REG occurs inside a MEM used in a bit-field reference,
4987 that is unacceptable. */
4988 if (find_use_as_address (XEXP (x, 0), reg, 0) != 0)
4989 return (rtx) (HOST_WIDE_INT) 1;
4992 if (x == reg)
4993 return (rtx) (HOST_WIDE_INT) 1;
4995 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4997 if (fmt[i] == 'e')
4999 tem = find_use_as_address (XEXP (x, i), reg, plusconst);
5000 if (value == 0)
5001 value = tem;
5002 else if (tem != 0)
5003 return (rtx) (HOST_WIDE_INT) 1;
5005 else if (fmt[i] == 'E')
5007 register int j;
5008 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
5010 tem = find_use_as_address (XVECEXP (x, i, j), reg, plusconst);
5011 if (value == 0)
5012 value = tem;
5013 else if (tem != 0)
5014 return (rtx) (HOST_WIDE_INT) 1;
5019 return value;
5022 /* Write information about registers and basic blocks into FILE.
5023 This is part of making a debugging dump. */
5025 void
5026 dump_regset (r, outf)
5027 regset r;
5028 FILE *outf;
5030 int i;
5031 if (r == NULL)
5033 fputs (" (nil)", outf);
5034 return;
5037 EXECUTE_IF_SET_IN_REG_SET (r, 0, i,
5039 fprintf (outf, " %d", i);
5040 if (i < FIRST_PSEUDO_REGISTER)
5041 fprintf (outf, " [%s]",
5042 reg_names[i]);
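/* A regset prints as a space-separated list of register numbers, with
   hard registers followed by their names; e.g. (hypothetical numbers
   and names)

       1 [dx] 5 [di] 66 67  */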
5046 void
5047 debug_regset (r)
5048 regset r;
5050 dump_regset (r, stderr);
5051 putc ('\n', stderr);
5054 void
5055 dump_flow_info (file)
5056 FILE *file;
5058 register int i;
5059 static const char * const reg_class_names[] = REG_CLASS_NAMES;
5061 fprintf (file, "%d registers.\n", max_regno);
5062 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
5063 if (REG_N_REFS (i))
5065 enum reg_class class, altclass;
5066 fprintf (file, "\nRegister %d used %d times across %d insns",
5067 i, REG_N_REFS (i), REG_LIVE_LENGTH (i));
5068 if (REG_BASIC_BLOCK (i) >= 0)
5069 fprintf (file, " in block %d", REG_BASIC_BLOCK (i));
5070 if (REG_N_SETS (i))
5071 fprintf (file, "; set %d time%s", REG_N_SETS (i),
5072 (REG_N_SETS (i) == 1) ? "" : "s");
5073 if (REG_USERVAR_P (regno_reg_rtx[i]))
5074 fprintf (file, "; user var");
5075 if (REG_N_DEATHS (i) != 1)
5076 fprintf (file, "; dies in %d places", REG_N_DEATHS (i));
5077 if (REG_N_CALLS_CROSSED (i) == 1)
5078 fprintf (file, "; crosses 1 call");
5079 else if (REG_N_CALLS_CROSSED (i))
5080 fprintf (file, "; crosses %d calls", REG_N_CALLS_CROSSED (i));
5081 if (PSEUDO_REGNO_BYTES (i) != UNITS_PER_WORD)
5082 fprintf (file, "; %d bytes", PSEUDO_REGNO_BYTES (i));
5083 class = reg_preferred_class (i);
5084 altclass = reg_alternate_class (i);
5085 if (class != GENERAL_REGS || altclass != ALL_REGS)
5087 if (altclass == ALL_REGS || class == ALL_REGS)
5088 fprintf (file, "; pref %s", reg_class_names[(int) class]);
5089 else if (altclass == NO_REGS)
5090 fprintf (file, "; %s or none", reg_class_names[(int) class]);
5091 else
5092 fprintf (file, "; pref %s, else %s",
5093 reg_class_names[(int) class],
5094 reg_class_names[(int) altclass]);
5096 if (REGNO_POINTER_FLAG (i))
5097 fprintf (file, "; pointer");
5098 fprintf (file, ".\n");
5101 fprintf (file, "\n%d basic blocks, %d edges.\n", n_basic_blocks, n_edges);
5102 for (i = 0; i < n_basic_blocks; i++)
5104 register basic_block bb = BASIC_BLOCK (i);
5105 register edge e;
5107 fprintf (file, "\nBasic block %d: first insn %d, last %d, loop_depth %d.\n",
5108 i, INSN_UID (bb->head), INSN_UID (bb->end), bb->loop_depth);
5110 fprintf (file, "Predecessors: ");
5111 for (e = bb->pred; e ; e = e->pred_next)
5112 dump_edge_info (file, e, 0);
5114 fprintf (file, "\nSuccessors: ");
5115 for (e = bb->succ; e ; e = e->succ_next)
5116 dump_edge_info (file, e, 1);
5118 fprintf (file, "\nRegisters live at start:");
5119 dump_regset (bb->global_live_at_start, file);
5121 fprintf (file, "\nRegisters live at end:");
5122 dump_regset (bb->global_live_at_end, file);
5124 putc ('\n', file);
5127 putc ('\n', file);
5130 void
5131 debug_flow_info ()
5133 dump_flow_info (stderr);
5136 static void
5137 dump_edge_info (file, e, do_succ)
5138 FILE *file;
5139 edge e;
5140 int do_succ;
5142 basic_block side = (do_succ ? e->dest : e->src);
5144 if (side == ENTRY_BLOCK_PTR)
5145 fputs (" ENTRY", file);
5146 else if (side == EXIT_BLOCK_PTR)
5147 fputs (" EXIT", file);
5148 else
5149 fprintf (file, " %d", side->index);
5151 if (e->flags)
5153 static const char * const bitnames[] = {
5154 "fallthru", "crit", "ab", "abcall", "eh", "fake"
5156 int comma = 0;
5157 int i, flags = e->flags;
5159 fputc (' ', file);
5160 fputc ('(', file);
5161 for (i = 0; flags; i++)
5162 if (flags & (1 << i))
5164 flags &= ~(1 << i);
5166 if (comma)
5167 fputc (',', file);
5168 if (i < (int)(sizeof (bitnames) / sizeof (*bitnames)))
5169 fputs (bitnames[i], file);
5170 else
5171 fprintf (file, "%d", i);
5172 comma = 1;
5174 fputc (')', file);
5179 /* Print out one basic block with live information at start and end. */
5180 void
5181 dump_bb (bb, outf)
5182 basic_block bb;
5183 FILE *outf;
5185 rtx insn;
5186 rtx last;
5187 edge e;
5189 fprintf (outf, ";; Basic block %d, loop depth %d",
5190 bb->index, bb->loop_depth);
5191 if (bb->eh_beg != -1 || bb->eh_end != -1)
5192 fprintf (outf, ", eh regions %d/%d", bb->eh_beg, bb->eh_end);
5193 putc ('\n', outf);
5195 fputs (";; Predecessors: ", outf);
5196 for (e = bb->pred; e ; e = e->pred_next)
5197 dump_edge_info (outf, e, 0);
5198 putc ('\n', outf);
5200 fputs (";; Registers live at start:", outf);
5201 dump_regset (bb->global_live_at_start, outf);
5202 putc ('\n', outf);
5204 for (insn = bb->head, last = NEXT_INSN (bb->end);
5205 insn != last;
5206 insn = NEXT_INSN (insn))
5207 print_rtl_single (outf, insn);
5209 fputs (";; Registers live at end:", outf);
5210 dump_regset (bb->global_live_at_end, outf);
5211 putc ('\n', outf);
5213 fputs (";; Successors: ", outf);
5214 for (e = bb->succ; e; e = e->succ_next)
5215 dump_edge_info (outf, e, 1);
5216 putc ('\n', outf);
5219 void
5220 debug_bb (bb)
5221 basic_block bb;
5223 dump_bb (bb, stderr);
5226 void
5227 debug_bb_n (n)
5228 int n;
5230 dump_bb (BASIC_BLOCK(n), stderr);
5233 /* Like print_rtl, but also print out live information for the start of each
5234 basic block. */
5236 void
5237 print_rtl_with_bb (outf, rtx_first)
5238 FILE *outf;
5239 rtx rtx_first;
5241 register rtx tmp_rtx;
5243 if (rtx_first == 0)
5244 fprintf (outf, "(nil)\n");
5245 else
5247 int i;
5248 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
5249 int max_uid = get_max_uid ();
5250 basic_block *start = (basic_block *)
5251 xcalloc (max_uid, sizeof (basic_block));
5252 basic_block *end = (basic_block *)
5253 xcalloc (max_uid, sizeof (basic_block));
5254 enum bb_state *in_bb_p = (enum bb_state *)
5255 xcalloc (max_uid, sizeof (enum bb_state));
5257 for (i = n_basic_blocks - 1; i >= 0; i--)
5259 basic_block bb = BASIC_BLOCK (i);
5260 rtx x;
5262 start[INSN_UID (bb->head)] = bb;
5263 end[INSN_UID (bb->end)] = bb;
5264 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
5266 enum bb_state state = IN_MULTIPLE_BB;
5267 if (in_bb_p[INSN_UID(x)] == NOT_IN_BB)
5268 state = IN_ONE_BB;
5269 in_bb_p[INSN_UID(x)] = state;
5271 if (x == bb->end)
5272 break;
5276 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
5278 int did_output;
5279 basic_block bb;
5281 if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
5283 fprintf (outf, ";; Start of basic block %d, registers live:",
5284 bb->index);
5285 dump_regset (bb->global_live_at_start, outf);
5286 putc ('\n', outf);
5289 if (in_bb_p[INSN_UID(tmp_rtx)] == NOT_IN_BB
5290 && GET_CODE (tmp_rtx) != NOTE
5291 && GET_CODE (tmp_rtx) != BARRIER)
5292 fprintf (outf, ";; Insn is not within a basic block\n");
5293 else if (in_bb_p[INSN_UID(tmp_rtx)] == IN_MULTIPLE_BB)
5294 fprintf (outf, ";; Insn is in multiple basic blocks\n");
5296 did_output = print_rtl_single (outf, tmp_rtx);
5298 if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
5300 fprintf (outf, ";; End of basic block %d, registers live:\n",
5301 bb->index);
5302 dump_regset (bb->global_live_at_end, outf);
5303 putc ('\n', outf);
5306 if (did_output)
5307 putc ('\n', outf);
5310 free (start);
5311 free (end);
5312 free (in_bb_p);
5315 if (current_function_epilogue_delay_list != 0)
5317 fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
5318 for (tmp_rtx = current_function_epilogue_delay_list; tmp_rtx != 0;
5319 tmp_rtx = XEXP (tmp_rtx, 1))
5320 print_rtl_single (outf, XEXP (tmp_rtx, 0));
5324 /* Compute dominator relationships using new flow graph structures. */
5325 void
5326 compute_flow_dominators (dominators, post_dominators)
5327 sbitmap *dominators;
5328 sbitmap *post_dominators;
5330 int bb;
5331 sbitmap *temp_bitmap;
5332 edge e;
5333 basic_block *worklist, *workend, *qin, *qout;
5334 int qlen;
5336 /* Allocate a worklist array/queue. Entries are only added to the
5337 list if they were not already on the list. So the size is
5338 bounded by the number of basic blocks. */
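/* The iteration below computes the maximal fixed point of the standard
   dominator equations (a sketch in set notation, not code):

       dom (b) = { b }                                if ENTRY is a pred of b
       dom (b) = { b } U  intersect of dom (p)
                          over all p in pred (b)      otherwise

   starting from the optimistic guess that every block dominates every
   other; the analogous backward equations over successors give the
   post-dominators.  */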
5339 worklist = (basic_block *) xmalloc (sizeof (basic_block) * n_basic_blocks);
5340 workend = &worklist[n_basic_blocks];
5342 temp_bitmap = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
5343 sbitmap_vector_zero (temp_bitmap, n_basic_blocks);
5345 if (dominators)
5347 /* The optimistic setting of dominators requires us to put every
5348 block on the work list initially. */
5349 qin = qout = worklist;
5350 for (bb = 0; bb < n_basic_blocks; bb++)
5352 *qin++ = BASIC_BLOCK (bb);
5353 BASIC_BLOCK (bb)->aux = BASIC_BLOCK (bb);
5355 qlen = n_basic_blocks;
5356 qin = worklist;
5358 /* We want a maximal solution, so initially assume everything dominates
5359 everything else. */
5360 sbitmap_vector_ones (dominators, n_basic_blocks);
5362 /* Mark successors of the entry block so we can identify them below. */
5363 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
5364 e->dest->aux = ENTRY_BLOCK_PTR;
5366 /* Iterate until the worklist is empty. */
5367 while (qlen)
5369 /* Take the first entry off the worklist. */
5370 basic_block b = *qout++;
5371 if (qout >= workend)
5372 qout = worklist;
5373 qlen--;
5375 bb = b->index;
5377 /* Compute the intersection of the dominators of all the
5378 predecessor blocks.
5380 If one of the predecessor blocks is the ENTRY block, then the
5381 intersection of the dominators of the predecessor blocks is
5382 defined as the null set. We can identify such blocks by the
5383 special value in the AUX field in the block structure. */
5384 if (b->aux == ENTRY_BLOCK_PTR)
5386 /* Do not clear the aux field for blocks which are
5387 successors of the ENTRY block. That way we never
5388 add them to the worklist again.
5390 The intersect of dominators of the preds of this block is
5391 defined as the null set. */
5392 sbitmap_zero (temp_bitmap[bb]);
5394 else
5396 /* Clear the aux field of this block so it can be added to
5397 the worklist again if necessary. */
5398 b->aux = NULL;
5399 sbitmap_intersection_of_preds (temp_bitmap[bb], dominators, bb);
5402 /* Make sure each block always dominates itself. */
5403 SET_BIT (temp_bitmap[bb], bb);
5405 /* If the out state of this block changed, then we need to
5406 add the successors of this block to the worklist if they
5407 are not already on the worklist. */
5408 if (sbitmap_a_and_b (dominators[bb], dominators[bb], temp_bitmap[bb]))
5410 for (e = b->succ; e; e = e->succ_next)
5412 if (!e->dest->aux && e->dest != EXIT_BLOCK_PTR)
5414 *qin++ = e->dest;
5415 if (qin >= workend)
5416 qin = worklist;
5417 qlen++;
5419 e->dest->aux = e;
5426 if (post_dominators)
5428 /* The optimistic setting of dominators requires us to put every
5429 block on the work list initially. */
5430 qin = qout = worklist;
5431 for (bb = 0; bb < n_basic_blocks; bb++)
5433 *qin++ = BASIC_BLOCK (bb);
5434 BASIC_BLOCK (bb)->aux = BASIC_BLOCK (bb);
5436 qlen = n_basic_blocks;
5437 qin = worklist;
5439 /* We want a maximal solution, so initially assume everything post
5440 dominates everything else. */
5441 sbitmap_vector_ones (post_dominators, n_basic_blocks);
5443 /* Mark predecessors of the exit block so we can identify them below. */
5444 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
5445 e->src->aux = EXIT_BLOCK_PTR;
5447 /* Iterate until the worklist is empty. */
5448 while (qlen)
5450 /* Take the first entry off the worklist. */
5451 basic_block b = *qout++;
5452 if (qout >= workend)
5453 qout = worklist;
5454 qlen--;
5456 bb = b->index;
5458 /* Compute the intersection of the post dominators of all the
5459 successor blocks.
5461 If one of the successor blocks is the EXIT block, then the
5462 intersection of the dominators of the successor blocks is
5463 defined as the null set. We can identify such blocks by the
5464 special value in the AUX field in the block structure. */
5465 if (b->aux == EXIT_BLOCK_PTR)
5467 /* Do not clear the aux field for blocks which are
5468 predecessors of the EXIT block. That way we never
5469 add them to the worklist again.
5471 The intersect of dominators of the succs of this block is
5472 defined as the null set. */
5473 sbitmap_zero (temp_bitmap[bb]);
5475 else
5477 /* Clear the aux field of this block so it can be added to
5478 the worklist again if necessary. */
5479 b->aux = NULL;
5480 sbitmap_intersection_of_succs (temp_bitmap[bb],
5481 post_dominators, bb);
5484 /* Make sure each block always post dominates itself. */
5485 SET_BIT (temp_bitmap[bb], bb);
5487 /* If the out state of this block changed, then we need to
5488 add the predecessors of this block to the worklist if they
5489 are not already on the worklist. */
5490 if (sbitmap_a_and_b (post_dominators[bb],
5491 post_dominators[bb],
5492 temp_bitmap[bb]))
5494 for (e = b->pred; e; e = e->pred_next)
5496 if (!e->src->aux && e->src != ENTRY_BLOCK_PTR)
5498 *qin++ = e->src;
5499 if (qin >= workend)
5500 qin = worklist;
5501 qlen++;
5503 e->src->aux = e;
5510 free (worklist);
5511 free (temp_bitmap);
5514 /* Given DOMINATORS, compute the immediate dominators into IDOM. */
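/* E.g. for a simple chain of blocks 0 -> 1 -> 2 (hypothetical numbers):
   dom (2) = {0,1,2}, so tmp (2) starts as {0,1}; subtracting tmp (1),
   which is {0}, leaves {1}, and therefore idom (2) = 1.  */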
5516 void
5517 compute_immediate_dominators (idom, dominators)
5518 int *idom;
5519 sbitmap *dominators;
5521 sbitmap *tmp;
5522 int b;
5524 tmp = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
5526 /* Begin with tmp(n) = dom(n) - { n }. */
5527 for (b = n_basic_blocks; --b >= 0; )
5529 sbitmap_copy (tmp[b], dominators[b]);
5530 RESET_BIT (tmp[b], b);
5533 /* Subtract out all of our dominators' dominators. */
5534 for (b = n_basic_blocks; --b >= 0; )
5536 sbitmap tmp_b = tmp[b];
5537 int s;
5539 for (s = n_basic_blocks; --s >= 0; )
5540 if (TEST_BIT (tmp_b, s))
5541 sbitmap_difference (tmp_b, tmp_b, tmp[s]);
5544 /* Find the one bit set in the bitmap and put it in the output array. */
5545 for (b = n_basic_blocks; --b >= 0; )
5547 int t;
5548 EXECUTE_IF_SET_IN_SBITMAP (tmp[b], 0, t, { idom[b] = t; });
5551 sbitmap_vector_free (tmp);
5554 /* Count for a single SET rtx, X. */
5556 static void
5557 count_reg_sets_1 (x, loop_depth)
5558 rtx x;
5559 int loop_depth;
5561 register int regno;
5562 register rtx reg = SET_DEST (x);
5564 /* Find the register that's set/clobbered. */
5565 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
5566 || GET_CODE (reg) == SIGN_EXTRACT
5567 || GET_CODE (reg) == STRICT_LOW_PART)
5568 reg = XEXP (reg, 0);
5570 if (GET_CODE (reg) == PARALLEL
5571 && GET_MODE (reg) == BLKmode)
5573 register int i;
5574 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
5575 count_reg_sets_1 (XVECEXP (reg, 0, i), loop_depth);
5576 return;
5579 if (GET_CODE (reg) == REG)
5581 regno = REGNO (reg);
5582 if (regno >= FIRST_PSEUDO_REGISTER)
5584 /* Count (weighted) references, stores, etc. This counts a
5585 register twice if it is modified, but that is correct. */
5586 REG_N_SETS (regno)++;
5587 REG_N_REFS (regno) += loop_depth + 1;
5592 /* Increment REG_N_SETS for each SET or CLOBBER found in X; also increment
5593 REG_N_REFS by the current loop depth for each SET or CLOBBER found. */
5595 static void
5596 count_reg_sets (x, loop_depth)
5597 rtx x;
5598 int loop_depth;
5600 register RTX_CODE code = GET_CODE (x);
5602 if (code == SET || code == CLOBBER)
5603 count_reg_sets_1 (x, loop_depth);
5604 else if (code == PARALLEL)
5606 register int i;
5607 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
5609 code = GET_CODE (XVECEXP (x, 0, i));
5610 if (code == SET || code == CLOBBER)
5611 count_reg_sets_1 (XVECEXP (x, 0, i), loop_depth);
5616 /* Increment REG_N_REFS by the current loop depth for each register reference
5617 found in X. */
5619 static void
5620 count_reg_references (x, loop_depth)
5621 rtx x;
5622 int loop_depth;
5624 register RTX_CODE code;
5626 retry:
5627 code = GET_CODE (x);
5628 switch (code)
5630 case LABEL_REF:
5631 case SYMBOL_REF:
5632 case CONST_INT:
5633 case CONST:
5634 case CONST_DOUBLE:
5635 case PC:
5636 case ADDR_VEC:
5637 case ADDR_DIFF_VEC:
5638 case ASM_INPUT:
5639 return;
5641 #ifdef HAVE_cc0
5642 case CC0:
5643 return;
5644 #endif
5646 case CLOBBER:
5647 /* If we are clobbering a MEM, mark any registers inside the address
5648 as being used. */
5649 if (GET_CODE (XEXP (x, 0)) == MEM)
5650 count_reg_references (XEXP (XEXP (x, 0), 0), loop_depth);
5651 return;
5653 case SUBREG:
5654 /* While we're here, optimize this case. */
5655 x = SUBREG_REG (x);
5657 /* In case the SUBREG is not of a register, don't optimize. */
5658 if (GET_CODE (x) != REG)
5660 count_reg_references (x, loop_depth);
5661 return;
5664 /* ... fall through ... */
5666 case REG:
5667 if (REGNO (x) >= FIRST_PSEUDO_REGISTER)
5668 REG_N_REFS (REGNO (x)) += loop_depth + 1;
5669 return;
5671 case SET:
5673 register rtx testreg = SET_DEST (x);
5674 int mark_dest = 0;
5676 /* If storing into MEM, don't show it as being used. But do
5677 show the address as being used. */
5678 if (GET_CODE (testreg) == MEM)
5680 count_reg_references (XEXP (testreg, 0), loop_depth);
5681 count_reg_references (SET_SRC (x), loop_depth);
5682 return;
5685 /* Storing in STRICT_LOW_PART is like storing in a reg
5686 in that this SET might be dead, so ignore it in TESTREG, but
5687 but in some other ways it is like using the reg.
5689 Storing in a SUBREG or a bit field is like storing the entire
5690 register in that if the register's value is not used
5691 then this SET is not needed. */
5692 while (GET_CODE (testreg) == STRICT_LOW_PART
5693 || GET_CODE (testreg) == ZERO_EXTRACT
5694 || GET_CODE (testreg) == SIGN_EXTRACT
5695 || GET_CODE (testreg) == SUBREG)
5697 /* Modifying a single register in an alternate mode
5698 does not use any of the old value. But these other
5699 ways of storing in a register do use the old value. */
5700 if (GET_CODE (testreg) == SUBREG
5701 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
5703 else
5704 mark_dest = 1;
5706 testreg = XEXP (testreg, 0);
5709 /* If this is a store into a register,
5710 recursively scan the value being stored. */
5712 if ((GET_CODE (testreg) == PARALLEL
5713 && GET_MODE (testreg) == BLKmode)
5714 || GET_CODE (testreg) == REG)
5716 count_reg_references (SET_SRC (x), loop_depth);
5717 if (mark_dest)
5718 count_reg_references (SET_DEST (x), loop_depth);
5719 return;
5722 break;
5724 default:
5725 break;
5728 /* Recursively scan the operands of this expression. */
5731 register const char *fmt = GET_RTX_FORMAT (code);
5732 register int i;
5734 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5736 if (fmt[i] == 'e')
5738 /* Tail recursive case: save a function call level. */
5739 if (i == 0)
5741 x = XEXP (x, 0);
5742 goto retry;
5744 count_reg_references (XEXP (x, i), loop_depth);
5746 else if (fmt[i] == 'E')
5748 register int j;
5749 for (j = 0; j < XVECLEN (x, i); j++)
5750 count_reg_references (XVECEXP (x, i, j), loop_depth);
5756 /* Recompute register set/reference counts immediately prior to register
5757 allocation.
5759 This avoids problems with set/reference counts changing to/from values
5760 which have special meanings to the register allocators.
5762 Additionally, the reference counts are the primary component used by the
5763 register allocators to prioritize pseudos for allocation to hard regs.
5764 More accurate reference counts generally lead to better register allocation.
5766 F is the first insn to be scanned.
5768 LOOP_STEP denotes how much loop_depth should be incremented per
5769 loop nesting level in order to increase the ref count more for
5770 references in a loop.
5772 It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and
5773 possibly other information which is used by the register allocators. */
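/* The weight used below is loop_depth + 1 per reference: e.g. a use at
   loop nesting depth 2 adds 3 to REG_N_REFS, so registers referenced
   inside loops look proportionally more attractive to the allocators.  */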
5775 void
5776 recompute_reg_usage (f, loop_step)
5777 rtx f ATTRIBUTE_UNUSED;
5778 int loop_step ATTRIBUTE_UNUSED;
5780 rtx insn;
5781 int i, max_reg;
5782 int index;
5783 int loop_depth;
5785 /* Clear out the old data. */
5786 max_reg = max_reg_num ();
5787 for (i = FIRST_PSEUDO_REGISTER; i < max_reg; i++)
5789 REG_N_SETS (i) = 0;
5790 REG_N_REFS (i) = 0;
5793 /* Scan each insn in the chain and count how many times each register is
5794 set/used. */
5795 for (index = 0; index < n_basic_blocks; index++)
5797 basic_block bb = BASIC_BLOCK (index);
5798 loop_depth = bb->loop_depth;
5799 for (insn = bb->head; insn; insn = NEXT_INSN (insn))
5801 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
5803 rtx links;
5805 /* This call will increment REG_N_SETS for each SET or CLOBBER
5806 of a register in INSN. It will also increment REG_N_REFS
5807 by the loop depth for each set of a register in INSN. */
5808 count_reg_sets (PATTERN (insn), loop_depth);
5810 /* count_reg_sets does not detect autoincrement address modes, so
5811 detect them here by looking at the notes attached to INSN. */
5812 for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
5814 if (REG_NOTE_KIND (links) == REG_INC)
5815 /* Count (weighted) references, stores, etc. This
5816 counts a register twice if it is modified, but
5817 that is correct. */
5818 REG_N_SETS (REGNO (XEXP (links, 0)))++;
5821 /* This call will increment REG_N_REFS by the current loop depth
5822 for each reference to a register in INSN. */
5823 count_reg_references (PATTERN (insn), loop_depth);
5825 /* count_reg_references will not include counts for arguments to
5826 function calls, so detect them here by examining the
5827 CALL_INSN_FUNCTION_USAGE data. */
5828 if (GET_CODE (insn) == CALL_INSN)
5830 rtx note;
5832 for (note = CALL_INSN_FUNCTION_USAGE (insn);
5833 note;
5834 note = XEXP (note, 1))
5835 if (GET_CODE (XEXP (note, 0)) == USE)
5836 count_reg_references (XEXP (XEXP (note, 0), 0),
5837 loop_depth);
5840 if (insn == bb->end)
5841 break;
5846 /* Optionally removes all the REG_DEAD and REG_UNUSED notes from a set of
5847 blocks. If BLOCKS is NULL, assume the universal set. Returns a count
5848 of the number of registers that died. */
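/* The removal loop below walks the note list through a pointer-to-
   pointer cursor, so deleting the head note and deleting an interior
   note are the same operation.  A minimal sketch of just that idiom,
   reusing the hypothetical NODE type and DEAD_P predicate from the
   sketch after invalidate_mems_from_autoinc above:  */

static void
delete_matching_via_cursor (plist, dead_p)
     struct node **plist;
     int (*dead_p) ();
{
  struct node **pprev = plist;
  struct node *link = *pprev;

  while (link)
    {
      if ((*dead_p) (link))
        {
          /* Unlink LINK; the cursor stays put and now sees the successor.  */
          struct node *next = link->next;
          free (link);
          *pprev = link = next;
        }
      else
        {
          /* Keep LINK; advance the cursor to its NEXT field.  */
          pprev = &link->next;
          link = *pprev;
        }
    }
}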
5850 int
5851 count_or_remove_death_notes (blocks, kill)
5852 sbitmap blocks;
5853 int kill;
5855 int i, count = 0;
5857 for (i = n_basic_blocks - 1; i >= 0; --i)
5859 basic_block bb;
5860 rtx insn;
5862 if (blocks && ! TEST_BIT (blocks, i))
5863 continue;
5865 bb = BASIC_BLOCK (i);
5867 for (insn = bb->head; ; insn = NEXT_INSN (insn))
5869 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
5871 rtx *pprev = &REG_NOTES (insn);
5872 rtx link = *pprev;
5874 while (link)
5876 switch (REG_NOTE_KIND (link))
5878 case REG_DEAD:
5879 if (GET_CODE (XEXP (link, 0)) == REG)
5881 rtx reg = XEXP (link, 0);
5882 int n;
5884 if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
5885 n = 1;
5886 else
5887 n = HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg));
5888 count += n;
5890 /* FALLTHRU */
5892 case REG_UNUSED:
5893 if (kill)
5895 rtx next = XEXP (link, 1);
5896 free_EXPR_LIST_node (link);
5897 *pprev = link = next;
5898 break;
5900 /* FALLTHRU */
5902 default:
5903 pprev = &XEXP (link, 1);
5904 link = *pprev;
5905 break;
5910 if (insn == bb->end)
5911 break;
5915 return count;
5918 /* Record INSN's block as BB. */
5920 void
5921 set_block_for_insn (insn, bb)
5922 rtx insn;
5923 basic_block bb;
5925 size_t uid = INSN_UID (insn);
5926 if (uid >= basic_block_for_insn->num_elements)
5928 int new_size;
5930 /* Add one-eighth the size so we don't keep calling xrealloc. */
5931 new_size = uid + (uid + 7) / 8;
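/* E.g. a UID of 1000 grows the array to 1000 + (1000 + 7) / 8 = 1125
   elements: the requested index plus one-eighth of it, rounded up.  */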
5933 VARRAY_GROW (basic_block_for_insn, new_size);
5935 VARRAY_BB (basic_block_for_insn, uid) = bb;
5938 /* Record INSN's block number as BB. */
5939 /* ??? This has got to go. */
5941 void
5942 set_block_num (insn, bb)
5943 rtx insn;
5944 int bb;
5946 set_block_for_insn (insn, BASIC_BLOCK (bb));
5949 /* Verify CFG consistency. This function checks some CFG invariants and
5950 aborts when something is wrong. The hope is that this function will help
5951 convert many optimization passes to preserve CFG consistency.
5953 Currently it does the following checks:
5955 - test head/end pointers
5956 - overlapping of basic blocks
5957 - edge list correctness
5958 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
5959 - tails of basic blocks (ensure that the boundary is necessary)
5960 - scans body of the basic block for JUMP_INSN, CODE_LABEL
5961 and NOTE_INSN_BASIC_BLOCK
5962 - check that all insns are in the basic blocks
5963 (except the switch handling code, barriers and notes)
5964 - check that all returns are followed by barriers
5966 In the future it can be extended to check a lot of other stuff as well
5967 (reachability of basic blocks, life information, etc. etc.). */
5969 void
5970 verify_flow_info ()
5972 const int max_uid = get_max_uid ();
5973 const rtx rtx_first = get_insns ();
5974 basic_block *bb_info;
5975 rtx x;
5976 int i, err = 0;
5978 bb_info = (basic_block *) xcalloc (max_uid, sizeof (basic_block));
5980 /* First pass check head/end pointers and set bb_info array used by
5981 later passes. */
5982 for (i = n_basic_blocks - 1; i >= 0; i--)
5984 basic_block bb = BASIC_BLOCK (i);
5986 /* Check the head pointer and make sure that it is pointing into
5987 insn list. */
5988 for (x = rtx_first; x != NULL_RTX; x = NEXT_INSN (x))
5989 if (x == bb->head)
5990 break;
5991 if (!x)
5993 error ("Head insn %d for block %d not found in the insn stream.",
5994 INSN_UID (bb->head), bb->index);
5995 err = 1;
5998 /* Check the end pointer and make sure that it is pointing into
5999 the insn list. */
6000 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
6002 if (bb_info[INSN_UID (x)] != NULL)
6004 error ("Insn %d is in multiple basic blocks (%d and %d)",
6005 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
6006 err = 1;
6008 bb_info[INSN_UID (x)] = bb;
6010 if (x == bb->end)
6011 break;
6013 if (!x)
6015 error ("End insn %d for block %d not found in the insn stream.",
6016 INSN_UID (bb->end), bb->index);
6017 err = 1;
6021 /* Now check the basic blocks (boundaries etc.) */
6022 for (i = n_basic_blocks - 1; i >= 0; i--)
6024 basic_block bb = BASIC_BLOCK (i);
6025 /* Check correctness of edge lists. */
6026 edge e;
6028 e = bb->succ;
6029 while (e)
6031 if (e->src != bb)
6033 fprintf (stderr, "verify_flow_info: Basic block %d succ edge is corrupted\n",
6034 bb->index);
6035 fprintf (stderr, "Predecessor: ");
6036 dump_edge_info (stderr, e, 0);
6037 fprintf (stderr, "\nSuccessor: ");
6038 dump_edge_info (stderr, e, 1);
6039 fflush (stderr);
6040 err = 1;
6042 if (e->dest != EXIT_BLOCK_PTR)
6044 edge e2 = e->dest->pred;
6045 while (e2 && e2 != e)
6046 e2 = e2->pred_next;
6047 if (!e2)
6049 error ("Basic block %i edge lists are corrupted", bb->index);
6050 err = 1;
6053 e = e->succ_next;
6056 e = bb->pred;
6057 while (e)
6059 if (e->dest != bb)
6061 error ("Basic block %d pred edge is corrupted", bb->index);
6062 fputs ("Predecessor: ", stderr);
6063 dump_edge_info (stderr, e, 0);
6064 fputs ("\nSuccessor: ", stderr);
6065 dump_edge_info (stderr, e, 1);
6066 fputc ('\n', stderr);
6067 err = 1;
6069 if (e->src != ENTRY_BLOCK_PTR)
6071 edge e2 = e->src->succ;
6072 while (e2 && e2 != e)
6073 e2 = e2->succ_next;
6074 if (!e2)
6076 error ("Basic block %i edge lists are corrupted", bb->index);
6077 err = 1;
6080 e = e->pred_next;
6083 /* OK, the pointers are correct. Now check the header of the basic
6084 block. It ought to contain an optional CODE_LABEL followed
6085 by the NOTE_INSN_BASIC_BLOCK note. */
6086 x = bb->head;
6087 if (GET_CODE (x) == CODE_LABEL)
6089 if (bb->end == x)
6091 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
6092 bb->index);
6093 err = 1;
6095 x = NEXT_INSN (x);
6097 if (GET_CODE (x) != NOTE
6098 || NOTE_LINE_NUMBER (x) != NOTE_INSN_BASIC_BLOCK
6099 || NOTE_BASIC_BLOCK (x) != bb)
6101 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d\n",
6102 bb->index);
6103 err = 1;
6106 if (bb->end == x)
6108 /* Do checks for empty blocks here. */
6110 else
6112 x = NEXT_INSN (x);
6113 while (x)
6115 if (GET_CODE (x) == NOTE
6116 && NOTE_LINE_NUMBER (x) == NOTE_INSN_BASIC_BLOCK)
6118 error ("NOTE_INSN_BASIC_BLOCK %d in the middle of basic block %d",
6119 INSN_UID (x), bb->index);
6120 err = 1;
6123 if (x == bb->end)
6124 break;
6126 if (GET_CODE (x) == JUMP_INSN
6127 || GET_CODE (x) == CODE_LABEL
6128 || GET_CODE (x) == BARRIER)
6130 error ("In basic block %d:", bb->index);
6131 fatal_insn ("Flow control insn inside a basic block", x);
6134 x = NEXT_INSN (x);
6139 x = rtx_first;
6140 while (x)
6142 if (!bb_info[INSN_UID (x)])
6144 switch (GET_CODE (x))
6146 case BARRIER:
6147 case NOTE:
6148 break;
6150 case CODE_LABEL:
6151 /* An addr_vec is placed outside any basic block. */
6152 if (NEXT_INSN (x)
6153 && GET_CODE (NEXT_INSN (x)) == JUMP_INSN
6154 && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
6155 || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC))
6157 x = NEXT_INSN (x);
6160 /* But in any case, non-deletable labels can appear anywhere. */
6161 break;
6163 default:
6164 fatal_insn ("Insn outside basic block", x);
6168 if (GET_RTX_CLASS (GET_CODE (x)) == 'i'
6169 && GET_CODE (x) == JUMP_INSN
6170 && returnjump_p (x) && ! condjump_p (x)
6171 && ! (NEXT_INSN (x) && GET_CODE (NEXT_INSN (x)) == BARRIER))
6172 fatal_insn ("Return not followed by barrier", x);
6174 x = NEXT_INSN (x);
6177 if (err)
6178 abort ();
6180 /* Clean up. */
6181 free (bb_info);
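/* An illustrative sketch, not part of the original source; the
   example_* name is hypothetical. While debugging, a CFG-mutating pass
   can bracket its work with this checker so that breakage is caught
   where it is introduced. */
#if 0
static void
example_checked_transform ()
{
  verify_flow_info ();
  /* ... transform the insn stream and update the CFG here ... */
  verify_flow_info ();
}
#endif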
6184 /* Functions to access an edge list with a vector representation.
6185 Enough data is kept such that given an index number, the
6186 pred and succ that the edge represents can be determined, or
6187 given a pred and a succ, its index number can be returned.
6188 This allows algorithms which consume a lot of memory to
6189 represent the normally full matrix of edge (pred,succ) with a
6190 single indexed vector, edge (EDGE_INDEX (pred, succ)), with no
6191 wasted space in the client code due to sparse flow graphs. */
6193 /* This function initializes the edge list. Basically the entire
6194 flowgraph is processed, and all edges are assigned a number,
6195 and the data structure is filled in. */
6196 struct edge_list *
6197 create_edge_list ()
6199 struct edge_list *elist;
6200 edge e;
6201 int num_edges;
6202 int x;
6203 int block_count;
6205 block_count = n_basic_blocks + 2; /* Include the entry and exit blocks. */
6207 num_edges = 0;
6209 /* Determine the number of edges in the flow graph by counting successor
6210 edges on each basic block. */
6211 for (x = 0; x < n_basic_blocks; x++)
6213 basic_block bb = BASIC_BLOCK (x);
6215 for (e = bb->succ; e; e = e->succ_next)
6216 num_edges++;
6218 /* Don't forget successors of the entry block. */
6219 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6220 num_edges++;
6222 elist = (struct edge_list *) xmalloc (sizeof (struct edge_list));
6223 elist->num_blocks = block_count;
6224 elist->num_edges = num_edges;
6225 elist->index_to_edge = (edge *) xmalloc (sizeof (edge) * num_edges);
6227 num_edges = 0;
6229 /* Follow successors of the entry block, and register these edges. */
6230 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6232 elist->index_to_edge[num_edges] = e;
6233 num_edges++;
6236 for (x = 0; x < n_basic_blocks; x++)
6238 basic_block bb = BASIC_BLOCK (x);
6240 /* Follow all successors of blocks, and register these edges. */
6241 for (e = bb->succ; e; e = e->succ_next)
6243 elist->index_to_edge[num_edges] = e;
6244 num_edges++;
6247 return elist;
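/* An illustrative sketch, not part of the original source; the
   example_* name is hypothetical. The point of the index mapping is
   that per-edge data can live in a flat bitmap or vector of NUM_EDGES
   elements instead of a block-by-block matrix; here the exit edges are
   marked in an sbitmap keyed by edge index. */
#if 0
static sbitmap
example_mark_exit_edges ()
{
  struct edge_list *elist = create_edge_list ();
  sbitmap exits = sbitmap_alloc (NUM_EDGES (elist));
  int x;

  sbitmap_zero (exits);
  for (x = 0; x < NUM_EDGES (elist); x++)
    if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR)
      SET_BIT (exits, x);

  free_edge_list (elist);
  return exits;
}
#endif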
6250 /* This function frees the memory associated with an edge list. */
6251 void
6252 free_edge_list (elist)
6253 struct edge_list *elist;
6255 if (elist)
6257 free (elist->index_to_edge);
6258 free (elist);
6262 /* This function provides debug output showing an edge list. */
6263 void
6264 print_edge_list (f, elist)
6265 FILE *f;
6266 struct edge_list *elist;
6268 int x;
6269 fprintf (f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
6270 elist->num_blocks - 2, elist->num_edges);
6272 for (x = 0; x < elist->num_edges; x++)
6274 fprintf (f, " %-4d - edge(", x);
6275 if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR)
6276 fprintf (f, "entry,");
6277 else
6278 fprintf (f, "%d,", INDEX_EDGE_PRED_BB (elist, x)->index);
6280 if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR)
6281 fprintf (f, "exit)\n");
6282 else
6283 fprintf (f, "%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
6287 /* This function provides an internal consistency check of an edge list,
6288 verifying that all edges are present, and that there are no
6289 extra edges. */
6290 void
6291 verify_edge_list (f, elist)
6292 FILE *f;
6293 struct edge_list *elist;
6295 int x, pred, succ, index;
6296 edge e;
6298 for (x = 0; x < n_basic_blocks; x++)
6300 basic_block bb = BASIC_BLOCK (x);
6302 for (e = bb->succ; e; e = e->succ_next)
6304 pred = e->src->index;
6305 succ = e->dest->index;
6306 index = EDGE_INDEX (elist, e->src, e->dest);
6307 if (index == EDGE_INDEX_NO_EDGE)
6309 fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
6310 continue;
6312 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
6313 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
6314 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
6315 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
6316 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
6317 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
6320 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6322 pred = e->src->index;
6323 succ = e->dest->index;
6324 index = EDGE_INDEX (elist, e->src, e->dest);
6325 if (index == EDGE_INDEX_NO_EDGE)
6327 fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
6328 continue;
6330 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
6331 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
6332 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
6333 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
6334 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
6335 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
6337 /* We've verified that all the edges are in the list; now let's make sure
6338 there are no spurious edges in the list. */
6340 for (pred = 0 ; pred < n_basic_blocks; pred++)
6341 for (succ = 0 ; succ < n_basic_blocks; succ++)
6343 basic_block p = BASIC_BLOCK (pred);
6344 basic_block s = BASIC_BLOCK (succ);
6346 int found_edge = 0;
6348 for (e = p->succ; e; e = e->succ_next)
6349 if (e->dest == s)
6351 found_edge = 1;
6352 break;
6354 for (e = s->pred; e; e = e->pred_next)
6355 if (e->src == p)
6357 found_edge = 1;
6358 break;
6360 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
6361 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6362 fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
6363 pred, succ);
6364 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
6365 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6366 fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
6367 pred, succ, EDGE_INDEX (elist, BASIC_BLOCK (pred),
6368 BASIC_BLOCK (succ)));
6370 for (succ = 0 ; succ < n_basic_blocks; succ++)
6372 basic_block p = ENTRY_BLOCK_PTR;
6373 basic_block s = BASIC_BLOCK (succ);
6375 int found_edge = 0;
6377 for (e = p->succ; e; e = e->succ_next)
6378 if (e->dest == s)
6380 found_edge = 1;
6381 break;
6383 for (e = s->pred; e; e = e->pred_next)
6384 if (e->src == p)
6386 found_edge = 1;
6387 break;
6389 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
6390 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6391 fprintf (f, "*** Edge (entry, %d) appears to not have an index\n",
6392 succ);
6393 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
6394 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6395 fprintf (f, "*** Edge (entry, %d) has index %d, but no edge exists\n",
6396 succ, EDGE_INDEX (elist, ENTRY_BLOCK_PTR,
6397 BASIC_BLOCK (succ)));
6399 for (pred = 0 ; pred < n_basic_blocks; pred++)
6401 basic_block p = BASIC_BLOCK (pred);
6402 basic_block s = EXIT_BLOCK_PTR;
6404 int found_edge = 0;
6406 for (e = p->succ; e; e = e->succ_next)
6407 if (e->dest == s)
6409 found_edge = 1;
6410 break;
6412 for (e = s->pred; e; e = e->pred_next)
6413 if (e->src == p)
6415 found_edge = 1;
6416 break;
6418 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
6419 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6420 fprintf (f, "*** Edge (%d, exit) appears to not have an index\n",
6421 pred);
6422 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
6423 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6424 fprintf (f, "*** Edge (%d, exit) has index %d, but no edge exists\n",
6425 pred, EDGE_INDEX (elist, BASIC_BLOCK (pred),
6426 EXIT_BLOCK_PTR));
6430 /* This routine will determine what, if any, edge there is between
6431 a specified predecessor and successor. */
6433 int
6434 find_edge_index (edge_list, pred, succ)
6435 struct edge_list *edge_list;
6436 basic_block pred, succ;
6438 int x;
6439 for (x = 0; x < NUM_EDGES (edge_list); x++)
6441 if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
6442 && INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
6443 return x;
6445 return (EDGE_INDEX_NO_EDGE);
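/* An illustrative sketch, not part of the original source; the
   example_* name is hypothetical. Callers elsewhere in this file reach
   this routine through EDGE_INDEX (see verify_edge_list above) and must
   be prepared for EDGE_INDEX_NO_EDGE. The lookup is a linear scan over
   all edges, so hoist it out of hot loops. */
#if 0
static int
example_blocks_connected_p (elist, a, b)
     struct edge_list *elist;
     basic_block a, b;
{
  return EDGE_INDEX (elist, a, b) != EDGE_INDEX_NO_EDGE;
}
#endif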
6448 /* This function will remove an edge from the flow graph. */
6449 void
6450 remove_edge (e)
6451 edge e;
6453 edge last_pred = NULL;
6454 edge last_succ = NULL;
6455 edge tmp;
6456 basic_block src, dest;
6457 src = e->src;
6458 dest = e->dest;
6459 for (tmp = src->succ; tmp && tmp != e; tmp = tmp->succ_next)
6460 last_succ = tmp;
6462 if (!tmp)
6463 abort ();
6464 if (last_succ)
6465 last_succ->succ_next = e->succ_next;
6466 else
6467 src->succ = e->succ_next;
6469 for (tmp = dest->pred; tmp && tmp != e; tmp = tmp->pred_next)
6470 last_pred = tmp;
6472 if (!tmp)
6473 abort ();
6474 if (last_pred)
6475 last_pred->pred_next = e->pred_next;
6476 else
6477 dest->pred = e->pred_next;
6479 n_edges--;
6480 free (e);
6483 /* This routine will remove any fake successor edges for a basic block.
6484 When the edge is removed, it is also removed from whatever predecessor
6485 list it is in. */
6486 static void
6487 remove_fake_successors (bb)
6488 basic_block bb;
6490 edge e;
6491 for (e = bb->succ; e ; )
6493 edge tmp = e;
6494 e = e->succ_next;
6495 if ((tmp->flags & EDGE_FAKE) == EDGE_FAKE)
6496 remove_edge (tmp);
6500 /* This routine will remove all fake edges from the flow graph. If
6501 we remove all fake successors, it will automatically remove all
6502 fake predecessors. */
6503 void
6504 remove_fake_edges ()
6506 int x;
6508 for (x = 0; x < n_basic_blocks; x++)
6509 remove_fake_successors (BASIC_BLOCK (x));
6511 /* We've handled all successors except the entry block's. */
6512 remove_fake_successors (ENTRY_BLOCK_PTR);
6515 /* This function will add a fake edge between any block which has no
6516 successors and the exit block. Some data flow equations require these
6517 edges to exist. */
6518 void
6519 add_noreturn_fake_exit_edges ()
6521 int x;
6523 for (x = 0; x < n_basic_blocks; x++)
6524 if (BASIC_BLOCK (x)->succ == NULL)
6525 make_edge (NULL, BASIC_BLOCK (x), EXIT_BLOCK_PTR, EDGE_FAKE);
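/* An illustrative sketch, not part of the original source; the
   example_* name is hypothetical. This is the usual bracketing for a
   dataflow problem that needs every block to reach the exit; the fake
   edges must be removed again before anything else inspects the CFG. */
#if 0
static void
example_bracketed_dataflow ()
{
  add_noreturn_fake_exit_edges ();

  /* ... solve the dataflow equations here; every basic block now has
     a path to EXIT_BLOCK_PTR ... */

  remove_fake_edges ();
}
#endif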
6528 /* Dump the list of basic blocks in the bitmap NODES. */
6529 static void
6530 flow_nodes_print (str, nodes, file)
6531 const char *str;
6532 const sbitmap nodes;
6533 FILE *file;
6535 int node;
6537 fprintf (file, "%s { ", str);
6538 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {fprintf (file, "%d ", node);});
6539 fputs ("}\n", file);
6543 /* Dump the list of exiting edges in the array EDGES. */
6544 static void
6545 flow_exits_print (str, edges, num_edges, file)
6546 const char *str;
6547 const edge *edges;
6548 int num_edges;
6549 FILE *file;
6551 int i;
6553 fprintf (file, "%s { ", str);
6554 for (i = 0; i < num_edges; i++)
6555 fprintf (file, "%d->%d ", edges[i]->src->index, edges[i]->dest->index);
6556 fputs ("}\n", file);
6560 /* Dump loop related CFG information. */
6561 static void
6562 flow_loops_cfg_dump (loops, file)
6563 const struct loops *loops;
6564 FILE *file;
6566 int i;
6568 if (! loops->num || ! file || ! loops->cfg.dom)
6569 return;
6571 for (i = 0; i < n_basic_blocks; i++)
6573 edge succ;
6575 fprintf (file, ";; %d succs { ", i);
6576 for (succ = BASIC_BLOCK (i)->succ; succ; succ = succ->succ_next)
6577 fprintf (file, "%d ", succ->dest->index);
6578 flow_nodes_print ("} dom", loops->cfg.dom[i], file);
6582 /* Dump the DFS node order. */
6583 if (loops->cfg.dfs_order)
6585 fputs (";; DFS order: ", file);
6586 for (i = 0; i < n_basic_blocks; i++)
6587 fprintf (file, "%d ", loops->cfg.dfs_order[i]);
6588 fputs ("\n", file);
6593 /* Return non-zero if the nodes of LOOP are a subset of OUTER. */
6594 static int
6595 flow_loop_nested_p (outer, loop)
6596 struct loop *outer;
6597 struct loop *loop;
6599 return sbitmap_a_subset_b_p (loop->nodes, outer->nodes);
6603 /* Dump the loop information specified by LOOPS to the stream FILE. */
6604 void
6605 flow_loops_dump (loops, file, verbose)
6606 const struct loops *loops;
6607 FILE *file;
6608 int verbose;
6610 int i;
6611 int num_loops;
6613 num_loops = loops->num;
6614 if (! num_loops || ! file)
6615 return;
6617 fprintf (file, ";; %d loops found, %d levels\n",
6618 num_loops, loops->levels);
6620 for (i = 0; i < num_loops; i++)
6622 struct loop *loop = &loops->array[i];
6624 fprintf (file, ";; loop %d (%d to %d):\n;; header %d, latch %d, pre-header %d, depth %d, level %d, outer %ld\n",
6625 i, INSN_UID (loop->header->head), INSN_UID (loop->latch->end),
6626 loop->header->index, loop->latch->index,
6627 loop->pre_header ? loop->pre_header->index : -1,
6628 loop->depth, loop->level,
6629 (long) (loop->outer ? (loop->outer - loops->array) : -1));
6630 fprintf (file, ";; %d", loop->num_nodes);
6631 flow_nodes_print (" nodes", loop->nodes, file);
6632 fprintf (file, ";; %d", loop->num_exits);
6633 flow_exits_print (" exits", loop->exits, loop->num_exits, file);
6635 if (loop->shared)
6637 int j;
6639 for (j = 0; j < i; j++)
6641 struct loop *oloop = &loops->array[j];
6643 if (loop->header == oloop->header)
6645 int disjoint;
6646 int smaller;
6648 smaller = loop->num_nodes < oloop->num_nodes;
6650 /* If the union of LOOP and OLOOP is different from
6651 the larger of LOOP and OLOOP then LOOP and OLOOP
6652 must be disjoint. */
6653 disjoint = ! flow_loop_nested_p (smaller ? loop : oloop,
6654 smaller ? oloop : loop);
6655 fprintf (file, ";; loop header %d shared by loops %d, %d %s\n",
6656 loop->header->index, i, j,
6657 disjoint ? "disjoint" : "nested");
6662 if (verbose)
6664 /* Print diagnostics to compare our concept of a loop with
6665 what the loop notes say. */
6666 if (GET_CODE (PREV_INSN (loop->first->head)) != NOTE
6667 || NOTE_LINE_NUMBER (PREV_INSN (loop->first->head))
6668 != NOTE_INSN_LOOP_BEG)
6669 fprintf (file, ";; No NOTE_INSN_LOOP_BEG at %d\n",
6670 INSN_UID (PREV_INSN (loop->first->head)));
6671 if (GET_CODE (NEXT_INSN (loop->last->end)) != NOTE
6672 || NOTE_LINE_NUMBER (NEXT_INSN (loop->last->end))
6673 != NOTE_INSN_LOOP_END)
6674 fprintf (file, ";; No NOTE_INSN_LOOP_END at %d\n",
6675 INSN_UID (NEXT_INSN (loop->last->end)));
6679 if (verbose)
6680 flow_loops_cfg_dump (loops, file);
6684 /* Free all the memory allocated for LOOPS. */
6685 void
6686 flow_loops_free (loops)
6687 struct loops *loops;
6689 if (loops->array)
6691 int i;
6693 if (! loops->num)
6694 abort ();
6696 /* Free the loop descriptors. */
6697 for (i = 0; i < loops->num; i++)
6699 struct loop *loop = &loops->array[i];
6701 if (loop->nodes)
6702 sbitmap_free (loop->nodes);
6703 if (loop->exits)
6704 free (loop->exits);
6706 free (loops->array);
6707 loops->array = NULL;
6709 if (loops->cfg.dom)
6710 sbitmap_vector_free (loops->cfg.dom);
6711 if (loops->cfg.dfs_order)
6712 free (loops->cfg.dfs_order);
6714 sbitmap_free (loops->shared_headers);
6719 /* Find the exits from the loop using the bitmap of loop nodes NODES
6720 and store them in the array *EXITS. Return the number of exits from
6721 the loop. */
6722 static int
6723 flow_loop_exits_find (nodes, exits)
6724 const sbitmap nodes;
6725 edge **exits;
6727 edge e;
6728 int node;
6729 int num_exits;
6731 *exits = NULL;
6733 /* Check all nodes within the loop to see if there are any
6734 successors not in the loop. Note that a node may have multiple
6735 exiting edges. */
6736 num_exits = 0;
6737 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
6738 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
6740 basic_block dest = e->dest;
6742 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
6743 num_exits++;
6747 if (! num_exits)
6748 return 0;
6750 *exits = (edge *) xmalloc (num_exits * sizeof (edge));
6752 /* Store all exiting edges into an array. */
6753 num_exits = 0;
6754 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
6755 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
6757 basic_block dest = e->dest;
6759 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
6760 (*exits)[num_exits++] = e;
6764 return num_exits;
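/* An illustrative sketch, not part of the original source; the
   example_* name is hypothetical. The caller owns the array returned
   through *EXITS and must free it, as flow_loops_free above does for
   each loop's exits array. */
#if 0
static void
example_report_exits (nodes)
     sbitmap nodes;
{
  edge *exits;
  int num = flow_loop_exits_find (nodes, &exits);

  fprintf (stderr, ";; loop has %d exit edges\n", num);
  if (exits)
    free (exits);
}
#endif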
6768 /* Find the nodes contained within the loop with header HEADER and
6769 latch LATCH and store them in NODES. Return the number of nodes
6770 within the loop. */
6771 static int
6772 flow_loop_nodes_find (header, latch, nodes)
6773 basic_block header;
6774 basic_block latch;
6775 sbitmap nodes;
6777 basic_block *stack;
6778 int sp;
6779 int num_nodes = 0;
6781 stack = (basic_block *) xmalloc (n_basic_blocks * sizeof (basic_block));
6782 sp = 0;
6784 /* Start with only the loop header in the set of loop nodes. */
6785 sbitmap_zero (nodes);
6786 SET_BIT (nodes, header->index);
6787 num_nodes++;
6788 header->loop_depth++;
6790 /* Push the loop latch on to the stack. */
6791 if (! TEST_BIT (nodes, latch->index))
6793 SET_BIT (nodes, latch->index);
6794 latch->loop_depth++;
6795 num_nodes++;
6796 stack[sp++] = latch;
6799 while (sp)
6801 basic_block node;
6802 edge e;
6804 node = stack[--sp];
6805 for (e = node->pred; e; e = e->pred_next)
6807 basic_block ancestor = e->src;
6809 /* If this ancestor has not been marked as part of the loop, add it
6810 to the set of loop nodes and push it onto the stack. */
6811 if (ancestor != ENTRY_BLOCK_PTR
6812 && ! TEST_BIT (nodes, ancestor->index))
6814 SET_BIT (nodes, ancestor->index);
6815 ancestor->loop_depth++;
6816 num_nodes++;
6817 stack[sp++] = ancestor;
6821 free (stack);
6822 return num_nodes;
6826 /* Compute the depth first search order and store it in the array
6827 DFS_ORDER, tracking visited nodes in an internal bitmap. Return
6828 the number of nodes visited. */
6829 static int
6830 flow_depth_first_order_compute (dfs_order)
6831 int *dfs_order;
6833 edge e;
6834 edge *stack;
6835 int sp;
6836 int dfsnum = 0;
6837 sbitmap visited;
6839 /* Allocate stack for back-tracking up CFG. */
6840 stack = (edge *) xmalloc (n_basic_blocks * sizeof (edge));
6841 sp = 0;
6843 /* Allocate bitmap to track nodes that have been visited. */
6844 visited = sbitmap_alloc (n_basic_blocks);
6846 /* None of the nodes in the CFG have been visited yet. */
6847 sbitmap_zero (visited);
6849 /* Start with the first successor edge from the entry block. */
6850 e = ENTRY_BLOCK_PTR->succ;
6851 while (e)
6853 basic_block src = e->src;
6854 basic_block dest = e->dest;
6856 /* Mark that we have visited this node. */
6857 if (src != ENTRY_BLOCK_PTR)
6858 SET_BIT (visited, src->index);
6860 /* If this node has not been visited before, push the current
6861 edge on to the stack and proceed with the first successor
6862 edge of this node. */
6863 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index)
6864 && dest->succ)
6866 stack[sp++] = e;
6867 e = dest->succ;
6869 else
6871 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index)
6872 && ! dest->succ)
6874 /* DEST has no successors (for example, a non-returning
6875 function is called) so do not push the current edge
6876 but carry on with its next successor. */
6877 dfs_order[dest->index] = n_basic_blocks - ++dfsnum;
6878 SET_BIT (visited, dest->index);
6881 while (! e->succ_next && src != ENTRY_BLOCK_PTR)
6883 dfs_order[src->index] = n_basic_blocks - ++dfsnum;
6885 /* Pop edge off stack. */
6886 e = stack[--sp];
6887 src = e->src;
6889 e = e->succ_next;
6892 free (stack);
6893 sbitmap_free (visited);
6895 /* The number of nodes visited should not be greater than
6896 n_basic_blocks. */
6897 if (dfsnum > n_basic_blocks)
6898 abort ();
6900 /* There are some nodes left in the CFG that are unreachable; that should not happen at this point. */
6901 if (dfsnum < n_basic_blocks)
6902 abort ();
6903 return dfsnum;
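/* An illustrative sketch, not part of the original source; the
   example_* name is hypothetical. DFS_ORDER is indexed by block number
   and holds each block's position in the depth first search order,
   mirroring the way flow_loops_find below allocates and fills it. */
#if 0
static void
example_dump_dfs_order ()
{
  int *dfs_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
  int i;

  flow_depth_first_order_compute (dfs_order);
  for (i = 0; i < n_basic_blocks; i++)
    fprintf (stderr, ";; block %d has DFS number %d\n", i, dfs_order[i]);

  free (dfs_order);
}
#endif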
6907 /* Return the block for the pre-header of the loop with header
6908 HEADER where DOM specifies the dominator information. Return NULL if
6909 there is no pre-header. */
6910 static basic_block
6911 flow_loop_pre_header_find (header, dom)
6912 basic_block header;
6913 const sbitmap *dom;
6915 basic_block pre_header;
6916 edge e;
6918 /* If block P is a predecessor of the header and is the only such
6919 predecessor not dominated by the header, then it is the pre-header. */
6920 pre_header = NULL;
6921 for (e = header->pred; e; e = e->pred_next)
6923 basic_block node = e->src;
6925 if (node != ENTRY_BLOCK_PTR
6926 && ! TEST_BIT (dom[node->index], header->index))
6928 if (pre_header == NULL)
6929 pre_header = node;
6930 else
6932 /* There are multiple edges into the header from outside
6933 the loop so there is no pre-header block. */
6934 pre_header = NULL;
6935 break;
6939 return pre_header;
6943 /* Add LOOP to the loop hierarchy tree where PREVLOOP was the loop
6944 previously added. The insertion algorithm assumes that the loops
6945 are added in the order found by a depth first search of the CFG. */
6946 static void
6947 flow_loop_tree_node_add (prevloop, loop)
6948 struct loop *prevloop;
6949 struct loop *loop;
6952 if (flow_loop_nested_p (prevloop, loop))
6954 prevloop->inner = loop;
6955 loop->outer = prevloop;
6956 return;
6959 while (prevloop->outer)
6961 if (flow_loop_nested_p (prevloop->outer, loop))
6963 prevloop->next = loop;
6964 loop->outer = prevloop->outer;
6965 return;
6967 prevloop = prevloop->outer;
6970 prevloop->next = loop;
6971 loop->outer = NULL;
6975 /* Build the loop hierarchy tree for LOOPS. */
6976 static void
6977 flow_loops_tree_build (loops)
6978 struct loops *loops;
6980 int i;
6981 int num_loops;
6983 num_loops = loops->num;
6984 if (! num_loops)
6985 return;
6987 /* Root the loop hierarchy tree with the first loop found.
6988 Since we used a depth first search this should be an
6989 outermost loop. */
6990 loops->tree = &loops->array[0];
6991 loops->tree->outer = loops->tree->inner = loops->tree->next = NULL;
6993 /* Add the remaining loops to the tree. */
6994 for (i = 1; i < num_loops; i++)
6995 flow_loop_tree_node_add (&loops->array[i - 1], &loops->array[i]);
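/* An illustrative sketch, not part of the original source; the
   example_* name is hypothetical. The tree built above is threaded
   through the inner (first child) and next (sibling) pointers, so a
   recursive walk visits every loop along with its nesting. */
#if 0
static void
example_walk_loop_tree (loop)
     struct loop *loop;
{
  for (; loop; loop = loop->next)
    {
      fprintf (stderr, ";; loop with header %d, %d nodes\n",
               loop->header->index, loop->num_nodes);
      example_walk_loop_tree (loop->inner);
    }
}
#endif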
6999 /* Helper function to compute loop nesting depth and enclosed loop level
7000 for the natural loop specified by LOOP at the loop depth DEPTH.
7001 Returns the loop level. */
7002 static int
7003 flow_loop_level_compute (loop, depth)
7004 struct loop *loop;
7005 int depth;
7007 struct loop *inner;
7008 int level = 1;
7010 if (! loop)
7011 return 0;
7013 /* Traverse loop tree assigning depth and computing level as the
7014 maximum level of all the inner loops of this loop. The loop
7015 level is equivalent to the height of the loop in the loop tree
7016 and corresponds to the number of enclosed loop levels (including
7017 itself). */
7018 for (inner = loop->inner; inner; inner = inner->next)
7020 int ilevel;
7022 ilevel = flow_loop_level_compute (inner, depth + 1) + 1;
7024 if (ilevel > level)
7025 level = ilevel;
7027 loop->level = level;
7028 loop->depth = depth;
7029 return level;
7033 /* Compute the loop nesting depth and enclosed loop level for the loop
7034 hierarchy tree specified by LOOPS. Return the maximum enclosed loop
7035 level. */
7037 static int
7038 flow_loops_level_compute (loops)
7039 struct loops *loops;
7041 struct loop *loop;
7042 int level;
7043 int levels = 0;
7045 /* Traverse all the outer level loops. */
7046 for (loop = loops->tree; loop; loop = loop->next)
7048 level = flow_loop_level_compute (loop, 1);
7049 if (level > levels)
7050 levels = level;
7052 return levels;
7056 /* Find all the natural loops in the function, save them in the LOOPS
7057 structure, and recalculate the loop_depth information in the basic
7058 block structures. Return the number of natural loops found. */
7060 int
7061 flow_loops_find (loops)
7062 struct loops *loops;
7064 int i;
7065 int b;
7066 int num_loops;
7067 edge e;
7068 sbitmap headers;
7069 sbitmap *dom;
7070 int *dfs_order;
7072 loops->num = 0;
7073 loops->array = NULL;
7074 loops->tree = NULL;
7075 dfs_order = NULL;
7077 /* Taking care of this degenerate case makes the rest of
7078 this code simpler. */
7079 if (n_basic_blocks == 0)
7080 return 0;
7082 /* Compute the dominators. */
7083 dom = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
7084 compute_flow_dominators (dom, NULL);
7086 /* Count the number of loop edges (back edges). This should be the
7087 same as the number of natural loops. Also clear loop_depth; as we
7088 work from inner to outer in a loop nest, we call
7089 flow_loop_nodes_find, which will increment loop_depth for nodes
7090 within the current loop, which happens to enclose inner loops. */
7092 num_loops = 0;
7093 for (b = 0; b < n_basic_blocks; b++)
7095 BASIC_BLOCK (b)->loop_depth = 0;
7096 for (e = BASIC_BLOCK (b)->pred; e; e = e->pred_next)
7098 basic_block latch = e->src;
7100 /* Look for back edges where a predecessor is dominated
7101 by this block. A natural loop has a single entry
7102 node (header) that dominates all the nodes in the
7103 loop. It also has a single back edge to the header
7104 from a latch node. Note that multiple natural loops
7105 may share the same header. */
7106 if (latch != ENTRY_BLOCK_PTR && TEST_BIT (dom[latch->index], b))
7107 num_loops++;
7111 if (num_loops)
7113 /* Compute depth first search order of the CFG so that outer
7114 natural loops will be found before inner natural loops. */
7115 dfs_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
7116 flow_depth_first_order_compute (dfs_order);
7118 /* Allocate loop structures. */
7119 loops->array
7120 = (struct loop *) xcalloc (num_loops, sizeof (struct loop));
7122 headers = sbitmap_alloc (n_basic_blocks);
7123 sbitmap_zero (headers);
7125 loops->shared_headers = sbitmap_alloc (n_basic_blocks);
7126 sbitmap_zero (loops->shared_headers);
7128 /* Find and record information about all the natural loops
7129 in the CFG. */
7130 num_loops = 0;
7131 for (b = 0; b < n_basic_blocks; b++)
7133 basic_block header;
7135 /* Search the nodes of the CFG in DFS order so that we can find
7136 outer loops first. */
7137 header = BASIC_BLOCK (dfs_order[b]);
7139 /* Look for all the possible latch blocks for this header. */
7140 for (e = header->pred; e; e = e->pred_next)
7142 basic_block latch = e->src;
7144 /* Look for back edges where a predecessor is dominated
7145 by this block. A natural loop has a single entry
7146 node (header) that dominates all the nodes in the
7147 loop. It also has a single back edge to the header
7148 from a latch node. Note that multiple natural loops
7149 may share the same header. */
7150 if (latch != ENTRY_BLOCK_PTR
7151 && TEST_BIT (dom[latch->index], header->index))
7153 struct loop *loop;
7155 loop = loops->array + num_loops;
7157 loop->header = header;
7158 loop->latch = latch;
7160 /* Keep track of blocks that are loop headers so
7161 that we can tell which loops should be merged. */
7162 if (TEST_BIT (headers, header->index))
7163 SET_BIT (loops->shared_headers, header->index);
7164 SET_BIT (headers, header->index);
7166 /* Find nodes contained within the loop. */
7167 loop->nodes = sbitmap_alloc (n_basic_blocks);
7168 loop->num_nodes
7169 = flow_loop_nodes_find (header, latch, loop->nodes);
7171 /* Compute first and last blocks within the loop.
7172 These are often the same as the loop header and
7173 loop latch respectively, but this is not always
7174 the case. */
7175 loop->first
7176 = BASIC_BLOCK (sbitmap_first_set_bit (loop->nodes));
7177 loop->last
7178 = BASIC_BLOCK (sbitmap_last_set_bit (loop->nodes));
7180 /* Find edges which exit the loop. Note that a node
7181 may have several exit edges. */
7182 loop->num_exits
7183 = flow_loop_exits_find (loop->nodes, &loop->exits);
7185 /* Look to see if the loop has a pre-header node. */
7186 loop->pre_header
7187 = flow_loop_pre_header_find (header, dom);
7189 num_loops++;
7194 /* Natural loops with shared headers may either be disjoint or
7195 nested. Disjoint loops with shared headers cannot be inner
7196 loops and should be merged. For now just mark loops that share
7197 headers. */
7198 for (i = 0; i < num_loops; i++)
7199 if (TEST_BIT (loops->shared_headers, loops->array[i].header->index))
7200 loops->array[i].shared = 1;
7202 sbitmap_free (headers);
7205 loops->num = num_loops;
7207 /* Save CFG derived information to avoid recomputing it. */
7208 loops->cfg.dom = dom;
7209 loops->cfg.dfs_order = dfs_order;
7211 /* Build the loop hierarchy tree. */
7212 flow_loops_tree_build (loops);
7214 /* Assign the loop nesting depth and enclosed loop level for each
7215 loop. */
7216 loops->levels = flow_loops_level_compute (loops);
7218 return num_loops;
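/* An illustrative sketch, not part of the original source; the
   example_* name is hypothetical. This is how a caller typically
   drives the loop discovery above: find the loops, dump them, and
   release the memory once done. */
#if 0
static void
example_analyze_loops (dump)
     FILE *dump;
{
  struct loops loops;

  if (flow_loops_find (&loops) > 0)
    {
      flow_loops_dump (&loops, dump, 1);
      flow_loops_free (&loops);
    }
}
#endif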
7222 /* Return non-zero if edge E enters the header of LOOP from outside of LOOP. */
7223 int
7224 flow_loop_outside_edge_p (loop, e)
7225 const struct loop *loop;
7226 edge e;
7228 if (e->dest != loop->header)
7229 abort ();
7230 return (e->src == ENTRY_BLOCK_PTR)
7231 || ! TEST_BIT (loop->nodes, e->src->index);
7235 /* Clear LOG_LINKS fields of insns in a chain. */
7236 void
7237 clear_log_links (insns)
7238 rtx insns;
7240 rtx i;
7241 for (i = insns; i; i = NEXT_INSN (i))
7242 if (GET_RTX_CLASS (GET_CODE (i)) == 'i')
7243 LOG_LINKS (i) = 0;