(arm_comp_type_attributes): Simplify and comment tests on type attributes.
[official-gcc.git] / gcc / flow.c
bloba7217dbf486ff246e9e5b24b9b4b399311e3d7af
1 /* Data flow analysis for GNU compiler.
2 Copyright (C) 1987, 88, 92-99, 2000 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
22 /* This file contains the data flow analysis pass of the compiler. It
23 computes data flow information which tells combine_instructions
24 which insns to consider combining and controls register allocation.
26 Additional data flow information that is too bulky to record is
27 generated during the analysis, and is used at that time to create
28 autoincrement and autodecrement addressing.
30 The first step is dividing the function into basic blocks.
31 find_basic_blocks does this. Then life_analysis determines
32 where each register is live and where it is dead.
34 ** find_basic_blocks **
36 find_basic_blocks divides the current function's rtl into basic
37 blocks and constructs the CFG. The blocks are recorded in the
38 basic_block_info array; the CFG exists in the edge structures
39 referenced by the blocks.
41 find_basic_blocks also finds any unreachable loops and deletes them.
43 ** life_analysis **
45 life_analysis is called immediately after find_basic_blocks.
46 It uses the basic block information to determine where each
47 hard or pseudo register is live.
49 ** live-register info **
51 The information about where each register is live is in two parts:
52 the REG_NOTES of insns, and the vector basic_block->global_live_at_start.
54 basic_block->global_live_at_start has an element for each basic
55 block, and the element is a bit-vector with a bit for each hard or
56 pseudo register. The bit is 1 if the register is live at the
57 beginning of the basic block.
59 Two types of elements can be added to an insn's REG_NOTES.
60 A REG_DEAD note is added to an insn's REG_NOTES for any register
61 that meets both of two conditions: The value in the register is not
62 needed in subsequent insns and the insn does not replace the value in
63 the register (in the case of multi-word hard registers, the value in
64 each register must be replaced by the insn to avoid a REG_DEAD note).
66 In the vast majority of cases, an object in a REG_DEAD note will be
67 used somewhere in the insn. The (rare) exception to this is if an
68 insn uses a multi-word hard register and only some of the registers are
69 needed in subsequent insns. In that case, REG_DEAD notes will be
70 provided for those hard registers that are not subsequently needed.
71 Partial REG_DEAD notes of this type do not occur when an insn sets
72 only some of the hard registers used in such a multi-word operand;
73 omitting REG_DEAD notes for objects stored in an insn is optional and
74 the desire to do so does not justify the complexity of the partial
75 REG_DEAD notes.
77 REG_UNUSED notes are added for each register that is set by the insn
78 but is unused subsequently (if every register set by the insn is unused
79 and the insn does not reference memory or have some other side-effect,
80 the insn is deleted instead). If only part of a multi-word hard
81 register is used in a subsequent insn, REG_UNUSED notes are made for
82 the parts that will not be used.
84 To determine which registers are live after any insn, one can
85 start from the beginning of the basic block and scan insns, noting
86 which registers are set by each insn and which die there.
88 ** Other actions of life_analysis **
90 life_analysis sets up the LOG_LINKS fields of insns because the
91 information needed to do so is readily available.
93 life_analysis deletes insns whose only effect is to store a value
94 that is never used.
96 life_analysis notices cases where a reference to a register as
97 a memory address can be combined with a preceding or following
98 incrementation or decrementation of the register. The separate
99 instruction to increment or decrement is deleted and the address
100 is changed to a POST_INC or similar rtx.
102 Each time an incrementing or decrementing address is created,
103 a REG_INC element is added to the insn's REG_NOTES list.
105 life_analysis fills in certain vectors containing information about
106 register usage: REG_N_REFS, REG_N_DEATHS, REG_N_SETS, REG_LIVE_LENGTH,
107 REG_N_CALLS_CROSSED and REG_BASIC_BLOCK.
109 life_analysis sets current_function_sp_is_unchanging if the function
110 doesn't modify the stack pointer. */
112 /* TODO:
114 Split out from life_analysis:
115 - local property discovery (bb->local_live, bb->local_set)
116 - global property computation
117 - log links creation
118 - pre/post modify transformation
121 #include "config.h"
122 #include "system.h"
123 #include "tree.h"
124 #include "rtl.h"
125 #include "tm_p.h"
126 #include "basic-block.h"
127 #include "insn-config.h"
128 #include "regs.h"
129 #include "hard-reg-set.h"
130 #include "flags.h"
131 #include "output.h"
132 #include "function.h"
133 #include "except.h"
134 #include "toplev.h"
135 #include "recog.h"
136 #include "insn-flags.h"
137 #include "expr.h"
139 #include "obstack.h"
/* Obstack allocation hooks: route obstack chunk management through
   xmalloc/free, matching the rest of the compiler.  */
#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free

/* EXIT_IGNORE_STACK should be nonzero if, when returning from a function,
   the stack pointer does not matter.  The value is tested only in
   functions that have frame pointers.
   No definition is equivalent to always zero.  */
#ifndef EXIT_IGNORE_STACK
#define EXIT_IGNORE_STACK 0
#endif

/* Targets without an epilogue/prologue expander get a zero default so
   the HAVE_* tests below compile unconditionally.  */
#ifndef HAVE_epilogue
#define HAVE_epilogue 0
#endif

#ifndef HAVE_prologue
#define HAVE_prologue 0
#endif
161 /* The contents of the current function definition are allocated
162 in this obstack, and all are freed at the end of the function.
163 For top-level functions, this is temporary_obstack.
164 Separate obstacks are made for nested functions. */
166 extern struct obstack *function_obstack;
168 /* Number of basic blocks in the current function. */
170 int n_basic_blocks;
172 /* Number of edges in the current function. */
174 int n_edges;
176 /* The basic block array. */
178 varray_type basic_block_info;
180 /* The special entry and exit blocks. */
182 struct basic_block_def entry_exit_blocks[2]
183 = {{NULL, /* head */
184 NULL, /* end */
185 NULL, /* pred */
186 NULL, /* succ */
187 NULL, /* local_set */
188 NULL, /* global_live_at_start */
189 NULL, /* global_live_at_end */
190 NULL, /* aux */
191 ENTRY_BLOCK, /* index */
192 0, /* loop_depth */
193 -1, -1 /* eh_beg, eh_end */
196 NULL, /* head */
197 NULL, /* end */
198 NULL, /* pred */
199 NULL, /* succ */
200 NULL, /* local_set */
201 NULL, /* global_live_at_start */
202 NULL, /* global_live_at_end */
203 NULL, /* aux */
204 EXIT_BLOCK, /* index */
205 0, /* loop_depth */
206 -1, -1 /* eh_beg, eh_end */
210 /* Nonzero if the second flow pass has completed. */
211 int flow2_completed;
213 /* Maximum register number used in this function, plus one. */
215 int max_regno;
217 /* Indexed by n, giving various register information */
219 varray_type reg_n_info;
221 /* Size of the reg_n_info table. */
223 unsigned int reg_n_max;
225 /* Element N is the next insn that uses (hard or pseudo) register number N
226 within the current basic block; or zero, if there is no such insn.
227 This is valid only during the final backward scan in propagate_block. */
229 static rtx *reg_next_use;
231 /* Size of a regset for the current function,
232 in (1) bytes and (2) elements. */
234 int regset_bytes;
235 int regset_size;
237 /* Regset of regs live when calls to `setjmp'-like functions happen. */
238 /* ??? Does this exist only for the setjmp-clobbered warning message? */
240 regset regs_live_at_setjmp;
242 /* List made of EXPR_LIST rtx's which gives pairs of pseudo registers
243 that have to go in the same hard reg.
244 The first two regs in the list are a pair, and the next two
245 are another pair, etc. */
246 rtx regs_may_share;
248 /* Depth within loops of basic block being scanned for lifetime analysis,
249 plus one. This is the weight attached to references to registers. */
251 static int loop_depth;
253 /* During propagate_block, this is non-zero if the value of CC0 is live. */
255 static int cc0_live;
257 /* During propagate_block, this contains a list of all the MEMs we are
258 tracking for dead store elimination. */
260 static rtx mem_set_list;
262 /* Set of registers that may be eliminable. These are handled specially
263 in updating regs_ever_live. */
265 static HARD_REG_SET elim_reg_set;
267 /* The basic block structure for every insn, indexed by uid. */
269 varray_type basic_block_for_insn;
271 /* The labels mentioned in non-jump rtl. Valid during find_basic_blocks. */
272 /* ??? Should probably be using LABEL_NUSES instead. It would take a
273 bit of surgery to be able to use or co-opt the routines in jump. */
275 static rtx label_value_list;
277 /* Forward declarations */
278 static int count_basic_blocks PARAMS ((rtx));
279 static rtx find_basic_blocks_1 PARAMS ((rtx));
280 static void create_basic_block PARAMS ((int, rtx, rtx, rtx));
281 static void clear_edges PARAMS ((void));
282 static void make_edges PARAMS ((rtx));
283 static void make_label_edge PARAMS ((sbitmap *, basic_block,
284 rtx, int));
285 static void make_eh_edge PARAMS ((sbitmap *, eh_nesting_info *,
286 basic_block, rtx, int));
287 static void mark_critical_edges PARAMS ((void));
288 static void move_stray_eh_region_notes PARAMS ((void));
289 static void record_active_eh_regions PARAMS ((rtx));
291 static void commit_one_edge_insertion PARAMS ((edge));
293 static void delete_unreachable_blocks PARAMS ((void));
294 static void delete_eh_regions PARAMS ((void));
295 static int can_delete_note_p PARAMS ((rtx));
296 static int delete_block PARAMS ((basic_block));
297 static void expunge_block PARAMS ((basic_block));
298 static int can_delete_label_p PARAMS ((rtx));
299 static int merge_blocks_move_predecessor_nojumps PARAMS ((basic_block,
300 basic_block));
301 static int merge_blocks_move_successor_nojumps PARAMS ((basic_block,
302 basic_block));
303 static void merge_blocks_nomove PARAMS ((basic_block, basic_block));
304 static int merge_blocks PARAMS ((edge,basic_block,basic_block));
305 static void try_merge_blocks PARAMS ((void));
306 static void tidy_fallthru_edge PARAMS ((edge,basic_block,basic_block));
307 static void tidy_fallthru_edges PARAMS ((void));
308 static int verify_wide_reg_1 PARAMS ((rtx *, void *));
309 static void verify_wide_reg PARAMS ((int, rtx, rtx));
310 static void verify_local_live_at_start PARAMS ((regset, basic_block));
311 static int set_noop_p PARAMS ((rtx));
312 static int noop_move_p PARAMS ((rtx));
313 static void delete_noop_moves PARAMS ((rtx));
314 static void notice_stack_pointer_modification_1 PARAMS ((rtx, rtx, void *));
315 static void notice_stack_pointer_modification PARAMS ((rtx));
316 static void mark_reg PARAMS ((rtx, void *));
317 static void mark_regs_live_at_end PARAMS ((regset));
318 static void calculate_global_regs_live PARAMS ((sbitmap, sbitmap, int));
319 static void propagate_block PARAMS ((basic_block, regset,
320 regset, int));
321 static int insn_dead_p PARAMS ((rtx, regset, int, rtx));
322 static int libcall_dead_p PARAMS ((rtx, regset, rtx, rtx));
323 static void mark_set_regs PARAMS ((regset, regset, rtx,
324 rtx, regset, int));
325 static void mark_set_1 PARAMS ((regset, regset, rtx,
326 rtx, regset, int));
327 #ifdef AUTO_INC_DEC
328 static void find_auto_inc PARAMS ((regset, rtx, rtx));
329 static int try_pre_increment_1 PARAMS ((rtx));
330 static int try_pre_increment PARAMS ((rtx, rtx, HOST_WIDE_INT));
331 #endif
332 static void mark_used_regs PARAMS ((regset, regset, rtx, int, rtx));
333 void dump_flow_info PARAMS ((FILE *));
334 void debug_flow_info PARAMS ((void));
335 static void dump_edge_info PARAMS ((FILE *, edge, int));
337 static void count_reg_sets_1 PARAMS ((rtx));
338 static void count_reg_sets PARAMS ((rtx));
339 static void count_reg_references PARAMS ((rtx));
340 static void invalidate_mems_from_autoinc PARAMS ((rtx));
341 static void remove_fake_successors PARAMS ((basic_block));
342 static void flow_nodes_print PARAMS ((const char *, const sbitmap, FILE *));
343 static void flow_exits_print PARAMS ((const char *, const edge *, int, FILE *));
344 static void flow_loops_cfg_dump PARAMS ((const struct loops *, FILE *));
345 static int flow_loop_nested_p PARAMS ((struct loop *, struct loop *));
346 static int flow_loop_exits_find PARAMS ((const sbitmap, edge **));
347 static int flow_loop_nodes_find PARAMS ((basic_block, basic_block, sbitmap));
348 static int flow_depth_first_order_compute PARAMS ((int *));
349 static basic_block flow_loop_pre_header_find PARAMS ((basic_block, const sbitmap *));
350 static void flow_loop_tree_node_add PARAMS ((struct loop *, struct loop *));
351 static void flow_loops_tree_build PARAMS ((struct loops *));
352 static int flow_loop_level_compute PARAMS ((struct loop *, int));
353 static int flow_loops_level_compute PARAMS ((struct loops *));
354 static basic_block get_common_dest PARAMS ((basic_block, basic_block));
355 static basic_block chain_reorder_blocks PARAMS ((edge, basic_block));
356 static void make_reorder_chain PARAMS ((basic_block));
357 static void fixup_reorder_chain PARAMS ((void));
359 /* This function is always defined so it can be called from the
360 debugger, and it is declared extern so we don't get warnings about
361 it being unused. */
362 void verify_flow_info PARAMS ((void));
363 int flow_loop_outside_edge_p PARAMS ((const struct loop *, edge));
365 /* Find basic blocks of the current function.
366 F is the first insn of the function and NREGS the number of register
367 numbers in use. */
369 void
370 find_basic_blocks (f, nregs, file)
371 rtx f;
372 int nregs ATTRIBUTE_UNUSED;
373 FILE *file ATTRIBUTE_UNUSED;
375 int max_uid;
377 /* Flush out existing data. */
378 if (basic_block_info != NULL)
380 int i;
382 clear_edges ();
384 /* Clear bb->aux on all extant basic blocks. We'll use this as a
385 tag for reuse during create_basic_block, just in case some pass
386 copies around basic block notes improperly. */
387 for (i = 0; i < n_basic_blocks; ++i)
388 BASIC_BLOCK (i)->aux = NULL;
390 VARRAY_FREE (basic_block_info);
393 n_basic_blocks = count_basic_blocks (f);
395 /* Size the basic block table. The actual structures will be allocated
396 by find_basic_blocks_1, since we want to keep the structure pointers
397 stable across calls to find_basic_blocks. */
398 /* ??? This whole issue would be much simpler if we called find_basic_blocks
399 exactly once, and thereafter we don't have a single long chain of
400 instructions at all until close to the end of compilation when we
401 actually lay them out. */
403 VARRAY_BB_INIT (basic_block_info, n_basic_blocks, "basic_block_info");
405 label_value_list = find_basic_blocks_1 (f);
407 /* Record the block to which an insn belongs. */
408 /* ??? This should be done another way, by which (perhaps) a label is
409 tagged directly with the basic block that it starts. It is used for
410 more than that currently, but IMO that is the only valid use. */
412 max_uid = get_max_uid ();
413 #ifdef AUTO_INC_DEC
414 /* Leave space for insns life_analysis makes in some cases for auto-inc.
415 These cases are rare, so we don't need too much space. */
416 max_uid += max_uid / 10;
417 #endif
419 compute_bb_for_insn (max_uid);
421 /* Discover the edges of our cfg. */
422 record_active_eh_regions (f);
423 make_edges (label_value_list);
425 /* Do very simple cleanup now, for the benefit of code that runs between
426 here and cleanup_cfg, e.g. thread_prologue_and_epilogue_insns. */
427 tidy_fallthru_edges ();
429 mark_critical_edges ();
431 #ifdef ENABLE_CHECKING
432 verify_flow_info ();
433 #endif
436 /* Count the basic blocks of the function. */
438 static int
439 count_basic_blocks (f)
440 rtx f;
442 register rtx insn;
443 register RTX_CODE prev_code;
444 register int count = 0;
445 int eh_region = 0;
446 int call_had_abnormal_edge = 0;
447 rtx prev_call = NULL_RTX;
449 prev_code = JUMP_INSN;
450 for (insn = f; insn; insn = NEXT_INSN (insn))
452 register RTX_CODE code = GET_CODE (insn);
454 if (code == CODE_LABEL
455 || (GET_RTX_CLASS (code) == 'i'
456 && (prev_code == JUMP_INSN
457 || prev_code == BARRIER
458 || (prev_code == CALL_INSN && call_had_abnormal_edge))))
460 count++;
463 /* Record whether this call created an edge. */
464 if (code == CALL_INSN)
466 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
467 int region = (note ? XWINT (XEXP (note, 0), 0) : 1);
468 prev_call = insn;
469 call_had_abnormal_edge = 0;
471 /* If there is a specified EH region, we have an edge. */
472 if (eh_region && region > 0)
473 call_had_abnormal_edge = 1;
474 else
476 /* If there is a nonlocal goto label and the specified
477 region number isn't -1, we have an edge. (0 means
478 no throw, but might have a nonlocal goto). */
479 if (nonlocal_goto_handler_labels && region >= 0)
480 call_had_abnormal_edge = 1;
483 else if (code != NOTE)
484 prev_call = NULL_RTX;
486 if (code != NOTE)
487 prev_code = code;
488 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
489 ++eh_region;
490 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
491 --eh_region;
495 /* The rest of the compiler works a bit smoother when we don't have to
496 check for the edge case of do-nothing functions with no basic blocks. */
497 if (count == 0)
499 emit_insn (gen_rtx_USE (VOIDmode, const0_rtx));
500 count = 1;
503 return count;
506 /* Find all basic blocks of the function whose first insn is F.
508 Collect and return a list of labels whose addresses are taken. This
509 will be used in make_edges for use with computed gotos. */
511 static rtx
512 find_basic_blocks_1 (f)
513 rtx f;
515 register rtx insn, next;
516 int call_has_abnormal_edge = 0;
517 int i = 0;
518 rtx bb_note = NULL_RTX;
519 rtx eh_list = NULL_RTX;
520 rtx label_value_list = NULL_RTX;
521 rtx head = NULL_RTX;
522 rtx end = NULL_RTX;
524 /* We process the instructions in a slightly different way than we did
525 previously. This is so that we see a NOTE_BASIC_BLOCK after we have
526 closed out the previous block, so that it gets attached at the proper
527 place. Since this form should be equivalent to the previous,
528 count_basic_blocks continues to use the old form as a check. */
530 for (insn = f; insn; insn = next)
532 enum rtx_code code = GET_CODE (insn);
534 next = NEXT_INSN (insn);
536 if (code == CALL_INSN)
538 /* Record whether this call created an edge. */
539 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
540 int region = (note ? XWINT (XEXP (note, 0), 0) : 1);
541 call_has_abnormal_edge = 0;
543 /* If there is an EH region, we have an edge. */
544 if (eh_list && region > 0)
545 call_has_abnormal_edge = 1;
546 else
548 /* If there is a nonlocal goto label and the specified
549 region number isn't -1, we have an edge. (0 means
550 no throw, but might have a nonlocal goto). */
551 if (nonlocal_goto_handler_labels && region >= 0)
552 call_has_abnormal_edge = 1;
556 switch (code)
558 case NOTE:
560 int kind = NOTE_LINE_NUMBER (insn);
562 /* Keep a LIFO list of the currently active exception notes. */
563 if (kind == NOTE_INSN_EH_REGION_BEG)
564 eh_list = alloc_INSN_LIST (insn, eh_list);
565 else if (kind == NOTE_INSN_EH_REGION_END)
567 rtx t = eh_list;
568 eh_list = XEXP (eh_list, 1);
569 free_INSN_LIST_node (t);
572 /* Look for basic block notes with which to keep the
573 basic_block_info pointers stable. Unthread the note now;
574 we'll put it back at the right place in create_basic_block.
575 Or not at all if we've already found a note in this block. */
576 else if (kind == NOTE_INSN_BASIC_BLOCK)
578 if (bb_note == NULL_RTX)
579 bb_note = insn;
580 next = flow_delete_insn (insn);
583 break;
586 case CODE_LABEL:
587 /* A basic block starts at a label. If we've closed one off due
588 to a barrier or some such, no need to do it again. */
589 if (head != NULL_RTX)
591 /* While we now have edge lists with which other portions of
592 the compiler might determine a call ending a basic block
593 does not imply an abnormal edge, it will be a bit before
594 everything can be updated. So continue to emit a noop at
595 the end of such a block. */
596 if (GET_CODE (end) == CALL_INSN)
598 rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
599 end = emit_insn_after (nop, end);
602 create_basic_block (i++, head, end, bb_note);
603 bb_note = NULL_RTX;
605 head = end = insn;
606 break;
608 case JUMP_INSN:
609 /* A basic block ends at a jump. */
610 if (head == NULL_RTX)
611 head = insn;
612 else
614 /* ??? Make a special check for table jumps. The way this
615 happens is truly and amazingly gross. We are about to
616 create a basic block that contains just a code label and
617 an addr*vec jump insn. Worse, an addr_diff_vec creates
618 its own natural loop.
620 Prevent this bit of brain damage, pasting things together
621 correctly in make_edges.
623 The correct solution involves emitting the table directly
624 on the tablejump instruction as a note, or JUMP_LABEL. */
626 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
627 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
629 head = end = NULL;
630 n_basic_blocks--;
631 break;
634 end = insn;
635 goto new_bb_inclusive;
637 case BARRIER:
638 /* A basic block ends at a barrier. It may be that an unconditional
639 jump already closed the basic block -- no need to do it again. */
640 if (head == NULL_RTX)
641 break;
643 /* While we now have edge lists with which other portions of the
644 compiler might determine a call ending a basic block does not
645 imply an abnormal edge, it will be a bit before everything can
646 be updated. So continue to emit a noop at the end of such a
647 block. */
648 if (GET_CODE (end) == CALL_INSN)
650 rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
651 end = emit_insn_after (nop, end);
653 goto new_bb_exclusive;
655 case CALL_INSN:
656 /* A basic block ends at a call that can either throw or
657 do a non-local goto. */
658 if (call_has_abnormal_edge)
660 new_bb_inclusive:
661 if (head == NULL_RTX)
662 head = insn;
663 end = insn;
665 new_bb_exclusive:
666 create_basic_block (i++, head, end, bb_note);
667 head = end = NULL_RTX;
668 bb_note = NULL_RTX;
669 break;
671 /* FALLTHRU */
673 default:
674 if (GET_RTX_CLASS (code) == 'i')
676 if (head == NULL_RTX)
677 head = insn;
678 end = insn;
680 break;
683 if (GET_RTX_CLASS (code) == 'i')
685 rtx note;
687 /* Make a list of all labels referred to other than by jumps
688 (which just don't have the REG_LABEL notes).
690 Make a special exception for labels followed by an ADDR*VEC,
691 as this would be a part of the tablejump setup code.
693 Make a special exception for the eh_return_stub_label, which
694 we know isn't part of any otherwise visible control flow. */
696 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
697 if (REG_NOTE_KIND (note) == REG_LABEL)
699 rtx lab = XEXP (note, 0), next;
701 if (lab == eh_return_stub_label)
703 else if ((next = next_nonnote_insn (lab)) != NULL
704 && GET_CODE (next) == JUMP_INSN
705 && (GET_CODE (PATTERN (next)) == ADDR_VEC
706 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
708 else
709 label_value_list
710 = alloc_EXPR_LIST (0, XEXP (note, 0), label_value_list);
715 if (head != NULL_RTX)
716 create_basic_block (i++, head, end, bb_note);
718 if (i != n_basic_blocks)
719 abort ();
721 return label_value_list;
724 /* Tidy the CFG by deleting unreachable code and whatnot. */
726 void
727 cleanup_cfg (f)
728 rtx f;
730 delete_unreachable_blocks ();
731 move_stray_eh_region_notes ();
732 record_active_eh_regions (f);
733 try_merge_blocks ();
734 mark_critical_edges ();
736 /* Kill the data we won't maintain. */
737 label_value_list = NULL_RTX;
740 /* Create a new basic block consisting of the instructions between
741 HEAD and END inclusive. Reuses the note and basic block struct
742 in BB_NOTE, if any. */
744 static void
745 create_basic_block (index, head, end, bb_note)
746 int index;
747 rtx head, end, bb_note;
749 basic_block bb;
751 if (bb_note
752 && ! RTX_INTEGRATED_P (bb_note)
753 && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
754 && bb->aux == NULL)
756 /* If we found an existing note, thread it back onto the chain. */
758 if (GET_CODE (head) == CODE_LABEL)
759 add_insn_after (bb_note, head);
760 else
762 add_insn_before (bb_note, head);
763 head = bb_note;
766 else
768 /* Otherwise we must create a note and a basic block structure.
769 Since we allow basic block structs in rtl, give the struct
770 the same lifetime by allocating it off the function obstack
771 rather than using malloc. */
773 bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
774 memset (bb, 0, sizeof (*bb));
776 if (GET_CODE (head) == CODE_LABEL)
777 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
778 else
780 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
781 head = bb_note;
783 NOTE_BASIC_BLOCK (bb_note) = bb;
786 /* Always include the bb note in the block. */
787 if (NEXT_INSN (end) == bb_note)
788 end = bb_note;
790 bb->head = head;
791 bb->end = end;
792 bb->index = index;
793 BASIC_BLOCK (index) = bb;
795 /* Tag the block so that we know it has been used when considering
796 other basic block notes. */
797 bb->aux = bb;
800 /* Records the basic block struct in BB_FOR_INSN, for every instruction
801 indexed by INSN_UID. MAX is the size of the array. */
803 void
804 compute_bb_for_insn (max)
805 int max;
807 int i;
809 if (basic_block_for_insn)
810 VARRAY_FREE (basic_block_for_insn);
811 VARRAY_BB_INIT (basic_block_for_insn, max, "basic_block_for_insn");
813 for (i = 0; i < n_basic_blocks; ++i)
815 basic_block bb = BASIC_BLOCK (i);
816 rtx insn, end;
818 end = bb->end;
819 insn = bb->head;
820 while (1)
822 int uid = INSN_UID (insn);
823 if (uid < max)
824 VARRAY_BB (basic_block_for_insn, uid) = bb;
825 if (insn == end)
826 break;
827 insn = NEXT_INSN (insn);
832 /* Free the memory associated with the edge structures. */
834 static void
835 clear_edges ()
837 int i;
838 edge n, e;
840 for (i = 0; i < n_basic_blocks; ++i)
842 basic_block bb = BASIC_BLOCK (i);
844 for (e = bb->succ; e ; e = n)
846 n = e->succ_next;
847 free (e);
850 bb->succ = 0;
851 bb->pred = 0;
854 for (e = ENTRY_BLOCK_PTR->succ; e ; e = n)
856 n = e->succ_next;
857 free (e);
860 ENTRY_BLOCK_PTR->succ = 0;
861 EXIT_BLOCK_PTR->pred = 0;
863 n_edges = 0;
866 /* Identify the edges between basic blocks.
868 NONLOCAL_LABEL_LIST is a list of non-local labels in the function. Blocks
869 that are otherwise unreachable may be reachable with a non-local goto.
871 BB_EH_END is an array indexed by basic block number in which we record
872 the list of exception regions active at the end of the basic block. */
874 static void
875 make_edges (label_value_list)
876 rtx label_value_list;
878 int i;
879 eh_nesting_info *eh_nest_info = init_eh_nesting_info ();
880 sbitmap *edge_cache = NULL;
882 /* Assume no computed jump; revise as we create edges. */
883 current_function_has_computed_jump = 0;
885 /* Heavy use of computed goto in machine-generated code can lead to
886 nearly fully-connected CFGs. In that case we spend a significant
887 amount of time searching the edge lists for duplicates. */
888 if (forced_labels || label_value_list)
890 edge_cache = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
891 sbitmap_vector_zero (edge_cache, n_basic_blocks);
894 /* By nature of the way these get numbered, block 0 is always the entry. */
895 make_edge (edge_cache, ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU);
897 for (i = 0; i < n_basic_blocks; ++i)
899 basic_block bb = BASIC_BLOCK (i);
900 rtx insn, x;
901 enum rtx_code code;
902 int force_fallthru = 0;
904 /* Examine the last instruction of the block, and discover the
905 ways we can leave the block. */
907 insn = bb->end;
908 code = GET_CODE (insn);
910 /* A branch. */
911 if (code == JUMP_INSN)
913 rtx tmp;
915 /* ??? Recognize a tablejump and do the right thing. */
916 if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
917 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
918 && GET_CODE (tmp) == JUMP_INSN
919 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
920 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
922 rtvec vec;
923 int j;
925 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
926 vec = XVEC (PATTERN (tmp), 0);
927 else
928 vec = XVEC (PATTERN (tmp), 1);
930 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
931 make_label_edge (edge_cache, bb,
932 XEXP (RTVEC_ELT (vec, j), 0), 0);
934 /* Some targets (eg, ARM) emit a conditional jump that also
935 contains the out-of-range target. Scan for these and
936 add an edge if necessary. */
937 if ((tmp = single_set (insn)) != NULL
938 && SET_DEST (tmp) == pc_rtx
939 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
940 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
941 make_label_edge (edge_cache, bb,
942 XEXP (XEXP (SET_SRC (tmp), 2), 0), 0);
944 #ifdef CASE_DROPS_THROUGH
945 /* Silly VAXen. The ADDR_VEC is going to be in the way of
946 us naturally detecting fallthru into the next block. */
947 force_fallthru = 1;
948 #endif
951 /* If this is a computed jump, then mark it as reaching
952 everything on the label_value_list and forced_labels list. */
953 else if (computed_jump_p (insn))
955 current_function_has_computed_jump = 1;
957 for (x = label_value_list; x; x = XEXP (x, 1))
958 make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
960 for (x = forced_labels; x; x = XEXP (x, 1))
961 make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
964 /* Returns create an exit out. */
965 else if (returnjump_p (insn))
966 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);
968 /* Otherwise, we have a plain conditional or unconditional jump. */
969 else
971 if (! JUMP_LABEL (insn))
972 abort ();
973 make_label_edge (edge_cache, bb, JUMP_LABEL (insn), 0);
977 /* If this is a CALL_INSN, then mark it as reaching the active EH
978 handler for this CALL_INSN. If we're handling asynchronous
979 exceptions then any insn can reach any of the active handlers.
981 Also mark the CALL_INSN as reaching any nonlocal goto handler. */
983 if (code == CALL_INSN || asynchronous_exceptions)
985 /* If there's an EH region active at the end of a block,
986 add the appropriate edges. */
987 if (bb->eh_end >= 0)
988 make_eh_edge (edge_cache, eh_nest_info, bb, insn, bb->eh_end);
990 /* If we have asynchronous exceptions, do the same for *all*
991 exception regions active in the block. */
992 if (asynchronous_exceptions
993 && bb->eh_beg != bb->eh_end)
995 if (bb->eh_beg >= 0)
996 make_eh_edge (edge_cache, eh_nest_info, bb,
997 NULL_RTX, bb->eh_beg);
999 for (x = bb->head; x != bb->end; x = NEXT_INSN (x))
1000 if (GET_CODE (x) == NOTE
1001 && (NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_BEG
1002 || NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_END))
1004 int region = NOTE_EH_HANDLER (x);
1005 make_eh_edge (edge_cache, eh_nest_info, bb,
1006 NULL_RTX, region);
1010 if (code == CALL_INSN && nonlocal_goto_handler_labels)
1012 /* ??? This could be made smarter: in some cases it's possible
1013 to tell that certain calls will not do a nonlocal goto.
1015 For example, if the nested functions that do the nonlocal
1016 gotos do not have their addresses taken, then only calls to
1017 those functions or to other nested functions that use them
1018 could possibly do nonlocal gotos. */
1019 /* We do know that a REG_EH_REGION note with a value less
1020 than 0 is guaranteed not to perform a non-local goto. */
1021 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1022 if (!note || XINT (XEXP (note, 0), 0) >= 0)
1023 for (x = nonlocal_goto_handler_labels; x ; x = XEXP (x, 1))
1024 make_label_edge (edge_cache, bb, XEXP (x, 0),
1025 EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
1029 /* We know something about the structure of the function __throw in
1030 libgcc2.c. It is the only function that ever contains eh_stub
1031 labels. It modifies its return address so that the last block
1032 returns to one of the eh_stub labels within it. So we have to
1033 make additional edges in the flow graph. */
1034 if (i + 1 == n_basic_blocks && eh_return_stub_label != 0)
1035 make_label_edge (edge_cache, bb, eh_return_stub_label, EDGE_EH);
1037 /* Find out if we can drop through to the next block. */
1038 insn = next_nonnote_insn (insn);
1039 if (!insn || (i + 1 == n_basic_blocks && force_fallthru))
1040 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
1041 else if (i + 1 < n_basic_blocks)
1043 rtx tmp = BLOCK_HEAD (i + 1);
1044 if (GET_CODE (tmp) == NOTE)
1045 tmp = next_nonnote_insn (tmp);
1046 if (force_fallthru || insn == tmp)
1047 make_edge (edge_cache, bb, BASIC_BLOCK (i + 1), EDGE_FALLTHRU);
1051 free_eh_nesting_info (eh_nest_info);
1052 if (edge_cache)
1053 sbitmap_vector_free (edge_cache);
1056 /* Create an edge between two basic blocks. FLAGS are auxiliary information
1057 about the edge that is accumulated between calls. */
1059 void
1060 make_edge (edge_cache, src, dst, flags)
1061 sbitmap *edge_cache;
1062 basic_block src, dst;
1063 int flags;
1065 int use_edge_cache;
1066 edge e;
1068 /* Don't bother with edge cache for ENTRY or EXIT; there aren't that
1069 many edges to them, and we didn't allocate memory for it. */
1070 use_edge_cache = (edge_cache
1071 && src != ENTRY_BLOCK_PTR
1072 && dst != EXIT_BLOCK_PTR);
1074 /* Make sure we don't add duplicate edges. */
1075 if (! use_edge_cache || TEST_BIT (edge_cache[src->index], dst->index))
1076 for (e = src->succ; e ; e = e->succ_next)
1077 if (e->dest == dst)
1079 e->flags |= flags;
1080 return;
1083 e = (edge) xcalloc (1, sizeof (*e));
1084 n_edges++;
1086 e->succ_next = src->succ;
1087 e->pred_next = dst->pred;
1088 e->src = src;
1089 e->dest = dst;
1090 e->flags = flags;
1092 src->succ = e;
1093 dst->pred = e;
1095 if (use_edge_cache)
1096 SET_BIT (edge_cache[src->index], dst->index);
1099 /* Create an edge from a basic block to a label. */
1101 static void
1102 make_label_edge (edge_cache, src, label, flags)
1103 sbitmap *edge_cache;
1104 basic_block src;
1105 rtx label;
1106 int flags;
1108 if (GET_CODE (label) != CODE_LABEL)
1109 abort ();
1111 /* If the label was never emitted, this insn is junk, but avoid a
1112 crash trying to refer to BLOCK_FOR_INSN (label). This can happen
1113 as a result of a syntax error and a diagnostic has already been
1114 printed. */
1116 if (INSN_UID (label) == 0)
1117 return;
1119 make_edge (edge_cache, src, BLOCK_FOR_INSN (label), flags);
1122 /* Create the edges generated by INSN in REGION. */
1124 static void
1125 make_eh_edge (edge_cache, eh_nest_info, src, insn, region)
1126 sbitmap *edge_cache;
1127 eh_nesting_info *eh_nest_info;
1128 basic_block src;
1129 rtx insn;
1130 int region;
1132 handler_info **handler_list;
1133 int num, is_call;
1135 is_call = (insn && GET_CODE (insn) == CALL_INSN ? EDGE_ABNORMAL_CALL : 0);
1136 num = reachable_handlers (region, eh_nest_info, insn, &handler_list);
1137 while (--num >= 0)
1139 make_label_edge (edge_cache, src, handler_list[num]->handler_label,
1140 EDGE_ABNORMAL | EDGE_EH | is_call);
1144 /* EH_REGION notes appearing between basic blocks is ambiguous, and even
1145 dangerous if we intend to move basic blocks around. Move such notes
1146 into the following block. */
1148 static void
1149 move_stray_eh_region_notes ()
1151 int i;
1152 basic_block b1, b2;
1154 if (n_basic_blocks < 2)
1155 return;
/* Walk adjacent block pairs (B1, B2) from the end of the function
   backwards; B2 is always the block immediately following B1. */
1157 b2 = BASIC_BLOCK (n_basic_blocks - 1);
1158 for (i = n_basic_blocks - 2; i >= 0; --i, b2 = b1)
1160 rtx insn, next, list = NULL_RTX;
1162 b1 = BASIC_BLOCK (i);
/* Scan the insns that lie strictly between B1's end and B2's head,
   pulling out any EH region begin/end notes found there. */
1163 for (insn = NEXT_INSN (b1->end); insn != b2->head; insn = next)
1165 next = NEXT_INSN (insn);
1166 if (GET_CODE (insn) == NOTE
1167 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG
1168 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
1170 /* Unlink from the insn chain. */
1171 NEXT_INSN (PREV_INSN (insn)) = next;
1172 PREV_INSN (next) = PREV_INSN (insn);
1174 /* Queue it.  Pushing on the front reverses the order; the
   reinsertion loop below (after a fixed insertion point)
   reverses it again, restoring the original order. */
1175 NEXT_INSN (insn) = list;
1176 list = insn;
1180 if (list == NULL_RTX)
1181 continue;
1183 /* Find where to insert these things.  Skip past a leading label so
   the notes land inside B2 proper. */
1184 insn = b2->head;
1185 if (GET_CODE (insn) == CODE_LABEL)
1186 insn = NEXT_INSN (insn);
/* Reinsert each queued note immediately after INSN. */
1188 while (list)
1190 next = NEXT_INSN (list);
1191 add_insn_after (list, insn);
1192 list = next;
1197 /* Recompute eh_beg/eh_end for each basic block. */
1199 static void
1200 record_active_eh_regions (f)
1201 rtx f;
/* EH_LIST is a stack (an INSN_LIST) of the currently open EH region
   begin notes; its head is the innermost active region.  F is the
   first insn of the function. */
1203 rtx insn, eh_list = NULL_RTX;
1204 int i = 0;
1205 basic_block bb = BASIC_BLOCK (0);
1207 for (insn = f; insn ; insn = NEXT_INSN (insn))
/* On entry to a block, record the innermost region active there,
   or -1 if none is active. */
1209 if (bb->head == insn)
1210 bb->eh_beg = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);
1212 if (GET_CODE (insn) == NOTE)
1214 int kind = NOTE_LINE_NUMBER (insn);
1215 if (kind == NOTE_INSN_EH_REGION_BEG)
1216 eh_list = alloc_INSN_LIST (insn, eh_list);
1217 else if (kind == NOTE_INSN_EH_REGION_END)
/* Pop the innermost region.  Assumes the region notes are
   properly nested, so EH_LIST is non-null here. */
1219 rtx t = XEXP (eh_list, 1);
1220 free_INSN_LIST_node (eh_list);
1221 eh_list = t;
/* At the end of a block, record the region still active, then
   advance to the next block; stop once all blocks are done. */
1225 if (bb->end == insn)
1227 bb->eh_end = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);
1228 i += 1;
1229 if (i == n_basic_blocks)
1230 break;
1231 bb = BASIC_BLOCK (i);
1236 /* Identify critical edges and set the bits appropriately. */
1238 static void
1239 mark_critical_edges ()
1241 int i, n = n_basic_blocks;
1242 basic_block bb;
1244 /* We begin with the entry block. This is not terribly important now,
1245 but could be if a front end (Fortran) implemented alternate entry
1246 points. */
1247 bb = ENTRY_BLOCK_PTR;
1248 i = -1;
1250 while (1)
1252 edge e;
1254 /* (1) Critical edges must have a source with multiple successors. */
1255 if (bb->succ && bb->succ->succ_next)
1257 for (e = bb->succ; e ; e = e->succ_next)
1259 /* (2) Critical edges must have a destination with multiple
1260 predecessors. Note that we know there is at least one
1261 predecessor -- the edge we followed to get here. */
1262 if (e->dest->pred->pred_next)
1263 e->flags |= EDGE_CRITICAL;
1264 else
1265 e->flags &= ~EDGE_CRITICAL;
1268 else
1270 for (e = bb->succ; e ; e = e->succ_next)
1271 e->flags &= ~EDGE_CRITICAL;
1274 if (++i >= n)
1275 break;
1276 bb = BASIC_BLOCK (i);
1280 /* Split a (typically critical) edge. Return the new block.
1281 Abort on abnormal edges.
1283 ??? The code generally expects to be called on critical edges.
1284 The case of a block ending in an unconditional jump to a
1285 block with multiple predecessors is not handled optimally. */
1287 basic_block
1288 split_edge (edge_in)
1289 edge edge_in;
1291 basic_block old_pred, bb, old_succ;
1292 edge edge_out;
1293 rtx bb_note;
1294 int i, j;
1296 /* Abnormal edges cannot be split. */
1297 if ((edge_in->flags & EDGE_ABNORMAL) != 0)
1298 abort ();
1300 old_pred = edge_in->src;
1301 old_succ = edge_in->dest;
1303 /* Remove the existing edge from the destination's pred list. */
1305 edge *pp;
1306 for (pp = &old_succ->pred; *pp != edge_in; pp = &(*pp)->pred_next)
1307 continue;
1308 *pp = edge_in->pred_next;
1309 edge_in->pred_next = NULL;
1312 /* Create the new structures.  The new block lives on the function
   obstack; edges are heap-allocated. */
1313 bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
1314 edge_out = (edge) xcalloc (1, sizeof (*edge_out));
1315 n_edges++;
1317 memset (bb, 0, sizeof (*bb));
1318 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
1319 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);
1321 /* ??? This info is likely going to be out of date very soon.  The new
   block inherits the liveness at OLD_SUCC's start on both ends, since
   it initially contains no insns of its own. */
1322 if (old_succ->global_live_at_start)
1324 COPY_REG_SET (bb->global_live_at_start, old_succ->global_live_at_start);
1325 COPY_REG_SET (bb->global_live_at_end, old_succ->global_live_at_start);
1327 else
1329 CLEAR_REG_SET (bb->global_live_at_start);
1330 CLEAR_REG_SET (bb->global_live_at_end);
1333 /* Wire them up.  EDGE_IN now enters BB; EDGE_OUT falls through from
   BB to OLD_SUCC.  The split edge is no longer critical. */
1334 bb->pred = edge_in;
1335 bb->succ = edge_out;
1337 edge_in->dest = bb;
1338 edge_in->flags &= ~EDGE_CRITICAL;
1340 edge_out->pred_next = old_succ->pred;
1341 edge_out->succ_next = NULL;
1342 edge_out->src = bb;
1343 edge_out->dest = old_succ;
1344 edge_out->flags = EDGE_FALLTHRU;
1345 edge_out->probability = REG_BR_PROB_BASE;
1347 old_succ->pred = edge_out;
1349 /* Tricky case -- if there existed a fallthru into the successor
1350 (and we're not it) we must add a new unconditional jump around
1351 the new block we're actually interested in.
1353 Further, if that edge is critical, this means a second new basic
1354 block must be created to hold it. In order to simplify correct
1355 insn placement, do this before we touch the existing basic block
1356 ordering for the block we were really wanting. */
1357 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1359 edge e;
1360 for (e = edge_out->pred_next; e ; e = e->pred_next)
1361 if (e->flags & EDGE_FALLTHRU)
1362 break;
1364 if (e)
1366 basic_block jump_block;
1367 rtx pos;
1369 if ((e->flags & EDGE_CRITICAL) == 0)
1371 /* Non critical -- we can simply add a jump to the end
1372 of the existing predecessor. */
1373 jump_block = e->src;
1375 else
1377 /* We need a new block to hold the jump. The simplest
1378 way to do the bulk of the work here is to recursively
1379 call ourselves. */
1380 jump_block = split_edge (e);
1381 e = jump_block->succ;
1384 /* Now add the jump insn ... */
1385 pos = emit_jump_insn_after (gen_jump (old_succ->head),
1386 jump_block->end);
1387 jump_block->end = pos;
1388 if (basic_block_for_insn)
1389 set_block_for_insn (pos, jump_block);
1390 emit_barrier_after (pos);
1392 /* ... let jump know that label is in use, ... */
1393 JUMP_LABEL (pos) = old_succ->head;
1394 ++LABEL_NUSES (old_succ->head);
1396 /* ... and clear fallthru on the outgoing edge. */
1397 e->flags &= ~EDGE_FALLTHRU;
1399 /* Continue splitting the interesting edge. */
1403 /* Place the new block just in front of the successor.  Shift every
   later block up one slot in basic_block_info, fixing indices. */
1404 VARRAY_GROW (basic_block_info, ++n_basic_blocks);
1405 if (old_succ == EXIT_BLOCK_PTR)
1406 j = n_basic_blocks - 1;
1407 else
1408 j = old_succ->index;
1409 for (i = n_basic_blocks - 1; i > j; --i)
1411 basic_block tmp = BASIC_BLOCK (i - 1);
1412 BASIC_BLOCK (i) = tmp;
1413 tmp->index = i;
1415 BASIC_BLOCK (i) = bb;
1416 bb->index = i;
1418 /* Create the basic block note.
1420 Where we place the note can have a noticable impact on the generated
1421 code. Consider this cfg:
1428 +->1-->2--->E
1430 +--+
1432 If we need to insert an insn on the edge from block 0 to block 1,
1433 we want to ensure the instructions we insert are outside of any
1434 loop notes that physically sit between block 0 and block 1. Otherwise
1435 we confuse the loop optimizer into thinking the loop is a phony. */
1436 if (old_succ != EXIT_BLOCK_PTR
1437 && PREV_INSN (old_succ->head)
1438 && GET_CODE (PREV_INSN (old_succ->head)) == NOTE
1439 && NOTE_LINE_NUMBER (PREV_INSN (old_succ->head)) == NOTE_INSN_LOOP_BEG)
1440 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK,
1441 PREV_INSN (old_succ->head));
1442 else if (old_succ != EXIT_BLOCK_PTR)
1443 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, old_succ->head);
1444 else
1445 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
1446 NOTE_BASIC_BLOCK (bb_note) = bb;
1447 bb->head = bb->end = bb_note;
1449 /* Not quite simple -- for non-fallthru edges, we must adjust the
1450 predecessor's jump instruction to target our new block. */
1451 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1453 rtx tmp, insn = old_pred->end;
1454 rtx old_label = old_succ->head;
1455 rtx new_label = gen_label_rtx ();
1457 if (GET_CODE (insn) != JUMP_INSN)
1458 abort ();
1460 /* ??? Recognize a tablejump and adjust all matching cases. */
1461 if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
1462 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
1463 && GET_CODE (tmp) == JUMP_INSN
1464 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
1465 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
1467 rtvec vec;
1468 int j;
/* ADDR_DIFF_VEC keeps its label vector in operand 1; ADDR_VEC in
   operand 0. */
1470 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
1471 vec = XVEC (PATTERN (tmp), 0);
1472 else
1473 vec = XVEC (PATTERN (tmp), 1);
1475 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
1476 if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
1478 RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (VOIDmode, new_label);
1479 --LABEL_NUSES (old_label);
1480 ++LABEL_NUSES (new_label);
1483 /* Handle casesi dispatch insns */
1484 if ((tmp = single_set (insn)) != NULL
1485 && SET_DEST (tmp) == pc_rtx
1486 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
1487 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
1488 && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
1490 XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (VOIDmode,
1491 new_label);
1492 --LABEL_NUSES (old_label);
1493 ++LABEL_NUSES (new_label);
1496 else
1498 /* This would have indicated an abnormal edge. */
1499 if (computed_jump_p (insn))
1500 abort ();
1502 /* A return instruction can't be redirected. */
1503 if (returnjump_p (insn))
1504 abort ();
1506 /* If the insn doesn't go where we think, we're confused. */
1507 if (JUMP_LABEL (insn) != old_label)
1508 abort ();
1510 redirect_jump (insn, new_label);
/* The redirected jump needs a label at the head of the new block. */
1513 emit_label_before (new_label, bb_note);
1514 bb->head = new_label;
1517 return bb;
1520 /* Queue instructions for insertion on an edge between two basic blocks.
1521 The new instructions and basic blocks (if any) will not appear in the
1522 CFG until commit_edge_insertions is called. */
1524 void
1525 insert_insn_on_edge (pattern, e)
1526 rtx pattern;
1527 edge e;
1529 /* We cannot insert instructions on an abnormal critical edge.
1530 It will be easier to find the culprit if we die now. */
1531 if ((e->flags & (EDGE_ABNORMAL|EDGE_CRITICAL))
1532 == (EDGE_ABNORMAL|EDGE_CRITICAL))
1533 abort ();
1535 if (e->insns == NULL_RTX)
1536 start_sequence ();
1537 else
1538 push_to_sequence (e->insns);
1540 emit_insn (pattern);
1542 e->insns = get_insns ();
1543 end_sequence();
1546 /* Update the CFG for the instructions queued on edge E. */
1548 static void
1549 commit_one_edge_insertion (e)
1550 edge e;
1552 rtx before = NULL_RTX, after = NULL_RTX, insns, tmp;
1553 basic_block bb;
1555 /* Pull the insns off the edge now since the edge might go away. */
1556 insns = e->insns;
1557 e->insns = NULL_RTX;
1559 /* Figure out where to put these things. If the destination has
1560 one predecessor, insert there. Except for the exit block. */
1561 if (e->dest->pred->pred_next == NULL
1562 && e->dest != EXIT_BLOCK_PTR)
1564 bb = e->dest;
1566 /* Get the location correct wrt a code label, and "nice" wrt
1567 a basic block note, and before everything else. */
1568 tmp = bb->head;
1569 if (GET_CODE (tmp) == CODE_LABEL)
1570 tmp = NEXT_INSN (tmp);
1571 if (GET_CODE (tmp) == NOTE
1572 && NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BASIC_BLOCK)
1573 tmp = NEXT_INSN (tmp);
/* If nothing was skipped we insert before the head; otherwise after
   the last skipped insn (label / bb note). */
1574 if (tmp == bb->head)
1575 before = tmp;
1576 else
1577 after = PREV_INSN (tmp);
1580 /* If the source has one successor and the edge is not abnormal,
1581 insert there. Except for the entry block. */
1582 else if ((e->flags & EDGE_ABNORMAL) == 0
1583 && e->src->succ->succ_next == NULL
1584 && e->src != ENTRY_BLOCK_PTR)
1586 bb = e->src;
1587 /* It is possible to have a non-simple jump here. Consider a target
1588 where some forms of unconditional jumps clobber a register. This
1589 happens on the fr30 for example.
1591 We know this block has a single successor, so we can just emit
1592 the queued insns before the jump. */
1593 if (GET_CODE (bb->end) == JUMP_INSN)
1595 before = bb->end;
1597 else
1599 /* We'd better be fallthru, or we've lost track of what's what. */
1600 if ((e->flags & EDGE_FALLTHRU) == 0)
1601 abort ();
1603 after = bb->end;
1607 /* Otherwise we must split the edge.  The insns then go at the end
   of the newly-created block. */
1608 else
1610 bb = split_edge (e);
1611 after = bb->end;
1614 /* Now that we've found the spot, do the insertion. */
1616 /* Set the new block number for these insns, if structure is allocated. */
1617 if (basic_block_for_insn)
1619 rtx i;
1620 for (i = insns; i != NULL_RTX; i = NEXT_INSN (i))
1621 set_block_for_insn (i, bb);
1624 if (before)
1626 emit_insns_before (insns, before);
/* Inserting before the block head makes the first queued insn the
   new head. */
1627 if (before == bb->head)
1628 bb->head = insns;
1630 else
1632 rtx last = emit_insns_after (insns, after);
1633 if (after == bb->end)
1635 bb->end = last;
/* If the inserted sequence itself ends in a jump, it must be a
   return jump (the epilogue case); anything else is an error. */
1637 if (GET_CODE (last) == JUMP_INSN)
1639 if (returnjump_p (last))
1641 /* ??? Remove all outgoing edges from BB and add one
1642 for EXIT. This is not currently a problem because
1643 this only happens for the (single) epilogue, which
1644 already has a fallthru edge to EXIT. */
1646 e = bb->succ;
1647 if (e->dest != EXIT_BLOCK_PTR
1648 || e->succ_next != NULL
1649 || (e->flags & EDGE_FALLTHRU) == 0)
1650 abort ();
1651 e->flags &= ~EDGE_FALLTHRU;
1653 emit_barrier_after (last);
1655 else
1656 abort ();
1662 /* Update the CFG for all queued instructions. */
1664 void
1665 commit_edge_insertions ()
1667 int i;
1668 basic_block bb;
1670 #ifdef ENABLE_CHECKING
1671 verify_flow_info ();
1672 #endif
1674 i = -1;
1675 bb = ENTRY_BLOCK_PTR;
1676 while (1)
1678 edge e, next;
1680 for (e = bb->succ; e ; e = next)
1682 next = e->succ_next;
1683 if (e->insns)
1684 commit_one_edge_insertion (e);
1687 if (++i >= n_basic_blocks)
1688 break;
1689 bb = BASIC_BLOCK (i);
1693 /* Delete all unreachable basic blocks. */
1695 static void
1696 delete_unreachable_blocks ()
1698 basic_block *worklist, *tos;
1699 int deleted_handler;
1700 edge e;
1701 int i, n;
1703 n = n_basic_blocks;
1704 tos = worklist = (basic_block *) xmalloc (sizeof (basic_block) * n);
1706 /* Use basic_block->aux as a marker. Clear them all. */
1708 for (i = 0; i < n; ++i)
1709 BASIC_BLOCK (i)->aux = NULL;
1711 /* Add our starting points to the worklist. Almost always there will
1712 be only one. It isn't inconcievable that we might one day directly
1713 support Fortran alternate entry points. */
1715 for (e = ENTRY_BLOCK_PTR->succ; e ; e = e->succ_next)
1717 *tos++ = e->dest;
1719 /* Mark the block with a handy non-null value. */
1720 e->dest->aux = e;
1723 /* Iterate: find everything reachable from what we've already seen.
   TOS is the worklist stack top; each block is pushed at most once
   because it is marked (aux != NULL) when pushed. */
1725 while (tos != worklist)
1727 basic_block b = *--tos;
1729 for (e = b->succ; e ; e = e->succ_next)
1730 if (!e->dest->aux)
1732 *tos++ = e->dest;
1733 e->dest->aux = e;
1737 /* Delete all unreachable basic blocks. Count down so that we don't
1738 interfere with the block renumbering that happens in delete_block. */
1740 deleted_handler = 0;
1742 for (i = n - 1; i >= 0; --i)
1744 basic_block b = BASIC_BLOCK (i);
1746 if (b->aux != NULL)
1747 /* This block was found. Tidy up the mark. */
1748 b->aux = NULL;
1749 else
1750 deleted_handler |= delete_block (b);
1753 tidy_fallthru_edges ();
1755 /* If we deleted an exception handler, we may have EH region begin/end
1756 blocks to remove as well. */
1757 if (deleted_handler)
1758 delete_eh_regions ();
1760 free (worklist);
1763 /* Find EH regions for which there is no longer a handler, and delete them. */
1765 static void
1766 delete_eh_regions ()
1768 rtx insn;
1770 update_rethrow_references ();
1772 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1773 if (GET_CODE (insn) == NOTE)
1775 if ((NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG) ||
1776 (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
1778 int num = NOTE_EH_HANDLER (insn);
1779 /* A NULL handler indicates a region is no longer needed,
1780 as long as it isn't the target of a rethrow. */
1781 if (get_first_handler (num) == NULL && ! rethrow_used (num))
1783 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1784 NOTE_SOURCE_FILE (insn) = 0;
1790 /* Return true if NOTE is not one of the ones that must be kept paired,
1791 so that we may simply delete them. */
1793 static int
1794 can_delete_note_p (note)
1795 rtx note;
1797 return (NOTE_LINE_NUMBER (note) == NOTE_INSN_DELETED
1798 || NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK);
1801 /* Unlink a chain of insns between START and FINISH, leaving notes
1802 that must be paired. */
1804 void
1805 flow_delete_insn_chain (start, finish)
1806 rtx start, finish;
1808 /* Unchain the insns one by one. It would be quicker to delete all
1809 of these with a single unchaining, rather than one at a time, but
1810 we need to keep the NOTE's. */
1812 rtx next;
1814 while (1)
1816 next = NEXT_INSN (start);
1817 if (GET_CODE (start) == NOTE && !can_delete_note_p (start))
1819 else if (GET_CODE (start) == CODE_LABEL && !can_delete_label_p (start))
1821 else
1822 next = flow_delete_insn (start);
1824 if (start == finish)
1825 break;
1826 start = next;
1830 /* Delete the insns in a (non-live) block. We physically delete every
1831 non-deleted-note insn, and update the flow graph appropriately.
1833 Return nonzero if we deleted an exception handler. */
1835 /* ??? Preserving all such notes strikes me as wrong. It would be nice
1836 to post-process the stream to remove empty blocks, loops, ranges, etc. */
1838 static int
1839 delete_block (b)
1840 basic_block b;
1842 int deleted_handler = 0;
1843 rtx insn, end;
1845 /* If the head of this block is a CODE_LABEL, then it might be the
1846 label for an exception handler which can't be reached.
1848 We need to remove the label from the exception_handler_label list
1849 and remove the associated NOTE_INSN_EH_REGION_BEG and
1850 NOTE_INSN_EH_REGION_END notes. */
1852 insn = b->head;
1854 never_reached_warning (insn);
1856 if (GET_CODE (insn) == CODE_LABEL)
/* PREV tracks the link that points at X, so the match can be
   spliced out of the list in O(1). */
1858 rtx x, *prev = &exception_handler_labels;
1860 for (x = exception_handler_labels; x; x = XEXP (x, 1))
1862 if (XEXP (x, 0) == insn)
1864 /* Found a match, splice this label out of the EH label list. */
1865 *prev = XEXP (x, 1);
1866 XEXP (x, 1) = NULL_RTX;
1867 XEXP (x, 0) = NULL_RTX;
1869 /* Remove the handler from all regions */
1870 remove_handler (insn);
1871 deleted_handler = 1;
1872 break;
1874 prev = &XEXP (x, 1);
1877 /* This label may be referenced by code solely for its value, or
1878 referenced by static data, or something. We have determined
1879 that it is not reachable, but cannot delete the label itself.
1880 Save code space and continue to delete the balance of the block,
1881 along with properly updating the cfg. */
1882 if (!can_delete_label_p (insn))
1884 /* If we've only got one of these, skip the whole deleting
1885 insns thing. */
1886 if (insn == b->end)
1887 goto no_delete_insns;
1888 insn = NEXT_INSN (insn);
1892 /* Selectively unlink the insn chain. Include any BARRIER that may
1893 follow the basic block. */
1894 end = next_nonnote_insn (b->end);
1895 if (!end || GET_CODE (end) != BARRIER)
1896 end = b->end;
1897 flow_delete_insn_chain (insn, end);
1899 no_delete_insns:
1901 /* Remove the edges into and out of this block. Note that there may
1902 indeed be edges in, if we are removing an unreachable loop. */
1904 edge e, next, *q;
/* Each incoming edge is unlinked from its source's successor list
   before being freed. */
1906 for (e = b->pred; e ; e = next)
1908 for (q = &e->src->succ; *q != e; q = &(*q)->succ_next)
1909 continue;
1910 *q = e->succ_next;
1911 next = e->pred_next;
1912 n_edges--;
1913 free (e);
/* Likewise each outgoing edge is unlinked from its destination's
   predecessor list. */
1915 for (e = b->succ; e ; e = next)
1917 for (q = &e->dest->pred; *q != e; q = &(*q)->pred_next)
1918 continue;
1919 *q = e->pred_next;
1920 next = e->succ_next;
1921 n_edges--;
1922 free (e);
1925 b->pred = NULL;
1926 b->succ = NULL;
1929 /* Remove the basic block from the array, and compact behind it. */
1930 expunge_block (b);
1932 return deleted_handler;
1935 /* Remove block B from the basic block array and compact behind it. */
1937 static void
1938 expunge_block (b)
1939 basic_block b;
1941 int i, n = n_basic_blocks;
1943 for (i = b->index; i + 1 < n; ++i)
1945 basic_block x = BASIC_BLOCK (i + 1);
1946 BASIC_BLOCK (i) = x;
1947 x->index = i;
1950 basic_block_info->num_elements--;
1951 n_basic_blocks--;
1954 /* Delete INSN by patching it out. Return the next insn. */
1957 flow_delete_insn (insn)
1958 rtx insn;
1960 rtx prev = PREV_INSN (insn);
1961 rtx next = NEXT_INSN (insn);
1963 PREV_INSN (insn) = NULL_RTX;
1964 NEXT_INSN (insn) = NULL_RTX;
1966 if (prev)
1967 NEXT_INSN (prev) = next;
1968 if (next)
1969 PREV_INSN (next) = prev;
1970 else
1971 set_last_insn (prev);
1973 if (GET_CODE (insn) == CODE_LABEL)
1974 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
1976 /* If deleting a jump, decrement the use count of the label. Deleting
1977 the label itself should happen in the normal course of block merging. */
1978 if (GET_CODE (insn) == JUMP_INSN && JUMP_LABEL (insn))
1979 LABEL_NUSES (JUMP_LABEL (insn))--;
1981 return next;
1984 /* True if a given label can be deleted. */
1986 static int
1987 can_delete_label_p (label)
1988 rtx label;
1990 rtx x;
1992 if (LABEL_PRESERVE_P (label))
1993 return 0;
1995 for (x = forced_labels; x ; x = XEXP (x, 1))
1996 if (label == XEXP (x, 0))
1997 return 0;
1998 for (x = label_value_list; x ; x = XEXP (x, 1))
1999 if (label == XEXP (x, 0))
2000 return 0;
2001 for (x = exception_handler_labels; x ; x = XEXP (x, 1))
2002 if (label == XEXP (x, 0))
2003 return 0;
2005 /* User declared labels must be preserved. */
2006 if (LABEL_NAME (label) != 0)
2007 return 0;
2009 return 1;
2012 /* Blocks A and B are to be merged into a single block. A has no incoming
2013 fallthru edge, so it can be moved before B without adding or modifying
2014 any jumps (aside from the jump from A to B). */
2016 static int
2017 merge_blocks_move_predecessor_nojumps (a, b)
2018 basic_block a, b;
2020 rtx start, end, barrier;
2021 int index;
2023 start = a->head;
2024 end = a->end;
2026 /* We want to delete the BARRIER after the end of the insns we are
2027 going to move. If we don't find a BARRIER, then do nothing. This
2028 can happen in some cases if we have labels we can not delete.
2030 Similarly, do nothing if we can not delete the label at the start
2031 of the target block. */
2032 barrier = next_nonnote_insn (end);
2033 if (GET_CODE (barrier) != BARRIER
2034 || (GET_CODE (b->head) == CODE_LABEL
2035 && ! can_delete_label_p (b->head)))
2036 return 0;
2037 else
2038 flow_delete_insn (barrier);
2040 /* Move block and loop notes out of the chain so that we do not
2041 disturb their order.
2043 ??? A better solution would be to squeeze out all the non-nested notes
2044 and adjust the block trees appropriately. Even better would be to have
2045 a tighter connection between block trees and rtl so that this is not
2046 necessary. */
2047 start = squeeze_notes (start, end);
2049 /* Scramble the insn chain. */
2050 if (end != PREV_INSN (b->head))
2051 reorder_insns (start, end, PREV_INSN (b->head));
2053 if (rtl_dump_file)
2055 fprintf (rtl_dump_file, "Moved block %d before %d and merged.\n",
2056 a->index, b->index);
2059 /* Swap the records for the two blocks around. Although we are deleting B,
2060 A is now where B was and we want to compact the BB array from where
2061 A used to be. */
2062 BASIC_BLOCK(a->index) = b;
2063 BASIC_BLOCK(b->index) = a;
2064 index = a->index;
2065 a->index = b->index;
2066 b->index = index;
2068 /* Now blocks A and B are contiguous. Merge them. */
2069 merge_blocks_nomove (a, b);
2071 return 1;
2074 /* Blocks A and B are to be merged into a single block. B has no outgoing
2075 fallthru edge, so it can be moved after A without adding or modifying
2076 any jumps (aside from the jump from A to B). */
2078 static int
2079 merge_blocks_move_successor_nojumps (a, b)
2080 basic_block a, b;
2082 rtx start, end, barrier;
2084 start = b->head;
2085 end = b->end;
2087 /* We want to delete the BARRIER after the end of the insns we are
2088 going to move. If we don't find a BARRIER, then do nothing. This
2089 can happen in some cases if we have labels we can not delete.
2091 Similarly, do nothing if we can not delete the label at the start
2092 of the target block. */
2093 barrier = next_nonnote_insn (end);
2094 if (GET_CODE (barrier) != BARRIER
2095 || (GET_CODE (b->head) == CODE_LABEL
2096 && ! can_delete_label_p (b->head)))
2097 return 0;
2098 else
2099 flow_delete_insn (barrier);
2101 /* Move block and loop notes out of the chain so that we do not
2102 disturb their order.
2104 ??? A better solution would be to squeeze out all the non-nested notes
2105 and adjust the block trees appropriately. Even better would be to have
2106 a tighter connection between block trees and rtl so that this is not
2107 necessary. */
2108 start = squeeze_notes (start, end);
2110 /* Scramble the insn chain. */
2111 reorder_insns (start, end, a->end);
2113 /* Now blocks A and B are contiguous. Merge them. */
2114 merge_blocks_nomove (a, b);
2116 if (rtl_dump_file)
2118 fprintf (rtl_dump_file, "Moved block %d after %d and merged.\n",
2119 b->index, a->index);
2122 return 1;
2125 /* Blocks A and B are to be merged into a single block. The insns
2126 are already contiguous, hence `nomove'. */
2128 static void
2129 merge_blocks_nomove (a, b)
2130 basic_block a, b;
2132 edge e;
2133 rtx b_head, b_end, a_end;
2134 int b_empty = 0;
2136 /* If there was a CODE_LABEL beginning B, delete it. */
2137 b_head = b->head;
2138 b_end = b->end;
2139 if (GET_CODE (b_head) == CODE_LABEL)
2141 /* Detect basic blocks with nothing but a label. This can happen
2142 in particular at the end of a function. */
2143 if (b_head == b_end)
2144 b_empty = 1;
2145 b_head = flow_delete_insn (b_head);
2148 /* Delete the basic block note. */
2149 if (GET_CODE (b_head) == NOTE
2150 && NOTE_LINE_NUMBER (b_head) == NOTE_INSN_BASIC_BLOCK)
2152 if (b_head == b_end)
2153 b_empty = 1;
2154 b_head = flow_delete_insn (b_head);
2157 /* If there was a jump out of A, delete it. */
2158 a_end = a->end;
2159 if (GET_CODE (a_end) == JUMP_INSN)
/* PREV becomes A's new last insn once the jump is gone. */
2161 rtx prev;
2163 prev = prev_nonnote_insn (a_end);
2164 if (!prev)
2165 prev = a->head;
2167 #ifdef HAVE_cc0
2168 /* If this was a conditional jump, we need to also delete
2169 the insn that set cc0. */
2171 if (prev && sets_cc0_p (prev))
2173 rtx tmp = prev;
2174 prev = prev_nonnote_insn (prev);
2175 if (!prev)
2176 prev = a->head;
2177 flow_delete_insn (tmp);
2179 #endif
2181 /* Note that a->head != a->end, since we should have at least a
2182 bb note plus the jump, so prev != insn. */
2183 flow_delete_insn (a_end);
2184 a_end = prev;
2187 /* By definition, there should only be one successor of A, and that is
2188 B. Free that edge struct. */
2189 n_edges--;
2190 free (a->succ);
2192 /* Adjust the edges out of B for the new owner. */
2193 for (e = b->succ; e ; e = e->succ_next)
2194 e->src = a;
2195 a->succ = b->succ;
2197 /* Reassociate the insns of B with A.  B_HEAD..B_END are B's
   remaining insns (label and bb note already deleted above). */
2198 if (!b_empty)
2200 BLOCK_FOR_INSN (b_head) = a;
2201 while (b_head != b_end)
2203 b_head = NEXT_INSN (b_head);
2204 BLOCK_FOR_INSN (b_head) = a;
2206 a_end = b_head;
2208 a->end = a_end;
2210 /* Compact the basic block array. */
2211 expunge_block (b);
/* Attempt to merge basic blocks that are potentially non-adjacent.
   E is the single edge from B to C under consideration; B is the
   predecessor block and C the successor.  Return true iff the attempt
   succeeded (the blocks were merged and the CFG updated).  */

static int
merge_blocks (e, b, c)
     edge e;
     basic_block b, c;
{
  /* If B has a fallthru edge to C, no need to move anything.  */
  if (e->flags & EDGE_FALLTHRU)
    {
      /* If a label still appears somewhere and we cannot delete the label,
	 then we cannot merge the blocks.  The edge was tidied already.  */

      rtx insn, stop = NEXT_INSN (c->head);
      for (insn = NEXT_INSN (b->end); insn != stop; insn = NEXT_INSN (insn))
	if (GET_CODE (insn) == CODE_LABEL && !can_delete_label_p (insn))
	  return 0;

      merge_blocks_nomove (b, c);

      if (rtl_dump_file)
	{
	  fprintf (rtl_dump_file, "Merged %d and %d without moving.\n",
		   b->index, c->index);
	}

      return 1;
    }
  else
    {
      edge tmp_edge;
      basic_block d;
      int c_has_outgoing_fallthru;
      int b_has_incoming_fallthru;

      /* We must make sure to not munge nesting of exception regions,
	 lexical blocks, and loop notes.

	 The first is taken care of by requiring that the active eh
	 region at the end of one block always matches the active eh
	 region at the beginning of the next block.

	 The later two are taken care of by squeezing out all the notes.  */

      /* ??? A throw/catch edge (or any abnormal edge) should be rarely
	 executed and we may want to treat blocks which have two out
	 edges, one normal, one abnormal as only having one edge for
	 block merging purposes.  */

      /* Does C fall through to the next block?  */
      for (tmp_edge = c->succ; tmp_edge ; tmp_edge = tmp_edge->succ_next)
	if (tmp_edge->flags & EDGE_FALLTHRU)
	  break;
      c_has_outgoing_fallthru = (tmp_edge != NULL);

      /* Does some block fall through into B?  */
      for (tmp_edge = b->pred; tmp_edge ; tmp_edge = tmp_edge->pred_next)
	if (tmp_edge->flags & EDGE_FALLTHRU)
	  break;
      b_has_incoming_fallthru = (tmp_edge != NULL);

      /* If B does not have an incoming fallthru, and the exception regions
	 match, then it can be moved immediately before C without introducing
	 or modifying jumps.

	 C can not be the first block, so we do not have to worry about
	 accessing a non-existent block.  */
      d = BASIC_BLOCK (c->index - 1);
      if (! b_has_incoming_fallthru
	  && d->eh_end == b->eh_beg
	  && b->eh_end == c->eh_beg)
	return merge_blocks_move_predecessor_nojumps (b, c);

      /* Otherwise, we're going to try to move C after B.  Make sure the
	 exception regions match.

	 If B is the last basic block, then we must not try to access the
	 block structure for block B + 1.  Luckily in that case we do not
	 need to worry about matching exception regions.  */
      d = (b->index + 1 < n_basic_blocks ? BASIC_BLOCK (b->index + 1) : NULL);
      if (b->eh_end == c->eh_beg
	  && (d == NULL || c->eh_end == d->eh_beg))
	{
	  /* If C does not have an outgoing fallthru, then it can be moved
	     immediately after B without introducing or modifying jumps.  */
	  if (! c_has_outgoing_fallthru)
	    return merge_blocks_move_successor_nojumps (b, c);

	  /* Otherwise, we'll need to insert an extra jump, and possibly
	     a new block to contain it.  */
	  /* ??? Not implemented yet.  */
	}

      return 0;
    }
}
/* Top level driver for merge_blocks.  Repeatedly walks the block list
   attempting to merge each block with its unique successor.  */

static void
try_merge_blocks ()
{
  int i;

  /* Attempt to merge blocks as made possible by edge removal.  If a block
     has only one successor, and the successor has only one predecessor,
     they may be combined.  */

  for (i = 0; i < n_basic_blocks; )
    {
      basic_block c, b = BASIC_BLOCK (i);
      edge s;

      /* A loop because chains of blocks might be combineable.  */
      while ((s = b->succ) != NULL
	     && s->succ_next == NULL
	     && (s->flags & EDGE_EH) == 0
	     && (c = s->dest) != EXIT_BLOCK_PTR
	     && c->pred->pred_next == NULL
	     /* If the jump insn has side effects, we can't kill the edge.  */
	     && (GET_CODE (b->end) != JUMP_INSN
		 || onlyjump_p (b->end))
	     && merge_blocks (s, b, c))
	continue;

      /* Don't get confused by the index shift caused by deleting blocks.  */
      i = b->index + 1;
    }
}
/* The given edge E (from block B to block C) should potentially be a
   fallthru edge.  If that is in fact true, delete the jump and barriers
   that are in the way, and mark E as EDGE_FALLTHRU.  */

static void
tidy_fallthru_edge (e, b, c)
     edge e;
     basic_block b, c;
{
  rtx q;

  /* ??? In a late-running flow pass, other folks may have deleted basic
     blocks by nopping out blocks, leaving multiple BARRIERs between here
     and the target label.  They ought to be chastized and fixed.

     We can also wind up with a sequence of undeletable labels between
     one block and the next.

     So search through a sequence of barriers, labels, and notes for
     the head of block C and assert that we really do fall through.  */

  if (next_real_insn (b->end) != next_real_insn (PREV_INSN (c->head)))
    return;

  /* Remove what will soon cease being the jump insn from the source block.
     If block B consisted only of this single jump, turn it into a deleted
     note.  */
  q = b->end;
  if (GET_CODE (q) == JUMP_INSN)
    {
#ifdef HAVE_cc0
      /* If this was a conditional jump, we need to also delete
	 the insn that set cc0.  */
      if (! simplejump_p (q) && condjump_p (q) && sets_cc0_p (PREV_INSN (q)))
	q = PREV_INSN (q);
#endif

      if (b->head == q)
	{
	  /* B contained only the jump: leave a deleted-insn note behind.  */
	  PUT_CODE (q, NOTE);
	  NOTE_LINE_NUMBER (q) = NOTE_INSN_DELETED;
	  NOTE_SOURCE_FILE (q) = 0;
	}
      else
	b->end = q = PREV_INSN (q);
    }

  /* Selectively unlink the sequence between the new end of B and the
     head of C (barriers, deletable labels, notes).  */
  if (q != PREV_INSN (c->head))
    flow_delete_insn_chain (NEXT_INSN (q), PREV_INSN (c->head));

  e->flags |= EDGE_FALLTHRU;
}
/* Fix up edges that now fall through, or rather should now fall through
   but previously required a jump around now deleted blocks.  Simplify
   the search by only examining blocks numerically adjacent, since this
   is how find_basic_blocks created them.  */

static void
tidy_fallthru_edges ()
{
  int i;

  for (i = 1; i < n_basic_blocks; ++i)
    {
      basic_block b = BASIC_BLOCK (i - 1);
      basic_block c = BASIC_BLOCK (i);
      edge s;

      /* We care about simple conditional or unconditional jumps with
	 a single successor.

	 If we had a conditional branch to the next instruction when
	 find_basic_blocks was called, then there will only be one
	 out edge for the block which ended with the conditional
	 branch (since we do not create duplicate edges).

	 Furthermore, the edge will be marked as a fallthru because we
	 merge the flags for the duplicate edges.  So we do not want to
	 check that the edge is not a FALLTHRU edge.  */
      if ((s = b->succ) != NULL
	  && s->succ_next == NULL
	  && s->dest == c
	  /* If the jump insn has side effects, we can't tidy the edge.  */
	  && (GET_CODE (b->end) != JUMP_INSN
	      || onlyjump_p (b->end)))
	tidy_fallthru_edge (s, b, c);
    }
}
2433 /* Discover and record the loop depth at the head of each basic block. */
2435 void
2436 calculate_loop_depth (dump)
2437 FILE *dump;
2439 struct loops loops;
2441 /* The loop infrastructure does the real job for us. */
2442 flow_loops_find (&loops);
2444 if (dump)
2445 flow_loops_dump (&loops, dump, 0);
2447 flow_loops_free (&loops);
/* Perform data flow analysis.
   F is the first insn of the function and NREGS the number of register numbers
   in use.  FILE, if non-null, receives a dump of the flow info.
   REMOVE_DEAD_CODE is non-zero if dead-code elimination should be done.  */

void
life_analysis (f, nregs, file, remove_dead_code)
     rtx f;
     int nregs;
     FILE *file;
     int remove_dead_code;
{
#ifdef ELIMINABLE_REGS
  register int i;
  static struct {int from, to; } eliminables[] = ELIMINABLE_REGS;
#endif
  int flags;
  sbitmap all_blocks;

  /* Record which registers will be eliminated.  We use this in
     mark_used_regs.  */

  CLEAR_HARD_REG_SET (elim_reg_set);

#ifdef ELIMINABLE_REGS
  for (i = 0; i < (int) (sizeof eliminables / sizeof eliminables[0]); i++)
    SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
#else
  SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
#endif

  /* We want alias analysis information for local dead store elimination.  */
  init_alias_analysis ();

  if (! optimize)
    flags = PROP_DEATH_NOTES | PROP_REG_INFO;
  else
    {
      flags = PROP_FINAL;
      if (! remove_dead_code)
	flags &= ~(PROP_SCAN_DEAD_CODE | PROP_KILL_DEAD_CODE);
    }

  /* The post-reload life analysis must have (on a global basis) the same
     registers live as was computed by reload itself.  Otherwise elimination
     offsets and such may be incorrect.

     Reload will mark some registers as live even though they do not
     appear in the rtl.  */
  if (reload_completed)
    flags &= ~PROP_REG_INFO;

  max_regno = nregs;

  /* Always remove no-op moves.  Do this before other processing so
     that we don't have to keep re-scanning them.  */
  delete_noop_moves (f);

  /* Some targets can emit simpler epilogues if they know that sp was
     not ever modified during the function.  After reload, of course,
     we've already emitted the epilogue so there's no sense searching.  */
  if (! reload_completed)
    notice_stack_pointer_modification (f);

  /* Allocate and zero out data structures that will record the
     data from lifetime analysis.  */
  allocate_reg_life_data ();
  allocate_bb_life_data ();
  reg_next_use = (rtx *) xcalloc (nregs, sizeof (rtx));
  all_blocks = sbitmap_alloc (n_basic_blocks);
  sbitmap_ones (all_blocks);

  /* Find the set of registers live on function exit.  */
  mark_regs_live_at_end (EXIT_BLOCK_PTR->global_live_at_start);

  /* "Update" life info from zero.  It'd be nice to begin the
     relaxation with just the exit and noreturn blocks, but that set
     is not immediately handy.  */
  update_life_info (all_blocks, UPDATE_LIFE_GLOBAL, flags);

  /* Clean up.  */
  sbitmap_free (all_blocks);
  free (reg_next_use);
  reg_next_use = NULL;
  end_alias_analysis ();

  if (file)
    dump_flow_info (file);

  free_basic_block_vars (1);
}
2541 /* A subroutine of verify_wide_reg, called through for_each_rtx.
2542 Search for REGNO. If found, abort if it is not wider than word_mode. */
2544 static int
2545 verify_wide_reg_1 (px, pregno)
2546 rtx *px;
2547 void *pregno;
2549 rtx x = *px;
2550 int regno = *(int *) pregno;
2552 if (GET_CODE (x) == REG && REGNO (x) == regno)
2554 if (GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD)
2555 abort ();
2556 return 1;
2558 return 0;
2561 /* A subroutine of verify_local_live_at_start. Search through insns
2562 between HEAD and END looking for register REGNO. */
2564 static void
2565 verify_wide_reg (regno, head, end)
2566 int regno;
2567 rtx head, end;
2569 while (1)
2571 if (GET_RTX_CLASS (GET_CODE (head)) == 'i'
2572 && for_each_rtx (&PATTERN (head), verify_wide_reg_1, &regno))
2573 return;
2574 if (head == end)
2575 break;
2576 head = NEXT_INSN (head);
2579 /* We didn't find the register at all. Something's way screwy. */
2580 abort ();
/* A subroutine of update_life_info.  Verify that there are no untoward
   changes in live_at_start during a local update.  NEW_LIVE_AT_START is
   the freshly computed set; BB the block being checked.  Note that
   NEW_LIVE_AT_START is clobbered (XORed) in the !reload_completed path.  */

static void
verify_local_live_at_start (new_live_at_start, bb)
     regset new_live_at_start;
     basic_block bb;
{
  if (reload_completed)
    {
      /* After reload, there are no pseudos, nor subregs of multi-word
	 registers.  The regsets should exactly match.  */
      if (! REG_SET_EQUAL_P (new_live_at_start, bb->global_live_at_start))
	abort ();
    }
  else
    {
      int i;

      /* Find the set of changed registers.  */
      XOR_REG_SET (new_live_at_start, bb->global_live_at_start);

      EXECUTE_IF_SET_IN_REG_SET (new_live_at_start, 0, i,
	{
	  /* No registers should die.  */
	  if (REGNO_REG_SET_P (bb->global_live_at_start, i))
	    abort ();
	  /* Verify that the now-live register is wider than word_mode.  */
	  verify_wide_reg (i, bb->head, bb->end);
	});
    }
}
/* Updates life information starting with the basic blocks set in BLOCKS.

   If LOCAL_ONLY, such as after splitting or peepholeing, we are only
   expecting local modifications to basic blocks.  If we find extra
   registers live at the beginning of a block, then we either killed
   useful data, or we have a broken split that wants data not provided.
   If we find registers removed from live_at_start, that means we have
   a broken peephole that is killing a register it shouldn't.

   ??? This is not true in one situation -- when a pre-reload splitter
   generates subregs of a multi-word pseudo, current life analysis will
   lose the kill.  So we _can_ have a pseudo go live.  How irritating.

   BLOCK_FOR_INSN is assumed to be correct.

   PROP_FLAGS should not contain PROP_LOG_LINKS unless the caller sets
   up reg_next_use.  Including PROP_REG_INFO does not properly refresh
   regs_ever_live unless the caller resets it to zero.  */

void
update_life_info (blocks, extent, prop_flags)
     sbitmap blocks;
     enum update_life_extent extent;
     int prop_flags;
{
  regset tmp;
  int i;

  tmp = ALLOCA_REG_SET ();

  /* For a global update, we go through the relaxation process again.  */
  if (extent != UPDATE_LIFE_LOCAL)
    {
      calculate_global_regs_live (blocks, blocks,
				  prop_flags & PROP_SCAN_DEAD_CODE);

      /* If asked, remove notes from the blocks we'll update.  */
      if (extent == UPDATE_LIFE_GLOBAL_RM_NOTES)
	count_or_remove_death_notes (blocks, 1);
    }

  EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
    {
      basic_block bb = BASIC_BLOCK (i);

      /* Recompute live_at_start from a scratch copy of live_at_end.  */
      COPY_REG_SET (tmp, bb->global_live_at_end);
      propagate_block (bb, tmp, (regset) NULL, prop_flags);

      if (extent == UPDATE_LIFE_LOCAL)
	verify_local_live_at_start (tmp, bb);
    });

  FREE_REG_SET (tmp);

  if (prop_flags & PROP_REG_INFO)
    {
      /* The only pseudos that are live at the beginning of the function
	 are those that were not set anywhere in the function.  local-alloc
	 doesn't know how to handle these correctly, so mark them as not
	 local to any one basic block.  */
      EXECUTE_IF_SET_IN_REG_SET (ENTRY_BLOCK_PTR->global_live_at_end,
				 FIRST_PSEUDO_REGISTER, i,
				 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });

      /* We have a problem with any pseudoreg that lives across the setjmp.
	 ANSI says that if a user variable does not change in value between
	 the setjmp and the longjmp, then the longjmp preserves it.  This
	 includes longjmp from a place where the pseudo appears dead.
	 (In principle, the value still exists if it is in scope.)
	 If the pseudo goes in a hard reg, some other value may occupy
	 that hard reg where this pseudo is dead, thus clobbering the pseudo.
	 Conclusion: such a pseudo must not go in a hard reg.  */
      EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
				 FIRST_PSEUDO_REGISTER, i,
	{
	  if (regno_reg_rtx[i] != 0)
	    {
	      REG_LIVE_LENGTH (i) = -1;
	      REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
	    }
	});
    }
}
/* Free the variables allocated by find_basic_blocks.

   KEEP_HEAD_END_P is non-zero if basic_block_info is not to be freed.  */

void
free_basic_block_vars (keep_head_end_p)
     int keep_head_end_p;
{
  if (basic_block_for_insn)
    {
      VARRAY_FREE (basic_block_for_insn);
      basic_block_for_insn = NULL;
    }

  if (! keep_head_end_p)
    {
      clear_edges ();
      VARRAY_FREE (basic_block_info);
      n_basic_blocks = 0;

      /* The entry/exit sentinel blocks are static, so just clear the
	 fields that referenced freed storage.  */
      ENTRY_BLOCK_PTR->aux = NULL;
      ENTRY_BLOCK_PTR->global_live_at_end = NULL;
      EXIT_BLOCK_PTR->aux = NULL;
      EXIT_BLOCK_PTR->global_live_at_start = NULL;
    }
}
2727 /* Return nonzero if the destination of SET equals the source. */
2728 static int
2729 set_noop_p (set)
2730 rtx set;
2732 rtx src = SET_SRC (set);
2733 rtx dst = SET_DEST (set);
2735 if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
2737 if (SUBREG_WORD (src) != SUBREG_WORD (dst))
2738 return 0;
2739 src = SUBREG_REG (src);
2740 dst = SUBREG_REG (dst);
2743 return (GET_CODE (src) == REG && GET_CODE (dst) == REG
2744 && REGNO (src) == REGNO (dst));
/* Return nonzero if an insn consists only of SETs, each of which only sets a
   value to itself (USEs and CLOBBERs inside a PARALLEL are ignored).  */

static int
noop_move_p (insn)
     rtx insn;
{
  rtx pat = PATTERN (insn);

  /* Insns carrying these notes are useful later on.  */
  if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
    return 0;

  if (GET_CODE (pat) == SET && set_noop_p (pat))
    return 1;

  if (GET_CODE (pat) == PARALLEL)
    {
      int i;
      /* If nothing but SETs of registers to themselves,
	 this insn can also be deleted.  */
      for (i = 0; i < XVECLEN (pat, 0); i++)
	{
	  rtx tem = XVECEXP (pat, 0, i);

	  if (GET_CODE (tem) == USE
	      || GET_CODE (tem) == CLOBBER)
	    continue;

	  if (GET_CODE (tem) != SET || ! set_noop_p (tem))
	    return 0;
	}

      return 1;
    }
  return 0;
}
2784 /* Delete any insns that copy a register to itself. */
2786 static void
2787 delete_noop_moves (f)
2788 rtx f;
2790 rtx insn;
2791 for (insn = f; insn; insn = NEXT_INSN (insn))
2793 if (GET_CODE (insn) == INSN && noop_move_p (insn))
2795 PUT_CODE (insn, NOTE);
2796 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2797 NOTE_SOURCE_FILE (insn) = 0;
2802 /* Determine if the stack pointer is constant over the life of the function.
2803 Only useful before prologues have been emitted. */
2805 static void
2806 notice_stack_pointer_modification_1 (x, pat, data)
2807 rtx x;
2808 rtx pat ATTRIBUTE_UNUSED;
2809 void *data ATTRIBUTE_UNUSED;
2811 if (x == stack_pointer_rtx
2812 /* The stack pointer is only modified indirectly as the result
2813 of a push until later in flow. See the comments in rtl.texi
2814 regarding Embedded Side-Effects on Addresses. */
2815 || (GET_CODE (x) == MEM
2816 && (GET_CODE (XEXP (x, 0)) == PRE_DEC
2817 || GET_CODE (XEXP (x, 0)) == PRE_INC
2818 || GET_CODE (XEXP (x, 0)) == POST_DEC
2819 || GET_CODE (XEXP (x, 0)) == POST_INC)
2820 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx))
2821 current_function_sp_is_unchanging = 0;
/* Scan all insns from F onward and decide whether the stack pointer is
   constant for the whole function; result is recorded in
   current_function_sp_is_unchanging.  */

static void
notice_stack_pointer_modification (f)
     rtx f;
{
  rtx insn;

  /* Assume that the stack pointer is unchanging if alloca hasn't
     been used.  */
  current_function_sp_is_unchanging = !current_function_calls_alloca;
  if (! current_function_sp_is_unchanging)
    return;

  for (insn = f; insn; insn = NEXT_INSN (insn))
    {
      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	{
	  /* Check if insn modifies the stack pointer.  */
	  note_stores (PATTERN (insn), notice_stack_pointer_modification_1,
		       NULL);
	  /* Once we know sp changes, no need to scan further.  */
	  if (! current_function_sp_is_unchanging)
	    return;
	}
    }
}
2849 /* Mark a register in SET. Hard registers in large modes get all
2850 of their component registers set as well. */
2851 static void
2852 mark_reg (reg, xset)
2853 rtx reg;
2854 void *xset;
2856 regset set = (regset) xset;
2857 int regno = REGNO (reg);
2859 if (GET_MODE (reg) == BLKmode)
2860 abort ();
2862 SET_REGNO_REG_SET (set, regno);
2863 if (regno < FIRST_PSEUDO_REGISTER)
2865 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
2866 while (--n > 0)
2867 SET_REGNO_REG_SET (set, regno + n);
/* Mark those regs which are needed at the end of the function as live
   at the end of the last basic block, by setting them in SET.  */

static void
mark_regs_live_at_end (set)
     regset set;
{
  int i;

  /* If exiting needs the right stack value, consider the stack pointer
     live at the end of the function.  */
  if ((HAVE_epilogue && reload_completed)
      || ! EXIT_IGNORE_STACK
      || (! FRAME_POINTER_REQUIRED
	  && ! current_function_calls_alloca
	  && flag_omit_frame_pointer)
      || current_function_sp_is_unchanging)
    {
      SET_REGNO_REG_SET (set, STACK_POINTER_REGNUM);
    }

  /* Mark the frame pointer if needed at the end of the function.  If
     we end up eliminating it, it will be removed from the live list
     of each basic block by reload.  */

  if (! reload_completed || frame_pointer_needed)
    {
      SET_REGNO_REG_SET (set, FRAME_POINTER_REGNUM);
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
      /* If they are different, also mark the hard frame pointer as live */
      SET_REGNO_REG_SET (set, HARD_FRAME_POINTER_REGNUM);
#endif
    }

#ifdef PIC_OFFSET_TABLE_REGNUM
#ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
  /* Many architectures have a GP register even without flag_pic.
     Assume the pic register is not in use, or will be handled by
     other means, if it is not fixed.  */
  if (fixed_regs[PIC_OFFSET_TABLE_REGNUM])
    SET_REGNO_REG_SET (set, PIC_OFFSET_TABLE_REGNUM);
#endif
#endif

  /* Mark all global registers, and all registers used by the epilogue
     as being live at the end of the function since they may be
     referenced by our caller.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (global_regs[i]
#ifdef EPILOGUE_USES
	|| EPILOGUE_USES (i)
#endif
	)
      SET_REGNO_REG_SET (set, i);

  /* Mark all call-saved registers that we actually used.  */
  if (HAVE_epilogue && reload_completed)
    {
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	if (! call_used_regs[i] && regs_ever_live[i])
	  SET_REGNO_REG_SET (set, i);
    }

  /* Mark function return value.  */
  diddle_return_value (mark_reg, set);
}
/* Propagate global life info around the graph of basic blocks.  Begin
   considering blocks with their corresponding bit set in BLOCKS_IN.
   BLOCKS_OUT is set for every block that was changed.  FLAGS is passed
   through to propagate_block (PROP_* bits).  Standard iterative
   backward dataflow over a circular worklist.  */

static void
calculate_global_regs_live (blocks_in, blocks_out, flags)
     sbitmap blocks_in, blocks_out;
     int flags;
{
  basic_block *queue, *qhead, *qtail, *qend;
  regset tmp, new_live_at_end;
  int i;

  tmp = ALLOCA_REG_SET ();
  new_live_at_end = ALLOCA_REG_SET ();

  /* Create a worklist.  Allocate an extra slot for ENTRY_BLOCK, and one
     because the `head == tail' style test for an empty queue doesn't
     work with a full queue.  */
  queue = (basic_block *) xmalloc ((n_basic_blocks + 2) * sizeof (*queue));
  qtail = queue;
  qhead = qend = queue + n_basic_blocks + 2;

  /* Clear out the garbage that might be hanging out in bb->aux.  */
  for (i = n_basic_blocks - 1; i >= 0; --i)
    BASIC_BLOCK (i)->aux = NULL;

  /* Queue the blocks set in the initial mask.  Do this in reverse block
     number order so that we are more likely for the first round to do
     useful work.  We use AUX non-null to flag that the block is queued.  */
  EXECUTE_IF_SET_IN_SBITMAP (blocks_in, 0, i,
    {
      basic_block bb = BASIC_BLOCK (i);
      *--qhead = bb;
      bb->aux = bb;
    });

  sbitmap_zero (blocks_out);

  while (qhead != qtail)
    {
      int rescan, changed;
      basic_block bb;
      edge e;

      /* Dequeue (circular buffer) and clear the queued flag.  */
      bb = *qhead++;
      if (qhead == qend)
	qhead = queue;
      bb->aux = NULL;

      /* Begin by propagating live_at_start from the successor blocks.  */
      CLEAR_REG_SET (new_live_at_end);
      for (e = bb->succ; e ; e = e->succ_next)
	{
	  basic_block sb = e->dest;
	  IOR_REG_SET (new_live_at_end, sb->global_live_at_start);
	}

      if (bb == ENTRY_BLOCK_PTR)
	{
	  COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
	  continue;
	}

      /* On our first pass through this block, we'll go ahead and continue.
	 Recognize first pass by local_set NULL.  On subsequent passes, we
	 get to skip out early if live_at_end wouldn't have changed.  */

      if (bb->local_set == NULL)
	{
	  bb->local_set = OBSTACK_ALLOC_REG_SET (function_obstack);
	  rescan = 1;
	}
      else
	{
	  /* If any bits were removed from live_at_end, we'll have to
	     rescan the block.  This wouldn't be necessary if we had
	     precalculated local_live, however with PROP_SCAN_DEAD_CODE
	     local_live is really dependant on live_at_end.  */
	  CLEAR_REG_SET (tmp);
	  rescan = bitmap_operation (tmp, bb->global_live_at_end,
				     new_live_at_end, BITMAP_AND_COMPL);

	  if (! rescan)
	    {
	      /* Find the set of changed bits.  Take this opportunity
		 to notice that this set is empty and early out.  */
	      CLEAR_REG_SET (tmp);
	      changed = bitmap_operation (tmp, bb->global_live_at_end,
					  new_live_at_end, BITMAP_XOR);
	      if (! changed)
		continue;

	      /* If any of the changed bits overlap with local_set,
		 we'll have to rescan the block.  Detect overlap by
		 the AND with ~local_set turning off bits.  */
	      rescan = bitmap_operation (tmp, tmp, bb->local_set,
					 BITMAP_AND_COMPL);
	    }
	}

      /* Let our caller know that BB changed enough to require its
	 death notes updated.  */
      SET_BIT (blocks_out, bb->index);

      if (! rescan)
	{
	  /* Add to live_at_start the set of all registers in
	     new_live_at_end that aren't in the old live_at_end.  */

	  bitmap_operation (tmp, new_live_at_end, bb->global_live_at_end,
			    BITMAP_AND_COMPL);
	  COPY_REG_SET (bb->global_live_at_end, new_live_at_end);

	  changed = bitmap_operation (bb->global_live_at_start,
				      bb->global_live_at_start,
				      tmp, BITMAP_IOR);
	  if (! changed)
	    continue;
	}
      else
	{
	  COPY_REG_SET (bb->global_live_at_end, new_live_at_end);

	  /* Rescan the block insn by insn to turn (a copy of) live_at_end
	     into live_at_start.  */
	  propagate_block (bb, new_live_at_end, bb->local_set, flags);

	  /* If live_at start didn't change, no need to go farther.  */
	  if (REG_SET_EQUAL_P (bb->global_live_at_start, new_live_at_end))
	    continue;

	  COPY_REG_SET (bb->global_live_at_start, new_live_at_end);
	}

      /* Queue all predecessors of BB so that we may re-examine
	 their live_at_end.  */
      for (e = bb->pred; e ; e = e->pred_next)
	{
	  basic_block pb = e->src;
	  if (pb->aux == NULL)
	    {
	      *qtail++ = pb;
	      if (qtail == qend)
		qtail = queue;
	      pb->aux = pb;
	    }
	}
    }

  FREE_REG_SET (tmp);
  FREE_REG_SET (new_live_at_end);

  EXECUTE_IF_SET_IN_SBITMAP (blocks_out, 0, i,
    {
      basic_block bb = BASIC_BLOCK (i);
      FREE_REG_SET (bb->local_set);
    });

  free (queue);
}
3099 /* Subroutines of life analysis. */
3101 /* Allocate the permanent data structures that represent the results
3102 of life analysis. Not static since used also for stupid life analysis. */
3104 void
3105 allocate_bb_life_data ()
3107 register int i;
3109 for (i = 0; i < n_basic_blocks; i++)
3111 basic_block bb = BASIC_BLOCK (i);
3113 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
3114 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);
3117 ENTRY_BLOCK_PTR->global_live_at_end
3118 = OBSTACK_ALLOC_REG_SET (function_obstack);
3119 EXIT_BLOCK_PTR->global_live_at_start
3120 = OBSTACK_ALLOC_REG_SET (function_obstack);
3122 regs_live_at_setjmp = OBSTACK_ALLOC_REG_SET (function_obstack);
3125 void
3126 allocate_reg_life_data ()
3128 int i;
3130 /* Recalculate the register space, in case it has grown. Old style
3131 vector oriented regsets would set regset_{size,bytes} here also. */
3132 allocate_reg_info (max_regno, FALSE, FALSE);
3134 /* Reset all the data we'll collect in propagate_block and its
3135 subroutines. */
3136 for (i = 0; i < max_regno; i++)
3138 REG_N_SETS (i) = 0;
3139 REG_N_REFS (i) = 0;
3140 REG_N_DEATHS (i) = 0;
3141 REG_N_CALLS_CROSSED (i) = 0;
3142 REG_LIVE_LENGTH (i) = 0;
3143 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
3147 /* Compute the registers live at the beginning of a basic block
3148 from those live at the end.
3150 When called, OLD contains those live at the end.
3151 On return, it contains those live at the beginning.
3152 FIRST and LAST are the first and last insns of the basic block.
3154 FINAL is nonzero if we are doing the final pass which is not
3155 for computing the life info (since that has already been done)
3156 but for acting on it. On this pass, we delete dead stores,
3157 set up the logical links and dead-variables lists of instructions,
3158 and merge instructions for autoincrement and autodecrement addresses.
3160 SIGNIFICANT is nonzero only the first time for each basic block.
3161 If it is nonzero, it points to a regset in which we store
3162 a 1 for each register that is set within the block.
3164 BNUM is the number of the basic block. */
3166 static void
3167 propagate_block (bb, old, significant, flags)
3168 basic_block bb;
3169 regset old;
3170 regset significant;
3171 int flags;
3173 register rtx insn;
3174 rtx prev;
3175 regset live;
3176 regset dead;
3178 /* Find the loop depth for this block. Ignore loop level changes in the
3179 middle of the basic block -- for register allocation purposes, the
3180 important uses will be in the blocks wholely contained within the loop
3181 not in the loop pre-header or post-trailer. */
3182 loop_depth = bb->loop_depth;
3184 dead = ALLOCA_REG_SET ();
3185 live = ALLOCA_REG_SET ();
3187 cc0_live = 0;
3189 if (flags & PROP_REG_INFO)
3191 register int i;
3193 /* Process the regs live at the end of the block.
3194 Mark them as not local to any one basic block. */
3195 EXECUTE_IF_SET_IN_REG_SET (old, 0, i,
3197 REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL;
3201 /* Scan the block an insn at a time from end to beginning. */
3203 for (insn = bb->end; ; insn = prev)
3205 prev = PREV_INSN (insn);
3207 if (GET_CODE (insn) == NOTE)
3209 /* If this is a call to `setjmp' et al,
3210 warn if any non-volatile datum is live. */
3212 if ((flags & PROP_REG_INFO)
3213 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
3214 IOR_REG_SET (regs_live_at_setjmp, old);
3217 /* Update the life-status of regs for this insn.
3218 First DEAD gets which regs are set in this insn
3219 then LIVE gets which regs are used in this insn.
3220 Then the regs live before the insn
3221 are those live after, with DEAD regs turned off,
3222 and then LIVE regs turned on. */
3224 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
3226 register int i;
3227 rtx note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
3228 int insn_is_dead = 0;
3229 int libcall_is_dead = 0;
3231 if (flags & PROP_SCAN_DEAD_CODE)
3233 insn_is_dead = insn_dead_p (PATTERN (insn), old, 0,
3234 REG_NOTES (insn));
3235 libcall_is_dead = (insn_is_dead && note != 0
3236 && libcall_dead_p (PATTERN (insn), old,
3237 note, insn));
3240 /* We almost certainly don't want to delete prologue or epilogue
3241 instructions. Warn about probable compiler losage. */
3242 if (insn_is_dead
3243 && reload_completed
3244 && (HAVE_epilogue || HAVE_prologue)
3245 && prologue_epilogue_contains (insn))
3247 if (flags & PROP_KILL_DEAD_CODE)
3249 warning ("ICE: would have deleted prologue/epilogue insn");
3250 if (!inhibit_warnings)
3251 debug_rtx (insn);
3253 libcall_is_dead = insn_is_dead = 0;
3256 /* If an instruction consists of just dead store(s) on final pass,
3257 "delete" it by turning it into a NOTE of type NOTE_INSN_DELETED.
3258 We could really delete it with delete_insn, but that
3259 can cause trouble for first or last insn in a basic block. */
3260 if ((flags & PROP_KILL_DEAD_CODE) && insn_is_dead)
3262 rtx inote;
3263 /* If the insn referred to a label, note that the label is
3264 now less used. */
3265 for (inote = REG_NOTES (insn); inote; inote = XEXP (inote, 1))
3267 if (REG_NOTE_KIND (inote) == REG_LABEL)
3269 rtx label = XEXP (inote, 0);
3270 rtx next;
3271 LABEL_NUSES (label)--;
3273 /* If this label was attached to an ADDR_VEC, it's
3274 safe to delete the ADDR_VEC. In fact, it's pretty
3275 much mandatory to delete it, because the ADDR_VEC may
3276 be referencing labels that no longer exist. */
3277 if (LABEL_NUSES (label) == 0
3278 && (next = next_nonnote_insn (label)) != NULL
3279 && GET_CODE (next) == JUMP_INSN
3280 && (GET_CODE (PATTERN (next)) == ADDR_VEC
3281 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
3283 rtx pat = PATTERN (next);
3284 int diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
3285 int len = XVECLEN (pat, diff_vec_p);
3286 int i;
3287 for (i = 0; i < len; i++)
3288 LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0))--;
3289 PUT_CODE (next, NOTE);
3290 NOTE_LINE_NUMBER (next) = NOTE_INSN_DELETED;
3291 NOTE_SOURCE_FILE (next) = 0;
3296 PUT_CODE (insn, NOTE);
3297 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
3298 NOTE_SOURCE_FILE (insn) = 0;
3300 /* CC0 is now known to be dead. Either this insn used it,
3301 in which case it doesn't anymore, or clobbered it,
3302 so the next insn can't use it. */
3303 cc0_live = 0;
3305 /* If this insn is copying the return value from a library call,
3306 delete the entire library call. */
3307 if (libcall_is_dead)
3309 rtx first = XEXP (note, 0);
3310 rtx p = insn;
3311 while (INSN_DELETED_P (first))
3312 first = NEXT_INSN (first);
3313 while (p != first)
3315 p = PREV_INSN (p);
3316 PUT_CODE (p, NOTE);
3317 NOTE_LINE_NUMBER (p) = NOTE_INSN_DELETED;
3318 NOTE_SOURCE_FILE (p) = 0;
3321 goto flushed;
3324 CLEAR_REG_SET (dead);
3325 CLEAR_REG_SET (live);
3327 /* See if this is an increment or decrement that can be
3328 merged into a following memory address. */
3329 #ifdef AUTO_INC_DEC
3331 register rtx x = single_set (insn);
3333 /* Does this instruction increment or decrement a register? */
3334 if (!reload_completed
3335 && (flags & PROP_AUTOINC)
3336 && x != 0
3337 && GET_CODE (SET_DEST (x)) == REG
3338 && (GET_CODE (SET_SRC (x)) == PLUS
3339 || GET_CODE (SET_SRC (x)) == MINUS)
3340 && XEXP (SET_SRC (x), 0) == SET_DEST (x)
3341 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
3342 /* Ok, look for a following memory ref we can combine with.
3343 If one is found, change the memory ref to a PRE_INC
3344 or PRE_DEC, cancel this insn, and return 1.
3345 Return 0 if nothing has been done. */
3346 && try_pre_increment_1 (insn))
3347 goto flushed;
3349 #endif /* AUTO_INC_DEC */
3351 /* If this is not the final pass, and this insn is copying the
3352 value of a library call and it's dead, don't scan the
3353 insns that perform the library call, so that the call's
3354 arguments are not marked live. */
3355 if (libcall_is_dead)
3357 /* Mark the dest reg as `significant'. */
3358 mark_set_regs (old, dead, PATTERN (insn), NULL_RTX,
3359 significant, flags);
3361 insn = XEXP (note, 0);
3362 prev = PREV_INSN (insn);
3364 else if (GET_CODE (PATTERN (insn)) == SET
3365 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
3366 && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
3367 && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx
3368 && GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == CONST_INT)
3369 /* We have an insn to pop a constant amount off the stack.
3370 (Such insns use PLUS regardless of the direction of the stack,
3371 and any insn to adjust the stack by a constant is always a pop.)
3372 These insns, if not dead stores, have no effect on life. */
3374 else
3376 /* Any regs live at the time of a call instruction
3377 must not go in a register clobbered by calls.
3378 Find all regs now live and record this for them. */
3380 if (GET_CODE (insn) == CALL_INSN
3381 && (flags & PROP_REG_INFO))
3382 EXECUTE_IF_SET_IN_REG_SET (old, 0, i,
3384 REG_N_CALLS_CROSSED (i)++;
3387 /* LIVE gets the regs used in INSN;
3388 DEAD gets those set by it. Dead insns don't make anything
3389 live. */
3391 mark_set_regs (old, dead, PATTERN (insn),
3392 insn, significant, flags);
3394 /* If an insn doesn't use CC0, it becomes dead since we
3395 assume that every insn clobbers it. So show it dead here;
3396 mark_used_regs will set it live if it is referenced. */
3397 cc0_live = 0;
3399 if (! insn_is_dead)
3400 mark_used_regs (old, live, PATTERN (insn), flags, insn);
3402 /* Sometimes we may have inserted something before INSN (such as
3403 a move) when we make an auto-inc. So ensure we will scan
3404 those insns. */
3405 #ifdef AUTO_INC_DEC
3406 prev = PREV_INSN (insn);
3407 #endif
3409 if (! insn_is_dead && GET_CODE (insn) == CALL_INSN)
3411 register int i;
3413 rtx note;
3415 for (note = CALL_INSN_FUNCTION_USAGE (insn);
3416 note;
3417 note = XEXP (note, 1))
3418 if (GET_CODE (XEXP (note, 0)) == USE)
3419 mark_used_regs (old, live, XEXP (XEXP (note, 0), 0),
3420 flags, insn);
3422 /* Each call clobbers all call-clobbered regs that are not
3423 global or fixed. Note that the function-value reg is a
3424 call-clobbered reg, and mark_set_regs has already had
3425 a chance to handle it. */
3427 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3428 if (call_used_regs[i] && ! global_regs[i]
3429 && ! fixed_regs[i])
3431 SET_REGNO_REG_SET (dead, i);
3432 if (significant)
3433 SET_REGNO_REG_SET (significant, i);
3436 /* The stack ptr is used (honorarily) by a CALL insn. */
3437 SET_REGNO_REG_SET (live, STACK_POINTER_REGNUM);
3439 /* Calls may also reference any of the global registers,
3440 so they are made live. */
3441 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3442 if (global_regs[i])
3443 mark_used_regs (old, live,
3444 gen_rtx_REG (reg_raw_mode[i], i),
3445 flags, insn);
3447 /* Calls also clobber memory. */
3448 free_EXPR_LIST_list (&mem_set_list);
3451 /* Update OLD for the registers used or set. */
3452 AND_COMPL_REG_SET (old, dead);
3453 IOR_REG_SET (old, live);
3457 /* On final pass, update counts of how many insns in which
3458 each reg is live. */
3459 if (flags & PROP_REG_INFO)
3460 EXECUTE_IF_SET_IN_REG_SET (old, 0, i, { REG_LIVE_LENGTH (i)++; });
3462 flushed:
3463 if (insn == bb->head)
3464 break;
3467 FREE_REG_SET (dead);
3468 FREE_REG_SET (live);
3469 free_EXPR_LIST_list (&mem_set_list);
/* Return 1 if X (the body of an insn, or part of it) is just dead stores
   (SET expressions whose destinations are registers dead after the insn).
   NEEDED is the regset that says which regs are alive after the insn.

   Unless CALL_OK is non-zero, an insn is needed if it contains a CALL.

   If X is the entire body of an insn, NOTES contains the reg notes
   pertaining to the insn.  */

static int
insn_dead_p (x, needed, call_ok, notes)
     rtx x;
     regset needed;
     int call_ok;
     rtx notes ATTRIBUTE_UNUSED;
{
  enum rtx_code code = GET_CODE (x);

#ifdef AUTO_INC_DEC
  /* If flow is invoked after reload, we must take existing AUTO_INC
     expressions into account.  An insn with a REG_INC note has a side
     effect beyond its SET, so it is live whenever the incremented
     register is needed (or is a global reg).  */
  if (reload_completed)
    {
      for ( ; notes; notes = XEXP (notes, 1))
	{
	  if (REG_NOTE_KIND (notes) == REG_INC)
	    {
	      int regno = REGNO (XEXP (notes, 0));

	      /* Don't delete insns to set global regs.  */
	      if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
		  || REGNO_REG_SET_P (needed, regno))
		return 0;
	    }
	}
    }
#endif

  /* If setting something that's a reg or part of one,
     see if that register's altered value will be live.  */

  if (code == SET)
    {
      rtx r = SET_DEST (x);

#ifdef HAVE_cc0
      /* A set of the condition-code register is dead only if no
	 following insn in the block reads CC0.  */
      if (GET_CODE (r) == CC0)
	return ! cc0_live;
#endif

      /* A SET that is a subroutine call cannot be dead.  */
      if (GET_CODE (SET_SRC (x)) == CALL)
	{
	  if (! call_ok)
	    return 0;
	}

      /* Don't eliminate loads from volatile memory or volatile asms.  */
      else if (volatile_refs_p (SET_SRC (x)))
	return 0;

      if (GET_CODE (r) == MEM)
	{
	  rtx temp;

	  if (MEM_VOLATILE_P (r))
	    return 0;

	  /* Walk the set of memory locations we are currently tracking
	     and see if one is an identical match to this memory location.
	     If so, this memory write is dead (remember, we're walking
	     backwards from the end of the block to the start).  */
	  temp = mem_set_list;
	  while (temp)
	    {
	      if (rtx_equal_p (XEXP (temp, 0), r))
		return 1;
	      temp = XEXP (temp, 1);
	    }
	}
      else
	{
	  /* Strip wrappers so that R is the register actually written.  */
	  while (GET_CODE (r) == SUBREG
		 || GET_CODE (r) == STRICT_LOW_PART
		 || GET_CODE (r) == ZERO_EXTRACT)
	    r = XEXP (r, 0);

	  if (GET_CODE (r) == REG)
	    {
	      int regno = REGNO (r);

	      /* Obvious.  */
	      if (REGNO_REG_SET_P (needed, regno))
		return 0;

	      /* If this is a hard register, verify that subsequent
		 words are not needed.  */
	      if (regno < FIRST_PSEUDO_REGISTER)
		{
		  int n = HARD_REGNO_NREGS (regno, GET_MODE (r));

		  while (--n > 0)
		    if (REGNO_REG_SET_P (needed, regno+n))
		      return 0;
		}

	      /* Don't delete insns to set global regs.  */
	      if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
		return 0;

	      /* Make sure insns to set the stack pointer aren't deleted.  */
	      if (regno == STACK_POINTER_REGNUM)
		return 0;

	      /* Make sure insns to set the frame pointer aren't deleted.  */
	      if (regno == FRAME_POINTER_REGNUM
		  && (! reload_completed || frame_pointer_needed))
		return 0;
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
	      if (regno == HARD_FRAME_POINTER_REGNUM
		  && (! reload_completed || frame_pointer_needed))
		return 0;
#endif

#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
	      /* Make sure insns to set arg pointer are never deleted
		 (if the arg pointer isn't fixed, there will be a USE
		 for it, so we can treat it normally).  */
	      if (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
		return 0;
#endif

	      /* Otherwise, the set is dead.  */
	      return 1;
	    }
	}
    }

  /* If performing several activities, insn is dead if each activity
     is individually dead.  Also, CLOBBERs and USEs can be ignored; a
     CLOBBER or USE that's inside a PARALLEL doesn't make the insn
     worth keeping.  */
  else if (code == PARALLEL)
    {
      int i = XVECLEN (x, 0);

      for (i--; i >= 0; i--)
	if (GET_CODE (XVECEXP (x, 0, i)) != CLOBBER
	    && GET_CODE (XVECEXP (x, 0, i)) != USE
	    && ! insn_dead_p (XVECEXP (x, 0, i), needed, call_ok, NULL_RTX))
	  return 0;

      return 1;
    }

  /* A CLOBBER of a pseudo-register that is dead serves no purpose.  That
     is not necessarily true for hard registers.  */
  else if (code == CLOBBER && GET_CODE (XEXP (x, 0)) == REG
	   && REGNO (XEXP (x, 0)) >= FIRST_PSEUDO_REGISTER
	   && ! REGNO_REG_SET_P (needed, REGNO (XEXP (x, 0))))
    return 1;

  /* We do not check other CLOBBER or USE here.  An insn consisting of just
     a CLOBBER or just a USE should not be deleted.  */
  return 0;
}
/* If X is the pattern of the last insn in a libcall, and assuming X is dead,
   return 1 if the entire library call is dead.
   This is true if X copies a register (hard or pseudo)
   and if the hard return reg of the call insn is dead.
   (The caller should have tested the destination of X already for death.)

   If this insn doesn't just copy a register, then we don't
   have an ordinary libcall.  In that case, cse could not have
   managed to substitute the source for the dest later on,
   so we can assume the libcall is dead.

   NEEDED is the bit vector of pseudoregs live before this insn.
   NOTE is the REG_RETVAL note of the insn.  INSN is the insn itself.  */

static int
libcall_dead_p (x, needed, note, insn)
     rtx x;
     regset needed;
     rtx note;
     rtx insn;
{
  register RTX_CODE code = GET_CODE (x);

  if (code == SET)
    {
      register rtx r = SET_SRC (x);
      if (GET_CODE (r) == REG)
	{
	  /* XEXP (note, 0) is the matching REG_LIBCALL insn, i.e. the
	     first insn of the libcall sequence.  */
	  rtx call = XEXP (note, 0);
	  rtx call_pat;
	  register int i;

	  /* Find the call insn.  */
	  while (call != insn && GET_CODE (call) != CALL_INSN)
	    call = NEXT_INSN (call);

	  /* If there is none, do nothing special,
	     since ordinary death handling can understand these insns.  */
	  if (call == insn)
	    return 0;

	  /* See if the hard reg holding the value is dead.
	     If this is a PARALLEL, find the call within it.  */
	  call_pat = PATTERN (call);
	  if (GET_CODE (call_pat) == PARALLEL)
	    {
	      for (i = XVECLEN (call_pat, 0) - 1; i >= 0; i--)
		if (GET_CODE (XVECEXP (call_pat, 0, i)) == SET
		    && GET_CODE (SET_SRC (XVECEXP (call_pat, 0, i))) == CALL)
		  break;

	      /* This may be a library call that is returning a value
		 via invisible pointer.  Do nothing special, since
		 ordinary death handling can understand these insns.  */
	      if (i < 0)
		return 0;

	      call_pat = XVECEXP (call_pat, 0, i);
	    }

	  return insn_dead_p (call_pat, needed, 1, REG_NOTES (call));
	}
    }

  /* Not a register copy: per the header comment, the libcall is
     assumed dead.  */
  return 1;
}
3705 /* Return 1 if register REGNO was used before it was set, i.e. if it is
3706 live at function entry. Don't count global register variables, variables
3707 in registers that can be used for function arg passing, or variables in
3708 fixed hard registers. */
3711 regno_uninitialized (regno)
3712 int regno;
3714 if (n_basic_blocks == 0
3715 || (regno < FIRST_PSEUDO_REGISTER
3716 && (global_regs[regno]
3717 || fixed_regs[regno]
3718 || FUNCTION_ARG_REGNO_P (regno))))
3719 return 0;
3721 return REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno);
3724 /* 1 if register REGNO was alive at a place where `setjmp' was called
3725 and was set more than once or is an argument.
3726 Such regs may be clobbered by `longjmp'. */
3729 regno_clobbered_at_setjmp (regno)
3730 int regno;
3732 if (n_basic_blocks == 0)
3733 return 0;
3735 return ((REG_N_SETS (regno) > 1
3736 || REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno))
3737 && REGNO_REG_SET_P (regs_live_at_setjmp, regno));
3740 /* INSN references memory, possibly using autoincrement addressing modes.
3741 Find any entries on the mem_set_list that need to be invalidated due
3742 to an address change. */
3743 static void
3744 invalidate_mems_from_autoinc (insn)
3745 rtx insn;
3747 rtx note = REG_NOTES (insn);
3748 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
3750 if (REG_NOTE_KIND (note) == REG_INC)
3752 rtx temp = mem_set_list;
3753 rtx prev = NULL_RTX;
3754 rtx next;
3756 while (temp)
3758 next = XEXP (temp, 1);
3759 if (reg_overlap_mentioned_p (XEXP (note, 0), XEXP (temp, 0)))
3761 /* Splice temp out of list. */
3762 if (prev)
3763 XEXP (prev, 1) = next;
3764 else
3765 mem_set_list = next;
3766 free_EXPR_LIST_node (temp);
3768 else
3769 prev = temp;
3770 temp = next;
3776 /* Process the registers that are set within X. Their bits are set to
3777 1 in the regset DEAD, because they are dead prior to this insn.
3779 If INSN is nonzero, it is the insn being processed.
3781 FLAGS is the set of operations to perform. */
3783 static void
3784 mark_set_regs (needed, dead, x, insn, significant, flags)
3785 regset needed;
3786 regset dead;
3787 rtx x;
3788 rtx insn;
3789 regset significant;
3790 int flags;
3792 register RTX_CODE code = GET_CODE (x);
3794 if (code == SET || code == CLOBBER)
3795 mark_set_1 (needed, dead, x, insn, significant, flags);
3796 else if (code == PARALLEL)
3798 register int i;
3799 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
3801 code = GET_CODE (XVECEXP (x, 0, i));
3802 if (code == SET || code == CLOBBER)
3803 mark_set_1 (needed, dead, XVECEXP (x, 0, i), insn,
3804 significant, flags);
/* Process a single SET (or CLOBBER) rtx, X, recording in DEAD the regs
   it sets, in SIGNIFICANT the regs whose liveness at block end matters,
   and updating reg-info/log-link/death-note data per FLAGS.  */

static void
mark_set_1 (needed, dead, x, insn, significant, flags)
     regset needed;
     regset dead;
     rtx x;
     rtx insn;
     regset significant;
     int flags;
{
  register int regno = -1;
  register rtx reg = SET_DEST (x);

  /* Some targets place small structures in registers for
     return values of functions.  We have to detect this
     case specially here to get correct flow information.  */
  if (GET_CODE (reg) == PARALLEL
      && GET_MODE (reg) == BLKmode)
    {
      register int i;

      /* Recurse on each piece of the block-mode PARALLEL.  */
      for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
	mark_set_1 (needed, dead, XVECEXP (reg, 0, i), insn,
		    significant, flags);
      return;
    }

  /* Modifying just one hardware register of a multi-reg value
     or just a byte field of a register
     does not mean the value from before this insn is now dead.
     But it does mean liveness of that register at the end of the block
     is significant.

     Within mark_set_1, however, we treat it as if the register is
     indeed modified.  mark_used_regs will, however, also treat this
     register as being used.  Thus, we treat these insns as setting a
     new value for the register as a function of its old value.  This
     cases LOG_LINKS to be made appropriately and this will help combine.  */

  while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
	 || GET_CODE (reg) == SIGN_EXTRACT
	 || GET_CODE (reg) == STRICT_LOW_PART)
    reg = XEXP (reg, 0);

  /* If this set is a MEM, then it kills any aliased writes.
     If this set is a REG, then it kills any MEMs which use the reg.  */
  if (flags & PROP_SCAN_DEAD_CODE)
    {
      if (GET_CODE (reg) == MEM
	  || GET_CODE (reg) == REG)
	{
	  rtx temp = mem_set_list;
	  rtx prev = NULL_RTX;
	  rtx next;

	  while (temp)
	    {
	      next = XEXP (temp, 1);
	      if ((GET_CODE (reg) == MEM
		   && output_dependence (XEXP (temp, 0), reg))
		  || (GET_CODE (reg) == REG
		      && reg_overlap_mentioned_p (reg, XEXP (temp, 0))))
		{
		  /* Splice this entry out of the list.  */
		  if (prev)
		    XEXP (prev, 1) = next;
		  else
		    mem_set_list = next;
		  free_EXPR_LIST_node (temp);
		}
	      else
		prev = temp;
	      temp = next;
	    }
	}

      /* If the memory reference had embedded side effects (autoincrement
	 address modes), then we may need to kill some entries on the
	 memory set list.  */
      if (insn && GET_CODE (reg) == MEM)
	invalidate_mems_from_autoinc (insn);

      if (GET_CODE (reg) == MEM && ! side_effects_p (reg)
	  /* We do not know the size of a BLKmode store, so we do not track
	     them for redundant store elimination.  */
	  && GET_MODE (reg) != BLKmode
	  /* There are no REG_INC notes for SP, so we can't assume we'll see
	     everything that invalidates it.  To be safe, don't eliminate any
	     stores though SP; none of them should be redundant anyway.  */
	  && ! reg_mentioned_p (stack_pointer_rtx, reg))
	mem_set_list = alloc_EXPR_LIST (0, reg, mem_set_list);
    }

  /* Ignore sets of registers that reload/elimination will deal with
     (frame pointer, hard frame pointer, fixed arg pointer, globals).  */
  if (GET_CODE (reg) == REG
      && (regno = REGNO (reg),
	  ! (regno == FRAME_POINTER_REGNUM
	     && (! reload_completed || frame_pointer_needed)))
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
      && ! (regno == HARD_FRAME_POINTER_REGNUM
	    && (! reload_completed || frame_pointer_needed))
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
      && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
#endif
      && ! (regno < FIRST_PSEUDO_REGISTER && global_regs[regno]))
      /* && regno != STACK_POINTER_REGNUM) -- let's try without this.  */
    {
      int some_needed = REGNO_REG_SET_P (needed, regno);
      int some_not_needed = ! some_needed;

      /* Mark it as a significant register for this basic block.  */
      if (significant)
	SET_REGNO_REG_SET (significant, regno);

      /* Mark it as dead before this insn.  */
      SET_REGNO_REG_SET (dead, regno);

      /* A hard reg in a wide mode may really be multiple registers.
	 If so, mark all of them just like the first.  */
      if (regno < FIRST_PSEUDO_REGISTER)
	{
	  int n;

	  /* Nothing below is needed for the stack pointer; get out asap.
	     Eg, log links aren't needed, since combine won't use them.  */
	  if (regno == STACK_POINTER_REGNUM)
	    return;

	  n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
	  while (--n > 0)
	    {
	      int regno_n = regno + n;
	      int needed_regno = REGNO_REG_SET_P (needed, regno_n);
	      if (significant)
		SET_REGNO_REG_SET (significant, regno_n);

	      SET_REGNO_REG_SET (dead, regno_n);
	      some_needed |= needed_regno;
	      some_not_needed |= ! needed_regno;
	    }
	}

      /* Additional data to record if this is the final pass.  */
      if (flags & (PROP_LOG_LINKS | PROP_REG_INFO
		   | PROP_DEATH_NOTES | PROP_AUTOINC))
	{
	  register rtx y;
	  register int blocknum = BLOCK_NUM (insn);

	  y = NULL_RTX;
	  if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
	    y = reg_next_use[regno];

	  /* If this is a hard reg, record this function uses the reg.  */

	  if (regno < FIRST_PSEUDO_REGISTER)
	    {
	      register int i;
	      int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (reg));

	      if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
		for (i = regno; i < endregno; i++)
		  /* The next use is no longer "next", since a store
		     intervenes.  */
		  reg_next_use[i] = 0;

	      if (flags & PROP_REG_INFO)
		for (i = regno; i < endregno; i++)
		  {
		    regs_ever_live[i] = 1;
		    REG_N_SETS (i)++;
		  }
	    }
	  else
	    {
	      /* The next use is no longer "next", since a store
		 intervenes.  */
	      if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
		reg_next_use[regno] = 0;

	      /* Keep track of which basic blocks each reg appears in.  */

	      if (flags & PROP_REG_INFO)
		{
		  if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
		    REG_BASIC_BLOCK (regno) = blocknum;
		  else if (REG_BASIC_BLOCK (regno) != blocknum)
		    REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;

		  /* Count (weighted) references, stores, etc.  This counts a
		     register twice if it is modified, but that is correct.  */
		  REG_N_SETS (regno)++;
		  REG_N_REFS (regno) += loop_depth + 1;

		  /* The insns where a reg is live are normally counted
		     elsewhere, but we want the count to include the insn
		     where the reg is set, and the normal counting mechanism
		     would not count it.  */
		  REG_LIVE_LENGTH (regno)++;
		}
	    }

	  if (! some_not_needed)
	    {
	      if (flags & PROP_LOG_LINKS)
		{
		  /* Make a logical link from the next following insn
		     that uses this register, back to this insn.
		     The following insns have already been processed.

		     We don't build a LOG_LINK for hard registers containing
		     in ASM_OPERANDs.  If these registers get replaced,
		     we might wind up changing the semantics of the insn,
		     even if reload can make what appear to be valid
		     assignments later.  */
		  if (y && (BLOCK_NUM (y) == blocknum)
		      && (regno >= FIRST_PSEUDO_REGISTER
			  || asm_noperands (PATTERN (y)) < 0))
		    LOG_LINKS (y) = alloc_INSN_LIST (insn, LOG_LINKS (y));
		}
	    }
	  else if (! some_needed)
	    {
	      if (flags & PROP_REG_INFO)
		REG_N_DEATHS (REGNO (reg))++;

	      if (flags & PROP_DEATH_NOTES)
		{
		  /* Note that dead stores have already been deleted
		     when possible.  If we get here, we have found a
		     dead store that cannot be eliminated (because the
		     same insn does something useful).  Indicate this
		     by marking the reg being set as dying here.  */
		  REG_NOTES (insn)
		    = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
		}
	    }
	  else
	    {
	      if (flags & PROP_DEATH_NOTES)
		{
		  /* This is a case where we have a multi-word hard register
		     and some, but not all, of the words of the register are
		     needed in subsequent insns.  Write REG_UNUSED notes
		     for those parts that were not needed.  This case should
		     be rare.  */

		  int i;

		  for (i = HARD_REGNO_NREGS (regno, GET_MODE (reg)) - 1;
		       i >= 0; i--)
		    if (!REGNO_REG_SET_P (needed, regno + i))
		      REG_NOTES (insn)
			= (alloc_EXPR_LIST
			   (REG_UNUSED,
			    gen_rtx_REG (reg_raw_mode[regno + i], regno + i),
			    REG_NOTES (insn)));
		}
	    }
	}
    }
  else if (GET_CODE (reg) == REG)
    {
      /* An excluded register (frame/arg pointer, global): just forget
	 any recorded next use.  */
      if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
	reg_next_use[regno] = 0;
    }

  /* If this is the last pass and this is a SCRATCH, show it will be dying
     here and count it.  */
  else if (GET_CODE (reg) == SCRATCH)
    {
      if (flags & PROP_DEATH_NOTES)
	REG_NOTES (insn)
	  = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
    }
}
4089 #ifdef AUTO_INC_DEC
/* X is a MEM found in INSN.  See if we can convert it into an auto-increment
   reference, by merging a following (or recorded-next-use) increment of
   the address register into the address itself.  */

static void
find_auto_inc (needed, x, insn)
     regset needed;
     rtx x;
     rtx insn;
{
  rtx addr = XEXP (x, 0);
  HOST_WIDE_INT offset = 0;
  rtx set;

  /* Here we detect use of an index register which might be good for
     postincrement, postdecrement, preincrement, or predecrement.  */

  if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
    offset = INTVAL (XEXP (addr, 1)), addr = XEXP (addr, 0);

  if (GET_CODE (addr) == REG)
    {
      register rtx y;
      register int size = GET_MODE_SIZE (GET_MODE (x));
      rtx use;
      rtx incr;
      int regno = REGNO (addr);

      /* Is the next use an increment that might make auto-increment?  */
      if ((incr = reg_next_use[regno]) != 0
	  && (set = single_set (incr)) != 0
	  && GET_CODE (set) == SET
	  && BLOCK_NUM (incr) == BLOCK_NUM (insn)
	  /* Can't add side effects to jumps; if reg is spilled and
	     reloaded, there's no way to store back the altered value.  */
	  && GET_CODE (insn) != JUMP_INSN
	  && (y = SET_SRC (set), GET_CODE (y) == PLUS)
	  && XEXP (y, 0) == addr
	  && GET_CODE (XEXP (y, 1)) == CONST_INT
	  /* The increment amount and current offset must match one of
	     the auto-inc forms the target supports.  */
	  && ((HAVE_POST_INCREMENT
	       && (INTVAL (XEXP (y, 1)) == size && offset == 0))
	      || (HAVE_POST_DECREMENT
		  && (INTVAL (XEXP (y, 1)) == - size && offset == 0))
	      || (HAVE_PRE_INCREMENT
		  && (INTVAL (XEXP (y, 1)) == size && offset == size))
	      || (HAVE_PRE_DECREMENT
		  && (INTVAL (XEXP (y, 1)) == - size && offset == - size)))
	  /* Make sure this reg appears only once in this insn.  */
	  && (use = find_use_as_address (PATTERN (insn), addr, offset),
	      use != 0 && use != (rtx) 1))
	{
	  rtx q = SET_DEST (set);
	  enum rtx_code inc_code = (INTVAL (XEXP (y, 1)) == size
				    ? (offset ? PRE_INC : POST_INC)
				    : (offset ? PRE_DEC : POST_DEC));

	  if (dead_or_set_p (incr, addr))
	    {
	      /* This is the simple case.  Try to make the auto-inc.  If
		 we can't, we are done.  Otherwise, we will do any
		 needed updates below.  */
	      if (! validate_change (insn, &XEXP (x, 0),
				     gen_rtx_fmt_e (inc_code, Pmode, addr),
				     0))
		return;
	    }
	  else if (GET_CODE (q) == REG
		   /* PREV_INSN used here to check the semi-open interval
		      [insn,incr).  */
		   && ! reg_used_between_p (q, PREV_INSN (insn), incr)
		   /* We must also check for sets of q as q may be
		      a call clobbered hard register and there may
		      be a call between PREV_INSN (insn) and incr.  */
		   && ! reg_set_between_p (q, PREV_INSN (insn), incr))
	    {
	      /* We have *p followed sometime later by q = p+size.
		 Both p and q must be live afterward,
		 and q is not used between INSN and its assignment.
		 Change it to q = p, ...*q..., q = q+size.
		 Then fall into the usual case.  */
	      rtx insns, temp;
	      basic_block bb;

	      start_sequence ();
	      emit_move_insn (q, addr);
	      insns = get_insns ();
	      end_sequence ();

	      bb = BLOCK_FOR_INSN (insn);
	      for (temp = insns; temp; temp = NEXT_INSN (temp))
		set_block_for_insn (temp, bb);

	      /* If we can't make the auto-inc, or can't make the
		 replacement into Y, exit.  There's no point in making
		 the change below if we can't do the auto-inc and doing
		 so is not correct in the pre-inc case.  */

	      validate_change (insn, &XEXP (x, 0),
			       gen_rtx_fmt_e (inc_code, Pmode, q),
			       1);
	      validate_change (incr, &XEXP (y, 0), q, 1);
	      if (! apply_change_group ())
		return;

	      /* We now know we'll be doing this change, so emit the
		 new insn(s) and do the updates.  */
	      emit_insns_before (insns, insn);

	      if (BLOCK_FOR_INSN (insn)->head == insn)
		BLOCK_FOR_INSN (insn)->head = insns;

	      /* INCR will become a NOTE and INSN won't contain a
		 use of ADDR.  If a use of ADDR was just placed in
		 the insn before INSN, make that the next use.
		 Otherwise, invalidate it.  */
	      if (GET_CODE (PREV_INSN (insn)) == INSN
		  && GET_CODE (PATTERN (PREV_INSN (insn))) == SET
		  && SET_SRC (PATTERN (PREV_INSN (insn))) == addr)
		reg_next_use[regno] = PREV_INSN (insn);
	      else
		reg_next_use[regno] = 0;

	      /* From here on, treat Q as the address register.  */
	      addr = q;
	      regno = REGNO (q);

	      /* REGNO is now used in INCR which is below INSN, but
		 it previously wasn't live here.  If we don't mark
		 it as needed, we'll put a REG_DEAD note for it
		 on this insn, which is incorrect.  */
	      SET_REGNO_REG_SET (needed, regno);

	      /* If there are any calls between INSN and INCR, show
		 that REGNO now crosses them.  */
	      for (temp = insn; temp != incr; temp = NEXT_INSN (temp))
		if (GET_CODE (temp) == CALL_INSN)
		  REG_N_CALLS_CROSSED (regno)++;
	    }
	  else
	    return;

	  /* If we haven't returned, it means we were able to make the
	     auto-inc, so update the status.  First, record that this insn
	     has an implicit side effect.  */

	  REG_NOTES (insn)
	    = alloc_EXPR_LIST (REG_INC, addr, REG_NOTES (insn));

	  /* Modify the old increment-insn to simply copy
	     the already-incremented value of our register.  */
	  if (! validate_change (incr, &SET_SRC (set), addr, 0))
	    abort ();

	  /* If that makes it a no-op (copying the register into itself) delete
	     it so it won't appear to be a "use" and a "set" of this
	     register.  */
	  if (SET_DEST (set) == addr)
	    {
	      PUT_CODE (incr, NOTE);
	      NOTE_LINE_NUMBER (incr) = NOTE_INSN_DELETED;
	      NOTE_SOURCE_FILE (incr) = 0;
	    }

	  if (regno >= FIRST_PSEUDO_REGISTER)
	    {
	      /* Count an extra reference to the reg.  When a reg is
		 incremented, spilling it is worse, so we want to make
		 that less likely.  */
	      REG_N_REFS (regno) += loop_depth + 1;

	      /* Count the increment as a setting of the register,
		 even though it isn't a SET in rtl.  */
	      REG_N_SETS (regno)++;
	    }
	}
    }
}
4266 #endif /* AUTO_INC_DEC */
4268 /* Scan expression X and store a 1-bit in LIVE for each reg it uses.
4269 This is done assuming the registers needed from X
4270 are those that have 1-bits in NEEDED.
4272 FLAGS is the set of enabled operations.
4274 INSN is the containing instruction. If INSN is dead, this function is not
4275 called. */
4277 static void
4278 mark_used_regs (needed, live, x, flags, insn)
4279 regset needed;
4280 regset live;
4281 rtx x;
4282 int flags;
4283 rtx insn;
4285 register RTX_CODE code;
4286 register int regno;
4287 int i;
4289 retry:
4290 code = GET_CODE (x);
4291 switch (code)
4293 case LABEL_REF:
4294 case SYMBOL_REF:
4295 case CONST_INT:
4296 case CONST:
4297 case CONST_DOUBLE:
4298 case PC:
4299 case ADDR_VEC:
4300 case ADDR_DIFF_VEC:
4301 return;
4303 #ifdef HAVE_cc0
4304 case CC0:
4305 cc0_live = 1;
4306 return;
4307 #endif
4309 case CLOBBER:
4310 /* If we are clobbering a MEM, mark any registers inside the address
4311 as being used. */
4312 if (GET_CODE (XEXP (x, 0)) == MEM)
4313 mark_used_regs (needed, live, XEXP (XEXP (x, 0), 0), flags, insn);
4314 return;
4316 case MEM:
4317 /* Don't bother watching stores to mems if this is not the
4318 final pass. We'll not be deleting dead stores this round. */
4319 if (flags & PROP_SCAN_DEAD_CODE)
4321 /* Invalidate the data for the last MEM stored, but only if MEM is
4322 something that can be stored into. */
4323 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
4324 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
4325 ; /* needn't clear the memory set list */
4326 else
4328 rtx temp = mem_set_list;
4329 rtx prev = NULL_RTX;
4330 rtx next;
4332 while (temp)
4334 next = XEXP (temp, 1);
4335 if (anti_dependence (XEXP (temp, 0), x))
4337 /* Splice temp out of the list. */
4338 if (prev)
4339 XEXP (prev, 1) = next;
4340 else
4341 mem_set_list = next;
4342 free_EXPR_LIST_node (temp);
4344 else
4345 prev = temp;
4346 temp = next;
4350 /* If the memory reference had embedded side effects (autoincrement
4351 address modes. Then we may need to kill some entries on the
4352 memory set list. */
4353 if (insn)
4354 invalidate_mems_from_autoinc (insn);
4357 #ifdef AUTO_INC_DEC
4358 if (flags & PROP_AUTOINC)
4359 find_auto_inc (needed, x, insn);
4360 #endif
4361 break;
4363 case SUBREG:
4364 if (GET_CODE (SUBREG_REG (x)) == REG
4365 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER
4366 && (GET_MODE_SIZE (GET_MODE (x))
4367 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
4368 REG_CHANGES_SIZE (REGNO (SUBREG_REG (x))) = 1;
4370 /* While we're here, optimize this case. */
4371 x = SUBREG_REG (x);
4373 /* In case the SUBREG is not of a register, don't optimize */
4374 if (GET_CODE (x) != REG)
4376 mark_used_regs (needed, live, x, flags, insn);
4377 return;
4380 /* ... fall through ... */
4382 case REG:
4383 /* See a register other than being set
4384 => mark it as needed. */
4386 regno = REGNO (x);
4388 int some_needed = REGNO_REG_SET_P (needed, regno);
4389 int some_not_needed = ! some_needed;
4391 SET_REGNO_REG_SET (live, regno);
4393 /* A hard reg in a wide mode may really be multiple registers.
4394 If so, mark all of them just like the first. */
4395 if (regno < FIRST_PSEUDO_REGISTER)
4397 int n;
4399 /* For stack ptr or fixed arg pointer,
4400 nothing below can be necessary, so waste no more time. */
4401 if (regno == STACK_POINTER_REGNUM
4402 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4403 || (regno == HARD_FRAME_POINTER_REGNUM
4404 && (! reload_completed || frame_pointer_needed))
4405 #endif
4406 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4407 || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
4408 #endif
4409 || (regno == FRAME_POINTER_REGNUM
4410 && (! reload_completed || frame_pointer_needed)))
4412 /* If this is a register we are going to try to eliminate,
4413 don't mark it live here. If we are successful in
4414 eliminating it, it need not be live unless it is used for
4415 pseudos, in which case it will have been set live when
4416 it was allocated to the pseudos. If the register will not
4417 be eliminated, reload will set it live at that point. */
4419 if ((flags & PROP_REG_INFO)
4420 && ! TEST_HARD_REG_BIT (elim_reg_set, regno))
4421 regs_ever_live[regno] = 1;
4422 return;
4424 /* No death notes for global register variables;
4425 their values are live after this function exits. */
4426 if (global_regs[regno])
4428 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4429 reg_next_use[regno] = insn;
4430 return;
4433 n = HARD_REGNO_NREGS (regno, GET_MODE (x));
4434 while (--n > 0)
4436 int regno_n = regno + n;
4437 int needed_regno = REGNO_REG_SET_P (needed, regno_n);
4439 SET_REGNO_REG_SET (live, regno_n);
4440 some_needed |= needed_regno;
4441 some_not_needed |= ! needed_regno;
4445 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4447 /* Record where each reg is used, so when the reg
4448 is set we know the next insn that uses it. */
4450 reg_next_use[regno] = insn;
4452 if (flags & PROP_REG_INFO)
4454 if (regno < FIRST_PSEUDO_REGISTER)
4456 /* If a hard reg is being used,
4457 record that this function does use it. */
4459 i = HARD_REGNO_NREGS (regno, GET_MODE (x));
4460 if (i == 0)
4461 i = 1;
4463 regs_ever_live[regno + --i] = 1;
4464 while (i > 0);
4466 else
4468 /* Keep track of which basic block each reg appears in. */
4470 register int blocknum = BLOCK_NUM (insn);
4472 if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
4473 REG_BASIC_BLOCK (regno) = blocknum;
4474 else if (REG_BASIC_BLOCK (regno) != blocknum)
4475 REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
4477 /* Count (weighted) number of uses of each reg. */
4479 REG_N_REFS (regno) += loop_depth + 1;
4483 /* Record and count the insns in which a reg dies.
4484 If it is used in this insn and was dead below the insn
4485 then it dies in this insn. If it was set in this insn,
4486 we do not make a REG_DEAD note; likewise if we already
4487 made such a note. */
4489 if (flags & PROP_DEATH_NOTES)
4491 if (some_not_needed
4492 && ! dead_or_set_p (insn, x)
4493 #if 0
4494 && (regno >= FIRST_PSEUDO_REGISTER || ! fixed_regs[regno])
4495 #endif
4498 /* Check for the case where the register dying partially
4499 overlaps the register set by this insn. */
4500 if (regno < FIRST_PSEUDO_REGISTER
4501 && HARD_REGNO_NREGS (regno, GET_MODE (x)) > 1)
4503 int n = HARD_REGNO_NREGS (regno, GET_MODE (x));
4504 while (--n >= 0)
4505 some_needed |= dead_or_set_regno_p (insn, regno + n);
4508 /* If none of the words in X is needed, make a REG_DEAD
4509 note. Otherwise, we must make partial REG_DEAD notes. */
4510 if (! some_needed)
4512 REG_NOTES (insn)
4513 = alloc_EXPR_LIST (REG_DEAD, x, REG_NOTES (insn));
4514 REG_N_DEATHS (regno)++;
4516 else
4518 int i;
4520 /* Don't make a REG_DEAD note for a part of a register
4521 that is set in the insn. */
4523 for (i = HARD_REGNO_NREGS (regno, GET_MODE (x)) - 1;
4524 i >= 0; i--)
4525 if (!REGNO_REG_SET_P (needed, regno + i)
4526 && ! dead_or_set_regno_p (insn, regno + i))
4527 REG_NOTES (insn)
4528 = (alloc_EXPR_LIST
4529 (REG_DEAD, gen_rtx_REG (reg_raw_mode[regno + i],
4530 regno + i),
4531 REG_NOTES (insn)));
4536 return;
4538 case SET:
4540 register rtx testreg = SET_DEST (x);
4541 int mark_dest = 0;
4543 /* If storing into MEM, don't show it as being used. But do
4544 show the address as being used. */
4545 if (GET_CODE (testreg) == MEM)
4547 #ifdef AUTO_INC_DEC
4548 if (flags & PROP_AUTOINC)
4549 find_auto_inc (needed, testreg, insn);
4550 #endif
4551 mark_used_regs (needed, live, XEXP (testreg, 0), flags, insn);
4552 mark_used_regs (needed, live, SET_SRC (x), flags, insn);
4553 return;
4556 /* Storing in STRICT_LOW_PART is like storing in a reg
4557 in that this SET might be dead, so ignore it in TESTREG.
4558 but in some other ways it is like using the reg.
4560 Storing in a SUBREG or a bit field is like storing the entire
4561 register in that if the register's value is not used
4562 then this SET is not needed. */
4563 while (GET_CODE (testreg) == STRICT_LOW_PART
4564 || GET_CODE (testreg) == ZERO_EXTRACT
4565 || GET_CODE (testreg) == SIGN_EXTRACT
4566 || GET_CODE (testreg) == SUBREG)
4568 if (GET_CODE (testreg) == SUBREG
4569 && GET_CODE (SUBREG_REG (testreg)) == REG
4570 && REGNO (SUBREG_REG (testreg)) >= FIRST_PSEUDO_REGISTER
4571 && (GET_MODE_SIZE (GET_MODE (testreg))
4572 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (testreg)))))
4573 REG_CHANGES_SIZE (REGNO (SUBREG_REG (testreg))) = 1;
4575 /* Modifying a single register in an alternate mode
4576 does not use any of the old value. But these other
4577 ways of storing in a register do use the old value. */
4578 if (GET_CODE (testreg) == SUBREG
4579 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
4581 else
4582 mark_dest = 1;
4584 testreg = XEXP (testreg, 0);
4587 /* If this is a store into a register,
4588 recursively scan the value being stored. */
4590 if ((GET_CODE (testreg) == PARALLEL
4591 && GET_MODE (testreg) == BLKmode)
4592 || (GET_CODE (testreg) == REG
4593 && (regno = REGNO (testreg), ! (regno == FRAME_POINTER_REGNUM
4594 && (! reload_completed || frame_pointer_needed)))
4595 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4596 && ! (regno == HARD_FRAME_POINTER_REGNUM
4597 && (! reload_completed || frame_pointer_needed))
4598 #endif
4599 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4600 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
4601 #endif
4603 /* We used to exclude global_regs here, but that seems wrong.
4604 Storing in them is like storing in mem. */
4606 mark_used_regs (needed, live, SET_SRC (x), flags, insn);
4607 if (mark_dest)
4608 mark_used_regs (needed, live, SET_DEST (x), flags, insn);
4609 return;
4612 break;
4614 case ASM_OPERANDS:
4615 case UNSPEC_VOLATILE:
4616 case TRAP_IF:
4617 case ASM_INPUT:
4619 /* Traditional and volatile asm instructions must be considered to use
4620 and clobber all hard registers, all pseudo-registers and all of
4621 memory. So must TRAP_IF and UNSPEC_VOLATILE operations.
4623 Consider for instance a volatile asm that changes the fpu rounding
4624 mode. An insn should not be moved across this even if it only uses
4625 pseudo-regs because it might give an incorrectly rounded result.
4627 ?!? Unfortunately, marking all hard registers as live causes massive
4628 problems for the register allocator and marking all pseudos as live
4629 creates mountains of uninitialized variable warnings.
4631 So for now, just clear the memory set list and mark any regs
4632 we can find in ASM_OPERANDS as used. */
4633 if (code != ASM_OPERANDS || MEM_VOLATILE_P (x))
4634 free_EXPR_LIST_list (&mem_set_list);
4636 /* For all ASM_OPERANDS, we must traverse the vector of input operands.
4637 We can not just fall through here since then we would be confused
4638 by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate
4639 traditional asms unlike their normal usage. */
4640 if (code == ASM_OPERANDS)
4642 int j;
4644 for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
4645 mark_used_regs (needed, live, ASM_OPERANDS_INPUT (x, j),
4646 flags, insn);
4648 break;
4652 default:
4653 break;
4656 /* Recursively scan the operands of this expression. */
4659 register const char *fmt = GET_RTX_FORMAT (code);
4660 register int i;
4662 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4664 if (fmt[i] == 'e')
4666 /* Tail recursive case: save a function call level. */
4667 if (i == 0)
4669 x = XEXP (x, 0);
4670 goto retry;
4672 mark_used_regs (needed, live, XEXP (x, i), flags, insn);
4674 else if (fmt[i] == 'E')
4676 register int j;
4677 for (j = 0; j < XVECLEN (x, i); j++)
4678 mark_used_regs (needed, live, XVECEXP (x, i, j), flags, insn);
4684 #ifdef AUTO_INC_DEC
4686 static int
4687 try_pre_increment_1 (insn)
4688 rtx insn;
4690 /* Find the next use of this reg. If in same basic block,
4691 make it do pre-increment or pre-decrement if appropriate. */
4692 rtx x = single_set (insn);
4693 HOST_WIDE_INT amount = ((GET_CODE (SET_SRC (x)) == PLUS ? 1 : -1)
4694 * INTVAL (XEXP (SET_SRC (x), 1)));
4695 int regno = REGNO (SET_DEST (x));
4696 rtx y = reg_next_use[regno];
4697 if (y != 0
4698 && BLOCK_NUM (y) == BLOCK_NUM (insn)
4699 /* Don't do this if the reg dies, or gets set in y; a standard addressing
4700 mode would be better. */
4701 && ! dead_or_set_p (y, SET_DEST (x))
4702 && try_pre_increment (y, SET_DEST (x), amount))
4704 /* We have found a suitable auto-increment
4705 and already changed insn Y to do it.
4706 So flush this increment-instruction. */
4707 PUT_CODE (insn, NOTE);
4708 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
4709 NOTE_SOURCE_FILE (insn) = 0;
4710 /* Count a reference to this reg for the increment
4711 insn we are deleting. When a reg is incremented.
4712 spilling it is worse, so we want to make that
4713 less likely. */
4714 if (regno >= FIRST_PSEUDO_REGISTER)
4716 REG_N_REFS (regno) += loop_depth + 1;
4717 REG_N_SETS (regno)++;
4719 return 1;
4721 return 0;
4724 /* Try to change INSN so that it does pre-increment or pre-decrement
4725 addressing on register REG in order to add AMOUNT to REG.
4726 AMOUNT is negative for pre-decrement.
4727 Returns 1 if the change could be made.
4728 This checks all about the validity of the result of modifying INSN. */
4730 static int
4731 try_pre_increment (insn, reg, amount)
4732 rtx insn, reg;
4733 HOST_WIDE_INT amount;
4735 register rtx use;
4737 /* Nonzero if we can try to make a pre-increment or pre-decrement.
4738 For example, addl $4,r1; movl (r1),... can become movl +(r1),... */
4739 int pre_ok = 0;
4740 /* Nonzero if we can try to make a post-increment or post-decrement.
4741 For example, addl $4,r1; movl -4(r1),... can become movl (r1)+,...
4742 It is possible for both PRE_OK and POST_OK to be nonzero if the machine
4743 supports both pre-inc and post-inc, or both pre-dec and post-dec. */
4744 int post_ok = 0;
4746 /* Nonzero if the opportunity actually requires post-inc or post-dec. */
4747 int do_post = 0;
4749 /* From the sign of increment, see which possibilities are conceivable
4750 on this target machine. */
4751 if (HAVE_PRE_INCREMENT && amount > 0)
4752 pre_ok = 1;
4753 if (HAVE_POST_INCREMENT && amount > 0)
4754 post_ok = 1;
4756 if (HAVE_PRE_DECREMENT && amount < 0)
4757 pre_ok = 1;
4758 if (HAVE_POST_DECREMENT && amount < 0)
4759 post_ok = 1;
4761 if (! (pre_ok || post_ok))
4762 return 0;
4764 /* It is not safe to add a side effect to a jump insn
4765 because if the incremented register is spilled and must be reloaded
4766 there would be no way to store the incremented value back in memory. */
4768 if (GET_CODE (insn) == JUMP_INSN)
4769 return 0;
4771 use = 0;
4772 if (pre_ok)
4773 use = find_use_as_address (PATTERN (insn), reg, 0);
4774 if (post_ok && (use == 0 || use == (rtx) 1))
4776 use = find_use_as_address (PATTERN (insn), reg, -amount);
4777 do_post = 1;
4780 if (use == 0 || use == (rtx) 1)
4781 return 0;
4783 if (GET_MODE_SIZE (GET_MODE (use)) != (amount > 0 ? amount : - amount))
4784 return 0;
4786 /* See if this combination of instruction and addressing mode exists. */
4787 if (! validate_change (insn, &XEXP (use, 0),
4788 gen_rtx_fmt_e (amount > 0
4789 ? (do_post ? POST_INC : PRE_INC)
4790 : (do_post ? POST_DEC : PRE_DEC),
4791 Pmode, reg), 0))
4792 return 0;
4794 /* Record that this insn now has an implicit side effect on X. */
4795 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, reg, REG_NOTES (insn));
4796 return 1;
4799 #endif /* AUTO_INC_DEC */
4801 /* Find the place in the rtx X where REG is used as a memory address.
4802 Return the MEM rtx that so uses it.
4803 If PLUSCONST is nonzero, search instead for a memory address equivalent to
4804 (plus REG (const_int PLUSCONST)).
4806 If such an address does not appear, return 0.
4807 If REG appears more than once, or is used other than in such an address,
4808 return (rtx)1. */
4811 find_use_as_address (x, reg, plusconst)
4812 register rtx x;
4813 rtx reg;
4814 HOST_WIDE_INT plusconst;
4816 enum rtx_code code = GET_CODE (x);
4817 const char *fmt = GET_RTX_FORMAT (code);
4818 register int i;
4819 register rtx value = 0;
4820 register rtx tem;
4822 if (code == MEM && XEXP (x, 0) == reg && plusconst == 0)
4823 return x;
4825 if (code == MEM && GET_CODE (XEXP (x, 0)) == PLUS
4826 && XEXP (XEXP (x, 0), 0) == reg
4827 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4828 && INTVAL (XEXP (XEXP (x, 0), 1)) == plusconst)
4829 return x;
4831 if (code == SIGN_EXTRACT || code == ZERO_EXTRACT)
4833 /* If REG occurs inside a MEM used in a bit-field reference,
4834 that is unacceptable. */
4835 if (find_use_as_address (XEXP (x, 0), reg, 0) != 0)
4836 return (rtx) (HOST_WIDE_INT) 1;
4839 if (x == reg)
4840 return (rtx) (HOST_WIDE_INT) 1;
4842 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4844 if (fmt[i] == 'e')
4846 tem = find_use_as_address (XEXP (x, i), reg, plusconst);
4847 if (value == 0)
4848 value = tem;
4849 else if (tem != 0)
4850 return (rtx) (HOST_WIDE_INT) 1;
4852 else if (fmt[i] == 'E')
4854 register int j;
4855 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4857 tem = find_use_as_address (XVECEXP (x, i, j), reg, plusconst);
4858 if (value == 0)
4859 value = tem;
4860 else if (tem != 0)
4861 return (rtx) (HOST_WIDE_INT) 1;
4866 return value;
4869 /* Write information about registers and basic blocks into FILE.
4870 This is part of making a debugging dump. */
4872 void
4873 dump_regset (r, outf)
4874 regset r;
4875 FILE *outf;
4877 int i;
4878 if (r == NULL)
4880 fputs (" (nil)", outf);
4881 return;
4884 EXECUTE_IF_SET_IN_REG_SET (r, 0, i,
4886 fprintf (outf, " %d", i);
4887 if (i < FIRST_PSEUDO_REGISTER)
4888 fprintf (outf, " [%s]",
4889 reg_names[i]);
4893 void
4894 debug_regset (r)
4895 regset r;
4897 dump_regset (r, stderr);
4898 putc ('\n', stderr);
/* Dump register-usage statistics and the control-flow graph of the
   current function to FILE, for debugging dumps.  */

void
dump_flow_info (file)
     FILE *file;
{
  register int i;
  static const char * const reg_class_names[] = REG_CLASS_NAMES;

  /* First part: one line per pseudo register that is ever referenced,
     showing use/set/death counts, its home block, size, and preferred
     register classes.  */
  fprintf (file, "%d registers.\n", max_regno);
  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    if (REG_N_REFS (i))
      {
	enum reg_class class, altclass;

	fprintf (file, "\nRegister %d used %d times across %d insns",
		 i, REG_N_REFS (i), REG_LIVE_LENGTH (i));
	if (REG_BASIC_BLOCK (i) >= 0)
	  fprintf (file, " in block %d", REG_BASIC_BLOCK (i));
	if (REG_N_SETS (i))
	  fprintf (file, "; set %d time%s", REG_N_SETS (i),
		   (REG_N_SETS (i) == 1) ? "" : "s");
	if (REG_USERVAR_P (regno_reg_rtx[i]))
	  fprintf (file, "; user var");
	if (REG_N_DEATHS (i) != 1)
	  fprintf (file, "; dies in %d places", REG_N_DEATHS (i));
	if (REG_N_CALLS_CROSSED (i) == 1)
	  fprintf (file, "; crosses 1 call");
	else if (REG_N_CALLS_CROSSED (i))
	  fprintf (file, "; crosses %d calls", REG_N_CALLS_CROSSED (i));
	if (PSEUDO_REGNO_BYTES (i) != UNITS_PER_WORD)
	  fprintf (file, "; %d bytes", PSEUDO_REGNO_BYTES (i));
	class = reg_preferred_class (i);
	altclass = reg_alternate_class (i);
	/* Only mention register classes when they differ from the
	   uninteresting default (pref GENERAL_REGS, alt ALL_REGS).  */
	if (class != GENERAL_REGS || altclass != ALL_REGS)
	  {
	    if (altclass == ALL_REGS || class == ALL_REGS)
	      fprintf (file, "; pref %s", reg_class_names[(int) class]);
	    else if (altclass == NO_REGS)
	      fprintf (file, "; %s or none", reg_class_names[(int) class]);
	    else
	      fprintf (file, "; pref %s, else %s",
		       reg_class_names[(int) class],
		       reg_class_names[(int) altclass]);
	  }
	if (REGNO_POINTER_FLAG (i))
	  fprintf (file, "; pointer");
	fprintf (file, ".\n");
      }

  /* Second part: one stanza per basic block, listing its insn span,
     predecessor and successor edges, and the registers live at its
     boundaries.  */
  fprintf (file, "\n%d basic blocks, %d edges.\n", n_basic_blocks, n_edges);
  for (i = 0; i < n_basic_blocks; i++)
    {
      register basic_block bb = BASIC_BLOCK (i);
      register edge e;

      fprintf (file, "\nBasic block %d: first insn %d, last %d, loop_depth %d.\n",
	       i, INSN_UID (bb->head), INSN_UID (bb->end), bb->loop_depth);

      fprintf (file, "Predecessors: ");
      for (e = bb->pred; e ; e = e->pred_next)
	dump_edge_info (file, e, 0);

      fprintf (file, "\nSuccessors: ");
      for (e = bb->succ; e ; e = e->succ_next)
	dump_edge_info (file, e, 1);

      fprintf (file, "\nRegisters live at start:");
      dump_regset (bb->global_live_at_start, file);

      fprintf (file, "\nRegisters live at end:");
      dump_regset (bb->global_live_at_end, file);

      putc('\n', file);
    }

  putc('\n', file);
}
/* Debugger helper: dump the current function's flow info to stderr.  */

void
debug_flow_info ()
{
  dump_flow_info (stderr);
}
4983 static void
4984 dump_edge_info (file, e, do_succ)
4985 FILE *file;
4986 edge e;
4987 int do_succ;
4989 basic_block side = (do_succ ? e->dest : e->src);
4991 if (side == ENTRY_BLOCK_PTR)
4992 fputs (" ENTRY", file);
4993 else if (side == EXIT_BLOCK_PTR)
4994 fputs (" EXIT", file);
4995 else
4996 fprintf (file, " %d", side->index);
4998 if (e->flags)
5000 static const char * const bitnames[] = {
5001 "fallthru", "crit", "ab", "abcall", "eh", "fake"
5003 int comma = 0;
5004 int i, flags = e->flags;
5006 fputc (' ', file);
5007 fputc ('(', file);
5008 for (i = 0; flags; i++)
5009 if (flags & (1 << i))
5011 flags &= ~(1 << i);
5013 if (comma)
5014 fputc (',', file);
5015 if (i < (int)(sizeof (bitnames) / sizeof (*bitnames)))
5016 fputs (bitnames[i], file);
5017 else
5018 fprintf (file, "%d", i);
5019 comma = 1;
5021 fputc (')', file);
5026 /* Print out one basic block with live information at start and end. */
5027 void
5028 dump_bb (bb, outf)
5029 basic_block bb;
5030 FILE *outf;
5032 rtx insn;
5033 rtx last;
5034 edge e;
5036 fprintf (outf, ";; Basic block %d, loop depth %d",
5037 bb->index, bb->loop_depth - 1);
5038 if (bb->eh_beg != -1 || bb->eh_end != -1)
5039 fprintf (outf, ", eh regions %d/%d", bb->eh_beg, bb->eh_end);
5040 putc ('\n', outf);
5042 fputs (";; Predecessors: ", outf);
5043 for (e = bb->pred; e ; e = e->pred_next)
5044 dump_edge_info (outf, e, 0);
5045 putc ('\n', outf);
5047 fputs (";; Registers live at start:", outf);
5048 dump_regset (bb->global_live_at_start, outf);
5049 putc ('\n', outf);
5051 for (insn = bb->head, last = NEXT_INSN (bb->end);
5052 insn != last;
5053 insn = NEXT_INSN (insn))
5054 print_rtl_single (outf, insn);
5056 fputs (";; Registers live at end:", outf);
5057 dump_regset (bb->global_live_at_end, outf);
5058 putc ('\n', outf);
5060 fputs (";; Successors: ", outf);
5061 for (e = bb->succ; e; e = e->succ_next)
5062 dump_edge_info (outf, e, 1);
5063 putc ('\n', outf);
5066 void
5067 debug_bb (bb)
5068 basic_block bb;
5070 dump_bb (bb, stderr);
/* Debugger helper: print basic block number N to stderr.  */

void
debug_bb_n (n)
     int n;
{
  dump_bb (BASIC_BLOCK (n), stderr);
}
/* Like print_rtl, but also print out live information for the start of each
   basic block.  RTX_FIRST is the head of the insn chain to print.  */

void
print_rtl_with_bb (outf, rtx_first)
     FILE *outf;
     rtx rtx_first;
{
  register rtx tmp_rtx;

  if (rtx_first == 0)
    fprintf (outf, "(nil)\n");
  else
    {
      int i;
      enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
      int max_uid = get_max_uid ();
      /* Three arrays indexed by insn UID: the block (if any) starting at
	 that insn, the block ending at it, and how many blocks contain
	 it (none / exactly one / more than one).  */
      basic_block *start = (basic_block *)
	xcalloc (max_uid, sizeof (basic_block));
      basic_block *end = (basic_block *)
	xcalloc (max_uid, sizeof (basic_block));
      enum bb_state *in_bb_p = (enum bb_state *)
	xcalloc (max_uid, sizeof (enum bb_state));

      /* Fill in the three maps by walking each block's insn span.  */
      for (i = n_basic_blocks - 1; i >= 0; i--)
	{
	  basic_block bb = BASIC_BLOCK (i);
	  rtx x;

	  start[INSN_UID (bb->head)] = bb;
	  end[INSN_UID (bb->end)] = bb;
	  for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
	    {
	      /* An insn already seen in another block is promoted to
		 IN_MULTIPLE_BB.  */
	      enum bb_state state = IN_MULTIPLE_BB;
	      if (in_bb_p[INSN_UID(x)] == NOT_IN_BB)
		state = IN_ONE_BB;
	      in_bb_p[INSN_UID(x)] = state;

	      if (x == bb->end)
		break;
	    }
	}

      /* Now print the whole chain, annotating block starts/ends and
	 flagging insns with anomalous block membership.  */
      for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
	{
	  int did_output;
	  basic_block bb;

	  if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
	    {
	      fprintf (outf, ";; Start of basic block %d, registers live:",
		       bb->index);
	      dump_regset (bb->global_live_at_start, outf);
	      putc ('\n', outf);
	    }

	  if (in_bb_p[INSN_UID(tmp_rtx)] == NOT_IN_BB
	      && GET_CODE (tmp_rtx) != NOTE
	      && GET_CODE (tmp_rtx) != BARRIER)
	    fprintf (outf, ";; Insn is not within a basic block\n");
	  else if (in_bb_p[INSN_UID(tmp_rtx)] == IN_MULTIPLE_BB)
	    fprintf (outf, ";; Insn is in multiple basic blocks\n");

	  did_output = print_rtl_single (outf, tmp_rtx);

	  if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
	    fprintf (outf, ";; End of basic block %d\n", bb->index);

	  if (did_output)
	    putc ('\n', outf);
	}

      free (start);
      free (end);
      free (in_bb_p);
    }

  /* Epilogue delay-slot insns are not on the main chain; print them
     separately.  */
  if (current_function_epilogue_delay_list != 0)
    {
      fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
      for (tmp_rtx = current_function_epilogue_delay_list; tmp_rtx != 0;
	   tmp_rtx = XEXP (tmp_rtx, 1))
	print_rtl_single (outf, XEXP (tmp_rtx, 0));
    }
}
5166 /* Compute dominator relationships using new flow graph structures. */
5167 void
5168 compute_flow_dominators (dominators, post_dominators)
5169 sbitmap *dominators;
5170 sbitmap *post_dominators;
5172 int bb;
5173 sbitmap *temp_bitmap;
5174 edge e;
5175 basic_block *worklist, *tos;
5177 /* Allocate a worklist array/queue. Entries are only added to the
5178 list if they were not already on the list. So the size is
5179 bounded by the number of basic blocks. */
5180 tos = worklist = (basic_block *) xmalloc (sizeof (basic_block)
5181 * n_basic_blocks);
5183 temp_bitmap = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
5184 sbitmap_vector_zero (temp_bitmap, n_basic_blocks);
5186 if (dominators)
5188 /* The optimistic setting of dominators requires us to put every
5189 block on the work list initially. */
5190 for (bb = 0; bb < n_basic_blocks; bb++)
5192 *tos++ = BASIC_BLOCK (bb);
5193 BASIC_BLOCK (bb)->aux = BASIC_BLOCK (bb);
5196 /* We want a maximal solution, so initially assume everything dominates
5197 everything else. */
5198 sbitmap_vector_ones (dominators, n_basic_blocks);
5200 /* Mark successors of the entry block so we can identify them below. */
5201 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
5202 e->dest->aux = ENTRY_BLOCK_PTR;
5204 /* Iterate until the worklist is empty. */
5205 while (tos != worklist)
5207 /* Take the first entry off the worklist. */
5208 basic_block b = *--tos;
5209 bb = b->index;
5211 /* Compute the intersection of the dominators of all the
5212 predecessor blocks.
5214 If one of the predecessor blocks is the ENTRY block, then the
5215 intersection of the dominators of the predecessor blocks is
5216 defined as the null set. We can identify such blocks by the
5217 special value in the AUX field in the block structure. */
5218 if (b->aux == ENTRY_BLOCK_PTR)
5220 /* Do not clear the aux field for blocks which are
5221 successors of the ENTRY block. That way we we never
5222 add them to the worklist again.
5224 The intersect of dominators of the preds of this block is
5225 defined as the null set. */
5226 sbitmap_zero (temp_bitmap[bb]);
5228 else
5230 /* Clear the aux field of this block so it can be added to
5231 the worklist again if necessary. */
5232 b->aux = NULL;
5233 sbitmap_intersection_of_preds (temp_bitmap[bb], dominators, bb);
5236 /* Make sure each block always dominates itself. */
5237 SET_BIT (temp_bitmap[bb], bb);
5239 /* If the out state of this block changed, then we need to
5240 add the successors of this block to the worklist if they
5241 are not already on the worklist. */
5242 if (sbitmap_a_and_b (dominators[bb], dominators[bb], temp_bitmap[bb]))
5244 for (e = b->succ; e; e = e->succ_next)
5246 if (!e->dest->aux && e->dest != EXIT_BLOCK_PTR)
5248 *tos++ = e->dest;
5249 e->dest->aux = e;
5256 if (post_dominators)
5258 /* The optimistic setting of dominators requires us to put every
5259 block on the work list initially. */
5260 for (bb = 0; bb < n_basic_blocks; bb++)
5262 *tos++ = BASIC_BLOCK (bb);
5263 BASIC_BLOCK (bb)->aux = BASIC_BLOCK (bb);
5266 /* We want a maximal solution, so initially assume everything post
5267 dominates everything else. */
5268 sbitmap_vector_ones (post_dominators, n_basic_blocks);
5270 /* Mark predecessors of the exit block so we can identify them below. */
5271 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
5272 e->src->aux = EXIT_BLOCK_PTR;
5274 /* Iterate until the worklist is empty. */
5275 while (tos != worklist)
5277 /* Take the first entry off the worklist. */
5278 basic_block b = *--tos;
5279 bb = b->index;
5281 /* Compute the intersection of the post dominators of all the
5282 successor blocks.
5284 If one of the successor blocks is the EXIT block, then the
5285 intersection of the dominators of the successor blocks is
5286 defined as the null set. We can identify such blocks by the
5287 special value in the AUX field in the block structure. */
5288 if (b->aux == EXIT_BLOCK_PTR)
5290 /* Do not clear the aux field for blocks which are
5291 predecessors of the EXIT block. That way we we never
5292 add them to the worklist again.
5294 The intersect of dominators of the succs of this block is
5295 defined as the null set. */
5296 sbitmap_zero (temp_bitmap[bb]);
5298 else
5300 /* Clear the aux field of this block so it can be added to
5301 the worklist again if necessary. */
5302 b->aux = NULL;
5303 sbitmap_intersection_of_succs (temp_bitmap[bb],
5304 post_dominators, bb);
5307 /* Make sure each block always post dominates itself. */
5308 SET_BIT (temp_bitmap[bb], bb);
5310 /* If the out state of this block changed, then we need to
5311 add the successors of this block to the worklist if they
5312 are not already on the worklist. */
5313 if (sbitmap_a_and_b (post_dominators[bb],
5314 post_dominators[bb],
5315 temp_bitmap[bb]))
5317 for (e = b->pred; e; e = e->pred_next)
5319 if (!e->src->aux && e->src != ENTRY_BLOCK_PTR)
5321 *tos++ = e->src;
5322 e->src->aux = e;
5328 free (temp_bitmap);
5331 /* Given DOMINATORS, compute the immediate dominators into IDOM. */
5333 void
5334 compute_immediate_dominators (idom, dominators)
5335 int *idom;
5336 sbitmap *dominators;
5338 sbitmap *tmp;
5339 int b;
5341 tmp = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
5343 /* Begin with tmp(n) = dom(n) - { n }. */
5344 for (b = n_basic_blocks; --b >= 0; )
5346 sbitmap_copy (tmp[b], dominators[b]);
5347 RESET_BIT (tmp[b], b);
5350 /* Subtract out all of our dominator's dominators. */
5351 for (b = n_basic_blocks; --b >= 0; )
5353 sbitmap tmp_b = tmp[b];
5354 int s;
5356 for (s = n_basic_blocks; --s >= 0; )
5357 if (TEST_BIT (tmp_b, s))
5358 sbitmap_difference (tmp_b, tmp_b, tmp[s]);
5361 /* Find the one bit set in the bitmap and put it in the output array. */
5362 for (b = n_basic_blocks; --b >= 0; )
5364 int t;
5365 EXECUTE_IF_SET_IN_SBITMAP (tmp[b], 0, t, { idom[b] = t; });
5368 sbitmap_vector_free (tmp);
5371 /* Count for a single SET rtx, X. */
5373 static void
5374 count_reg_sets_1 (x)
5375 rtx x;
5377 register int regno;
5378 register rtx reg = SET_DEST (x);
5380 /* Find the register that's set/clobbered. */
5381 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
5382 || GET_CODE (reg) == SIGN_EXTRACT
5383 || GET_CODE (reg) == STRICT_LOW_PART)
5384 reg = XEXP (reg, 0);
5386 if (GET_CODE (reg) == PARALLEL
5387 && GET_MODE (reg) == BLKmode)
5389 register int i;
5390 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
5391 count_reg_sets_1 (XVECEXP (reg, 0, i));
5392 return;
5395 if (GET_CODE (reg) == REG)
5397 regno = REGNO (reg);
5398 if (regno >= FIRST_PSEUDO_REGISTER)
5400 /* Count (weighted) references, stores, etc. This counts a
5401 register twice if it is modified, but that is correct. */
5402 REG_N_SETS (regno)++;
5403 REG_N_REFS (regno) += loop_depth + 1;
5408 /* Increment REG_N_SETS for each SET or CLOBBER found in X; also increment
5409 REG_N_REFS by the current loop depth for each SET or CLOBBER found. */
5411 static void
5412 count_reg_sets (x)
5413 rtx x;
5415 register RTX_CODE code = GET_CODE (x);
5417 if (code == SET || code == CLOBBER)
5418 count_reg_sets_1 (x);
5419 else if (code == PARALLEL)
5421 register int i;
5422 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
5424 code = GET_CODE (XVECEXP (x, 0, i));
5425 if (code == SET || code == CLOBBER)
5426 count_reg_sets_1 (XVECEXP (x, 0, i));
/* Increment REG_N_REFS by the current loop depth each register reference
   found in X.  This mirrors the use-marking logic of mark_used_regs but
   only updates reference counts.  */

static void
count_reg_references (x)
     rtx x;
{
  register RTX_CODE code;

 retry:
  code = GET_CODE (x);
  switch (code)
    {
    /* Constants and labels contain no register references.  */
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_INT:
    case CONST:
    case CONST_DOUBLE:
    case PC:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case ASM_INPUT:
      return;

#ifdef HAVE_cc0
    case CC0:
      return;
#endif

    case CLOBBER:
      /* If we are clobbering a MEM, mark any registers inside the address
	 as being used.  */
      if (GET_CODE (XEXP (x, 0)) == MEM)
	count_reg_references (XEXP (XEXP (x, 0), 0));
      return;

    case SUBREG:
      /* While we're here, optimize this case.  */
      x = SUBREG_REG (x);

      /* In case the SUBREG is not of a register, don't optimize */
      if (GET_CODE (x) != REG)
	{
	  count_reg_references (x);
	  return;
	}

      /* ... fall through ...  */

    case REG:
      /* Only pseudo registers are counted.  */
      if (REGNO (x) >= FIRST_PSEUDO_REGISTER)
	REG_N_REFS (REGNO (x)) += loop_depth + 1;
      return;

    case SET:
      {
	register rtx testreg = SET_DEST (x);
	int mark_dest = 0;

	/* If storing into MEM, don't show it as being used.  But do
	   show the address as being used.  */
	if (GET_CODE (testreg) == MEM)
	  {
	    count_reg_references (XEXP (testreg, 0));
	    count_reg_references (SET_SRC (x));
	    return;
	  }

	/* Storing in STRICT_LOW_PART is like storing in a reg
	   in that this SET might be dead, so ignore it in TESTREG.
	   but in some other ways it is like using the reg.

	   Storing in a SUBREG or a bit field is like storing the entire
	   register in that if the register's value is not used
	   then this SET is not needed.  */
	while (GET_CODE (testreg) == STRICT_LOW_PART
	       || GET_CODE (testreg) == ZERO_EXTRACT
	       || GET_CODE (testreg) == SIGN_EXTRACT
	       || GET_CODE (testreg) == SUBREG)
	  {
	    /* Modifying a single register in an alternate mode
	       does not use any of the old value.  But these other
	       ways of storing in a register do use the old value.  */
	    if (GET_CODE (testreg) == SUBREG
		&& !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
	      ;
	    else
	      mark_dest = 1;

	    testreg = XEXP (testreg, 0);
	  }

	/* If this is a store into a register,
	   recursively scan the value being stored.  */

	if ((GET_CODE (testreg) == PARALLEL
	     && GET_MODE (testreg) == BLKmode)
	    || GET_CODE (testreg) == REG)
	  {
	    count_reg_references (SET_SRC (x));
	    /* MARK_DEST nonzero means the store also reads the old
	       destination value, so count that use too.  */
	    if (mark_dest)
	      count_reg_references (SET_DEST (x));
	    return;
	  }
      }
      break;

    default:
      break;
    }

  /* Recursively scan the operands of this expression.  */

  {
    register const char *fmt = GET_RTX_FORMAT (code);
    register int i;

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
	if (fmt[i] == 'e')
	  {
	    /* Tail recursive case: save a function call level.  */
	    if (i == 0)
	      {
		x = XEXP (x, 0);
		goto retry;
	      }
	    count_reg_references (XEXP (x, i));
	  }
	else if (fmt[i] == 'E')
	  {
	    register int j;
	    for (j = 0; j < XVECLEN (x, i); j++)
	      count_reg_references (XVECEXP (x, i, j));
	  }
      }
  }
}
/* Recompute register set/reference counts immediately prior to register
   allocation.

   This avoids problems with set/reference counts changing to/from values
   which have special meanings to the register allocators.

   Additionally, the reference counts are the primary component used by the
   register allocators to prioritize pseudos for allocation to hard regs.
   More accurate reference counts generally lead to better register allocation.

   F is the first insn to be scanned.

   LOOP_STEP denotes how much loop_depth should be incremented per
   loop nesting level in order to increase the ref count more for
   references in a loop.

   It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and
   possibly other information which is used by the register allocators.  */

void
recompute_reg_usage (f, loop_step)
     rtx f ATTRIBUTE_UNUSED;
     int loop_step ATTRIBUTE_UNUSED;
{
  rtx insn;
  int i, max_reg;
  int index;

  /* Clear out the old data.  */
  max_reg = max_reg_num ();
  for (i = FIRST_PSEUDO_REGISTER; i < max_reg; i++)
    {
      REG_N_SETS (i) = 0;
      REG_N_REFS (i) = 0;
    }

  /* Scan each insn in the chain and count how many times each register is
     set/used.  The file-scope LOOP_DEPTH variable weights the counts made
     by count_reg_sets / count_reg_references below.  */
  for (index = 0; index < n_basic_blocks; index++)
    {
      basic_block bb = BASIC_BLOCK (index);
      loop_depth = bb->loop_depth;
      for (insn = bb->head; insn; insn = NEXT_INSN (insn))
	{
	  if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	    {
	      rtx links;

	      /* This call will increment REG_N_SETS for each SET or CLOBBER
		 of a register in INSN.  It will also increment REG_N_REFS
		 by the loop depth for each set of a register in INSN.  */
	      count_reg_sets (PATTERN (insn));

	      /* count_reg_sets does not detect autoincrement address modes, so
		 detect them here by looking at the notes attached to INSN.  */
	      for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
		if (REG_NOTE_KIND (links) == REG_INC)
		  /* Count (weighted) references, stores, etc.  This counts a
		     register twice if it is modified, but that is correct.  */
		  REG_N_SETS (REGNO (XEXP (links, 0)))++;

	      /* This call will increment REG_N_REFS by the current loop depth for
		 each reference to a register in INSN.  */
	      count_reg_references (PATTERN (insn));

	      /* count_reg_references will not include counts for arguments to
		 function calls, so detect them here by examining the
		 CALL_INSN_FUNCTION_USAGE data.  */
	      if (GET_CODE (insn) == CALL_INSN)
		{
		  rtx note;

		  for (note = CALL_INSN_FUNCTION_USAGE (insn);
		       note;
		       note = XEXP (note, 1))
		    if (GET_CODE (XEXP (note, 0)) == USE)
		      count_reg_references (XEXP (XEXP (note, 0), 0));
		}
	    }

	  /* BB->END is the last insn of this block; stop after it.  */
	  if (insn == bb->end)
	    break;
	}
    }
}
5657 /* Optionally removes all the REG_DEAD and REG_UNUSED notes from a set of
5658 blocks. If BLOCKS is NULL, assume the universal set. Returns a count
5659 of the number of registers that died.
   When KILL is non-zero the notes are removed; when it is zero they are
   only counted.  */
5662 count_or_remove_death_notes (blocks, kill)
5663 sbitmap blocks;
5664 int kill;
5666 int i, count = 0;
5668 for (i = n_basic_blocks - 1; i >= 0; --i)
5670 basic_block bb;
5671 rtx insn;
/* With a non-null BLOCKS bitmap, only process the selected blocks.  */
5673 if (blocks && ! TEST_BIT (blocks, i))
5674 continue;
5676 bb = BASIC_BLOCK (i);
5678 for (insn = bb->head; ; insn = NEXT_INSN (insn))
5680 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
/* PPREV trails LINK so a note can be spliced out of the list in
   place when KILL is set.  */
5682 rtx *pprev = &REG_NOTES (insn);
5683 rtx link = *pprev;
5685 while (link)
5687 switch (REG_NOTE_KIND (link))
5689 case REG_DEAD:
5690 if (GET_CODE (XEXP (link, 0)) == REG)
5692 rtx reg = XEXP (link, 0);
5693 int n;
/* A hard register death counts once per machine register it
   occupies; a pseudo counts as one.  */
5695 if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
5696 n = 1;
5697 else
5698 n = HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg));
5699 count += n;
/* REG_DEAD shares the removal code with REG_UNUSED below.  */
5701 /* FALLTHRU */
5703 case REG_UNUSED:
5704 if (kill)
/* Unlink and free this note; *PPREV now skips it, and LINK
   advances without moving PPREV.  */
5706 rtx next = XEXP (link, 1);
5707 free_EXPR_LIST_node (link);
5708 *pprev = link = next;
5709 break;
/* Not killing: fall through to the default advance.  */
5711 /* FALLTHRU */
5713 default:
5714 pprev = &XEXP (link, 1);
5715 link = *pprev;
5716 break;
5721 if (insn == bb->end)
5722 break;
5726 return count;
5729 /* Record INSN's block as BB. */
5731 void
5732 set_block_for_insn (insn, bb)
5733 rtx insn;
5734 basic_block bb;
5736 size_t uid = INSN_UID (insn);
5737 if (uid >= basic_block_for_insn->num_elements)
5739 int new_size;
5741 /* Add one-eighth the size so we don't keep calling xrealloc. */
5742 new_size = uid + (uid + 7) / 8;
5744 VARRAY_GROW (basic_block_for_insn, new_size);
5746 VARRAY_BB (basic_block_for_insn, uid) = bb;
5749 /* Record INSN's block number as BB. */
5750 /* ??? This has got to go. */
5752 void
5753 set_block_num (insn, bb)
5754 rtx insn;
5755 int bb;
5757 set_block_for_insn (insn, BASIC_BLOCK (bb));
5760 /* Verify the CFG consistency. This function checks some CFG invariants and
5761 aborts when something is wrong. Hopefully this function will help to
5762 convert many optimization passes to preserve the CFG consistently.
5764 Currently it does the following checks:
5766 - test head/end pointers
5767 - overlapping of basic blocks
5768 - edge list correctness
5769 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
5770 - tails of basic blocks (ensure that boundary is necessary)
5771 - scans body of the basic block for JUMP_INSN, CODE_LABEL
5772 and NOTE_INSN_BASIC_BLOCK
5773 - check that all insns are in the basic blocks
5774 (except the switch handling code, barriers and notes)
5776 In future it can be extended to check a lot of other stuff as well
5777 (reachability of basic blocks, life information, etc. etc.). */
5779 void
5780 verify_flow_info ()
5782 const int max_uid = get_max_uid ();
5783 const rtx rtx_first = get_insns ();
5784 basic_block *bb_info;
5785 rtx x;
5786 int i, err = 0;
/* bb_info maps insn uid -> containing block, filled by the first pass
   and used by the last pass; xcalloc zeroes it.  */
5788 bb_info = (basic_block *) xcalloc (max_uid, sizeof (basic_block));
5790 /* First pass check head/end pointers and set bb_info array used by
5791 later passes. */
5792 for (i = n_basic_blocks - 1; i >= 0; i--)
5794 basic_block bb = BASIC_BLOCK (i);
5796 /* Check the head pointer and make sure that it is pointing into
5797 insn list. */
5798 for (x = rtx_first; x != NULL_RTX; x = NEXT_INSN (x))
5799 if (x == bb->head)
5800 break;
5801 if (!x)
5803 error ("Head insn %d for block %d not found in the insn stream.",
5804 INSN_UID (bb->head), bb->index);
5805 err = 1;
5808 /* Check the end pointer and make sure that it is pointing into
5809 insn list. */
5810 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
/* A non-null entry means some other block already claimed this insn,
   i.e. the blocks overlap.  */
5812 if (bb_info[INSN_UID (x)] != NULL)
5814 error ("Insn %d is in multiple basic blocks (%d and %d)",
5815 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
5816 err = 1;
5818 bb_info[INSN_UID (x)] = bb;
5820 if (x == bb->end)
5821 break;
5823 if (!x)
5825 error ("End insn %d for block %d not found in the insn stream.",
5826 INSN_UID (bb->end), bb->index);
5827 err = 1;
5831 /* Now check the basic blocks (boundaries etc.) */
5832 for (i = n_basic_blocks - 1; i >= 0; i--)
5834 basic_block bb = BASIC_BLOCK (i);
5835 /* Check correctness of edge lists: every successor edge must name
   this block as its source ... */
5836 edge e;
5838 e = bb->succ;
5839 while (e)
5841 if (e->src != bb)
5843 fprintf (stderr, "verify_flow_info: Basic block %d succ edge is corrupted\n",
5844 bb->index);
5845 fprintf (stderr, "Predecessor: ");
5846 dump_edge_info (stderr, e, 0);
5847 fprintf (stderr, "\nSuccessor: ");
5848 dump_edge_info (stderr, e, 1);
5849 fflush (stderr);
5850 err = 1;
/* ... and must also appear on its destination's predecessor list.  */
5852 if (e->dest != EXIT_BLOCK_PTR)
5854 edge e2 = e->dest->pred;
5855 while (e2 && e2 != e)
5856 e2 = e2->pred_next;
5857 if (!e2)
5859 error ("Basic block %i edge lists are corrupted", bb->index);
5860 err = 1;
5863 e = e->succ_next;
/* Symmetric checks for the predecessor list.  */
5866 e = bb->pred;
5867 while (e)
5869 if (e->dest != bb)
5871 error ("Basic block %d pred edge is corrupted", bb->index);
5872 fputs ("Predecessor: ", stderr);
5873 dump_edge_info (stderr, e, 0);
5874 fputs ("\nSuccessor: ", stderr);
5875 dump_edge_info (stderr, e, 1);
5876 fputc ('\n', stderr);
5877 err = 1;
5879 if (e->src != ENTRY_BLOCK_PTR)
5881 edge e2 = e->src->succ;
5882 while (e2 && e2 != e)
5883 e2 = e2->succ_next;
5884 if (!e2)
5886 error ("Basic block %i edge lists are corrupted", bb->index);
5887 err = 1;
5890 e = e->pred_next;
5893 /* OK pointers are correct. Now check the header of basic
5894 block. It ought to contain optional CODE_LABEL followed
5895 by NOTE_BASIC_BLOCK. */
5896 x = bb->head;
5897 if (GET_CODE (x) == CODE_LABEL)
5899 if (bb->end == x)
5901 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
5902 bb->index);
5903 err = 1;
5905 x = NEXT_INSN (x);
5907 if (GET_CODE (x) != NOTE
5908 || NOTE_LINE_NUMBER (x) != NOTE_INSN_BASIC_BLOCK
5909 || NOTE_BASIC_BLOCK (x) != bb)
5911 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d\n",
5912 bb->index);
5913 err = 1;
5916 if (bb->end == x)
5918 /* Do checks for empty blocks here */
5920 else
/* Scan the block body: no stray NOTE_INSN_BASIC_BLOCK, and no
   control-flow insn before the recorded block end.  */
5922 x = NEXT_INSN (x);
5923 while (x)
5925 if (GET_CODE (x) == NOTE
5926 && NOTE_LINE_NUMBER (x) == NOTE_INSN_BASIC_BLOCK)
5928 error ("NOTE_INSN_BASIC_BLOCK %d in the middle of basic block %d",
5929 INSN_UID (x), bb->index);
5930 err = 1;
5933 if (x == bb->end)
5934 break;
5936 if (GET_CODE (x) == JUMP_INSN
5937 || GET_CODE (x) == CODE_LABEL
5938 || GET_CODE (x) == BARRIER)
5940 error ("In basic block %d:", bb->index);
5941 fatal_insn ("Flow control insn inside a basic block", x);
5944 x = NEXT_INSN (x);
/* Last pass: every insn not assigned to a block must be of a kind
   that is allowed to live outside basic blocks.  */
5949 x = rtx_first;
5950 while (x)
5952 if (!bb_info[INSN_UID (x)])
5954 switch (GET_CODE (x))
5956 case BARRIER:
5957 case NOTE:
5958 break;
5960 case CODE_LABEL:
5961 /* An addr_vec is placed outside any basic block. */
5962 if (NEXT_INSN (x)
5963 && GET_CODE (NEXT_INSN (x)) == JUMP_INSN
5964 && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
5965 || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC))
5967 x = NEXT_INSN (x);
5970 /* But in any case, non-deletable labels can appear anywhere. */
5971 break;
5973 default:
5974 fatal_insn ("Insn outside basic block", x);
5978 x = NEXT_INSN (x);
/* Abort only after reporting everything found, so all diagnostics
   are emitted in one run.  */
5981 if (err)
5982 abort ();
5984 /* Clean up. */
5985 free (bb_info);
5988 /* Functions to access an edge list with a vector representation.
5989 Enough data is kept such that given an index number, the
5990 pred and succ that edge reprsents can be determined, or
5991 given a pred and a succ, it's index number can be returned.
5992 This allows algorithms which comsume a lot of memory to
5993 represent the normally full matrix of edge (pred,succ) with a
5994 single indexed vector, edge (EDGE_INDEX (pred, succ)), with no
5995 wasted space in the client code due to sparse flow graphs. */
5997 /* This functions initializes the edge list. Basically the entire
5998 flowgraph is processed, and all edges are assigned a number,
5999 and the data structure is filed in. */
6000 struct edge_list *
6001 create_edge_list ()
6003 struct edge_list *elist;
6004 edge e;
6005 int num_edges;
6006 int x;
6007 int block_count;
6009 block_count = n_basic_blocks + 2; /* Include the entry and exit blocks. */
6011 num_edges = 0;
6013 /* Determine the number of edges in the flow graph by counting successor
6014 edges on each basic block. */
6015 for (x = 0; x < n_basic_blocks; x++)
6017 basic_block bb = BASIC_BLOCK (x);
6019 for (e = bb->succ; e; e = e->succ_next)
6020 num_edges++;
6022 /* Don't forget successors of the entry block. */
6023 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6024 num_edges++;
6026 elist = (struct edge_list *) xmalloc (sizeof (struct edge_list));
6027 elist->num_blocks = block_count;
6028 elist->num_edges = num_edges;
6029 elist->index_to_edge = (edge *) xmalloc (sizeof (edge) * num_edges);
6031 num_edges = 0;
6033 /* Follow successors of the entry block, and register these edges. */
6034 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6036 elist->index_to_edge[num_edges] = e;
6037 num_edges++;
6040 for (x = 0; x < n_basic_blocks; x++)
6042 basic_block bb = BASIC_BLOCK (x);
6044 /* Follow all successors of blocks, and register these edges. */
6045 for (e = bb->succ; e; e = e->succ_next)
6047 elist->index_to_edge[num_edges] = e;
6048 num_edges++;
6051 return elist;
6054 /* This function free's memory associated with an edge list. */
6055 void
6056 free_edge_list (elist)
6057 struct edge_list *elist;
6059 if (elist)
6061 free (elist->index_to_edge);
6062 free (elist);
6066 /* This function provides debug output showing an edge list. */
6067 void
6068 print_edge_list (f, elist)
6069 FILE *f;
6070 struct edge_list *elist;
6072 int x;
6073 fprintf(f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
6074 elist->num_blocks - 2, elist->num_edges);
6076 for (x = 0; x < elist->num_edges; x++)
6078 fprintf (f, " %-4d - edge(", x);
6079 if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR)
6080 fprintf (f,"entry,");
6081 else
6082 fprintf (f,"%d,", INDEX_EDGE_PRED_BB (elist, x)->index);
6084 if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR)
6085 fprintf (f,"exit)\n");
6086 else
6087 fprintf (f,"%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
6091 /* This function provides an internal consistency check of an edge list,
6092 verifying that all edges are present, and that there are no
6093 extra edges.
   Diagnostics are written to stream F; nothing is returned.  */
6094 void
6095 verify_edge_list (f, elist)
6096 FILE *f;
6097 struct edge_list *elist;
6099 int x, pred, succ, index;
6100 edge e;
/* Part 1: every edge reachable from the blocks must have an index in
   ELIST, and the index must map back to the same pred/succ pair.  */
6102 for (x = 0; x < n_basic_blocks; x++)
6104 basic_block bb = BASIC_BLOCK (x);
6106 for (e = bb->succ; e; e = e->succ_next)
6108 pred = e->src->index;
6109 succ = e->dest->index;
6110 index = EDGE_INDEX (elist, e->src, e->dest);
6111 if (index == EDGE_INDEX_NO_EDGE)
6113 fprintf (f, "*p* No index for edge from %d to %d\n",pred, succ);
6114 continue;
6116 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
6117 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
6118 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
6119 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
6120 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
6121 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
/* Same checks for the entry block's successors, which are not covered
   by the block loop above.  */
6124 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6126 pred = e->src->index;
6127 succ = e->dest->index;
6128 index = EDGE_INDEX (elist, e->src, e->dest);
6129 if (index == EDGE_INDEX_NO_EDGE)
6131 fprintf (f, "*p* No index for edge from %d to %d\n",pred, succ);
6132 continue;
6134 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
6135 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
6136 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
6137 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
6138 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
6139 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
6141 /* We've verified that all the edges are in the list, now let's make sure
6142 there are no spurious edges in the list.
   Part 2: for every (pred, succ) pair, including the entry and exit
   blocks, the list has an index exactly when a real edge exists.  */
6144 for (pred = 0 ; pred < n_basic_blocks; pred++)
6145 for (succ = 0 ; succ < n_basic_blocks; succ++)
6147 basic_block p = BASIC_BLOCK (pred);
6148 basic_block s = BASIC_BLOCK (succ);
6150 int found_edge = 0;
6152 for (e = p->succ; e; e = e->succ_next)
6153 if (e->dest == s)
6155 found_edge = 1;
6156 break;
6158 for (e = s->pred; e; e = e->pred_next)
6159 if (e->src == p)
6161 found_edge = 1;
6162 break;
6164 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
6165 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6166 fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
6167 pred, succ);
6168 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
6169 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6170 fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
6171 pred, succ, EDGE_INDEX (elist, BASIC_BLOCK (pred),
6172 BASIC_BLOCK (succ)));
/* Row of pairs with the entry block as predecessor.  */
6174 for (succ = 0 ; succ < n_basic_blocks; succ++)
6176 basic_block p = ENTRY_BLOCK_PTR;
6177 basic_block s = BASIC_BLOCK (succ);
6179 int found_edge = 0;
6181 for (e = p->succ; e; e = e->succ_next)
6182 if (e->dest == s)
6184 found_edge = 1;
6185 break;
6187 for (e = s->pred; e; e = e->pred_next)
6188 if (e->src == p)
6190 found_edge = 1;
6191 break;
6193 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
6194 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6195 fprintf (f, "*** Edge (entry, %d) appears to not have an index\n",
6196 succ)
6197 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
6198 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6199 fprintf (f, "*** Edge (entry, %d) has index %d, but no edge exists\n",
6200 succ, EDGE_INDEX (elist, ENTRY_BLOCK_PTR,
6201 BASIC_BLOCK (succ)));
/* Column of pairs with the exit block as successor.  */
6203 for (pred = 0 ; pred < n_basic_blocks; pred++)
6205 basic_block p = BASIC_BLOCK (pred);
6206 basic_block s = EXIT_BLOCK_PTR;
6208 int found_edge = 0;
6210 for (e = p->succ; e; e = e->succ_next)
6211 if (e->dest == s)
6213 found_edge = 1;
6214 break;
6216 for (e = s->pred; e; e = e->pred_next)
6217 if (e->src == p)
6219 found_edge = 1;
6220 break;
6222 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
6223 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6224 fprintf (f, "*** Edge (%d, exit) appears to not have an index\n",
6225 pred);
6226 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
6227 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6228 fprintf (f, "*** Edge (%d, exit) has index %d, but no edge exists\n",
6229 pred, EDGE_INDEX (elist, BASIC_BLOCK (pred),
6230 EXIT_BLOCK_PTR));
6234 /* This routine will determine what, if any, edge there is between
6235 a specified predecessor and successor. */
6238 find_edge_index (edge_list, pred, succ)
6239 struct edge_list *edge_list;
6240 basic_block pred, succ;
6242 int x;
6243 for (x = 0; x < NUM_EDGES (edge_list); x++)
6245 if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
6246 && INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
6247 return x;
6249 return (EDGE_INDEX_NO_EDGE);
6252 /* This function will remove an edge from the flow graph. */
6253 void
6254 remove_edge (e)
6255 edge e;
6257 edge last_pred = NULL;
6258 edge last_succ = NULL;
6259 edge tmp;
6260 basic_block src, dest;
6261 src = e->src;
6262 dest = e->dest;
6263 for (tmp = src->succ; tmp && tmp != e; tmp = tmp->succ_next)
6264 last_succ = tmp;
6266 if (!tmp)
6267 abort ();
6268 if (last_succ)
6269 last_succ->succ_next = e->succ_next;
6270 else
6271 src->succ = e->succ_next;
6273 for (tmp = dest->pred; tmp && tmp != e; tmp = tmp->pred_next)
6274 last_pred = tmp;
6276 if (!tmp)
6277 abort ();
6278 if (last_pred)
6279 last_pred->pred_next = e->pred_next;
6280 else
6281 dest->pred = e->pred_next;
6283 n_edges--;
6284 free (e);
6287 /* This routine will remove any fake successor edges for a basic block.
6288 When the edge is removed, it is also removed from whatever predecessor
6289 list it is in. */
6290 static void
6291 remove_fake_successors (bb)
6292 basic_block bb;
6294 edge e;
6295 for (e = bb->succ; e ; )
6297 edge tmp = e;
6298 e = e->succ_next;
6299 if ((tmp->flags & EDGE_FAKE) == EDGE_FAKE)
6300 remove_edge (tmp);
6304 /* This routine will remove all fake edges from the flow graph. If
6305 we remove all fake successors, it will automatically remove all
6306 fake predecessors. */
6307 void
6308 remove_fake_edges ()
6310 int x;
6312 for (x = 0; x < n_basic_blocks; x++)
6313 remove_fake_successors (BASIC_BLOCK (x));
6315 /* We've handled all successors except the entry block's. */
6316 remove_fake_successors (ENTRY_BLOCK_PTR);
6319 /* This functions will add a fake edge between any block which has no
6320 successors, and the exit block. Some data flow equations require these
6321 edges to exist. */
6322 void
6323 add_noreturn_fake_exit_edges ()
6325 int x;
6327 for (x = 0; x < n_basic_blocks; x++)
6328 if (BASIC_BLOCK (x)->succ == NULL)
6329 make_edge (NULL, BASIC_BLOCK (x), EXIT_BLOCK_PTR, EDGE_FAKE);
6332 /* Dump the list of basic blocks in the bitmap NODES to FILE,
   prefixed by the string STR and wrapped in braces. */
6333 static void
6334 flow_nodes_print (str, nodes, file)
6335 const char *str;
6336 const sbitmap nodes;
6337 FILE *file;
6339 int node;
6341 fprintf (file, "%s { ", str);
/* Print the index of every set bit in NODES.  */
6342 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {fprintf (file, "%d ", node);});
6343 fputs ("}\n", file);
6347 /* Dump the list of exiting edges in the array EDGES. */
6348 static void
6349 flow_exits_print (str, edges, num_edges, file)
6350 const char *str;
6351 const edge *edges;
6352 int num_edges;
6353 FILE *file;
6355 int i;
6357 fprintf (file, "%s { ", str);
6358 for (i = 0; i < num_edges; i++)
6359 fprintf (file, "%d->%d ", edges[i]->src->index, edges[i]->dest->index);
6360 fputs ("}\n", file);
6364 /* Dump loop related CFG information for LOOPS to stream FILE:
   each block's successor list and dominator set, plus the DFS order.
   Does nothing when no loops were found, FILE is null, or dominator
   information is absent. */
6365 static void
6366 flow_loops_cfg_dump (loops, file)
6367 const struct loops *loops;
6368 FILE *file;
6370 int i;
6372 if (! loops->num || ! file || ! loops->cfg.dom)
6373 return;
6375 for (i = 0; i < n_basic_blocks; i++)
6377 edge succ;
6379 fprintf (file, ";; %d succs { ", i);
6380 for (succ = BASIC_BLOCK (i)->succ; succ; succ = succ->succ_next)
6381 fprintf (file, "%d ", succ->dest->index);
/* flow_nodes_print closes the succ list's brace via its STR prefix.  */
6382 flow_nodes_print ("} dom", loops->cfg.dom[i], file);
6386 /* Dump the DFS node order. */
6387 if (loops->cfg.dfs_order)
6389 fputs (";; DFS order: ", file);
6390 for (i = 0; i < n_basic_blocks; i++)
6391 fprintf (file, "%d ", loops->cfg.dfs_order[i]);
6392 fputs ("\n", file);
6397 /* Return non-zero if the nodes of LOOP are a subset of OUTER,
   i.e. LOOP is nested inside OUTER. */
6398 static int
6399 flow_loop_nested_p (outer, loop)
6400 struct loop *outer;
6401 struct loop *loop;
/* Subset test on the per-loop node bitmaps.  */
6403 return sbitmap_a_subset_b_p (loop->nodes, outer->nodes);
6407 /* Dump the loop information specified by LOOPS to the stream FILE.
   With non-zero VERBOSE, additionally compare the computed loops
   against the loop notes in the insn stream and dump the CFG. */
6408 void
6409 flow_loops_dump (loops, file, verbose)
6410 const struct loops *loops;
6411 FILE *file;
6412 int verbose;
6414 int i;
6415 int num_loops;
6417 num_loops = loops->num;
6418 if (! num_loops || ! file)
6419 return;
6421 fprintf (file, ";; %d loops found, %d levels\n",
6422 num_loops, loops->levels);
6424 for (i = 0; i < num_loops; i++)
6426 struct loop *loop = &loops->array[i];
/* Summary line: boundaries, header/latch/pre-header indices, nesting
   depth and level, and the index of the enclosing loop (-1 if none).  */
6428 fprintf (file, ";; loop %d (%d to %d):\n;; header %d, latch %d, pre-header %d, depth %d, level %d, outer %ld\n",
6429 i, INSN_UID (loop->header->head), INSN_UID (loop->latch->end),
6430 loop->header->index, loop->latch->index,
6431 loop->pre_header ? loop->pre_header->index : -1,
6432 loop->depth, loop->level,
6433 (long) (loop->outer ? (loop->outer - loops->array) : -1));
6434 fprintf (file, ";; %d", loop->num_nodes);
6435 flow_nodes_print (" nodes", loop->nodes, file);
6436 fprintf (file, ";; %d", loop->num_exits);
6437 flow_exits_print (" exits", loop->exits, loop->num_exits, file);
/* When this loop shares its header with an earlier loop, report
   whether the two are disjoint or nested.  */
6439 if (loop->shared)
6441 int j;
6443 for (j = 0; j < i; j++)
6445 struct loop *oloop = &loops->array[j];
6447 if (loop->header == oloop->header)
6449 int disjoint;
6450 int smaller;
6452 smaller = loop->num_nodes < oloop->num_nodes;
6454 /* If the union of LOOP and OLOOP is different than
6455 the larger of LOOP and OLOOP then LOOP and OLOOP
6456 must be disjoint. */
6457 disjoint = ! flow_loop_nested_p (smaller ? loop : oloop,
6458 smaller ? oloop : loop);
6459 fprintf (file, ";; loop header %d shared by loops %d, %d %s\n",
6460 loop->header->index, i, j,
6461 disjoint ? "disjoint" : "nested");
6466 if (verbose)
6468 /* Print diagnostics to compare our concept of a loop with
6469 what the loop notes say. */
6470 if (GET_CODE (PREV_INSN (loop->first->head)) != NOTE
6471 || NOTE_LINE_NUMBER (PREV_INSN (loop->first->head))
6472 != NOTE_INSN_LOOP_BEG)
6473 fprintf (file, ";; No NOTE_INSN_LOOP_BEG at %d\n",
6474 INSN_UID (PREV_INSN (loop->first->head)));
6475 if (GET_CODE (NEXT_INSN (loop->last->end)) != NOTE
6476 || NOTE_LINE_NUMBER (NEXT_INSN (loop->last->end))
6477 != NOTE_INSN_LOOP_END)
6478 fprintf (file, ";; No NOTE_INSN_LOOP_END at %d\n",
6479 INSN_UID (NEXT_INSN (loop->last->end)));
6483 if (verbose)
6484 flow_loops_cfg_dump (loops, file);
6488 /* Free all the memory allocated for LOOPS. */
6489 void
6490 flow_loops_free (loops)
6491 struct loops *loops;
6493 if (loops->array)
6495 int i;
6497 if (! loops->num)
6498 abort ();
6500 /* Free the loop descriptors. */
6501 for (i = 0; i < loops->num; i++)
6503 struct loop *loop = &loops->array[i];
6505 if (loop->nodes)
6506 sbitmap_free (loop->nodes);
6507 if (loop->exits)
6508 free (loop->exits);
6510 free (loops->array);
6511 loops->array = NULL;
6513 if (loops->cfg.dom)
6514 sbitmap_vector_free (loops->cfg.dom);
6515 if (loops->cfg.dfs_order)
6516 free (loops->cfg.dfs_order);
6518 sbitmap_free (loops->shared_headers);
6523 /* Find the exits from the loop using the bitmap of loop nodes NODES
6524 and store in EXITS array. Return the number of exits from the
6525 loop. */
6526 static int
6527 flow_loop_exits_find (nodes, exits)
6528 const sbitmap nodes;
6529 edge **exits;
6531 edge e;
6532 int node;
6533 int num_exits;
6535 *exits = NULL;
6537 /* Check all nodes within the loop to see if there are any
6538 successors not in the loop. Note that a node may have multiple
6539 exiting edges. */
6540 num_exits = 0;
6541 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
6542 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
6544 basic_block dest = e->dest;
6546 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
6547 num_exits++;
6551 if (! num_exits)
6552 return 0;
6554 *exits = (edge *) xmalloc (num_exits * sizeof (edge *));
6556 /* Store all exiting edges into an array. */
6557 num_exits = 0;
6558 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
6559 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
6561 basic_block dest = e->dest;
6563 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
6564 (*exits)[num_exits++] = e;
6568 return num_exits;
6572 /* Find the nodes contained within the loop with header HEADER and
6573 latch LATCH and store in NODES. Return the number of nodes within
6574 the loop. */
6575 static int
6576 flow_loop_nodes_find (header, latch, nodes)
6577 basic_block header;
6578 basic_block latch;
6579 sbitmap nodes;
6581 basic_block *stack;
6582 int sp;
6583 int num_nodes = 0;
6585 stack = (basic_block *) xmalloc (n_basic_blocks * sizeof (basic_block));
6586 sp = 0;
6588 /* Start with only the loop header in the set of loop nodes. */
6589 sbitmap_zero (nodes);
6590 SET_BIT (nodes, header->index);
6591 num_nodes++;
6592 header->loop_depth++;
6594 /* Push the loop latch on to the stack. */
6595 if (! TEST_BIT (nodes, latch->index))
6597 SET_BIT (nodes, latch->index);
6598 latch->loop_depth++;
6599 num_nodes++;
6600 stack[sp++] = latch;
6603 while (sp)
6605 basic_block node;
6606 edge e;
6608 node = stack[--sp];
6609 for (e = node->pred; e; e = e->pred_next)
6611 basic_block ancestor = e->src;
6613 /* If each ancestor not marked as part of loop, add to set of
6614 loop nodes and push on to stack. */
6615 if (ancestor != ENTRY_BLOCK_PTR
6616 && ! TEST_BIT (nodes, ancestor->index))
6618 SET_BIT (nodes, ancestor->index);
6619 ancestor->loop_depth++;
6620 num_nodes++;
6621 stack[sp++] = ancestor;
6625 free (stack);
6626 return num_nodes;
6630 /* Compute the depth first search order and store in the array
6631 DFS_ORDER. Returns the
6632 number of nodes visited.
   (The bitmap of visited nodes is a local here, despite what an older
   version of this comment said.)  DFS_ORDER receives, for each block
   index, that block's position in a reverse-completion-time ordering. */
6633 static int
6634 flow_depth_first_order_compute (dfs_order)
6635 int *dfs_order;
6637 edge e;
6638 edge *stack;
6639 int sp;
6640 int dfsnum = 0;
6641 sbitmap visited;
6643 /* Allocate stack for back-tracking up CFG. */
6644 stack = (edge *) xmalloc (n_basic_blocks * sizeof (edge));
6645 sp = 0;
6647 /* Allocate bitmap to track nodes that have been visited. */
6648 visited = sbitmap_alloc (n_basic_blocks);
6650 /* None of the nodes in the CFG have been visited yet. */
6651 sbitmap_zero (visited);
6653 /* Start with the first successor edge from the entry block. */
6654 e = ENTRY_BLOCK_PTR->succ;
6655 while (e)
6657 basic_block src = e->src;
6658 basic_block dest = e->dest;
6660 /* Mark that we have visited this node. */
6661 if (src != ENTRY_BLOCK_PTR)
6662 SET_BIT (visited, src->index);
6664 /* If this node has not been visited before, push the current
6665 edge on to the stack and proceed with the first successor
6666 edge of this node. */
6667 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index)
6668 && dest->succ)
6670 stack[sp++] = e;
6671 e = dest->succ;
6673 else
6675 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index)
6676 && ! dest->succ)
6678 /* DEST has no successors (for example, a non-returning
6679 function is called) so do not push the current edge
6680 but carry on with its next successor. */
6681 dfs_order[dest->index] = n_basic_blocks - ++dfsnum;
6682 SET_BIT (visited, dest->index);
/* All successors of SRC are exhausted: number SRC and backtrack by
   popping edges until one with an unprocessed sibling is found.  */
6685 while (! e->succ_next && src != ENTRY_BLOCK_PTR)
6687 dfs_order[src->index] = n_basic_blocks - ++dfsnum;
6689 /* Pop edge off stack. */
6690 e = stack[--sp];
6691 src = e->src;
6693 e = e->succ_next;
6696 free (stack);
6697 sbitmap_free (visited);
6699 /* The number of nodes visited should not be greater than
6700 n_basic_blocks. */
6701 if (dfsnum > n_basic_blocks)
6702 abort ();
6704 /* There are some nodes left in the CFG that are unreachable. */
6705 if (dfsnum < n_basic_blocks)
6706 abort ();
6707 return dfsnum;
6711 /* Return the block for the pre-header of the loop with header
6712 HEADER where DOM specifies the dominator information. Return NULL if
6713 there is no pre-header. */
6714 static basic_block
6715 flow_loop_pre_header_find (header, dom)
6716 basic_block header;
6717 const sbitmap *dom;
6719 basic_block pre_header;
6720 edge e;
6722 /* If block p is a predecessor of the header and is the only block
6723 that the header does not dominate, then it is the pre-header. */
6724 pre_header = NULL;
6725 for (e = header->pred; e; e = e->pred_next)
6727 basic_block node = e->src;
6729 if (node != ENTRY_BLOCK_PTR
6730 && ! TEST_BIT (dom[node->index], header->index))
6732 if (pre_header == NULL)
6733 pre_header = node;
6734 else
6736 /* There are multiple edges into the header from outside
6737 the loop so there is no pre-header block. */
6738 pre_header = NULL;
6739 break;
6743 return pre_header;
6747 /* Add LOOP to the loop hierarchy tree where PREVLOOP was the loop
6748 previously added. The insertion algorithm assumes that the loops
6749 are added in the order found by a depth first search of the CFG. */
6750 static void
6751 flow_loop_tree_node_add (prevloop, loop)
6752 struct loop *prevloop;
6753 struct loop *loop;
6756 if (flow_loop_nested_p (prevloop, loop))
6758 prevloop->inner = loop;
6759 loop->outer = prevloop;
6760 return;
6763 while (prevloop->outer)
6765 if (flow_loop_nested_p (prevloop->outer, loop))
6767 prevloop->next = loop;
6768 loop->outer = prevloop->outer;
6769 return;
6771 prevloop = prevloop->outer;
6774 prevloop->next = loop;
6775 loop->outer = NULL;
6779 /* Build the loop hierarchy tree for LOOPS. */
6780 static void
6781 flow_loops_tree_build (loops)
6782 struct loops *loops;
6784 int i;
6785 int num_loops;
6787 num_loops = loops->num;
6788 if (! num_loops)
6789 return;
6791 /* Root the loop hierarchy tree with the first loop found.
6792 Since we used a depth first search this should be the
6793 outermost loop. */
6794 loops->tree = &loops->array[0];
6795 loops->tree->outer = loops->tree->inner = loops->tree->next = NULL;
6797 /* Add the remaining loops to the tree. */
6798 for (i = 1; i < num_loops; i++)
6799 flow_loop_tree_node_add (&loops->array[i - 1], &loops->array[i]);
6803 /* Helper function to compute loop nesting depth and enclosed loop level
6804 for the natural loop specified by LOOP at the loop depth DEPTH.
6805 Returns the loop level. */
6806 static int
6807 flow_loop_level_compute (loop, depth)
6808 struct loop *loop;
6809 int depth;
6811 struct loop *inner;
6812 int level = 1;
6814 if (! loop)
6815 return 0;
6817 /* Traverse loop tree assigning depth and computing level as the
6818 maximum level of all the inner loops of this loop. The loop
6819 level is equivalent to the height of the loop in the loop tree
6820 and corresponds to the number of enclosed loop levels (including
6821 itself). */
6822 for (inner = loop->inner; inner; inner = inner->next)
6824 int ilevel;
6826 ilevel = flow_loop_level_compute (inner, depth + 1) + 1;
6828 if (ilevel > level)
6829 level = ilevel;
6831 loop->level = level;
6832 loop->depth = depth;
6833 return level;
6837 /* Compute the loop nesting depth and enclosed loop level for the loop
6838 hierarchy tree specfied by LOOPS. Return the maximum enclosed loop
6839 level. */
6841 static int
6842 flow_loops_level_compute (loops)
6843 struct loops *loops;
6845 struct loop *loop;
6846 int level;
6847 int levels = 0;
6849 /* Traverse all the outer level loops. */
6850 for (loop = loops->tree; loop; loop = loop->next)
6852 level = flow_loop_level_compute (loop, 1);
6853 if (level > levels)
6854 levels = level;
6856 return levels;
6860 /* Find all the natural loops in the function and save in LOOPS structure
6861 and recalculate loop_depth information in basic block structures.
6862 Return the number of natural loops found. */
6864 int
6865 flow_loops_find (loops)
6866 struct loops *loops;
6868 int i;
6869 int b;
6870 int num_loops;
6871 edge e;
6872 sbitmap headers;
6873 sbitmap *dom;
6874 int *dfs_order;
6876 loops->num = 0;
6877 loops->array = NULL;
6878 loops->tree = NULL;
6879 dfs_order = NULL;
6881 /* Taking care of this degenerate case makes the rest of
6882 this code simpler. */
6883 if (n_basic_blocks == 0)
6884 return 0;
6886 /* Compute the dominators. */
6887 dom = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
6888 compute_flow_dominators (dom, NULL);
6890 /* Count the number of loop edges (back edges). This should be the
6891 same as the number of natural loops. Also clear the loop_depth
6892 and as we work from inner->outer in a loop nest we call
6893 find_loop_nodes_find which will increment loop_depth for nodes
6894 within the current loop, which happens to enclose inner loops. */
6896 num_loops = 0;
6897 for (b = 0; b < n_basic_blocks; b++)
6899 BASIC_BLOCK (b)->loop_depth = 0;
6900 for (e = BASIC_BLOCK (b)->pred; e; e = e->pred_next)
6902 basic_block latch = e->src;
6904 /* Look for back edges where a predecessor is dominated
6905 by this block. A natural loop has a single entry
6906 node (header) that dominates all the nodes in the
6907 loop. It also has single back edge to the header
6908 from a latch node. Note that multiple natural loops
6909 may share the same header. */
6910 if (latch != ENTRY_BLOCK_PTR && TEST_BIT (dom[latch->index], b))
6911 num_loops++;
6915 if (num_loops)
6917 /* Compute depth first search order of the CFG so that outer
6918 natural loops will be found before inner natural loops. */
6919 dfs_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
6920 flow_depth_first_order_compute (dfs_order);
6922 /* Allocate loop structures. */
6923 loops->array
6924 = (struct loop *) xcalloc (num_loops, sizeof (struct loop));
6926 headers = sbitmap_alloc (n_basic_blocks);
6927 sbitmap_zero (headers);
6929 loops->shared_headers = sbitmap_alloc (n_basic_blocks);
6930 sbitmap_zero (loops->shared_headers);
6932 /* Find and record information about all the natural loops
6933 in the CFG. */
6934 num_loops = 0;
6935 for (b = 0; b < n_basic_blocks; b++)
6937 basic_block header;
6939 /* Search the nodes of the CFG in DFS order that we can find
6940 outer loops first. */
6941 header = BASIC_BLOCK (dfs_order[b]);
6943 /* Look for all the possible latch blocks for this header. */
6944 for (e = header->pred; e; e = e->pred_next)
6946 basic_block latch = e->src;
6948 /* Look for back edges where a predecessor is dominated
6949 by this block. A natural loop has a single entry
6950 node (header) that dominates all the nodes in the
6951 loop. It also has single back edge to the header
6952 from a latch node. Note that multiple natural loops
6953 may share the same header. */
6954 if (latch != ENTRY_BLOCK_PTR
6955 && TEST_BIT (dom[latch->index], header->index))
6957 struct loop *loop;
6959 loop = loops->array + num_loops;
6961 loop->header = header;
6962 loop->latch = latch;
6964 /* Keep track of blocks that are loop headers so
6965 that we can tell which loops should be merged. */
6966 if (TEST_BIT (headers, header->index))
6967 SET_BIT (loops->shared_headers, header->index);
6968 SET_BIT (headers, header->index);
6970 /* Find nodes contained within the loop. */
6971 loop->nodes = sbitmap_alloc (n_basic_blocks);
6972 loop->num_nodes
6973 = flow_loop_nodes_find (header, latch, loop->nodes);
6975 /* Compute first and last blocks within the loop.
6976 These are often the same as the loop header and
6977 loop latch respectively, but this is not always
6978 the case. */
6979 loop->first
6980 = BASIC_BLOCK (sbitmap_first_set_bit (loop->nodes));
6981 loop->last
6982 = BASIC_BLOCK (sbitmap_last_set_bit (loop->nodes));
6984 /* Find edges which exit the loop. Note that a node
6985 may have several exit edges. */
6986 loop->num_exits
6987 = flow_loop_exits_find (loop->nodes, &loop->exits);
6989 /* Look to see if the loop has a pre-header node. */
6990 loop->pre_header
6991 = flow_loop_pre_header_find (header, dom);
6993 num_loops++;
6998 /* Natural loops with shared headers may either be disjoint or
6999 nested. Disjoint loops with shared headers cannot be inner
7000 loops and should be merged. For now just mark loops that share
7001 headers. */
7002 for (i = 0; i < num_loops; i++)
7003 if (TEST_BIT (loops->shared_headers, loops->array[i].header->index))
7004 loops->array[i].shared = 1;
7006 sbitmap_free (headers);
7009 loops->num = num_loops;
7011 /* Save CFG derived information to avoid recomputing it. */
7012 loops->cfg.dom = dom;
7013 loops->cfg.dfs_order = dfs_order;
7015 /* Build the loop hierarchy tree. */
7016 flow_loops_tree_build (loops);
7018 /* Assign the loop nesting depth and enclosed loop level for each
7019 loop. */
7020 loops->levels = flow_loops_level_compute (loops);
7022 return num_loops;
7026 /* Return non-zero if edge E enters header of LOOP from outside of LOOP. */
7028 flow_loop_outside_edge_p (loop, e)
7029 const struct loop *loop;
7030 edge e;
7032 if (e->dest != loop->header)
7033 abort ();
7034 return (e->src == ENTRY_BLOCK_PTR)
7035 || ! TEST_BIT (loop->nodes, e->src->index);
7040 typedef struct reorder_block_def {
7041 int flags;
7042 int index;
7043 basic_block add_jump;
7044 edge succ;
7045 rtx end;
7046 int block_begin;
7047 int block_end;
7048 } *reorder_block_def;
/* Flag bits kept in the flags field of a reorder_block_def.  */
#define REORDER_BLOCK_HEAD 0x1
#define REORDER_BLOCK_VISITED 0x2
/* NOTE(review): 0x3 is HEAD|VISITED, not a distinct bit; if this is
   meant to be tested as an independent flag it should probably be 0x4
   -- confirm against its users before changing.  */
#define REORDER_MOVED_BLOCK_END 0x3

/* Accessors for the reorder_block_def stored in a basic block's aux
   field.  */
#define REORDER_BLOCK_FLAGS(bb) \
  ((reorder_block_def) (bb)->aux)->flags

#define REORDER_BLOCK_INDEX(bb) \
  ((reorder_block_def) (bb)->aux)->index

#define REORDER_BLOCK_ADD_JUMP(bb) \
  ((reorder_block_def) (bb)->aux)->add_jump

#define REORDER_BLOCK_SUCC(bb) \
  ((reorder_block_def) (bb)->aux)->succ

#define REORDER_BLOCK_OLD_END(bb) \
  ((reorder_block_def) (bb)->aux)->end

#define REORDER_BLOCK_BEGIN(bb) \
  ((reorder_block_def) (bb)->aux)->block_begin

#define REORDER_BLOCK_END(bb) \
  ((reorder_block_def) (bb)->aux)->block_end
7076 static int reorder_index;
7077 static basic_block reorder_last_visited;
7079 enum reorder_skip_type {REORDER_SKIP_BEFORE, REORDER_SKIP_AFTER,
7080 REORDER_SKIP_BLOCK_END};
7082 static rtx skip_insns_between_block PARAMS ((basic_block,
7083 enum reorder_skip_type));
7085 /* Skip over insns BEFORE or AFTER BB which are typically associated with
7086 basic block BB. */
7088 static rtx
7089 skip_insns_between_block (bb, skip_type)
7090 basic_block bb;
7091 enum reorder_skip_type skip_type;
7093 rtx insn, last_insn;
7095 if (skip_type == REORDER_SKIP_BEFORE)
7097 if (bb == ENTRY_BLOCK_PTR)
7098 return 0;
7100 last_insn = bb->head;
7101 for (insn = PREV_INSN (bb->head);
7102 insn && insn != BASIC_BLOCK (bb->index - 1)->end;
7103 last_insn = insn, insn = PREV_INSN (insn))
7105 if (NEXT_INSN (insn) != last_insn)
7106 break;
7108 if (GET_CODE (insn) == NOTE
7109 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_LOOP_END
7110 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK
7111 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_BLOCK_END)
7112 continue;
7114 break;
7118 else
7120 last_insn = bb->end;
7122 if (bb == EXIT_BLOCK_PTR)
7123 return 0;
7125 for (insn = NEXT_INSN (bb->end);
7126 insn;
7127 last_insn = insn, insn = NEXT_INSN (insn))
7129 if (bb->index + 1 != n_basic_blocks
7130 && insn == BASIC_BLOCK (bb->index + 1)->head)
7131 break;
7133 if (GET_CODE (insn) == BARRIER
7134 || GET_CODE (insn) == JUMP_INSN
7135 || (GET_CODE (insn) == NOTE
7136 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END
7137 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)))
7138 continue;
7140 if (GET_CODE (insn) == CODE_LABEL
7141 && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
7142 && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
7143 || GET_CODE (PATTERN
7144 (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
7146 insn = NEXT_INSN (insn);
7147 continue;
7149 break;
7152 if (skip_type == REORDER_SKIP_BLOCK_END)
7154 int found_block_end = 0;
7156 for (; insn; last_insn = insn, insn = NEXT_INSN (insn))
7158 if (bb->index + 1 != n_basic_blocks
7159 && insn == BASIC_BLOCK (bb->index + 1)->head)
7160 break;
7162 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
7164 found_block_end = 1;
7165 continue;
7168 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED)
7169 continue;
7171 if (GET_CODE (insn) == NOTE
7172 && NOTE_LINE_NUMBER (insn) >= 0
7173 && NEXT_INSN (insn)
7174 && (NOTE_LINE_NUMBER (NEXT_INSN (insn))
7175 == NOTE_INSN_BLOCK_END))
7176 continue;
7177 break;
7180 if (! found_block_end)
7181 last_insn = 0;
7185 return last_insn;
7189 /* Return common destination for blocks BB0 and BB1. */
7191 static basic_block
7192 get_common_dest (bb0, bb1)
7193 basic_block bb0, bb1;
7195 edge e0, e1;
7197 for (e0 = bb0->succ; e0; e0 = e0->succ_next)
7199 for (e1 = bb1->succ; e1; e1 = e1->succ_next)
7201 if (e0->dest == e1->dest)
7203 return e0->dest;
7207 return 0;
7211 /* Move the destination block for edge E after chain end block CEB
7212 Adding jumps and labels is deferred until fixup_reorder_chain. */
7214 static basic_block
7215 chain_reorder_blocks (e, ceb)
7216 edge e;
7217 basic_block ceb;
7219 basic_block sb = e->src;
7220 basic_block db = e->dest;
7221 rtx cebe_insn, cebbe_insn, dbh_insn, dbe_insn;
7222 edge ee, last_edge;
7224 enum cond_types {NO_COND, PREDICT_THEN_WITH_ELSE, PREDICT_ELSE,
7225 PREDICT_THEN_NO_ELSE, PREDICT_NOT_THEN_NO_ELSE};
7226 enum cond_types cond_type;
7227 enum cond_block_types {NO_COND_BLOCK, THEN_BLOCK, ELSE_BLOCK,
7228 NO_ELSE_BLOCK};
7229 enum cond_block_types cond_block_type;
7231 if (rtl_dump_file)
7232 fprintf (rtl_dump_file,
7233 "Edge from basic block %d to basic block %d last visited %d\n",
7234 sb->index, db->index, ceb->index);
7236 dbh_insn = skip_insns_between_block (db, REORDER_SKIP_BEFORE);
7237 cebe_insn = skip_insns_between_block (ceb, REORDER_SKIP_AFTER);
7238 cebbe_insn = skip_insns_between_block (ceb, REORDER_SKIP_BLOCK_END);
7241 int block_begins = 0;
7242 rtx insn;
7244 for (insn = dbh_insn; insn && insn != db->end; insn = NEXT_INSN (insn))
7246 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
7248 block_begins += 1;
7249 break;
7252 REORDER_BLOCK_BEGIN (sb) = block_begins;
7255 if (cebbe_insn)
7257 int block_ends = 0;
7258 rtx insn;
7260 for (insn = cebe_insn; insn; insn = NEXT_INSN (insn))
7262 if (PREV_INSN (insn) == cebbe_insn)
7263 break;
7264 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
7266 block_ends += 1;
7267 continue;
7270 REORDER_BLOCK_END (ceb) = block_ends;
7273 /* Blocks are in original order. */
7274 if (sb->index == ceb->index
7275 && ceb->index + 1 == db->index && NEXT_INSN (cebe_insn))
7276 return db;
7278 /* Get the type of block and type of condition. */
7279 cond_type = NO_COND;
7280 cond_block_type = NO_COND_BLOCK;
7281 if (GET_CODE (sb->end) == JUMP_INSN && ! simplejump_p (sb->end)
7282 && condjump_p (sb->end))
7284 if (e->flags & EDGE_FALLTHRU)
7285 cond_block_type = THEN_BLOCK;
7286 else if (get_common_dest (sb->succ->dest, sb))
7287 cond_block_type = NO_ELSE_BLOCK;
7288 else
7289 cond_block_type = ELSE_BLOCK;
7291 if (sb->succ->succ_next
7292 && get_common_dest (sb->succ->dest, sb))
7294 if (cond_block_type == THEN_BLOCK)
7296 if (! (REORDER_BLOCK_FLAGS (sb->succ->succ_next->dest)
7297 & REORDER_BLOCK_VISITED))
7298 cond_type = PREDICT_THEN_NO_ELSE;
7299 else
7300 cond_type = PREDICT_NOT_THEN_NO_ELSE;
7302 else if (cond_block_type == NO_ELSE_BLOCK)
7304 if (! (REORDER_BLOCK_FLAGS (sb->succ->dest)
7305 & REORDER_BLOCK_VISITED))
7306 cond_type = PREDICT_NOT_THEN_NO_ELSE;
7307 else
7308 cond_type = PREDICT_THEN_NO_ELSE;
7311 else
7313 if (cond_block_type == THEN_BLOCK)
7315 if (! (REORDER_BLOCK_FLAGS (sb->succ->succ_next->dest)
7316 & REORDER_BLOCK_VISITED))
7317 cond_type = PREDICT_THEN_WITH_ELSE;
7318 else
7319 cond_type = PREDICT_ELSE;
7321 else if (cond_block_type == ELSE_BLOCK
7322 && sb->succ->dest != EXIT_BLOCK_PTR)
7324 if (! (REORDER_BLOCK_FLAGS (sb->succ->dest)
7325 & REORDER_BLOCK_VISITED))
7326 cond_type = PREDICT_ELSE;
7327 else
7328 cond_type = PREDICT_THEN_WITH_ELSE;
7333 if (rtl_dump_file)
7335 static const char * cond_type_str [] = {"not cond jump", "predict then",
7336 "predict else",
7337 "predict then w/o else",
7338 "predict not then w/o else"};
7339 static const char * cond_block_type_str [] = {"not then or else block",
7340 "then block",
7341 "else block",
7342 "then w/o else block"};
7344 fprintf (rtl_dump_file, " %s (looking at %s)\n",
7345 cond_type_str[(int)cond_type],
7346 cond_block_type_str[(int)cond_block_type]);
7349 /* Reflect that then block will move and we'll jump to it. */
7350 if (cond_block_type != THEN_BLOCK
7351 && (cond_type == PREDICT_ELSE
7352 || cond_type == PREDICT_NOT_THEN_NO_ELSE))
7354 if (rtl_dump_file)
7355 fprintf (rtl_dump_file,
7356 " then jump from block %d to block %d\n",
7357 sb->index, sb->succ->dest->index);
7359 /* Jump to reordered then block. */
7360 REORDER_BLOCK_ADD_JUMP (sb) = sb->succ->dest;
7363 /* Reflect that then block will jump back when we have no else. */
7364 if (cond_block_type != THEN_BLOCK
7365 && cond_type == PREDICT_NOT_THEN_NO_ELSE)
7367 for (ee = sb->succ->dest->succ;
7368 ee && ! (ee->flags & EDGE_FALLTHRU);
7369 ee = ee->succ_next)
7370 continue;
7372 if (ee && ! (GET_CODE (sb->succ->dest->end) == JUMP_INSN
7373 && ! simplejump_p (sb->succ->dest->end)))
7375 REORDER_BLOCK_ADD_JUMP (sb->succ->dest) = ee->dest;
7379 /* Reflect that else block will jump back. */
7380 if (cond_block_type == ELSE_BLOCK
7381 && (cond_type == PREDICT_THEN_WITH_ELSE || cond_type == PREDICT_ELSE))
7383 last_edge=db->succ;
7385 if (last_edge
7386 && last_edge->dest != EXIT_BLOCK_PTR
7387 && GET_CODE (last_edge->dest->head) == CODE_LABEL
7388 && ! (GET_CODE (db->end) == JUMP_INSN))
7390 if (rtl_dump_file)
7391 fprintf (rtl_dump_file,
7392 " else jump from block %d to block %d\n",
7393 db->index, last_edge->dest->index);
7395 REORDER_BLOCK_ADD_JUMP (db) = last_edge->dest;
7399 /* This block's successor has already been reordered. This can happen
7400 when we reorder a chain starting at a then or else. */
7401 for (last_edge = db->succ;
7402 last_edge && ! (last_edge->flags & EDGE_FALLTHRU);
7403 last_edge = last_edge->succ_next)
7404 continue;
7406 if (last_edge
7407 && last_edge->dest != EXIT_BLOCK_PTR
7408 && (REORDER_BLOCK_FLAGS (last_edge->dest)
7409 & REORDER_BLOCK_VISITED))
7411 if (rtl_dump_file)
7412 fprintf (rtl_dump_file,
7413 " end of chain jump from block %d to block %d\n",
7414 db->index, last_edge->dest->index);
7416 REORDER_BLOCK_ADD_JUMP (db) = last_edge->dest;
7419 dbh_insn = skip_insns_between_block (db, REORDER_SKIP_BEFORE);
7420 cebe_insn = skip_insns_between_block (ceb, REORDER_SKIP_AFTER);
7421 dbe_insn = skip_insns_between_block (db, REORDER_SKIP_AFTER);
7423 /* Leave behind any lexical block markers. */
7424 if (debug_info_level > DINFO_LEVEL_TERSE
7425 && ceb->index + 1 < db->index)
7427 rtx insn, last_insn = get_last_insn ();
7428 insn = NEXT_INSN (ceb->end);
7429 if (! insn)
7430 insn = REORDER_BLOCK_OLD_END (ceb);
7432 if (NEXT_INSN (cebe_insn) == 0)
7433 set_last_insn (cebe_insn);
7434 for (; insn && insn != db->head/*dbh_insn*/;
7435 insn = NEXT_INSN (insn))
7437 if (GET_CODE (insn) == NOTE
7438 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG))
7440 cebe_insn = emit_note_after (NOTE_INSN_BLOCK_BEG, cebe_insn);
7441 delete_insn (insn);
7443 if (GET_CODE (insn) == NOTE
7444 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
7446 cebe_insn = emit_note_after (NOTE_INSN_BLOCK_END, cebe_insn);
7447 delete_insn (insn);
7450 set_last_insn (last_insn);
7453 /* Rechain predicted block. */
7454 NEXT_INSN (cebe_insn) = dbh_insn;
7455 PREV_INSN (dbh_insn) = cebe_insn;
7457 REORDER_BLOCK_OLD_END (db) = NEXT_INSN (dbe_insn);
7458 if (db->index != n_basic_blocks - 1)
7459 NEXT_INSN (dbe_insn) = 0;
7461 return db;
7465 /* Reorder blocks starting at block B. */
7467 static void
7468 make_reorder_chain (bb)
7469 basic_block bb;
7471 edge e;
7472 basic_block visited_edge = NULL;
7473 rtx block_end;
7474 int probability;
7476 if (bb == EXIT_BLOCK_PTR)
7477 return;
7479 /* Find the most probable block. */
7480 e = bb->succ;
7481 block_end = bb->end;
7482 if (GET_CODE (block_end) == JUMP_INSN && condjump_p (block_end))
7484 rtx note = find_reg_note (block_end, REG_BR_PROB, 0);
7486 if (note)
7487 probability = XINT (XEXP (note, 0), 0);
7488 else
7489 probability = 0;
7491 if (probability >= REG_BR_PROB_BASE / 2)
7492 e = bb->succ->succ_next;
7495 /* Add chosen successor to chain and recurse on it. */
7496 if (e && e->dest != EXIT_BLOCK_PTR
7497 && e->dest != e->src
7498 && (! (REORDER_BLOCK_FLAGS (e->dest) & REORDER_BLOCK_VISITED)
7499 || (REORDER_BLOCK_FLAGS (e->dest) == REORDER_BLOCK_HEAD)))
7501 if (! (REORDER_BLOCK_FLAGS (bb) & REORDER_BLOCK_VISITED))
7503 REORDER_BLOCK_FLAGS (bb) |= REORDER_BLOCK_HEAD;
7504 REORDER_BLOCK_INDEX (bb) = reorder_index++;
7505 REORDER_BLOCK_FLAGS (bb) |= REORDER_BLOCK_VISITED;
7508 if (REORDER_BLOCK_FLAGS (e->dest) & REORDER_BLOCK_VISITED)
7509 REORDER_BLOCK_FLAGS (e->dest) &= ~REORDER_BLOCK_HEAD;
7511 REORDER_BLOCK_SUCC (bb) = e;
7513 visited_edge = e->dest;
7515 reorder_last_visited = chain_reorder_blocks (e, bb);
7517 if (e->dest
7518 && ! (REORDER_BLOCK_FLAGS (e->dest)
7519 & REORDER_BLOCK_VISITED))
7520 make_reorder_chain (e->dest);
7522 else
7524 if (! (REORDER_BLOCK_FLAGS (bb) & REORDER_BLOCK_VISITED))
7526 REORDER_BLOCK_INDEX (bb) = reorder_index++;
7527 REORDER_BLOCK_FLAGS (bb) |= REORDER_BLOCK_VISITED;
7531 /* Recurse on the successors. */
7532 for (e = bb->succ; e; e = e->succ_next)
7534 if (e->dest && e->dest == EXIT_BLOCK_PTR)
7535 continue;
7537 if (e->dest
7538 && e->dest != e->src
7539 && e->dest != visited_edge
7540 && ! (REORDER_BLOCK_FLAGS (e->dest)
7541 & REORDER_BLOCK_VISITED))
7543 reorder_last_visited
7544 = chain_reorder_blocks (e, reorder_last_visited);
7545 make_reorder_chain (e->dest);
7551 /* Fixup jumps and labels after reordering basic blocks. */
7553 static void
7554 fixup_reorder_chain ()
7556 int i, j;
7557 rtx insn;
7559 /* Set the new last insn. */
7560 for (i = 0;
7561 i < n_basic_blocks - 1
7562 && REORDER_BLOCK_INDEX (BASIC_BLOCK (i)) != n_basic_blocks;
7563 i++)
7564 continue;
7566 for (insn = BASIC_BLOCK (i)->head;
7567 NEXT_INSN (insn) != 0;
7568 insn = NEXT_INSN (insn))
7569 continue;
7571 set_last_insn (insn);
7573 /* Add jumps and labels to fixup blocks. */
7574 for (i = 0; i < n_basic_blocks - 1; i++)
7576 if (REORDER_BLOCK_ADD_JUMP (BASIC_BLOCK (i)))
7578 rtx new_label = gen_label_rtx ();
7579 rtx label_insn, jump_insn, barrier_insn;
7581 label_insn = emit_label_before (new_label,
7582 REORDER_BLOCK_ADD_JUMP (BASIC_BLOCK (i))->head);
7583 REORDER_BLOCK_ADD_JUMP (BASIC_BLOCK (i))->head = label_insn;
7585 jump_insn = emit_jump_insn_after (gen_jump (label_insn),
7586 BASIC_BLOCK (i)->end);
7587 JUMP_LABEL (jump_insn) = label_insn;
7588 ++LABEL_NUSES (label_insn);
7589 barrier_insn = emit_barrier_after (jump_insn);
7590 if (GET_CODE (BASIC_BLOCK (i)->end) != JUMP_INSN)
7591 BASIC_BLOCK (i)->end = barrier_insn;
7592 /* Add block for jump. Typically this is when a then is not
7593 predicted and we are jumping to the moved then block. */
7594 else
7596 basic_block b;
7598 b = (basic_block) obstack_alloc (function_obstack, sizeof (*b));
7599 VARRAY_GROW (basic_block_info, ++n_basic_blocks);
7600 BASIC_BLOCK (n_basic_blocks - 1) = b;
7601 b->index = n_basic_blocks - 1;
7602 b->head = emit_note_before (NOTE_INSN_BASIC_BLOCK, jump_insn);
7603 NOTE_BASIC_BLOCK (b->head) = b;
7604 b->end = barrier_insn;
7607 basic_block nb = BASIC_BLOCK (n_basic_blocks - 1);
7608 nb->global_live_at_start
7609 = OBSTACK_ALLOC_REG_SET (function_obstack);
7610 nb->global_live_at_end
7611 = OBSTACK_ALLOC_REG_SET (function_obstack);
7613 COPY_REG_SET (nb->global_live_at_start,
7614 BASIC_BLOCK (i)->global_live_at_start);
7615 COPY_REG_SET (nb->global_live_at_end,
7616 BASIC_BLOCK (i)->global_live_at_start);
7617 if (BASIC_BLOCK (i)->local_set)
7619 OBSTACK_ALLOC_REG_SET (function_obstack);
7620 COPY_REG_SET (nb->local_set, BASIC_BLOCK (i)->local_set);
7622 else
7623 BASIC_BLOCK (nb->index)->local_set = 0;
7625 nb->aux = xcalloc (1, sizeof (struct reorder_block_def));
7626 REORDER_BLOCK_INDEX (BASIC_BLOCK (n_basic_blocks - 1))
7627 = REORDER_BLOCK_INDEX (BASIC_BLOCK (i)) + 1;
7628 /* Relink to new block. */
7629 nb->succ = BASIC_BLOCK (i)->succ;
7631 make_edge (0, BASIC_BLOCK (i), nb, 0);
7632 BASIC_BLOCK (i)->succ->succ_next
7633 = BASIC_BLOCK (i)->succ->succ_next->succ_next;
7634 nb->succ->succ_next = 0;
7635 /* Fix reorder block index to reflect new block. */
7636 for (j = 0; j < n_basic_blocks - 1; j++)
7638 basic_block bbj = BASIC_BLOCK (j);
7639 basic_block bbi = BASIC_BLOCK (i);
7640 if (REORDER_BLOCK_INDEX (bbj)
7641 >= REORDER_BLOCK_INDEX (bbi) + 1)
7642 REORDER_BLOCK_INDEX (bbj)++;
7651 /* Reorder basic blocks. */
7653 void
7654 reorder_basic_blocks ()
7656 int i, j;
7657 struct loops loops_info;
7658 int num_loops;
7659 rtx last_insn;
7661 if (profile_arc_flag)
7662 return;
7664 if (n_basic_blocks <= 1)
7665 return;
7667 /* Exception edges are not currently handled. */
7668 for (i = 0; i < n_basic_blocks; i++)
7670 edge e;
7672 for (e = BASIC_BLOCK (i)->succ; e && ! (e->flags & EDGE_EH);
7673 e = e->succ_next)
7674 continue;
7676 if (e && (e->flags & EDGE_EH))
7677 return;
7680 reorder_index = 0;
7682 /* Find natural loops using the CFG. */
7683 num_loops = flow_loops_find (&loops_info);
7685 /* Dump loop information. */
7686 flow_loops_dump (&loops_info, rtl_dump_file, 0);
7688 /* Estimate using heuristics if no profiling info is available. */
7689 if (! flag_branch_probabilities)
7690 estimate_probability (&loops_info);
7692 reorder_last_visited = BASIC_BLOCK (0);
7694 for (i = 0; i < n_basic_blocks; i++)
7696 BASIC_BLOCK (i)->aux = xcalloc (1, sizeof (struct reorder_block_def));
7699 last_insn
7700 = NEXT_INSN (skip_insns_between_block (BASIC_BLOCK (n_basic_blocks - 1),
7701 REORDER_SKIP_AFTER));
7703 make_reorder_chain (BASIC_BLOCK (0));
7705 fixup_reorder_chain ();
7707 #ifdef ENABLE_CHECKING
7709 rtx insn, last_insn;
7710 last_insn = get_insns ();
7711 for (insn = NEXT_INSN (get_insns ());
7712 insn && PREV_INSN (insn) == last_insn
7713 && NEXT_INSN (PREV_INSN (insn)) == insn;
7714 last_insn = insn,
7715 insn = NEXT_INSN (insn))
7716 continue;
7718 if (insn)
7720 if (rtl_dump_file)
7721 fprintf (rtl_dump_file, "insn chaining error at %d\n",
7722 INSN_UID (last_insn));
7723 abort();
7726 #endif
7728 /* Put basic_block_info in new order. */
7729 for (i = 0; i < n_basic_blocks - 1; i++)
7731 for (j = i; i != REORDER_BLOCK_INDEX (BASIC_BLOCK (j)); j++)
7732 continue;
7734 if (REORDER_BLOCK_INDEX (BASIC_BLOCK (j)) == i
7735 && i != j)
7737 basic_block tempbb;
7738 int temprbi;
7739 int rbi = REORDER_BLOCK_INDEX (BASIC_BLOCK (j));
7741 temprbi = BASIC_BLOCK (rbi)->index;
7742 BASIC_BLOCK (rbi)->index = BASIC_BLOCK (j)->index;
7743 BASIC_BLOCK (j)->index = temprbi;
7744 tempbb = BASIC_BLOCK (rbi);
7745 BASIC_BLOCK (rbi) = BASIC_BLOCK (j);
7746 BASIC_BLOCK (j) = tempbb;
7750 NEXT_INSN (BASIC_BLOCK (n_basic_blocks - 1)->end) = last_insn;
7752 for (i = 0; i < n_basic_blocks - 1; i++)
7754 free (BASIC_BLOCK (i)->aux);
7757 /* Free loop information. */
7758 flow_loops_free (&loops_info);