/* Data flow analysis for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
/* This file contains the data flow analysis pass of the compiler.  It
   computes data flow information which tells combine_instructions
   which insns to consider combining and controls register allocation.

   Additional data flow information that is too bulky to record is
   generated during the analysis, and is used at that time to create
   autoincrement and autodecrement addressing.

   The first step is dividing the function into basic blocks.
   find_basic_blocks does this.  Then life_analysis determines
   where each register is live and where it is dead.

   ** find_basic_blocks **

   find_basic_blocks divides the current function's rtl into basic
   blocks and constructs the CFG.  The blocks are recorded in the
   basic_block_info array; the CFG exists in the edge structures
   referenced by the blocks.

   find_basic_blocks also finds any unreachable loops and deletes them.

   ** life_analysis **

   life_analysis is called immediately after find_basic_blocks.
   It uses the basic block information to determine where each
   hard or pseudo register is live.

   ** live-register info **

   The information about where each register is live is in two parts:
   the REG_NOTES of insns, and the vector basic_block->global_live_at_start.

   basic_block->global_live_at_start has an element for each basic
   block, and the element is a bit-vector with a bit for each hard or
   pseudo register.  The bit is 1 if the register is live at the
   beginning of the basic block.
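
   For example (an illustrative sketch, not a fragment of this file):
   if bit 42 of BASIC_BLOCK (i)->global_live_at_start is set, then hard
   or pseudo register 42 is live on entry to block I.  The usual way to
   test such a bit is the REGNO_REG_SET_P macro from basic-block.h:

	REGNO_REG_SET_P (BASIC_BLOCK (i)->global_live_at_start, 42)

   which is nonzero exactly when that register is live at the block head.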

   Two types of elements can be added to an insn's REG_NOTES.
   A REG_DEAD note is added to an insn's REG_NOTES for any register
   that meets both of two conditions:  The value in the register is not
   needed in subsequent insns and the insn does not replace the value in
   the register (in the case of multi-word hard registers, the value in
   each register must be replaced by the insn to avoid a REG_DEAD note).

   In the vast majority of cases, an object in a REG_DEAD note will be
   used somewhere in the insn.  The (rare) exception to this is if an
   insn uses a multi-word hard register and only some of the registers are
   needed in subsequent insns.  In that case, REG_DEAD notes will be
   provided for those hard registers that are not subsequently needed.
   Partial REG_DEAD notes of this type do not occur when an insn sets
   only some of the hard registers used in such a multi-word operand;
   omitting REG_DEAD notes for objects stored in an insn is optional and
   the desire to do so does not justify the complexity of the partial
   REG_DEAD notes.
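
   As an illustrative example (not taken from any particular target):
   in the insn

	(insn 10 ... (set (reg:SI 100) (plus:SI (reg:SI 100) (reg:SI 101))))

   if the value in register 101 is not needed by any later insn, insn 10
   would carry (expr_list:REG_DEAD (reg:SI 101) ...) in its REG_NOTES;
   register 100 gets no REG_DEAD note here because the insn replaces its
   value rather than killing it.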

   REG_UNUSED notes are added for each register that is set by the insn
   but is unused subsequently (if every register set by the insn is unused
   and the insn does not reference memory or have some other side-effect,
   the insn is deleted instead).  If only part of a multi-word hard
   register is used in a subsequent insn, REG_UNUSED notes are made for
   the parts that will not be used.
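
   Illustratively, if an insn computes both a quotient and a remainder,

	(insn 11 ... (parallel [(set (reg:SI 102) (div:SI ...))
				(set (reg:SI 103) (mod:SI ...))]))

   and the remainder in register 103 is never referenced afterward, insn
   11 would carry (expr_list:REG_UNUSED (reg:SI 103) ...); it is not
   deleted outright because register 102 is still wanted.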

   To determine which registers are live after any insn, one can
   start from the beginning of the basic block and scan insns, noting
   which registers are set by each insn and which die there.
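   (propagate_block, below, performs the equivalent computation in the
   other direction, scanning each block backwards from the set of
   registers live at its end.)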

   ** Other actions of life_analysis **

   life_analysis sets up the LOG_LINKS fields of insns because the
   information needed to do so is readily available.

   life_analysis deletes insns whose only effect is to store a value
   that is never used.

   life_analysis notices cases where a reference to a register as
   a memory address can be combined with a preceding or following
   incrementation or decrementation of the register.  The separate
   instruction to increment or decrement is deleted and the address
   is changed to a POST_INC or similar rtx.
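
   For example (a sketch assuming a target with HAVE_POST_INCREMENT):
   the pair

	(set (mem:SI (reg:SI 103)) (reg:SI 104))
	(set (reg:SI 103) (plus:SI (reg:SI 103) (const_int 4)))

   can be collapsed into the single insn

	(set (mem:SI (post_inc:SI (reg:SI 103))) (reg:SI 104))

   with the separate increment insn deleted.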

   Each time an incrementing or decrementing address is created,
   a REG_INC element is added to the insn's REG_NOTES list.

   life_analysis fills in certain vectors containing information about
   register usage: REG_N_REFS, REG_N_DEATHS, REG_N_SETS, REG_LIVE_LENGTH,
   REG_N_CALLS_CROSSED and REG_BASIC_BLOCK.

   life_analysis sets current_function_sp_is_unchanging if the function
   doesn't modify the stack pointer.  */

/* TODO:

   Split out from life_analysis:
	- local property discovery (bb->local_live, bb->local_set)
	- global property computation
	- log links creation
	- pre/post modify transformation
*/
#include "config.h"
#include "system.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "basic-block.h"
#include "insn-config.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "output.h"
#include "function.h"
#include "except.h"
#include "toplev.h"
#include "recog.h"
#include "insn-flags.h"
#include "expr.h"

#include "obstack.h"

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free
/* EXIT_IGNORE_STACK should be nonzero if, when returning from a function,
   the stack pointer does not matter.  The value is tested only in
   functions that have frame pointers.
   No definition is equivalent to always zero.  */
#ifndef EXIT_IGNORE_STACK
#define EXIT_IGNORE_STACK 0
#endif

#ifndef HAVE_epilogue
#define HAVE_epilogue 0
#endif
#ifndef HAVE_prologue
#define HAVE_prologue 0
#endif
#ifndef HAVE_sibcall_epilogue
#define HAVE_sibcall_epilogue 0
#endif
/* The contents of the current function definition are allocated
   in this obstack, and all are freed at the end of the function.
   For top-level functions, this is temporary_obstack.
   Separate obstacks are made for nested functions.  */

extern struct obstack *function_obstack;

/* Number of basic blocks in the current function.  */

int n_basic_blocks;

/* Number of edges in the current function.  */

int n_edges;

/* The basic block array.  */

varray_type basic_block_info;

/* The special entry and exit blocks.  */

struct basic_block_def entry_exit_blocks[2]
= {{NULL,			/* head */
    NULL,			/* end */
    NULL,			/* pred */
    NULL,			/* succ */
    NULL,			/* local_set */
    NULL,			/* global_live_at_start */
    NULL,			/* global_live_at_end */
    NULL,			/* aux */
    ENTRY_BLOCK,		/* index */
    0,				/* loop_depth */
    -1, -1			/* eh_beg, eh_end */
   },
   {NULL,			/* head */
    NULL,			/* end */
    NULL,			/* pred */
    NULL,			/* succ */
    NULL,			/* local_set */
    NULL,			/* global_live_at_start */
    NULL,			/* global_live_at_end */
    NULL,			/* aux */
    EXIT_BLOCK,			/* index */
    0,				/* loop_depth */
    -1, -1			/* eh_beg, eh_end */
   }
};
/* Nonzero if the second flow pass has completed.  */
int flow2_completed;

/* Maximum register number used in this function, plus one.  */

int max_regno;

/* Indexed by n, giving various register information */

varray_type reg_n_info;

/* Size of the reg_n_info table.  */

unsigned int reg_n_max;

/* Size of a regset for the current function,
   in (1) bytes and (2) elements.  */

int regset_bytes;
int regset_size;

/* Regset of regs live when calls to `setjmp'-like functions happen.  */
/* ??? Does this exist only for the setjmp-clobbered warning message?  */

regset regs_live_at_setjmp;

/* List made of EXPR_LIST rtx's which gives pairs of pseudo registers
   that have to go in the same hard reg.
   The first two regs in the list are a pair, and the next two
   are another pair, etc.  */
rtx regs_may_share;

/* Set of registers that may be eliminable.  These are handled specially
   in updating regs_ever_live.  */

static HARD_REG_SET elim_reg_set;

/* The basic block structure for every insn, indexed by uid.  */

varray_type basic_block_for_insn;

/* The labels mentioned in non-jump rtl.  Valid during find_basic_blocks.  */
/* ??? Should probably be using LABEL_NUSES instead.  It would take a
   bit of surgery to be able to use or co-opt the routines in jump.  */

static rtx label_value_list;

/* For use in communicating between propagate_block and its subroutines.
   Holds all information needed to compute life and def-use information.  */
struct propagate_block_info
{
  /* The basic block we're considering.  */
  basic_block bb;

  /* Bit N is set if register N is conditionally or unconditionally live.  */
  regset reg_live;

  /* Element N is the next insn that uses (hard or pseudo) register N
     within the current basic block; or zero, if there is no such insn.  */
  rtx *reg_next_use;

  /* Contains a list of all the MEMs we are tracking for dead store
     elimination.  */
  rtx mem_set_list;

  /* If non-null, record the set of registers set in the basic block.  */
  regset local_set;

  /* Non-zero if the value of CC0 is live.  */
  int cc0_live;

  /* Flags controlling the set of information propagate_block collects.  */
  int flags;
};
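/* The FLAGS field above is, on the assumption usual for this vintage of
   the compiler, a mask of the PROP_* values declared in basic-block.h
   (e.g. PROP_DEATH_NOTES, PROP_REG_INFO, PROP_AUTOINC); propagate_block
   consults it to decide which of the side effects described in the file
   header to actually perform.  */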
/* Forward declarations */
static int count_basic_blocks		PARAMS ((rtx));
static rtx find_basic_blocks_1		PARAMS ((rtx));
static void clear_edges			PARAMS ((void));
static void make_edges			PARAMS ((rtx));
static void make_label_edge		PARAMS ((sbitmap *, basic_block,
						 rtx, int));
static void make_eh_edge		PARAMS ((sbitmap *, eh_nesting_info *,
						 basic_block, rtx, int));
static void mark_critical_edges	PARAMS ((void));
static void move_stray_eh_region_notes	PARAMS ((void));
static void record_active_eh_regions	PARAMS ((rtx));

static void commit_one_edge_insertion	PARAMS ((edge));

static void delete_unreachable_blocks	PARAMS ((void));
static void delete_eh_regions		PARAMS ((void));
static int can_delete_note_p		PARAMS ((rtx));
static int delete_block			PARAMS ((basic_block));
static void expunge_block		PARAMS ((basic_block));
static int can_delete_label_p		PARAMS ((rtx));
static int merge_blocks_move_predecessor_nojumps PARAMS ((basic_block,
							  basic_block));
static int merge_blocks_move_successor_nojumps PARAMS ((basic_block,
							basic_block));
static int merge_blocks			PARAMS ((edge,basic_block,basic_block));
static void try_merge_blocks		PARAMS ((void));
static void tidy_fallthru_edges		PARAMS ((void));
static int verify_wide_reg_1		PARAMS ((rtx *, void *));
static void verify_wide_reg		PARAMS ((int, rtx, rtx));
static void verify_local_live_at_start	PARAMS ((regset, basic_block));
static int set_noop_p			PARAMS ((rtx));
static int noop_move_p			PARAMS ((rtx));
static void delete_noop_moves		PARAMS ((rtx));
static void notice_stack_pointer_modification_1 PARAMS ((rtx, rtx, void *));
static void notice_stack_pointer_modification PARAMS ((rtx));
static void mark_reg			PARAMS ((rtx, void *));
static void mark_regs_live_at_end	PARAMS ((regset));
static int set_phi_alternative_reg	PARAMS ((rtx, int, int, void *));
static void calculate_global_regs_live	PARAMS ((sbitmap, sbitmap, int));
static void propagate_block_delete_insn PARAMS ((basic_block, rtx));
static rtx propagate_block_delete_libcall PARAMS ((basic_block, rtx, rtx));
static void propagate_block		PARAMS ((basic_block, regset,
						 regset, int));
static int insn_dead_p			PARAMS ((struct propagate_block_info *,
						 rtx, int, rtx));
static int libcall_dead_p		PARAMS ((struct propagate_block_info *,
						 rtx, rtx, rtx));
static void mark_set_regs		PARAMS ((struct propagate_block_info *,
						 regset, rtx, rtx));
static void mark_set_1			PARAMS ((struct propagate_block_info *,
						 regset, rtx, rtx, rtx));
static int mark_set_reg			PARAMS ((struct propagate_block_info *,
						 regset, rtx, rtx,
						 int *, int *));
#ifdef AUTO_INC_DEC
static void find_auto_inc		PARAMS ((struct propagate_block_info *,
						 rtx, rtx));
static int try_pre_increment_1		PARAMS ((struct propagate_block_info *,
						 rtx));
static int try_pre_increment		PARAMS ((rtx, rtx, HOST_WIDE_INT));
#endif
static void mark_used_reg		PARAMS ((struct propagate_block_info *,
						 regset, rtx, rtx, rtx));
static void mark_used_regs		PARAMS ((struct propagate_block_info *,
						 regset, rtx, rtx, rtx));
void dump_flow_info			PARAMS ((FILE *));
void debug_flow_info			PARAMS ((void));
static void dump_edge_info		PARAMS ((FILE *, edge, int));

static void count_reg_sets_1		PARAMS ((rtx, int));
static void count_reg_sets		PARAMS ((rtx, int));
static void count_reg_references	PARAMS ((rtx, int));
static void invalidate_mems_from_autoinc PARAMS ((struct propagate_block_info *,
						  rtx));
static void remove_fake_successors	PARAMS ((basic_block));
static void flow_nodes_print		PARAMS ((const char *, const sbitmap, FILE *));
static void flow_exits_print		PARAMS ((const char *, const edge *, int, FILE *));
static void flow_loops_cfg_dump		PARAMS ((const struct loops *, FILE *));
static int flow_loop_nested_p		PARAMS ((struct loop *, struct loop *));
static int flow_loop_exits_find		PARAMS ((const sbitmap, edge **));
static int flow_loop_nodes_find		PARAMS ((basic_block, basic_block, sbitmap));
static int flow_depth_first_order_compute PARAMS ((int *));
static basic_block flow_loop_pre_header_find PARAMS ((basic_block, const sbitmap *));
static void flow_loop_tree_node_add	PARAMS ((struct loop *, struct loop *));
static void flow_loops_tree_build	PARAMS ((struct loops *));
static int flow_loop_level_compute	PARAMS ((struct loop *, int));
static int flow_loops_level_compute	PARAMS ((struct loops *));
/* Find basic blocks of the current function.
   F is the first insn of the function and NREGS the number of register
   numbers in use.  */

void
find_basic_blocks (f, nregs, file)
     rtx f;
     int nregs ATTRIBUTE_UNUSED;
     FILE *file ATTRIBUTE_UNUSED;
{
  int max_uid;

  /* Flush out existing data.  */
  if (basic_block_info != NULL)
    {
      int i;

      clear_edges ();

      /* Clear bb->aux on all extant basic blocks.  We'll use this as a
	 tag for reuse during create_basic_block, just in case some pass
	 copies around basic block notes improperly.  */
      for (i = 0; i < n_basic_blocks; ++i)
	BASIC_BLOCK (i)->aux = NULL;

      VARRAY_FREE (basic_block_info);
    }

  n_basic_blocks = count_basic_blocks (f);

  /* Size the basic block table.  The actual structures will be allocated
     by find_basic_blocks_1, since we want to keep the structure pointers
     stable across calls to find_basic_blocks.  */
  /* ??? This whole issue would be much simpler if we called find_basic_blocks
     exactly once, and thereafter we don't have a single long chain of
     instructions at all until close to the end of compilation when we
     actually lay them out.  */

  VARRAY_BB_INIT (basic_block_info, n_basic_blocks, "basic_block_info");

  label_value_list = find_basic_blocks_1 (f);

  /* Record the block to which an insn belongs.  */
  /* ??? This should be done another way, by which (perhaps) a label is
     tagged directly with the basic block that it starts.  It is used for
     more than that currently, but IMO that is the only valid use.  */

  max_uid = get_max_uid ();
#ifdef AUTO_INC_DEC
  /* Leave space for insns life_analysis makes in some cases for auto-inc.
     These cases are rare, so we don't need too much space.  */
  max_uid += max_uid / 10;
#endif

  compute_bb_for_insn (max_uid);

  /* Discover the edges of our cfg.  */
  record_active_eh_regions (f);
  make_edges (label_value_list);

  /* Do very simple cleanup now, for the benefit of code that runs between
     here and cleanup_cfg, e.g. thread_prologue_and_epilogue_insns.  */
  tidy_fallthru_edges ();

  mark_critical_edges ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
}
/* Count the basic blocks of the function.  */

static int
count_basic_blocks (f)
     rtx f;
{
  register rtx insn;
  register RTX_CODE prev_code;
  register int count = 0;
  int eh_region = 0;
  int call_had_abnormal_edge = 0;
  rtx prev_call = NULL_RTX;

  prev_code = JUMP_INSN;
  for (insn = f; insn; insn = NEXT_INSN (insn))
    {
      register RTX_CODE code = GET_CODE (insn);

      if (code == CODE_LABEL
	  || (GET_RTX_CLASS (code) == 'i'
	      && (prev_code == JUMP_INSN
		  || prev_code == BARRIER
		  || (prev_code == CALL_INSN && call_had_abnormal_edge))))
	count++;

      /* Record whether this call created an edge.  */
      if (code == CALL_INSN)
	{
	  rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
	  int region = (note ? INTVAL (XEXP (note, 0)) : 1);
	  prev_call = insn;
	  call_had_abnormal_edge = 0;

	  /* If there is an EH region or rethrow, we have an edge.  */
	  if ((eh_region && region > 0)
	      || find_reg_note (insn, REG_EH_RETHROW, NULL_RTX))
	    call_had_abnormal_edge = 1;
	  else
	    {
	      /* If there is a nonlocal goto label and the specified
		 region number isn't -1, we have an edge.  (0 means
		 no throw, but might have a nonlocal goto).  */
	      if (nonlocal_goto_handler_labels && region >= 0)
		call_had_abnormal_edge = 1;
	    }
	}
      else if (code != NOTE)
	prev_call = NULL_RTX;

      if (code != NOTE)
	prev_code = code;
      else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
	++eh_region;
      else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
	--eh_region;
    }

  /* The rest of the compiler works a bit smoother when we don't have to
     check for the edge case of do-nothing functions with no basic blocks.  */
  if (count == 0)
    {
      emit_insn (gen_rtx_USE (VOIDmode, const0_rtx));
      count = 1;
    }

  return count;
}
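/* Illustratively: a conditional jump followed immediately by an ordinary
   insn yields a block boundary even though no CODE_LABEL intervenes --
   the insn after the jump is counted as starting a new block, exactly as
   find_basic_blocks_1 will construct it below.  */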
/* Find all basic blocks of the function whose first insn is F.

   Collect and return a list of labels whose addresses are taken.  This
   will be used in make_edges for use with computed gotos.  */

static rtx
find_basic_blocks_1 (f)
     rtx f;
{
  register rtx insn, next;
  int call_has_abnormal_edge = 0;
  int i = 0;
  rtx bb_note = NULL_RTX;
  rtx eh_list = NULL_RTX;
  rtx label_value_list = NULL_RTX;
  rtx head = NULL_RTX;
  rtx end = NULL_RTX;

  /* We process the instructions in a slightly different way than we did
     previously.  This is so that we see a NOTE_BASIC_BLOCK after we have
     closed out the previous block, so that it gets attached at the proper
     place.  Since this form should be equivalent to the previous,
     count_basic_blocks continues to use the old form as a check.  */

  for (insn = f; insn; insn = next)
    {
      enum rtx_code code = GET_CODE (insn);

      next = NEXT_INSN (insn);

      if (code == CALL_INSN)
	{
	  /* Record whether this call created an edge.  */
	  rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
	  int region = (note ? INTVAL (XEXP (note, 0)) : 1);
	  call_has_abnormal_edge = 0;

	  /* If there is an EH region or rethrow, we have an edge.  */
	  if ((eh_list && region > 0)
	      || find_reg_note (insn, REG_EH_RETHROW, NULL_RTX))
	    call_has_abnormal_edge = 1;
	  else
	    {
	      /* If there is a nonlocal goto label and the specified
		 region number isn't -1, we have an edge.  (0 means
		 no throw, but might have a nonlocal goto).  */
	      if (nonlocal_goto_handler_labels && region >= 0)
		call_has_abnormal_edge = 1;
	    }
	}

      switch (code)
	{
	case NOTE:
	  {
	    int kind = NOTE_LINE_NUMBER (insn);

	    /* Keep a LIFO list of the currently active exception notes.  */
	    if (kind == NOTE_INSN_EH_REGION_BEG)
	      eh_list = alloc_INSN_LIST (insn, eh_list);
	    else if (kind == NOTE_INSN_EH_REGION_END)
	      {
		rtx t = eh_list;
		eh_list = XEXP (eh_list, 1);
		free_INSN_LIST_node (t);
	      }

	    /* Look for basic block notes with which to keep the
	       basic_block_info pointers stable.  Unthread the note now;
	       we'll put it back at the right place in create_basic_block.
	       Or not at all if we've already found a note in this block.  */
	    else if (kind == NOTE_INSN_BASIC_BLOCK)
	      {
		if (bb_note == NULL_RTX)
		  bb_note = insn;
		next = flow_delete_insn (insn);
	      }

	    break;
	  }

	case CODE_LABEL:
	  /* A basic block starts at a label.  If we've closed one off due
	     to a barrier or some such, no need to do it again.  */
	  if (head != NULL_RTX)
	    {
	      /* While we now have edge lists with which other portions of
		 the compiler might determine a call ending a basic block
		 does not imply an abnormal edge, it will be a bit before
		 everything can be updated.  So continue to emit a noop at
		 the end of such a block.  */
	      if (GET_CODE (end) == CALL_INSN
		  && ! SIBLING_CALL_P (end))
		{
		  rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
		  end = emit_insn_after (nop, end);
		}

	      create_basic_block (i++, head, end, bb_note);
	      bb_note = NULL_RTX;
	    }
	  head = end = insn;
	  break;

	case JUMP_INSN:
	  /* A basic block ends at a jump.  */
	  if (head == NULL_RTX)
	    head = insn;
	  else
	    {
	      /* ??? Make a special check for table jumps.  The way this
		 happens is truly and amazingly gross.  We are about to
		 create a basic block that contains just a code label and
		 an addr*vec jump insn.  Worse, an addr_diff_vec creates
		 its own natural loop.

		 Prevent this bit of brain damage, pasting things together
		 correctly in make_edges.

		 The correct solution involves emitting the table directly
		 on the tablejump instruction as a note, or JUMP_LABEL.  */

	      if (GET_CODE (PATTERN (insn)) == ADDR_VEC
		  || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
		{
		  head = end = NULL;
		  n_basic_blocks--;
		  break;
		}
	    }
	  end = insn;
	  goto new_bb_inclusive;

	case BARRIER:
	  /* A basic block ends at a barrier.  It may be that an unconditional
	     jump already closed the basic block -- no need to do it again.  */
	  if (head == NULL_RTX)
	    break;

	  /* While we now have edge lists with which other portions of the
	     compiler might determine a call ending a basic block does not
	     imply an abnormal edge, it will be a bit before everything can
	     be updated.  So continue to emit a noop at the end of such a
	     block.  */
	  if (GET_CODE (end) == CALL_INSN
	      && ! SIBLING_CALL_P (end))
	    {
	      rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
	      end = emit_insn_after (nop, end);
	    }
	  goto new_bb_exclusive;

	case CALL_INSN:
	  /* A basic block ends at a call that can either throw or
	     do a non-local goto.  */
	  if (call_has_abnormal_edge)
	    {
	    new_bb_inclusive:
	      if (head == NULL_RTX)
		head = insn;
	      end = insn;

	    new_bb_exclusive:
	      create_basic_block (i++, head, end, bb_note);
	      head = end = NULL_RTX;
	      bb_note = NULL_RTX;
	      break;
	    }
	  /* FALLTHRU */

	default:
	  if (GET_RTX_CLASS (code) == 'i')
	    {
	      if (head == NULL_RTX)
		head = insn;
	      end = insn;
	    }
	  break;
	}

      if (GET_RTX_CLASS (code) == 'i')
	{
	  rtx note;

	  /* Make a list of all labels referred to other than by jumps
	     (which just don't have the REG_LABEL notes).

	     Make a special exception for labels followed by an ADDR*VEC,
	     as this would be a part of the tablejump setup code.

	     Make a special exception for the eh_return_stub_label, which
	     we know isn't part of any otherwise visible control flow.  */

	  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	    if (REG_NOTE_KIND (note) == REG_LABEL)
	      {
		rtx lab = XEXP (note, 0), next;

		if (lab == eh_return_stub_label)
		  ;
		else if ((next = next_nonnote_insn (lab)) != NULL
			 && GET_CODE (next) == JUMP_INSN
			 && (GET_CODE (PATTERN (next)) == ADDR_VEC
			     || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
		  ;
		else
		  label_value_list
		    = alloc_EXPR_LIST (0, XEXP (note, 0), label_value_list);
	      }
	}
    }

  if (head != NULL_RTX)
    create_basic_block (i++, head, end, bb_note);

  if (i != n_basic_blocks)
    abort ();

  return label_value_list;
}
/* Tidy the CFG by deleting unreachable code and whatnot.  */

void
cleanup_cfg (f)
     rtx f;
{
  delete_unreachable_blocks ();
  move_stray_eh_region_notes ();
  record_active_eh_regions (f);
  try_merge_blocks ();
  mark_critical_edges ();

  /* Kill the data we won't maintain.  */
  label_value_list = NULL_RTX;
}
/* Create a new basic block consisting of the instructions between
   HEAD and END inclusive.  Reuses the note and basic block struct
   in BB_NOTE, if any.  */

void
create_basic_block (index, head, end, bb_note)
     int index;
     rtx head, end, bb_note;
{
  basic_block bb;

  if (bb_note
      && ! RTX_INTEGRATED_P (bb_note)
      && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
      && bb->aux == NULL)
    {
      /* If we found an existing note, thread it back onto the chain.  */

      if (GET_CODE (head) == CODE_LABEL)
	add_insn_after (bb_note, head);
      else
	{
	  add_insn_before (bb_note, head);
	  head = bb_note;
	}
    }
  else
    {
      /* Otherwise we must create a note and a basic block structure.
	 Since we allow basic block structs in rtl, give the struct
	 the same lifetime by allocating it off the function obstack
	 rather than using malloc.  */

      bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
      memset (bb, 0, sizeof (*bb));

      if (GET_CODE (head) == CODE_LABEL)
	bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
      else
	{
	  bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
	  head = bb_note;
	}
      NOTE_BASIC_BLOCK (bb_note) = bb;
    }

  /* Always include the bb note in the block.  */
  if (NEXT_INSN (end) == bb_note)
    end = bb_note;

  bb->head = head;
  bb->end = end;
  bb->index = index;
  BASIC_BLOCK (index) = bb;

  /* Tag the block so that we know it has been used when considering
     other basic block notes.  */
  bb->aux = bb;
}
/* Records the basic block struct in BB_FOR_INSN, for every instruction
   indexed by INSN_UID.  MAX is the size of the array.  */

void
compute_bb_for_insn (max)
     int max;
{
  int i;

  if (basic_block_for_insn)
    VARRAY_FREE (basic_block_for_insn);
  VARRAY_BB_INIT (basic_block_for_insn, max, "basic_block_for_insn");

  for (i = 0; i < n_basic_blocks; ++i)
    {
      basic_block bb = BASIC_BLOCK (i);
      rtx insn, end;

      end = bb->end;
      insn = bb->head;
      while (1)
	{
	  int uid = INSN_UID (insn);
	  if (uid < max)
	    VARRAY_BB (basic_block_for_insn, uid) = bb;
	  if (insn == end)
	    break;
	  insn = NEXT_INSN (insn);
	}
    }
}
/* Free the memory associated with the edge structures.  */

static void
clear_edges ()
{
  int i;
  edge n, e;

  for (i = 0; i < n_basic_blocks; ++i)
    {
      basic_block bb = BASIC_BLOCK (i);

      for (e = bb->succ; e ; e = n)
	{
	  n = e->succ_next;
	  free (e);
	}

      bb->succ = 0;
      bb->pred = 0;
    }

  for (e = ENTRY_BLOCK_PTR->succ; e ; e = n)
    {
      n = e->succ_next;
      free (e);
    }

  ENTRY_BLOCK_PTR->succ = 0;
  EXIT_BLOCK_PTR->pred = 0;

  n_edges = 0;
}
/* Identify the edges between basic blocks.

   NONLOCAL_LABEL_LIST is a list of non-local labels in the function.  Blocks
   that are otherwise unreachable may be reachable with a non-local goto.

   BB_EH_END is an array indexed by basic block number in which we record
   the list of exception regions active at the end of the basic block.  */

static void
make_edges (label_value_list)
     rtx label_value_list;
{
  int i;
  eh_nesting_info *eh_nest_info = init_eh_nesting_info ();
  sbitmap *edge_cache = NULL;

  /* Assume no computed jump; revise as we create edges.  */
  current_function_has_computed_jump = 0;

  /* Heavy use of computed goto in machine-generated code can lead to
     nearly fully-connected CFGs.  In that case we spend a significant
     amount of time searching the edge lists for duplicates.  */
  if (forced_labels || label_value_list)
    {
      edge_cache = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
      sbitmap_vector_zero (edge_cache, n_basic_blocks);
    }

  /* By nature of the way these get numbered, block 0 is always the entry.  */
  make_edge (edge_cache, ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU);

  for (i = 0; i < n_basic_blocks; ++i)
    {
      basic_block bb = BASIC_BLOCK (i);
      rtx insn, x;
      enum rtx_code code;
      int force_fallthru = 0;

      /* Examine the last instruction of the block, and discover the
	 ways we can leave the block.  */

      insn = bb->end;
      code = GET_CODE (insn);

      /* A branch.  */
      if (code == JUMP_INSN)
	{
	  rtx tmp;

	  /* ??? Recognize a tablejump and do the right thing.  */
	  if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
	      && (tmp = NEXT_INSN (tmp)) != NULL_RTX
	      && GET_CODE (tmp) == JUMP_INSN
	      && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
		  || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
	    {
	      rtvec vec;
	      int j;

	      if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
		vec = XVEC (PATTERN (tmp), 0);
	      else
		vec = XVEC (PATTERN (tmp), 1);

	      for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
		make_label_edge (edge_cache, bb,
				 XEXP (RTVEC_ELT (vec, j), 0), 0);

	      /* Some targets (eg, ARM) emit a conditional jump that also
		 contains the out-of-range target.  Scan for these and
		 add an edge if necessary.  */
	      if ((tmp = single_set (insn)) != NULL
		  && SET_DEST (tmp) == pc_rtx
		  && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
		  && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
		make_label_edge (edge_cache, bb,
				 XEXP (XEXP (SET_SRC (tmp), 2), 0), 0);

#ifdef CASE_DROPS_THROUGH
	      /* Silly VAXen.  The ADDR_VEC is going to be in the way of
		 us naturally detecting fallthru into the next block.  */
	      force_fallthru = 1;
#endif
	    }

	  /* If this is a computed jump, then mark it as reaching
	     everything on the label_value_list and forced_labels list.  */
	  else if (computed_jump_p (insn))
	    {
	      current_function_has_computed_jump = 1;

	      for (x = label_value_list; x; x = XEXP (x, 1))
		make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);

	      for (x = forced_labels; x; x = XEXP (x, 1))
		make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
	    }

	  /* Returns create an exit out.  */
	  else if (returnjump_p (insn))
	    make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);

	  /* Otherwise, we have a plain conditional or unconditional jump.  */
	  else
	    {
	      if (! JUMP_LABEL (insn))
		abort ();
	      make_label_edge (edge_cache, bb, JUMP_LABEL (insn), 0);
	    }
	}

      /* If this is a sibling call insn, then this is in effect a
	 combined call and return, and so we need an edge to the
	 exit block.  No need to worry about EH edges, since we
	 wouldn't have created the sibling call in the first place.  */

      if (code == CALL_INSN && SIBLING_CALL_P (insn))
	make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);
      else

      /* If this is a CALL_INSN, then mark it as reaching the active EH
	 handler for this CALL_INSN.  If we're handling asynchronous
	 exceptions then any insn can reach any of the active handlers.

	 Also mark the CALL_INSN as reaching any nonlocal goto handler.  */

      if (code == CALL_INSN || asynchronous_exceptions)
	{
	  /* Add any appropriate EH edges.  We do this unconditionally
	     since there may be a REG_EH_REGION or REG_EH_RETHROW note
	     on the call, and this needn't be within an EH region.  */
	  make_eh_edge (edge_cache, eh_nest_info, bb, insn, bb->eh_end);

	  /* If we have asynchronous exceptions, do the same for *all*
	     exception regions active in the block.  */
	  if (asynchronous_exceptions
	      && bb->eh_beg != bb->eh_end)
	    {
	      if (bb->eh_beg >= 0)
		make_eh_edge (edge_cache, eh_nest_info, bb,
			      NULL_RTX, bb->eh_beg);

	      for (x = bb->head; x != bb->end; x = NEXT_INSN (x))
		if (GET_CODE (x) == NOTE
		    && (NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_BEG
			|| NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_END))
		  {
		    int region = NOTE_EH_HANDLER (x);
		    make_eh_edge (edge_cache, eh_nest_info, bb,
				  NULL_RTX, region);
		  }
	    }

	  if (code == CALL_INSN && nonlocal_goto_handler_labels)
	    {
	      /* ??? This could be made smarter: in some cases it's possible
		 to tell that certain calls will not do a nonlocal goto.

		 For example, if the nested functions that do the nonlocal
		 gotos do not have their addresses taken, then only calls to
		 those functions or to other nested functions that use them
		 could possibly do nonlocal gotos.  */
	      /* We do know that a REG_EH_REGION note with a value less
		 than 0 is guaranteed not to perform a non-local goto.  */
	      rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
	      if (!note || INTVAL (XEXP (note, 0)) >= 0)
		for (x = nonlocal_goto_handler_labels; x ; x = XEXP (x, 1))
		  make_label_edge (edge_cache, bb, XEXP (x, 0),
				   EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
	    }
	}

      /* We know something about the structure of the function __throw in
	 libgcc2.c.  It is the only function that ever contains eh_stub
	 labels.  It modifies its return address so that the last block
	 returns to one of the eh_stub labels within it.  So we have to
	 make additional edges in the flow graph.  */
      if (i + 1 == n_basic_blocks && eh_return_stub_label != 0)
	make_label_edge (edge_cache, bb, eh_return_stub_label, EDGE_EH);

      /* Find out if we can drop through to the next block.  */
      insn = next_nonnote_insn (insn);
      if (!insn || (i + 1 == n_basic_blocks && force_fallthru))
	make_edge (edge_cache, bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
      else if (i + 1 < n_basic_blocks)
	{
	  rtx tmp = BLOCK_HEAD (i + 1);
	  if (GET_CODE (tmp) == NOTE)
	    tmp = next_nonnote_insn (tmp);
	  if (force_fallthru || insn == tmp)
	    make_edge (edge_cache, bb, BASIC_BLOCK (i + 1), EDGE_FALLTHRU);
	}
    }

  free_eh_nesting_info (eh_nest_info);
  if (edge_cache)
    sbitmap_vector_free (edge_cache);
}
/* Create an edge between two basic blocks.  FLAGS are auxiliary information
   about the edge that is accumulated between calls.  */

void
make_edge (edge_cache, src, dst, flags)
     sbitmap *edge_cache;
     basic_block src, dst;
     int flags;
{
  int use_edge_cache;
  edge e;

  /* Don't bother with edge cache for ENTRY or EXIT; there aren't that
     many edges to them, and we didn't allocate memory for it.  */
  use_edge_cache = (edge_cache
		    && src != ENTRY_BLOCK_PTR
		    && dst != EXIT_BLOCK_PTR);

  /* Make sure we don't add duplicate edges.  */
  if (! use_edge_cache || TEST_BIT (edge_cache[src->index], dst->index))
    for (e = src->succ; e ; e = e->succ_next)
      if (e->dest == dst)
	{
	  e->flags |= flags;
	  return;
	}

  e = (edge) xcalloc (1, sizeof (*e));
  n_edges++;

  e->succ_next = src->succ;
  e->pred_next = dst->pred;
  e->src = src;
  e->dest = dst;
  e->flags = flags;

  src->succ = e;
  dst->pred = e;

  if (use_edge_cache)
    SET_BIT (edge_cache[src->index], dst->index);
}
/* Create an edge from a basic block to a label.  */

static void
make_label_edge (edge_cache, src, label, flags)
     sbitmap *edge_cache;
     basic_block src;
     rtx label;
     int flags;
{
  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If the label was never emitted, this insn is junk, but avoid a
     crash trying to refer to BLOCK_FOR_INSN (label).  This can happen
     as a result of a syntax error and a diagnostic has already been
     printed.  */

  if (INSN_UID (label) == 0)
    return;

  make_edge (edge_cache, src, BLOCK_FOR_INSN (label), flags);
}
/* Create the edges generated by INSN in REGION.  */

static void
make_eh_edge (edge_cache, eh_nest_info, src, insn, region)
     sbitmap *edge_cache;
     eh_nesting_info *eh_nest_info;
     basic_block src;
     rtx insn;
     int region;
{
  handler_info **handler_list;
  int num, is_call;

  is_call = (insn && GET_CODE (insn) == CALL_INSN ? EDGE_ABNORMAL_CALL : 0);
  num = reachable_handlers (region, eh_nest_info, insn, &handler_list);
  while (--num >= 0)
    {
      make_label_edge (edge_cache, src, handler_list[num]->handler_label,
		       EDGE_ABNORMAL | EDGE_EH | is_call);
    }
}
/* EH_REGION notes appearing between basic blocks are ambiguous, and even
   dangerous if we intend to move basic blocks around.  Move such notes
   into the following block.  */

static void
move_stray_eh_region_notes ()
{
  int i;
  basic_block b1, b2;

  if (n_basic_blocks < 2)
    return;

  b2 = BASIC_BLOCK (n_basic_blocks - 1);
  for (i = n_basic_blocks - 2; i >= 0; --i, b2 = b1)
    {
      rtx insn, next, list = NULL_RTX;

      b1 = BASIC_BLOCK (i);
      for (insn = NEXT_INSN (b1->end); insn != b2->head; insn = next)
	{
	  next = NEXT_INSN (insn);
	  if (GET_CODE (insn) == NOTE
	      && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG
		  || NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
	    {
	      /* Unlink from the insn chain.  */
	      NEXT_INSN (PREV_INSN (insn)) = next;
	      PREV_INSN (next) = PREV_INSN (insn);

	      /* Queue it.  */
	      NEXT_INSN (insn) = list;
	      list = insn;
	    }
	}

      if (list == NULL_RTX)
	continue;

      /* Find where to insert these things.  */
      insn = b2->head;
      if (GET_CODE (insn) == CODE_LABEL)
	insn = NEXT_INSN (insn);

      while (list)
	{
	  next = NEXT_INSN (list);
	  add_insn_after (list, insn);
	  list = next;
	}
    }
}
/* Recompute eh_beg/eh_end for each basic block.  */

static void
record_active_eh_regions (f)
     rtx f;
{
  rtx insn, eh_list = NULL_RTX;
  int i = 0;
  basic_block bb = BASIC_BLOCK (0);

  for (insn = f; insn ; insn = NEXT_INSN (insn))
    {
      if (bb->head == insn)
	bb->eh_beg = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);

      if (GET_CODE (insn) == NOTE)
	{
	  int kind = NOTE_LINE_NUMBER (insn);
	  if (kind == NOTE_INSN_EH_REGION_BEG)
	    eh_list = alloc_INSN_LIST (insn, eh_list);
	  else if (kind == NOTE_INSN_EH_REGION_END)
	    {
	      rtx t = XEXP (eh_list, 1);
	      free_INSN_LIST_node (eh_list);
	      eh_list = t;
	    }
	}

      if (bb->end == insn)
	{
	  bb->eh_end = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);
	  i += 1;
	  if (i == n_basic_blocks)
	    break;
	  bb = BASIC_BLOCK (i);
	}
    }
}
/* Identify critical edges and set the bits appropriately.  */

static void
mark_critical_edges ()
{
  int i, n = n_basic_blocks;
  basic_block bb;

  /* We begin with the entry block.  This is not terribly important now,
     but could be if a front end (Fortran) implemented alternate entry
     points.  */
  bb = ENTRY_BLOCK_PTR;
  i = -1;

  while (1)
    {
      edge e;

      /* (1) Critical edges must have a source with multiple successors.  */
      if (bb->succ && bb->succ->succ_next)
	{
	  for (e = bb->succ; e ; e = e->succ_next)
	    {
	      /* (2) Critical edges must have a destination with multiple
		 predecessors.  Note that we know there is at least one
		 predecessor -- the edge we followed to get here.  */
	      if (e->dest->pred->pred_next)
		e->flags |= EDGE_CRITICAL;
	      else
		e->flags &= ~EDGE_CRITICAL;
	    }
	}
      else
	{
	  for (e = bb->succ; e ; e = e->succ_next)
	    e->flags &= ~EDGE_CRITICAL;
	}

      if (++i >= n)
	break;
      bb = BASIC_BLOCK (i);
    }
}
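/* For instance, if block A ends in a conditional jump (two successors)
   and one of its targets, block B, is also reached from elsewhere (two
   predecessors), the edge A->B is critical: an insn placed on it can
   neither be appended to A nor prepended to B without changing some
   other path, which is why split_edge below creates a new block for
   such edges.  */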
/* Split a (typically critical) edge.  Return the new block.
   Abort on abnormal edges.

   ??? The code generally expects to be called on critical edges.
   The case of a block ending in an unconditional jump to a
   block with multiple predecessors is not handled optimally.  */

basic_block
split_edge (edge_in)
     edge edge_in;
{
  basic_block old_pred, bb, old_succ;
  edge edge_out;
  rtx bb_note;
  int i, j;

  /* Abnormal edges cannot be split.  */
  if ((edge_in->flags & EDGE_ABNORMAL) != 0)
    abort ();

  old_pred = edge_in->src;
  old_succ = edge_in->dest;

  /* Remove the existing edge from the destination's pred list.  */
  {
    edge *pp;
    for (pp = &old_succ->pred; *pp != edge_in; pp = &(*pp)->pred_next)
      continue;
    *pp = edge_in->pred_next;
    edge_in->pred_next = NULL;
  }

  /* Create the new structures.  */
  bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
  edge_out = (edge) xcalloc (1, sizeof (*edge_out));
  n_edges++;

  memset (bb, 0, sizeof (*bb));
  bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
  bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);

  /* ??? This info is likely going to be out of date very soon.  */
  if (old_succ->global_live_at_start)
    {
      COPY_REG_SET (bb->global_live_at_start, old_succ->global_live_at_start);
      COPY_REG_SET (bb->global_live_at_end, old_succ->global_live_at_start);
    }
  else
    {
      CLEAR_REG_SET (bb->global_live_at_start);
      CLEAR_REG_SET (bb->global_live_at_end);
    }

  /* Wire them up.  */
  bb->pred = edge_in;
  bb->succ = edge_out;

  edge_in->dest = bb;
  edge_in->flags &= ~EDGE_CRITICAL;

  edge_out->pred_next = old_succ->pred;
  edge_out->succ_next = NULL;
  edge_out->src = bb;
  edge_out->dest = old_succ;
  edge_out->flags = EDGE_FALLTHRU;
  edge_out->probability = REG_BR_PROB_BASE;

  old_succ->pred = edge_out;

  /* Tricky case -- if there existed a fallthru into the successor
     (and we're not it) we must add a new unconditional jump around
     the new block we're actually interested in.

     Further, if that edge is critical, this means a second new basic
     block must be created to hold it.  In order to simplify correct
     insn placement, do this before we touch the existing basic block
     ordering for the block we were really wanting.  */
  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
    {
      edge e;
      for (e = edge_out->pred_next; e ; e = e->pred_next)
	if (e->flags & EDGE_FALLTHRU)
	  break;

      if (e)
	{
	  basic_block jump_block;
	  rtx pos;

	  if ((e->flags & EDGE_CRITICAL) == 0
	      && e->src != ENTRY_BLOCK_PTR)
	    {
	      /* Non-critical -- we can simply add a jump to the end
		 of the existing predecessor.  */
	      jump_block = e->src;
	    }
	  else
	    {
	      /* We need a new block to hold the jump.  The simplest
		 way to do the bulk of the work here is to recursively
		 call ourselves.  */
	      jump_block = split_edge (e);
	      e = jump_block->succ;
	    }

	  /* Now add the jump insn ...  */
	  pos = emit_jump_insn_after (gen_jump (old_succ->head),
				      jump_block->end);
	  jump_block->end = pos;
	  if (basic_block_for_insn)
	    set_block_for_insn (pos, jump_block);
	  emit_barrier_after (pos);

	  /* ... let jump know that label is in use, ...  */
	  JUMP_LABEL (pos) = old_succ->head;
	  ++LABEL_NUSES (old_succ->head);

	  /* ... and clear fallthru on the outgoing edge.  */
	  e->flags &= ~EDGE_FALLTHRU;

	  /* Continue splitting the interesting edge.  */
	}
    }

  /* Place the new block just in front of the successor.  */
  VARRAY_GROW (basic_block_info, ++n_basic_blocks);
  if (old_succ == EXIT_BLOCK_PTR)
    j = n_basic_blocks - 1;
  else
    j = old_succ->index;
  for (i = n_basic_blocks - 1; i > j; --i)
    {
      basic_block tmp = BASIC_BLOCK (i - 1);
      BASIC_BLOCK (i) = tmp;
      tmp->index = i;
    }
  BASIC_BLOCK (i) = bb;
  bb->index = i;
  /* Create the basic block note.

     Where we place the note can have a noticeable impact on the generated
     code.  Consider this cfg:

		        E
			|
			0
		       / \
		   +->1-->2--->E
		   |  |
		   +--+

     If we need to insert an insn on the edge from block 0 to block 1,
     we want to ensure the instructions we insert are outside of any
     loop notes that physically sit between block 0 and block 1.  Otherwise
     we confuse the loop optimizer into thinking the loop is a phony.  */
  if (old_succ != EXIT_BLOCK_PTR
      && PREV_INSN (old_succ->head)
      && GET_CODE (PREV_INSN (old_succ->head)) == NOTE
      && NOTE_LINE_NUMBER (PREV_INSN (old_succ->head)) == NOTE_INSN_LOOP_BEG)
    bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK,
				PREV_INSN (old_succ->head));
  else if (old_succ != EXIT_BLOCK_PTR)
    bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, old_succ->head);
  else
    bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
  NOTE_BASIC_BLOCK (bb_note) = bb;
  bb->head = bb->end = bb_note;

  /* Not quite simple -- for non-fallthru edges, we must adjust the
     predecessor's jump instruction to target our new block.  */
  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
    {
      rtx tmp, insn = old_pred->end;
      rtx old_label = old_succ->head;
      rtx new_label = gen_label_rtx ();

      if (GET_CODE (insn) != JUMP_INSN)
	abort ();

      /* ??? Recognize a tablejump and adjust all matching cases.  */
      if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
	  && (tmp = NEXT_INSN (tmp)) != NULL_RTX
	  && GET_CODE (tmp) == JUMP_INSN
	  && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
	      || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
	{
	  rtvec vec;
	  int j;

	  if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
	    vec = XVEC (PATTERN (tmp), 0);
	  else
	    vec = XVEC (PATTERN (tmp), 1);

	  for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
	    if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
	      {
		RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (VOIDmode, new_label);
		--LABEL_NUSES (old_label);
		++LABEL_NUSES (new_label);
	      }

	  /* Handle casesi dispatch insns */
	  if ((tmp = single_set (insn)) != NULL
	      && SET_DEST (tmp) == pc_rtx
	      && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
	      && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
	      && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
	    {
	      XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (VOIDmode,
							   new_label);
	      --LABEL_NUSES (old_label);
	      ++LABEL_NUSES (new_label);
	    }
	}
      else
	{
	  /* This would have indicated an abnormal edge.  */
	  if (computed_jump_p (insn))
	    abort ();

	  /* A return instruction can't be redirected.  */
	  if (returnjump_p (insn))
	    abort ();

	  /* If the insn doesn't go where we think, we're confused.  */
	  if (JUMP_LABEL (insn) != old_label)
	    abort ();

	  redirect_jump (insn, new_label);
	}

      emit_label_before (new_label, bb_note);
      bb->head = new_label;
    }

  return bb;
}
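/* split_edge is also the fallback used by commit_one_edge_insertion below
   when neither endpoint of an edge can safely hold queued instructions.  */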
/* Queue instructions for insertion on an edge between two basic blocks.
   The new instructions and basic blocks (if any) will not appear in the
   CFG until commit_edge_insertions is called.  */

void
insert_insn_on_edge (pattern, e)
     rtx pattern;
     edge e;
{
  /* We cannot insert instructions on an abnormal critical edge.
     It will be easier to find the culprit if we die now.  */
  if ((e->flags & (EDGE_ABNORMAL|EDGE_CRITICAL))
      == (EDGE_ABNORMAL|EDGE_CRITICAL))
    abort ();

  if (e->insns == NULL_RTX)
    start_sequence ();
  else
    push_to_sequence (e->insns);

  emit_insn (pattern);

  e->insns = get_insns ();
  end_sequence ();
}
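/* A typical use, sketched with an illustrative register copy (the pass
   and the registers here are hypothetical; gen_move_insn is the expr.h
   routine for generating a move):

	insert_insn_on_edge (gen_move_insn (new_reg, old_reg), e);
	...
	commit_edge_insertions ();

   The insns accumulate on e->insns until commit_edge_insertions decides
   where they physically belong.  */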
/* Update the CFG for the instructions queued on edge E.  */

static void
commit_one_edge_insertion (e)
     edge e;
{
  rtx before = NULL_RTX, after = NULL_RTX, insns, tmp;
  basic_block bb;

  /* Pull the insns off the edge now since the edge might go away.  */
  insns = e->insns;
  e->insns = NULL_RTX;

  /* Figure out where to put these things.  If the destination has
     one predecessor, insert there.  Except for the exit block.  */
  if (e->dest->pred->pred_next == NULL
      && e->dest != EXIT_BLOCK_PTR)
    {
      bb = e->dest;

      /* Get the location correct wrt a code label, and "nice" wrt
	 a basic block note, and before everything else.  */
      tmp = bb->head;
      if (GET_CODE (tmp) == CODE_LABEL)
	tmp = NEXT_INSN (tmp);
      if (GET_CODE (tmp) == NOTE
	  && NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BASIC_BLOCK)
	tmp = NEXT_INSN (tmp);
      if (tmp == bb->head)
	before = tmp;
      else
	after = PREV_INSN (tmp);
    }

  /* If the source has one successor and the edge is not abnormal,
     insert there.  Except for the entry block.  */
  else if ((e->flags & EDGE_ABNORMAL) == 0
	   && e->src->succ->succ_next == NULL
	   && e->src != ENTRY_BLOCK_PTR)
    {
      bb = e->src;
      /* It is possible to have a non-simple jump here.  Consider a target
	 where some forms of unconditional jumps clobber a register.  This
	 happens on the fr30 for example.

	 We know this block has a single successor, so we can just emit
	 the queued insns before the jump.  */
      if (GET_CODE (bb->end) == JUMP_INSN)
	{
	  before = bb->end;
	}
      else
	{
	  /* We'd better be fallthru, or we've lost track of what's what.  */
	  if ((e->flags & EDGE_FALLTHRU) == 0)
	    abort ();

	  after = bb->end;
	}
    }

  /* Otherwise we must split the edge.  */
  else
    {
      bb = split_edge (e);
      after = bb->end;
    }

  /* Now that we've found the spot, do the insertion.  */

  /* Set the new block number for these insns, if structure is allocated.  */
  if (basic_block_for_insn)
    {
      rtx i;
      for (i = insns; i != NULL_RTX; i = NEXT_INSN (i))
	set_block_for_insn (i, bb);
    }

  if (before)
    {
      emit_insns_before (insns, before);
      if (before == bb->head)
	bb->head = insns;
    }
  else
    {
      rtx last = emit_insns_after (insns, after);
      if (after == bb->end)
	{
	  bb->end = last;

	  if (GET_CODE (last) == JUMP_INSN)
	    {
	      if (returnjump_p (last))
		{
		  /* ??? Remove all outgoing edges from BB and add one
		     for EXIT.  This is not currently a problem because
		     this only happens for the (single) epilogue, which
		     already has a fallthru edge to EXIT.  */

		  e = bb->succ;
		  if (e->dest != EXIT_BLOCK_PTR
		      || e->succ_next != NULL
		      || (e->flags & EDGE_FALLTHRU) == 0)
		    abort ();
		  e->flags &= ~EDGE_FALLTHRU;

		  emit_barrier_after (last);
		}
	      else
		abort ();
	    }
	}
    }
}
/* Update the CFG for all queued instructions.  */

void
commit_edge_insertions ()
{
  int i;
  basic_block bb;

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  i = -1;
  bb = ENTRY_BLOCK_PTR;
  while (1)
    {
      edge e, next;

      for (e = bb->succ; e ; e = next)
	{
	  next = e->succ_next;
	  if (e->insns)
	    commit_one_edge_insertion (e);
	}

      if (++i >= n_basic_blocks)
	break;
      bb = BASIC_BLOCK (i);
    }
}
/* Delete all unreachable basic blocks.  */

static void
delete_unreachable_blocks ()
{
  basic_block *worklist, *tos;
  int deleted_handler;
  edge e;
  int i, n;

  n = n_basic_blocks;
  tos = worklist = (basic_block *) xmalloc (sizeof (basic_block) * n);

  /* Use basic_block->aux as a marker.  Clear them all.  */

  for (i = 0; i < n; ++i)
    BASIC_BLOCK (i)->aux = NULL;

  /* Add our starting points to the worklist.  Almost always there will
     be only one.  It isn't inconceivable that we might one day directly
     support Fortran alternate entry points.  */

  for (e = ENTRY_BLOCK_PTR->succ; e ; e = e->succ_next)
    {
      *tos++ = e->dest;

      /* Mark the block with a handy non-null value.  */
      e->dest->aux = e;
    }

  /* Iterate: find everything reachable from what we've already seen.  */

  while (tos != worklist)
    {
      basic_block b = *--tos;

      for (e = b->succ; e ; e = e->succ_next)
	if (!e->dest->aux)
	  {
	    *tos++ = e->dest;
	    e->dest->aux = e;
	  }
    }

  /* Delete all unreachable basic blocks.  Count down so that we don't
     interfere with the block renumbering that happens in delete_block.  */

  deleted_handler = 0;

  for (i = n - 1; i >= 0; --i)
    {
      basic_block b = BASIC_BLOCK (i);

      if (b->aux != NULL)
	/* This block was found.  Tidy up the mark.  */
	b->aux = NULL;
      else
	deleted_handler |= delete_block (b);
    }

  tidy_fallthru_edges ();

  /* If we deleted an exception handler, we may have EH region begin/end
     blocks to remove as well.  */
  if (deleted_handler)
    delete_eh_regions ();

  free (worklist);
}
/* Find EH regions for which there is no longer a handler, and delete them.  */

static void
delete_eh_regions ()
{
  rtx insn;

  update_rethrow_references ();

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == NOTE)
      {
	if ((NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG) ||
	    (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
	  {
	    int num = NOTE_EH_HANDLER (insn);
	    /* A NULL handler indicates a region is no longer needed,
	       as long as its rethrow label isn't used.  */
	    if (get_first_handler (num) == NULL && ! rethrow_used (num))
	      {
		NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
		NOTE_SOURCE_FILE (insn) = 0;
	      }
	  }
      }
}
/* Return true if NOTE is not one of the ones that must be kept paired,
   so that we may simply delete them.  */

static int
can_delete_note_p (note)
     rtx note;
{
  return (NOTE_LINE_NUMBER (note) == NOTE_INSN_DELETED
	  || NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK);
}
/* Unlink a chain of insns between START and FINISH, leaving notes
   that must be paired.  */

void
flow_delete_insn_chain (start, finish)
     rtx start, finish;
{
  /* Unchain the insns one by one.  It would be quicker to delete all
     of these with a single unchaining, rather than one at a time, but
     we need to keep the NOTE's.  */

  rtx next;

  while (1)
    {
      next = NEXT_INSN (start);
      if (GET_CODE (start) == NOTE && !can_delete_note_p (start))
	;
      else if (GET_CODE (start) == CODE_LABEL && !can_delete_label_p (start))
	;
      else
	next = flow_delete_insn (start);

      if (start == finish)
	break;
      start = next;
    }
}
/* Delete the insns in a (non-live) block.  We physically delete every
   non-deleted-note insn, and update the flow graph appropriately.

   Return nonzero if we deleted an exception handler.  */

/* ??? Preserving all such notes strikes me as wrong.  It would be nice
   to post-process the stream to remove empty blocks, loops, ranges, etc.  */

static int
delete_block (b)
     basic_block b;
{
  int deleted_handler = 0;
  rtx insn, end, tmp;

  /* If the head of this block is a CODE_LABEL, then it might be the
     label for an exception handler which can't be reached.

     We need to remove the label from the exception_handler_label list
     and remove the associated NOTE_INSN_EH_REGION_BEG and
     NOTE_INSN_EH_REGION_END notes.  */

  insn = b->head;

  never_reached_warning (insn);

  if (GET_CODE (insn) == CODE_LABEL)
    {
      rtx x, *prev = &exception_handler_labels;

      for (x = exception_handler_labels; x; x = XEXP (x, 1))
	{
	  if (XEXP (x, 0) == insn)
	    {
	      /* Found a match, splice this label out of the EH label list.  */
	      *prev = XEXP (x, 1);
	      XEXP (x, 1) = NULL_RTX;
	      XEXP (x, 0) = NULL_RTX;

	      /* Remove the handler from all regions */
	      remove_handler (insn);
	      deleted_handler = 1;
	      break;
	    }
	  prev = &XEXP (x, 1);
	}

      /* This label may be referenced by code solely for its value, or
	 referenced by static data, or something.  We have determined
	 that it is not reachable, but cannot delete the label itself.
	 Save code space and continue to delete the balance of the block,
	 along with properly updating the cfg.  */
      if (!can_delete_label_p (insn))
	{
	  /* If we've only got one of these, skip the whole deleting
	     insns thing.  */
	  if (insn == b->end)
	    goto no_delete_insns;
	  insn = NEXT_INSN (insn);
	}
    }

  /* Include any jump table following the basic block.  */
  end = b->end;
  if (GET_CODE (end) == JUMP_INSN
      && (tmp = JUMP_LABEL (end)) != NULL_RTX
      && (tmp = NEXT_INSN (tmp)) != NULL_RTX
      && GET_CODE (tmp) == JUMP_INSN
      && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
	  || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
    end = tmp;

  /* Include any barrier that may follow the basic block.  */
  tmp = next_nonnote_insn (end);
  if (tmp && GET_CODE (tmp) == BARRIER)
    end = tmp;

  /* Selectively delete the entire chain.  */
  flow_delete_insn_chain (insn, end);

 no_delete_insns:

  /* Remove the edges into and out of this block.  Note that there may
     indeed be edges in, if we are removing an unreachable loop.  */
  {
    edge e, next, *q;

    for (e = b->pred; e ; e = next)
      {
	for (q = &e->src->succ; *q != e; q = &(*q)->succ_next)
	  continue;
	*q = e->succ_next;
	next = e->pred_next;
	n_edges--;
	free (e);
      }
    for (e = b->succ; e ; e = next)
      {
	for (q = &e->dest->pred; *q != e; q = &(*q)->pred_next)
	  continue;
	*q = e->pred_next;
	next = e->succ_next;
	n_edges--;
	free (e);
      }

    b->pred = NULL;
    b->succ = NULL;
  }

  /* Remove the basic block from the array, and compact behind it.  */
  expunge_block (b);

  return deleted_handler;
}
1973 /* Remove block B from the basic block array and compact behind it. */
1975 static void
1976 expunge_block (b)
1977 basic_block b;
1979 int i, n = n_basic_blocks;
1981 for (i = b->index; i + 1 < n; ++i)
1983 basic_block x = BASIC_BLOCK (i + 1);
1984 BASIC_BLOCK (i) = x;
1985 x->index = i;
1988 basic_block_info->num_elements--;
1989 n_basic_blocks--;
1992 /* Delete INSN by patching it out. Return the next insn. */
1994 rtx
1995 flow_delete_insn (insn)
1996 rtx insn;
1998 rtx prev = PREV_INSN (insn);
1999 rtx next = NEXT_INSN (insn);
2000 rtx note;
2002 PREV_INSN (insn) = NULL_RTX;
2003 NEXT_INSN (insn) = NULL_RTX;
2005 if (prev)
2006 NEXT_INSN (prev) = next;
2007 if (next)
2008 PREV_INSN (next) = prev;
2009 else
2010 set_last_insn (prev);
2012 if (GET_CODE (insn) == CODE_LABEL)
2013 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
2015 /* If deleting a jump, decrement the use count of the label. Deleting
2016 the label itself should happen in the normal course of block merging. */
2017 if (GET_CODE (insn) == JUMP_INSN && JUMP_LABEL (insn))
2018 LABEL_NUSES (JUMP_LABEL (insn))--;
2020 /* Also if deleting an insn that references a label. */
2021 else if ((note = find_reg_note (insn, REG_LABEL, NULL_RTX)) != NULL_RTX)
2022 LABEL_NUSES (XEXP (note, 0))--;
2024 return next;
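/* Illustrative example (not from the original source): deleting
   (jump_insn 17 ... (set (pc) (label_ref 25))) decrements LABEL_NUSES
   of code_label 25; once no uses remain, the label becomes a candidate
   for removal during block merging, per the comment above.  */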
2027 /* True if a given label can be deleted. */
2029 static int
2030 can_delete_label_p (label)
2031 rtx label;
2033 rtx x;
2035 if (LABEL_PRESERVE_P (label))
2036 return 0;
2038 for (x = forced_labels; x ; x = XEXP (x, 1))
2039 if (label == XEXP (x, 0))
2040 return 0;
2041 for (x = label_value_list; x ; x = XEXP (x, 1))
2042 if (label == XEXP (x, 0))
2043 return 0;
2044 for (x = exception_handler_labels; x ; x = XEXP (x, 1))
2045 if (label == XEXP (x, 0))
2046 return 0;
2048 /* User declared labels must be preserved. */
2049 if (LABEL_NAME (label) != 0)
2050 return 0;
2052 return 1;
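/* For example (illustrative only): a label whose address was taken with
   GNU C's &&lab sits on forced_labels, and a user-declared label has a
   nonzero LABEL_NAME; both are rejected above, since deleting them
   would break references that live analysis cannot see.  */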
2055 /* Blocks A and B are to be merged into a single block A. The insns
2056 are already contiguous, hence `nomove'. */
2058 void
2059 merge_blocks_nomove (a, b)
2060 basic_block a, b;
2062 edge e;
2063 rtx b_head, b_end, a_end;
2064 rtx del_first = NULL_RTX, del_last = NULL_RTX;
2065 int b_empty = 0;
2067 /* If there was a CODE_LABEL beginning B, delete it. */
2068 b_head = b->head;
2069 b_end = b->end;
2070 if (GET_CODE (b_head) == CODE_LABEL)
2072 /* Detect basic blocks with nothing but a label. This can happen
2073 in particular at the end of a function. */
2074 if (b_head == b_end)
2075 b_empty = 1;
2076 del_first = del_last = b_head;
2077 b_head = NEXT_INSN (b_head);
2080 /* Delete the basic block note. */
2081 if (GET_CODE (b_head) == NOTE
2082 && NOTE_LINE_NUMBER (b_head) == NOTE_INSN_BASIC_BLOCK)
2084 if (b_head == b_end)
2085 b_empty = 1;
2086 if (! del_last)
2087 del_first = b_head;
2088 del_last = b_head;
2089 b_head = NEXT_INSN (b_head);
2092 /* If there was a jump out of A, delete it. */
2093 a_end = a->end;
2094 if (GET_CODE (a_end) == JUMP_INSN)
2096 rtx prev;
2098 prev = prev_nonnote_insn (a_end);
2099 if (!prev)
2100 prev = a->head;
2102 del_first = a_end;
2104 #ifdef HAVE_cc0
2105 /* If this was a conditional jump, we need to also delete
2106 the insn that set cc0. */
2107 if (prev && sets_cc0_p (prev))
2109 rtx tmp = prev;
2110 prev = prev_nonnote_insn (prev);
2111 if (!prev)
2112 prev = a->head;
2113 del_first = tmp;
2115 #endif
2117 a_end = prev;
2120 /* Delete everything marked above as well as crap that might be
2121 hanging out between the two blocks. */
2122 flow_delete_insn_chain (del_first, del_last);
2124 /* Normally there should only be one successor of A and that is B, but
2125 partway through the merge of blocks for conditional_execution we'll
2126 be merging a TEST block with THEN and ELSE successors. Free the
2127 whole lot of them and hope the caller knows what they're doing. */
2128 while (a->succ)
2129 remove_edge (a->succ);
2131 /* Adjust the edges out of B for the new owner. */
2132 for (e = b->succ; e ; e = e->succ_next)
2133 e->src = a;
2134 a->succ = b->succ;
2136 /* B hasn't quite yet ceased to exist. Attempt to prevent mishap. */
2137 b->pred = b->succ = NULL;
2139 /* Reassociate the insns of B with A. */
2140 if (!b_empty)
2142 if (basic_block_for_insn)
2144 BLOCK_FOR_INSN (b_head) = a;
2145 while (b_head != b_end)
2147 b_head = NEXT_INSN (b_head);
2148 BLOCK_FOR_INSN (b_head) = a;
2151 a_end = b_end;
2153 a->end = a_end;
2155 expunge_block (b);
2158 /* Blocks A and B are to be merged into a single block. A has no incoming
2159 fallthru edge, so it can be moved before B without adding or modifying
2160 any jumps (aside from the jump from A to B). */
2162 static int
2163 merge_blocks_move_predecessor_nojumps (a, b)
2164 basic_block a, b;
2166 rtx start, end, barrier;
2167 int index;
2169 start = a->head;
2170 end = a->end;
2172 /* We want to delete the BARRIER after the end of the insns we are
2173 going to move. If we don't find a BARRIER, then do nothing. This
2174 can happen in some cases if we have labels we cannot delete.
2176 Similarly, do nothing if we cannot delete the label at the start
2177 of the target block. */
2178 barrier = next_nonnote_insn (end);
2179 if (GET_CODE (barrier) != BARRIER
2180 || (GET_CODE (b->head) == CODE_LABEL
2181 && ! can_delete_label_p (b->head)))
2182 return 0;
2183 else
2184 flow_delete_insn (barrier);
2186 /* Move block and loop notes out of the chain so that we do not
2187 disturb their order.
2189 ??? A better solution would be to squeeze out all the non-nested notes
2190 and adjust the block trees appropriately. Even better would be to have
2191 a tighter connection between block trees and rtl so that this is not
2192 necessary. */
2193 start = squeeze_notes (start, end);
2195 /* Scramble the insn chain. */
2196 if (end != PREV_INSN (b->head))
2197 reorder_insns (start, end, PREV_INSN (b->head));
2199 if (rtl_dump_file)
2201 fprintf (rtl_dump_file, "Moved block %d before %d and merged.\n",
2202 a->index, b->index);
2205 /* Swap the records for the two blocks around. Although we are deleting B,
2206 A is now where B was and we want to compact the BB array from where
2207 A used to be. */
2208 BASIC_BLOCK (a->index) = b;
2209 BASIC_BLOCK (b->index) = a;
2210 index = a->index;
2211 a->index = b->index;
2212 b->index = index;
2214 /* Now blocks A and B are contiguous. Merge them. */
2215 merge_blocks_nomove (a, b);
2217 return 1;
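/* Worked example (illustrative, not from the original source): if A is
   block 7 and B is block 3, the swap above leaves A in slot 3 and B in
   slot 7.  merge_blocks_nomove then expunges B, now indexed 7, so the
   array compacts over A's old position, which is what we want.  */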
2220 /* Blocks A and B are to be merged into a single block. B has no outgoing
2221 fallthru edge, so it can be moved after A without adding or modifying
2222 any jumps (aside from the jump from A to B). */
2224 static int
2225 merge_blocks_move_successor_nojumps (a, b)
2226 basic_block a, b;
2228 rtx start, end, barrier;
2230 start = b->head;
2231 end = b->end;
2233 /* We want to delete the BARRIER after the end of the insns we are
2234 going to move. If we don't find a BARRIER, then do nothing. This
2235 can happen in some cases if we have labels we cannot delete.
2237 Similarly, do nothing if we cannot delete the label at the start
2238 of the target block. */
2239 barrier = next_nonnote_insn (end);
2240 if (GET_CODE (barrier) != BARRIER
2241 || (GET_CODE (b->head) == CODE_LABEL
2242 && ! can_delete_label_p (b->head)))
2243 return 0;
2244 else
2245 flow_delete_insn (barrier);
2247 /* Move block and loop notes out of the chain so that we do not
2248 disturb their order.
2250 ??? A better solution would be to squeeze out all the non-nested notes
2251 and adjust the block trees appropriately. Even better would be to have
2252 a tighter connection between block trees and rtl so that this is not
2253 necessary. */
2254 start = squeeze_notes (start, end);
2256 /* Scramble the insn chain. */
2257 reorder_insns (start, end, a->end);
2259 /* Now blocks A and B are contiguous. Merge them. */
2260 merge_blocks_nomove (a, b);
2262 if (rtl_dump_file)
2264 fprintf (rtl_dump_file, "Moved block %d after %d and merged.\n",
2265 b->index, a->index);
2268 return 1;
2271 /* Attempt to merge basic blocks that are potentially non-adjacent.
2272 Return true iff the attempt succeeded. */
2274 static int
2275 merge_blocks (e, b, c)
2276 edge e;
2277 basic_block b, c;
2279 /* If B has a fallthru edge to C, no need to move anything. */
2280 if (e->flags & EDGE_FALLTHRU)
2282 /* If a label still appears somewhere and we cannot delete the label,
2283 then we cannot merge the blocks. The edge was tidied already. */
2285 rtx insn, stop = NEXT_INSN (c->head);
2286 for (insn = NEXT_INSN (b->end); insn != stop; insn = NEXT_INSN (insn))
2287 if (GET_CODE (insn) == CODE_LABEL && !can_delete_label_p (insn))
2288 return 0;
2290 merge_blocks_nomove (b, c);
2292 if (rtl_dump_file)
2294 fprintf (rtl_dump_file, "Merged %d and %d without moving.\n",
2295 b->index, c->index);
2298 return 1;
2300 else
2302 edge tmp_edge;
2303 basic_block d;
2304 int c_has_outgoing_fallthru;
2305 int b_has_incoming_fallthru;
2307 /* We must make sure to not munge nesting of exception regions,
2308 lexical blocks, and loop notes.
2310 The first is taken care of by requiring that the active eh
2311 region at the end of one block always matches the active eh
2312 region at the beginning of the next block.
2314 The latter two are taken care of by squeezing out all the notes. */
2316 /* ??? A throw/catch edge (or any abnormal edge) should be rarely
2317 executed and we may want to treat blocks which have two out
2318 edges, one normal, one abnormal as only having one edge for
2319 block merging purposes. */
2321 for (tmp_edge = c->succ; tmp_edge ; tmp_edge = tmp_edge->succ_next)
2322 if (tmp_edge->flags & EDGE_FALLTHRU)
2323 break;
2324 c_has_outgoing_fallthru = (tmp_edge != NULL);
2326 for (tmp_edge = b->pred; tmp_edge ; tmp_edge = tmp_edge->pred_next)
2327 if (tmp_edge->flags & EDGE_FALLTHRU)
2328 break;
2329 b_has_incoming_fallthru = (tmp_edge != NULL);
2331 /* If B does not have an incoming fallthru, and the exception regions
2332 match, then it can be moved immediately before C without introducing
2333 or modifying jumps.
2335 C cannot be the first block, so we do not have to worry about
2336 accessing a non-existent block. */
2337 d = BASIC_BLOCK (c->index - 1);
2338 if (! b_has_incoming_fallthru
2339 && d->eh_end == b->eh_beg
2340 && b->eh_end == c->eh_beg)
2341 return merge_blocks_move_predecessor_nojumps (b, c);
2343 /* Otherwise, we're going to try to move C after B. Make sure the
2344 exception regions match.
2346 If B is the last basic block, then we must not try to access the
2347 block structure for block B + 1. Luckily in that case we do not
2348 need to worry about matching exception regions. */
2349 d = (b->index + 1 < n_basic_blocks ? BASIC_BLOCK (b->index + 1) : NULL);
2350 if (b->eh_end == c->eh_beg
2351 && (d == NULL || c->eh_end == d->eh_beg))
2353 /* If C does not have an outgoing fallthru, then it can be moved
2354 immediately after B without introducing or modifying jumps. */
2355 if (! c_has_outgoing_fallthru)
2356 return merge_blocks_move_successor_nojumps (b, c);
2358 /* Otherwise, we'll need to insert an extra jump, and possibly
2359 a new block to contain it. */
2360 /* ??? Not implemented yet. */
2363 return 0;
2367 /* Top level driver for merge_blocks. */
2369 static void
2370 try_merge_blocks ()
2372 int i;
2374 /* Attempt to merge blocks as made possible by edge removal. If a block
2375 has only one successor, and the successor has only one predecessor,
2376 they may be combined. */
2378 for (i = 0; i < n_basic_blocks; )
2380 basic_block c, b = BASIC_BLOCK (i);
2381 edge s;
2383 /* A loop because chains of blocks might be combinable. */
2384 while ((s = b->succ) != NULL
2385 && s->succ_next == NULL
2386 && (s->flags & EDGE_EH) == 0
2387 && (c = s->dest) != EXIT_BLOCK_PTR
2388 && c->pred->pred_next == NULL
2389 /* If the jump insn has side effects, we can't kill the edge. */
2390 && (GET_CODE (b->end) != JUMP_INSN
2391 || onlyjump_p (b->end))
2392 && merge_blocks (s, b, c))
2393 continue;
2395 /* Don't get confused by the index shift caused by deleting blocks. */
2396 i = b->index + 1;
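/* For instance (an illustrative sketch): given a straight-line chain of
   blocks 4 -> 5 -> 6, the inner while first merges 5 into 4; since the
   combined block then has 6 as its lone successor, 6 is merged on the
   next iteration, all before I advances past the result.  */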
2400 /* The given edge should potentially be a fallthru edge. If that is in
2401 fact true, delete the jump and barriers that are in the way. */
2403 void
2404 tidy_fallthru_edge (e, b, c)
2405 edge e;
2406 basic_block b, c;
2408 rtx q;
2410 /* ??? In a late-running flow pass, other folks may have deleted basic
2411 blocks by nopping out blocks, leaving multiple BARRIERs between here
2412 and the target label. They ought to be chastised and fixed.
2414 We can also wind up with a sequence of undeletable labels between
2415 one block and the next.
2417 So search through a sequence of barriers, labels, and notes for
2418 the head of block C and assert that we really do fall through. */
2420 if (next_real_insn (b->end) != next_real_insn (PREV_INSN (c->head)))
2421 return;
2423 /* Remove what will soon cease being the jump insn from the source block.
2424 If block B consisted only of this single jump, turn it into a deleted
2425 note. */
2426 q = b->end;
2427 if (GET_CODE (q) == JUMP_INSN)
2429 #ifdef HAVE_cc0
2430 /* If this was a conditional jump, we need to also delete
2431 the insn that set cc0. */
2432 if (! simplejump_p (q) && condjump_p (q) && sets_cc0_p (PREV_INSN (q)))
2433 q = PREV_INSN (q);
2434 #endif
2436 if (b->head == q)
2438 PUT_CODE (q, NOTE);
2439 NOTE_LINE_NUMBER (q) = NOTE_INSN_DELETED;
2440 NOTE_SOURCE_FILE (q) = 0;
2442 else
2443 b->end = q = PREV_INSN (q);
2446 /* Selectively unlink the sequence. */
2447 if (q != PREV_INSN (c->head))
2448 flow_delete_insn_chain (NEXT_INSN (q), PREV_INSN (c->head));
2450 e->flags |= EDGE_FALLTHRU;
2453 /* Fix up edges that now fall through, or rather should now fall through
2454 but previously required a jump around now deleted blocks. Simplify
2455 the search by only examining blocks numerically adjacent, since this
2456 is how find_basic_blocks created them. */
2458 static void
2459 tidy_fallthru_edges ()
2461 int i;
2463 for (i = 1; i < n_basic_blocks; ++i)
2465 basic_block b = BASIC_BLOCK (i - 1);
2466 basic_block c = BASIC_BLOCK (i);
2467 edge s;
2469 /* We care about simple conditional or unconditional jumps with
2470 a single successor.
2472 If we had a conditional branch to the next instruction when
2473 find_basic_blocks was called, then there will only be one
2474 out edge for the block which ended with the conditional
2475 branch (since we do not create duplicate edges).
2477 Furthermore, the edge will be marked as a fallthru because we
2478 merge the flags for the duplicate edges. So we do not want to
2479 check that the edge is not a FALLTHRU edge. */
2480 if ((s = b->succ) != NULL
2481 && s->succ_next == NULL
2482 && s->dest == c
2483 /* If the jump insn has side effects, we can't tidy the edge. */
2484 && (GET_CODE (b->end) != JUMP_INSN
2485 || onlyjump_p (b->end)))
2486 tidy_fallthru_edge (s, b, c);
2490 /* Discover and record the loop depth at the head of each basic block. */
2492 void
2493 calculate_loop_depth (dump)
2494 FILE *dump;
2496 struct loops loops;
2498 /* The loop infrastructure does the real job for us. */
2499 flow_loops_find (&loops);
2501 if (dump)
2502 flow_loops_dump (&loops, dump, 0);
2504 flow_loops_free (&loops);
2507 /* Perform data flow analysis.
2508 F is the first insn of the function and NREGS the number of register numbers
2509 in use. */
2511 void
2512 life_analysis (f, nregs, file, remove_dead_code)
2513 rtx f;
2514 int nregs;
2515 FILE *file;
2516 int remove_dead_code;
2518 #ifdef ELIMINABLE_REGS
2519 register int i;
2520 static struct {int from, to; } eliminables[] = ELIMINABLE_REGS;
2521 #endif
2522 int flags;
2523 sbitmap all_blocks;
2525 /* Dead code elimination changes basic block structure and therefore
2526 breaks the SSA phi representation. In particular, a phi node
2527 can have an alternative value for each incoming block, referenced
2528 by the block number. Removing dead code can bump entire blocks
2529 and therefore cause blocks to be renumbered, invalidating the
2530 numbering of phi alternatives. */
2531 if (remove_dead_code && in_ssa_form)
2532 abort ();
2534 /* Record which registers will be eliminated. We use this in
2535 mark_used_regs. */
2537 CLEAR_HARD_REG_SET (elim_reg_set);
2539 #ifdef ELIMINABLE_REGS
2540 for (i = 0; i < (int) (sizeof eliminables / sizeof eliminables[0]); i++)
2541 SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
2542 #else
2543 SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
2544 #endif
2546 /* We want alias analysis information for local dead store elimination. */
2547 init_alias_analysis ();
2549 if (! optimize)
2550 flags = PROP_DEATH_NOTES | PROP_REG_INFO;
2551 else
2553 flags = PROP_FINAL;
2554 if (! remove_dead_code)
2555 flags &= ~(PROP_SCAN_DEAD_CODE | PROP_KILL_DEAD_CODE);
2558 /* The post-reload life analysis has (on a global basis) the same
2559 registers live as were computed by reload itself. Otherwise,
2560 elimination offsets and such may be incorrect.
2562 Reload will mark some registers as live even though they do not
2563 appear in the rtl. */
2564 if (reload_completed)
2565 flags &= ~PROP_REG_INFO;
2567 max_regno = nregs;
2569 /* Always remove no-op moves. Do this before other processing so
2570 that we don't have to keep re-scanning them. */
2571 delete_noop_moves (f);
2573 /* Some targets can emit simpler epilogues if they know that sp was
2574 not ever modified during the function. After reload, of course,
2575 we've already emitted the epilogue so there's no sense searching. */
2576 if (! reload_completed)
2577 notice_stack_pointer_modification (f);
2579 /* Allocate and zero out data structures that will record the
2580 data from lifetime analysis. */
2581 allocate_reg_life_data ();
2582 allocate_bb_life_data ();
2583 all_blocks = sbitmap_alloc (n_basic_blocks);
2584 sbitmap_ones (all_blocks);
2586 /* Find the set of registers live on function exit. */
2587 mark_regs_live_at_end (EXIT_BLOCK_PTR->global_live_at_start);
2589 /* "Update" life info from zero. It'd be nice to begin the
2590 relaxation with just the exit and noreturn blocks, but that set
2591 is not immediately handy. */
2593 if (flags & PROP_REG_INFO)
2594 memset (regs_ever_live, 0, sizeof (regs_ever_live));
2595 update_life_info (all_blocks, UPDATE_LIFE_GLOBAL, flags);
2597 /* Clean up. */
2598 sbitmap_free (all_blocks);
2599 end_alias_analysis ();
2601 if (file)
2602 dump_flow_info (file);
2604 free_basic_block_vars (1);
2607 /* A subroutine of verify_wide_reg, called through for_each_rtx.
2608 Search for REGNO. If found, abort if it is not wider than word_mode. */
2610 static int
2611 verify_wide_reg_1 (px, pregno)
2612 rtx *px;
2613 void *pregno;
2615 rtx x = *px;
2616 unsigned int regno = *(int *) pregno;
2618 if (GET_CODE (x) == REG && REGNO (x) == regno)
2620 if (GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD)
2621 abort ();
2622 return 1;
2624 return 0;
2627 /* A subroutine of verify_local_live_at_start. Search through insns
2628 between HEAD and END looking for register REGNO. */
2630 static void
2631 verify_wide_reg (regno, head, end)
2632 int regno;
2633 rtx head, end;
2635 while (1)
2637 if (GET_RTX_CLASS (GET_CODE (head)) == 'i'
2638 && for_each_rtx (&PATTERN (head), verify_wide_reg_1, &regno))
2639 return;
2640 if (head == end)
2641 break;
2642 head = NEXT_INSN (head);
2645 /* We didn't find the register at all. Something's way screwy. */
2646 abort ();
2649 /* A subroutine of update_life_info. Verify that there are no untoward
2650 changes in live_at_start during a local update. */
2652 static void
2653 verify_local_live_at_start (new_live_at_start, bb)
2654 regset new_live_at_start;
2655 basic_block bb;
2657 if (reload_completed)
2659 /* After reload, there are no pseudos, nor subregs of multi-word
2660 registers. The regsets should exactly match. */
2661 if (! REG_SET_EQUAL_P (new_live_at_start, bb->global_live_at_start))
2662 abort ();
2664 else
2666 int i;
2668 /* Find the set of changed registers. */
2669 XOR_REG_SET (new_live_at_start, bb->global_live_at_start);
2671 EXECUTE_IF_SET_IN_REG_SET (new_live_at_start, 0, i,
2673 /* No registers should die. */
2674 if (REGNO_REG_SET_P (bb->global_live_at_start, i))
2675 abort ();
2676 /* Verify that the now-live register is wider than word_mode. */
2677 verify_wide_reg (i, bb->head, bb->end);
2682 /* Updates life information starting with the basic blocks set in BLOCKS.
2684 If LOCAL_ONLY, such as after splitting or peepholeing, we are only
2685 expecting local modifications to basic blocks. If we find extra
2686 registers live at the beginning of a block, then we either killed
2687 useful data, or we have a broken split that wants data not provided.
2688 If we find registers removed from live_at_start, that means we have
2689 a broken peephole that is killing a register it shouldn't.
2691 ??? This is not true in one situation -- when a pre-reload splitter
2692 generates subregs of a multi-word pseudo, current life analysis will
2693 lose the kill. So we _can_ have a pseudo go live. How irritating.
2695 BLOCK_FOR_INSN is assumed to be correct.
2697 Including PROP_REG_INFO does not properly refresh regs_ever_live
2698 unless the caller resets it to zero. */
2700 void
2701 update_life_info (blocks, extent, prop_flags)
2702 sbitmap blocks;
2703 enum update_life_extent extent;
2704 int prop_flags;
2706 regset tmp;
2707 regset_head tmp_head;
2708 int i;
2710 tmp = INITIALIZE_REG_SET (tmp_head);
2712 /* For a global update, we go through the relaxation process again. */
2713 if (extent != UPDATE_LIFE_LOCAL)
2715 calculate_global_regs_live (blocks, blocks,
2716 prop_flags & PROP_SCAN_DEAD_CODE);
2718 /* If asked, remove notes from the blocks we'll update. */
2719 if (extent == UPDATE_LIFE_GLOBAL_RM_NOTES)
2720 count_or_remove_death_notes (blocks, 1);
2723 EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
2725 basic_block bb = BASIC_BLOCK (i);
2727 COPY_REG_SET (tmp, bb->global_live_at_end);
2728 propagate_block (bb, tmp, (regset) NULL, prop_flags);
2730 if (extent == UPDATE_LIFE_LOCAL)
2731 verify_local_live_at_start (tmp, bb);
2734 FREE_REG_SET (tmp);
2736 if (prop_flags & PROP_REG_INFO)
2738 /* The only pseudos that are live at the beginning of the function
2739 are those that were not set anywhere in the function. local-alloc
2740 doesn't know how to handle these correctly, so mark them as not
2741 local to any one basic block. */
2742 EXECUTE_IF_SET_IN_REG_SET (ENTRY_BLOCK_PTR->global_live_at_end,
2743 FIRST_PSEUDO_REGISTER, i,
2744 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
2746 /* We have a problem with any pseudoreg that lives across the setjmp.
2747 ANSI says that if a user variable does not change in value between
2748 the setjmp and the longjmp, then the longjmp preserves it. This
2749 includes longjmp from a place where the pseudo appears dead.
2750 (In principle, the value still exists if it is in scope.)
2751 If the pseudo goes in a hard reg, some other value may occupy
2752 that hard reg where this pseudo is dead, thus clobbering the pseudo.
2753 Conclusion: such a pseudo must not go in a hard reg. */
2754 EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
2755 FIRST_PSEUDO_REGISTER, i,
2757 if (regno_reg_rtx[i] != 0)
2759 REG_LIVE_LENGTH (i) = -1;
2760 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
2766 /* Free the variables allocated by find_basic_blocks.
2768 KEEP_HEAD_END_P is non-zero if basic_block_info is not to be freed. */
2770 void
2771 free_basic_block_vars (keep_head_end_p)
2772 int keep_head_end_p;
2774 if (basic_block_for_insn)
2776 VARRAY_FREE (basic_block_for_insn);
2777 basic_block_for_insn = NULL;
2780 if (! keep_head_end_p)
2782 clear_edges ();
2783 VARRAY_FREE (basic_block_info);
2784 n_basic_blocks = 0;
2786 ENTRY_BLOCK_PTR->aux = NULL;
2787 ENTRY_BLOCK_PTR->global_live_at_end = NULL;
2788 EXIT_BLOCK_PTR->aux = NULL;
2789 EXIT_BLOCK_PTR->global_live_at_start = NULL;
2793 /* Return nonzero if the destination of SET equals the source. */
2794 static int
2795 set_noop_p (set)
2796 rtx set;
2798 rtx src = SET_SRC (set);
2799 rtx dst = SET_DEST (set);
2801 if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
2803 if (SUBREG_WORD (src) != SUBREG_WORD (dst))
2804 return 0;
2805 src = SUBREG_REG (src);
2806 dst = SUBREG_REG (dst);
2809 return (GET_CODE (src) == REG && GET_CODE (dst) == REG
2810 && REGNO (src) == REGNO (dst));
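/* Examples (illustrative only): (set (reg:SI 58) (reg:SI 58)) is a
   no-op, as is (set (subreg:SI (reg:DI 60) 1) (subreg:SI (reg:DI 60) 1)).
   Copying word 0 of a DImode pseudo to word 1 of the same pseudo is
   not, which is why SUBREG_WORD must match above.  */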
2813 /* Return nonzero if an insn consists only of SETs, each of which only sets a
2814 value to itself. */
2815 static int
2816 noop_move_p (insn)
2817 rtx insn;
2819 rtx pat = PATTERN (insn);
2821 /* Insns carrying these notes are useful later on. */
2822 if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
2823 return 0;
2825 if (GET_CODE (pat) == SET && set_noop_p (pat))
2826 return 1;
2828 if (GET_CODE (pat) == PARALLEL)
2830 int i;
2831 /* If nothing but SETs of registers to themselves,
2832 this insn can also be deleted. */
2833 for (i = 0; i < XVECLEN (pat, 0); i++)
2835 rtx tem = XVECEXP (pat, 0, i);
2837 if (GET_CODE (tem) == USE
2838 || GET_CODE (tem) == CLOBBER)
2839 continue;
2841 if (GET_CODE (tem) != SET || ! set_noop_p (tem))
2842 return 0;
2845 return 1;
2847 return 0;
2850 /* Delete any insns that copy a register to itself. */
2852 static void
2853 delete_noop_moves (f)
2854 rtx f;
2856 rtx insn;
2857 for (insn = f; insn; insn = NEXT_INSN (insn))
2859 if (GET_CODE (insn) == INSN && noop_move_p (insn))
2861 PUT_CODE (insn, NOTE);
2862 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2863 NOTE_SOURCE_FILE (insn) = 0;
2868 /* Determine if the stack pointer is constant over the life of the function.
2869 Only useful before prologues have been emitted. */
2871 static void
2872 notice_stack_pointer_modification_1 (x, pat, data)
2873 rtx x;
2874 rtx pat ATTRIBUTE_UNUSED;
2875 void *data ATTRIBUTE_UNUSED;
2877 if (x == stack_pointer_rtx
2878 /* The stack pointer is only modified indirectly as the result
2879 of a push until later in flow. See the comments in rtl.texi
2880 regarding Embedded Side-Effects on Addresses. */
2881 || (GET_CODE (x) == MEM
2882 && (GET_CODE (XEXP (x, 0)) == PRE_DEC
2883 || GET_CODE (XEXP (x, 0)) == PRE_INC
2884 || GET_CODE (XEXP (x, 0)) == POST_DEC
2885 || GET_CODE (XEXP (x, 0)) == POST_INC)
2886 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx))
2887 current_function_sp_is_unchanging = 0;
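/* E.g. (an illustrative sketch): a push insn looks like
   (set (mem:SI (pre_dec:SI (reg:SI sp))) (reg:SI 0)), so the
   destination note_stores hands us is the MEM; the test above catches
   it through the embedded PRE_DEC even though sp is not the SET_DEST.  */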
2890 static void
2891 notice_stack_pointer_modification (f)
2892 rtx f;
2894 rtx insn;
2896 /* Assume that the stack pointer is unchanging if alloca hasn't
2897 been used. */
2898 current_function_sp_is_unchanging = !current_function_calls_alloca;
2899 if (! current_function_sp_is_unchanging)
2900 return;
2902 for (insn = f; insn; insn = NEXT_INSN (insn))
2904 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2906 /* Check if insn modifies the stack pointer. */
2907 note_stores (PATTERN (insn), notice_stack_pointer_modification_1,
2908 NULL);
2909 if (! current_function_sp_is_unchanging)
2910 return;
2915 /* Mark a register in SET. Hard registers in large modes get all
2916 of their component registers set as well. */
2917 static void
2918 mark_reg (reg, xset)
2919 rtx reg;
2920 void *xset;
2922 regset set = (regset) xset;
2923 int regno = REGNO (reg);
2925 if (GET_MODE (reg) == BLKmode)
2926 abort ();
2928 SET_REGNO_REG_SET (set, regno);
2929 if (regno < FIRST_PSEUDO_REGISTER)
2931 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
2932 while (--n > 0)
2933 SET_REGNO_REG_SET (set, regno + n);
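/* For example (illustrative only): on a target with 32-bit words, a
   DImode value in hard reg 10 occupies regs 10 and 11, so
   HARD_REGNO_NREGS (10, DImode) == 2 and the loop above sets bit 11
   as well.  A pseudo always gets exactly one bit.  */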
2937 /* Mark those regs which are needed at the end of the function as live
2938 at the end of the last basic block. */
2939 static void
2940 mark_regs_live_at_end (set)
2941 regset set;
2943 int i;
2945 /* If exiting needs the right stack value, consider the stack pointer
2946 live at the end of the function. */
2947 if ((HAVE_epilogue && reload_completed)
2948 || ! EXIT_IGNORE_STACK
2949 || (! FRAME_POINTER_REQUIRED
2950 && ! current_function_calls_alloca
2951 && flag_omit_frame_pointer)
2952 || current_function_sp_is_unchanging)
2954 SET_REGNO_REG_SET (set, STACK_POINTER_REGNUM);
2957 /* Mark the frame pointer if needed at the end of the function. If
2958 we end up eliminating it, it will be removed from the live list
2959 of each basic block by reload. */
2961 if (! reload_completed || frame_pointer_needed)
2963 SET_REGNO_REG_SET (set, FRAME_POINTER_REGNUM);
2964 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2965 /* If they are different, also mark the hard frame pointer as live. */
2966 SET_REGNO_REG_SET (set, HARD_FRAME_POINTER_REGNUM);
2967 #endif
2970 #ifdef PIC_OFFSET_TABLE_REGNUM
2971 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
2972 /* Many architectures have a GP register even without flag_pic.
2973 Assume the pic register is not in use, or will be handled by
2974 other means, if it is not fixed. */
2975 if (fixed_regs[PIC_OFFSET_TABLE_REGNUM])
2976 SET_REGNO_REG_SET (set, PIC_OFFSET_TABLE_REGNUM);
2977 #endif
2978 #endif
2980 /* Mark all global registers, and all registers used by the epilogue
2981 as being live at the end of the function since they may be
2982 referenced by our caller. */
2983 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2984 if (global_regs[i]
2985 #ifdef EPILOGUE_USES
2986 || EPILOGUE_USES (i)
2987 #endif
2989 SET_REGNO_REG_SET (set, i);
2991 /* Mark all call-saved registers that we actually used. */
2992 if (HAVE_epilogue && reload_completed)
2994 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2995 if (! call_used_regs[i] && regs_ever_live[i])
2996 SET_REGNO_REG_SET (set, i);
2999 /* Mark function return value. */
3000 diddle_return_value (mark_reg, set);
3003 /* Callback function for for_each_successor_phi. DATA is a regset.
3004 Sets the SRC_REGNO, the regno of the phi alternative for phi node
3005 INSN, in the regset. */
3007 static int
3008 set_phi_alternative_reg (insn, dest_regno, src_regno, data)
3009 rtx insn ATTRIBUTE_UNUSED;
3010 int dest_regno ATTRIBUTE_UNUSED;
3011 int src_regno;
3012 void *data;
3014 regset live = (regset) data;
3015 SET_REGNO_REG_SET (live, src_regno);
3016 return 0;
3019 /* Propagate global life info around the graph of basic blocks. Begin
3020 considering blocks with their corresponding bit set in BLOCKS_IN.
3021 BLOCKS_OUT is set for every block that was changed. */
3023 static void
3024 calculate_global_regs_live (blocks_in, blocks_out, flags)
3025 sbitmap blocks_in, blocks_out;
3026 int flags;
3028 basic_block *queue, *qhead, *qtail, *qend;
3029 regset tmp, new_live_at_end;
3030 regset_head tmp_head;
3031 regset_head new_live_at_end_head;
3032 int i;
3034 tmp = INITIALIZE_REG_SET (tmp_head);
3035 new_live_at_end = INITIALIZE_REG_SET (new_live_at_end_head);
3037 /* Create a worklist. Allocate an extra slot for ENTRY_BLOCK, and one more
3038 because the `head == tail' style test for an empty queue doesn't
3039 work with a full queue. */
3040 queue = (basic_block *) xmalloc ((n_basic_blocks + 2) * sizeof (*queue));
3041 qtail = queue;
3042 qhead = qend = queue + n_basic_blocks + 2;
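/* To make the arithmetic concrete (a sketch, not in the original
   source): with n_basic_blocks == 3 the ring has 5 slots.  Up to
   n_basic_blocks + 1 entries can be queued at once (every block plus
   ENTRY_BLOCK), and since qhead == qtail must mean "empty", one slot
   can never hold data; hence the two extra slots.  */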
3044 /* Clear out the garbage that might be hanging out in bb->aux. */
3045 for (i = n_basic_blocks - 1; i >= 0; --i)
3046 BASIC_BLOCK (i)->aux = NULL;
3048 /* Queue the blocks set in the initial mask. Do this in reverse block
3049 number order so that the first round is more likely to do
3050 useful work. We use AUX non-null to flag that the block is queued. */
3051 EXECUTE_IF_SET_IN_SBITMAP (blocks_in, 0, i,
3053 basic_block bb = BASIC_BLOCK (i);
3054 *--qhead = bb;
3055 bb->aux = bb;
3058 sbitmap_zero (blocks_out);
3060 while (qhead != qtail)
3062 int rescan, changed;
3063 basic_block bb;
3064 edge e;
3066 bb = *qhead++;
3067 if (qhead == qend)
3068 qhead = queue;
3069 bb->aux = NULL;
3071 /* Begin by propagating live_at_start from the successor blocks. */
3072 CLEAR_REG_SET (new_live_at_end);
3073 for (e = bb->succ; e ; e = e->succ_next)
3075 basic_block sb = e->dest;
3076 IOR_REG_SET (new_live_at_end, sb->global_live_at_start);
3079 /* Regs used in phi nodes are not included in
3080 global_live_at_start, since they are live only along a
3081 particular edge. Set those regs that are live because of a
3082 phi node alternative corresponding to this particular block. */
3083 for_each_successor_phi (bb->index, &set_phi_alternative_reg,
3084 new_live_at_end);
3086 if (bb == ENTRY_BLOCK_PTR)
3088 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3089 continue;
3092 /* On our first pass through this block, we'll go ahead and continue.
3093 Recognize the first pass by a NULL local_set. On subsequent passes, we
3094 get to skip out early if live_at_end wouldn't have changed. */
3096 if (bb->local_set == NULL)
3098 bb->local_set = OBSTACK_ALLOC_REG_SET (function_obstack);
3099 rescan = 1;
3101 else
3103 /* If any bits were removed from live_at_end, we'll have to
3104 rescan the block. This wouldn't be necessary if we had
3105 precalculated local_live, however with PROP_SCAN_DEAD_CODE
3106 local_live is really dependent on live_at_end. */
3107 CLEAR_REG_SET (tmp);
3108 rescan = bitmap_operation (tmp, bb->global_live_at_end,
3109 new_live_at_end, BITMAP_AND_COMPL);
3111 if (! rescan)
3113 /* Find the set of changed bits. Take this opportunity
3114 to notice when this set is empty and exit early. */
3115 CLEAR_REG_SET (tmp);
3116 changed = bitmap_operation (tmp, bb->global_live_at_end,
3117 new_live_at_end, BITMAP_XOR);
3118 if (! changed)
3119 continue;
3121 /* If any of the changed bits overlap with local_set,
3122 we'll have to rescan the block. Detect overlap by
3123 the AND with ~local_set turning off bits. */
3124 rescan = bitmap_operation (tmp, tmp, bb->local_set,
3125 BITMAP_AND_COMPL);
3129 /* Let our caller know that BB changed enough to require its
3130 death notes updated. */
3131 SET_BIT (blocks_out, bb->index);
3133 if (! rescan)
3135 /* Add to live_at_start the set of all registers in
3136 new_live_at_end that aren't in the old live_at_end. */
3138 bitmap_operation (tmp, new_live_at_end, bb->global_live_at_end,
3139 BITMAP_AND_COMPL);
3140 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3142 changed = bitmap_operation (bb->global_live_at_start,
3143 bb->global_live_at_start,
3144 tmp, BITMAP_IOR);
3145 if (! changed)
3146 continue;
3148 else
3150 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3152 /* Rescan the block insn by insn to turn (a copy of) live_at_end
3153 into live_at_start. */
3154 propagate_block (bb, new_live_at_end, bb->local_set, flags);
3156 /* If live_at_start didn't change, no need to go farther. */
3157 if (REG_SET_EQUAL_P (bb->global_live_at_start, new_live_at_end))
3158 continue;
3160 COPY_REG_SET (bb->global_live_at_start, new_live_at_end);
3163 /* Queue all predecessors of BB so that we may re-examine
3164 their live_at_end. */
3165 for (e = bb->pred; e ; e = e->pred_next)
3167 basic_block pb = e->src;
3168 if (pb->aux == NULL)
3170 *qtail++ = pb;
3171 if (qtail == qend)
3172 qtail = queue;
3173 pb->aux = pb;
3178 FREE_REG_SET (tmp);
3179 FREE_REG_SET (new_live_at_end);
3181 EXECUTE_IF_SET_IN_SBITMAP (blocks_out, 0, i,
3183 basic_block bb = BASIC_BLOCK (i);
3184 FREE_REG_SET (bb->local_set);
3187 free (queue);
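/* Restated as the usual backward dataflow equations (an illustrative
   summary, not part of the original source), the loop above iterates
   to a fixed point of

     live_at_end (b)   = union of live_at_start (s) over successors s
     live_at_start (b) = use (b) | (live_at_end (b) & ~local_set (b))

   where propagate_block applies the per-insn use/def effects and
   local_set caches the registers killed in B so that unchanged blocks
   can skip the expensive rescan.  */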
3190 /* Subroutines of life analysis. */
3192 /* Allocate the permanent data structures that represent the results
3193 of life analysis. Not static since used also for stupid life analysis. */
3195 void
3196 allocate_bb_life_data ()
3198 register int i;
3200 for (i = 0; i < n_basic_blocks; i++)
3202 basic_block bb = BASIC_BLOCK (i);
3204 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
3205 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);
3208 ENTRY_BLOCK_PTR->global_live_at_end
3209 = OBSTACK_ALLOC_REG_SET (function_obstack);
3210 EXIT_BLOCK_PTR->global_live_at_start
3211 = OBSTACK_ALLOC_REG_SET (function_obstack);
3213 regs_live_at_setjmp = OBSTACK_ALLOC_REG_SET (function_obstack);
3216 void
3217 allocate_reg_life_data ()
3219 int i;
3221 /* Recalculate the register space, in case it has grown. Old style
3222 vector oriented regsets would set regset_{size,bytes} here also. */
3223 allocate_reg_info (max_regno, FALSE, FALSE);
3225 /* Reset all the data we'll collect in propagate_block and its
3226 subroutines. */
3227 for (i = 0; i < max_regno; i++)
3229 REG_N_SETS (i) = 0;
3230 REG_N_REFS (i) = 0;
3231 REG_N_DEATHS (i) = 0;
3232 REG_N_CALLS_CROSSED (i) = 0;
3233 REG_LIVE_LENGTH (i) = 0;
3234 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
3238 /* Delete dead instructions for propagate_block. */
3240 static void
3241 propagate_block_delete_insn (bb, insn)
3242 basic_block bb;
3243 rtx insn;
3245 rtx inote = find_reg_note (insn, REG_LABEL, NULL_RTX);
3247 /* If the insn referred to a label, and that label was attached to
3248 an ADDR_VEC, it's safe to delete the ADDR_VEC. In fact, it's
3249 pretty much mandatory to delete it, because the ADDR_VEC may be
3250 referencing labels that no longer exist. */
3252 if (inote)
3254 rtx label = XEXP (inote, 0);
3255 rtx next;
3257 if (LABEL_NUSES (label) == 1
3258 && (next = next_nonnote_insn (label)) != NULL
3259 && GET_CODE (next) == JUMP_INSN
3260 && (GET_CODE (PATTERN (next)) == ADDR_VEC
3261 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
3263 rtx pat = PATTERN (next);
3264 int diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
3265 int len = XVECLEN (pat, diff_vec_p);
3266 int i;
3268 for (i = 0; i < len; i++)
3269 LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0))--;
3271 flow_delete_insn (next);
3275 if (bb->end == insn)
3276 bb->end = PREV_INSN (insn);
3277 flow_delete_insn (insn);
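/* Sketch of the case handled above (illustrative only): a dead
   tablejump carries a REG_LABEL note naming the jump table's label.
   If this insn was that label's only use, the ADDR_VEC following the
   label is deleted as well, after each of its own label references
   has its use count decremented.  */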
3280 /* Delete dead libcalls for propagate_block. Return the insn
3281 before the libcall. */
3283 static rtx
3284 propagate_block_delete_libcall (bb, insn, note)
3285 basic_block bb;
3286 rtx insn, note;
3288 rtx first = XEXP (note, 0);
3289 rtx before = PREV_INSN (first);
3291 if (insn == bb->end)
3292 bb->end = before;
3294 flow_delete_insn_chain (first, insn);
3295 return before;
3298 /* Compute the registers live at the beginning of a basic block BB from
3299 those live at the end.
3301 When called, REG_LIVE contains those live at the end. On return, it
3302 contains those live at the beginning.
3304 LOCAL_SET, if non-null, will be set with all registers killed by
3305 this basic block. */
3307 static void
3308 propagate_block (bb, live, local_set, flags)
3309 basic_block bb;
3310 regset live;
3311 regset local_set;
3312 int flags;
3314 struct propagate_block_info pbi;
3315 rtx insn, prev;
3316 regset_head tmp_head;
3317 regset tmp;
3319 pbi.bb = bb;
3320 pbi.reg_live = live;
3321 pbi.mem_set_list = NULL_RTX;
3322 pbi.local_set = local_set;
3323 pbi.cc0_live = 0;
3324 pbi.flags = flags;
3326 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
3327 pbi.reg_next_use = (rtx *) xcalloc (max_reg_num (), sizeof (rtx));
3328 else
3329 pbi.reg_next_use = NULL;
3331 tmp = INITIALIZE_REG_SET (tmp_head);
3333 if (flags & PROP_REG_INFO)
3335 register int i;
3337 /* Process the regs live at the end of the block.
3338 Mark them as not local to any one basic block. */
3339 EXECUTE_IF_SET_IN_REG_SET (live, 0, i,
3341 REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL;
3345 /* Scan the block an insn at a time from end to beginning. */
3347 for (insn = bb->end; ; insn = prev)
3349 prev = PREV_INSN (insn);
3351 if (GET_CODE (insn) == NOTE)
3353 /* If this is a call to `setjmp' et al,
3354 warn if any non-volatile datum is live. */
3356 if ((flags & PROP_REG_INFO)
3357 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
3358 IOR_REG_SET (regs_live_at_setjmp, pbi.reg_live);
3361 /* Update the life-status of regs for this insn.
3362 First DEAD gets which regs are set in this insn
3363 then LIVE gets which regs are used in this insn.
3364 Then the regs live before the insn
3365 are those live after, with DEAD regs turned off,
3366 and then LIVE regs turned on. */
3368 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
3370 register int i;
3371 rtx note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
3372 int insn_is_dead = 0;
3373 int libcall_is_dead = 0;
3375 if (flags & PROP_SCAN_DEAD_CODE)
3377 insn_is_dead = insn_dead_p (&pbi, PATTERN (insn), 0,
3378 REG_NOTES (insn));
3379 libcall_is_dead = (insn_is_dead && note != 0
3380 && libcall_dead_p (&pbi, PATTERN (insn),
3381 note, insn));
3384 /* We almost certainly don't want to delete prologue or epilogue
3385 instructions. Warn about probable compiler lossage. */
3386 if (insn_is_dead
3387 && reload_completed
3388 && (((HAVE_epilogue || HAVE_prologue)
3389 && prologue_epilogue_contains (insn))
3390 || (HAVE_sibcall_epilogue
3391 && sibcall_epilogue_contains (insn))))
3393 if (flags & PROP_KILL_DEAD_CODE)
3395 warning ("ICE: would have deleted prologue/epilogue insn");
3396 if (!inhibit_warnings)
3397 debug_rtx (insn);
3399 libcall_is_dead = insn_is_dead = 0;
3402 /* If an instruction consists of just dead store(s) on the final pass,
3403 delete it. */
3404 if ((flags & PROP_KILL_DEAD_CODE) && insn_is_dead)
3406 if (libcall_is_dead)
3408 prev = propagate_block_delete_libcall (bb, insn, note);
3409 insn = NEXT_INSN (prev);
3411 else
3412 propagate_block_delete_insn (bb, insn);
3414 /* CC0 is now known to be dead. Either this insn used it,
3415 in which case it doesn't anymore, or clobbered it,
3416 so the next insn can't use it. */
3417 pbi.cc0_live = 0;
3419 goto flushed;
3422 /* See if this is an increment or decrement that can be
3423 merged into a following memory address. */
3424 #ifdef AUTO_INC_DEC
3426 register rtx x = single_set (insn);
3428 /* Does this instruction increment or decrement a register? */
3429 if (!reload_completed
3430 && (flags & PROP_AUTOINC)
3431 && x != 0
3432 && GET_CODE (SET_DEST (x)) == REG
3433 && (GET_CODE (SET_SRC (x)) == PLUS
3434 || GET_CODE (SET_SRC (x)) == MINUS)
3435 && XEXP (SET_SRC (x), 0) == SET_DEST (x)
3436 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
3437 /* Ok, look for a following memory ref we can combine with.
3438 If one is found, change the memory ref to a PRE_INC
3439 or PRE_DEC, cancel this insn, and return 1.
3440 Return 0 if nothing has been done. */
3441 && try_pre_increment_1 (&pbi, insn))
3442 goto flushed;
3444 #endif /* AUTO_INC_DEC */
3446 CLEAR_REG_SET (tmp);
3448 /* If this is not the final pass, and this insn is copying the
3449 value of a library call and it's dead, don't scan the
3450 insns that perform the library call, so that the call's
3451 arguments are not marked live. */
3452 if (libcall_is_dead)
3454 /* Record the death of the dest reg. */
3455 mark_set_regs (&pbi, tmp, PATTERN (insn), insn);
3456 AND_COMPL_REG_SET (pbi.reg_live, tmp);
3458 insn = XEXP (note, 0);
3459 prev = PREV_INSN (insn);
3461 else if (GET_CODE (PATTERN (insn)) == SET
3462 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
3463 && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
3464 && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx
3465 && GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == CONST_INT)
3466 /* We have an insn to pop a constant amount off the stack.
3467 (Such insns use PLUS regardless of the direction of the stack,
3468 and any insn to adjust the stack by a constant is always a pop.)
3469 These insns, if not dead stores, have no effect on life. */
3471 else
3473 /* Any regs live at the time of a call instruction
3474 must not go in a register clobbered by calls.
3475 Find all regs now live and record this for them. */
3477 if (GET_CODE (insn) == CALL_INSN
3478 && (flags & PROP_REG_INFO))
3479 EXECUTE_IF_SET_IN_REG_SET (pbi.reg_live, 0, i,
3480 { REG_N_CALLS_CROSSED (i)++; });
3482 /* Record sets. Do this even for dead instructions,
3483 since they would have killed the values if they hadn't
3484 been deleted. */
3485 mark_set_regs (&pbi, tmp, PATTERN (insn), insn);
3487 /* Each call clobbers all call-clobbered regs that are not
3488 global or fixed. Note that the function-value reg is a
3489 call-clobbered reg, and mark_set_regs has already had
3490 a chance to handle it. */
3491 if (GET_CODE (insn) == CALL_INSN)
3493 register int i;
3494 rtx cond;
3496 cond = NULL_RTX;
3497 if (GET_CODE (PATTERN (insn)) == COND_EXEC)
3498 cond = COND_EXEC_TEST (PATTERN (insn));
3500 /* Non-constant calls clobber memory. */
3501 if (! CONST_CALL_P (insn))
3502 free_EXPR_LIST_list (&pbi.mem_set_list);
3504 /* There may be extra registers to be clobbered. */
3505 for (note = CALL_INSN_FUNCTION_USAGE (insn);
3506 note;
3507 note = XEXP (note, 1))
3508 if (GET_CODE (XEXP (note, 0)) == CLOBBER)
3509 mark_set_1 (&pbi, tmp, XEXP (XEXP (note, 0), 0),
3510 cond, insn);
3512 /* Calls clobber all call-used registers that are not global or fixed. */
3513 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3514 if (call_used_regs[i] && ! global_regs[i]
3515 && ! fixed_regs[i])
3517 int dummy;
3518 mark_set_reg (&pbi, tmp,
3519 gen_rtx_REG (reg_raw_mode[i], i),
3520 cond, &dummy, &dummy);
3524 /* Update live for the registers killed. */
3525 AND_COMPL_REG_SET (pbi.reg_live, tmp);
3526 CLEAR_REG_SET (tmp);
3528 /* If an insn doesn't use CC0, it becomes dead since we
3529 assume that every insn clobbers it. So show it dead here;
3530 mark_used_regs will set it live if it is referenced. */
3531 pbi.cc0_live = 0;
3533 /* Record uses. */
3534 if (! insn_is_dead)
3535 mark_used_regs (&pbi, tmp, PATTERN (insn), NULL_RTX, insn);
3537 /* Sometimes we may have inserted something before INSN
3538 (such as a move) when we make an auto-inc. So ensure
3539 we will scan those insns. */
3540 #ifdef AUTO_INC_DEC
3541 prev = PREV_INSN (insn);
3542 #endif
3544 if (! insn_is_dead && GET_CODE (insn) == CALL_INSN)
3546 register int i;
3547 rtx note, cond;
3549 cond = NULL_RTX;
3550 if (GET_CODE (PATTERN (insn)) == COND_EXEC)
3551 cond = COND_EXEC_TEST (PATTERN (insn));
3553 for (note = CALL_INSN_FUNCTION_USAGE (insn);
3554 note;
3555 note = XEXP (note, 1))
3556 if (GET_CODE (XEXP (note, 0)) == USE)
3557 mark_used_regs (&pbi, tmp, XEXP (XEXP (note, 0), 0),
3558 cond, insn);
3560 /* The stack ptr is used (honorarily) by a CALL insn. */
3561 SET_REGNO_REG_SET (tmp, STACK_POINTER_REGNUM);
3563 /* Calls may also reference any of the global registers,
3564 so they are made live. */
3565 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3566 if (global_regs[i])
3567 mark_used_reg (&pbi, tmp,
3568 gen_rtx_REG (reg_raw_mode[i], i),
3569 cond, insn);
3572 /* Update live for the registers used. */
3573 IOR_REG_SET (pbi.reg_live, tmp);
3576 /* On final pass, update counts of how many insns in which
3577 each reg is live. */
3578 if (flags & PROP_REG_INFO)
3579 EXECUTE_IF_SET_IN_REG_SET (pbi.reg_live, 0, i,
3580 { REG_LIVE_LENGTH (i)++; });
3582 flushed:
3583 if (insn == bb->head)
3584 break;
3587 FREE_REG_SET (tmp);
3588 free_EXPR_LIST_list (&pbi.mem_set_list);
3590 if (pbi.reg_next_use)
3591 free (pbi.reg_next_use);
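/* Worked example (illustrative, not from the original source): for an
   insn (set (reg a) (plus (reg a) (reg b))) scanned with
   live = {a, c}, mark_set_regs records {a} as newly dead and
   mark_used_regs records {a, b} as used, so the live set becomes
   ({a, c} - {a}) | {a, b} = {a, b, c} ahead of the preceding insn.  */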
3595 /* Return 1 if X (the body of an insn, or part of it) is just dead stores
3596 (SET expressions whose destinations are registers dead after the insn).
3597 NEEDED is the regset that says which regs are alive after the insn.
3599 Unless CALL_OK is non-zero, an insn is needed if it contains a CALL.
3601 If X is the entire body of an insn, NOTES contains the reg notes
3602 pertaining to the insn. */
3604 static int
3605 insn_dead_p (pbi, x, call_ok, notes)
3606 struct propagate_block_info *pbi;
3607 rtx x;
3608 int call_ok;
3609 rtx notes ATTRIBUTE_UNUSED;
3611 enum rtx_code code = GET_CODE (x);
3613 #ifdef AUTO_INC_DEC
3614 /* If flow is invoked after reload, we must take existing AUTO_INC
3615 expressions into account. */
3616 if (reload_completed)
3618 for ( ; notes; notes = XEXP (notes, 1))
3620 if (REG_NOTE_KIND (notes) == REG_INC)
3622 int regno = REGNO (XEXP (notes, 0));
3624 /* Don't delete insns to set global regs. */
3625 if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
3626 || REGNO_REG_SET_P (pbi->reg_live, regno))
3627 return 0;
3631 #endif
3633 /* If setting something that's a reg or part of one,
3634 see if that register's altered value will be live. */
3636 if (code == SET)
3638 rtx r = SET_DEST (x);
3640 #ifdef HAVE_cc0
3641 if (GET_CODE (r) == CC0)
3642 return ! pbi->cc0_live;
3643 #endif
3645 /* A SET that is a subroutine call cannot be dead. */
3646 if (GET_CODE (SET_SRC (x)) == CALL)
3648 if (! call_ok)
3649 return 0;
3652 /* Don't eliminate loads from volatile memory or volatile asms. */
3653 else if (volatile_refs_p (SET_SRC (x)))
3654 return 0;
3656 if (GET_CODE (r) == MEM)
3658 rtx temp;
3660 if (MEM_VOLATILE_P (r))
3661 return 0;
3663 /* Walk the set of memory locations we are currently tracking
3664 and see if one is an identical match to this memory location.
3665 If so, this memory write is dead (remember, we're walking
3666 backwards from the end of the block to the start). */
3667 temp = pbi->mem_set_list;
3668 while (temp)
3670 if (rtx_equal_p (XEXP (temp, 0), r))
3671 return 1;
3672 temp = XEXP (temp, 1);
3675 else
3677 while (GET_CODE (r) == SUBREG
3678 || GET_CODE (r) == STRICT_LOW_PART
3679 || GET_CODE (r) == ZERO_EXTRACT)
3680 r = XEXP (r, 0);
3682 if (GET_CODE (r) == REG)
3684 int regno = REGNO (r);
3686 /* Obvious. */
3687 if (REGNO_REG_SET_P (pbi->reg_live, regno))
3688 return 0;
3690 /* If this is a hard register, verify that subsequent
3691 words are not needed. */
3692 if (regno < FIRST_PSEUDO_REGISTER)
3694 int n = HARD_REGNO_NREGS (regno, GET_MODE (r));
3696 while (--n > 0)
3697 if (REGNO_REG_SET_P (pbi->reg_live, regno+n))
3698 return 0;
3701 /* Don't delete insns to set global regs. */
3702 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
3703 return 0;
3705 /* Make sure insns to set the stack pointer aren't deleted. */
3706 if (regno == STACK_POINTER_REGNUM)
3707 return 0;
3709 /* Make sure insns to set the frame pointer aren't deleted. */
3710 if (regno == FRAME_POINTER_REGNUM
3711 && (! reload_completed || frame_pointer_needed))
3712 return 0;
3713 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3714 if (regno == HARD_FRAME_POINTER_REGNUM
3715 && (! reload_completed || frame_pointer_needed))
3716 return 0;
3717 #endif
3719 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3720 /* Make sure insns to set arg pointer are never deleted
3721 (if the arg pointer isn't fixed, there will be a USE
3722 for it, so we can treat it normally). */
3723 if (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
3724 return 0;
3725 #endif
3727 /* Otherwise, the set is dead. */
3728 return 1;
3733 /* If performing several activities, insn is dead if each activity
3734 is individually dead. Also, CLOBBERs and USEs can be ignored; a
3735 CLOBBER or USE that's inside a PARALLEL doesn't make the insn
3736 worth keeping. */
3737 else if (code == PARALLEL)
3739 int i = XVECLEN (x, 0);
3741 for (i--; i >= 0; i--)
3742 if (GET_CODE (XVECEXP (x, 0, i)) != CLOBBER
3743 && GET_CODE (XVECEXP (x, 0, i)) != USE
3744 && ! insn_dead_p (pbi, XVECEXP (x, 0, i), call_ok, NULL_RTX))
3745 return 0;
3747 return 1;
3750 /* A CLOBBER of a pseudo-register that is dead serves no purpose. That
3751 is not necessarily true for hard registers. */
3752 else if (code == CLOBBER && GET_CODE (XEXP (x, 0)) == REG
3753 && REGNO (XEXP (x, 0)) >= FIRST_PSEUDO_REGISTER
3754 && ! REGNO_REG_SET_P (pbi->reg_live, REGNO (XEXP (x, 0))))
3755 return 1;
3757 /* We do not check other CLOBBER or USE here. An insn consisting of just
3758 a CLOBBER or just a USE should not be deleted. */
3759 return 0;
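/* Two quick instances (illustrative only): (set (reg:SI 60) (reg:SI 61))
   is dead when pseudo 60 is not in reg_live at this point, and a store
   to a MEM already on mem_set_list is dead because a later insn in the
   block overwrites the same location before it can be read.  */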
3762 /* If X is the pattern of the last insn in a libcall, and assuming X is dead,
3763 return 1 if the entire library call is dead.
3764 This is true if X copies a register (hard or pseudo)
3765 and if the hard return reg of the call insn is dead.
3766 (The caller should have tested the destination of X already for death.)
3768 If this insn doesn't just copy a register, then we don't
3769 have an ordinary libcall. In that case, cse could not have
3770 managed to substitute the source for the dest later on,
3771 so we can assume the libcall is dead.
3773 NEEDED is the bit vector of pseudoregs live before this insn.
3774 NOTE is the REG_RETVAL note of the insn. INSN is the insn itself. */
3776 static int
3777 libcall_dead_p (pbi, x, note, insn)
3778 struct propagate_block_info *pbi;
3779 rtx x;
3780 rtx note;
3781 rtx insn;
3783 register RTX_CODE code = GET_CODE (x);
3785 if (code == SET)
3787 register rtx r = SET_SRC (x);
3788 if (GET_CODE (r) == REG)
3790 rtx call = XEXP (note, 0);
3791 rtx call_pat;
3792 register int i;
3794 /* Find the call insn. */
3795 while (call != insn && GET_CODE (call) != CALL_INSN)
3796 call = NEXT_INSN (call);
3798 /* If there is none, do nothing special,
3799 since ordinary death handling can understand these insns. */
3800 if (call == insn)
3801 return 0;
3803 /* See if the hard reg holding the value is dead.
3804 If this is a PARALLEL, find the call within it. */
3805 call_pat = PATTERN (call);
3806 if (GET_CODE (call_pat) == PARALLEL)
3808 for (i = XVECLEN (call_pat, 0) - 1; i >= 0; i--)
3809 if (GET_CODE (XVECEXP (call_pat, 0, i)) == SET
3810 && GET_CODE (SET_SRC (XVECEXP (call_pat, 0, i))) == CALL)
3811 break;
3813 /* This may be a library call that is returning a value
3814 via an invisible pointer. Do nothing special, since
3815 ordinary death handling can understand these insns. */
3816 if (i < 0)
3817 return 0;
3819 call_pat = XVECEXP (call_pat, 0, i);
3822 return insn_dead_p (pbi, call_pat, 1, REG_NOTES (call));
3825 return 1;
3828 /* Return 1 if register REGNO was used before it was set, i.e. if it is
3829 live at function entry. Don't count global register variables, variables
3830 in registers that can be used for function arg passing, or variables in
3831 fixed hard registers. */
3833 int
3834 regno_uninitialized (regno)
3835 int regno;
3837 if (n_basic_blocks == 0
3838 || (regno < FIRST_PSEUDO_REGISTER
3839 && (global_regs[regno]
3840 || fixed_regs[regno]
3841 || FUNCTION_ARG_REGNO_P (regno))))
3842 return 0;
3844 return REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno);
3847 /* 1 if register REGNO was alive at a place where `setjmp' was called
3848 and was set more than once or is an argument.
3849 Such regs may be clobbered by `longjmp'. */
3851 int
3852 regno_clobbered_at_setjmp (regno)
3853 int regno;
3855 if (n_basic_blocks == 0)
3856 return 0;
3858 return ((REG_N_SETS (regno) > 1
3859 || REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno))
3860 && REGNO_REG_SET_P (regs_live_at_setjmp, regno));
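/* The classic hazard (an illustrative sketch, not from the original
   source):

       i = 1;
       if (setjmp (buf))
         return i;
       g ();   where g () may call longjmp

   ANSI requires the return to see 1, since I does not change between
   setjmp and longjmp.  If I sat in a call-clobbered hard reg and looked
   dead inside g (), the reg could be reused and the value lost; the
   REG_LIVE_LENGTH == -1 marking in update_life_info steers such
   pseudos away from hard regs.  */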
3863 /* INSN references memory, possibly using autoincrement addressing modes.
3864 Find any entries on the mem_set_list that need to be invalidated due
3865 to an address change. */
3866 static void
3867 invalidate_mems_from_autoinc (pbi, insn)
3868 struct propagate_block_info *pbi;
3869 rtx insn;
3871 rtx note;
3872 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
3874 if (REG_NOTE_KIND (note) == REG_INC)
3876 rtx temp = pbi->mem_set_list;
3877 rtx prev = NULL_RTX;
3878 rtx next;
3880 while (temp)
3882 next = XEXP (temp, 1);
3883 if (reg_overlap_mentioned_p (XEXP (note, 0), XEXP (temp, 0)))
3885 /* Splice temp out of list. */
3886 if (prev)
3887 XEXP (prev, 1) = next;
3888 else
3889 pbi->mem_set_list = next;
3890 free_EXPR_LIST_node (temp);
3892 else
3893 prev = temp;
3894 temp = next;
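/* Example (illustrative only): suppose a later insn in this block
   stored to (mem:SI (reg:SI 3)) and that MEM is being tracked on
   mem_set_list.  If the insn scanned now carries a REG_INC note for
   reg 3, say from a (post_inc:SI (reg:SI 3)) address, the entry is
   spliced out above, because reg 3 no longer names that location
   earlier in the block.  */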
3900 /* Process the registers that are set within X. Their bits are set to
3901 1 in the regset NEW_DEAD, because they are dead prior to this insn.
3903 If INSN is nonzero, it is the insn being processed.
3905 FLAGS is the set of operations to perform. */
3907 static void
3908 mark_set_regs (pbi, new_dead, x, insn)
3909 struct propagate_block_info *pbi;
3910 regset new_dead;
3911 rtx x, insn;
3913 rtx cond = NULL_RTX;
3915 retry:
3916 switch (GET_CODE (x))
3918 case SET:
3919 case CLOBBER:
3920 mark_set_1 (pbi, new_dead, SET_DEST (x), cond, insn);
3921 return;
3923 case COND_EXEC:
3924 cond = COND_EXEC_TEST (x);
3925 x = COND_EXEC_CODE (x);
3926 goto retry;
3928 case PARALLEL:
3930 register int i;
3931 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
3933 rtx sub = XVECEXP (x, 0, i);
3934 switch (GET_CODE (sub))
3936 case COND_EXEC:
3937 if (cond != NULL_RTX)
3938 abort ();
3940 cond = COND_EXEC_TEST (sub);
3941 sub = COND_EXEC_CODE (sub);
3942 if (GET_CODE (sub) != SET && GET_CODE (sub) != CLOBBER)
3943 break;
3944 /* FALLTHRU */
3946 case SET:
3947 case CLOBBER:
3948 mark_set_1 (pbi, new_dead, SET_DEST (sub), cond, insn);
3949 break;
3951 default:
3952 break;
3955 break;
3958 default:
3959 break;
3963 /* Process a single SET rtx, X. */
3965 static void
3966 mark_set_1 (pbi, new_dead, reg, cond, insn)
3967 struct propagate_block_info *pbi;
3968 regset new_dead;
3969 rtx reg, cond, insn;
3971 register int regno = -1;
3972 int flags = pbi->flags;
3974 /* Some targets place small structures in registers for
3975 return values of functions. We have to detect this
3976 case specially here to get correct flow information. */
3977 if (GET_CODE (reg) == PARALLEL
3978 && GET_MODE (reg) == BLKmode)
3980 register int i;
3982 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
3983 mark_set_1 (pbi, new_dead, XVECEXP (reg, 0, i), cond, insn);
3984 return;
3987 /* Modifying just one hardware register of a multi-reg value
3988 or just a byte field of a register
3989 does not mean the value from before this insn is now dead.
3990 But it does mean liveness of that register at the end of the block
3991 is significant.
3993 Within mark_set_1, however, we treat it as if the register is
3994 indeed modified. mark_used_regs will, however, also treat this
3995 register as being used. Thus, we treat these insns as setting a
3996 new value for the register as a function of its old value. This
3997 causes LOG_LINKS to be made appropriately and this will help combine. */
3999 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
4000 || GET_CODE (reg) == SIGN_EXTRACT
4001 || GET_CODE (reg) == STRICT_LOW_PART)
4002 reg = XEXP (reg, 0);
4004 /* If this set is a MEM, then it kills any aliased writes.
4005 If this set is a REG, then it kills any MEMs which use the reg. */
4006 if (flags & PROP_SCAN_DEAD_CODE)
4008 if (GET_CODE (reg) == MEM || GET_CODE (reg) == REG)
4010 rtx temp = pbi->mem_set_list;
4011 rtx prev = NULL_RTX;
4012 rtx next;
4014 while (temp)
4016 next = XEXP (temp, 1);
4017 if ((GET_CODE (reg) == MEM
4018 && output_dependence (XEXP (temp, 0), reg))
4019 || (GET_CODE (reg) == REG
4020 && reg_overlap_mentioned_p (reg, XEXP (temp, 0))))
4022 /* Splice this entry out of the list. */
4023 if (prev)
4024 XEXP (prev, 1) = next;
4025 else
4026 pbi->mem_set_list = next;
4027 free_EXPR_LIST_node (temp);
4029 else
4030 prev = temp;
4031 temp = next;
4035 /* If the memory reference had embedded side effects (autoincrement
4036 address modes), then we may need to kill some entries on the
4037 memory set list. */
4038 if (insn && GET_CODE (reg) == MEM)
4039 invalidate_mems_from_autoinc (pbi, insn);
4041 if (GET_CODE (reg) == MEM && ! side_effects_p (reg)
4042 /* We do not know the size of a BLKmode store, so we do not track
4043 them for redundant store elimination. */
4044 && GET_MODE (reg) != BLKmode
4045 /* There are no REG_INC notes for SP, so we can't assume we'll see
4046 everything that invalidates it. To be safe, don't eliminate any
4047 stores through SP; none of them should be redundant anyway. */
4048 && ! reg_mentioned_p (stack_pointer_rtx, reg))
4049 pbi->mem_set_list = alloc_EXPR_LIST (0, reg, pbi->mem_set_list);
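/* Example of the tracking above (illustrative): the block is scanned
   backwards, so when (set (mem:SI (reg 70)) (reg 71)) is processed its
   MEM goes on mem_set_list; if an earlier insn in the block stores to
   the same address, the list entry shows that earlier store is
   overwritten before it can be read, so it may be deleted as dead.  */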
4052 if (GET_CODE (reg) == REG
4053 && (regno = REGNO (reg),
4054 ! (regno == FRAME_POINTER_REGNUM
4055 && (! reload_completed || frame_pointer_needed)))
4056 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4057 && ! (regno == HARD_FRAME_POINTER_REGNUM
4058 && (! reload_completed || frame_pointer_needed))
4059 #endif
4060 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4061 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
4062 #endif
4065 int some_was_live, some_was_dead;
4067 /* Perform the pbi data structure update. */
4068 if (! mark_set_reg (pbi, new_dead, reg, cond,
4069 &some_was_live, &some_was_dead))
4070 return;
4072 /* Additional data to record if this is the final pass. */
4073 if (flags & (PROP_LOG_LINKS | PROP_REG_INFO
4074 | PROP_DEATH_NOTES | PROP_AUTOINC))
4076 register rtx y;
4077 register int blocknum = pbi->bb->index;
4079 y = NULL_RTX;
4080 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4081 y = pbi->reg_next_use[regno];
4083 /* If this is a hard reg, record that this function uses the reg. */
4085 if (regno < FIRST_PSEUDO_REGISTER)
4087 register int i;
4088 int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (reg));
4090 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4091 for (i = regno; i < endregno; i++)
4093 /* The next use is no longer "next", since a store
4094 intervenes. */
4095 pbi->reg_next_use[i] = 0;
4098 if (flags & PROP_REG_INFO)
4099 for (i = regno; i < endregno; i++)
4101 regs_ever_live[i] = 1;
4102 REG_N_SETS (i)++;
4105 else
4107 /* The next use is no longer "next", since a store
4108 intervenes. */
4109 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4110 pbi->reg_next_use[regno] = 0;
4112 /* Keep track of which basic block each reg appears in. */
4114 if (flags & PROP_REG_INFO)
4116 if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
4117 REG_BASIC_BLOCK (regno) = blocknum;
4118 else if (REG_BASIC_BLOCK (regno) != blocknum)
4119 REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
4121 /* Count (weighted) references, stores, etc. This counts a
4122 register twice if it is modified, but that is correct. */
4123 REG_N_SETS (regno)++;
4124 REG_N_REFS (regno) += pbi->bb->loop_depth + 1;
4126 /* The insns where a reg is live are normally counted
4127 elsewhere, but we want the count to include the insn
4128 where the reg is set, and the normal counting mechanism
4129 would not count it. */
4130 REG_LIVE_LENGTH (regno)++;
4134 if (! some_was_dead)
4136 if (flags & PROP_LOG_LINKS)
4138 /* Make a logical link from the next following insn
4139 that uses this register, back to this insn.
4140 The following insns have already been processed.
4142 We don't build a LOG_LINK for hard registers contained
4143 in ASM_OPERANDs. If these registers get replaced,
4144 we might wind up changing the semantics of the insn,
4145 even if reload can make what appear to be valid
4146 assignments later. */
4147 if (y && (BLOCK_NUM (y) == blocknum)
4148 && (regno >= FIRST_PSEUDO_REGISTER
4149 || asm_noperands (PATTERN (y)) < 0))
4150 LOG_LINKS (y) = alloc_INSN_LIST (insn, LOG_LINKS (y));
4153 else if (! some_was_live)
4155 if (flags & PROP_REG_INFO)
4156 REG_N_DEATHS (REGNO (reg))++;
4158 if (flags & PROP_DEATH_NOTES)
4160 /* Note that dead stores have already been deleted
4161 when possible. If we get here, we have found a
4162 dead store that cannot be eliminated (because the
4163 same insn does something useful). Indicate this
4164 by marking the reg being set as dying here. */
4165 REG_NOTES (insn)
4166 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
4169 else
4171 if (flags & PROP_DEATH_NOTES)
4173 /* This is a case where we have a multi-word hard register
4174 and some, but not all, of the words of the register are
4175 needed in subsequent insns. Write REG_UNUSED notes
4176 for those parts that were not needed. This case should
4177 be rare. */
4179 int i;
4181 for (i = HARD_REGNO_NREGS (regno, GET_MODE (reg)) - 1;
4182 i >= 0; i--)
4183 if (! REGNO_REG_SET_P (pbi->reg_live, regno + i))
4184 REG_NOTES (insn)
4185 = (alloc_EXPR_LIST
4186 (REG_UNUSED,
4187 gen_rtx_REG (reg_raw_mode[regno + i], regno + i),
4188 REG_NOTES (insn)));
4193 else if (GET_CODE (reg) == REG)
4195 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4196 pbi->reg_next_use[regno] = 0;
4199 /* If this is the last pass and this is a SCRATCH, show it will be dying
4200 here and count it. */
4201 else if (GET_CODE (reg) == SCRATCH)
4203 if (flags & PROP_DEATH_NOTES)
4204 REG_NOTES (insn)
4205 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
4209 /* Update data structures for a (possibly conditional) store into REG.
4210 Return true if REG is now unconditionally dead. */
4212 static int
4213 mark_set_reg (pbi, new_dead, reg, cond, p_some_was_live, p_some_was_dead)
4214 struct propagate_block_info *pbi;
4215 regset new_dead;
4216 rtx reg;
4217 rtx cond ATTRIBUTE_UNUSED;
4218 int *p_some_was_live, *p_some_was_dead;
4220 int regno = REGNO (reg);
4221 int some_was_live = REGNO_REG_SET_P (pbi->reg_live, regno);
4222 int some_was_dead = ! some_was_live;
4224 /* Mark it as a significant register for this basic block. */
4225 if (pbi->local_set)
4226 SET_REGNO_REG_SET (pbi->local_set, regno);
4228 /* A hard reg in a wide mode may really be multiple registers.
4229 If so, mark all of them just like the first. */
4230 if (regno < FIRST_PSEUDO_REGISTER)
4232 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
4233 while (--n > 0)
4235 int regno_n = regno + n;
4236 int needed_regno = REGNO_REG_SET_P (pbi->reg_live, regno_n);
4237 if (pbi->local_set)
4238 SET_REGNO_REG_SET (pbi->local_set, regno_n);
4240 some_was_live |= needed_regno;
4241 some_was_dead |= ! needed_regno;
4245 *p_some_was_live = some_was_live;
4246 *p_some_was_dead = some_was_dead;
4248 /* The stack pointer is never dead. Well, not strictly true, but it's
4249 very difficult to tell from here. Hopefully combine_stack_adjustments
4250 will fix up the most egregious errors. */
4251 if (regno == STACK_POINTER_REGNUM)
4252 return 0;
4254 /* Mark it as dead before this insn. */
4255 SET_REGNO_REG_SET (new_dead, regno);
4256 if (regno < FIRST_PSEUDO_REGISTER)
4258 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
4259 while (--n > 0)
4260 SET_REGNO_REG_SET (new_dead, regno + n);
4263 /* Unconditionally dead. */
4264 return 1;
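/* Worked example (illustrative): on a 32-bit target, a DImode store to
   hard reg 0 covers regs 0 and 1.  If only reg 1 was live below the
   insn, SOME_WAS_LIVE and SOME_WAS_DEAD both come back nonzero, and
   mark_set_1 falls into the case that emits a partial REG_UNUSED note
   for reg 0 alone.  */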
4267 #ifdef AUTO_INC_DEC
4269 /* X is a MEM found in INSN. See if we can convert it into an auto-increment
4270 reference. */
4272 static void
4273 find_auto_inc (pbi, x, insn)
4274 struct propagate_block_info *pbi;
4275 rtx x;
4276 rtx insn;
4278 rtx addr = XEXP (x, 0);
4279 HOST_WIDE_INT offset = 0;
4280 rtx set;
4282 /* Here we detect use of an index register which might be good for
4283 postincrement, postdecrement, preincrement, or predecrement. */
4285 if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
4286 offset = INTVAL (XEXP (addr, 1)), addr = XEXP (addr, 0);
4288 if (GET_CODE (addr) == REG)
4290 register rtx y;
4291 register int size = GET_MODE_SIZE (GET_MODE (x));
4292 rtx use;
4293 rtx incr;
4294 int regno = REGNO (addr);
4296 /* Is the next use an increment that might make auto-increment? */
4297 if ((incr = pbi->reg_next_use[regno]) != 0
4298 && (set = single_set (incr)) != 0
4299 && GET_CODE (set) == SET
4300 && BLOCK_NUM (incr) == BLOCK_NUM (insn)
4301 /* Can't add side effects to jumps; if reg is spilled and
4302 reloaded, there's no way to store back the altered value. */
4303 && GET_CODE (insn) != JUMP_INSN
4304 && (y = SET_SRC (set), GET_CODE (y) == PLUS)
4305 && XEXP (y, 0) == addr
4306 && GET_CODE (XEXP (y, 1)) == CONST_INT
4307 && ((HAVE_POST_INCREMENT
4308 && (INTVAL (XEXP (y, 1)) == size && offset == 0))
4309 || (HAVE_POST_DECREMENT
4310 && (INTVAL (XEXP (y, 1)) == - size && offset == 0))
4311 || (HAVE_PRE_INCREMENT
4312 && (INTVAL (XEXP (y, 1)) == size && offset == size))
4313 || (HAVE_PRE_DECREMENT
4314 && (INTVAL (XEXP (y, 1)) == - size && offset == - size)))
4315 /* Make sure this reg appears only once in this insn. */
4316 && (use = find_use_as_address (PATTERN (insn), addr, offset),
4317 use != 0 && use != (rtx) 1))
4319 rtx q = SET_DEST (set);
4320 enum rtx_code inc_code = (INTVAL (XEXP (y, 1)) == size
4321 ? (offset ? PRE_INC : POST_INC)
4322 : (offset ? PRE_DEC : POST_DEC));
4324 if (dead_or_set_p (incr, addr))
4326 /* This is the simple case. Try to make the auto-inc. If
4327 we can't, we are done. Otherwise, we will do any
4328 needed updates below. */
4329 if (! validate_change (insn, &XEXP (x, 0),
4330 gen_rtx_fmt_e (inc_code, Pmode, addr),
4331 0))
4332 return;
4334 else if (GET_CODE (q) == REG
4335 /* PREV_INSN used here to check the semi-open interval
4336 [insn,incr). */
4337 && ! reg_used_between_p (q, PREV_INSN (insn), incr)
4338 /* We must also check for sets of q as q may be
4339 a call clobbered hard register and there may
4340 be a call between PREV_INSN (insn) and incr. */
4341 && ! reg_set_between_p (q, PREV_INSN (insn), incr))
4343 /* We have *p followed sometime later by q = p+size.
4344 Both p and q must be live afterward,
4345 and q is not used between INSN and its assignment.
4346 Change it to q = p, ...*q..., q = q+size.
4347 Then fall into the usual case. */
4348 rtx insns, temp;
4349 basic_block bb;
4351 start_sequence ();
4352 emit_move_insn (q, addr);
4353 insns = get_insns ();
4354 end_sequence ();
4356 bb = BLOCK_FOR_INSN (insn);
4357 for (temp = insns; temp; temp = NEXT_INSN (temp))
4358 set_block_for_insn (temp, bb);
4360 /* If we can't make the auto-inc, or can't make the
4361 replacement into Y, exit. There's no point in making
4362 the change below if we can't do the auto-inc and doing
4363 so is not correct in the pre-inc case. */
4365 validate_change (insn, &XEXP (x, 0),
4366 gen_rtx_fmt_e (inc_code, Pmode, q),
4367 1);
4368 validate_change (incr, &XEXP (y, 0), q, 1);
4369 if (! apply_change_group ())
4370 return;
4372 /* We now know we'll be doing this change, so emit the
4373 new insn(s) and do the updates. */
4374 emit_insns_before (insns, insn);
4376 if (BLOCK_FOR_INSN (insn)->head == insn)
4377 BLOCK_FOR_INSN (insn)->head = insns;
4379 /* INCR will become a NOTE and INSN won't contain a
4380 use of ADDR. If a use of ADDR was just placed in
4381 the insn before INSN, make that the next use.
4382 Otherwise, invalidate it. */
4383 if (GET_CODE (PREV_INSN (insn)) == INSN
4384 && GET_CODE (PATTERN (PREV_INSN (insn))) == SET
4385 && SET_SRC (PATTERN (PREV_INSN (insn))) == addr)
4386 pbi->reg_next_use[regno] = PREV_INSN (insn);
4387 else
4388 pbi->reg_next_use[regno] = 0;
4390 addr = q;
4391 regno = REGNO (q);
4393 /* REGNO is now used in INCR which is below INSN, but it
4394 previously wasn't live here. If we don't mark it as
4395 live, we'll put a REG_DEAD note for it on this insn,
4396 which is incorrect. */
4397 SET_REGNO_REG_SET (pbi->reg_live, regno);
4399 /* If there are any calls between INSN and INCR, show
4400 that REGNO now crosses them. */
4401 for (temp = insn; temp != incr; temp = NEXT_INSN (temp))
4402 if (GET_CODE (temp) == CALL_INSN)
4403 REG_N_CALLS_CROSSED (regno)++;
4405 else
4406 return;
4408 /* If we haven't returned, it means we were able to make the
4409 auto-inc, so update the status. First, record that this insn
4410 has an implicit side effect. */
4412 REG_NOTES (insn)
4413 = alloc_EXPR_LIST (REG_INC, addr, REG_NOTES (insn));
4415 /* Modify the old increment-insn to simply copy
4416 the already-incremented value of our register. */
4417 if (! validate_change (incr, &SET_SRC (set), addr, 0))
4418 abort ();
4420 /* If that makes it a no-op (copying the register into itself), delete
4421 it so it won't appear to be a "use" and a "set" of this
4422 register. */
4423 if (SET_DEST (set) == addr)
4425 PUT_CODE (incr, NOTE);
4426 NOTE_LINE_NUMBER (incr) = NOTE_INSN_DELETED;
4427 NOTE_SOURCE_FILE (incr) = 0;
4430 if (regno >= FIRST_PSEUDO_REGISTER)
4432 /* Count an extra reference to the reg. When a reg is
4433 incremented, spilling it is worse, so we want to make
4434 that less likely. */
4435 REG_N_REFS (regno) += pbi->bb->loop_depth + 1;
4437 /* Count the increment as a setting of the register,
4438 even though it isn't a SET in rtl. */
4439 REG_N_SETS (regno)++;
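/* Illustrative transformation (hypothetical pseudos): given
   HAVE_POST_INCREMENT and SImode (size 4),

     INSN:  (set (reg 80) (mem:SI (reg 79)))
     INCR:  (set (reg 79) (plus:SI (reg 79) (const_int 4)))

   the MEM address becomes (post_inc (reg 79)), a REG_INC note for
   (reg 79) is attached to INSN, and INCR collapses to a copy of the
   register into itself, so it is turned into a deleted note.  */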
4444 #endif /* AUTO_INC_DEC */
4446 static void
4447 mark_used_reg (pbi, new_live, reg, cond, insn)
4448 struct propagate_block_info *pbi;
4449 regset new_live;
4450 rtx reg;
4451 rtx cond ATTRIBUTE_UNUSED;
4452 rtx insn;
4454 int regno = REGNO (reg);
4455 int some_was_live = REGNO_REG_SET_P (pbi->reg_live, regno);
4456 int some_was_dead = ! some_was_live;
4458 SET_REGNO_REG_SET (new_live, regno);
4460 /* A hard reg in a wide mode may really be multiple registers.
4461 If so, mark all of them just like the first. */
4462 if (regno < FIRST_PSEUDO_REGISTER)
4464 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
4465 while (--n > 0)
4467 int regno_n = regno + n;
4468 int needed_regno = REGNO_REG_SET_P (pbi->reg_live, regno_n);
4470 SET_REGNO_REG_SET (new_live, regno_n);
4471 some_was_live |= needed_regno;
4472 some_was_dead |= ! needed_regno;
4476 if (pbi->flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4478 /* Record where each reg is used, so when the reg is set we know
4479 the next insn that uses it. */
4480 pbi->reg_next_use[regno] = insn;
4483 if (pbi->flags & PROP_REG_INFO)
4485 if (regno < FIRST_PSEUDO_REGISTER)
4487 /* If this is a register we are going to try to eliminate,
4488 don't mark it live here. If we are successful in
4489 eliminating it, it need not be live unless it is used for
4490 pseudos, in which case it will have been set live when it
4491 was allocated to the pseudos. If the register will not
4492 be eliminated, reload will set it live at that point.
4494 Otherwise, record that this function uses this register. */
4496 if (! TEST_HARD_REG_BIT (elim_reg_set, regno))
4498 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
4499 if (n == 0)
4500 n = 1;
4501 do
4502 regs_ever_live[regno + --n] = 1;
4503 while (n > 0);
4506 else
4508 /* Keep track of which basic block each reg appears in. */
4510 register int blocknum = pbi->bb->index;
4511 if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
4512 REG_BASIC_BLOCK (regno) = blocknum;
4513 else if (REG_BASIC_BLOCK (regno) != blocknum)
4514 REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
4516 /* Count (weighted) number of uses of each reg. */
4517 REG_N_REFS (regno) += pbi->bb->loop_depth + 1;
4521 /* Record and count the insns in which a reg dies. If it is used in
4522 this insn and was dead below the insn then it dies in this insn.
4523 If it was set in this insn, we do not make a REG_DEAD note;
4524 likewise if we already made such a note. */
4526 if ((pbi->flags & PROP_DEATH_NOTES)
4527 && some_was_dead
4528 && ! dead_or_set_p (insn, reg))
4530 int n;
4532 /* Check for the case where the register dying partially
4533 overlaps the register set by this insn. */
4534 if (regno < FIRST_PSEUDO_REGISTER
4535 && HARD_REGNO_NREGS (regno, GET_MODE (reg)) > 1)
4537 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
4538 while (--n >= 0)
4539 some_was_live |= dead_or_set_regno_p (insn, regno + n);
4542 /* If none of the words in X is needed, make a REG_DEAD note.
4543 Otherwise, we must make partial REG_DEAD notes. */
4544 if (! some_was_live)
4546 REG_NOTES (insn)
4547 = alloc_EXPR_LIST (REG_DEAD, reg, REG_NOTES (insn));
4548 REG_N_DEATHS (regno)++;
4550 else
4552 /* Don't make a REG_DEAD note for a part of a register
4553 that is set in the insn. */
4555 n = regno + HARD_REGNO_NREGS (regno, GET_MODE (reg)) - 1;
4556 for (; n >= regno; n--)
4557 if (!REGNO_REG_SET_P (pbi->reg_live, n)
4558 && ! dead_or_set_regno_p (insn, n))
4559 REG_NOTES (insn)
4560 = alloc_EXPR_LIST (REG_DEAD,
4561 gen_rtx_REG (reg_raw_mode[n], n),
4562 REG_NOTES (insn));
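/* Example of the partial-death case above (illustrative): a DImode use
   of hard regs 0-1 in an insn that also sets reg 1 gets a REG_DEAD
   note only for (reg 0); reg 1 is skipped because dead_or_set_regno_p
   is true for it.  */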
4567 /* Scan expression X and store a 1-bit in NEW_LIVE for each reg it uses.
4568 This is done assuming the registers needed from X are those that
4569 have 1-bits in PBI->REG_LIVE.
4571 INSN is the containing instruction. If INSN is dead, this function
4572 is not called. */
4574 static void
4575 mark_used_regs (pbi, new_live, x, cond, insn)
4576 struct propagate_block_info *pbi;
4577 regset new_live;
4578 rtx x, cond, insn;
4580 register RTX_CODE code;
4581 register int regno;
4582 int flags = pbi->flags;
4584 retry:
4585 code = GET_CODE (x);
4586 switch (code)
4588 case LABEL_REF:
4589 case SYMBOL_REF:
4590 case CONST_INT:
4591 case CONST:
4592 case CONST_DOUBLE:
4593 case PC:
4594 case ADDR_VEC:
4595 case ADDR_DIFF_VEC:
4596 return;
4598 #ifdef HAVE_cc0
4599 case CC0:
4600 pbi->cc0_live = 1;
4601 return;
4602 #endif
4604 case CLOBBER:
4605 /* If we are clobbering a MEM, mark any registers inside the address
4606 as being used. */
4607 if (GET_CODE (XEXP (x, 0)) == MEM)
4608 mark_used_regs (pbi, new_live, XEXP (XEXP (x, 0), 0), cond, insn);
4609 return;
4611 case MEM:
4612 /* Don't bother watching stores to mems if this is not the
4613 final pass. We'll not be deleting dead stores this round. */
4614 if (flags & PROP_SCAN_DEAD_CODE)
4616 /* Invalidate the data for the last MEM stored, but only if MEM is
4617 something that can be stored into. */
4618 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
4619 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
4620 ; /* needn't clear the memory set list */
4621 else
4623 rtx temp = pbi->mem_set_list;
4624 rtx prev = NULL_RTX;
4625 rtx next;
4627 while (temp)
4629 next = XEXP (temp, 1);
4630 if (anti_dependence (XEXP (temp, 0), x))
4632 /* Splice temp out of the list. */
4633 if (prev)
4634 XEXP (prev, 1) = next;
4635 else
4636 pbi->mem_set_list = next;
4637 free_EXPR_LIST_node (temp);
4639 else
4640 prev = temp;
4641 temp = next;
4645 /* If the memory reference had embedded side effects (autoincrement
4646 address modes), then we may need to kill some entries on the
4647 memory set list. */
4648 if (insn)
4649 invalidate_mems_from_autoinc (pbi, insn);
4652 #ifdef AUTO_INC_DEC
4653 if (flags & PROP_AUTOINC)
4654 find_auto_inc (pbi, x, insn);
4655 #endif
4656 break;
4658 case SUBREG:
4659 if (GET_CODE (SUBREG_REG (x)) == REG
4660 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER
4661 && (GET_MODE_SIZE (GET_MODE (x))
4662 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
4663 REG_CHANGES_SIZE (REGNO (SUBREG_REG (x))) = 1;
4665 /* While we're here, optimize this case. */
4666 x = SUBREG_REG (x);
4667 if (GET_CODE (x) != REG)
4668 goto retry;
4669 /* FALLTHRU */
4671 case REG:
4672 /* See a register other than being set => mark it as needed. */
4673 mark_used_reg (pbi, new_live, x, cond, insn);
4674 return;
4676 case SET:
4678 register rtx testreg = SET_DEST (x);
4679 int mark_dest = 0;
4681 /* If storing into MEM, don't show it as being used. But do
4682 show the address as being used. */
4683 if (GET_CODE (testreg) == MEM)
4685 #ifdef AUTO_INC_DEC
4686 if (flags & PROP_AUTOINC)
4687 find_auto_inc (pbi, testreg, insn);
4688 #endif
4689 mark_used_regs (pbi, new_live, XEXP (testreg, 0), cond, insn);
4690 mark_used_regs (pbi, new_live, SET_SRC (x), cond, insn);
4691 return;
4694 /* Storing in STRICT_LOW_PART is like storing in a reg
4695 in that this SET might be dead, so ignore it in TESTREG,
4696 but in some other ways it is like using the reg.
4698 Storing in a SUBREG or a bit field is like storing the entire
4699 register in that if the register's value is not used
4700 then this SET is not needed. */
4701 while (GET_CODE (testreg) == STRICT_LOW_PART
4702 || GET_CODE (testreg) == ZERO_EXTRACT
4703 || GET_CODE (testreg) == SIGN_EXTRACT
4704 || GET_CODE (testreg) == SUBREG)
4706 if (GET_CODE (testreg) == SUBREG
4707 && GET_CODE (SUBREG_REG (testreg)) == REG
4708 && REGNO (SUBREG_REG (testreg)) >= FIRST_PSEUDO_REGISTER
4709 && (GET_MODE_SIZE (GET_MODE (testreg))
4710 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (testreg)))))
4711 REG_CHANGES_SIZE (REGNO (SUBREG_REG (testreg))) = 1;
4713 /* Modifying a single register in an alternate mode
4714 does not use any of the old value. But these other
4715 ways of storing in a register do use the old value. */
4716 if (GET_CODE (testreg) == SUBREG
4717 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
4719 else
4720 mark_dest = 1;
4722 testreg = XEXP (testreg, 0);
4725 /* If this is a store into a register, recursively scan the
4726 value being stored. */
4728 if ((GET_CODE (testreg) == PARALLEL
4729 && GET_MODE (testreg) == BLKmode)
4730 || (GET_CODE (testreg) == REG
4731 && (regno = REGNO (testreg),
4732 ! (regno == FRAME_POINTER_REGNUM
4733 && (! reload_completed || frame_pointer_needed)))
4734 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4735 && ! (regno == HARD_FRAME_POINTER_REGNUM
4736 && (! reload_completed || frame_pointer_needed))
4737 #endif
4738 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4739 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
4740 #endif
4742 /* We used to exclude global_regs here, but that seems wrong.
4743 Storing in them is like storing in mem. */
4745 mark_used_regs (pbi, new_live, SET_SRC (x), cond, insn);
4746 if (mark_dest)
4747 mark_used_regs (pbi, new_live, SET_DEST (x), cond, insn);
4748 return;
4751 break;
4753 case ASM_OPERANDS:
4754 case UNSPEC_VOLATILE:
4755 case TRAP_IF:
4756 case ASM_INPUT:
4758 /* Traditional and volatile asm instructions must be considered to use
4759 and clobber all hard registers, all pseudo-registers and all of
4760 memory. So must TRAP_IF and UNSPEC_VOLATILE operations.
4762 Consider for instance a volatile asm that changes the fpu rounding
4763 mode. An insn should not be moved across this even if it only uses
4764 pseudo-regs because it might give an incorrectly rounded result.
4766 ?!? Unfortunately, marking all hard registers as live causes massive
4767 problems for the register allocator and marking all pseudos as live
4768 creates mountains of uninitialized variable warnings.
4770 So for now, just clear the memory set list and mark any regs
4771 we can find in ASM_OPERANDS as used. */
4772 if (code != ASM_OPERANDS || MEM_VOLATILE_P (x))
4773 free_EXPR_LIST_list (&pbi->mem_set_list);
4775 /* For all ASM_OPERANDS, we must traverse the vector of input operands.
4776 We can not just fall through here, since then we would be confused
4777 by the ASM_INPUT rtx inside ASM_OPERANDS, which does not indicate
4778 a traditional asm, unlike its normal usage. */
4779 if (code == ASM_OPERANDS)
4781 int j;
4783 for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
4784 mark_used_regs (pbi, new_live, ASM_OPERANDS_INPUT (x, j),
4785 cond, insn);
4787 break;
4790 case COND_EXEC:
4791 if (cond != NULL_RTX)
4792 abort ();
4794 mark_used_regs (pbi, new_live, COND_EXEC_TEST (x), NULL_RTX, insn);
4796 cond = COND_EXEC_TEST (x);
4797 x = COND_EXEC_CODE (x);
4798 goto retry;
4800 case PHI:
4801 /* We _do_not_ want to scan operands of phi nodes. Operands of
4802 a phi function are evaluated only when control reaches this
4803 block along a particular edge. Therefore, regs that appear
4804 as arguments to phi should not be added to the global live at
4805 start. */
4806 return;
4808 default:
4809 break;
4812 /* Recursively scan the operands of this expression. */
4815 register const char *fmt = GET_RTX_FORMAT (code);
4816 register int i;
4818 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4820 if (fmt[i] == 'e')
4822 /* Tail recursive case: save a function call level. */
4823 if (i == 0)
4825 x = XEXP (x, 0);
4826 goto retry;
4828 mark_used_regs (pbi, new_live, XEXP (x, i), cond, insn);
4830 else if (fmt[i] == 'E')
4832 register int j;
4833 for (j = 0; j < XVECLEN (x, i); j++)
4834 mark_used_regs (pbi, new_live, XVECEXP (x, i, j), cond, insn);
4840 #ifdef AUTO_INC_DEC
4842 static int
4843 try_pre_increment_1 (pbi, insn)
4844 struct propagate_block_info *pbi;
4845 rtx insn;
4847 /* Find the next use of this reg. If in same basic block,
4848 make it do pre-increment or pre-decrement if appropriate. */
4849 rtx x = single_set (insn);
4850 HOST_WIDE_INT amount = ((GET_CODE (SET_SRC (x)) == PLUS ? 1 : -1)
4851 * INTVAL (XEXP (SET_SRC (x), 1)));
4852 int regno = REGNO (SET_DEST (x));
4853 rtx y = pbi->reg_next_use[regno];
4854 if (y != 0
4855 && BLOCK_NUM (y) == BLOCK_NUM (insn)
4856 /* Don't do this if the reg dies, or gets set in y; a standard addressing
4857 mode would be better. */
4858 && ! dead_or_set_p (y, SET_DEST (x))
4859 && try_pre_increment (y, SET_DEST (x), amount))
4861 /* We have found a suitable auto-increment
4862 and already changed insn Y to do it.
4863 So flush this increment-instruction. */
4864 PUT_CODE (insn, NOTE);
4865 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
4866 NOTE_SOURCE_FILE (insn) = 0;
4867 /* Count a reference to this reg for the increment
4868 insn we are deleting. When a reg is incremented,
4869 spilling it is worse, so we want to make that
4870 less likely. */
4871 if (regno >= FIRST_PSEUDO_REGISTER)
4873 REG_N_REFS (regno) += pbi->bb->loop_depth + 1;
4874 REG_N_SETS (regno)++;
4876 return 1;
4878 return 0;
4881 /* Try to change INSN so that it does pre-increment or pre-decrement
4882 addressing on register REG in order to add AMOUNT to REG.
4883 AMOUNT is negative for pre-decrement.
4884 Returns 1 if the change could be made.
4885 This checks everything about the validity of the result of modifying INSN. */
4887 static int
4888 try_pre_increment (insn, reg, amount)
4889 rtx insn, reg;
4890 HOST_WIDE_INT amount;
4892 register rtx use;
4894 /* Nonzero if we can try to make a pre-increment or pre-decrement.
4895 For example, addl $4,r1; movl (r1),... can become movl +(r1),... */
4896 int pre_ok = 0;
4897 /* Nonzero if we can try to make a post-increment or post-decrement.
4898 For example, addl $4,r1; movl -4(r1),... can become movl (r1)+,...
4899 It is possible for both PRE_OK and POST_OK to be nonzero if the machine
4900 supports both pre-inc and post-inc, or both pre-dec and post-dec. */
4901 int post_ok = 0;
4903 /* Nonzero if the opportunity actually requires post-inc or post-dec. */
4904 int do_post = 0;
4906 /* From the sign of increment, see which possibilities are conceivable
4907 on this target machine. */
4908 if (HAVE_PRE_INCREMENT && amount > 0)
4909 pre_ok = 1;
4910 if (HAVE_POST_INCREMENT && amount > 0)
4911 post_ok = 1;
4913 if (HAVE_PRE_DECREMENT && amount < 0)
4914 pre_ok = 1;
4915 if (HAVE_POST_DECREMENT && amount < 0)
4916 post_ok = 1;
4918 if (! (pre_ok || post_ok))
4919 return 0;
4921 /* It is not safe to add a side effect to a jump insn
4922 because if the incremented register is spilled and must be reloaded
4923 there would be no way to store the incremented value back in memory. */
4925 if (GET_CODE (insn) == JUMP_INSN)
4926 return 0;
4928 use = 0;
4929 if (pre_ok)
4930 use = find_use_as_address (PATTERN (insn), reg, 0);
4931 if (post_ok && (use == 0 || use == (rtx) 1))
4933 use = find_use_as_address (PATTERN (insn), reg, -amount);
4934 do_post = 1;
4937 if (use == 0 || use == (rtx) 1)
4938 return 0;
4940 if (GET_MODE_SIZE (GET_MODE (use)) != (amount > 0 ? amount : - amount))
4941 return 0;
4943 /* See if this combination of instruction and addressing mode exists. */
4944 if (! validate_change (insn, &XEXP (use, 0),
4945 gen_rtx_fmt_e (amount > 0
4946 ? (do_post ? POST_INC : PRE_INC)
4947 : (do_post ? POST_DEC : PRE_DEC),
4948 Pmode, reg), 0))
4949 return 0;
4951 /* Record that this insn now has an implicit side effect on X. */
4952 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, reg, REG_NOTES (insn));
4953 return 1;
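/* Worked example (illustrative, in the style of the comment above):
   with AMOUNT == 4 and HAVE_PRE_INCREMENT, a use (mem:SI (reg 60))
   found by find_use_as_address at offset 0 satisfies the size check
   (GET_MODE_SIZE == 4 == AMOUNT), is rewritten to use
   (pre_inc (reg 60)), and a REG_INC note is attached to INSN.  */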
4956 #endif /* AUTO_INC_DEC */
4958 /* Find the place in the rtx X where REG is used as a memory address.
4959 Return the MEM rtx that so uses it.
4960 If PLUSCONST is nonzero, search instead for a memory address equivalent to
4961 (plus REG (const_int PLUSCONST)).
4963 If such an address does not appear, return 0.
4964 If REG appears more than once, or is used other than in such an address,
4965 return (rtx)1. */
4967 rtx
4968 find_use_as_address (x, reg, plusconst)
4969 register rtx x;
4970 rtx reg;
4971 HOST_WIDE_INT plusconst;
4973 enum rtx_code code = GET_CODE (x);
4974 const char *fmt = GET_RTX_FORMAT (code);
4975 register int i;
4976 register rtx value = 0;
4977 register rtx tem;
4979 if (code == MEM && XEXP (x, 0) == reg && plusconst == 0)
4980 return x;
4982 if (code == MEM && GET_CODE (XEXP (x, 0)) == PLUS
4983 && XEXP (XEXP (x, 0), 0) == reg
4984 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4985 && INTVAL (XEXP (XEXP (x, 0), 1)) == plusconst)
4986 return x;
4988 if (code == SIGN_EXTRACT || code == ZERO_EXTRACT)
4990 /* If REG occurs inside a MEM used in a bit-field reference,
4991 that is unacceptable. */
4992 if (find_use_as_address (XEXP (x, 0), reg, 0) != 0)
4993 return (rtx) (HOST_WIDE_INT) 1;
4996 if (x == reg)
4997 return (rtx) (HOST_WIDE_INT) 1;
4999 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5001 if (fmt[i] == 'e')
5003 tem = find_use_as_address (XEXP (x, i), reg, plusconst);
5004 if (value == 0)
5005 value = tem;
5006 else if (tem != 0)
5007 return (rtx) (HOST_WIDE_INT) 1;
5009 else if (fmt[i] == 'E')
5011 register int j;
5012 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
5014 tem = find_use_as_address (XVECEXP (x, i, j), reg, plusconst);
5015 if (value == 0)
5016 value = tem;
5017 else if (tem != 0)
5018 return (rtx) (HOST_WIDE_INT) 1;
5023 return value;
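/* Usage illustration (hypothetical rtl): for
     X = (set (reg 61) (mem:SI (plus (reg 60) (const_int 4))))
   find_use_as_address (X, (reg 60), 4) returns the MEM, while any
   additional occurrence of (reg 60) in X, or a use outside such an
   address, makes it return (rtx) 1 instead.  */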
5026 /* Write information about registers and basic blocks into FILE.
5027 This is part of making a debugging dump. */
5029 void
5030 dump_regset (r, outf)
5031 regset r;
5032 FILE *outf;
5034 int i;
5035 if (r == NULL)
5037 fputs (" (nil)", outf);
5038 return;
5041 EXECUTE_IF_SET_IN_REG_SET (r, 0, i,
5043 fprintf (outf, " %d", i);
5044 if (i < FIRST_PSEUDO_REGISTER)
5045 fprintf (outf, " [%s]",
5046 reg_names[i]);
5050 void
5051 debug_regset (r)
5052 regset r;
5054 dump_regset (r, stderr);
5055 putc ('\n', stderr);
5058 void
5059 dump_flow_info (file)
5060 FILE *file;
5062 register int i;
5063 static const char * const reg_class_names[] = REG_CLASS_NAMES;
5065 fprintf (file, "%d registers.\n", max_regno);
5066 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
5067 if (REG_N_REFS (i))
5069 enum reg_class class, altclass;
5070 fprintf (file, "\nRegister %d used %d times across %d insns",
5071 i, REG_N_REFS (i), REG_LIVE_LENGTH (i));
5072 if (REG_BASIC_BLOCK (i) >= 0)
5073 fprintf (file, " in block %d", REG_BASIC_BLOCK (i));
5074 if (REG_N_SETS (i))
5075 fprintf (file, "; set %d time%s", REG_N_SETS (i),
5076 (REG_N_SETS (i) == 1) ? "" : "s");
5077 if (REG_USERVAR_P (regno_reg_rtx[i]))
5078 fprintf (file, "; user var");
5079 if (REG_N_DEATHS (i) != 1)
5080 fprintf (file, "; dies in %d places", REG_N_DEATHS (i));
5081 if (REG_N_CALLS_CROSSED (i) == 1)
5082 fprintf (file, "; crosses 1 call");
5083 else if (REG_N_CALLS_CROSSED (i))
5084 fprintf (file, "; crosses %d calls", REG_N_CALLS_CROSSED (i));
5085 if (PSEUDO_REGNO_BYTES (i) != UNITS_PER_WORD)
5086 fprintf (file, "; %d bytes", PSEUDO_REGNO_BYTES (i));
5087 class = reg_preferred_class (i);
5088 altclass = reg_alternate_class (i);
5089 if (class != GENERAL_REGS || altclass != ALL_REGS)
5091 if (altclass == ALL_REGS || class == ALL_REGS)
5092 fprintf (file, "; pref %s", reg_class_names[(int) class]);
5093 else if (altclass == NO_REGS)
5094 fprintf (file, "; %s or none", reg_class_names[(int) class]);
5095 else
5096 fprintf (file, "; pref %s, else %s",
5097 reg_class_names[(int) class],
5098 reg_class_names[(int) altclass]);
5100 if (REGNO_POINTER_FLAG (i))
5101 fprintf (file, "; pointer");
5102 fprintf (file, ".\n");
5105 fprintf (file, "\n%d basic blocks, %d edges.\n", n_basic_blocks, n_edges);
5106 for (i = 0; i < n_basic_blocks; i++)
5108 register basic_block bb = BASIC_BLOCK (i);
5109 register edge e;
5111 fprintf (file, "\nBasic block %d: first insn %d, last %d, loop_depth %d.\n",
5112 i, INSN_UID (bb->head), INSN_UID (bb->end), bb->loop_depth);
5114 fprintf (file, "Predecessors: ");
5115 for (e = bb->pred; e ; e = e->pred_next)
5116 dump_edge_info (file, e, 0);
5118 fprintf (file, "\nSuccessors: ");
5119 for (e = bb->succ; e ; e = e->succ_next)
5120 dump_edge_info (file, e, 1);
5122 fprintf (file, "\nRegisters live at start:");
5123 dump_regset (bb->global_live_at_start, file);
5125 fprintf (file, "\nRegisters live at end:");
5126 dump_regset (bb->global_live_at_end, file);
5128 putc('\n', file);
5131 putc('\n', file);
5134 void
5135 debug_flow_info ()
5137 dump_flow_info (stderr);
5140 static void
5141 dump_edge_info (file, e, do_succ)
5142 FILE *file;
5143 edge e;
5144 int do_succ;
5146 basic_block side = (do_succ ? e->dest : e->src);
5148 if (side == ENTRY_BLOCK_PTR)
5149 fputs (" ENTRY", file);
5150 else if (side == EXIT_BLOCK_PTR)
5151 fputs (" EXIT", file);
5152 else
5153 fprintf (file, " %d", side->index);
5155 if (e->flags)
5157 static const char * const bitnames[] = {
5158 "fallthru", "crit", "ab", "abcall", "eh", "fake"
5160 int comma = 0;
5161 int i, flags = e->flags;
5163 fputc (' ', file);
5164 fputc ('(', file);
5165 for (i = 0; flags; i++)
5166 if (flags & (1 << i))
5168 flags &= ~(1 << i);
5170 if (comma)
5171 fputc (',', file);
5172 if (i < (int)(sizeof (bitnames) / sizeof (*bitnames)))
5173 fputs (bitnames[i], file);
5174 else
5175 fprintf (file, "%d", i);
5176 comma = 1;
5178 fputc (')', file);
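/* Output illustration: an edge with e->flags == 5 has bits 0 and 2 set
   and prints as " (fallthru,ab)", decoded via the BITNAMES table
   above; any flag bit beyond the table is printed as its bit number.  */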
5183 /* Print out one basic block with live information at start and end. */
5184 void
5185 dump_bb (bb, outf)
5186 basic_block bb;
5187 FILE *outf;
5189 rtx insn;
5190 rtx last;
5191 edge e;
5193 fprintf (outf, ";; Basic block %d, loop depth %d",
5194 bb->index, bb->loop_depth);
5195 if (bb->eh_beg != -1 || bb->eh_end != -1)
5196 fprintf (outf, ", eh regions %d/%d", bb->eh_beg, bb->eh_end);
5197 putc ('\n', outf);
5199 fputs (";; Predecessors: ", outf);
5200 for (e = bb->pred; e ; e = e->pred_next)
5201 dump_edge_info (outf, e, 0);
5202 putc ('\n', outf);
5204 fputs (";; Registers live at start:", outf);
5205 dump_regset (bb->global_live_at_start, outf);
5206 putc ('\n', outf);
5208 for (insn = bb->head, last = NEXT_INSN (bb->end);
5209 insn != last;
5210 insn = NEXT_INSN (insn))
5211 print_rtl_single (outf, insn);
5213 fputs (";; Registers live at end:", outf);
5214 dump_regset (bb->global_live_at_end, outf);
5215 putc ('\n', outf);
5217 fputs (";; Successors: ", outf);
5218 for (e = bb->succ; e; e = e->succ_next)
5219 dump_edge_info (outf, e, 1);
5220 putc ('\n', outf);
5223 void
5224 debug_bb (bb)
5225 basic_block bb;
5227 dump_bb (bb, stderr);
5230 void
5231 debug_bb_n (n)
5232 int n;
5234 dump_bb (BASIC_BLOCK(n), stderr);
5237 /* Like print_rtl, but also print out live information for the start of each
5238 basic block. */
5240 void
5241 print_rtl_with_bb (outf, rtx_first)
5242 FILE *outf;
5243 rtx rtx_first;
5245 register rtx tmp_rtx;
5247 if (rtx_first == 0)
5248 fprintf (outf, "(nil)\n");
5249 else
5251 int i;
5252 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
5253 int max_uid = get_max_uid ();
5254 basic_block *start = (basic_block *)
5255 xcalloc (max_uid, sizeof (basic_block));
5256 basic_block *end = (basic_block *)
5257 xcalloc (max_uid, sizeof (basic_block));
5258 enum bb_state *in_bb_p = (enum bb_state *)
5259 xcalloc (max_uid, sizeof (enum bb_state));
5261 for (i = n_basic_blocks - 1; i >= 0; i--)
5263 basic_block bb = BASIC_BLOCK (i);
5264 rtx x;
5266 start[INSN_UID (bb->head)] = bb;
5267 end[INSN_UID (bb->end)] = bb;
5268 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
5270 enum bb_state state = IN_MULTIPLE_BB;
5271 if (in_bb_p[INSN_UID(x)] == NOT_IN_BB)
5272 state = IN_ONE_BB;
5273 in_bb_p[INSN_UID(x)] = state;
5275 if (x == bb->end)
5276 break;
5280 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
5282 int did_output;
5283 basic_block bb;
5285 if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
5287 fprintf (outf, ";; Start of basic block %d, registers live:",
5288 bb->index);
5289 dump_regset (bb->global_live_at_start, outf);
5290 putc ('\n', outf);
5293 if (in_bb_p[INSN_UID(tmp_rtx)] == NOT_IN_BB
5294 && GET_CODE (tmp_rtx) != NOTE
5295 && GET_CODE (tmp_rtx) != BARRIER)
5296 fprintf (outf, ";; Insn is not within a basic block\n");
5297 else if (in_bb_p[INSN_UID(tmp_rtx)] == IN_MULTIPLE_BB)
5298 fprintf (outf, ";; Insn is in multiple basic blocks\n");
5300 did_output = print_rtl_single (outf, tmp_rtx);
5302 if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
5304 fprintf (outf, ";; End of basic block %d, registers live:\n",
5305 bb->index);
5306 dump_regset (bb->global_live_at_end, outf);
5307 putc ('\n', outf);
5310 if (did_output)
5311 putc ('\n', outf);
5314 free (start);
5315 free (end);
5316 free (in_bb_p);
5319 if (current_function_epilogue_delay_list != 0)
5321 fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
5322 for (tmp_rtx = current_function_epilogue_delay_list; tmp_rtx != 0;
5323 tmp_rtx = XEXP (tmp_rtx, 1))
5324 print_rtl_single (outf, XEXP (tmp_rtx, 0));
5328 /* Compute dominator relationships using new flow graph structures. */
5329 void
5330 compute_flow_dominators (dominators, post_dominators)
5331 sbitmap *dominators;
5332 sbitmap *post_dominators;
5334 int bb;
5335 sbitmap *temp_bitmap;
5336 edge e;
5337 basic_block *worklist, *workend, *qin, *qout;
5338 int qlen;
5340 /* Allocate a worklist array/queue. Entries are only added to the
5341 list if they were not already on the list. So the size is
5342 bounded by the number of basic blocks. */
5343 worklist = (basic_block *) xmalloc (sizeof (basic_block) * n_basic_blocks);
5344 workend = &worklist[n_basic_blocks];
5346 temp_bitmap = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
5347 sbitmap_vector_zero (temp_bitmap, n_basic_blocks);
5349 if (dominators)
5351 /* The optimistic setting of dominators requires us to put every
5352 block on the work list initially. */
5353 qin = qout = worklist;
5354 for (bb = 0; bb < n_basic_blocks; bb++)
5356 *qin++ = BASIC_BLOCK (bb);
5357 BASIC_BLOCK (bb)->aux = BASIC_BLOCK (bb);
5359 qlen = n_basic_blocks;
5360 qin = worklist;
5362 /* We want a maximal solution, so initially assume everything dominates
5363 everything else. */
5364 sbitmap_vector_ones (dominators, n_basic_blocks);
5366 /* Mark successors of the entry block so we can identify them below. */
5367 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
5368 e->dest->aux = ENTRY_BLOCK_PTR;
5370 /* Iterate until the worklist is empty. */
5371 while (qlen)
5373 /* Take the first entry off the worklist. */
5374 basic_block b = *qout++;
5375 if (qout >= workend)
5376 qout = worklist;
5377 qlen--;
5379 bb = b->index;
5381 /* Compute the intersection of the dominators of all the
5382 predecessor blocks.
5384 If one of the predecessor blocks is the ENTRY block, then the
5385 intersection of the dominators of the predecessor blocks is
5386 defined as the null set. We can identify such blocks by the
5387 special value in the AUX field in the block structure. */
5388 if (b->aux == ENTRY_BLOCK_PTR)
5390 /* Do not clear the aux field for blocks which are
5391 successors of the ENTRY block. That way we never
5392 add them to the worklist again.
5394 The intersect of dominators of the preds of this block is
5395 defined as the null set. */
5396 sbitmap_zero (temp_bitmap[bb]);
5398 else
5400 /* Clear the aux field of this block so it can be added to
5401 the worklist again if necessary. */
5402 b->aux = NULL;
5403 sbitmap_intersection_of_preds (temp_bitmap[bb], dominators, bb);
5406 /* Make sure each block always dominates itself. */
5407 SET_BIT (temp_bitmap[bb], bb);
5409 /* If the out state of this block changed, then we need to
5410 add the successors of this block to the worklist if they
5411 are not already on the worklist. */
5412 if (sbitmap_a_and_b (dominators[bb], dominators[bb], temp_bitmap[bb]))
5414 for (e = b->succ; e; e = e->succ_next)
5416 if (!e->dest->aux && e->dest != EXIT_BLOCK_PTR)
5418 *qin++ = e->dest;
5419 if (qin >= workend)
5420 qin = worklist;
5421 qlen++;
5423 e->dest->aux = e;
5430 if (post_dominators)
5432 /* The optimistic setting of dominators requires us to put every
5433 block on the work list initially. */
5434 qin = qout = worklist;
5435 for (bb = 0; bb < n_basic_blocks; bb++)
5437 *qin++ = BASIC_BLOCK (bb);
5438 BASIC_BLOCK (bb)->aux = BASIC_BLOCK (bb);
5440 qlen = n_basic_blocks;
5441 qin = worklist;
5443 /* We want a maximal solution, so initially assume everything post
5444 dominates everything else. */
5445 sbitmap_vector_ones (post_dominators, n_basic_blocks);
5447 /* Mark predecessors of the exit block so we can identify them below. */
5448 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
5449 e->src->aux = EXIT_BLOCK_PTR;
5451 /* Iterate until the worklist is empty. */
5452 while (qlen)
5454 /* Take the first entry off the worklist. */
5455 basic_block b = *qout++;
5456 if (qout >= workend)
5457 qout = worklist;
5458 qlen--;
5460 bb = b->index;
5462 /* Compute the intersection of the post dominators of all the
5463 successor blocks.
5465 If one of the successor blocks is the EXIT block, then the
5466 intersection of the dominators of the successor blocks is
5467 defined as the null set. We can identify such blocks by the
5468 special value in the AUX field in the block structure. */
5469 if (b->aux == EXIT_BLOCK_PTR)
5471 /* Do not clear the aux field for blocks which are
5472 predecessors of the EXIT block. That way we never
5473 add them to the worklist again.
5475 The intersect of dominators of the succs of this block is
5476 defined as the null set. */
5477 sbitmap_zero (temp_bitmap[bb]);
5479 else
5481 /* Clear the aux field of this block so it can be added to
5482 the worklist again if necessary. */
5483 b->aux = NULL;
5484 sbitmap_intersection_of_succs (temp_bitmap[bb],
5485 post_dominators, bb);
5488 /* Make sure each block always post dominates itself. */
5489 SET_BIT (temp_bitmap[bb], bb);
5491 /* If the out state of this block changed, then we need to
5492 add the predecessors of this block to the worklist if they
5493 are not already on the worklist. */
5494 if (sbitmap_a_and_b (post_dominators[bb],
5495 post_dominators[bb],
5496 temp_bitmap[bb]))
5498 for (e = b->pred; e; e = e->pred_next)
5500 if (!e->src->aux && e->src != ENTRY_BLOCK_PTR)
5502 *qin++ = e->src;
5503 if (qin >= workend)
5504 qin = worklist;
5505 qlen++;
5507 e->src->aux = e;
5514 free (worklist);
5515 free (temp_bitmap);
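/* Worked example (illustrative): for the diamond CFG

     0 -> 1, 0 -> 2, 1 -> 3, 2 -> 3

   the iteration settles at dom(0) = {0}, dom(1) = {0,1},
   dom(2) = {0,2} and dom(3) = {0,3}: block 3 intersects its
   predecessors' sets, {0,1} & {0,2} = {0}, then adds itself.  */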
5518 /* Given DOMINATORS, compute the immediate dominators into IDOM. */
5520 void
5521 compute_immediate_dominators (idom, dominators)
5522 int *idom;
5523 sbitmap *dominators;
5525 sbitmap *tmp;
5526 int b;
5528 tmp = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
5530 /* Begin with tmp(n) = dom(n) - { n }. */
5531 for (b = n_basic_blocks; --b >= 0; )
5533 sbitmap_copy (tmp[b], dominators[b]);
5534 RESET_BIT (tmp[b], b);
5537 /* Subtract out all of our dominator's dominators. */
5538 for (b = n_basic_blocks; --b >= 0; )
5540 sbitmap tmp_b = tmp[b];
5541 int s;
5543 for (s = n_basic_blocks; --s >= 0; )
5544 if (TEST_BIT (tmp_b, s))
5545 sbitmap_difference (tmp_b, tmp_b, tmp[s]);
5548 /* Find the one bit set in the bitmap and put it in the output array. */
5549 for (b = n_basic_blocks; --b >= 0; )
5551 int t;
5552 EXECUTE_IF_SET_IN_SBITMAP (tmp[b], 0, t, { idom[b] = t; });
5555 sbitmap_vector_free (tmp);
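/* Continuing the diamond example above: tmp(3) = dom(3) - {3} = {0};
   tmp(0) is empty, so nothing is subtracted, the single surviving bit
   is 0, and idom[3] = 0, as expected since neither block 1 nor block 2
   dominates block 3.  */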
5558 /* Count for a single SET rtx, X. */
5560 static void
5561 count_reg_sets_1 (x, loop_depth)
5562 rtx x;
5563 int loop_depth;
5565 register int regno;
5566 register rtx reg = SET_DEST (x);
5568 /* Find the register that's set/clobbered. */
5569 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
5570 || GET_CODE (reg) == SIGN_EXTRACT
5571 || GET_CODE (reg) == STRICT_LOW_PART)
5572 reg = XEXP (reg, 0);
5574 if (GET_CODE (reg) == PARALLEL
5575 && GET_MODE (reg) == BLKmode)
5577 register int i;
5578 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
5579 count_reg_sets_1 (XVECEXP (reg, 0, i), loop_depth);
5580 return;
5583 if (GET_CODE (reg) == REG)
5585 regno = REGNO (reg);
5586 if (regno >= FIRST_PSEUDO_REGISTER)
5588 /* Count (weighted) references, stores, etc. This counts a
5589 register twice if it is modified, but that is correct. */
5590 REG_N_SETS (regno)++;
5591 REG_N_REFS (regno) += loop_depth + 1;
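/* Weighting example (illustrative): a SET of pseudo 70 inside a loop
   nest with LOOP_DEPTH == 2 adds 1 to REG_N_SETS (70) and
   2 + 1 = 3 to REG_N_REFS (70), so registers referenced in deeper
   loops look more important to the register allocator.  */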
5596 /* Increment REG_N_SETS for each SET or CLOBBER found in X; also increment
5597 REG_N_REFS by the current loop depth for each SET or CLOBBER found. */
5599 static void
5600 count_reg_sets (x, loop_depth)
5601 rtx x;
5602 int loop_depth;
5604 register RTX_CODE code = GET_CODE (x);
5606 if (code == SET || code == CLOBBER)
5607 count_reg_sets_1 (x, loop_depth);
5608 else if (code == PARALLEL)
5610 register int i;
5611 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
5613 code = GET_CODE (XVECEXP (x, 0, i));
5614 if (code == SET || code == CLOBBER)
5615 count_reg_sets_1 (XVECEXP (x, 0, i), loop_depth);
5620 /* Increment REG_N_REFS by the current loop depth for each register
5621 reference found in X. */
5623 static void
5624 count_reg_references (x, loop_depth)
5625 rtx x;
5626 int loop_depth;
5628 register RTX_CODE code;
5630 retry:
5631 code = GET_CODE (x);
5632 switch (code)
5634 case LABEL_REF:
5635 case SYMBOL_REF:
5636 case CONST_INT:
5637 case CONST:
5638 case CONST_DOUBLE:
5639 case PC:
5640 case ADDR_VEC:
5641 case ADDR_DIFF_VEC:
5642 case ASM_INPUT:
5643 return;
5645 #ifdef HAVE_cc0
5646 case CC0:
5647 return;
5648 #endif
5650 case CLOBBER:
5651 /* If we are clobbering a MEM, mark any registers inside the address
5652 as being used. */
5653 if (GET_CODE (XEXP (x, 0)) == MEM)
5654 count_reg_references (XEXP (XEXP (x, 0), 0), loop_depth);
5655 return;
5657 case SUBREG:
5658 /* While we're here, optimize this case. */
5659 x = SUBREG_REG (x);
5661 /* In case the SUBREG is not of a register, don't optimize. */
5662 if (GET_CODE (x) != REG)
5664 count_reg_references (x, loop_depth);
5665 return;
5668 /* ... fall through ... */
5670 case REG:
5671 if (REGNO (x) >= FIRST_PSEUDO_REGISTER)
5672 REG_N_REFS (REGNO (x)) += loop_depth + 1;
5673 return;
5675 case SET:
5677 register rtx testreg = SET_DEST (x);
5678 int mark_dest = 0;
5680 /* If storing into MEM, don't show it as being used. But do
5681 show the address as being used. */
5682 if (GET_CODE (testreg) == MEM)
5684 count_reg_references (XEXP (testreg, 0), loop_depth);
5685 count_reg_references (SET_SRC (x), loop_depth);
5686 return;
5689 /* Storing in STRICT_LOW_PART is like storing in a reg
5690 in that this SET might be dead, so ignore it in TESTREG.
5691 but in some other ways it is like using the reg.
5693 Storing in a SUBREG or a bit field is like storing the entire
5694 register in that if the register's value is not used
5695 then this SET is not needed. */
5696 while (GET_CODE (testreg) == STRICT_LOW_PART
5697 || GET_CODE (testreg) == ZERO_EXTRACT
5698 || GET_CODE (testreg) == SIGN_EXTRACT
5699 || GET_CODE (testreg) == SUBREG)
5701 /* Modifying a single register in an alternate mode
5702 does not use any of the old value. But these other
5703 ways of storing in a register do use the old value. */
5704 if (GET_CODE (testreg) == SUBREG
5705 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
5707 else
5708 mark_dest = 1;
5710 testreg = XEXP (testreg, 0);
5713 /* If this is a store into a register,
5714 recursively scan the value being stored. */
5716 if ((GET_CODE (testreg) == PARALLEL
5717 && GET_MODE (testreg) == BLKmode)
5718 || GET_CODE (testreg) == REG)
5720 count_reg_references (SET_SRC (x), loop_depth);
5721 if (mark_dest)
5722 count_reg_references (SET_DEST (x), loop_depth);
5723 return;
5726 break;
5728 default:
5729 break;
5732 /* Recursively scan the operands of this expression. */
5735 register const char *fmt = GET_RTX_FORMAT (code);
5736 register int i;
5738 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5740 if (fmt[i] == 'e')
5742 /* Tail recursive case: save a function call level. */
5743 if (i == 0)
5745 x = XEXP (x, 0);
5746 goto retry;
5748 count_reg_references (XEXP (x, i), loop_depth);
5750 else if (fmt[i] == 'E')
5752 register int j;
5753 for (j = 0; j < XVECLEN (x, i); j++)
5754 count_reg_references (XVECEXP (x, i, j), loop_depth);
5760 /* Recompute register set/reference counts immediately prior to register
5761 allocation.
5763 This avoids problems with set/reference counts changing to/from values
5764 which have special meanings to the register allocators.
5766 Additionally, the reference counts are the primary component used by the
5767 register allocators to prioritize pseudos for allocation to hard regs.
5768 More accurate reference counts generally lead to better register allocation.
5770 F is the first insn to be scanned.
5772 LOOP_STEP denotes how much loop_depth should be incremented per
5773 loop nesting level in order to increase the ref count more for
5774 references in a loop.
5776 It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and
5777 possibly other information which is used by the register allocators. */
5779 void
5780 recompute_reg_usage (f, loop_step)
5781 rtx f ATTRIBUTE_UNUSED;
5782 int loop_step ATTRIBUTE_UNUSED;
5784 rtx insn;
5785 int i, max_reg;
5786 int index;
5787 int loop_depth;
5789 /* Clear out the old data. */
5790 max_reg = max_reg_num ();
5791 for (i = FIRST_PSEUDO_REGISTER; i < max_reg; i++)
5793 REG_N_SETS (i) = 0;
5794 REG_N_REFS (i) = 0;
5797 /* Scan each insn in the chain and count how many times each register is
5798 set/used. */
5799 for (index = 0; index < n_basic_blocks; index++)
5801 basic_block bb = BASIC_BLOCK (index);
5802 loop_depth = bb->loop_depth;
5803 for (insn = bb->head; insn; insn = NEXT_INSN (insn))
5805 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
5807 rtx links;
5809 /* This call will increment REG_N_SETS for each SET or CLOBBER
5810 of a register in INSN. It will also increment REG_N_REFS
5811 by the loop depth for each set of a register in INSN. */
5812 count_reg_sets (PATTERN (insn), loop_depth);
5814 /* count_reg_sets does not detect autoincrement address modes, so
5815 detect them here by looking at the notes attached to INSN. */
5816 for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
5818 if (REG_NOTE_KIND (links) == REG_INC)
5819 /* Count (weighted) references, stores, etc. This
5820 counts a register twice if it is modified, but
5821 that is correct. */
5822 REG_N_SETS (REGNO (XEXP (links, 0)))++;
5825 /* This call will increment REG_N_REFS by the current loop depth
5826 for each reference to a register in INSN. */
5827 count_reg_references (PATTERN (insn), loop_depth);
5829 /* count_reg_references will not include counts for arguments to
5830 function calls, so detect them here by examining the
5831 CALL_INSN_FUNCTION_USAGE data. */
5832 if (GET_CODE (insn) == CALL_INSN)
5834 rtx note;
5836 for (note = CALL_INSN_FUNCTION_USAGE (insn);
5837 note;
5838 note = XEXP (note, 1))
5839 if (GET_CODE (XEXP (note, 0)) == USE)
5840 count_reg_references (XEXP (XEXP (note, 0), 0),
5841 loop_depth);
5844 if (insn == bb->end)
5845 break;
5850 /* Optionally removes all the REG_DEAD and REG_UNUSED notes from a set of
5851 blocks. If BLOCKS is NULL, assume the universal set. Returns a count
5852 of the number of registers that died. */
5854 int
5855 count_or_remove_death_notes (blocks, kill)
5856 sbitmap blocks;
5857 int kill;
5859 int i, count = 0;
5861 for (i = n_basic_blocks - 1; i >= 0; --i)
5863 basic_block bb;
5864 rtx insn;
5866 if (blocks && ! TEST_BIT (blocks, i))
5867 continue;
5869 bb = BASIC_BLOCK (i);
5871 for (insn = bb->head; ; insn = NEXT_INSN (insn))
5873 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
5875 rtx *pprev = &REG_NOTES (insn);
5876 rtx link = *pprev;
5878 while (link)
5880 switch (REG_NOTE_KIND (link))
5882 case REG_DEAD:
5883 if (GET_CODE (XEXP (link, 0)) == REG)
5885 rtx reg = XEXP (link, 0);
5886 int n;
5888 if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
5889 n = 1;
5890 else
5891 n = HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg));
5892 count += n;
5894 /* FALLTHRU */
5896 case REG_UNUSED:
5897 if (kill)
5899 rtx next = XEXP (link, 1);
5900 free_EXPR_LIST_node (link);
5901 *pprev = link = next;
5902 break;
5904 /* FALLTHRU */
5906 default:
5907 pprev = &XEXP (link, 1);
5908 link = *pprev;
5909 break;
5914 if (insn == bb->end)
5915 break;
5919 return count;
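/* Counting example (illustrative): a REG_DEAD note for a two-word hard
   register pair contributes 2 to the count and a note for a pseudo
   contributes 1; REG_UNUSED notes add nothing to the count, but with
   KILL nonzero both kinds of notes are spliced out and freed.  */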
5922 /* Record INSN's block as BB. */
5924 void
5925 set_block_for_insn (insn, bb)
5926 rtx insn;
5927 basic_block bb;
5929 size_t uid = INSN_UID (insn);
5930 if (uid >= basic_block_for_insn->num_elements)
5932 int new_size;
5934 /* Add one-eighth the size so we don't keep calling xrealloc. */
5935 new_size = uid + (uid + 7) / 8;
5937 VARRAY_GROW (basic_block_for_insn, new_size);
5939 VARRAY_BB (basic_block_for_insn, uid) = bb;
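/* Growth arithmetic (illustrative): for uid == 100 against a smaller
   varray, new_size = 100 + (100 + 7) / 8 = 113, so the array grows in
   chunks of roughly one-eighth rather than once per insn.  */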
5942 /* Record INSN's block number as BB. */
5943 /* ??? This has got to go. */
5945 void
5946 set_block_num (insn, bb)
5947 rtx insn;
5948 int bb;
5950 set_block_for_insn (insn, BASIC_BLOCK (bb));
5953 /* Verify the CFG consistency. This function checks some CFG invariants and
5954 aborts when something is wrong. The hope is that this function will help
5955 convert many optimization passes to preserve CFG consistency.
5957 Currently it does the following checks:
5959 - test head/end pointers
5960 - overlapping of basic blocks
5961 - edge list correctness
5962 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
5963 - tails of basic blocks (ensure that the boundary is necessary)
5964 - scans body of the basic block for JUMP_INSN, CODE_LABEL
5965 and NOTE_INSN_BASIC_BLOCK
5966 - check that all insns are in the basic blocks
5967 (except the switch handling code, barriers and notes)
5968 - check that all returns are followed by barriers
5970 In the future it can be extended to check a lot of other stuff as well
5971 (reachability of basic blocks, life information, etc.). */
5973 void
5974 verify_flow_info ()
5976 const int max_uid = get_max_uid ();
5977 const rtx rtx_first = get_insns ();
5978 basic_block *bb_info;
5979 rtx x;
5980 int i, err = 0;
5982 bb_info = (basic_block *) xcalloc (max_uid, sizeof (basic_block));
5984 /* First pass check head/end pointers and set bb_info array used by
5985 later passes. */
5986 for (i = n_basic_blocks - 1; i >= 0; i--)
5988 basic_block bb = BASIC_BLOCK (i);
5990 /* Check the head pointer and make sure that it is pointing into
5991 the insn list. */
5992 for (x = rtx_first; x != NULL_RTX; x = NEXT_INSN (x))
5993 if (x == bb->head)
5994 break;
5995 if (!x)
5997 error ("Head insn %d for block %d not found in the insn stream.",
5998 INSN_UID (bb->head), bb->index);
5999 err = 1;
6002 /* Check the end pointer and make sure that it is pointing into
6003 the insn list. */
6004 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
6006 if (bb_info[INSN_UID (x)] != NULL)
6008 error ("Insn %d is in multiple basic blocks (%d and %d)",
6009 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
6010 err = 1;
6012 bb_info[INSN_UID (x)] = bb;
6014 if (x == bb->end)
6015 break;
6017 if (!x)
6019 error ("End insn %d for block %d not found in the insn stream.",
6020 INSN_UID (bb->end), bb->index);
6021 err = 1;
6025 /* Now check the basic blocks (boundaries etc.) */
6026 for (i = n_basic_blocks - 1; i >= 0; i--)
6028 basic_block bb = BASIC_BLOCK (i);
6029 /* Check correctness of edge lists. */
6030 edge e;
6032 e = bb->succ;
6033 while (e)
6035 if (e->src != bb)
6037 fprintf (stderr, "verify_flow_info: Basic block %d succ edge is corrupted\n",
6038 bb->index);
6039 fprintf (stderr, "Predecessor: ");
6040 dump_edge_info (stderr, e, 0);
6041 fprintf (stderr, "\nSuccessor: ");
6042 dump_edge_info (stderr, e, 1);
6043 fflush (stderr);
6044 err = 1;
6046 if (e->dest != EXIT_BLOCK_PTR)
6048 edge e2 = e->dest->pred;
6049 while (e2 && e2 != e)
6050 e2 = e2->pred_next;
6051 if (!e2)
6053 error ("Basic block %i edge lists are corrupted", bb->index);
6054 err = 1;
6057 e = e->succ_next;
6060 e = bb->pred;
6061 while (e)
6063 if (e->dest != bb)
6065 error ("Basic block %d pred edge is corrupted", bb->index);
6066 fputs ("Predecessor: ", stderr);
6067 dump_edge_info (stderr, e, 0);
6068 fputs ("\nSuccessor: ", stderr);
6069 dump_edge_info (stderr, e, 1);
6070 fputc ('\n', stderr);
6071 err = 1;
6073 if (e->src != ENTRY_BLOCK_PTR)
6075 edge e2 = e->src->succ;
6076 while (e2 && e2 != e)
6077 e2 = e2->succ_next;
6078 if (!e2)
6080 error ("Basic block %i edge lists are corrupted", bb->index);
6081 err = 1;
6084 e = e->pred_next;
6087 /* OK, pointers are correct. Now check the header of the basic
6088 block. It ought to contain an optional CODE_LABEL followed
6089 by the NOTE_INSN_BASIC_BLOCK note. */
6090 x = bb->head;
6091 if (GET_CODE (x) == CODE_LABEL)
6093 if (bb->end == x)
6095 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
6096 bb->index);
6097 err = 1;
6099 x = NEXT_INSN (x);
6101 if (GET_CODE (x) != NOTE
6102 || NOTE_LINE_NUMBER (x) != NOTE_INSN_BASIC_BLOCK
6103 || NOTE_BASIC_BLOCK (x) != bb)
6105 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
6106 bb->index);
6107 err = 1;
6110 if (bb->end == x)
6112 /* Do checks for empty blocks here */
6114 else
6116 x = NEXT_INSN (x);
6117 while (x)
6119 if (GET_CODE (x) == NOTE
6120 && NOTE_LINE_NUMBER (x) == NOTE_INSN_BASIC_BLOCK)
6122 error ("NOTE_INSN_BASIC_BLOCK %d in the middle of basic block %d",
6123 INSN_UID (x), bb->index);
6124 err = 1;
6127 if (x == bb->end)
6128 break;
6130 if (GET_CODE (x) == JUMP_INSN
6131 || GET_CODE (x) == CODE_LABEL
6132 || GET_CODE (x) == BARRIER)
6134 error ("In basic block %d:", bb->index);
6135 fatal_insn ("Flow control insn inside a basic block", x);
6138 x = NEXT_INSN (x);
6143 x = rtx_first;
6144 while (x)
6146 if (!bb_info[INSN_UID (x)])
6148 switch (GET_CODE (x))
6150 case BARRIER:
6151 case NOTE:
6152 break;
6154 case CODE_LABEL:
6155 /* An addr_vec is placed outside any basic block. */
6156 if (NEXT_INSN (x)
6157 && GET_CODE (NEXT_INSN (x)) == JUMP_INSN
6158 && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
6159 || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC))
6161 x = NEXT_INSN (x);
6164 /* But in any case, non-deletable labels can appear anywhere. */
6165 break;
6167 default:
6168 fatal_insn ("Insn outside basic block", x);
6172 if (GET_RTX_CLASS (GET_CODE (x)) == 'i'
6173 && GET_CODE (x) == JUMP_INSN
6174 && returnjump_p (x) && ! condjump_p (x)
6175 && ! (NEXT_INSN (x) && GET_CODE (NEXT_INSN (x)) == BARRIER))
6176 fatal_insn ("Return not followed by barrier", x);
6178 x = NEXT_INSN (x);
6181 if (err)
6182 abort ();
6184 /* Clean up. */
6185 free (bb_info);
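/* A minimal usage sketch (illustrative only; `example_rewire_pass' is
   a hypothetical name, not part of this file): a pass that edits the
   CFG can re-run the checker when it is done.  */

static void
example_rewire_pass ()
{
  /* ... split, merge, or redirect basic blocks and edges here ...  */

  /* Aborts if any of the invariants listed above was broken.  */
  verify_flow_info ();
}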
6188 /* Functions to access an edge list with a vector representation.
6189 Enough data is kept such that given an index number, the
6190 pred and succ that edge represents can be determined, or
6191 given a pred and a succ, its index number can be returned.
6192 This allows algorithms which consume a lot of memory to
6193 represent the normally full matrix of edge (pred,succ) with a
6194 single indexed vector, edge (EDGE_INDEX (pred, succ)), with no
6195 wasted space in the client code due to sparse flow graphs. */
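/* A minimal sketch of the intended usage (hypothetical client code,
   not part of this file; `edge_list_client_example' and `seen' are
   illustrative names): pair the edge list with a flat per-edge array
   instead of a full pred x succ matrix.  */

static void
edge_list_client_example ()
{
  struct edge_list *elist = create_edge_list ();
  char *seen = (char *) xcalloc (NUM_EDGES (elist), sizeof (char));
  edge e;

  /* Mark every successor edge of the entry block in the flat array.  */
  for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
    seen[EDGE_INDEX (elist, e->src, e->dest)] = 1;

  free (seen);
  free_edge_list (elist);
}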
6197 /* This function initializes the edge list. Basically the entire
6198 flowgraph is processed, all edges are assigned a number,
6199 and the data structure is filled in. */
6200 struct edge_list *
6201 create_edge_list ()
6203 struct edge_list *elist;
6204 edge e;
6205 int num_edges;
6206 int x;
6207 int block_count;
6209 block_count = n_basic_blocks + 2; /* Include the entry and exit blocks. */
6211 num_edges = 0;
6213 /* Determine the number of edges in the flow graph by counting successor
6214 edges on each basic block. */
6215 for (x = 0; x < n_basic_blocks; x++)
6217 basic_block bb = BASIC_BLOCK (x);
6219 for (e = bb->succ; e; e = e->succ_next)
6220 num_edges++;
6222 /* Don't forget successors of the entry block. */
6223 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6224 num_edges++;
6226 elist = (struct edge_list *) xmalloc (sizeof (struct edge_list));
6227 elist->num_blocks = block_count;
6228 elist->num_edges = num_edges;
6229 elist->index_to_edge = (edge *) xmalloc (sizeof (edge) * num_edges);
6231 num_edges = 0;
6233 /* Follow successors of the entry block, and register these edges. */
6234 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6236 elist->index_to_edge[num_edges] = e;
6237 num_edges++;
6240 for (x = 0; x < n_basic_blocks; x++)
6242 basic_block bb = BASIC_BLOCK (x);
6244 /* Follow all successors of blocks, and register these edges. */
6245 for (e = bb->succ; e; e = e->succ_next)
6247 elist->index_to_edge[num_edges] = e;
6248 num_edges++;
6251 return elist;
6254 /* This function frees the memory associated with an edge list. */
6255 void
6256 free_edge_list (elist)
6257 struct edge_list *elist;
6259 if (elist)
6261 free (elist->index_to_edge);
6262 free (elist);
6266 /* This function provides debug output showing an edge list. */
6267 void
6268 print_edge_list (f, elist)
6269 FILE *f;
6270 struct edge_list *elist;
6272 int x;
6273 fprintf (f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
6274 elist->num_blocks - 2, elist->num_edges);
6276 for (x = 0; x < elist->num_edges; x++)
6278 fprintf (f, " %-4d - edge(", x);
6279 if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR)
6280 fprintf (f, "entry,");
6281 else
6282 fprintf (f, "%d,", INDEX_EDGE_PRED_BB (elist, x)->index);
6284 if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR)
6285 fprintf (f, "exit)\n");
6286 else
6287 fprintf (f, "%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
6291 /* This function provides an internal consistency check of an edge
6292 list, verifying that all edges are present, and that there are no
6293 extra edges. */
6294 void
6295 verify_edge_list (f, elist)
6296 FILE *f;
6297 struct edge_list *elist;
6299 int x, pred, succ, index;
6300 edge e;
6302 for (x = 0; x < n_basic_blocks; x++)
6304 basic_block bb = BASIC_BLOCK (x);
6306 for (e = bb->succ; e; e = e->succ_next)
6308 pred = e->src->index;
6309 succ = e->dest->index;
6310 index = EDGE_INDEX (elist, e->src, e->dest);
6311 if (index == EDGE_INDEX_NO_EDGE)
6313 fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
6314 continue;
6316 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
6317 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
6318 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
6319 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
6320 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
6321 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
6324 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6326 pred = e->src->index;
6327 succ = e->dest->index;
6328 index = EDGE_INDEX (elist, e->src, e->dest);
6329 if (index == EDGE_INDEX_NO_EDGE)
6331 fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
6332 continue;
6334 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
6335 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
6336 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
6337 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
6338 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
6339 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
6341 /* We've verified that all the edges are in the list; now let's make
6342 sure there are no spurious edges in the list. */
6344 for (pred = 0 ; pred < n_basic_blocks; pred++)
6345 for (succ = 0 ; succ < n_basic_blocks; succ++)
6347 basic_block p = BASIC_BLOCK (pred);
6348 basic_block s = BASIC_BLOCK (succ);
6350 int found_edge = 0;
6352 for (e = p->succ; e; e = e->succ_next)
6353 if (e->dest == s)
6355 found_edge = 1;
6356 break;
6358 for (e = s->pred; e; e = e->pred_next)
6359 if (e->src == p)
6361 found_edge = 1;
6362 break;
6364 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
6365 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6366 fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
6367 pred, succ);
6368 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
6369 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6370 fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
6371 pred, succ, EDGE_INDEX (elist, BASIC_BLOCK (pred),
6372 BASIC_BLOCK (succ)));
6374 for (succ = 0 ; succ < n_basic_blocks; succ++)
6376 basic_block p = ENTRY_BLOCK_PTR;
6377 basic_block s = BASIC_BLOCK (succ);
6379 int found_edge = 0;
6381 for (e = p->succ; e; e = e->succ_next)
6382 if (e->dest == s)
6384 found_edge = 1;
6385 break;
6387 for (e = s->pred; e; e = e->pred_next)
6388 if (e->src == p)
6390 found_edge = 1;
6391 break;
6393 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
6394 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6395 fprintf (f, "*** Edge (entry, %d) appears to not have an index\n",
6396 succ);
6397 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
6398 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6399 fprintf (f, "*** Edge (entry, %d) has index %d, but no edge exists\n",
6400 succ, EDGE_INDEX (elist, ENTRY_BLOCK_PTR,
6401 BASIC_BLOCK (succ)));
6403 for (pred = 0 ; pred < n_basic_blocks; pred++)
6405 basic_block p = BASIC_BLOCK (pred);
6406 basic_block s = EXIT_BLOCK_PTR;
6408 int found_edge = 0;
6410 for (e = p->succ; e; e = e->succ_next)
6411 if (e->dest == s)
6413 found_edge = 1;
6414 break;
6416 for (e = s->pred; e; e = e->pred_next)
6417 if (e->src == p)
6419 found_edge = 1;
6420 break;
6422 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
6423 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6424 fprintf (f, "*** Edge (%d, exit) appears to not have an index\n",
6425 pred);
6426 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
6427 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6428 fprintf (f, "*** Edge (%d, exit) has index %d, but no edge exists\n",
6429 pred, EDGE_INDEX (elist, BASIC_BLOCK (pred),
6430 EXIT_BLOCK_PTR));
6434 /* This routine will determine what, if any, edge there is between
6435 a specified predecessor and successor. */
6437 int
6438 find_edge_index (edge_list, pred, succ)
6439 struct edge_list *edge_list;
6440 basic_block pred, succ;
6442 int x;
6443 for (x = 0; x < NUM_EDGES (edge_list); x++)
6445 if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
6446 && INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
6447 return x;
6449 return (EDGE_INDEX_NO_EDGE);
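/* Note that this helper is a linear scan over every edge in the list,
   so an EDGE_INDEX query costs O(number of edges); callers that look
   up many (pred, succ) pairs may prefer to cache the indices they
   obtain rather than re-searching.  */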
6452 /* This function will remove an edge from the flow graph. */
6453 void
6454 remove_edge (e)
6455 edge e;
6457 edge last_pred = NULL;
6458 edge last_succ = NULL;
6459 edge tmp;
6460 basic_block src, dest;
6461 src = e->src;
6462 dest = e->dest;
6463 for (tmp = src->succ; tmp && tmp != e; tmp = tmp->succ_next)
6464 last_succ = tmp;
6466 if (!tmp)
6467 abort ();
6468 if (last_succ)
6469 last_succ->succ_next = e->succ_next;
6470 else
6471 src->succ = e->succ_next;
6473 for (tmp = dest->pred; tmp && tmp != e; tmp = tmp->pred_next)
6474 last_pred = tmp;
6476 if (!tmp)
6477 abort ();
6478 if (last_pred)
6479 last_pred->pred_next = e->pred_next;
6480 else
6481 dest->pred = e->pred_next;
6483 n_edges--;
6484 free (e);
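/* Since E is freed here, callers that delete edges while iterating
   over an edge list must read the next pointer before calling
   remove_edge; remove_fake_successors below uses exactly that
   pattern.  */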
6487 /* This routine will remove any fake successor edges for a basic block.
6488 When the edge is removed, it is also removed from whatever predecessor
6489 list it is in. */
6490 static void
6491 remove_fake_successors (bb)
6492 basic_block bb;
6494 edge e;
6495 for (e = bb->succ; e ; )
6497 edge tmp = e;
6498 e = e->succ_next;
6499 if ((tmp->flags & EDGE_FAKE) == EDGE_FAKE)
6500 remove_edge (tmp);
6504 /* This routine will remove all fake edges from the flow graph. If
6505 we remove all fake successors, it will automatically remove all
6506 fake predecessors. */
6507 void
6508 remove_fake_edges ()
6510 int x;
6512 for (x = 0; x < n_basic_blocks; x++)
6513 remove_fake_successors (BASIC_BLOCK (x));
6515 /* We've handled all successors except the entry block's. */
6516 remove_fake_successors (ENTRY_BLOCK_PTR);
6519 /* This function will add a fake edge between any block which has no
6520 successors, and the exit block. Some data flow equations require these
6521 edges to exist. */
6522 void
6523 add_noreturn_fake_exit_edges ()
6525 int x;
6527 for (x = 0; x < n_basic_blocks; x++)
6528 if (BASIC_BLOCK (x)->succ == NULL)
6529 make_edge (NULL, BASIC_BLOCK (x), EXIT_BLOCK_PTR, EDGE_FAKE);
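/* A typical bracketing pattern (sketch of a hypothetical caller, not
   code used here): give every block a path to the exit block before
   solving a backward dataflow problem, then discard the helper
   edges.  */

static void
backward_dataflow_example ()
{
  add_noreturn_fake_exit_edges ();

  /* ... set up and solve the backward dataflow equations here ...  */

  remove_fake_edges ();
}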
6532 /* Dump the list of basic blocks in the bitmap NODES. */
6533 static void
6534 flow_nodes_print (str, nodes, file)
6535 const char *str;
6536 const sbitmap nodes;
6537 FILE *file;
6539 int node;
6541 fprintf (file, "%s { ", str);
6542 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {fprintf (file, "%d ", node);});
6543 fputs ("}\n", file);
6547 /* Dump the list of exiting edges in the array EDGES. */
6548 static void
6549 flow_exits_print (str, edges, num_edges, file)
6550 const char *str;
6551 const edge *edges;
6552 int num_edges;
6553 FILE *file;
6555 int i;
6557 fprintf (file, "%s { ", str);
6558 for (i = 0; i < num_edges; i++)
6559 fprintf (file, "%d->%d ", edges[i]->src->index, edges[i]->dest->index);
6560 fputs ("}\n", file);
6564 /* Dump loop-related CFG information. */
6565 static void
6566 flow_loops_cfg_dump (loops, file)
6567 const struct loops *loops;
6568 FILE *file;
6570 int i;
6572 if (! loops->num || ! file || ! loops->cfg.dom)
6573 return;
6575 for (i = 0; i < n_basic_blocks; i++)
6577 edge succ;
6579 fprintf (file, ";; %d succs { ", i);
6580 for (succ = BASIC_BLOCK (i)->succ; succ; succ = succ->succ_next)
6581 fprintf (file, "%d ", succ->dest->index);
6582 flow_nodes_print ("} dom", loops->cfg.dom[i], file);
6586 /* Dump the DFS node order. */
6587 if (loops->cfg.dfs_order)
6589 fputs (";; DFS order: ", file);
6590 for (i = 0; i < n_basic_blocks; i++)
6591 fprintf (file, "%d ", loops->cfg.dfs_order[i]);
6592 fputs ("\n", file);
6597 /* Return non-zero if the nodes of LOOP are a subset of OUTER. */
6598 static int
6599 flow_loop_nested_p (outer, loop)
6600 struct loop *outer;
6601 struct loop *loop;
6603 return sbitmap_a_subset_b_p (loop->nodes, outer->nodes);
6607 /* Dump the loop information specified by LOOPS to the stream FILE. */
6608 void
6609 flow_loops_dump (loops, file, verbose)
6610 const struct loops *loops;
6611 FILE *file;
6612 int verbose;
6614 int i;
6615 int num_loops;
6617 num_loops = loops->num;
6618 if (! num_loops || ! file)
6619 return;
6621 fprintf (file, ";; %d loops found, %d levels\n",
6622 num_loops, loops->levels);
6624 for (i = 0; i < num_loops; i++)
6626 struct loop *loop = &loops->array[i];
6628 fprintf (file, ";; loop %d (%d to %d):\n;; header %d, latch %d, pre-header %d, depth %d, level %d, outer %ld\n",
6629 i, INSN_UID (loop->header->head), INSN_UID (loop->latch->end),
6630 loop->header->index, loop->latch->index,
6631 loop->pre_header ? loop->pre_header->index : -1,
6632 loop->depth, loop->level,
6633 (long) (loop->outer ? (loop->outer - loops->array) : -1));
6634 fprintf (file, ";; %d", loop->num_nodes);
6635 flow_nodes_print (" nodes", loop->nodes, file);
6636 fprintf (file, ";; %d", loop->num_exits);
6637 flow_exits_print (" exits", loop->exits, loop->num_exits, file);
6639 if (loop->shared)
6641 int j;
6643 for (j = 0; j < i; j++)
6645 struct loop *oloop = &loops->array[j];
6647 if (loop->header == oloop->header)
6649 int disjoint;
6650 int smaller;
6652 smaller = loop->num_nodes < oloop->num_nodes;
6654 /* If the union of LOOP and OLOOP is different from
6655 the larger of LOOP and OLOOP then LOOP and OLOOP
6656 must be disjoint. */
6657 disjoint = ! flow_loop_nested_p (smaller ? loop : oloop,
6658 smaller ? oloop : loop);
6659 fprintf (file, ";; loop header %d shared by loops %d, %d %s\n",
6660 loop->header->index, i, j,
6661 disjoint ? "disjoint" : "nested");
6666 if (verbose)
6668 /* Print diagnostics to compare our concept of a loop with
6669 what the loop notes say. */
6670 if (GET_CODE (PREV_INSN (loop->first->head)) != NOTE
6671 || NOTE_LINE_NUMBER (PREV_INSN (loop->first->head))
6672 != NOTE_INSN_LOOP_BEG)
6673 fprintf (file, ";; No NOTE_INSN_LOOP_BEG at %d\n",
6674 INSN_UID (PREV_INSN (loop->first->head)));
6675 if (GET_CODE (NEXT_INSN (loop->last->end)) != NOTE
6676 || NOTE_LINE_NUMBER (NEXT_INSN (loop->last->end))
6677 != NOTE_INSN_LOOP_END)
6678 fprintf (file, ";; No NOTE_INSN_LOOP_END at %d\n",
6679 INSN_UID (NEXT_INSN (loop->last->end)));
6683 if (verbose)
6684 flow_loops_cfg_dump (loops, file);
6688 /* Free all the memory allocated for LOOPS. */
6689 void
6690 flow_loops_free (loops)
6691 struct loops *loops;
6693 if (loops->array)
6695 int i;
6697 if (! loops->num)
6698 abort ();
6700 /* Free the loop descriptors. */
6701 for (i = 0; i < loops->num; i++)
6703 struct loop *loop = &loops->array[i];
6705 if (loop->nodes)
6706 sbitmap_free (loop->nodes);
6707 if (loop->exits)
6708 free (loop->exits);
6710 free (loops->array);
6711 loops->array = NULL;
6713 if (loops->cfg.dom)
6714 sbitmap_vector_free (loops->cfg.dom);
6715 if (loops->cfg.dfs_order)
6716 free (loops->cfg.dfs_order);
6718 sbitmap_free (loops->shared_headers);
6723 /* Find the exits from the loop using the bitmap of loop nodes NODES
6724 and store them in the EXITS array. Return the number of exits from
6725 the loop. */
6726 static int
6727 flow_loop_exits_find (nodes, exits)
6728 const sbitmap nodes;
6729 edge **exits;
6731 edge e;
6732 int node;
6733 int num_exits;
6735 *exits = NULL;
6737 /* Check all nodes within the loop to see if there are any
6738 successors not in the loop. Note that a node may have multiple
6739 exiting edges. */
6740 num_exits = 0;
6741 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
6742 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
6744 basic_block dest = e->dest;
6746 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
6747 num_exits++;
6751 if (! num_exits)
6752 return 0;
6754 *exits = (edge *) xmalloc (num_exits * sizeof (edge));
6756 /* Store all exiting edges into an array. */
6757 num_exits = 0;
6758 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
6759 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
6761 basic_block dest = e->dest;
6763 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
6764 (*exits)[num_exits++] = e;
6768 return num_exits;
6772 /* Find the nodes contained within the loop with header HEADER and
6773 latch LATCH and store in NODES. Return the number of nodes within
6774 the loop. */
6775 static int
6776 flow_loop_nodes_find (header, latch, nodes)
6777 basic_block header;
6778 basic_block latch;
6779 sbitmap nodes;
6781 basic_block *stack;
6782 int sp;
6783 int num_nodes = 0;
6785 stack = (basic_block *) xmalloc (n_basic_blocks * sizeof (basic_block));
6786 sp = 0;
6788 /* Start with only the loop header in the set of loop nodes. */
6789 sbitmap_zero (nodes);
6790 SET_BIT (nodes, header->index);
6791 num_nodes++;
6792 header->loop_depth++;
6794 /* Push the loop latch on to the stack. */
6795 if (! TEST_BIT (nodes, latch->index))
6797 SET_BIT (nodes, latch->index);
6798 latch->loop_depth++;
6799 num_nodes++;
6800 stack[sp++] = latch;
6803 while (sp)
6805 basic_block node;
6806 edge e;
6808 node = stack[--sp];
6809 for (e = node->pred; e; e = e->pred_next)
6811 basic_block ancestor = e->src;
6813 /* If this ancestor has not yet been marked as part of the loop,
6814 add it to the set of loop nodes and push it onto the stack. */
6815 if (ancestor != ENTRY_BLOCK_PTR
6816 && ! TEST_BIT (nodes, ancestor->index))
6818 SET_BIT (nodes, ancestor->index);
6819 ancestor->loop_depth++;
6820 num_nodes++;
6821 stack[sp++] = ancestor;
6825 free (stack);
6826 return num_nodes;
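/* The walk above is the textbook natural-loop construction: starting
   from the latch, predecessor chains are followed backwards, and every
   block reached before the header is added to the loop.  Because the
   header dominates the latch, each such chain must pass through the
   header, which is marked first and therefore never pushed, so the
   walk cannot escape the loop.  */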
6830 /* Compute the depth first search order and store it in the array
6831 DFS_ORDER, marking visited nodes in a local bitmap. Returns the
6832 number of nodes visited. */
6833 static int
6834 flow_depth_first_order_compute (dfs_order)
6835 int *dfs_order;
6837 edge e;
6838 edge *stack;
6839 int sp;
6840 int dfsnum = 0;
6841 sbitmap visited;
6843 /* Allocate stack for back-tracking up CFG. */
6844 stack = (edge *) xmalloc (n_basic_blocks * sizeof (edge));
6845 sp = 0;
6847 /* Allocate bitmap to track nodes that have been visited. */
6848 visited = sbitmap_alloc (n_basic_blocks);
6850 /* None of the nodes in the CFG have been visited yet. */
6851 sbitmap_zero (visited);
6853 /* Start with the first successor edge from the entry block. */
6854 e = ENTRY_BLOCK_PTR->succ;
6855 while (e)
6857 basic_block src = e->src;
6858 basic_block dest = e->dest;
6860 /* Mark that we have visited this node. */
6861 if (src != ENTRY_BLOCK_PTR)
6862 SET_BIT (visited, src->index);
6864 /* If this node has not been visited before, push the current
6865 edge on to the stack and proceed with the first successor
6866 edge of this node. */
6867 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index)
6868 && dest->succ)
6870 stack[sp++] = e;
6871 e = dest->succ;
6873 else
6875 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index)
6876 && ! dest->succ)
6878 /* DEST has no successors (for example, a non-returning
6879 function is called) so do not push the current edge
6880 but carry on with its next successor. */
6881 dfs_order[dest->index] = n_basic_blocks - ++dfsnum;
6882 SET_BIT (visited, dest->index);
6885 while (! e->succ_next && src != ENTRY_BLOCK_PTR)
6887 dfs_order[src->index] = n_basic_blocks - ++dfsnum;
6889 /* Pop edge off stack. */
6890 e = stack[--sp];
6891 src = e->src;
6893 e = e->succ_next;
6896 free (stack);
6897 sbitmap_free (visited);
6899 /* The number of nodes visited should not be greater than
6900 n_basic_blocks. */
6901 if (dfsnum > n_basic_blocks)
6902 abort ();
6904 /* If some nodes were not visited, there are unreachable blocks left in the CFG. */
6905 if (dfsnum < n_basic_blocks)
6906 abort ();
6907 return dfsnum;
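/* On return, DFS_ORDER[I] holds the rank of block I: the block that
   the search finishes last receives rank 0, so lower ranks tend to lie
   nearer the entry.  flow_loops_find below relies on this to consider
   outer loop headers before inner ones.  */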
6911 /* Return the block for the pre-header of the loop with header
6912 HEADER where DOM specifies the dominator information. Return NULL if
6913 there is no pre-header. */
6914 static basic_block
6915 flow_loop_pre_header_find (header, dom)
6916 basic_block header;
6917 const sbitmap *dom;
6919 basic_block pre_header;
6920 edge e;
6922 /* If block P is a predecessor of the header and is the only predecessor
6923 that the header does not dominate, then it is the pre-header. */
6924 pre_header = NULL;
6925 for (e = header->pred; e; e = e->pred_next)
6927 basic_block node = e->src;
6929 if (node != ENTRY_BLOCK_PTR
6930 && ! TEST_BIT (dom[node->index], header->index))
6932 if (pre_header == NULL)
6933 pre_header = node;
6934 else
6936 /* There are multiple edges into the header from outside
6937 the loop so there is no pre-header block. */
6938 pre_header = NULL;
6939 break;
6943 return pre_header;
6947 /* Add LOOP to the loop hierarchy tree where PREVLOOP was the loop
6948 previously added. The insertion algorithm assumes that the loops
6949 are added in the order found by a depth first search of the CFG. */
6950 static void
6951 flow_loop_tree_node_add (prevloop, loop)
6952 struct loop *prevloop;
6953 struct loop *loop;
6956 if (flow_loop_nested_p (prevloop, loop))
6958 prevloop->inner = loop;
6959 loop->outer = prevloop;
6960 return;
6963 while (prevloop->outer)
6965 if (flow_loop_nested_p (prevloop->outer, loop))
6967 prevloop->next = loop;
6968 loop->outer = prevloop->outer;
6969 return;
6971 prevloop = prevloop->outer;
6974 prevloop->next = loop;
6975 loop->outer = NULL;
6979 /* Build the loop hierarchy tree for LOOPS. */
6980 static void
6981 flow_loops_tree_build (loops)
6982 struct loops *loops;
6984 int i;
6985 int num_loops;
6987 num_loops = loops->num;
6988 if (! num_loops)
6989 return;
6991 /* Root the loop hierarchy tree with the first loop found.
6992 Since we used a depth first search this should be the
6993 outermost loop. */
6994 loops->tree = &loops->array[0];
6995 loops->tree->outer = loops->tree->inner = loops->tree->next = NULL;
6997 /* Add the remaining loops to the tree. */
6998 for (i = 1; i < num_loops; i++)
6999 flow_loop_tree_node_add (&loops->array[i - 1], &loops->array[i]);
7003 /* Helper function to compute loop nesting depth and enclosed loop level
7004 for the natural loop specified by LOOP at the loop depth DEPTH.
7005 Returns the loop level. */
7006 static int
7007 flow_loop_level_compute (loop, depth)
7008 struct loop *loop;
7009 int depth;
7011 struct loop *inner;
7012 int level = 1;
7014 if (! loop)
7015 return 0;
7017 /* Traverse loop tree assigning depth and computing level as the
7018 maximum level of all the inner loops of this loop. The loop
7019 level is equivalent to the height of the loop in the loop tree
7020 and corresponds to the number of enclosed loop levels (including
7021 itself). */
7022 for (inner = loop->inner; inner; inner = inner->next)
7024 int ilevel;
7026 ilevel = flow_loop_level_compute (inner, depth + 1) + 1;
7028 if (ilevel > level)
7029 level = ilevel;
7031 loop->level = level;
7032 loop->depth = depth;
7033 return level;
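/* As a worked example, in a doubly nested loop the inner loop receives
   depth 2 and level 1, while the outer loop receives depth 1 and
   level 2; flow_loops_level_compute below then reports 2 for the whole
   function.  */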
7037 /* Compute the loop nesting depth and enclosed loop level for the loop
7038 hierarchy tree specified by LOOPS. Return the maximum enclosed loop
7039 level. */
7041 static int
7042 flow_loops_level_compute (loops)
7043 struct loops *loops;
7045 struct loop *loop;
7046 int level;
7047 int levels = 0;
7049 /* Traverse all the outer level loops. */
7050 for (loop = loops->tree; loop; loop = loop->next)
7052 level = flow_loop_level_compute (loop, 1);
7053 if (level > levels)
7054 levels = level;
7056 return levels;
7060 /* Find all the natural loops in the function and save in LOOPS structure
7061 and recalculate loop_depth information in basic block structures.
7062 Return the number of natural loops found. */
7064 int
7065 flow_loops_find (loops)
7066 struct loops *loops;
7068 int i;
7069 int b;
7070 int num_loops;
7071 edge e;
7072 sbitmap headers;
7073 sbitmap *dom;
7074 int *dfs_order;
7076 loops->num = 0;
7077 loops->array = NULL;
7078 loops->tree = NULL;
7079 dfs_order = NULL;
7081 /* Taking care of this degenerate case makes the rest of
7082 this code simpler. */
7083 if (n_basic_blocks == 0)
7084 return 0;
7086 /* Compute the dominators. */
7087 dom = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
7088 compute_flow_dominators (dom, NULL);
7090 /* Count the number of loop edges (back edges). This should be the
7091 same as the number of natural loops. Also clear the loop_depth of
7092 each block; as we work from inner to outer loops in a nest we call
7093 flow_loop_nodes_find, which increments loop_depth for the nodes
7094 within the current loop, and the current loop encloses any inner loops. */
7096 num_loops = 0;
7097 for (b = 0; b < n_basic_blocks; b++)
7099 BASIC_BLOCK (b)->loop_depth = 0;
7100 for (e = BASIC_BLOCK (b)->pred; e; e = e->pred_next)
7102 basic_block latch = e->src;
7104 /* Look for back edges where a predecessor is dominated
7105 by this block. A natural loop has a single entry
7106 node (header) that dominates all the nodes in the
7107 loop. It also has a single back edge to the header
7108 from a latch node. Note that multiple natural loops
7109 may share the same header. */
7110 if (latch != ENTRY_BLOCK_PTR && TEST_BIT (dom[latch->index], b))
7111 num_loops++;
7115 if (num_loops)
7117 /* Compute depth first search order of the CFG so that outer
7118 natural loops will be found before inner natural loops. */
7119 dfs_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
7120 flow_depth_first_order_compute (dfs_order);
7122 /* Allocate loop structures. */
7123 loops->array
7124 = (struct loop *) xcalloc (num_loops, sizeof (struct loop));
7126 headers = sbitmap_alloc (n_basic_blocks);
7127 sbitmap_zero (headers);
7129 loops->shared_headers = sbitmap_alloc (n_basic_blocks);
7130 sbitmap_zero (loops->shared_headers);
7132 /* Find and record information about all the natural loops
7133 in the CFG. */
7134 num_loops = 0;
7135 for (b = 0; b < n_basic_blocks; b++)
7137 basic_block header;
7139 /* Search the nodes of the CFG in DFS order so that we can find
7140 outer loops first. */
7141 header = BASIC_BLOCK (dfs_order[b]);
7143 /* Look for all the possible latch blocks for this header. */
7144 for (e = header->pred; e; e = e->pred_next)
7146 basic_block latch = e->src;
7148 /* Look for back edges where a predecessor is dominated
7149 by this block. A natural loop has a single entry
7150 node (header) that dominates all the nodes in the
7151 loop. It also has a single back edge to the header
7152 from a latch node. Note that multiple natural loops
7153 may share the same header. */
7154 if (latch != ENTRY_BLOCK_PTR
7155 && TEST_BIT (dom[latch->index], header->index))
7157 struct loop *loop;
7159 loop = loops->array + num_loops;
7161 loop->header = header;
7162 loop->latch = latch;
7164 /* Keep track of blocks that are loop headers so
7165 that we can tell which loops should be merged. */
7166 if (TEST_BIT (headers, header->index))
7167 SET_BIT (loops->shared_headers, header->index);
7168 SET_BIT (headers, header->index);
7170 /* Find nodes contained within the loop. */
7171 loop->nodes = sbitmap_alloc (n_basic_blocks);
7172 loop->num_nodes
7173 = flow_loop_nodes_find (header, latch, loop->nodes);
7175 /* Compute first and last blocks within the loop.
7176 These are often the same as the loop header and
7177 loop latch respectively, but this is not always
7178 the case. */
7179 loop->first
7180 = BASIC_BLOCK (sbitmap_first_set_bit (loop->nodes));
7181 loop->last
7182 = BASIC_BLOCK (sbitmap_last_set_bit (loop->nodes));
7184 /* Find edges which exit the loop. Note that a node
7185 may have several exit edges. */
7186 loop->num_exits
7187 = flow_loop_exits_find (loop->nodes, &loop->exits);
7189 /* Look to see if the loop has a pre-header node. */
7190 loop->pre_header
7191 = flow_loop_pre_header_find (header, dom);
7193 num_loops++;
7198 /* Natural loops with shared headers may either be disjoint or
7199 nested. Disjoint loops with shared headers cannot be inner
7200 loops and should be merged. For now just mark loops that share
7201 headers. */
7202 for (i = 0; i < num_loops; i++)
7203 if (TEST_BIT (loops->shared_headers, loops->array[i].header->index))
7204 loops->array[i].shared = 1;
7206 sbitmap_free (headers);
7209 loops->num = num_loops;
7211 /* Save CFG derived information to avoid recomputing it. */
7212 loops->cfg.dom = dom;
7213 loops->cfg.dfs_order = dfs_order;
7215 /* Build the loop hierarchy tree. */
7216 flow_loops_tree_build (loops);
7218 /* Assign the loop nesting depth and enclosed loop level for each
7219 loop. */
7220 loops->levels = flow_loops_level_compute (loops);
7222 return num_loops;
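/* A minimal sketch of the calling convention (hypothetical caller;
   `loop_analysis_example' is an illustrative name, not part of this
   file): find the loops, optionally dump them, then release the
   memory.  */

static void
loop_analysis_example (file)
     FILE *file;
{
  struct loops loops;

  if (flow_loops_find (&loops) > 0)
    flow_loops_dump (&loops, file, 0);

  flow_loops_free (&loops);
}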
7226 /* Return non-zero if edge E enters header of LOOP from outside of LOOP. */
7227 int
7228 flow_loop_outside_edge_p (loop, e)
7229 const struct loop *loop;
7230 edge e;
7232 if (e->dest != loop->header)
7233 abort ();
7234 return (e->src == ENTRY_BLOCK_PTR)
7235 || ! TEST_BIT (loop->nodes, e->src->index);
7239 /* Clear LOG_LINKS fields of insns in a chain. */
7240 void
7241 clear_log_links (insns)
7242 rtx insns;
7244 rtx i;
7245 for (i = insns; i; i = NEXT_INSN (i))
7246 if (GET_RTX_CLASS (GET_CODE (i)) == 'i')
7247 LOG_LINKS (i) = 0;