1 /* Data flow analysis for GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
any later version.
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
22 /* This file contains the data flow analysis pass of the compiler. It
23 computes data flow information which tells combine_instructions
24 which insns to consider combining and controls register allocation.
26 Additional data flow information that is too bulky to record is
27 generated during the analysis, and is used at that time to create
28 autoincrement and autodecrement addressing.
30 The first step is dividing the function into basic blocks.
31 find_basic_blocks does this. Then life_analysis determines
32 where each register is live and where it is dead.
34 ** find_basic_blocks **
36 find_basic_blocks divides the current function's rtl into basic
37 blocks and constructs the CFG. The blocks are recorded in the
38 basic_block_info array; the CFG exists in the edge structures
39 referenced by the blocks.
41 find_basic_blocks also finds any unreachable loops and deletes them.
45 life_analysis is called immediately after find_basic_blocks.
46 It uses the basic block information to determine where each
47 hard or pseudo register is live.
49 ** live-register info **
51 The information about where each register is live is in two parts:
52 the REG_NOTES of insns, and the vector basic_block->global_live_at_start.
54 basic_block->global_live_at_start has an element for each basic
55 block, and the element is a bit-vector with a bit for each hard or
56 pseudo register. The bit is 1 if the register is live at the
57 beginning of the basic block.
59 Two types of elements can be added to an insn's REG_NOTES.
60 A REG_DEAD note is added to an insn's REG_NOTES for any register
61 that meets both of two conditions: The value in the register is not
62 needed in subsequent insns and the insn does not replace the value in
63 the register (in the case of multi-word hard registers, the value in
64 each register must be replaced by the insn to avoid a REG_DEAD note).
66 In the vast majority of cases, an object in a REG_DEAD note will be
67 used somewhere in the insn. The (rare) exception to this is if an
68 insn uses a multi-word hard register and only some of the registers are
69 needed in subsequent insns. In that case, REG_DEAD notes will be
70 provided for those hard registers that are not subsequently needed.
71 Partial REG_DEAD notes of this type do not occur when an insn sets
72 only some of the hard registers used in such a multi-word operand;
73 omitting REG_DEAD notes for objects stored in an insn is optional and
74 the desire to do so does not justify the complexity of the partial
REG_DEAD notes.
77 REG_UNUSED notes are added for each register that is set by the insn
78 but is unused subsequently (if every register set by the insn is unused
79 and the insn does not reference memory or have some other side-effect,
80 the insn is deleted instead). If only part of a multi-word hard
81 register is used in a subsequent insn, REG_UNUSED notes are made for
82 the parts that will not be used.
84 To determine which registers are live after any insn, one can
85 start from the beginning of the basic block and scan insns, noting
86 which registers are set by each insn and which die there.
88 ** Other actions of life_analysis **
90 life_analysis sets up the LOG_LINKS fields of insns because the
91 information needed to do so is readily available.
93 life_analysis deletes insns whose only effect is to store a value
that is never used.
96 life_analysis notices cases where a reference to a register as
97 a memory address can be combined with a preceding or following
98 incrementation or decrementation of the register. The separate
99 instruction to increment or decrement is deleted and the address
100 is changed to a POST_INC or similar rtx.
102 Each time an incrementing or decrementing address is created,
103 a REG_INC element is added to the insn's REG_NOTES list.
105 life_analysis fills in certain vectors containing information about
106 register usage: REG_N_REFS, REG_N_DEATHS, REG_N_SETS, REG_LIVE_LENGTH,
107 REG_N_CALLS_CROSSED and REG_BASIC_BLOCK.
109 life_analysis sets current_function_sp_is_unchanging if the function
110 doesn't modify the stack pointer. */
114 Split out from life_analysis:
115 - local property discovery (bb->local_live, bb->local_set)
116 - global property computation
118 - pre/post modify transformation
126 #include "hard-reg-set.h"
127 #include "basic-block.h"
128 #include "insn-config.h"
132 #include "function.h"
136 #include "insn-flags.h"
141 #include "splay-tree.h"
143 #define obstack_chunk_alloc xmalloc
144 #define obstack_chunk_free free
146 /* EXIT_IGNORE_STACK should be nonzero if, when returning from a function,
147 the stack pointer does not matter. The value is tested only in
148 functions that have frame pointers.
149 No definition is equivalent to always zero. */
150 #ifndef EXIT_IGNORE_STACK
151 #define EXIT_IGNORE_STACK 0
154 #ifndef HAVE_epilogue
155 #define HAVE_epilogue 0
157 #ifndef HAVE_prologue
158 #define HAVE_prologue 0
160 #ifndef HAVE_sibcall_epilogue
161 #define HAVE_sibcall_epilogue 0
165 #define LOCAL_REGNO(REGNO) 0
167 #ifndef EPILOGUE_USES
168 #define EPILOGUE_USES(REGNO) 0
171 /* The obstack on which the flow graph components are allocated. */
173 struct obstack flow_obstack
;
174 static char *flow_firstobj
;
176 /* Number of basic blocks in the current function. */
180 /* Number of edges in the current function. */
184 /* The basic block array. */
186 varray_type basic_block_info
;
188 /* The special entry and exit blocks. */
190 struct basic_block_def entry_exit_blocks
[2]
195 NULL
, /* local_set */
196 NULL
, /* cond_local_set */
197 NULL
, /* global_live_at_start */
198 NULL
, /* global_live_at_end */
200 ENTRY_BLOCK
, /* index */
202 -1, -1, /* eh_beg, eh_end */
210 NULL
, /* local_set */
211 NULL
, /* cond_local_set */
212 NULL
, /* global_live_at_start */
213 NULL
, /* global_live_at_end */
215 EXIT_BLOCK
, /* index */
217 -1, -1, /* eh_beg, eh_end */
222 /* Nonzero if the second flow pass has completed. */
225 /* Maximum register number used in this function, plus one. */
229 /* Indexed by n, giving various register information */
231 varray_type reg_n_info
;
233 /* Size of a regset for the current function,
234 in (1) bytes and (2) elements. */
239 /* Regset of regs live when calls to `setjmp'-like functions happen. */
240 /* ??? Does this exist only for the setjmp-clobbered warning message? */
242 regset regs_live_at_setjmp
;
244 /* List made of EXPR_LIST rtx's which gives pairs of pseudo registers
245 that have to go in the same hard reg.
246 The first two regs in the list are a pair, and the next two
247 are another pair, etc. */
250 /* Set of registers that may be eliminable. These are handled specially
251 in updating regs_ever_live. */
253 static HARD_REG_SET elim_reg_set
;
255 /* The basic block structure for every insn, indexed by uid. */
257 varray_type basic_block_for_insn
;
259 /* The labels mentioned in non-jump rtl. Valid during find_basic_blocks. */
260 /* ??? Should probably be using LABEL_NUSES instead. It would take a
261 bit of surgery to be able to use or co-opt the routines in jump. */
263 static rtx label_value_list
;
264 static rtx tail_recursion_label_list
;
266 /* Holds information for tracking conditional register life information. */
267 struct reg_cond_life_info
269 /* An EXPR_LIST of conditions under which a register is dead. */
272 /* ??? Could store mask of bytes that are dead, so that we could finally
273 track lifetimes of multi-word registers accessed via subregs. */
276 /* For use in communicating between propagate_block and its subroutines.
277 Holds all information needed to compute life and def-use information. */
279 struct propagate_block_info
281 /* The basic block we're considering. */
284 /* Bit N is set if register N is conditionally or unconditionally live. */
287 /* Bit N is set if register N is set this insn. */
290 /* Element N is the next insn that uses (hard or pseudo) register N
291 within the current basic block; or zero, if there is no such insn. */
294 /* Contains a list of all the MEMs we are tracking for dead store
298 /* If non-null, record the set of registers set unconditionally in the
302 /* If non-null, record the set of registers set conditionally in the
304 regset cond_local_set
;
306 #ifdef HAVE_conditional_execution
307 /* Indexed by register number, holds a reg_cond_life_info for each
308 register that is not unconditionally live or dead. */
309 splay_tree reg_cond_dead
;
311 /* Bit N is set if register N is in an expression in reg_cond_dead. */
315 /* Non-zero if the value of CC0 is live. */
318 /* Flags controling the set of information propagate_block collects. */
322 /* Store the data structures necessary for depth-first search. */
323 struct depth_first_search_dsS
{
324 /* stack for backtracking during the algorithm */
327 /* number of edges in the stack. That is, positions 0, ..., sp-1
331 /* record of basic blocks already seen by depth-first search */
332 sbitmap visited_blocks
;
334 typedef struct depth_first_search_dsS
*depth_first_search_ds
;
336 /* Forward declarations */
337 static int count_basic_blocks
PARAMS ((rtx
));
338 static void find_basic_blocks_1
PARAMS ((rtx
));
339 static rtx find_label_refs
PARAMS ((rtx
, rtx
));
340 static void clear_edges
PARAMS ((void));
341 static void make_edges
PARAMS ((rtx
));
342 static void make_label_edge
PARAMS ((sbitmap
*, basic_block
,
344 static void make_eh_edge
PARAMS ((sbitmap
*, eh_nesting_info
*,
345 basic_block
, rtx
, int));
346 static void mark_critical_edges
PARAMS ((void));
347 static void move_stray_eh_region_notes
PARAMS ((void));
348 static void record_active_eh_regions
PARAMS ((rtx
));
350 static void commit_one_edge_insertion
PARAMS ((edge
));
352 static void delete_unreachable_blocks
PARAMS ((void));
353 static void delete_eh_regions
PARAMS ((void));
354 static int can_delete_note_p
PARAMS ((rtx
));
355 static void expunge_block
PARAMS ((basic_block
));
356 static int can_delete_label_p
PARAMS ((rtx
));
357 static int tail_recursion_label_p
PARAMS ((rtx
));
358 static int merge_blocks_move_predecessor_nojumps
PARAMS ((basic_block
,
360 static int merge_blocks_move_successor_nojumps
PARAMS ((basic_block
,
362 static int merge_blocks
PARAMS ((edge
,basic_block
,basic_block
));
363 static void try_merge_blocks
PARAMS ((void));
364 static void tidy_fallthru_edges
PARAMS ((void));
365 static int verify_wide_reg_1
PARAMS ((rtx
*, void *));
366 static void verify_wide_reg
PARAMS ((int, rtx
, rtx
));
367 static void verify_local_live_at_start
PARAMS ((regset
, basic_block
));
368 static int set_noop_p
PARAMS ((rtx
));
369 static int noop_move_p
PARAMS ((rtx
));
370 static void delete_noop_moves
PARAMS ((rtx
));
371 static void notice_stack_pointer_modification_1
PARAMS ((rtx
, rtx
, void *));
372 static void notice_stack_pointer_modification
PARAMS ((rtx
));
373 static void mark_reg
PARAMS ((rtx
, void *));
374 static void mark_regs_live_at_end
PARAMS ((regset
));
375 static int set_phi_alternative_reg
PARAMS ((rtx
, int, int, void *));
376 static void calculate_global_regs_live
PARAMS ((sbitmap
, sbitmap
, int));
377 static void propagate_block_delete_insn
PARAMS ((basic_block
, rtx
));
378 static rtx propagate_block_delete_libcall
PARAMS ((basic_block
, rtx
, rtx
));
379 static int insn_dead_p
PARAMS ((struct propagate_block_info
*,
381 static int libcall_dead_p
PARAMS ((struct propagate_block_info
*,
383 static void mark_set_regs
PARAMS ((struct propagate_block_info
*,
385 static void mark_set_1
PARAMS ((struct propagate_block_info
*,
386 enum rtx_code
, rtx
, rtx
,
388 #ifdef HAVE_conditional_execution
389 static int mark_regno_cond_dead
PARAMS ((struct propagate_block_info
*,
391 static void free_reg_cond_life_info
PARAMS ((splay_tree_value
));
392 static int flush_reg_cond_reg_1
PARAMS ((splay_tree_node
, void *));
393 static void flush_reg_cond_reg
PARAMS ((struct propagate_block_info
*,
395 static rtx ior_reg_cond
PARAMS ((rtx
, rtx
));
396 static rtx not_reg_cond
PARAMS ((rtx
));
397 static rtx nand_reg_cond
PARAMS ((rtx
, rtx
));
400 static void attempt_auto_inc
PARAMS ((struct propagate_block_info
*,
401 rtx
, rtx
, rtx
, rtx
, rtx
));
402 static void find_auto_inc
PARAMS ((struct propagate_block_info
*,
404 static int try_pre_increment_1
PARAMS ((struct propagate_block_info
*,
406 static int try_pre_increment
PARAMS ((rtx
, rtx
, HOST_WIDE_INT
));
408 static void mark_used_reg
PARAMS ((struct propagate_block_info
*,
410 static void mark_used_regs
PARAMS ((struct propagate_block_info
*,
412 void dump_flow_info
PARAMS ((FILE *));
413 void debug_flow_info
PARAMS ((void));
414 static void dump_edge_info
PARAMS ((FILE *, edge
, int));
415 static void print_rtl_and_abort
PARAMS ((void));
417 static void invalidate_mems_from_autoinc
PARAMS ((struct propagate_block_info
*,
419 static void invalidate_mems_from_set
PARAMS ((struct propagate_block_info
*,
421 static void remove_fake_successors
PARAMS ((basic_block
));
422 static void flow_nodes_print
PARAMS ((const char *, const sbitmap
,
424 static void flow_edge_list_print
PARAMS ((const char *, const edge
*,
426 static void flow_loops_cfg_dump
PARAMS ((const struct loops
*,
428 static int flow_loop_nested_p
PARAMS ((struct loop
*,
430 static int flow_loop_entry_edges_find
PARAMS ((basic_block
, const sbitmap
,
432 static int flow_loop_exit_edges_find
PARAMS ((const sbitmap
, edge
**));
433 static int flow_loop_nodes_find
PARAMS ((basic_block
, basic_block
, sbitmap
));
434 static int flow_depth_first_order_compute
PARAMS ((int *, int *));
435 static void flow_dfs_compute_reverse_init
436 PARAMS ((depth_first_search_ds
));
437 static void flow_dfs_compute_reverse_add_bb
438 PARAMS ((depth_first_search_ds
, basic_block
));
439 static basic_block flow_dfs_compute_reverse_execute
440 PARAMS ((depth_first_search_ds
));
441 static void flow_dfs_compute_reverse_finish
442 PARAMS ((depth_first_search_ds
));
443 static void flow_loop_pre_header_scan
PARAMS ((struct loop
*));
444 static basic_block flow_loop_pre_header_find
PARAMS ((basic_block
,
446 static void flow_loop_tree_node_add
PARAMS ((struct loop
*, struct loop
*));
447 static void flow_loops_tree_build
PARAMS ((struct loops
*));
448 static int flow_loop_level_compute
PARAMS ((struct loop
*, int));
449 static int flow_loops_level_compute
PARAMS ((struct loops
*));
450 static void allocate_bb_life_data
PARAMS ((void));
452 /* Find basic blocks of the current function.
453 F is the first insn of the function and NREGS the number of register
457 find_basic_blocks (f
, nregs
, file
)
459 int nregs ATTRIBUTE_UNUSED
;
460 FILE *file ATTRIBUTE_UNUSED
;
464 /* Flush out existing data. */
465 if (basic_block_info
!= NULL
)
471 /* Clear bb->aux on all extant basic blocks. We'll use this as a
472 tag for reuse during create_basic_block, just in case some pass
473 copies around basic block notes improperly. */
474 for (i
= 0; i
< n_basic_blocks
; ++i
)
475 BASIC_BLOCK (i
)->aux
= NULL
;
477 VARRAY_FREE (basic_block_info
);
480 n_basic_blocks
= count_basic_blocks (f
);
482 /* Size the basic block table. The actual structures will be allocated
483 by find_basic_blocks_1, since we want to keep the structure pointers
484 stable across calls to find_basic_blocks. */
485 /* ??? This whole issue would be much simpler if we called find_basic_blocks
486 exactly once, and thereafter we don't have a single long chain of
487 instructions at all until close to the end of compilation when we
488 actually lay them out. */
490 VARRAY_BB_INIT (basic_block_info
, n_basic_blocks
, "basic_block_info");
492 find_basic_blocks_1 (f
);
494 /* Record the block to which an insn belongs. */
495 /* ??? This should be done another way, by which (perhaps) a label is
496 tagged directly with the basic block that it starts. It is used for
497 more than that currently, but IMO that is the only valid use. */
499 max_uid
= get_max_uid ();
501 /* Leave space for insns life_analysis makes in some cases for auto-inc.
502 These cases are rare, so we don't need too much space. */
503 max_uid
+= max_uid
/ 10;
506 compute_bb_for_insn (max_uid
);
508 /* Discover the edges of our cfg. */
509 record_active_eh_regions (f
);
510 make_edges (label_value_list
);
512 /* Do very simple cleanup now, for the benefit of code that runs between
513 here and cleanup_cfg, e.g. thread_prologue_and_epilogue_insns. */
514 tidy_fallthru_edges ();
516 mark_critical_edges ();
518 #ifdef ENABLE_CHECKING
524 check_function_return_warnings ()
526 if (warn_missing_noreturn
527 && !TREE_THIS_VOLATILE (cfun
->decl
)
528 && EXIT_BLOCK_PTR
->pred
== NULL
)
529 warning ("function might be possible candidate for attribute `noreturn'");
531 /* If we have a path to EXIT, then we do return. */
532 if (TREE_THIS_VOLATILE (cfun
->decl
)
533 && EXIT_BLOCK_PTR
->pred
!= NULL
)
534 warning ("`noreturn' function does return");
536 /* If the clobber_return_insn appears in some basic block, then we
537 do reach the end without returning a value. */
538 else if (warn_return_type
539 && cfun
->x_clobber_return_insn
!= NULL
540 && EXIT_BLOCK_PTR
->pred
!= NULL
)
542 int max_uid
= get_max_uid ();
544 /* If clobber_return_insn was excised by jump1, then renumber_insns
545 can make max_uid smaller than the number still recorded in our rtx.
546 That's fine, since this is a quick way of verifying that the insn
547 is no longer in the chain. */
548 if (INSN_UID (cfun
->x_clobber_return_insn
) < max_uid
)
550 /* Recompute insn->block mapping, since the initial mapping is
551 set before we delete unreachable blocks. */
552 compute_bb_for_insn (max_uid
);
554 if (BLOCK_FOR_INSN (cfun
->x_clobber_return_insn
) != NULL
)
555 warning ("control reaches end of non-void function");
560 /* Count the basic blocks of the function. */
563 count_basic_blocks (f
)
567 register RTX_CODE prev_code
;
568 register int count
= 0;
570 int call_had_abnormal_edge
= 0;
572 prev_code
= JUMP_INSN
;
573 for (insn
= f
; insn
; insn
= NEXT_INSN (insn
))
575 register RTX_CODE code
= GET_CODE (insn
);
577 if (code
== CODE_LABEL
578 || (GET_RTX_CLASS (code
) == 'i'
579 && (prev_code
== JUMP_INSN
580 || prev_code
== BARRIER
581 || (prev_code
== CALL_INSN
&& call_had_abnormal_edge
))))
584 /* Record whether this call created an edge. */
585 if (code
== CALL_INSN
)
587 rtx note
= find_reg_note (insn
, REG_EH_REGION
, NULL_RTX
);
588 int region
= (note
? INTVAL (XEXP (note
, 0)) : 1);
590 call_had_abnormal_edge
= 0;
592 /* If there is an EH region or rethrow, we have an edge. */
593 if ((eh_region
&& region
> 0)
594 || find_reg_note (insn
, REG_EH_RETHROW
, NULL_RTX
))
595 call_had_abnormal_edge
= 1;
596 else if (nonlocal_goto_handler_labels
&& region
>= 0)
597 /* If there is a nonlocal goto label and the specified
598 region number isn't -1, we have an edge. (0 means
599 no throw, but might have a nonlocal goto). */
600 call_had_abnormal_edge
= 1;
605 else if (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_EH_REGION_BEG
)
607 else if (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_EH_REGION_END
)
611 /* The rest of the compiler works a bit smoother when we don't have to
612 check for the edge case of do-nothing functions with no basic blocks. */
615 emit_insn (gen_rtx_USE (VOIDmode
, const0_rtx
));
622 /* Scan a list of insns for labels referred to other than by jumps.
623 This is used to scan the alternatives of a call placeholder. */
625 find_label_refs (f
, lvl
)
631 for (insn
= f
; insn
; insn
= NEXT_INSN (insn
))
636 /* Make a list of all labels referred to other than by jumps
637 (which just don't have the REG_LABEL notes).
639 Make a special exception for labels followed by an ADDR*VEC,
640 as this would be a part of the tablejump setup code.
642 Make a special exception for the eh_return_stub_label, which
643 we know isn't part of any otherwise visible control flow. */
645 for (note
= REG_NOTES (insn
); note
; note
= XEXP (note
, 1))
646 if (REG_NOTE_KIND (note
) == REG_LABEL
)
648 rtx lab
= XEXP (note
, 0), next
;
650 if (lab
== eh_return_stub_label
)
652 else if ((next
= next_nonnote_insn (lab
)) != NULL
653 && GET_CODE (next
) == JUMP_INSN
654 && (GET_CODE (PATTERN (next
)) == ADDR_VEC
655 || GET_CODE (PATTERN (next
)) == ADDR_DIFF_VEC
))
657 else if (GET_CODE (lab
) == NOTE
)
660 lvl
= alloc_EXPR_LIST (0, XEXP (note
, 0), lvl
);
667 /* Find all basic blocks of the function whose first insn is F.
669 Collect and return a list of labels whose addresses are taken. This
670 will be used in make_edges for use with computed gotos. */
673 find_basic_blocks_1 (f
)
676 register rtx insn
, next
;
678 rtx bb_note
= NULL_RTX
;
679 rtx eh_list
= NULL_RTX
;
685 /* We process the instructions in a slightly different way than we did
686 previously. This is so that we see a NOTE_BASIC_BLOCK after we have
687 closed out the previous block, so that it gets attached at the proper
688 place. Since this form should be equivalent to the previous,
689 count_basic_blocks continues to use the old form as a check. */
691 for (insn
= f
; insn
; insn
= next
)
693 enum rtx_code code
= GET_CODE (insn
);
695 next
= NEXT_INSN (insn
);
701 int kind
= NOTE_LINE_NUMBER (insn
);
703 /* Keep a LIFO list of the currently active exception notes. */
704 if (kind
== NOTE_INSN_EH_REGION_BEG
)
705 eh_list
= alloc_INSN_LIST (insn
, eh_list
);
706 else if (kind
== NOTE_INSN_EH_REGION_END
)
710 eh_list
= XEXP (eh_list
, 1);
711 free_INSN_LIST_node (t
);
714 /* Look for basic block notes with which to keep the
715 basic_block_info pointers stable. Unthread the note now;
716 we'll put it back at the right place in create_basic_block.
717 Or not at all if we've already found a note in this block. */
718 else if (kind
== NOTE_INSN_BASIC_BLOCK
)
720 if (bb_note
== NULL_RTX
)
723 next
= flow_delete_insn (insn
);
729 /* A basic block starts at a label. If we've closed one off due
730 to a barrier or some such, no need to do it again. */
731 if (head
!= NULL_RTX
)
733 /* While we now have edge lists with which other portions of
734 the compiler might determine a call ending a basic block
735 does not imply an abnormal edge, it will be a bit before
736 everything can be updated. So continue to emit a noop at
737 the end of such a block. */
738 if (GET_CODE (end
) == CALL_INSN
&& ! SIBLING_CALL_P (end
))
740 rtx nop
= gen_rtx_USE (VOIDmode
, const0_rtx
);
741 end
= emit_insn_after (nop
, end
);
744 create_basic_block (i
++, head
, end
, bb_note
);
752 /* A basic block ends at a jump. */
753 if (head
== NULL_RTX
)
757 /* ??? Make a special check for table jumps. The way this
758 happens is truly and amazingly gross. We are about to
759 create a basic block that contains just a code label and
760 an addr*vec jump insn. Worse, an addr_diff_vec creates
761 its own natural loop.
763 Prevent this bit of brain damage, pasting things together
764 correctly in make_edges.
766 The correct solution involves emitting the table directly
767 on the tablejump instruction as a note, or JUMP_LABEL. */
769 if (GET_CODE (PATTERN (insn
)) == ADDR_VEC
770 || GET_CODE (PATTERN (insn
)) == ADDR_DIFF_VEC
)
778 goto new_bb_inclusive
;
781 /* A basic block ends at a barrier. It may be that an unconditional
782 jump already closed the basic block -- no need to do it again. */
783 if (head
== NULL_RTX
)
786 /* While we now have edge lists with which other portions of the
787 compiler might determine a call ending a basic block does not
788 imply an abnormal edge, it will be a bit before everything can
789 be updated. So continue to emit a noop at the end of such a
791 if (GET_CODE (end
) == CALL_INSN
&& ! SIBLING_CALL_P (end
))
793 rtx nop
= gen_rtx_USE (VOIDmode
, const0_rtx
);
794 end
= emit_insn_after (nop
, end
);
796 goto new_bb_exclusive
;
800 /* Record whether this call created an edge. */
801 rtx note
= find_reg_note (insn
, REG_EH_REGION
, NULL_RTX
);
802 int region
= (note
? INTVAL (XEXP (note
, 0)) : 1);
803 int call_has_abnormal_edge
= 0;
805 if (GET_CODE (PATTERN (insn
)) == CALL_PLACEHOLDER
)
807 /* Scan each of the alternatives for label refs. */
808 lvl
= find_label_refs (XEXP (PATTERN (insn
), 0), lvl
);
809 lvl
= find_label_refs (XEXP (PATTERN (insn
), 1), lvl
);
810 lvl
= find_label_refs (XEXP (PATTERN (insn
), 2), lvl
);
811 /* Record its tail recursion label, if any. */
812 if (XEXP (PATTERN (insn
), 3) != NULL_RTX
)
813 trll
= alloc_EXPR_LIST (0, XEXP (PATTERN (insn
), 3), trll
);
816 /* If there is an EH region or rethrow, we have an edge. */
817 if ((eh_list
&& region
> 0)
818 || find_reg_note (insn
, REG_EH_RETHROW
, NULL_RTX
))
819 call_has_abnormal_edge
= 1;
820 else if (nonlocal_goto_handler_labels
&& region
>= 0)
821 /* If there is a nonlocal goto label and the specified
822 region number isn't -1, we have an edge. (0 means
823 no throw, but might have a nonlocal goto). */
824 call_has_abnormal_edge
= 1;
826 /* A basic block ends at a call that can either throw or
827 do a non-local goto. */
828 if (call_has_abnormal_edge
)
831 if (head
== NULL_RTX
)
836 create_basic_block (i
++, head
, end
, bb_note
);
837 head
= end
= NULL_RTX
;
845 if (GET_RTX_CLASS (code
) == 'i')
847 if (head
== NULL_RTX
)
854 if (GET_RTX_CLASS (code
) == 'i')
858 /* Make a list of all labels referred to other than by jumps
859 (which just don't have the REG_LABEL notes).
861 Make a special exception for labels followed by an ADDR*VEC,
862 as this would be a part of the tablejump setup code.
864 Make a special exception for the eh_return_stub_label, which
865 we know isn't part of any otherwise visible control flow. */
867 for (note
= REG_NOTES (insn
); note
; note
= XEXP (note
, 1))
868 if (REG_NOTE_KIND (note
) == REG_LABEL
)
870 rtx lab
= XEXP (note
, 0), next
;
872 if (lab
== eh_return_stub_label
)
874 else if ((next
= next_nonnote_insn (lab
)) != NULL
875 && GET_CODE (next
) == JUMP_INSN
876 && (GET_CODE (PATTERN (next
)) == ADDR_VEC
877 || GET_CODE (PATTERN (next
)) == ADDR_DIFF_VEC
))
879 else if (GET_CODE (lab
) == NOTE
)
882 lvl
= alloc_EXPR_LIST (0, XEXP (note
, 0), lvl
);
887 if (head
!= NULL_RTX
)
888 create_basic_block (i
++, head
, end
, bb_note
);
890 flow_delete_insn (bb_note
);
892 if (i
!= n_basic_blocks
)
895 label_value_list
= lvl
;
896 tail_recursion_label_list
= trll
;
899 /* Tidy the CFG by deleting unreachable code and whatnot. */
905 delete_unreachable_blocks ();
906 move_stray_eh_region_notes ();
907 record_active_eh_regions (f
);
909 mark_critical_edges ();
911 /* Kill the data we won't maintain. */
912 free_EXPR_LIST_list (&label_value_list
);
913 free_EXPR_LIST_list (&tail_recursion_label_list
);
916 /* Create a new basic block consisting of the instructions between
917 HEAD and END inclusive. Reuses the note and basic block struct
918 in BB_NOTE, if any. */
921 create_basic_block (index
, head
, end
, bb_note
)
923 rtx head
, end
, bb_note
;
928 && ! RTX_INTEGRATED_P (bb_note
)
929 && (bb
= NOTE_BASIC_BLOCK (bb_note
)) != NULL
932 /* If we found an existing note, thread it back onto the chain. */
936 if (GET_CODE (head
) == CODE_LABEL
)
940 after
= PREV_INSN (head
);
944 if (after
!= bb_note
&& NEXT_INSN (after
) != bb_note
)
945 reorder_insns (bb_note
, bb_note
, after
);
949 /* Otherwise we must create a note and a basic block structure.
950 Since we allow basic block structs in rtl, give the struct
951 the same lifetime by allocating it off the function obstack
952 rather than using malloc. */
954 bb
= (basic_block
) obstack_alloc (&flow_obstack
, sizeof (*bb
));
955 memset (bb
, 0, sizeof (*bb
));
957 if (GET_CODE (head
) == CODE_LABEL
)
958 bb_note
= emit_note_after (NOTE_INSN_BASIC_BLOCK
, head
);
961 bb_note
= emit_note_before (NOTE_INSN_BASIC_BLOCK
, head
);
964 NOTE_BASIC_BLOCK (bb_note
) = bb
;
967 /* Always include the bb note in the block. */
968 if (NEXT_INSN (end
) == bb_note
)
974 BASIC_BLOCK (index
) = bb
;
976 /* Tag the block so that we know it has been used when considering
977 other basic block notes. */
981 /* Records the basic block struct in BB_FOR_INSN, for every instruction
982 indexed by INSN_UID. MAX is the size of the array. */
985 compute_bb_for_insn (max
)
990 if (basic_block_for_insn
)
991 VARRAY_FREE (basic_block_for_insn
);
992 VARRAY_BB_INIT (basic_block_for_insn
, max
, "basic_block_for_insn");
994 for (i
= 0; i
< n_basic_blocks
; ++i
)
996 basic_block bb
= BASIC_BLOCK (i
);
1003 int uid
= INSN_UID (insn
);
1005 VARRAY_BB (basic_block_for_insn
, uid
) = bb
;
1008 insn
= NEXT_INSN (insn
);
1013 /* Free the memory associated with the edge structures. */
1021 for (i
= 0; i
< n_basic_blocks
; ++i
)
1023 basic_block bb
= BASIC_BLOCK (i
);
1025 for (e
= bb
->succ
; e
; e
= n
)
1035 for (e
= ENTRY_BLOCK_PTR
->succ
; e
; e
= n
)
1041 ENTRY_BLOCK_PTR
->succ
= 0;
1042 EXIT_BLOCK_PTR
->pred
= 0;
1047 /* Identify the edges between basic blocks.
1049 NONLOCAL_LABEL_LIST is a list of non-local labels in the function. Blocks
1050 that are otherwise unreachable may be reachable with a non-local goto.
1052 BB_EH_END is an array indexed by basic block number in which we record
1053 the list of exception regions active at the end of the basic block. */
1056 make_edges (label_value_list
)
1057 rtx label_value_list
;
1060 eh_nesting_info
*eh_nest_info
= init_eh_nesting_info ();
1061 sbitmap
*edge_cache
= NULL
;
1063 /* Assume no computed jump; revise as we create edges. */
1064 current_function_has_computed_jump
= 0;
1066 /* Heavy use of computed goto in machine-generated code can lead to
1067 nearly fully-connected CFGs. In that case we spend a significant
1068 amount of time searching the edge lists for duplicates. */
1069 if (forced_labels
|| label_value_list
)
1071 edge_cache
= sbitmap_vector_alloc (n_basic_blocks
, n_basic_blocks
);
1072 sbitmap_vector_zero (edge_cache
, n_basic_blocks
);
1075 /* By nature of the way these get numbered, block 0 is always the entry. */
1076 make_edge (edge_cache
, ENTRY_BLOCK_PTR
, BASIC_BLOCK (0), EDGE_FALLTHRU
);
1078 for (i
= 0; i
< n_basic_blocks
; ++i
)
1080 basic_block bb
= BASIC_BLOCK (i
);
1083 int force_fallthru
= 0;
1085 /* Examine the last instruction of the block, and discover the
1086 ways we can leave the block. */
1089 code
= GET_CODE (insn
);
1092 if (code
== JUMP_INSN
)
1096 /* ??? Recognize a tablejump and do the right thing. */
1097 if ((tmp
= JUMP_LABEL (insn
)) != NULL_RTX
1098 && (tmp
= NEXT_INSN (tmp
)) != NULL_RTX
1099 && GET_CODE (tmp
) == JUMP_INSN
1100 && (GET_CODE (PATTERN (tmp
)) == ADDR_VEC
1101 || GET_CODE (PATTERN (tmp
)) == ADDR_DIFF_VEC
))
1106 if (GET_CODE (PATTERN (tmp
)) == ADDR_VEC
)
1107 vec
= XVEC (PATTERN (tmp
), 0);
1109 vec
= XVEC (PATTERN (tmp
), 1);
1111 for (j
= GET_NUM_ELEM (vec
) - 1; j
>= 0; --j
)
1112 make_label_edge (edge_cache
, bb
,
1113 XEXP (RTVEC_ELT (vec
, j
), 0), 0);
1115 /* Some targets (eg, ARM) emit a conditional jump that also
1116 contains the out-of-range target. Scan for these and
1117 add an edge if necessary. */
1118 if ((tmp
= single_set (insn
)) != NULL
1119 && SET_DEST (tmp
) == pc_rtx
1120 && GET_CODE (SET_SRC (tmp
)) == IF_THEN_ELSE
1121 && GET_CODE (XEXP (SET_SRC (tmp
), 2)) == LABEL_REF
)
1122 make_label_edge (edge_cache
, bb
,
1123 XEXP (XEXP (SET_SRC (tmp
), 2), 0), 0);
1125 #ifdef CASE_DROPS_THROUGH
1126 /* Silly VAXen. The ADDR_VEC is going to be in the way of
1127 us naturally detecting fallthru into the next block. */
1132 /* If this is a computed jump, then mark it as reaching
1133 everything on the label_value_list and forced_labels list. */
1134 else if (computed_jump_p (insn
))
1136 current_function_has_computed_jump
= 1;
1138 for (x
= label_value_list
; x
; x
= XEXP (x
, 1))
1139 make_label_edge (edge_cache
, bb
, XEXP (x
, 0), EDGE_ABNORMAL
);
1141 for (x
= forced_labels
; x
; x
= XEXP (x
, 1))
1142 make_label_edge (edge_cache
, bb
, XEXP (x
, 0), EDGE_ABNORMAL
);
1145 /* Returns create an exit out. */
1146 else if (returnjump_p (insn
))
1147 make_edge (edge_cache
, bb
, EXIT_BLOCK_PTR
, 0);
1149 /* Otherwise, we have a plain conditional or unconditional jump. */
1152 if (! JUMP_LABEL (insn
))
1154 make_label_edge (edge_cache
, bb
, JUMP_LABEL (insn
), 0);
1158 /* If this is a sibling call insn, then this is in effect a
1159 combined call and return, and so we need an edge to the
1160 exit block. No need to worry about EH edges, since we
1161 wouldn't have created the sibling call in the first place. */
1163 if (code
== CALL_INSN
&& SIBLING_CALL_P (insn
))
1164 make_edge (edge_cache
, bb
, EXIT_BLOCK_PTR
,
1165 EDGE_ABNORMAL
| EDGE_ABNORMAL_CALL
);
1167 /* If this is a CALL_INSN, then mark it as reaching the active EH
1168 handler for this CALL_INSN. If we're handling asynchronous
1169 exceptions then any insn can reach any of the active handlers.
1171 Also mark the CALL_INSN as reaching any nonlocal goto handler. */
1173 else if (code
== CALL_INSN
|| asynchronous_exceptions
)
1175 /* Add any appropriate EH edges. We do this unconditionally
1176 since there may be a REG_EH_REGION or REG_EH_RETHROW note
1177 on the call, and this needn't be within an EH region. */
1178 make_eh_edge (edge_cache
, eh_nest_info
, bb
, insn
, bb
->eh_end
);
1180 /* If we have asynchronous exceptions, do the same for *all*
1181 exception regions active in the block. */
1182 if (asynchronous_exceptions
1183 && bb
->eh_beg
!= bb
->eh_end
)
1185 if (bb
->eh_beg
>= 0)
1186 make_eh_edge (edge_cache
, eh_nest_info
, bb
,
1187 NULL_RTX
, bb
->eh_beg
);
1189 for (x
= bb
->head
; x
!= bb
->end
; x
= NEXT_INSN (x
))
1190 if (GET_CODE (x
) == NOTE
1191 && (NOTE_LINE_NUMBER (x
) == NOTE_INSN_EH_REGION_BEG
1192 || NOTE_LINE_NUMBER (x
) == NOTE_INSN_EH_REGION_END
))
1194 int region
= NOTE_EH_HANDLER (x
);
1195 make_eh_edge (edge_cache
, eh_nest_info
, bb
,
1200 if (code
== CALL_INSN
&& nonlocal_goto_handler_labels
)
1202 /* ??? This could be made smarter: in some cases it's possible
1203 to tell that certain calls will not do a nonlocal goto.
1205 For example, if the nested functions that do the nonlocal
1206 gotos do not have their addresses taken, then only calls to
1207 those functions or to other nested functions that use them
1208 could possibly do nonlocal gotos. */
1209 /* We do know that a REG_EH_REGION note with a value less
1210 than 0 is guaranteed not to perform a non-local goto. */
1211 rtx note
= find_reg_note (insn
, REG_EH_REGION
, NULL_RTX
);
1212 if (!note
|| INTVAL (XEXP (note
, 0)) >= 0)
1213 for (x
= nonlocal_goto_handler_labels
; x
; x
= XEXP (x
, 1))
1214 make_label_edge (edge_cache
, bb
, XEXP (x
, 0),
1215 EDGE_ABNORMAL
| EDGE_ABNORMAL_CALL
);
1219 /* We know something about the structure of the function __throw in
1220 libgcc2.c. It is the only function that ever contains eh_stub
1221 labels. It modifies its return address so that the last block
1222 returns to one of the eh_stub labels within it. So we have to
1223 make additional edges in the flow graph. */
1224 if (i
+ 1 == n_basic_blocks
&& eh_return_stub_label
!= 0)
1225 make_label_edge (edge_cache
, bb
, eh_return_stub_label
, EDGE_EH
);
1227 /* Find out if we can drop through to the next block. */
1228 insn
= next_nonnote_insn (insn
);
1229 if (!insn
|| (i
+ 1 == n_basic_blocks
&& force_fallthru
))
1230 make_edge (edge_cache
, bb
, EXIT_BLOCK_PTR
, EDGE_FALLTHRU
);
1231 else if (i
+ 1 < n_basic_blocks
)
1233 rtx tmp
= BLOCK_HEAD (i
+ 1);
1234 if (GET_CODE (tmp
) == NOTE
)
1235 tmp
= next_nonnote_insn (tmp
);
1236 if (force_fallthru
|| insn
== tmp
)
1237 make_edge (edge_cache
, bb
, BASIC_BLOCK (i
+ 1), EDGE_FALLTHRU
);
1241 free_eh_nesting_info (eh_nest_info
);
1243 sbitmap_vector_free (edge_cache
);
/* NOTE(review): fragmented extraction -- interior source lines (declarations,
   braces, duplicate-edge early-return) are missing from this copy.  Comments
   only; no code changed.  Restore from a pristine flow.c before editing.  */
1246 /* Create an edge between two basic blocks. FLAGS are auxiliary information
1247 about the edge that is accumulated between calls. */
1250 make_edge (edge_cache
, src
, dst
, flags
)
1251 sbitmap
*edge_cache
;
1252 basic_block src
, dst
;
/* Skip the bitmap cache for ENTRY/EXIT: no cache rows were allocated for
   them (see the allocation guarded on n_basic_blocks in make_edges).  */
1258 /* Don't bother with edge cache for ENTRY or EXIT; there aren't that
1259 many edges to them, and we didn't allocate memory for it. */
1260 use_edge_cache
= (edge_cache
1261 && src
!= ENTRY_BLOCK_PTR
1262 && dst
!= EXIT_BLOCK_PTR
)
;
/* Fall back to a linear scan of src->succ when the cache is unusable or
   the cache bit says a duplicate may exist.  */
1264 /* Make sure we don't add duplicate edges. */
1265 if (! use_edge_cache
|| TEST_BIT (edge_cache
[src
->index
], dst
->index
))
1266 for (e
= src
->succ
; e
; e
= e
->succ_next
)
/* Allocate and link the new edge at the head of both lists.  */
1273 e
= (edge
) xcalloc (1, sizeof (*e
));
1276 e
->succ_next
= src
->succ
;
1277 e
->pred_next
= dst
->pred
;
1286 SET_BIT (edge_cache
[src
->index
], dst
->index
);
/* NOTE(review): fragmented extraction; interior lines missing.  Comments
   only; no code changed.  */
1289 /* Create an edge from a basic block to a label. */
1292 make_label_edge (edge_cache
, src
, label
, flags
)
1293 sbitmap
*edge_cache
;
/* Non-CODE_LABEL targets are ignored (guard below; its body is lost in
   this copy -- presumably an early return, TODO confirm).  */
1298 if (GET_CODE (label
) != CODE_LABEL
)
1301 /* If the label was never emitted, this insn is junk, but avoid a
1302 crash trying to refer to BLOCK_FOR_INSN (label). This can happen
1303 as a result of a syntax error and a diagnostic has already been
1306 if (INSN_UID (label
) == 0)
/* Delegate to make_edge with the label's containing block.  */
1309 make_edge (edge_cache
, src
, BLOCK_FOR_INSN (label
), flags
);
/* NOTE(review): fragmented extraction; the loop header over handler_list and
   several declarations are missing from this copy.  Comments only.  */
1312 /* Create the edges generated by INSN in REGION. */
1315 make_eh_edge (edge_cache
, eh_nest_info
, src
, insn
, region
)
1316 sbitmap
*edge_cache
;
1317 eh_nesting_info
*eh_nest_info
;
1322 handler_info
**handler_list
;
/* Tag edges from CALL_INSNs with EDGE_ABNORMAL_CALL so later passes can
   distinguish call-generated EH edges.  */
1325 is_call
= (insn
&& GET_CODE (insn
) == CALL_INSN
? EDGE_ABNORMAL_CALL
: 0);
1326 num
= reachable_handlers (region
, eh_nest_info
, insn
, &handler_list
);
/* One abnormal EH edge per reachable handler (enclosing loop lost in
   this copy -- TODO confirm against pristine source).  */
1329 make_label_edge (edge_cache
, src
, handler_list
[num
]->handler_label
,
1330 EDGE_ABNORMAL
| EDGE_EH
| is_call
);
/* NOTE(review): fragmented extraction; list-reversal and insertion-point
   lines are partly missing.  Comments only; no code changed.  */
1334 /* EH_REGION notes appearing between basic blocks is ambiguous, and even
1335 dangerous if we intend to move basic blocks around. Move such notes
1336 into the following block. */
1339 move_stray_eh_region_notes ()
/* Nothing between blocks if there are fewer than two blocks.  */
1344 if (n_basic_blocks
< 2)
/* Walk block pairs (b1, b2) from the end of the function backwards.  */
1347 b2
= BASIC_BLOCK (n_basic_blocks
- 1);
1348 for (i
= n_basic_blocks
- 2; i
>= 0; --i
, b2
= b1
)
1350 rtx insn
, next
, list
= NULL_RTX
;
1352 b1
= BASIC_BLOCK (i
);
/* Scan the gap between b1->end and b2->head for stray region notes.  */
1353 for (insn
= NEXT_INSN (b1
->end
); insn
!= b2
->head
; insn
= next
)
1355 next
= NEXT_INSN (insn
);
1356 if (GET_CODE (insn
) == NOTE
1357 && (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_EH_REGION_BEG
1358 || NOTE_LINE_NUMBER (insn
) == NOTE_INSN_EH_REGION_END
))
1360 /* Unlink from the insn chain. */
1361 NEXT_INSN (PREV_INSN (insn
)) = next
;
1362 PREV_INSN (next
) = PREV_INSN (insn
);
/* Collect the unlinked notes on a temporary chain.  */
1365 NEXT_INSN (insn
) = list
;
1370 if (list
== NULL_RTX
)
1373 /* Find where to insert these things. */
/* Skip past a leading label so notes land inside the block body.  */
1375 if (GET_CODE (insn
) == CODE_LABEL
)
1376 insn
= NEXT_INSN (insn
);
/* Re-insert the collected notes into the following block.  */
1380 next
= NEXT_INSN (list
);
1381 add_insn_after (list
, insn
);
/* NOTE(review): fragmented extraction; block-index bookkeeping lines are
   partly missing.  Comments only; no code changed.  */
1387 /* Recompute eh_beg/eh_end for each basic block. */
1390 record_active_eh_regions (f
)
/* eh_list is a stack (INSN_LIST) of currently-open EH region notes.  */
1393 rtx insn
, eh_list
= NULL_RTX
;
1395 basic_block bb
= BASIC_BLOCK (0);
1397 for (insn
= f
; insn
; insn
= NEXT_INSN (insn
))
/* At a block head, record the innermost open region (or -1).  */
1399 if (bb
->head
== insn
)
1400 bb
->eh_beg
= (eh_list
? NOTE_EH_HANDLER (XEXP (eh_list
, 0)) : -1);
1402 if (GET_CODE (insn
) == NOTE
)
1404 int kind
= NOTE_LINE_NUMBER (insn
);
/* Push on REGION_BEG, pop on REGION_END.  */
1405 if (kind
== NOTE_INSN_EH_REGION_BEG
)
1406 eh_list
= alloc_INSN_LIST (insn
, eh_list
);
1407 else if (kind
== NOTE_INSN_EH_REGION_END
)
1409 rtx t
= XEXP (eh_list
, 1);
1410 free_INSN_LIST_node (eh_list
);
/* At a block end, record the innermost open region and advance bb.  */
1415 if (bb
->end
== insn
)
1417 bb
->eh_end
= (eh_list
? NOTE_EH_HANDLER (XEXP (eh_list
, 0)) : -1);
1419 if (i
== n_basic_blocks
)
1421 bb
= BASIC_BLOCK (i
);
/* NOTE(review): fragmented extraction; the outer walk/termination lines are
   missing.  Comments only; no code changed.  A critical edge is one whose
   source has multiple successors AND whose destination has multiple
   predecessors (both tests appear below).  */
1426 /* Identify critical edges and set the bits appropriately. */
1429 mark_critical_edges ()
1431 int i
, n
= n_basic_blocks
;
1434 /* We begin with the entry block. This is not terribly important now,
1435 but could be if a front end (Fortran) implemented alternate entry
1437 bb
= ENTRY_BLOCK_PTR
;
1444 /* (1) Critical edges must have a source with multiple successors. */
1445 if (bb
->succ
&& bb
->succ
->succ_next
)
1447 for (e
= bb
->succ
; e
; e
= e
->succ_next
)
1449 /* (2) Critical edges must have a destination with multiple
1450 predecessors. Note that we know there is at least one
1451 predecessor -- the edge we followed to get here. */
1452 if (e
->dest
->pred
->pred_next
)
1453 e
->flags
|= EDGE_CRITICAL
;
1455 e
->flags
&= ~EDGE_CRITICAL
;
/* Single-successor source: none of its out-edges can be critical.  */
1460 for (e
= bb
->succ
; e
; e
= e
->succ_next
)
1461 e
->flags
&= ~EDGE_CRITICAL
;
1466 bb
= BASIC_BLOCK (i
);
/* NOTE(review): fragmented extraction; several statements (edge src
   redirection body, index bookkeeping, return) are missing from this copy.
   Comments only; no code changed.  */
1470 /* Split a block BB after insn INSN creating a new fallthru edge.
1471 Return the new edge. Note that to keep other parts of the compiler happy,
1472 this function renumbers all the basic blocks so that the new
1473 one has a number one greater than the block split. */
1476 split_block (bb
, insn
)
1486 /* There is no point splitting the block after its end. */
1487 if (bb
->end
== insn
)
1490 /* Create the new structures. */
1491 new_bb
= (basic_block
) obstack_alloc (&flow_obstack
, sizeof (*new_bb
));
1492 new_edge
= (edge
) xcalloc (1, sizeof (*new_edge
));
1495 memset (new_bb
, 0, sizeof (*new_bb
));
/* New block owns [NEXT_INSN (insn) .. bb->end]; bb keeps the front.  */
1497 new_bb
->head
= NEXT_INSN (insn
);
1498 new_bb
->end
= bb
->end
;
/* Successors move to the new block; bb's single successor becomes the
   new fallthru edge into new_bb.  */
1501 new_bb
->succ
= bb
->succ
;
1502 bb
->succ
= new_edge
;
1503 new_bb
->pred
= new_edge
;
1504 new_bb
->count
= bb
->count
;
1505 new_bb
->loop_depth
= bb
->loop_depth
;
1508 new_edge
->dest
= new_bb
;
1509 new_edge
->flags
= EDGE_FALLTHRU
;
1510 new_edge
->probability
= REG_BR_PROB_BASE
;
1511 new_edge
->count
= bb
->count
;
1513 /* Redirect the src of the successor edges of bb to point to new_bb. */
1514 for (e
= new_bb
->succ
; e
; e
= e
->succ_next
)
1517 /* Place the new block just after the block being split. */
1518 VARRAY_GROW (basic_block_info
, ++n_basic_blocks
);
1520 /* Some parts of the compiler expect blocks to be number in
1521 sequential order so insert the new block immediately after the
1522 block being split.. */
/* Shift blocks up one slot to open a hole at index j + 1.  */
1524 for (i
= n_basic_blocks
- 1; i
> j
+ 1; --i
)
1526 basic_block tmp
= BASIC_BLOCK (i
- 1);
1527 BASIC_BLOCK (i
) = tmp
;
1531 BASIC_BLOCK (i
) = new_bb
;
1534 /* Create the basic block note. */
1535 bb_note
= emit_note_before (NOTE_INSN_BASIC_BLOCK
,
1537 NOTE_BASIC_BLOCK (bb_note
) = new_bb
;
1538 new_bb
->head
= bb_note
;
1540 update_bb_for_insn (new_bb
);
/* If liveness data exists, recompute it across the split point.  */
1542 if (bb
->global_live_at_start
)
1544 new_bb
->global_live_at_start
= OBSTACK_ALLOC_REG_SET (&flow_obstack
);
1545 new_bb
->global_live_at_end
= OBSTACK_ALLOC_REG_SET (&flow_obstack
);
1546 COPY_REG_SET (new_bb
->global_live_at_end
, bb
->global_live_at_end
);
1548 /* We now have to calculate which registers are live at the end
1549 of the split basic block and at the start of the new basic
1550 block. Start with those registers that are known to be live
1551 at the end of the original basic block and get
1552 propagate_block to determine which registers are live. */
1553 COPY_REG_SET (new_bb
->global_live_at_start
, bb
->global_live_at_end
);
1554 propagate_block (new_bb
, new_bb
->global_live_at_start
, NULL
, NULL
, 0);
1555 COPY_REG_SET (bb
->global_live_at_end
,
1556 new_bb
->global_live_at_start
);
/* NOTE(review): fragmented extraction; many statements (aborts, loop
   bodies, the tablejump rescan, the return) are missing from this copy.
   Comments only; no code changed.  */
1563 /* Split a (typically critical) edge. Return the new block.
1564 Abort on abnormal edges.
1566 ??? The code generally expects to be called on critical edges.
1567 The case of a block ending in an unconditional jump to a
1568 block with multiple predecessors is not handled optimally. */
1571 split_edge (edge_in
)
1574 basic_block old_pred
, bb
, old_succ
;
1579 /* Abnormal edges cannot be split. */
1580 if ((edge_in
->flags
& EDGE_ABNORMAL
) != 0)
1583 old_pred
= edge_in
->src
;
1584 old_succ
= edge_in
->dest
;
1586 /* Remove the existing edge from the destination's pred list. */
1589 for (pp
= &old_succ
->pred
; *pp
!= edge_in
; pp
= &(*pp
)->pred_next
)
1591 *pp
= edge_in
->pred_next
;
1592 edge_in
->pred_next
= NULL
;
1595 /* Create the new structures. */
1596 bb
= (basic_block
) obstack_alloc (&flow_obstack
, sizeof (*bb
));
1597 edge_out
= (edge
) xcalloc (1, sizeof (*edge_out
));
1600 memset (bb
, 0, sizeof (*bb
));
1602 /* ??? This info is likely going to be out of date very soon. */
1603 if (old_succ
->global_live_at_start
)
1605 bb
->global_live_at_start
= OBSTACK_ALLOC_REG_SET (&flow_obstack
);
1606 bb
->global_live_at_end
= OBSTACK_ALLOC_REG_SET (&flow_obstack
);
1607 COPY_REG_SET (bb
->global_live_at_start
, old_succ
->global_live_at_start
);
1608 COPY_REG_SET (bb
->global_live_at_end
, old_succ
->global_live_at_start
);
/* Wire the new block between old_pred and old_succ via edge_out.  */
1613 bb
->succ
= edge_out
;
1614 bb
->count
= edge_in
->count
;
/* The redirected edge is no longer critical: its new dest (bb) has a
   single predecessor.  */
1617 edge_in
->flags
&= ~EDGE_CRITICAL
;
1619 edge_out
->pred_next
= old_succ
->pred
;
1620 edge_out
->succ_next
= NULL
;
1622 edge_out
->dest
= old_succ
;
1623 edge_out
->flags
= EDGE_FALLTHRU
;
1624 edge_out
->probability
= REG_BR_PROB_BASE
;
1625 edge_out
->count
= edge_in
->count
;
1627 old_succ
->pred
= edge_out
;
1629 /* Tricky case -- if there existed a fallthru into the successor
1630 (and we're not it) we must add a new unconditional jump around
1631 the new block we're actually interested in.
1633 Further, if that edge is critical, this means a second new basic
1634 block must be created to hold it. In order to simplify correct
1635 insn placement, do this before we touch the existing basic block
1636 ordering for the block we were really wanting. */
1637 if ((edge_in
->flags
& EDGE_FALLTHRU
) == 0)
1640 for (e
= edge_out
->pred_next
; e
; e
= e
->pred_next
)
1641 if (e
->flags
& EDGE_FALLTHRU
)
1646 basic_block jump_block
;
1649 if ((e
->flags
& EDGE_CRITICAL
) == 0
1650 && e
->src
!= ENTRY_BLOCK_PTR
)
1652 /* Non critical -- we can simply add a jump to the end
1653 of the existing predecessor. */
1654 jump_block
= e
->src
;
1658 /* We need a new block to hold the jump. The simplest
1659 way to do the bulk of the work here is to recursively
1661 jump_block
= split_edge (e
);
1662 e
= jump_block
->succ
;
1665 /* Now add the jump insn ... */
1666 pos
= emit_jump_insn_after (gen_jump (old_succ
->head
),
1668 jump_block
->end
= pos
;
1669 if (basic_block_for_insn
)
1670 set_block_for_insn (pos
, jump_block
);
1671 emit_barrier_after (pos
);
1673 /* ... let jump know that label is in use, ... */
1674 JUMP_LABEL (pos
) = old_succ
->head
;
1675 ++LABEL_NUSES (old_succ
->head
);
1677 /* ... and clear fallthru on the outgoing edge. */
1678 e
->flags
&= ~EDGE_FALLTHRU
;
1680 /* Continue splitting the interesting edge. */
1684 /* Place the new block just in front of the successor. */
1685 VARRAY_GROW (basic_block_info
, ++n_basic_blocks
);
1686 if (old_succ
== EXIT_BLOCK_PTR
)
1687 j
= n_basic_blocks
- 1;
1689 j
= old_succ
->index
;
/* Shift blocks up one slot so the new block sits at index j.  */
1690 for (i
= n_basic_blocks
- 1; i
> j
; --i
)
1692 basic_block tmp
= BASIC_BLOCK (i
- 1);
1693 BASIC_BLOCK (i
) = tmp
;
1696 BASIC_BLOCK (i
) = bb
;
1699 /* Create the basic block note.
1701 Where we place the note can have a noticable impact on the generated
1702 code. Consider this cfg:
1712 If we need to insert an insn on the edge from block 0 to block 1,
1713 we want to ensure the instructions we insert are outside of any
1714 loop notes that physically sit between block 0 and block 1. Otherwise
1715 we confuse the loop optimizer into thinking the loop is a phony. */
1716 if (old_succ
!= EXIT_BLOCK_PTR
1717 && PREV_INSN (old_succ
->head
)
1718 && GET_CODE (PREV_INSN (old_succ
->head
)) == NOTE
1719 && NOTE_LINE_NUMBER (PREV_INSN (old_succ
->head
)) == NOTE_INSN_LOOP_BEG
)
1720 bb_note
= emit_note_before (NOTE_INSN_BASIC_BLOCK
,
1721 PREV_INSN (old_succ
->head
));
1722 else if (old_succ
!= EXIT_BLOCK_PTR
)
1723 bb_note
= emit_note_before (NOTE_INSN_BASIC_BLOCK
, old_succ
->head
);
1725 bb_note
= emit_note_after (NOTE_INSN_BASIC_BLOCK
, get_last_insn ());
1726 NOTE_BASIC_BLOCK (bb_note
) = bb
;
1727 bb
->head
= bb
->end
= bb_note
;
1729 /* Not quite simple -- for non-fallthru edges, we must adjust the
1730 predecessor's jump instruction to target our new block. */
1731 if ((edge_in
->flags
& EDGE_FALLTHRU
) == 0)
1733 rtx tmp
, insn
= old_pred
->end
;
1734 rtx old_label
= old_succ
->head
;
1735 rtx new_label
= gen_label_rtx ();
1737 if (GET_CODE (insn
) != JUMP_INSN
)
1740 /* ??? Recognize a tablejump and adjust all matching cases. */
1741 if ((tmp
= JUMP_LABEL (insn
)) != NULL_RTX
1742 && (tmp
= NEXT_INSN (tmp
)) != NULL_RTX
1743 && GET_CODE (tmp
) == JUMP_INSN
1744 && (GET_CODE (PATTERN (tmp
)) == ADDR_VEC
1745 || GET_CODE (PATTERN (tmp
)) == ADDR_DIFF_VEC
))
1750 if (GET_CODE (PATTERN (tmp
)) == ADDR_VEC
)
1751 vec
= XVEC (PATTERN (tmp
), 0);
1753 vec
= XVEC (PATTERN (tmp
), 1);
/* Retarget every dispatch-table entry that referenced old_label.  */
1755 for (j
= GET_NUM_ELEM (vec
) - 1; j
>= 0; --j
)
1756 if (XEXP (RTVEC_ELT (vec
, j
), 0) == old_label
)
1758 RTVEC_ELT (vec
, j
) = gen_rtx_LABEL_REF (VOIDmode
, new_label
);
1759 --LABEL_NUSES (old_label
);
1760 ++LABEL_NUSES (new_label
);
1763 /* Handle casesi dispatch insns */
1764 if ((tmp
= single_set (insn
)) != NULL
1765 && SET_DEST (tmp
) == pc_rtx
1766 && GET_CODE (SET_SRC (tmp
)) == IF_THEN_ELSE
1767 && GET_CODE (XEXP (SET_SRC (tmp
), 2)) == LABEL_REF
1768 && XEXP (XEXP (SET_SRC (tmp
), 2), 0) == old_label
)
1770 XEXP (SET_SRC (tmp
), 2) = gen_rtx_LABEL_REF (VOIDmode
,
1772 --LABEL_NUSES (old_label
);
1773 ++LABEL_NUSES (new_label
);
1778 /* This would have indicated an abnormal edge. */
1779 if (computed_jump_p (insn
))
1782 /* A return instruction can't be redirected. */
1783 if (returnjump_p (insn
))
1786 /* If the insn doesn't go where we think, we're confused. */
1787 if (JUMP_LABEL (insn
) != old_label
)
1790 redirect_jump (insn
, new_label
, 0);
/* The new label heads the new block.  */
1793 emit_label_before (new_label
, bb_note
);
1794 bb
->head
= new_label
;
/* NOTE(review): fragmented extraction; the abort and end_sequence lines are
   missing from this copy.  Comments only; no code changed.  */
1800 /* Queue instructions for insertion on an edge between two basic blocks.
1801 The new instructions and basic blocks (if any) will not appear in the
1802 CFG until commit_edge_insertions is called. */
1805 insert_insn_on_edge (pattern
, e
)
1809 /* We cannot insert instructions on an abnormal critical edge.
1810 It will be easier to find the culprit if we die now. */
1811 if ((e
->flags
& (EDGE_ABNORMAL
|EDGE_CRITICAL
))
1812 == (EDGE_ABNORMAL
|EDGE_CRITICAL
))
/* Start a fresh sequence, or append to insns already queued on E.  */
1815 if (e
->insns
== NULL_RTX
)
1818 push_to_sequence (e
->insns
);
1820 emit_insn (pattern
);
1822 e
->insns
= get_insns ();
/* NOTE(review): fragmented extraction; declarations, several branch bodies,
   and abort calls are missing from this copy.  Comments only.  */
1826 /* Update the CFG for the instructions queued on edge E. */
1829 commit_one_edge_insertion (e
)
1832 rtx before
= NULL_RTX
, after
= NULL_RTX
, insns
, tmp
, last
;
1835 /* Pull the insns off the edge now since the edge might go away. */
1837 e
->insns
= NULL_RTX
;
1839 /* Figure out where to put these things. If the destination has
1840 one predecessor, insert there. Except for the exit block. */
1841 if (e
->dest
->pred
->pred_next
== NULL
1842 && e
->dest
!= EXIT_BLOCK_PTR
)
1846 /* Get the location correct wrt a code label, and "nice" wrt
1847 a basic block note, and before everything else. */
1849 if (GET_CODE (tmp
) == CODE_LABEL
)
1850 tmp
= NEXT_INSN (tmp
);
1851 if (NOTE_INSN_BASIC_BLOCK_P (tmp
))
1852 tmp
= NEXT_INSN (tmp
);
1853 if (tmp
== bb
->head
)
1856 after
= PREV_INSN (tmp
);
1859 /* If the source has one successor and the edge is not abnormal,
1860 insert there. Except for the entry block. */
1861 else if ((e
->flags
& EDGE_ABNORMAL
) == 0
1862 && e
->src
->succ
->succ_next
== NULL
1863 && e
->src
!= ENTRY_BLOCK_PTR
)
1866 /* It is possible to have a non-simple jump here. Consider a target
1867 where some forms of unconditional jumps clobber a register. This
1868 happens on the fr30 for example.
1870 We know this block has a single successor, so we can just emit
1871 the queued insns before the jump. */
1872 if (GET_CODE (bb
->end
) == JUMP_INSN
)
1878 /* We'd better be fallthru, or we've lost track of what's what. */
1879 if ((e
->flags
& EDGE_FALLTHRU
) == 0)
1886 /* Otherwise we must split the edge. */
1889 bb
= split_edge (e
);
1893 /* Now that we've found the spot, do the insertion. */
1895 /* Set the new block number for these insns, if structure is allocated. */
1896 if (basic_block_for_insn
)
1899 for (i
= insns
; i
!= NULL_RTX
; i
= NEXT_INSN (i
))
1900 set_block_for_insn (i
, bb
);
1905 emit_insns_before (insns
, before
);
1906 if (before
== bb
->head
)
1909 last
= prev_nonnote_insn (before
);
1913 last
= emit_insns_after (insns
, after
);
1914 if (after
== bb
->end
)
/* A return in the inserted sequence makes the block exit the
   function; this is only expected for the epilogue case below.  */
1918 if (returnjump_p (last
))
1920 /* ??? Remove all outgoing edges from BB and add one for EXIT.
1921 This is not currently a problem because this only happens
1922 for the (single) epilogue, which already has a fallthru edge
1926 if (e
->dest
!= EXIT_BLOCK_PTR
1927 || e
->succ_next
!= NULL
1928 || (e
->flags
& EDGE_FALLTHRU
) == 0)
1930 e
->flags
&= ~EDGE_FALLTHRU
;
1932 emit_barrier_after (last
);
1936 flow_delete_insn (before
);
1938 else if (GET_CODE (last
) == JUMP_INSN
)
/* NOTE(review): fragmented extraction; declarations and loop framing are
   missing from this copy.  Comments only; no code changed.  */
1942 /* Update the CFG for all queued instructions. */
1945 commit_edge_insertions ()
1950 #ifdef ENABLE_CHECKING
1951 verify_flow_info ();
/* Walk ENTRY then every basic block; next is cached because committing
   an edge's insns may delete or replace the edge.  */
1955 bb
= ENTRY_BLOCK_PTR
;
1960 for (e
= bb
->succ
; e
; e
= next
)
1962 next
= e
->succ_next
;
1964 commit_one_edge_insertion (e
);
1967 if (++i
>= n_basic_blocks
)
1969 bb
= BASIC_BLOCK (i
);
/* NOTE(review): fragmented extraction; marker writes, worklist pushes, and
   the final free are missing from this copy.  Comments only.  */
1973 /* Delete all unreachable basic blocks. */
1976 delete_unreachable_blocks ()
1978 basic_block
*worklist
, *tos
;
1979 int deleted_handler
;
/* Explicit worklist sized to the block count; tos is the stack top.  */
1984 tos
= worklist
= (basic_block
*) xmalloc (sizeof (basic_block
) * n
);
1986 /* Use basic_block->aux as a marker. Clear them all. */
1988 for (i
= 0; i
< n
; ++i
)
1989 BASIC_BLOCK (i
)->aux
= NULL
;
1991 /* Add our starting points to the worklist. Almost always there will
1992 be only one. It isn't inconcievable that we might one day directly
1993 support Fortran alternate entry points. */
1995 for (e
= ENTRY_BLOCK_PTR
->succ
; e
; e
= e
->succ_next
)
1999 /* Mark the block with a handy non-null value. */
2003 /* Iterate: find everything reachable from what we've already seen. */
2005 while (tos
!= worklist
)
2007 basic_block b
= *--tos
;
2009 for (e
= b
->succ
; e
; e
= e
->succ_next
)
2017 /* Delete all unreachable basic blocks. Count down so that we don't
2018 interfere with the block renumbering that happens in flow_delete_block. */
2020 deleted_handler
= 0;
2022 for (i
= n
- 1; i
>= 0; --i
)
2024 basic_block b
= BASIC_BLOCK (i
);
2027 /* This block was found. Tidy up the mark. */
2030 deleted_handler
|= flow_delete_block (b
);
2033 tidy_fallthru_edges ();
2035 /* If we deleted an exception handler, we may have EH region begin/end
2036 blocks to remove as well. */
2037 if (deleted_handler
)
2038 delete_eh_regions ();
/* NOTE(review): fragmented extraction; comments only; no code changed.  */
2043 /* Find EH regions for which there is no longer a handler, and delete them. */
2046 delete_eh_regions ()
/* Refresh rethrow usage info before deciding which regions are dead.  */
2050 update_rethrow_references ();
2052 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
2053 if (GET_CODE (insn
) == NOTE
)
2055 if ((NOTE_LINE_NUMBER (insn
) == NOTE_INSN_EH_REGION_BEG
)
2056 || (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_EH_REGION_END
))
2058 int num
= NOTE_EH_HANDLER (insn
);
2059 /* A NULL handler indicates a region is no longer needed,
2060 as long as its rethrow label isn't used. */
2061 if (get_first_handler (num
) == NULL
&& ! rethrow_used (num
))
/* Neutralize the note rather than unlinking it.  */
2063 NOTE_LINE_NUMBER (insn
) = NOTE_INSN_DELETED
;
2064 NOTE_SOURCE_FILE (insn
) = 0;
/* NOTE(review): fragmented extraction; comments only; no code changed.  */
2070 /* Return true if NOTE is not one of the ones that must be kept paired,
2071 so that we may simply delete them. */
2074 can_delete_note_p (note
)
/* Only DELETED and BASIC_BLOCK notes are freely deletable; paired notes
   (loop / EH region begin-end) must be preserved.  */
2077 return (NOTE_LINE_NUMBER (note
) == NOTE_INSN_DELETED
2078 || NOTE_LINE_NUMBER (note
) == NOTE_INSN_BASIC_BLOCK
);
/* NOTE(review): fragmented extraction; loop framing lines are missing.
   Comments only; no code changed.  */
2081 /* Unlink a chain of insns between START and FINISH, leaving notes
2082 that must be paired. */
2085 flow_delete_insn_chain (start
, finish
)
2088 /* Unchain the insns one by one. It would be quicker to delete all
2089 of these with a single unchaining, rather than one at a time, but
2090 we need to keep the NOTE's. */
2096 next
= NEXT_INSN (start
);
/* Keep paired notes; degrade undeletable labels to DELETED_LABEL notes
   (preserving the name in NOTE_SOURCE_FILE); delete everything else.  */
2097 if (GET_CODE (start
) == NOTE
&& !can_delete_note_p (start
))
2099 else if (GET_CODE (start
) == CODE_LABEL
2100 && ! can_delete_label_p (start
))
2102 const char *name
= LABEL_NAME (start
);
2103 PUT_CODE (start
, NOTE
);
2104 NOTE_LINE_NUMBER (start
) = NOTE_INSN_DELETED_LABEL
;
2105 NOTE_SOURCE_FILE (start
) = name
;
2108 next
= flow_delete_insn (start
);
2110 if (start
== finish
)
/* NOTE(review): fragmented extraction; declarations, edge-free calls, and
   the block-removal call are missing from this copy.  Comments only.  */
2116 /* Delete the insns in a (non-live) block. We physically delete every
2117 non-deleted-note insn, and update the flow graph appropriately.
2119 Return nonzero if we deleted an exception handler. */
2121 /* ??? Preserving all such notes strikes me as wrong. It would be nice
2122 to post-process the stream to remove empty blocks, loops, ranges, etc. */
2125 flow_delete_block (b
)
2128 int deleted_handler
= 0;
2131 /* If the head of this block is a CODE_LABEL, then it might be the
2132 label for an exception handler which can't be reached.
2134 We need to remove the label from the exception_handler_label list
2135 and remove the associated NOTE_INSN_EH_REGION_BEG and
2136 NOTE_INSN_EH_REGION_END notes. */
2140 never_reached_warning (insn
);
2142 if (GET_CODE (insn
) == CODE_LABEL
)
2144 rtx x
, *prev
= &exception_handler_labels
;
2146 for (x
= exception_handler_labels
; x
; x
= XEXP (x
, 1))
2148 if (XEXP (x
, 0) == insn
)
2150 /* Found a match, splice this label out of the EH label list. */
2151 *prev
= XEXP (x
, 1);
2152 XEXP (x
, 1) = NULL_RTX
;
2153 XEXP (x
, 0) = NULL_RTX
;
2155 /* Remove the handler from all regions */
2156 remove_handler (insn
);
2157 deleted_handler
= 1;
2160 prev
= &XEXP (x
, 1);
2164 /* Include any jump table following the basic block. */
2166 if (GET_CODE (end
) == JUMP_INSN
2167 && (tmp
= JUMP_LABEL (end
)) != NULL_RTX
2168 && (tmp
= NEXT_INSN (tmp
)) != NULL_RTX
2169 && GET_CODE (tmp
) == JUMP_INSN
2170 && (GET_CODE (PATTERN (tmp
)) == ADDR_VEC
2171 || GET_CODE (PATTERN (tmp
)) == ADDR_DIFF_VEC
))
2174 /* Include any barrier that may follow the basic block. */
2175 tmp
= next_nonnote_insn (end
);
2176 if (tmp
&& GET_CODE (tmp
) == BARRIER
)
2179 /* Selectively delete the entire chain. */
2180 flow_delete_insn_chain (insn
, end
);
2182 /* Remove the edges into and out of this block. Note that there may
2183 indeed be edges in, if we are removing an unreachable loop. */
/* Unlink each pred edge from its source's succ list, then each succ
   edge from its dest's pred list.  */
2187 for (e
= b
->pred
; e
; e
= next
)
2189 for (q
= &e
->src
->succ
; *q
!= e
; q
= &(*q
)->succ_next
)
2192 next
= e
->pred_next
;
2196 for (e
= b
->succ
; e
; e
= next
)
2198 for (q
= &e
->dest
->pred
; *q
!= e
; q
= &(*q
)->pred_next
)
2201 next
= e
->succ_next
;
2210 /* Remove the basic block from the array, and compact behind it. */
2213 return deleted_handler
;
/* NOTE(review): the function's name line (original 2218-2221) is missing
   from this copy -- presumably the static block-compaction helper; confirm
   against a pristine flow.c.  Comments only; no code changed.  */
2216 /* Remove block B from the basic block array and compact behind it. */
2222 int i
, n
= n_basic_blocks
;
/* Shift every later block down one slot, overwriting B's slot.  */
2224 for (i
= b
->index
; i
+ 1 < n
; ++i
)
2226 basic_block x
= BASIC_BLOCK (i
+ 1);
2227 BASIC_BLOCK (i
) = x
;
2231 basic_block_info
->num_elements
--;
/* NOTE(review): fragmented extraction; the chain-head fixups and the
   return statement are missing from this copy.  Comments only.  */
2235 /* Delete INSN by patching it out. Return the next insn. */
2238 flow_delete_insn (insn
)
2241 rtx prev
= PREV_INSN (insn
);
2242 rtx next
= NEXT_INSN (insn
);
/* Detach INSN and mark it deleted so later passes skip it.  */
2245 PREV_INSN (insn
) = NULL_RTX
;
2246 NEXT_INSN (insn
) = NULL_RTX
;
2247 INSN_DELETED_P (insn
) = 1;
2250 NEXT_INSN (prev
) = next
;
2252 PREV_INSN (next
) = prev
;
2254 set_last_insn (prev
);
/* A deleted label cannot be a nonlocal-goto handler any more.  */
2256 if (GET_CODE (insn
) == CODE_LABEL
)
2257 remove_node_from_expr_list (insn
, &nonlocal_goto_handler_labels
);
2259 /* If deleting a jump, decrement the use count of the label. Deleting
2260 the label itself should happen in the normal course of block merging. */
2261 if (GET_CODE (insn
) == JUMP_INSN
2262 && JUMP_LABEL (insn
)
2263 && GET_CODE (JUMP_LABEL (insn
)) == CODE_LABEL
)
2264 LABEL_NUSES (JUMP_LABEL (insn
))--;
2266 /* Also if deleting an insn that references a label. */
2267 else if ((note
= find_reg_note (insn
, REG_LABEL
, NULL_RTX
)) != NULL_RTX
2268 && GET_CODE (XEXP (note
, 0)) == CODE_LABEL
)
2269 LABEL_NUSES (XEXP (note
, 0))--;
/* NOTE(review): fragmented extraction; the per-check return statements are
   missing from this copy.  Comments only; no code changed.  */
2274 /* True if a given label can be deleted. */
2277 can_delete_label_p (label
)
/* A label must be kept if it is explicitly preserved, appears on the
   forced_labels / label_value_list / exception_handler_labels lists, or
   carries a user-declared name.  */
2282 if (LABEL_PRESERVE_P (label
))
2285 for (x
= forced_labels
; x
; x
= XEXP (x
, 1))
2286 if (label
== XEXP (x
, 0))
2288 for (x
= label_value_list
; x
; x
= XEXP (x
, 1))
2289 if (label
== XEXP (x
, 0))
2291 for (x
= exception_handler_labels
; x
; x
= XEXP (x
, 1))
2292 if (label
== XEXP (x
, 0))
2295 /* User declared labels must be preserved. */
2296 if (LABEL_NAME (label
) != 0)
/* NOTE(review): fragmented extraction; the surrounding comment, parameter
   declaration and return statements are missing from this copy.  Tests
   whether LABEL appears on tail_recursion_label_list.  Comments only.  */
2303 tail_recursion_label_p (label
)
2308 for (x
= tail_recursion_label_list
; x
; x
= XEXP (x
, 1))
2309 if (label
== XEXP (x
, 0))
/* NOTE(review): fragmented extraction; several branch bodies and the final
   field fixups are missing from this copy.  Comments only.  */
2315 /* Blocks A and B are to be merged into a single block A. The insns
2316 are already contiguous, hence `nomove'. */
2319 merge_blocks_nomove (a
, b
)
2323 rtx b_head
, b_end
, a_end
;
2324 rtx del_first
= NULL_RTX
, del_last
= NULL_RTX
;
2327 /* If there was a CODE_LABEL beginning B, delete it. */
2330 if (GET_CODE (b_head
) == CODE_LABEL
)
2332 /* Detect basic blocks with nothing but a label. This can happen
2333 in particular at the end of a function. */
2334 if (b_head
== b_end
)
2336 del_first
= del_last
= b_head
;
2337 b_head
= NEXT_INSN (b_head
);
2340 /* Delete the basic block note. */
2341 if (NOTE_INSN_BASIC_BLOCK_P (b_head
))
2343 if (b_head
== b_end
)
2348 b_head
= NEXT_INSN (b_head
);
2351 /* If there was a jump out of A, delete it. */
2353 if (GET_CODE (a_end
) == JUMP_INSN
)
/* Back up over trailing notes to find where deletion should start.  */
2357 for (prev
= PREV_INSN (a_end
); ; prev
= PREV_INSN (prev
))
2358 if (GET_CODE (prev
) != NOTE
2359 || NOTE_LINE_NUMBER (prev
) == NOTE_INSN_BASIC_BLOCK
2366 /* If this was a conditional jump, we need to also delete
2367 the insn that set cc0. */
2368 if (prev
&& sets_cc0_p (prev
))
2371 prev
= prev_nonnote_insn (prev
);
/* A barrier after A's end is dead once the blocks are joined.  */
2380 else if (GET_CODE (NEXT_INSN (a_end
)) == BARRIER
)
2381 del_first
= NEXT_INSN (a_end
);
2383 /* Delete everything marked above as well as crap that might be
2384 hanging out between the two blocks. */
2385 flow_delete_insn_chain (del_first
, del_last
);
2387 /* Normally there should only be one successor of A and that is B, but
2388 partway though the merge of blocks for conditional_execution we'll
2389 be merging a TEST block with THEN and ELSE successors. Free the
2390 whole lot of them and hope the caller knows what they're doing. */
2392 remove_edge (a
->succ
);
2394 /* Adjust the edges out of B for the new owner. */
2395 for (e
= b
->succ
; e
; e
= e
->succ_next
)
2399 /* B hasn't quite yet ceased to exist. Attempt to prevent mishap. */
2400 b
->pred
= b
->succ
= NULL
;
2402 /* Reassociate the insns of B with A. */
2405 if (basic_block_for_insn
)
2407 BLOCK_FOR_INSN (b_head
) = a
;
2408 while (b_head
!= b_end
)
2410 b_head
= NEXT_INSN (b_head
);
2411 BLOCK_FOR_INSN (b_head
) = a
;
/* NOTE(review): fragmented extraction; declarations, the index swap for B,
   and the return are missing from this copy.  Comments only.  */
2421 /* Blocks A and B are to be merged into a single block. A has no incoming
2422 fallthru edge, so it can be moved before B without adding or modifying
2423 any jumps (aside from the jump from A to B). */
2426 merge_blocks_move_predecessor_nojumps (a
, b
)
2429 rtx start
, end
, barrier
;
/* A must be followed by a barrier (it does not fall through).  */
2435 barrier
= next_nonnote_insn (end
);
2436 if (GET_CODE (barrier
) != BARRIER
)
2438 flow_delete_insn (barrier
);
2440 /* Move block and loop notes out of the chain so that we do not
2441 disturb their order.
2443 ??? A better solution would be to squeeze out all the non-nested notes
2444 and adjust the block trees appropriately. Even better would be to have
2445 a tighter connection between block trees and rtl so that this is not
2447 start
= squeeze_notes (start
, end
);
2449 /* Scramble the insn chain. */
2450 if (end
!= PREV_INSN (b
->head
))
2451 reorder_insns (start
, end
, PREV_INSN (b
->head
));
2455 fprintf (rtl_dump_file
, "Moved block %d before %d and merged.\n",
2456 a
->index
, b
->index
);
2459 /* Swap the records for the two blocks around. Although we are deleting B,
2460 A is now where B was and we want to compact the BB array from where
2462 BASIC_BLOCK (a
->index
) = b
;
2463 BASIC_BLOCK (b
->index
) = a
;
2465 a
->index
= b
->index
;
2468 /* Now blocks A and B are contiguous. Merge them. */
2469 merge_blocks_nomove (a
, b
);
2474 /* Blocks A and B are to be merged into a single block. B has no outgoing
2475 fallthru edge, so it can be moved after A without adding or modifying
2476 any jumps (aside from the jump from A to B). */
2479 merge_blocks_move_successor_nojumps (a
, b
)
2482 rtx start
, end
, barrier
;
2486 barrier
= NEXT_INSN (end
);
2488 /* Recognize a jump table following block B. */
2489 if (GET_CODE (barrier
) == CODE_LABEL
2490 && NEXT_INSN (barrier
)
2491 && GET_CODE (NEXT_INSN (barrier
)) == JUMP_INSN
2492 && (GET_CODE (PATTERN (NEXT_INSN (barrier
))) == ADDR_VEC
2493 || GET_CODE (PATTERN (NEXT_INSN (barrier
))) == ADDR_DIFF_VEC
))
2495 end
= NEXT_INSN (barrier
);
2496 barrier
= NEXT_INSN (end
);
2499 /* There had better have been a barrier there. Delete it. */
2500 if (GET_CODE (barrier
) != BARRIER
)
2502 flow_delete_insn (barrier
);
2504 /* Move block and loop notes out of the chain so that we do not
2505 disturb their order.
2507 ??? A better solution would be to squeeze out all the non-nested notes
2508 and adjust the block trees appropriately. Even better would be to have
2509 a tighter connection between block trees and rtl so that this is not
2511 start
= squeeze_notes (start
, end
);
2513 /* Scramble the insn chain. */
2514 reorder_insns (start
, end
, a
->end
);
2516 /* Now blocks A and B are contiguous. Merge them. */
2517 merge_blocks_nomove (a
, b
);
2521 fprintf (rtl_dump_file
, "Moved block %d after %d and merged.\n",
2522 b
->index
, a
->index
);
2528 /* Attempt to merge basic blocks that are potentially non-adjacent.
2529 Return true iff the attempt succeeded. */
2532 merge_blocks (e
, b
, c
)
2536 /* If C has a tail recursion label, do not merge. There is no
2537 edge recorded from the call_placeholder back to this label, as
2538 that would make optimize_sibling_and_tail_recursive_calls more
2539 complex for no gain. */
2540 if (GET_CODE (c
->head
) == CODE_LABEL
2541 && tail_recursion_label_p (c
->head
))
2544 /* If B has a fallthru edge to C, no need to move anything. */
2545 if (e
->flags
& EDGE_FALLTHRU
)
2547 merge_blocks_nomove (b
, c
);
2551 fprintf (rtl_dump_file
, "Merged %d and %d without moving.\n",
2552 b
->index
, c
->index
);
2561 int c_has_outgoing_fallthru
;
2562 int b_has_incoming_fallthru
;
2564 /* We must make sure to not munge nesting of exception regions,
2565 lexical blocks, and loop notes.
2567 The first is taken care of by requiring that the active eh
2568 region at the end of one block always matches the active eh
2569 region at the beginning of the next block.
2571 The later two are taken care of by squeezing out all the notes. */
2573 /* ??? A throw/catch edge (or any abnormal edge) should be rarely
2574 executed and we may want to treat blocks which have two out
2575 edges, one normal, one abnormal as only having one edge for
2576 block merging purposes. */
2578 for (tmp_edge
= c
->succ
; tmp_edge
; tmp_edge
= tmp_edge
->succ_next
)
2579 if (tmp_edge
->flags
& EDGE_FALLTHRU
)
2581 c_has_outgoing_fallthru
= (tmp_edge
!= NULL
);
2583 for (tmp_edge
= b
->pred
; tmp_edge
; tmp_edge
= tmp_edge
->pred_next
)
2584 if (tmp_edge
->flags
& EDGE_FALLTHRU
)
2586 b_has_incoming_fallthru
= (tmp_edge
!= NULL
);
2588 /* If B does not have an incoming fallthru, and the exception regions
2589 match, then it can be moved immediately before C without introducing
2592 C can not be the first block, so we do not have to worry about
2593 accessing a non-existent block. */
2594 d
= BASIC_BLOCK (c
->index
- 1);
2595 if (! b_has_incoming_fallthru
2596 && d
->eh_end
== b
->eh_beg
2597 && b
->eh_end
== c
->eh_beg
)
2598 return merge_blocks_move_predecessor_nojumps (b
, c
);
2600 /* Otherwise, we're going to try to move C after B. Make sure the
2601 exception regions match.
2603 If B is the last basic block, then we must not try to access the
2604 block structure for block B + 1. Luckily in that case we do not
2605 need to worry about matching exception regions. */
2606 d
= (b
->index
+ 1 < n_basic_blocks
? BASIC_BLOCK (b
->index
+ 1) : NULL
);
2607 if (b
->eh_end
== c
->eh_beg
2608 && (d
== NULL
|| c
->eh_end
== d
->eh_beg
))
2610 /* If C does not have an outgoing fallthru, then it can be moved
2611 immediately after B without introducing or modifying jumps. */
2612 if (! c_has_outgoing_fallthru
)
2613 return merge_blocks_move_successor_nojumps (b
, c
);
2615 /* Otherwise, we'll need to insert an extra jump, and possibly
2616 a new block to contain it. */
2617 /* ??? Not implemented yet. */
2624 /* Top level driver for merge_blocks. */
2631 /* Attempt to merge blocks as made possible by edge removal. If a block
2632 has only one successor, and the successor has only one predecessor,
2633 they may be combined. */
2635 for (i
= 0; i
< n_basic_blocks
;)
2637 basic_block c
, b
= BASIC_BLOCK (i
);
2640 /* A loop because chains of blocks might be combineable. */
2641 while ((s
= b
->succ
) != NULL
2642 && s
->succ_next
== NULL
2643 && (s
->flags
& EDGE_EH
) == 0
2644 && (c
= s
->dest
) != EXIT_BLOCK_PTR
2645 && c
->pred
->pred_next
== NULL
2646 /* If the jump insn has side effects, we can't kill the edge. */
2647 && (GET_CODE (b
->end
) != JUMP_INSN
2648 || onlyjump_p (b
->end
))
2649 && merge_blocks (s
, b
, c
))
2652 /* Don't get confused by the index shift caused by deleting blocks. */
2657 /* The given edge should potentially be a fallthru edge. If that is in
2658 fact true, delete the jump and barriers that are in the way. */
2661 tidy_fallthru_edge (e
, b
, c
)
2667 /* ??? In a late-running flow pass, other folks may have deleted basic
2668 blocks by nopping out blocks, leaving multiple BARRIERs between here
2669 and the target label. They ought to be chastized and fixed.
2671 We can also wind up with a sequence of undeletable labels between
2672 one block and the next.
2674 So search through a sequence of barriers, labels, and notes for
2675 the head of block C and assert that we really do fall through. */
2677 if (next_real_insn (b
->end
) != next_real_insn (PREV_INSN (c
->head
)))
2680 /* Remove what will soon cease being the jump insn from the source block.
2681 If block B consisted only of this single jump, turn it into a deleted
2684 if (GET_CODE (q
) == JUMP_INSN
2686 && (any_uncondjump_p (q
)
2687 || (b
->succ
== e
&& e
->succ_next
== NULL
)))
2690 /* If this was a conditional jump, we need to also delete
2691 the insn that set cc0. */
2692 if (any_condjump_p (q
) && sets_cc0_p (PREV_INSN (q
)))
2699 NOTE_LINE_NUMBER (q
) = NOTE_INSN_DELETED
;
2700 NOTE_SOURCE_FILE (q
) = 0;
2708 /* Selectively unlink the sequence. */
2709 if (q
!= PREV_INSN (c
->head
))
2710 flow_delete_insn_chain (NEXT_INSN (q
), PREV_INSN (c
->head
));
2712 e
->flags
|= EDGE_FALLTHRU
;
2715 /* Fix up edges that now fall through, or rather should now fall through
2716 but previously required a jump around now deleted blocks. Simplify
2717 the search by only examining blocks numerically adjacent, since this
2718 is how find_basic_blocks created them. */
2721 tidy_fallthru_edges ()
2725 for (i
= 1; i
< n_basic_blocks
; ++i
)
2727 basic_block b
= BASIC_BLOCK (i
- 1);
2728 basic_block c
= BASIC_BLOCK (i
);
2731 /* We care about simple conditional or unconditional jumps with
2734 If we had a conditional branch to the next instruction when
2735 find_basic_blocks was called, then there will only be one
2736 out edge for the block which ended with the conditional
2737 branch (since we do not create duplicate edges).
2739 Furthermore, the edge will be marked as a fallthru because we
2740 merge the flags for the duplicate edges. So we do not want to
2741 check that the edge is not a FALLTHRU edge. */
2742 if ((s
= b
->succ
) != NULL
2743 && s
->succ_next
== NULL
2745 /* If the jump insn has side effects, we can't tidy the edge. */
2746 && (GET_CODE (b
->end
) != JUMP_INSN
2747 || onlyjump_p (b
->end
)))
2748 tidy_fallthru_edge (s
, b
, c
);
2752 /* Perform data flow analysis.
2753 F is the first insn of the function; FLAGS is a set of PROP_* flags
2754 to be used in accumulating flow info. */
2757 life_analysis (f
, file
, flags
)
2762 #ifdef ELIMINABLE_REGS
2764 static struct {int from
, to
; } eliminables
[] = ELIMINABLE_REGS
;
2767 /* Record which registers will be eliminated. We use this in
2770 CLEAR_HARD_REG_SET (elim_reg_set
);
2772 #ifdef ELIMINABLE_REGS
2773 for (i
= 0; i
< (int) ARRAY_SIZE (eliminables
); i
++)
2774 SET_HARD_REG_BIT (elim_reg_set
, eliminables
[i
].from
);
2776 SET_HARD_REG_BIT (elim_reg_set
, FRAME_POINTER_REGNUM
);
2780 flags
&= ~(PROP_LOG_LINKS
| PROP_AUTOINC
);
2782 /* The post-reload life analysis have (on a global basis) the same
2783 registers live as was computed by reload itself. elimination
2784 Otherwise offsets and such may be incorrect.
2786 Reload will make some registers as live even though they do not
2789 We don't want to create new auto-incs after reload, since they
2790 are unlikely to be useful and can cause problems with shared
2792 if (reload_completed
)
2793 flags
&= ~(PROP_REG_INFO
| PROP_AUTOINC
);
2795 /* We want alias analysis information for local dead store elimination. */
2796 if (optimize
&& (flags
& PROP_SCAN_DEAD_CODE
))
2797 init_alias_analysis ();
2799 /* Always remove no-op moves. Do this before other processing so
2800 that we don't have to keep re-scanning them. */
2801 delete_noop_moves (f
);
2803 /* Some targets can emit simpler epilogues if they know that sp was
2804 not ever modified during the function. After reload, of course,
2805 we've already emitted the epilogue so there's no sense searching. */
2806 if (! reload_completed
)
2807 notice_stack_pointer_modification (f
);
2809 /* Allocate and zero out data structures that will record the
2810 data from lifetime analysis. */
2811 allocate_reg_life_data ();
2812 allocate_bb_life_data ();
2814 /* Find the set of registers live on function exit. */
2815 mark_regs_live_at_end (EXIT_BLOCK_PTR
->global_live_at_start
);
2817 /* "Update" life info from zero. It'd be nice to begin the
2818 relaxation with just the exit and noreturn blocks, but that set
2819 is not immediately handy. */
2821 if (flags
& PROP_REG_INFO
)
2822 memset (regs_ever_live
, 0, sizeof (regs_ever_live
));
2823 update_life_info (NULL
, UPDATE_LIFE_GLOBAL
, flags
);
2826 if (optimize
&& (flags
& PROP_SCAN_DEAD_CODE
))
2827 end_alias_analysis ();
2830 dump_flow_info (file
);
2832 free_basic_block_vars (1);
2835 /* A subroutine of verify_wide_reg, called through for_each_rtx.
2836 Search for REGNO. If found, abort if it is not wider than word_mode. */
2839 verify_wide_reg_1 (px
, pregno
)
2844 unsigned int regno
= *(int *) pregno
;
2846 if (GET_CODE (x
) == REG
&& REGNO (x
) == regno
)
2848 if (GET_MODE_BITSIZE (GET_MODE (x
)) <= BITS_PER_WORD
)
2855 /* A subroutine of verify_local_live_at_start. Search through insns
2856 between HEAD and END looking for register REGNO. */
2859 verify_wide_reg (regno
, head
, end
)
2866 && for_each_rtx (&PATTERN (head
), verify_wide_reg_1
, ®no
))
2870 head
= NEXT_INSN (head
);
2873 /* We didn't find the register at all. Something's way screwy. */
2875 fprintf (rtl_dump_file
, "Aborting in verify_wide_reg; reg %d\n", regno
);
2876 print_rtl_and_abort ();
2879 /* A subroutine of update_life_info. Verify that there are no untoward
2880 changes in live_at_start during a local update. */
2883 verify_local_live_at_start (new_live_at_start
, bb
)
2884 regset new_live_at_start
;
2887 if (reload_completed
)
2889 /* After reload, there are no pseudos, nor subregs of multi-word
2890 registers. The regsets should exactly match. */
2891 if (! REG_SET_EQUAL_P (new_live_at_start
, bb
->global_live_at_start
))
2895 fprintf (rtl_dump_file
,
2896 "live_at_start mismatch in bb %d, aborting\n",
2898 debug_bitmap_file (rtl_dump_file
, bb
->global_live_at_start
);
2899 debug_bitmap_file (rtl_dump_file
, new_live_at_start
);
2901 print_rtl_and_abort ();
2908 /* Find the set of changed registers. */
2909 XOR_REG_SET (new_live_at_start
, bb
->global_live_at_start
);
2911 EXECUTE_IF_SET_IN_REG_SET (new_live_at_start
, 0, i
,
2913 /* No registers should die. */
2914 if (REGNO_REG_SET_P (bb
->global_live_at_start
, i
))
2917 fprintf (rtl_dump_file
,
2918 "Register %d died unexpectedly in block %d\n", i
,
2920 print_rtl_and_abort ();
2923 /* Verify that the now-live register is wider than word_mode. */
2924 verify_wide_reg (i
, bb
->head
, bb
->end
);
2929 /* Updates life information starting with the basic blocks set in BLOCKS.
2930 If BLOCKS is null, consider it to be the universal set.
2932 If EXTENT is UPDATE_LIFE_LOCAL, such as after splitting or peepholeing,
2933 we are only expecting local modifications to basic blocks. If we find
2934 extra registers live at the beginning of a block, then we either killed
2935 useful data, or we have a broken split that wants data not provided.
2936 If we find registers removed from live_at_start, that means we have
2937 a broken peephole that is killing a register it shouldn't.
2939 ??? This is not true in one situation -- when a pre-reload splitter
2940 generates subregs of a multi-word pseudo, current life analysis will
2941 lose the kill. So we _can_ have a pseudo go live. How irritating.
2943 Including PROP_REG_INFO does not properly refresh regs_ever_live
2944 unless the caller resets it to zero. */
2947 update_life_info (blocks
, extent
, prop_flags
)
2949 enum update_life_extent extent
;
2953 regset_head tmp_head
;
2956 tmp
= INITIALIZE_REG_SET (tmp_head
);
2958 /* For a global update, we go through the relaxation process again. */
2959 if (extent
!= UPDATE_LIFE_LOCAL
)
2961 calculate_global_regs_live (blocks
, blocks
,
2962 prop_flags
& PROP_SCAN_DEAD_CODE
);
2964 /* If asked, remove notes from the blocks we'll update. */
2965 if (extent
== UPDATE_LIFE_GLOBAL_RM_NOTES
)
2966 count_or_remove_death_notes (blocks
, 1);
2971 EXECUTE_IF_SET_IN_SBITMAP (blocks
, 0, i
,
2973 basic_block bb
= BASIC_BLOCK (i
);
2975 COPY_REG_SET (tmp
, bb
->global_live_at_end
);
2976 propagate_block (bb
, tmp
, NULL
, NULL
, prop_flags
);
2978 if (extent
== UPDATE_LIFE_LOCAL
)
2979 verify_local_live_at_start (tmp
, bb
);
2984 for (i
= n_basic_blocks
- 1; i
>= 0; --i
)
2986 basic_block bb
= BASIC_BLOCK (i
);
2988 COPY_REG_SET (tmp
, bb
->global_live_at_end
);
2989 propagate_block (bb
, tmp
, NULL
, NULL
, prop_flags
);
2991 if (extent
== UPDATE_LIFE_LOCAL
)
2992 verify_local_live_at_start (tmp
, bb
);
2998 if (prop_flags
& PROP_REG_INFO
)
3000 /* The only pseudos that are live at the beginning of the function
3001 are those that were not set anywhere in the function. local-alloc
3002 doesn't know how to handle these correctly, so mark them as not
3003 local to any one basic block. */
3004 EXECUTE_IF_SET_IN_REG_SET (ENTRY_BLOCK_PTR
->global_live_at_end
,
3005 FIRST_PSEUDO_REGISTER
, i
,
3006 { REG_BASIC_BLOCK (i
) = REG_BLOCK_GLOBAL
; });
3008 /* We have a problem with any pseudoreg that lives across the setjmp.
3009 ANSI says that if a user variable does not change in value between
3010 the setjmp and the longjmp, then the longjmp preserves it. This
3011 includes longjmp from a place where the pseudo appears dead.
3012 (In principle, the value still exists if it is in scope.)
3013 If the pseudo goes in a hard reg, some other value may occupy
3014 that hard reg where this pseudo is dead, thus clobbering the pseudo.
3015 Conclusion: such a pseudo must not go in a hard reg. */
3016 EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp
,
3017 FIRST_PSEUDO_REGISTER
, i
,
3019 if (regno_reg_rtx
[i
] != 0)
3021 REG_LIVE_LENGTH (i
) = -1;
3022 REG_BASIC_BLOCK (i
) = REG_BLOCK_UNKNOWN
;
3028 /* Free the variables allocated by find_basic_blocks.
3030 KEEP_HEAD_END_P is non-zero if basic_block_info is not to be freed. */
3033 free_basic_block_vars (keep_head_end_p
)
3034 int keep_head_end_p
;
3036 if (basic_block_for_insn
)
3038 VARRAY_FREE (basic_block_for_insn
);
3039 basic_block_for_insn
= NULL
;
3042 if (! keep_head_end_p
)
3045 VARRAY_FREE (basic_block_info
);
3048 ENTRY_BLOCK_PTR
->aux
= NULL
;
3049 ENTRY_BLOCK_PTR
->global_live_at_end
= NULL
;
3050 EXIT_BLOCK_PTR
->aux
= NULL
;
3051 EXIT_BLOCK_PTR
->global_live_at_start
= NULL
;
3055 /* Return nonzero if the destination of SET equals the source. */
3061 rtx src
= SET_SRC (set
);
3062 rtx dst
= SET_DEST (set
);
3064 if (GET_CODE (src
) == SUBREG
&& GET_CODE (dst
) == SUBREG
)
3066 if (SUBREG_WORD (src
) != SUBREG_WORD (dst
))
3068 src
= SUBREG_REG (src
);
3069 dst
= SUBREG_REG (dst
);
3072 return (GET_CODE (src
) == REG
&& GET_CODE (dst
) == REG
3073 && REGNO (src
) == REGNO (dst
));
3076 /* Return nonzero if an insn consists only of SETs, each of which only sets a
3083 rtx pat
= PATTERN (insn
);
3085 /* Insns carrying these notes are useful later on. */
3086 if (find_reg_note (insn
, REG_EQUAL
, NULL_RTX
))
3089 if (GET_CODE (pat
) == SET
&& set_noop_p (pat
))
3092 if (GET_CODE (pat
) == PARALLEL
)
3095 /* If nothing but SETs of registers to themselves,
3096 this insn can also be deleted. */
3097 for (i
= 0; i
< XVECLEN (pat
, 0); i
++)
3099 rtx tem
= XVECEXP (pat
, 0, i
);
3101 if (GET_CODE (tem
) == USE
3102 || GET_CODE (tem
) == CLOBBER
)
3105 if (GET_CODE (tem
) != SET
|| ! set_noop_p (tem
))
3114 /* Delete any insns that copy a register to itself. */
3117 delete_noop_moves (f
)
3121 for (insn
= f
; insn
; insn
= NEXT_INSN (insn
))
3123 if (GET_CODE (insn
) == INSN
&& noop_move_p (insn
))
3125 PUT_CODE (insn
, NOTE
);
3126 NOTE_LINE_NUMBER (insn
) = NOTE_INSN_DELETED
;
3127 NOTE_SOURCE_FILE (insn
) = 0;
3132 /* Determine if the stack pointer is constant over the life of the function.
3133 Only useful before prologues have been emitted. */
3136 notice_stack_pointer_modification_1 (x
, pat
, data
)
3138 rtx pat ATTRIBUTE_UNUSED
;
3139 void *data ATTRIBUTE_UNUSED
;
3141 if (x
== stack_pointer_rtx
3142 /* The stack pointer is only modified indirectly as the result
3143 of a push until later in flow. See the comments in rtl.texi
3144 regarding Embedded Side-Effects on Addresses. */
3145 || (GET_CODE (x
) == MEM
3146 && (GET_CODE (XEXP (x
, 0)) == PRE_DEC
3147 || GET_CODE (XEXP (x
, 0)) == PRE_INC
3148 || GET_CODE (XEXP (x
, 0)) == POST_DEC
3149 || GET_CODE (XEXP (x
, 0)) == POST_INC
)
3150 && XEXP (XEXP (x
, 0), 0) == stack_pointer_rtx
))
3151 current_function_sp_is_unchanging
= 0;
3155 notice_stack_pointer_modification (f
)
3160 /* Assume that the stack pointer is unchanging if alloca hasn't
3162 current_function_sp_is_unchanging
= !current_function_calls_alloca
;
3163 if (! current_function_sp_is_unchanging
)
3166 for (insn
= f
; insn
; insn
= NEXT_INSN (insn
))
3170 /* Check if insn modifies the stack pointer. */
3171 note_stores (PATTERN (insn
), notice_stack_pointer_modification_1
,
3173 if (! current_function_sp_is_unchanging
)
3179 /* Mark a register in SET. Hard registers in large modes get all
3180 of their component registers set as well. */
3183 mark_reg (reg
, xset
)
3187 regset set
= (regset
) xset
;
3188 int regno
= REGNO (reg
);
3190 if (GET_MODE (reg
) == BLKmode
)
3193 SET_REGNO_REG_SET (set
, regno
);
3194 if (regno
< FIRST_PSEUDO_REGISTER
)
3196 int n
= HARD_REGNO_NREGS (regno
, GET_MODE (reg
));
3198 SET_REGNO_REG_SET (set
, regno
+ n
);
3202 /* Mark those regs which are needed at the end of the function as live
3203 at the end of the last basic block. */
3206 mark_regs_live_at_end (set
)
3211 /* If exiting needs the right stack value, consider the stack pointer
3212 live at the end of the function. */
3213 if ((HAVE_epilogue
&& reload_completed
)
3214 || ! EXIT_IGNORE_STACK
3215 || (! FRAME_POINTER_REQUIRED
3216 && ! current_function_calls_alloca
3217 && flag_omit_frame_pointer
)
3218 || current_function_sp_is_unchanging
)
3220 SET_REGNO_REG_SET (set
, STACK_POINTER_REGNUM
);
3223 /* Mark the frame pointer if needed at the end of the function. If
3224 we end up eliminating it, it will be removed from the live list
3225 of each basic block by reload. */
3227 if (! reload_completed
|| frame_pointer_needed
)
3229 SET_REGNO_REG_SET (set
, FRAME_POINTER_REGNUM
);
3230 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3231 /* If they are different, also mark the hard frame pointer as live. */
3232 if (! LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM
))
3233 SET_REGNO_REG_SET (set
, HARD_FRAME_POINTER_REGNUM
);
3237 #ifdef PIC_OFFSET_TABLE_REGNUM
3238 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
3239 /* Many architectures have a GP register even without flag_pic.
3240 Assume the pic register is not in use, or will be handled by
3241 other means, if it is not fixed. */
3242 if (fixed_regs
[PIC_OFFSET_TABLE_REGNUM
])
3243 SET_REGNO_REG_SET (set
, PIC_OFFSET_TABLE_REGNUM
);
3247 /* Mark all global registers, and all registers used by the epilogue
3248 as being live at the end of the function since they may be
3249 referenced by our caller. */
3250 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
3251 if (global_regs
[i
] || EPILOGUE_USES (i
))
3252 SET_REGNO_REG_SET (set
, i
);
3254 /* Mark all call-saved registers that we actaully used. */
3255 if (HAVE_epilogue
&& reload_completed
)
3257 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
3258 if (regs_ever_live
[i
] && ! call_used_regs
[i
] && ! LOCAL_REGNO (i
))
3259 SET_REGNO_REG_SET (set
, i
);
3262 /* Mark function return value. */
3263 diddle_return_value (mark_reg
, set
);
3266 /* Callback function for for_each_successor_phi. DATA is a regset.
3267 Sets the SRC_REGNO, the regno of the phi alternative for phi node
3268 INSN, in the regset. */
3271 set_phi_alternative_reg (insn
, dest_regno
, src_regno
, data
)
3272 rtx insn ATTRIBUTE_UNUSED
;
3273 int dest_regno ATTRIBUTE_UNUSED
;
3277 regset live
= (regset
) data
;
3278 SET_REGNO_REG_SET (live
, src_regno
);
3282 /* Propagate global life info around the graph of basic blocks. Begin
3283 considering blocks with their corresponding bit set in BLOCKS_IN.
3284 If BLOCKS_IN is null, consider it the universal set.
3286 BLOCKS_OUT is set for every block that was changed. */
3289 calculate_global_regs_live (blocks_in
, blocks_out
, flags
)
3290 sbitmap blocks_in
, blocks_out
;
3293 basic_block
*queue
, *qhead
, *qtail
, *qend
;
3294 regset tmp
, new_live_at_end
;
3295 regset_head tmp_head
;
3296 regset_head new_live_at_end_head
;
3299 tmp
= INITIALIZE_REG_SET (tmp_head
);
3300 new_live_at_end
= INITIALIZE_REG_SET (new_live_at_end_head
);
3302 /* Create a worklist. Allocate an extra slot for ENTRY_BLOCK, and one
3303 because the `head == tail' style test for an empty queue doesn't
3304 work with a full queue. */
3305 queue
= (basic_block
*) xmalloc ((n_basic_blocks
+ 2) * sizeof (*queue
));
3307 qhead
= qend
= queue
+ n_basic_blocks
+ 2;
3309 /* Clear out the garbage that might be hanging out in bb->aux. */
3310 for (i
= n_basic_blocks
- 1; i
>= 0; --i
)
3311 BASIC_BLOCK (i
)->aux
= NULL
;
3313 /* Queue the blocks set in the initial mask. Do this in reverse block
3314 number order so that we are more likely for the first round to do
3315 useful work. We use AUX non-null to flag that the block is queued. */
3318 EXECUTE_IF_SET_IN_SBITMAP (blocks_in
, 0, i
,
3320 basic_block bb
= BASIC_BLOCK (i
);
3327 for (i
= 0; i
< n_basic_blocks
; ++i
)
3329 basic_block bb
= BASIC_BLOCK (i
);
3336 sbitmap_zero (blocks_out
);
3338 while (qhead
!= qtail
)
3340 int rescan
, changed
;
3349 /* Begin by propogating live_at_start from the successor blocks. */
3350 CLEAR_REG_SET (new_live_at_end
);
3351 for (e
= bb
->succ
; e
; e
= e
->succ_next
)
3353 basic_block sb
= e
->dest
;
3354 IOR_REG_SET (new_live_at_end
, sb
->global_live_at_start
);
3357 /* Force the stack pointer to be live -- which might not already be
3358 the case for blocks within infinite loops. */
3359 SET_REGNO_REG_SET (new_live_at_end
, STACK_POINTER_REGNUM
);
3361 /* Similarly for the frame pointer before reload. Any reference
3362 to any pseudo before reload is a potential reference of the
3364 if (! reload_completed
)
3365 SET_REGNO_REG_SET (new_live_at_end
, FRAME_POINTER_REGNUM
);
3367 /* Regs used in phi nodes are not included in
3368 global_live_at_start, since they are live only along a
3369 particular edge. Set those regs that are live because of a
3370 phi node alternative corresponding to this particular block. */
3372 for_each_successor_phi (bb
, &set_phi_alternative_reg
,
3375 if (bb
== ENTRY_BLOCK_PTR
)
3377 COPY_REG_SET (bb
->global_live_at_end
, new_live_at_end
);
3381 /* On our first pass through this block, we'll go ahead and continue.
3382 Recognize first pass by local_set NULL. On subsequent passes, we
3383 get to skip out early if live_at_end wouldn't have changed. */
3385 if (bb
->local_set
== NULL
)
3387 bb
->local_set
= OBSTACK_ALLOC_REG_SET (&flow_obstack
);
3388 bb
->cond_local_set
= OBSTACK_ALLOC_REG_SET (&flow_obstack
);
3393 /* If any bits were removed from live_at_end, we'll have to
3394 rescan the block. This wouldn't be necessary if we had
3395 precalculated local_live, however with PROP_SCAN_DEAD_CODE
3396 local_live is really dependent on live_at_end. */
3397 CLEAR_REG_SET (tmp
);
3398 rescan
= bitmap_operation (tmp
, bb
->global_live_at_end
,
3399 new_live_at_end
, BITMAP_AND_COMPL
);
3403 /* If any of the registers in the new live_at_end set are
3404 conditionally set in this basic block, we must rescan.
3405 This is because conditional lifetimes at the end of the
3406 block do not just take the live_at_end set into account,
3407 but also the liveness at the start of each successor
3408 block. We can miss changes in those sets if we only
3409 compare the new live_at_end against the previous one. */
3410 CLEAR_REG_SET (tmp
);
3411 rescan
= bitmap_operation (tmp
, new_live_at_end
,
3412 bb
->cond_local_set
, BITMAP_AND
);
3417 /* Find the set of changed bits. Take this opportunity
3418 to notice that this set is empty and early out. */
3419 CLEAR_REG_SET (tmp
);
3420 changed
= bitmap_operation (tmp
, bb
->global_live_at_end
,
3421 new_live_at_end
, BITMAP_XOR
);
3425 /* If any of the changed bits overlap with local_set,
3426 we'll have to rescan the block. Detect overlap by
3427 the AND with ~local_set turning off bits. */
3428 rescan
= bitmap_operation (tmp
, tmp
, bb
->local_set
,
3433 /* Let our caller know that BB changed enough to require its
3434 death notes updated. */
3436 SET_BIT (blocks_out
, bb
->index
);
3440 /* Add to live_at_start the set of all registers in
3441 new_live_at_end that aren't in the old live_at_end. */
3443 bitmap_operation (tmp
, new_live_at_end
, bb
->global_live_at_end
,
3445 COPY_REG_SET (bb
->global_live_at_end
, new_live_at_end
);
3447 changed
= bitmap_operation (bb
->global_live_at_start
,
3448 bb
->global_live_at_start
,
3455 COPY_REG_SET (bb
->global_live_at_end
, new_live_at_end
);
3457 /* Rescan the block insn by insn to turn (a copy of) live_at_end
3458 into live_at_start. */
3459 propagate_block (bb
, new_live_at_end
, bb
->local_set
,
3460 bb
->cond_local_set
, flags
);
3462 /* If live_at start didn't change, no need to go farther. */
3463 if (REG_SET_EQUAL_P (bb
->global_live_at_start
, new_live_at_end
))
3466 COPY_REG_SET (bb
->global_live_at_start
, new_live_at_end
);
3469 /* Queue all predecessors of BB so that we may re-examine
3470 their live_at_end. */
3471 for (e
= bb
->pred
; e
; e
= e
->pred_next
)
3473 basic_block pb
= e
->src
;
3474 if (pb
->aux
== NULL
)
3485 FREE_REG_SET (new_live_at_end
);
3489 EXECUTE_IF_SET_IN_SBITMAP (blocks_out
, 0, i
,
3491 basic_block bb
= BASIC_BLOCK (i
);
3492 FREE_REG_SET (bb
->local_set
);
3493 FREE_REG_SET (bb
->cond_local_set
);
3498 for (i
= n_basic_blocks
- 1; i
>= 0; --i
)
3500 basic_block bb
= BASIC_BLOCK (i
);
3501 FREE_REG_SET (bb
->local_set
);
3502 FREE_REG_SET (bb
->cond_local_set
);
3509 /* Subroutines of life analysis. */
3511 /* Allocate the permanent data structures that represent the results
3512 of life analysis. Not static since used also for stupid life analysis. */
3515 allocate_bb_life_data ()
3519 for (i
= 0; i
< n_basic_blocks
; i
++)
3521 basic_block bb
= BASIC_BLOCK (i
);
3523 bb
->global_live_at_start
= OBSTACK_ALLOC_REG_SET (&flow_obstack
);
3524 bb
->global_live_at_end
= OBSTACK_ALLOC_REG_SET (&flow_obstack
);
3527 ENTRY_BLOCK_PTR
->global_live_at_end
3528 = OBSTACK_ALLOC_REG_SET (&flow_obstack
);
3529 EXIT_BLOCK_PTR
->global_live_at_start
3530 = OBSTACK_ALLOC_REG_SET (&flow_obstack
);
3532 regs_live_at_setjmp
= OBSTACK_ALLOC_REG_SET (&flow_obstack
);
3536 allocate_reg_life_data ()
3540 max_regno
= max_reg_num ();
3542 /* Recalculate the register space, in case it has grown. Old style
3543 vector oriented regsets would set regset_{size,bytes} here also. */
3544 allocate_reg_info (max_regno
, FALSE
, FALSE
);
3546 /* Reset all the data we'll collect in propagate_block and its
3548 for (i
= 0; i
< max_regno
; i
++)
3552 REG_N_DEATHS (i
) = 0;
3553 REG_N_CALLS_CROSSED (i
) = 0;
3554 REG_LIVE_LENGTH (i
) = 0;
3555 REG_BASIC_BLOCK (i
) = REG_BLOCK_UNKNOWN
;
3559 /* Delete dead instructions for propagate_block. */
3562 propagate_block_delete_insn (bb
, insn
)
3566 rtx inote
= find_reg_note (insn
, REG_LABEL
, NULL_RTX
);
3568 /* If the insn referred to a label, and that label was attached to
3569 an ADDR_VEC, it's safe to delete the ADDR_VEC. In fact, it's
3570 pretty much mandatory to delete it, because the ADDR_VEC may be
3571 referencing labels that no longer exist. */
3575 rtx label
= XEXP (inote
, 0);
3578 if (LABEL_NUSES (label
) == 1
3579 && (next
= next_nonnote_insn (label
)) != NULL
3580 && GET_CODE (next
) == JUMP_INSN
3581 && (GET_CODE (PATTERN (next
)) == ADDR_VEC
3582 || GET_CODE (PATTERN (next
)) == ADDR_DIFF_VEC
))
3584 rtx pat
= PATTERN (next
);
3585 int diff_vec_p
= GET_CODE (pat
) == ADDR_DIFF_VEC
;
3586 int len
= XVECLEN (pat
, diff_vec_p
);
3589 for (i
= 0; i
< len
; i
++)
3590 LABEL_NUSES (XEXP (XVECEXP (pat
, diff_vec_p
, i
), 0))--;
3592 flow_delete_insn (next
);
3596 if (bb
->end
== insn
)
3597 bb
->end
= PREV_INSN (insn
);
3598 flow_delete_insn (insn
);
3601 /* Delete dead libcalls for propagate_block. Return the insn
3602 before the libcall. */
3605 propagate_block_delete_libcall (bb
, insn
, note
)
3609 rtx first
= XEXP (note
, 0);
3610 rtx before
= PREV_INSN (first
);
3612 if (insn
== bb
->end
)
3615 flow_delete_insn_chain (first
, insn
);
3619 /* Update the life-status of regs for one insn. Return the previous insn. */
3622 propagate_one_insn (pbi
, insn
)
3623 struct propagate_block_info
*pbi
;
3626 rtx prev
= PREV_INSN (insn
);
3627 int flags
= pbi
->flags
;
3628 int insn_is_dead
= 0;
3629 int libcall_is_dead
= 0;
3633 if (! INSN_P (insn
))
3636 note
= find_reg_note (insn
, REG_RETVAL
, NULL_RTX
);
3637 if (flags
& PROP_SCAN_DEAD_CODE
)
3639 insn_is_dead
= insn_dead_p (pbi
, PATTERN (insn
), 0,
3641 libcall_is_dead
= (insn_is_dead
&& note
!= 0
3642 && libcall_dead_p (pbi
, note
, insn
));
3645 /* We almost certainly don't want to delete prologue or epilogue
3646 instructions. Warn about probable compiler losage. */
3649 && (((HAVE_epilogue
|| HAVE_prologue
)
3650 && prologue_epilogue_contains (insn
))
3651 || (HAVE_sibcall_epilogue
3652 && sibcall_epilogue_contains (insn
)))
3653 && find_reg_note (insn
, REG_MAYBE_DEAD
, NULL_RTX
) == 0)
3655 if (flags
& PROP_KILL_DEAD_CODE
)
3657 warning ("ICE: would have deleted prologue/epilogue insn");
3658 if (!inhibit_warnings
)
3661 libcall_is_dead
= insn_is_dead
= 0;
3664 /* If an instruction consists of just dead store(s) on final pass,
3666 if ((flags
& PROP_KILL_DEAD_CODE
) && insn_is_dead
)
3668 /* Record sets. Do this even for dead instructions, since they
3669 would have killed the values if they hadn't been deleted. */
3670 mark_set_regs (pbi
, PATTERN (insn
), insn
);
3672 /* CC0 is now known to be dead. Either this insn used it,
3673 in which case it doesn't anymore, or clobbered it,
3674 so the next insn can't use it. */
3677 if (libcall_is_dead
)
3679 prev
= propagate_block_delete_libcall (pbi
->bb
, insn
, note
);
3680 insn
= NEXT_INSN (prev
);
3683 propagate_block_delete_insn (pbi
->bb
, insn
);
3688 /* See if this is an increment or decrement that can be merged into
3689 a following memory address. */
3692 register rtx x
= single_set (insn
);
3694 /* Does this instruction increment or decrement a register? */
3695 if ((flags
& PROP_AUTOINC
)
3697 && GET_CODE (SET_DEST (x
)) == REG
3698 && (GET_CODE (SET_SRC (x
)) == PLUS
3699 || GET_CODE (SET_SRC (x
)) == MINUS
)
3700 && XEXP (SET_SRC (x
), 0) == SET_DEST (x
)
3701 && GET_CODE (XEXP (SET_SRC (x
), 1)) == CONST_INT
3702 /* Ok, look for a following memory ref we can combine with.
3703 If one is found, change the memory ref to a PRE_INC
3704 or PRE_DEC, cancel this insn, and return 1.
3705 Return 0 if nothing has been done. */
3706 && try_pre_increment_1 (pbi
, insn
))
3709 #endif /* AUTO_INC_DEC */
3711 CLEAR_REG_SET (pbi
->new_set
);
3713 /* If this is not the final pass, and this insn is copying the value of
3714 a library call and it's dead, don't scan the insns that perform the
3715 library call, so that the call's arguments are not marked live. */
3716 if (libcall_is_dead
)
3718 /* Record the death of the dest reg. */
3719 mark_set_regs (pbi
, PATTERN (insn
), insn
);
3721 insn
= XEXP (note
, 0);
3722 return PREV_INSN (insn
);
3724 else if (GET_CODE (PATTERN (insn
)) == SET
3725 && SET_DEST (PATTERN (insn
)) == stack_pointer_rtx
3726 && GET_CODE (SET_SRC (PATTERN (insn
))) == PLUS
3727 && XEXP (SET_SRC (PATTERN (insn
)), 0) == stack_pointer_rtx
3728 && GET_CODE (XEXP (SET_SRC (PATTERN (insn
)), 1)) == CONST_INT
)
3729 /* We have an insn to pop a constant amount off the stack.
3730 (Such insns use PLUS regardless of the direction of the stack,
3731 and any insn to adjust the stack by a constant is always a pop.)
3732 These insns, if not dead stores, have no effect on life. */
3736 /* Any regs live at the time of a call instruction must not go
3737 in a register clobbered by calls. Find all regs now live and
3738 record this for them. */
3740 if (GET_CODE (insn
) == CALL_INSN
&& (flags
& PROP_REG_INFO
))
3741 EXECUTE_IF_SET_IN_REG_SET (pbi
->reg_live
, 0, i
,
3742 { REG_N_CALLS_CROSSED (i
)++; });
3744 /* Record sets. Do this even for dead instructions, since they
3745 would have killed the values if they hadn't been deleted. */
3746 mark_set_regs (pbi
, PATTERN (insn
), insn
);
3748 if (GET_CODE (insn
) == CALL_INSN
)
3754 if (GET_CODE (PATTERN (insn
)) == COND_EXEC
)
3755 cond
= COND_EXEC_TEST (PATTERN (insn
));
3757 /* Non-constant calls clobber memory. */
3758 if (! CONST_CALL_P (insn
))
3759 free_EXPR_LIST_list (&pbi
->mem_set_list
);
3761 /* There may be extra registers to be clobbered. */
3762 for (note
= CALL_INSN_FUNCTION_USAGE (insn
);
3764 note
= XEXP (note
, 1))
3765 if (GET_CODE (XEXP (note
, 0)) == CLOBBER
)
3766 mark_set_1 (pbi
, CLOBBER
, XEXP (XEXP (note
, 0), 0),
3767 cond
, insn
, pbi
->flags
);
3769 /* Calls change all call-used and global registers. */
3770 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
3771 if (call_used_regs
[i
] && ! global_regs
[i
]
3774 /* We do not want REG_UNUSED notes for these registers. */
3775 mark_set_1 (pbi
, CLOBBER
, gen_rtx_REG (reg_raw_mode
[i
], i
),
3777 pbi
->flags
& ~(PROP_DEATH_NOTES
| PROP_REG_INFO
));
3781 /* If an insn doesn't use CC0, it becomes dead since we assume
3782 that every insn clobbers it. So show it dead here;
3783 mark_used_regs will set it live if it is referenced. */
3788 mark_used_regs (pbi
, PATTERN (insn
), NULL_RTX
, insn
);
3790 /* Sometimes we may have inserted something before INSN (such as a move)
3791 when we make an auto-inc. So ensure we will scan those insns. */
3793 prev
= PREV_INSN (insn
);
3796 if (! insn_is_dead
&& GET_CODE (insn
) == CALL_INSN
)
3802 if (GET_CODE (PATTERN (insn
)) == COND_EXEC
)
3803 cond
= COND_EXEC_TEST (PATTERN (insn
));
3805 /* Calls use their arguments. */
3806 for (note
= CALL_INSN_FUNCTION_USAGE (insn
);
3808 note
= XEXP (note
, 1))
3809 if (GET_CODE (XEXP (note
, 0)) == USE
)
3810 mark_used_regs (pbi
, XEXP (XEXP (note
, 0), 0),
3813 /* The stack ptr is used (honorarily) by a CALL insn. */
3814 SET_REGNO_REG_SET (pbi
->reg_live
, STACK_POINTER_REGNUM
);
3816 /* Calls may also reference any of the global registers,
3817 so they are made live. */
3818 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
3820 mark_used_reg (pbi
, gen_rtx_REG (reg_raw_mode
[i
], i
),
3825 /* On final pass, update counts of how many insns in which each reg
3827 if (flags
& PROP_REG_INFO
)
3828 EXECUTE_IF_SET_IN_REG_SET (pbi
->reg_live
, 0, i
,
3829 { REG_LIVE_LENGTH (i
)++; });
3834 /* Initialize a propagate_block_info struct for public consumption.
3835 Note that the structure itself is opaque to this file, but that
3836 the user can use the regsets provided here. */
3838 struct propagate_block_info
*
3839 init_propagate_block_info (bb
, live
, local_set
, cond_local_set
, flags
)
3841 regset live
, local_set
, cond_local_set
;
3844 struct propagate_block_info
*pbi
= xmalloc (sizeof (*pbi
));
3847 pbi
->reg_live
= live
;
3848 pbi
->mem_set_list
= NULL_RTX
;
3849 pbi
->local_set
= local_set
;
3850 pbi
->cond_local_set
= cond_local_set
;
3854 if (flags
& (PROP_LOG_LINKS
| PROP_AUTOINC
))
3855 pbi
->reg_next_use
= (rtx
*) xcalloc (max_reg_num (), sizeof (rtx
));
3857 pbi
->reg_next_use
= NULL
;
3859 pbi
->new_set
= BITMAP_XMALLOC ();
3861 #ifdef HAVE_conditional_execution
3862 pbi
->reg_cond_dead
= splay_tree_new (splay_tree_compare_ints
, NULL
,
3863 free_reg_cond_life_info
);
3864 pbi
->reg_cond_reg
= BITMAP_XMALLOC ();
3866 /* If this block ends in a conditional branch, for each register live
3867 from one side of the branch and not the other, record the register
3868 as conditionally dead. */
3869 if ((flags
& (PROP_DEATH_NOTES
| PROP_SCAN_DEAD_CODE
))
3870 && GET_CODE (bb
->end
) == JUMP_INSN
3871 && any_condjump_p (bb
->end
))
3873 regset_head diff_head
;
3874 regset diff
= INITIALIZE_REG_SET (diff_head
);
3875 basic_block bb_true
, bb_false
;
3876 rtx cond_true
, cond_false
, set_src
;
3879 /* Identify the successor blocks. */
3880 bb_true
= bb
->succ
->dest
;
3881 if (bb
->succ
->succ_next
!= NULL
)
3883 bb_false
= bb
->succ
->succ_next
->dest
;
3885 if (bb
->succ
->flags
& EDGE_FALLTHRU
)
3887 basic_block t
= bb_false
;
3891 else if (! (bb
->succ
->succ_next
->flags
& EDGE_FALLTHRU
))
3896 /* This can happen with a conditional jump to the next insn. */
3897 if (JUMP_LABEL (bb
->end
) != bb_true
->head
)
3900 /* Simplest way to do nothing. */
3904 /* Extract the condition from the branch. */
3905 set_src
= SET_SRC (pc_set (bb
->end
));
3906 cond_true
= XEXP (set_src
, 0);
3907 cond_false
= gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond_true
)),
3908 GET_MODE (cond_true
), XEXP (cond_true
, 0),
3909 XEXP (cond_true
, 1));
3910 if (GET_CODE (XEXP (set_src
, 1)) == PC
)
3913 cond_false
= cond_true
;
3917 /* Compute which register lead different lives in the successors. */
3918 if (bitmap_operation (diff
, bb_true
->global_live_at_start
,
3919 bb_false
->global_live_at_start
, BITMAP_XOR
))
3921 rtx reg
= XEXP (cond_true
, 0);
3923 if (GET_CODE (reg
) == SUBREG
)
3924 reg
= SUBREG_REG (reg
);
3926 if (GET_CODE (reg
) != REG
)
3929 SET_REGNO_REG_SET (pbi
->reg_cond_reg
, REGNO (reg
));
3931 /* For each such register, mark it conditionally dead. */
3932 EXECUTE_IF_SET_IN_REG_SET
3935 struct reg_cond_life_info
*rcli
;
3938 rcli
= (struct reg_cond_life_info
*) xmalloc (sizeof (*rcli
));
3940 if (REGNO_REG_SET_P (bb_true
->global_live_at_start
, i
))
3944 rcli
->condition
= alloc_EXPR_LIST (0, cond
, NULL_RTX
);
3946 splay_tree_insert (pbi
->reg_cond_dead
, i
,
3947 (splay_tree_value
) rcli
);
3951 FREE_REG_SET (diff
);
3955 /* If this block has no successors, any stores to the frame that aren't
3956 used later in the block are dead. So make a pass over the block
3957 recording any such that are made and show them dead at the end. We do
3958 a very conservative and simple job here. */
3960 && ! (TREE_CODE (TREE_TYPE (current_function_decl
)) == FUNCTION_TYPE
3961 && (TYPE_RETURNS_STACK_DEPRESSED
3962 (TREE_TYPE (current_function_decl
))))
3963 && (flags
& PROP_SCAN_DEAD_CODE
)
3964 && (bb
->succ
== NULL
3965 || (bb
->succ
->succ_next
== NULL
3966 && bb
->succ
->dest
== EXIT_BLOCK_PTR
)))
3969 for (insn
= bb
->end
; insn
!= bb
->head
; insn
= PREV_INSN (insn
))
3970 if (GET_CODE (insn
) == INSN
3971 && GET_CODE (PATTERN (insn
)) == SET
3972 && GET_CODE (SET_DEST (PATTERN (insn
))) == MEM
)
3974 rtx mem
= SET_DEST (PATTERN (insn
));
3976 if (XEXP (mem
, 0) == frame_pointer_rtx
3977 || (GET_CODE (XEXP (mem
, 0)) == PLUS
3978 && XEXP (XEXP (mem
, 0), 0) == frame_pointer_rtx
3979 && GET_CODE (XEXP (XEXP (mem
, 0), 1)) == CONST_INT
))
3982 /* Store a copy of mem, otherwise the address may be scrogged
3983 by find_auto_inc. This matters because insn_dead_p uses
3984 an rtx_equal_p check to determine if two addresses are
3985 the same. This works before find_auto_inc, but fails
3986 after find_auto_inc, causing discrepencies between the
3987 set of live registers calculated during the
3988 calculate_global_regs_live phase and what actually exists
3989 after flow completes, leading to aborts. */
3990 if (flags
& PROP_AUTOINC
)
3991 mem
= shallow_copy_rtx (mem
);
3993 pbi
->mem_set_list
= alloc_EXPR_LIST (0, mem
, pbi
->mem_set_list
);
4001 /* Release a propagate_block_info struct. */
4004 free_propagate_block_info (pbi
)
4005 struct propagate_block_info
*pbi
;
4007 free_EXPR_LIST_list (&pbi
->mem_set_list
);
4009 BITMAP_XFREE (pbi
->new_set
);
4011 #ifdef HAVE_conditional_execution
4012 splay_tree_delete (pbi
->reg_cond_dead
);
4013 BITMAP_XFREE (pbi
->reg_cond_reg
);
4016 if (pbi
->reg_next_use
)
4017 free (pbi
->reg_next_use
);
4022 /* Compute the registers live at the beginning of a basic block BB from
4023 those live at the end.
4025 When called, REG_LIVE contains those live at the end. On return, it
4026 contains those live at the beginning.
4028 LOCAL_SET, if non-null, will be set with all registers killed
4029 unconditionally by this basic block.
4030 Likewise, COND_LOCAL_SET, if non-null, will be set with all registers
4031 killed conditionally by this basic block. If there is any unconditional
4032 set of a register, then the corresponding bit will be set in LOCAL_SET
4033 and cleared in COND_LOCAL_SET.
4034 It is valid for LOCAL_SET and COND_LOCAL_SET to be the same set. In this
4035 case, the resulting set will be equal to the union of the two sets that
4036 would otherwise be computed. */
4039 propagate_block (bb
, live
, local_set
, cond_local_set
, flags
)
4043 regset cond_local_set
;
4046 struct propagate_block_info
*pbi
;
4049 pbi
= init_propagate_block_info (bb
, live
, local_set
, cond_local_set
, flags
);
4051 if (flags
& PROP_REG_INFO
)
4055 /* Process the regs live at the end of the block.
4056 Mark them as not local to any one basic block. */
4057 EXECUTE_IF_SET_IN_REG_SET (live
, 0, i
,
4058 { REG_BASIC_BLOCK (i
) = REG_BLOCK_GLOBAL
; });
4061 /* Scan the block an insn at a time from end to beginning. */
4063 for (insn
= bb
->end
;; insn
= prev
)
4065 /* If this is a call to `setjmp' et al, warn if any
4066 non-volatile datum is live. */
4067 if ((flags
& PROP_REG_INFO
)
4068 && GET_CODE (insn
) == NOTE
4069 && NOTE_LINE_NUMBER (insn
) == NOTE_INSN_SETJMP
)
4070 IOR_REG_SET (regs_live_at_setjmp
, pbi
->reg_live
);
4072 prev
= propagate_one_insn (pbi
, insn
);
4074 if (insn
== bb
->head
)
4078 free_propagate_block_info (pbi
);
4081 /* Return 1 if X (the body of an insn, or part of it) is just dead stores
4082 (SET expressions whose destinations are registers dead after the insn).
4083 NEEDED is the regset that says which regs are alive after the insn.
4085 Unless CALL_OK is non-zero, an insn is needed if it contains a CALL.
4087 If X is the entire body of an insn, NOTES contains the reg notes
4088 pertaining to the insn. */
4091 insn_dead_p (pbi
, x
, call_ok
, notes
)
4092 struct propagate_block_info
*pbi
;
4095 rtx notes ATTRIBUTE_UNUSED
;
4097 enum rtx_code code
= GET_CODE (x
);
4100 /* If flow is invoked after reload, we must take existing AUTO_INC
4101 expresions into account. */
4102 if (reload_completed
)
4104 for (; notes
; notes
= XEXP (notes
, 1))
4106 if (REG_NOTE_KIND (notes
) == REG_INC
)
4108 int regno
= REGNO (XEXP (notes
, 0));
4110 /* Don't delete insns to set global regs. */
4111 if ((regno
< FIRST_PSEUDO_REGISTER
&& global_regs
[regno
])
4112 || REGNO_REG_SET_P (pbi
->reg_live
, regno
))
4119 /* If setting something that's a reg or part of one,
4120 see if that register's altered value will be live. */
4124 rtx r
= SET_DEST (x
);
4127 if (GET_CODE (r
) == CC0
)
4128 return ! pbi
->cc0_live
;
4131 /* A SET that is a subroutine call cannot be dead. */
4132 if (GET_CODE (SET_SRC (x
)) == CALL
)
4138 /* Don't eliminate loads from volatile memory or volatile asms. */
4139 else if (volatile_refs_p (SET_SRC (x
)))
4142 if (GET_CODE (r
) == MEM
)
4146 if (MEM_VOLATILE_P (r
))
4149 /* Walk the set of memory locations we are currently tracking
4150 and see if one is an identical match to this memory location.
4151 If so, this memory write is dead (remember, we're walking
4152 backwards from the end of the block to the start). */
4153 temp
= pbi
->mem_set_list
;
4156 rtx mem
= XEXP (temp
, 0);
4158 if (rtx_equal_p (mem
, r
))
4161 /* Check if memory reference matches an auto increment. Only
4162 post increment/decrement or modify are valid. */
4163 if (GET_MODE (mem
) == GET_MODE (r
)
4164 && (GET_CODE (XEXP (mem
, 0)) == POST_DEC
4165 || GET_CODE (XEXP (mem
, 0)) == POST_INC
4166 || GET_CODE (XEXP (mem
, 0)) == POST_MODIFY
)
4167 && GET_MODE (XEXP (mem
, 0)) == GET_MODE (r
)
4168 && rtx_equal_p (XEXP (XEXP (mem
, 0), 0), XEXP (r
, 0)))
4171 temp
= XEXP (temp
, 1);
4176 while (GET_CODE (r
) == SUBREG
4177 || GET_CODE (r
) == STRICT_LOW_PART
4178 || GET_CODE (r
) == ZERO_EXTRACT
)
4181 if (GET_CODE (r
) == REG
)
4183 int regno
= REGNO (r
);
4186 if (REGNO_REG_SET_P (pbi
->reg_live
, regno
))
4189 /* If this is a hard register, verify that subsequent
4190 words are not needed. */
4191 if (regno
< FIRST_PSEUDO_REGISTER
)
4193 int n
= HARD_REGNO_NREGS (regno
, GET_MODE (r
));
4196 if (REGNO_REG_SET_P (pbi
->reg_live
, regno
+n
))
4200 /* Don't delete insns to set global regs. */
4201 if (regno
< FIRST_PSEUDO_REGISTER
&& global_regs
[regno
])
4204 /* Make sure insns to set the stack pointer aren't deleted. */
4205 if (regno
== STACK_POINTER_REGNUM
)
4208 /* Make sure insns to set the frame pointer aren't deleted. */
4209 if (regno
== FRAME_POINTER_REGNUM
4210 && (! reload_completed
|| frame_pointer_needed
))
4212 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4213 if (regno
== HARD_FRAME_POINTER_REGNUM
4214 && (! reload_completed
|| frame_pointer_needed
))
4218 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4219 /* Make sure insns to set arg pointer are never deleted
4220 (if the arg pointer isn't fixed, there will be a USE
4221 for it, so we can treat it normally). */
4222 if (regno
== ARG_POINTER_REGNUM
&& fixed_regs
[regno
])
4226 #ifdef PIC_OFFSET_TABLE_REGNUM
4227 /* Before reload, do not allow sets of the pic register
4228 to be deleted. Reload can insert references to
4229 constant pool memory anywhere in the function, making
4230 the PIC register live where it wasn't before. */
4231 if (regno
== PIC_OFFSET_TABLE_REGNUM
&& fixed_regs
[regno
]
4232 && ! reload_completed
)
4236 /* Otherwise, the set is dead. */
4242 /* If performing several activities, insn is dead if each activity
4243 is individually dead. Also, CLOBBERs and USEs can be ignored; a
4244 CLOBBER or USE that's inside a PARALLEL doesn't make the insn
4246 else if (code
== PARALLEL
)
4248 int i
= XVECLEN (x
, 0);
4250 for (i
--; i
>= 0; i
--)
4251 if (GET_CODE (XVECEXP (x
, 0, i
)) != CLOBBER
4252 && GET_CODE (XVECEXP (x
, 0, i
)) != USE
4253 && ! insn_dead_p (pbi
, XVECEXP (x
, 0, i
), call_ok
, NULL_RTX
))
4259 /* A CLOBBER of a pseudo-register that is dead serves no purpose. That
4260 is not necessarily true for hard registers. */
4261 else if (code
== CLOBBER
&& GET_CODE (XEXP (x
, 0)) == REG
4262 && REGNO (XEXP (x
, 0)) >= FIRST_PSEUDO_REGISTER
4263 && ! REGNO_REG_SET_P (pbi
->reg_live
, REGNO (XEXP (x
, 0))))
4266 /* We do not check other CLOBBER or USE here. An insn consisting of just
4267 a CLOBBER or just a USE should not be deleted. */
4271 /* If INSN is the last insn in a libcall, and assuming INSN is dead,
4272 return 1 if the entire library call is dead.
4273 This is true if INSN copies a register (hard or pseudo)
4274 and if the hard return reg of the call insn is dead.
4275 (The caller should have tested the destination of the SET inside
4276 INSN already for death.)
4278 If this insn doesn't just copy a register, then we don't
4279 have an ordinary libcall. In that case, cse could not have
4280 managed to substitute the source for the dest later on,
4281 so we can assume the libcall is dead.
4283 PBI is the block info giving pseudoregs live before this insn.
4284 NOTE is the REG_RETVAL note of the insn. */
4287 libcall_dead_p (pbi
, note
, insn
)
4288 struct propagate_block_info
*pbi
;
4292 rtx x
= single_set (insn
);
4296 register rtx r
= SET_SRC (x
);
4297 if (GET_CODE (r
) == REG
)
4299 rtx call
= XEXP (note
, 0);
4303 /* Find the call insn. */
4304 while (call
!= insn
&& GET_CODE (call
) != CALL_INSN
)
4305 call
= NEXT_INSN (call
);
4307 /* If there is none, do nothing special,
4308 since ordinary death handling can understand these insns. */
4312 /* See if the hard reg holding the value is dead.
4313 If this is a PARALLEL, find the call within it. */
4314 call_pat
= PATTERN (call
);
4315 if (GET_CODE (call_pat
) == PARALLEL
)
4317 for (i
= XVECLEN (call_pat
, 0) - 1; i
>= 0; i
--)
4318 if (GET_CODE (XVECEXP (call_pat
, 0, i
)) == SET
4319 && GET_CODE (SET_SRC (XVECEXP (call_pat
, 0, i
))) == CALL
)
4322 /* This may be a library call that is returning a value
4323 via invisible pointer. Do nothing special, since
4324 ordinary death handling can understand these insns. */
4328 call_pat
= XVECEXP (call_pat
, 0, i
);
4331 return insn_dead_p (pbi
, call_pat
, 1, REG_NOTES (call
));
4337 /* Return 1 if register REGNO was used before it was set, i.e. if it is
4338 live at function entry. Don't count global register variables, variables
4339 in registers that can be used for function arg passing, or variables in
4340 fixed hard registers. */
4343 regno_uninitialized (regno
)
4346 if (n_basic_blocks
== 0
4347 || (regno
< FIRST_PSEUDO_REGISTER
4348 && (global_regs
[regno
]
4349 || fixed_regs
[regno
]
4350 || FUNCTION_ARG_REGNO_P (regno
))))
4353 return REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start
, regno
);
4356 /* 1 if register REGNO was alive at a place where `setjmp' was called
4357 and was set more than once or is an argument.
4358 Such regs may be clobbered by `longjmp'. */
4361 regno_clobbered_at_setjmp (regno
)
4364 if (n_basic_blocks
== 0)
4367 return ((REG_N_SETS (regno
) > 1
4368 || REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start
, regno
))
4369 && REGNO_REG_SET_P (regs_live_at_setjmp
, regno
));
4372 /* INSN references memory, possibly using autoincrement addressing modes.
4373 Find any entries on the mem_set_list that need to be invalidated due
4374 to an address change. */
4377 invalidate_mems_from_autoinc (pbi
, insn
)
4378 struct propagate_block_info
*pbi
;
4381 rtx note
= REG_NOTES (insn
);
4382 for (note
= REG_NOTES (insn
); note
; note
= XEXP (note
, 1))
4384 if (REG_NOTE_KIND (note
) == REG_INC
)
4386 rtx temp
= pbi
->mem_set_list
;
4387 rtx prev
= NULL_RTX
;
4392 next
= XEXP (temp
, 1);
4393 if (reg_overlap_mentioned_p (XEXP (note
, 0), XEXP (temp
, 0)))
4395 /* Splice temp out of list. */
4397 XEXP (prev
, 1) = next
;
4399 pbi
->mem_set_list
= next
;
4400 free_EXPR_LIST_node (temp
);
4410 /* EXP is either a MEM or a REG. Remove any dependant entries
4411 from pbi->mem_set_list. */
4414 invalidate_mems_from_set (pbi
, exp
)
4415 struct propagate_block_info
*pbi
;
4418 rtx temp
= pbi
->mem_set_list
;
4419 rtx prev
= NULL_RTX
;
4424 next
= XEXP (temp
, 1);
4425 if ((GET_CODE (exp
) == MEM
4426 && output_dependence (XEXP (temp
, 0), exp
))
4427 || (GET_CODE (exp
) == REG
4428 && reg_overlap_mentioned_p (exp
, XEXP (temp
, 0))))
4430 /* Splice this entry out of the list. */
4432 XEXP (prev
, 1) = next
;
4434 pbi
->mem_set_list
= next
;
4435 free_EXPR_LIST_node (temp
);
4443 /* Process the registers that are set within X. Their bits are set to
4444 1 in the regset DEAD, because they are dead prior to this insn.
4446 If INSN is nonzero, it is the insn being processed.
4448 FLAGS is the set of operations to perform. */
4451 mark_set_regs (pbi
, x
, insn
)
4452 struct propagate_block_info
*pbi
;
4455 rtx cond
= NULL_RTX
;
4460 for (link
= REG_NOTES (insn
); link
; link
= XEXP (link
, 1))
4462 if (REG_NOTE_KIND (link
) == REG_INC
)
4463 mark_set_1 (pbi
, SET
, XEXP (link
, 0),
4464 (GET_CODE (x
) == COND_EXEC
4465 ? COND_EXEC_TEST (x
) : NULL_RTX
),
4469 switch (code
= GET_CODE (x
))
4473 mark_set_1 (pbi
, code
, SET_DEST (x
), cond
, insn
, pbi
->flags
);
4477 cond
= COND_EXEC_TEST (x
);
4478 x
= COND_EXEC_CODE (x
);
4484 for (i
= XVECLEN (x
, 0) - 1; i
>= 0; i
--)
4486 rtx sub
= XVECEXP (x
, 0, i
);
4487 switch (code
= GET_CODE (sub
))
4490 if (cond
!= NULL_RTX
)
4493 cond
= COND_EXEC_TEST (sub
);
4494 sub
= COND_EXEC_CODE (sub
);
4495 if (GET_CODE (sub
) != SET
&& GET_CODE (sub
) != CLOBBER
)
4501 mark_set_1 (pbi
, code
, SET_DEST (sub
), cond
, insn
, pbi
->flags
);
4516 /* Process a single SET rtx, X. */
4519 mark_set_1 (pbi
, code
, reg
, cond
, insn
, flags
)
4520 struct propagate_block_info
*pbi
;
4522 rtx reg
, cond
, insn
;
4525 int regno_first
= -1, regno_last
= -1;
4529 /* Some targets place small structures in registers for
4530 return values of functions. We have to detect this
4531 case specially here to get correct flow information. */
4532 if (GET_CODE (reg
) == PARALLEL
4533 && GET_MODE (reg
) == BLKmode
)
4535 for (i
= XVECLEN (reg
, 0) - 1; i
>= 0; i
--)
4536 mark_set_1 (pbi
, code
, XVECEXP (reg
, 0, i
), cond
, insn
, flags
);
4540 /* Modifying just one hardware register of a multi-reg value or just a
4541 byte field of a register does not mean the value from before this insn
4542 is now dead. Of course, if it was dead after it's unused now. */
4544 switch (GET_CODE (reg
))
4548 case STRICT_LOW_PART
:
4549 /* ??? Assumes STRICT_LOW_PART not used on multi-word registers. */
4551 reg
= XEXP (reg
, 0);
4552 while (GET_CODE (reg
) == SUBREG
4553 || GET_CODE (reg
) == ZERO_EXTRACT
4554 || GET_CODE (reg
) == SIGN_EXTRACT
4555 || GET_CODE (reg
) == STRICT_LOW_PART
);
4556 if (GET_CODE (reg
) == MEM
)
4558 not_dead
= REGNO_REG_SET_P (pbi
->reg_live
, REGNO (reg
));
4562 regno_last
= regno_first
= REGNO (reg
);
4563 if (regno_first
< FIRST_PSEUDO_REGISTER
)
4564 regno_last
+= HARD_REGNO_NREGS (regno_first
, GET_MODE (reg
)) - 1;
4568 if (GET_CODE (SUBREG_REG (reg
)) == REG
)
4570 enum machine_mode outer_mode
= GET_MODE (reg
);
4571 enum machine_mode inner_mode
= GET_MODE (SUBREG_REG (reg
));
4573 /* Identify the range of registers affected. This is moderately
4574 tricky for hard registers. See alter_subreg. */
4576 regno_last
= regno_first
= REGNO (SUBREG_REG (reg
));
4577 if (regno_first
< FIRST_PSEUDO_REGISTER
)
4579 #ifdef ALTER_HARD_SUBREG
4580 regno_first
= ALTER_HARD_SUBREG (outer_mode
, SUBREG_WORD (reg
),
4581 inner_mode
, regno_first
);
4583 regno_first
+= SUBREG_WORD (reg
);
4585 regno_last
= (regno_first
4586 + HARD_REGNO_NREGS (regno_first
, outer_mode
) - 1);
4588 /* Since we've just adjusted the register number ranges, make
4589 sure REG matches. Otherwise some_was_live will be clear
4590 when it shouldn't have been, and we'll create incorrect
4591 REG_UNUSED notes. */
4592 reg
= gen_rtx_REG (outer_mode
, regno_first
);
4596 /* If the number of words in the subreg is less than the number
4597 of words in the full register, we have a well-defined partial
4598 set. Otherwise the high bits are undefined.
4600 This is only really applicable to pseudos, since we just took
4601 care of multi-word hard registers. */
4602 if (((GET_MODE_SIZE (outer_mode
)
4603 + UNITS_PER_WORD
- 1) / UNITS_PER_WORD
)
4604 < ((GET_MODE_SIZE (inner_mode
)
4605 + UNITS_PER_WORD
- 1) / UNITS_PER_WORD
))
4606 not_dead
= REGNO_REG_SET_P (pbi
->reg_live
, regno_first
);
4608 reg
= SUBREG_REG (reg
);
4612 reg
= SUBREG_REG (reg
);
4619 /* If this set is a MEM, then it kills any aliased writes.
4620 If this set is a REG, then it kills any MEMs which use the reg. */
4621 if (optimize
&& (flags
& PROP_SCAN_DEAD_CODE
))
4623 if (GET_CODE (reg
) == MEM
|| GET_CODE (reg
) == REG
)
4624 invalidate_mems_from_set (pbi
, reg
);
4626 /* If the memory reference had embedded side effects (autoincrement
4627 address modes. Then we may need to kill some entries on the
4629 if (insn
&& GET_CODE (reg
) == MEM
)
4630 invalidate_mems_from_autoinc (pbi
, insn
);
4632 if (GET_CODE (reg
) == MEM
&& ! side_effects_p (reg
)
4633 /* ??? With more effort we could track conditional memory life. */
4635 /* We do not know the size of a BLKmode store, so we do not track
4636 them for redundant store elimination. */
4637 && GET_MODE (reg
) != BLKmode
4638 /* There are no REG_INC notes for SP, so we can't assume we'll see
4639 everything that invalidates it. To be safe, don't eliminate any
4640 stores though SP; none of them should be redundant anyway. */
4641 && ! reg_mentioned_p (stack_pointer_rtx
, reg
))
4644 /* Store a copy of mem, otherwise the address may be
4645 scrogged by find_auto_inc. */
4646 if (flags
& PROP_AUTOINC
)
4647 reg
= shallow_copy_rtx (reg
);
4649 pbi
->mem_set_list
= alloc_EXPR_LIST (0, reg
, pbi
->mem_set_list
);
4653 if (GET_CODE (reg
) == REG
4654 && ! (regno_first
== FRAME_POINTER_REGNUM
4655 && (! reload_completed
|| frame_pointer_needed
))
4656 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4657 && ! (regno_first
== HARD_FRAME_POINTER_REGNUM
4658 && (! reload_completed
|| frame_pointer_needed
))
4660 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4661 && ! (regno_first
== ARG_POINTER_REGNUM
&& fixed_regs
[regno_first
])
4665 int some_was_live
= 0, some_was_dead
= 0;
4667 for (i
= regno_first
; i
<= regno_last
; ++i
)
4669 int needed_regno
= REGNO_REG_SET_P (pbi
->reg_live
, i
);
4672 /* Order of the set operation matters here since both
4673 sets may be the same. */
4674 CLEAR_REGNO_REG_SET (pbi
->cond_local_set
, i
);
4675 if (cond
!= NULL_RTX
4676 && ! REGNO_REG_SET_P (pbi
->local_set
, i
))
4677 SET_REGNO_REG_SET (pbi
->cond_local_set
, i
);
4679 SET_REGNO_REG_SET (pbi
->local_set
, i
);
4681 if (code
!= CLOBBER
)
4682 SET_REGNO_REG_SET (pbi
->new_set
, i
);
4684 some_was_live
|= needed_regno
;
4685 some_was_dead
|= ! needed_regno
;
4688 #ifdef HAVE_conditional_execution
4689 /* Consider conditional death in deciding that the register needs
4691 if (some_was_live
&& ! not_dead
4692 /* The stack pointer is never dead. Well, not strictly true,
4693 but it's very difficult to tell from here. Hopefully
4694 combine_stack_adjustments will fix up the most egregious
4696 && regno_first
!= STACK_POINTER_REGNUM
)
4698 for (i
= regno_first
; i
<= regno_last
; ++i
)
4699 if (! mark_regno_cond_dead (pbi
, i
, cond
))
4704 /* Additional data to record if this is the final pass. */
4705 if (flags
& (PROP_LOG_LINKS
| PROP_REG_INFO
4706 | PROP_DEATH_NOTES
| PROP_AUTOINC
))
4709 register int blocknum
= pbi
->bb
->index
;
4712 if (flags
& (PROP_LOG_LINKS
| PROP_AUTOINC
))
4714 y
= pbi
->reg_next_use
[regno_first
];
4716 /* The next use is no longer next, since a store intervenes. */
4717 for (i
= regno_first
; i
<= regno_last
; ++i
)
4718 pbi
->reg_next_use
[i
] = 0;
4721 if (flags
& PROP_REG_INFO
)
4723 for (i
= regno_first
; i
<= regno_last
; ++i
)
4725 /* Count (weighted) references, stores, etc. This counts a
4726 register twice if it is modified, but that is correct. */
4727 REG_N_SETS (i
) += 1;
4728 REG_N_REFS (i
) += (optimize_size
? 1
4729 : pbi
->bb
->loop_depth
+ 1);
4731 /* The insns where a reg is live are normally counted
4732 elsewhere, but we want the count to include the insn
4733 where the reg is set, and the normal counting mechanism
4734 would not count it. */
4735 REG_LIVE_LENGTH (i
) += 1;
4738 /* If this is a hard reg, record this function uses the reg. */
4739 if (regno_first
< FIRST_PSEUDO_REGISTER
)
4741 for (i
= regno_first
; i
<= regno_last
; i
++)
4742 regs_ever_live
[i
] = 1;
4746 /* Keep track of which basic blocks each reg appears in. */
4747 if (REG_BASIC_BLOCK (regno_first
) == REG_BLOCK_UNKNOWN
)
4748 REG_BASIC_BLOCK (regno_first
) = blocknum
;
4749 else if (REG_BASIC_BLOCK (regno_first
) != blocknum
)
4750 REG_BASIC_BLOCK (regno_first
) = REG_BLOCK_GLOBAL
;
4754 if (! some_was_dead
)
4756 if (flags
& PROP_LOG_LINKS
)
4758 /* Make a logical link from the next following insn
4759 that uses this register, back to this insn.
4760 The following insns have already been processed.
4762 We don't build a LOG_LINK for hard registers containing
4763 in ASM_OPERANDs. If these registers get replaced,
4764 we might wind up changing the semantics of the insn,
4765 even if reload can make what appear to be valid
4766 assignments later. */
4767 if (y
&& (BLOCK_NUM (y
) == blocknum
)
4768 && (regno_first
>= FIRST_PSEUDO_REGISTER
4769 || asm_noperands (PATTERN (y
)) < 0))
4770 LOG_LINKS (y
) = alloc_INSN_LIST (insn
, LOG_LINKS (y
));
4775 else if (! some_was_live
)
4777 if (flags
& PROP_REG_INFO
)
4778 REG_N_DEATHS (regno_first
) += 1;
4780 if (flags
& PROP_DEATH_NOTES
)
4782 /* Note that dead stores have already been deleted
4783 when possible. If we get here, we have found a
4784 dead store that cannot be eliminated (because the
4785 same insn does something useful). Indicate this
4786 by marking the reg being set as dying here. */
4788 = alloc_EXPR_LIST (REG_UNUSED
, reg
, REG_NOTES (insn
));
4793 if (flags
& PROP_DEATH_NOTES
)
4795 /* This is a case where we have a multi-word hard register
4796 and some, but not all, of the words of the register are
4797 needed in subsequent insns. Write REG_UNUSED notes
4798 for those parts that were not needed. This case should
4801 for (i
= regno_first
; i
<= regno_last
; ++i
)
4802 if (! REGNO_REG_SET_P (pbi
->reg_live
, i
))
4804 = alloc_EXPR_LIST (REG_UNUSED
,
4805 gen_rtx_REG (reg_raw_mode
[i
], i
),
4811 /* Mark the register as being dead. */
4814 /* The stack pointer is never dead. Well, not strictly true,
4815 but it's very difficult to tell from here. Hopefully
4816 combine_stack_adjustments will fix up the most egregious
4818 && regno_first
!= STACK_POINTER_REGNUM
)
4820 for (i
= regno_first
; i
<= regno_last
; ++i
)
4821 CLEAR_REGNO_REG_SET (pbi
->reg_live
, i
);
4824 else if (GET_CODE (reg
) == REG
)
4826 if (flags
& (PROP_LOG_LINKS
| PROP_AUTOINC
))
4827 pbi
->reg_next_use
[regno_first
] = 0;
4830 /* If this is the last pass and this is a SCRATCH, show it will be dying
4831 here and count it. */
4832 else if (GET_CODE (reg
) == SCRATCH
)
4834 if (flags
& PROP_DEATH_NOTES
)
4836 = alloc_EXPR_LIST (REG_UNUSED
, reg
, REG_NOTES (insn
));
4840 #ifdef HAVE_conditional_execution
4841 /* Mark REGNO conditionally dead.
4842 Return true if the register is now unconditionally dead. */
4845 mark_regno_cond_dead (pbi
, regno
, cond
)
4846 struct propagate_block_info
*pbi
;
4850 /* If this is a store to a predicate register, the value of the
4851 predicate is changing, we don't know that the predicate as seen
4852 before is the same as that seen after. Flush all dependent
4853 conditions from reg_cond_dead. This will make all such
4854 conditionally live registers unconditionally live. */
4855 if (REGNO_REG_SET_P (pbi
->reg_cond_reg
, regno
))
4856 flush_reg_cond_reg (pbi
, regno
);
4858 /* If this is an unconditional store, remove any conditional
4859 life that may have existed. */
4860 if (cond
== NULL_RTX
)
4861 splay_tree_remove (pbi
->reg_cond_dead
, regno
);
4864 splay_tree_node node
;
4865 struct reg_cond_life_info
*rcli
;
4868 /* Otherwise this is a conditional set. Record that fact.
4869 It may have been conditionally used, or there may be a
4870 subsequent set with a complimentary condition. */
4872 node
= splay_tree_lookup (pbi
->reg_cond_dead
, regno
);
4875 /* The register was unconditionally live previously.
4876 Record the current condition as the condition under
4877 which it is dead. */
4878 rcli
= (struct reg_cond_life_info
*) xmalloc (sizeof (*rcli
));
4879 rcli
->condition
= alloc_EXPR_LIST (0, cond
, NULL_RTX
);
4880 splay_tree_insert (pbi
->reg_cond_dead
, regno
,
4881 (splay_tree_value
) rcli
);
4883 SET_REGNO_REG_SET (pbi
->reg_cond_reg
, REGNO (XEXP (cond
, 0)));
4885 /* Not unconditionaly dead. */
4890 /* The register was conditionally live previously.
4891 Add the new condition to the old. */
4892 rcli
= (struct reg_cond_life_info
*) node
->value
;
4893 ncond
= rcli
->condition
;
4894 ncond
= ior_reg_cond (ncond
, cond
);
4896 /* If the register is now unconditionally dead,
4897 remove the entry in the splay_tree. */
4898 if (ncond
== const1_rtx
)
4899 splay_tree_remove (pbi
->reg_cond_dead
, regno
);
4902 rcli
->condition
= ncond
;
4904 SET_REGNO_REG_SET (pbi
->reg_cond_reg
, REGNO (XEXP (cond
, 0)));
4906 /* Not unconditionaly dead. */
4915 /* Called from splay_tree_delete for pbi->reg_cond_life. */
4918 free_reg_cond_life_info (value
)
4919 splay_tree_value value
;
4921 struct reg_cond_life_info
*rcli
= (struct reg_cond_life_info
*) value
;
4922 free_EXPR_LIST_list (&rcli
->condition
);
4926 /* Helper function for flush_reg_cond_reg. */
4929 flush_reg_cond_reg_1 (node
, data
)
4930 splay_tree_node node
;
4933 struct reg_cond_life_info
*rcli
;
4934 int *xdata
= (int *) data
;
4935 unsigned int regno
= xdata
[0];
4938 /* Don't need to search if last flushed value was farther on in
4939 the in-order traversal. */
4940 if (xdata
[1] >= (int) node
->key
)
4943 /* Splice out portions of the expression that refer to regno. */
4944 rcli
= (struct reg_cond_life_info
*) node
->value
;
4945 c
= *(prev
= &rcli
->condition
);
4948 if (regno
== REGNO (XEXP (XEXP (c
, 0), 0)))
4950 rtx next
= XEXP (c
, 1);
4951 free_EXPR_LIST_node (c
);
4955 c
= *(prev
= &XEXP (c
, 1));
4958 /* If the entire condition is now NULL, signal the node to be removed. */
4959 if (! rcli
->condition
)
4961 xdata
[1] = node
->key
;
4968 /* Flush all (sub) expressions referring to REGNO from REG_COND_LIVE. */
4971 flush_reg_cond_reg (pbi
, regno
)
4972 struct propagate_block_info
*pbi
;
4979 while (splay_tree_foreach (pbi
->reg_cond_dead
,
4980 flush_reg_cond_reg_1
, pair
) == -1)
4981 splay_tree_remove (pbi
->reg_cond_dead
, pair
[1]);
4983 CLEAR_REGNO_REG_SET (pbi
->reg_cond_reg
, regno
);
4986 /* Logical arithmetic on predicate conditions. IOR, NOT and NAND.
4987 We actually use EXPR_LIST to chain the sub-expressions together
4988 instead of IOR because it's easier to manipulate and we have
4989 the lists.c functions to reuse nodes.
4991 Return a new rtl expression as appropriate. */
4994 ior_reg_cond (old
, x
)
4997 enum rtx_code x_code
;
5001 /* We expect these conditions to be of the form (eq reg 0). */
5002 x_code
= GET_CODE (x
);
5003 if (GET_RTX_CLASS (x_code
) != '<'
5004 || GET_CODE (x_reg
= XEXP (x
, 0)) != REG
5005 || XEXP (x
, 1) != const0_rtx
)
5008 /* Search the expression for an existing sub-expression of X_REG. */
5009 for (c
= old
; c
; c
= XEXP (c
, 1))
5011 rtx y
= XEXP (c
, 0);
5012 if (REGNO (XEXP (y
, 0)) == REGNO (x_reg
))
5014 /* If we find X already present in OLD, we need do nothing. */
5015 if (GET_CODE (y
) == x_code
)
5018 /* If we find X being a compliment of a condition in OLD,
5019 then the entire condition is true. */
5020 if (GET_CODE (y
) == reverse_condition (x_code
))
5025 /* Otherwise just add to the chain. */
5026 return alloc_EXPR_LIST (0, x
, old
);
5033 enum rtx_code x_code
;
5036 /* We expect these conditions to be of the form (eq reg 0). */
5037 x_code
= GET_CODE (x
);
5038 if (GET_RTX_CLASS (x_code
) != '<'
5039 || GET_CODE (x_reg
= XEXP (x
, 0)) != REG
5040 || XEXP (x
, 1) != const0_rtx
)
5043 return alloc_EXPR_LIST (0, gen_rtx_fmt_ee (reverse_condition (x_code
),
5044 VOIDmode
, x_reg
, const0_rtx
),
5049 nand_reg_cond (old
, x
)
5052 enum rtx_code x_code
;
5056 /* We expect these conditions to be of the form (eq reg 0). */
5057 x_code
= GET_CODE (x
);
5058 if (GET_RTX_CLASS (x_code
) != '<'
5059 || GET_CODE (x_reg
= XEXP (x
, 0)) != REG
5060 || XEXP (x
, 1) != const0_rtx
)
5063 /* Search the expression for an existing sub-expression of X_REG. */
5065 for (c
= *(prev
= &old
); c
; c
= *(prev
= &XEXP (c
, 1)))
5067 rtx y
= XEXP (c
, 0);
5068 if (REGNO (XEXP (y
, 0)) == REGNO (x_reg
))
5070 /* If we find X already present in OLD, then we need to
5072 if (GET_CODE (y
) == x_code
)
5074 *prev
= XEXP (c
, 1);
5075 free_EXPR_LIST_node (c
);
5076 return old
? old
: const0_rtx
;
5079 /* If we find X being a compliment of a condition in OLD,
5080 then we need do nothing. */
5081 if (GET_CODE (y
) == reverse_condition (x_code
))
5086 /* Otherwise, by implication, the register in question is now live for
5087 the inverse of the condition X. */
5088 return alloc_EXPR_LIST (0, gen_rtx_fmt_ee (reverse_condition (x_code
),
5089 VOIDmode
, x_reg
, const0_rtx
),
5092 #endif /* HAVE_conditional_execution */
5096 /* Try to substitute the auto-inc expression INC as the address inside
5097 MEM which occurs in INSN. Currently, the address of MEM is an expression
5098 involving INCR_REG, and INCR is the next use of INCR_REG; it is an insn
5099 that has a single set whose source is a PLUS of INCR_REG and something
5103 attempt_auto_inc (pbi
, inc
, insn
, mem
, incr
, incr_reg
)
5104 struct propagate_block_info
*pbi
;
5105 rtx inc
, insn
, mem
, incr
, incr_reg
;
5107 int regno
= REGNO (incr_reg
);
5108 rtx set
= single_set (incr
);
5109 rtx q
= SET_DEST (set
);
5110 rtx y
= SET_SRC (set
);
5111 int opnum
= XEXP (y
, 0) == incr_reg
? 0 : 1;
5113 /* Make sure this reg appears only once in this insn. */
5114 if (count_occurrences (PATTERN (insn
), incr_reg
, 1) != 1)
5117 if (dead_or_set_p (incr
, incr_reg
)
5118 /* Mustn't autoinc an eliminable register. */
5119 && (regno
>= FIRST_PSEUDO_REGISTER
5120 || ! TEST_HARD_REG_BIT (elim_reg_set
, regno
)))
5122 /* This is the simple case. Try to make the auto-inc. If
5123 we can't, we are done. Otherwise, we will do any
5124 needed updates below. */
5125 if (! validate_change (insn
, &XEXP (mem
, 0), inc
, 0))
5128 else if (GET_CODE (q
) == REG
5129 /* PREV_INSN used here to check the semi-open interval
5131 && ! reg_used_between_p (q
, PREV_INSN (insn
), incr
)
5132 /* We must also check for sets of q as q may be
5133 a call clobbered hard register and there may
5134 be a call between PREV_INSN (insn) and incr. */
5135 && ! reg_set_between_p (q
, PREV_INSN (insn
), incr
))
5137 /* We have *p followed sometime later by q = p+size.
5138 Both p and q must be live afterward,
5139 and q is not used between INSN and its assignment.
5140 Change it to q = p, ...*q..., q = q+size.
5141 Then fall into the usual case. */
5145 emit_move_insn (q
, incr_reg
);
5146 insns
= get_insns ();
5149 if (basic_block_for_insn
)
5150 for (temp
= insns
; temp
; temp
= NEXT_INSN (temp
))
5151 set_block_for_insn (temp
, pbi
->bb
);
5153 /* If we can't make the auto-inc, or can't make the
5154 replacement into Y, exit. There's no point in making
5155 the change below if we can't do the auto-inc and doing
5156 so is not correct in the pre-inc case. */
5159 validate_change (insn
, &XEXP (mem
, 0), inc
, 1);
5160 validate_change (incr
, &XEXP (y
, opnum
), q
, 1);
5161 if (! apply_change_group ())
5164 /* We now know we'll be doing this change, so emit the
5165 new insn(s) and do the updates. */
5166 emit_insns_before (insns
, insn
);
5168 if (pbi
->bb
->head
== insn
)
5169 pbi
->bb
->head
= insns
;
5171 /* INCR will become a NOTE and INSN won't contain a
5172 use of INCR_REG. If a use of INCR_REG was just placed in
5173 the insn before INSN, make that the next use.
5174 Otherwise, invalidate it. */
5175 if (GET_CODE (PREV_INSN (insn
)) == INSN
5176 && GET_CODE (PATTERN (PREV_INSN (insn
))) == SET
5177 && SET_SRC (PATTERN (PREV_INSN (insn
))) == incr_reg
)
5178 pbi
->reg_next_use
[regno
] = PREV_INSN (insn
);
5180 pbi
->reg_next_use
[regno
] = 0;
5185 /* REGNO is now used in INCR which is below INSN, but
5186 it previously wasn't live here. If we don't mark
5187 it as live, we'll put a REG_DEAD note for it
5188 on this insn, which is incorrect. */
5189 SET_REGNO_REG_SET (pbi
->reg_live
, regno
);
5191 /* If there are any calls between INSN and INCR, show
5192 that REGNO now crosses them. */
5193 for (temp
= insn
; temp
!= incr
; temp
= NEXT_INSN (temp
))
5194 if (GET_CODE (temp
) == CALL_INSN
)
5195 REG_N_CALLS_CROSSED (regno
)++;
5200 /* If we haven't returned, it means we were able to make the
5201 auto-inc, so update the status. First, record that this insn
5202 has an implicit side effect. */
5204 REG_NOTES (insn
) = alloc_EXPR_LIST (REG_INC
, incr_reg
, REG_NOTES (insn
));
5206 /* Modify the old increment-insn to simply copy
5207 the already-incremented value of our register. */
5208 if (! validate_change (incr
, &SET_SRC (set
), incr_reg
, 0))
5211 /* If that makes it a no-op (copying the register into itself) delete
5212 it so it won't appear to be a "use" and a "set" of this
5214 if (REGNO (SET_DEST (set
)) == REGNO (incr_reg
))
5216 /* If the original source was dead, it's dead now. */
5219 while ((note
= find_reg_note (incr
, REG_DEAD
, NULL_RTX
)) != NULL_RTX
)
5221 remove_note (incr
, note
);
5222 if (XEXP (note
, 0) != incr_reg
)
5223 CLEAR_REGNO_REG_SET (pbi
->reg_live
, REGNO (XEXP (note
, 0)));
5226 PUT_CODE (incr
, NOTE
);
5227 NOTE_LINE_NUMBER (incr
) = NOTE_INSN_DELETED
;
5228 NOTE_SOURCE_FILE (incr
) = 0;
5231 if (regno
>= FIRST_PSEUDO_REGISTER
)
5233 /* Count an extra reference to the reg. When a reg is
5234 incremented, spilling it is worse, so we want to make
5235 that less likely. */
5236 REG_N_REFS (regno
) += (optimize_size
? 1 : pbi
->bb
->loop_depth
+ 1);
5238 /* Count the increment as a setting of the register,
5239 even though it isn't a SET in rtl. */
5240 REG_N_SETS (regno
)++;
5244 /* X is a MEM found in INSN. See if we can convert it into an auto-increment
5248 find_auto_inc (pbi
, x
, insn
)
5249 struct propagate_block_info
*pbi
;
5253 rtx addr
= XEXP (x
, 0);
5254 HOST_WIDE_INT offset
= 0;
5255 rtx set
, y
, incr
, inc_val
;
5257 int size
= GET_MODE_SIZE (GET_MODE (x
));
5259 if (GET_CODE (insn
) == JUMP_INSN
)
5262 /* Here we detect use of an index register which might be good for
5263 postincrement, postdecrement, preincrement, or predecrement. */
5265 if (GET_CODE (addr
) == PLUS
&& GET_CODE (XEXP (addr
, 1)) == CONST_INT
)
5266 offset
= INTVAL (XEXP (addr
, 1)), addr
= XEXP (addr
, 0);
5268 if (GET_CODE (addr
) != REG
)
5271 regno
= REGNO (addr
);
5273 /* Is the next use an increment that might make auto-increment? */
5274 incr
= pbi
->reg_next_use
[regno
];
5275 if (incr
== 0 || BLOCK_NUM (incr
) != BLOCK_NUM (insn
))
5277 set
= single_set (incr
);
5278 if (set
== 0 || GET_CODE (set
) != SET
)
5282 if (GET_CODE (y
) != PLUS
)
5285 if (REG_P (XEXP (y
, 0)) && REGNO (XEXP (y
, 0)) == REGNO (addr
))
5286 inc_val
= XEXP (y
, 1);
5287 else if (REG_P (XEXP (y
, 1)) && REGNO (XEXP (y
, 1)) == REGNO (addr
))
5288 inc_val
= XEXP (y
, 0);
5292 if (GET_CODE (inc_val
) == CONST_INT
)
5294 if (HAVE_POST_INCREMENT
5295 && (INTVAL (inc_val
) == size
&& offset
== 0))
5296 attempt_auto_inc (pbi
, gen_rtx_POST_INC (Pmode
, addr
), insn
, x
,
5298 else if (HAVE_POST_DECREMENT
5299 && (INTVAL (inc_val
) == -size
&& offset
== 0))
5300 attempt_auto_inc (pbi
, gen_rtx_POST_DEC (Pmode
, addr
), insn
, x
,
5302 else if (HAVE_PRE_INCREMENT
5303 && (INTVAL (inc_val
) == size
&& offset
== size
))
5304 attempt_auto_inc (pbi
, gen_rtx_PRE_INC (Pmode
, addr
), insn
, x
,
5306 else if (HAVE_PRE_DECREMENT
5307 && (INTVAL (inc_val
) == -size
&& offset
== -size
))
5308 attempt_auto_inc (pbi
, gen_rtx_PRE_DEC (Pmode
, addr
), insn
, x
,
5310 else if (HAVE_POST_MODIFY_DISP
&& offset
== 0)
5311 attempt_auto_inc (pbi
, gen_rtx_POST_MODIFY (Pmode
, addr
,
5312 gen_rtx_PLUS (Pmode
,
5315 insn
, x
, incr
, addr
);
5317 else if (GET_CODE (inc_val
) == REG
5318 && ! reg_set_between_p (inc_val
, PREV_INSN (insn
),
5322 if (HAVE_POST_MODIFY_REG
&& offset
== 0)
5323 attempt_auto_inc (pbi
, gen_rtx_POST_MODIFY (Pmode
, addr
,
5324 gen_rtx_PLUS (Pmode
,
5327 insn
, x
, incr
, addr
);
5331 #endif /* AUTO_INC_DEC */
5334 mark_used_reg (pbi
, reg
, cond
, insn
)
5335 struct propagate_block_info
*pbi
;
5337 rtx cond ATTRIBUTE_UNUSED
;
5340 int regno
= REGNO (reg
);
5341 int some_was_live
= REGNO_REG_SET_P (pbi
->reg_live
, regno
);
5342 int some_was_dead
= ! some_was_live
;
5346 /* A hard reg in a wide mode may really be multiple registers.
5347 If so, mark all of them just like the first. */
5348 if (regno
< FIRST_PSEUDO_REGISTER
)
5350 n
= HARD_REGNO_NREGS (regno
, GET_MODE (reg
));
5353 int needed_regno
= REGNO_REG_SET_P (pbi
->reg_live
, regno
+ n
);
5354 some_was_live
|= needed_regno
;
5355 some_was_dead
|= ! needed_regno
;
5359 if (pbi
->flags
& (PROP_LOG_LINKS
| PROP_AUTOINC
))
5361 /* Record where each reg is used, so when the reg is set we know
5362 the next insn that uses it. */
5363 pbi
->reg_next_use
[regno
] = insn
;
5366 if (pbi
->flags
& PROP_REG_INFO
)
5368 if (regno
< FIRST_PSEUDO_REGISTER
)
5370 /* If this is a register we are going to try to eliminate,
5371 don't mark it live here. If we are successful in
5372 eliminating it, it need not be live unless it is used for
5373 pseudos, in which case it will have been set live when it
5374 was allocated to the pseudos. If the register will not
5375 be eliminated, reload will set it live at that point.
5377 Otherwise, record that this function uses this register. */
5378 /* ??? The PPC backend tries to "eliminate" on the pic
5379 register to itself. This should be fixed. In the mean
5380 time, hack around it. */
5382 if (! (TEST_HARD_REG_BIT (elim_reg_set
, regno
)
5383 && (regno
== FRAME_POINTER_REGNUM
5384 || regno
== ARG_POINTER_REGNUM
)))
5386 int n
= HARD_REGNO_NREGS (regno
, GET_MODE (reg
));
5388 regs_ever_live
[regno
+ --n
] = 1;
5394 /* Keep track of which basic block each reg appears in. */
5396 register int blocknum
= pbi
->bb
->index
;
5397 if (REG_BASIC_BLOCK (regno
) == REG_BLOCK_UNKNOWN
)
5398 REG_BASIC_BLOCK (regno
) = blocknum
;
5399 else if (REG_BASIC_BLOCK (regno
) != blocknum
)
5400 REG_BASIC_BLOCK (regno
) = REG_BLOCK_GLOBAL
;
5402 /* Count (weighted) number of uses of each reg. */
5403 REG_N_REFS (regno
) += (optimize_size
? 1
5404 : pbi
->bb
->loop_depth
+ 1);
5408 /* Find out if any of the register was set this insn. */
5409 some_not_set
= ! REGNO_REG_SET_P (pbi
->new_set
, regno
);
5410 if (regno
< FIRST_PSEUDO_REGISTER
)
5412 n
= HARD_REGNO_NREGS (regno
, GET_MODE (reg
));
5414 some_not_set
|= ! REGNO_REG_SET_P (pbi
->new_set
, regno
+ n
);
5417 /* Record and count the insns in which a reg dies. If it is used in
5418 this insn and was dead below the insn then it dies in this insn.
5419 If it was set in this insn, we do not make a REG_DEAD note;
5420 likewise if we already made such a note. */
5421 if ((pbi
->flags
& (PROP_DEATH_NOTES
| PROP_REG_INFO
))
5425 /* Check for the case where the register dying partially
5426 overlaps the register set by this insn. */
5427 if (regno
< FIRST_PSEUDO_REGISTER
5428 && HARD_REGNO_NREGS (regno
, GET_MODE (reg
)) > 1)
5430 n
= HARD_REGNO_NREGS (regno
, GET_MODE (reg
));
5432 some_was_live
|= REGNO_REG_SET_P (pbi
->new_set
, regno
+ n
);
5435 /* If none of the words in X is needed, make a REG_DEAD note.
5436 Otherwise, we must make partial REG_DEAD notes. */
5437 if (! some_was_live
)
5439 if ((pbi
->flags
& PROP_DEATH_NOTES
)
5440 && ! find_regno_note (insn
, REG_DEAD
, regno
))
5442 = alloc_EXPR_LIST (REG_DEAD
, reg
, REG_NOTES (insn
));
5444 if (pbi
->flags
& PROP_REG_INFO
)
5445 REG_N_DEATHS (regno
)++;
5449 /* Don't make a REG_DEAD note for a part of a register
5450 that is set in the insn. */
5452 n
= regno
+ HARD_REGNO_NREGS (regno
, GET_MODE (reg
)) - 1;
5453 for (; n
>= regno
; n
--)
5454 if (! REGNO_REG_SET_P (pbi
->reg_live
, n
)
5455 && ! dead_or_set_regno_p (insn
, n
))
5457 = alloc_EXPR_LIST (REG_DEAD
,
5458 gen_rtx_REG (reg_raw_mode
[n
], n
),
5463 SET_REGNO_REG_SET (pbi
->reg_live
, regno
);
5464 if (regno
< FIRST_PSEUDO_REGISTER
)
5466 n
= HARD_REGNO_NREGS (regno
, GET_MODE (reg
));
5468 SET_REGNO_REG_SET (pbi
->reg_live
, regno
+ n
);
5471 #ifdef HAVE_conditional_execution
5472 /* If this is a conditional use, record that fact. If it is later
5473 conditionally set, we'll know to kill the register. */
5474 if (cond
!= NULL_RTX
)
5476 splay_tree_node node
;
5477 struct reg_cond_life_info
*rcli
;
5482 node
= splay_tree_lookup (pbi
->reg_cond_dead
, regno
);
5485 /* The register was unconditionally live previously.
5486 No need to do anything. */
5490 /* The register was conditionally live previously.
5491 Subtract the new life cond from the old death cond. */
5492 rcli
= (struct reg_cond_life_info
*) node
->value
;
5493 ncond
= rcli
->condition
;
5494 ncond
= nand_reg_cond (ncond
, cond
);
5496 /* If the register is now unconditionally live, remove the
5497 entry in the splay_tree. */
5498 if (ncond
== const0_rtx
)
5500 rcli
->condition
= NULL_RTX
;
5501 splay_tree_remove (pbi
->reg_cond_dead
, regno
);
5505 rcli
->condition
= ncond
;
5506 SET_REGNO_REG_SET (pbi
->reg_cond_reg
, REGNO (XEXP (cond
, 0)));
5512 /* The register was not previously live at all. Record
5513 the condition under which it is still dead. */
5514 rcli
= (struct reg_cond_life_info
*) xmalloc (sizeof (*rcli
));
5515 rcli
->condition
= not_reg_cond (cond
);
5516 splay_tree_insert (pbi
->reg_cond_dead
, regno
,
5517 (splay_tree_value
) rcli
);
5519 SET_REGNO_REG_SET (pbi
->reg_cond_reg
, REGNO (XEXP (cond
, 0)));
5522 else if (some_was_live
)
5524 splay_tree_node node
;
5525 struct reg_cond_life_info
*rcli
;
5527 node
= splay_tree_lookup (pbi
->reg_cond_dead
, regno
);
5530 /* The register was conditionally live previously, but is now
5531 unconditionally so. Remove it from the conditionally dead
5532 list, so that a conditional set won't cause us to think
5534 rcli
= (struct reg_cond_life_info
*) node
->value
;
5535 rcli
->condition
= NULL_RTX
;
5536 splay_tree_remove (pbi
->reg_cond_dead
, regno
);
5543 /* Scan expression X and store a 1-bit in NEW_LIVE for each reg it uses.
5544 This is done assuming the registers needed from X are those that
5545 have 1-bits in PBI->REG_LIVE.
5547 INSN is the containing instruction. If INSN is dead, this function
5551 mark_used_regs (pbi
, x
, cond
, insn
)
5552 struct propagate_block_info
*pbi
;
5555 register RTX_CODE code
;
5557 int flags
= pbi
->flags
;
5560 code
= GET_CODE (x
);
5580 /* If we are clobbering a MEM, mark any registers inside the address
5582 if (GET_CODE (XEXP (x
, 0)) == MEM
)
5583 mark_used_regs (pbi
, XEXP (XEXP (x
, 0), 0), cond
, insn
);
5587 /* Don't bother watching stores to mems if this is not the
5588 final pass. We'll not be deleting dead stores this round. */
5589 if (optimize
&& (flags
& PROP_SCAN_DEAD_CODE
))
5591 /* Invalidate the data for the last MEM stored, but only if MEM is
5592 something that can be stored into. */
5593 if (GET_CODE (XEXP (x
, 0)) == SYMBOL_REF
5594 && CONSTANT_POOL_ADDRESS_P (XEXP (x
, 0)))
5595 /* Needn't clear the memory set list. */
5599 rtx temp
= pbi
->mem_set_list
;
5600 rtx prev
= NULL_RTX
;
5605 next
= XEXP (temp
, 1);
5606 if (anti_dependence (XEXP (temp
, 0), x
))
5608 /* Splice temp out of the list. */
5610 XEXP (prev
, 1) = next
;
5612 pbi
->mem_set_list
= next
;
5613 free_EXPR_LIST_node (temp
);
5621 /* If the memory reference had embedded side effects (autoincrement
5622 address modes. Then we may need to kill some entries on the
5625 invalidate_mems_from_autoinc (pbi
, insn
);
5629 if (flags
& PROP_AUTOINC
)
5630 find_auto_inc (pbi
, x
, insn
);
5635 #ifdef CLASS_CANNOT_CHANGE_MODE
5636 if (GET_CODE (SUBREG_REG (x
)) == REG
5637 && REGNO (SUBREG_REG (x
)) >= FIRST_PSEUDO_REGISTER
5638 && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (x
),
5639 GET_MODE (SUBREG_REG (x
))))
5640 REG_CHANGES_MODE (REGNO (SUBREG_REG (x
))) = 1;
5643 /* While we're here, optimize this case. */
5645 if (GET_CODE (x
) != REG
)
5650 /* See a register other than being set => mark it as needed. */
5651 mark_used_reg (pbi
, x
, cond
, insn
);
5656 register rtx testreg
= SET_DEST (x
);
5659 /* If storing into MEM, don't show it as being used. But do
5660 show the address as being used. */
5661 if (GET_CODE (testreg
) == MEM
)
5664 if (flags
& PROP_AUTOINC
)
5665 find_auto_inc (pbi
, testreg
, insn
);
5667 mark_used_regs (pbi
, XEXP (testreg
, 0), cond
, insn
);
5668 mark_used_regs (pbi
, SET_SRC (x
), cond
, insn
);
5672 /* Storing in STRICT_LOW_PART is like storing in a reg
5673 in that this SET might be dead, so ignore it in TESTREG.
5674 but in some other ways it is like using the reg.
5676 Storing in a SUBREG or a bit field is like storing the entire
5677 register in that if the register's value is not used
5678 then this SET is not needed. */
5679 while (GET_CODE (testreg
) == STRICT_LOW_PART
5680 || GET_CODE (testreg
) == ZERO_EXTRACT
5681 || GET_CODE (testreg
) == SIGN_EXTRACT
5682 || GET_CODE (testreg
) == SUBREG
)
5684 #ifdef CLASS_CANNOT_CHANGE_MODE
5685 if (GET_CODE (testreg
) == SUBREG
5686 && GET_CODE (SUBREG_REG (testreg
)) == REG
5687 && REGNO (SUBREG_REG (testreg
)) >= FIRST_PSEUDO_REGISTER
5688 && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (SUBREG_REG (testreg
)),
5689 GET_MODE (testreg
)))
5690 REG_CHANGES_MODE (REGNO (SUBREG_REG (testreg
))) = 1;
5693 /* Modifying a single register in an alternate mode
5694 does not use any of the old value. But these other
5695 ways of storing in a register do use the old value. */
5696 if (GET_CODE (testreg
) == SUBREG
5697 && !(REG_SIZE (SUBREG_REG (testreg
)) > REG_SIZE (testreg
)))
5702 testreg
= XEXP (testreg
, 0);
5705 /* If this is a store into a register, recursively scan the
5706 value being stored. */
5708 if ((GET_CODE (testreg
) == PARALLEL
5709 && GET_MODE (testreg
) == BLKmode
)
5710 || (GET_CODE (testreg
) == REG
5711 && (regno
= REGNO (testreg
),
5712 ! (regno
== FRAME_POINTER_REGNUM
5713 && (! reload_completed
|| frame_pointer_needed
)))
5714 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
5715 && ! (regno
== HARD_FRAME_POINTER_REGNUM
5716 && (! reload_completed
|| frame_pointer_needed
))
5718 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
5719 && ! (regno
== ARG_POINTER_REGNUM
&& fixed_regs
[regno
])
5724 mark_used_regs (pbi
, SET_DEST (x
), cond
, insn
);
5725 mark_used_regs (pbi
, SET_SRC (x
), cond
, insn
);
5732 case UNSPEC_VOLATILE
:
5736 /* Traditional and volatile asm instructions must be considered to use
5737 and clobber all hard registers, all pseudo-registers and all of
5738 memory. So must TRAP_IF and UNSPEC_VOLATILE operations.
5740 Consider for instance a volatile asm that changes the fpu rounding
5741 mode. An insn should not be moved across this even if it only uses
5742 pseudo-regs because it might give an incorrectly rounded result.
5744 ?!? Unfortunately, marking all hard registers as live causes massive
5745 problems for the register allocator and marking all pseudos as live
5746 creates mountains of uninitialized variable warnings.
5748 So for now, just clear the memory set list and mark any regs
5749 we can find in ASM_OPERANDS as used. */
5750 if (code
!= ASM_OPERANDS
|| MEM_VOLATILE_P (x
))
5751 free_EXPR_LIST_list (&pbi
->mem_set_list
);
5753 /* For all ASM_OPERANDS, we must traverse the vector of input operands.
5754 We can not just fall through here since then we would be confused
5755 by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate
5756 traditional asms unlike their normal usage. */
5757 if (code
== ASM_OPERANDS
)
5761 for (j
= 0; j
< ASM_OPERANDS_INPUT_LENGTH (x
); j
++)
5762 mark_used_regs (pbi
, ASM_OPERANDS_INPUT (x
, j
), cond
, insn
);
5768 if (cond
!= NULL_RTX
)
5771 mark_used_regs (pbi
, COND_EXEC_TEST (x
), NULL_RTX
, insn
);
5773 cond
= COND_EXEC_TEST (x
);
5774 x
= COND_EXEC_CODE (x
);
5778 /* We _do_not_ want to scan operands of phi nodes. Operands of
5779 a phi function are evaluated only when control reaches this
5780 block along a particular edge. Therefore, regs that appear
5781 as arguments to phi should not be added to the global live at
5789 /* Recursively scan the operands of this expression. */
5792 register const char *fmt
= GET_RTX_FORMAT (code
);
5795 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
5799 /* Tail recursive case: save a function call level. */
5805 mark_used_regs (pbi
, XEXP (x
, i
), cond
, insn
);
5807 else if (fmt
[i
] == 'E')
5810 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
5811 mark_used_regs (pbi
, XVECEXP (x
, i
, j
), cond
, insn
);
5820 try_pre_increment_1 (pbi
, insn
)
5821 struct propagate_block_info
*pbi
;
5824 /* Find the next use of this reg. If in same basic block,
5825 make it do pre-increment or pre-decrement if appropriate. */
5826 rtx x
= single_set (insn
);
5827 HOST_WIDE_INT amount
= ((GET_CODE (SET_SRC (x
)) == PLUS
? 1 : -1)
5828 * INTVAL (XEXP (SET_SRC (x
), 1)));
5829 int regno
= REGNO (SET_DEST (x
));
5830 rtx y
= pbi
->reg_next_use
[regno
];
5832 && SET_DEST (x
) != stack_pointer_rtx
5833 && BLOCK_NUM (y
) == BLOCK_NUM (insn
)
5834 /* Don't do this if the reg dies, or gets set in y; a standard addressing
5835 mode would be better. */
5836 && ! dead_or_set_p (y
, SET_DEST (x
))
5837 && try_pre_increment (y
, SET_DEST (x
), amount
))
5839 /* We have found a suitable auto-increment and already changed
5840 insn Y to do it. So flush this increment instruction. */
5841 propagate_block_delete_insn (pbi
->bb
, insn
);
5843 /* Count a reference to this reg for the increment insn we are
5844 deleting. When a reg is incremented, spilling it is worse,
5845 so we want to make that less likely. */
5846 if (regno
>= FIRST_PSEUDO_REGISTER
)
5848 REG_N_REFS (regno
) += (optimize_size
? 1
5849 : pbi
->bb
->loop_depth
+ 1);
5850 REG_N_SETS (regno
)++;
5853 /* Flush any remembered memories depending on the value of
5854 the incremented register. */
5855 invalidate_mems_from_set (pbi
, SET_DEST (x
));
5862 /* Try to change INSN so that it does pre-increment or pre-decrement
5863 addressing on register REG in order to add AMOUNT to REG.
5864 AMOUNT is negative for pre-decrement.
5865 Returns 1 if the change could be made.
5866 This checks all about the validity of the result of modifying INSN. */
5869 try_pre_increment (insn
, reg
, amount
)
5871 HOST_WIDE_INT amount
;
5875 /* Nonzero if we can try to make a pre-increment or pre-decrement.
5876 For example, addl $4,r1; movl (r1),... can become movl +(r1),... */
5878 /* Nonzero if we can try to make a post-increment or post-decrement.
5879 For example, addl $4,r1; movl -4(r1),... can become movl (r1)+,...
5880 It is possible for both PRE_OK and POST_OK to be nonzero if the machine
5881 supports both pre-inc and post-inc, or both pre-dec and post-dec. */
5884 /* Nonzero if the opportunity actually requires post-inc or post-dec. */
5887 /* From the sign of increment, see which possibilities are conceivable
5888 on this target machine. */
5889 if (HAVE_PRE_INCREMENT
&& amount
> 0)
5891 if (HAVE_POST_INCREMENT
&& amount
> 0)
5894 if (HAVE_PRE_DECREMENT
&& amount
< 0)
5896 if (HAVE_POST_DECREMENT
&& amount
< 0)
5899 if (! (pre_ok
|| post_ok
))
5902 /* It is not safe to add a side effect to a jump insn
5903 because if the incremented register is spilled and must be reloaded
5904 there would be no way to store the incremented value back in memory. */
5906 if (GET_CODE (insn
) == JUMP_INSN
)
5911 use
= find_use_as_address (PATTERN (insn
), reg
, 0);
5912 if (post_ok
&& (use
== 0 || use
== (rtx
) 1))
5914 use
= find_use_as_address (PATTERN (insn
), reg
, -amount
);
5918 if (use
== 0 || use
== (rtx
) 1)
5921 if (GET_MODE_SIZE (GET_MODE (use
)) != (amount
> 0 ? amount
: - amount
))
5924 /* See if this combination of instruction and addressing mode exists. */
5925 if (! validate_change (insn
, &XEXP (use
, 0),
5926 gen_rtx_fmt_e (amount
> 0
5927 ? (do_post
? POST_INC
: PRE_INC
)
5928 : (do_post
? POST_DEC
: PRE_DEC
),
5932 /* Record that this insn now has an implicit side effect on X. */
5933 REG_NOTES (insn
) = alloc_EXPR_LIST (REG_INC
, reg
, REG_NOTES (insn
));
5937 #endif /* AUTO_INC_DEC */
5939 /* Find the place in the rtx X where REG is used as a memory address.
5940 Return the MEM rtx that so uses it.
5941 If PLUSCONST is nonzero, search instead for a memory address equivalent to
5942 (plus REG (const_int PLUSCONST)).
5944 If such an address does not appear, return 0.
5945 If REG appears more than once, or is used other than in such an address,
5949 find_use_as_address (x
, reg
, plusconst
)
5952 HOST_WIDE_INT plusconst
;
5954 enum rtx_code code
= GET_CODE (x
);
5955 const char *fmt
= GET_RTX_FORMAT (code
);
5957 register rtx value
= 0;
5960 if (code
== MEM
&& XEXP (x
, 0) == reg
&& plusconst
== 0)
5963 if (code
== MEM
&& GET_CODE (XEXP (x
, 0)) == PLUS
5964 && XEXP (XEXP (x
, 0), 0) == reg
5965 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == CONST_INT
5966 && INTVAL (XEXP (XEXP (x
, 0), 1)) == plusconst
)
5969 if (code
== SIGN_EXTRACT
|| code
== ZERO_EXTRACT
)
5971 /* If REG occurs inside a MEM used in a bit-field reference,
5972 that is unacceptable. */
5973 if (find_use_as_address (XEXP (x
, 0), reg
, 0) != 0)
5974 return (rtx
) (HOST_WIDE_INT
) 1;
5978 return (rtx
) (HOST_WIDE_INT
) 1;
5980 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
5984 tem
= find_use_as_address (XEXP (x
, i
), reg
, plusconst
);
5988 return (rtx
) (HOST_WIDE_INT
) 1;
5990 else if (fmt
[i
] == 'E')
5993 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
5995 tem
= find_use_as_address (XVECEXP (x
, i
, j
), reg
, plusconst
);
5999 return (rtx
) (HOST_WIDE_INT
) 1;
6007 /* Write information about registers and basic blocks into FILE.
6008 This is part of making a debugging dump. */
6011 dump_regset (r
, outf
)
6018 fputs (" (nil)", outf
);
6022 EXECUTE_IF_SET_IN_REG_SET (r
, 0, i
,
6024 fprintf (outf
, " %d", i
);
6025 if (i
< FIRST_PSEUDO_REGISTER
)
6026 fprintf (outf
, " [%s]",
6035 dump_regset (r
, stderr
);
6036 putc ('\n', stderr
);
6040 dump_flow_info (file
)
6044 static const char * const reg_class_names
[] = REG_CLASS_NAMES
;
6046 fprintf (file
, "%d registers.\n", max_regno
);
6047 for (i
= FIRST_PSEUDO_REGISTER
; i
< max_regno
; i
++)
6050 enum reg_class
class, altclass
;
6051 fprintf (file
, "\nRegister %d used %d times across %d insns",
6052 i
, REG_N_REFS (i
), REG_LIVE_LENGTH (i
));
6053 if (REG_BASIC_BLOCK (i
) >= 0)
6054 fprintf (file
, " in block %d", REG_BASIC_BLOCK (i
));
6056 fprintf (file
, "; set %d time%s", REG_N_SETS (i
),
6057 (REG_N_SETS (i
) == 1) ? "" : "s");
6058 if (REG_USERVAR_P (regno_reg_rtx
[i
]))
6059 fprintf (file
, "; user var");
6060 if (REG_N_DEATHS (i
) != 1)
6061 fprintf (file
, "; dies in %d places", REG_N_DEATHS (i
));
6062 if (REG_N_CALLS_CROSSED (i
) == 1)
6063 fprintf (file
, "; crosses 1 call");
6064 else if (REG_N_CALLS_CROSSED (i
))
6065 fprintf (file
, "; crosses %d calls", REG_N_CALLS_CROSSED (i
));
6066 if (PSEUDO_REGNO_BYTES (i
) != UNITS_PER_WORD
)
6067 fprintf (file
, "; %d bytes", PSEUDO_REGNO_BYTES (i
));
6068 class = reg_preferred_class (i
);
6069 altclass
= reg_alternate_class (i
);
6070 if (class != GENERAL_REGS
|| altclass
!= ALL_REGS
)
6072 if (altclass
== ALL_REGS
|| class == ALL_REGS
)
6073 fprintf (file
, "; pref %s", reg_class_names
[(int) class]);
6074 else if (altclass
== NO_REGS
)
6075 fprintf (file
, "; %s or none", reg_class_names
[(int) class]);
6077 fprintf (file
, "; pref %s, else %s",
6078 reg_class_names
[(int) class],
6079 reg_class_names
[(int) altclass
]);
6081 if (REGNO_POINTER_FLAG (i
))
6082 fprintf (file
, "; pointer");
6083 fprintf (file
, ".\n");
6086 fprintf (file
, "\n%d basic blocks, %d edges.\n", n_basic_blocks
, n_edges
);
6087 for (i
= 0; i
< n_basic_blocks
; i
++)
6089 register basic_block bb
= BASIC_BLOCK (i
);
6092 fprintf (file
, "\nBasic block %d: first insn %d, last %d, loop_depth %d, count %d.\n",
6093 i
, INSN_UID (bb
->head
), INSN_UID (bb
->end
), bb
->loop_depth
, bb
->count
);
6095 fprintf (file
, "Predecessors: ");
6096 for (e
= bb
->pred
; e
; e
= e
->pred_next
)
6097 dump_edge_info (file
, e
, 0);
6099 fprintf (file
, "\nSuccessors: ");
6100 for (e
= bb
->succ
; e
; e
= e
->succ_next
)
6101 dump_edge_info (file
, e
, 1);
6103 fprintf (file
, "\nRegisters live at start:");
6104 dump_regset (bb
->global_live_at_start
, file
);
6106 fprintf (file
, "\nRegisters live at end:");
6107 dump_regset (bb
->global_live_at_end
, file
);
6118 dump_flow_info (stderr
);
6122 dump_edge_info (file
, e
, do_succ
)
6127 basic_block side
= (do_succ
? e
->dest
: e
->src
);
6129 if (side
== ENTRY_BLOCK_PTR
)
6130 fputs (" ENTRY", file
);
6131 else if (side
== EXIT_BLOCK_PTR
)
6132 fputs (" EXIT", file
);
6134 fprintf (file
, " %d", side
->index
);
6137 fprintf (file
, " count:%d", e
->count
);
6141 static const char * const bitnames
[] = {
6142 "fallthru", "crit", "ab", "abcall", "eh", "fake"
6145 int i
, flags
= e
->flags
;
6149 for (i
= 0; flags
; i
++)
6150 if (flags
& (1 << i
))
6156 if (i
< (int) ARRAY_SIZE (bitnames
))
6157 fputs (bitnames
[i
], file
);
6159 fprintf (file
, "%d", i
);
6166 /* Print out one basic block with live information at start and end. */
6177 fprintf (outf
, ";; Basic block %d, loop depth %d, count %d",
6178 bb
->index
, bb
->loop_depth
, bb
->count
);
6179 if (bb
->eh_beg
!= -1 || bb
->eh_end
!= -1)
6180 fprintf (outf
, ", eh regions %d/%d", bb
->eh_beg
, bb
->eh_end
);
6183 fputs (";; Predecessors: ", outf
);
6184 for (e
= bb
->pred
; e
; e
= e
->pred_next
)
6185 dump_edge_info (outf
, e
, 0);
6188 fputs (";; Registers live at start:", outf
);
6189 dump_regset (bb
->global_live_at_start
, outf
);
6192 for (insn
= bb
->head
, last
= NEXT_INSN (bb
->end
);
6194 insn
= NEXT_INSN (insn
))
6195 print_rtl_single (outf
, insn
);
6197 fputs (";; Registers live at end:", outf
);
6198 dump_regset (bb
->global_live_at_end
, outf
);
6201 fputs (";; Successors: ", outf
);
6202 for (e
= bb
->succ
; e
; e
= e
->succ_next
)
6203 dump_edge_info (outf
, e
, 1);
6211 dump_bb (bb
, stderr
);
6218 dump_bb (BASIC_BLOCK (n
), stderr
);
6221 /* Like print_rtl, but also print out live information for the start of each
6225 print_rtl_with_bb (outf
, rtx_first
)
6229 register rtx tmp_rtx
;
6232 fprintf (outf
, "(nil)\n");
6236 enum bb_state
{ NOT_IN_BB
, IN_ONE_BB
, IN_MULTIPLE_BB
};
6237 int max_uid
= get_max_uid ();
6238 basic_block
*start
= (basic_block
*)
6239 xcalloc (max_uid
, sizeof (basic_block
));
6240 basic_block
*end
= (basic_block
*)
6241 xcalloc (max_uid
, sizeof (basic_block
));
6242 enum bb_state
*in_bb_p
= (enum bb_state
*)
6243 xcalloc (max_uid
, sizeof (enum bb_state
));
6245 for (i
= n_basic_blocks
- 1; i
>= 0; i
--)
6247 basic_block bb
= BASIC_BLOCK (i
);
6250 start
[INSN_UID (bb
->head
)] = bb
;
6251 end
[INSN_UID (bb
->end
)] = bb
;
6252 for (x
= bb
->head
; x
!= NULL_RTX
; x
= NEXT_INSN (x
))
6254 enum bb_state state
= IN_MULTIPLE_BB
;
6255 if (in_bb_p
[INSN_UID (x
)] == NOT_IN_BB
)
6257 in_bb_p
[INSN_UID (x
)] = state
;
6264 for (tmp_rtx
= rtx_first
; NULL
!= tmp_rtx
; tmp_rtx
= NEXT_INSN (tmp_rtx
))
6269 if ((bb
= start
[INSN_UID (tmp_rtx
)]) != NULL
)
6271 fprintf (outf
, ";; Start of basic block %d, registers live:",
6273 dump_regset (bb
->global_live_at_start
, outf
);
6277 if (in_bb_p
[INSN_UID (tmp_rtx
)] == NOT_IN_BB
6278 && GET_CODE (tmp_rtx
) != NOTE
6279 && GET_CODE (tmp_rtx
) != BARRIER
)
6280 fprintf (outf
, ";; Insn is not within a basic block\n");
6281 else if (in_bb_p
[INSN_UID (tmp_rtx
)] == IN_MULTIPLE_BB
)
6282 fprintf (outf
, ";; Insn is in multiple basic blocks\n");
6284 did_output
= print_rtl_single (outf
, tmp_rtx
);
6286 if ((bb
= end
[INSN_UID (tmp_rtx
)]) != NULL
)
6288 fprintf (outf
, ";; End of basic block %d, registers live:\n",
6290 dump_regset (bb
->global_live_at_end
, outf
);
6303 if (current_function_epilogue_delay_list
!= 0)
6305 fprintf (outf
, "\n;; Insns in epilogue delay list:\n\n");
6306 for (tmp_rtx
= current_function_epilogue_delay_list
; tmp_rtx
!= 0;
6307 tmp_rtx
= XEXP (tmp_rtx
, 1))
6308 print_rtl_single (outf
, XEXP (tmp_rtx
, 0));
6312 /* Dump the rtl into the current debugging dump file, then abort. */
6314 print_rtl_and_abort ()
6318 print_rtl_with_bb (rtl_dump_file
, get_insns ());
6319 fclose (rtl_dump_file
);
6324 /* Recompute register set/reference counts immediately prior to register
6327 This avoids problems with set/reference counts changing to/from values
6328 which have special meanings to the register allocators.
6330 Additionally, the reference counts are the primary component used by the
6331 register allocators to prioritize pseudos for allocation to hard regs.
6332 More accurate reference counts generally lead to better register allocation.
6334 F is the first insn to be scanned.
6336 LOOP_STEP denotes how much loop_depth should be incremented per
6337 loop nesting level in order to increase the ref count more for
6338 references in a loop.
6340 It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and
6341 possibly other information which is used by the register allocators. */
6344 recompute_reg_usage (f
, loop_step
)
6345 rtx f ATTRIBUTE_UNUSED
;
6346 int loop_step ATTRIBUTE_UNUSED
;
6348 allocate_reg_life_data ();
6349 update_life_info (NULL
, UPDATE_LIFE_LOCAL
, PROP_REG_INFO
);
6352 /* Optionally removes all the REG_DEAD and REG_UNUSED notes from a set of
6353 blocks. If BLOCKS is NULL, assume the universal set. Returns a count
6354 of the number of registers that died. */
6357 count_or_remove_death_notes (blocks
, kill
)
6363 for (i
= n_basic_blocks
- 1; i
>= 0; --i
)
6368 if (blocks
&& ! TEST_BIT (blocks
, i
))
6371 bb
= BASIC_BLOCK (i
);
6373 for (insn
= bb
->head
;; insn
= NEXT_INSN (insn
))
6377 rtx
*pprev
= ®_NOTES (insn
);
6382 switch (REG_NOTE_KIND (link
))
6385 if (GET_CODE (XEXP (link
, 0)) == REG
)
6387 rtx reg
= XEXP (link
, 0);
6390 if (REGNO (reg
) >= FIRST_PSEUDO_REGISTER
)
6393 n
= HARD_REGNO_NREGS (REGNO (reg
), GET_MODE (reg
));
6401 rtx next
= XEXP (link
, 1);
6402 free_EXPR_LIST_node (link
);
6403 *pprev
= link
= next
;
6409 pprev
= &XEXP (link
, 1);
6416 if (insn
== bb
->end
)
6425 /* Update insns block within BB. */
6428 update_bb_for_insn (bb
)
6433 if (! basic_block_for_insn
)
6436 for (insn
= bb
->head
; ; insn
= NEXT_INSN (insn
))
6438 set_block_for_insn (insn
, bb
);
6440 if (insn
== bb
->end
)
6446 /* Record INSN's block as BB. */
6449 set_block_for_insn (insn
, bb
)
6453 size_t uid
= INSN_UID (insn
);
6454 if (uid
>= basic_block_for_insn
->num_elements
)
6458 /* Add one-eighth the size so we don't keep calling xrealloc. */
6459 new_size
= uid
+ (uid
+ 7) / 8;
6461 VARRAY_GROW (basic_block_for_insn
, new_size
);
6463 VARRAY_BB (basic_block_for_insn
, uid
) = bb
;
6466 /* Record INSN's block number as BB. */
6467 /* ??? This has got to go. */
6470 set_block_num (insn
, bb
)
6474 set_block_for_insn (insn
, BASIC_BLOCK (bb
));
6477 /* Verify the CFG consistency. This function check some CFG invariants and
6478 aborts when something is wrong. Hope that this function will help to
6479 convert many optimization passes to preserve CFG consistent.
6481 Currently it does following checks:
6483 - test head/end pointers
6484 - overlapping of basic blocks
6485 - edge list corectness
6486 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
6487 - tails of basic blocks (ensure that boundary is necesary)
6488 - scans body of the basic block for JUMP_INSN, CODE_LABEL
6489 and NOTE_INSN_BASIC_BLOCK
6490 - check that all insns are in the basic blocks
6491 (except the switch handling code, barriers and notes)
6492 - check that all returns are followed by barriers
6494 In future it can be extended check a lot of other stuff as well
6495 (reachability of basic blocks, life information, etc. etc.). */
6500 const int max_uid
= get_max_uid ();
6501 const rtx rtx_first
= get_insns ();
6502 rtx last_head
= get_last_insn ();
6503 basic_block
*bb_info
;
6505 int i
, last_bb_num_seen
, num_bb_notes
, err
= 0;
6507 bb_info
= (basic_block
*) xcalloc (max_uid
, sizeof (basic_block
));
6509 for (i
= n_basic_blocks
- 1; i
>= 0; i
--)
6511 basic_block bb
= BASIC_BLOCK (i
);
6512 rtx head
= bb
->head
;
6515 /* Verify the end of the basic block is in the INSN chain. */
6516 for (x
= last_head
; x
!= NULL_RTX
; x
= PREV_INSN (x
))
6521 error ("End insn %d for block %d not found in the insn stream.",
6522 INSN_UID (end
), bb
->index
);
6526 /* Work backwards from the end to the head of the basic block
6527 to verify the head is in the RTL chain. */
6528 for (; x
!= NULL_RTX
; x
= PREV_INSN (x
))
6530 /* While walking over the insn chain, verify insns appear
6531 in only one basic block and initialize the BB_INFO array
6532 used by other passes. */
6533 if (bb_info
[INSN_UID (x
)] != NULL
)
6535 error ("Insn %d is in multiple basic blocks (%d and %d)",
6536 INSN_UID (x
), bb
->index
, bb_info
[INSN_UID (x
)]->index
);
6539 bb_info
[INSN_UID (x
)] = bb
;
6546 error ("Head insn %d for block %d not found in the insn stream.",
6547 INSN_UID (head
), bb
->index
);
6554 /* Now check the basic blocks (boundaries etc.) */
6555 for (i
= n_basic_blocks
- 1; i
>= 0; i
--)
6557 basic_block bb
= BASIC_BLOCK (i
);
6558 /* Check corectness of edge lists */
6567 "verify_flow_info: Basic block %d succ edge is corrupted\n",
6569 fprintf (stderr
, "Predecessor: ");
6570 dump_edge_info (stderr
, e
, 0);
6571 fprintf (stderr
, "\nSuccessor: ");
6572 dump_edge_info (stderr
, e
, 1);
6576 if (e
->dest
!= EXIT_BLOCK_PTR
)
6578 edge e2
= e
->dest
->pred
;
6579 while (e2
&& e2
!= e
)
6583 error ("Basic block %i edge lists are corrupted", bb
->index
);
6595 error ("Basic block %d pred edge is corrupted", bb
->index
);
6596 fputs ("Predecessor: ", stderr
);
6597 dump_edge_info (stderr
, e
, 0);
6598 fputs ("\nSuccessor: ", stderr
);
6599 dump_edge_info (stderr
, e
, 1);
6600 fputc ('\n', stderr
);
6603 if (e
->src
!= ENTRY_BLOCK_PTR
)
6605 edge e2
= e
->src
->succ
;
6606 while (e2
&& e2
!= e
)
6610 error ("Basic block %i edge lists are corrupted", bb
->index
);
6617 /* OK pointers are correct. Now check the header of basic
6618 block. It ought to contain optional CODE_LABEL followed
6619 by NOTE_BASIC_BLOCK. */
6621 if (GET_CODE (x
) == CODE_LABEL
)
6625 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
6631 if (!NOTE_INSN_BASIC_BLOCK_P (x
) || NOTE_BASIC_BLOCK (x
) != bb
)
6633 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d\n",
6640 /* Do checks for empty blocks here */
6647 if (NOTE_INSN_BASIC_BLOCK_P (x
))
6649 error ("NOTE_INSN_BASIC_BLOCK %d in the middle of basic block %d",
6650 INSN_UID (x
), bb
->index
);
6657 if (GET_CODE (x
) == JUMP_INSN
6658 || GET_CODE (x
) == CODE_LABEL
6659 || GET_CODE (x
) == BARRIER
)
6661 error ("In basic block %d:", bb
->index
);
6662 fatal_insn ("Flow control insn inside a basic block", x
);
6670 last_bb_num_seen
= -1;
6675 if (NOTE_INSN_BASIC_BLOCK_P (x
))
6677 basic_block bb
= NOTE_BASIC_BLOCK (x
);
6679 if (bb
->index
!= last_bb_num_seen
+ 1)
6680 fatal ("Basic blocks not numbered consecutively");
6681 last_bb_num_seen
= bb
->index
;
6684 if (!bb_info
[INSN_UID (x
)])
6686 switch (GET_CODE (x
))
6693 /* An addr_vec is placed outside any block block. */
6695 && GET_CODE (NEXT_INSN (x
)) == JUMP_INSN
6696 && (GET_CODE (PATTERN (NEXT_INSN (x
))) == ADDR_DIFF_VEC
6697 || GET_CODE (PATTERN (NEXT_INSN (x
))) == ADDR_VEC
))
6702 /* But in any case, non-deletable labels can appear anywhere. */
6706 fatal_insn ("Insn outside basic block", x
);
6711 && GET_CODE (x
) == JUMP_INSN
6712 && returnjump_p (x
) && ! condjump_p (x
)
6713 && ! (NEXT_INSN (x
) && GET_CODE (NEXT_INSN (x
)) == BARRIER
))
6714 fatal_insn ("Return not followed by barrier", x
);
6719 if (num_bb_notes
!= n_basic_blocks
)
6720 fatal ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
6721 num_bb_notes
, n_basic_blocks
);
6730 /* Functions to access an edge list with a vector representation.
6731 Enough data is kept such that given an index number, the
6732 pred and succ that edge represents can be determined, or
6733 given a pred and a succ, its index number can be returned.
6734 This allows algorithms which consume a lot of memory to
6735 represent the normally full matrix of edge (pred,succ) with a
6736 single indexed vector, edge (EDGE_INDEX (pred, succ)), with no
6737 wasted space in the client code due to sparse flow graphs. */
6739 /* This functions initializes the edge list. Basically the entire
6740 flowgraph is processed, and all edges are assigned a number,
6741 and the data structure is filled in. */
6746 struct edge_list
*elist
;
6752 block_count
= n_basic_blocks
+ 2; /* Include the entry and exit blocks. */
6756 /* Determine the number of edges in the flow graph by counting successor
6757 edges on each basic block. */
6758 for (x
= 0; x
< n_basic_blocks
; x
++)
6760 basic_block bb
= BASIC_BLOCK (x
);
6762 for (e
= bb
->succ
; e
; e
= e
->succ_next
)
6765 /* Don't forget successors of the entry block. */
6766 for (e
= ENTRY_BLOCK_PTR
->succ
; e
; e
= e
->succ_next
)
6769 elist
= (struct edge_list
*) xmalloc (sizeof (struct edge_list
));
6770 elist
->num_blocks
= block_count
;
6771 elist
->num_edges
= num_edges
;
6772 elist
->index_to_edge
= (edge
*) xmalloc (sizeof (edge
) * num_edges
);
6776 /* Follow successors of the entry block, and register these edges. */
6777 for (e
= ENTRY_BLOCK_PTR
->succ
; e
; e
= e
->succ_next
)
6779 elist
->index_to_edge
[num_edges
] = e
;
6783 for (x
= 0; x
< n_basic_blocks
; x
++)
6785 basic_block bb
= BASIC_BLOCK (x
);
6787 /* Follow all successors of blocks, and register these edges. */
6788 for (e
= bb
->succ
; e
; e
= e
->succ_next
)
6790 elist
->index_to_edge
[num_edges
] = e
;
6797 /* This function free's memory associated with an edge list. */
6800 free_edge_list (elist
)
6801 struct edge_list
*elist
;
6805 free (elist
->index_to_edge
);
6810 /* This function provides debug output showing an edge list. */
6813 print_edge_list (f
, elist
)
6815 struct edge_list
*elist
;
6818 fprintf (f
, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
6819 elist
->num_blocks
- 2, elist
->num_edges
);
6821 for (x
= 0; x
< elist
->num_edges
; x
++)
6823 fprintf (f
, " %-4d - edge(", x
);
6824 if (INDEX_EDGE_PRED_BB (elist
, x
) == ENTRY_BLOCK_PTR
)
6825 fprintf (f
, "entry,");
6827 fprintf (f
, "%d,", INDEX_EDGE_PRED_BB (elist
, x
)->index
);
6829 if (INDEX_EDGE_SUCC_BB (elist
, x
) == EXIT_BLOCK_PTR
)
6830 fprintf (f
, "exit)\n");
6832 fprintf (f
, "%d)\n", INDEX_EDGE_SUCC_BB (elist
, x
)->index
);
6836 /* This function provides an internal consistency check of an edge list,
6837 verifying that all edges are present, and that there are no
6841 verify_edge_list (f
, elist
)
6843 struct edge_list
*elist
;
6845 int x
, pred
, succ
, index
;
6848 for (x
= 0; x
< n_basic_blocks
; x
++)
6850 basic_block bb
= BASIC_BLOCK (x
);
6852 for (e
= bb
->succ
; e
; e
= e
->succ_next
)
6854 pred
= e
->src
->index
;
6855 succ
= e
->dest
->index
;
6856 index
= EDGE_INDEX (elist
, e
->src
, e
->dest
);
6857 if (index
== EDGE_INDEX_NO_EDGE
)
6859 fprintf (f
, "*p* No index for edge from %d to %d\n", pred
, succ
);
6862 if (INDEX_EDGE_PRED_BB (elist
, index
)->index
!= pred
)
6863 fprintf (f
, "*p* Pred for index %d should be %d not %d\n",
6864 index
, pred
, INDEX_EDGE_PRED_BB (elist
, index
)->index
);
6865 if (INDEX_EDGE_SUCC_BB (elist
, index
)->index
!= succ
)
6866 fprintf (f
, "*p* Succ for index %d should be %d not %d\n",
6867 index
, succ
, INDEX_EDGE_SUCC_BB (elist
, index
)->index
);
6870 for (e
= ENTRY_BLOCK_PTR
->succ
; e
; e
= e
->succ_next
)
6872 pred
= e
->src
->index
;
6873 succ
= e
->dest
->index
;
6874 index
= EDGE_INDEX (elist
, e
->src
, e
->dest
);
6875 if (index
== EDGE_INDEX_NO_EDGE
)
6877 fprintf (f
, "*p* No index for edge from %d to %d\n", pred
, succ
);
6880 if (INDEX_EDGE_PRED_BB (elist
, index
)->index
!= pred
)
6881 fprintf (f
, "*p* Pred for index %d should be %d not %d\n",
6882 index
, pred
, INDEX_EDGE_PRED_BB (elist
, index
)->index
);
6883 if (INDEX_EDGE_SUCC_BB (elist
, index
)->index
!= succ
)
6884 fprintf (f
, "*p* Succ for index %d should be %d not %d\n",
6885 index
, succ
, INDEX_EDGE_SUCC_BB (elist
, index
)->index
);
6887 /* We've verified that all the edges are in the list, no lets make sure
6888 there are no spurious edges in the list. */
6890 for (pred
= 0; pred
< n_basic_blocks
; pred
++)
6891 for (succ
= 0; succ
< n_basic_blocks
; succ
++)
6893 basic_block p
= BASIC_BLOCK (pred
);
6894 basic_block s
= BASIC_BLOCK (succ
);
6898 for (e
= p
->succ
; e
; e
= e
->succ_next
)
6904 for (e
= s
->pred
; e
; e
= e
->pred_next
)
6910 if (EDGE_INDEX (elist
, BASIC_BLOCK (pred
), BASIC_BLOCK (succ
))
6911 == EDGE_INDEX_NO_EDGE
&& found_edge
!= 0)
6912 fprintf (f
, "*** Edge (%d, %d) appears to not have an index\n",
6914 if (EDGE_INDEX (elist
, BASIC_BLOCK (pred
), BASIC_BLOCK (succ
))
6915 != EDGE_INDEX_NO_EDGE
&& found_edge
== 0)
6916 fprintf (f
, "*** Edge (%d, %d) has index %d, but there is no edge\n",
6917 pred
, succ
, EDGE_INDEX (elist
, BASIC_BLOCK (pred
),
6918 BASIC_BLOCK (succ
)));
6920 for (succ
= 0; succ
< n_basic_blocks
; succ
++)
6922 basic_block p
= ENTRY_BLOCK_PTR
;
6923 basic_block s
= BASIC_BLOCK (succ
);
6927 for (e
= p
->succ
; e
; e
= e
->succ_next
)
6933 for (e
= s
->pred
; e
; e
= e
->pred_next
)
6939 if (EDGE_INDEX (elist
, ENTRY_BLOCK_PTR
, BASIC_BLOCK (succ
))
6940 == EDGE_INDEX_NO_EDGE
&& found_edge
!= 0)
6941 fprintf (f
, "*** Edge (entry, %d) appears to not have an index\n",
6943 if (EDGE_INDEX (elist
, ENTRY_BLOCK_PTR
, BASIC_BLOCK (succ
))
6944 != EDGE_INDEX_NO_EDGE
&& found_edge
== 0)
6945 fprintf (f
, "*** Edge (entry, %d) has index %d, but no edge exists\n",
6946 succ
, EDGE_INDEX (elist
, ENTRY_BLOCK_PTR
,
6947 BASIC_BLOCK (succ
)));
6949 for (pred
= 0; pred
< n_basic_blocks
; pred
++)
6951 basic_block p
= BASIC_BLOCK (pred
);
6952 basic_block s
= EXIT_BLOCK_PTR
;
6956 for (e
= p
->succ
; e
; e
= e
->succ_next
)
6962 for (e
= s
->pred
; e
; e
= e
->pred_next
)
6968 if (EDGE_INDEX (elist
, BASIC_BLOCK (pred
), EXIT_BLOCK_PTR
)
6969 == EDGE_INDEX_NO_EDGE
&& found_edge
!= 0)
6970 fprintf (f
, "*** Edge (%d, exit) appears to not have an index\n",
6972 if (EDGE_INDEX (elist
, BASIC_BLOCK (pred
), EXIT_BLOCK_PTR
)
6973 != EDGE_INDEX_NO_EDGE
&& found_edge
== 0)
6974 fprintf (f
, "*** Edge (%d, exit) has index %d, but no edge exists\n",
6975 pred
, EDGE_INDEX (elist
, BASIC_BLOCK (pred
),
6980 /* This routine will determine what, if any, edge there is between
6981 a specified predecessor and successor. */
6984 find_edge_index (edge_list
, pred
, succ
)
6985 struct edge_list
*edge_list
;
6986 basic_block pred
, succ
;
6989 for (x
= 0; x
< NUM_EDGES (edge_list
); x
++)
6991 if (INDEX_EDGE_PRED_BB (edge_list
, x
) == pred
6992 && INDEX_EDGE_SUCC_BB (edge_list
, x
) == succ
)
6995 return (EDGE_INDEX_NO_EDGE
);
6998 /* This function will remove an edge from the flow graph. */
7004 edge last_pred
= NULL
;
7005 edge last_succ
= NULL
;
7007 basic_block src
, dest
;
7010 for (tmp
= src
->succ
; tmp
&& tmp
!= e
; tmp
= tmp
->succ_next
)
7016 last_succ
->succ_next
= e
->succ_next
;
7018 src
->succ
= e
->succ_next
;
7020 for (tmp
= dest
->pred
; tmp
&& tmp
!= e
; tmp
= tmp
->pred_next
)
7026 last_pred
->pred_next
= e
->pred_next
;
7028 dest
->pred
= e
->pred_next
;
7034 /* This routine will remove any fake successor edges for a basic block.
7035 When the edge is removed, it is also removed from whatever predecessor
7039 remove_fake_successors (bb
)
7043 for (e
= bb
->succ
; e
;)
7047 if ((tmp
->flags
& EDGE_FAKE
) == EDGE_FAKE
)
7052 /* This routine will remove all fake edges from the flow graph. If
7053 we remove all fake successors, it will automatically remove all
7054 fake predecessors. */
7057 remove_fake_edges ()
7061 for (x
= 0; x
< n_basic_blocks
; x
++)
7062 remove_fake_successors (BASIC_BLOCK (x
));
7064 /* We've handled all successors except the entry block's. */
7065 remove_fake_successors (ENTRY_BLOCK_PTR
);
7068 /* This function will add a fake edge between any block which has no
7069 successors, and the exit block. Some data flow equations require these
7073 add_noreturn_fake_exit_edges ()
7077 for (x
= 0; x
< n_basic_blocks
; x
++)
7078 if (BASIC_BLOCK (x
)->succ
== NULL
)
7079 make_edge (NULL
, BASIC_BLOCK (x
), EXIT_BLOCK_PTR
, EDGE_FAKE
);
7082 /* This function adds a fake edge between any infinite loops to the
7083 exit block. Some optimizations require a path from each node to
7086 See also Morgan, Figure 3.10, pp. 82-83.
7088 The current implementation is ugly, not attempting to minimize the
7089 number of inserted fake edges. To reduce the number of fake edges
7090 to insert, add fake edges from _innermost_ loops containing only
7091 nodes not reachable from the exit block. */
7094 connect_infinite_loops_to_exit ()
7096 basic_block unvisited_block
;
7098 /* Perform depth-first search in the reverse graph to find nodes
7099 reachable from the exit block. */
7100 struct depth_first_search_dsS dfs_ds
;
7102 flow_dfs_compute_reverse_init (&dfs_ds
);
7103 flow_dfs_compute_reverse_add_bb (&dfs_ds
, EXIT_BLOCK_PTR
);
7105 /* Repeatedly add fake edges, updating the unreachable nodes. */
7108 unvisited_block
= flow_dfs_compute_reverse_execute (&dfs_ds
);
7109 if (!unvisited_block
)
7111 make_edge (NULL
, unvisited_block
, EXIT_BLOCK_PTR
, EDGE_FAKE
);
7112 flow_dfs_compute_reverse_add_bb (&dfs_ds
, unvisited_block
);
7115 flow_dfs_compute_reverse_finish (&dfs_ds
);
7120 /* Redirect an edge's successor from one block to another. */
7123 redirect_edge_succ (e
, new_succ
)
7125 basic_block new_succ
;
7129 /* Disconnect the edge from the old successor block. */
7130 for (pe
= &e
->dest
->pred
; *pe
!= e
; pe
= &(*pe
)->pred_next
)
7132 *pe
= (*pe
)->pred_next
;
7134 /* Reconnect the edge to the new successor block. */
7135 e
->pred_next
= new_succ
->pred
;
7140 /* Redirect an edge's predecessor from one block to another. */
7143 redirect_edge_pred (e
, new_pred
)
7145 basic_block new_pred
;
7149 /* Disconnect the edge from the old predecessor block. */
7150 for (pe
= &e
->src
->succ
; *pe
!= e
; pe
= &(*pe
)->succ_next
)
7152 *pe
= (*pe
)->succ_next
;
7154 /* Reconnect the edge to the new predecessor block. */
7155 e
->succ_next
= new_pred
->succ
;
7160 /* Dump the list of basic blocks in the bitmap NODES. */
7163 flow_nodes_print (str
, nodes
, file
)
7165 const sbitmap nodes
;
7173 fprintf (file
, "%s { ", str
);
7174 EXECUTE_IF_SET_IN_SBITMAP (nodes
, 0, node
, {fprintf (file
, "%d ", node
);});
7175 fputs ("}\n", file
);
7179 /* Dump the list of edges in the array EDGE_LIST. */
7182 flow_edge_list_print (str
, edge_list
, num_edges
, file
)
7184 const edge
*edge_list
;
7193 fprintf (file
, "%s { ", str
);
7194 for (i
= 0; i
< num_edges
; i
++)
7195 fprintf (file
, "%d->%d ", edge_list
[i
]->src
->index
,
7196 edge_list
[i
]->dest
->index
);
7197 fputs ("}\n", file
);
7201 /* Dump loop related CFG information. */
7204 flow_loops_cfg_dump (loops
, file
)
7205 const struct loops
*loops
;
7210 if (! loops
->num
|| ! file
|| ! loops
->cfg
.dom
)
7213 for (i
= 0; i
< n_basic_blocks
; i
++)
7217 fprintf (file
, ";; %d succs { ", i
);
7218 for (succ
= BASIC_BLOCK (i
)->succ
; succ
; succ
= succ
->succ_next
)
7219 fprintf (file
, "%d ", succ
->dest
->index
);
7220 flow_nodes_print ("} dom", loops
->cfg
.dom
[i
], file
);
7223 /* Dump the DFS node order. */
7224 if (loops
->cfg
.dfs_order
)
7226 fputs (";; DFS order: ", file
);
7227 for (i
= 0; i
< n_basic_blocks
; i
++)
7228 fprintf (file
, "%d ", loops
->cfg
.dfs_order
[i
]);
7231 /* Dump the reverse completion node order. */
7232 if (loops
->cfg
.rc_order
)
7234 fputs (";; RC order: ", file
);
7235 for (i
= 0; i
< n_basic_blocks
; i
++)
7236 fprintf (file
, "%d ", loops
->cfg
.rc_order
[i
]);
7241 /* Return non-zero if the nodes of LOOP are a subset of OUTER. */
7244 flow_loop_nested_p (outer
, loop
)
7248 return sbitmap_a_subset_b_p (loop
->nodes
, outer
->nodes
);
7252 /* Dump the loop information specified by LOOP to the stream FILE
7253 using auxiliary dump callback function LOOP_DUMP_AUX if non null. */
7255 flow_loop_dump (loop
, file
, loop_dump_aux
, verbose
)
7256 const struct loop
*loop
;
7258 void (*loop_dump_aux
) PARAMS((const struct loop
*, FILE *, int));
7261 if (! loop
|| ! loop
->header
)
7264 fprintf (file
, ";;\n;; Loop %d (%d to %d):%s%s\n",
7265 loop
->num
, INSN_UID (loop
->first
->head
),
7266 INSN_UID (loop
->last
->end
),
7267 loop
->shared
? " shared" : "",
7268 loop
->invalid
? " invalid" : "");
7269 fprintf (file
, ";; header %d, latch %d, pre-header %d, first %d, last %d\n",
7270 loop
->header
->index
, loop
->latch
->index
,
7271 loop
->pre_header
? loop
->pre_header
->index
: -1,
7272 loop
->first
->index
, loop
->last
->index
);
7273 fprintf (file
, ";; depth %d, level %d, outer %ld\n",
7274 loop
->depth
, loop
->level
,
7275 (long) (loop
->outer
? loop
->outer
->num
: -1));
7277 if (loop
->pre_header_edges
)
7278 flow_edge_list_print (";; pre-header edges", loop
->pre_header_edges
,
7279 loop
->num_pre_header_edges
, file
);
7280 flow_edge_list_print (";; entry edges", loop
->entry_edges
,
7281 loop
->num_entries
, file
);
7282 fprintf (file
, ";; %d", loop
->num_nodes
);
7283 flow_nodes_print (" nodes", loop
->nodes
, file
);
7284 flow_edge_list_print (";; exit edges", loop
->exit_edges
,
7285 loop
->num_exits
, file
);
7286 if (loop
->exits_doms
)
7287 flow_nodes_print (";; exit doms", loop
->exits_doms
, file
);
7289 loop_dump_aux (loop
, file
, verbose
);
7293 /* Dump the loop information specified by LOOPS to the stream FILE,
7294 using auxiliary dump callback function LOOP_DUMP_AUX if non null. */
7296 flow_loops_dump (loops
, file
, loop_dump_aux
, verbose
)
7297 const struct loops
*loops
;
7299 void (*loop_dump_aux
) PARAMS((const struct loop
*, FILE *, int));
7305 num_loops
= loops
->num
;
7306 if (! num_loops
|| ! file
)
7309 fprintf (file
, ";; %d loops found, %d levels\n",
7310 num_loops
, loops
->levels
);
7312 for (i
= 0; i
< num_loops
; i
++)
7314 struct loop
*loop
= &loops
->array
[i
];
7316 flow_loop_dump (loop
, file
, loop_dump_aux
, verbose
);
7322 for (j
= 0; j
< i
; j
++)
7324 struct loop
*oloop
= &loops
->array
[j
];
7326 if (loop
->header
== oloop
->header
)
7331 smaller
= loop
->num_nodes
< oloop
->num_nodes
;
7333 /* If the union of LOOP and OLOOP is different than
7334 the larger of LOOP and OLOOP then LOOP and OLOOP
7335 must be disjoint. */
7336 disjoint
= ! flow_loop_nested_p (smaller
? loop
: oloop
,
7337 smaller
? oloop
: loop
);
7339 ";; loop header %d shared by loops %d, %d %s\n",
7340 loop
->header
->index
, i
, j
,
7341 disjoint
? "disjoint" : "nested");
7348 flow_loops_cfg_dump (loops
, file
);
7352 /* Free all the memory allocated for LOOPS. */
7355 flow_loops_free (loops
)
7356 struct loops
*loops
;
7365 /* Free the loop descriptors. */
7366 for (i
= 0; i
< loops
->num
; i
++)
7368 struct loop
*loop
= &loops
->array
[i
];
7370 if (loop
->pre_header_edges
)
7371 free (loop
->pre_header_edges
);
7373 sbitmap_free (loop
->nodes
);
7374 if (loop
->entry_edges
)
7375 free (loop
->entry_edges
);
7376 if (loop
->exit_edges
)
7377 free (loop
->exit_edges
);
7378 if (loop
->exits_doms
)
7379 sbitmap_free (loop
->exits_doms
);
7381 free (loops
->array
);
7382 loops
->array
= NULL
;
7385 sbitmap_vector_free (loops
->cfg
.dom
);
7386 if (loops
->cfg
.dfs_order
)
7387 free (loops
->cfg
.dfs_order
);
7389 if (loops
->shared_headers
)
7390 sbitmap_free (loops
->shared_headers
);
7395 /* Find the entry edges into the loop with header HEADER and nodes
7396 NODES and store in ENTRY_EDGES array. Return the number of entry
7397 edges from the loop. */
7400 flow_loop_entry_edges_find (header
, nodes
, entry_edges
)
7402 const sbitmap nodes
;
7408 *entry_edges
= NULL
;
7411 for (e
= header
->pred
; e
; e
= e
->pred_next
)
7413 basic_block src
= e
->src
;
7415 if (src
== ENTRY_BLOCK_PTR
|| ! TEST_BIT (nodes
, src
->index
))
7422 *entry_edges
= (edge
*) xmalloc (num_entries
* sizeof (edge
*));
7425 for (e
= header
->pred
; e
; e
= e
->pred_next
)
7427 basic_block src
= e
->src
;
7429 if (src
== ENTRY_BLOCK_PTR
|| ! TEST_BIT (nodes
, src
->index
))
7430 (*entry_edges
)[num_entries
++] = e
;
7437 /* Find the exit edges from the loop using the bitmap of loop nodes
7438 NODES and store in EXIT_EDGES array. Return the number of
7439 exit edges from the loop. */
7442 flow_loop_exit_edges_find (nodes
, exit_edges
)
7443 const sbitmap nodes
;
7452 /* Check all nodes within the loop to see if there are any
7453 successors not in the loop. Note that a node may have multiple
7454 exiting edges ????? A node can have one jumping edge and one fallthru
7455 edge so only one of these can exit the loop. */
7457 EXECUTE_IF_SET_IN_SBITMAP (nodes
, 0, node
, {
7458 for (e
= BASIC_BLOCK (node
)->succ
; e
; e
= e
->succ_next
)
7460 basic_block dest
= e
->dest
;
7462 if (dest
== EXIT_BLOCK_PTR
|| ! TEST_BIT (nodes
, dest
->index
))
7470 *exit_edges
= (edge
*) xmalloc (num_exits
* sizeof (edge
*));
7472 /* Store all exiting edges into an array. */
7474 EXECUTE_IF_SET_IN_SBITMAP (nodes
, 0, node
, {
7475 for (e
= BASIC_BLOCK (node
)->succ
; e
; e
= e
->succ_next
)
7477 basic_block dest
= e
->dest
;
7479 if (dest
== EXIT_BLOCK_PTR
|| ! TEST_BIT (nodes
, dest
->index
))
7480 (*exit_edges
)[num_exits
++] = e
;
7488 /* Find the nodes contained within the loop with header HEADER and
7489 latch LATCH and store in NODES. Return the number of nodes within
7493 flow_loop_nodes_find (header
, latch
, nodes
)
7502 stack
= (basic_block
*) xmalloc (n_basic_blocks
* sizeof (basic_block
));
7505 /* Start with only the loop header in the set of loop nodes. */
7506 sbitmap_zero (nodes
);
7507 SET_BIT (nodes
, header
->index
);
7509 header
->loop_depth
++;
7511 /* Push the loop latch on to the stack. */
7512 if (! TEST_BIT (nodes
, latch
->index
))
7514 SET_BIT (nodes
, latch
->index
);
7515 latch
->loop_depth
++;
7517 stack
[sp
++] = latch
;
7526 for (e
= node
->pred
; e
; e
= e
->pred_next
)
7528 basic_block ancestor
= e
->src
;
7530 /* If each ancestor not marked as part of loop, add to set of
7531 loop nodes and push on to stack. */
7532 if (ancestor
!= ENTRY_BLOCK_PTR
7533 && ! TEST_BIT (nodes
, ancestor
->index
))
7535 SET_BIT (nodes
, ancestor
->index
);
7536 ancestor
->loop_depth
++;
7538 stack
[sp
++] = ancestor
;
7546 /* Compute the depth first search order and store in the array
7547 DFS_ORDER if non-zero, marking the nodes visited in VISITED. If
7548 RC_ORDER is non-zero, return the reverse completion number for each
7549 node. Returns the number of nodes visited. A depth first search
7550 tries to get as far away from the starting point as quickly as
7554 flow_depth_first_order_compute (dfs_order
, rc_order
)
7561 int rcnum
= n_basic_blocks
- 1;
7564 /* Allocate stack for back-tracking up CFG. */
7565 stack
= (edge
*) xmalloc ((n_basic_blocks
+ 1) * sizeof (edge
));
7568 /* Allocate bitmap to track nodes that have been visited. */
7569 visited
= sbitmap_alloc (n_basic_blocks
);
7571 /* None of the nodes in the CFG have been visited yet. */
7572 sbitmap_zero (visited
);
7574 /* Push the first edge on to the stack. */
7575 stack
[sp
++] = ENTRY_BLOCK_PTR
->succ
;
7583 /* Look at the edge on the top of the stack. */
7588 /* Check if the edge destination has been visited yet. */
7589 if (dest
!= EXIT_BLOCK_PTR
&& ! TEST_BIT (visited
, dest
->index
))
7591 /* Mark that we have visited the destination. */
7592 SET_BIT (visited
, dest
->index
);
7595 dfs_order
[dfsnum
++] = dest
->index
;
7599 /* Since the DEST node has been visited for the first
7600 time, check its successors. */
7601 stack
[sp
++] = dest
->succ
;
7605 /* There are no successors for the DEST node so assign
7606 its reverse completion number. */
7608 rc_order
[rcnum
--] = dest
->index
;
7613 if (! e
->succ_next
&& src
!= ENTRY_BLOCK_PTR
)
7615 /* There are no more successors for the SRC node
7616 so assign its reverse completion number. */
7618 rc_order
[rcnum
--] = src
->index
;
7622 stack
[sp
- 1] = e
->succ_next
;
7629 sbitmap_free (visited
);
7631 /* The number of nodes visited should not be greater than
7633 if (dfsnum
> n_basic_blocks
)
7636 /* There are some nodes left in the CFG that are unreachable. */
7637 if (dfsnum
< n_basic_blocks
)
7642 /* Compute the depth first search order on the _reverse_ graph and
7643 store in the array DFS_ORDER, marking the nodes visited in VISITED.
7644 Returns the number of nodes visited.
7646 The computation is split into three pieces:
7648 flow_dfs_compute_reverse_init () creates the necessary data
7651 flow_dfs_compute_reverse_add_bb () adds a basic block to the data
7652 structures. The block will start the search.
7654 flow_dfs_compute_reverse_execute () continues (or starts) the
7655 search using the block on the top of the stack, stopping when the
7658 flow_dfs_compute_reverse_finish () destroys the necessary data
7661 Thus, the user will probably call ..._init(), call ..._add_bb() to
7662 add a beginning basic block to the stack, call ..._execute(),
7663 possibly add another bb to the stack and again call ..._execute(),
7664 ..., and finally call _finish(). */
7666 /* Initialize the data structures used for depth-first search on the
7667 reverse graph. If INITIALIZE_STACK is nonzero, the exit block is
7668 added to the basic block stack. DATA is the current depth-first
7669 search context. If INITIALIZE_STACK is non-zero, there is an
7670 element on the stack. */
7673 flow_dfs_compute_reverse_init (data
)
7674 depth_first_search_ds data
;
7676 /* Allocate stack for back-tracking up CFG. */
7678 (basic_block
*) xmalloc ((n_basic_blocks
- (INVALID_BLOCK
+ 1))
7679 * sizeof (basic_block
));
7682 /* Allocate bitmap to track nodes that have been visited. */
7683 data
->visited_blocks
= sbitmap_alloc (n_basic_blocks
- (INVALID_BLOCK
+ 1));
7685 /* None of the nodes in the CFG have been visited yet. */
7686 sbitmap_zero (data
->visited_blocks
);
7691 /* Add the specified basic block to the top of the dfs data
7692 structures. When the search continues, it will start at the
7696 flow_dfs_compute_reverse_add_bb (data
, bb
)
7697 depth_first_search_ds data
;
7700 data
->stack
[data
->sp
++] = bb
;
7704 /* Continue the depth-first search through the reverse graph starting
7705 with the block at the stack's top and ending when the stack is
7706 empty. Visited nodes are marked. Returns an unvisited basic
7707 block, or NULL if there is none available. */
7710 flow_dfs_compute_reverse_execute (data
)
7711 depth_first_search_ds data
;
7717 while (data
->sp
> 0)
7719 bb
= data
->stack
[--data
->sp
];
7721 /* Mark that we have visited this node. */
7722 if (!TEST_BIT (data
->visited_blocks
, bb
->index
- (INVALID_BLOCK
+ 1)))
7724 SET_BIT (data
->visited_blocks
, bb
->index
- (INVALID_BLOCK
+ 1));
7726 /* Perform depth-first search on adjacent vertices. */
7727 for (e
= bb
->pred
; e
; e
= e
->pred_next
)
7728 flow_dfs_compute_reverse_add_bb (data
, e
->src
);
7732 /* Determine if there are unvisited basic blocks. */
7733 for (i
= n_basic_blocks
- (INVALID_BLOCK
+ 1); --i
>= 0;)
7734 if (!TEST_BIT (data
->visited_blocks
, i
))
7735 return BASIC_BLOCK (i
+ (INVALID_BLOCK
+ 1));
7739 /* Destroy the data structures needed for depth-first search on the
7743 flow_dfs_compute_reverse_finish (data
)
7744 depth_first_search_ds data
;
7747 sbitmap_free (data
->visited_blocks
);
7752 /* Find the root node of the loop pre-header extended basic block and
7753 the edges along the trace from the root node to the loop header. */
7756 flow_loop_pre_header_scan (loop
)
7762 loop
->num_pre_header_edges
= 0;
7764 if (loop
->num_entries
!= 1)
7767 ebb
= loop
->entry_edges
[0]->src
;
7769 if (ebb
!= ENTRY_BLOCK_PTR
)
7773 /* Count number of edges along trace from loop header to
7774 root of pre-header extended basic block. Usually this is
7775 only one or two edges. */
7777 while (ebb
->pred
->src
!= ENTRY_BLOCK_PTR
&& ! ebb
->pred
->pred_next
)
7779 ebb
= ebb
->pred
->src
;
7783 loop
->pre_header_edges
= (edge
*) xmalloc (num
* sizeof (edge
*));
7784 loop
->num_pre_header_edges
= num
;
7786 /* Store edges in order that they are followed. The source
7787 of the first edge is the root node of the pre-header extended
7788 basic block and the destination of the last last edge is
7790 for (e
= loop
->entry_edges
[0]; num
; e
= e
->src
->pred
)
7792 loop
->pre_header_edges
[--num
] = e
;
7798 /* Return the block for the pre-header of the loop with header
7799 HEADER where DOM specifies the dominator information. Return NULL if
7800 there is no pre-header. */
7803 flow_loop_pre_header_find (header
, dom
)
7807 basic_block pre_header
;
7810 /* If block p is a predecessor of the header and is the only block
7811 that the header does not dominate, then it is the pre-header. */
7813 for (e
= header
->pred
; e
; e
= e
->pred_next
)
7815 basic_block node
= e
->src
;
7817 if (node
!= ENTRY_BLOCK_PTR
7818 && ! TEST_BIT (dom
[node
->index
], header
->index
))
7820 if (pre_header
== NULL
)
7824 /* There are multiple edges into the header from outside
7825 the loop so there is no pre-header block. */
7834 /* Add LOOP to the loop hierarchy tree where PREVLOOP was the loop
7835 previously added. The insertion algorithm assumes that the loops
7836 are added in the order found by a depth first search of the CFG. */
7839 flow_loop_tree_node_add (prevloop
, loop
)
7840 struct loop
*prevloop
;
7844 if (flow_loop_nested_p (prevloop
, loop
))
7846 prevloop
->inner
= loop
;
7847 loop
->outer
= prevloop
;
7851 while (prevloop
->outer
)
7853 if (flow_loop_nested_p (prevloop
->outer
, loop
))
7855 prevloop
->next
= loop
;
7856 loop
->outer
= prevloop
->outer
;
7859 prevloop
= prevloop
->outer
;
7862 prevloop
->next
= loop
;
7866 /* Build the loop hierarchy tree for LOOPS. */
7869 flow_loops_tree_build (loops
)
7870 struct loops
*loops
;
7875 num_loops
= loops
->num
;
7879 /* Root the loop hierarchy tree with the first loop found.
7880 Since we used a depth first search this should be the
7882 loops
->tree
= &loops
->array
[0];
7883 loops
->tree
->outer
= loops
->tree
->inner
= loops
->tree
->next
= NULL
;
7885 /* Add the remaining loops to the tree. */
7886 for (i
= 1; i
< num_loops
; i
++)
7887 flow_loop_tree_node_add (&loops
->array
[i
- 1], &loops
->array
[i
]);
7890 /* Helper function to compute loop nesting depth and enclosed loop level
7891 for the natural loop specified by LOOP at the loop depth DEPTH.
7892 Returns the loop level. */
7895 flow_loop_level_compute (loop
, depth
)
7905 /* Traverse loop tree assigning depth and computing level as the
7906 maximum level of all the inner loops of this loop. The loop
7907 level is equivalent to the height of the loop in the loop tree
7908 and corresponds to the number of enclosed loop levels (including
7910 for (inner
= loop
->inner
; inner
; inner
= inner
->next
)
7914 ilevel
= flow_loop_level_compute (inner
, depth
+ 1) + 1;
7919 loop
->level
= level
;
7920 loop
->depth
= depth
;
7924 /* Compute the loop nesting depth and enclosed loop level for the loop
7925 hierarchy tree specfied by LOOPS. Return the maximum enclosed loop
7929 flow_loops_level_compute (loops
)
7930 struct loops
*loops
;
7936 /* Traverse all the outer level loops. */
7937 for (loop
= loops
->tree
; loop
; loop
= loop
->next
)
7939 level
= flow_loop_level_compute (loop
, 1);
7947 /* Find all the natural loops in the function and save in LOOPS structure
7948 and recalculate loop_depth information in basic block structures.
7949 FLAGS controls which loop information is collected.
7950 Return the number of natural loops found. */
7953 flow_loops_find (loops
, flags
)
7954 struct loops
*loops
;
7966 /* This function cannot be repeatedly called with different
7967 flags to build up the loop information. The loop tree
7968 must always be built if this function is called. */
7969 if (! (flags
& LOOP_TREE
))
7972 memset (loops
, 0, sizeof (*loops
));
7974 /* Taking care of this degenerate case makes the rest of
7975 this code simpler. */
7976 if (n_basic_blocks
== 0)
7982 /* Compute the dominators. */
7983 dom
= sbitmap_vector_alloc (n_basic_blocks
, n_basic_blocks
);
7984 calculate_dominance_info (NULL
, dom
, CDI_DOMINATORS
);
7986 /* Count the number of loop edges (back edges). This should be the
7987 same as the number of natural loops. */
7990 for (b
= 0; b
< n_basic_blocks
; b
++)
7994 header
= BASIC_BLOCK (b
);
7995 header
->loop_depth
= 0;
7997 for (e
= header
->pred
; e
; e
= e
->pred_next
)
7999 basic_block latch
= e
->src
;
8001 /* Look for back edges where a predecessor is dominated
8002 by this block. A natural loop has a single entry
8003 node (header) that dominates all the nodes in the
8004 loop. It also has single back edge to the header
8005 from a latch node. Note that multiple natural loops
8006 may share the same header. */
8007 if (b
!= header
->index
)
8010 if (latch
!= ENTRY_BLOCK_PTR
&& TEST_BIT (dom
[latch
->index
], b
))
8017 /* Compute depth first search order of the CFG so that outer
8018 natural loops will be found before inner natural loops. */
8019 dfs_order
= (int *) xmalloc (n_basic_blocks
* sizeof (int));
8020 rc_order
= (int *) xmalloc (n_basic_blocks
* sizeof (int));
8021 flow_depth_first_order_compute (dfs_order
, rc_order
);
8023 /* Allocate loop structures. */
8025 = (struct loop
*) xcalloc (num_loops
, sizeof (struct loop
));
8027 headers
= sbitmap_alloc (n_basic_blocks
);
8028 sbitmap_zero (headers
);
8030 loops
->shared_headers
= sbitmap_alloc (n_basic_blocks
);
8031 sbitmap_zero (loops
->shared_headers
);
8033 /* Find and record information about all the natural loops
8036 for (b
= 0; b
< n_basic_blocks
; b
++)
8040 /* Search the nodes of the CFG in reverse completion order
8041 so that we can find outer loops first. */
8042 header
= BASIC_BLOCK (rc_order
[b
]);
8044 /* Look for all the possible latch blocks for this header. */
8045 for (e
= header
->pred
; e
; e
= e
->pred_next
)
8047 basic_block latch
= e
->src
;
8049 /* Look for back edges where a predecessor is dominated
8050 by this block. A natural loop has a single entry
8051 node (header) that dominates all the nodes in the
8052 loop. It also has single back edge to the header
8053 from a latch node. Note that multiple natural loops
8054 may share the same header. */
8055 if (latch
!= ENTRY_BLOCK_PTR
8056 && TEST_BIT (dom
[latch
->index
], header
->index
))
8060 loop
= loops
->array
+ num_loops
;
8062 loop
->header
= header
;
8063 loop
->latch
= latch
;
8064 loop
->num
= num_loops
;
8071 for (i
= 0; i
< num_loops
; i
++)
8073 struct loop
*loop
= &loops
->array
[i
];
8076 /* Keep track of blocks that are loop headers so
8077 that we can tell which loops should be merged. */
8078 if (TEST_BIT (headers
, loop
->header
->index
))
8079 SET_BIT (loops
->shared_headers
, loop
->header
->index
);
8080 SET_BIT (headers
, loop
->header
->index
);
8082 /* Find nodes contained within the loop. */
8083 loop
->nodes
= sbitmap_alloc (n_basic_blocks
);
8085 = flow_loop_nodes_find (loop
->header
, loop
->latch
, loop
->nodes
);
8087 /* Compute first and last blocks within the loop.
8088 These are often the same as the loop header and
8089 loop latch respectively, but this is not always
8092 = BASIC_BLOCK (sbitmap_first_set_bit (loop
->nodes
));
8094 = BASIC_BLOCK (sbitmap_last_set_bit (loop
->nodes
));
8096 if (flags
& LOOP_EDGES
)
8098 /* Find edges which enter the loop header.
8099 Note that the entry edges should only
8100 enter the header of a natural loop. */
8102 = flow_loop_entry_edges_find (loop
->header
,
8104 &loop
->entry_edges
);
8106 /* Find edges which exit the loop. */
8108 = flow_loop_exit_edges_find (loop
->nodes
,
8111 /* Determine which loop nodes dominate all the exits
8113 loop
->exits_doms
= sbitmap_alloc (n_basic_blocks
);
8114 sbitmap_copy (loop
->exits_doms
, loop
->nodes
);
8115 for (j
= 0; j
< loop
->num_exits
; j
++)
8116 sbitmap_a_and_b (loop
->exits_doms
, loop
->exits_doms
,
8117 dom
[loop
->exit_edges
[j
]->src
->index
]);
8119 /* The header of a natural loop must dominate
8121 if (! TEST_BIT (loop
->exits_doms
, loop
->header
->index
))
8125 if (flags
& LOOP_PRE_HEADER
)
8127 /* Look to see if the loop has a pre-header node. */
8129 = flow_loop_pre_header_find (loop
->header
, dom
);
8131 flow_loop_pre_header_scan (loop
);
8135 /* Natural loops with shared headers may either be disjoint or
8136 nested. Disjoint loops with shared headers cannot be inner
8137 loops and should be merged. For now just mark loops that share
8139 for (i
= 0; i
< num_loops
; i
++)
8140 if (TEST_BIT (loops
->shared_headers
, loops
->array
[i
].header
->index
))
8141 loops
->array
[i
].shared
= 1;
8143 sbitmap_free (headers
);
8146 loops
->num
= num_loops
;
8148 /* Save CFG derived information to avoid recomputing it. */
8149 loops
->cfg
.dom
= dom
;
8150 loops
->cfg
.dfs_order
= dfs_order
;
8151 loops
->cfg
.rc_order
= rc_order
;
8153 /* Build the loop hierarchy tree. */
8154 flow_loops_tree_build (loops
);
8156 /* Assign the loop nesting depth and enclosed loop level for each
8158 loops
->levels
= flow_loops_level_compute (loops
);
8164 /* Update the information regarding the loops in the CFG
8165 specified by LOOPS. */
8167 flow_loops_update (loops
, flags
)
8168 struct loops
*loops
;
8171 /* One day we may want to update the current loop data. For now
8172 throw away the old stuff and rebuild what we need. */
8174 flow_loops_free (loops
);
8176 return flow_loops_find (loops
, flags
);
8180 /* Return non-zero if edge E enters header of LOOP from outside of LOOP. */
8183 flow_loop_outside_edge_p (loop
, e
)
8184 const struct loop
*loop
;
8187 if (e
->dest
!= loop
->header
)
8189 return (e
->src
== ENTRY_BLOCK_PTR
)
8190 || ! TEST_BIT (loop
->nodes
, e
->src
->index
);
8193 /* Clear LOG_LINKS fields of insns in a chain.
8194 Also clear the global_live_at_{start,end} fields of the basic block
8198 clear_log_links (insns
)
8204 for (i
= insns
; i
; i
= NEXT_INSN (i
))
8208 for (b
= 0; b
< n_basic_blocks
; b
++)
8210 basic_block bb
= BASIC_BLOCK (b
);
8212 bb
->global_live_at_start
= NULL
;
8213 bb
->global_live_at_end
= NULL
;
8216 ENTRY_BLOCK_PTR
->global_live_at_end
= NULL
;
8217 EXIT_BLOCK_PTR
->global_live_at_start
= NULL
;
8220 /* Given a register bitmap, turn on the bits in a HARD_REG_SET that
8221 correspond to the hard registers, if any, set in that map. This
8222 could be done far more efficiently by having all sorts of special-cases
8223 with moving single words, but probably isn't worth the trouble. */
8226 reg_set_to_hard_reg_set (to
, from
)
8232 EXECUTE_IF_SET_IN_BITMAP
8235 if (i
>= FIRST_PSEUDO_REGISTER
)
8237 SET_HARD_REG_BIT (*to
, i
);
8241 /* Called once at initialization time. */
8246 static int initialized
;
8250 gcc_obstack_init (&flow_obstack
);
8251 flow_firstobj
= (char *) obstack_alloc (&flow_obstack
, 0);
8256 obstack_free (&flow_obstack
, flow_firstobj
);
8257 flow_firstobj
= (char *) obstack_alloc (&flow_obstack
, 0);