1 /* Data flow analysis for GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
any later version.
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
22 /* This file contains the data flow analysis pass of the compiler. It
23 computes data flow information which tells combine_instructions
24 which insns to consider combining and controls register allocation.
26 Additional data flow information that is too bulky to record is
27 generated during the analysis, and is used at that time to create
28 autoincrement and autodecrement addressing.
30 The first step is dividing the function into basic blocks.
31 find_basic_blocks does this. Then life_analysis determines
32 where each register is live and where it is dead.
34 ** find_basic_blocks **
36 find_basic_blocks divides the current function's rtl into basic
37 blocks and constructs the CFG. The blocks are recorded in the
38 basic_block_info array; the CFG exists in the edge structures
39 referenced by the blocks.
41 find_basic_blocks also finds any unreachable loops and deletes them.
45 life_analysis is called immediately after find_basic_blocks.
46 It uses the basic block information to determine where each
47 hard or pseudo register is live.
49 ** live-register info **
51 The information about where each register is live is in two parts:
52 the REG_NOTES of insns, and the vector basic_block->global_live_at_start.
54 basic_block->global_live_at_start has an element for each basic
55 block, and the element is a bit-vector with a bit for each hard or
56 pseudo register. The bit is 1 if the register is live at the
57 beginning of the basic block.
59 Two types of elements can be added to an insn's REG_NOTES.
60 A REG_DEAD note is added to an insn's REG_NOTES for any register
61 that meets both of two conditions: The value in the register is not
62 needed in subsequent insns and the insn does not replace the value in
63 the register (in the case of multi-word hard registers, the value in
64 each register must be replaced by the insn to avoid a REG_DEAD note).
66 In the vast majority of cases, an object in a REG_DEAD note will be
67 used somewhere in the insn. The (rare) exception to this is if an
68 insn uses a multi-word hard register and only some of the registers are
69 needed in subsequent insns. In that case, REG_DEAD notes will be
70 provided for those hard registers that are not subsequently needed.
71 Partial REG_DEAD notes of this type do not occur when an insn sets
72 only some of the hard registers used in such a multi-word operand;
73 omitting REG_DEAD notes for objects stored in an insn is optional and
74 the desire to do so does not justify the complexity of the partial
77 REG_UNUSED notes are added for each register that is set by the insn
78 but is unused subsequently (if every register set by the insn is unused
79 and the insn does not reference memory or have some other side-effect,
80 the insn is deleted instead). If only part of a multi-word hard
81 register is used in a subsequent insn, REG_UNUSED notes are made for
82 the parts that will not be used.
84 To determine which registers are live after any insn, one can
85 start from the beginning of the basic block and scan insns, noting
86 which registers are set by each insn and which die there.
88 ** Other actions of life_analysis **
90 life_analysis sets up the LOG_LINKS fields of insns because the
91 information needed to do so is readily available.
93 life_analysis deletes insns whose only effect is to store a value
96 life_analysis notices cases where a reference to a register as
97 a memory address can be combined with a preceding or following
98 incrementation or decrementation of the register. The separate
99 instruction to increment or decrement is deleted and the address
100 is changed to a POST_INC or similar rtx.
102 Each time an incrementing or decrementing address is created,
103 a REG_INC element is added to the insn's REG_NOTES list.
105 life_analysis fills in certain vectors containing information about
106 register usage: REG_N_REFS, REG_N_DEATHS, REG_N_SETS, REG_LIVE_LENGTH,
107 REG_N_CALLS_CROSSED and REG_BASIC_BLOCK.
109 life_analysis sets current_function_sp_is_unchanging if the function
110 doesn't modify the stack pointer. */
114 Split out from life_analysis:
115 - local property discovery (bb->local_live, bb->local_set)
116 - global property computation
118 - pre/post modify transformation
126 #include "hard-reg-set.h"
127 #include "basic-block.h"
128 #include "insn-config.h"
132 #include "function.h"
140 #include "splay-tree.h"
142 #define obstack_chunk_alloc xmalloc
143 #define obstack_chunk_free free
145 /* EXIT_IGNORE_STACK should be nonzero if, when returning from a function,
146 the stack pointer does not matter. The value is tested only in
147 functions that have frame pointers.
148 No definition is equivalent to always zero. */
149 #ifndef EXIT_IGNORE_STACK
150 #define EXIT_IGNORE_STACK 0
153 #ifndef HAVE_epilogue
154 #define HAVE_epilogue 0
156 #ifndef HAVE_prologue
157 #define HAVE_prologue 0
159 #ifndef HAVE_sibcall_epilogue
160 #define HAVE_sibcall_epilogue 0
164 #define LOCAL_REGNO(REGNO) 0
166 #ifndef EPILOGUE_USES
167 #define EPILOGUE_USES(REGNO) 0
170 /* The obstack on which the flow graph components are allocated. */
172 struct obstack flow_obstack
;
173 static char *flow_firstobj
;
175 /* Number of basic blocks in the current function. */
179 /* Number of edges in the current function. */
183 /* The basic block array. */
185 varray_type basic_block_info
;
187 /* The special entry and exit blocks. */
189 struct basic_block_def entry_exit_blocks
[2]
194 NULL
, /* local_set */
195 NULL
, /* cond_local_set */
196 NULL
, /* global_live_at_start */
197 NULL
, /* global_live_at_end */
199 ENTRY_BLOCK
, /* index */
208 NULL
, /* local_set */
209 NULL
, /* cond_local_set */
210 NULL
, /* global_live_at_start */
211 NULL
, /* global_live_at_end */
213 EXIT_BLOCK
, /* index */
219 /* Nonzero if the second flow pass has completed. */
222 /* Maximum register number used in this function, plus one. */
226 /* Indexed by n, giving various register information */
228 varray_type reg_n_info
;
230 /* Size of a regset for the current function,
231 in (1) bytes and (2) elements. */
236 /* Regset of regs live when calls to `setjmp'-like functions happen. */
237 /* ??? Does this exist only for the setjmp-clobbered warning message? */
239 regset regs_live_at_setjmp
;
241 /* List made of EXPR_LIST rtx's which gives pairs of pseudo registers
242 that have to go in the same hard reg.
243 The first two regs in the list are a pair, and the next two
244 are another pair, etc. */
247 /* Callback that determines if it's ok for a function to have no
248 noreturn attribute. */
249 int (*lang_missing_noreturn_ok_p
) PARAMS ((tree
));
251 /* Set of registers that may be eliminable. These are handled specially
252 in updating regs_ever_live. */
254 static HARD_REG_SET elim_reg_set
;
256 /* The basic block structure for every insn, indexed by uid. */
258 varray_type basic_block_for_insn
;
260 /* The labels mentioned in non-jump rtl. Valid during find_basic_blocks. */
261 /* ??? Should probably be using LABEL_NUSES instead. It would take a
262 bit of surgery to be able to use or co-opt the routines in jump. */
264 static rtx label_value_list
;
265 static rtx tail_recursion_label_list
;
267 /* Holds information for tracking conditional register life information. */
268 struct reg_cond_life_info
270 /* A boolean expression of conditions under which a register is dead. */
272 /* Conditions under which a register is dead at the basic block end. */
275 /* A boolean expression of conditions under which a register has been
279 /* ??? Could store mask of bytes that are dead, so that we could finally
280 track lifetimes of multi-word registers accessed via subregs. */
283 /* For use in communicating between propagate_block and its subroutines.
284 Holds all information needed to compute life and def-use information. */
286 struct propagate_block_info
288 /* The basic block we're considering. */
291 /* Bit N is set if register N is conditionally or unconditionally live. */
294 /* Bit N is set if register N is set this insn. */
297 /* Element N is the next insn that uses (hard or pseudo) register N
298 within the current basic block; or zero, if there is no such insn. */
301 /* Contains a list of all the MEMs we are tracking for dead store
305 /* If non-null, record the set of registers set unconditionally in the
309 /* If non-null, record the set of registers set conditionally in the
311 regset cond_local_set
;
313 #ifdef HAVE_conditional_execution
314 /* Indexed by register number, holds a reg_cond_life_info for each
315 register that is not unconditionally live or dead. */
316 splay_tree reg_cond_dead
;
318 /* Bit N is set if register N is in an expression in reg_cond_dead. */
322 /* The length of mem_set_list. */
323 int mem_set_list_len
;
325 /* Non-zero if the value of CC0 is live. */
328 /* Flags controling the set of information propagate_block collects. */
332 /* Maximum length of pbi->mem_set_list before we start dropping
333 new elements on the floor. */
334 #define MAX_MEM_SET_LIST_LEN 100
336 /* Store the data structures necessary for depth-first search. */
337 struct depth_first_search_dsS
{
338 /* stack for backtracking during the algorithm */
341 /* number of edges in the stack. That is, positions 0, ..., sp-1
345 /* record of basic blocks already seen by depth-first search */
346 sbitmap visited_blocks
;
348 typedef struct depth_first_search_dsS
*depth_first_search_ds
;
350 /* Forward declarations */
351 static int count_basic_blocks
PARAMS ((rtx
));
352 static void find_basic_blocks_1
PARAMS ((rtx
));
353 static rtx find_label_refs
PARAMS ((rtx
, rtx
));
354 static void clear_edges
PARAMS ((void));
355 static void make_edges
PARAMS ((rtx
));
356 static void make_label_edge
PARAMS ((sbitmap
*, basic_block
,
358 static void make_eh_edge
PARAMS ((sbitmap
*, basic_block
, rtx
));
359 static void mark_critical_edges
PARAMS ((void));
361 static void commit_one_edge_insertion
PARAMS ((edge
));
363 static void delete_unreachable_blocks
PARAMS ((void));
364 static int can_delete_note_p
PARAMS ((rtx
));
365 static void expunge_block
PARAMS ((basic_block
));
366 static int can_delete_label_p
PARAMS ((rtx
));
367 static int tail_recursion_label_p
PARAMS ((rtx
));
368 static int merge_blocks_move_predecessor_nojumps
PARAMS ((basic_block
,
370 static int merge_blocks_move_successor_nojumps
PARAMS ((basic_block
,
372 static int merge_blocks
PARAMS ((edge
,basic_block
,basic_block
));
373 static void try_merge_blocks
PARAMS ((void));
374 static void tidy_fallthru_edges
PARAMS ((void));
375 static int verify_wide_reg_1
PARAMS ((rtx
*, void *));
376 static void verify_wide_reg
PARAMS ((int, rtx
, rtx
));
377 static void verify_local_live_at_start
PARAMS ((regset
, basic_block
));
378 static int set_noop_p
PARAMS ((rtx
));
379 static int noop_move_p
PARAMS ((rtx
));
380 static void delete_noop_moves
PARAMS ((rtx
));
381 static void notice_stack_pointer_modification_1
PARAMS ((rtx
, rtx
, void *));
382 static void notice_stack_pointer_modification
PARAMS ((rtx
));
383 static void mark_reg
PARAMS ((rtx
, void *));
384 static void mark_regs_live_at_end
PARAMS ((regset
));
385 static int set_phi_alternative_reg
PARAMS ((rtx
, int, int, void *));
386 static void calculate_global_regs_live
PARAMS ((sbitmap
, sbitmap
, int));
387 static void propagate_block_delete_insn
PARAMS ((basic_block
, rtx
));
388 static rtx propagate_block_delete_libcall
PARAMS ((basic_block
, rtx
, rtx
));
389 static int insn_dead_p
PARAMS ((struct propagate_block_info
*,
391 static int libcall_dead_p
PARAMS ((struct propagate_block_info
*,
393 static void mark_set_regs
PARAMS ((struct propagate_block_info
*,
395 static void mark_set_1
PARAMS ((struct propagate_block_info
*,
396 enum rtx_code
, rtx
, rtx
,
398 #ifdef HAVE_conditional_execution
399 static int mark_regno_cond_dead
PARAMS ((struct propagate_block_info
*,
401 static void free_reg_cond_life_info
PARAMS ((splay_tree_value
));
402 static int flush_reg_cond_reg_1
PARAMS ((splay_tree_node
, void *));
403 static void flush_reg_cond_reg
PARAMS ((struct propagate_block_info
*,
405 static rtx elim_reg_cond
PARAMS ((rtx
, unsigned int));
406 static rtx ior_reg_cond
PARAMS ((rtx
, rtx
, int));
407 static rtx not_reg_cond
PARAMS ((rtx
));
408 static rtx and_reg_cond
PARAMS ((rtx
, rtx
, int));
411 static void attempt_auto_inc
PARAMS ((struct propagate_block_info
*,
412 rtx
, rtx
, rtx
, rtx
, rtx
));
413 static void find_auto_inc
PARAMS ((struct propagate_block_info
*,
415 static int try_pre_increment_1
PARAMS ((struct propagate_block_info
*,
417 static int try_pre_increment
PARAMS ((rtx
, rtx
, HOST_WIDE_INT
));
419 static void mark_used_reg
PARAMS ((struct propagate_block_info
*,
421 static void mark_used_regs
PARAMS ((struct propagate_block_info
*,
423 void dump_flow_info
PARAMS ((FILE *));
424 void debug_flow_info
PARAMS ((void));
425 static void dump_edge_info
PARAMS ((FILE *, edge
, int));
426 static void print_rtl_and_abort
PARAMS ((void));
428 static void invalidate_mems_from_autoinc
PARAMS ((struct propagate_block_info
*,
430 static void invalidate_mems_from_set
PARAMS ((struct propagate_block_info
*,
432 static void remove_fake_successors
PARAMS ((basic_block
));
433 static void flow_nodes_print
PARAMS ((const char *, const sbitmap
,
435 static void flow_edge_list_print
PARAMS ((const char *, const edge
*,
437 static void flow_loops_cfg_dump
PARAMS ((const struct loops
*,
439 static int flow_loop_nested_p
PARAMS ((struct loop
*,
441 static int flow_loop_entry_edges_find
PARAMS ((basic_block
, const sbitmap
,
443 static int flow_loop_exit_edges_find
PARAMS ((const sbitmap
, edge
**));
444 static int flow_loop_nodes_find
PARAMS ((basic_block
, basic_block
, sbitmap
));
445 static int flow_depth_first_order_compute
PARAMS ((int *, int *));
446 static void flow_dfs_compute_reverse_init
447 PARAMS ((depth_first_search_ds
));
448 static void flow_dfs_compute_reverse_add_bb
449 PARAMS ((depth_first_search_ds
, basic_block
));
450 static basic_block flow_dfs_compute_reverse_execute
451 PARAMS ((depth_first_search_ds
));
452 static void flow_dfs_compute_reverse_finish
453 PARAMS ((depth_first_search_ds
));
454 static void flow_loop_pre_header_scan
PARAMS ((struct loop
*));
455 static basic_block flow_loop_pre_header_find
PARAMS ((basic_block
,
457 static void flow_loop_tree_node_add
PARAMS ((struct loop
*, struct loop
*));
458 static void flow_loops_tree_build
PARAMS ((struct loops
*));
459 static int flow_loop_level_compute
PARAMS ((struct loop
*, int));
460 static int flow_loops_level_compute
PARAMS ((struct loops
*));
461 static void allocate_bb_life_data
PARAMS ((void));
/* NOTE(review): this chunk is a fragmentary extraction -- original source
   lines are missing between the numbered fragments below, so only comments
   were added here; no code tokens were changed.

   find_basic_blocks: partitions the insn chain starting at F into basic
   blocks.  It flushes any previous basic_block_info, counts blocks, lets
   find_basic_blocks_1 allocate the structures, maps each insn to its block
   (compute_bb_for_insn, with ~10% headroom in max_uid for auto-inc insns
   created later by life_analysis), then discovers CFG edges (make_edges)
   and tidies fallthrough/critical edges.  NREGS and FILE are unused.  */
463 /* Find basic blocks of the current function.
464 F is the first insn of the function and NREGS the number of register
468 find_basic_blocks (f
, nregs
, file
)
470 int nregs ATTRIBUTE_UNUSED
;
471 FILE *file ATTRIBUTE_UNUSED
;
475 /* Flush out existing data. */
476 if (basic_block_info
!= NULL
)
482 /* Clear bb->aux on all extant basic blocks. We'll use this as a
483 tag for reuse during create_basic_block, just in case some pass
484 copies around basic block notes improperly. */
485 for (i
= 0; i
< n_basic_blocks
; ++i
)
486 BASIC_BLOCK (i
)->aux
= NULL
;
488 VARRAY_FREE (basic_block_info
);
491 n_basic_blocks
= count_basic_blocks (f
);
493 /* Size the basic block table. The actual structures will be allocated
494 by find_basic_blocks_1, since we want to keep the structure pointers
495 stable across calls to find_basic_blocks. */
496 /* ??? This whole issue would be much simpler if we called find_basic_blocks
497 exactly once, and thereafter we don't have a single long chain of
498 instructions at all until close to the end of compilation when we
499 actually lay them out. */
501 VARRAY_BB_INIT (basic_block_info
, n_basic_blocks
, "basic_block_info");
503 find_basic_blocks_1 (f
);
505 /* Record the block to which an insn belongs. */
506 /* ??? This should be done another way, by which (perhaps) a label is
507 tagged directly with the basic block that it starts. It is used for
508 more than that currently, but IMO that is the only valid use. */
510 max_uid
= get_max_uid ();
512 /* Leave space for insns life_analysis makes in some cases for auto-inc.
513 These cases are rare, so we don't need too much space. */
514 max_uid
+= max_uid
/ 10;
517 compute_bb_for_insn (max_uid
);
519 /* Discover the edges of our cfg. */
520 make_edges (label_value_list
);
522 /* Do very simple cleanup now, for the benefit of code that runs between
523 here and cleanup_cfg, e.g. thread_prologue_and_epilogue_insns. */
524 tidy_fallthru_edges ();
526 mark_critical_edges ();
528 #ifdef ENABLE_CHECKING
/* NOTE(review): fragmentary extraction -- comments only added; no code
   tokens were changed.

   check_function_return_warnings: issues the return-related warnings by
   inspecting the freshly built CFG: suggests `noreturn' when the exit
   block has no predecessors (and the front end permits the warning),
   warns when a `noreturn' function can reach the exit block, and warns
   about control reaching the end of a non-void function when the
   clobber_return_insn is still present in some live basic block.  */
534 check_function_return_warnings ()
536 if (warn_missing_noreturn
537 && !TREE_THIS_VOLATILE (cfun
->decl
)
538 && EXIT_BLOCK_PTR
->pred
== NULL
539 && (lang_missing_noreturn_ok_p
540 && !lang_missing_noreturn_ok_p (cfun
->decl
)))
541 warning ("function might be possible candidate for attribute `noreturn'");
543 /* If we have a path to EXIT, then we do return. */
544 if (TREE_THIS_VOLATILE (cfun
->decl
)
545 && EXIT_BLOCK_PTR
->pred
!= NULL
)
546 warning ("`noreturn' function does return");
548 /* If the clobber_return_insn appears in some basic block, then we
549 do reach the end without returning a value. */
550 else if (warn_return_type
551 && cfun
->x_clobber_return_insn
!= NULL
552 && EXIT_BLOCK_PTR
->pred
!= NULL
)
554 int max_uid
= get_max_uid ();
556 /* If clobber_return_insn was excised by jump1, then renumber_insns
557 can make max_uid smaller than the number still recorded in our rtx.
558 That's fine, since this is a quick way of verifying that the insn
559 is no longer in the chain. */
560 if (INSN_UID (cfun
->x_clobber_return_insn
) < max_uid
)
562 /* Recompute insn->block mapping, since the initial mapping is
563 set before we delete unreachable blocks. */
564 compute_bb_for_insn (max_uid
);
566 if (BLOCK_FOR_INSN (cfun
->x_clobber_return_insn
) != NULL
)
567 warning ("control reaches end of non-void function");
/* NOTE(review): fragmentary extraction -- comments only added; no code
   tokens were changed.

   count_basic_blocks: single pass over the insn chain counting block
   heads.  A new block starts at a CODE_LABEL, or at any real insn that
   follows a JUMP_INSN, a BARRIER, or an insn that created an abnormal
   edge (a CALL_INSN that may do a nonlocal goto or throw, or -- with
   -fnon-call-exceptions -- any throwing insn).  prev_code starts as
   JUMP_INSN so the very first insn opens a block.  Emits a (use const0)
   if the function would otherwise have zero blocks, to spare the rest
   of the compiler that edge case.  */
572 /* Count the basic blocks of the function. */
575 count_basic_blocks (f
)
579 register RTX_CODE prev_code
;
580 register int count
= 0;
581 int saw_abnormal_edge
= 0;
583 prev_code
= JUMP_INSN
;
584 for (insn
= f
; insn
; insn
= NEXT_INSN (insn
))
586 enum rtx_code code
= GET_CODE (insn
);
588 if (code
== CODE_LABEL
589 || (GET_RTX_CLASS (code
) == 'i'
590 && (prev_code
== JUMP_INSN
591 || prev_code
== BARRIER
592 || saw_abnormal_edge
)))
594 saw_abnormal_edge
= 0;
598 /* Record whether this insn created an edge. */
599 if (code
== CALL_INSN
)
603 /* If there is a nonlocal goto label and the specified
604 region number isn't -1, we have an edge. */
605 if (nonlocal_goto_handler_labels
606 && ((note
= find_reg_note (insn
, REG_EH_REGION
, NULL_RTX
)) == 0
607 || INTVAL (XEXP (note
, 0)) >= 0))
608 saw_abnormal_edge
= 1;
610 else if (can_throw_internal (insn
))
611 saw_abnormal_edge
= 1;
613 else if (flag_non_call_exceptions
615 && can_throw_internal (insn
))
616 saw_abnormal_edge
= 1;
622 /* The rest of the compiler works a bit smoother when we don't have to
623 check for the edge case of do-nothing functions with no basic blocks. */
626 emit_insn (gen_rtx_USE (VOIDmode
, const0_rtx
));
/* NOTE(review): fragmentary extraction -- comments only added; no code
   tokens were changed.

   find_label_refs: walks insns from F and prepends to the EXPR_LIST LVL
   every label referenced through a REG_LABEL note on a non-jump insn,
   returning the extended list.  Labels that are part of tablejump setup
   (label followed by an ADDR_VEC/ADDR_DIFF_VEC jump), deleted labels
   (now NOTEs), and labels consumed by the immediately following jump are
   deliberately skipped.  Used on CALL_PLACEHOLDER alternatives.  */
633 /* Scan a list of insns for labels referred to other than by jumps.
634 This is used to scan the alternatives of a call placeholder. */
636 find_label_refs (f
, lvl
)
642 for (insn
= f
; insn
; insn
= NEXT_INSN (insn
))
643 if (INSN_P (insn
) && GET_CODE (insn
) != JUMP_INSN
)
647 /* Make a list of all labels referred to other than by jumps
648 (which just don't have the REG_LABEL notes).
650 Make a special exception for labels followed by an ADDR*VEC,
651 as this would be a part of the tablejump setup code.
653 Make a special exception to registers loaded with label
654 values just before jump insns that use them. */
656 for (note
= REG_NOTES (insn
); note
; note
= XEXP (note
, 1))
657 if (REG_NOTE_KIND (note
) == REG_LABEL
)
659 rtx lab
= XEXP (note
, 0), next
;
661 if ((next
= next_nonnote_insn (lab
)) != NULL
662 && GET_CODE (next
) == JUMP_INSN
663 && (GET_CODE (PATTERN (next
)) == ADDR_VEC
664 || GET_CODE (PATTERN (next
)) == ADDR_DIFF_VEC
))
666 else if (GET_CODE (lab
) == NOTE
)
668 else if (GET_CODE (NEXT_INSN (insn
)) == JUMP_INSN
669 && find_reg_note (NEXT_INSN (insn
), REG_LABEL
, lab
))
672 lvl
= alloc_EXPR_LIST (0, XEXP (note
, 0), lvl
);
/* NOTE(review): fragmentary extraction -- many original lines are missing
   between the numbered fragments below (the CODE_LABEL / NOTE / JUMP_INSN
   switch arms and several closing braces are not visible); only comments
   were added, no code tokens were changed.

   find_basic_blocks_1: single pass over the insn chain creating the
   basic_block structures counted earlier.  Tracks the open block via
   head/end; closes a block at labels, jumps, barriers, and calls that may
   throw or do a nonlocal goto (new_bb_inclusive / new_bb_exclusive join
   points).  NOTE_INSN_BASIC_BLOCK notes are unthreaded here and rethreaded
   by create_basic_block so block structs stay stable across calls.  Also
   collects label_value_list (labels referenced other than by jumps) and
   tail_recursion_label_list from CALL_PLACEHOLDERs.  Sanity-checks that
   the number of blocks created matches count_basic_blocks.  */
679 /* Find all basic blocks of the function whose first insn is F.
681 Collect and return a list of labels whose addresses are taken. This
682 will be used in make_edges for use with computed gotos. */
685 find_basic_blocks_1 (f
)
688 register rtx insn
, next
;
690 rtx bb_note
= NULL_RTX
;
696 /* We process the instructions in a slightly different way than we did
697 previously. This is so that we see a NOTE_BASIC_BLOCK after we have
698 closed out the previous block, so that it gets attached at the proper
699 place. Since this form should be equivalent to the previous,
700 count_basic_blocks continues to use the old form as a check. */
702 for (insn
= f
; insn
; insn
= next
)
704 enum rtx_code code
= GET_CODE (insn
);
706 next
= NEXT_INSN (insn
);
712 int kind
= NOTE_LINE_NUMBER (insn
);
714 /* Look for basic block notes with which to keep the
715 basic_block_info pointers stable. Unthread the note now;
716 we'll put it back at the right place in create_basic_block.
717 Or not at all if we've already found a note in this block. */
718 if (kind
== NOTE_INSN_BASIC_BLOCK
)
720 if (bb_note
== NULL_RTX
)
723 next
= flow_delete_insn (insn
);
729 /* A basic block starts at a label. If we've closed one off due
730 to a barrier or some such, no need to do it again. */
731 if (head
!= NULL_RTX
)
733 /* While we now have edge lists with which other portions of
734 the compiler might determine a call ending a basic block
735 does not imply an abnormal edge, it will be a bit before
736 everything can be updated. So continue to emit a noop at
737 the end of such a block. */
738 if (GET_CODE (end
) == CALL_INSN
&& ! SIBLING_CALL_P (end
))
740 rtx nop
= gen_rtx_USE (VOIDmode
, const0_rtx
);
741 end
= emit_insn_after (nop
, end
);
744 create_basic_block (i
++, head
, end
, bb_note
);
752 /* A basic block ends at a jump. */
753 if (head
== NULL_RTX
)
757 /* ??? Make a special check for table jumps. The way this
758 happens is truly and amazingly gross. We are about to
759 create a basic block that contains just a code label and
760 an addr*vec jump insn. Worse, an addr_diff_vec creates
761 its own natural loop.
763 Prevent this bit of brain damage, pasting things together
764 correctly in make_edges.
766 The correct solution involves emitting the table directly
767 on the tablejump instruction as a note, or JUMP_LABEL. */
769 if (GET_CODE (PATTERN (insn
)) == ADDR_VEC
770 || GET_CODE (PATTERN (insn
)) == ADDR_DIFF_VEC
)
778 goto new_bb_inclusive
;
781 /* A basic block ends at a barrier. It may be that an unconditional
782 jump already closed the basic block -- no need to do it again. */
783 if (head
== NULL_RTX
)
786 /* While we now have edge lists with which other portions of the
787 compiler might determine a call ending a basic block does not
788 imply an abnormal edge, it will be a bit before everything can
789 be updated. So continue to emit a noop at the end of such a
791 if (GET_CODE (end
) == CALL_INSN
&& ! SIBLING_CALL_P (end
))
793 rtx nop
= gen_rtx_USE (VOIDmode
, const0_rtx
);
794 end
= emit_insn_after (nop
, end
);
796 goto new_bb_exclusive
;
800 /* Record whether this call created an edge. */
801 rtx note
= find_reg_note (insn
, REG_EH_REGION
, NULL_RTX
);
802 int region
= (note
? INTVAL (XEXP (note
, 0)) : 0);
804 if (GET_CODE (PATTERN (insn
)) == CALL_PLACEHOLDER
)
806 /* Scan each of the alternatives for label refs. */
807 lvl
= find_label_refs (XEXP (PATTERN (insn
), 0), lvl
);
808 lvl
= find_label_refs (XEXP (PATTERN (insn
), 1), lvl
);
809 lvl
= find_label_refs (XEXP (PATTERN (insn
), 2), lvl
);
810 /* Record its tail recursion label, if any. */
811 if (XEXP (PATTERN (insn
), 3) != NULL_RTX
)
812 trll
= alloc_EXPR_LIST (0, XEXP (PATTERN (insn
), 3), trll
);
815 /* A basic block ends at a call that can either throw or
816 do a non-local goto. */
817 if ((nonlocal_goto_handler_labels
&& region
>= 0)
818 || can_throw_internal (insn
))
821 if (head
== NULL_RTX
)
826 create_basic_block (i
++, head
, end
, bb_note
);
827 head
= end
= NULL_RTX
;
835 /* Non-call exceptions generate new blocks just like calls. */
836 if (flag_non_call_exceptions
&& can_throw_internal (insn
))
837 goto new_bb_inclusive
;
839 if (head
== NULL_RTX
)
848 if (GET_CODE (insn
) == INSN
|| GET_CODE (insn
) == CALL_INSN
)
852 /* Make a list of all labels referred to other than by jumps.
854 Make a special exception for labels followed by an ADDR*VEC,
855 as this would be a part of the tablejump setup code.
857 Make a special exception to registers loaded with label
858 values just before jump insns that use them. */
860 for (note
= REG_NOTES (insn
); note
; note
= XEXP (note
, 1))
861 if (REG_NOTE_KIND (note
) == REG_LABEL
)
863 rtx lab
= XEXP (note
, 0), next
;
865 if ((next
= next_nonnote_insn (lab
)) != NULL
866 && GET_CODE (next
) == JUMP_INSN
867 && (GET_CODE (PATTERN (next
)) == ADDR_VEC
868 || GET_CODE (PATTERN (next
)) == ADDR_DIFF_VEC
))
870 else if (GET_CODE (lab
) == NOTE
)
872 else if (GET_CODE (NEXT_INSN (insn
)) == JUMP_INSN
873 && find_reg_note (NEXT_INSN (insn
), REG_LABEL
, lab
))
876 lvl
= alloc_EXPR_LIST (0, XEXP (note
, 0), lvl
);
881 if (head
!= NULL_RTX
)
882 create_basic_block (i
++, head
, end
, bb_note
);
884 flow_delete_insn (bb_note
);
886 if (i
!= n_basic_blocks
)
889 label_value_list
= lvl
;
890 tail_recursion_label_list
= trll
;
/* NOTE(review): the function header for this body was dropped by the
   extraction -- from its contents (deleting unreachable blocks, re-marking
   critical edges, and freeing label_value_list/tail_recursion_label_list,
   the data find_basic_blocks_1 produced) this is presumably cleanup_cfg;
   confirm against the complete source.  Comments only added.  */
893 /* Tidy the CFG by deleting unreachable code and whatnot. */
898 delete_unreachable_blocks ();
900 mark_critical_edges ();
902 /* Kill the data we won't maintain. */
903 free_EXPR_LIST_list (&label_value_list
);
904 free_EXPR_LIST_list (&tail_recursion_label_list
);
/* NOTE(review): fragmentary extraction -- comments only added; no code
   tokens were changed.

   create_basic_block: makes BASIC_BLOCK (INDEX) cover insns HEAD..END.
   If BB_NOTE already carries a usable basic_block struct (not from an
   inlined function), that struct is reused and the note is rethreaded
   just after HEAD's label (or at HEAD); otherwise a fresh zeroed struct
   is allocated on flow_obstack -- not malloc, so its lifetime matches
   the function's rtl -- and a new NOTE_INSN_BASIC_BLOCK is emitted.
   The bb note is always kept inside the block's insn range.  */
907 /* Create a new basic block consisting of the instructions between
908 HEAD and END inclusive. Reuses the note and basic block struct
909 in BB_NOTE, if any. */
912 create_basic_block (index
, head
, end
, bb_note
)
914 rtx head
, end
, bb_note
;
919 && ! RTX_INTEGRATED_P (bb_note
)
920 && (bb
= NOTE_BASIC_BLOCK (bb_note
)) != NULL
923 /* If we found an existing note, thread it back onto the chain. */
927 if (GET_CODE (head
) == CODE_LABEL
)
931 after
= PREV_INSN (head
);
935 if (after
!= bb_note
&& NEXT_INSN (after
) != bb_note
)
936 reorder_insns (bb_note
, bb_note
, after
);
940 /* Otherwise we must create a note and a basic block structure.
941 Since we allow basic block structs in rtl, give the struct
942 the same lifetime by allocating it off the function obstack
943 rather than using malloc. */
945 bb
= (basic_block
) obstack_alloc (&flow_obstack
, sizeof (*bb
));
946 memset (bb
, 0, sizeof (*bb
));
948 if (GET_CODE (head
) == CODE_LABEL
)
949 bb_note
= emit_note_after (NOTE_INSN_BASIC_BLOCK
, head
);
952 bb_note
= emit_note_before (NOTE_INSN_BASIC_BLOCK
, head
);
955 NOTE_BASIC_BLOCK (bb_note
) = bb
;
958 /* Always include the bb note in the block. */
959 if (NEXT_INSN (end
) == bb_note
)
965 BASIC_BLOCK (index
) = bb
;
967 /* Tag the block so that we know it has been used when considering
968 other basic block notes. */
/* NOTE(review): fragmentary extraction -- comments only added; no code
   tokens were changed.

   compute_bb_for_insn: (re)builds the uid -> basic_block varray
   basic_block_for_insn of size MAX, freeing any previous array, by
   walking each block's insns and recording the owning block per
   INSN_UID.  Callers pass MAX > get_max_uid () to leave headroom for
   insns created later (see find_basic_blocks).  */
972 /* Records the basic block struct in BB_FOR_INSN, for every instruction
973 indexed by INSN_UID. MAX is the size of the array. */
976 compute_bb_for_insn (max
)
981 if (basic_block_for_insn
)
982 VARRAY_FREE (basic_block_for_insn
);
983 VARRAY_BB_INIT (basic_block_for_insn
, max
, "basic_block_for_insn");
985 for (i
= 0; i
< n_basic_blocks
; ++i
)
987 basic_block bb
= BASIC_BLOCK (i
);
994 int uid
= INSN_UID (insn
);
996 VARRAY_BB (basic_block_for_insn
, uid
) = bb
;
999 insn
= NEXT_INSN (insn
);
/* NOTE(review): fragmentary extraction -- the function header and the
   per-edge free calls are missing from this chunk; from the visible body
   (walking every block's successor edges plus the entry block's, then
   nulling ENTRY_BLOCK_PTR->succ and EXIT_BLOCK_PTR->pred) this is the
   edge-freeing routine declared above as clear_edges.  Comments only
   added; no code tokens were changed.  */
1004 /* Free the memory associated with the edge structures. */
1012 for (i
= 0; i
< n_basic_blocks
; ++i
)
1014 basic_block bb
= BASIC_BLOCK (i
);
1016 for (e
= bb
->succ
; e
; e
= n
)
1026 for (e
= ENTRY_BLOCK_PTR
->succ
; e
; e
= n
)
1032 ENTRY_BLOCK_PTR
->succ
= 0;
1033 EXIT_BLOCK_PTR
->pred
= 0;
1038 /* Identify the edges between basic blocks.
1040 NONLOCAL_LABEL_LIST is a list of non-local labels in the function. Blocks
1041 that are otherwise unreachable may be reachable with a non-local goto.
1043 BB_EH_END is an array indexed by basic block number in which we record
1044 the list of exception regions active at the end of the basic block. */
1047 make_edges (label_value_list
)
1048 rtx label_value_list
;
1051 sbitmap
*edge_cache
= NULL
;
1053 /* Assume no computed jump; revise as we create edges. */
1054 current_function_has_computed_jump
= 0;
1056 /* Heavy use of computed goto in machine-generated code can lead to
1057 nearly fully-connected CFGs. In that case we spend a significant
1058 amount of time searching the edge lists for duplicates. */
1059 if (forced_labels
|| label_value_list
)
1061 edge_cache
= sbitmap_vector_alloc (n_basic_blocks
, n_basic_blocks
);
1062 sbitmap_vector_zero (edge_cache
, n_basic_blocks
);
1065 /* By nature of the way these get numbered, block 0 is always the entry. */
1066 make_edge (edge_cache
, ENTRY_BLOCK_PTR
, BASIC_BLOCK (0), EDGE_FALLTHRU
);
1068 for (i
= 0; i
< n_basic_blocks
; ++i
)
1070 basic_block bb
= BASIC_BLOCK (i
);
1073 int force_fallthru
= 0;
1075 /* Examine the last instruction of the block, and discover the
1076 ways we can leave the block. */
1079 code
= GET_CODE (insn
);
1082 if (code
== JUMP_INSN
)
1086 /* Recognize exception handling placeholders. */
1087 if (GET_CODE (PATTERN (insn
)) == RESX
)
1088 make_eh_edge (edge_cache
, bb
, insn
);
1090 /* Recognize a non-local goto as a branch outside the
1091 current function. */
1092 else if (find_reg_note (insn
, REG_NON_LOCAL_GOTO
, NULL_RTX
))
1095 /* ??? Recognize a tablejump and do the right thing. */
1096 else if ((tmp
= JUMP_LABEL (insn
)) != NULL_RTX
1097 && (tmp
= NEXT_INSN (tmp
)) != NULL_RTX
1098 && GET_CODE (tmp
) == JUMP_INSN
1099 && (GET_CODE (PATTERN (tmp
)) == ADDR_VEC
1100 || GET_CODE (PATTERN (tmp
)) == ADDR_DIFF_VEC
))
1105 if (GET_CODE (PATTERN (tmp
)) == ADDR_VEC
)
1106 vec
= XVEC (PATTERN (tmp
), 0);
1108 vec
= XVEC (PATTERN (tmp
), 1);
1110 for (j
= GET_NUM_ELEM (vec
) - 1; j
>= 0; --j
)
1111 make_label_edge (edge_cache
, bb
,
1112 XEXP (RTVEC_ELT (vec
, j
), 0), 0);
1114 /* Some targets (eg, ARM) emit a conditional jump that also
1115 contains the out-of-range target. Scan for these and
1116 add an edge if necessary. */
1117 if ((tmp
= single_set (insn
)) != NULL
1118 && SET_DEST (tmp
) == pc_rtx
1119 && GET_CODE (SET_SRC (tmp
)) == IF_THEN_ELSE
1120 && GET_CODE (XEXP (SET_SRC (tmp
), 2)) == LABEL_REF
)
1121 make_label_edge (edge_cache
, bb
,
1122 XEXP (XEXP (SET_SRC (tmp
), 2), 0), 0);
1124 #ifdef CASE_DROPS_THROUGH
1125 /* Silly VAXen. The ADDR_VEC is going to be in the way of
1126 us naturally detecting fallthru into the next block. */
1131 /* If this is a computed jump, then mark it as reaching
1132 everything on the label_value_list and forced_labels list. */
1133 else if (computed_jump_p (insn
))
1135 current_function_has_computed_jump
= 1;
1137 for (x
= label_value_list
; x
; x
= XEXP (x
, 1))
1138 make_label_edge (edge_cache
, bb
, XEXP (x
, 0), EDGE_ABNORMAL
);
1140 for (x
= forced_labels
; x
; x
= XEXP (x
, 1))
1141 make_label_edge (edge_cache
, bb
, XEXP (x
, 0), EDGE_ABNORMAL
);
1144 /* A return insn creates an edge out to the function exit. */
1145 else if (returnjump_p (insn
))
1146 make_edge (edge_cache
, bb
, EXIT_BLOCK_PTR
, 0);
1148 /* Otherwise, we have a plain conditional or unconditional jump. */
1151 if (! JUMP_LABEL (insn
))
1153 make_label_edge (edge_cache
, bb
, JUMP_LABEL (insn
), 0);
1157 /* If this is a sibling call insn, then this is in effect a
1158 combined call and return, and so we need an edge to the
1159 exit block. No need to worry about EH edges, since we
1160 wouldn't have created the sibling call in the first place. */
1162 if (code
== CALL_INSN
&& SIBLING_CALL_P (insn
))
1163 make_edge (edge_cache
, bb
, EXIT_BLOCK_PTR
,
1164 EDGE_ABNORMAL
| EDGE_ABNORMAL_CALL
);
1166 /* If this is a CALL_INSN, then mark it as reaching the active EH
1167 handler for this CALL_INSN. If we're handling non-call
1168 exceptions then any insn can reach any of the active handlers.
1170 Also mark the CALL_INSN as reaching any nonlocal goto handler. */
1172 else if (code
== CALL_INSN
|| flag_non_call_exceptions
)
1174 /* Add any appropriate EH edges. */
1175 make_eh_edge (edge_cache
, bb
, insn
);
1177 if (code
== CALL_INSN
&& nonlocal_goto_handler_labels
)
1179 /* ??? This could be made smarter: in some cases it's possible
1180 to tell that certain calls will not do a nonlocal goto.
1182 For example, if the nested functions that do the nonlocal
1183 gotos do not have their addresses taken, then only calls to
1184 those functions or to other nested functions that use them
1185 could possibly do nonlocal gotos. */
1186 /* We do know that a REG_EH_REGION note with a value less
1187 than 0 is guaranteed not to perform a non-local goto. */
1188 rtx note
= find_reg_note (insn
, REG_EH_REGION
, NULL_RTX
);
1189 if (!note
|| INTVAL (XEXP (note
, 0)) >= 0)
1190 for (x
= nonlocal_goto_handler_labels
; x
; x
= XEXP (x
, 1))
1191 make_label_edge (edge_cache
, bb
, XEXP (x
, 0),
1192 EDGE_ABNORMAL
| EDGE_ABNORMAL_CALL
);
1196 /* Find out if we can drop through to the next block. */
1197 insn
= next_nonnote_insn (insn
);
1198 if (!insn
|| (i
+ 1 == n_basic_blocks
&& force_fallthru
))
1199 make_edge (edge_cache
, bb
, EXIT_BLOCK_PTR
, EDGE_FALLTHRU
);
1200 else if (i
+ 1 < n_basic_blocks
)
1202 rtx tmp
= BLOCK_HEAD (i
+ 1);
1203 if (GET_CODE (tmp
) == NOTE
)
1204 tmp
= next_nonnote_insn (tmp
);
1205 if (force_fallthru
|| insn
== tmp
)
1206 make_edge (edge_cache
, bb
, BASIC_BLOCK (i
+ 1), EDGE_FALLTHRU
);
1211 sbitmap_vector_free (edge_cache
);
1214 /* Create an edge between two basic blocks. FLAGS are auxiliary information
1215 about the edge that is accumulated between calls. */
1218 make_edge (edge_cache
, src
, dst
, flags
)
1219 sbitmap
*edge_cache
;
1220 basic_block src
, dst
;
1226 /* Don't bother with edge cache for ENTRY or EXIT; there aren't that
1227 many edges to them, and we didn't allocate memory for it. */
1228 use_edge_cache
= (edge_cache
1229 && src
!= ENTRY_BLOCK_PTR
1230 && dst
!= EXIT_BLOCK_PTR
);
1232 /* Make sure we don't add duplicate edges. */
1233 switch (use_edge_cache
)
1236 /* Quick test for non-existance of the edge. */
1237 if (! TEST_BIT (edge_cache
[src
->index
], dst
->index
))
1240 /* The edge exists; early exit if no work to do. */
1246 for (e
= src
->succ
; e
; e
= e
->succ_next
)
1255 e
= (edge
) xcalloc (1, sizeof (*e
));
1258 e
->succ_next
= src
->succ
;
1259 e
->pred_next
= dst
->pred
;
1268 SET_BIT (edge_cache
[src
->index
], dst
->index
);
1271 /* Create an edge from a basic block to a label. */
1274 make_label_edge (edge_cache
, src
, label
, flags
)
1275 sbitmap
*edge_cache
;
1280 if (GET_CODE (label
) != CODE_LABEL
)
1283 /* If the label was never emitted, this insn is junk, but avoid a
1284 crash trying to refer to BLOCK_FOR_INSN (label). This can happen
1285 as a result of a syntax error and a diagnostic has already been
1288 if (INSN_UID (label
) == 0)
1291 make_edge (edge_cache
, src
, BLOCK_FOR_INSN (label
), flags
);
1294 /* Create the edges generated by INSN in REGION. */
1297 make_eh_edge (edge_cache
, src
, insn
)
1298 sbitmap
*edge_cache
;
1302 int is_call
= (GET_CODE (insn
) == CALL_INSN
? EDGE_ABNORMAL_CALL
: 0);
1305 handlers
= reachable_handlers (insn
);
1307 for (i
= handlers
; i
; i
= XEXP (i
, 1))
1308 make_label_edge (edge_cache
, src
, XEXP (i
, 0),
1309 EDGE_ABNORMAL
| EDGE_EH
| is_call
);
1311 free_INSN_LIST_list (&handlers
);
1314 /* Identify critical edges and set the bits appropriately. */
1317 mark_critical_edges ()
1319 int i
, n
= n_basic_blocks
;
1322 /* We begin with the entry block. This is not terribly important now,
1323 but could be if a front end (Fortran) implemented alternate entry
1325 bb
= ENTRY_BLOCK_PTR
;
1332 /* (1) Critical edges must have a source with multiple successors. */
1333 if (bb
->succ
&& bb
->succ
->succ_next
)
1335 for (e
= bb
->succ
; e
; e
= e
->succ_next
)
1337 /* (2) Critical edges must have a destination with multiple
1338 predecessors. Note that we know there is at least one
1339 predecessor -- the edge we followed to get here. */
1340 if (e
->dest
->pred
->pred_next
)
1341 e
->flags
|= EDGE_CRITICAL
;
1343 e
->flags
&= ~EDGE_CRITICAL
;
1348 for (e
= bb
->succ
; e
; e
= e
->succ_next
)
1349 e
->flags
&= ~EDGE_CRITICAL
;
1354 bb
= BASIC_BLOCK (i
);
1358 /* Split a block BB after insn INSN creating a new fallthru edge.
1359 Return the new edge. Note that to keep other parts of the compiler happy,
1360 this function renumbers all the basic blocks so that the new
1361 one has a number one greater than the block split. */
1364 split_block (bb
, insn
)
1374 /* There is no point splitting the block after its end. */
1375 if (bb
->end
== insn
)
1378 /* Create the new structures. */
1379 new_bb
= (basic_block
) obstack_alloc (&flow_obstack
, sizeof (*new_bb
));
1380 new_edge
= (edge
) xcalloc (1, sizeof (*new_edge
));
1383 memset (new_bb
, 0, sizeof (*new_bb
));
1385 new_bb
->head
= NEXT_INSN (insn
);
1386 new_bb
->end
= bb
->end
;
1389 new_bb
->succ
= bb
->succ
;
1390 bb
->succ
= new_edge
;
1391 new_bb
->pred
= new_edge
;
1392 new_bb
->count
= bb
->count
;
1393 new_bb
->loop_depth
= bb
->loop_depth
;
1396 new_edge
->dest
= new_bb
;
1397 new_edge
->flags
= EDGE_FALLTHRU
;
1398 new_edge
->probability
= REG_BR_PROB_BASE
;
1399 new_edge
->count
= bb
->count
;
1401 /* Redirect the src of the successor edges of bb to point to new_bb. */
1402 for (e
= new_bb
->succ
; e
; e
= e
->succ_next
)
1405 /* Place the new block just after the block being split. */
1406 VARRAY_GROW (basic_block_info
, ++n_basic_blocks
);
1408 /* Some parts of the compiler expect blocks to be number in
1409 sequential order so insert the new block immediately after the
1410 block being split.. */
1412 for (i
= n_basic_blocks
- 1; i
> j
+ 1; --i
)
1414 basic_block tmp
= BASIC_BLOCK (i
- 1);
1415 BASIC_BLOCK (i
) = tmp
;
1419 BASIC_BLOCK (i
) = new_bb
;
1422 /* Create the basic block note. */
1423 bb_note
= emit_note_before (NOTE_INSN_BASIC_BLOCK
,
1425 NOTE_BASIC_BLOCK (bb_note
) = new_bb
;
1426 new_bb
->head
= bb_note
;
1428 update_bb_for_insn (new_bb
);
1430 if (bb
->global_live_at_start
)
1432 new_bb
->global_live_at_start
= OBSTACK_ALLOC_REG_SET (&flow_obstack
);
1433 new_bb
->global_live_at_end
= OBSTACK_ALLOC_REG_SET (&flow_obstack
);
1434 COPY_REG_SET (new_bb
->global_live_at_end
, bb
->global_live_at_end
);
1436 /* We now have to calculate which registers are live at the end
1437 of the split basic block and at the start of the new basic
1438 block. Start with those registers that are known to be live
1439 at the end of the original basic block and get
1440 propagate_block to determine which registers are live. */
1441 COPY_REG_SET (new_bb
->global_live_at_start
, bb
->global_live_at_end
);
1442 propagate_block (new_bb
, new_bb
->global_live_at_start
, NULL
, NULL
, 0);
1443 COPY_REG_SET (bb
->global_live_at_end
,
1444 new_bb
->global_live_at_start
);
1451 /* Split a (typically critical) edge. Return the new block.
1452 Abort on abnormal edges.
1454 ??? The code generally expects to be called on critical edges.
1455 The case of a block ending in an unconditional jump to a
1456 block with multiple predecessors is not handled optimally. */
1459 split_edge (edge_in
)
1462 basic_block old_pred
, bb
, old_succ
;
1467 /* Abnormal edges cannot be split. */
1468 if ((edge_in
->flags
& EDGE_ABNORMAL
) != 0)
1471 old_pred
= edge_in
->src
;
1472 old_succ
= edge_in
->dest
;
1474 /* Remove the existing edge from the destination's pred list. */
1477 for (pp
= &old_succ
->pred
; *pp
!= edge_in
; pp
= &(*pp
)->pred_next
)
1479 *pp
= edge_in
->pred_next
;
1480 edge_in
->pred_next
= NULL
;
1483 /* Create the new structures. */
1484 bb
= (basic_block
) obstack_alloc (&flow_obstack
, sizeof (*bb
));
1485 edge_out
= (edge
) xcalloc (1, sizeof (*edge_out
));
1488 memset (bb
, 0, sizeof (*bb
));
1490 /* ??? This info is likely going to be out of date very soon. */
1491 if (old_succ
->global_live_at_start
)
1493 bb
->global_live_at_start
= OBSTACK_ALLOC_REG_SET (&flow_obstack
);
1494 bb
->global_live_at_end
= OBSTACK_ALLOC_REG_SET (&flow_obstack
);
1495 COPY_REG_SET (bb
->global_live_at_start
, old_succ
->global_live_at_start
);
1496 COPY_REG_SET (bb
->global_live_at_end
, old_succ
->global_live_at_start
);
1501 bb
->succ
= edge_out
;
1502 bb
->count
= edge_in
->count
;
1505 edge_in
->flags
&= ~EDGE_CRITICAL
;
1507 edge_out
->pred_next
= old_succ
->pred
;
1508 edge_out
->succ_next
= NULL
;
1510 edge_out
->dest
= old_succ
;
1511 edge_out
->flags
= EDGE_FALLTHRU
;
1512 edge_out
->probability
= REG_BR_PROB_BASE
;
1513 edge_out
->count
= edge_in
->count
;
1515 old_succ
->pred
= edge_out
;
1517 /* Tricky case -- if there existed a fallthru into the successor
1518 (and we're not it) we must add a new unconditional jump around
1519 the new block we're actually interested in.
1521 Further, if that edge is critical, this means a second new basic
1522 block must be created to hold it. In order to simplify correct
1523 insn placement, do this before we touch the existing basic block
1524 ordering for the block we were really wanting. */
1525 if ((edge_in
->flags
& EDGE_FALLTHRU
) == 0)
1528 for (e
= edge_out
->pred_next
; e
; e
= e
->pred_next
)
1529 if (e
->flags
& EDGE_FALLTHRU
)
1534 basic_block jump_block
;
1537 if ((e
->flags
& EDGE_CRITICAL
) == 0
1538 && e
->src
!= ENTRY_BLOCK_PTR
)
1540 /* Non critical -- we can simply add a jump to the end
1541 of the existing predecessor. */
1542 jump_block
= e
->src
;
1546 /* We need a new block to hold the jump. The simplest
1547 way to do the bulk of the work here is to recursively
1549 jump_block
= split_edge (e
);
1550 e
= jump_block
->succ
;
1553 /* Now add the jump insn ... */
1554 pos
= emit_jump_insn_after (gen_jump (old_succ
->head
),
1556 jump_block
->end
= pos
;
1557 if (basic_block_for_insn
)
1558 set_block_for_insn (pos
, jump_block
);
1559 emit_barrier_after (pos
);
1561 /* ... let jump know that label is in use, ... */
1562 JUMP_LABEL (pos
) = old_succ
->head
;
1563 ++LABEL_NUSES (old_succ
->head
);
1565 /* ... and clear fallthru on the outgoing edge. */
1566 e
->flags
&= ~EDGE_FALLTHRU
;
1568 /* Continue splitting the interesting edge. */
1572 /* Place the new block just in front of the successor. */
1573 VARRAY_GROW (basic_block_info
, ++n_basic_blocks
);
1574 if (old_succ
== EXIT_BLOCK_PTR
)
1575 j
= n_basic_blocks
- 1;
1577 j
= old_succ
->index
;
1578 for (i
= n_basic_blocks
- 1; i
> j
; --i
)
1580 basic_block tmp
= BASIC_BLOCK (i
- 1);
1581 BASIC_BLOCK (i
) = tmp
;
1584 BASIC_BLOCK (i
) = bb
;
1587 /* Create the basic block note.
1589 Where we place the note can have a noticable impact on the generated
1590 code. Consider this cfg:
1600 If we need to insert an insn on the edge from block 0 to block 1,
1601 we want to ensure the instructions we insert are outside of any
1602 loop notes that physically sit between block 0 and block 1. Otherwise
1603 we confuse the loop optimizer into thinking the loop is a phony. */
1604 if (old_succ
!= EXIT_BLOCK_PTR
1605 && PREV_INSN (old_succ
->head
)
1606 && GET_CODE (PREV_INSN (old_succ
->head
)) == NOTE
1607 && NOTE_LINE_NUMBER (PREV_INSN (old_succ
->head
)) == NOTE_INSN_LOOP_BEG
)
1608 bb_note
= emit_note_before (NOTE_INSN_BASIC_BLOCK
,
1609 PREV_INSN (old_succ
->head
));
1610 else if (old_succ
!= EXIT_BLOCK_PTR
)
1611 bb_note
= emit_note_before (NOTE_INSN_BASIC_BLOCK
, old_succ
->head
);
1613 bb_note
= emit_note_after (NOTE_INSN_BASIC_BLOCK
, get_last_insn ());
1614 NOTE_BASIC_BLOCK (bb_note
) = bb
;
1615 bb
->head
= bb
->end
= bb_note
;
1617 /* Not quite simple -- for non-fallthru edges, we must adjust the
1618 predecessor's jump instruction to target our new block. */
1619 if ((edge_in
->flags
& EDGE_FALLTHRU
) == 0)
1621 rtx tmp
, insn
= old_pred
->end
;
1622 rtx old_label
= old_succ
->head
;
1623 rtx new_label
= gen_label_rtx ();
1625 if (GET_CODE (insn
) != JUMP_INSN
)
1628 /* ??? Recognize a tablejump and adjust all matching cases. */
1629 if ((tmp
= JUMP_LABEL (insn
)) != NULL_RTX
1630 && (tmp
= NEXT_INSN (tmp
)) != NULL_RTX
1631 && GET_CODE (tmp
) == JUMP_INSN
1632 && (GET_CODE (PATTERN (tmp
)) == ADDR_VEC
1633 || GET_CODE (PATTERN (tmp
)) == ADDR_DIFF_VEC
))
1638 if (GET_CODE (PATTERN (tmp
)) == ADDR_VEC
)
1639 vec
= XVEC (PATTERN (tmp
), 0);
1641 vec
= XVEC (PATTERN (tmp
), 1);
1643 for (j
= GET_NUM_ELEM (vec
) - 1; j
>= 0; --j
)
1644 if (XEXP (RTVEC_ELT (vec
, j
), 0) == old_label
)
1646 RTVEC_ELT (vec
, j
) = gen_rtx_LABEL_REF (VOIDmode
, new_label
);
1647 --LABEL_NUSES (old_label
);
1648 ++LABEL_NUSES (new_label
);
1651 /* Handle casesi dispatch insns */
1652 if ((tmp
= single_set (insn
)) != NULL
1653 && SET_DEST (tmp
) == pc_rtx
1654 && GET_CODE (SET_SRC (tmp
)) == IF_THEN_ELSE
1655 && GET_CODE (XEXP (SET_SRC (tmp
), 2)) == LABEL_REF
1656 && XEXP (XEXP (SET_SRC (tmp
), 2), 0) == old_label
)
1658 XEXP (SET_SRC (tmp
), 2) = gen_rtx_LABEL_REF (VOIDmode
,
1660 --LABEL_NUSES (old_label
);
1661 ++LABEL_NUSES (new_label
);
1666 /* This would have indicated an abnormal edge. */
1667 if (computed_jump_p (insn
))
1670 /* A return instruction can't be redirected. */
1671 if (returnjump_p (insn
))
1674 /* If the insn doesn't go where we think, we're confused. */
1675 if (JUMP_LABEL (insn
) != old_label
)
1678 redirect_jump (insn
, new_label
, 0);
1681 emit_label_before (new_label
, bb_note
);
1682 bb
->head
= new_label
;
1688 /* Queue instructions for insertion on an edge between two basic blocks.
1689 The new instructions and basic blocks (if any) will not appear in the
1690 CFG until commit_edge_insertions is called. */
1693 insert_insn_on_edge (pattern
, e
)
1697 /* We cannot insert instructions on an abnormal critical edge.
1698 It will be easier to find the culprit if we die now. */
1699 if ((e
->flags
& (EDGE_ABNORMAL
|EDGE_CRITICAL
))
1700 == (EDGE_ABNORMAL
|EDGE_CRITICAL
))
1703 if (e
->insns
== NULL_RTX
)
1706 push_to_sequence (e
->insns
);
1708 emit_insn (pattern
);
1710 e
->insns
= get_insns ();
1714 /* Update the CFG for the instructions queued on edge E. */
1717 commit_one_edge_insertion (e
)
1720 rtx before
= NULL_RTX
, after
= NULL_RTX
, insns
, tmp
, last
;
1723 /* Pull the insns off the edge now since the edge might go away. */
1725 e
->insns
= NULL_RTX
;
1727 /* Figure out where to put these things. If the destination has
1728 one predecessor, insert there. Except for the exit block. */
1729 if (e
->dest
->pred
->pred_next
== NULL
1730 && e
->dest
!= EXIT_BLOCK_PTR
)
1734 /* Get the location correct wrt a code label, and "nice" wrt
1735 a basic block note, and before everything else. */
1737 if (GET_CODE (tmp
) == CODE_LABEL
)
1738 tmp
= NEXT_INSN (tmp
);
1739 if (NOTE_INSN_BASIC_BLOCK_P (tmp
))
1740 tmp
= NEXT_INSN (tmp
);
1741 if (tmp
== bb
->head
)
1744 after
= PREV_INSN (tmp
);
1747 /* If the source has one successor and the edge is not abnormal,
1748 insert there. Except for the entry block. */
1749 else if ((e
->flags
& EDGE_ABNORMAL
) == 0
1750 && e
->src
->succ
->succ_next
== NULL
1751 && e
->src
!= ENTRY_BLOCK_PTR
)
1754 /* It is possible to have a non-simple jump here. Consider a target
1755 where some forms of unconditional jumps clobber a register. This
1756 happens on the fr30 for example.
1758 We know this block has a single successor, so we can just emit
1759 the queued insns before the jump. */
1760 if (GET_CODE (bb
->end
) == JUMP_INSN
)
1766 /* We'd better be fallthru, or we've lost track of what's what. */
1767 if ((e
->flags
& EDGE_FALLTHRU
) == 0)
1774 /* Otherwise we must split the edge. */
1777 bb
= split_edge (e
);
1781 /* Now that we've found the spot, do the insertion. */
1783 /* Set the new block number for these insns, if structure is allocated. */
1784 if (basic_block_for_insn
)
1787 for (i
= insns
; i
!= NULL_RTX
; i
= NEXT_INSN (i
))
1788 set_block_for_insn (i
, bb
);
1793 emit_insns_before (insns
, before
);
1794 if (before
== bb
->head
)
1797 last
= prev_nonnote_insn (before
);
1801 last
= emit_insns_after (insns
, after
);
1802 if (after
== bb
->end
)
1806 if (returnjump_p (last
))
1808 /* ??? Remove all outgoing edges from BB and add one for EXIT.
1809 This is not currently a problem because this only happens
1810 for the (single) epilogue, which already has a fallthru edge
1814 if (e
->dest
!= EXIT_BLOCK_PTR
1815 || e
->succ_next
!= NULL
1816 || (e
->flags
& EDGE_FALLTHRU
) == 0)
1818 e
->flags
&= ~EDGE_FALLTHRU
;
1820 emit_barrier_after (last
);
1824 flow_delete_insn (before
);
1826 else if (GET_CODE (last
) == JUMP_INSN
)
1830 /* Update the CFG for all queued instructions. */
1833 commit_edge_insertions ()
1838 #ifdef ENABLE_CHECKING
1839 verify_flow_info ();
1843 bb
= ENTRY_BLOCK_PTR
;
1848 for (e
= bb
->succ
; e
; e
= next
)
1850 next
= e
->succ_next
;
1852 commit_one_edge_insertion (e
);
1855 if (++i
>= n_basic_blocks
)
1857 bb
= BASIC_BLOCK (i
);
1861 /* Add fake edges to the function exit for any non constant calls in
1862 the bitmap of blocks specified by BLOCKS or to the whole CFG if
1863 BLOCKS is zero. Return the nuber of blocks that were split. */
1866 flow_call_edges_add (blocks
)
1870 int blocks_split
= 0;
1874 /* Map bb indicies into basic block pointers since split_block
1875 will renumber the basic blocks. */
1877 bbs
= xmalloc (n_basic_blocks
* sizeof (*bbs
));
1881 for (i
= 0; i
< n_basic_blocks
; i
++)
1882 bbs
[bb_num
++] = BASIC_BLOCK (i
);
1886 EXECUTE_IF_SET_IN_SBITMAP (blocks
, 0, i
,
1888 bbs
[bb_num
++] = BASIC_BLOCK (i
);
1893 /* Now add fake edges to the function exit for any non constant
1894 calls since there is no way that we can determine if they will
1897 for (i
= 0; i
< bb_num
; i
++)
1899 basic_block bb
= bbs
[i
];
1903 for (insn
= bb
->end
; ; insn
= prev_insn
)
1905 prev_insn
= PREV_INSN (insn
);
1906 if (GET_CODE (insn
) == CALL_INSN
&& ! CONST_CALL_P (insn
))
1910 /* Note that the following may create a new basic block
1911 and renumber the existing basic blocks. */
1912 e
= split_block (bb
, insn
);
1916 make_edge (NULL
, bb
, EXIT_BLOCK_PTR
, EDGE_FAKE
);
1918 if (insn
== bb
->head
)
1924 verify_flow_info ();
1927 return blocks_split
;
1930 /* Delete all unreachable basic blocks. */
1933 delete_unreachable_blocks ()
1935 basic_block
*worklist
, *tos
;
1940 tos
= worklist
= (basic_block
*) xmalloc (sizeof (basic_block
) * n
);
1942 /* Use basic_block->aux as a marker. Clear them all. */
1944 for (i
= 0; i
< n
; ++i
)
1945 BASIC_BLOCK (i
)->aux
= NULL
;
1947 /* Add our starting points to the worklist. Almost always there will
1948 be only one. It isn't inconcievable that we might one day directly
1949 support Fortran alternate entry points. */
1951 for (e
= ENTRY_BLOCK_PTR
->succ
; e
; e
= e
->succ_next
)
1955 /* Mark the block with a handy non-null value. */
1959 /* Iterate: find everything reachable from what we've already seen. */
1961 while (tos
!= worklist
)
1963 basic_block b
= *--tos
;
1965 for (e
= b
->succ
; e
; e
= e
->succ_next
)
1973 /* Delete all unreachable basic blocks. Count down so that we
1974 don't interfere with the block renumbering that happens in
1975 flow_delete_block. */
1977 for (i
= n
- 1; i
>= 0; --i
)
1979 basic_block b
= BASIC_BLOCK (i
);
1982 /* This block was found. Tidy up the mark. */
1985 flow_delete_block (b
);
1988 tidy_fallthru_edges ();
1993 /* Return true if NOTE is not one of the ones that must be kept paired,
1994 so that we may simply delete them. */
1997 can_delete_note_p (note
)
2000 return (NOTE_LINE_NUMBER (note
) == NOTE_INSN_DELETED
2001 || NOTE_LINE_NUMBER (note
) == NOTE_INSN_BASIC_BLOCK
);
2004 /* Unlink a chain of insns between START and FINISH, leaving notes
2005 that must be paired. */
2008 flow_delete_insn_chain (start
, finish
)
2011 /* Unchain the insns one by one. It would be quicker to delete all
2012 of these with a single unchaining, rather than one at a time, but
2013 we need to keep the NOTE's. */
2019 next
= NEXT_INSN (start
);
2020 if (GET_CODE (start
) == NOTE
&& !can_delete_note_p (start
))
2022 else if (GET_CODE (start
) == CODE_LABEL
2023 && ! can_delete_label_p (start
))
2025 const char *name
= LABEL_NAME (start
);
2026 PUT_CODE (start
, NOTE
);
2027 NOTE_LINE_NUMBER (start
) = NOTE_INSN_DELETED_LABEL
;
2028 NOTE_SOURCE_FILE (start
) = name
;
2031 next
= flow_delete_insn (start
);
2033 if (start
== finish
)
2039 /* Delete the insns in a (non-live) block. We physically delete every
2040 non-deleted-note insn, and update the flow graph appropriately.
2042 Return nonzero if we deleted an exception handler. */
2044 /* ??? Preserving all such notes strikes me as wrong. It would be nice
2045 to post-process the stream to remove empty blocks, loops, ranges, etc. */
2048 flow_delete_block (b
)
2051 int deleted_handler
= 0;
2054 /* If the head of this block is a CODE_LABEL, then it might be the
2055 label for an exception handler which can't be reached.
2057 We need to remove the label from the exception_handler_label list
2058 and remove the associated NOTE_INSN_EH_REGION_BEG and
2059 NOTE_INSN_EH_REGION_END notes. */
2063 never_reached_warning (insn
);
2065 if (GET_CODE (insn
) == CODE_LABEL
)
2066 maybe_remove_eh_handler (insn
);
2068 /* Include any jump table following the basic block. */
2070 if (GET_CODE (end
) == JUMP_INSN
2071 && (tmp
= JUMP_LABEL (end
)) != NULL_RTX
2072 && (tmp
= NEXT_INSN (tmp
)) != NULL_RTX
2073 && GET_CODE (tmp
) == JUMP_INSN
2074 && (GET_CODE (PATTERN (tmp
)) == ADDR_VEC
2075 || GET_CODE (PATTERN (tmp
)) == ADDR_DIFF_VEC
))
2078 /* Include any barrier that may follow the basic block. */
2079 tmp
= next_nonnote_insn (end
);
2080 if (tmp
&& GET_CODE (tmp
) == BARRIER
)
2083 /* Selectively delete the entire chain. */
2084 flow_delete_insn_chain (insn
, end
);
2086 /* Remove the edges into and out of this block. Note that there may
2087 indeed be edges in, if we are removing an unreachable loop. */
2091 for (e
= b
->pred
; e
; e
= next
)
2093 for (q
= &e
->src
->succ
; *q
!= e
; q
= &(*q
)->succ_next
)
2096 next
= e
->pred_next
;
2100 for (e
= b
->succ
; e
; e
= next
)
2102 for (q
= &e
->dest
->pred
; *q
!= e
; q
= &(*q
)->pred_next
)
2105 next
= e
->succ_next
;
2114 /* Remove the basic block from the array, and compact behind it. */
2117 return deleted_handler
;
2120 /* Remove block B from the basic block array and compact behind it. */
2126 int i
, n
= n_basic_blocks
;
2128 for (i
= b
->index
; i
+ 1 < n
; ++i
)
2130 basic_block x
= BASIC_BLOCK (i
+ 1);
2131 BASIC_BLOCK (i
) = x
;
2135 basic_block_info
->num_elements
--;
2139 /* Delete INSN by patching it out. Return the next insn. */
2142 flow_delete_insn (insn
)
2145 rtx prev
= PREV_INSN (insn
);
2146 rtx next
= NEXT_INSN (insn
);
2149 PREV_INSN (insn
) = NULL_RTX
;
2150 NEXT_INSN (insn
) = NULL_RTX
;
2151 INSN_DELETED_P (insn
) = 1;
2154 NEXT_INSN (prev
) = next
;
2156 PREV_INSN (next
) = prev
;
2158 set_last_insn (prev
);
2160 if (GET_CODE (insn
) == CODE_LABEL
)
2161 remove_node_from_expr_list (insn
, &nonlocal_goto_handler_labels
);
2163 /* If deleting a jump, decrement the use count of the label. Deleting
2164 the label itself should happen in the normal course of block merging. */
2165 if (GET_CODE (insn
) == JUMP_INSN
2166 && JUMP_LABEL (insn
)
2167 && GET_CODE (JUMP_LABEL (insn
)) == CODE_LABEL
)
2168 LABEL_NUSES (JUMP_LABEL (insn
))--;
2170 /* Also if deleting an insn that references a label. */
2171 else if ((note
= find_reg_note (insn
, REG_LABEL
, NULL_RTX
)) != NULL_RTX
2172 && GET_CODE (XEXP (note
, 0)) == CODE_LABEL
)
2173 LABEL_NUSES (XEXP (note
, 0))--;
2178 /* True if a given label can be deleted. */
2181 can_delete_label_p (label
)
2186 if (LABEL_PRESERVE_P (label
))
2189 for (x
= forced_labels
; x
; x
= XEXP (x
, 1))
2190 if (label
== XEXP (x
, 0))
2192 for (x
= label_value_list
; x
; x
= XEXP (x
, 1))
2193 if (label
== XEXP (x
, 0))
2195 for (x
= exception_handler_labels
; x
; x
= XEXP (x
, 1))
2196 if (label
== XEXP (x
, 0))
2199 /* User declared labels must be preserved. */
2200 if (LABEL_NAME (label
) != 0)
2207 tail_recursion_label_p (label
)
2212 for (x
= tail_recursion_label_list
; x
; x
= XEXP (x
, 1))
2213 if (label
== XEXP (x
, 0))
2219 /* Blocks A and B are to be merged into a single block A. The insns
2220 are already contiguous, hence `nomove'. */
2223 merge_blocks_nomove (a
, b
)
2227 rtx b_head
, b_end
, a_end
;
2228 rtx del_first
= NULL_RTX
, del_last
= NULL_RTX
;
2231 /* If there was a CODE_LABEL beginning B, delete it. */
2234 if (GET_CODE (b_head
) == CODE_LABEL
)
2236 /* Detect basic blocks with nothing but a label. This can happen
2237 in particular at the end of a function. */
2238 if (b_head
== b_end
)
2240 del_first
= del_last
= b_head
;
2241 b_head
= NEXT_INSN (b_head
);
2244 /* Delete the basic block note. */
2245 if (NOTE_INSN_BASIC_BLOCK_P (b_head
))
2247 if (b_head
== b_end
)
2252 b_head
= NEXT_INSN (b_head
);
2255 /* If there was a jump out of A, delete it. */
2257 if (GET_CODE (a_end
) == JUMP_INSN
)
2261 for (prev
= PREV_INSN (a_end
); ; prev
= PREV_INSN (prev
))
2262 if (GET_CODE (prev
) != NOTE
2263 || NOTE_LINE_NUMBER (prev
) == NOTE_INSN_BASIC_BLOCK
2270 /* If this was a conditional jump, we need to also delete
2271 the insn that set cc0. */
2272 if (prev
&& sets_cc0_p (prev
))
2275 prev
= prev_nonnote_insn (prev
);
2284 else if (GET_CODE (NEXT_INSN (a_end
)) == BARRIER
)
2285 del_first
= NEXT_INSN (a_end
);
2287 /* Delete everything marked above as well as crap that might be
2288 hanging out between the two blocks. */
2289 flow_delete_insn_chain (del_first
, del_last
);
2291 /* Normally there should only be one successor of A and that is B, but
2292 partway though the merge of blocks for conditional_execution we'll
2293 be merging a TEST block with THEN and ELSE successors. Free the
2294 whole lot of them and hope the caller knows what they're doing. */
2296 remove_edge (a
->succ
);
2298 /* Adjust the edges out of B for the new owner. */
2299 for (e
= b
->succ
; e
; e
= e
->succ_next
)
2303 /* B hasn't quite yet ceased to exist. Attempt to prevent mishap. */
2304 b
->pred
= b
->succ
= NULL
;
2306 /* Reassociate the insns of B with A. */
2309 if (basic_block_for_insn
)
2311 BLOCK_FOR_INSN (b_head
) = a
;
2312 while (b_head
!= b_end
)
2314 b_head
= NEXT_INSN (b_head
);
2315 BLOCK_FOR_INSN (b_head
) = a
;
2325 /* Blocks A and B are to be merged into a single block. A has no incoming
2326 fallthru edge, so it can be moved before B without adding or modifying
2327 any jumps (aside from the jump from A to B). */
2330 merge_blocks_move_predecessor_nojumps (a
, b
)
2333 rtx start
, end
, barrier
;
2339 barrier
= next_nonnote_insn (end
);
2340 if (GET_CODE (barrier
) != BARRIER
)
2342 flow_delete_insn (barrier
);
2344 /* Move block and loop notes out of the chain so that we do not
2345 disturb their order.
2347 ??? A better solution would be to squeeze out all the non-nested notes
2348 and adjust the block trees appropriately. Even better would be to have
2349 a tighter connection between block trees and rtl so that this is not
2351 start
= squeeze_notes (start
, end
);
2353 /* Scramble the insn chain. */
2354 if (end
!= PREV_INSN (b
->head
))
2355 reorder_insns (start
, end
, PREV_INSN (b
->head
));
2359 fprintf (rtl_dump_file
, "Moved block %d before %d and merged.\n",
2360 a
->index
, b
->index
);
2363 /* Swap the records for the two blocks around. Although we are deleting B,
2364 A is now where B was and we want to compact the BB array from where
2366 BASIC_BLOCK (a
->index
) = b
;
2367 BASIC_BLOCK (b
->index
) = a
;
2369 a
->index
= b
->index
;
2372 /* Now blocks A and B are contiguous. Merge them. */
2373 merge_blocks_nomove (a
, b
);
2378 /* Blocks A and B are to be merged into a single block. B has no outgoing
2379 fallthru edge, so it can be moved after A without adding or modifying
2380 any jumps (aside from the jump from A to B). */
2383 merge_blocks_move_successor_nojumps (a
, b
)
2386 rtx start
, end
, barrier
;
2390 barrier
= NEXT_INSN (end
);
2392 /* Recognize a jump table following block B. */
2393 if (GET_CODE (barrier
) == CODE_LABEL
2394 && NEXT_INSN (barrier
)
2395 && GET_CODE (NEXT_INSN (barrier
)) == JUMP_INSN
2396 && (GET_CODE (PATTERN (NEXT_INSN (barrier
))) == ADDR_VEC
2397 || GET_CODE (PATTERN (NEXT_INSN (barrier
))) == ADDR_DIFF_VEC
))
2399 end
= NEXT_INSN (barrier
);
2400 barrier
= NEXT_INSN (end
);
2403 /* There had better have been a barrier there. Delete it. */
2404 if (GET_CODE (barrier
) != BARRIER
)
2406 flow_delete_insn (barrier
);
2408 /* Move block and loop notes out of the chain so that we do not
2409 disturb their order.
2411 ??? A better solution would be to squeeze out all the non-nested notes
2412 and adjust the block trees appropriately. Even better would be to have
2413 a tighter connection between block trees and rtl so that this is not
2415 start
= squeeze_notes (start
, end
);
2417 /* Scramble the insn chain. */
2418 reorder_insns (start
, end
, a
->end
);
2420 /* Now blocks A and B are contiguous. Merge them. */
2421 merge_blocks_nomove (a
, b
);
2425 fprintf (rtl_dump_file
, "Moved block %d after %d and merged.\n",
2426 b
->index
, a
->index
);
2432 /* Attempt to merge basic blocks that are potentially non-adjacent.
2433 Return true iff the attempt succeeded. */
2436 merge_blocks (e
, b
, c
)
2440 /* If C has a tail recursion label, do not merge. There is no
2441 edge recorded from the call_placeholder back to this label, as
2442 that would make optimize_sibling_and_tail_recursive_calls more
2443 complex for no gain. */
2444 if (GET_CODE (c
->head
) == CODE_LABEL
2445 && tail_recursion_label_p (c
->head
))
2448 /* If B has a fallthru edge to C, no need to move anything. */
2449 if (e
->flags
& EDGE_FALLTHRU
)
2451 merge_blocks_nomove (b
, c
);
2455 fprintf (rtl_dump_file
, "Merged %d and %d without moving.\n",
2456 b
->index
, c
->index
);
2464 int c_has_outgoing_fallthru
;
2465 int b_has_incoming_fallthru
;
2467 /* We must make sure to not munge nesting of exception regions,
2468 lexical blocks, and loop notes.
2470 The first is taken care of by requiring that the active eh
2471 region at the end of one block always matches the active eh
2472 region at the beginning of the next block.
2474 The later two are taken care of by squeezing out all the notes. */
2476 /* ??? A throw/catch edge (or any abnormal edge) should be rarely
2477 executed and we may want to treat blocks which have two out
2478 edges, one normal, one abnormal as only having one edge for
2479 block merging purposes. */
2481 for (tmp_edge
= c
->succ
; tmp_edge
; tmp_edge
= tmp_edge
->succ_next
)
2482 if (tmp_edge
->flags
& EDGE_FALLTHRU
)
2484 c_has_outgoing_fallthru
= (tmp_edge
!= NULL
);
2486 for (tmp_edge
= b
->pred
; tmp_edge
; tmp_edge
= tmp_edge
->pred_next
)
2487 if (tmp_edge
->flags
& EDGE_FALLTHRU
)
2489 b_has_incoming_fallthru
= (tmp_edge
!= NULL
);
2491 /* If B does not have an incoming fallthru, then it can be moved
2492 immediately before C without introducing or modifying jumps.
2493 C cannot be the first block, so we do not have to worry about
2494 accessing a non-existent block. */
2495 if (! b_has_incoming_fallthru
)
2496 return merge_blocks_move_predecessor_nojumps (b
, c
);
2498 /* Otherwise, we're going to try to move C after B. If C does
2499 not have an outgoing fallthru, then it can be moved
2500 immediately after B without introducing or modifying jumps. */
2501 if (! c_has_outgoing_fallthru
)
2502 return merge_blocks_move_successor_nojumps (b
, c
);
2504 /* Otherwise, we'll need to insert an extra jump, and possibly
2505 a new block to contain it. */
2506 /* ??? Not implemented yet. */
2512 /* Top level driver for merge_blocks. */
2519 /* Attempt to merge blocks as made possible by edge removal. If a block
2520 has only one successor, and the successor has only one predecessor,
2521 they may be combined. */
2523 for (i
= 0; i
< n_basic_blocks
;)
2525 basic_block c
, b
= BASIC_BLOCK (i
);
2528 /* A loop because chains of blocks might be combineable. */
2529 while ((s
= b
->succ
) != NULL
2530 && s
->succ_next
== NULL
2531 && (s
->flags
& EDGE_EH
) == 0
2532 && (c
= s
->dest
) != EXIT_BLOCK_PTR
2533 && c
->pred
->pred_next
== NULL
2534 /* If the jump insn has side effects, we can't kill the edge. */
2535 && (GET_CODE (b
->end
) != JUMP_INSN
2536 || onlyjump_p (b
->end
))
2537 && merge_blocks (s
, b
, c
))
2540 /* Don't get confused by the index shift caused by deleting blocks. */
2545 /* The given edge should potentially be a fallthru edge. If that is in
2546 fact true, delete the jump and barriers that are in the way. */
2549 tidy_fallthru_edge (e
, b
, c
)
2555 /* ??? In a late-running flow pass, other folks may have deleted basic
2556 blocks by nopping out blocks, leaving multiple BARRIERs between here
2557 and the target label. They ought to be chastized and fixed.
2559 We can also wind up with a sequence of undeletable labels between
2560 one block and the next.
2562 So search through a sequence of barriers, labels, and notes for
2563 the head of block C and assert that we really do fall through. */
2565 if (next_real_insn (b
->end
) != next_real_insn (PREV_INSN (c
->head
)))
2568 /* Remove what will soon cease being the jump insn from the source block.
2569 If block B consisted only of this single jump, turn it into a deleted
2572 if (GET_CODE (q
) == JUMP_INSN
2574 && (any_uncondjump_p (q
)
2575 || (b
->succ
== e
&& e
->succ_next
== NULL
)))
2578 /* If this was a conditional jump, we need to also delete
2579 the insn that set cc0. */
2580 if (any_condjump_p (q
) && sets_cc0_p (PREV_INSN (q
)))
2587 NOTE_LINE_NUMBER (q
) = NOTE_INSN_DELETED
;
2588 NOTE_SOURCE_FILE (q
) = 0;
2594 /* We don't want a block to end on a line-number note since that has
2595 the potential of changing the code between -g and not -g. */
2596 while (GET_CODE (q
) == NOTE
&& NOTE_LINE_NUMBER (q
) >= 0)
2603 /* Selectively unlink the sequence. */
2604 if (q
!= PREV_INSN (c
->head
))
2605 flow_delete_insn_chain (NEXT_INSN (q
), PREV_INSN (c
->head
));
2607 e
->flags
|= EDGE_FALLTHRU
;
2610 /* Fix up edges that now fall through, or rather should now fall through
2611 but previously required a jump around now deleted blocks. Simplify
2612 the search by only examining blocks numerically adjacent, since this
2613 is how find_basic_blocks created them. */
2616 tidy_fallthru_edges ()
2620 for (i
= 1; i
< n_basic_blocks
; ++i
)
2622 basic_block b
= BASIC_BLOCK (i
- 1);
2623 basic_block c
= BASIC_BLOCK (i
);
2626 /* We care about simple conditional or unconditional jumps with
2629 If we had a conditional branch to the next instruction when
2630 find_basic_blocks was called, then there will only be one
2631 out edge for the block which ended with the conditional
2632 branch (since we do not create duplicate edges).
2634 Furthermore, the edge will be marked as a fallthru because we
2635 merge the flags for the duplicate edges. So we do not want to
2636 check that the edge is not a FALLTHRU edge. */
2637 if ((s
= b
->succ
) != NULL
2638 && ! (s
->flags
& EDGE_COMPLEX
)
2639 && s
->succ_next
== NULL
2641 /* If the jump insn has side effects, we can't tidy the edge. */
2642 && (GET_CODE (b
->end
) != JUMP_INSN
2643 || onlyjump_p (b
->end
)))
2644 tidy_fallthru_edge (s
, b
, c
);
2648 /* Perform data flow analysis.
2649 F is the first insn of the function; FLAGS is a set of PROP_* flags
2650 to be used in accumulating flow info. */
2653 life_analysis (f
, file
, flags
)
2658 #ifdef ELIMINABLE_REGS
2660 static struct {int from
, to
; } eliminables
[] = ELIMINABLE_REGS
;
2663 /* Record which registers will be eliminated. We use this in
2666 CLEAR_HARD_REG_SET (elim_reg_set
);
2668 #ifdef ELIMINABLE_REGS
2669 for (i
= 0; i
< (int) ARRAY_SIZE (eliminables
); i
++)
2670 SET_HARD_REG_BIT (elim_reg_set
, eliminables
[i
].from
);
2672 SET_HARD_REG_BIT (elim_reg_set
, FRAME_POINTER_REGNUM
);
2676 flags
&= ~(PROP_LOG_LINKS
| PROP_AUTOINC
);
2678 /* The post-reload life analysis have (on a global basis) the same
2679 registers live as was computed by reload itself. elimination
2680 Otherwise offsets and such may be incorrect.
2682 Reload will make some registers as live even though they do not
2685 We don't want to create new auto-incs after reload, since they
2686 are unlikely to be useful and can cause problems with shared
2688 if (reload_completed
)
2689 flags
&= ~(PROP_REG_INFO
| PROP_AUTOINC
);
2691 /* We want alias analysis information for local dead store elimination. */
2692 if (optimize
&& (flags
& PROP_SCAN_DEAD_CODE
))
2693 init_alias_analysis ();
2695 /* Always remove no-op moves. Do this before other processing so
2696 that we don't have to keep re-scanning them. */
2697 delete_noop_moves (f
);
2699 /* Some targets can emit simpler epilogues if they know that sp was
2700 not ever modified during the function. After reload, of course,
2701 we've already emitted the epilogue so there's no sense searching. */
2702 if (! reload_completed
)
2703 notice_stack_pointer_modification (f
);
2705 /* Allocate and zero out data structures that will record the
2706 data from lifetime analysis. */
2707 allocate_reg_life_data ();
2708 allocate_bb_life_data ();
2710 /* Find the set of registers live on function exit. */
2711 mark_regs_live_at_end (EXIT_BLOCK_PTR
->global_live_at_start
);
2713 /* "Update" life info from zero. It'd be nice to begin the
2714 relaxation with just the exit and noreturn blocks, but that set
2715 is not immediately handy. */
2717 if (flags
& PROP_REG_INFO
)
2718 memset (regs_ever_live
, 0, sizeof (regs_ever_live
));
2719 update_life_info (NULL
, UPDATE_LIFE_GLOBAL
, flags
);
2722 if (optimize
&& (flags
& PROP_SCAN_DEAD_CODE
))
2723 end_alias_analysis ();
2726 dump_flow_info (file
);
2728 free_basic_block_vars (1);
2731 /* A subroutine of verify_wide_reg, called through for_each_rtx.
2732 Search for REGNO. If found, abort if it is not wider than word_mode. */
2735 verify_wide_reg_1 (px
, pregno
)
2740 unsigned int regno
= *(int *) pregno
;
2742 if (GET_CODE (x
) == REG
&& REGNO (x
) == regno
)
2744 if (GET_MODE_BITSIZE (GET_MODE (x
)) <= BITS_PER_WORD
)
2751 /* A subroutine of verify_local_live_at_start. Search through insns
2752 between HEAD and END looking for register REGNO. */
2755 verify_wide_reg (regno
, head
, end
)
2762 && for_each_rtx (&PATTERN (head
), verify_wide_reg_1
, ®no
))
2766 head
= NEXT_INSN (head
);
2769 /* We didn't find the register at all. Something's way screwy. */
2771 fprintf (rtl_dump_file
, "Aborting in verify_wide_reg; reg %d\n", regno
);
2772 print_rtl_and_abort ();
2775 /* A subroutine of update_life_info. Verify that there are no untoward
2776 changes in live_at_start during a local update. */
2779 verify_local_live_at_start (new_live_at_start
, bb
)
2780 regset new_live_at_start
;
2783 if (reload_completed
)
2785 /* After reload, there are no pseudos, nor subregs of multi-word
2786 registers. The regsets should exactly match. */
2787 if (! REG_SET_EQUAL_P (new_live_at_start
, bb
->global_live_at_start
))
2791 fprintf (rtl_dump_file
,
2792 "live_at_start mismatch in bb %d, aborting\n",
2794 debug_bitmap_file (rtl_dump_file
, bb
->global_live_at_start
);
2795 debug_bitmap_file (rtl_dump_file
, new_live_at_start
);
2797 print_rtl_and_abort ();
2804 /* Find the set of changed registers. */
2805 XOR_REG_SET (new_live_at_start
, bb
->global_live_at_start
);
2807 EXECUTE_IF_SET_IN_REG_SET (new_live_at_start
, 0, i
,
2809 /* No registers should die. */
2810 if (REGNO_REG_SET_P (bb
->global_live_at_start
, i
))
2813 fprintf (rtl_dump_file
,
2814 "Register %d died unexpectedly in block %d\n", i
,
2816 print_rtl_and_abort ();
2819 /* Verify that the now-live register is wider than word_mode. */
2820 verify_wide_reg (i
, bb
->head
, bb
->end
);
2825 /* Updates life information starting with the basic blocks set in BLOCKS.
2826 If BLOCKS is null, consider it to be the universal set.
2828 If EXTENT is UPDATE_LIFE_LOCAL, such as after splitting or peepholeing,
2829 we are only expecting local modifications to basic blocks. If we find
2830 extra registers live at the beginning of a block, then we either killed
2831 useful data, or we have a broken split that wants data not provided.
2832 If we find registers removed from live_at_start, that means we have
2833 a broken peephole that is killing a register it shouldn't.
2835 ??? This is not true in one situation -- when a pre-reload splitter
2836 generates subregs of a multi-word pseudo, current life analysis will
2837 lose the kill. So we _can_ have a pseudo go live. How irritating.
2839 Including PROP_REG_INFO does not properly refresh regs_ever_live
2840 unless the caller resets it to zero. */
2843 update_life_info (blocks
, extent
, prop_flags
)
2845 enum update_life_extent extent
;
2849 regset_head tmp_head
;
2852 tmp
= INITIALIZE_REG_SET (tmp_head
);
2854 /* For a global update, we go through the relaxation process again. */
2855 if (extent
!= UPDATE_LIFE_LOCAL
)
2857 calculate_global_regs_live (blocks
, blocks
,
2858 prop_flags
& PROP_SCAN_DEAD_CODE
);
2860 /* If asked, remove notes from the blocks we'll update. */
2861 if (extent
== UPDATE_LIFE_GLOBAL_RM_NOTES
)
2862 count_or_remove_death_notes (blocks
, 1);
2867 EXECUTE_IF_SET_IN_SBITMAP (blocks
, 0, i
,
2869 basic_block bb
= BASIC_BLOCK (i
);
2871 COPY_REG_SET (tmp
, bb
->global_live_at_end
);
2872 propagate_block (bb
, tmp
, NULL
, NULL
, prop_flags
);
2874 if (extent
== UPDATE_LIFE_LOCAL
)
2875 verify_local_live_at_start (tmp
, bb
);
2880 for (i
= n_basic_blocks
- 1; i
>= 0; --i
)
2882 basic_block bb
= BASIC_BLOCK (i
);
2884 COPY_REG_SET (tmp
, bb
->global_live_at_end
);
2885 propagate_block (bb
, tmp
, NULL
, NULL
, prop_flags
);
2887 if (extent
== UPDATE_LIFE_LOCAL
)
2888 verify_local_live_at_start (tmp
, bb
);
2894 if (prop_flags
& PROP_REG_INFO
)
2896 /* The only pseudos that are live at the beginning of the function
2897 are those that were not set anywhere in the function. local-alloc
2898 doesn't know how to handle these correctly, so mark them as not
2899 local to any one basic block. */
2900 EXECUTE_IF_SET_IN_REG_SET (ENTRY_BLOCK_PTR
->global_live_at_end
,
2901 FIRST_PSEUDO_REGISTER
, i
,
2902 { REG_BASIC_BLOCK (i
) = REG_BLOCK_GLOBAL
; });
2904 /* We have a problem with any pseudoreg that lives across the setjmp.
2905 ANSI says that if a user variable does not change in value between
2906 the setjmp and the longjmp, then the longjmp preserves it. This
2907 includes longjmp from a place where the pseudo appears dead.
2908 (In principle, the value still exists if it is in scope.)
2909 If the pseudo goes in a hard reg, some other value may occupy
2910 that hard reg where this pseudo is dead, thus clobbering the pseudo.
2911 Conclusion: such a pseudo must not go in a hard reg. */
2912 EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp
,
2913 FIRST_PSEUDO_REGISTER
, i
,
2915 if (regno_reg_rtx
[i
] != 0)
2917 REG_LIVE_LENGTH (i
) = -1;
2918 REG_BASIC_BLOCK (i
) = REG_BLOCK_UNKNOWN
;
2924 /* Free the variables allocated by find_basic_blocks.
2926 KEEP_HEAD_END_P is non-zero if basic_block_info is not to be freed. */
2929 free_basic_block_vars (keep_head_end_p
)
2930 int keep_head_end_p
;
2932 if (basic_block_for_insn
)
2934 VARRAY_FREE (basic_block_for_insn
);
2935 basic_block_for_insn
= NULL
;
2938 if (! keep_head_end_p
)
2941 VARRAY_FREE (basic_block_info
);
2944 ENTRY_BLOCK_PTR
->aux
= NULL
;
2945 ENTRY_BLOCK_PTR
->global_live_at_end
= NULL
;
2946 EXIT_BLOCK_PTR
->aux
= NULL
;
2947 EXIT_BLOCK_PTR
->global_live_at_start
= NULL
;
2951 /* Return nonzero if the destination of SET equals the source. */
2957 rtx src
= SET_SRC (set
);
2958 rtx dst
= SET_DEST (set
);
2960 if (GET_CODE (src
) == SUBREG
&& GET_CODE (dst
) == SUBREG
)
2962 if (SUBREG_WORD (src
) != SUBREG_WORD (dst
))
2964 src
= SUBREG_REG (src
);
2965 dst
= SUBREG_REG (dst
);
2968 return (GET_CODE (src
) == REG
&& GET_CODE (dst
) == REG
2969 && REGNO (src
) == REGNO (dst
));
2972 /* Return nonzero if an insn consists only of SETs, each of which only sets a
2979 rtx pat
= PATTERN (insn
);
2981 /* Insns carrying these notes are useful later on. */
2982 if (find_reg_note (insn
, REG_EQUAL
, NULL_RTX
))
2985 if (GET_CODE (pat
) == SET
&& set_noop_p (pat
))
2988 if (GET_CODE (pat
) == PARALLEL
)
2991 /* If nothing but SETs of registers to themselves,
2992 this insn can also be deleted. */
2993 for (i
= 0; i
< XVECLEN (pat
, 0); i
++)
2995 rtx tem
= XVECEXP (pat
, 0, i
);
2997 if (GET_CODE (tem
) == USE
2998 || GET_CODE (tem
) == CLOBBER
)
3001 if (GET_CODE (tem
) != SET
|| ! set_noop_p (tem
))
3010 /* Delete any insns that copy a register to itself. */
3013 delete_noop_moves (f
)
3017 for (insn
= f
; insn
; insn
= NEXT_INSN (insn
))
3019 if (GET_CODE (insn
) == INSN
&& noop_move_p (insn
))
3021 PUT_CODE (insn
, NOTE
);
3022 NOTE_LINE_NUMBER (insn
) = NOTE_INSN_DELETED
;
3023 NOTE_SOURCE_FILE (insn
) = 0;
3028 /* Determine if the stack pointer is constant over the life of the function.
3029 Only useful before prologues have been emitted. */
3032 notice_stack_pointer_modification_1 (x
, pat
, data
)
3034 rtx pat ATTRIBUTE_UNUSED
;
3035 void *data ATTRIBUTE_UNUSED
;
3037 if (x
== stack_pointer_rtx
3038 /* The stack pointer is only modified indirectly as the result
3039 of a push until later in flow. See the comments in rtl.texi
3040 regarding Embedded Side-Effects on Addresses. */
3041 || (GET_CODE (x
) == MEM
3042 && GET_RTX_CLASS (GET_CODE (XEXP (x
, 0))) == 'a'
3043 && XEXP (XEXP (x
, 0), 0) == stack_pointer_rtx
))
3044 current_function_sp_is_unchanging
= 0;
3048 notice_stack_pointer_modification (f
)
3053 /* Assume that the stack pointer is unchanging if alloca hasn't
3055 current_function_sp_is_unchanging
= !current_function_calls_alloca
;
3056 if (! current_function_sp_is_unchanging
)
3059 for (insn
= f
; insn
; insn
= NEXT_INSN (insn
))
3063 /* Check if insn modifies the stack pointer. */
3064 note_stores (PATTERN (insn
), notice_stack_pointer_modification_1
,
3066 if (! current_function_sp_is_unchanging
)
3072 /* Mark a register in SET. Hard registers in large modes get all
3073 of their component registers set as well. */
3076 mark_reg (reg
, xset
)
3080 regset set
= (regset
) xset
;
3081 int regno
= REGNO (reg
);
3083 if (GET_MODE (reg
) == BLKmode
)
3086 SET_REGNO_REG_SET (set
, regno
);
3087 if (regno
< FIRST_PSEUDO_REGISTER
)
3089 int n
= HARD_REGNO_NREGS (regno
, GET_MODE (reg
));
3091 SET_REGNO_REG_SET (set
, regno
+ n
);
3095 /* Mark those regs which are needed at the end of the function as live
3096 at the end of the last basic block. */
3099 mark_regs_live_at_end (set
)
3104 /* If exiting needs the right stack value, consider the stack pointer
3105 live at the end of the function. */
3106 if ((HAVE_epilogue
&& reload_completed
)
3107 || ! EXIT_IGNORE_STACK
3108 || (! FRAME_POINTER_REQUIRED
3109 && ! current_function_calls_alloca
3110 && flag_omit_frame_pointer
)
3111 || current_function_sp_is_unchanging
)
3113 SET_REGNO_REG_SET (set
, STACK_POINTER_REGNUM
);
3116 /* Mark the frame pointer if needed at the end of the function. If
3117 we end up eliminating it, it will be removed from the live list
3118 of each basic block by reload. */
3120 if (! reload_completed
|| frame_pointer_needed
)
3122 SET_REGNO_REG_SET (set
, FRAME_POINTER_REGNUM
);
3123 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3124 /* If they are different, also mark the hard frame pointer as live. */
3125 if (! LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM
))
3126 SET_REGNO_REG_SET (set
, HARD_FRAME_POINTER_REGNUM
);
3130 #ifdef PIC_OFFSET_TABLE_REGNUM
3131 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
3132 /* Many architectures have a GP register even without flag_pic.
3133 Assume the pic register is not in use, or will be handled by
3134 other means, if it is not fixed. */
3135 if (fixed_regs
[PIC_OFFSET_TABLE_REGNUM
])
3136 SET_REGNO_REG_SET (set
, PIC_OFFSET_TABLE_REGNUM
);
3140 /* Mark all global registers, and all registers used by the epilogue
3141 as being live at the end of the function since they may be
3142 referenced by our caller. */
3143 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
3144 if (global_regs
[i
] || EPILOGUE_USES (i
))
3145 SET_REGNO_REG_SET (set
, i
);
3147 if (HAVE_epilogue
&& reload_completed
)
3149 /* Mark all call-saved registers that we actually used. */
3150 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
3151 if (regs_ever_live
[i
] && ! call_used_regs
[i
] && ! LOCAL_REGNO (i
))
3152 SET_REGNO_REG_SET (set
, i
);
3155 #ifdef EH_RETURN_DATA_REGNO
3156 /* Mark the registers that will contain data for the handler. */
3157 if (reload_completed
&& current_function_calls_eh_return
)
3160 unsigned regno
= EH_RETURN_DATA_REGNO(i
);
3161 if (regno
== INVALID_REGNUM
)
3163 SET_REGNO_REG_SET (set
, regno
);
3166 #ifdef EH_RETURN_STACKADJ_RTX
3167 if ((! HAVE_epilogue
|| ! reload_completed
)
3168 && current_function_calls_eh_return
)
3170 rtx tmp
= EH_RETURN_STACKADJ_RTX
;
3171 if (tmp
&& REG_P (tmp
))
3172 mark_reg (tmp
, set
);
3175 #ifdef EH_RETURN_HANDLER_RTX
3176 if ((! HAVE_epilogue
|| ! reload_completed
)
3177 && current_function_calls_eh_return
)
3179 rtx tmp
= EH_RETURN_HANDLER_RTX
;
3180 if (tmp
&& REG_P (tmp
))
3181 mark_reg (tmp
, set
);
3185 /* Mark function return value. */
3186 diddle_return_value (mark_reg
, set
);
3189 /* Callback function for for_each_successor_phi. DATA is a regset.
3190 Sets the SRC_REGNO, the regno of the phi alternative for phi node
3191 INSN, in the regset. */
3194 set_phi_alternative_reg (insn
, dest_regno
, src_regno
, data
)
3195 rtx insn ATTRIBUTE_UNUSED
;
3196 int dest_regno ATTRIBUTE_UNUSED
;
3200 regset live
= (regset
) data
;
3201 SET_REGNO_REG_SET (live
, src_regno
);
3205 /* Propagate global life info around the graph of basic blocks. Begin
3206 considering blocks with their corresponding bit set in BLOCKS_IN.
3207 If BLOCKS_IN is null, consider it the universal set.
3209 BLOCKS_OUT is set for every block that was changed. */
3212 calculate_global_regs_live (blocks_in
, blocks_out
, flags
)
3213 sbitmap blocks_in
, blocks_out
;
3216 basic_block
*queue
, *qhead
, *qtail
, *qend
;
3217 regset tmp
, new_live_at_end
, call_used
;
3218 regset_head tmp_head
, call_used_head
;
3219 regset_head new_live_at_end_head
;
3222 tmp
= INITIALIZE_REG_SET (tmp_head
);
3223 new_live_at_end
= INITIALIZE_REG_SET (new_live_at_end_head
);
3224 call_used
= INITIALIZE_REG_SET (call_used_head
);
3226 /* Inconveniently, this is only redily available in hard reg set form. */
3227 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; ++i
)
3228 if (call_used_regs
[i
])
3229 SET_REGNO_REG_SET (call_used
, i
);
3231 /* Create a worklist. Allocate an extra slot for ENTRY_BLOCK, and one
3232 because the `head == tail' style test for an empty queue doesn't
3233 work with a full queue. */
3234 queue
= (basic_block
*) xmalloc ((n_basic_blocks
+ 2) * sizeof (*queue
));
3236 qhead
= qend
= queue
+ n_basic_blocks
+ 2;
3238 /* Queue the blocks set in the initial mask. Do this in reverse block
3239 number order so that we are more likely for the first round to do
3240 useful work. We use AUX non-null to flag that the block is queued. */
3243 /* Clear out the garbage that might be hanging out in bb->aux. */
3244 for (i
= n_basic_blocks
- 1; i
>= 0; --i
)
3245 BASIC_BLOCK (i
)->aux
= NULL
;
3247 EXECUTE_IF_SET_IN_SBITMAP (blocks_in
, 0, i
,
3249 basic_block bb
= BASIC_BLOCK (i
);
3256 for (i
= 0; i
< n_basic_blocks
; ++i
)
3258 basic_block bb
= BASIC_BLOCK (i
);
3265 sbitmap_zero (blocks_out
);
3267 while (qhead
!= qtail
)
3269 int rescan
, changed
;
3278 /* Begin by propogating live_at_start from the successor blocks. */
3279 CLEAR_REG_SET (new_live_at_end
);
3280 for (e
= bb
->succ
; e
; e
= e
->succ_next
)
3282 basic_block sb
= e
->dest
;
3284 /* Call-clobbered registers die across exception and call edges. */
3285 /* ??? Abnormal call edges ignored for the moment, as this gets
3286 confused by sibling call edges, which crashes reg-stack. */
3287 if (e
->flags
& EDGE_EH
)
3289 bitmap_operation (tmp
, sb
->global_live_at_start
,
3290 call_used
, BITMAP_AND_COMPL
);
3291 IOR_REG_SET (new_live_at_end
, tmp
);
3294 IOR_REG_SET (new_live_at_end
, sb
->global_live_at_start
);
3297 /* The all-important stack pointer must always be live. */
3298 SET_REGNO_REG_SET (new_live_at_end
, STACK_POINTER_REGNUM
);
3300 /* Before reload, there are a few registers that must be forced
3301 live everywhere -- which might not already be the case for
3302 blocks within infinite loops. */
3303 if (! reload_completed
)
3305 /* Any reference to any pseudo before reload is a potential
3306 reference of the frame pointer. */
3307 SET_REGNO_REG_SET (new_live_at_end
, FRAME_POINTER_REGNUM
);
3309 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3310 /* Pseudos with argument area equivalences may require
3311 reloading via the argument pointer. */
3312 if (fixed_regs
[ARG_POINTER_REGNUM
])
3313 SET_REGNO_REG_SET (new_live_at_end
, ARG_POINTER_REGNUM
);
3316 #ifdef PIC_OFFSET_TABLE_REGNUM
3317 /* Any constant, or pseudo with constant equivalences, may
3318 require reloading from memory using the pic register. */
3319 if (fixed_regs
[PIC_OFFSET_TABLE_REGNUM
])
3320 SET_REGNO_REG_SET (new_live_at_end
, PIC_OFFSET_TABLE_REGNUM
);
3324 /* Regs used in phi nodes are not included in
3325 global_live_at_start, since they are live only along a
3326 particular edge. Set those regs that are live because of a
3327 phi node alternative corresponding to this particular block. */
3329 for_each_successor_phi (bb
, &set_phi_alternative_reg
,
3332 if (bb
== ENTRY_BLOCK_PTR
)
3334 COPY_REG_SET (bb
->global_live_at_end
, new_live_at_end
);
3338 /* On our first pass through this block, we'll go ahead and continue.
3339 Recognize first pass by local_set NULL. On subsequent passes, we
3340 get to skip out early if live_at_end wouldn't have changed. */
3342 if (bb
->local_set
== NULL
)
3344 bb
->local_set
= OBSTACK_ALLOC_REG_SET (&flow_obstack
);
3345 bb
->cond_local_set
= OBSTACK_ALLOC_REG_SET (&flow_obstack
);
3350 /* If any bits were removed from live_at_end, we'll have to
3351 rescan the block. This wouldn't be necessary if we had
3352 precalculated local_live, however with PROP_SCAN_DEAD_CODE
3353 local_live is really dependent on live_at_end. */
3354 CLEAR_REG_SET (tmp
);
3355 rescan
= bitmap_operation (tmp
, bb
->global_live_at_end
,
3356 new_live_at_end
, BITMAP_AND_COMPL
);
3360 /* If any of the registers in the new live_at_end set are
3361 conditionally set in this basic block, we must rescan.
3362 This is because conditional lifetimes at the end of the
3363 block do not just take the live_at_end set into account,
3364 but also the liveness at the start of each successor
3365 block. We can miss changes in those sets if we only
3366 compare the new live_at_end against the previous one. */
3367 CLEAR_REG_SET (tmp
);
3368 rescan
= bitmap_operation (tmp
, new_live_at_end
,
3369 bb
->cond_local_set
, BITMAP_AND
);
3374 /* Find the set of changed bits. Take this opportunity
3375 to notice that this set is empty and early out. */
3376 CLEAR_REG_SET (tmp
);
3377 changed
= bitmap_operation (tmp
, bb
->global_live_at_end
,
3378 new_live_at_end
, BITMAP_XOR
);
3382 /* If any of the changed bits overlap with local_set,
3383 we'll have to rescan the block. Detect overlap by
3384 the AND with ~local_set turning off bits. */
3385 rescan
= bitmap_operation (tmp
, tmp
, bb
->local_set
,
3390 /* Let our caller know that BB changed enough to require its
3391 death notes updated. */
3393 SET_BIT (blocks_out
, bb
->index
);
3397 /* Add to live_at_start the set of all registers in
3398 new_live_at_end that aren't in the old live_at_end. */
3400 bitmap_operation (tmp
, new_live_at_end
, bb
->global_live_at_end
,
3402 COPY_REG_SET (bb
->global_live_at_end
, new_live_at_end
);
3404 changed
= bitmap_operation (bb
->global_live_at_start
,
3405 bb
->global_live_at_start
,
3412 COPY_REG_SET (bb
->global_live_at_end
, new_live_at_end
);
3414 /* Rescan the block insn by insn to turn (a copy of) live_at_end
3415 into live_at_start. */
3416 propagate_block (bb
, new_live_at_end
, bb
->local_set
,
3417 bb
->cond_local_set
, flags
);
3419 /* If live_at start didn't change, no need to go farther. */
3420 if (REG_SET_EQUAL_P (bb
->global_live_at_start
, new_live_at_end
))
3423 COPY_REG_SET (bb
->global_live_at_start
, new_live_at_end
);
3426 /* Queue all predecessors of BB so that we may re-examine
3427 their live_at_end. */
3428 for (e
= bb
->pred
; e
; e
= e
->pred_next
)
3430 basic_block pb
= e
->src
;
3431 if (pb
->aux
== NULL
)
3442 FREE_REG_SET (new_live_at_end
);
3443 FREE_REG_SET (call_used
);
3447 EXECUTE_IF_SET_IN_SBITMAP (blocks_out
, 0, i
,
3449 basic_block bb
= BASIC_BLOCK (i
);
3450 FREE_REG_SET (bb
->local_set
);
3451 FREE_REG_SET (bb
->cond_local_set
);
3456 for (i
= n_basic_blocks
- 1; i
>= 0; --i
)
3458 basic_block bb
= BASIC_BLOCK (i
);
3459 FREE_REG_SET (bb
->local_set
);
3460 FREE_REG_SET (bb
->cond_local_set
);
3467 /* Subroutines of life analysis. */
3469 /* Allocate the permanent data structures that represent the results
3470 of life analysis. Not static since used also for stupid life analysis. */
3473 allocate_bb_life_data ()
3477 for (i
= 0; i
< n_basic_blocks
; i
++)
3479 basic_block bb
= BASIC_BLOCK (i
);
3481 bb
->global_live_at_start
= OBSTACK_ALLOC_REG_SET (&flow_obstack
);
3482 bb
->global_live_at_end
= OBSTACK_ALLOC_REG_SET (&flow_obstack
);
3485 ENTRY_BLOCK_PTR
->global_live_at_end
3486 = OBSTACK_ALLOC_REG_SET (&flow_obstack
);
3487 EXIT_BLOCK_PTR
->global_live_at_start
3488 = OBSTACK_ALLOC_REG_SET (&flow_obstack
);
3490 regs_live_at_setjmp
= OBSTACK_ALLOC_REG_SET (&flow_obstack
);
3494 allocate_reg_life_data ()
3498 max_regno
= max_reg_num ();
3500 /* Recalculate the register space, in case it has grown. Old style
3501 vector oriented regsets would set regset_{size,bytes} here also. */
3502 allocate_reg_info (max_regno
, FALSE
, FALSE
);
3504 /* Reset all the data we'll collect in propagate_block and its
3506 for (i
= 0; i
< max_regno
; i
++)
3510 REG_N_DEATHS (i
) = 0;
3511 REG_N_CALLS_CROSSED (i
) = 0;
3512 REG_LIVE_LENGTH (i
) = 0;
3513 REG_BASIC_BLOCK (i
) = REG_BLOCK_UNKNOWN
;
3517 /* Delete dead instructions for propagate_block. */
3520 propagate_block_delete_insn (bb
, insn
)
3524 rtx inote
= find_reg_note (insn
, REG_LABEL
, NULL_RTX
);
3526 /* If the insn referred to a label, and that label was attached to
3527 an ADDR_VEC, it's safe to delete the ADDR_VEC. In fact, it's
3528 pretty much mandatory to delete it, because the ADDR_VEC may be
3529 referencing labels that no longer exist. */
3533 rtx label
= XEXP (inote
, 0);
3536 /* The label may be forced if it has been put in the constant
3537 pool. If that is the only use we must discard the table
3538 jump following it, but not the label itself. */
3539 if (LABEL_NUSES (label
) == 1 + LABEL_PRESERVE_P (label
)
3540 && (next
= next_nonnote_insn (label
)) != NULL
3541 && GET_CODE (next
) == JUMP_INSN
3542 && (GET_CODE (PATTERN (next
)) == ADDR_VEC
3543 || GET_CODE (PATTERN (next
)) == ADDR_DIFF_VEC
))
3545 rtx pat
= PATTERN (next
);
3546 int diff_vec_p
= GET_CODE (pat
) == ADDR_DIFF_VEC
;
3547 int len
= XVECLEN (pat
, diff_vec_p
);
3550 for (i
= 0; i
< len
; i
++)
3551 LABEL_NUSES (XEXP (XVECEXP (pat
, diff_vec_p
, i
), 0))--;
3553 flow_delete_insn (next
);
3557 if (bb
->end
== insn
)
3558 bb
->end
= PREV_INSN (insn
);
3559 flow_delete_insn (insn
);
3562 /* Delete dead libcalls for propagate_block. Return the insn
3563 before the libcall. */
3566 propagate_block_delete_libcall (bb
, insn
, note
)
3570 rtx first
= XEXP (note
, 0);
3571 rtx before
= PREV_INSN (first
);
3573 if (insn
== bb
->end
)
3576 flow_delete_insn_chain (first
, insn
);
3580 /* Update the life-status of regs for one insn. Return the previous insn. */
3583 propagate_one_insn (pbi
, insn
)
3584 struct propagate_block_info
*pbi
;
3587 rtx prev
= PREV_INSN (insn
);
3588 int flags
= pbi
->flags
;
3589 int insn_is_dead
= 0;
3590 int libcall_is_dead
= 0;
3594 if (! INSN_P (insn
))
3597 note
= find_reg_note (insn
, REG_RETVAL
, NULL_RTX
);
3598 if (flags
& PROP_SCAN_DEAD_CODE
)
3600 insn_is_dead
= insn_dead_p (pbi
, PATTERN (insn
), 0, REG_NOTES (insn
));
3601 libcall_is_dead
= (insn_is_dead
&& note
!= 0
3602 && libcall_dead_p (pbi
, note
, insn
));
3605 /* If an instruction consists of just dead store(s) on final pass,
3607 if ((flags
& PROP_KILL_DEAD_CODE
) && insn_is_dead
)
3609 /* If we're trying to delete a prologue or epilogue instruction
3610 that isn't flagged as possibly being dead, something is wrong.
3611 But if we are keeping the stack pointer depressed, we might well
3612 be deleting insns that are used to compute the amount to update
3613 it by, so they are fine. */
3614 if (reload_completed
3615 && !(TREE_CODE (TREE_TYPE (current_function_decl
)) == FUNCTION_TYPE
3616 && (TYPE_RETURNS_STACK_DEPRESSED
3617 (TREE_TYPE (current_function_decl
))))
3618 && (((HAVE_epilogue
|| HAVE_prologue
)
3619 && prologue_epilogue_contains (insn
))
3620 || (HAVE_sibcall_epilogue
3621 && sibcall_epilogue_contains (insn
)))
3622 && find_reg_note (insn
, REG_MAYBE_DEAD
, NULL_RTX
) == 0)
3625 /* Record sets. Do this even for dead instructions, since they
3626 would have killed the values if they hadn't been deleted. */
3627 mark_set_regs (pbi
, PATTERN (insn
), insn
);
3629 /* CC0 is now known to be dead. Either this insn used it,
3630 in which case it doesn't anymore, or clobbered it,
3631 so the next insn can't use it. */
3634 if (libcall_is_dead
)
3635 prev
= propagate_block_delete_libcall (pbi
->bb
, insn
, note
);
3637 propagate_block_delete_insn (pbi
->bb
, insn
);
3642 /* See if this is an increment or decrement that can be merged into
3643 a following memory address. */
3646 register rtx x
= single_set (insn
);
3648 /* Does this instruction increment or decrement a register? */
3649 if ((flags
& PROP_AUTOINC
)
3651 && GET_CODE (SET_DEST (x
)) == REG
3652 && (GET_CODE (SET_SRC (x
)) == PLUS
3653 || GET_CODE (SET_SRC (x
)) == MINUS
)
3654 && XEXP (SET_SRC (x
), 0) == SET_DEST (x
)
3655 && GET_CODE (XEXP (SET_SRC (x
), 1)) == CONST_INT
3656 /* Ok, look for a following memory ref we can combine with.
3657 If one is found, change the memory ref to a PRE_INC
3658 or PRE_DEC, cancel this insn, and return 1.
3659 Return 0 if nothing has been done. */
3660 && try_pre_increment_1 (pbi
, insn
))
3663 #endif /* AUTO_INC_DEC */
3665 CLEAR_REG_SET (pbi
->new_set
);
3667 /* If this is not the final pass, and this insn is copying the value of
3668 a library call and it's dead, don't scan the insns that perform the
3669 library call, so that the call's arguments are not marked live. */
3670 if (libcall_is_dead
)
3672 /* Record the death of the dest reg. */
3673 mark_set_regs (pbi
, PATTERN (insn
), insn
);
3675 insn
= XEXP (note
, 0);
3676 return PREV_INSN (insn
);
3678 else if (GET_CODE (PATTERN (insn
)) == SET
3679 && SET_DEST (PATTERN (insn
)) == stack_pointer_rtx
3680 && GET_CODE (SET_SRC (PATTERN (insn
))) == PLUS
3681 && XEXP (SET_SRC (PATTERN (insn
)), 0) == stack_pointer_rtx
3682 && GET_CODE (XEXP (SET_SRC (PATTERN (insn
)), 1)) == CONST_INT
)
3683 /* We have an insn to pop a constant amount off the stack.
3684 (Such insns use PLUS regardless of the direction of the stack,
3685 and any insn to adjust the stack by a constant is always a pop.)
3686 These insns, if not dead stores, have no effect on life. */
3690 /* Any regs live at the time of a call instruction must not go
3691 in a register clobbered by calls. Find all regs now live and
3692 record this for them. */
3694 if (GET_CODE (insn
) == CALL_INSN
&& (flags
& PROP_REG_INFO
))
3695 EXECUTE_IF_SET_IN_REG_SET (pbi
->reg_live
, 0, i
,
3696 { REG_N_CALLS_CROSSED (i
)++; });
3698 /* Record sets. Do this even for dead instructions, since they
3699 would have killed the values if they hadn't been deleted. */
3700 mark_set_regs (pbi
, PATTERN (insn
), insn
);
3702 if (GET_CODE (insn
) == CALL_INSN
)
3708 if (GET_CODE (PATTERN (insn
)) == COND_EXEC
)
3709 cond
= COND_EXEC_TEST (PATTERN (insn
));
3711 /* Non-constant calls clobber memory. */
3712 if (! CONST_CALL_P (insn
))
3714 free_EXPR_LIST_list (&pbi
->mem_set_list
);
3715 pbi
->mem_set_list_len
= 0;
3718 /* There may be extra registers to be clobbered. */
3719 for (note
= CALL_INSN_FUNCTION_USAGE (insn
);
3721 note
= XEXP (note
, 1))
3722 if (GET_CODE (XEXP (note
, 0)) == CLOBBER
)
3723 mark_set_1 (pbi
, CLOBBER
, XEXP (XEXP (note
, 0), 0),
3724 cond
, insn
, pbi
->flags
);
3726 /* Calls change all call-used and global registers. */
3727 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
3728 if (call_used_regs
[i
] && ! global_regs
[i
]
3731 /* We do not want REG_UNUSED notes for these registers. */
3732 mark_set_1 (pbi
, CLOBBER
, gen_rtx_REG (reg_raw_mode
[i
], i
),
3734 pbi
->flags
& ~(PROP_DEATH_NOTES
| PROP_REG_INFO
));
3738 /* If an insn doesn't use CC0, it becomes dead since we assume
3739 that every insn clobbers it. So show it dead here;
3740 mark_used_regs will set it live if it is referenced. */
3745 mark_used_regs (pbi
, PATTERN (insn
), NULL_RTX
, insn
);
3747 /* Sometimes we may have inserted something before INSN (such as a move)
3748 when we make an auto-inc. So ensure we will scan those insns. */
3750 prev
= PREV_INSN (insn
);
3753 if (! insn_is_dead
&& GET_CODE (insn
) == CALL_INSN
)
3759 if (GET_CODE (PATTERN (insn
)) == COND_EXEC
)
3760 cond
= COND_EXEC_TEST (PATTERN (insn
));
3762 /* Calls use their arguments. */
3763 for (note
= CALL_INSN_FUNCTION_USAGE (insn
);
3765 note
= XEXP (note
, 1))
3766 if (GET_CODE (XEXP (note
, 0)) == USE
)
3767 mark_used_regs (pbi
, XEXP (XEXP (note
, 0), 0),
3770 /* The stack ptr is used (honorarily) by a CALL insn. */
3771 SET_REGNO_REG_SET (pbi
->reg_live
, STACK_POINTER_REGNUM
);
3773 /* Calls may also reference any of the global registers,
3774 so they are made live. */
3775 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
3777 mark_used_reg (pbi
, gen_rtx_REG (reg_raw_mode
[i
], i
),
3782 /* On final pass, update counts of how many insns in which each reg
3784 if (flags
& PROP_REG_INFO
)
3785 EXECUTE_IF_SET_IN_REG_SET (pbi
->reg_live
, 0, i
,
3786 { REG_LIVE_LENGTH (i
)++; });
3791 /* Initialize a propagate_block_info struct for public consumption.
3792 Note that the structure itself is opaque to this file, but that
3793 the user can use the regsets provided here. */
3795 struct propagate_block_info
*
3796 init_propagate_block_info (bb
, live
, local_set
, cond_local_set
, flags
)
3798 regset live
, local_set
, cond_local_set
;
3801 struct propagate_block_info
*pbi
= xmalloc (sizeof (*pbi
));
3804 pbi
->reg_live
= live
;
3805 pbi
->mem_set_list
= NULL_RTX
;
3806 pbi
->mem_set_list_len
= 0;
3807 pbi
->local_set
= local_set
;
3808 pbi
->cond_local_set
= cond_local_set
;
3812 if (flags
& (PROP_LOG_LINKS
| PROP_AUTOINC
))
3813 pbi
->reg_next_use
= (rtx
*) xcalloc (max_reg_num (), sizeof (rtx
));
3815 pbi
->reg_next_use
= NULL
;
3817 pbi
->new_set
= BITMAP_XMALLOC ();
3819 #ifdef HAVE_conditional_execution
3820 pbi
->reg_cond_dead
= splay_tree_new (splay_tree_compare_ints
, NULL
,
3821 free_reg_cond_life_info
);
3822 pbi
->reg_cond_reg
= BITMAP_XMALLOC ();
3824 /* If this block ends in a conditional branch, for each register live
3825 from one side of the branch and not the other, record the register
3826 as conditionally dead. */
3827 if (GET_CODE (bb
->end
) == JUMP_INSN
3828 && any_condjump_p (bb
->end
))
3830 regset_head diff_head
;
3831 regset diff
= INITIALIZE_REG_SET (diff_head
);
3832 basic_block bb_true
, bb_false
;
3833 rtx cond_true
, cond_false
, set_src
;
3836 /* Identify the successor blocks. */
3837 bb_true
= bb
->succ
->dest
;
3838 if (bb
->succ
->succ_next
!= NULL
)
3840 bb_false
= bb
->succ
->succ_next
->dest
;
3842 if (bb
->succ
->flags
& EDGE_FALLTHRU
)
3844 basic_block t
= bb_false
;
3848 else if (! (bb
->succ
->succ_next
->flags
& EDGE_FALLTHRU
))
3853 /* This can happen with a conditional jump to the next insn. */
3854 if (JUMP_LABEL (bb
->end
) != bb_true
->head
)
3857 /* Simplest way to do nothing. */
3861 /* Extract the condition from the branch. */
3862 set_src
= SET_SRC (pc_set (bb
->end
));
3863 cond_true
= XEXP (set_src
, 0);
3864 cond_false
= gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond_true
)),
3865 GET_MODE (cond_true
), XEXP (cond_true
, 0),
3866 XEXP (cond_true
, 1));
3867 if (GET_CODE (XEXP (set_src
, 1)) == PC
)
3870 cond_false
= cond_true
;
3874 /* Compute which register lead different lives in the successors. */
3875 if (bitmap_operation (diff
, bb_true
->global_live_at_start
,
3876 bb_false
->global_live_at_start
, BITMAP_XOR
))
3878 rtx reg
= XEXP (cond_true
, 0);
3880 if (GET_CODE (reg
) == SUBREG
)
3881 reg
= SUBREG_REG (reg
);
3883 if (GET_CODE (reg
) != REG
)
3886 SET_REGNO_REG_SET (pbi
->reg_cond_reg
, REGNO (reg
));
3888 /* For each such register, mark it conditionally dead. */
3889 EXECUTE_IF_SET_IN_REG_SET
3892 struct reg_cond_life_info
*rcli
;
3895 rcli
= (struct reg_cond_life_info
*) xmalloc (sizeof (*rcli
));
3897 if (REGNO_REG_SET_P (bb_true
->global_live_at_start
, i
))
3901 rcli
->condition
= cond
;
3902 rcli
->stores
= const0_rtx
;
3903 rcli
->orig_condition
= cond
;
3905 splay_tree_insert (pbi
->reg_cond_dead
, i
,
3906 (splay_tree_value
) rcli
);
3910 FREE_REG_SET (diff
);
3914 /* If this block has no successors, any stores to the frame that aren't
3915 used later in the block are dead. So make a pass over the block
3916 recording any such that are made and show them dead at the end. We do
3917 a very conservative and simple job here. */
3919 && ! (TREE_CODE (TREE_TYPE (current_function_decl
)) == FUNCTION_TYPE
3920 && (TYPE_RETURNS_STACK_DEPRESSED
3921 (TREE_TYPE (current_function_decl
))))
3922 && (flags
& PROP_SCAN_DEAD_CODE
)
3923 && (bb
->succ
== NULL
3924 || (bb
->succ
->succ_next
== NULL
3925 && bb
->succ
->dest
== EXIT_BLOCK_PTR
3926 && ! current_function_calls_eh_return
)))
3929 for (insn
= bb
->end
; insn
!= bb
->head
; insn
= PREV_INSN (insn
))
3930 if (GET_CODE (insn
) == INSN
3931 && GET_CODE (PATTERN (insn
)) == SET
3932 && GET_CODE (SET_DEST (PATTERN (insn
))) == MEM
)
3934 rtx mem
= SET_DEST (PATTERN (insn
));
3936 /* This optimization is performed by faking a store to the
3937 memory at the end of the block. This doesn't work for
3938 unchanging memories because multiple stores to unchanging
3939 memory is illegal and alias analysis doesn't consider it. */
3940 if (RTX_UNCHANGING_P (mem
))
3943 if (XEXP (mem
, 0) == frame_pointer_rtx
3944 || (GET_CODE (XEXP (mem
, 0)) == PLUS
3945 && XEXP (XEXP (mem
, 0), 0) == frame_pointer_rtx
3946 && GET_CODE (XEXP (XEXP (mem
, 0), 1)) == CONST_INT
))
3949 /* Store a copy of mem, otherwise the address may be scrogged
3950 by find_auto_inc. This matters because insn_dead_p uses
3951 an rtx_equal_p check to determine if two addresses are
3952 the same. This works before find_auto_inc, but fails
3953 after find_auto_inc, causing discrepencies between the
3954 set of live registers calculated during the
3955 calculate_global_regs_live phase and what actually exists
3956 after flow completes, leading to aborts. */
3957 if (flags
& PROP_AUTOINC
)
3958 mem
= shallow_copy_rtx (mem
);
3960 pbi
->mem_set_list
= alloc_EXPR_LIST (0, mem
, pbi
->mem_set_list
);
3961 if (++pbi
->mem_set_list_len
>= MAX_MEM_SET_LIST_LEN
)
3970 /* Release a propagate_block_info struct. */
3973 free_propagate_block_info (pbi
)
3974 struct propagate_block_info
*pbi
;
3976 free_EXPR_LIST_list (&pbi
->mem_set_list
);
3978 BITMAP_XFREE (pbi
->new_set
);
3980 #ifdef HAVE_conditional_execution
3981 splay_tree_delete (pbi
->reg_cond_dead
);
3982 BITMAP_XFREE (pbi
->reg_cond_reg
);
3985 if (pbi
->reg_next_use
)
3986 free (pbi
->reg_next_use
);
3991 /* Compute the registers live at the beginning of a basic block BB from
3992 those live at the end.
3994 When called, REG_LIVE contains those live at the end. On return, it
3995 contains those live at the beginning.
3997 LOCAL_SET, if non-null, will be set with all registers killed
3998 unconditionally by this basic block.
3999 Likewise, COND_LOCAL_SET, if non-null, will be set with all registers
4000 killed conditionally by this basic block. If there is any unconditional
4001 set of a register, then the corresponding bit will be set in LOCAL_SET
4002 and cleared in COND_LOCAL_SET.
4003 It is valid for LOCAL_SET and COND_LOCAL_SET to be the same set. In this
4004 case, the resulting set will be equal to the union of the two sets that
4005 would otherwise be computed. */
4008 propagate_block (bb
, live
, local_set
, cond_local_set
, flags
)
4012 regset cond_local_set
;
4015 struct propagate_block_info
*pbi
;
4018 pbi
= init_propagate_block_info (bb
, live
, local_set
, cond_local_set
, flags
);
4020 if (flags
& PROP_REG_INFO
)
4024 /* Process the regs live at the end of the block.
4025 Mark them as not local to any one basic block. */
4026 EXECUTE_IF_SET_IN_REG_SET (live
, 0, i
,
4027 { REG_BASIC_BLOCK (i
) = REG_BLOCK_GLOBAL
; });
4030 /* Scan the block an insn at a time from end to beginning. */
4032 for (insn
= bb
->end
;; insn
= prev
)
4034 /* If this is a call to `setjmp' et al, warn if any
4035 non-volatile datum is live. */
4036 if ((flags
& PROP_REG_INFO
)
4037 && GET_CODE (insn
) == NOTE
4038 && NOTE_LINE_NUMBER (insn
) == NOTE_INSN_SETJMP
)
4039 IOR_REG_SET (regs_live_at_setjmp
, pbi
->reg_live
);
4041 prev
= propagate_one_insn (pbi
, insn
);
4043 if (insn
== bb
->head
)
4047 free_propagate_block_info (pbi
);
4050 /* Return 1 if X (the body of an insn, or part of it) is just dead stores
4051 (SET expressions whose destinations are registers dead after the insn).
4052 NEEDED is the regset that says which regs are alive after the insn.
4054 Unless CALL_OK is non-zero, an insn is needed if it contains a CALL.
4056 If X is the entire body of an insn, NOTES contains the reg notes
4057 pertaining to the insn. */
4060 insn_dead_p (pbi
, x
, call_ok
, notes
)
4061 struct propagate_block_info
*pbi
;
4064 rtx notes ATTRIBUTE_UNUSED
;
4066 enum rtx_code code
= GET_CODE (x
);
4069 /* If flow is invoked after reload, we must take existing AUTO_INC
4070 expresions into account. */
4071 if (reload_completed
)
4073 for (; notes
; notes
= XEXP (notes
, 1))
4075 if (REG_NOTE_KIND (notes
) == REG_INC
)
4077 int regno
= REGNO (XEXP (notes
, 0));
4079 /* Don't delete insns to set global regs. */
4080 if ((regno
< FIRST_PSEUDO_REGISTER
&& global_regs
[regno
])
4081 || REGNO_REG_SET_P (pbi
->reg_live
, regno
))
4088 /* If setting something that's a reg or part of one,
4089 see if that register's altered value will be live. */
4093 rtx r
= SET_DEST (x
);
4096 if (GET_CODE (r
) == CC0
)
4097 return ! pbi
->cc0_live
;
4100 /* A SET that is a subroutine call cannot be dead. */
4101 if (GET_CODE (SET_SRC (x
)) == CALL
)
4107 /* Don't eliminate loads from volatile memory or volatile asms. */
4108 else if (volatile_refs_p (SET_SRC (x
)))
4111 if (GET_CODE (r
) == MEM
)
4115 if (MEM_VOLATILE_P (r
))
4118 /* Walk the set of memory locations we are currently tracking
4119 and see if one is an identical match to this memory location.
4120 If so, this memory write is dead (remember, we're walking
4121 backwards from the end of the block to the start). Since
4122 rtx_equal_p does not check the alias set or flags, we also
4123 must have the potential for them to conflict (anti_dependence). */
4124 for (temp
= pbi
->mem_set_list
; temp
!= 0; temp
= XEXP (temp
, 1))
4125 if (anti_dependence (r
, XEXP (temp
, 0)))
4127 rtx mem
= XEXP (temp
, 0);
4129 if (rtx_equal_p (mem
, r
))
4132 /* Check if memory reference matches an auto increment. Only
4133 post increment/decrement or modify are valid. */
4134 if (GET_MODE (mem
) == GET_MODE (r
)
4135 && (GET_CODE (XEXP (mem
, 0)) == POST_DEC
4136 || GET_CODE (XEXP (mem
, 0)) == POST_INC
4137 || GET_CODE (XEXP (mem
, 0)) == POST_MODIFY
)
4138 && GET_MODE (XEXP (mem
, 0)) == GET_MODE (r
)
4139 && rtx_equal_p (XEXP (XEXP (mem
, 0), 0), XEXP (r
, 0)))
4146 while (GET_CODE (r
) == SUBREG
4147 || GET_CODE (r
) == STRICT_LOW_PART
4148 || GET_CODE (r
) == ZERO_EXTRACT
)
4151 if (GET_CODE (r
) == REG
)
4153 int regno
= REGNO (r
);
4156 if (REGNO_REG_SET_P (pbi
->reg_live
, regno
))
4159 /* If this is a hard register, verify that subsequent
4160 words are not needed. */
4161 if (regno
< FIRST_PSEUDO_REGISTER
)
4163 int n
= HARD_REGNO_NREGS (regno
, GET_MODE (r
));
4166 if (REGNO_REG_SET_P (pbi
->reg_live
, regno
+n
))
4170 /* Don't delete insns to set global regs. */
4171 if (regno
< FIRST_PSEUDO_REGISTER
&& global_regs
[regno
])
4174 /* Make sure insns to set the stack pointer aren't deleted. */
4175 if (regno
== STACK_POINTER_REGNUM
)
4178 /* ??? These bits might be redundant with the force live bits
4179 in calculate_global_regs_live. We would delete from
4180 sequential sets; whether this actually affects real code
4181 for anything but the stack pointer I don't know. */
4182 /* Make sure insns to set the frame pointer aren't deleted. */
4183 if (regno
== FRAME_POINTER_REGNUM
4184 && (! reload_completed
|| frame_pointer_needed
))
4186 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4187 if (regno
== HARD_FRAME_POINTER_REGNUM
4188 && (! reload_completed
|| frame_pointer_needed
))
4192 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4193 /* Make sure insns to set arg pointer are never deleted
4194 (if the arg pointer isn't fixed, there will be a USE
4195 for it, so we can treat it normally). */
4196 if (regno
== ARG_POINTER_REGNUM
&& fixed_regs
[regno
])
4200 /* Otherwise, the set is dead. */
4206 /* If performing several activities, insn is dead if each activity
4207 is individually dead. Also, CLOBBERs and USEs can be ignored; a
4208 CLOBBER or USE that's inside a PARALLEL doesn't make the insn
4210 else if (code
== PARALLEL
)
4212 int i
= XVECLEN (x
, 0);
4214 for (i
--; i
>= 0; i
--)
4215 if (GET_CODE (XVECEXP (x
, 0, i
)) != CLOBBER
4216 && GET_CODE (XVECEXP (x
, 0, i
)) != USE
4217 && ! insn_dead_p (pbi
, XVECEXP (x
, 0, i
), call_ok
, NULL_RTX
))
4223 /* A CLOBBER of a pseudo-register that is dead serves no purpose. That
4224 is not necessarily true for hard registers. */
4225 else if (code
== CLOBBER
&& GET_CODE (XEXP (x
, 0)) == REG
4226 && REGNO (XEXP (x
, 0)) >= FIRST_PSEUDO_REGISTER
4227 && ! REGNO_REG_SET_P (pbi
->reg_live
, REGNO (XEXP (x
, 0))))
4230 /* We do not check other CLOBBER or USE here. An insn consisting of just
4231 a CLOBBER or just a USE should not be deleted. */
4235 /* If INSN is the last insn in a libcall, and assuming INSN is dead,
4236 return 1 if the entire library call is dead.
4237 This is true if INSN copies a register (hard or pseudo)
4238 and if the hard return reg of the call insn is dead.
4239 (The caller should have tested the destination of the SET inside
4240 INSN already for death.)
4242 If this insn doesn't just copy a register, then we don't
4243 have an ordinary libcall. In that case, cse could not have
4244 managed to substitute the source for the dest later on,
4245 so we can assume the libcall is dead.
4247 PBI is the block info giving pseudoregs live before this insn.
4248 NOTE is the REG_RETVAL note of the insn. */
4251 libcall_dead_p (pbi
, note
, insn
)
4252 struct propagate_block_info
*pbi
;
4256 rtx x
= single_set (insn
);
4260 register rtx r
= SET_SRC (x
);
4261 if (GET_CODE (r
) == REG
)
4263 rtx call
= XEXP (note
, 0);
4267 /* Find the call insn. */
4268 while (call
!= insn
&& GET_CODE (call
) != CALL_INSN
)
4269 call
= NEXT_INSN (call
);
4271 /* If there is none, do nothing special,
4272 since ordinary death handling can understand these insns. */
4276 /* See if the hard reg holding the value is dead.
4277 If this is a PARALLEL, find the call within it. */
4278 call_pat
= PATTERN (call
);
4279 if (GET_CODE (call_pat
) == PARALLEL
)
4281 for (i
= XVECLEN (call_pat
, 0) - 1; i
>= 0; i
--)
4282 if (GET_CODE (XVECEXP (call_pat
, 0, i
)) == SET
4283 && GET_CODE (SET_SRC (XVECEXP (call_pat
, 0, i
))) == CALL
)
4286 /* This may be a library call that is returning a value
4287 via invisible pointer. Do nothing special, since
4288 ordinary death handling can understand these insns. */
4292 call_pat
= XVECEXP (call_pat
, 0, i
);
4295 return insn_dead_p (pbi
, call_pat
, 1, REG_NOTES (call
));
4301 /* Return 1 if register REGNO was used before it was set, i.e. if it is
4302 live at function entry. Don't count global register variables, variables
4303 in registers that can be used for function arg passing, or variables in
4304 fixed hard registers. */
4307 regno_uninitialized (regno
)
4310 if (n_basic_blocks
== 0
4311 || (regno
< FIRST_PSEUDO_REGISTER
4312 && (global_regs
[regno
]
4313 || fixed_regs
[regno
]
4314 || FUNCTION_ARG_REGNO_P (regno
))))
4317 return REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start
, regno
);
4320 /* 1 if register REGNO was alive at a place where `setjmp' was called
4321 and was set more than once or is an argument.
4322 Such regs may be clobbered by `longjmp'. */
4325 regno_clobbered_at_setjmp (regno
)
4328 if (n_basic_blocks
== 0)
4331 return ((REG_N_SETS (regno
) > 1
4332 || REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start
, regno
))
4333 && REGNO_REG_SET_P (regs_live_at_setjmp
, regno
));
4336 /* INSN references memory, possibly using autoincrement addressing modes.
4337 Find any entries on the mem_set_list that need to be invalidated due
4338 to an address change. */
4341 invalidate_mems_from_autoinc (pbi
, insn
)
4342 struct propagate_block_info
*pbi
;
4345 rtx note
= REG_NOTES (insn
);
4346 for (note
= REG_NOTES (insn
); note
; note
= XEXP (note
, 1))
4348 if (REG_NOTE_KIND (note
) == REG_INC
)
4350 rtx temp
= pbi
->mem_set_list
;
4351 rtx prev
= NULL_RTX
;
4356 next
= XEXP (temp
, 1);
4357 if (reg_overlap_mentioned_p (XEXP (note
, 0), XEXP (temp
, 0)))
4359 /* Splice temp out of list. */
4361 XEXP (prev
, 1) = next
;
4363 pbi
->mem_set_list
= next
;
4364 free_EXPR_LIST_node (temp
);
4365 pbi
->mem_set_list_len
--;
4375 /* EXP is either a MEM or a REG. Remove any dependant entries
4376 from pbi->mem_set_list. */
4379 invalidate_mems_from_set (pbi
, exp
)
4380 struct propagate_block_info
*pbi
;
4383 rtx temp
= pbi
->mem_set_list
;
4384 rtx prev
= NULL_RTX
;
4389 next
= XEXP (temp
, 1);
4390 if ((GET_CODE (exp
) == MEM
4391 && output_dependence (XEXP (temp
, 0), exp
))
4392 || (GET_CODE (exp
) == REG
4393 && reg_overlap_mentioned_p (exp
, XEXP (temp
, 0))))
4395 /* Splice this entry out of the list. */
4397 XEXP (prev
, 1) = next
;
4399 pbi
->mem_set_list
= next
;
4400 free_EXPR_LIST_node (temp
);
4401 pbi
->mem_set_list_len
--;
4409 /* Process the registers that are set within X. Their bits are set to
4410 1 in the regset DEAD, because they are dead prior to this insn.
4412 If INSN is nonzero, it is the insn being processed.
4414 FLAGS is the set of operations to perform. */
4417 mark_set_regs (pbi
, x
, insn
)
4418 struct propagate_block_info
*pbi
;
4421 rtx cond
= NULL_RTX
;
4426 for (link
= REG_NOTES (insn
); link
; link
= XEXP (link
, 1))
4428 if (REG_NOTE_KIND (link
) == REG_INC
)
4429 mark_set_1 (pbi
, SET
, XEXP (link
, 0),
4430 (GET_CODE (x
) == COND_EXEC
4431 ? COND_EXEC_TEST (x
) : NULL_RTX
),
4435 switch (code
= GET_CODE (x
))
4439 mark_set_1 (pbi
, code
, SET_DEST (x
), cond
, insn
, pbi
->flags
);
4443 cond
= COND_EXEC_TEST (x
);
4444 x
= COND_EXEC_CODE (x
);
4450 for (i
= XVECLEN (x
, 0) - 1; i
>= 0; i
--)
4452 rtx sub
= XVECEXP (x
, 0, i
);
4453 switch (code
= GET_CODE (sub
))
4456 if (cond
!= NULL_RTX
)
4459 cond
= COND_EXEC_TEST (sub
);
4460 sub
= COND_EXEC_CODE (sub
);
4461 if (GET_CODE (sub
) != SET
&& GET_CODE (sub
) != CLOBBER
)
4467 mark_set_1 (pbi
, code
, SET_DEST (sub
), cond
, insn
, pbi
->flags
);
4482 /* Process a single SET rtx, X. */
4485 mark_set_1 (pbi
, code
, reg
, cond
, insn
, flags
)
4486 struct propagate_block_info
*pbi
;
4488 rtx reg
, cond
, insn
;
4491 int regno_first
= -1, regno_last
= -1;
4492 unsigned long not_dead
= 0;
4495 /* Modifying just one hardware register of a multi-reg value or just a
4496 byte field of a register does not mean the value from before this insn
4497 is now dead. Of course, if it was dead after it's unused now. */
4499 switch (GET_CODE (reg
))
4502 /* Some targets place small structures in registers for return values of
4503 functions. We have to detect this case specially here to get correct
4504 flow information. */
4505 for (i
= XVECLEN (reg
, 0) - 1; i
>= 0; i
--)
4506 if (XEXP (XVECEXP (reg
, 0, i
), 0) != 0)
4507 mark_set_1 (pbi
, code
, XEXP (XVECEXP (reg
, 0, i
), 0), cond
, insn
,
4513 case STRICT_LOW_PART
:
4514 /* ??? Assumes STRICT_LOW_PART not used on multi-word registers. */
4516 reg
= XEXP (reg
, 0);
4517 while (GET_CODE (reg
) == SUBREG
4518 || GET_CODE (reg
) == ZERO_EXTRACT
4519 || GET_CODE (reg
) == SIGN_EXTRACT
4520 || GET_CODE (reg
) == STRICT_LOW_PART
);
4521 if (GET_CODE (reg
) == MEM
)
4523 not_dead
= (unsigned long) REGNO_REG_SET_P (pbi
->reg_live
, REGNO (reg
));
4527 regno_last
= regno_first
= REGNO (reg
);
4528 if (regno_first
< FIRST_PSEUDO_REGISTER
)
4529 regno_last
+= HARD_REGNO_NREGS (regno_first
, GET_MODE (reg
)) - 1;
4533 if (GET_CODE (SUBREG_REG (reg
)) == REG
)
4535 enum machine_mode outer_mode
= GET_MODE (reg
);
4536 enum machine_mode inner_mode
= GET_MODE (SUBREG_REG (reg
));
4538 /* Identify the range of registers affected. This is moderately
4539 tricky for hard registers. See alter_subreg. */
4541 regno_last
= regno_first
= REGNO (SUBREG_REG (reg
));
4542 if (regno_first
< FIRST_PSEUDO_REGISTER
)
4544 #ifdef ALTER_HARD_SUBREG
4545 regno_first
= ALTER_HARD_SUBREG (outer_mode
, SUBREG_WORD (reg
),
4546 inner_mode
, regno_first
);
4548 regno_first
+= SUBREG_WORD (reg
);
4550 regno_last
= (regno_first
4551 + HARD_REGNO_NREGS (regno_first
, outer_mode
) - 1);
4553 /* Since we've just adjusted the register number ranges, make
4554 sure REG matches. Otherwise some_was_live will be clear
4555 when it shouldn't have been, and we'll create incorrect
4556 REG_UNUSED notes. */
4557 reg
= gen_rtx_REG (outer_mode
, regno_first
);
4561 /* If the number of words in the subreg is less than the number
4562 of words in the full register, we have a well-defined partial
4563 set. Otherwise the high bits are undefined.
4565 This is only really applicable to pseudos, since we just took
4566 care of multi-word hard registers. */
4567 if (((GET_MODE_SIZE (outer_mode
)
4568 + UNITS_PER_WORD
- 1) / UNITS_PER_WORD
)
4569 < ((GET_MODE_SIZE (inner_mode
)
4570 + UNITS_PER_WORD
- 1) / UNITS_PER_WORD
))
4571 not_dead
= (unsigned long) REGNO_REG_SET_P (pbi
->reg_live
,
4574 reg
= SUBREG_REG (reg
);
4578 reg
= SUBREG_REG (reg
);
4585 /* If this set is a MEM, then it kills any aliased writes.
4586 If this set is a REG, then it kills any MEMs which use the reg. */
4587 if (optimize
&& (flags
& PROP_SCAN_DEAD_CODE
))
4589 if (GET_CODE (reg
) == MEM
|| GET_CODE (reg
) == REG
)
4590 invalidate_mems_from_set (pbi
, reg
);
4592 /* If the memory reference had embedded side effects (autoincrement
4593 address modes. Then we may need to kill some entries on the
4595 if (insn
&& GET_CODE (reg
) == MEM
)
4596 invalidate_mems_from_autoinc (pbi
, insn
);
4598 if (pbi
->mem_set_list_len
< MAX_MEM_SET_LIST_LEN
4599 && GET_CODE (reg
) == MEM
&& ! side_effects_p (reg
)
4600 /* ??? With more effort we could track conditional memory life. */
4602 /* We do not know the size of a BLKmode store, so we do not track
4603 them for redundant store elimination. */
4604 && GET_MODE (reg
) != BLKmode
4605 /* There are no REG_INC notes for SP, so we can't assume we'll see
4606 everything that invalidates it. To be safe, don't eliminate any
4607 stores though SP; none of them should be redundant anyway. */
4608 && ! reg_mentioned_p (stack_pointer_rtx
, reg
))
4611 /* Store a copy of mem, otherwise the address may be
4612 scrogged by find_auto_inc. */
4613 if (flags
& PROP_AUTOINC
)
4614 reg
= shallow_copy_rtx (reg
);
4616 pbi
->mem_set_list
= alloc_EXPR_LIST (0, reg
, pbi
->mem_set_list
);
4617 pbi
->mem_set_list_len
++;
4621 if (GET_CODE (reg
) == REG
4622 && ! (regno_first
== FRAME_POINTER_REGNUM
4623 && (! reload_completed
|| frame_pointer_needed
))
4624 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4625 && ! (regno_first
== HARD_FRAME_POINTER_REGNUM
4626 && (! reload_completed
|| frame_pointer_needed
))
4628 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4629 && ! (regno_first
== ARG_POINTER_REGNUM
&& fixed_regs
[regno_first
])
4633 int some_was_live
= 0, some_was_dead
= 0;
4635 for (i
= regno_first
; i
<= regno_last
; ++i
)
4637 int needed_regno
= REGNO_REG_SET_P (pbi
->reg_live
, i
);
4640 /* Order of the set operation matters here since both
4641 sets may be the same. */
4642 CLEAR_REGNO_REG_SET (pbi
->cond_local_set
, i
);
4643 if (cond
!= NULL_RTX
4644 && ! REGNO_REG_SET_P (pbi
->local_set
, i
))
4645 SET_REGNO_REG_SET (pbi
->cond_local_set
, i
);
4647 SET_REGNO_REG_SET (pbi
->local_set
, i
);
4649 if (code
!= CLOBBER
)
4650 SET_REGNO_REG_SET (pbi
->new_set
, i
);
4652 some_was_live
|= needed_regno
;
4653 some_was_dead
|= ! needed_regno
;
4656 #ifdef HAVE_conditional_execution
4657 /* Consider conditional death in deciding that the register needs
4659 if (some_was_live
&& ! not_dead
4660 /* The stack pointer is never dead. Well, not strictly true,
4661 but it's very difficult to tell from here. Hopefully
4662 combine_stack_adjustments will fix up the most egregious
4664 && regno_first
!= STACK_POINTER_REGNUM
)
4666 for (i
= regno_first
; i
<= regno_last
; ++i
)
4667 if (! mark_regno_cond_dead (pbi
, i
, cond
))
4668 not_dead
|= ((unsigned long) 1) << (i
- regno_first
);
4672 /* Additional data to record if this is the final pass. */
4673 if (flags
& (PROP_LOG_LINKS
| PROP_REG_INFO
4674 | PROP_DEATH_NOTES
| PROP_AUTOINC
))
4677 register int blocknum
= pbi
->bb
->index
;
4680 if (flags
& (PROP_LOG_LINKS
| PROP_AUTOINC
))
4682 y
= pbi
->reg_next_use
[regno_first
];
4684 /* The next use is no longer next, since a store intervenes. */
4685 for (i
= regno_first
; i
<= regno_last
; ++i
)
4686 pbi
->reg_next_use
[i
] = 0;
4689 if (flags
& PROP_REG_INFO
)
4691 for (i
= regno_first
; i
<= regno_last
; ++i
)
4693 /* Count (weighted) references, stores, etc. This counts a
4694 register twice if it is modified, but that is correct. */
4695 REG_N_SETS (i
) += 1;
4696 REG_N_REFS (i
) += (optimize_size
? 1
4697 : pbi
->bb
->loop_depth
+ 1);
4699 /* The insns where a reg is live are normally counted
4700 elsewhere, but we want the count to include the insn
4701 where the reg is set, and the normal counting mechanism
4702 would not count it. */
4703 REG_LIVE_LENGTH (i
) += 1;
4706 /* If this is a hard reg, record this function uses the reg. */
4707 if (regno_first
< FIRST_PSEUDO_REGISTER
)
4709 for (i
= regno_first
; i
<= regno_last
; i
++)
4710 regs_ever_live
[i
] = 1;
4714 /* Keep track of which basic blocks each reg appears in. */
4715 if (REG_BASIC_BLOCK (regno_first
) == REG_BLOCK_UNKNOWN
)
4716 REG_BASIC_BLOCK (regno_first
) = blocknum
;
4717 else if (REG_BASIC_BLOCK (regno_first
) != blocknum
)
4718 REG_BASIC_BLOCK (regno_first
) = REG_BLOCK_GLOBAL
;
4722 if (! some_was_dead
)
4724 if (flags
& PROP_LOG_LINKS
)
4726 /* Make a logical link from the next following insn
4727 that uses this register, back to this insn.
4728 The following insns have already been processed.
4730 We don't build a LOG_LINK for hard registers containing
4731 in ASM_OPERANDs. If these registers get replaced,
4732 we might wind up changing the semantics of the insn,
4733 even if reload can make what appear to be valid
4734 assignments later. */
4735 if (y
&& (BLOCK_NUM (y
) == blocknum
)
4736 && (regno_first
>= FIRST_PSEUDO_REGISTER
4737 || asm_noperands (PATTERN (y
)) < 0))
4738 LOG_LINKS (y
) = alloc_INSN_LIST (insn
, LOG_LINKS (y
));
4743 else if (! some_was_live
)
4745 if (flags
& PROP_REG_INFO
)
4746 REG_N_DEATHS (regno_first
) += 1;
4748 if (flags
& PROP_DEATH_NOTES
)
4750 /* Note that dead stores have already been deleted
4751 when possible. If we get here, we have found a
4752 dead store that cannot be eliminated (because the
4753 same insn does something useful). Indicate this
4754 by marking the reg being set as dying here. */
4756 = alloc_EXPR_LIST (REG_UNUSED
, reg
, REG_NOTES (insn
));
4761 if (flags
& PROP_DEATH_NOTES
)
4763 /* This is a case where we have a multi-word hard register
4764 and some, but not all, of the words of the register are
4765 needed in subsequent insns. Write REG_UNUSED notes
4766 for those parts that were not needed. This case should
4769 for (i
= regno_first
; i
<= regno_last
; ++i
)
4770 if (! REGNO_REG_SET_P (pbi
->reg_live
, i
))
4772 = alloc_EXPR_LIST (REG_UNUSED
,
4773 gen_rtx_REG (reg_raw_mode
[i
], i
),
4779 /* Mark the register as being dead. */
4781 /* The stack pointer is never dead. Well, not strictly true,
4782 but it's very difficult to tell from here. Hopefully
4783 combine_stack_adjustments will fix up the most egregious
4785 && regno_first
!= STACK_POINTER_REGNUM
)
4787 for (i
= regno_first
; i
<= regno_last
; ++i
)
4788 if (!(not_dead
& (((unsigned long) 1) << (i
- regno_first
))))
4789 CLEAR_REGNO_REG_SET (pbi
->reg_live
, i
);
4792 else if (GET_CODE (reg
) == REG
)
4794 if (flags
& (PROP_LOG_LINKS
| PROP_AUTOINC
))
4795 pbi
->reg_next_use
[regno_first
] = 0;
4798 /* If this is the last pass and this is a SCRATCH, show it will be dying
4799 here and count it. */
4800 else if (GET_CODE (reg
) == SCRATCH
)
4802 if (flags
& PROP_DEATH_NOTES
)
4804 = alloc_EXPR_LIST (REG_UNUSED
, reg
, REG_NOTES (insn
));
4808 #ifdef HAVE_conditional_execution
4809 /* Mark REGNO conditionally dead.
4810 Return true if the register is now unconditionally dead. */
4813 mark_regno_cond_dead (pbi
, regno
, cond
)
4814 struct propagate_block_info
*pbi
;
4818 /* If this is a store to a predicate register, the value of the
4819 predicate is changing, we don't know that the predicate as seen
4820 before is the same as that seen after. Flush all dependent
4821 conditions from reg_cond_dead. This will make all such
4822 conditionally live registers unconditionally live. */
4823 if (REGNO_REG_SET_P (pbi
->reg_cond_reg
, regno
))
4824 flush_reg_cond_reg (pbi
, regno
);
4826 /* If this is an unconditional store, remove any conditional
4827 life that may have existed. */
4828 if (cond
== NULL_RTX
)
4829 splay_tree_remove (pbi
->reg_cond_dead
, regno
);
4832 splay_tree_node node
;
4833 struct reg_cond_life_info
*rcli
;
4836 /* Otherwise this is a conditional set. Record that fact.
4837 It may have been conditionally used, or there may be a
4838 subsequent set with a complimentary condition. */
4840 node
= splay_tree_lookup (pbi
->reg_cond_dead
, regno
);
4843 /* The register was unconditionally live previously.
4844 Record the current condition as the condition under
4845 which it is dead. */
4846 rcli
= (struct reg_cond_life_info
*) xmalloc (sizeof (*rcli
));
4847 rcli
->condition
= cond
;
4848 rcli
->stores
= cond
;
4849 rcli
->orig_condition
= const0_rtx
;
4850 splay_tree_insert (pbi
->reg_cond_dead
, regno
,
4851 (splay_tree_value
) rcli
);
4853 SET_REGNO_REG_SET (pbi
->reg_cond_reg
, REGNO (XEXP (cond
, 0)));
4855 /* Not unconditionaly dead. */
4860 /* The register was conditionally live previously.
4861 Add the new condition to the old. */
4862 rcli
= (struct reg_cond_life_info
*) node
->value
;
4863 ncond
= rcli
->condition
;
4864 ncond
= ior_reg_cond (ncond
, cond
, 1);
4865 if (rcli
->stores
== const0_rtx
)
4866 rcli
->stores
= cond
;
4867 else if (rcli
->stores
!= const1_rtx
)
4868 rcli
->stores
= ior_reg_cond (rcli
->stores
, cond
, 1);
4870 /* If the register is now unconditionally dead, remove the entry
4871 in the splay_tree. A register is unconditionally dead if the
4872 dead condition ncond is true. A register is also unconditionally
4873 dead if the sum of all conditional stores is an unconditional
4874 store (stores is true), and the dead condition is identically the
4875 same as the original dead condition initialized at the end of
4876 the block. This is a pointer compare, not an rtx_equal_p
4878 if (ncond
== const1_rtx
4879 || (ncond
== rcli
->orig_condition
&& rcli
->stores
== const1_rtx
))
4880 splay_tree_remove (pbi
->reg_cond_dead
, regno
);
4883 rcli
->condition
= ncond
;
4885 SET_REGNO_REG_SET (pbi
->reg_cond_reg
, REGNO (XEXP (cond
, 0)));
4887 /* Not unconditionaly dead. */
4896 /* Called from splay_tree_delete for pbi->reg_cond_life. */
4899 free_reg_cond_life_info (value
)
4900 splay_tree_value value
;
4902 struct reg_cond_life_info
*rcli
= (struct reg_cond_life_info
*) value
;
4906 /* Helper function for flush_reg_cond_reg. */
4909 flush_reg_cond_reg_1 (node
, data
)
4910 splay_tree_node node
;
4913 struct reg_cond_life_info
*rcli
;
4914 int *xdata
= (int *) data
;
4915 unsigned int regno
= xdata
[0];
4917 /* Don't need to search if last flushed value was farther on in
4918 the in-order traversal. */
4919 if (xdata
[1] >= (int) node
->key
)
4922 /* Splice out portions of the expression that refer to regno. */
4923 rcli
= (struct reg_cond_life_info
*) node
->value
;
4924 rcli
->condition
= elim_reg_cond (rcli
->condition
, regno
);
4925 if (rcli
->stores
!= const0_rtx
&& rcli
->stores
!= const1_rtx
)
4926 rcli
->stores
= elim_reg_cond (rcli
->stores
, regno
);
4928 /* If the entire condition is now false, signal the node to be removed. */
4929 if (rcli
->condition
== const0_rtx
)
4931 xdata
[1] = node
->key
;
4934 else if (rcli
->condition
== const1_rtx
)
4940 /* Flush all (sub) expressions referring to REGNO from REG_COND_LIVE. */
4943 flush_reg_cond_reg (pbi
, regno
)
4944 struct propagate_block_info
*pbi
;
4951 while (splay_tree_foreach (pbi
->reg_cond_dead
,
4952 flush_reg_cond_reg_1
, pair
) == -1)
4953 splay_tree_remove (pbi
->reg_cond_dead
, pair
[1]);
4955 CLEAR_REGNO_REG_SET (pbi
->reg_cond_reg
, regno
);
4958 /* Logical arithmetic on predicate conditions. IOR, NOT and AND.
4959 For ior/and, the ADD flag determines whether we want to add the new
4960 condition X to the old one unconditionally. If it is zero, we will
4961 only return a new expression if X allows us to simplify part of
4962 OLD, otherwise we return OLD unchanged to the caller.
4963 If ADD is nonzero, we will return a new condition in all cases. The
4964 toplevel caller of one of these functions should always pass 1 for
4968 ior_reg_cond (old
, x
, add
)
4974 if (GET_RTX_CLASS (GET_CODE (old
)) == '<')
4976 if (GET_RTX_CLASS (GET_CODE (x
)) == '<'
4977 && GET_CODE (x
) == reverse_condition (GET_CODE (old
))
4978 && REGNO (XEXP (x
, 0)) == REGNO (XEXP (old
, 0)))
4980 if (GET_CODE (x
) == GET_CODE (old
)
4981 && REGNO (XEXP (x
, 0)) == REGNO (XEXP (old
, 0)))
4985 return gen_rtx_IOR (0, old
, x
);
4988 switch (GET_CODE (old
))
4991 op0
= ior_reg_cond (XEXP (old
, 0), x
, 0);
4992 op1
= ior_reg_cond (XEXP (old
, 1), x
, 0);
4993 if (op0
!= XEXP (old
, 0) || op1
!= XEXP (old
, 1))
4995 if (op0
== const0_rtx
)
4997 if (op1
== const0_rtx
)
4999 if (op0
== const1_rtx
|| op1
== const1_rtx
)
5001 if (op0
== XEXP (old
, 0))
5002 op0
= gen_rtx_IOR (0, op0
, x
);
5004 op1
= gen_rtx_IOR (0, op1
, x
);
5005 return gen_rtx_IOR (0, op0
, op1
);
5009 return gen_rtx_IOR (0, old
, x
);
5012 op0
= ior_reg_cond (XEXP (old
, 0), x
, 0);
5013 op1
= ior_reg_cond (XEXP (old
, 1), x
, 0);
5014 if (op0
!= XEXP (old
, 0) || op1
!= XEXP (old
, 1))
5016 if (op0
== const1_rtx
)
5018 if (op1
== const1_rtx
)
5020 if (op0
== const0_rtx
|| op1
== const0_rtx
)
5022 if (op0
== XEXP (old
, 0))
5023 op0
= gen_rtx_IOR (0, op0
, x
);
5025 op1
= gen_rtx_IOR (0, op1
, x
);
5026 return gen_rtx_AND (0, op0
, op1
);
5030 return gen_rtx_IOR (0, old
, x
);
5033 op0
= and_reg_cond (XEXP (old
, 0), not_reg_cond (x
), 0);
5034 if (op0
!= XEXP (old
, 0))
5035 return not_reg_cond (op0
);
5038 return gen_rtx_IOR (0, old
, x
);
5049 enum rtx_code x_code
;
5051 if (x
== const0_rtx
)
5053 else if (x
== const1_rtx
)
5055 x_code
= GET_CODE (x
);
5058 if (GET_RTX_CLASS (x_code
) == '<'
5059 && GET_CODE (XEXP (x
, 0)) == REG
)
5061 if (XEXP (x
, 1) != const0_rtx
)
5064 return gen_rtx_fmt_ee (reverse_condition (x_code
),
5065 VOIDmode
, XEXP (x
, 0), const0_rtx
);
5067 return gen_rtx_NOT (0, x
);
5071 and_reg_cond (old
, x
, add
)
5077 if (GET_RTX_CLASS (GET_CODE (old
)) == '<')
5079 if (GET_RTX_CLASS (GET_CODE (x
)) == '<'
5080 && GET_CODE (x
) == reverse_condition (GET_CODE (old
))
5081 && REGNO (XEXP (x
, 0)) == REGNO (XEXP (old
, 0)))
5083 if (GET_CODE (x
) == GET_CODE (old
)
5084 && REGNO (XEXP (x
, 0)) == REGNO (XEXP (old
, 0)))
5088 return gen_rtx_AND (0, old
, x
);
5091 switch (GET_CODE (old
))
5094 op0
= and_reg_cond (XEXP (old
, 0), x
, 0);
5095 op1
= and_reg_cond (XEXP (old
, 1), x
, 0);
5096 if (op0
!= XEXP (old
, 0) || op1
!= XEXP (old
, 1))
5098 if (op0
== const0_rtx
)
5100 if (op1
== const0_rtx
)
5102 if (op0
== const1_rtx
|| op1
== const1_rtx
)
5104 if (op0
== XEXP (old
, 0))
5105 op0
= gen_rtx_AND (0, op0
, x
);
5107 op1
= gen_rtx_AND (0, op1
, x
);
5108 return gen_rtx_IOR (0, op0
, op1
);
5112 return gen_rtx_AND (0, old
, x
);
5115 op0
= and_reg_cond (XEXP (old
, 0), x
, 0);
5116 op1
= and_reg_cond (XEXP (old
, 1), x
, 0);
5117 if (op0
!= XEXP (old
, 0) || op1
!= XEXP (old
, 1))
5119 if (op0
== const1_rtx
)
5121 if (op1
== const1_rtx
)
5123 if (op0
== const0_rtx
|| op1
== const0_rtx
)
5125 if (op0
== XEXP (old
, 0))
5126 op0
= gen_rtx_AND (0, op0
, x
);
5128 op1
= gen_rtx_AND (0, op1
, x
);
5129 return gen_rtx_AND (0, op0
, op1
);
5134 /* If X is identical to one of the existing terms of the AND,
5135 then just return what we already have. */
5136 /* ??? There really should be some sort of recursive check here in
5137 case there are nested ANDs. */
5138 if ((GET_CODE (XEXP (old
, 0)) == GET_CODE (x
)
5139 && REGNO (XEXP (XEXP (old
, 0), 0)) == REGNO (XEXP (x
, 0)))
5140 || (GET_CODE (XEXP (old
, 1)) == GET_CODE (x
)
5141 && REGNO (XEXP (XEXP (old
, 1), 0)) == REGNO (XEXP (x
, 0))))
5144 return gen_rtx_AND (0, old
, x
);
5147 op0
= ior_reg_cond (XEXP (old
, 0), not_reg_cond (x
), 0);
5148 if (op0
!= XEXP (old
, 0))
5149 return not_reg_cond (op0
);
5152 return gen_rtx_AND (0, old
, x
);
5159 /* Given a condition X, remove references to reg REGNO and return the
5160 new condition. The removal will be done so that all conditions
5161 involving REGNO are considered to evaluate to false. This function
5162 is used when the value of REGNO changes. */
5165 elim_reg_cond (x
, regno
)
5171 if (GET_RTX_CLASS (GET_CODE (x
)) == '<')
5173 if (REGNO (XEXP (x
, 0)) == regno
)
5178 switch (GET_CODE (x
))
5181 op0
= elim_reg_cond (XEXP (x
, 0), regno
);
5182 op1
= elim_reg_cond (XEXP (x
, 1), regno
);
5183 if (op0
== const0_rtx
|| op1
== const0_rtx
)
5185 if (op0
== const1_rtx
)
5187 if (op1
== const1_rtx
)
5189 if (op0
== XEXP (x
, 0) && op1
== XEXP (x
, 1))
5191 return gen_rtx_AND (0, op0
, op1
);
5194 op0
= elim_reg_cond (XEXP (x
, 0), regno
);
5195 op1
= elim_reg_cond (XEXP (x
, 1), regno
);
5196 if (op0
== const1_rtx
|| op1
== const1_rtx
)
5198 if (op0
== const0_rtx
)
5200 if (op1
== const0_rtx
)
5202 if (op0
== XEXP (x
, 0) && op1
== XEXP (x
, 1))
5204 return gen_rtx_IOR (0, op0
, op1
);
5207 op0
= elim_reg_cond (XEXP (x
, 0), regno
);
5208 if (op0
== const0_rtx
)
5210 if (op0
== const1_rtx
)
5212 if (op0
!= XEXP (x
, 0))
5213 return not_reg_cond (op0
);
5220 #endif /* HAVE_conditional_execution */
5224 /* Try to substitute the auto-inc expression INC as the address inside
5225 MEM which occurs in INSN. Currently, the address of MEM is an expression
5226 involving INCR_REG, and INCR is the next use of INCR_REG; it is an insn
5227 that has a single set whose source is a PLUS of INCR_REG and something
5231 attempt_auto_inc (pbi
, inc
, insn
, mem
, incr
, incr_reg
)
5232 struct propagate_block_info
*pbi
;
5233 rtx inc
, insn
, mem
, incr
, incr_reg
;
5235 int regno
= REGNO (incr_reg
);
5236 rtx set
= single_set (incr
);
5237 rtx q
= SET_DEST (set
);
5238 rtx y
= SET_SRC (set
);
5239 int opnum
= XEXP (y
, 0) == incr_reg
? 0 : 1;
5241 /* Make sure this reg appears only once in this insn. */
5242 if (count_occurrences (PATTERN (insn
), incr_reg
, 1) != 1)
5245 if (dead_or_set_p (incr
, incr_reg
)
5246 /* Mustn't autoinc an eliminable register. */
5247 && (regno
>= FIRST_PSEUDO_REGISTER
5248 || ! TEST_HARD_REG_BIT (elim_reg_set
, regno
)))
5250 /* This is the simple case. Try to make the auto-inc. If
5251 we can't, we are done. Otherwise, we will do any
5252 needed updates below. */
5253 if (! validate_change (insn
, &XEXP (mem
, 0), inc
, 0))
5256 else if (GET_CODE (q
) == REG
5257 /* PREV_INSN used here to check the semi-open interval
5259 && ! reg_used_between_p (q
, PREV_INSN (insn
), incr
)
5260 /* We must also check for sets of q as q may be
5261 a call clobbered hard register and there may
5262 be a call between PREV_INSN (insn) and incr. */
5263 && ! reg_set_between_p (q
, PREV_INSN (insn
), incr
))
5265 /* We have *p followed sometime later by q = p+size.
5266 Both p and q must be live afterward,
5267 and q is not used between INSN and its assignment.
5268 Change it to q = p, ...*q..., q = q+size.
5269 Then fall into the usual case. */
5273 emit_move_insn (q
, incr_reg
);
5274 insns
= get_insns ();
5277 if (basic_block_for_insn
)
5278 for (temp
= insns
; temp
; temp
= NEXT_INSN (temp
))
5279 set_block_for_insn (temp
, pbi
->bb
);
5281 /* If we can't make the auto-inc, or can't make the
5282 replacement into Y, exit. There's no point in making
5283 the change below if we can't do the auto-inc and doing
5284 so is not correct in the pre-inc case. */
5287 validate_change (insn
, &XEXP (mem
, 0), inc
, 1);
5288 validate_change (incr
, &XEXP (y
, opnum
), q
, 1);
5289 if (! apply_change_group ())
5292 /* We now know we'll be doing this change, so emit the
5293 new insn(s) and do the updates. */
5294 emit_insns_before (insns
, insn
);
5296 if (pbi
->bb
->head
== insn
)
5297 pbi
->bb
->head
= insns
;
5299 /* INCR will become a NOTE and INSN won't contain a
5300 use of INCR_REG. If a use of INCR_REG was just placed in
5301 the insn before INSN, make that the next use.
5302 Otherwise, invalidate it. */
5303 if (GET_CODE (PREV_INSN (insn
)) == INSN
5304 && GET_CODE (PATTERN (PREV_INSN (insn
))) == SET
5305 && SET_SRC (PATTERN (PREV_INSN (insn
))) == incr_reg
)
5306 pbi
->reg_next_use
[regno
] = PREV_INSN (insn
);
5308 pbi
->reg_next_use
[regno
] = 0;
5313 /* REGNO is now used in INCR which is below INSN, but
5314 it previously wasn't live here. If we don't mark
5315 it as live, we'll put a REG_DEAD note for it
5316 on this insn, which is incorrect. */
5317 SET_REGNO_REG_SET (pbi
->reg_live
, regno
);
5319 /* If there are any calls between INSN and INCR, show
5320 that REGNO now crosses them. */
5321 for (temp
= insn
; temp
!= incr
; temp
= NEXT_INSN (temp
))
5322 if (GET_CODE (temp
) == CALL_INSN
)
5323 REG_N_CALLS_CROSSED (regno
)++;
5328 /* If we haven't returned, it means we were able to make the
5329 auto-inc, so update the status. First, record that this insn
5330 has an implicit side effect. */
5332 REG_NOTES (insn
) = alloc_EXPR_LIST (REG_INC
, incr_reg
, REG_NOTES (insn
));
5334 /* Modify the old increment-insn to simply copy
5335 the already-incremented value of our register. */
5336 if (! validate_change (incr
, &SET_SRC (set
), incr_reg
, 0))
5339 /* If that makes it a no-op (copying the register into itself) delete
5340 it so it won't appear to be a "use" and a "set" of this
5342 if (REGNO (SET_DEST (set
)) == REGNO (incr_reg
))
5344 /* If the original source was dead, it's dead now. */
5347 while ((note
= find_reg_note (incr
, REG_DEAD
, NULL_RTX
)) != NULL_RTX
)
5349 remove_note (incr
, note
);
5350 if (XEXP (note
, 0) != incr_reg
)
5351 CLEAR_REGNO_REG_SET (pbi
->reg_live
, REGNO (XEXP (note
, 0)));
5354 PUT_CODE (incr
, NOTE
);
5355 NOTE_LINE_NUMBER (incr
) = NOTE_INSN_DELETED
;
5356 NOTE_SOURCE_FILE (incr
) = 0;
5359 if (regno
>= FIRST_PSEUDO_REGISTER
)
5361 /* Count an extra reference to the reg. When a reg is
5362 incremented, spilling it is worse, so we want to make
5363 that less likely. */
5364 REG_N_REFS (regno
) += (optimize_size
? 1 : pbi
->bb
->loop_depth
+ 1);
5366 /* Count the increment as a setting of the register,
5367 even though it isn't a SET in rtl. */
5368 REG_N_SETS (regno
)++;
5372 /* X is a MEM found in INSN. See if we can convert it into an auto-increment
5376 find_auto_inc (pbi
, x
, insn
)
5377 struct propagate_block_info
*pbi
;
5381 rtx addr
= XEXP (x
, 0);
5382 HOST_WIDE_INT offset
= 0;
5383 rtx set
, y
, incr
, inc_val
;
5385 int size
= GET_MODE_SIZE (GET_MODE (x
));
5387 if (GET_CODE (insn
) == JUMP_INSN
)
5390 /* Here we detect use of an index register which might be good for
5391 postincrement, postdecrement, preincrement, or predecrement. */
5393 if (GET_CODE (addr
) == PLUS
&& GET_CODE (XEXP (addr
, 1)) == CONST_INT
)
5394 offset
= INTVAL (XEXP (addr
, 1)), addr
= XEXP (addr
, 0);
5396 if (GET_CODE (addr
) != REG
)
5399 regno
= REGNO (addr
);
5401 /* Is the next use an increment that might make auto-increment? */
5402 incr
= pbi
->reg_next_use
[regno
];
5403 if (incr
== 0 || BLOCK_NUM (incr
) != BLOCK_NUM (insn
))
5405 set
= single_set (incr
);
5406 if (set
== 0 || GET_CODE (set
) != SET
)
5410 if (GET_CODE (y
) != PLUS
)
5413 if (REG_P (XEXP (y
, 0)) && REGNO (XEXP (y
, 0)) == REGNO (addr
))
5414 inc_val
= XEXP (y
, 1);
5415 else if (REG_P (XEXP (y
, 1)) && REGNO (XEXP (y
, 1)) == REGNO (addr
))
5416 inc_val
= XEXP (y
, 0);
5420 if (GET_CODE (inc_val
) == CONST_INT
)
5422 if (HAVE_POST_INCREMENT
5423 && (INTVAL (inc_val
) == size
&& offset
== 0))
5424 attempt_auto_inc (pbi
, gen_rtx_POST_INC (Pmode
, addr
), insn
, x
,
5426 else if (HAVE_POST_DECREMENT
5427 && (INTVAL (inc_val
) == -size
&& offset
== 0))
5428 attempt_auto_inc (pbi
, gen_rtx_POST_DEC (Pmode
, addr
), insn
, x
,
5430 else if (HAVE_PRE_INCREMENT
5431 && (INTVAL (inc_val
) == size
&& offset
== size
))
5432 attempt_auto_inc (pbi
, gen_rtx_PRE_INC (Pmode
, addr
), insn
, x
,
5434 else if (HAVE_PRE_DECREMENT
5435 && (INTVAL (inc_val
) == -size
&& offset
== -size
))
5436 attempt_auto_inc (pbi
, gen_rtx_PRE_DEC (Pmode
, addr
), insn
, x
,
5438 else if (HAVE_POST_MODIFY_DISP
&& offset
== 0)
5439 attempt_auto_inc (pbi
, gen_rtx_POST_MODIFY (Pmode
, addr
,
5440 gen_rtx_PLUS (Pmode
,
5443 insn
, x
, incr
, addr
);
5445 else if (GET_CODE (inc_val
) == REG
5446 && ! reg_set_between_p (inc_val
, PREV_INSN (insn
),
5450 if (HAVE_POST_MODIFY_REG
&& offset
== 0)
5451 attempt_auto_inc (pbi
, gen_rtx_POST_MODIFY (Pmode
, addr
,
5452 gen_rtx_PLUS (Pmode
,
5455 insn
, x
, incr
, addr
);
5459 #endif /* AUTO_INC_DEC */
5462 mark_used_reg (pbi
, reg
, cond
, insn
)
5463 struct propagate_block_info
*pbi
;
5465 rtx cond ATTRIBUTE_UNUSED
;
5468 int regno
= REGNO (reg
);
5469 int some_was_live
= REGNO_REG_SET_P (pbi
->reg_live
, regno
);
5470 int some_was_dead
= ! some_was_live
;
5474 /* A hard reg in a wide mode may really be multiple registers.
5475 If so, mark all of them just like the first. */
5476 if (regno
< FIRST_PSEUDO_REGISTER
)
5478 n
= HARD_REGNO_NREGS (regno
, GET_MODE (reg
));
5481 int needed_regno
= REGNO_REG_SET_P (pbi
->reg_live
, regno
+ n
);
5482 some_was_live
|= needed_regno
;
5483 some_was_dead
|= ! needed_regno
;
5487 if (pbi
->flags
& (PROP_LOG_LINKS
| PROP_AUTOINC
))
5489 /* Record where each reg is used, so when the reg is set we know
5490 the next insn that uses it. */
5491 pbi
->reg_next_use
[regno
] = insn
;
5494 if (pbi
->flags
& PROP_REG_INFO
)
5496 if (regno
< FIRST_PSEUDO_REGISTER
)
5498 /* If this is a register we are going to try to eliminate,
5499 don't mark it live here. If we are successful in
5500 eliminating it, it need not be live unless it is used for
5501 pseudos, in which case it will have been set live when it
5502 was allocated to the pseudos. If the register will not
5503 be eliminated, reload will set it live at that point.
5505 Otherwise, record that this function uses this register. */
5506 /* ??? The PPC backend tries to "eliminate" on the pic
5507 register to itself. This should be fixed. In the mean
5508 time, hack around it. */
5510 if (! (TEST_HARD_REG_BIT (elim_reg_set
, regno
)
5511 && (regno
== FRAME_POINTER_REGNUM
5512 || regno
== ARG_POINTER_REGNUM
)))
5514 int n
= HARD_REGNO_NREGS (regno
, GET_MODE (reg
));
5516 regs_ever_live
[regno
+ --n
] = 1;
5522 /* Keep track of which basic block each reg appears in. */
5524 register int blocknum
= pbi
->bb
->index
;
5525 if (REG_BASIC_BLOCK (regno
) == REG_BLOCK_UNKNOWN
)
5526 REG_BASIC_BLOCK (regno
) = blocknum
;
5527 else if (REG_BASIC_BLOCK (regno
) != blocknum
)
5528 REG_BASIC_BLOCK (regno
) = REG_BLOCK_GLOBAL
;
5530 /* Count (weighted) number of uses of each reg. */
5531 REG_N_REFS (regno
) += (optimize_size
? 1
5532 : pbi
->bb
->loop_depth
+ 1);
5536 /* Find out if any of the register was set this insn. */
5537 some_not_set
= ! REGNO_REG_SET_P (pbi
->new_set
, regno
);
5538 if (regno
< FIRST_PSEUDO_REGISTER
)
5540 n
= HARD_REGNO_NREGS (regno
, GET_MODE (reg
));
5542 some_not_set
|= ! REGNO_REG_SET_P (pbi
->new_set
, regno
+ n
);
5545 /* Record and count the insns in which a reg dies. If it is used in
5546 this insn and was dead below the insn then it dies in this insn.
5547 If it was set in this insn, we do not make a REG_DEAD note;
5548 likewise if we already made such a note. */
5549 if ((pbi
->flags
& (PROP_DEATH_NOTES
| PROP_REG_INFO
))
5553 /* Check for the case where the register dying partially
5554 overlaps the register set by this insn. */
5555 if (regno
< FIRST_PSEUDO_REGISTER
5556 && HARD_REGNO_NREGS (regno
, GET_MODE (reg
)) > 1)
5558 n
= HARD_REGNO_NREGS (regno
, GET_MODE (reg
));
5560 some_was_live
|= REGNO_REG_SET_P (pbi
->new_set
, regno
+ n
);
5563 /* If none of the words in X is needed, make a REG_DEAD note.
5564 Otherwise, we must make partial REG_DEAD notes. */
5565 if (! some_was_live
)
5567 if ((pbi
->flags
& PROP_DEATH_NOTES
)
5568 && ! find_regno_note (insn
, REG_DEAD
, regno
))
5570 = alloc_EXPR_LIST (REG_DEAD
, reg
, REG_NOTES (insn
));
5572 if (pbi
->flags
& PROP_REG_INFO
)
5573 REG_N_DEATHS (regno
)++;
5577 /* Don't make a REG_DEAD note for a part of a register
5578 that is set in the insn. */
5580 n
= regno
+ HARD_REGNO_NREGS (regno
, GET_MODE (reg
)) - 1;
5581 for (; n
>= regno
; n
--)
5582 if (! REGNO_REG_SET_P (pbi
->reg_live
, n
)
5583 && ! dead_or_set_regno_p (insn
, n
))
5585 = alloc_EXPR_LIST (REG_DEAD
,
5586 gen_rtx_REG (reg_raw_mode
[n
], n
),
5591 SET_REGNO_REG_SET (pbi
->reg_live
, regno
);
5592 if (regno
< FIRST_PSEUDO_REGISTER
)
5594 n
= HARD_REGNO_NREGS (regno
, GET_MODE (reg
));
5596 SET_REGNO_REG_SET (pbi
->reg_live
, regno
+ n
);
5599 #ifdef HAVE_conditional_execution
5600 /* If this is a conditional use, record that fact. If it is later
5601 conditionally set, we'll know to kill the register. */
5602 if (cond
!= NULL_RTX
)
5604 splay_tree_node node
;
5605 struct reg_cond_life_info
*rcli
;
5610 node
= splay_tree_lookup (pbi
->reg_cond_dead
, regno
);
5613 /* The register was unconditionally live previously.
5614 No need to do anything. */
5618 /* The register was conditionally live previously.
5619 Subtract the new life cond from the old death cond. */
5620 rcli
= (struct reg_cond_life_info
*) node
->value
;
5621 ncond
= rcli
->condition
;
5622 ncond
= and_reg_cond (ncond
, not_reg_cond (cond
), 1);
5624 /* If the register is now unconditionally live, remove the
5625 entry in the splay_tree. */
5626 if (ncond
== const0_rtx
)
5627 splay_tree_remove (pbi
->reg_cond_dead
, regno
);
5630 rcli
->condition
= ncond
;
5631 SET_REGNO_REG_SET (pbi
->reg_cond_reg
, REGNO (XEXP (cond
, 0)));
5637 /* The register was not previously live at all. Record
5638 the condition under which it is still dead. */
5639 rcli
= (struct reg_cond_life_info
*) xmalloc (sizeof (*rcli
));
5640 rcli
->condition
= not_reg_cond (cond
);
5641 rcli
->stores
= const0_rtx
;
5642 rcli
->orig_condition
= const0_rtx
;
5643 splay_tree_insert (pbi
->reg_cond_dead
, regno
,
5644 (splay_tree_value
) rcli
);
5646 SET_REGNO_REG_SET (pbi
->reg_cond_reg
, REGNO (XEXP (cond
, 0)));
5649 else if (some_was_live
)
5651 splay_tree_node node
;
5653 node
= splay_tree_lookup (pbi
->reg_cond_dead
, regno
);
5656 /* The register was conditionally live previously, but is now
5657 unconditionally so. Remove it from the conditionally dead
5658 list, so that a conditional set won't cause us to think
5660 splay_tree_remove (pbi
->reg_cond_dead
, regno
);
5667 /* Scan expression X and store a 1-bit in NEW_LIVE for each reg it uses.
5668 This is done assuming the registers needed from X are those that
5669 have 1-bits in PBI->REG_LIVE.
5671 INSN is the containing instruction. If INSN is dead, this function
5675 mark_used_regs (pbi
, x
, cond
, insn
)
5676 struct propagate_block_info
*pbi
;
5679 register RTX_CODE code
;
5681 int flags
= pbi
->flags
;
5684 code
= GET_CODE (x
);
5704 /* If we are clobbering a MEM, mark any registers inside the address
5706 if (GET_CODE (XEXP (x
, 0)) == MEM
)
5707 mark_used_regs (pbi
, XEXP (XEXP (x
, 0), 0), cond
, insn
);
5711 /* Don't bother watching stores to mems if this is not the
5712 final pass. We'll not be deleting dead stores this round. */
5713 if (optimize
&& (flags
& PROP_SCAN_DEAD_CODE
))
5715 /* Invalidate the data for the last MEM stored, but only if MEM is
5716 something that can be stored into. */
5717 if (GET_CODE (XEXP (x
, 0)) == SYMBOL_REF
5718 && CONSTANT_POOL_ADDRESS_P (XEXP (x
, 0)))
5719 /* Needn't clear the memory set list. */
5723 rtx temp
= pbi
->mem_set_list
;
5724 rtx prev
= NULL_RTX
;
5729 next
= XEXP (temp
, 1);
5730 if (anti_dependence (XEXP (temp
, 0), x
))
5732 /* Splice temp out of the list. */
5734 XEXP (prev
, 1) = next
;
5736 pbi
->mem_set_list
= next
;
5737 free_EXPR_LIST_node (temp
);
5738 pbi
->mem_set_list_len
--;
5746 /* If the memory reference had embedded side effects (autoincrement
5747 address modes. Then we may need to kill some entries on the
5750 invalidate_mems_from_autoinc (pbi
, insn
);
5754 if (flags
& PROP_AUTOINC
)
5755 find_auto_inc (pbi
, x
, insn
);
5760 #ifdef CLASS_CANNOT_CHANGE_MODE
5761 if (GET_CODE (SUBREG_REG (x
)) == REG
5762 && REGNO (SUBREG_REG (x
)) >= FIRST_PSEUDO_REGISTER
5763 && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (x
),
5764 GET_MODE (SUBREG_REG (x
))))
5765 REG_CHANGES_MODE (REGNO (SUBREG_REG (x
))) = 1;
5768 /* While we're here, optimize this case. */
5770 if (GET_CODE (x
) != REG
)
5775 /* See a register other than being set => mark it as needed. */
5776 mark_used_reg (pbi
, x
, cond
, insn
);
5781 register rtx testreg
= SET_DEST (x
);
5784 /* If storing into MEM, don't show it as being used. But do
5785 show the address as being used. */
5786 if (GET_CODE (testreg
) == MEM
)
5789 if (flags
& PROP_AUTOINC
)
5790 find_auto_inc (pbi
, testreg
, insn
);
5792 mark_used_regs (pbi
, XEXP (testreg
, 0), cond
, insn
);
5793 mark_used_regs (pbi
, SET_SRC (x
), cond
, insn
);
5797 /* Storing in STRICT_LOW_PART is like storing in a reg
5798 in that this SET might be dead, so ignore it in TESTREG.
5799 but in some other ways it is like using the reg.
5801 Storing in a SUBREG or a bit field is like storing the entire
5802 register in that if the register's value is not used
5803 then this SET is not needed. */
5804 while (GET_CODE (testreg
) == STRICT_LOW_PART
5805 || GET_CODE (testreg
) == ZERO_EXTRACT
5806 || GET_CODE (testreg
) == SIGN_EXTRACT
5807 || GET_CODE (testreg
) == SUBREG
)
5809 #ifdef CLASS_CANNOT_CHANGE_MODE
5810 if (GET_CODE (testreg
) == SUBREG
5811 && GET_CODE (SUBREG_REG (testreg
)) == REG
5812 && REGNO (SUBREG_REG (testreg
)) >= FIRST_PSEUDO_REGISTER
5813 && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (SUBREG_REG (testreg
)),
5814 GET_MODE (testreg
)))
5815 REG_CHANGES_MODE (REGNO (SUBREG_REG (testreg
))) = 1;
5818 /* Modifying a single register in an alternate mode
5819 does not use any of the old value. But these other
5820 ways of storing in a register do use the old value. */
5821 if (GET_CODE (testreg
) == SUBREG
5822 && !(REG_SIZE (SUBREG_REG (testreg
)) > REG_SIZE (testreg
)))
5827 testreg
= XEXP (testreg
, 0);
5830 /* If this is a store into a register or group of registers,
5831 recursively scan the value being stored. */
5833 if ((GET_CODE (testreg
) == PARALLEL
5834 && GET_MODE (testreg
) == BLKmode
)
5835 || (GET_CODE (testreg
) == REG
5836 && (regno
= REGNO (testreg
),
5837 ! (regno
== FRAME_POINTER_REGNUM
5838 && (! reload_completed
|| frame_pointer_needed
)))
5839 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
5840 && ! (regno
== HARD_FRAME_POINTER_REGNUM
5841 && (! reload_completed
|| frame_pointer_needed
))
5843 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
5844 && ! (regno
== ARG_POINTER_REGNUM
&& fixed_regs
[regno
])
5849 mark_used_regs (pbi
, SET_DEST (x
), cond
, insn
);
5850 mark_used_regs (pbi
, SET_SRC (x
), cond
, insn
);
5857 case UNSPEC_VOLATILE
:
5861 /* Traditional and volatile asm instructions must be considered to use
5862 and clobber all hard registers, all pseudo-registers and all of
5863 memory. So must TRAP_IF and UNSPEC_VOLATILE operations.
5865 Consider for instance a volatile asm that changes the fpu rounding
5866 mode. An insn should not be moved across this even if it only uses
5867 pseudo-regs because it might give an incorrectly rounded result.
5869 ?!? Unfortunately, marking all hard registers as live causes massive
5870 problems for the register allocator and marking all pseudos as live
5871 creates mountains of uninitialized variable warnings.
5873 So for now, just clear the memory set list and mark any regs
5874 we can find in ASM_OPERANDS as used. */
5875 if (code
!= ASM_OPERANDS
|| MEM_VOLATILE_P (x
))
5877 free_EXPR_LIST_list (&pbi
->mem_set_list
);
5878 pbi
->mem_set_list_len
= 0;
5881 /* For all ASM_OPERANDS, we must traverse the vector of input operands.
5882 We can not just fall through here since then we would be confused
5883 by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate
5884 traditional asms unlike their normal usage. */
5885 if (code
== ASM_OPERANDS
)
5889 for (j
= 0; j
< ASM_OPERANDS_INPUT_LENGTH (x
); j
++)
5890 mark_used_regs (pbi
, ASM_OPERANDS_INPUT (x
, j
), cond
, insn
);
5896 if (cond
!= NULL_RTX
)
5899 mark_used_regs (pbi
, COND_EXEC_TEST (x
), NULL_RTX
, insn
);
5901 cond
= COND_EXEC_TEST (x
);
5902 x
= COND_EXEC_CODE (x
);
5906 /* We _do_not_ want to scan operands of phi nodes. Operands of
5907 a phi function are evaluated only when control reaches this
5908 block along a particular edge. Therefore, regs that appear
5909 as arguments to phi should not be added to the global live at
5917 /* Recursively scan the operands of this expression. */
5920 register const char *fmt
= GET_RTX_FORMAT (code
);
5923 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
5927 /* Tail recursive case: save a function call level. */
5933 mark_used_regs (pbi
, XEXP (x
, i
), cond
, insn
);
5935 else if (fmt
[i
] == 'E')
5938 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
5939 mark_used_regs (pbi
, XVECEXP (x
, i
, j
), cond
, insn
);
5948 try_pre_increment_1 (pbi
, insn
)
5949 struct propagate_block_info
*pbi
;
5952 /* Find the next use of this reg. If in same basic block,
5953 make it do pre-increment or pre-decrement if appropriate. */
5954 rtx x
= single_set (insn
);
5955 HOST_WIDE_INT amount
= ((GET_CODE (SET_SRC (x
)) == PLUS
? 1 : -1)
5956 * INTVAL (XEXP (SET_SRC (x
), 1)));
5957 int regno
= REGNO (SET_DEST (x
));
5958 rtx y
= pbi
->reg_next_use
[regno
];
5960 && SET_DEST (x
) != stack_pointer_rtx
5961 && BLOCK_NUM (y
) == BLOCK_NUM (insn
)
5962 /* Don't do this if the reg dies, or gets set in y; a standard addressing
5963 mode would be better. */
5964 && ! dead_or_set_p (y
, SET_DEST (x
))
5965 && try_pre_increment (y
, SET_DEST (x
), amount
))
5967 /* We have found a suitable auto-increment and already changed
5968 insn Y to do it. So flush this increment instruction. */
5969 propagate_block_delete_insn (pbi
->bb
, insn
);
5971 /* Count a reference to this reg for the increment insn we are
5972 deleting. When a reg is incremented, spilling it is worse,
5973 so we want to make that less likely. */
5974 if (regno
>= FIRST_PSEUDO_REGISTER
)
5976 REG_N_REFS (regno
) += (optimize_size
? 1
5977 : pbi
->bb
->loop_depth
+ 1);
5978 REG_N_SETS (regno
)++;
5981 /* Flush any remembered memories depending on the value of
5982 the incremented register. */
5983 invalidate_mems_from_set (pbi
, SET_DEST (x
));
5990 /* Try to change INSN so that it does pre-increment or pre-decrement
5991 addressing on register REG in order to add AMOUNT to REG.
5992 AMOUNT is negative for pre-decrement.
5993 Returns 1 if the change could be made.
5994 This checks all about the validity of the result of modifying INSN. */
5997 try_pre_increment (insn
, reg
, amount
)
5999 HOST_WIDE_INT amount
;
6003 /* Nonzero if we can try to make a pre-increment or pre-decrement.
6004 For example, addl $4,r1; movl (r1),... can become movl +(r1),... */
6006 /* Nonzero if we can try to make a post-increment or post-decrement.
6007 For example, addl $4,r1; movl -4(r1),... can become movl (r1)+,...
6008 It is possible for both PRE_OK and POST_OK to be nonzero if the machine
6009 supports both pre-inc and post-inc, or both pre-dec and post-dec. */
6012 /* Nonzero if the opportunity actually requires post-inc or post-dec. */
6015 /* From the sign of increment, see which possibilities are conceivable
6016 on this target machine. */
6017 if (HAVE_PRE_INCREMENT
&& amount
> 0)
6019 if (HAVE_POST_INCREMENT
&& amount
> 0)
6022 if (HAVE_PRE_DECREMENT
&& amount
< 0)
6024 if (HAVE_POST_DECREMENT
&& amount
< 0)
6027 if (! (pre_ok
|| post_ok
))
6030 /* It is not safe to add a side effect to a jump insn
6031 because if the incremented register is spilled and must be reloaded
6032 there would be no way to store the incremented value back in memory. */
6034 if (GET_CODE (insn
) == JUMP_INSN
)
6039 use
= find_use_as_address (PATTERN (insn
), reg
, 0);
6040 if (post_ok
&& (use
== 0 || use
== (rtx
) 1))
6042 use
= find_use_as_address (PATTERN (insn
), reg
, -amount
);
6046 if (use
== 0 || use
== (rtx
) 1)
6049 if (GET_MODE_SIZE (GET_MODE (use
)) != (amount
> 0 ? amount
: - amount
))
6052 /* See if this combination of instruction and addressing mode exists. */
6053 if (! validate_change (insn
, &XEXP (use
, 0),
6054 gen_rtx_fmt_e (amount
> 0
6055 ? (do_post
? POST_INC
: PRE_INC
)
6056 : (do_post
? POST_DEC
: PRE_DEC
),
6060 /* Record that this insn now has an implicit side effect on X. */
6061 REG_NOTES (insn
) = alloc_EXPR_LIST (REG_INC
, reg
, REG_NOTES (insn
));
6065 #endif /* AUTO_INC_DEC */
6067 /* Find the place in the rtx X where REG is used as a memory address.
6068 Return the MEM rtx that so uses it.
6069 If PLUSCONST is nonzero, search instead for a memory address equivalent to
6070 (plus REG (const_int PLUSCONST)).
6072 If such an address does not appear, return 0.
6073 If REG appears more than once, or is used other than in such an address,
6077 find_use_as_address (x
, reg
, plusconst
)
6080 HOST_WIDE_INT plusconst
;
6082 enum rtx_code code
= GET_CODE (x
);
6083 const char *fmt
= GET_RTX_FORMAT (code
);
6085 register rtx value
= 0;
6088 if (code
== MEM
&& XEXP (x
, 0) == reg
&& plusconst
== 0)
6091 if (code
== MEM
&& GET_CODE (XEXP (x
, 0)) == PLUS
6092 && XEXP (XEXP (x
, 0), 0) == reg
6093 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == CONST_INT
6094 && INTVAL (XEXP (XEXP (x
, 0), 1)) == plusconst
)
6097 if (code
== SIGN_EXTRACT
|| code
== ZERO_EXTRACT
)
6099 /* If REG occurs inside a MEM used in a bit-field reference,
6100 that is unacceptable. */
6101 if (find_use_as_address (XEXP (x
, 0), reg
, 0) != 0)
6102 return (rtx
) (HOST_WIDE_INT
) 1;
6106 return (rtx
) (HOST_WIDE_INT
) 1;
6108 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
6112 tem
= find_use_as_address (XEXP (x
, i
), reg
, plusconst
);
6116 return (rtx
) (HOST_WIDE_INT
) 1;
6118 else if (fmt
[i
] == 'E')
6121 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
6123 tem
= find_use_as_address (XVECEXP (x
, i
, j
), reg
, plusconst
);
6127 return (rtx
) (HOST_WIDE_INT
) 1;
6135 /* Write information about registers and basic blocks into FILE.
6136 This is part of making a debugging dump. */
6139 dump_regset (r
, outf
)
6146 fputs (" (nil)", outf
);
6150 EXECUTE_IF_SET_IN_REG_SET (r
, 0, i
,
6152 fprintf (outf
, " %d", i
);
6153 if (i
< FIRST_PSEUDO_REGISTER
)
6154 fprintf (outf
, " [%s]",
6163 dump_regset (r
, stderr
);
6164 putc ('\n', stderr
);
6168 dump_flow_info (file
)
6172 static const char * const reg_class_names
[] = REG_CLASS_NAMES
;
6174 fprintf (file
, "%d registers.\n", max_regno
);
6175 for (i
= FIRST_PSEUDO_REGISTER
; i
< max_regno
; i
++)
6178 enum reg_class
class, altclass
;
6179 fprintf (file
, "\nRegister %d used %d times across %d insns",
6180 i
, REG_N_REFS (i
), REG_LIVE_LENGTH (i
));
6181 if (REG_BASIC_BLOCK (i
) >= 0)
6182 fprintf (file
, " in block %d", REG_BASIC_BLOCK (i
));
6184 fprintf (file
, "; set %d time%s", REG_N_SETS (i
),
6185 (REG_N_SETS (i
) == 1) ? "" : "s");
6186 if (REG_USERVAR_P (regno_reg_rtx
[i
]))
6187 fprintf (file
, "; user var");
6188 if (REG_N_DEATHS (i
) != 1)
6189 fprintf (file
, "; dies in %d places", REG_N_DEATHS (i
));
6190 if (REG_N_CALLS_CROSSED (i
) == 1)
6191 fprintf (file
, "; crosses 1 call");
6192 else if (REG_N_CALLS_CROSSED (i
))
6193 fprintf (file
, "; crosses %d calls", REG_N_CALLS_CROSSED (i
));
6194 if (PSEUDO_REGNO_BYTES (i
) != UNITS_PER_WORD
)
6195 fprintf (file
, "; %d bytes", PSEUDO_REGNO_BYTES (i
));
6196 class = reg_preferred_class (i
);
6197 altclass
= reg_alternate_class (i
);
6198 if (class != GENERAL_REGS
|| altclass
!= ALL_REGS
)
6200 if (altclass
== ALL_REGS
|| class == ALL_REGS
)
6201 fprintf (file
, "; pref %s", reg_class_names
[(int) class]);
6202 else if (altclass
== NO_REGS
)
6203 fprintf (file
, "; %s or none", reg_class_names
[(int) class]);
6205 fprintf (file
, "; pref %s, else %s",
6206 reg_class_names
[(int) class],
6207 reg_class_names
[(int) altclass
]);
6209 if (REG_POINTER (regno_reg_rtx
[i
]))
6210 fprintf (file
, "; pointer");
6211 fprintf (file
, ".\n");
6214 fprintf (file
, "\n%d basic blocks, %d edges.\n", n_basic_blocks
, n_edges
);
6215 for (i
= 0; i
< n_basic_blocks
; i
++)
6217 register basic_block bb
= BASIC_BLOCK (i
);
6220 fprintf (file
, "\nBasic block %d: first insn %d, last %d, loop_depth %d, count %d.\n",
6221 i
, INSN_UID (bb
->head
), INSN_UID (bb
->end
), bb
->loop_depth
, bb
->count
);
6223 fprintf (file
, "Predecessors: ");
6224 for (e
= bb
->pred
; e
; e
= e
->pred_next
)
6225 dump_edge_info (file
, e
, 0);
6227 fprintf (file
, "\nSuccessors: ");
6228 for (e
= bb
->succ
; e
; e
= e
->succ_next
)
6229 dump_edge_info (file
, e
, 1);
6231 fprintf (file
, "\nRegisters live at start:");
6232 dump_regset (bb
->global_live_at_start
, file
);
6234 fprintf (file
, "\nRegisters live at end:");
6235 dump_regset (bb
->global_live_at_end
, file
);
6246 dump_flow_info (stderr
);
6250 dump_edge_info (file
, e
, do_succ
)
6255 basic_block side
= (do_succ
? e
->dest
: e
->src
);
6257 if (side
== ENTRY_BLOCK_PTR
)
6258 fputs (" ENTRY", file
);
6259 else if (side
== EXIT_BLOCK_PTR
)
6260 fputs (" EXIT", file
);
6262 fprintf (file
, " %d", side
->index
);
6265 fprintf (file
, " count:%d", e
->count
);
6269 static const char * const bitnames
[] = {
6270 "fallthru", "crit", "ab", "abcall", "eh", "fake"
6273 int i
, flags
= e
->flags
;
6277 for (i
= 0; flags
; i
++)
6278 if (flags
& (1 << i
))
6284 if (i
< (int) ARRAY_SIZE (bitnames
))
6285 fputs (bitnames
[i
], file
);
6287 fprintf (file
, "%d", i
);
6294 /* Print out one basic block with live information at start and end. */
6305 fprintf (outf
, ";; Basic block %d, loop depth %d, count %d",
6306 bb
->index
, bb
->loop_depth
, bb
->count
);
6309 fputs (";; Predecessors: ", outf
);
6310 for (e
= bb
->pred
; e
; e
= e
->pred_next
)
6311 dump_edge_info (outf
, e
, 0);
6314 fputs (";; Registers live at start:", outf
);
6315 dump_regset (bb
->global_live_at_start
, outf
);
6318 for (insn
= bb
->head
, last
= NEXT_INSN (bb
->end
);
6320 insn
= NEXT_INSN (insn
))
6321 print_rtl_single (outf
, insn
);
6323 fputs (";; Registers live at end:", outf
);
6324 dump_regset (bb
->global_live_at_end
, outf
);
6327 fputs (";; Successors: ", outf
);
6328 for (e
= bb
->succ
; e
; e
= e
->succ_next
)
6329 dump_edge_info (outf
, e
, 1);
6337 dump_bb (bb
, stderr
);
6344 dump_bb (BASIC_BLOCK (n
), stderr
);
6347 /* Like print_rtl, but also print out live information for the start of each
6351 print_rtl_with_bb (outf
, rtx_first
)
6355 register rtx tmp_rtx
;
6358 fprintf (outf
, "(nil)\n");
6362 enum bb_state
{ NOT_IN_BB
, IN_ONE_BB
, IN_MULTIPLE_BB
};
6363 int max_uid
= get_max_uid ();
6364 basic_block
*start
= (basic_block
*)
6365 xcalloc (max_uid
, sizeof (basic_block
));
6366 basic_block
*end
= (basic_block
*)
6367 xcalloc (max_uid
, sizeof (basic_block
));
6368 enum bb_state
*in_bb_p
= (enum bb_state
*)
6369 xcalloc (max_uid
, sizeof (enum bb_state
));
6371 for (i
= n_basic_blocks
- 1; i
>= 0; i
--)
6373 basic_block bb
= BASIC_BLOCK (i
);
6376 start
[INSN_UID (bb
->head
)] = bb
;
6377 end
[INSN_UID (bb
->end
)] = bb
;
6378 for (x
= bb
->head
; x
!= NULL_RTX
; x
= NEXT_INSN (x
))
6380 enum bb_state state
= IN_MULTIPLE_BB
;
6381 if (in_bb_p
[INSN_UID (x
)] == NOT_IN_BB
)
6383 in_bb_p
[INSN_UID (x
)] = state
;
6390 for (tmp_rtx
= rtx_first
; NULL
!= tmp_rtx
; tmp_rtx
= NEXT_INSN (tmp_rtx
))
6395 if ((bb
= start
[INSN_UID (tmp_rtx
)]) != NULL
)
6397 fprintf (outf
, ";; Start of basic block %d, registers live:",
6399 dump_regset (bb
->global_live_at_start
, outf
);
6403 if (in_bb_p
[INSN_UID (tmp_rtx
)] == NOT_IN_BB
6404 && GET_CODE (tmp_rtx
) != NOTE
6405 && GET_CODE (tmp_rtx
) != BARRIER
)
6406 fprintf (outf
, ";; Insn is not within a basic block\n");
6407 else if (in_bb_p
[INSN_UID (tmp_rtx
)] == IN_MULTIPLE_BB
)
6408 fprintf (outf
, ";; Insn is in multiple basic blocks\n");
6410 did_output
= print_rtl_single (outf
, tmp_rtx
);
6412 if ((bb
= end
[INSN_UID (tmp_rtx
)]) != NULL
)
6414 fprintf (outf
, ";; End of basic block %d, registers live:\n",
6416 dump_regset (bb
->global_live_at_end
, outf
);
6429 if (current_function_epilogue_delay_list
!= 0)
6431 fprintf (outf
, "\n;; Insns in epilogue delay list:\n\n");
6432 for (tmp_rtx
= current_function_epilogue_delay_list
; tmp_rtx
!= 0;
6433 tmp_rtx
= XEXP (tmp_rtx
, 1))
6434 print_rtl_single (outf
, XEXP (tmp_rtx
, 0));
6438 /* Dump the rtl into the current debugging dump file, then abort. */
6440 print_rtl_and_abort ()
6444 print_rtl_with_bb (rtl_dump_file
, get_insns ());
6445 fclose (rtl_dump_file
);
6450 /* Recompute register set/reference counts immediately prior to register
6453 This avoids problems with set/reference counts changing to/from values
6454 which have special meanings to the register allocators.
6456 Additionally, the reference counts are the primary component used by the
6457 register allocators to prioritize pseudos for allocation to hard regs.
6458 More accurate reference counts generally lead to better register allocation.
6460 F is the first insn to be scanned.
6462 LOOP_STEP denotes how much loop_depth should be incremented per
6463 loop nesting level in order to increase the ref count more for
6464 references in a loop.
6466 It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and
6467 possibly other information which is used by the register allocators. */
6470 recompute_reg_usage (f
, loop_step
)
6471 rtx f ATTRIBUTE_UNUSED
;
6472 int loop_step ATTRIBUTE_UNUSED
;
6474 allocate_reg_life_data ();
6475 update_life_info (NULL
, UPDATE_LIFE_LOCAL
, PROP_REG_INFO
);
6478 /* Optionally removes all the REG_DEAD and REG_UNUSED notes from a set of
6479 blocks. If BLOCKS is NULL, assume the universal set. Returns a count
6480 of the number of registers that died. */
6483 count_or_remove_death_notes (blocks
, kill
)
6489 for (i
= n_basic_blocks
- 1; i
>= 0; --i
)
6494 if (blocks
&& ! TEST_BIT (blocks
, i
))
6497 bb
= BASIC_BLOCK (i
);
6499 for (insn
= bb
->head
;; insn
= NEXT_INSN (insn
))
6503 rtx
*pprev
= ®_NOTES (insn
);
6508 switch (REG_NOTE_KIND (link
))
6511 if (GET_CODE (XEXP (link
, 0)) == REG
)
6513 rtx reg
= XEXP (link
, 0);
6516 if (REGNO (reg
) >= FIRST_PSEUDO_REGISTER
)
6519 n
= HARD_REGNO_NREGS (REGNO (reg
), GET_MODE (reg
));
6527 rtx next
= XEXP (link
, 1);
6528 free_EXPR_LIST_node (link
);
6529 *pprev
= link
= next
;
6535 pprev
= &XEXP (link
, 1);
6542 if (insn
== bb
->end
)
6551 /* Update insns block within BB. */
6554 update_bb_for_insn (bb
)
6559 if (! basic_block_for_insn
)
6562 for (insn
= bb
->head
; ; insn
= NEXT_INSN (insn
))
6564 set_block_for_insn (insn
, bb
);
6566 if (insn
== bb
->end
)
6572 /* Record INSN's block as BB. */
6575 set_block_for_insn (insn
, bb
)
6579 size_t uid
= INSN_UID (insn
);
6580 if (uid
>= basic_block_for_insn
->num_elements
)
6584 /* Add one-eighth the size so we don't keep calling xrealloc. */
6585 new_size
= uid
+ (uid
+ 7) / 8;
6587 VARRAY_GROW (basic_block_for_insn
, new_size
);
6589 VARRAY_BB (basic_block_for_insn
, uid
) = bb
;
6592 /* Record INSN's block number as BB. */
6593 /* ??? This has got to go. */
6596 set_block_num (insn
, bb
)
6600 set_block_for_insn (insn
, BASIC_BLOCK (bb
));
6603 /* Verify the CFG consistency. This function check some CFG invariants and
6604 aborts when something is wrong. Hope that this function will help to
6605 convert many optimization passes to preserve CFG consistency.
6607 Currently it does following checks:
6609 - test head/end pointers
6610 - overlapping of basic blocks
6611 - edge list correctness
6612 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
6613 - tails of basic blocks (ensure that boundary is necessary)
6614 - scans body of the basic block for JUMP_INSN, CODE_LABEL
6615 and NOTE_INSN_BASIC_BLOCK
6616 - check that all insns are in the basic blocks
6617 (except the switch handling code, barriers and notes)
6618 - check that all returns are followed by barriers
6620 In future it can be extended check a lot of other stuff as well
6621 (reachability of basic blocks, life information, etc. etc.). */
6626 const int max_uid
= get_max_uid ();
6627 const rtx rtx_first
= get_insns ();
6628 rtx last_head
= get_last_insn ();
6629 basic_block
*bb_info
;
6631 int i
, last_bb_num_seen
, num_bb_notes
, err
= 0;
6633 bb_info
= (basic_block
*) xcalloc (max_uid
, sizeof (basic_block
));
6635 for (i
= n_basic_blocks
- 1; i
>= 0; i
--)
6637 basic_block bb
= BASIC_BLOCK (i
);
6638 rtx head
= bb
->head
;
6641 /* Verify the end of the basic block is in the INSN chain. */
6642 for (x
= last_head
; x
!= NULL_RTX
; x
= PREV_INSN (x
))
6647 error ("End insn %d for block %d not found in the insn stream.",
6648 INSN_UID (end
), bb
->index
);
6652 /* Work backwards from the end to the head of the basic block
6653 to verify the head is in the RTL chain. */
6654 for (; x
!= NULL_RTX
; x
= PREV_INSN (x
))
6656 /* While walking over the insn chain, verify insns appear
6657 in only one basic block and initialize the BB_INFO array
6658 used by other passes. */
6659 if (bb_info
[INSN_UID (x
)] != NULL
)
6661 error ("Insn %d is in multiple basic blocks (%d and %d)",
6662 INSN_UID (x
), bb
->index
, bb_info
[INSN_UID (x
)]->index
);
6665 bb_info
[INSN_UID (x
)] = bb
;
6672 error ("Head insn %d for block %d not found in the insn stream.",
6673 INSN_UID (head
), bb
->index
);
6680 /* Now check the basic blocks (boundaries etc.) */
6681 for (i
= n_basic_blocks
- 1; i
>= 0; i
--)
6683 basic_block bb
= BASIC_BLOCK (i
);
6684 /* Check correctness of edge lists */
6693 "verify_flow_info: Basic block %d succ edge is corrupted\n",
6695 fprintf (stderr
, "Predecessor: ");
6696 dump_edge_info (stderr
, e
, 0);
6697 fprintf (stderr
, "\nSuccessor: ");
6698 dump_edge_info (stderr
, e
, 1);
6702 if (e
->dest
!= EXIT_BLOCK_PTR
)
6704 edge e2
= e
->dest
->pred
;
6705 while (e2
&& e2
!= e
)
6709 error ("Basic block %i edge lists are corrupted", bb
->index
);
6721 error ("Basic block %d pred edge is corrupted", bb
->index
);
6722 fputs ("Predecessor: ", stderr
);
6723 dump_edge_info (stderr
, e
, 0);
6724 fputs ("\nSuccessor: ", stderr
);
6725 dump_edge_info (stderr
, e
, 1);
6726 fputc ('\n', stderr
);
6729 if (e
->src
!= ENTRY_BLOCK_PTR
)
6731 edge e2
= e
->src
->succ
;
6732 while (e2
&& e2
!= e
)
6736 error ("Basic block %i edge lists are corrupted", bb
->index
);
6743 /* OK pointers are correct. Now check the header of basic
6744 block. It ought to contain optional CODE_LABEL followed
6745 by NOTE_BASIC_BLOCK. */
6747 if (GET_CODE (x
) == CODE_LABEL
)
6751 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
6757 if (!NOTE_INSN_BASIC_BLOCK_P (x
) || NOTE_BASIC_BLOCK (x
) != bb
)
6759 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d\n",
6766 /* Do checks for empty blocks here */
6773 if (NOTE_INSN_BASIC_BLOCK_P (x
))
6775 error ("NOTE_INSN_BASIC_BLOCK %d in the middle of basic block %d",
6776 INSN_UID (x
), bb
->index
);
6783 if (GET_CODE (x
) == JUMP_INSN
6784 || GET_CODE (x
) == CODE_LABEL
6785 || GET_CODE (x
) == BARRIER
)
6787 error ("In basic block %d:", bb
->index
);
6788 fatal_insn ("Flow control insn inside a basic block", x
);
6796 last_bb_num_seen
= -1;
6801 if (NOTE_INSN_BASIC_BLOCK_P (x
))
6803 basic_block bb
= NOTE_BASIC_BLOCK (x
);
6805 if (bb
->index
!= last_bb_num_seen
+ 1)
6806 /* Basic blocks not numbered consecutively. */
6809 last_bb_num_seen
= bb
->index
;
6812 if (!bb_info
[INSN_UID (x
)])
6814 switch (GET_CODE (x
))
6821 /* An addr_vec is placed outside any basic block. */
6823 && GET_CODE (NEXT_INSN (x
)) == JUMP_INSN
6824 && (GET_CODE (PATTERN (NEXT_INSN (x
))) == ADDR_DIFF_VEC
6825 || GET_CODE (PATTERN (NEXT_INSN (x
))) == ADDR_VEC
))
6830 /* But in any case, non-deletable labels can appear anywhere. */
6834 fatal_insn ("Insn outside basic block", x
);
6839 && GET_CODE (x
) == JUMP_INSN
6840 && returnjump_p (x
) && ! condjump_p (x
)
6841 && ! (NEXT_INSN (x
) && GET_CODE (NEXT_INSN (x
)) == BARRIER
))
6842 fatal_insn ("Return not followed by barrier", x
);
6847 if (num_bb_notes
!= n_basic_blocks
)
6849 ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
6850 num_bb_notes
, n_basic_blocks
);
6859 /* Functions to access an edge list with a vector representation.
6860 Enough data is kept such that given an index number, the
6861 pred and succ that edge represents can be determined, or
6862 given a pred and a succ, its index number can be returned.
6863 This allows algorithms which consume a lot of memory to
6864 represent the normally full matrix of edge (pred,succ) with a
6865 single indexed vector, edge (EDGE_INDEX (pred, succ)), with no
6866 wasted space in the client code due to sparse flow graphs. */
6868 /* This functions initializes the edge list. Basically the entire
6869 flowgraph is processed, and all edges are assigned a number,
6870 and the data structure is filled in. */
6875 struct edge_list
*elist
;
6881 block_count
= n_basic_blocks
+ 2; /* Include the entry and exit blocks. */
6885 /* Determine the number of edges in the flow graph by counting successor
6886 edges on each basic block. */
6887 for (x
= 0; x
< n_basic_blocks
; x
++)
6889 basic_block bb
= BASIC_BLOCK (x
);
6891 for (e
= bb
->succ
; e
; e
= e
->succ_next
)
6894 /* Don't forget successors of the entry block. */
6895 for (e
= ENTRY_BLOCK_PTR
->succ
; e
; e
= e
->succ_next
)
6898 elist
= (struct edge_list
*) xmalloc (sizeof (struct edge_list
));
6899 elist
->num_blocks
= block_count
;
6900 elist
->num_edges
= num_edges
;
6901 elist
->index_to_edge
= (edge
*) xmalloc (sizeof (edge
) * num_edges
);
6905 /* Follow successors of the entry block, and register these edges. */
6906 for (e
= ENTRY_BLOCK_PTR
->succ
; e
; e
= e
->succ_next
)
6908 elist
->index_to_edge
[num_edges
] = e
;
6912 for (x
= 0; x
< n_basic_blocks
; x
++)
6914 basic_block bb
= BASIC_BLOCK (x
);
6916 /* Follow all successors of blocks, and register these edges. */
6917 for (e
= bb
->succ
; e
; e
= e
->succ_next
)
6919 elist
->index_to_edge
[num_edges
] = e
;
6926 /* This function free's memory associated with an edge list. */
6929 free_edge_list (elist
)
6930 struct edge_list
*elist
;
6934 free (elist
->index_to_edge
);
6939 /* This function provides debug output showing an edge list. */
6942 print_edge_list (f
, elist
)
6944 struct edge_list
*elist
;
6947 fprintf (f
, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
6948 elist
->num_blocks
- 2, elist
->num_edges
);
6950 for (x
= 0; x
< elist
->num_edges
; x
++)
6952 fprintf (f
, " %-4d - edge(", x
);
6953 if (INDEX_EDGE_PRED_BB (elist
, x
) == ENTRY_BLOCK_PTR
)
6954 fprintf (f
, "entry,");
6956 fprintf (f
, "%d,", INDEX_EDGE_PRED_BB (elist
, x
)->index
);
6958 if (INDEX_EDGE_SUCC_BB (elist
, x
) == EXIT_BLOCK_PTR
)
6959 fprintf (f
, "exit)\n");
6961 fprintf (f
, "%d)\n", INDEX_EDGE_SUCC_BB (elist
, x
)->index
);
6965 /* This function provides an internal consistency check of an edge list,
6966 verifying that all edges are present, and that there are no
6970 verify_edge_list (f
, elist
)
6972 struct edge_list
*elist
;
6974 int x
, pred
, succ
, index
;
6977 for (x
= 0; x
< n_basic_blocks
; x
++)
6979 basic_block bb
= BASIC_BLOCK (x
);
6981 for (e
= bb
->succ
; e
; e
= e
->succ_next
)
6983 pred
= e
->src
->index
;
6984 succ
= e
->dest
->index
;
6985 index
= EDGE_INDEX (elist
, e
->src
, e
->dest
);
6986 if (index
== EDGE_INDEX_NO_EDGE
)
6988 fprintf (f
, "*p* No index for edge from %d to %d\n", pred
, succ
);
6991 if (INDEX_EDGE_PRED_BB (elist
, index
)->index
!= pred
)
6992 fprintf (f
, "*p* Pred for index %d should be %d not %d\n",
6993 index
, pred
, INDEX_EDGE_PRED_BB (elist
, index
)->index
);
6994 if (INDEX_EDGE_SUCC_BB (elist
, index
)->index
!= succ
)
6995 fprintf (f
, "*p* Succ for index %d should be %d not %d\n",
6996 index
, succ
, INDEX_EDGE_SUCC_BB (elist
, index
)->index
);
6999 for (e
= ENTRY_BLOCK_PTR
->succ
; e
; e
= e
->succ_next
)
7001 pred
= e
->src
->index
;
7002 succ
= e
->dest
->index
;
7003 index
= EDGE_INDEX (elist
, e
->src
, e
->dest
);
7004 if (index
== EDGE_INDEX_NO_EDGE
)
7006 fprintf (f
, "*p* No index for edge from %d to %d\n", pred
, succ
);
7009 if (INDEX_EDGE_PRED_BB (elist
, index
)->index
!= pred
)
7010 fprintf (f
, "*p* Pred for index %d should be %d not %d\n",
7011 index
, pred
, INDEX_EDGE_PRED_BB (elist
, index
)->index
);
7012 if (INDEX_EDGE_SUCC_BB (elist
, index
)->index
!= succ
)
7013 fprintf (f
, "*p* Succ for index %d should be %d not %d\n",
7014 index
, succ
, INDEX_EDGE_SUCC_BB (elist
, index
)->index
);
7016 /* We've verified that all the edges are in the list, no lets make sure
7017 there are no spurious edges in the list. */
7019 for (pred
= 0; pred
< n_basic_blocks
; pred
++)
7020 for (succ
= 0; succ
< n_basic_blocks
; succ
++)
7022 basic_block p
= BASIC_BLOCK (pred
);
7023 basic_block s
= BASIC_BLOCK (succ
);
7027 for (e
= p
->succ
; e
; e
= e
->succ_next
)
7033 for (e
= s
->pred
; e
; e
= e
->pred_next
)
7039 if (EDGE_INDEX (elist
, BASIC_BLOCK (pred
), BASIC_BLOCK (succ
))
7040 == EDGE_INDEX_NO_EDGE
&& found_edge
!= 0)
7041 fprintf (f
, "*** Edge (%d, %d) appears to not have an index\n",
7043 if (EDGE_INDEX (elist
, BASIC_BLOCK (pred
), BASIC_BLOCK (succ
))
7044 != EDGE_INDEX_NO_EDGE
&& found_edge
== 0)
7045 fprintf (f
, "*** Edge (%d, %d) has index %d, but there is no edge\n",
7046 pred
, succ
, EDGE_INDEX (elist
, BASIC_BLOCK (pred
),
7047 BASIC_BLOCK (succ
)));
7049 for (succ
= 0; succ
< n_basic_blocks
; succ
++)
7051 basic_block p
= ENTRY_BLOCK_PTR
;
7052 basic_block s
= BASIC_BLOCK (succ
);
7056 for (e
= p
->succ
; e
; e
= e
->succ_next
)
7062 for (e
= s
->pred
; e
; e
= e
->pred_next
)
7068 if (EDGE_INDEX (elist
, ENTRY_BLOCK_PTR
, BASIC_BLOCK (succ
))
7069 == EDGE_INDEX_NO_EDGE
&& found_edge
!= 0)
7070 fprintf (f
, "*** Edge (entry, %d) appears to not have an index\n",
7072 if (EDGE_INDEX (elist
, ENTRY_BLOCK_PTR
, BASIC_BLOCK (succ
))
7073 != EDGE_INDEX_NO_EDGE
&& found_edge
== 0)
7074 fprintf (f
, "*** Edge (entry, %d) has index %d, but no edge exists\n",
7075 succ
, EDGE_INDEX (elist
, ENTRY_BLOCK_PTR
,
7076 BASIC_BLOCK (succ
)));
7078 for (pred
= 0; pred
< n_basic_blocks
; pred
++)
7080 basic_block p
= BASIC_BLOCK (pred
);
7081 basic_block s
= EXIT_BLOCK_PTR
;
7085 for (e
= p
->succ
; e
; e
= e
->succ_next
)
7091 for (e
= s
->pred
; e
; e
= e
->pred_next
)
7097 if (EDGE_INDEX (elist
, BASIC_BLOCK (pred
), EXIT_BLOCK_PTR
)
7098 == EDGE_INDEX_NO_EDGE
&& found_edge
!= 0)
7099 fprintf (f
, "*** Edge (%d, exit) appears to not have an index\n",
7101 if (EDGE_INDEX (elist
, BASIC_BLOCK (pred
), EXIT_BLOCK_PTR
)
7102 != EDGE_INDEX_NO_EDGE
&& found_edge
== 0)
7103 fprintf (f
, "*** Edge (%d, exit) has index %d, but no edge exists\n",
7104 pred
, EDGE_INDEX (elist
, BASIC_BLOCK (pred
),
7109 /* This routine will determine what, if any, edge there is between
7110 a specified predecessor and successor. */
7113 find_edge_index (edge_list
, pred
, succ
)
7114 struct edge_list
*edge_list
;
7115 basic_block pred
, succ
;
7118 for (x
= 0; x
< NUM_EDGES (edge_list
); x
++)
7120 if (INDEX_EDGE_PRED_BB (edge_list
, x
) == pred
7121 && INDEX_EDGE_SUCC_BB (edge_list
, x
) == succ
)
7124 return (EDGE_INDEX_NO_EDGE
);
7127 /* This function will remove an edge from the flow graph. */
7133 edge last_pred
= NULL
;
7134 edge last_succ
= NULL
;
7136 basic_block src
, dest
;
7139 for (tmp
= src
->succ
; tmp
&& tmp
!= e
; tmp
= tmp
->succ_next
)
7145 last_succ
->succ_next
= e
->succ_next
;
7147 src
->succ
= e
->succ_next
;
7149 for (tmp
= dest
->pred
; tmp
&& tmp
!= e
; tmp
= tmp
->pred_next
)
7155 last_pred
->pred_next
= e
->pred_next
;
7157 dest
->pred
= e
->pred_next
;
7163 /* This routine will remove any fake successor edges for a basic block.
7164 When the edge is removed, it is also removed from whatever predecessor
7168 remove_fake_successors (bb
)
7172 for (e
= bb
->succ
; e
;)
7176 if ((tmp
->flags
& EDGE_FAKE
) == EDGE_FAKE
)
7181 /* This routine will remove all fake edges from the flow graph. If
7182 we remove all fake successors, it will automatically remove all
7183 fake predecessors. */
7186 remove_fake_edges ()
7190 for (x
= 0; x
< n_basic_blocks
; x
++)
7191 remove_fake_successors (BASIC_BLOCK (x
));
7193 /* We've handled all successors except the entry block's. */
7194 remove_fake_successors (ENTRY_BLOCK_PTR
);
7197 /* This function will add a fake edge between any block which has no
7198 successors, and the exit block. Some data flow equations require these
7202 add_noreturn_fake_exit_edges ()
7206 for (x
= 0; x
< n_basic_blocks
; x
++)
7207 if (BASIC_BLOCK (x
)->succ
== NULL
)
7208 make_edge (NULL
, BASIC_BLOCK (x
), EXIT_BLOCK_PTR
, EDGE_FAKE
);
7211 /* This function adds a fake edge between any infinite loops to the
7212 exit block. Some optimizations require a path from each node to
7215 See also Morgan, Figure 3.10, pp. 82-83.
7217 The current implementation is ugly, not attempting to minimize the
7218 number of inserted fake edges. To reduce the number of fake edges
7219 to insert, add fake edges from _innermost_ loops containing only
7220 nodes not reachable from the exit block. */
7223 connect_infinite_loops_to_exit ()
7225 basic_block unvisited_block
;
7227 /* Perform depth-first search in the reverse graph to find nodes
7228 reachable from the exit block. */
7229 struct depth_first_search_dsS dfs_ds
;
7231 flow_dfs_compute_reverse_init (&dfs_ds
);
7232 flow_dfs_compute_reverse_add_bb (&dfs_ds
, EXIT_BLOCK_PTR
);
7234 /* Repeatedly add fake edges, updating the unreachable nodes. */
7237 unvisited_block
= flow_dfs_compute_reverse_execute (&dfs_ds
);
7238 if (!unvisited_block
)
7240 make_edge (NULL
, unvisited_block
, EXIT_BLOCK_PTR
, EDGE_FAKE
);
7241 flow_dfs_compute_reverse_add_bb (&dfs_ds
, unvisited_block
);
7244 flow_dfs_compute_reverse_finish (&dfs_ds
);
7249 /* Redirect an edge's successor from one block to another. */
7252 redirect_edge_succ (e
, new_succ
)
7254 basic_block new_succ
;
7258 /* Disconnect the edge from the old successor block. */
7259 for (pe
= &e
->dest
->pred
; *pe
!= e
; pe
= &(*pe
)->pred_next
)
7261 *pe
= (*pe
)->pred_next
;
7263 /* Reconnect the edge to the new successor block. */
7264 e
->pred_next
= new_succ
->pred
;
7269 /* Redirect an edge's predecessor from one block to another. */
7272 redirect_edge_pred (e
, new_pred
)
7274 basic_block new_pred
;
7278 /* Disconnect the edge from the old predecessor block. */
7279 for (pe
= &e
->src
->succ
; *pe
!= e
; pe
= &(*pe
)->succ_next
)
7281 *pe
= (*pe
)->succ_next
;
7283 /* Reconnect the edge to the new predecessor block. */
7284 e
->succ_next
= new_pred
->succ
;
7289 /* Dump the list of basic blocks in the bitmap NODES. */
7292 flow_nodes_print (str
, nodes
, file
)
7294 const sbitmap nodes
;
7302 fprintf (file
, "%s { ", str
);
7303 EXECUTE_IF_SET_IN_SBITMAP (nodes
, 0, node
, {fprintf (file
, "%d ", node
);});
7304 fputs ("}\n", file
);
7308 /* Dump the list of edges in the array EDGE_LIST. */
7311 flow_edge_list_print (str
, edge_list
, num_edges
, file
)
7313 const edge
*edge_list
;
7322 fprintf (file
, "%s { ", str
);
7323 for (i
= 0; i
< num_edges
; i
++)
7324 fprintf (file
, "%d->%d ", edge_list
[i
]->src
->index
,
7325 edge_list
[i
]->dest
->index
);
7326 fputs ("}\n", file
);
7330 /* Dump loop related CFG information. */
7333 flow_loops_cfg_dump (loops
, file
)
7334 const struct loops
*loops
;
7339 if (! loops
->num
|| ! file
|| ! loops
->cfg
.dom
)
7342 for (i
= 0; i
< n_basic_blocks
; i
++)
7346 fprintf (file
, ";; %d succs { ", i
);
7347 for (succ
= BASIC_BLOCK (i
)->succ
; succ
; succ
= succ
->succ_next
)
7348 fprintf (file
, "%d ", succ
->dest
->index
);
7349 flow_nodes_print ("} dom", loops
->cfg
.dom
[i
], file
);
7352 /* Dump the DFS node order. */
7353 if (loops
->cfg
.dfs_order
)
7355 fputs (";; DFS order: ", file
);
7356 for (i
= 0; i
< n_basic_blocks
; i
++)
7357 fprintf (file
, "%d ", loops
->cfg
.dfs_order
[i
]);
7360 /* Dump the reverse completion node order. */
7361 if (loops
->cfg
.rc_order
)
7363 fputs (";; RC order: ", file
);
7364 for (i
= 0; i
< n_basic_blocks
; i
++)
7365 fprintf (file
, "%d ", loops
->cfg
.rc_order
[i
]);
7370 /* Return non-zero if the nodes of LOOP are a subset of OUTER. */
7373 flow_loop_nested_p (outer
, loop
)
7377 return sbitmap_a_subset_b_p (loop
->nodes
, outer
->nodes
);
7381 /* Dump the loop information specified by LOOP to the stream FILE
7382 using auxiliary dump callback function LOOP_DUMP_AUX if non null. */
7384 flow_loop_dump (loop
, file
, loop_dump_aux
, verbose
)
7385 const struct loop
*loop
;
7387 void (*loop_dump_aux
) PARAMS((const struct loop
*, FILE *, int));
7390 if (! loop
|| ! loop
->header
)
7393 fprintf (file
, ";;\n;; Loop %d (%d to %d):%s%s\n",
7394 loop
->num
, INSN_UID (loop
->first
->head
),
7395 INSN_UID (loop
->last
->end
),
7396 loop
->shared
? " shared" : "",
7397 loop
->invalid
? " invalid" : "");
7398 fprintf (file
, ";; header %d, latch %d, pre-header %d, first %d, last %d\n",
7399 loop
->header
->index
, loop
->latch
->index
,
7400 loop
->pre_header
? loop
->pre_header
->index
: -1,
7401 loop
->first
->index
, loop
->last
->index
);
7402 fprintf (file
, ";; depth %d, level %d, outer %ld\n",
7403 loop
->depth
, loop
->level
,
7404 (long) (loop
->outer
? loop
->outer
->num
: -1));
7406 if (loop
->pre_header_edges
)
7407 flow_edge_list_print (";; pre-header edges", loop
->pre_header_edges
,
7408 loop
->num_pre_header_edges
, file
);
7409 flow_edge_list_print (";; entry edges", loop
->entry_edges
,
7410 loop
->num_entries
, file
);
7411 fprintf (file
, ";; %d", loop
->num_nodes
);
7412 flow_nodes_print (" nodes", loop
->nodes
, file
);
7413 flow_edge_list_print (";; exit edges", loop
->exit_edges
,
7414 loop
->num_exits
, file
);
7415 if (loop
->exits_doms
)
7416 flow_nodes_print (";; exit doms", loop
->exits_doms
, file
);
7418 loop_dump_aux (loop
, file
, verbose
);
7422 /* Dump the loop information specified by LOOPS to the stream FILE,
7423 using auxiliary dump callback function LOOP_DUMP_AUX if non null. */
7425 flow_loops_dump (loops
, file
, loop_dump_aux
, verbose
)
7426 const struct loops
*loops
;
7428 void (*loop_dump_aux
) PARAMS((const struct loop
*, FILE *, int));
7434 num_loops
= loops
->num
;
7435 if (! num_loops
|| ! file
)
7438 fprintf (file
, ";; %d loops found, %d levels\n",
7439 num_loops
, loops
->levels
);
7441 for (i
= 0; i
< num_loops
; i
++)
7443 struct loop
*loop
= &loops
->array
[i
];
7445 flow_loop_dump (loop
, file
, loop_dump_aux
, verbose
);
7451 for (j
= 0; j
< i
; j
++)
7453 struct loop
*oloop
= &loops
->array
[j
];
7455 if (loop
->header
== oloop
->header
)
7460 smaller
= loop
->num_nodes
< oloop
->num_nodes
;
7462 /* If the union of LOOP and OLOOP is different than
7463 the larger of LOOP and OLOOP then LOOP and OLOOP
7464 must be disjoint. */
7465 disjoint
= ! flow_loop_nested_p (smaller
? loop
: oloop
,
7466 smaller
? oloop
: loop
);
7468 ";; loop header %d shared by loops %d, %d %s\n",
7469 loop
->header
->index
, i
, j
,
7470 disjoint
? "disjoint" : "nested");
7477 flow_loops_cfg_dump (loops
, file
);
7481 /* Free all the memory allocated for LOOPS. */
7484 flow_loops_free (loops
)
7485 struct loops
*loops
;
7494 /* Free the loop descriptors. */
7495 for (i
= 0; i
< loops
->num
; i
++)
7497 struct loop
*loop
= &loops
->array
[i
];
7499 if (loop
->pre_header_edges
)
7500 free (loop
->pre_header_edges
);
7502 sbitmap_free (loop
->nodes
);
7503 if (loop
->entry_edges
)
7504 free (loop
->entry_edges
);
7505 if (loop
->exit_edges
)
7506 free (loop
->exit_edges
);
7507 if (loop
->exits_doms
)
7508 sbitmap_free (loop
->exits_doms
);
7510 free (loops
->array
);
7511 loops
->array
= NULL
;
7514 sbitmap_vector_free (loops
->cfg
.dom
);
7515 if (loops
->cfg
.dfs_order
)
7516 free (loops
->cfg
.dfs_order
);
7518 if (loops
->shared_headers
)
7519 sbitmap_free (loops
->shared_headers
);
7524 /* Find the entry edges into the loop with header HEADER and nodes
7525 NODES and store in ENTRY_EDGES array. Return the number of entry
7526 edges from the loop. */
7529 flow_loop_entry_edges_find (header
, nodes
, entry_edges
)
7531 const sbitmap nodes
;
7537 *entry_edges
= NULL
;
7540 for (e
= header
->pred
; e
; e
= e
->pred_next
)
7542 basic_block src
= e
->src
;
7544 if (src
== ENTRY_BLOCK_PTR
|| ! TEST_BIT (nodes
, src
->index
))
7551 *entry_edges
= (edge
*) xmalloc (num_entries
* sizeof (edge
*));
7554 for (e
= header
->pred
; e
; e
= e
->pred_next
)
7556 basic_block src
= e
->src
;
7558 if (src
== ENTRY_BLOCK_PTR
|| ! TEST_BIT (nodes
, src
->index
))
7559 (*entry_edges
)[num_entries
++] = e
;
7566 /* Find the exit edges from the loop using the bitmap of loop nodes
7567 NODES and store in EXIT_EDGES array. Return the number of
7568 exit edges from the loop. */
7571 flow_loop_exit_edges_find (nodes
, exit_edges
)
7572 const sbitmap nodes
;
7581 /* Check all nodes within the loop to see if there are any
7582 successors not in the loop. Note that a node may have multiple
7583 exiting edges ????? A node can have one jumping edge and one fallthru
7584 edge so only one of these can exit the loop. */
7586 EXECUTE_IF_SET_IN_SBITMAP (nodes
, 0, node
, {
7587 for (e
= BASIC_BLOCK (node
)->succ
; e
; e
= e
->succ_next
)
7589 basic_block dest
= e
->dest
;
7591 if (dest
== EXIT_BLOCK_PTR
|| ! TEST_BIT (nodes
, dest
->index
))
7599 *exit_edges
= (edge
*) xmalloc (num_exits
* sizeof (edge
*));
7601 /* Store all exiting edges into an array. */
7603 EXECUTE_IF_SET_IN_SBITMAP (nodes
, 0, node
, {
7604 for (e
= BASIC_BLOCK (node
)->succ
; e
; e
= e
->succ_next
)
7606 basic_block dest
= e
->dest
;
7608 if (dest
== EXIT_BLOCK_PTR
|| ! TEST_BIT (nodes
, dest
->index
))
7609 (*exit_edges
)[num_exits
++] = e
;
7617 /* Find the nodes contained within the loop with header HEADER and
7618 latch LATCH and store in NODES. Return the number of nodes within
7622 flow_loop_nodes_find (header
, latch
, nodes
)
7631 stack
= (basic_block
*) xmalloc (n_basic_blocks
* sizeof (basic_block
));
7634 /* Start with only the loop header in the set of loop nodes. */
7635 sbitmap_zero (nodes
);
7636 SET_BIT (nodes
, header
->index
);
7638 header
->loop_depth
++;
7640 /* Push the loop latch on to the stack. */
7641 if (! TEST_BIT (nodes
, latch
->index
))
7643 SET_BIT (nodes
, latch
->index
);
7644 latch
->loop_depth
++;
7646 stack
[sp
++] = latch
;
7655 for (e
= node
->pred
; e
; e
= e
->pred_next
)
7657 basic_block ancestor
= e
->src
;
7659 /* If each ancestor not marked as part of loop, add to set of
7660 loop nodes and push on to stack. */
7661 if (ancestor
!= ENTRY_BLOCK_PTR
7662 && ! TEST_BIT (nodes
, ancestor
->index
))
7664 SET_BIT (nodes
, ancestor
->index
);
7665 ancestor
->loop_depth
++;
7667 stack
[sp
++] = ancestor
;
7675 /* Compute the depth first search order and store in the array
7676 DFS_ORDER if non-zero, marking the nodes visited in VISITED. If
7677 RC_ORDER is non-zero, return the reverse completion number for each
7678 node. Returns the number of nodes visited. A depth first search
7679 tries to get as far away from the starting point as quickly as
7683 flow_depth_first_order_compute (dfs_order
, rc_order
)
7690 int rcnum
= n_basic_blocks
- 1;
7693 /* Allocate stack for back-tracking up CFG. */
7694 stack
= (edge
*) xmalloc ((n_basic_blocks
+ 1) * sizeof (edge
));
7697 /* Allocate bitmap to track nodes that have been visited. */
7698 visited
= sbitmap_alloc (n_basic_blocks
);
7700 /* None of the nodes in the CFG have been visited yet. */
7701 sbitmap_zero (visited
);
7703 /* Push the first edge on to the stack. */
7704 stack
[sp
++] = ENTRY_BLOCK_PTR
->succ
;
7712 /* Look at the edge on the top of the stack. */
7717 /* Check if the edge destination has been visited yet. */
7718 if (dest
!= EXIT_BLOCK_PTR
&& ! TEST_BIT (visited
, dest
->index
))
7720 /* Mark that we have visited the destination. */
7721 SET_BIT (visited
, dest
->index
);
7724 dfs_order
[dfsnum
++] = dest
->index
;
7728 /* Since the DEST node has been visited for the first
7729 time, check its successors. */
7730 stack
[sp
++] = dest
->succ
;
7734 /* There are no successors for the DEST node so assign
7735 its reverse completion number. */
7737 rc_order
[rcnum
--] = dest
->index
;
7742 if (! e
->succ_next
&& src
!= ENTRY_BLOCK_PTR
)
7744 /* There are no more successors for the SRC node
7745 so assign its reverse completion number. */
7747 rc_order
[rcnum
--] = src
->index
;
7751 stack
[sp
- 1] = e
->succ_next
;
7758 sbitmap_free (visited
);
7760 /* The number of nodes visited should not be greater than
7762 if (dfsnum
> n_basic_blocks
)
7765 /* There are some nodes left in the CFG that are unreachable. */
7766 if (dfsnum
< n_basic_blocks
)
/* Compute the depth first search order on the _reverse_ graph and
   store in the array DFS_ORDER, marking the nodes visited in VISITED.
   Returns the number of nodes visited.

   The computation is split into three pieces:

   flow_dfs_compute_reverse_init () creates the necessary data
   structures.

   flow_dfs_compute_reverse_add_bb () adds a basic block to the data
   structures.  The block will start the search.

   flow_dfs_compute_reverse_execute () continues (or starts) the
   search using the block on the top of the stack, stopping when the
   stack is empty.

   flow_dfs_compute_reverse_finish () destroys the necessary data
   structures.

   Thus, the user will probably call ..._init(), call ..._add_bb() to
   add a beginning basic block to the stack, call ..._execute(),
   possibly add another bb to the stack and again call ..._execute(),
   ..., and finally call _finish().  */
7795 /* Initialize the data structures used for depth-first search on the
7796 reverse graph. If INITIALIZE_STACK is nonzero, the exit block is
7797 added to the basic block stack. DATA is the current depth-first
7798 search context. If INITIALIZE_STACK is non-zero, there is an
7799 element on the stack. */
7802 flow_dfs_compute_reverse_init (data
)
7803 depth_first_search_ds data
;
7805 /* Allocate stack for back-tracking up CFG. */
7807 (basic_block
*) xmalloc ((n_basic_blocks
- (INVALID_BLOCK
+ 1))
7808 * sizeof (basic_block
));
7811 /* Allocate bitmap to track nodes that have been visited. */
7812 data
->visited_blocks
= sbitmap_alloc (n_basic_blocks
- (INVALID_BLOCK
+ 1));
7814 /* None of the nodes in the CFG have been visited yet. */
7815 sbitmap_zero (data
->visited_blocks
);
7820 /* Add the specified basic block to the top of the dfs data
7821 structures. When the search continues, it will start at the
7825 flow_dfs_compute_reverse_add_bb (data
, bb
)
7826 depth_first_search_ds data
;
7829 data
->stack
[data
->sp
++] = bb
;
7833 /* Continue the depth-first search through the reverse graph starting
7834 with the block at the stack's top and ending when the stack is
7835 empty. Visited nodes are marked. Returns an unvisited basic
7836 block, or NULL if there is none available. */
7839 flow_dfs_compute_reverse_execute (data
)
7840 depth_first_search_ds data
;
7846 while (data
->sp
> 0)
7848 bb
= data
->stack
[--data
->sp
];
7850 /* Mark that we have visited this node. */
7851 if (!TEST_BIT (data
->visited_blocks
, bb
->index
- (INVALID_BLOCK
+ 1)))
7853 SET_BIT (data
->visited_blocks
, bb
->index
- (INVALID_BLOCK
+ 1));
7855 /* Perform depth-first search on adjacent vertices. */
7856 for (e
= bb
->pred
; e
; e
= e
->pred_next
)
7857 flow_dfs_compute_reverse_add_bb (data
, e
->src
);
7861 /* Determine if there are unvisited basic blocks. */
7862 for (i
= n_basic_blocks
- (INVALID_BLOCK
+ 1); --i
>= 0;)
7863 if (!TEST_BIT (data
->visited_blocks
, i
))
7864 return BASIC_BLOCK (i
+ (INVALID_BLOCK
+ 1));
7868 /* Destroy the data structures needed for depth-first search on the
7872 flow_dfs_compute_reverse_finish (data
)
7873 depth_first_search_ds data
;
7876 sbitmap_free (data
->visited_blocks
);
7881 /* Find the root node of the loop pre-header extended basic block and
7882 the edges along the trace from the root node to the loop header. */
7885 flow_loop_pre_header_scan (loop
)
7891 loop
->num_pre_header_edges
= 0;
7893 if (loop
->num_entries
!= 1)
7896 ebb
= loop
->entry_edges
[0]->src
;
7898 if (ebb
!= ENTRY_BLOCK_PTR
)
7902 /* Count number of edges along trace from loop header to
7903 root of pre-header extended basic block. Usually this is
7904 only one or two edges. */
7906 while (ebb
->pred
->src
!= ENTRY_BLOCK_PTR
&& ! ebb
->pred
->pred_next
)
7908 ebb
= ebb
->pred
->src
;
7912 loop
->pre_header_edges
= (edge
*) xmalloc (num
* sizeof (edge
*));
7913 loop
->num_pre_header_edges
= num
;
7915 /* Store edges in order that they are followed. The source
7916 of the first edge is the root node of the pre-header extended
7917 basic block and the destination of the last last edge is
7919 for (e
= loop
->entry_edges
[0]; num
; e
= e
->src
->pred
)
7921 loop
->pre_header_edges
[--num
] = e
;
7927 /* Return the block for the pre-header of the loop with header
7928 HEADER where DOM specifies the dominator information. Return NULL if
7929 there is no pre-header. */
7932 flow_loop_pre_header_find (header
, dom
)
7936 basic_block pre_header
;
7939 /* If block p is a predecessor of the header and is the only block
7940 that the header does not dominate, then it is the pre-header. */
7942 for (e
= header
->pred
; e
; e
= e
->pred_next
)
7944 basic_block node
= e
->src
;
7946 if (node
!= ENTRY_BLOCK_PTR
7947 && ! TEST_BIT (dom
[node
->index
], header
->index
))
7949 if (pre_header
== NULL
)
7953 /* There are multiple edges into the header from outside
7954 the loop so there is no pre-header block. */
7963 /* Add LOOP to the loop hierarchy tree where PREVLOOP was the loop
7964 previously added. The insertion algorithm assumes that the loops
7965 are added in the order found by a depth first search of the CFG. */
7968 flow_loop_tree_node_add (prevloop
, loop
)
7969 struct loop
*prevloop
;
7973 if (flow_loop_nested_p (prevloop
, loop
))
7975 prevloop
->inner
= loop
;
7976 loop
->outer
= prevloop
;
7980 while (prevloop
->outer
)
7982 if (flow_loop_nested_p (prevloop
->outer
, loop
))
7984 prevloop
->next
= loop
;
7985 loop
->outer
= prevloop
->outer
;
7988 prevloop
= prevloop
->outer
;
7991 prevloop
->next
= loop
;
7995 /* Build the loop hierarchy tree for LOOPS. */
7998 flow_loops_tree_build (loops
)
7999 struct loops
*loops
;
8004 num_loops
= loops
->num
;
8008 /* Root the loop hierarchy tree with the first loop found.
8009 Since we used a depth first search this should be the
8011 loops
->tree
= &loops
->array
[0];
8012 loops
->tree
->outer
= loops
->tree
->inner
= loops
->tree
->next
= NULL
;
8014 /* Add the remaining loops to the tree. */
8015 for (i
= 1; i
< num_loops
; i
++)
8016 flow_loop_tree_node_add (&loops
->array
[i
- 1], &loops
->array
[i
]);
8019 /* Helper function to compute loop nesting depth and enclosed loop level
8020 for the natural loop specified by LOOP at the loop depth DEPTH.
8021 Returns the loop level. */
8024 flow_loop_level_compute (loop
, depth
)
8034 /* Traverse loop tree assigning depth and computing level as the
8035 maximum level of all the inner loops of this loop. The loop
8036 level is equivalent to the height of the loop in the loop tree
8037 and corresponds to the number of enclosed loop levels (including
8039 for (inner
= loop
->inner
; inner
; inner
= inner
->next
)
8043 ilevel
= flow_loop_level_compute (inner
, depth
+ 1) + 1;
8048 loop
->level
= level
;
8049 loop
->depth
= depth
;
8053 /* Compute the loop nesting depth and enclosed loop level for the loop
8054 hierarchy tree specfied by LOOPS. Return the maximum enclosed loop
8058 flow_loops_level_compute (loops
)
8059 struct loops
*loops
;
8065 /* Traverse all the outer level loops. */
8066 for (loop
= loops
->tree
; loop
; loop
= loop
->next
)
8068 level
= flow_loop_level_compute (loop
, 1);
8076 /* Scan a single natural loop specified by LOOP collecting information
8077 about it specified by FLAGS. */
8080 flow_loop_scan (loops
, loop
, flags
)
8081 struct loops
*loops
;
8085 /* Determine prerequisites. */
8086 if ((flags
& LOOP_EXITS_DOMS
) && ! loop
->exit_edges
)
8087 flags
|= LOOP_EXIT_EDGES
;
8089 if (flags
& LOOP_ENTRY_EDGES
)
8091 /* Find edges which enter the loop header.
8092 Note that the entry edges should only
8093 enter the header of a natural loop. */
8095 = flow_loop_entry_edges_find (loop
->header
,
8097 &loop
->entry_edges
);
8100 if (flags
& LOOP_EXIT_EDGES
)
8102 /* Find edges which exit the loop. */
8104 = flow_loop_exit_edges_find (loop
->nodes
,
8108 if (flags
& LOOP_EXITS_DOMS
)
8112 /* Determine which loop nodes dominate all the exits
8114 loop
->exits_doms
= sbitmap_alloc (n_basic_blocks
);
8115 sbitmap_copy (loop
->exits_doms
, loop
->nodes
);
8116 for (j
= 0; j
< loop
->num_exits
; j
++)
8117 sbitmap_a_and_b (loop
->exits_doms
, loop
->exits_doms
,
8118 loops
->cfg
.dom
[loop
->exit_edges
[j
]->src
->index
]);
8120 /* The header of a natural loop must dominate
8122 if (! TEST_BIT (loop
->exits_doms
, loop
->header
->index
))
8126 if (flags
& LOOP_PRE_HEADER
)
8128 /* Look to see if the loop has a pre-header node. */
8130 = flow_loop_pre_header_find (loop
->header
, loops
->cfg
.dom
);
8132 /* Find the blocks within the extended basic block of
8133 the loop pre-header. */
8134 flow_loop_pre_header_scan (loop
);
8140 /* Find all the natural loops in the function and save in LOOPS structure
8141 and recalculate loop_depth information in basic block structures.
8142 FLAGS controls which loop information is collected.
8143 Return the number of natural loops found. */
8146 flow_loops_find (loops
, flags
)
8147 struct loops
*loops
;
8159 /* This function cannot be repeatedly called with different
8160 flags to build up the loop information. The loop tree
8161 must always be built if this function is called. */
8162 if (! (flags
& LOOP_TREE
))
8165 memset (loops
, 0, sizeof (*loops
));
8167 /* Taking care of this degenerate case makes the rest of
8168 this code simpler. */
8169 if (n_basic_blocks
== 0)
8175 /* Compute the dominators. */
8176 dom
= sbitmap_vector_alloc (n_basic_blocks
, n_basic_blocks
);
8177 calculate_dominance_info (NULL
, dom
, CDI_DOMINATORS
);
8179 /* Count the number of loop edges (back edges). This should be the
8180 same as the number of natural loops. */
8183 for (b
= 0; b
< n_basic_blocks
; b
++)
8187 header
= BASIC_BLOCK (b
);
8188 header
->loop_depth
= 0;
8190 for (e
= header
->pred
; e
; e
= e
->pred_next
)
8192 basic_block latch
= e
->src
;
8194 /* Look for back edges where a predecessor is dominated
8195 by this block. A natural loop has a single entry
8196 node (header) that dominates all the nodes in the
8197 loop. It also has single back edge to the header
8198 from a latch node. Note that multiple natural loops
8199 may share the same header. */
8200 if (b
!= header
->index
)
8203 if (latch
!= ENTRY_BLOCK_PTR
&& TEST_BIT (dom
[latch
->index
], b
))
8210 /* Compute depth first search order of the CFG so that outer
8211 natural loops will be found before inner natural loops. */
8212 dfs_order
= (int *) xmalloc (n_basic_blocks
* sizeof (int));
8213 rc_order
= (int *) xmalloc (n_basic_blocks
* sizeof (int));
8214 flow_depth_first_order_compute (dfs_order
, rc_order
);
8216 /* Save CFG derived information to avoid recomputing it. */
8217 loops
->cfg
.dom
= dom
;
8218 loops
->cfg
.dfs_order
= dfs_order
;
8219 loops
->cfg
.rc_order
= rc_order
;
8221 /* Allocate loop structures. */
8223 = (struct loop
*) xcalloc (num_loops
, sizeof (struct loop
));
8225 headers
= sbitmap_alloc (n_basic_blocks
);
8226 sbitmap_zero (headers
);
8228 loops
->shared_headers
= sbitmap_alloc (n_basic_blocks
);
8229 sbitmap_zero (loops
->shared_headers
);
8231 /* Find and record information about all the natural loops
8234 for (b
= 0; b
< n_basic_blocks
; b
++)
8238 /* Search the nodes of the CFG in reverse completion order
8239 so that we can find outer loops first. */
8240 header
= BASIC_BLOCK (rc_order
[b
]);
8242 /* Look for all the possible latch blocks for this header. */
8243 for (e
= header
->pred
; e
; e
= e
->pred_next
)
8245 basic_block latch
= e
->src
;
8247 /* Look for back edges where a predecessor is dominated
8248 by this block. A natural loop has a single entry
8249 node (header) that dominates all the nodes in the
8250 loop. It also has single back edge to the header
8251 from a latch node. Note that multiple natural loops
8252 may share the same header. */
8253 if (latch
!= ENTRY_BLOCK_PTR
8254 && TEST_BIT (dom
[latch
->index
], header
->index
))
8258 loop
= loops
->array
+ num_loops
;
8260 loop
->header
= header
;
8261 loop
->latch
= latch
;
8262 loop
->num
= num_loops
;
8269 for (i
= 0; i
< num_loops
; i
++)
8271 struct loop
*loop
= &loops
->array
[i
];
8273 /* Keep track of blocks that are loop headers so
8274 that we can tell which loops should be merged. */
8275 if (TEST_BIT (headers
, loop
->header
->index
))
8276 SET_BIT (loops
->shared_headers
, loop
->header
->index
);
8277 SET_BIT (headers
, loop
->header
->index
);
8279 /* Find nodes contained within the loop. */
8280 loop
->nodes
= sbitmap_alloc (n_basic_blocks
);
8282 = flow_loop_nodes_find (loop
->header
, loop
->latch
, loop
->nodes
);
8284 /* Compute first and last blocks within the loop.
8285 These are often the same as the loop header and
8286 loop latch respectively, but this is not always
8289 = BASIC_BLOCK (sbitmap_first_set_bit (loop
->nodes
));
8291 = BASIC_BLOCK (sbitmap_last_set_bit (loop
->nodes
));
8293 flow_loop_scan (loops
, loop
, flags
);
8296 /* Natural loops with shared headers may either be disjoint or
8297 nested. Disjoint loops with shared headers cannot be inner
8298 loops and should be merged. For now just mark loops that share
8300 for (i
= 0; i
< num_loops
; i
++)
8301 if (TEST_BIT (loops
->shared_headers
, loops
->array
[i
].header
->index
))
8302 loops
->array
[i
].shared
= 1;
8304 sbitmap_free (headers
);
8307 loops
->num
= num_loops
;
8309 /* Build the loop hierarchy tree. */
8310 flow_loops_tree_build (loops
);
8312 /* Assign the loop nesting depth and enclosed loop level for each
8314 loops
->levels
= flow_loops_level_compute (loops
);
8320 /* Update the information regarding the loops in the CFG
8321 specified by LOOPS. */
8323 flow_loops_update (loops
, flags
)
8324 struct loops
*loops
;
8327 /* One day we may want to update the current loop data. For now
8328 throw away the old stuff and rebuild what we need. */
8330 flow_loops_free (loops
);
8332 return flow_loops_find (loops
, flags
);
8336 /* Return non-zero if edge E enters header of LOOP from outside of LOOP. */
8339 flow_loop_outside_edge_p (loop
, e
)
8340 const struct loop
*loop
;
8343 if (e
->dest
!= loop
->header
)
8345 return (e
->src
== ENTRY_BLOCK_PTR
)
8346 || ! TEST_BIT (loop
->nodes
, e
->src
->index
);
8349 /* Clear LOG_LINKS fields of insns in a chain.
8350 Also clear the global_live_at_{start,end} fields of the basic block
8354 clear_log_links (insns
)
8360 for (i
= insns
; i
; i
= NEXT_INSN (i
))
8364 for (b
= 0; b
< n_basic_blocks
; b
++)
8366 basic_block bb
= BASIC_BLOCK (b
);
8368 bb
->global_live_at_start
= NULL
;
8369 bb
->global_live_at_end
= NULL
;
8372 ENTRY_BLOCK_PTR
->global_live_at_end
= NULL
;
8373 EXIT_BLOCK_PTR
->global_live_at_start
= NULL
;
8376 /* Given a register bitmap, turn on the bits in a HARD_REG_SET that
8377 correspond to the hard registers, if any, set in that map. This
8378 could be done far more efficiently by having all sorts of special-cases
8379 with moving single words, but probably isn't worth the trouble. */
8382 reg_set_to_hard_reg_set (to
, from
)
8388 EXECUTE_IF_SET_IN_BITMAP
8391 if (i
>= FIRST_PSEUDO_REGISTER
)
8393 SET_HARD_REG_BIT (*to
, i
);
8397 /* Called once at intialization time. */
8402 static int initialized
;
8406 gcc_obstack_init (&flow_obstack
);
8407 flow_firstobj
= (char *) obstack_alloc (&flow_obstack
, 0);
8412 obstack_free (&flow_obstack
, flow_firstobj
);
8413 flow_firstobj
= (char *) obstack_alloc (&flow_obstack
, 0);