/* Data flow analysis for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
/* This file contains the data flow analysis pass of the compiler.  It
   computes data flow information which tells combine_instructions
   which insns to consider combining and controls register allocation.

   Additional data flow information that is too bulky to record is
   generated during the analysis, and is used at that time to create
   autoincrement and autodecrement addressing.

   The first step is dividing the function into basic blocks.
   find_basic_blocks does this.  Then life_analysis determines
   where each register is live and where it is dead.

   ** find_basic_blocks **

   find_basic_blocks divides the current function's rtl into basic
   blocks and constructs the CFG.  The blocks are recorded in the
   basic_block_info array; the CFG exists in the edge structures
   referenced by the blocks.

   find_basic_blocks also finds any unreachable loops and deletes them.

   ** life_analysis **

   life_analysis is called immediately after find_basic_blocks.
   It uses the basic block information to determine where each
   hard or pseudo register is live.

   ** live-register info **

   The information about where each register is live is in two parts:
   the REG_NOTES of insns, and the vector basic_block->global_live_at_start.

   basic_block->global_live_at_start has an element for each basic
   block, and the element is a bit-vector with a bit for each hard or
   pseudo register.  The bit is 1 if the register is live at the
   beginning of the basic block.

   Two types of elements can be added to an insn's REG_NOTES.
   A REG_DEAD note is added to an insn's REG_NOTES for any register
   that meets both of two conditions:  The value in the register is not
   needed in subsequent insns and the insn does not replace the value in
   the register (in the case of multi-word hard registers, the value in
   each register must be replaced by the insn to avoid a REG_DEAD note).

   In the vast majority of cases, an object in a REG_DEAD note will be
   used somewhere in the insn.  The (rare) exception to this is if an
   insn uses a multi-word hard register and only some of the registers are
   needed in subsequent insns.  In that case, REG_DEAD notes will be
   provided for those hard registers that are not subsequently needed.
   Partial REG_DEAD notes of this type do not occur when an insn sets
   only some of the hard registers used in such a multi-word operand;
   omitting REG_DEAD notes for objects stored in an insn is optional and
   the desire to do so does not justify the complexity of the partial
   REG_DEAD notes.

   REG_UNUSED notes are added for each register that is set by the insn
   but is unused subsequently (if every register set by the insn is unused
   and the insn does not reference memory or have some other side-effect,
   the insn is deleted instead).  If only part of a multi-word hard
   register is used in a subsequent insn, REG_UNUSED notes are made for
   the parts that will not be used.

   To determine which registers are live after any insn, one can
   start from the beginning of the basic block and scan insns, noting
   which registers are set by each insn and which die there.  (A minimal
   sketch of such a scan follows this comment.)

   ** Other actions of life_analysis **

   life_analysis sets up the LOG_LINKS fields of insns because the
   information needed to do so is readily available.

   life_analysis deletes insns whose only effect is to store a value
   that is never used.

   life_analysis notices cases where a reference to a register as
   a memory address can be combined with a preceding or following
   incrementation or decrementation of the register.  The separate
   instruction to increment or decrement is deleted and the address
   is changed to a POST_INC or similar rtx.

   Each time an incrementing or decrementing address is created,
   a REG_INC element is added to the insn's REG_NOTES list.

   life_analysis fills in certain vectors containing information about
   register usage: REG_N_REFS, REG_N_DEATHS, REG_N_SETS, REG_LIVE_LENGTH,
   REG_N_CALLS_CROSSED and REG_BASIC_BLOCK.

   life_analysis sets current_function_sp_is_unchanging if the function
   doesn't modify the stack pointer.  */
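/* Illustrative sketch, not part of the original source: the liveness
   scan described above, written as a forward walk over one block.  It
   is deliberately simplified -- it handles only a single SET of a REG
   and ignores REG_UNUSED notes and multi-word subtleties.  `live' is
   assumed to be a regset the caller initialized from the block's
   global_live_at_start.  */

#if 0 /* Example only; never compiled.  */
static void
example_scan_block_liveness (bb, live)
     basic_block bb;
     regset live;
{
  rtx insn;

  for (insn = bb->head; ; insn = NEXT_INSN (insn))
    {
      if (INSN_P (insn))
        {
          rtx note;

          /* Registers mentioned in REG_DEAD notes die at this insn.  */
          for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
            if (REG_NOTE_KIND (note) == REG_DEAD)
              CLEAR_REGNO_REG_SET (live, REGNO (XEXP (note, 0)));

          /* A SET of a register makes it live afterward.  */
          if (GET_CODE (PATTERN (insn)) == SET
              && GET_CODE (SET_DEST (PATTERN (insn))) == REG)
            SET_REGNO_REG_SET (live, REGNO (SET_DEST (PATTERN (insn))));
        }
      if (insn == bb->end)
        break;
    }
}
#endif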
/* TODO:

   Split out from life_analysis:
	- local property discovery (bb->local_live, bb->local_set)
	- global property computation
	- pre/post modify transformation  */
#include "config.h"
#include "system.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-config.h"
#include "function.h"
#include "insn-flags.h"

#include "obstack.h"
#include "splay-tree.h"

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free
/* EXIT_IGNORE_STACK should be nonzero if, when returning from a function,
   the stack pointer does not matter.  The value is tested only in
   functions that have frame pointers.
   No definition is equivalent to always zero.  */
#ifndef EXIT_IGNORE_STACK
#define EXIT_IGNORE_STACK 0
#endif

#ifndef HAVE_epilogue
#define HAVE_epilogue 0
#endif

#ifndef HAVE_prologue
#define HAVE_prologue 0
#endif

#ifndef HAVE_sibcall_epilogue
#define HAVE_sibcall_epilogue 0
#endif

#ifndef LOCAL_REGNO
#define LOCAL_REGNO(REGNO) 0
#endif

#ifndef EPILOGUE_USES
#define EPILOGUE_USES(REGNO) 0
#endif
/* The obstack on which the flow graph components are allocated.  */

struct obstack flow_obstack;
static char *flow_firstobj;

/* Number of basic blocks in the current function.  */

int n_basic_blocks;

/* Number of edges in the current function.  */

int n_edges;

/* The basic block array.  */

varray_type basic_block_info;
/* The special entry and exit blocks.  */

struct basic_block_def entry_exit_blocks[2]
= {{NULL,			/* head */
    NULL,			/* end */
    NULL,			/* pred */
    NULL,			/* succ */
    NULL,			/* local_set */
    NULL,			/* cond_local_set */
    NULL,			/* global_live_at_start */
    NULL,			/* global_live_at_end */
    NULL,			/* aux */
    ENTRY_BLOCK,		/* index */
    0,				/* loop_depth */
    -1, -1,			/* eh_beg, eh_end */
    0				/* count */
   },
   {NULL,			/* head */
    NULL,			/* end */
    NULL,			/* pred */
    NULL,			/* succ */
    NULL,			/* local_set */
    NULL,			/* cond_local_set */
    NULL,			/* global_live_at_start */
    NULL,			/* global_live_at_end */
    NULL,			/* aux */
    EXIT_BLOCK,			/* index */
    0,				/* loop_depth */
    -1, -1,			/* eh_beg, eh_end */
    0				/* count */
   }};
/* Nonzero if the second flow pass has completed.  */
int flow2_completed;

/* Maximum register number used in this function, plus one.  */

int max_regno;

/* Indexed by n, giving various register information.  */

varray_type reg_n_info;

/* Size of a regset for the current function,
   in (1) bytes and (2) elements.  */

int regset_bytes;
int regset_size;

/* Regset of regs live when calls to `setjmp'-like functions happen.  */
/* ??? Does this exist only for the setjmp-clobbered warning message?  */

regset regs_live_at_setjmp;

/* List made of EXPR_LIST rtx's which gives pairs of pseudo registers
   that have to go in the same hard reg.
   The first two regs in the list are a pair, and the next two
   are another pair, etc.  */
rtx regs_may_share;

/* Callback that determines if it's ok for a function to have no
   noreturn attribute.  */
int (*lang_missing_noreturn_ok_p) PARAMS ((tree));

/* Set of registers that may be eliminable.  These are handled specially
   in updating regs_ever_live.  */

static HARD_REG_SET elim_reg_set;

/* The basic block structure for every insn, indexed by uid.  */

varray_type basic_block_for_insn;

/* The labels mentioned in non-jump rtl.  Valid during find_basic_blocks.  */
/* ??? Should probably be using LABEL_NUSES instead.  It would take a
   bit of surgery to be able to use or co-opt the routines in jump.  */

static rtx label_value_list;
static rtx tail_recursion_label_list;
/* Holds information for tracking conditional register life information.  */
struct reg_cond_life_info
{
  /* An EXPR_LIST of conditions under which a register is dead.  */
  rtx condition;

  /* ??? Could store mask of bytes that are dead, so that we could finally
     track lifetimes of multi-word registers accessed via subregs.  */
};

/* For use in communicating between propagate_block and its subroutines.
   Holds all information needed to compute life and def-use information.  */

struct propagate_block_info
{
  /* The basic block we're considering.  */
  basic_block bb;

  /* Bit N is set if register N is conditionally or unconditionally live.  */
  regset reg_live;

  /* Bit N is set if register N is set this insn.  */
  regset new_set;

  /* Element N is the next insn that uses (hard or pseudo) register N
     within the current basic block; or zero, if there is no such insn.  */
  rtx *reg_next_use;

  /* Contains a list of all the MEMs we are tracking for dead store
     elimination.  */
  rtx mem_set_list;

  /* If non-null, record the set of registers set unconditionally in the
     basic block.  */
  regset local_set;

  /* If non-null, record the set of registers set conditionally in the
     basic block.  */
  regset cond_local_set;

#ifdef HAVE_conditional_execution
  /* Indexed by register number, holds a reg_cond_life_info for each
     register that is not unconditionally live or dead.  */
  splay_tree reg_cond_dead;

  /* Bit N is set if register N is in an expression in reg_cond_dead.  */
  regset reg_cond_reg;
#endif

  /* The length of mem_set_list.  */
  int mem_set_list_len;

  /* Non-zero if the value of CC0 is live.  */
  int cc0_live;

  /* Flags controlling the set of information propagate_block collects.  */
  int flags;
};

/* Maximum length of pbi->mem_set_list before we start dropping
   new elements on the floor.  */
#define MAX_MEM_SET_LIST_LEN	100
/* Store the data structures necessary for depth-first search.  */
struct depth_first_search_dsS {
  /* stack for backtracking during the algorithm */
  basic_block *stack;

  /* number of edges in the stack.  That is, positions 0, ..., sp-1
     have edges.  */
  unsigned int sp;

  /* record of basic blocks already seen by depth-first search */
  sbitmap visited_blocks;
};
typedef struct depth_first_search_dsS *depth_first_search_ds;
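/* Illustrative sketch, not part of the original source: the intended
   calling sequence for the reverse depth-first search helpers declared
   below.  Seeding the search at EXIT and walking predecessor edges,
   any block the search hands back could not reach the exit, so a real
   caller (e.g. one connecting infinite loops to EXIT) would act on it
   and then re-seed the search with it.  */

#if 0 /* Example only; never compiled.  */
static void
example_reverse_dfs ()
{
  struct depth_first_search_dsS dfs_ds;
  basic_block bb;

  flow_dfs_compute_reverse_init (&dfs_ds);
  flow_dfs_compute_reverse_add_bb (&dfs_ds, EXIT_BLOCK_PTR);

  /* Drain the search; each block returned was unreachable from EXIT
     in the reverse graph, so use it as a new starting point.  */
  while ((bb = flow_dfs_compute_reverse_execute (&dfs_ds)) != NULL)
    flow_dfs_compute_reverse_add_bb (&dfs_ds, bb);

  flow_dfs_compute_reverse_finish (&dfs_ds);
}
#endif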
/* Forward declarations */
static int count_basic_blocks		PARAMS ((rtx));
static void find_basic_blocks_1		PARAMS ((rtx));
static rtx find_label_refs		PARAMS ((rtx, rtx));
static void clear_edges			PARAMS ((void));
static void make_edges			PARAMS ((rtx));
static void make_label_edge		PARAMS ((sbitmap *, basic_block,
						 rtx, int));
static void make_eh_edge		PARAMS ((sbitmap *, eh_nesting_info *,
						 basic_block, rtx, int));
static void mark_critical_edges		PARAMS ((void));
static void move_stray_eh_region_notes	PARAMS ((void));
static void record_active_eh_regions	PARAMS ((rtx));

static void commit_one_edge_insertion	PARAMS ((edge));

static void delete_unreachable_blocks	PARAMS ((void));
static void delete_eh_regions		PARAMS ((void));
static int can_delete_note_p		PARAMS ((rtx));
static void expunge_block		PARAMS ((basic_block));
static int can_delete_label_p		PARAMS ((rtx));
static int tail_recursion_label_p	PARAMS ((rtx));
static int merge_blocks_move_predecessor_nojumps PARAMS ((basic_block,
							  basic_block));
static int merge_blocks_move_successor_nojumps PARAMS ((basic_block,
							basic_block));
static int merge_blocks			PARAMS ((edge, basic_block,
						 basic_block));
static void try_merge_blocks		PARAMS ((void));
static void tidy_fallthru_edges		PARAMS ((void));
static int verify_wide_reg_1		PARAMS ((rtx *, void *));
static void verify_wide_reg		PARAMS ((int, rtx, rtx));
static void verify_local_live_at_start	PARAMS ((regset, basic_block));
static int set_noop_p			PARAMS ((rtx));
static int noop_move_p			PARAMS ((rtx));
static void delete_noop_moves		PARAMS ((rtx));
static void notice_stack_pointer_modification_1 PARAMS ((rtx, rtx, void *));
static void notice_stack_pointer_modification PARAMS ((rtx));
static void mark_reg			PARAMS ((rtx, void *));
static void mark_regs_live_at_end	PARAMS ((regset));
static int set_phi_alternative_reg      PARAMS ((rtx, int, int, void *));
static void calculate_global_regs_live	PARAMS ((sbitmap, sbitmap, int));
static void propagate_block_delete_insn PARAMS ((basic_block, rtx));
static rtx propagate_block_delete_libcall PARAMS ((basic_block, rtx, rtx));
static int insn_dead_p			PARAMS ((struct propagate_block_info *,
						 rtx, int, rtx));
static int libcall_dead_p		PARAMS ((struct propagate_block_info *,
						 rtx, rtx));
static void mark_set_regs		PARAMS ((struct propagate_block_info *,
						 rtx, rtx));
static void mark_set_1			PARAMS ((struct propagate_block_info *,
						 enum rtx_code, rtx, rtx,
						 rtx, int));
#ifdef HAVE_conditional_execution
static int mark_regno_cond_dead		PARAMS ((struct propagate_block_info *,
						 int, rtx));
static void free_reg_cond_life_info	PARAMS ((splay_tree_value));
static int flush_reg_cond_reg_1		PARAMS ((splay_tree_node, void *));
static void flush_reg_cond_reg		PARAMS ((struct propagate_block_info *,
						 int));
static rtx elim_reg_cond		PARAMS ((rtx, unsigned int));
static rtx ior_reg_cond			PARAMS ((rtx, rtx, int));
static rtx not_reg_cond			PARAMS ((rtx));
static rtx and_reg_cond			PARAMS ((rtx, rtx, int));
#endif
#ifdef AUTO_INC_DEC
static void attempt_auto_inc		PARAMS ((struct propagate_block_info *,
						 rtx, rtx, rtx, rtx, rtx));
static void find_auto_inc		PARAMS ((struct propagate_block_info *,
						 rtx, rtx));
static int try_pre_increment_1		PARAMS ((struct propagate_block_info *,
						 rtx));
static int try_pre_increment		PARAMS ((rtx, rtx, HOST_WIDE_INT));
#endif
static void mark_used_reg		PARAMS ((struct propagate_block_info *,
						 rtx, rtx, rtx));
static void mark_used_regs		PARAMS ((struct propagate_block_info *,
						 rtx, rtx, rtx));
void dump_flow_info			PARAMS ((FILE *));
void debug_flow_info			PARAMS ((void));
static void dump_edge_info		PARAMS ((FILE *, edge, int));
static void print_rtl_and_abort		PARAMS ((void));

static void invalidate_mems_from_autoinc PARAMS ((struct propagate_block_info *,
						  rtx));
static void invalidate_mems_from_set	PARAMS ((struct propagate_block_info *,
						 rtx));
static void remove_fake_successors	PARAMS ((basic_block));
static void flow_nodes_print		PARAMS ((const char *, const sbitmap,
						 FILE *));
static void flow_edge_list_print	PARAMS ((const char *, const edge *,
						 int, FILE *));
static void flow_loops_cfg_dump		PARAMS ((const struct loops *,
						 FILE *));
static int flow_loop_nested_p		PARAMS ((struct loop *,
						 struct loop *));
static int flow_loop_entry_edges_find	PARAMS ((basic_block, const sbitmap,
						 edge **));
static int flow_loop_exit_edges_find	PARAMS ((const sbitmap, edge **));
static int flow_loop_nodes_find		PARAMS ((basic_block, basic_block,
						 sbitmap));
static int flow_depth_first_order_compute PARAMS ((int *, int *));
static void flow_dfs_compute_reverse_init
  PARAMS ((depth_first_search_ds));
static void flow_dfs_compute_reverse_add_bb
  PARAMS ((depth_first_search_ds, basic_block));
static basic_block flow_dfs_compute_reverse_execute
  PARAMS ((depth_first_search_ds));
static void flow_dfs_compute_reverse_finish
  PARAMS ((depth_first_search_ds));
static void flow_loop_pre_header_scan	PARAMS ((struct loop *));
static basic_block flow_loop_pre_header_find PARAMS ((basic_block,
						      const sbitmap *));
static void flow_loop_tree_node_add	PARAMS ((struct loop *, struct loop *));
static void flow_loops_tree_build	PARAMS ((struct loops *));
static int flow_loop_level_compute	PARAMS ((struct loop *, int));
static int flow_loops_level_compute	PARAMS ((struct loops *));
static void allocate_bb_life_data	PARAMS ((void));
/* Find basic blocks of the current function.
   F is the first insn of the function and NREGS the number of register
   numbers in use.  */

void
find_basic_blocks (f, nregs, file)
     rtx f;
     int nregs ATTRIBUTE_UNUSED;
     FILE *file ATTRIBUTE_UNUSED;
{
  int max_uid;

  /* Flush out existing data.  */
  if (basic_block_info != NULL)
    {
      int i;

      clear_edges ();

      /* Clear bb->aux on all extant basic blocks.  We'll use this as a
	 tag for reuse during create_basic_block, just in case some pass
	 copies around basic block notes improperly.  */
      for (i = 0; i < n_basic_blocks; ++i)
	BASIC_BLOCK (i)->aux = NULL;

      VARRAY_FREE (basic_block_info);
    }

  n_basic_blocks = count_basic_blocks (f);

  /* Size the basic block table.  The actual structures will be allocated
     by find_basic_blocks_1, since we want to keep the structure pointers
     stable across calls to find_basic_blocks.  */
  /* ??? This whole issue would be much simpler if we called find_basic_blocks
     exactly once, and thereafter we don't have a single long chain of
     instructions at all until close to the end of compilation when we
     actually lay them out.  */

  VARRAY_BB_INIT (basic_block_info, n_basic_blocks, "basic_block_info");

  find_basic_blocks_1 (f);

  /* Record the block to which an insn belongs.  */
  /* ??? This should be done another way, by which (perhaps) a label is
     tagged directly with the basic block that it starts.  It is used for
     more than that currently, but IMO that is the only valid use.  */

  max_uid = get_max_uid ();

  /* Leave space for insns life_analysis makes in some cases for auto-inc.
     These cases are rare, so we don't need too much space.  */
  max_uid += max_uid / 10;

  compute_bb_for_insn (max_uid);

  /* Discover the edges of our cfg.  */
  record_active_eh_regions (f);
  make_edges (label_value_list);

  /* Do very simple cleanup now, for the benefit of code that runs between
     here and cleanup_cfg, e.g. thread_prologue_and_epilogue_insns.  */
  tidy_fallthru_edges ();

  mark_critical_edges ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
}
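/* Illustrative sketch, not part of the original source: how a caller
   typically drives this pass -- build blocks and edges from the insn
   chain, then tidy the result.  The arguments mirror those used by
   the pass manager; the dump file may be NULL.  */

#if 0 /* Example only; never compiled.  */
static void
example_rebuild_cfg (dump)
     FILE *dump;
{
  find_basic_blocks (get_insns (), max_reg_num (), dump);
  cleanup_cfg (get_insns ());
}
#endif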
void
check_function_return_warnings ()
{
  if (warn_missing_noreturn
      && !TREE_THIS_VOLATILE (cfun->decl)
      && EXIT_BLOCK_PTR->pred == NULL
      && (lang_missing_noreturn_ok_p
	  && !lang_missing_noreturn_ok_p (cfun->decl)))
    warning ("function might be possible candidate for attribute `noreturn'");

  /* If we have a path to EXIT, then we do return.  */
  if (TREE_THIS_VOLATILE (cfun->decl)
      && EXIT_BLOCK_PTR->pred != NULL)
    warning ("`noreturn' function does return");

  /* If the clobber_return_insn appears in some basic block, then we
     do reach the end without returning a value.  */
  else if (warn_return_type
	   && cfun->x_clobber_return_insn != NULL
	   && EXIT_BLOCK_PTR->pred != NULL)
    {
      int max_uid = get_max_uid ();

      /* If clobber_return_insn was excised by jump1, then renumber_insns
	 can make max_uid smaller than the number still recorded in our rtx.
	 That's fine, since this is a quick way of verifying that the insn
	 is no longer in the chain.  */
      if (INSN_UID (cfun->x_clobber_return_insn) < max_uid)
	{
	  /* Recompute insn->block mapping, since the initial mapping is
	     set before we delete unreachable blocks.  */
	  compute_bb_for_insn (max_uid);

	  if (BLOCK_FOR_INSN (cfun->x_clobber_return_insn) != NULL)
	    warning ("control reaches end of non-void function");
	}
    }
}
/* Count the basic blocks of the function.  */

static int
count_basic_blocks (f)
     rtx f;
{
  register rtx insn;
  register RTX_CODE prev_code;
  register int count = 0;
  int eh_region = 0;
  int call_had_abnormal_edge = 0;

  prev_code = JUMP_INSN;
  for (insn = f; insn; insn = NEXT_INSN (insn))
    {
      register RTX_CODE code = GET_CODE (insn);

      if (code == CODE_LABEL
	  || (GET_RTX_CLASS (code) == 'i'
	      && (prev_code == JUMP_INSN
		  || prev_code == BARRIER
		  || (prev_code == CALL_INSN && call_had_abnormal_edge))))
	count++;

      /* Record whether this call created an edge.  */
      if (code == CALL_INSN)
	{
	  rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
	  int region = (note ? INTVAL (XEXP (note, 0)) : 1);

	  call_had_abnormal_edge = 0;

	  /* If there is an EH region or rethrow, we have an edge.  */
	  if ((eh_region && region > 0)
	      || find_reg_note (insn, REG_EH_RETHROW, NULL_RTX))
	    call_had_abnormal_edge = 1;
	  else if (nonlocal_goto_handler_labels && region >= 0)
	    /* If there is a nonlocal goto label and the specified
	       region number isn't -1, we have an edge.  (0 means
	       no throw, but might have a nonlocal goto).  */
	    call_had_abnormal_edge = 1;
	}

      if (code != NOTE)
	prev_code = code;
      else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
	++eh_region;
      else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
	--eh_region;
    }

  /* The rest of the compiler works a bit smoother when we don't have to
     check for the edge case of do-nothing functions with no basic blocks.  */
  if (count == 0)
    {
      emit_insn (gen_rtx_USE (VOIDmode, const0_rtx));
      count = 1;
    }

  return count;
}
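/* Illustrative sketch, not part of the original source: the
   block-boundary rule used above, restated as a predicate.  An insn
   begins a new basic block when it is a label, or when the previous
   "real" insn was a jump, a barrier, or a call that created an
   abnormal edge.  */

#if 0 /* Example only; never compiled.  */
static int
example_starts_new_block_p (code, prev_code, call_had_abnormal_edge)
     RTX_CODE code, prev_code;
     int call_had_abnormal_edge;
{
  return (code == CODE_LABEL
	  || (GET_RTX_CLASS (code) == 'i'
	      && (prev_code == JUMP_INSN
		  || prev_code == BARRIER
		  || (prev_code == CALL_INSN && call_had_abnormal_edge))));
}
#endif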
/* Scan a list of insns for labels referred to other than by jumps.
   This is used to scan the alternatives of a call placeholder.  */

static rtx
find_label_refs (f, lvl)
     rtx f;
     rtx lvl;
{
  rtx insn;

  for (insn = f; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && GET_CODE (insn) != JUMP_INSN)
      {
	rtx note;

	/* Make a list of all labels referred to other than by jumps
	   (which just don't have the REG_LABEL notes).

	   Make a special exception for labels followed by an ADDR*VEC,
	   as this would be a part of the tablejump setup code.

	   Make a special exception for the eh_return_stub_label, which
	   we know isn't part of any otherwise visible control flow.

	   Make a special exception for registers loaded with label
	   values just before jump insns that use them.  */

	for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	  if (REG_NOTE_KIND (note) == REG_LABEL)
	    {
	      rtx lab = XEXP (note, 0), next;

	      if (lab == eh_return_stub_label)
		;
	      else if ((next = next_nonnote_insn (lab)) != NULL
		       && GET_CODE (next) == JUMP_INSN
		       && (GET_CODE (PATTERN (next)) == ADDR_VEC
			   || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
		;
	      else if (GET_CODE (lab) == NOTE)
		;
	      else if (GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
		       && find_reg_note (NEXT_INSN (insn), REG_LABEL, lab))
		;
	      else
		lvl = alloc_EXPR_LIST (0, XEXP (note, 0), lvl);
	    }
      }

  return lvl;
}
/* Find all basic blocks of the function whose first insn is F.

   Collect and return a list of labels whose addresses are taken.  This
   will be used in make_edges for use with computed gotos.  */

static void
find_basic_blocks_1 (f)
     rtx f;
{
  register rtx insn, next;
  int i = 0;
  rtx bb_note = NULL_RTX;
  rtx eh_list = NULL_RTX;
  rtx lvl = NULL_RTX;
  rtx trll = NULL_RTX;
  rtx head = NULL_RTX;
  rtx end = NULL_RTX;

  /* We process the instructions in a slightly different way than we did
     previously.  This is so that we see a NOTE_BASIC_BLOCK after we have
     closed out the previous block, so that it gets attached at the proper
     place.  Since this form should be equivalent to the previous,
     count_basic_blocks continues to use the old form as a check.  */

  for (insn = f; insn; insn = next)
    {
      enum rtx_code code = GET_CODE (insn);

      next = NEXT_INSN (insn);

      switch (code)
	{
	case NOTE:
	  {
	    int kind = NOTE_LINE_NUMBER (insn);

	    /* Keep a LIFO list of the currently active exception notes.  */
	    if (kind == NOTE_INSN_EH_REGION_BEG)
	      eh_list = alloc_INSN_LIST (insn, eh_list);
	    else if (kind == NOTE_INSN_EH_REGION_END)
	      {
		rtx t = eh_list;

		eh_list = XEXP (eh_list, 1);
		free_INSN_LIST_node (t);
	      }

	    /* Look for basic block notes with which to keep the
	       basic_block_info pointers stable.  Unthread the note now;
	       we'll put it back at the right place in create_basic_block.
	       Or not at all if we've already found a note in this block.  */
	    else if (kind == NOTE_INSN_BASIC_BLOCK)
	      {
		if (bb_note == NULL_RTX)
		  bb_note = insn;
		else
		  next = flow_delete_insn (insn);
	      }
	    break;
	  }

	case CODE_LABEL:
	  /* A basic block starts at a label.  If we've closed one off due
	     to a barrier or some such, no need to do it again.  */
	  if (head != NULL_RTX)
	    {
	      /* While we now have edge lists with which other portions of
		 the compiler might determine a call ending a basic block
		 does not imply an abnormal edge, it will be a bit before
		 everything can be updated.  So continue to emit a noop at
		 the end of such a block.  */
	      if (GET_CODE (end) == CALL_INSN && ! SIBLING_CALL_P (end))
		{
		  rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
		  end = emit_insn_after (nop, end);
		}

	      create_basic_block (i++, head, end, bb_note);
	      bb_note = NULL_RTX;
	    }

	  head = end = insn;
	  break;

	case JUMP_INSN:
	  /* A basic block ends at a jump.  */
	  if (head == NULL_RTX)
	    head = insn;
	  else
	    {
	      /* ??? Make a special check for table jumps.  The way this
		 happens is truly and amazingly gross.  We are about to
		 create a basic block that contains just a code label and
		 an addr*vec jump insn.  Worse, an addr_diff_vec creates
		 its own natural loop.

		 Prevent this bit of brain damage, pasting things together
		 correctly in make_edges.

		 The correct solution involves emitting the table directly
		 on the tablejump instruction as a note, or JUMP_LABEL.  */

	      if (GET_CODE (PATTERN (insn)) == ADDR_VEC
		  || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
		{
		  head = end = NULL;
		  n_basic_blocks--;
		  break;
		}
	    }
	  end = insn;
	  goto new_bb_inclusive;

	case BARRIER:
	  /* A basic block ends at a barrier.  It may be that an unconditional
	     jump already closed the basic block -- no need to do it again.  */
	  if (head == NULL_RTX)
	    break;

	  /* While we now have edge lists with which other portions of the
	     compiler might determine a call ending a basic block does not
	     imply an abnormal edge, it will be a bit before everything can
	     be updated.  So continue to emit a noop at the end of such a
	     block.  */
	  if (GET_CODE (end) == CALL_INSN && ! SIBLING_CALL_P (end))
	    {
	      rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
	      end = emit_insn_after (nop, end);
	    }
	  goto new_bb_exclusive;

	case CALL_INSN:
	  {
	    /* Record whether this call created an edge.  */
	    rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
	    int region = (note ? INTVAL (XEXP (note, 0)) : 1);
	    int call_has_abnormal_edge = 0;

	    if (GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
	      {
		/* Scan each of the alternatives for label refs.  */
		lvl = find_label_refs (XEXP (PATTERN (insn), 0), lvl);
		lvl = find_label_refs (XEXP (PATTERN (insn), 1), lvl);
		lvl = find_label_refs (XEXP (PATTERN (insn), 2), lvl);
		/* Record its tail recursion label, if any.  */
		if (XEXP (PATTERN (insn), 3) != NULL_RTX)
		  trll = alloc_EXPR_LIST (0, XEXP (PATTERN (insn), 3), trll);
	      }

	    /* If there is an EH region or rethrow, we have an edge.  */
	    if ((eh_list && region > 0)
		|| find_reg_note (insn, REG_EH_RETHROW, NULL_RTX))
	      call_has_abnormal_edge = 1;
	    else if (nonlocal_goto_handler_labels && region >= 0)
	      /* If there is a nonlocal goto label and the specified
		 region number isn't -1, we have an edge.  (0 means
		 no throw, but might have a nonlocal goto).  */
	      call_has_abnormal_edge = 1;

	    /* A basic block ends at a call that can either throw or
	       do a non-local goto.  */
	    if (call_has_abnormal_edge)
	      {
	      new_bb_inclusive:
		if (head == NULL_RTX)
		  head = insn;
		end = insn;

	      new_bb_exclusive:
		create_basic_block (i++, head, end, bb_note);
		head = end = NULL_RTX;
		bb_note = NULL_RTX;
		break;
	      }
	  }
	  /* Fall through.  */

	default:
	  if (GET_RTX_CLASS (code) == 'i')
	    {
	      if (head == NULL_RTX)
		head = insn;
	      end = insn;
	    }
	  break;
	}

      if (GET_RTX_CLASS (code) == 'i'
	  && GET_CODE (insn) != JUMP_INSN)
	{
	  rtx note;

	  /* Make a list of all labels referred to other than by jumps.

	     Make a special exception for labels followed by an ADDR*VEC,
	     as this would be a part of the tablejump setup code.

	     Make a special exception for the eh_return_stub_label, which
	     we know isn't part of any otherwise visible control flow.

	     Make a special exception for registers loaded with label
	     values just before jump insns that use them.  */

	  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	    if (REG_NOTE_KIND (note) == REG_LABEL)
	      {
		rtx lab = XEXP (note, 0), next;

		if (lab == eh_return_stub_label)
		  ;
		else if ((next = next_nonnote_insn (lab)) != NULL
			 && GET_CODE (next) == JUMP_INSN
			 && (GET_CODE (PATTERN (next)) == ADDR_VEC
			     || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
		  ;
		else if (GET_CODE (lab) == NOTE)
		  ;
		else if (GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
			 && find_reg_note (NEXT_INSN (insn), REG_LABEL, lab))
		  ;
		else
		  lvl = alloc_EXPR_LIST (0, XEXP (note, 0), lvl);
	      }
	}
    }

  if (head != NULL_RTX)
    create_basic_block (i++, head, end, bb_note);
  else if (bb_note)
    flow_delete_insn (bb_note);

  if (i != n_basic_blocks)
    abort ();

  label_value_list = lvl;
  tail_recursion_label_list = trll;
}
/* Tidy the CFG by deleting unreachable code and whatnot.  */

void
cleanup_cfg (f)
     rtx f;
{
  delete_unreachable_blocks ();
  move_stray_eh_region_notes ();
  record_active_eh_regions (f);
  try_merge_blocks ();
  mark_critical_edges ();

  /* Kill the data we won't maintain.  */
  free_EXPR_LIST_list (&label_value_list);
  free_EXPR_LIST_list (&tail_recursion_label_list);
}
/* Create a new basic block consisting of the instructions between
   HEAD and END inclusive.  Reuses the note and basic block struct
   in BB_NOTE, if any.  */

void
create_basic_block (index, head, end, bb_note)
     int index;
     rtx head, end, bb_note;
{
  basic_block bb;

  if (bb_note
      && ! RTX_INTEGRATED_P (bb_note)
      && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
      && ! bb->aux)
    {
      /* If we found an existing note, thread it back onto the chain.  */

      rtx after;

      if (GET_CODE (head) == CODE_LABEL)
	after = head;
      else
	{
	  after = PREV_INSN (head);
	  head = bb_note;
	}

      if (after != bb_note && NEXT_INSN (after) != bb_note)
	reorder_insns (bb_note, bb_note, after);
    }
  else
    {
      /* Otherwise we must create a note and a basic block structure.
	 Since we allow basic block structs in rtl, give the struct
	 the same lifetime by allocating it off the function obstack
	 rather than using malloc.  */

      bb = (basic_block) obstack_alloc (&flow_obstack, sizeof (*bb));
      memset (bb, 0, sizeof (*bb));

      if (GET_CODE (head) == CODE_LABEL)
	bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
      else
	{
	  bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
	  head = bb_note;
	}
      NOTE_BASIC_BLOCK (bb_note) = bb;
    }

  /* Always include the bb note in the block.  */
  if (NEXT_INSN (end) == bb_note)
    end = bb_note;

  bb->head = head;
  bb->end = end;
  bb->index = index;
  BASIC_BLOCK (index) = bb;

  /* Tag the block so that we know it has been used when considering
     other basic block notes.  */
  bb->aux = bb;
}
/* Records the basic block struct in BB_FOR_INSN, for every instruction
   indexed by INSN_UID.  MAX is the size of the array.  */

void
compute_bb_for_insn (max)
     int max;
{
  int i;

  if (basic_block_for_insn)
    VARRAY_FREE (basic_block_for_insn);
  VARRAY_BB_INIT (basic_block_for_insn, max, "basic_block_for_insn");

  for (i = 0; i < n_basic_blocks; ++i)
    {
      basic_block bb = BASIC_BLOCK (i);
      rtx insn, end;

      end = bb->end;
      insn = bb->head;
      while (1)
	{
	  int uid = INSN_UID (insn);
	  if (uid < max)
	    VARRAY_BB (basic_block_for_insn, uid) = bb;
	  if (insn == end)
	    break;
	  insn = NEXT_INSN (insn);
	}
    }
}
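/* Illustrative sketch, not part of the original source: once
   compute_bb_for_insn has run, the mapping is consulted through
   BLOCK_FOR_INSN.  It is valid only while basic_block_for_insn is
   allocated and INSN_UID is within the size passed above.  */

#if 0 /* Example only; never compiled.  */
static basic_block
example_block_of (insn)
     rtx insn;
{
  return BLOCK_FOR_INSN (insn);
}
#endif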
/* Free the memory associated with the edge structures.  */

static void
clear_edges ()
{
  int i;
  edge n, e;

  for (i = 0; i < n_basic_blocks; ++i)
    {
      basic_block bb = BASIC_BLOCK (i);

      for (e = bb->succ; e; e = n)
	{
	  n = e->succ_next;
	  free (e);
	}

      bb->succ = 0;
      bb->pred = 0;
    }

  for (e = ENTRY_BLOCK_PTR->succ; e; e = n)
    {
      n = e->succ_next;
      free (e);
    }

  ENTRY_BLOCK_PTR->succ = 0;
  EXIT_BLOCK_PTR->pred = 0;

  n_edges = 0;
}
/* Identify the edges between basic blocks.

   NONLOCAL_LABEL_LIST is a list of non-local labels in the function.  Blocks
   that are otherwise unreachable may be reachable with a non-local goto.

   BB_EH_END is an array indexed by basic block number in which we record
   the list of exception regions active at the end of the basic block.  */

static void
make_edges (label_value_list)
     rtx label_value_list;
{
  int i;
  eh_nesting_info *eh_nest_info = init_eh_nesting_info ();
  sbitmap *edge_cache = NULL;

  /* Assume no computed jump; revise as we create edges.  */
  current_function_has_computed_jump = 0;

  /* Heavy use of computed goto in machine-generated code can lead to
     nearly fully-connected CFGs.  In that case we spend a significant
     amount of time searching the edge lists for duplicates.  */
  if (forced_labels || label_value_list)
    {
      edge_cache = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
      sbitmap_vector_zero (edge_cache, n_basic_blocks);
    }

  /* By nature of the way these get numbered, block 0 is always the entry.  */
  make_edge (edge_cache, ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU);

  for (i = 0; i < n_basic_blocks; ++i)
    {
      basic_block bb = BASIC_BLOCK (i);
      rtx insn, x;
      enum rtx_code code;
      int force_fallthru = 0;

      /* Examine the last instruction of the block, and discover the
	 ways we can leave the block.  */

      insn = bb->end;
      code = GET_CODE (insn);

      /* A branch.  */
      if (code == JUMP_INSN)
	{
	  rtx tmp;

	  /* Recognize a non-local goto as a branch outside the
	     current function.  */
	  if (find_reg_note (insn, REG_NON_LOCAL_GOTO, NULL_RTX))
	    ;

	  /* ??? Recognize a tablejump and do the right thing.  */
	  else if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
		   && (tmp = NEXT_INSN (tmp)) != NULL_RTX
		   && GET_CODE (tmp) == JUMP_INSN
		   && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
		       || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
	    {
	      rtvec vec;
	      int j;

	      if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
		vec = XVEC (PATTERN (tmp), 0);
	      else
		vec = XVEC (PATTERN (tmp), 1);

	      for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
		make_label_edge (edge_cache, bb,
				 XEXP (RTVEC_ELT (vec, j), 0), 0);

	      /* Some targets (eg, ARM) emit a conditional jump that also
		 contains the out-of-range target.  Scan for these and
		 add an edge if necessary.  */
	      if ((tmp = single_set (insn)) != NULL
		  && SET_DEST (tmp) == pc_rtx
		  && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
		  && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
		make_label_edge (edge_cache, bb,
				 XEXP (XEXP (SET_SRC (tmp), 2), 0), 0);

#ifdef CASE_DROPS_THROUGH
	      /* Silly VAXen.  The ADDR_VEC is going to be in the way of
		 us naturally detecting fallthru into the next block.  */
	      force_fallthru = 1;
#endif
	    }

	  /* If this is a computed jump, then mark it as reaching
	     everything on the label_value_list and forced_labels list.  */
	  else if (computed_jump_p (insn))
	    {
	      current_function_has_computed_jump = 1;

	      for (x = label_value_list; x; x = XEXP (x, 1))
		make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);

	      for (x = forced_labels; x; x = XEXP (x, 1))
		make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
	    }

	  /* A return insn creates an edge out to the exit block.  */
	  else if (returnjump_p (insn))
	    make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);

	  /* Otherwise, we have a plain conditional or unconditional jump.  */
	  else
	    {
	      if (! JUMP_LABEL (insn))
		abort ();
	      make_label_edge (edge_cache, bb, JUMP_LABEL (insn), 0);
	    }
	}

      /* If this is a sibling call insn, then this is in effect a
	 combined call and return, and so we need an edge to the
	 exit block.  No need to worry about EH edges, since we
	 wouldn't have created the sibling call in the first place.  */

      if (code == CALL_INSN && SIBLING_CALL_P (insn))
	make_edge (edge_cache, bb, EXIT_BLOCK_PTR,
		   EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);

      /* If this is a CALL_INSN, then mark it as reaching the active EH
	 handler for this CALL_INSN.  If we're handling asynchronous
	 exceptions then any insn can reach any of the active handlers.

	 Also mark the CALL_INSN as reaching any nonlocal goto handler.  */

      else if (code == CALL_INSN || asynchronous_exceptions)
	{
	  /* Add any appropriate EH edges.  We do this unconditionally
	     since there may be a REG_EH_REGION or REG_EH_RETHROW note
	     on the call, and this needn't be within an EH region.  */
	  make_eh_edge (edge_cache, eh_nest_info, bb, insn, bb->eh_end);

	  /* If we have asynchronous exceptions, do the same for *all*
	     exception regions active in the block.  */
	  if (asynchronous_exceptions
	      && bb->eh_beg != bb->eh_end)
	    {
	      if (bb->eh_beg >= 0)
		make_eh_edge (edge_cache, eh_nest_info, bb,
			      NULL_RTX, bb->eh_beg);

	      for (x = bb->head; x != bb->end; x = NEXT_INSN (x))
		if (GET_CODE (x) == NOTE
		    && (NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_BEG
			|| NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_END))
		  {
		    int region = NOTE_EH_HANDLER (x);
		    make_eh_edge (edge_cache, eh_nest_info, bb,
				  NULL_RTX, region);
		  }
	    }

	  if (code == CALL_INSN && nonlocal_goto_handler_labels)
	    {
	      /* ??? This could be made smarter: in some cases it's possible
		 to tell that certain calls will not do a nonlocal goto.

		 For example, if the nested functions that do the nonlocal
		 gotos do not have their addresses taken, then only calls to
		 those functions or to other nested functions that use them
		 could possibly do nonlocal gotos.  */
	      /* We do know that a REG_EH_REGION note with a value less
		 than 0 is guaranteed not to perform a non-local goto.  */
	      rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
	      if (!note || INTVAL (XEXP (note, 0)) >= 0)
		for (x = nonlocal_goto_handler_labels; x; x = XEXP (x, 1))
		  make_label_edge (edge_cache, bb, XEXP (x, 0),
				   EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
	    }
	}

      /* We know something about the structure of the function __throw in
	 libgcc2.c.  It is the only function that ever contains eh_stub
	 labels.  It modifies its return address so that the last block
	 returns to one of the eh_stub labels within it.  So we have to
	 make additional edges in the flow graph.  */
      if (i + 1 == n_basic_blocks && eh_return_stub_label != 0)
	make_label_edge (edge_cache, bb, eh_return_stub_label, EDGE_EH);

      /* Find out if we can drop through to the next block.  */
      insn = next_nonnote_insn (insn);
      if (!insn || (i + 1 == n_basic_blocks && force_fallthru))
	make_edge (edge_cache, bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
      else if (i + 1 < n_basic_blocks)
	{
	  rtx tmp = BLOCK_HEAD (i + 1);
	  if (GET_CODE (tmp) == NOTE)
	    tmp = next_nonnote_insn (tmp);
	  if (force_fallthru || insn == tmp)
	    make_edge (edge_cache, bb, BASIC_BLOCK (i + 1), EDGE_FALLTHRU);
	}
    }

  free_eh_nesting_info (eh_nest_info);
  if (edge_cache)
    sbitmap_vector_free (edge_cache);
}
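/* Illustrative sketch, not part of the original source: after
   make_edges, the CFG is walked through the per-block edge lists it
   just built, e.g. to dump every edge.  */

#if 0 /* Example only; never compiled.  */
static void
example_walk_edges (file)
     FILE *file;
{
  int i;

  for (i = 0; i < n_basic_blocks; ++i)
    {
      edge e;

      for (e = BASIC_BLOCK (i)->succ; e; e = e->succ_next)
	dump_edge_info (file, e, 0);
    }
}
#endif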
/* Create an edge between two basic blocks.  FLAGS are auxiliary information
   about the edge that is accumulated between calls.  */

void
make_edge (edge_cache, src, dst, flags)
     sbitmap *edge_cache;
     basic_block src, dst;
     int flags;
{
  int use_edge_cache;
  edge e;

  /* Don't bother with edge cache for ENTRY or EXIT; there aren't that
     many edges to them, and we didn't allocate memory for it.  */
  use_edge_cache = (edge_cache
		    && src != ENTRY_BLOCK_PTR
		    && dst != EXIT_BLOCK_PTR);

  /* Make sure we don't add duplicate edges.  */
  switch (use_edge_cache)
    {
    default:
      /* Quick test for non-existence of the edge.  */
      if (! TEST_BIT (edge_cache[src->index], dst->index))
	break;

      /* The edge exists; early exit if no work to do.  */
      if (flags == 0)
	return;

      /* Fall through.  */
    case 0:
      for (e = src->succ; e; e = e->succ_next)
	if (e->dest == dst)
	  {
	    e->flags |= flags;
	    return;
	  }
      break;
    }

  e = (edge) xcalloc (1, sizeof (*e));
  n_edges++;

  e->succ_next = src->succ;
  e->pred_next = dst->pred;
  e->src = src;
  e->dest = dst;
  e->flags = flags;

  src->succ = e;
  dst->pred = e;

  if (use_edge_cache)
    SET_BIT (edge_cache[src->index], dst->index);
}
/* Create an edge from a basic block to a label.  */

static void
make_label_edge (edge_cache, src, label, flags)
     sbitmap *edge_cache;
     basic_block src;
     rtx label;
     int flags;
{
  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If the label was never emitted, this insn is junk, but avoid a
     crash trying to refer to BLOCK_FOR_INSN (label).  This can happen
     as a result of a syntax error and a diagnostic has already been
     printed.  */

  if (INSN_UID (label) == 0)
    return;

  make_edge (edge_cache, src, BLOCK_FOR_INSN (label), flags);
}
/* Create the edges generated by INSN in REGION.  */

static void
make_eh_edge (edge_cache, eh_nest_info, src, insn, region)
     sbitmap *edge_cache;
     eh_nesting_info *eh_nest_info;
     basic_block src;
     rtx insn;
     int region;
{
  handler_info **handler_list;
  int num, is_call;

  is_call = (insn && GET_CODE (insn) == CALL_INSN ? EDGE_ABNORMAL_CALL : 0);
  num = reachable_handlers (region, eh_nest_info, insn, &handler_list);
  while (--num >= 0)
    make_label_edge (edge_cache, src, handler_list[num]->handler_label,
		     EDGE_ABNORMAL | EDGE_EH | is_call);
}
/* EH_REGION notes appearing between basic blocks are ambiguous, and even
   dangerous if we intend to move basic blocks around.  Move such notes
   into the following block.  */

static void
move_stray_eh_region_notes ()
{
  int i;
  basic_block b1, b2;

  if (n_basic_blocks < 2)
    return;

  b2 = BASIC_BLOCK (n_basic_blocks - 1);
  for (i = n_basic_blocks - 2; i >= 0; --i, b2 = b1)
    {
      rtx insn, next, list = NULL_RTX;

      b1 = BASIC_BLOCK (i);
      for (insn = NEXT_INSN (b1->end); insn != b2->head; insn = next)
	{
	  next = NEXT_INSN (insn);
	  if (GET_CODE (insn) == NOTE
	      && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG
		  || NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
	    {
	      /* Unlink from the insn chain.  */
	      NEXT_INSN (PREV_INSN (insn)) = next;
	      PREV_INSN (next) = PREV_INSN (insn);

	      /* Queue it.  */
	      NEXT_INSN (insn) = list;
	      list = insn;
	    }
	}

      if (list == NULL_RTX)
	continue;

      /* Find where to insert these things.  */
      insn = b2->head;
      if (GET_CODE (insn) == CODE_LABEL)
	insn = NEXT_INSN (insn);

      while (list)
	{
	  next = NEXT_INSN (list);
	  add_insn_after (list, insn);
	  list = next;
	}
    }
}
/* Recompute eh_beg/eh_end for each basic block.  */

static void
record_active_eh_regions (f)
     rtx f;
{
  rtx insn, eh_list = NULL_RTX;
  int i = 0;
  basic_block bb = BASIC_BLOCK (0);

  for (insn = f; insn; insn = NEXT_INSN (insn))
    {
      if (bb->head == insn)
	bb->eh_beg = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);

      if (GET_CODE (insn) == NOTE)
	{
	  int kind = NOTE_LINE_NUMBER (insn);
	  if (kind == NOTE_INSN_EH_REGION_BEG)
	    eh_list = alloc_INSN_LIST (insn, eh_list);
	  else if (kind == NOTE_INSN_EH_REGION_END)
	    {
	      rtx t = XEXP (eh_list, 1);
	      free_INSN_LIST_node (eh_list);
	      eh_list = t;
	    }
	}

      if (bb->end == insn)
	{
	  bb->eh_end = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);
	  i += 1;
	  if (i == n_basic_blocks)
	    break;
	  bb = BASIC_BLOCK (i);
	}
    }
}
/* Identify critical edges and set the bits appropriately.  */

static void
mark_critical_edges ()
{
  int i, n = n_basic_blocks;
  basic_block bb;

  /* We begin with the entry block.  This is not terribly important now,
     but could be if a front end (Fortran) implemented alternate entry
     points.  */
  bb = ENTRY_BLOCK_PTR;
  i = -1;

  while (1)
    {
      edge e;

      /* (1) Critical edges must have a source with multiple successors.  */
      if (bb->succ && bb->succ->succ_next)
	{
	  for (e = bb->succ; e; e = e->succ_next)
	    {
	      /* (2) Critical edges must have a destination with multiple
		 predecessors.  Note that we know there is at least one
		 predecessor -- the edge we followed to get here.  */
	      if (e->dest->pred->pred_next)
		e->flags |= EDGE_CRITICAL;
	      else
		e->flags &= ~EDGE_CRITICAL;
	    }
	}
      else
	{
	  for (e = bb->succ; e; e = e->succ_next)
	    e->flags &= ~EDGE_CRITICAL;
	}

      if (++i >= n)
	break;
      bb = BASIC_BLOCK (i);
    }
}
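/* Illustrative sketch, not part of the original source: conditions
   (1) and (2) above combined into a single predicate.  */

#if 0 /* Example only; never compiled.  */
static int
example_edge_is_critical_p (e)
     edge e;
{
  /* More than one successor out of the source, and more than one
     predecessor into the destination.  */
  return (e->src->succ && e->src->succ->succ_next
	  && e->dest->pred && e->dest->pred->pred_next);
}
#endif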
/* Split a block BB after insn INSN creating a new fallthru edge.
   Return the new edge.  Note that to keep other parts of the compiler happy,
   this function renumbers all the basic blocks so that the new
   one has a number one greater than the block split.  */

edge
split_block (bb, insn)
     basic_block bb;
     rtx insn;
{
  basic_block new_bb;
  edge new_edge;
  edge e;
  rtx bb_note;
  int i, j;

  /* There is no point splitting the block after its end.  */
  if (bb->end == insn)
    return 0;

  /* Create the new structures.  */
  new_bb = (basic_block) obstack_alloc (&flow_obstack, sizeof (*new_bb));
  new_edge = (edge) xcalloc (1, sizeof (*new_edge));
  n_edges++;

  memset (new_bb, 0, sizeof (*new_bb));

  new_bb->head = NEXT_INSN (insn);
  new_bb->end = bb->end;
  bb->end = insn;

  new_bb->succ = bb->succ;
  bb->succ = new_edge;
  new_bb->pred = new_edge;
  new_bb->count = bb->count;
  new_bb->loop_depth = bb->loop_depth;

  new_edge->src = bb;
  new_edge->dest = new_bb;
  new_edge->flags = EDGE_FALLTHRU;
  new_edge->probability = REG_BR_PROB_BASE;
  new_edge->count = bb->count;

  /* Redirect the src of the successor edges of bb to point to new_bb.  */
  for (e = new_bb->succ; e; e = e->succ_next)
    e->src = new_bb;

  /* Place the new block just after the block being split.  */
  VARRAY_GROW (basic_block_info, ++n_basic_blocks);

  /* Some parts of the compiler expect blocks to be numbered in
     sequential order, so insert the new block immediately after the
     block being split.  */
  j = bb->index;
  for (i = n_basic_blocks - 1; i > j + 1; --i)
    {
      basic_block tmp = BASIC_BLOCK (i - 1);
      BASIC_BLOCK (i) = tmp;
      tmp->index = i;
    }

  BASIC_BLOCK (i) = new_bb;
  new_bb->index = i;

  /* Create the basic block note.  */
  bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK,
			      new_bb->head);
  NOTE_BASIC_BLOCK (bb_note) = new_bb;
  new_bb->head = bb_note;

  update_bb_for_insn (new_bb);

  if (bb->global_live_at_start)
    {
      new_bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
      new_bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
      COPY_REG_SET (new_bb->global_live_at_end, bb->global_live_at_end);

      /* We now have to calculate which registers are live at the end
	 of the split basic block and at the start of the new basic
	 block.  Start with those registers that are known to be live
	 at the end of the original basic block and get
	 propagate_block to determine which registers are live.  */
      COPY_REG_SET (new_bb->global_live_at_start, bb->global_live_at_end);
      propagate_block (new_bb, new_bb->global_live_at_start, NULL, NULL, 0);
      COPY_REG_SET (bb->global_live_at_end,
		    new_bb->global_live_at_start);
    }

  return new_edge;
}
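/* Illustrative sketch, not part of the original source: a typical use
   of split_block.  The returned edge's dest is the newly created block
   holding the insns after INSN; the result is null when INSN is
   already the last insn of BB.  */

#if 0 /* Example only; never compiled.  */
static basic_block
example_split_after (bb, insn)
     basic_block bb;
     rtx insn;
{
  edge e = split_block (bb, insn);

  return e ? e->dest : NULL;
}
#endif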
/* Split a (typically critical) edge.  Return the new block.
   Abort on abnormal edges.

   ??? The code generally expects to be called on critical edges.
   The case of a block ending in an unconditional jump to a
   block with multiple predecessors is not handled optimally.  */

basic_block
split_edge (edge_in)
     edge edge_in;
{
  basic_block old_pred, bb, old_succ;
  edge edge_out;
  rtx bb_note;
  int i, j;

  /* Abnormal edges cannot be split.  */
  if ((edge_in->flags & EDGE_ABNORMAL) != 0)
    abort ();

  old_pred = edge_in->src;
  old_succ = edge_in->dest;

  /* Remove the existing edge from the destination's pred list.  */
  {
    edge *pp;
    for (pp = &old_succ->pred; *pp != edge_in; pp = &(*pp)->pred_next)
      continue;
    *pp = edge_in->pred_next;
    edge_in->pred_next = NULL;
  }

  /* Create the new structures.  */
  bb = (basic_block) obstack_alloc (&flow_obstack, sizeof (*bb));
  edge_out = (edge) xcalloc (1, sizeof (*edge_out));
  n_edges++;

  memset (bb, 0, sizeof (*bb));

  /* ??? This info is likely going to be out of date very soon.  */
  if (old_succ->global_live_at_start)
    {
      bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
      bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
      COPY_REG_SET (bb->global_live_at_start, old_succ->global_live_at_start);
      COPY_REG_SET (bb->global_live_at_end, old_succ->global_live_at_start);
    }

  /* Wire them up.  */
  bb->pred = edge_in;
  bb->succ = edge_out;
  bb->count = edge_in->count;

  edge_in->dest = bb;
  edge_in->flags &= ~EDGE_CRITICAL;

  edge_out->pred_next = old_succ->pred;
  edge_out->succ_next = NULL;
  edge_out->src = bb;
  edge_out->dest = old_succ;
  edge_out->flags = EDGE_FALLTHRU;
  edge_out->probability = REG_BR_PROB_BASE;
  edge_out->count = edge_in->count;

  old_succ->pred = edge_out;

  /* Tricky case -- if there existed a fallthru into the successor
     (and we're not it) we must add a new unconditional jump around
     the new block we're actually interested in.

     Further, if that edge is critical, this means a second new basic
     block must be created to hold it.  In order to simplify correct
     insn placement, do this before we touch the existing basic block
     ordering for the block we were really wanting.  */
  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
    {
      edge e;
      for (e = edge_out->pred_next; e; e = e->pred_next)
	if (e->flags & EDGE_FALLTHRU)
	  break;

      if (e)
	{
	  basic_block jump_block;
	  rtx pos;

	  if ((e->flags & EDGE_CRITICAL) == 0
	      && e->src != ENTRY_BLOCK_PTR)
	    {
	      /* Non critical -- we can simply add a jump to the end
		 of the existing predecessor.  */
	      jump_block = e->src;
	    }
	  else
	    {
	      /* We need a new block to hold the jump.  The simplest
		 way to do the bulk of the work here is to recursively
		 call ourselves.  */
	      jump_block = split_edge (e);
	      e = jump_block->succ;
	    }

	  /* Now add the jump insn ...  */
	  pos = emit_jump_insn_after (gen_jump (old_succ->head),
				      jump_block->end);
	  jump_block->end = pos;
	  if (basic_block_for_insn)
	    set_block_for_insn (pos, jump_block);
	  emit_barrier_after (pos);

	  /* ... let jump know that label is in use, ...  */
	  JUMP_LABEL (pos) = old_succ->head;
	  ++LABEL_NUSES (old_succ->head);

	  /* ... and clear fallthru on the outgoing edge.  */
	  e->flags &= ~EDGE_FALLTHRU;

	  /* Continue splitting the interesting edge.  */
	}
    }

  /* Place the new block just in front of the successor.  */
  VARRAY_GROW (basic_block_info, ++n_basic_blocks);
  if (old_succ == EXIT_BLOCK_PTR)
    j = n_basic_blocks - 1;
  else
    j = old_succ->index;
  for (i = n_basic_blocks - 1; i > j; --i)
    {
      basic_block tmp = BASIC_BLOCK (i - 1);
      BASIC_BLOCK (i) = tmp;
      tmp->index = i;
    }
  BASIC_BLOCK (i) = bb;
  bb->index = i;

  /* Create the basic block note.

     Where we place the note can have a noticeable impact on the generated
     code.  Consider this cfg:

		        E
			|
			0
		       / \
		   +->1-->2--->E
                   |  |
		   +--+

     If we need to insert an insn on the edge from block 0 to block 1,
     we want to ensure the instructions we insert are outside of any
     loop notes that physically sit between block 0 and block 1.  Otherwise
     we confuse the loop optimizer into thinking the loop is a phony.  */
  if (old_succ != EXIT_BLOCK_PTR
      && PREV_INSN (old_succ->head)
      && GET_CODE (PREV_INSN (old_succ->head)) == NOTE
      && NOTE_LINE_NUMBER (PREV_INSN (old_succ->head)) == NOTE_INSN_LOOP_BEG)
    bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK,
				PREV_INSN (old_succ->head));
  else if (old_succ != EXIT_BLOCK_PTR)
    bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, old_succ->head);
  else
    bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
  NOTE_BASIC_BLOCK (bb_note) = bb;
  bb->head = bb->end = bb_note;

  /* Not quite simple -- for non-fallthru edges, we must adjust the
     predecessor's jump instruction to target our new block.  */
  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
    {
      rtx tmp, insn = old_pred->end;
      rtx old_label = old_succ->head;
      rtx new_label = gen_label_rtx ();

      if (GET_CODE (insn) != JUMP_INSN)
	abort ();

      /* ??? Recognize a tablejump and adjust all matching cases.  */
      if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
	  && (tmp = NEXT_INSN (tmp)) != NULL_RTX
	  && GET_CODE (tmp) == JUMP_INSN
	  && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
	      || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
	{
	  rtvec vec;
	  int j;

	  if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
	    vec = XVEC (PATTERN (tmp), 0);
	  else
	    vec = XVEC (PATTERN (tmp), 1);

	  for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
	    if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
	      {
		RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (VOIDmode, new_label);
		--LABEL_NUSES (old_label);
		++LABEL_NUSES (new_label);
	      }

	  /* Handle casesi dispatch insns.  */
	  if ((tmp = single_set (insn)) != NULL
	      && SET_DEST (tmp) == pc_rtx
	      && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
	      && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
	      && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
	    {
	      XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (VOIDmode,
							   new_label);
	      --LABEL_NUSES (old_label);
	      ++LABEL_NUSES (new_label);
	    }
	}
      else
	{
	  /* This would have indicated an abnormal edge.  */
	  if (computed_jump_p (insn))
	    abort ();

	  /* A return instruction can't be redirected.  */
	  if (returnjump_p (insn))
	    abort ();

	  /* If the insn doesn't go where we think, we're confused.  */
	  if (JUMP_LABEL (insn) != old_label)
	    abort ();

	  redirect_jump (insn, new_label, 0);
	}

      emit_label_before (new_label, bb_note);
      bb->head = new_label;
    }

  return bb;
}
/* Queue instructions for insertion on an edge between two basic blocks.
   The new instructions and basic blocks (if any) will not appear in the
   CFG until commit_edge_insertions is called.  */

void
insert_insn_on_edge (pattern, e)
     rtx pattern;
     edge e;
{
  /* We cannot insert instructions on an abnormal critical edge.
     It will be easier to find the culprit if we die now.  */
  if ((e->flags & (EDGE_ABNORMAL|EDGE_CRITICAL))
      == (EDGE_ABNORMAL|EDGE_CRITICAL))
    abort ();

  if (e->insns == NULL_RTX)
    start_sequence ();
  else
    push_to_sequence (e->insns);

  emit_insn (pattern);

  e->insns = get_insns ();
  end_sequence ();
}
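/* Illustrative sketch, not part of the original source: the usual
   pattern for edge insertion.  PATTERN is any insn pattern the caller
   built; nothing appears in the insn chain until
   commit_edge_insertions runs.  */

#if 0 /* Example only; never compiled.  */
static void
example_insert_on_entry_edges (pattern)
     rtx pattern;
{
  edge e;

  for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
    insert_insn_on_edge (copy_rtx (pattern), e);

  commit_edge_insertions ();
}
#endif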
/* Update the CFG for the instructions queued on edge E.  */

static void
commit_one_edge_insertion (e)
     edge e;
{
  rtx before = NULL_RTX, after = NULL_RTX, insns, tmp, last;
  basic_block bb;

  /* Pull the insns off the edge now since the edge might go away.  */
  insns = e->insns;
  e->insns = NULL_RTX;

  /* Figure out where to put these things.  If the destination has
     one predecessor, insert there.  Except for the exit block.  */
  if (e->dest->pred->pred_next == NULL
      && e->dest != EXIT_BLOCK_PTR)
    {
      bb = e->dest;

      /* Get the location correct wrt a code label, and "nice" wrt
	 a basic block note, and before everything else.  */
      tmp = bb->head;
      if (GET_CODE (tmp) == CODE_LABEL)
	tmp = NEXT_INSN (tmp);
      if (NOTE_INSN_BASIC_BLOCK_P (tmp))
	tmp = NEXT_INSN (tmp);
      if (tmp == bb->head)
	before = tmp;
      else
	after = PREV_INSN (tmp);
    }

  /* If the source has one successor and the edge is not abnormal,
     insert there.  Except for the entry block.  */
  else if ((e->flags & EDGE_ABNORMAL) == 0
	   && e->src->succ->succ_next == NULL
	   && e->src != ENTRY_BLOCK_PTR)
    {
      bb = e->src;
      /* It is possible to have a non-simple jump here.  Consider a target
	 where some forms of unconditional jumps clobber a register.  This
	 happens on the fr30 for example.

	 We know this block has a single successor, so we can just emit
	 the queued insns before the jump.  */
      if (GET_CODE (bb->end) == JUMP_INSN)
	before = bb->end;
      else
	{
	  /* We'd better be fallthru, or we've lost track of what's what.  */
	  if ((e->flags & EDGE_FALLTHRU) == 0)
	    abort ();

	  after = bb->end;
	}
    }

  /* Otherwise we must split the edge.  */
  else
    {
      bb = split_edge (e);
      after = bb->end;
    }

  /* Now that we've found the spot, do the insertion.  */

  /* Set the new block number for these insns, if structure is allocated.  */
  if (basic_block_for_insn)
    {
      rtx i;
      for (i = insns; i != NULL_RTX; i = NEXT_INSN (i))
	set_block_for_insn (i, bb);
    }

  if (before)
    {
      emit_insns_before (insns, before);
      if (before == bb->head)
	bb->head = insns;

      last = prev_nonnote_insn (before);
    }
  else
    {
      last = emit_insns_after (insns, after);
      if (after == bb->end)
	bb->end = last;
    }

  if (returnjump_p (last))
    {
      /* ??? Remove all outgoing edges from BB and add one for EXIT.
	 This is not currently a problem because this only happens
	 for the (single) epilogue, which already has a fallthru edge
	 to EXIT.  */

      e = bb->succ;
      if (e->dest != EXIT_BLOCK_PTR
	  || e->succ_next != NULL
	  || (e->flags & EDGE_FALLTHRU) == 0)
	abort ();
      e->flags &= ~EDGE_FALLTHRU;

      emit_barrier_after (last);
      bb->end = last;

      if (before)
	flow_delete_insn (before);
    }
  else if (GET_CODE (last) == JUMP_INSN)
    abort ();
}
/* Update the CFG for all queued instructions.  */

void
commit_edge_insertions ()
{
  int i;
  basic_block bb;

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  i = -1;
  bb = ENTRY_BLOCK_PTR;
  while (1)
    {
      edge e, next;

      for (e = bb->succ; e; e = next)
	{
	  next = e->succ_next;
	  if (e->insns)
	    commit_one_edge_insertion (e);
	}

      if (++i >= n_basic_blocks)
	break;
      bb = BASIC_BLOCK (i);
    }
}
/* Add fake edges to the function exit for any non constant calls in
   the bitmap of blocks specified by BLOCKS or to the whole CFG if
   BLOCKS is zero.  Return the number of blocks that were split.  */

int
flow_call_edges_add (blocks)
     sbitmap blocks;
{
  int i;
  int blocks_split = 0;
  int bb_num = 0;
  basic_block *bbs;

  /* Map bb indices into basic block pointers since split_block
     will renumber the basic blocks.  */

  bbs = xmalloc (n_basic_blocks * sizeof (*bbs));

  if (! blocks)
    {
      for (i = 0; i < n_basic_blocks; i++)
	bbs[bb_num++] = BASIC_BLOCK (i);
    }
  else
    {
      EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
      {
	bbs[bb_num++] = BASIC_BLOCK (i);
      });
    }

  /* Now add fake edges to the function exit for any non constant
     calls since there is no way that we can determine if they will
     return or not.  */

  for (i = 0; i < bb_num; i++)
    {
      basic_block bb = bbs[i];
      rtx insn;
      rtx prev_insn;

      for (insn = bb->end; ; insn = prev_insn)
	{
	  prev_insn = PREV_INSN (insn);
	  if (GET_CODE (insn) == CALL_INSN && ! CONST_CALL_P (insn))
	    {
	      edge e;

	      /* Note that the following may create a new basic block
		 and renumber the existing basic blocks.  */
	      e = split_block (bb, insn);
	      if (e)
		blocks_split++;

	      make_edge (NULL, bb, EXIT_BLOCK_PTR, EDGE_FAKE);
	    }
	  if (insn == bb->head)
	    break;
	}
    }

  if (blocks_split)
    verify_flow_info ();

  free (bbs);
  return blocks_split;
}
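/* Illustrative sketch, not part of the original source: passing a
   null sbitmap applies the transformation to every block.  A nonzero
   return means blocks were renumbered, so any cached block indices
   are stale.  */

#if 0 /* Example only; never compiled.  */
static int
example_add_call_edges ()
{
  return flow_call_edges_add (NULL);
}
#endif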
/* Delete all unreachable basic blocks.  */

void
delete_unreachable_blocks ()
{
  basic_block *worklist, *tos;
  int deleted_handler;
  edge e;
  int i, n;

  n = n_basic_blocks;
  tos = worklist = (basic_block *) xmalloc (sizeof (basic_block) * n);

  /* Use basic_block->aux as a marker.  Clear them all.  */

  for (i = 0; i < n; ++i)
    BASIC_BLOCK (i)->aux = NULL;

  /* Add our starting points to the worklist.  Almost always there will
     be only one.  It isn't inconceivable that we might one day directly
     support Fortran alternate entry points.  */

  for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
    {
      *tos++ = e->dest;

      /* Mark the block with a handy non-null value.  */
      e->dest->aux = e;
    }

  /* Iterate: find everything reachable from what we've already seen.  */

  while (tos != worklist)
    {
      basic_block b = *--tos;

      for (e = b->succ; e; e = e->succ_next)
        if (!e->dest->aux)
          {
            *tos++ = e->dest;
            e->dest->aux = e;
          }
    }

  /* Delete all unreachable basic blocks.  Count down so that we don't
     interfere with the block renumbering that happens in flow_delete_block.  */

  deleted_handler = 0;

  for (i = n - 1; i >= 0; --i)
    {
      basic_block b = BASIC_BLOCK (i);

      if (b->aux != NULL)
        /* This block was found.  Tidy up the mark.  */
        b->aux = NULL;
      else
        deleted_handler |= flow_delete_block (b);
    }

  tidy_fallthru_edges ();

  /* If we deleted an exception handler, we may have EH region begin/end
     blocks to remove as well.  */
  if (deleted_handler)
    delete_eh_regions ();

  free (worklist);
}
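
/* The mark phase above is a standard worklist reachability computation:
   seed the worklist with the entry block's successors, then repeatedly
   pop a block and push each successor whose aux mark is still null.
   Any block left unmarked when the worklist drains is unreachable.  */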
/* Find EH regions for which there is no longer a handler, and delete them.  */

static void
delete_eh_regions ()
{
  rtx insn;

  update_rethrow_references ();

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == NOTE)
      {
        if ((NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
            || (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
          {
            int num = NOTE_EH_HANDLER (insn);
            /* A NULL handler indicates a region is no longer needed,
               as long as its rethrow label isn't used.  */
            if (get_first_handler (num) == NULL && ! rethrow_used (num))
              {
                NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
                NOTE_SOURCE_FILE (insn) = 0;
              }
          }
      }
}
/* Return true if NOTE is not one of the ones that must be kept paired,
   so that we may simply delete them.  */

static int
can_delete_note_p (note)
     rtx note;
{
  return (NOTE_LINE_NUMBER (note) == NOTE_INSN_DELETED
          || NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK);
}
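
/* For example, NOTE_INSN_EH_REGION_BEG and NOTE_INSN_EH_REGION_END come
   in pairs (see delete_eh_regions above), so they are not deletable
   here; freestanding notes such as NOTE_INSN_DELETED and
   NOTE_INSN_BASIC_BLOCK are.  */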
/* Unlink a chain of insns between START and FINISH, leaving notes
   that must be paired.  */

void
flow_delete_insn_chain (start, finish)
     rtx start, finish;
{
  /* Unchain the insns one by one.  It would be quicker to delete all
     of these with a single unchaining, rather than one at a time, but
     we need to keep the NOTEs.  */

  rtx next;

  while (1)
    {
      next = NEXT_INSN (start);
      if (GET_CODE (start) == NOTE && !can_delete_note_p (start))
        ;
      else if (GET_CODE (start) == CODE_LABEL
               && ! can_delete_label_p (start))
        {
          const char *name = LABEL_NAME (start);
          PUT_CODE (start, NOTE);
          NOTE_LINE_NUMBER (start) = NOTE_INSN_DELETED_LABEL;
          NOTE_SOURCE_FILE (start) = name;
        }
      else
        next = flow_delete_insn (start);

      if (start == finish)
        break;
      start = next;
    }
}
/* Delete the insns in a (non-live) block.  We physically delete every
   non-deleted-note insn, and update the flow graph appropriately.

   Return nonzero if we deleted an exception handler.  */

/* ??? Preserving all such notes strikes me as wrong.  It would be nice
   to post-process the stream to remove empty blocks, loops, ranges, etc.  */

int
flow_delete_block (b)
     basic_block b;
{
  int deleted_handler = 0;
  rtx insn, end, tmp;

  /* If the head of this block is a CODE_LABEL, then it might be the
     label for an exception handler which can't be reached.

     We need to remove the label from the exception_handler_label list
     and remove the associated NOTE_INSN_EH_REGION_BEG and
     NOTE_INSN_EH_REGION_END notes.  */

  insn = b->head;

  never_reached_warning (insn);

  if (GET_CODE (insn) == CODE_LABEL)
    {
      rtx x, *prev = &exception_handler_labels;

      for (x = exception_handler_labels; x; x = XEXP (x, 1))
        {
          if (XEXP (x, 0) == insn)
            {
              /* Found a match, splice this label out of the EH label list.  */
              *prev = XEXP (x, 1);
              XEXP (x, 1) = NULL_RTX;
              XEXP (x, 0) = NULL_RTX;

              /* Remove the handler from all regions */
              remove_handler (insn);
              deleted_handler = 1;
              break;
            }
          prev = &XEXP (x, 1);
        }
    }

  /* Include any jump table following the basic block.  */
  end = b->end;
  if (GET_CODE (end) == JUMP_INSN
      && (tmp = JUMP_LABEL (end)) != NULL_RTX
      && (tmp = NEXT_INSN (tmp)) != NULL_RTX
      && GET_CODE (tmp) == JUMP_INSN
      && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
          || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
    end = tmp;

  /* Include any barrier that may follow the basic block.  */
  tmp = next_nonnote_insn (end);
  if (tmp && GET_CODE (tmp) == BARRIER)
    end = tmp;

  /* Selectively delete the entire chain.  */
  flow_delete_insn_chain (insn, end);

  /* Remove the edges into and out of this block.  Note that there may
     indeed be edges in, if we are removing an unreachable loop.  */
  {
    edge e, next, *q;

    for (e = b->pred; e; e = next)
      {
        for (q = &e->src->succ; *q != e; q = &(*q)->succ_next)
          continue;
        *q = e->succ_next;
        next = e->pred_next;
        n_edges--;
        free (e);
      }
    for (e = b->succ; e; e = next)
      {
        for (q = &e->dest->pred; *q != e; q = &(*q)->pred_next)
          continue;
        *q = e->pred_next;
        next = e->succ_next;
        n_edges--;
        free (e);
      }

    b->pred = NULL;
    b->succ = NULL;
  }

  /* Remove the basic block from the array, and compact behind it.  */
  expunge_block (b);

  return deleted_handler;
}
/* Remove block B from the basic block array and compact behind it.  */

static void
expunge_block (b)
     basic_block b;
{
  int i, n = n_basic_blocks;

  for (i = b->index; i + 1 < n; ++i)
    {
      basic_block x = BASIC_BLOCK (i + 1);
      x->index = i;
      BASIC_BLOCK (i) = x;
    }

  basic_block_info->num_elements--;
  n_basic_blocks--;
}
/* Delete INSN by patching it out.  Return the next insn.  */

rtx
flow_delete_insn (insn)
     rtx insn;
{
  rtx prev = PREV_INSN (insn);
  rtx next = NEXT_INSN (insn);
  rtx note;

  PREV_INSN (insn) = NULL_RTX;
  NEXT_INSN (insn) = NULL_RTX;
  INSN_DELETED_P (insn) = 1;

  if (prev)
    NEXT_INSN (prev) = next;
  if (next)
    PREV_INSN (next) = prev;
  else
    set_last_insn (prev);

  if (GET_CODE (insn) == CODE_LABEL)
    remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);

  /* If deleting a jump, decrement the use count of the label.  Deleting
     the label itself should happen in the normal course of block merging.  */
  if (GET_CODE (insn) == JUMP_INSN
      && JUMP_LABEL (insn)
      && GET_CODE (JUMP_LABEL (insn)) == CODE_LABEL)
    LABEL_NUSES (JUMP_LABEL (insn))--;

  /* Also if deleting an insn that references a label.  */
  else if ((note = find_reg_note (insn, REG_LABEL, NULL_RTX)) != NULL_RTX
           && GET_CODE (XEXP (note, 0)) == CODE_LABEL)
    LABEL_NUSES (XEXP (note, 0))--;

  return next;
}
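
/* Patching out simply relinks the doubly linked insn chain around INSN:

       prev <-> insn <-> next    becomes    prev <-> next

   INSN keeps cleared PREV/NEXT pointers and INSN_DELETED_P set, so any
   stale reference to it can be detected later.  */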
/* True if a given label can be deleted.  */

static int
can_delete_label_p (label)
     rtx label;
{
  rtx x;

  if (LABEL_PRESERVE_P (label))
    return 0;

  for (x = forced_labels; x; x = XEXP (x, 1))
    if (label == XEXP (x, 0))
      return 0;
  for (x = label_value_list; x; x = XEXP (x, 1))
    if (label == XEXP (x, 0))
      return 0;
  for (x = exception_handler_labels; x; x = XEXP (x, 1))
    if (label == XEXP (x, 0))
      return 0;

  /* User declared labels must be preserved.  */
  if (LABEL_NAME (label) != 0)
    return 0;

  return 1;
}
static int
tail_recursion_label_p (label)
     rtx label;
{
  rtx x;

  for (x = tail_recursion_label_list; x; x = XEXP (x, 1))
    if (label == XEXP (x, 0))
      return 1;

  return 0;
}
/* Blocks A and B are to be merged into a single block A.  The insns
   are already contiguous, hence `nomove'.  */

void
merge_blocks_nomove (a, b)
     basic_block a, b;
{
  edge e;
  rtx b_head, b_end, a_end;
  rtx del_first = NULL_RTX, del_last = NULL_RTX;
  int b_empty = 0;

  /* If there was a CODE_LABEL beginning B, delete it.  */
  b_head = b->head;
  b_end = b->end;
  if (GET_CODE (b_head) == CODE_LABEL)
    {
      /* Detect basic blocks with nothing but a label.  This can happen
         in particular at the end of a function.  */
      if (b_head == b_end)
        b_empty = 1;
      del_first = del_last = b_head;
      b_head = NEXT_INSN (b_head);
    }

  /* Delete the basic block note.  */
  if (NOTE_INSN_BASIC_BLOCK_P (b_head))
    {
      if (b_head == b_end)
        b_empty = 1;
      if (! del_last)
        del_first = b_head;
      del_last = b_head;
      b_head = NEXT_INSN (b_head);
    }

  /* If there was a jump out of A, delete it.  */
  a_end = a->end;
  if (GET_CODE (a_end) == JUMP_INSN)
    {
      rtx prev;

      for (prev = PREV_INSN (a_end); ; prev = PREV_INSN (prev))
        if (GET_CODE (prev) != NOTE
            || NOTE_LINE_NUMBER (prev) == NOTE_INSN_BASIC_BLOCK
            || prev == a->head)
          break;

      del_first = a_end;

#ifdef HAVE_cc0
      /* If this was a conditional jump, we need to also delete
         the insn that set cc0.  */
      if (prev && sets_cc0_p (prev))
        {
          rtx tmp = prev;
          prev = prev_nonnote_insn (prev);
          if (!prev)
            prev = a->head;
          del_first = tmp;
        }
#endif

      a_end = prev;
    }
  else if (GET_CODE (NEXT_INSN (a_end)) == BARRIER)
    del_first = NEXT_INSN (a_end);

  /* Delete everything marked above as well as crap that might be
     hanging out between the two blocks.  */
  flow_delete_insn_chain (del_first, del_last);

  /* Normally there should only be one successor of A and that is B, but
     partway though the merge of blocks for conditional_execution we'll
     be merging a TEST block with THEN and ELSE successors.  Free the
     whole lot of them and hope the caller knows what they're doing.  */
  while (a->succ)
    remove_edge (a->succ);

  /* Adjust the edges out of B for the new owner.  */
  for (e = b->succ; e; e = e->succ_next)
    e->src = a;
  a->succ = b->succ;

  /* B hasn't quite yet ceased to exist.  Attempt to prevent mishap.  */
  b->pred = b->succ = NULL;

  /* Reassociate the insns of B with A.  */
  if (!b_empty)
    {
      if (basic_block_for_insn)
        {
          BLOCK_FOR_INSN (b_head) = a;
          while (b_head != b_end)
            {
              b_head = NEXT_INSN (b_head);
              BLOCK_FOR_INSN (b_head) = a;
            }
        }
      a_end = b_end;
    }
  a->end = a_end;

  expunge_block (b);
}
/* Blocks A and B are to be merged into a single block.  A has no incoming
   fallthru edge, so it can be moved before B without adding or modifying
   any jumps (aside from the jump from A to B).  */

static int
merge_blocks_move_predecessor_nojumps (a, b)
     basic_block a, b;
{
  rtx start, end, barrier;
  int index;

  start = a->head;
  end = a->end;

  barrier = next_nonnote_insn (end);
  if (GET_CODE (barrier) != BARRIER)
    abort ();
  flow_delete_insn (barrier);

  /* Move block and loop notes out of the chain so that we do not
     disturb their order.

     ??? A better solution would be to squeeze out all the non-nested notes
     and adjust the block trees appropriately.  Even better would be to have
     a tighter connection between block trees and rtl so that this is not
     necessary.  */
  start = squeeze_notes (start, end);

  /* Scramble the insn chain.  */
  if (end != PREV_INSN (b->head))
    reorder_insns (start, end, PREV_INSN (b->head));

  if (rtl_dump_file)
    {
      fprintf (rtl_dump_file, "Moved block %d before %d and merged.\n",
               a->index, b->index);
    }

  /* Swap the records for the two blocks around.  Although we are deleting B,
     A is now where B was and we want to compact the BB array from where
     A used to be.  */
  BASIC_BLOCK (a->index) = b;
  BASIC_BLOCK (b->index) = a;
  index = a->index;
  a->index = b->index;
  b->index = index;

  /* Now blocks A and B are contiguous.  Merge them.  */
  merge_blocks_nomove (a, b);

  return 1;
}
/* Blocks A and B are to be merged into a single block.  B has no outgoing
   fallthru edge, so it can be moved after A without adding or modifying
   any jumps (aside from the jump from A to B).  */

static int
merge_blocks_move_successor_nojumps (a, b)
     basic_block a, b;
{
  rtx start, end, barrier;

  start = b->head;
  end = b->end;
  barrier = NEXT_INSN (end);

  /* Recognize a jump table following block B.  */
  if (GET_CODE (barrier) == CODE_LABEL
      && NEXT_INSN (barrier)
      && GET_CODE (NEXT_INSN (barrier)) == JUMP_INSN
      && (GET_CODE (PATTERN (NEXT_INSN (barrier))) == ADDR_VEC
          || GET_CODE (PATTERN (NEXT_INSN (barrier))) == ADDR_DIFF_VEC))
    {
      end = NEXT_INSN (barrier);
      barrier = NEXT_INSN (end);
    }

  /* There had better have been a barrier there.  Delete it.  */
  if (GET_CODE (barrier) != BARRIER)
    abort ();
  flow_delete_insn (barrier);

  /* Move block and loop notes out of the chain so that we do not
     disturb their order.

     ??? A better solution would be to squeeze out all the non-nested notes
     and adjust the block trees appropriately.  Even better would be to have
     a tighter connection between block trees and rtl so that this is not
     necessary.  */
  start = squeeze_notes (start, end);

  /* Scramble the insn chain.  */
  reorder_insns (start, end, a->end);

  /* Now blocks A and B are contiguous.  Merge them.  */
  merge_blocks_nomove (a, b);

  if (rtl_dump_file)
    {
      fprintf (rtl_dump_file, "Moved block %d after %d and merged.\n",
               b->index, a->index);
    }

  return 1;
}
/* Attempt to merge basic blocks that are potentially non-adjacent.
   Return true iff the attempt succeeded.  */

static int
merge_blocks (e, b, c)
     edge e;
     basic_block b, c;
{
  /* If C has a tail recursion label, do not merge.  There is no
     edge recorded from the call_placeholder back to this label, as
     that would make optimize_sibling_and_tail_recursive_calls more
     complex for no gain.  */
  if (GET_CODE (c->head) == CODE_LABEL
      && tail_recursion_label_p (c->head))
    return 0;

  /* If B has a fallthru edge to C, no need to move anything.  */
  if (e->flags & EDGE_FALLTHRU)
    {
      merge_blocks_nomove (b, c);

      if (rtl_dump_file)
        {
          fprintf (rtl_dump_file, "Merged %d and %d without moving.\n",
                   b->index, c->index);
        }

      return 1;
    }
  else
    {
      edge tmp_edge;
      basic_block d;
      int c_has_outgoing_fallthru;
      int b_has_incoming_fallthru;

      /* We must make sure to not munge nesting of exception regions,
         lexical blocks, and loop notes.

         The first is taken care of by requiring that the active eh
         region at the end of one block always matches the active eh
         region at the beginning of the next block.

         The later two are taken care of by squeezing out all the notes.  */

      /* ??? A throw/catch edge (or any abnormal edge) should be rarely
         executed and we may want to treat blocks which have two out
         edges, one normal, one abnormal as only having one edge for
         block merging purposes.  */

      for (tmp_edge = c->succ; tmp_edge; tmp_edge = tmp_edge->succ_next)
        if (tmp_edge->flags & EDGE_FALLTHRU)
          break;
      c_has_outgoing_fallthru = (tmp_edge != NULL);

      for (tmp_edge = b->pred; tmp_edge; tmp_edge = tmp_edge->pred_next)
        if (tmp_edge->flags & EDGE_FALLTHRU)
          break;
      b_has_incoming_fallthru = (tmp_edge != NULL);

      /* If B does not have an incoming fallthru, and the exception regions
         match, then it can be moved immediately before C without introducing
         or modifying jumps.

         C can not be the first block, so we do not have to worry about
         accessing a non-existent block.  */
      d = BASIC_BLOCK (c->index - 1);
      if (! b_has_incoming_fallthru
          && d->eh_end == b->eh_beg
          && b->eh_end == c->eh_beg)
        return merge_blocks_move_predecessor_nojumps (b, c);

      /* Otherwise, we're going to try to move C after B.  Make sure the
         exception regions match.

         If B is the last basic block, then we must not try to access the
         block structure for block B + 1.  Luckily in that case we do not
         need to worry about matching exception regions.  */
      d = (b->index + 1 < n_basic_blocks ? BASIC_BLOCK (b->index + 1) : NULL);
      if (b->eh_end == c->eh_beg
          && (d == NULL || c->eh_end == d->eh_beg))
        {
          /* If C does not have an outgoing fallthru, then it can be moved
             immediately after B without introducing or modifying jumps.  */
          if (! c_has_outgoing_fallthru)
            return merge_blocks_move_successor_nojumps (b, c);

          /* Otherwise, we'll need to insert an extra jump, and possibly
             a new block to contain it.  */
          /* ??? Not implemented yet.  */
        }

      return 0;
    }
}
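
/* To summarize, merge_blocks tries three strategies in order:
     1. B falls through to C: merge in place (merge_blocks_nomove).
     2. B has no incoming fallthru and the EH regions line up: move B
        up before C (merge_blocks_move_predecessor_nojumps).
     3. C has no outgoing fallthru and the EH regions line up: move C
        down after B (merge_blocks_move_successor_nojumps).
   Anything else would require inserting a new jump, which is not
   implemented above.  */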
/* Top level driver for merge_blocks.  */

static void
try_merge_blocks ()
{
  int i;

  /* Attempt to merge blocks as made possible by edge removal.  If a block
     has only one successor, and the successor has only one predecessor,
     they may be combined.  */

  for (i = 0; i < n_basic_blocks;)
    {
      basic_block c, b = BASIC_BLOCK (i);
      edge s;

      /* A loop because chains of blocks might be combinable.  */
      while ((s = b->succ) != NULL
             && s->succ_next == NULL
             && (s->flags & EDGE_EH) == 0
             && (c = s->dest) != EXIT_BLOCK_PTR
             && c->pred->pred_next == NULL
             /* If the jump insn has side effects, we can't kill the edge.  */
             && (GET_CODE (b->end) != JUMP_INSN
                 || onlyjump_p (b->end))
             && merge_blocks (s, b, c))
        continue;

      /* Don't get confused by the index shift caused by deleting blocks.  */
      i = b->index + 1;
    }
}
/* The given edge should potentially be a fallthru edge.  If that is in
   fact true, delete the jump and barriers that are in the way.  */

static void
tidy_fallthru_edge (e, b, c)
     edge e;
     basic_block b, c;
{
  rtx q = b->end;

  /* ??? In a late-running flow pass, other folks may have deleted basic
     blocks by nopping out blocks, leaving multiple BARRIERs between here
     and the target label.  They ought to be chastised and fixed.

     We can also wind up with a sequence of undeletable labels between
     one block and the next.

     So search through a sequence of barriers, labels, and notes for
     the head of block C and assert that we really do fall through.  */

  if (next_real_insn (b->end) != next_real_insn (PREV_INSN (c->head)))
    return;

  /* Remove what will soon cease being the jump insn from the source block.
     If block B consisted only of this single jump, turn it into a deleted
     note.  */
  if (GET_CODE (q) == JUMP_INSN
      && onlyjump_p (q)
      && (any_uncondjump_p (q)
          || (b->succ == e && e->succ_next == NULL)))
    {
#ifdef HAVE_cc0
      /* If this was a conditional jump, we need to also delete
         the insn that set cc0.  */
      if (any_condjump_p (q) && sets_cc0_p (PREV_INSN (q)))
        q = PREV_INSN (q);
#endif

      if (b->head == q)
        {
          PUT_CODE (q, NOTE);
          NOTE_LINE_NUMBER (q) = NOTE_INSN_DELETED;
          NOTE_SOURCE_FILE (q) = 0;
        }
      else
        q = PREV_INSN (q);

      b->end = q;
    }

  /* Selectively unlink the sequence.  */
  if (q != PREV_INSN (c->head))
    flow_delete_insn_chain (NEXT_INSN (q), PREV_INSN (c->head));

  e->flags |= EDGE_FALLTHRU;
}
/* Fix up edges that now fall through, or rather should now fall through
   but previously required a jump around now deleted blocks.  Simplify
   the search by only examining blocks numerically adjacent, since this
   is how find_basic_blocks created them.  */

static void
tidy_fallthru_edges ()
{
  int i;

  for (i = 1; i < n_basic_blocks; ++i)
    {
      basic_block b = BASIC_BLOCK (i - 1);
      basic_block c = BASIC_BLOCK (i);
      edge s;

      /* We care about simple conditional or unconditional jumps with
         a single successor.

         If we had a conditional branch to the next instruction when
         find_basic_blocks was called, then there will only be one
         out edge for the block which ended with the conditional
         branch (since we do not create duplicate edges).

         Furthermore, the edge will be marked as a fallthru because we
         merge the flags for the duplicate edges.  So we do not want to
         check that the edge is not a FALLTHRU edge.  */
      if ((s = b->succ) != NULL
          && s->succ_next == NULL
          && s->dest == c
          /* If the jump insn has side effects, we can't tidy the edge.  */
          && (GET_CODE (b->end) != JUMP_INSN
              || onlyjump_p (b->end)))
        tidy_fallthru_edge (s, b, c);
    }
}
/* Perform data flow analysis.
   F is the first insn of the function; FLAGS is a set of PROP_* flags
   to be used in accumulating flow info.  */

void
life_analysis (f, file, flags)
     rtx f;
     FILE *file;
     int flags;
{
#ifdef ELIMINABLE_REGS
  register int i;
  static struct {int from, to; } eliminables[] = ELIMINABLE_REGS;
#endif

  /* Record which registers will be eliminated.  We use this in
     mark_used_regs.  */

  CLEAR_HARD_REG_SET (elim_reg_set);

#ifdef ELIMINABLE_REGS
  for (i = 0; i < (int) ARRAY_SIZE (eliminables); i++)
    SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
#else
  SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
#endif

  if (! optimize)
    flags &= ~(PROP_LOG_LINKS | PROP_AUTOINC);

  /* The post-reload life analysis has (on a global basis) the same set
     of registers live as was computed by reload itself; after register
     elimination, offsets and such may otherwise be incorrect.

     Reload will mark some registers as live even though they do not
     appear in the rtl.

     We don't want to create new auto-incs after reload, since they
     are unlikely to be useful and can cause problems with shared
     stack slots.  */
  if (reload_completed)
    flags &= ~(PROP_REG_INFO | PROP_AUTOINC);

  /* We want alias analysis information for local dead store elimination.  */
  if (optimize && (flags & PROP_SCAN_DEAD_CODE))
    init_alias_analysis ();

  /* Always remove no-op moves.  Do this before other processing so
     that we don't have to keep re-scanning them.  */
  delete_noop_moves (f);

  /* Some targets can emit simpler epilogues if they know that sp was
     not ever modified during the function.  After reload, of course,
     we've already emitted the epilogue so there's no sense searching.  */
  if (! reload_completed)
    notice_stack_pointer_modification (f);

  /* Allocate and zero out data structures that will record the
     data from lifetime analysis.  */
  allocate_reg_life_data ();
  allocate_bb_life_data ();

  /* Find the set of registers live on function exit.  */
  mark_regs_live_at_end (EXIT_BLOCK_PTR->global_live_at_start);

  /* "Update" life info from zero.  It'd be nice to begin the
     relaxation with just the exit and noreturn blocks, but that set
     is not immediately handy.  */

  if (flags & PROP_REG_INFO)
    memset (regs_ever_live, 0, sizeof (regs_ever_live));
  update_life_info (NULL, UPDATE_LIFE_GLOBAL, flags);

  /* Clean up.  */
  if (optimize && (flags & PROP_SCAN_DEAD_CODE))
    end_alias_analysis ();

  if (file)
    dump_flow_info (file);

  free_basic_block_vars (1);
}
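
/* Usage sketch (not compiled in): a typical top-level invocation.  The
   exact flag combination is the caller's choice; PROP_FINAL (all the
   PROP_* bits) is shown only as an example.  */
#if 0
  life_analysis (get_insns (), rtl_dump_file, PROP_FINAL);
#endif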
/* A subroutine of verify_wide_reg, called through for_each_rtx.
   Search for REGNO.  If found, abort if it is not wider than word_mode.  */

static int
verify_wide_reg_1 (px, pregno)
     rtx *px;
     void *pregno;
{
  rtx x = *px;
  unsigned int regno = *(int *) pregno;

  if (GET_CODE (x) == REG && REGNO (x) == regno)
    {
      if (GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD)
        abort ();
      return 1;
    }
  return 0;
}

/* A subroutine of verify_local_live_at_start.  Search through insns
   between HEAD and END looking for register REGNO.  */

static void
verify_wide_reg (regno, head, end)
     int regno;
     rtx head, end;
{
  while (1)
    {
      if (INSN_P (head)
          && for_each_rtx (&PATTERN (head), verify_wide_reg_1, &regno))
        return;
      if (head == end)
        break;
      head = NEXT_INSN (head);
    }

  /* We didn't find the register at all.  Something's way screwy.  */
  if (rtl_dump_file)
    fprintf (rtl_dump_file, "Aborting in verify_wide_reg; reg %d\n", regno);
  print_rtl_and_abort ();
}
/* A subroutine of update_life_info.  Verify that there are no untoward
   changes in live_at_start during a local update.  */

static void
verify_local_live_at_start (new_live_at_start, bb)
     regset new_live_at_start;
     basic_block bb;
{
  if (reload_completed)
    {
      /* After reload, there are no pseudos, nor subregs of multi-word
         registers.  The regsets should exactly match.  */
      if (! REG_SET_EQUAL_P (new_live_at_start, bb->global_live_at_start))
        {
          if (rtl_dump_file)
            {
              fprintf (rtl_dump_file,
                       "live_at_start mismatch in bb %d, aborting\n",
                       bb->index);
              debug_bitmap_file (rtl_dump_file, bb->global_live_at_start);
              debug_bitmap_file (rtl_dump_file, new_live_at_start);
            }
          print_rtl_and_abort ();
        }
    }
  else
    {
      int i;

      /* Find the set of changed registers.  */
      XOR_REG_SET (new_live_at_start, bb->global_live_at_start);

      EXECUTE_IF_SET_IN_REG_SET (new_live_at_start, 0, i,
        {
          /* No registers should die.  */
          if (REGNO_REG_SET_P (bb->global_live_at_start, i))
            {
              if (rtl_dump_file)
                fprintf (rtl_dump_file,
                         "Register %d died unexpectedly in block %d\n", i,
                         bb->index);
              print_rtl_and_abort ();
            }

          /* Verify that the now-live register is wider than word_mode.  */
          verify_wide_reg (i, bb->head, bb->end);
        });
    }
}
/* Updates life information starting with the basic blocks set in BLOCKS.
   If BLOCKS is null, consider it to be the universal set.

   If EXTENT is UPDATE_LIFE_LOCAL, such as after splitting or peepholing,
   we are only expecting local modifications to basic blocks.  If we find
   extra registers live at the beginning of a block, then we either killed
   useful data, or we have a broken split that wants data not provided.
   If we find registers removed from live_at_start, that means we have
   a broken peephole that is killing a register it shouldn't.

   ??? This is not true in one situation -- when a pre-reload splitter
   generates subregs of a multi-word pseudo, current life analysis will
   lose the kill.  So we _can_ have a pseudo go live.  How irritating.

   Including PROP_REG_INFO does not properly refresh regs_ever_live
   unless the caller resets it to zero.  */

void
update_life_info (blocks, extent, prop_flags)
     sbitmap blocks;
     enum update_life_extent extent;
     int prop_flags;
{
  regset tmp;
  regset_head tmp_head;
  int i;

  tmp = INITIALIZE_REG_SET (tmp_head);

  /* For a global update, we go through the relaxation process again.  */
  if (extent != UPDATE_LIFE_LOCAL)
    {
      calculate_global_regs_live (blocks, blocks,
                                  prop_flags & PROP_SCAN_DEAD_CODE);

      /* If asked, remove notes from the blocks we'll update.  */
      if (extent == UPDATE_LIFE_GLOBAL_RM_NOTES)
        count_or_remove_death_notes (blocks, 1);
    }

  if (blocks)
    {
      EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
        {
          basic_block bb = BASIC_BLOCK (i);

          COPY_REG_SET (tmp, bb->global_live_at_end);
          propagate_block (bb, tmp, NULL, NULL, prop_flags);

          if (extent == UPDATE_LIFE_LOCAL)
            verify_local_live_at_start (tmp, bb);
        });
    }
  else
    {
      for (i = n_basic_blocks - 1; i >= 0; --i)
        {
          basic_block bb = BASIC_BLOCK (i);

          COPY_REG_SET (tmp, bb->global_live_at_end);
          propagate_block (bb, tmp, NULL, NULL, prop_flags);

          if (extent == UPDATE_LIFE_LOCAL)
            verify_local_live_at_start (tmp, bb);
        }
    }

  FREE_REG_SET (tmp);

  if (prop_flags & PROP_REG_INFO)
    {
      /* The only pseudos that are live at the beginning of the function
         are those that were not set anywhere in the function.  local-alloc
         doesn't know how to handle these correctly, so mark them as not
         local to any one basic block.  */
      EXECUTE_IF_SET_IN_REG_SET (ENTRY_BLOCK_PTR->global_live_at_end,
                                 FIRST_PSEUDO_REGISTER, i,
                                 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });

      /* We have a problem with any pseudoreg that lives across the setjmp.
         ANSI says that if a user variable does not change in value between
         the setjmp and the longjmp, then the longjmp preserves it.  This
         includes longjmp from a place where the pseudo appears dead.
         (In principle, the value still exists if it is in scope.)
         If the pseudo goes in a hard reg, some other value may occupy
         that hard reg where this pseudo is dead, thus clobbering the pseudo.
         Conclusion: such a pseudo must not go in a hard reg.  */
      EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
                                 FIRST_PSEUDO_REGISTER, i,
                                 {
                                   if (regno_reg_rtx[i] != 0)
                                     {
                                       REG_LIVE_LENGTH (i) = -1;
                                       REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
                                     }
                                 });
    }
}
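
/* The EXTENT argument thus selects between a cheap local repair
   (UPDATE_LIFE_LOCAL, verified by verify_local_live_at_start), a full
   relaxation (UPDATE_LIFE_GLOBAL), and a full relaxation that first
   discards stale death notes via count_or_remove_death_notes
   (UPDATE_LIFE_GLOBAL_RM_NOTES).  */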
/* Free the variables allocated by find_basic_blocks.

   KEEP_HEAD_END_P is non-zero if basic_block_info is not to be freed.  */

void
free_basic_block_vars (keep_head_end_p)
     int keep_head_end_p;
{
  if (basic_block_for_insn)
    {
      VARRAY_FREE (basic_block_for_insn);
      basic_block_for_insn = NULL;
    }

  if (! keep_head_end_p)
    {
      clear_edges ();
      VARRAY_FREE (basic_block_info);
      n_basic_blocks = 0;

      ENTRY_BLOCK_PTR->aux = NULL;
      ENTRY_BLOCK_PTR->global_live_at_end = NULL;
      EXIT_BLOCK_PTR->aux = NULL;
      EXIT_BLOCK_PTR->global_live_at_start = NULL;
    }
}
/* Return nonzero if the destination of SET equals the source.  */

static int
set_noop_p (set)
     rtx set;
{
  rtx src = SET_SRC (set);
  rtx dst = SET_DEST (set);

  if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
    {
      if (SUBREG_WORD (src) != SUBREG_WORD (dst))
        return 0;
      src = SUBREG_REG (src);
      dst = SUBREG_REG (dst);
    }

  return (GET_CODE (src) == REG && GET_CODE (dst) == REG
          && REGNO (src) == REGNO (dst));
}
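
/* For example, (set (reg:SI 42) (reg:SI 42)) is a no-op move, as is a
   SUBREG copy such as
   (set (subreg:SI (reg:DI 40) 0) (subreg:SI (reg:DI 40) 0)),
   since both sides resolve to the same register and word.  */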
/* Return nonzero if an insn consists only of SETs, each of which only sets a
   value to itself.  */

static int
noop_move_p (insn)
     rtx insn;
{
  rtx pat = PATTERN (insn);

  /* Insns carrying these notes are useful later on.  */
  if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
    return 0;

  if (GET_CODE (pat) == SET && set_noop_p (pat))
    return 1;

  if (GET_CODE (pat) == PARALLEL)
    {
      int i;

      /* If nothing but SETs of registers to themselves,
         this insn can also be deleted.  */
      for (i = 0; i < XVECLEN (pat, 0); i++)
        {
          rtx tem = XVECEXP (pat, 0, i);

          if (GET_CODE (tem) == USE
              || GET_CODE (tem) == CLOBBER)
            continue;

          if (GET_CODE (tem) != SET || ! set_noop_p (tem))
            return 0;
        }

      return 1;
    }
  return 0;
}
/* Delete any insns that copy a register to itself.  */

static void
delete_noop_moves (f)
     rtx f;
{
  rtx insn;

  for (insn = f; insn; insn = NEXT_INSN (insn))
    {
      if (GET_CODE (insn) == INSN && noop_move_p (insn))
        {
          PUT_CODE (insn, NOTE);
          NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
          NOTE_SOURCE_FILE (insn) = 0;
        }
    }
}
/* Determine if the stack pointer is constant over the life of the function.
   Only useful before prologues have been emitted.  */

static void
notice_stack_pointer_modification_1 (x, pat, data)
     rtx x;
     rtx pat ATTRIBUTE_UNUSED;
     void *data ATTRIBUTE_UNUSED;
{
  if (x == stack_pointer_rtx
      /* The stack pointer is only modified indirectly as the result
         of a push until later in flow.  See the comments in rtl.texi
         regarding Embedded Side-Effects on Addresses.  */
      || (GET_CODE (x) == MEM
          && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == 'a'
          && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx))
    current_function_sp_is_unchanging = 0;
}

static void
notice_stack_pointer_modification (f)
     rtx f;
{
  rtx insn;

  /* Assume that the stack pointer is unchanging if alloca hasn't
     been used.  */
  current_function_sp_is_unchanging = !current_function_calls_alloca;
  if (! current_function_sp_is_unchanging)
    return;

  for (insn = f; insn; insn = NEXT_INSN (insn))
    {
      if (INSN_P (insn))
        {
          /* Check if insn modifies the stack pointer.  */
          note_stores (PATTERN (insn), notice_stack_pointer_modification_1,
                       NULL);
          if (! current_function_sp_is_unchanging)
            return;
        }
    }
}
/* Mark a register in SET.  Hard registers in large modes get all
   of their component registers set as well.  */

static void
mark_reg (reg, xset)
     rtx reg;
     void *xset;
{
  regset set = (regset) xset;
  int regno = REGNO (reg);

  if (GET_MODE (reg) == BLKmode)
    abort ();

  SET_REGNO_REG_SET (set, regno);
  if (regno < FIRST_PSEUDO_REGISTER)
    {
      int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
      while (--n > 0)
        SET_REGNO_REG_SET (set, regno + n);
    }
}
/* Mark those regs which are needed at the end of the function as live
   at the end of the last basic block.  */

static void
mark_regs_live_at_end (set)
     regset set;
{
  unsigned int i;

  /* If exiting needs the right stack value, consider the stack pointer
     live at the end of the function.  */
  if ((HAVE_epilogue && reload_completed)
      || ! EXIT_IGNORE_STACK
      || (! FRAME_POINTER_REQUIRED
          && ! current_function_calls_alloca
          && flag_omit_frame_pointer)
      || current_function_sp_is_unchanging)
    {
      SET_REGNO_REG_SET (set, STACK_POINTER_REGNUM);
    }

  /* Mark the frame pointer if needed at the end of the function.  If
     we end up eliminating it, it will be removed from the live list
     of each basic block by reload.  */

  if (! reload_completed || frame_pointer_needed)
    {
      SET_REGNO_REG_SET (set, FRAME_POINTER_REGNUM);
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
      /* If they are different, also mark the hard frame pointer as live.  */
      if (! LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
        SET_REGNO_REG_SET (set, HARD_FRAME_POINTER_REGNUM);
#endif
    }

#ifdef PIC_OFFSET_TABLE_REGNUM
#ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
  /* Many architectures have a GP register even without flag_pic.
     Assume the pic register is not in use, or will be handled by
     other means, if it is not fixed.  */
  if (fixed_regs[PIC_OFFSET_TABLE_REGNUM])
    SET_REGNO_REG_SET (set, PIC_OFFSET_TABLE_REGNUM);
#endif
#endif

  /* Mark all global registers, and all registers used by the epilogue
     as being live at the end of the function since they may be
     referenced by our caller.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (global_regs[i] || EPILOGUE_USES (i))
      SET_REGNO_REG_SET (set, i);

  /* Mark all call-saved registers that we actually used.  */
  if (HAVE_epilogue && reload_completed)
    {
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
        if (regs_ever_live[i] && ! call_used_regs[i] && ! LOCAL_REGNO (i))
          SET_REGNO_REG_SET (set, i);
    }

  /* Mark function return value.  */
  diddle_return_value (mark_reg, set);
}
/* Callback function for for_each_successor_phi.  DATA is a regset.
   Sets the SRC_REGNO, the regno of the phi alternative for phi node
   INSN, in the regset.  */

static int
set_phi_alternative_reg (insn, dest_regno, src_regno, data)
     rtx insn ATTRIBUTE_UNUSED;
     int dest_regno ATTRIBUTE_UNUSED;
     int src_regno;
     void *data;
{
  regset live = (regset) data;
  SET_REGNO_REG_SET (live, src_regno);
  return 0;
}
/* Propagate global life info around the graph of basic blocks.  Begin
   considering blocks with their corresponding bit set in BLOCKS_IN.
   If BLOCKS_IN is null, consider it the universal set.

   BLOCKS_OUT is set for every block that was changed.  */

static void
calculate_global_regs_live (blocks_in, blocks_out, flags)
     sbitmap blocks_in, blocks_out;
     int flags;
{
  basic_block *queue, *qhead, *qtail, *qend;
  regset tmp, new_live_at_end;
  regset_head tmp_head;
  regset_head new_live_at_end_head;
  int i;

  tmp = INITIALIZE_REG_SET (tmp_head);
  new_live_at_end = INITIALIZE_REG_SET (new_live_at_end_head);

  /* Create a worklist.  Allocate an extra slot for ENTRY_BLOCK, and one
     because the `head == tail' style test for an empty queue doesn't
     work with a full queue.  */
  queue = (basic_block *) xmalloc ((n_basic_blocks + 2) * sizeof (*queue));
  qtail = queue;
  qhead = qend = queue + n_basic_blocks + 2;

  /* Queue the blocks set in the initial mask.  Do this in reverse block
     number order so that we are more likely for the first round to do
     useful work.  We use AUX non-null to flag that the block is queued.  */
  if (blocks_in)
    {
      /* Clear out the garbage that might be hanging out in bb->aux.  */
      for (i = n_basic_blocks - 1; i >= 0; --i)
        BASIC_BLOCK (i)->aux = NULL;

      EXECUTE_IF_SET_IN_SBITMAP (blocks_in, 0, i,
        {
          basic_block bb = BASIC_BLOCK (i);
          *--qhead = bb;
          bb->aux = bb;
        });
    }
  else
    {
      for (i = 0; i < n_basic_blocks; ++i)
        {
          basic_block bb = BASIC_BLOCK (i);
          *--qhead = bb;
          bb->aux = bb;
        }
    }

  if (blocks_out)
    sbitmap_zero (blocks_out);

  while (qhead != qtail)
    {
      int rescan, changed;
      basic_block bb;
      edge e;

      bb = *qhead++;
      if (qhead == qend)
        qhead = queue;
      bb->aux = NULL;

      /* Begin by propagating live_at_start from the successor blocks.  */
      CLEAR_REG_SET (new_live_at_end);
      for (e = bb->succ; e; e = e->succ_next)
        {
          basic_block sb = e->dest;
          IOR_REG_SET (new_live_at_end, sb->global_live_at_start);
        }

      /* The all-important stack pointer must always be live.  */
      SET_REGNO_REG_SET (new_live_at_end, STACK_POINTER_REGNUM);

      /* Before reload, there are a few registers that must be forced
         live everywhere -- which might not already be the case for
         blocks within infinite loops.  */
      if (! reload_completed)
        {
          /* Any reference to any pseudo before reload is a potential
             reference of the frame pointer.  */
          SET_REGNO_REG_SET (new_live_at_end, FRAME_POINTER_REGNUM);

#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
          /* Pseudos with argument area equivalences may require
             reloading via the argument pointer.  */
          if (fixed_regs[ARG_POINTER_REGNUM])
            SET_REGNO_REG_SET (new_live_at_end, ARG_POINTER_REGNUM);
#endif

#ifdef PIC_OFFSET_TABLE_REGNUM
          /* Any constant, or pseudo with constant equivalences, may
             require reloading from memory using the pic register.  */
          if (fixed_regs[PIC_OFFSET_TABLE_REGNUM])
            SET_REGNO_REG_SET (new_live_at_end, PIC_OFFSET_TABLE_REGNUM);
#endif
        }

      /* Regs used in phi nodes are not included in
         global_live_at_start, since they are live only along a
         particular edge.  Set those regs that are live because of a
         phi node alternative corresponding to this particular block.  */
      if (in_ssa_form)
        for_each_successor_phi (bb, &set_phi_alternative_reg,
                                new_live_at_end);

      if (bb == ENTRY_BLOCK_PTR)
        {
          COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
          continue;
        }

      /* On our first pass through this block, we'll go ahead and continue.
         Recognize first pass by local_set NULL.  On subsequent passes, we
         get to skip out early if live_at_end wouldn't have changed.  */

      if (bb->local_set == NULL)
        {
          bb->local_set = OBSTACK_ALLOC_REG_SET (&flow_obstack);
          bb->cond_local_set = OBSTACK_ALLOC_REG_SET (&flow_obstack);
          rescan = 1;
        }
      else
        {
          /* If any bits were removed from live_at_end, we'll have to
             rescan the block.  This wouldn't be necessary if we had
             precalculated local_live, however with PROP_SCAN_DEAD_CODE
             local_live is really dependent on live_at_end.  */
          CLEAR_REG_SET (tmp);
          rescan = bitmap_operation (tmp, bb->global_live_at_end,
                                     new_live_at_end, BITMAP_AND_COMPL);

          if (! rescan)
            {
              /* If any of the registers in the new live_at_end set are
                 conditionally set in this basic block, we must rescan.
                 This is because conditional lifetimes at the end of the
                 block do not just take the live_at_end set into account,
                 but also the liveness at the start of each successor
                 block.  We can miss changes in those sets if we only
                 compare the new live_at_end against the previous one.  */
              CLEAR_REG_SET (tmp);
              rescan = bitmap_operation (tmp, new_live_at_end,
                                         bb->cond_local_set, BITMAP_AND);
            }

          if (! rescan)
            {
              /* Find the set of changed bits.  Take this opportunity
                 to notice that this set is empty and early out.  */
              CLEAR_REG_SET (tmp);
              changed = bitmap_operation (tmp, bb->global_live_at_end,
                                          new_live_at_end, BITMAP_XOR);
              if (! changed)
                continue;

              /* If any of the changed bits overlap with local_set,
                 we'll have to rescan the block.  Detect overlap by
                 the AND with ~local_set turning off bits.  */
              rescan = bitmap_operation (tmp, tmp, bb->local_set,
                                         BITMAP_AND_COMPL);
            }
        }

      /* Let our caller know that BB changed enough to require its
         death notes updated.  */
      if (blocks_out)
        SET_BIT (blocks_out, bb->index);

      if (! rescan)
        {
          /* Add to live_at_start the set of all registers in
             new_live_at_end that aren't in the old live_at_end.  */

          bitmap_operation (tmp, new_live_at_end, bb->global_live_at_end,
                            BITMAP_AND_COMPL);
          COPY_REG_SET (bb->global_live_at_end, new_live_at_end);

          changed = bitmap_operation (bb->global_live_at_start,
                                      bb->global_live_at_start,
                                      tmp, BITMAP_IOR);
          if (! changed)
            continue;
        }
      else
        {
          COPY_REG_SET (bb->global_live_at_end, new_live_at_end);

          /* Rescan the block insn by insn to turn (a copy of) live_at_end
             into live_at_start.  */
          propagate_block (bb, new_live_at_end, bb->local_set,
                           bb->cond_local_set, flags);

          /* If live_at start didn't change, no need to go farther.  */
          if (REG_SET_EQUAL_P (bb->global_live_at_start, new_live_at_end))
            continue;

          COPY_REG_SET (bb->global_live_at_start, new_live_at_end);
        }

      /* Queue all predecessors of BB so that we may re-examine
         their live_at_end.  */
      for (e = bb->pred; e; e = e->pred_next)
        {
          basic_block pb = e->src;
          if (pb->aux == NULL)
            {
              *qtail++ = pb;
              if (qtail == qend)
                qtail = queue;
              pb->aux = pb;
            }
        }
    }

  FREE_REG_SET (tmp);
  FREE_REG_SET (new_live_at_end);

  if (blocks_out)
    {
      EXECUTE_IF_SET_IN_SBITMAP (blocks_out, 0, i,
        {
          basic_block bb = BASIC_BLOCK (i);
          FREE_REG_SET (bb->local_set);
          FREE_REG_SET (bb->cond_local_set);
        });
    }
  else
    {
      for (i = n_basic_blocks - 1; i >= 0; --i)
        {
          basic_block bb = BASIC_BLOCK (i);
          FREE_REG_SET (bb->local_set);
          FREE_REG_SET (bb->cond_local_set);
        }
    }

  free (queue);
}
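
/* The relaxation above is the textbook backward liveness computation,
   with local_set playing the role of the DEF set and propagate_block
   recomputing the USE information on demand:

       live_at_end (B)   = union of live_at_start (S) over successors S
       live_at_start (B) = USE (B) | (live_at_end (B) & ~DEF (B))

   iterated over the worklist until no live_at_start set changes.  */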
/* Subroutines of life analysis.  */

/* Allocate the permanent data structures that represent the results
   of life analysis.  Not static since used also for stupid life analysis.  */

void
allocate_bb_life_data ()
{
  register int i;

  for (i = 0; i < n_basic_blocks; i++)
    {
      basic_block bb = BASIC_BLOCK (i);

      bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
      bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
    }

  ENTRY_BLOCK_PTR->global_live_at_end
    = OBSTACK_ALLOC_REG_SET (&flow_obstack);
  EXIT_BLOCK_PTR->global_live_at_start
    = OBSTACK_ALLOC_REG_SET (&flow_obstack);

  regs_live_at_setjmp = OBSTACK_ALLOC_REG_SET (&flow_obstack);
}

void
allocate_reg_life_data ()
{
  int i;

  max_regno = max_reg_num ();

  /* Recalculate the register space, in case it has grown.  Old style
     vector oriented regsets would set regset_{size,bytes} here also.  */
  allocate_reg_info (max_regno, FALSE, FALSE);

  /* Reset all the data we'll collect in propagate_block and its
     subroutines.  */
  for (i = 0; i < max_regno; i++)
    {
      REG_N_SETS (i) = 0;
      REG_N_REFS (i) = 0;
      REG_N_DEATHS (i) = 0;
      REG_N_CALLS_CROSSED (i) = 0;
      REG_LIVE_LENGTH (i) = 0;
      REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
    }
}
/* Delete dead instructions for propagate_block.  */

static void
propagate_block_delete_insn (bb, insn)
     basic_block bb;
     rtx insn;
{
  rtx inote = find_reg_note (insn, REG_LABEL, NULL_RTX);

  /* If the insn referred to a label, and that label was attached to
     an ADDR_VEC, it's safe to delete the ADDR_VEC.  In fact, it's
     pretty much mandatory to delete it, because the ADDR_VEC may be
     referencing labels that no longer exist.  */

  if (inote)
    {
      rtx label = XEXP (inote, 0);
      rtx next;

      if (LABEL_NUSES (label) == 1
          && (next = next_nonnote_insn (label)) != NULL
          && GET_CODE (next) == JUMP_INSN
          && (GET_CODE (PATTERN (next)) == ADDR_VEC
              || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
        {
          rtx pat = PATTERN (next);
          int diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
          int len = XVECLEN (pat, diff_vec_p);
          int i;

          for (i = 0; i < len; i++)
            LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0))--;

          flow_delete_insn (next);
        }
    }

  if (bb->end == insn)
    bb->end = PREV_INSN (insn);
  flow_delete_insn (insn);
}
/* Delete dead libcalls for propagate_block.  Return the insn
   before the libcall.  */

static rtx
propagate_block_delete_libcall (bb, insn, note)
     basic_block bb;
     rtx insn, note;
{
  rtx first = XEXP (note, 0);
  rtx before = PREV_INSN (first);

  if (insn == bb->end)
    bb->end = before;

  flow_delete_insn_chain (first, insn);
  return before;
}
/* Update the life-status of regs for one insn.  Return the previous insn.  */

rtx
propagate_one_insn (pbi, insn)
     struct propagate_block_info *pbi;
     rtx insn;
{
  rtx prev = PREV_INSN (insn);
  int flags = pbi->flags;
  int insn_is_dead = 0;
  int libcall_is_dead = 0;
  rtx note;
  int i;

  if (! INSN_P (insn))
    return prev;

  note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
  if (flags & PROP_SCAN_DEAD_CODE)
    {
      insn_is_dead = insn_dead_p (pbi, PATTERN (insn), 0, REG_NOTES (insn));
      libcall_is_dead = (insn_is_dead && note != 0
                         && libcall_dead_p (pbi, note, insn));
    }

  /* If an instruction consists of just dead store(s) on final pass,
     delete it.  */
  if ((flags & PROP_KILL_DEAD_CODE) && insn_is_dead)
    {
      /* If we're trying to delete a prologue or epilogue instruction
         that isn't flagged as possibly being dead, something is wrong.
         But if we are keeping the stack pointer depressed, we might well
         be deleting insns that are used to compute the amount to update
         it by, so they are fine.  */
      if (reload_completed
          && !(TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
               && (TYPE_RETURNS_STACK_DEPRESSED
                   (TREE_TYPE (current_function_decl))))
          && (((HAVE_epilogue || HAVE_prologue)
               && prologue_epilogue_contains (insn))
              || (HAVE_sibcall_epilogue
                  && sibcall_epilogue_contains (insn)))
          && find_reg_note (insn, REG_MAYBE_DEAD, NULL_RTX) == 0)
        abort ();

      /* Record sets.  Do this even for dead instructions, since they
         would have killed the values if they hadn't been deleted.  */
      mark_set_regs (pbi, PATTERN (insn), insn);

      /* CC0 is now known to be dead.  Either this insn used it,
         in which case it doesn't anymore, or clobbered it,
         so the next insn can't use it.  */
      pbi->cc0_live = 0;

      if (libcall_is_dead)
        {
          prev = propagate_block_delete_libcall (pbi->bb, insn, note);
          insn = NEXT_INSN (prev);
        }
      else
        propagate_block_delete_insn (pbi->bb, insn);

      return prev;
    }

  /* See if this is an increment or decrement that can be merged into
     a following memory address.  */
#ifdef AUTO_INC_DEC
  {
    register rtx x = single_set (insn);

    /* Does this instruction increment or decrement a register?  */
    if ((flags & PROP_AUTOINC)
        && x != 0
        && GET_CODE (SET_DEST (x)) == REG
        && (GET_CODE (SET_SRC (x)) == PLUS
            || GET_CODE (SET_SRC (x)) == MINUS)
        && XEXP (SET_SRC (x), 0) == SET_DEST (x)
        && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
        /* Ok, look for a following memory ref we can combine with.
           If one is found, change the memory ref to a PRE_INC
           or PRE_DEC, cancel this insn, and return 1.
           Return 0 if nothing has been done.  */
        && try_pre_increment_1 (pbi, insn))
      return prev;
  }
#endif /* AUTO_INC_DEC */

  CLEAR_REG_SET (pbi->new_set);

  /* If this is not the final pass, and this insn is copying the value of
     a library call and it's dead, don't scan the insns that perform the
     library call, so that the call's arguments are not marked live.  */
  if (libcall_is_dead)
    {
      /* Record the death of the dest reg.  */
      mark_set_regs (pbi, PATTERN (insn), insn);

      insn = XEXP (note, 0);
      return PREV_INSN (insn);
    }
  else if (GET_CODE (PATTERN (insn)) == SET
           && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
           && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
           && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx
           && GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == CONST_INT)
    /* We have an insn to pop a constant amount off the stack.
       (Such insns use PLUS regardless of the direction of the stack,
       and any insn to adjust the stack by a constant is always a pop.)
       These insns, if not dead stores, have no effect on life.  */
    ;
  else
    {
      /* Any regs live at the time of a call instruction must not go
         in a register clobbered by calls.  Find all regs now live and
         record this for them.  */

      if (GET_CODE (insn) == CALL_INSN && (flags & PROP_REG_INFO))
        EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i,
                                   { REG_N_CALLS_CROSSED (i)++; });

      /* Record sets.  Do this even for dead instructions, since they
         would have killed the values if they hadn't been deleted.  */
      mark_set_regs (pbi, PATTERN (insn), insn);

      if (GET_CODE (insn) == CALL_INSN)
        {
          register int i;
          rtx note, cond;

          cond = NULL_RTX;
          if (GET_CODE (PATTERN (insn)) == COND_EXEC)
            cond = COND_EXEC_TEST (PATTERN (insn));

          /* Non-constant calls clobber memory.  */
          if (! CONST_CALL_P (insn))
            {
              free_EXPR_LIST_list (&pbi->mem_set_list);
              pbi->mem_set_list_len = 0;
            }

          /* There may be extra registers to be clobbered.  */
          for (note = CALL_INSN_FUNCTION_USAGE (insn);
               note;
               note = XEXP (note, 1))
            if (GET_CODE (XEXP (note, 0)) == CLOBBER)
              mark_set_1 (pbi, CLOBBER, XEXP (XEXP (note, 0), 0),
                          cond, insn, pbi->flags);

          /* Calls change all call-used and global registers.  */
          for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
            if (call_used_regs[i] && ! global_regs[i]
                && ! fixed_regs[i])
              {
                /* We do not want REG_UNUSED notes for these registers.  */
                mark_set_1 (pbi, CLOBBER, gen_rtx_REG (reg_raw_mode[i], i),
                            cond, insn,
                            pbi->flags & ~(PROP_DEATH_NOTES | PROP_REG_INFO));
              }
        }

      /* If an insn doesn't use CC0, it becomes dead since we assume
         that every insn clobbers it.  So show it dead here;
         mark_used_regs will set it live if it is referenced.  */
      pbi->cc0_live = 0;

      /* Record uses.  */
      if (! insn_is_dead)
        mark_used_regs (pbi, PATTERN (insn), NULL_RTX, insn);

      /* Sometimes we may have inserted something before INSN (such as a move)
         when we make an auto-inc.  So ensure we will scan those insns.  */
#ifdef AUTO_INC_DEC
      prev = PREV_INSN (insn);
#endif

      if (! insn_is_dead && GET_CODE (insn) == CALL_INSN)
        {
          register int i;
          rtx note, cond;

          cond = NULL_RTX;
          if (GET_CODE (PATTERN (insn)) == COND_EXEC)
            cond = COND_EXEC_TEST (PATTERN (insn));

          /* Calls use their arguments.  */
          for (note = CALL_INSN_FUNCTION_USAGE (insn);
               note;
               note = XEXP (note, 1))
            if (GET_CODE (XEXP (note, 0)) == USE)
              mark_used_regs (pbi, XEXP (XEXP (note, 0), 0),
                              cond, insn);

          /* The stack ptr is used (honorarily) by a CALL insn.  */
          SET_REGNO_REG_SET (pbi->reg_live, STACK_POINTER_REGNUM);

          /* Calls may also reference any of the global registers,
             so they are made live.  */
          for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
            if (global_regs[i])
              mark_used_reg (pbi, gen_rtx_REG (reg_raw_mode[i], i),
                             cond, insn);
        }
    }

  /* On final pass, update counts of how many insns in which each reg
     is live.  */
  if (flags & PROP_REG_INFO)
    EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i,
                               { REG_LIVE_LENGTH (i)++; });

  return prev;
}
/* Initialize a propagate_block_info struct for public consumption.
   Note that the structure itself is opaque to this file, but that
   the user can use the regsets provided here.  */

struct propagate_block_info *
init_propagate_block_info (bb, live, local_set, cond_local_set, flags)
     basic_block bb;
     regset live, local_set, cond_local_set;
     int flags;
{
  struct propagate_block_info *pbi = xmalloc (sizeof (*pbi));

  pbi->bb = bb;
  pbi->reg_live = live;
  pbi->mem_set_list = NULL_RTX;
  pbi->mem_set_list_len = 0;
  pbi->local_set = local_set;
  pbi->cond_local_set = cond_local_set;
  pbi->cc0_live = 0;
  pbi->flags = flags;

  if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
    pbi->reg_next_use = (rtx *) xcalloc (max_reg_num (), sizeof (rtx));
  else
    pbi->reg_next_use = NULL;

  pbi->new_set = BITMAP_XMALLOC ();

#ifdef HAVE_conditional_execution
  pbi->reg_cond_dead = splay_tree_new (splay_tree_compare_ints, NULL,
                                       free_reg_cond_life_info);
  pbi->reg_cond_reg = BITMAP_XMALLOC ();

  /* If this block ends in a conditional branch, for each register live
     from one side of the branch and not the other, record the register
     as conditionally dead.  */
  if (GET_CODE (bb->end) == JUMP_INSN
      && any_condjump_p (bb->end))
    {
      regset_head diff_head;
      regset diff = INITIALIZE_REG_SET (diff_head);
      basic_block bb_true, bb_false;
      rtx cond_true, cond_false, set_src;
      int i;

      /* Identify the successor blocks.  */
      bb_true = bb->succ->dest;
      if (bb->succ->succ_next != NULL)
        {
          bb_false = bb->succ->succ_next->dest;

          if (bb->succ->flags & EDGE_FALLTHRU)
            {
              basic_block t = bb_false;
              bb_false = bb_true;
              bb_true = t;
            }
          else if (! (bb->succ->succ_next->flags & EDGE_FALLTHRU))
            abort ();
        }
      else
        {
          /* This can happen with a conditional jump to the next insn.  */
          if (JUMP_LABEL (bb->end) != bb_true->head)
            abort ();

          /* Simplest way to do nothing.  */
          bb_false = bb_true;
        }

      /* Extract the condition from the branch.  */
      set_src = SET_SRC (pc_set (bb->end));
      cond_true = XEXP (set_src, 0);
      cond_false = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond_true)),
                                   GET_MODE (cond_true), XEXP (cond_true, 0),
                                   XEXP (cond_true, 1));
      if (GET_CODE (XEXP (set_src, 1)) == PC)
        {
          rtx t = cond_false;
          cond_false = cond_true;
          cond_true = t;
        }

      /* Compute which registers lead different lives in the successors.  */
      if (bitmap_operation (diff, bb_true->global_live_at_start,
                            bb_false->global_live_at_start, BITMAP_XOR))
        {
          rtx reg = XEXP (cond_true, 0);

          if (GET_CODE (reg) == SUBREG)
            reg = SUBREG_REG (reg);

          if (GET_CODE (reg) != REG)
            abort ();

          SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (reg));

          /* For each such register, mark it conditionally dead.  */
          EXECUTE_IF_SET_IN_REG_SET
            (diff, 0, i,
             {
               struct reg_cond_life_info *rcli;
               rtx cond;

               rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));

               if (REGNO_REG_SET_P (bb_true->global_live_at_start, i))
                 cond = cond_false;
               else
                 cond = cond_true;
               rcli->condition = cond;

               splay_tree_insert (pbi->reg_cond_dead, i,
                                  (splay_tree_value) rcli);
             });
        }

      FREE_REG_SET (diff);
    }
#endif

  /* If this block has no successors, any stores to the frame that aren't
     used later in the block are dead.  So make a pass over the block
     recording any such that are made and show them dead at the end.  We do
     a very conservative and simple job here.  */
  if (optimize
      && ! (TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
            && (TYPE_RETURNS_STACK_DEPRESSED
                (TREE_TYPE (current_function_decl))))
      && (flags & PROP_SCAN_DEAD_CODE)
      && (bb->succ == NULL
          || (bb->succ->succ_next == NULL
              && bb->succ->dest == EXIT_BLOCK_PTR)))
    {
      rtx insn;
      for (insn = bb->end; insn != bb->head; insn = PREV_INSN (insn))
        if (GET_CODE (insn) == INSN
            && GET_CODE (PATTERN (insn)) == SET
            && GET_CODE (SET_DEST (PATTERN (insn))) == MEM)
          {
            rtx mem = SET_DEST (PATTERN (insn));

            /* This optimization is performed by faking a store to the
               memory at the end of the block.  This doesn't work for
               unchanging memories because multiple stores to unchanging
               memory is illegal and alias analysis doesn't consider it.  */
            if (RTX_UNCHANGING_P (mem))
              continue;

            if (XEXP (mem, 0) == frame_pointer_rtx
                || (GET_CODE (XEXP (mem, 0)) == PLUS
                    && XEXP (XEXP (mem, 0), 0) == frame_pointer_rtx
                    && GET_CODE (XEXP (XEXP (mem, 0), 1)) == CONST_INT))
              {
                /* Store a copy of mem, otherwise the address may be scrogged
                   by find_auto_inc.  This matters because insn_dead_p uses
                   an rtx_equal_p check to determine if two addresses are
                   the same.  This works before find_auto_inc, but fails
                   after find_auto_inc, causing discrepancies between the
                   set of live registers calculated during the
                   calculate_global_regs_live phase and what actually exists
                   after flow completes, leading to aborts.  */
                if (flags & PROP_AUTOINC)
                  mem = shallow_copy_rtx (mem);

                pbi->mem_set_list = alloc_EXPR_LIST (0, mem,
                                                     pbi->mem_set_list);
                if (++pbi->mem_set_list_len >= MAX_MEM_SET_LIST_LEN)
                  break;
              }
          }
    }

  return pbi;
}
/* Release a propagate_block_info struct.  */

void
free_propagate_block_info (pbi)
     struct propagate_block_info *pbi;
{
  free_EXPR_LIST_list (&pbi->mem_set_list);

  BITMAP_XFREE (pbi->new_set);

#ifdef HAVE_conditional_execution
  splay_tree_delete (pbi->reg_cond_dead);
  BITMAP_XFREE (pbi->reg_cond_reg);
#endif

  if (pbi->reg_next_use)
    free (pbi->reg_next_use);

  free (pbi);
}
/* Compute the registers live at the beginning of a basic block BB from
   those live at the end.

   When called, REG_LIVE contains those live at the end.  On return, it
   contains those live at the beginning.

   LOCAL_SET, if non-null, will be set with all registers killed
   unconditionally by this basic block.
   Likewise, COND_LOCAL_SET, if non-null, will be set with all registers
   killed conditionally by this basic block.  If there is any unconditional
   set of a register, then the corresponding bit will be set in LOCAL_SET
   and cleared in COND_LOCAL_SET.
   It is valid for LOCAL_SET and COND_LOCAL_SET to be the same set.  In this
   case, the resulting set will be equal to the union of the two sets that
   would otherwise be computed.  */

void
propagate_block (bb, live, local_set, cond_local_set, flags)
     basic_block bb;
     regset live;
     regset local_set;
     regset cond_local_set;
     int flags;
{
  struct propagate_block_info *pbi;
  rtx insn, prev;

  pbi = init_propagate_block_info (bb, live, local_set, cond_local_set, flags);

  if (flags & PROP_REG_INFO)
    {
      register int i;

      /* Process the regs live at the end of the block.
         Mark them as not local to any one basic block.  */
      EXECUTE_IF_SET_IN_REG_SET (live, 0, i,
                                 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
    }

  /* Scan the block an insn at a time from end to beginning.  */

  for (insn = bb->end;; insn = prev)
    {
      /* If this is a call to `setjmp' et al, warn if any
         non-volatile datum is live.  */
      if ((flags & PROP_REG_INFO)
          && GET_CODE (insn) == NOTE
          && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
        IOR_REG_SET (regs_live_at_setjmp, pbi->reg_live);

      prev = propagate_one_insn (pbi, insn);

      if (insn == bb->head)
        break;
    }

  free_propagate_block_info (pbi);
}
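
/* Usage sketch (not compiled in): propagate_block is the single-block
   engine.  A caller seeds LIVE with the registers live at the end of
   the block and reads back those live at its head, exactly as
   update_life_info does above.  */
#if 0
  COPY_REG_SET (live, bb->global_live_at_end);
  propagate_block (bb, live, NULL, NULL, prop_flags);
  /* LIVE now holds the registers live at bb->head.  */
#endif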
/* Return 1 if X (the body of an insn, or part of it) is just dead stores
   (SET expressions whose destinations are registers dead after the insn).
   NEEDED is the regset that says which regs are alive after the insn.

   Unless CALL_OK is non-zero, an insn is needed if it contains a CALL.

   If X is the entire body of an insn, NOTES contains the reg notes
   pertaining to the insn.  */

static int
insn_dead_p (pbi, x, call_ok, notes)
     struct propagate_block_info *pbi;
     rtx x;
     int call_ok;
     rtx notes ATTRIBUTE_UNUSED;
{
  enum rtx_code code = GET_CODE (x);

#ifdef AUTO_INC_DEC
  /* If flow is invoked after reload, we must take existing AUTO_INC
     expressions into account.  */
  if (reload_completed)
    {
      for (; notes; notes = XEXP (notes, 1))
        {
          if (REG_NOTE_KIND (notes) == REG_INC)
            {
              int regno = REGNO (XEXP (notes, 0));

              /* Don't delete insns to set global regs.  */
              if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
                  || REGNO_REG_SET_P (pbi->reg_live, regno))
                return 0;
            }
        }
    }
#endif

  /* If setting something that's a reg or part of one,
     see if that register's altered value will be live.  */

  if (code == SET)
    {
      rtx r = SET_DEST (x);

#ifdef HAVE_cc0
      if (GET_CODE (r) == CC0)
        return ! pbi->cc0_live;
#endif

      /* A SET that is a subroutine call cannot be dead.  */
      if (GET_CODE (SET_SRC (x)) == CALL)
        {
          if (! call_ok)
            return 0;
        }

      /* Don't eliminate loads from volatile memory or volatile asms.  */
      else if (volatile_refs_p (SET_SRC (x)))
        return 0;

      if (GET_CODE (r) == MEM)
        {
          rtx temp;

          if (MEM_VOLATILE_P (r))
            return 0;

          /* Walk the set of memory locations we are currently tracking
             and see if one is an identical match to this memory location.
             If so, this memory write is dead (remember, we're walking
             backwards from the end of the block to the start).  */
          temp = pbi->mem_set_list;
          while (temp)
            {
              rtx mem = XEXP (temp, 0);

              if (rtx_equal_p (mem, r))
                return 1;

#ifdef AUTO_INC_DEC
              /* Check if memory reference matches an auto increment.  Only
                 post increment/decrement or modify are valid.  */
              if (GET_MODE (mem) == GET_MODE (r)
                  && (GET_CODE (XEXP (mem, 0)) == POST_DEC
                      || GET_CODE (XEXP (mem, 0)) == POST_INC
                      || GET_CODE (XEXP (mem, 0)) == POST_MODIFY)
                  && GET_MODE (XEXP (mem, 0)) == GET_MODE (r)
                  && rtx_equal_p (XEXP (XEXP (mem, 0), 0), XEXP (r, 0)))
                return 1;
#endif

              temp = XEXP (temp, 1);
            }
        }
      else
        {
          while (GET_CODE (r) == SUBREG
                 || GET_CODE (r) == STRICT_LOW_PART
                 || GET_CODE (r) == ZERO_EXTRACT)
            r = XEXP (r, 0);

          if (GET_CODE (r) == REG)
            {
              int regno = REGNO (r);

              /* Obvious.  */
              if (REGNO_REG_SET_P (pbi->reg_live, regno))
                return 0;

              /* If this is a hard register, verify that subsequent
                 words are not needed.  */
              if (regno < FIRST_PSEUDO_REGISTER)
                {
                  int n = HARD_REGNO_NREGS (regno, GET_MODE (r));

                  while (--n > 0)
                    if (REGNO_REG_SET_P (pbi->reg_live, regno+n))
                      return 0;
                }

              /* Don't delete insns to set global regs.  */
              if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
                return 0;

              /* Make sure insns to set the stack pointer aren't deleted.  */
              if (regno == STACK_POINTER_REGNUM)
                return 0;

              /* ??? These bits might be redundant with the force live bits
                 in calculate_global_regs_live.  We would delete from
                 sequential sets; whether this actually affects real code
                 for anything but the stack pointer I don't know.  */
              /* Make sure insns to set the frame pointer aren't deleted.  */
              if (regno == FRAME_POINTER_REGNUM
                  && (! reload_completed || frame_pointer_needed))
                return 0;
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
              if (regno == HARD_FRAME_POINTER_REGNUM
                  && (! reload_completed || frame_pointer_needed))
                return 0;
#endif

#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
              /* Make sure insns to set arg pointer are never deleted
                 (if the arg pointer isn't fixed, there will be a USE
                 for it, so we can treat it normally).  */
              if (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
                return 0;
#endif

              /* Otherwise, the set is dead.  */
              return 1;
            }
        }
    }

  /* If performing several activities, insn is dead if each activity
     is individually dead.  Also, CLOBBERs and USEs can be ignored; a
     CLOBBER or USE that's inside a PARALLEL doesn't make the insn
     worth keeping.  */
  else if (code == PARALLEL)
    {
      int i = XVECLEN (x, 0);

      for (i--; i >= 0; i--)
        if (GET_CODE (XVECEXP (x, 0, i)) != CLOBBER
            && GET_CODE (XVECEXP (x, 0, i)) != USE
            && ! insn_dead_p (pbi, XVECEXP (x, 0, i), call_ok, NULL_RTX))
          return 0;

      return 1;
    }

  /* A CLOBBER of a pseudo-register that is dead serves no purpose.  That
     is not necessarily true for hard registers.  */
  else if (code == CLOBBER && GET_CODE (XEXP (x, 0)) == REG
           && REGNO (XEXP (x, 0)) >= FIRST_PSEUDO_REGISTER
           && ! REGNO_REG_SET_P (pbi->reg_live, REGNO (XEXP (x, 0))))
    return 1;

  /* We do not check other CLOBBER or USE here.  An insn consisting of just
     a CLOBBER or just a USE should not be deleted.  */

  return 0;
}
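/* A minimal sketch (not compiled; hypothetical names) of the core test
   above, with the live set modeled as a byte array: a SET of a hard
   register range is dead only when no word of the destination is live
   after the insn.  Global/fixed registers would have to be excluded
   first, as insn_dead_p does.  */
#if 0
static int
toy_set_dead_p (regno_first, regno_last, live)
     int regno_first, regno_last;
     const unsigned char *live;
{
  int i;

  /* Mirrors the REGNO_REG_SET_P loop over hard-register words.  */
  for (i = regno_first; i <= regno_last; i++)
    if (live[i])
      return 0;  /* some word is still needed: the store is not dead */
  return 1;
}
#endif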
/* If INSN is the last insn in a libcall, and assuming INSN is dead,
   return 1 if the entire library call is dead.
   This is true if INSN copies a register (hard or pseudo)
   and if the hard return reg of the call insn is dead.
   (The caller should have tested the destination of the SET inside
   INSN already for death.)

   If this insn doesn't just copy a register, then we don't
   have an ordinary libcall.  In that case, cse could not have
   managed to substitute the source for the dest later on,
   so we can assume the libcall is dead.

   PBI is the block info giving pseudoregs live before this insn.
   NOTE is the REG_RETVAL note of the insn.  */

static int
libcall_dead_p (pbi, note, insn)
     struct propagate_block_info *pbi;
     rtx note;
     rtx insn;
{
  rtx x = single_set (insn);

  if (x)
    {
      register rtx r = SET_SRC (x);
      if (GET_CODE (r) == REG)
        {
          rtx call = XEXP (note, 0);
          rtx call_pat;
          register int i;

          /* Find the call insn.  */
          while (call != insn && GET_CODE (call) != CALL_INSN)
            call = NEXT_INSN (call);

          /* If there is none, do nothing special,
             since ordinary death handling can understand these insns.  */
          if (call == insn)
            return 0;

          /* See if the hard reg holding the value is dead.
             If this is a PARALLEL, find the call within it.  */
          call_pat = PATTERN (call);
          if (GET_CODE (call_pat) == PARALLEL)
            {
              for (i = XVECLEN (call_pat, 0) - 1; i >= 0; i--)
                if (GET_CODE (XVECEXP (call_pat, 0, i)) == SET
                    && GET_CODE (SET_SRC (XVECEXP (call_pat, 0, i))) == CALL)
                  break;

              /* This may be a library call that is returning a value
                 via invisible pointer.  Do nothing special, since
                 ordinary death handling can understand these insns.  */
              if (i < 0)
                return 0;

              call_pat = XVECEXP (call_pat, 0, i);
            }

          return insn_dead_p (pbi, call_pat, 1, REG_NOTES (call));
        }
    }
  return 1;
}
/* Return 1 if register REGNO was used before it was set, i.e. if it is
   live at function entry.  Don't count global register variables, variables
   in registers that can be used for function arg passing, or variables in
   fixed hard registers.  */

int
regno_uninitialized (regno)
     int regno;
{
  if (n_basic_blocks == 0
      || (regno < FIRST_PSEUDO_REGISTER
          && (global_regs[regno]
              || fixed_regs[regno]
              || FUNCTION_ARG_REGNO_P (regno))))
    return 0;

  return REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno);
}
/* 1 if register REGNO was alive at a place where `setjmp' was called
   and was set more than once or is an argument.
   Such regs may be clobbered by `longjmp'.  */

int
regno_clobbered_at_setjmp (regno)
     int regno;
{
  if (n_basic_blocks == 0)
    return 0;

  return ((REG_N_SETS (regno) > 1
           || REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno))
          && REGNO_REG_SET_P (regs_live_at_setjmp, regno));
}
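/* A self-contained illustration (not part of the compiler, not
   compiled) of why the check above matters: an automatic variable that
   lives in a register and is modified between setjmp and longjmp has
   an indeterminate value after the jump unless declared volatile.  */
#if 0
#include <setjmp.h>
#include <stdio.h>

static jmp_buf env;

static void
toy_setjmp_demo ()
{
  int x = 1;  /* may be cached in a call-saved register */

  if (setjmp (env) == 0)
    {
      x = 2;  /* set again between setjmp and longjmp */
      longjmp (env, 1);
    }
  /* C guarantees nothing about x here; it may print 1 or 2,
     depending on whether x was spilled or stayed in a register.  */
  printf ("%d\n", x);
}
#endif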
/* INSN references memory, possibly using autoincrement addressing modes.
   Find any entries on the mem_set_list that need to be invalidated due
   to an address change.  */

static void
invalidate_mems_from_autoinc (pbi, insn)
     struct propagate_block_info *pbi;
     rtx insn;
{
  rtx note;

  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    {
      if (REG_NOTE_KIND (note) == REG_INC)
        {
          rtx temp = pbi->mem_set_list;
          rtx prev = NULL_RTX;
          rtx next;

          while (temp)
            {
              next = XEXP (temp, 1);
              if (reg_overlap_mentioned_p (XEXP (note, 0), XEXP (temp, 0)))
                {
                  /* Splice temp out of list.  */
                  if (prev)
                    XEXP (prev, 1) = next;
                  else
                    pbi->mem_set_list = next;
                  free_EXPR_LIST_node (temp);
                  pbi->mem_set_list_len--;
                }
              else
                prev = temp;
              temp = next;
            }
        }
    }
}
/* EXP is either a MEM or a REG.  Remove any dependent entries
   from pbi->mem_set_list.  */

static void
invalidate_mems_from_set (pbi, exp)
     struct propagate_block_info *pbi;
     rtx exp;
{
  rtx temp = pbi->mem_set_list;
  rtx prev = NULL_RTX;
  rtx next;

  while (temp)
    {
      next = XEXP (temp, 1);
      if ((GET_CODE (exp) == MEM
           && output_dependence (XEXP (temp, 0), exp))
          || (GET_CODE (exp) == REG
              && reg_overlap_mentioned_p (exp, XEXP (temp, 0))))
        {
          /* Splice this entry out of the list.  */
          if (prev)
            XEXP (prev, 1) = next;
          else
            pbi->mem_set_list = next;
          free_EXPR_LIST_node (temp);
          pbi->mem_set_list_len--;
        }
      else
        prev = temp;
      temp = next;
    }
}
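/* The two invalidation routines above share a list-splicing idiom:
   keep a `prev' pointer so an entry can be unlinked whether it is the
   head or an interior node.  A self-contained model (not compiled)
   over a plain singly linked list; `toy_node' and the `dead_p'
   callback are hypothetical.  */
#if 0
#include <stdlib.h>

struct toy_node { struct toy_node *next; int payload; };

static struct toy_node *
toy_splice_out (head, dead_p)
     struct toy_node *head;
     int (*dead_p) (int);
{
  struct toy_node *temp = head, *prev = 0, *next;

  while (temp)
    {
      next = temp->next;
      if (dead_p (temp->payload))
        {
          if (prev)
            prev->next = next;  /* unlink an interior node */
          else
            head = next;        /* unlink the head */
          free (temp);
        }
      else
        prev = temp;
      temp = next;
    }
  return head;
}
#endif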
/* Process the registers that are set within X.  Their bits are set to
   1 in the regset DEAD, because they are dead prior to this insn.

   If INSN is nonzero, it is the insn being processed.

   FLAGS is the set of operations to perform.  */

static void
mark_set_regs (pbi, x, insn)
     struct propagate_block_info *pbi;
     rtx x, insn;
{
  rtx cond = NULL_RTX;
  rtx link;
  enum rtx_code code;

  if (insn)
    for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
      {
        if (REG_NOTE_KIND (link) == REG_INC)
          mark_set_1 (pbi, SET, XEXP (link, 0),
                      (GET_CODE (x) == COND_EXEC
                       ? COND_EXEC_TEST (x) : NULL_RTX),
                      insn, pbi->flags);
      }

 retry:
  switch (code = GET_CODE (x))
    {
    case SET:
    case CLOBBER:
      mark_set_1 (pbi, code, SET_DEST (x), cond, insn, pbi->flags);
      return;

    case COND_EXEC:
      cond = COND_EXEC_TEST (x);
      x = COND_EXEC_CODE (x);
      goto retry;

    case PARALLEL:
      {
        register int i;
        for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
          {
            rtx sub = XVECEXP (x, 0, i);
            switch (code = GET_CODE (sub))
              {
              case COND_EXEC:
                if (cond != NULL_RTX)
                  abort ();

                cond = COND_EXEC_TEST (sub);
                sub = COND_EXEC_CODE (sub);
                if (GET_CODE (sub) != SET && GET_CODE (sub) != CLOBBER)
                  break;
                /* Fall through.  */

              case SET:
              case CLOBBER:
                mark_set_1 (pbi, code, SET_DEST (sub), cond, insn, pbi->flags);
                break;

              default:
                break;
              }
          }
        break;
      }

    default:
      break;
    }
}
/* Process a single SET rtx, X.  */

static void
mark_set_1 (pbi, code, reg, cond, insn, flags)
     struct propagate_block_info *pbi;
     enum rtx_code code;
     rtx reg, cond, insn;
     int flags;
{
  int regno_first = -1, regno_last = -1;
  int not_dead = 0;
  int i;

  /* Modifying just one hardware register of a multi-reg value or just a
     byte field of a register does not mean the value from before this insn
     is now dead.  Of course, if it was dead after it's unused now.  */

  switch (GET_CODE (reg))
    {
    case PARALLEL:
      /* Some targets place small structures in registers for return values of
         functions.  We have to detect this case specially here to get correct
         flow information.  */
      for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
        if (XEXP (XVECEXP (reg, 0, i), 0) != 0)
          mark_set_1 (pbi, code, XEXP (XVECEXP (reg, 0, i), 0), cond, insn,
                      flags);
      return;

    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
    case STRICT_LOW_PART:
      /* ??? Assumes STRICT_LOW_PART not used on multi-word registers.  */
      do
        reg = XEXP (reg, 0);
      while (GET_CODE (reg) == SUBREG
             || GET_CODE (reg) == ZERO_EXTRACT
             || GET_CODE (reg) == SIGN_EXTRACT
             || GET_CODE (reg) == STRICT_LOW_PART);
      if (GET_CODE (reg) == MEM)
        break;
      not_dead = REGNO_REG_SET_P (pbi->reg_live, REGNO (reg));
      /* Fall through.  */

    case REG:
      regno_last = regno_first = REGNO (reg);
      if (regno_first < FIRST_PSEUDO_REGISTER)
        regno_last += HARD_REGNO_NREGS (regno_first, GET_MODE (reg)) - 1;
      break;

    case SUBREG:
      if (GET_CODE (SUBREG_REG (reg)) == REG)
        {
          enum machine_mode outer_mode = GET_MODE (reg);
          enum machine_mode inner_mode = GET_MODE (SUBREG_REG (reg));

          /* Identify the range of registers affected.  This is moderately
             tricky for hard registers.  See alter_subreg.  */

          regno_last = regno_first = REGNO (SUBREG_REG (reg));
          if (regno_first < FIRST_PSEUDO_REGISTER)
            {
#ifdef ALTER_HARD_SUBREG
              regno_first = ALTER_HARD_SUBREG (outer_mode, SUBREG_WORD (reg),
                                               inner_mode, regno_first);
#else
              regno_first += SUBREG_WORD (reg);
#endif
              regno_last = (regno_first
                            + HARD_REGNO_NREGS (regno_first, outer_mode) - 1);

              /* Since we've just adjusted the register number ranges, make
                 sure REG matches.  Otherwise some_was_live will be clear
                 when it shouldn't have been, and we'll create incorrect
                 REG_UNUSED notes.  */
              reg = gen_rtx_REG (outer_mode, regno_first);
            }
          else
            {
              /* If the number of words in the subreg is less than the number
                 of words in the full register, we have a well-defined partial
                 set.  Otherwise the high bits are undefined.

                 This is only really applicable to pseudos, since we just took
                 care of multi-word hard registers.  */
              if (((GET_MODE_SIZE (outer_mode)
                    + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
                  < ((GET_MODE_SIZE (inner_mode)
                      + UNITS_PER_WORD - 1) / UNITS_PER_WORD))
                not_dead = REGNO_REG_SET_P (pbi->reg_live, regno_first);

              reg = SUBREG_REG (reg);
            }
        }
      else
        reg = SUBREG_REG (reg);
      break;

    default:
      break;
    }

  /* If this set is a MEM, then it kills any aliased writes.
     If this set is a REG, then it kills any MEMs which use the reg.  */
  if (optimize && (flags & PROP_SCAN_DEAD_CODE))
    {
      if (GET_CODE (reg) == MEM || GET_CODE (reg) == REG)
        invalidate_mems_from_set (pbi, reg);

      /* If the memory reference had embedded side effects (autoincrement
         address modes), then we may need to kill some entries on the
         memory set list.  */
      if (insn && GET_CODE (reg) == MEM)
        invalidate_mems_from_autoinc (pbi, insn);

      if (pbi->mem_set_list_len < MAX_MEM_SET_LIST_LEN
          && GET_CODE (reg) == MEM && ! side_effects_p (reg)
          /* ??? With more effort we could track conditional memory life.  */
          && ! cond
          /* We do not know the size of a BLKmode store, so we do not track
             them for redundant store elimination.  */
          && GET_MODE (reg) != BLKmode
          /* There are no REG_INC notes for SP, so we can't assume we'll see
             everything that invalidates it.  To be safe, don't eliminate any
             stores through SP; none of them should be redundant anyway.  */
          && ! reg_mentioned_p (stack_pointer_rtx, reg))
        {
#ifdef AUTO_INC_DEC
          /* Store a copy of mem, otherwise the address may be
             scrogged by find_auto_inc.  */
          if (flags & PROP_AUTOINC)
            reg = shallow_copy_rtx (reg);
#endif
          pbi->mem_set_list = alloc_EXPR_LIST (0, reg, pbi->mem_set_list);
          pbi->mem_set_list_len++;
        }
    }

  if (GET_CODE (reg) == REG
      && ! (regno_first == FRAME_POINTER_REGNUM
            && (! reload_completed || frame_pointer_needed))
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
      && ! (regno_first == HARD_FRAME_POINTER_REGNUM
            && (! reload_completed || frame_pointer_needed))
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
      && ! (regno_first == ARG_POINTER_REGNUM && fixed_regs[regno_first])
#endif
      )
    {
      int some_was_live = 0, some_was_dead = 0;

      for (i = regno_first; i <= regno_last; ++i)
        {
          int needed_regno = REGNO_REG_SET_P (pbi->reg_live, i);
          if (pbi->local_set)
            {
              /* Order of the set operation matters here since both
                 sets may be the same.  */
              CLEAR_REGNO_REG_SET (pbi->cond_local_set, i);
              if (cond != NULL_RTX
                  && ! REGNO_REG_SET_P (pbi->local_set, i))
                SET_REGNO_REG_SET (pbi->cond_local_set, i);
              else
                SET_REGNO_REG_SET (pbi->local_set, i);
            }
          if (code != CLOBBER)
            SET_REGNO_REG_SET (pbi->new_set, i);

          some_was_live |= needed_regno;
          some_was_dead |= ! needed_regno;
        }

#ifdef HAVE_conditional_execution
      /* Consider conditional death in deciding that the register needs
         a death note.  */
      if (some_was_live && ! not_dead
          /* The stack pointer is never dead.  Well, not strictly true,
             but it's very difficult to tell from here.  Hopefully
             combine_stack_adjustments will fix up the most egregious
             errors.  */
          && regno_first != STACK_POINTER_REGNUM)
        {
          for (i = regno_first; i <= regno_last; ++i)
            if (! mark_regno_cond_dead (pbi, i, cond))
              not_dead = 1;
        }
#endif

      /* Additional data to record if this is the final pass.  */
      if (flags & (PROP_LOG_LINKS | PROP_REG_INFO
                   | PROP_DEATH_NOTES | PROP_AUTOINC))
        {
          register rtx y = 0;
          register int blocknum = pbi->bb->index;

          if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
            {
              y = pbi->reg_next_use[regno_first];

              /* The next use is no longer next, since a store intervenes.  */
              for (i = regno_first; i <= regno_last; ++i)
                pbi->reg_next_use[i] = 0;
            }

          if (flags & PROP_REG_INFO)
            {
              for (i = regno_first; i <= regno_last; ++i)
                {
                  /* Count (weighted) references, stores, etc.  This counts a
                     register twice if it is modified, but that is correct.  */
                  REG_N_SETS (i) += 1;
                  REG_N_REFS (i) += (optimize_size ? 1
                                     : pbi->bb->loop_depth + 1);

                  /* The insns where a reg is live are normally counted
                     elsewhere, but we want the count to include the insn
                     where the reg is set, and the normal counting mechanism
                     would not count it.  */
                  REG_LIVE_LENGTH (i) += 1;
                }

              /* If this is a hard reg, record this function uses the reg.  */
              if (regno_first < FIRST_PSEUDO_REGISTER)
                {
                  for (i = regno_first; i <= regno_last; i++)
                    regs_ever_live[i] = 1;
                }
              else
                {
                  /* Keep track of which basic blocks each reg appears in.  */
                  if (REG_BASIC_BLOCK (regno_first) == REG_BLOCK_UNKNOWN)
                    REG_BASIC_BLOCK (regno_first) = blocknum;
                  else if (REG_BASIC_BLOCK (regno_first) != blocknum)
                    REG_BASIC_BLOCK (regno_first) = REG_BLOCK_GLOBAL;
                }
            }

          if (! some_was_dead)
            {
              if (flags & PROP_LOG_LINKS)
                {
                  /* Make a logical link from the next following insn
                     that uses this register, back to this insn.
                     The following insns have already been processed.

                     We don't build a LOG_LINK for hard registers contained
                     in ASM_OPERANDs.  If these registers get replaced,
                     we might wind up changing the semantics of the insn,
                     even if reload can make what appear to be valid
                     assignments later.  */
                  if (y && (BLOCK_NUM (y) == blocknum)
                      && (regno_first >= FIRST_PSEUDO_REGISTER
                          || asm_noperands (PATTERN (y)) < 0))
                    LOG_LINKS (y) = alloc_INSN_LIST (insn, LOG_LINKS (y));
                }
            }
          else if (not_dead)
            ;
          else if (! some_was_live)
            {
              if (flags & PROP_REG_INFO)
                REG_N_DEATHS (regno_first) += 1;

              if (flags & PROP_DEATH_NOTES)
                {
                  /* Note that dead stores have already been deleted
                     when possible.  If we get here, we have found a
                     dead store that cannot be eliminated (because the
                     same insn does something useful).  Indicate this
                     by marking the reg being set as dying here.  */
                  REG_NOTES (insn)
                    = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
                }
            }
          else
            {
              if (flags & PROP_DEATH_NOTES)
                {
                  /* This is a case where we have a multi-word hard register
                     and some, but not all, of the words of the register are
                     needed in subsequent insns.  Write REG_UNUSED notes
                     for those parts that were not needed.  This case should
                     be rare.  */

                  for (i = regno_first; i <= regno_last; ++i)
                    if (! REGNO_REG_SET_P (pbi->reg_live, i))
                      REG_NOTES (insn)
                        = alloc_EXPR_LIST (REG_UNUSED,
                                           gen_rtx_REG (reg_raw_mode[i], i),
                                           REG_NOTES (insn));
                }
            }
        }

      /* Mark the register as being dead.  */
      if (some_was_live
          && ! not_dead
          /* The stack pointer is never dead.  Well, not strictly true,
             but it's very difficult to tell from here.  Hopefully
             combine_stack_adjustments will fix up the most egregious
             errors.  */
          && regno_first != STACK_POINTER_REGNUM)
        {
          for (i = regno_first; i <= regno_last; ++i)
            CLEAR_REGNO_REG_SET (pbi->reg_live, i);
        }
    }
  else if (GET_CODE (reg) == REG)
    {
      if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
        pbi->reg_next_use[regno_first] = 0;
    }

  /* If this is the last pass and this is a SCRATCH, show it will be dying
     here and count it.  */
  else if (GET_CODE (reg) == SCRATCH)
    {
      if (flags & PROP_DEATH_NOTES)
        REG_NOTES (insn)
          = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
    }
}
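/* Sketch (not compiled; word sizes hypothetical) of the
   regno_first/regno_last computation above: a hard register in a wide
   mode occupies several consecutive hard registers, so one SET kills a
   whole range.  The nregs computation stands in for HARD_REGNO_NREGS
   on a word-register target.  */
#if 0
static void
toy_hard_reg_range (regno, mode_size_bytes, word_size_bytes, first, last)
     int regno, mode_size_bytes, word_size_bytes;
     int *first, *last;
{
  /* Round the mode size up to whole registers.  */
  int nregs = (mode_size_bytes + word_size_bytes - 1) / word_size_bytes;

  *first = regno;
  *last = regno + nregs - 1;  /* e.g. an 8-byte value in r4 on a 32-bit
                                 target covers r4..r5 */
}
#endif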
#ifdef HAVE_conditional_execution
/* Mark REGNO conditionally dead.
   Return true if the register is now unconditionally dead.  */

static int
mark_regno_cond_dead (pbi, regno, cond)
     struct propagate_block_info *pbi;
     int regno;
     rtx cond;
{
  /* If this is a store to a predicate register, the value of the
     predicate is changing, we don't know that the predicate as seen
     before is the same as that seen after.  Flush all dependent
     conditions from reg_cond_dead.  This will make all such
     conditionally live registers unconditionally live.  */
  if (REGNO_REG_SET_P (pbi->reg_cond_reg, regno))
    flush_reg_cond_reg (pbi, regno);

  /* If this is an unconditional store, remove any conditional
     life that may have existed.  */
  if (cond == NULL_RTX)
    splay_tree_remove (pbi->reg_cond_dead, regno);
  else
    {
      splay_tree_node node;
      struct reg_cond_life_info *rcli;
      rtx ncond;

      /* Otherwise this is a conditional set.  Record that fact.
         It may have been conditionally used, or there may be a
         subsequent set with a complementary condition.  */

      node = splay_tree_lookup (pbi->reg_cond_dead, regno);
      if (node == NULL)
        {
          /* The register was unconditionally live previously.
             Record the current condition as the condition under
             which it is dead.  */
          rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
          rcli->condition = cond;
          splay_tree_insert (pbi->reg_cond_dead, regno,
                             (splay_tree_value) rcli);

          SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));

          /* Not unconditionally dead.  */
          return 0;
        }
      else
        {
          /* The register was conditionally live previously.
             Add the new condition to the old.  */
          rcli = (struct reg_cond_life_info *) node->value;
          ncond = rcli->condition;
          ncond = ior_reg_cond (ncond, cond, 1);

          /* If the register is now unconditionally dead,
             remove the entry in the splay_tree.  */
          if (ncond == const1_rtx)
            splay_tree_remove (pbi->reg_cond_dead, regno);
          else
            {
              rcli->condition = ncond;

              SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));

              /* Not unconditionally dead.  */
              return 0;
            }
        }
    }

  return 1;
}
/* Called from splay_tree_delete for pbi->reg_cond_life.  */

static void
free_reg_cond_life_info (value)
     splay_tree_value value;
{
  struct reg_cond_life_info *rcli = (struct reg_cond_life_info *) value;
  free (rcli);
}
/* Helper function for flush_reg_cond_reg.  */

static int
flush_reg_cond_reg_1 (node, data)
     splay_tree_node node;
     void *data;
{
  struct reg_cond_life_info *rcli;
  int *xdata = (int *) data;
  unsigned int regno = xdata[0];

  /* Don't need to search if last flushed value was farther on in
     the in-order traversal.  */
  if (xdata[1] >= (int) node->key)
    return 0;

  /* Splice out portions of the expression that refer to regno.  */
  rcli = (struct reg_cond_life_info *) node->value;
  rcli->condition = elim_reg_cond (rcli->condition, regno);

  /* If the entire condition is now false, signal the node to be removed.  */
  if (rcli->condition == const0_rtx)
    {
      xdata[1] = node->key;
      return -1;
    }
  else if (rcli->condition == const1_rtx)
    abort ();

  return 0;
}
/* Flush all (sub) expressions referring to REGNO from REG_COND_LIVE.  */

static void
flush_reg_cond_reg (pbi, regno)
     struct propagate_block_info *pbi;
     int regno;
{
  int pair[2];

  pair[0] = regno;
  pair[1] = -1;
  while (splay_tree_foreach (pbi->reg_cond_dead,
                             flush_reg_cond_reg_1, pair) == -1)
    splay_tree_remove (pbi->reg_cond_dead, pair[1]);

  CLEAR_REGNO_REG_SET (pbi->reg_cond_reg, regno);
}
/* Logical arithmetic on predicate conditions.  IOR, NOT and AND.
   For ior/and, the ADD flag determines whether we want to add the new
   condition X to the old one unconditionally.  If it is zero, we will
   only return a new expression if X allows us to simplify part of
   OLD, otherwise we return OLD unchanged to the caller.
   If ADD is nonzero, we will return a new condition in all cases.  The
   toplevel caller of one of these functions should always pass 1 for
   ADD.  */

static rtx
ior_reg_cond (old, x, add)
     rtx old, x;
     int add;
{
  rtx op0, op1;

  if (GET_RTX_CLASS (GET_CODE (old)) == '<')
    {
      if (GET_RTX_CLASS (GET_CODE (x)) == '<'
          && GET_CODE (x) == reverse_condition (GET_CODE (old))
          && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
        return const1_rtx;
      if (GET_CODE (x) == GET_CODE (old)
          && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
        return old;
      if (! add)
        return old;
      return gen_rtx_IOR (0, old, x);
    }

  switch (GET_CODE (old))
    {
    case IOR:
      op0 = ior_reg_cond (XEXP (old, 0), x, 0);
      op1 = ior_reg_cond (XEXP (old, 1), x, 0);
      if (op0 != XEXP (old, 0) || op1 != XEXP (old, 1))
        {
          if (op0 == const0_rtx)
            return op1;
          if (op1 == const0_rtx)
            return op0;
          if (op0 == const1_rtx || op1 == const1_rtx)
            return const1_rtx;
          if (op0 == XEXP (old, 0))
            op0 = gen_rtx_IOR (0, op0, x);
          else
            op1 = gen_rtx_IOR (0, op1, x);
          return gen_rtx_IOR (0, op0, op1);
        }
      if (! add)
        return old;
      return gen_rtx_IOR (0, old, x);

    case AND:
      op0 = ior_reg_cond (XEXP (old, 0), x, 0);
      op1 = ior_reg_cond (XEXP (old, 1), x, 0);
      if (op0 != XEXP (old, 0) || op1 != XEXP (old, 1))
        {
          if (op0 == const1_rtx)
            return op1;
          if (op1 == const1_rtx)
            return op0;
          if (op0 == const0_rtx || op1 == const0_rtx)
            return const0_rtx;
          if (op0 == XEXP (old, 0))
            op0 = gen_rtx_IOR (0, op0, x);
          else
            op1 = gen_rtx_IOR (0, op1, x);
          return gen_rtx_AND (0, op0, op1);
        }
      if (! add)
        return old;
      return gen_rtx_IOR (0, old, x);

    case NOT:
      op0 = and_reg_cond (XEXP (old, 0), not_reg_cond (x), 0);
      if (op0 != XEXP (old, 0))
        return not_reg_cond (op0);
      if (! add)
        return old;
      return gen_rtx_IOR (0, old, x);

    default:
      abort ();
    }
}
static rtx
not_reg_cond (x)
     rtx x;
{
  enum rtx_code x_code;

  if (x == const0_rtx)
    return const1_rtx;
  else if (x == const1_rtx)
    return const0_rtx;
  x_code = GET_CODE (x);
  if (GET_RTX_CLASS (x_code) == '<'
      && GET_CODE (XEXP (x, 0)) == REG)
    {
      if (XEXP (x, 1) != const0_rtx)
        abort ();

      return gen_rtx_fmt_ee (reverse_condition (x_code),
                             VOIDmode, XEXP (x, 0), const0_rtx);
    }
  return gen_rtx_NOT (0, x);
}
static rtx
and_reg_cond (old, x, add)
     rtx old, x;
     int add;
{
  rtx op0, op1;

  if (GET_RTX_CLASS (GET_CODE (old)) == '<')
    {
      if (GET_RTX_CLASS (GET_CODE (x)) == '<'
          && GET_CODE (x) == reverse_condition (GET_CODE (old))
          && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
        return const0_rtx;
      if (GET_CODE (x) == GET_CODE (old)
          && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
        return old;
      if (! add)
        return old;
      return gen_rtx_AND (0, old, x);
    }

  switch (GET_CODE (old))
    {
    case IOR:
      op0 = and_reg_cond (XEXP (old, 0), x, 0);
      op1 = and_reg_cond (XEXP (old, 1), x, 0);
      if (op0 != XEXP (old, 0) || op1 != XEXP (old, 1))
        {
          if (op0 == const0_rtx)
            return op1;
          if (op1 == const0_rtx)
            return op0;
          if (op0 == const1_rtx || op1 == const1_rtx)
            return const1_rtx;
          if (op0 == XEXP (old, 0))
            op0 = gen_rtx_AND (0, op0, x);
          else
            op1 = gen_rtx_AND (0, op1, x);
          return gen_rtx_IOR (0, op0, op1);
        }
      if (! add)
        return old;
      return gen_rtx_AND (0, old, x);

    case AND:
      op0 = and_reg_cond (XEXP (old, 0), x, 0);
      op1 = and_reg_cond (XEXP (old, 1), x, 0);
      if (op0 != XEXP (old, 0) || op1 != XEXP (old, 1))
        {
          if (op0 == const1_rtx)
            return op1;
          if (op1 == const1_rtx)
            return op0;
          if (op0 == const0_rtx || op1 == const0_rtx)
            return const0_rtx;
          if (op0 == XEXP (old, 0))
            op0 = gen_rtx_AND (0, op0, x);
          else
            op1 = gen_rtx_AND (0, op1, x);
          return gen_rtx_AND (0, op0, op1);
        }
      if (! add)
        return old;
      return gen_rtx_AND (0, old, x);

    case NOT:
      op0 = ior_reg_cond (XEXP (old, 0), not_reg_cond (x), 0);
      if (op0 != XEXP (old, 0))
        return not_reg_cond (op0);
      if (! add)
        return old;
      return gen_rtx_AND (0, old, x);

    default:
      abort ();
    }
}
/* Given a condition X, remove references to reg REGNO and return the
   new condition.  The removal will be done so that all conditions
   involving REGNO are considered to evaluate to false.  This function
   is used when the value of REGNO changes.  */

static rtx
elim_reg_cond (x, regno)
     rtx x;
     unsigned int regno;
{
  rtx op0, op1;

  if (GET_RTX_CLASS (GET_CODE (x)) == '<')
    {
      if (REGNO (XEXP (x, 0)) == regno)
        return const0_rtx;
      return x;
    }

  switch (GET_CODE (x))
    {
    case AND:
      op0 = elim_reg_cond (XEXP (x, 0), regno);
      op1 = elim_reg_cond (XEXP (x, 1), regno);
      if (op0 == const0_rtx || op1 == const0_rtx)
        return const0_rtx;
      if (op0 == const1_rtx)
        return op1;
      if (op1 == const1_rtx)
        return op0;
      if (op0 == XEXP (x, 0) && op1 == XEXP (x, 1))
        return x;
      return gen_rtx_AND (0, op0, op1);

    case IOR:
      op0 = elim_reg_cond (XEXP (x, 0), regno);
      op1 = elim_reg_cond (XEXP (x, 1), regno);
      if (op0 == const1_rtx || op1 == const1_rtx)
        return const1_rtx;
      if (op0 == const0_rtx)
        return op1;
      if (op1 == const0_rtx)
        return op0;
      if (op0 == XEXP (x, 0) && op1 == XEXP (x, 1))
        return x;
      return gen_rtx_IOR (0, op0, op1);

    case NOT:
      op0 = elim_reg_cond (XEXP (x, 0), regno);
      if (op0 == const0_rtx)
        return const1_rtx;
      if (op0 == const1_rtx)
        return const0_rtx;
      if (op0 != XEXP (x, 0))
        return not_reg_cond (op0);
      return x;

    default:
      abort ();
    }
}
#endif /* HAVE_conditional_execution */
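/* The three routines above implement a small boolean algebra over
   predicate-register conditions.  A self-contained model (not
   compiled) over plain truth values shows the simplifications they
   rely on; `toy_cond' is hypothetical and collapses the symbolic rtx
   conditions to false, true or "unknown".  */
#if 0
enum toy_cond { TOY_FALSE = 0, TOY_TRUE = 1, TOY_UNKNOWN = 2 };

static enum toy_cond
toy_ior (a, b)
     enum toy_cond a, b;
{
  if (a == TOY_TRUE || b == TOY_TRUE)
    return TOY_TRUE;     /* 1 | x == 1 */
  if (a == TOY_FALSE)
    return b;            /* 0 | x == x */
  if (b == TOY_FALSE)
    return a;
  return TOY_UNKNOWN;    /* keep a symbolic IOR node */
}

/* ior_reg_cond additionally knows that (r == 0) | (r != 0) is true: a
   condition or'ed with its reverse_condition on the same register is a
   tautology, which is how a register becomes unconditionally dead.  */
#endif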
#ifdef AUTO_INC_DEC

/* Try to substitute the auto-inc expression INC as the address inside
   MEM which occurs in INSN.  Currently, the address of MEM is an expression
   involving INCR_REG, and INCR is the next use of INCR_REG; it is an insn
   that has a single set whose source is a PLUS of INCR_REG and something
   else.  */

static void
attempt_auto_inc (pbi, inc, insn, mem, incr, incr_reg)
     struct propagate_block_info *pbi;
     rtx inc, insn, mem, incr, incr_reg;
{
  int regno = REGNO (incr_reg);
  rtx set = single_set (incr);
  rtx q = SET_DEST (set);
  rtx y = SET_SRC (set);
  int opnum = XEXP (y, 0) == incr_reg ? 0 : 1;

  /* Make sure this reg appears only once in this insn.  */
  if (count_occurrences (PATTERN (insn), incr_reg, 1) != 1)
    return;

  if (dead_or_set_p (incr, incr_reg)
      /* Mustn't autoinc an eliminable register.  */
      && (regno >= FIRST_PSEUDO_REGISTER
          || ! TEST_HARD_REG_BIT (elim_reg_set, regno)))
    {
      /* This is the simple case.  Try to make the auto-inc.  If
         we can't, we are done.  Otherwise, we will do any
         needed updates below.  */
      if (! validate_change (insn, &XEXP (mem, 0), inc, 0))
        return;
    }
  else if (GET_CODE (q) == REG
           /* PREV_INSN used here to check the semi-open interval
              [insn,incr).  */
           && ! reg_used_between_p (q, PREV_INSN (insn), incr)
           /* We must also check for sets of q as q may be
              a call clobbered hard register and there may
              be a call between PREV_INSN (insn) and incr.  */
           && ! reg_set_between_p (q, PREV_INSN (insn), incr))
    {
      /* We have *p followed sometime later by q = p+size.
         Both p and q must be live afterward,
         and q is not used between INSN and its assignment.
         Change it to q = p, ...*q..., q = q+size.
         Then fall into the usual case.  */
      rtx insns, temp;

      start_sequence ();
      emit_move_insn (q, incr_reg);
      insns = get_insns ();
      end_sequence ();

      if (basic_block_for_insn)
        for (temp = insns; temp; temp = NEXT_INSN (temp))
          set_block_for_insn (temp, pbi->bb);

      /* If we can't make the auto-inc, or can't make the
         replacement into Y, exit.  There's no point in making
         the change below if we can't do the auto-inc and doing
         so is not correct in the pre-inc case.  */

      XEXP (inc, 0) = q;
      validate_change (insn, &XEXP (mem, 0), inc, 1);
      validate_change (incr, &XEXP (y, opnum), q, 1);
      if (! apply_change_group ())
        return;

      /* We now know we'll be doing this change, so emit the
         new insn(s) and do the updates.  */
      emit_insns_before (insns, insn);

      if (pbi->bb->head == insn)
        pbi->bb->head = insns;

      /* INCR will become a NOTE and INSN won't contain a
         use of INCR_REG.  If a use of INCR_REG was just placed in
         the insn before INSN, make that the next use.
         Otherwise, invalidate it.  */
      if (GET_CODE (PREV_INSN (insn)) == INSN
          && GET_CODE (PATTERN (PREV_INSN (insn))) == SET
          && SET_SRC (PATTERN (PREV_INSN (insn))) == incr_reg)
        pbi->reg_next_use[regno] = PREV_INSN (insn);
      else
        pbi->reg_next_use[regno] = 0;

      incr_reg = q;
      regno = REGNO (q);

      /* REGNO is now used in INCR which is below INSN, but
         it previously wasn't live here.  If we don't mark
         it as live, we'll put a REG_DEAD note for it
         on this insn, which is incorrect.  */
      SET_REGNO_REG_SET (pbi->reg_live, regno);

      /* If there are any calls between INSN and INCR, show
         that REGNO now crosses them.  */
      for (temp = insn; temp != incr; temp = NEXT_INSN (temp))
        if (GET_CODE (temp) == CALL_INSN)
          REG_N_CALLS_CROSSED (regno)++;
    }
  else
    return;

  /* If we haven't returned, it means we were able to make the
     auto-inc, so update the status.  First, record that this insn
     has an implicit side effect.  */

  REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, incr_reg, REG_NOTES (insn));

  /* Modify the old increment-insn to simply copy
     the already-incremented value of our register.  */
  if (! validate_change (incr, &SET_SRC (set), incr_reg, 0))
    abort ();

  /* If that makes it a no-op (copying the register into itself) delete
     it so it won't appear to be a "use" and a "set" of this
     register.  */
  if (REGNO (SET_DEST (set)) == REGNO (incr_reg))
    {
      /* If the original source was dead, it's dead now.  */
      rtx note;

      while ((note = find_reg_note (incr, REG_DEAD, NULL_RTX)) != NULL_RTX)
        {
          remove_note (incr, note);
          if (XEXP (note, 0) != incr_reg)
            CLEAR_REGNO_REG_SET (pbi->reg_live, REGNO (XEXP (note, 0)));
        }

      PUT_CODE (incr, NOTE);
      NOTE_LINE_NUMBER (incr) = NOTE_INSN_DELETED;
      NOTE_SOURCE_FILE (incr) = 0;
    }

  if (regno >= FIRST_PSEUDO_REGISTER)
    {
      /* Count an extra reference to the reg.  When a reg is
         incremented, spilling it is worse, so we want to make
         that less likely.  */
      REG_N_REFS (regno) += (optimize_size ? 1 : pbi->bb->loop_depth + 1);

      /* Count the increment as a setting of the register,
         even though it isn't a SET in rtl.  */
      REG_N_SETS (regno)++;
    }
}
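/* What attempt_auto_inc does, shown at the source level in a
   self-contained fragment (not compiled).  The pointer walk below is
   the "*p ... p = p + size" pattern; a target with post-increment
   addressing can fold the add into the memory reference, which is the
   rewrite validate_change is asked to approve.  */
#if 0
static int
toy_sum (p, n)
     const int *p;
     int n;
{
  int s = 0;
  while (n-- > 0)
    {
      s += *p;    /* (mem (reg p))      */
      p = p + 1;  /* (set p (plus p 4)) */
    }
  return s;

  /* After the transformation the two insns become one:
       s += *p++;   -- (mem (post_inc p))
     and the separate add is deleted, just as the code above turns
     INCR into a NOTE_INSN_DELETED.  */
}
#endif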
/* X is a MEM found in INSN.  See if we can convert it into an auto-increment
   reference.  */

static void
find_auto_inc (pbi, x, insn)
     struct propagate_block_info *pbi;
     rtx x;
     rtx insn;
{
  rtx addr = XEXP (x, 0);
  HOST_WIDE_INT offset = 0;
  rtx set, y, incr, inc_val;
  int regno;
  int size = GET_MODE_SIZE (GET_MODE (x));

  if (GET_CODE (insn) == JUMP_INSN)
    return;

  /* Here we detect use of an index register which might be good for
     postincrement, postdecrement, preincrement, or predecrement.  */

  if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
    offset = INTVAL (XEXP (addr, 1)), addr = XEXP (addr, 0);

  if (GET_CODE (addr) != REG)
    return;

  regno = REGNO (addr);

  /* Is the next use an increment that might make auto-increment? */
  incr = pbi->reg_next_use[regno];
  if (incr == 0 || BLOCK_NUM (incr) != BLOCK_NUM (insn))
    return;
  set = single_set (incr);
  if (set == 0 || GET_CODE (set) != SET)
    return;
  y = SET_SRC (set);

  if (GET_CODE (y) != PLUS)
    return;

  if (REG_P (XEXP (y, 0)) && REGNO (XEXP (y, 0)) == REGNO (addr))
    inc_val = XEXP (y, 1);
  else if (REG_P (XEXP (y, 1)) && REGNO (XEXP (y, 1)) == REGNO (addr))
    inc_val = XEXP (y, 0);
  else
    return;

  if (GET_CODE (inc_val) == CONST_INT)
    {
      if (HAVE_POST_INCREMENT
          && (INTVAL (inc_val) == size && offset == 0))
        attempt_auto_inc (pbi, gen_rtx_POST_INC (Pmode, addr), insn, x,
                          incr, addr);
      else if (HAVE_POST_DECREMENT
               && (INTVAL (inc_val) == -size && offset == 0))
        attempt_auto_inc (pbi, gen_rtx_POST_DEC (Pmode, addr), insn, x,
                          incr, addr);
      else if (HAVE_PRE_INCREMENT
               && (INTVAL (inc_val) == size && offset == size))
        attempt_auto_inc (pbi, gen_rtx_PRE_INC (Pmode, addr), insn, x,
                          incr, addr);
      else if (HAVE_PRE_DECREMENT
               && (INTVAL (inc_val) == -size && offset == -size))
        attempt_auto_inc (pbi, gen_rtx_PRE_DEC (Pmode, addr), insn, x,
                          incr, addr);
      else if (HAVE_POST_MODIFY_DISP && offset == 0)
        attempt_auto_inc (pbi, gen_rtx_POST_MODIFY (Pmode, addr,
                                                    gen_rtx_PLUS (Pmode,
                                                                  addr,
                                                                  inc_val)),
                          insn, x, incr, addr);
    }
  else if (GET_CODE (inc_val) == REG
           && ! reg_set_between_p (inc_val, PREV_INSN (insn),
                                   NEXT_INSN (incr)))

    {
      if (HAVE_POST_MODIFY_REG && offset == 0)
        attempt_auto_inc (pbi, gen_rtx_POST_MODIFY (Pmode, addr,
                                                    gen_rtx_PLUS (Pmode,
                                                                  addr,
                                                                  inc_val)),
                          insn, x, incr, addr);
    }
}

#endif /* AUTO_INC_DEC */
static void
mark_used_reg (pbi, reg, cond, insn)
     struct propagate_block_info *pbi;
     rtx reg;
     rtx cond ATTRIBUTE_UNUSED;
     rtx insn;
{
  int regno = REGNO (reg);
  int some_was_live = REGNO_REG_SET_P (pbi->reg_live, regno);
  int some_was_dead = ! some_was_live;
  int some_not_set;
  int n;

  /* A hard reg in a wide mode may really be multiple registers.
     If so, mark all of them just like the first.  */
  if (regno < FIRST_PSEUDO_REGISTER)
    {
      n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
      while (--n > 0)
        {
          int needed_regno = REGNO_REG_SET_P (pbi->reg_live, regno + n);
          some_was_live |= needed_regno;
          some_was_dead |= ! needed_regno;
        }
    }

  if (pbi->flags & (PROP_LOG_LINKS | PROP_AUTOINC))
    {
      /* Record where each reg is used, so when the reg is set we know
         the next insn that uses it.  */
      pbi->reg_next_use[regno] = insn;
    }

  if (pbi->flags & PROP_REG_INFO)
    {
      if (regno < FIRST_PSEUDO_REGISTER)
        {
          /* If this is a register we are going to try to eliminate,
             don't mark it live here.  If we are successful in
             eliminating it, it need not be live unless it is used for
             pseudos, in which case it will have been set live when it
             was allocated to the pseudos.  If the register will not
             be eliminated, reload will set it live at that point.

             Otherwise, record that this function uses this register.  */
          /* ??? The PPC backend tries to "eliminate" on the pic
             register to itself.  This should be fixed.  In the mean
             time, hack around it.  */

          if (! (TEST_HARD_REG_BIT (elim_reg_set, regno)
                 && (regno == FRAME_POINTER_REGNUM
                     || regno == ARG_POINTER_REGNUM)))
            {
              int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
              do
                regs_ever_live[regno + --n] = 1;
              while (n > 0);
            }
        }
      else
        {
          /* Keep track of which basic block each reg appears in.  */

          register int blocknum = pbi->bb->index;
          if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
            REG_BASIC_BLOCK (regno) = blocknum;
          else if (REG_BASIC_BLOCK (regno) != blocknum)
            REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;

          /* Count (weighted) number of uses of each reg.  */
          REG_N_REFS (regno) += (optimize_size ? 1
                                 : pbi->bb->loop_depth + 1);
        }
    }

  /* Find out if any of the register was set this insn.  */
  some_not_set = ! REGNO_REG_SET_P (pbi->new_set, regno);
  if (regno < FIRST_PSEUDO_REGISTER)
    {
      n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
      while (--n > 0)
        some_not_set |= ! REGNO_REG_SET_P (pbi->new_set, regno + n);
    }

  /* Record and count the insns in which a reg dies.  If it is used in
     this insn and was dead below the insn then it dies in this insn.
     If it was set in this insn, we do not make a REG_DEAD note;
     likewise if we already made such a note.  */
  if ((pbi->flags & (PROP_DEATH_NOTES | PROP_REG_INFO))
      && some_was_dead
      && some_not_set)
    {
      /* Check for the case where the register dying partially
         overlaps the register set by this insn.  */
      if (regno < FIRST_PSEUDO_REGISTER
          && HARD_REGNO_NREGS (regno, GET_MODE (reg)) > 1)
        {
          n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
          while (--n >= 0)
            some_was_live |= REGNO_REG_SET_P (pbi->new_set, regno + n);
        }

      /* If none of the words in X is needed, make a REG_DEAD note.
         Otherwise, we must make partial REG_DEAD notes.  */
      if (! some_was_live)
        {
          if ((pbi->flags & PROP_DEATH_NOTES)
              && ! find_regno_note (insn, REG_DEAD, regno))
            REG_NOTES (insn)
              = alloc_EXPR_LIST (REG_DEAD, reg, REG_NOTES (insn));

          if (pbi->flags & PROP_REG_INFO)
            REG_N_DEATHS (regno)++;
        }
      else
        {
          /* Don't make a REG_DEAD note for a part of a register
             that is set in the insn.  */

          n = regno + HARD_REGNO_NREGS (regno, GET_MODE (reg)) - 1;
          for (; n >= regno; n--)
            if (! REGNO_REG_SET_P (pbi->reg_live, n)
                && ! dead_or_set_regno_p (insn, n))
              REG_NOTES (insn)
                = alloc_EXPR_LIST (REG_DEAD,
                                   gen_rtx_REG (reg_raw_mode[n], n),
                                   REG_NOTES (insn));
        }
    }

  SET_REGNO_REG_SET (pbi->reg_live, regno);
  if (regno < FIRST_PSEUDO_REGISTER)
    {
      n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
      while (--n > 0)
        SET_REGNO_REG_SET (pbi->reg_live, regno + n);
    }

#ifdef HAVE_conditional_execution
  /* If this is a conditional use, record that fact.  If it is later
     conditionally set, we'll know to kill the register.  */
  if (cond != NULL_RTX)
    {
      splay_tree_node node;
      struct reg_cond_life_info *rcli;
      rtx ncond;

      if (some_was_live)
        {
          node = splay_tree_lookup (pbi->reg_cond_dead, regno);
          if (node == NULL)
            {
              /* The register was unconditionally live previously.
                 No need to do anything.  */
            }
          else
            {
              /* The register was conditionally live previously.
                 Subtract the new life cond from the old death cond.  */
              rcli = (struct reg_cond_life_info *) node->value;
              ncond = rcli->condition;
              ncond = and_reg_cond (ncond, not_reg_cond (cond), 1);

              /* If the register is now unconditionally live, remove the
                 entry in the splay_tree.  */
              if (ncond == const0_rtx)
                {
                  rcli->condition = NULL_RTX;
                  splay_tree_remove (pbi->reg_cond_dead, regno);
                }
              else
                {
                  rcli->condition = ncond;
                  SET_REGNO_REG_SET (pbi->reg_cond_reg,
                                     REGNO (XEXP (cond, 0)));
                }
            }
        }
      else
        {
          /* The register was not previously live at all.  Record
             the condition under which it is still dead.  */
          rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
          rcli->condition = not_reg_cond (cond);
          splay_tree_insert (pbi->reg_cond_dead, regno,
                             (splay_tree_value) rcli);

          SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));
        }
    }
  else if (some_was_live)
    {
      splay_tree_node node;
      struct reg_cond_life_info *rcli;

      node = splay_tree_lookup (pbi->reg_cond_dead, regno);
      if (node != NULL)
        {
          /* The register was conditionally live previously, but is now
             unconditionally so.  Remove it from the conditionally dead
             list, so that a conditional set won't cause us to think
             it dead.  */
          rcli = (struct reg_cond_life_info *) node->value;
          rcli->condition = NULL_RTX;
          splay_tree_remove (pbi->reg_cond_dead, regno);
        }
    }
#endif
}
/* Scan expression X and store a 1-bit in NEW_LIVE for each reg it uses.
   This is done assuming the registers needed from X are those that
   have 1-bits in PBI->REG_LIVE.

   INSN is the containing instruction.  If INSN is dead, this function
   is not called.  */

static void
mark_used_regs (pbi, x, cond, insn)
     struct propagate_block_info *pbi;
     rtx x, cond, insn;
{
  register RTX_CODE code;
  register int regno;
  int flags = pbi->flags;

 retry:
  code = GET_CODE (x);
  switch (code)
    {
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_INT:
    case CONST:
    case CONST_DOUBLE:
    case PC:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return;

#ifdef HAVE_cc0
    case CC0:
      pbi->cc0_live = 1;
      return;
#endif

    case CLOBBER:
      /* If we are clobbering a MEM, mark any registers inside the address
         as being used.  */
      if (GET_CODE (XEXP (x, 0)) == MEM)
        mark_used_regs (pbi, XEXP (XEXP (x, 0), 0), cond, insn);
      return;

    case MEM:
      /* Don't bother watching stores to mems if this is not the
         final pass.  We'll not be deleting dead stores this round.  */
      if (optimize && (flags & PROP_SCAN_DEAD_CODE))
        {
          /* Invalidate the data for the last MEM stored, but only if MEM is
             something that can be stored into.  */
          if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
              && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
            /* Needn't clear the memory set list.  */
            ;
          else
            {
              rtx temp = pbi->mem_set_list;
              rtx prev = NULL_RTX;
              rtx next;

              while (temp)
                {
                  next = XEXP (temp, 1);
                  if (anti_dependence (XEXP (temp, 0), x))
                    {
                      /* Splice temp out of the list.  */
                      if (prev)
                        XEXP (prev, 1) = next;
                      else
                        pbi->mem_set_list = next;
                      free_EXPR_LIST_node (temp);
                      pbi->mem_set_list_len--;
                    }
                  else
                    prev = temp;
                  temp = next;
                }
            }

          /* If the memory reference had embedded side effects (autoincrement
             address modes), then we may need to kill some entries on the
             memory set list.  */
          if (insn)
            invalidate_mems_from_autoinc (pbi, insn);
        }

#ifdef AUTO_INC_DEC
      if (flags & PROP_AUTOINC)
        find_auto_inc (pbi, x, insn);
#endif
      break;

    case SUBREG:
#ifdef CLASS_CANNOT_CHANGE_MODE
      if (GET_CODE (SUBREG_REG (x)) == REG
          && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER
          && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (x),
                                         GET_MODE (SUBREG_REG (x))))
        REG_CHANGES_MODE (REGNO (SUBREG_REG (x))) = 1;
#endif

      /* While we're here, optimize this case.  */
      x = SUBREG_REG (x);
      if (GET_CODE (x) != REG)
        goto retry;
      /* Fall through.  */

    case REG:
      /* See a register other than being set => mark it as needed.  */
      mark_used_reg (pbi, x, cond, insn);
      return;

    case SET:
      {
        register rtx testreg = SET_DEST (x);
        int mark_dest = 0;

        /* If storing into MEM, don't show it as being used.  But do
           show the address as being used.  */
        if (GET_CODE (testreg) == MEM)
          {
#ifdef AUTO_INC_DEC
            if (flags & PROP_AUTOINC)
              find_auto_inc (pbi, testreg, insn);
#endif
            mark_used_regs (pbi, XEXP (testreg, 0), cond, insn);
            mark_used_regs (pbi, SET_SRC (x), cond, insn);
            return;
          }

        /* Storing in STRICT_LOW_PART is like storing in a reg
           in that this SET might be dead, so ignore it in TESTREG.
           but in some other ways it is like using the reg.

           Storing in a SUBREG or a bit field is like storing the entire
           register in that if the register's value is not used
           then this SET is not needed.  */
        while (GET_CODE (testreg) == STRICT_LOW_PART
               || GET_CODE (testreg) == ZERO_EXTRACT
               || GET_CODE (testreg) == SIGN_EXTRACT
               || GET_CODE (testreg) == SUBREG)
          {
#ifdef CLASS_CANNOT_CHANGE_MODE
            if (GET_CODE (testreg) == SUBREG
                && GET_CODE (SUBREG_REG (testreg)) == REG
                && REGNO (SUBREG_REG (testreg)) >= FIRST_PSEUDO_REGISTER
                && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (SUBREG_REG (testreg)),
                                               GET_MODE (testreg)))
              REG_CHANGES_MODE (REGNO (SUBREG_REG (testreg))) = 1;
#endif

            /* Modifying a single register in an alternate mode
               does not use any of the old value.  But these other
               ways of storing in a register do use the old value.  */
            if (GET_CODE (testreg) == SUBREG
                && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
              ;
            else
              mark_dest = 1;

            testreg = XEXP (testreg, 0);
          }

        /* If this is a store into a register or group of registers,
           recursively scan the value being stored.  */

        if ((GET_CODE (testreg) == PARALLEL
             && GET_MODE (testreg) == BLKmode)
            || (GET_CODE (testreg) == REG
                && (regno = REGNO (testreg),
                    ! (regno == FRAME_POINTER_REGNUM
                       && (! reload_completed || frame_pointer_needed)))
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
                && ! (regno == HARD_FRAME_POINTER_REGNUM
                      && (! reload_completed || frame_pointer_needed))
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
                && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
#endif
                ))
          {
            if (mark_dest)
              mark_used_regs (pbi, SET_DEST (x), cond, insn);
            mark_used_regs (pbi, SET_SRC (x), cond, insn);
            return;
          }
      }
      break;

    case ASM_OPERANDS:
    case UNSPEC_VOLATILE:
    case TRAP_IF:
    case ASM_INPUT:
      {
        /* Traditional and volatile asm instructions must be considered to use
           and clobber all hard registers, all pseudo-registers and all of
           memory.  So must TRAP_IF and UNSPEC_VOLATILE operations.

           Consider for instance a volatile asm that changes the fpu rounding
           mode.  An insn should not be moved across this even if it only uses
           pseudo-regs because it might give an incorrectly rounded result.

           ?!? Unfortunately, marking all hard registers as live causes massive
           problems for the register allocator and marking all pseudos as live
           creates mountains of uninitialized variable warnings.

           So for now, just clear the memory set list and mark any regs
           we can find in ASM_OPERANDS as used.  */
        if (code != ASM_OPERANDS || MEM_VOLATILE_P (x))
          {
            free_EXPR_LIST_list (&pbi->mem_set_list);
            pbi->mem_set_list_len = 0;
          }

        /* For all ASM_OPERANDS, we must traverse the vector of input operands.
           We can not just fall through here since then we would be confused
           by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate
           traditional asms unlike their normal usage.  */
        if (code == ASM_OPERANDS)
          {
            int j;

            for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
              mark_used_regs (pbi, ASM_OPERANDS_INPUT (x, j), cond, insn);
          }
        break;
      }

    case COND_EXEC:
      if (cond != NULL_RTX)
        abort ();

      mark_used_regs (pbi, COND_EXEC_TEST (x), NULL_RTX, insn);

      cond = COND_EXEC_TEST (x);
      x = COND_EXEC_CODE (x);
      goto retry;

    case PHI:
      /* We _do_not_ want to scan operands of phi nodes.  Operands of
         a phi function are evaluated only when control reaches this
         block along a particular edge.  Therefore, regs that appear
         as arguments to phi should not be added to the global live at
         start.  */
      return;

    default:
      break;
    }

  /* Recursively scan the operands of this expression.  */

  {
    register const char *fmt = GET_RTX_FORMAT (code);
    register int i;

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          {
            /* Tail recursive case: save a function call level.  */
            if (i == 0)
              {
                x = XEXP (x, 0);
                goto retry;
              }
            mark_used_regs (pbi, XEXP (x, i), cond, insn);
          }
        else if (fmt[i] == 'E')
          {
            register int j;
            for (j = 0; j < XVECLEN (x, i); j++)
              mark_used_regs (pbi, XVECEXP (x, i, j), cond, insn);
          }
      }
  }
}
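/* mark_used_regs keeps recursion shallow by looping on operand 0
   instead of recursing into it (the "goto retry" above).  The same
   idiom on a hypothetical binary expression tree (not compiled):
   recurse on one child, iterate on the other.  */
#if 0
struct toy_expr { struct toy_expr *op[2]; int is_leaf; int reg; };

static void
toy_mark_used (x, live)
     struct toy_expr *x;
     unsigned *live;
{
 retry:
  if (x->is_leaf)
    {
      *live |= 1u << x->reg;  /* mark the register as used */
      return;
    }
  toy_mark_used (x->op[1], live);  /* true recursion on operand 1  */
  x = x->op[0];                    /* tail call on operand 0 turns */
  goto retry;                      /* into a loop: one frame saved */
}
#endif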
#ifdef AUTO_INC_DEC

static int
try_pre_increment_1 (pbi, insn)
     struct propagate_block_info *pbi;
     rtx insn;
{
  /* Find the next use of this reg.  If in same basic block,
     make it do pre-increment or pre-decrement if appropriate.  */
  rtx x = single_set (insn);
  HOST_WIDE_INT amount = ((GET_CODE (SET_SRC (x)) == PLUS ? 1 : -1)
                          * INTVAL (XEXP (SET_SRC (x), 1)));
  int regno = REGNO (SET_DEST (x));
  rtx y = pbi->reg_next_use[regno];

  if (y != 0
      && SET_DEST (x) != stack_pointer_rtx
      && BLOCK_NUM (y) == BLOCK_NUM (insn)
      /* Don't do this if the reg dies, or gets set in y; a standard addressing
         mode would be better.  */
      && ! dead_or_set_p (y, SET_DEST (x))
      && try_pre_increment (y, SET_DEST (x), amount))
    {
      /* We have found a suitable auto-increment and already changed
         insn Y to do it.  So flush this increment instruction.  */
      propagate_block_delete_insn (pbi->bb, insn);

      /* Count a reference to this reg for the increment insn we are
         deleting.  When a reg is incremented, spilling it is worse,
         so we want to make that less likely.  */
      if (regno >= FIRST_PSEUDO_REGISTER)
        {
          REG_N_REFS (regno) += (optimize_size ? 1
                                 : pbi->bb->loop_depth + 1);
          REG_N_SETS (regno)++;
        }

      /* Flush any remembered memories depending on the value of
         the incremented register.  */
      invalidate_mems_from_set (pbi, SET_DEST (x));

      return 1;
    }
  return 0;
}
/* Try to change INSN so that it does pre-increment or pre-decrement
   addressing on register REG in order to add AMOUNT to REG.
   AMOUNT is negative for pre-decrement.
   Returns 1 if the change could be made.
   This checks all about the validity of the result of modifying INSN.  */

static int
try_pre_increment (insn, reg, amount)
     rtx insn, reg;
     HOST_WIDE_INT amount;
{
  register rtx use;

  /* Nonzero if we can try to make a pre-increment or pre-decrement.
     For example, addl $4,r1; movl (r1),... can become movl +(r1),...  */
  int pre_ok = 0;
  /* Nonzero if we can try to make a post-increment or post-decrement.
     For example, addl $4,r1; movl -4(r1),... can become movl (r1)+,...
     It is possible for both PRE_OK and POST_OK to be nonzero if the machine
     supports both pre-inc and post-inc, or both pre-dec and post-dec.  */
  int post_ok = 0;

  /* Nonzero if the opportunity actually requires post-inc or post-dec.  */
  int do_post = 0;

  /* From the sign of increment, see which possibilities are conceivable
     on this target machine.  */
  if (HAVE_PRE_INCREMENT && amount > 0)
    pre_ok = 1;
  if (HAVE_POST_INCREMENT && amount > 0)
    post_ok = 1;

  if (HAVE_PRE_DECREMENT && amount < 0)
    pre_ok = 1;
  if (HAVE_POST_DECREMENT && amount < 0)
    post_ok = 1;

  if (! (pre_ok || post_ok))
    return 0;

  /* It is not safe to add a side effect to a jump insn
     because if the incremented register is spilled and must be reloaded
     there would be no way to store the incremented value back in memory.  */

  if (GET_CODE (insn) == JUMP_INSN)
    return 0;

  use = 0;
  if (pre_ok)
    use = find_use_as_address (PATTERN (insn), reg, 0);
  if (post_ok && (use == 0 || use == (rtx) 1))
    {
      use = find_use_as_address (PATTERN (insn), reg, -amount);
      do_post = 1;
    }

  if (use == 0 || use == (rtx) 1)
    return 0;

  if (GET_MODE_SIZE (GET_MODE (use)) != (amount > 0 ? amount : - amount))
    return 0;

  /* See if this combination of instruction and addressing mode exists.  */
  if (! validate_change (insn, &XEXP (use, 0),
                         gen_rtx_fmt_e (amount > 0
                                        ? (do_post ? POST_INC : PRE_INC)
                                        : (do_post ? POST_DEC : PRE_DEC),
                                        Pmode, reg), 0))
    return 0;

  /* Record that this insn now has an implicit side effect on X.  */
  REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, reg, REG_NOTES (insn));
  return 1;
}

#endif /* AUTO_INC_DEC */
/* Find the place in the rtx X where REG is used as a memory address.
   Return the MEM rtx that so uses it.
   If PLUSCONST is nonzero, search instead for a memory address equivalent to
   (plus REG (const_int PLUSCONST)).

   If such an address does not appear, return 0.
   If REG appears more than once, or is used other than in such an address,
   return (rtx) 1.  */

rtx
find_use_as_address (x, reg, plusconst)
     register rtx x;
     rtx reg;
     HOST_WIDE_INT plusconst;
{
  enum rtx_code code = GET_CODE (x);
  const char *fmt = GET_RTX_FORMAT (code);
  register int i;
  register rtx value = 0;
  register rtx tem;

  if (code == MEM && XEXP (x, 0) == reg && plusconst == 0)
    return x;

  if (code == MEM && GET_CODE (XEXP (x, 0)) == PLUS
      && XEXP (XEXP (x, 0), 0) == reg
      && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
      && INTVAL (XEXP (XEXP (x, 0), 1)) == plusconst)
    return x;

  if (code == SIGN_EXTRACT || code == ZERO_EXTRACT)
    {
      /* If REG occurs inside a MEM used in a bit-field reference,
         that is unacceptable.  */
      if (find_use_as_address (XEXP (x, 0), reg, 0) != 0)
        return (rtx) (HOST_WIDE_INT) 1;
    }

  if (x == reg)
    return (rtx) (HOST_WIDE_INT) 1;

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          tem = find_use_as_address (XEXP (x, i), reg, plusconst);
          if (value == 0)
            value = tem;
          else if (tem != 0)
            return (rtx) (HOST_WIDE_INT) 1;
        }
      else if (fmt[i] == 'E')
        {
          register int j;
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            {
              tem = find_use_as_address (XVECEXP (x, i, j), reg, plusconst);
              if (value == 0)
                value = tem;
              else if (tem != 0)
                return (rtx) (HOST_WIDE_INT) 1;
            }
        }
    }

  return value;
}
/* Write information about registers and basic blocks into FILE.
   This is part of making a debugging dump.  */

void
dump_regset (r, outf)
     regset r;
     FILE *outf;
{
  int i;

  if (r == NULL)
    {
      fputs (" (nil)", outf);
      return;
    }

  EXECUTE_IF_SET_IN_REG_SET (r, 0, i,
    {
      fprintf (outf, " %d", i);
      if (i < FIRST_PSEUDO_REGISTER)
        fprintf (outf, " [%s]",
                 reg_names[i]);
    });
}

void
debug_regset (r)
     regset r;
{
  dump_regset (r, stderr);
  putc ('\n', stderr);
}
void
dump_flow_info (file)
     FILE *file;
{
  register int i;
  static const char * const reg_class_names[] = REG_CLASS_NAMES;

  fprintf (file, "%d registers.\n", max_regno);
  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    if (REG_N_REFS (i))
      {
        enum reg_class class, altclass;

        fprintf (file, "\nRegister %d used %d times across %d insns",
                 i, REG_N_REFS (i), REG_LIVE_LENGTH (i));
        if (REG_BASIC_BLOCK (i) >= 0)
          fprintf (file, " in block %d", REG_BASIC_BLOCK (i));
        if (REG_N_SETS (i))
          fprintf (file, "; set %d time%s", REG_N_SETS (i),
                   (REG_N_SETS (i) == 1) ? "" : "s");
        if (REG_USERVAR_P (regno_reg_rtx[i]))
          fprintf (file, "; user var");
        if (REG_N_DEATHS (i) != 1)
          fprintf (file, "; dies in %d places", REG_N_DEATHS (i));
        if (REG_N_CALLS_CROSSED (i) == 1)
          fprintf (file, "; crosses 1 call");
        else if (REG_N_CALLS_CROSSED (i))
          fprintf (file, "; crosses %d calls", REG_N_CALLS_CROSSED (i));
        if (PSEUDO_REGNO_BYTES (i) != UNITS_PER_WORD)
          fprintf (file, "; %d bytes", PSEUDO_REGNO_BYTES (i));
        class = reg_preferred_class (i);
        altclass = reg_alternate_class (i);
        if (class != GENERAL_REGS || altclass != ALL_REGS)
          {
            if (altclass == ALL_REGS || class == ALL_REGS)
              fprintf (file, "; pref %s", reg_class_names[(int) class]);
            else if (altclass == NO_REGS)
              fprintf (file, "; %s or none", reg_class_names[(int) class]);
            else
              fprintf (file, "; pref %s, else %s",
                       reg_class_names[(int) class],
                       reg_class_names[(int) altclass]);
          }
        if (REG_POINTER (regno_reg_rtx[i]))
          fprintf (file, "; pointer");
        fprintf (file, ".\n");
      }

  fprintf (file, "\n%d basic blocks, %d edges.\n", n_basic_blocks, n_edges);
  for (i = 0; i < n_basic_blocks; i++)
    {
      register basic_block bb = BASIC_BLOCK (i);
      register edge e;

      fprintf (file, "\nBasic block %d: first insn %d, last %d, loop_depth %d, count %d.\n",
               i, INSN_UID (bb->head), INSN_UID (bb->end),
               bb->loop_depth, bb->count);

      fprintf (file, "Predecessors: ");
      for (e = bb->pred; e; e = e->pred_next)
        dump_edge_info (file, e, 0);

      fprintf (file, "\nSuccessors: ");
      for (e = bb->succ; e; e = e->succ_next)
        dump_edge_info (file, e, 1);

      fprintf (file, "\nRegisters live at start:");
      dump_regset (bb->global_live_at_start, file);

      fprintf (file, "\nRegisters live at end:");
      dump_regset (bb->global_live_at_end, file);

      putc ('\n', file);
    }

  putc ('\n', file);
}

void
debug_flow_info ()
{
  dump_flow_info (stderr);
}
void
dump_edge_info (file, e, do_succ)
     FILE *file;
     edge e;
     int do_succ;
{
  basic_block side = (do_succ ? e->dest : e->src);

  if (side == ENTRY_BLOCK_PTR)
    fputs (" ENTRY", file);
  else if (side == EXIT_BLOCK_PTR)
    fputs (" EXIT", file);
  else
    fprintf (file, " %d", side->index);

  if (e->count)
    fprintf (file, " count:%d", e->count);

  if (e->flags)
    {
      static const char * const bitnames[] = {
        "fallthru", "crit", "ab", "abcall", "eh", "fake"
      };
      int comma = 0;
      int i, flags = e->flags;

      fputc (' ', file);
      fputc ('(', file);
      for (i = 0; flags; i++)
        if (flags & (1 << i))
          {
            flags &= ~(1 << i);
            if (comma)
              fputc (',', file);
            if (i < (int) ARRAY_SIZE (bitnames))
              fputs (bitnames[i], file);
            else
              fprintf (file, "%d", i);
            comma = 1;
          }
      fputc (')', file);
    }
}
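/* The flag-printing loop above peels one set bit per iteration.  A
   self-contained version of the same decoding (not compiled; the flag
   names are hypothetical).  */
#if 0
#include <stdio.h>

static void
toy_dump_flags (f, flags)
     FILE *f;
     int flags;
{
  static const char *const names[] = { "fallthru", "crit", "ab" };
  int i, comma = 0;

  for (i = 0; flags; i++)
    if (flags & (1 << i))
      {
        flags &= ~(1 << i);  /* clear the bit so the loop terminates */
        if (comma)
          fputc (',', f);
        if (i < (int) (sizeof names / sizeof names[0]))
          fputs (names[i], f);
        else
          fprintf (f, "%d", i);  /* unnamed bits print by position */
        comma = 1;
      }
}
#endif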
/* Print out one basic block with live information at start and end.  */

void
dump_bb (bb, outf)
     basic_block bb;
     FILE *outf;
{
  rtx insn;
  rtx last;
  edge e;

  fprintf (outf, ";; Basic block %d, loop depth %d, count %d",
           bb->index, bb->loop_depth, bb->count);
  if (bb->eh_beg != -1 || bb->eh_end != -1)
    fprintf (outf, ", eh regions %d/%d", bb->eh_beg, bb->eh_end);
  putc ('\n', outf);

  fputs (";; Predecessors: ", outf);
  for (e = bb->pred; e; e = e->pred_next)
    dump_edge_info (outf, e, 0);
  putc ('\n', outf);

  fputs (";; Registers live at start:", outf);
  dump_regset (bb->global_live_at_start, outf);
  putc ('\n', outf);

  for (insn = bb->head, last = NEXT_INSN (bb->end);
       insn != last;
       insn = NEXT_INSN (insn))
    print_rtl_single (outf, insn);

  fputs (";; Registers live at end:", outf);
  dump_regset (bb->global_live_at_end, outf);
  putc ('\n', outf);

  fputs (";; Successors: ", outf);
  for (e = bb->succ; e; e = e->succ_next)
    dump_edge_info (outf, e, 1);
  putc ('\n', outf);
}

void
debug_bb (bb)
     basic_block bb;
{
  dump_bb (bb, stderr);
}

void
debug_bb_n (n)
     int n;
{
  dump_bb (BASIC_BLOCK (n), stderr);
}
/* Like print_rtl, but also print out live information for the start of each
   basic block.  */

void
print_rtl_with_bb (outf, rtx_first)
     FILE *outf;
     rtx rtx_first;
{
  register rtx tmp_rtx;

  if (rtx_first == 0)
    fprintf (outf, "(nil)\n");
  else
    {
      int i;
      enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
      int max_uid = get_max_uid ();
      basic_block *start = (basic_block *)
        xcalloc (max_uid, sizeof (basic_block));
      basic_block *end = (basic_block *)
        xcalloc (max_uid, sizeof (basic_block));
      enum bb_state *in_bb_p = (enum bb_state *)
        xcalloc (max_uid, sizeof (enum bb_state));

      for (i = n_basic_blocks - 1; i >= 0; i--)
        {
          basic_block bb = BASIC_BLOCK (i);
          rtx x;

          start[INSN_UID (bb->head)] = bb;
          end[INSN_UID (bb->end)] = bb;
          for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
            {
              enum bb_state state = IN_MULTIPLE_BB;
              if (in_bb_p[INSN_UID (x)] == NOT_IN_BB)
                state = IN_ONE_BB;
              in_bb_p[INSN_UID (x)] = state;

              if (x == bb->end)
                break;
            }
        }

      for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
        {
          int did_output;
          basic_block bb;

          if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
            {
              fprintf (outf, ";; Start of basic block %d, registers live:",
                       bb->index);
              dump_regset (bb->global_live_at_start, outf);
              putc ('\n', outf);
            }

          if (in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB
              && GET_CODE (tmp_rtx) != NOTE
              && GET_CODE (tmp_rtx) != BARRIER)
            fprintf (outf, ";; Insn is not within a basic block\n");
          else if (in_bb_p[INSN_UID (tmp_rtx)] == IN_MULTIPLE_BB)
            fprintf (outf, ";; Insn is in multiple basic blocks\n");

          did_output = print_rtl_single (outf, tmp_rtx);

          if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
            {
              fprintf (outf, ";; End of basic block %d, registers live:\n",
                       bb->index);
              dump_regset (bb->global_live_at_end, outf);
              putc ('\n', outf);
            }

          if (did_output)
            putc ('\n', outf);
        }

      free (start);
      free (end);
      free (in_bb_p);
    }

  if (current_function_epilogue_delay_list != 0)
    {
      fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
      for (tmp_rtx = current_function_epilogue_delay_list; tmp_rtx != 0;
           tmp_rtx = XEXP (tmp_rtx, 1))
        print_rtl_single (outf, XEXP (tmp_rtx, 0));
    }
}
/* Dump the rtl into the current debugging dump file, then abort.  */

static void
print_rtl_and_abort ()
{
  if (rtl_dump_file)
    {
      print_rtl_with_bb (rtl_dump_file, get_insns ());
      fclose (rtl_dump_file);
    }
  abort ();
}
/* Recompute register set/reference counts immediately prior to register
   allocation.

   This avoids problems with set/reference counts changing to/from values
   which have special meanings to the register allocators.

   Additionally, the reference counts are the primary component used by the
   register allocators to prioritize pseudos for allocation to hard regs.
   More accurate reference counts generally lead to better register
   allocation.

   F is the first insn to be scanned.

   LOOP_STEP denotes how much loop_depth should be incremented per
   loop nesting level in order to increase the ref count more for
   references in a loop.

   It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and
   possibly other information which is used by the register allocators.  */

void
recompute_reg_usage (f, loop_step)
     rtx f ATTRIBUTE_UNUSED;
     int loop_step ATTRIBUTE_UNUSED;
{
  allocate_reg_life_data ();
  update_life_info (NULL, UPDATE_LIFE_LOCAL, PROP_REG_INFO);
}
/* Optionally removes all the REG_DEAD and REG_UNUSED notes from a set of
   blocks.  If BLOCKS is NULL, assume the universal set.  Returns a count
   of the number of registers that died.  */

int
count_or_remove_death_notes (blocks, kill)
     sbitmap blocks;
     int kill;
{
  int i, count = 0;

  for (i = n_basic_blocks - 1; i >= 0; --i)
    {
      basic_block bb;
      rtx insn;

      if (blocks && ! TEST_BIT (blocks, i))
	continue;

      bb = BASIC_BLOCK (i);

      for (insn = bb->head;; insn = NEXT_INSN (insn))
	{
	  if (INSN_P (insn))
	    {
	      rtx *pprev = &REG_NOTES (insn);
	      rtx link = *pprev;

	      while (link)
		{
		  switch (REG_NOTE_KIND (link))
		    {
		    case REG_DEAD:
		      if (GET_CODE (XEXP (link, 0)) == REG)
			{
			  rtx reg = XEXP (link, 0);
			  int n;

			  if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
			    n = 1;
			  else
			    n = HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg));
			  count += n;
			}
		      /* Fall through.  */

		    case REG_UNUSED:
		      if (kill)
			{
			  rtx next = XEXP (link, 1);
			  free_EXPR_LIST_node (link);
			  *pprev = link = next;
			  break;
			}
		      /* Fall through.  */

		    default:
		      pprev = &XEXP (link, 1);
		      link = *pprev;
		      break;
		    }
		}
	    }

	  if (insn == bb->end)
	    break;
	}
    }

  return count;
}
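/* For illustration, a caller that only wants a count passes the
   universal set and a zero KILL argument:

       int ndead = count_or_remove_death_notes (NULL, 0);

   whereas a non-zero KILL additionally strips the REG_DEAD and
   REG_UNUSED notes from the selected blocks.  */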
/* Update insns block within BB.  */

void
update_bb_for_insn (bb)
     basic_block bb;
{
  rtx insn;

  if (! basic_block_for_insn)
    return;

  for (insn = bb->head; ; insn = NEXT_INSN (insn))
    {
      set_block_for_insn (insn, bb);

      if (insn == bb->end)
	break;
    }
}
/* Record INSN's block as BB.  */

void
set_block_for_insn (insn, bb)
     rtx insn;
     basic_block bb;
{
  size_t uid = INSN_UID (insn);

  if (uid >= basic_block_for_insn->num_elements)
    {
      int new_size;

      /* Add one-eighth the size so we don't keep calling xrealloc.  */
      new_size = uid + (uid + 7) / 8;

      VARRAY_GROW (basic_block_for_insn, new_size);
    }

  VARRAY_BB (basic_block_for_insn, uid) = bb;
}
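/* For example, an insn with UID 800 grows the map to
   800 + (800 + 7) / 8 == 900 elements, i.e. roughly one-eighth
   headroom beyond the index that triggered the reallocation.  */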
/* Record INSN's block number as BB.  */
/* ??? This has got to go.  */

void
set_block_num (insn, bb)
     rtx insn;
     int bb;
{
  set_block_for_insn (insn, BASIC_BLOCK (bb));
}
/* Verify the CFG consistency.  This function checks some CFG invariants and
   aborts when something is wrong.  The hope is that this function will help
   convert many optimization passes to keep the CFG consistent.

   Currently it does the following checks:

   - test head/end pointers
   - overlapping of basic blocks
   - edge list correctness
   - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
   - tails of basic blocks (ensure that the boundary is necessary)
   - scans body of the basic block for JUMP_INSN, CODE_LABEL
     and NOTE_INSN_BASIC_BLOCK
   - check that all insns are in the basic blocks
     (except the switch handling code, barriers and notes)
   - check that all returns are followed by barriers

   In the future it can be extended to check a lot of other stuff as well
   (reachability of basic blocks, life information, etc. etc.).  */

void
verify_flow_info ()
{
  const int max_uid = get_max_uid ();
  const rtx rtx_first = get_insns ();
  rtx last_head = get_last_insn ();
  basic_block *bb_info;
  rtx x;
  int i, last_bb_num_seen, num_bb_notes, err = 0;

  bb_info = (basic_block *) xcalloc (max_uid, sizeof (basic_block));

  for (i = n_basic_blocks - 1; i >= 0; i--)
    {
      basic_block bb = BASIC_BLOCK (i);
      rtx head = bb->head;
      rtx end = bb->end;

      /* Verify the end of the basic block is in the INSN chain.  */
      for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
	if (x == end)
	  break;
      if (!x)
	{
	  error ("End insn %d for block %d not found in the insn stream.",
		 INSN_UID (end), bb->index);
	  err = 1;
	}

      /* Work backwards from the end to the head of the basic block
	 to verify the head is in the RTL chain.  */
      for (; x != NULL_RTX; x = PREV_INSN (x))
	{
	  /* While walking over the insn chain, verify insns appear
	     in only one basic block and initialize the BB_INFO array
	     used by other passes.  */
	  if (bb_info[INSN_UID (x)] != NULL)
	    {
	      error ("Insn %d is in multiple basic blocks (%d and %d)",
		     INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
	      err = 1;
	    }
	  bb_info[INSN_UID (x)] = bb;

	  if (x == head)
	    break;
	}
      if (!x)
	{
	  error ("Head insn %d for block %d not found in the insn stream.",
		 INSN_UID (head), bb->index);
	  err = 1;
	}

      last_head = PREV_INSN (x);
    }

  /* Now check the basic blocks (boundaries etc.)  */
  for (i = n_basic_blocks - 1; i >= 0; i--)
    {
      basic_block bb = BASIC_BLOCK (i);
      /* Check correctness of edge lists.  */
      edge e;

      e = bb->succ;
      while (e)
	{
	  if (e->src != bb)
	    {
	      fprintf (stderr,
		       "verify_flow_info: Basic block %d succ edge is corrupted\n",
		       bb->index);
	      fprintf (stderr, "Predecessor: ");
	      dump_edge_info (stderr, e, 0);
	      fprintf (stderr, "\nSuccessor: ");
	      dump_edge_info (stderr, e, 1);
	      fflush (stderr);
	      err = 1;
	    }
	  if (e->dest != EXIT_BLOCK_PTR)
	    {
	      edge e2 = e->dest->pred;

	      while (e2 && e2 != e)
		e2 = e2->pred_next;
	      if (!e2)
		{
		  error ("Basic block %i edge lists are corrupted", bb->index);
		  err = 1;
		}
	    }
	  e = e->succ_next;
	}

      e = bb->pred;
      while (e)
	{
	  if (e->dest != bb)
	    {
	      error ("Basic block %d pred edge is corrupted", bb->index);
	      fputs ("Predecessor: ", stderr);
	      dump_edge_info (stderr, e, 0);
	      fputs ("\nSuccessor: ", stderr);
	      dump_edge_info (stderr, e, 1);
	      fputc ('\n', stderr);
	      err = 1;
	    }
	  if (e->src != ENTRY_BLOCK_PTR)
	    {
	      edge e2 = e->src->succ;

	      while (e2 && e2 != e)
		e2 = e2->succ_next;
	      if (!e2)
		{
		  error ("Basic block %i edge lists are corrupted", bb->index);
		  err = 1;
		}
	    }
	  e = e->pred_next;
	}

      /* OK pointers are correct.  Now check the header of basic
	 block.  It ought to contain optional CODE_LABEL followed
	 by NOTE_BASIC_BLOCK.  */
      x = bb->head;
      if (GET_CODE (x) == CODE_LABEL)
	{
	  if (bb->end == x)
	    {
	      error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
		     bb->index);
	      err = 1;
	    }
	  x = NEXT_INSN (x);
	}
      if (!NOTE_INSN_BASIC_BLOCK_P (x) || NOTE_BASIC_BLOCK (x) != bb)
	{
	  error ("NOTE_INSN_BASIC_BLOCK is missing for block %d\n",
		 bb->index);
	  err = 1;
	}

      if (bb->end == x)
	{
	  /* Do checks for empty blocks here */
	}
      else
	{
	  x = NEXT_INSN (x);
	  while (x)
	    {
	      if (NOTE_INSN_BASIC_BLOCK_P (x))
		{
		  error ("NOTE_INSN_BASIC_BLOCK %d in the middle of basic block %d",
			 INSN_UID (x), bb->index);
		  err = 1;
		}

	      if (x == bb->end)
		break;

	      if (GET_CODE (x) == JUMP_INSN
		  || GET_CODE (x) == CODE_LABEL
		  || GET_CODE (x) == BARRIER)
		{
		  error ("In basic block %d:", bb->index);
		  fatal_insn ("Flow control insn inside a basic block", x);
		}

	      x = NEXT_INSN (x);
	    }
	}
    }

  last_bb_num_seen = -1;
  num_bb_notes = 0;
  x = rtx_first;
  while (x)
    {
      if (NOTE_INSN_BASIC_BLOCK_P (x))
	{
	  basic_block bb = NOTE_BASIC_BLOCK (x);

	  num_bb_notes++;
	  if (bb->index != last_bb_num_seen + 1)
	    fatal ("Basic blocks not numbered consecutively");
	  last_bb_num_seen = bb->index;
	}

      if (!bb_info[INSN_UID (x)])
	{
	  switch (GET_CODE (x))
	    {
	    case BARRIER:
	    case NOTE:
	      break;

	    case CODE_LABEL:
	      /* An addr_vec is placed outside any basic block.  */
	      if (NEXT_INSN (x)
		  && GET_CODE (NEXT_INSN (x)) == JUMP_INSN
		  && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
		      || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC))
		x = NEXT_INSN (x);

	      /* But in any case, non-deletable labels can appear anywhere.  */
	      break;

	    default:
	      fatal_insn ("Insn outside basic block", x);
	    }
	}

      if (INSN_P (x)
	  && GET_CODE (x) == JUMP_INSN
	  && returnjump_p (x) && ! condjump_p (x)
	  && ! (NEXT_INSN (x) && GET_CODE (NEXT_INSN (x)) == BARRIER))
	fatal_insn ("Return not followed by barrier", x);

      x = NEXT_INSN (x);
    }

  if (num_bb_notes != n_basic_blocks)
    fatal ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
	   num_bb_notes, n_basic_blocks);

  if (err)
    abort ();

  /* Clean up.  */
  free (bb_info);
}
/* Functions to access an edge list with a vector representation.
   Enough data is kept such that given an index number, the
   pred and succ that edge represents can be determined, or
   given a pred and a succ, its index number can be returned.
   This allows algorithms which consume a lot of memory to
   represent the normally full matrix of edge (pred,succ) with a
   single indexed vector, edge (EDGE_INDEX (pred, succ)), with no
   wasted space in the client code due to sparse flow graphs.  */

/* This function initializes the edge list.  Basically the entire
   flowgraph is processed, and all edges are assigned a number,
   and the data structure is filled in.  */

struct edge_list *
create_edge_list ()
{
  struct edge_list *elist;
  edge e;
  int num_edges;
  int x;
  int block_count;

  block_count = n_basic_blocks + 2;	/* Include the entry and exit blocks.  */

  num_edges = 0;

  /* Determine the number of edges in the flow graph by counting successor
     edges on each basic block.  */
  for (x = 0; x < n_basic_blocks; x++)
    {
      basic_block bb = BASIC_BLOCK (x);

      for (e = bb->succ; e; e = e->succ_next)
	num_edges++;
    }
  /* Don't forget successors of the entry block.  */
  for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
    num_edges++;

  elist = (struct edge_list *) xmalloc (sizeof (struct edge_list));
  elist->num_blocks = block_count;
  elist->num_edges = num_edges;
  elist->index_to_edge = (edge *) xmalloc (sizeof (edge) * num_edges);

  num_edges = 0;

  /* Follow successors of the entry block, and register these edges.  */
  for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
    elist->index_to_edge[num_edges++] = e;

  for (x = 0; x < n_basic_blocks; x++)
    {
      basic_block bb = BASIC_BLOCK (x);

      /* Follow all successors of blocks, and register these edges.  */
      for (e = bb->succ; e; e = e->succ_next)
	elist->index_to_edge[num_edges++] = e;
    }

  return elist;
}
/* This function frees the memory associated with an edge list.  */

void
free_edge_list (elist)
     struct edge_list *elist;
{
  if (elist)
    {
      free (elist->index_to_edge);
      free (elist);
    }
}
/* This function provides debug output showing an edge list.  */

void
print_edge_list (f, elist)
     FILE *f;
     struct edge_list *elist;
{
  int x;

  fprintf (f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
	   elist->num_blocks - 2, elist->num_edges);

  for (x = 0; x < elist->num_edges; x++)
    {
      fprintf (f, " %-4d - edge(", x);
      if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR)
	fprintf (f, "entry,");
      else
	fprintf (f, "%d,", INDEX_EDGE_PRED_BB (elist, x)->index);

      if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR)
	fprintf (f, "exit)\n");
      else
	fprintf (f, "%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
    }
}
/* This function provides an internal consistency check of an edge list,
   verifying that all edges are present, and that there are no
   spurious edges.  */

static void
verify_edge_list (f, elist)
     FILE *f;
     struct edge_list *elist;
{
  int x, pred, succ, index;
  edge e;

  for (x = 0; x < n_basic_blocks; x++)
    {
      basic_block bb = BASIC_BLOCK (x);

      for (e = bb->succ; e; e = e->succ_next)
	{
	  pred = e->src->index;
	  succ = e->dest->index;
	  index = EDGE_INDEX (elist, e->src, e->dest);
	  if (index == EDGE_INDEX_NO_EDGE)
	    {
	      fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
	      continue;
	    }
	  if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
	    fprintf (f, "*p* Pred for index %d should be %d not %d\n",
		     index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
	  if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
	    fprintf (f, "*p* Succ for index %d should be %d not %d\n",
		     index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
	}
    }
  for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
    {
      pred = e->src->index;
      succ = e->dest->index;
      index = EDGE_INDEX (elist, e->src, e->dest);
      if (index == EDGE_INDEX_NO_EDGE)
	{
	  fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
	  continue;
	}
      if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
	fprintf (f, "*p* Pred for index %d should be %d not %d\n",
		 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
      if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
	fprintf (f, "*p* Succ for index %d should be %d not %d\n",
		 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
    }

  /* We've verified that all the edges are in the list, now let's make sure
     there are no spurious edges in the list.  */

  for (pred = 0; pred < n_basic_blocks; pred++)
    for (succ = 0; succ < n_basic_blocks; succ++)
      {
	basic_block p = BASIC_BLOCK (pred);
	basic_block s = BASIC_BLOCK (succ);
	int found_edge = 0;

	for (e = p->succ; e; e = e->succ_next)
	  if (e->dest == s)
	    {
	      found_edge = 1;
	      break;
	    }
	for (e = s->pred; e; e = e->pred_next)
	  if (e->src == p)
	    {
	      found_edge = 1;
	      break;
	    }
	if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
	    == EDGE_INDEX_NO_EDGE && found_edge != 0)
	  fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
		   pred, succ);
	if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
	    != EDGE_INDEX_NO_EDGE && found_edge == 0)
	  fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
		   pred, succ, EDGE_INDEX (elist, BASIC_BLOCK (pred),
					   BASIC_BLOCK (succ)));
      }
  for (succ = 0; succ < n_basic_blocks; succ++)
    {
      basic_block p = ENTRY_BLOCK_PTR;
      basic_block s = BASIC_BLOCK (succ);
      int found_edge = 0;

      for (e = p->succ; e; e = e->succ_next)
	if (e->dest == s)
	  {
	    found_edge = 1;
	    break;
	  }
      for (e = s->pred; e; e = e->pred_next)
	if (e->src == p)
	  {
	    found_edge = 1;
	    break;
	  }
      if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
	  == EDGE_INDEX_NO_EDGE && found_edge != 0)
	fprintf (f, "*** Edge (entry, %d) appears to not have an index\n",
		 succ);
      if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
	  != EDGE_INDEX_NO_EDGE && found_edge == 0)
	fprintf (f, "*** Edge (entry, %d) has index %d, but no edge exists\n",
		 succ, EDGE_INDEX (elist, ENTRY_BLOCK_PTR,
				   BASIC_BLOCK (succ)));
    }
  for (pred = 0; pred < n_basic_blocks; pred++)
    {
      basic_block p = BASIC_BLOCK (pred);
      basic_block s = EXIT_BLOCK_PTR;
      int found_edge = 0;

      for (e = p->succ; e; e = e->succ_next)
	if (e->dest == s)
	  {
	    found_edge = 1;
	    break;
	  }
      for (e = s->pred; e; e = e->pred_next)
	if (e->src == p)
	  {
	    found_edge = 1;
	    break;
	  }
      if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
	  == EDGE_INDEX_NO_EDGE && found_edge != 0)
	fprintf (f, "*** Edge (%d, exit) appears to not have an index\n",
		 pred);
      if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
	  != EDGE_INDEX_NO_EDGE && found_edge == 0)
	fprintf (f, "*** Edge (%d, exit) has index %d, but no edge exists\n",
		 pred, EDGE_INDEX (elist, BASIC_BLOCK (pred),
				   EXIT_BLOCK_PTR));
    }
}
/* This routine will determine what, if any, edge there is between
   a specified predecessor and successor.  */

int
find_edge_index (edge_list, pred, succ)
     struct edge_list *edge_list;
     basic_block pred, succ;
{
  int x;

  for (x = 0; x < NUM_EDGES (edge_list); x++)
    {
      if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
	  && INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
	return x;
    }

  return (EDGE_INDEX_NO_EDGE);
}
/* This function will remove an edge from the flow graph.  */

void
remove_edge (e)
     edge e;
{
  edge last_pred = NULL;
  edge last_succ = NULL;
  edge tmp;
  basic_block src, dest;

  src = e->src;
  dest = e->dest;
  for (tmp = src->succ; tmp && tmp != e; tmp = tmp->succ_next)
    last_succ = tmp;

  if (!tmp)
    abort ();
  if (last_succ)
    last_succ->succ_next = e->succ_next;
  else
    src->succ = e->succ_next;

  for (tmp = dest->pred; tmp && tmp != e; tmp = tmp->pred_next)
    last_pred = tmp;

  if (!tmp)
    abort ();
  if (last_pred)
    last_pred->pred_next = e->pred_next;
  else
    dest->pred = e->pred_next;

  n_edges--;
  free (e);
}
/* This routine will remove any fake successor edges for a basic block.
   When the edge is removed, it is also removed from whatever predecessor
   list it is in.  */

static void
remove_fake_successors (bb)
     basic_block bb;
{
  edge e;

  for (e = bb->succ; e;)
    {
      edge tmp = e;

      e = e->succ_next;
      if ((tmp->flags & EDGE_FAKE) == EDGE_FAKE)
	remove_edge (tmp);
    }
}
/* This routine will remove all fake edges from the flow graph.  If
   we remove all fake successors, it will automatically remove all
   fake predecessors.  */

void
remove_fake_edges ()
{
  int x;

  for (x = 0; x < n_basic_blocks; x++)
    remove_fake_successors (BASIC_BLOCK (x));

  /* We've handled all successors except the entry block's.  */
  remove_fake_successors (ENTRY_BLOCK_PTR);
}
/* This function will add a fake edge between any block which has no
   successors, and the exit block.  Some data flow equations require
   these edges to exist.  */

void
add_noreturn_fake_exit_edges ()
{
  int x;

  for (x = 0; x < n_basic_blocks; x++)
    if (BASIC_BLOCK (x)->succ == NULL)
      make_edge (NULL, BASIC_BLOCK (x), EXIT_BLOCK_PTR, EDGE_FAKE);
}
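/* For illustration, a pass that needs every block to reach the exit
   typically brackets its analysis with the fake-edge helpers:

       add_noreturn_fake_exit_edges ();
       ... solve the dataflow problem ...
       remove_fake_edges ();

   The inserted edges carry EDGE_FAKE in their flags, so other
   consumers of the CFG can recognize (and ignore) them.  */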
/* This function adds a fake edge from any infinite loop to the
   exit block.  Some optimizations require a path from each node to
   the exit node.

   See also Morgan, Figure 3.10, pp. 82-83.

   The current implementation is ugly, not attempting to minimize the
   number of inserted fake edges.  To reduce the number of fake edges
   to insert, add fake edges from _innermost_ loops containing only
   nodes not reachable from the exit block.  */

void
connect_infinite_loops_to_exit ()
{
  basic_block unvisited_block;

  /* Perform depth-first search in the reverse graph to find nodes
     reachable from the exit block.  */
  struct depth_first_search_dsS dfs_ds;

  flow_dfs_compute_reverse_init (&dfs_ds);
  flow_dfs_compute_reverse_add_bb (&dfs_ds, EXIT_BLOCK_PTR);

  /* Repeatedly add fake edges, updating the unreachable nodes.  */
  while (1)
    {
      unvisited_block = flow_dfs_compute_reverse_execute (&dfs_ds);
      if (!unvisited_block)
	break;
      make_edge (NULL, unvisited_block, EXIT_BLOCK_PTR, EDGE_FAKE);
      flow_dfs_compute_reverse_add_bb (&dfs_ds, unvisited_block);
    }

  flow_dfs_compute_reverse_finish (&dfs_ds);

  return;
}
/* Redirect an edge's successor from one block to another.  */

void
redirect_edge_succ (e, new_succ)
     edge e;
     basic_block new_succ;
{
  edge *pe;

  /* Disconnect the edge from the old successor block.  */
  for (pe = &e->dest->pred; *pe != e; pe = &(*pe)->pred_next)
    continue;
  *pe = (*pe)->pred_next;

  /* Reconnect the edge to the new successor block.  */
  e->pred_next = new_succ->pred;
  new_succ->pred = e;
  e->dest = new_succ;
}
/* Redirect an edge's predecessor from one block to another.  */

void
redirect_edge_pred (e, new_pred)
     edge e;
     basic_block new_pred;
{
  edge *pe;

  /* Disconnect the edge from the old predecessor block.  */
  for (pe = &e->src->succ; *pe != e; pe = &(*pe)->succ_next)
    continue;
  *pe = (*pe)->succ_next;

  /* Reconnect the edge to the new predecessor block.  */
  e->succ_next = new_pred->succ;
  new_pred->succ = e;
  e->src = new_pred;
}
/* Dump the list of basic blocks in the bitmap NODES.  */

static void
flow_nodes_print (str, nodes, file)
     const char *str;
     const sbitmap nodes;
     FILE *file;
{
  int node;

  if (! nodes)
    return;

  fprintf (file, "%s { ", str);
  EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {fprintf (file, "%d ", node);});
  fputs ("}\n", file);
}
/* Dump the list of edges in the array EDGE_LIST.  */

static void
flow_edge_list_print (str, edge_list, num_edges, file)
     const char *str;
     const edge *edge_list;
     int num_edges;
     FILE *file;
{
  int i;

  if (! edge_list)
    return;

  fprintf (file, "%s { ", str);
  for (i = 0; i < num_edges; i++)
    fprintf (file, "%d->%d ", edge_list[i]->src->index,
	     edge_list[i]->dest->index);
  fputs ("}\n", file);
}
/* Dump loop related CFG information.  */

static void
flow_loops_cfg_dump (loops, file)
     const struct loops *loops;
     FILE *file;
{
  int i;

  if (! loops->num || ! file || ! loops->cfg.dom)
    return;

  for (i = 0; i < n_basic_blocks; i++)
    {
      edge succ;

      fprintf (file, ";; %d succs { ", i);
      for (succ = BASIC_BLOCK (i)->succ; succ; succ = succ->succ_next)
	fprintf (file, "%d ", succ->dest->index);
      flow_nodes_print ("} dom", loops->cfg.dom[i], file);
    }

  /* Dump the DFS node order.  */
  if (loops->cfg.dfs_order)
    {
      fputs (";; DFS order: ", file);
      for (i = 0; i < n_basic_blocks; i++)
	fprintf (file, "%d ", loops->cfg.dfs_order[i]);
      fputs ("\n", file);
    }

  /* Dump the reverse completion node order.  */
  if (loops->cfg.rc_order)
    {
      fputs (";; RC order: ", file);
      for (i = 0; i < n_basic_blocks; i++)
	fprintf (file, "%d ", loops->cfg.rc_order[i]);
      fputs ("\n", file);
    }
}
/* Return non-zero if the nodes of LOOP are a subset of OUTER.  */

static int
flow_loop_nested_p (outer, loop)
     struct loop *outer;
     struct loop *loop;
{
  return sbitmap_a_subset_b_p (loop->nodes, outer->nodes);
}
/* Dump the loop information specified by LOOP to the stream FILE
   using auxiliary dump callback function LOOP_DUMP_AUX if non null.  */

void
flow_loop_dump (loop, file, loop_dump_aux, verbose)
     const struct loop *loop;
     FILE *file;
     void (*loop_dump_aux) PARAMS((const struct loop *, FILE *, int));
     int verbose;
{
  if (! loop || ! loop->header)
    return;

  fprintf (file, ";;\n;; Loop %d (%d to %d):%s%s\n",
	   loop->num, INSN_UID (loop->first->head),
	   INSN_UID (loop->last->end),
	   loop->shared ? " shared" : "",
	   loop->invalid ? " invalid" : "");
  fprintf (file, ";; header %d, latch %d, pre-header %d, first %d, last %d\n",
	   loop->header->index, loop->latch->index,
	   loop->pre_header ? loop->pre_header->index : -1,
	   loop->first->index, loop->last->index);
  fprintf (file, ";; depth %d, level %d, outer %ld\n",
	   loop->depth, loop->level,
	   (long) (loop->outer ? loop->outer->num : -1));

  if (loop->pre_header_edges)
    flow_edge_list_print (";; pre-header edges", loop->pre_header_edges,
			  loop->num_pre_header_edges, file);
  flow_edge_list_print (";; entry edges", loop->entry_edges,
			loop->num_entries, file);
  fprintf (file, ";; %d", loop->num_nodes);
  flow_nodes_print (" nodes", loop->nodes, file);
  flow_edge_list_print (";; exit edges", loop->exit_edges,
			loop->num_exits, file);
  if (loop->exits_doms)
    flow_nodes_print (";; exit doms", loop->exits_doms, file);
  if (loop_dump_aux && verbose)
    loop_dump_aux (loop, file, verbose);
}
/* Dump the loop information specified by LOOPS to the stream FILE,
   using auxiliary dump callback function LOOP_DUMP_AUX if non null.  */

void
flow_loops_dump (loops, file, loop_dump_aux, verbose)
     const struct loops *loops;
     FILE *file;
     void (*loop_dump_aux) PARAMS((const struct loop *, FILE *, int));
     int verbose;
{
  int i;
  int num_loops;

  num_loops = loops->num;
  if (! num_loops || ! file)
    return;

  fprintf (file, ";; %d loops found, %d levels\n",
	   num_loops, loops->levels);

  for (i = 0; i < num_loops; i++)
    {
      struct loop *loop = &loops->array[i];

      flow_loop_dump (loop, file, loop_dump_aux, verbose);

      if (loop->shared)
	{
	  int j;

	  for (j = 0; j < i; j++)
	    {
	      struct loop *oloop = &loops->array[j];

	      if (loop->header == oloop->header)
		{
		  int disjoint;
		  int smaller;

		  smaller = loop->num_nodes < oloop->num_nodes;

		  /* If the union of LOOP and OLOOP is different than
		     the larger of LOOP and OLOOP then LOOP and OLOOP
		     must be disjoint.  */
		  disjoint = ! flow_loop_nested_p (smaller ? loop : oloop,
						   smaller ? oloop : loop);
		  fprintf (file,
			   ";; loop header %d shared by loops %d, %d %s\n",
			   loop->header->index, i, j,
			   disjoint ? "disjoint" : "nested");
		}
	    }
	}
    }

  if (verbose)
    flow_loops_cfg_dump (loops, file);
}
/* Free all the memory allocated for LOOPS.  */

void
flow_loops_free (loops)
     struct loops *loops;
{
  if (loops->array)
    {
      int i;

      if (! loops->num)
	abort ();

      /* Free the loop descriptors.  */
      for (i = 0; i < loops->num; i++)
	{
	  struct loop *loop = &loops->array[i];

	  if (loop->pre_header_edges)
	    free (loop->pre_header_edges);
	  if (loop->nodes)
	    sbitmap_free (loop->nodes);
	  if (loop->entry_edges)
	    free (loop->entry_edges);
	  if (loop->exit_edges)
	    free (loop->exit_edges);
	  if (loop->exits_doms)
	    sbitmap_free (loop->exits_doms);
	}
      free (loops->array);
      loops->array = NULL;

      if (loops->cfg.dom)
	sbitmap_vector_free (loops->cfg.dom);
      if (loops->cfg.dfs_order)
	free (loops->cfg.dfs_order);
      if (loops->cfg.rc_order)
	free (loops->cfg.rc_order);

      if (loops->shared_headers)
	sbitmap_free (loops->shared_headers);
    }
}
/* Find the entry edges into the loop with header HEADER and nodes
   NODES and store in ENTRY_EDGES array.  Return the number of entry
   edges into the loop.  */

static int
flow_loop_entry_edges_find (header, nodes, entry_edges)
     basic_block header;
     const sbitmap nodes;
     edge **entry_edges;
{
  edge e;
  int num_entries;

  *entry_edges = NULL;

  num_entries = 0;
  for (e = header->pred; e; e = e->pred_next)
    {
      basic_block src = e->src;

      if (src == ENTRY_BLOCK_PTR || ! TEST_BIT (nodes, src->index))
	num_entries++;
    }

  if (! num_entries)
    abort ();

  *entry_edges = (edge *) xmalloc (num_entries * sizeof (edge *));

  num_entries = 0;
  for (e = header->pred; e; e = e->pred_next)
    {
      basic_block src = e->src;

      if (src == ENTRY_BLOCK_PTR || ! TEST_BIT (nodes, src->index))
	(*entry_edges)[num_entries++] = e;
    }

  return num_entries;
}
/* Find the exit edges from the loop using the bitmap of loop nodes
   NODES and store in EXIT_EDGES array.  Return the number of
   exit edges from the loop.  */

static int
flow_loop_exit_edges_find (nodes, exit_edges)
     const sbitmap nodes;
     edge **exit_edges;
{
  edge e;
  int node;
  int num_exits;

  *exit_edges = NULL;

  /* Check all nodes within the loop to see if there are any
     successors not in the loop.  Note that a node may have multiple
     exiting edges: for example, a node with a jumping edge and a
     fallthru edge may have more than one edge that leaves the loop.  */
  num_exits = 0;
  EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
    for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
      {
	basic_block dest = e->dest;

	if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
	  num_exits++;
      }
  });

  if (! num_exits)
    return 0;

  *exit_edges = (edge *) xmalloc (num_exits * sizeof (edge *));

  /* Store all exiting edges into an array.  */
  num_exits = 0;
  EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
    for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
      {
	basic_block dest = e->dest;

	if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
	  (*exit_edges)[num_exits++] = e;
      }
  });

  return num_exits;
}
/* Find the nodes contained within the loop with header HEADER and
   latch LATCH and store in NODES.  Return the number of nodes within
   the loop.  */

static int
flow_loop_nodes_find (header, latch, nodes)
     basic_block header;
     basic_block latch;
     sbitmap nodes;
{
  basic_block *stack;
  int sp;
  int num_nodes = 1;

  stack = (basic_block *) xmalloc (n_basic_blocks * sizeof (basic_block));
  sp = 0;

  /* Start with only the loop header in the set of loop nodes.  */
  sbitmap_zero (nodes);
  SET_BIT (nodes, header->index);

  header->loop_depth++;

  /* Push the loop latch on to the stack.  */
  if (! TEST_BIT (nodes, latch->index))
    {
      SET_BIT (nodes, latch->index);
      latch->loop_depth++;
      num_nodes++;
      stack[sp++] = latch;
    }

  while (sp)
    {
      basic_block node;
      edge e;

      node = stack[--sp];
      for (e = node->pred; e; e = e->pred_next)
	{
	  basic_block ancestor = e->src;

	  /* If an ancestor is not already marked as part of the loop,
	     add it to the set of loop nodes and push it on to the stack.  */
	  if (ancestor != ENTRY_BLOCK_PTR
	      && ! TEST_BIT (nodes, ancestor->index))
	    {
	      SET_BIT (nodes, ancestor->index);
	      ancestor->loop_depth++;
	      num_nodes++;
	      stack[sp++] = ancestor;
	    }
	}
    }

  free (stack);
  return num_nodes;
}
/* Compute the depth first search order and store in the array
   DFS_ORDER if non-zero, marking the nodes visited in VISITED.  If
   RC_ORDER is non-zero, return the reverse completion number for each
   node.  Returns the number of nodes visited.  A depth first search
   tries to get as far away from the starting point as quickly as
   possible.  */

int
flow_depth_first_order_compute (dfs_order, rc_order)
     int *dfs_order;
     int *rc_order;
{
  edge *stack;
  int sp;
  int dfsnum = 0;
  int rcnum = n_basic_blocks - 1;
  sbitmap visited;

  /* Allocate stack for back-tracking up CFG.  */
  stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
  sp = 0;

  /* Allocate bitmap to track nodes that have been visited.  */
  visited = sbitmap_alloc (n_basic_blocks);

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (visited);

  /* Push the first edge on to the stack.  */
  stack[sp++] = ENTRY_BLOCK_PTR->succ;

  while (sp)
    {
      edge e;
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      e = stack[sp - 1];
      src = e->src;
      dest = e->dest;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
	{
	  /* Mark that we have visited the destination.  */
	  SET_BIT (visited, dest->index);

	  if (dfs_order)
	    dfs_order[dfsnum++] = dest->index;

	  if (dest->succ)
	    {
	      /* Since the DEST node has been visited for the first
		 time, check its successors.  */
	      stack[sp++] = dest->succ;
	    }
	  else if (rc_order)
	    /* There are no successors for the DEST node so assign
	       its reverse completion number.  */
	    rc_order[rcnum--] = dest->index;
	}
      else
	{
	  if (! e->succ_next && src != ENTRY_BLOCK_PTR && rc_order)
	    /* There are no more successors for the SRC node
	       so assign its reverse completion number.  */
	    rc_order[rcnum--] = src->index;

	  if (e->succ_next)
	    stack[sp - 1] = e->succ_next;
	  else
	    sp--;
	}
    }

  free (stack);
  sbitmap_free (visited);

  /* The number of nodes visited should not be greater than
     n_basic_blocks.  */
  if (dfsnum > n_basic_blocks)
    abort ();

  /* There are some nodes left in the CFG that are unreachable.  */
  if (dfsnum < n_basic_blocks)
    abort ();

  return dfsnum;
}
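/* For illustration, callers allocate both order arrays with
   n_basic_blocks elements and index them directly, as flow_loops_find
   does below:

       int *dfs_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
       int *rc_order = (int *) xmalloc (n_basic_blocks * sizeof (int));

       flow_depth_first_order_compute (dfs_order, rc_order);

   Afterwards BASIC_BLOCK (rc_order[0]) is the first block in reverse
   completion (reverse postorder) order.  */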
/* Compute the depth first search order on the _reverse_ graph and
   store in the array DFS_ORDER, marking the nodes visited in VISITED.
   Returns the number of nodes visited.

   The computation is split into three pieces:

   flow_dfs_compute_reverse_init () creates the necessary data
   structures.

   flow_dfs_compute_reverse_add_bb () adds a basic block to the data
   structures.  The block will start the search.

   flow_dfs_compute_reverse_execute () continues (or starts) the
   search using the block on the top of the stack, stopping when the
   stack is empty.

   flow_dfs_compute_reverse_finish () destroys the necessary data
   structures.

   Thus, the user will probably call ..._init(), call ..._add_bb() to
   add a beginning basic block to the stack, call ..._execute(),
   possibly add another bb to the stack and again call ..._execute(),
   ..., and finally call _finish().  */

/* Initialize the data structures used for depth-first search on the
   reverse graph.  DATA is the current depth-first search context.  */

static void
flow_dfs_compute_reverse_init (data)
     depth_first_search_ds data;
{
  /* Allocate stack for back-tracking up CFG.  */
  data->stack =
    (basic_block *) xmalloc ((n_basic_blocks - (INVALID_BLOCK + 1))
			     * sizeof (basic_block));
  data->sp = 0;

  /* Allocate bitmap to track nodes that have been visited.  */
  data->visited_blocks = sbitmap_alloc (n_basic_blocks - (INVALID_BLOCK + 1));

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (data->visited_blocks);

  return;
}
/* Add the specified basic block to the top of the dfs data
   structures.  When the search continues, it will start at the
   block.  */

static void
flow_dfs_compute_reverse_add_bb (data, bb)
     depth_first_search_ds data;
     basic_block bb;
{
  data->stack[data->sp++] = bb;
  return;
}
/* Continue the depth-first search through the reverse graph starting
   with the block at the stack's top and ending when the stack is
   empty.  Visited nodes are marked.  Returns an unvisited basic
   block, or NULL if there is none available.  */

static basic_block
flow_dfs_compute_reverse_execute (data)
     depth_first_search_ds data;
{
  basic_block bb;
  edge e;
  int i;

  while (data->sp > 0)
    {
      bb = data->stack[--data->sp];

      /* Mark that we have visited this node.  */
      if (!TEST_BIT (data->visited_blocks, bb->index - (INVALID_BLOCK + 1)))
	{
	  SET_BIT (data->visited_blocks, bb->index - (INVALID_BLOCK + 1));

	  /* Perform depth-first search on adjacent vertices.  */
	  for (e = bb->pred; e; e = e->pred_next)
	    flow_dfs_compute_reverse_add_bb (data, e->src);
	}
    }

  /* Determine if there are unvisited basic blocks.  */
  for (i = n_basic_blocks - (INVALID_BLOCK + 1); --i >= 0;)
    if (!TEST_BIT (data->visited_blocks, i))
      return BASIC_BLOCK (i + (INVALID_BLOCK + 1));
  return NULL;
}
/* Destroy the data structures needed for depth-first search on the
   reverse graph.  */

static void
flow_dfs_compute_reverse_finish (data)
     depth_first_search_ds data;
{
  free (data->stack);
  sbitmap_free (data->visited_blocks);
  return;
}
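/* For illustration, the canonical calling sequence for the four
   functions above is the one used by connect_infinite_loops_to_exit:

       struct depth_first_search_dsS dfs_ds;
       basic_block bb;

       flow_dfs_compute_reverse_init (&dfs_ds);
       flow_dfs_compute_reverse_add_bb (&dfs_ds, EXIT_BLOCK_PTR);
       while ((bb = flow_dfs_compute_reverse_execute (&dfs_ds)) != NULL)
	 {
	   ... process the unvisited block BB ...
	   flow_dfs_compute_reverse_add_bb (&dfs_ds, bb);
	 }
       flow_dfs_compute_reverse_finish (&dfs_ds);  */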
/* Find the root node of the loop pre-header extended basic block and
   the edges along the trace from the root node to the loop header.  */

static void
flow_loop_pre_header_scan (loop)
     struct loop *loop;
{
  int num = 0;
  basic_block ebb;

  loop->num_pre_header_edges = 0;

  if (loop->num_entries != 1)
    return;

  ebb = loop->entry_edges[0]->src;

  if (ebb != ENTRY_BLOCK_PTR)
    {
      edge e;

      /* Count number of edges along trace from loop header to
	 root of pre-header extended basic block.  Usually this is
	 only one or two edges.  */
      num++;
      while (ebb->pred->src != ENTRY_BLOCK_PTR && ! ebb->pred->pred_next)
	{
	  ebb = ebb->pred->src;
	  num++;
	}

      loop->pre_header_edges = (edge *) xmalloc (num * sizeof (edge *));
      loop->num_pre_header_edges = num;

      /* Store edges in order that they are followed.  The source
	 of the first edge is the root node of the pre-header extended
	 basic block and the destination of the last edge is
	 the loop header.  */
      for (e = loop->entry_edges[0]; num; e = e->src->pred)
	loop->pre_header_edges[--num] = e;
    }
}
/* Return the block for the pre-header of the loop with header
   HEADER where DOM specifies the dominator information.  Return NULL if
   there is no pre-header.  */

static basic_block
flow_loop_pre_header_find (header, dom)
     basic_block header;
     const sbitmap *dom;
{
  basic_block pre_header;
  edge e;

  /* If block p is a predecessor of the header and is the only block
     that the header does not dominate, then it is the pre-header.  */
  pre_header = NULL;
  for (e = header->pred; e; e = e->pred_next)
    {
      basic_block node = e->src;

      if (node != ENTRY_BLOCK_PTR
	  && ! TEST_BIT (dom[node->index], header->index))
	{
	  if (pre_header == NULL)
	    pre_header = node;
	  else
	    {
	      /* There are multiple edges into the header from outside
		 the loop so there is no pre-header block.  */
	      pre_header = NULL;
	      break;
	    }
	}
    }

  return pre_header;
}
/* Add LOOP to the loop hierarchy tree where PREVLOOP was the loop
   previously added.  The insertion algorithm assumes that the loops
   are added in the order found by a depth first search of the CFG.  */

static void
flow_loop_tree_node_add (prevloop, loop)
     struct loop *prevloop;
     struct loop *loop;
{
  if (flow_loop_nested_p (prevloop, loop))
    {
      prevloop->inner = loop;
      loop->outer = prevloop;
      return;
    }

  while (prevloop->outer)
    {
      if (flow_loop_nested_p (prevloop->outer, loop))
	{
	  prevloop->next = loop;
	  loop->outer = prevloop->outer;
	  return;
	}
      prevloop = prevloop->outer;
    }

  prevloop->next = loop;
  loop->outer = NULL;
}
/* Build the loop hierarchy tree for LOOPS.  */

static void
flow_loops_tree_build (loops)
     struct loops *loops;
{
  int i;
  int num_loops;

  num_loops = loops->num;
  if (! num_loops)
    return;

  /* Root the loop hierarchy tree with the first loop found.
     Since we used a depth first search this should be the
     outermost loop.  */
  loops->tree = &loops->array[0];
  loops->tree->outer = loops->tree->inner = loops->tree->next = NULL;

  /* Add the remaining loops to the tree.  */
  for (i = 1; i < num_loops; i++)
    flow_loop_tree_node_add (&loops->array[i - 1], &loops->array[i]);
}
/* Helper function to compute loop nesting depth and enclosed loop level
   for the natural loop specified by LOOP at the loop depth DEPTH.
   Returns the loop level.  */

static int
flow_loop_level_compute (loop, depth)
     struct loop *loop;
     int depth;
{
  struct loop *inner;
  int level = 1;

  if (! loop)
    return 0;

  /* Traverse loop tree assigning depth and computing level as the
     maximum level of all the inner loops of this loop.  The loop
     level is equivalent to the height of the loop in the loop tree
     and corresponds to the number of enclosed loop levels (including
     itself).  */
  for (inner = loop->inner; inner; inner = inner->next)
    {
      int ilevel;

      ilevel = flow_loop_level_compute (inner, depth + 1) + 1;

      if (ilevel > level)
	level = ilevel;
    }
  loop->level = level;
  loop->depth = depth;
  return level;
}
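/* For example, in a doubly nested loop nest the outer loop ends up
   with depth 1 and level 2, while the inner loop gets depth 2 and
   level 1.  */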
/* Compute the loop nesting depth and enclosed loop level for the loop
   hierarchy tree specified by LOOPS.  Return the maximum enclosed loop
   level.  */

static int
flow_loops_level_compute (loops)
     struct loops *loops;
{
  struct loop *loop;
  int level;
  int levels = 0;

  /* Traverse all the outer level loops.  */
  for (loop = loops->tree; loop; loop = loop->next)
    {
      level = flow_loop_level_compute (loop, 1);
      if (level > levels)
	levels = level;
    }
  return levels;
}
/* Scan a single natural loop specified by LOOP collecting information
   about it specified by FLAGS.  */

static int
flow_loop_scan (loops, loop, flags)
     struct loops *loops;
     struct loop *loop;
     int flags;
{
  /* Determine prerequisites.  */
  if ((flags & LOOP_EXITS_DOMS) && ! loop->exit_edges)
    flags |= LOOP_EXIT_EDGES;

  if (flags & LOOP_ENTRY_EDGES)
    /* Find edges which enter the loop header.
       Note that the entry edges should only
       enter the header of a natural loop.  */
    loop->num_entries
      = flow_loop_entry_edges_find (loop->header,
				    loop->nodes,
				    &loop->entry_edges);

  if (flags & LOOP_EXIT_EDGES)
    /* Find edges which exit the loop.  */
    loop->num_exits
      = flow_loop_exit_edges_find (loop->nodes,
				   &loop->exit_edges);

  if (flags & LOOP_EXITS_DOMS)
    {
      int j;

      /* Determine which loop nodes dominate all the exits
	 of the loop.  */
      loop->exits_doms = sbitmap_alloc (n_basic_blocks);
      sbitmap_copy (loop->exits_doms, loop->nodes);
      for (j = 0; j < loop->num_exits; j++)
	sbitmap_a_and_b (loop->exits_doms, loop->exits_doms,
			 loops->cfg.dom[loop->exit_edges[j]->src->index]);

      /* The header of a natural loop must dominate
	 all exits.  */
      if (! TEST_BIT (loop->exits_doms, loop->header->index))
	abort ();
    }

  if (flags & LOOP_PRE_HEADER)
    {
      /* Look to see if the loop has a pre-header node.  */
      loop->pre_header
	= flow_loop_pre_header_find (loop->header, loops->cfg.dom);

      /* Find the blocks within the extended basic block of
	 the loop pre-header.  */
      flow_loop_pre_header_scan (loop);
    }

  return 1;
}
/* Find all the natural loops in the function and save in LOOPS structure
   and recalculate loop_depth information in basic block structures.
   FLAGS controls which loop information is collected.
   Return the number of natural loops found.  */

int
flow_loops_find (loops, flags)
     struct loops *loops;
     int flags;
{
  int i;
  int b;
  int num_loops;
  edge e;
  sbitmap headers;
  sbitmap *dom;
  int *dfs_order;
  int *rc_order;

  /* This function cannot be repeatedly called with different
     flags to build up the loop information.  The loop tree
     must always be built if this function is called.  */
  if (! (flags & LOOP_TREE))
    abort ();

  memset (loops, 0, sizeof (*loops));

  /* Taking care of this degenerate case makes the rest of
     this code simpler.  */
  if (n_basic_blocks == 0)
    return 0;

  dfs_order = NULL;
  rc_order = NULL;

  /* Compute the dominators.  */
  dom = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
  calculate_dominance_info (NULL, dom, CDI_DOMINATORS);

  /* Count the number of loop edges (back edges).  This should be the
     same as the number of natural loops.  */
  num_loops = 0;
  for (b = 0; b < n_basic_blocks; b++)
    {
      basic_block header;

      header = BASIC_BLOCK (b);
      header->loop_depth = 0;

      for (e = header->pred; e; e = e->pred_next)
	{
	  basic_block latch = e->src;

	  /* Look for back edges where a predecessor is dominated
	     by this block.  A natural loop has a single entry
	     node (header) that dominates all the nodes in the
	     loop.  It also has a single back edge to the header
	     from a latch node.  Note that multiple natural loops
	     may share the same header.  */
	  if (b != header->index)
	    abort ();

	  if (latch != ENTRY_BLOCK_PTR && TEST_BIT (dom[latch->index], b))
	    num_loops++;
	}
    }

  if (num_loops)
    {
      /* Compute depth first search order of the CFG so that outer
	 natural loops will be found before inner natural loops.  */
      dfs_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
      rc_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
      flow_depth_first_order_compute (dfs_order, rc_order);

      /* Save CFG derived information to avoid recomputing it.  */
      loops->cfg.dom = dom;
      loops->cfg.dfs_order = dfs_order;
      loops->cfg.rc_order = rc_order;

      /* Allocate loop structures.  */
      loops->array
	= (struct loop *) xcalloc (num_loops, sizeof (struct loop));

      headers = sbitmap_alloc (n_basic_blocks);
      sbitmap_zero (headers);

      loops->shared_headers = sbitmap_alloc (n_basic_blocks);
      sbitmap_zero (loops->shared_headers);

      /* Find and record information about all the natural loops
	 in the CFG.  */
      num_loops = 0;
      for (b = 0; b < n_basic_blocks; b++)
	{
	  basic_block header;

	  /* Search the nodes of the CFG in reverse completion order
	     so that we can find outer loops first.  */
	  header = BASIC_BLOCK (rc_order[b]);

	  /* Look for all the possible latch blocks for this header.  */
	  for (e = header->pred; e; e = e->pred_next)
	    {
	      basic_block latch = e->src;

	      /* Look for back edges where a predecessor is dominated
		 by this block.  A natural loop has a single entry
		 node (header) that dominates all the nodes in the
		 loop.  It also has a single back edge to the header
		 from a latch node.  Note that multiple natural loops
		 may share the same header.  */
	      if (latch != ENTRY_BLOCK_PTR
		  && TEST_BIT (dom[latch->index], header->index))
		{
		  struct loop *loop;

		  loop = loops->array + num_loops;

		  loop->header = header;
		  loop->latch = latch;
		  loop->num = num_loops;

		  num_loops++;
		}
	    }
	}

      for (i = 0; i < num_loops; i++)
	{
	  struct loop *loop = &loops->array[i];

	  /* Keep track of blocks that are loop headers so
	     that we can tell which loops should be merged.  */
	  if (TEST_BIT (headers, loop->header->index))
	    SET_BIT (loops->shared_headers, loop->header->index);
	  SET_BIT (headers, loop->header->index);

	  /* Find nodes contained within the loop.  */
	  loop->nodes = sbitmap_alloc (n_basic_blocks);
	  loop->num_nodes
	    = flow_loop_nodes_find (loop->header, loop->latch, loop->nodes);

	  /* Compute first and last blocks within the loop.
	     These are often the same as the loop header and
	     loop latch respectively, but this is not always
	     the case.  */
	  loop->first
	    = BASIC_BLOCK (sbitmap_first_set_bit (loop->nodes));
	  loop->last
	    = BASIC_BLOCK (sbitmap_last_set_bit (loop->nodes));

	  flow_loop_scan (loops, loop, flags);
	}

      /* Natural loops with shared headers may either be disjoint or
	 nested.  Disjoint loops with shared headers cannot be inner
	 loops and should be merged.  For now just mark loops that share
	 headers.  */
      for (i = 0; i < num_loops; i++)
	if (TEST_BIT (loops->shared_headers, loops->array[i].header->index))
	  loops->array[i].shared = 1;

      sbitmap_free (headers);
    }

  loops->num = num_loops;

  /* Build the loop hierarchy tree.  */
  flow_loops_tree_build (loops);

  /* Assign the loop nesting depth and enclosed loop level for each
     loop.  */
  loops->levels = flow_loops_level_compute (loops);

  return num_loops;
}
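/* For illustration, a typical client asks for the loop tree plus the
   entry and exit edges, dumps the result, and frees it when done:

       struct loops loops;

       if (flow_loops_find (&loops, LOOP_TREE | LOOP_ENTRY_EDGES
				    | LOOP_EXIT_EDGES))
	 flow_loops_dump (&loops, rtl_dump_file, NULL, 1);
       ...
       flow_loops_free (&loops);  */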
/* Update the information regarding the loops in the CFG
   specified by LOOPS.  */

int
flow_loops_update (loops, flags)
     struct loops *loops;
     int flags;
{
  /* One day we may want to update the current loop data.  For now
     throw away the old stuff and rebuild what we need.  */
  if (loops->array)
    flow_loops_free (loops);

  return flow_loops_find (loops, flags);
}
/* Return non-zero if edge E enters header of LOOP from outside of LOOP.  */

int
flow_loop_outside_edge_p (loop, e)
     const struct loop *loop;
     edge e;
{
  if (e->dest != loop->header)
    abort ();
  return (e->src == ENTRY_BLOCK_PTR)
    || ! TEST_BIT (loop->nodes, e->src->index);
}
/* Clear LOG_LINKS fields of insns in a chain.
   Also clear the global_live_at_{start,end} fields of the basic block
   structures.  */

static void
clear_log_links (insns)
     rtx insns;
{
  rtx i;
  int b;

  for (i = insns; i; i = NEXT_INSN (i))
    if (INSN_P (i))
      LOG_LINKS (i) = 0;

  for (b = 0; b < n_basic_blocks; b++)
    {
      basic_block bb = BASIC_BLOCK (b);

      bb->global_live_at_start = NULL;
      bb->global_live_at_end = NULL;
    }

  ENTRY_BLOCK_PTR->global_live_at_end = NULL;
  EXIT_BLOCK_PTR->global_live_at_start = NULL;
}
/* Given a register bitmap, turn on the bits in a HARD_REG_SET that
   correspond to the hard registers, if any, set in that map.  This
   could be done far more efficiently by having all sorts of special-cases
   with moving single words, but probably isn't worth the trouble.  */

void
reg_set_to_hard_reg_set (to, from)
     HARD_REG_SET *to;
     bitmap from;
{
  int i;

  EXECUTE_IF_SET_IN_BITMAP
    (from, 0, i,
     {
       if (i >= FIRST_PSEUDO_REGISTER)
	 return;
       SET_HARD_REG_BIT (*to, i);
     });
}
/* Called once at initialization time.  */

void
init_flow ()
{
  static int initialized;

  if (!initialized)
    {
      gcc_obstack_init (&flow_obstack);
      flow_firstobj = (char *) obstack_alloc (&flow_obstack, 0);
      initialized = 1;
    }
  else
    {
      obstack_free (&flow_obstack, flow_firstobj);
      flow_firstobj = (char *) obstack_alloc (&flow_obstack, 0);
    }
}