/* Data flow analysis for GNU compiler.
   Copyright (C) 1987, 88, 92-98, 1999 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
/* This file contains the data flow analysis pass of the compiler.  It
   computes data flow information which tells combine_instructions
   which insns to consider combining and controls register allocation.

   Additional data flow information that is too bulky to record is
   generated during the analysis, and is used at that time to create
   autoincrement and autodecrement addressing.

   The first step is dividing the function into basic blocks.
   find_basic_blocks does this.  Then life_analysis determines
   where each register is live and where it is dead.

   ** find_basic_blocks **

   find_basic_blocks divides the current function's rtl into basic
   blocks and constructs the CFG.  The blocks are recorded in the
   basic_block_info array; the CFG exists in the edge structures
   referenced by the blocks.  (An illustrative sketch of walking these
   structures follows the file-scope declarations below.)

   find_basic_blocks also finds any unreachable loops and deletes them.

   ** life_analysis **

   life_analysis is called immediately after find_basic_blocks.
   It uses the basic block information to determine where each
   hard or pseudo register is live.

   ** live-register info **

   The information about where each register is live is in two parts:
   the REG_NOTES of insns, and the vector basic_block->global_live_at_start.

   basic_block->global_live_at_start has an element for each basic
   block, and the element is a bit-vector with a bit for each hard or
   pseudo register.  The bit is 1 if the register is live at the
   beginning of the basic block.

   Two types of elements can be added to an insn's REG_NOTES.
   A REG_DEAD note is added to an insn's REG_NOTES for any register
   that meets both of two conditions:  The value in the register is not
   needed in subsequent insns and the insn does not replace the value in
   the register (in the case of multi-word hard registers, the value in
   each register must be replaced by the insn to avoid a REG_DEAD note).

   In the vast majority of cases, an object in a REG_DEAD note will be
   used somewhere in the insn.  The (rare) exception to this is if an
   insn uses a multi-word hard register and only some of the registers are
   needed in subsequent insns.  In that case, REG_DEAD notes will be
   provided for those hard registers that are not subsequently needed.
   Partial REG_DEAD notes of this type do not occur when an insn sets
   only some of the hard registers used in such a multi-word operand;
   omitting REG_DEAD notes for objects stored in an insn is optional and
   the desire to do so does not justify the complexity of the partial
   REG_DEAD notes.

   REG_UNUSED notes are added for each register that is set by the insn
   but is unused subsequently (if every register set by the insn is unused
   and the insn does not reference memory or have some other side-effect,
   the insn is deleted instead).  If only part of a multi-word hard
   register is used in a subsequent insn, REG_UNUSED notes are made for
   the parts that will not be used.

   To determine which registers are live after any insn, one can
   start from the beginning of the basic block and scan insns, noting
   which registers are set by each insn and which die there.  (An
   illustrative sketch of this scan follows this comment.)

   ** Other actions of life_analysis **

   life_analysis sets up the LOG_LINKS fields of insns because the
   information needed to do so is readily available.

   life_analysis deletes insns whose only effect is to store a value
   that is never used.

   life_analysis notices cases where a reference to a register as
   a memory address can be combined with a preceding or following
   incrementation or decrementation of the register.  The separate
   instruction to increment or decrement is deleted and the address
   is changed to a POST_INC or similar rtx; for example, a load from
   (mem (reg 5)) followed by an increment of reg 5 becomes a load from
   (mem (post_inc (reg 5))).

   Each time an incrementing or decrementing address is created,
   a REG_INC element is added to the insn's REG_NOTES list.

   life_analysis fills in certain vectors containing information about
   register usage: reg_n_refs, reg_n_deaths, reg_n_sets, reg_live_length,
   reg_n_calls_crossed and reg_basic_block.

   life_analysis sets current_function_sp_is_unchanging if the function
   doesn't modify the stack pointer.  */
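#if 0
/* A minimal illustrative sketch, kept under "#if 0" and never compiled:
   compute the registers live just after INSN by the forward scan described
   in the comment above.  Start from the registers live at the head of
   INSN's block, kill registers named in REG_DEAD notes, add registers set
   by each insn, and kill registers named in REG_UNUSED notes.  Only simple
   single-register SET patterns are handled; PARALLELs, CLOBBERs and
   multi-word hard registers are ignored for brevity.  */

static void
sketch_live_after_insn (bb, insn, live)
     basic_block bb;
     rtx insn;
     regset live;
{
  rtx x, note;

  COPY_REG_SET (live, bb->global_live_at_start);

  for (x = bb->head; ; x = NEXT_INSN (x))
    {
      if (GET_RTX_CLASS (GET_CODE (x)) == 'i')
	{
	  /* Registers that die at X were live before it but not after.  */
	  for (note = REG_NOTES (x); note; note = XEXP (note, 1))
	    if (REG_NOTE_KIND (note) == REG_DEAD)
	      CLEAR_REGNO_REG_SET (live, REGNO (XEXP (note, 0)));

	  /* A register set by X is live afterward ...  */
	  if (GET_CODE (PATTERN (x)) == SET
	      && GET_CODE (SET_DEST (PATTERN (x))) == REG)
	    SET_REGNO_REG_SET (live, REGNO (SET_DEST (PATTERN (x))));

	  /* ... unless the set is marked unused.  */
	  for (note = REG_NOTES (x); note; note = XEXP (note, 1))
	    if (REG_NOTE_KIND (note) == REG_UNUSED)
	      CLEAR_REGNO_REG_SET (live, REGNO (XEXP (note, 0)));
	}

      if (x == insn)
	break;
    }
}
#endif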
/* TODO:

   Split out from life_analysis:
	- local property discovery (bb->local_live, bb->local_set)
	- global property computation
	- pre/post modify transformation
*/
#include "basic-block.h"
#include "insn-config.h"
#include "hard-reg-set.h"
#include "function.h"
#include "insn-flags.h"
#include "resource.h"

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free
/* EXIT_IGNORE_STACK should be nonzero if, when returning from a function,
   the stack pointer does not matter.  The value is tested only in
   functions that have frame pointers.
   No definition is equivalent to always zero.  */
#ifndef EXIT_IGNORE_STACK
#define EXIT_IGNORE_STACK 0
#endif
/* The contents of the current function definition are allocated
   in this obstack, and all are freed at the end of the function.
   For top-level functions, this is temporary_obstack.
   Separate obstacks are made for nested functions.  */

extern struct obstack *function_obstack;
/* Number of basic blocks in the current function.  */

int n_basic_blocks;

/* The basic block array.  */

varray_type basic_block_info;
/* The special entry and exit blocks.  */

struct basic_block_def entry_exit_blocks[2] =
{
  {
    NULL,			/* head */
    NULL,			/* end */
    NULL,			/* pred */
    NULL,			/* succ */
    NULL,			/* local_set */
    NULL,			/* global_live_at_start */
    NULL,			/* global_live_at_end */
    NULL,			/* aux */
    ENTRY_BLOCK,		/* index */
    0				/* loop_depth */
  },
  {
    NULL,			/* head */
    NULL,			/* end */
    NULL,			/* pred */
    NULL,			/* succ */
    NULL,			/* local_set */
    NULL,			/* global_live_at_start */
    NULL,			/* global_live_at_end */
    NULL,			/* aux */
    EXIT_BLOCK,			/* index */
    0				/* loop_depth */
  }
};
/* Nonzero if the second flow pass has completed.  */
int flow2_completed;

/* Maximum register number used in this function, plus one.  */
int max_regno;

/* Indexed by n, giving various register information.  */

varray_type reg_n_info;

/* Size of the reg_n_info table.  */

unsigned int reg_n_max;
/* Element N is the next insn that uses (hard or pseudo) register number N
   within the current basic block; or zero, if there is no such insn.
   This is valid only during the final backward scan in propagate_block.  */

static rtx *reg_next_use;
/* Size of a regset for the current function,
   in (1) bytes and (2) elements.  */

int regset_bytes;
int regset_size;

/* Regset of regs live when calls to `setjmp'-like functions happen.  */
/* ??? Does this exist only for the setjmp-clobbered warning message?  */

regset regs_live_at_setjmp;
/* List made of EXPR_LIST rtx's which gives pairs of pseudo registers
   that have to go in the same hard reg.
   The first two regs in the list are a pair, and the next two
   are another pair, etc.  */
rtx regs_may_share;

/* Depth within loops of basic block being scanned for lifetime analysis,
   plus one.  This is the weight attached to references to registers.  */

static int loop_depth;
/* During propagate_block, this is non-zero if the value of CC0 is live.  */

static int cc0_live;

/* During propagate_block, this contains a list of all the MEMs we are
   tracking for dead store elimination.

   ?!? Note we leak memory by not free-ing items on this list.  We need to
   write some generic routines to operate on memory lists since cse, gcse,
   loop, sched, flow and possibly other passes all need to do basically the
   same operations on these lists.  */

static rtx mem_set_list;
/* Set of registers that may be eliminable.  These are handled specially
   in updating regs_ever_live.  */

static HARD_REG_SET elim_reg_set;

/* The basic block structure for every insn, indexed by uid.  */

varray_type basic_block_for_insn;

/* The labels mentioned in non-jump rtl.  Valid during find_basic_blocks.  */
/* ??? Should probably be using LABEL_NUSES instead.  It would take a
   bit of surgery to be able to use or co-opt the routines in jump.  */

static rtx label_value_list;

/* INSN_VOLATILE (insn) is 1 if the insn refers to anything volatile.  */

#define INSN_VOLATILE(INSN) bitmap_bit_p (uid_volatile, INSN_UID (INSN))
#define SET_INSN_VOLATILE(INSN) bitmap_set_bit (uid_volatile, INSN_UID (INSN))
static bitmap uid_volatile;
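#if 0
/* A minimal illustrative sketch, kept under "#if 0" and never compiled:
   walk every basic block recorded in basic_block_info and visit its insns
   and outgoing edges -- the two structures produced by find_basic_blocks
   as described at the top of this file.  */

static void
sketch_walk_cfg ()
{
  int i;

  for (i = 0; i < n_basic_blocks; i++)
    {
      basic_block bb = BASIC_BLOCK (i);
      rtx insn;
      edge e;

      /* The insns of the block run from bb->head to bb->end inclusive.  */
      for (insn = bb->head; ; insn = NEXT_INSN (insn))
	{
	  /* ... examine INSN here ...  */
	  if (insn == bb->end)
	    break;
	}

      /* The successor edges; e->dest is EXIT_BLOCK_PTR for a return.  */
      for (e = bb->succ; e; e = e->succ_next)
	{
	  /* ... examine the edge from bb to e->dest here ...  */
	}
    }
}
#endif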
/* Forward declarations */
static int count_basic_blocks		PROTO((rtx));
static rtx find_basic_blocks_1		PROTO((rtx, rtx *));
static void create_basic_block		PROTO((int, rtx, rtx, rtx));
static void compute_bb_for_insn		PROTO((varray_type, int));
static void clear_edges			PROTO((void));
static void make_edges			PROTO((rtx, rtx *));
static void make_edge			PROTO((basic_block, basic_block, int));
static void make_label_edge		PROTO((basic_block, rtx, int));
static void mark_critical_edges	PROTO((void));

static void commit_one_edge_insertion	PROTO((edge));

static void delete_unreachable_blocks	PROTO((void));
static void delete_eh_regions		PROTO((void));
static int can_delete_note_p		PROTO((rtx));
static void delete_insn_chain		PROTO((rtx, rtx));
static int delete_block			PROTO((basic_block));
static void expunge_block		PROTO((basic_block));
static rtx flow_delete_insn		PROTO((rtx));
static int can_delete_label_p		PROTO((rtx));
static void merge_blocks_nomove		PROTO((basic_block, basic_block));
static int merge_blocks			PROTO((edge, basic_block, basic_block));
static void tidy_fallthru_edge		PROTO((edge, basic_block, basic_block));
static void calculate_loop_depth	PROTO((rtx));

static int set_noop_p			PROTO((rtx));
static int noop_move_p			PROTO((rtx));
static void notice_stack_pointer_modification PROTO ((rtx, rtx));
static void record_volatile_insns	PROTO((rtx));
static void mark_regs_live_at_end	PROTO((regset));
static void life_analysis_1		PROTO((rtx, int, int));
static void init_regset_vector		PROTO ((regset *, int,
static void propagate_block		PROTO((regset, rtx, rtx, int,
static int insn_dead_p			PROTO((rtx, regset, int, rtx));
static int libcall_dead_p		PROTO((rtx, regset, rtx, rtx));
static void mark_set_regs		PROTO((regset, regset, rtx,
static void mark_set_1			PROTO((regset, regset, rtx,
static void find_auto_inc		PROTO((regset, rtx, rtx));
static int try_pre_increment_1		PROTO((rtx));
static int try_pre_increment		PROTO((rtx, rtx, HOST_WIDE_INT));
static void mark_used_regs		PROTO((regset, regset, rtx, int, rtx));
void dump_flow_info			PROTO((FILE *));
static void dump_edge_info		PROTO((FILE *, edge, int));

static int_list_ptr alloc_int_list_node PROTO ((int_list_block **));
static int_list_ptr add_int_list_node	PROTO ((int_list_block **,
static void add_pred_succ		PROTO ((int, int, int_list_ptr *,
						int_list_ptr *, int *, int *));

static void count_reg_sets_1		PROTO ((rtx));
static void count_reg_sets		PROTO ((rtx));
static void count_reg_references	PROTO ((rtx));
static void notice_stack_pointer_modification PROTO ((rtx, rtx));
static void invalidate_mems_from_autoinc PROTO ((rtx));
static void maybe_remove_dead_notes	PROTO ((rtx, rtx, rtx, rtx,
static int maybe_add_dead_note_use	PROTO ((rtx, rtx));
static int maybe_add_dead_note		PROTO ((rtx, rtx, rtx));
static int sets_reg_or_subreg		PROTO ((rtx, rtx));
static void update_n_sets		PROTO ((rtx, int));
static void new_insn_dead_notes	PROTO ((rtx, rtx, rtx, rtx, rtx, rtx));
void verify_flow_info			PROTO ((void));
/* Find basic blocks of the current function.
   F is the first insn of the function and NREGS the number of register
   numbers in use.  */

void
find_basic_blocks (f, nregs, file, do_cleanup)
     rtx f;
     int nregs ATTRIBUTE_UNUSED;
     FILE *file ATTRIBUTE_UNUSED;
     int do_cleanup;
{
  rtx *bb_eh_end;
  int max_uid;
  int i;
  /* Flush out existing data.  */
  if (basic_block_info != NULL)
    {
      clear_edges ();

      /* Clear bb->aux on all extant basic blocks.  We'll use this as a
	 tag for reuse during create_basic_block, just in case some pass
	 copies around basic block notes improperly.  */
      for (i = 0; i < n_basic_blocks; ++i)
	BASIC_BLOCK (i)->aux = NULL;

      VARRAY_FREE (basic_block_info);
    }

  n_basic_blocks = count_basic_blocks (f);

  /* Size the basic block table.  The actual structures will be allocated
     by find_basic_blocks_1, since we want to keep the structure pointers
     stable across calls to find_basic_blocks.  */
  /* ??? This whole issue would be much simpler if we called find_basic_blocks
     exactly once, and thereafter we don't have a single long chain of
     instructions at all until close to the end of compilation when we
     actually lay them out.  */

  VARRAY_BB_INIT (basic_block_info, n_basic_blocks, "basic_block_info");

  /* An array to record the active exception region at the end of each
     basic block.  It is filled in by find_basic_blocks_1 for make_edges.  */
  bb_eh_end = (rtx *) alloca (n_basic_blocks * sizeof (rtx));

  label_value_list = find_basic_blocks_1 (f, bb_eh_end);

  /* Record the block to which an insn belongs.  */
  /* ??? This should be done another way, by which (perhaps) a label is
     tagged directly with the basic block that it starts.  It is used for
     more than that currently, but IMO that is the only valid use.  */

  max_uid = get_max_uid ();

  /* Leave space for insns life_analysis makes in some cases for auto-inc.
     These cases are rare, so we don't need too much space.  */
  max_uid += max_uid / 10;

  VARRAY_BB_INIT (basic_block_for_insn, max_uid, "basic_block_for_insn");
  compute_bb_for_insn (basic_block_for_insn, max_uid);

  /* Discover the edges of our cfg.  */

  make_edges (label_value_list, bb_eh_end);

  /* Delete unreachable blocks.  */

  delete_unreachable_blocks ();

  /* Mark critical edges.  */

  mark_critical_edges ();

  /* Discover the loop depth at the start of each basic block to aid
     register allocation.  */
  calculate_loop_depth (f);

  /* Kill the data we won't maintain.  */
  label_value_list = 0;

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
}
434 /* Count the basic blocks of the function. */
437 count_basic_blocks (f
)
441 register RTX_CODE prev_code
;
442 register int count
= 0;
444 int call_had_abnormal_edge
= 0;
445 rtx prev_call
= NULL_RTX
;
447 prev_code
= JUMP_INSN
;
448 for (insn
= f
; insn
; insn
= NEXT_INSN (insn
))
450 register RTX_CODE code
= GET_CODE (insn
);
452 if (code
== CODE_LABEL
453 || (GET_RTX_CLASS (code
) == 'i'
454 && (prev_code
== JUMP_INSN
455 || prev_code
== BARRIER
456 || (prev_code
== CALL_INSN
&& call_had_abnormal_edge
))))
460 /* If the previous insn was a call that did not create an
461 abnormal edge, we want to add a nop so that the CALL_INSN
462 itself is not at basic_block_end. This allows us to
463 easily distinguish between normal calls and those which
464 create abnormal edges in the flow graph. */
466 if (count
> 0 && prev_call
!= 0 && !call_had_abnormal_edge
)
468 rtx nop
= gen_rtx_USE (VOIDmode
, const0_rtx
);
469 emit_insn_after (nop
, prev_call
);
473 /* Record whether this call created an edge. */
474 if (code
== CALL_INSN
)
476 rtx note
= find_reg_note (insn
, REG_EH_REGION
, NULL_RTX
);
477 int region
= (note
? XINT (XEXP (note
, 0), 0) : 1);
479 call_had_abnormal_edge
= 0;
481 /* If there is a specified EH region, we have an edge. */
482 if (eh_region
&& region
> 0)
483 call_had_abnormal_edge
= 1;
486 /* If there is a nonlocal goto label and the specified
487 region number isn't -1, we have an edge. (0 means
488 no throw, but might have a nonlocal goto). */
489 if (nonlocal_goto_handler_labels
&& region
>= 0)
490 call_had_abnormal_edge
= 1;
493 else if (code
!= NOTE
)
494 prev_call
= NULL_RTX
;
498 else if (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_EH_REGION_BEG
)
500 else if (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_EH_REGION_END
)
505 /* The rest of the compiler works a bit smoother when we don't have to
506 check for the edge case of do-nothing functions with no basic blocks. */
509 emit_insn (gen_rtx_USE (VOIDmode
, const0_rtx
));
516 /* Find all basic blocks of the function whose first insn is F.
517 Store the correct data in the tables that describe the basic blocks,
518 set up the chains of references for each CODE_LABEL, and
519 delete any entire basic blocks that cannot be reached.
521 NONLOCAL_LABEL_LIST is a list of non-local labels in the function. Blocks
522 that are otherwise unreachable may be reachable with a non-local goto.
524 BB_EH_END is an array in which we record the list of exception regions
525 active at the end of every basic block. */
528 find_basic_blocks_1 (f
, bb_eh_end
)
532 register rtx insn
, next
;
533 int call_has_abnormal_edge
= 0;
535 rtx bb_note
= NULL_RTX
;
536 rtx eh_list
= NULL_RTX
;
537 rtx label_value_list
= NULL_RTX
;
541 /* We process the instructions in a slightly different way than we did
542 previously. This is so that we see a NOTE_BASIC_BLOCK after we have
543 closed out the previous block, so that it gets attached at the proper
544 place. Since this form should be equivalent to the previous,
545 find_basic_blocks_0 continues to use the old form as a check. */
547 for (insn
= f
; insn
; insn
= next
)
549 enum rtx_code code
= GET_CODE (insn
);
551 next
= NEXT_INSN (insn
);
553 if (code
== CALL_INSN
)
555 /* Record whether this call created an edge. */
556 rtx note
= find_reg_note (insn
, REG_EH_REGION
, NULL_RTX
);
557 int region
= (note
? XINT (XEXP (note
, 0), 0) : 1);
558 call_has_abnormal_edge
= 0;
560 /* If there is an EH region, we have an edge. */
561 if (eh_list
&& region
> 0)
562 call_has_abnormal_edge
= 1;
565 /* If there is a nonlocal goto label and the specified
566 region number isn't -1, we have an edge. (0 means
567 no throw, but might have a nonlocal goto). */
568 if (nonlocal_goto_handler_labels
&& region
>= 0)
569 call_has_abnormal_edge
= 1;
577 int kind
= NOTE_LINE_NUMBER (insn
);
579 /* Keep a LIFO list of the currently active exception notes. */
580 if (kind
== NOTE_INSN_EH_REGION_BEG
)
581 eh_list
= gen_rtx_INSN_LIST (VOIDmode
, insn
, eh_list
);
582 else if (kind
== NOTE_INSN_EH_REGION_END
)
583 eh_list
= XEXP (eh_list
, 1);
585 /* Look for basic block notes with which to keep the
586 basic_block_info pointers stable. Unthread the note now;
587 we'll put it back at the right place in create_basic_block.
588 Or not at all if we've already found a note in this block. */
589 else if (kind
== NOTE_INSN_BASIC_BLOCK
)
591 if (bb_note
== NULL_RTX
)
593 next
= flow_delete_insn (insn
);
600 /* A basic block starts at a label. If we've closed one off due
601 to a barrier or some such, no need to do it again. */
602 if (head
!= NULL_RTX
)
604 /* While we now have edge lists with which other portions of
605 the compiler might determine a call ending a basic block
606 does not imply an abnormal edge, it will be a bit before
607 everything can be updated. So continue to emit a noop at
608 the end of such a block. */
609 if (GET_CODE (end
) == CALL_INSN
)
611 rtx nop
= gen_rtx_USE (VOIDmode
, const0_rtx
);
612 end
= emit_insn_after (nop
, end
);
615 bb_eh_end
[i
] = eh_list
;
616 create_basic_block (i
++, head
, end
, bb_note
);
623 /* A basic block ends at a jump. */
624 if (head
== NULL_RTX
)
	  /* ??? Make a special check for table jumps.  The way this
	     happens is truly and amazingly gross.  We are about to
	     create a basic block that contains just a code label and
	     an addr*vec jump insn.  Worse, an addr_diff_vec creates
	     its own natural loop.

	     Prevent this bit of brain damage, pasting things together
	     correctly in make_edges.

	     The correct solution involves emitting the table directly
	     on the tablejump instruction as a note, or JUMP_LABEL.  */
640 if (GET_CODE (PATTERN (insn
)) == ADDR_VEC
641 || GET_CODE (PATTERN (insn
)) == ADDR_DIFF_VEC
)
649 goto new_bb_inclusive
;
652 /* A basic block ends at a barrier. It may be that an unconditional
653 jump already closed the basic block -- no need to do it again. */
654 if (head
== NULL_RTX
)
657 /* While we now have edge lists with which other portions of the
658 compiler might determine a call ending a basic block does not
659 imply an abnormal edge, it will be a bit before everything can
660 be updated. So continue to emit a noop at the end of such a
662 if (GET_CODE (end
) == CALL_INSN
)
664 rtx nop
= gen_rtx_USE (VOIDmode
, const0_rtx
);
665 end
= emit_insn_after (nop
, end
);
667 goto new_bb_exclusive
;
670 /* A basic block ends at a call that can either throw or
671 do a non-local goto. */
672 if (call_has_abnormal_edge
)
675 if (head
== NULL_RTX
)
680 bb_eh_end
[i
] = eh_list
;
681 create_basic_block (i
++, head
, end
, bb_note
);
682 head
= end
= NULL_RTX
;
689 if (GET_RTX_CLASS (code
) == 'i')
691 if (head
== NULL_RTX
)
698 if (GET_RTX_CLASS (code
) == 'i')
702 /* Make a list of all labels referred to other than by jumps
703 (which just don't have the REG_LABEL notes).
705 Make a special exception for labels followed by an ADDR*VEC,
706 as this would be a part of the tablejump setup code.
708 Make a special exception for the eh_return_stub_label, which
709 we know isn't part of any otherwise visible control flow. */
711 for (note
= REG_NOTES (insn
); note
; note
= XEXP (note
, 1))
712 if (REG_NOTE_KIND (note
) == REG_LABEL
)
714 rtx lab
= XEXP (note
, 0), next
;
716 if (lab
== eh_return_stub_label
)
718 else if ((next
= next_nonnote_insn (lab
)) != NULL
719 && GET_CODE (next
) == JUMP_INSN
720 && (GET_CODE (PATTERN (next
)) == ADDR_VEC
721 || GET_CODE (PATTERN (next
)) == ADDR_DIFF_VEC
))
725 = gen_rtx_EXPR_LIST (VOIDmode
, XEXP (note
, 0),
731 if (head
!= NULL_RTX
)
733 bb_eh_end
[i
] = eh_list
;
734 create_basic_block (i
++, head
, end
, bb_note
);
737 if (i
!= n_basic_blocks
)
740 return label_value_list
;
743 /* Create a new basic block consisting of the instructions between
744 HEAD and END inclusive. Reuses the note and basic block struct
745 in BB_NOTE, if any. */
748 create_basic_block (index
, head
, end
, bb_note
)
750 rtx head
, end
, bb_note
;
755 && ! RTX_INTEGRATED_P (bb_note
)
756 && (bb
= NOTE_BASIC_BLOCK (bb_note
)) != NULL
759 /* If we found an existing note, thread it back onto the chain. */
761 if (GET_CODE (head
) == CODE_LABEL
)
762 add_insn_after (bb_note
, head
);
765 add_insn_before (bb_note
, head
);
771 /* Otherwise we must create a note and a basic block structure.
772 Since we allow basic block structs in rtl, give the struct
773 the same lifetime by allocating it off the function obstack
774 rather than using malloc. */
776 bb
= (basic_block
) obstack_alloc (function_obstack
, sizeof (*bb
));
777 memset (bb
, 0, sizeof (*bb
));
779 if (GET_CODE (head
) == CODE_LABEL
)
780 bb_note
= emit_note_after (NOTE_INSN_BASIC_BLOCK
, head
);
783 bb_note
= emit_note_before (NOTE_INSN_BASIC_BLOCK
, head
);
786 NOTE_BASIC_BLOCK (bb_note
) = bb
;
789 /* Always include the bb note in the block. */
790 if (NEXT_INSN (end
) == bb_note
)
796 BASIC_BLOCK (index
) = bb
;
798 /* Tag the block so that we know it has been used when considering
799 other basic block notes. */
803 /* Records the basic block struct in BB_FOR_INSN, for every instruction
804 indexed by INSN_UID. MAX is the size of the array. */
807 compute_bb_for_insn (bb_for_insn
, max
)
808 varray_type bb_for_insn
;
813 for (i
= 0; i
< n_basic_blocks
; ++i
)
815 basic_block bb
= BASIC_BLOCK (i
);
822 int uid
= INSN_UID (insn
);
824 VARRAY_BB (bb_for_insn
, uid
) = bb
;
827 insn
= NEXT_INSN (insn
);
832 /* Free the memory associated with the edge structures. */
840 for (i
= 0; i
< n_basic_blocks
; ++i
)
842 basic_block bb
= BASIC_BLOCK (i
);
844 for (e
= bb
->succ
; e
; e
= n
)
854 for (e
= ENTRY_BLOCK_PTR
->succ
; e
; e
= n
)
860 ENTRY_BLOCK_PTR
->succ
= 0;
861 EXIT_BLOCK_PTR
->pred
= 0;
864 /* Identify the edges between basic blocks.
866 NONLOCAL_LABEL_LIST is a list of non-local labels in the function. Blocks
867 that are otherwise unreachable may be reachable with a non-local goto.
869 BB_EH_END is an array indexed by basic block number in which we record
870 the list of exception regions active at the end of the basic block. */
873 make_edges (label_value_list
, bb_eh_end
)
874 rtx label_value_list
;
878 eh_nesting_info
*eh_nest_info
= init_eh_nesting_info ();
880 /* Assume no computed jump; revise as we create edges. */
881 current_function_has_computed_jump
= 0;
883 /* By nature of the way these get numbered, block 0 is always the entry. */
884 make_edge (ENTRY_BLOCK_PTR
, BASIC_BLOCK (0), EDGE_FALLTHRU
);
886 for (i
= 0; i
< n_basic_blocks
; ++i
)
888 basic_block bb
= BASIC_BLOCK (i
);
889 rtx insn
, x
, eh_list
;
891 int force_fallthru
= 0;
893 /* If we have asynchronous exceptions, scan the notes for all exception
894 regions active in the block. In the normal case, we only need the
895 one active at the end of the block, which is bb_eh_end[i]. */
897 eh_list
= bb_eh_end
[i
];
898 if (asynchronous_exceptions
)
900 for (insn
= bb
->end
; insn
!= bb
->head
; insn
= PREV_INSN (insn
))
902 if (GET_CODE (insn
) == NOTE
903 && NOTE_LINE_NUMBER (insn
) == NOTE_INSN_EH_REGION_END
)
904 eh_list
= gen_rtx_INSN_LIST (VOIDmode
, insn
, eh_list
);
908 /* Now examine the last instruction of the block, and discover the
909 ways we can leave the block. */
912 code
= GET_CODE (insn
);
915 if (code
== JUMP_INSN
)
919 /* ??? Recognize a tablejump and do the right thing. */
920 if ((tmp
= JUMP_LABEL (insn
)) != NULL_RTX
921 && (tmp
= NEXT_INSN (tmp
)) != NULL_RTX
922 && GET_CODE (tmp
) == JUMP_INSN
923 && (GET_CODE (PATTERN (tmp
)) == ADDR_VEC
924 || GET_CODE (PATTERN (tmp
)) == ADDR_DIFF_VEC
))
929 if (GET_CODE (PATTERN (tmp
)) == ADDR_VEC
)
930 vec
= XVEC (PATTERN (tmp
), 0);
932 vec
= XVEC (PATTERN (tmp
), 1);
934 for (j
= GET_NUM_ELEM (vec
) - 1; j
>= 0; --j
)
935 make_label_edge (bb
, XEXP (RTVEC_ELT (vec
, j
), 0), 0);
937 /* Some targets (eg, ARM) emit a conditional jump that also
938 contains the out-of-range target. Scan for these and
939 add an edge if necessary. */
940 if ((tmp
= single_set (insn
)) != NULL
941 && SET_DEST (tmp
) == pc_rtx
942 && GET_CODE (SET_SRC (tmp
)) == IF_THEN_ELSE
943 && GET_CODE (XEXP (SET_SRC (tmp
), 2)) == LABEL_REF
)
944 make_label_edge (bb
, XEXP (XEXP (SET_SRC (tmp
), 2), 0), 0);
946 #ifdef CASE_DROPS_THROUGH
947 /* Silly VAXen. The ADDR_VEC is going to be in the way of
948 us naturally detecting fallthru into the next block. */
953 /* If this is a computed jump, then mark it as reaching
954 everything on the label_value_list and forced_labels list. */
955 else if (computed_jump_p (insn
))
957 current_function_has_computed_jump
= 1;
959 for (x
= label_value_list
; x
; x
= XEXP (x
, 1))
960 make_label_edge (bb
, XEXP (x
, 0), EDGE_ABNORMAL
);
962 for (x
= forced_labels
; x
; x
= XEXP (x
, 1))
963 make_label_edge (bb
, XEXP (x
, 0), EDGE_ABNORMAL
);
      /* A return insn creates an edge to the exit block.  */
967 else if (returnjump_p (insn
))
968 make_edge (bb
, EXIT_BLOCK_PTR
, 0);
970 /* Otherwise, we have a plain conditional or unconditional jump. */
973 if (! JUMP_LABEL (insn
))
975 make_label_edge (bb
, JUMP_LABEL (insn
), 0);
979 /* If this is a CALL_INSN, then mark it as reaching the active EH
980 handler for this CALL_INSN. If we're handling asynchronous
981 exceptions then any insn can reach any of the active handlers.
983 Also mark the CALL_INSN as reaching any nonlocal goto handler. */
985 if (code
== CALL_INSN
|| asynchronous_exceptions
)
987 int is_call
= (code
== CALL_INSN
? EDGE_ABNORMAL_CALL
: 0);
988 handler_info
**handler_list
;
993 eh_region
= NOTE_BLOCK_NUMBER (XEXP (eh_list
, 0));
995 num
= reachable_handlers (eh_region
, eh_nest_info
,
996 insn
, &handler_list
);
997 for ( ; num
> 0; num
--)
999 make_label_edge (bb
, handler_list
[num
- 1]->handler_label
,
1000 EDGE_ABNORMAL
| EDGE_EH
| is_call
);
1003 if (code
== CALL_INSN
&& nonlocal_goto_handler_labels
)
1005 /* ??? This could be made smarter: in some cases it's possible
1006 to tell that certain calls will not do a nonlocal goto.
1008 For example, if the nested functions that do the nonlocal
1009 gotos do not have their addresses taken, then only calls to
1010 those functions or to other nested functions that use them
1011 could possibly do nonlocal gotos. */
1012 /* We do know that a REG_EH_REGION note with a value less
1013 than 0 is guaranteed not to perform a non-local goto. */
1014 rtx note
= find_reg_note (insn
, REG_EH_REGION
, NULL_RTX
);
1015 if (!note
|| XINT (XEXP (note
, 0), 0) >= 0)
1016 for (x
= nonlocal_goto_handler_labels
; x
; x
= XEXP (x
, 1))
1017 make_label_edge (bb
, XEXP (x
, 0),
1018 EDGE_ABNORMAL
| EDGE_ABNORMAL_CALL
);
1022 /* We know something about the structure of the function __throw in
1023 libgcc2.c. It is the only function that ever contains eh_stub
1024 labels. It modifies its return address so that the last block
1025 returns to one of the eh_stub labels within it. So we have to
1026 make additional edges in the flow graph. */
1027 if (i
+ 1 == n_basic_blocks
&& eh_return_stub_label
!= 0)
1028 make_label_edge (bb
, eh_return_stub_label
, EDGE_EH
);
1030 /* Find out if we can drop through to the next block. */
1031 insn
= next_nonnote_insn (insn
);
1032 if (!insn
|| (i
+ 1 == n_basic_blocks
&& force_fallthru
))
1033 make_edge (bb
, EXIT_BLOCK_PTR
, EDGE_FALLTHRU
);
1034 else if (i
+ 1 < n_basic_blocks
)
1036 rtx tmp
= BLOCK_HEAD (i
+ 1);
1037 if (GET_CODE (tmp
) == NOTE
)
1038 tmp
= next_nonnote_insn (tmp
);
1039 if (force_fallthru
|| insn
== tmp
)
1040 make_edge (bb
, BASIC_BLOCK (i
+ 1), EDGE_FALLTHRU
);
1043 free_eh_nesting_info (eh_nest_info
);
1046 /* Create an edge between two basic blocks. FLAGS are auxiliary information
1047 about the edge that is accumulated between calls. */
1050 make_edge (src
, dst
, flags
)
1051 basic_block src
, dst
;
1056 /* Make sure we don't add duplicate edges. */
1058 for (e
= src
->succ
; e
; e
= e
->succ_next
)
1065 e
= (edge
) xcalloc (1, sizeof (*e
));
1067 e
->succ_next
= src
->succ
;
1068 e
->pred_next
= dst
->pred
;
1077 /* Create an edge from a basic block to a label. */
1080 make_label_edge (src
, label
, flags
)
1085 if (GET_CODE (label
) != CODE_LABEL
)
1088 /* If the label was never emitted, this insn is junk, but avoid a
1089 crash trying to refer to BLOCK_FOR_INSN (label). This can happen
1090 as a result of a syntax error and a diagnostic has already been
1093 if (INSN_UID (label
) == 0)
1096 make_edge (src
, BLOCK_FOR_INSN (label
), flags
);
1099 /* Identify critical edges and set the bits appropriately. */
1101 mark_critical_edges ()
1103 int i
, n
= n_basic_blocks
;
1106 /* We begin with the entry block. This is not terribly important now,
1107 but could be if a front end (Fortran) implemented alternate entry
1109 bb
= ENTRY_BLOCK_PTR
;
1116 /* (1) Critical edges must have a source with multiple successors. */
1117 if (bb
->succ
&& bb
->succ
->succ_next
)
1119 for (e
= bb
->succ
; e
; e
= e
->succ_next
)
1121 /* (2) Critical edges must have a destination with multiple
1122 predecessors. Note that we know there is at least one
1123 predecessor -- the edge we followed to get here. */
1124 if (e
->dest
->pred
->pred_next
)
1125 e
->flags
|= EDGE_CRITICAL
;
1127 e
->flags
&= ~EDGE_CRITICAL
;
1132 for (e
= bb
->succ
; e
; e
= e
->succ_next
)
1133 e
->flags
&= ~EDGE_CRITICAL
;
1138 bb
= BASIC_BLOCK (i
);
1142 /* Split a (typically critical) edge. Return the new block.
1143 Abort on abnormal edges.
1145 ??? The code generally expects to be called on critical edges.
1146 The case of a block ending in an unconditional jump to a
1147 block with multiple predecessors is not handled optimally. */
1150 split_edge (edge_in
)
1153 basic_block old_pred
, bb
, old_succ
;
1158 /* Abnormal edges cannot be split. */
1159 if ((edge_in
->flags
& EDGE_ABNORMAL
) != 0)
1162 old_pred
= edge_in
->src
;
1163 old_succ
= edge_in
->dest
;
1165 /* Remove the existing edge from the destination's pred list. */
1168 for (pp
= &old_succ
->pred
; *pp
!= edge_in
; pp
= &(*pp
)->pred_next
)
1170 *pp
= edge_in
->pred_next
;
1171 edge_in
->pred_next
= NULL
;
1174 /* Create the new structures. */
1175 bb
= (basic_block
) obstack_alloc (function_obstack
, sizeof (*bb
));
1176 edge_out
= (edge
) xcalloc (1, sizeof (*edge_out
));
1178 memset (bb
, 0, sizeof (*bb
));
1179 bb
->local_set
= OBSTACK_ALLOC_REG_SET (function_obstack
);
1180 bb
->global_live_at_start
= OBSTACK_ALLOC_REG_SET (function_obstack
);
1181 bb
->global_live_at_end
= OBSTACK_ALLOC_REG_SET (function_obstack
);
1183 /* ??? This info is likely going to be out of date very soon. */
1184 CLEAR_REG_SET (bb
->local_set
);
1185 if (old_succ
->global_live_at_start
)
1187 COPY_REG_SET (bb
->global_live_at_start
, old_succ
->global_live_at_start
);
1188 COPY_REG_SET (bb
->global_live_at_end
, old_succ
->global_live_at_start
);
1192 CLEAR_REG_SET (bb
->global_live_at_start
);
1193 CLEAR_REG_SET (bb
->global_live_at_end
);
1198 bb
->succ
= edge_out
;
1201 edge_in
->flags
&= ~EDGE_CRITICAL
;
1203 edge_out
->pred_next
= old_succ
->pred
;
1204 edge_out
->succ_next
= NULL
;
1206 edge_out
->dest
= old_succ
;
1207 edge_out
->flags
= EDGE_FALLTHRU
;
1208 edge_out
->probability
= REG_BR_PROB_BASE
;
1210 old_succ
->pred
= edge_out
;
1212 /* Tricky case -- if there existed a fallthru into the successor
1213 (and we're not it) we must add a new unconditional jump around
1214 the new block we're actually interested in.
1216 Further, if that edge is critical, this means a second new basic
1217 block must be created to hold it. In order to simplify correct
1218 insn placement, do this before we touch the existing basic block
1219 ordering for the block we were really wanting. */
1220 if ((edge_in
->flags
& EDGE_FALLTHRU
) == 0)
1223 for (e
= edge_out
->pred_next
; e
; e
= e
->pred_next
)
1224 if (e
->flags
& EDGE_FALLTHRU
)
1229 basic_block jump_block
;
1232 if ((e
->flags
& EDGE_CRITICAL
) == 0)
1234 /* Non critical -- we can simply add a jump to the end
1235 of the existing predecessor. */
1236 jump_block
= e
->src
;
1240 /* We need a new block to hold the jump. The simplest
1241 way to do the bulk of the work here is to recursively
1243 jump_block
= split_edge (e
);
1244 e
= jump_block
->succ
;
1247 /* Now add the jump insn ... */
1248 pos
= emit_jump_insn_after (gen_jump (old_succ
->head
),
1250 jump_block
->end
= pos
;
1251 emit_barrier_after (pos
);
1253 /* ... let jump know that label is in use, ... */
1254 JUMP_LABEL (pos
) = old_succ
->head
;
1255 ++LABEL_NUSES (old_succ
->head
);
1257 /* ... and clear fallthru on the outgoing edge. */
1258 e
->flags
&= ~EDGE_FALLTHRU
;
1260 /* Continue splitting the interesting edge. */
1264 /* Place the new block just in front of the successor. */
1265 VARRAY_GROW (basic_block_info
, ++n_basic_blocks
);
1266 for (i
= n_basic_blocks
- 1; i
> old_succ
->index
; --i
)
1268 basic_block tmp
= BASIC_BLOCK (i
- 1);
1269 BASIC_BLOCK (i
) = tmp
;
1272 BASIC_BLOCK (i
) = bb
;
1275 /* Create the basic block note. */
1276 bb_note
= emit_note_before (NOTE_INSN_BASIC_BLOCK
, old_succ
->head
);
1277 NOTE_BASIC_BLOCK (bb_note
) = bb
;
1278 bb
->head
= bb
->end
= bb_note
;
1280 /* Not quite simple -- for non-fallthru edges, we must adjust the
1281 predecessor's jump instruction to target our new block. */
1282 if ((edge_in
->flags
& EDGE_FALLTHRU
) == 0)
1284 rtx tmp
, insn
= old_pred
->end
;
1285 rtx old_label
= old_succ
->head
;
1286 rtx new_label
= gen_label_rtx ();
1288 if (GET_CODE (insn
) != JUMP_INSN
)
1291 /* ??? Recognize a tablejump and adjust all matching cases. */
1292 if ((tmp
= JUMP_LABEL (insn
)) != NULL_RTX
1293 && (tmp
= NEXT_INSN (tmp
)) != NULL_RTX
1294 && GET_CODE (tmp
) == JUMP_INSN
1295 && (GET_CODE (PATTERN (tmp
)) == ADDR_VEC
1296 || GET_CODE (PATTERN (tmp
)) == ADDR_DIFF_VEC
))
1301 if (GET_CODE (PATTERN (tmp
)) == ADDR_VEC
)
1302 vec
= XVEC (PATTERN (tmp
), 0);
1304 vec
= XVEC (PATTERN (tmp
), 1);
1306 for (j
= GET_NUM_ELEM (vec
) - 1; j
>= 0; --j
)
1307 if (XEXP (RTVEC_ELT (vec
, j
), 0) == old_label
)
1309 RTVEC_ELT (vec
, j
) = gen_rtx_LABEL_REF (VOIDmode
, new_label
);
1310 --LABEL_NUSES (old_label
);
1311 ++LABEL_NUSES (new_label
);
1316 /* This would have indicated an abnormal edge. */
1317 if (computed_jump_p (insn
))
1320 /* A return instruction can't be redirected. */
1321 if (returnjump_p (insn
))
1324 /* If the insn doesn't go where we think, we're confused. */
1325 if (JUMP_LABEL (insn
) != old_label
)
1328 redirect_jump (insn
, new_label
);
1331 emit_label_before (new_label
, bb_note
);
1332 bb
->head
= new_label
;
/* Queue instructions for insertion on an edge between two basic blocks.
   The new instructions and basic blocks (if any) will not appear in the
   CFG until commit_edge_insertions is called.  (An illustrative usage
   sketch follows commit_edge_insertions, below.)  */
1343 insert_insn_on_edge (pattern
, e
)
1347 /* We cannot insert instructions on an abnormal critical edge.
1348 It will be easier to find the culprit if we die now. */
1349 if ((e
->flags
& (EDGE_ABNORMAL
|EDGE_CRITICAL
))
1350 == (EDGE_ABNORMAL
|EDGE_CRITICAL
))
1353 if (e
->insns
== NULL_RTX
)
1356 push_to_sequence (e
->insns
);
1358 emit_insn (pattern
);
1360 e
->insns
= get_insns ();
1364 /* Update the CFG for the instructions queued on edge E. */
1367 commit_one_edge_insertion (e
)
1370 rtx before
= NULL_RTX
, after
= NULL_RTX
, tmp
;
1373 /* Figure out where to put these things. If the destination has
1374 one predecessor, insert there. Except for the exit block. */
1375 if (e
->dest
->pred
->pred_next
== NULL
1376 && e
->dest
!= EXIT_BLOCK_PTR
)
1380 /* Get the location correct wrt a code label, and "nice" wrt
1381 a basic block note, and before everything else. */
1383 if (GET_CODE (tmp
) == CODE_LABEL
)
1384 tmp
= NEXT_INSN (tmp
);
1385 if (GET_CODE (tmp
) == NOTE
1386 && NOTE_LINE_NUMBER (tmp
) == NOTE_INSN_BASIC_BLOCK
)
1387 tmp
= NEXT_INSN (tmp
);
1388 if (tmp
== bb
->head
)
1391 after
= PREV_INSN (tmp
);
1394 /* If the source has one successor and the edge is not abnormal,
1395 insert there. Except for the entry block. */
1396 else if ((e
->flags
& EDGE_ABNORMAL
) == 0
1397 && e
->src
->succ
->succ_next
== NULL
1398 && e
->src
!= ENTRY_BLOCK_PTR
)
1401 if (GET_CODE (bb
->end
) == JUMP_INSN
)
1403 /* ??? Is it possible to wind up with non-simple jumps? Perhaps
1404 a jump with delay slots already filled? */
1405 if (! simplejump_p (bb
->end
))
1412 /* We'd better be fallthru, or we've lost track of what's what. */
1413 if ((e
->flags
& EDGE_FALLTHRU
) == 0)
1420 /* Otherwise we must split the edge. */
1423 bb
= split_edge (e
);
1427 /* Now that we've found the spot, do the insertion. */
1429 e
->insns
= NULL_RTX
;
1431 /* Set the new block number for these insns, if structure is allocated. */
1432 if (basic_block_for_insn
)
1435 for (i
= tmp
; i
!= NULL_RTX
; i
= NEXT_INSN (i
))
1436 set_block_for_insn (i
, bb
);
1441 emit_insns_before (tmp
, before
);
1442 if (before
== bb
->head
)
1447 tmp
= emit_insns_after (tmp
, after
);
1448 if (after
== bb
->end
)
1453 /* Update the CFG for all queued instructions. */
1456 commit_edge_insertions ()
1462 bb
= ENTRY_BLOCK_PTR
;
1467 for (e
= bb
->succ
; e
; e
= next
)
1469 next
= e
->succ_next
;
1471 commit_one_edge_insertion (e
);
1474 if (++i
>= n_basic_blocks
)
1476 bb
= BASIC_BLOCK (i
);
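#if 0
/* A minimal illustrative sketch, kept under "#if 0" and never compiled,
   of the intended use of insert_insn_on_edge and commit_edge_insertions:
   queue insns on edges, then materialize them all at once.  DST and SRC
   are placeholder registers chosen by the caller; gen_move_insn merely
   stands for whatever generates the insns to be inserted.  */

static void
sketch_copy_on_preds (bb, dst, src)
     basic_block bb;
     rtx dst, src;
{
  edge e;

  /* Queue a register copy on every edge entering BB.  Nothing is
     emitted into the insn chain yet.  */
  for (e = bb->pred; e; e = e->pred_next)
    insert_insn_on_edge (gen_move_insn (dst, src), e);

  /* Materialize all queued insns.  This may split critical edges and
     create new basic blocks to hold the inserted code.  */
  commit_edge_insertions ();
}
#endif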
1480 /* Delete all unreachable basic blocks. */
1483 delete_unreachable_blocks ()
1485 basic_block
*worklist
, *tos
;
1486 int deleted_handler
;
1491 tos
= worklist
= (basic_block
*) alloca (sizeof (basic_block
) * n
);
1493 /* Use basic_block->aux as a marker. Clear them all. */
1495 for (i
= 0; i
< n
; ++i
)
1496 BASIC_BLOCK (i
)->aux
= NULL
;
  /* Add our starting points to the worklist.  Almost always there will
     be only one.  It isn't inconceivable that we might one day directly
     support Fortran alternate entry points.  */
1502 for (e
= ENTRY_BLOCK_PTR
->succ
; e
; e
= e
->succ_next
)
1506 /* Mark the block with a handy non-null value. */
1510 /* Iterate: find everything reachable from what we've already seen. */
1512 while (tos
!= worklist
)
1514 basic_block b
= *--tos
;
1516 for (e
= b
->succ
; e
; e
= e
->succ_next
)
1524 /* Delete all unreachable basic blocks. Count down so that we don't
1525 interfere with the block renumbering that happens in delete_block. */
1527 deleted_handler
= 0;
1529 for (i
= n
- 1; i
>= 0; --i
)
1531 basic_block b
= BASIC_BLOCK (i
);
1534 /* This block was found. Tidy up the mark. */
1537 deleted_handler
|= delete_block (b
);
1540 /* Fix up edges that now fall through, or rather should now fall through
1541 but previously required a jump around now deleted blocks. Simplify
1542 the search by only examining blocks numerically adjacent, since this
1543 is how find_basic_blocks created them. */
1545 for (i
= 1; i
< n_basic_blocks
; ++i
)
1547 basic_block b
= BASIC_BLOCK (i
- 1);
1548 basic_block c
= BASIC_BLOCK (i
);
1551 /* We care about simple conditional or unconditional jumps with
1554 If we had a conditional branch to the next instruction when
1555 find_basic_blocks was called, then there will only be one
1556 out edge for the block which ended with the conditional
1557 branch (since we do not create duplicate edges).
1559 Furthermore, the edge will be marked as a fallthru because we
1560 merge the flags for the duplicate edges. So we do not want to
1561 check that the edge is not a FALLTHRU edge. */
1562 if ((s
= b
->succ
) != NULL
1563 && s
->succ_next
== NULL
1565 /* If the jump insn has side effects, we can't tidy the edge. */
1566 && (GET_CODE (b
->end
) != JUMP_INSN
1567 || onlyjump_p (b
->end
)))
1568 tidy_fallthru_edge (s
, b
, c
);
1571 /* Attempt to merge blocks as made possible by edge removal. If a block
1572 has only one successor, and the successor has only one predecessor,
1573 they may be combined. */
1575 for (i
= 0; i
< n_basic_blocks
; )
1577 basic_block c
, b
= BASIC_BLOCK (i
);
      /* A loop because chains of blocks might be combinable.  */
1581 while ((s
= b
->succ
) != NULL
1582 && s
->succ_next
== NULL
1583 && (s
->flags
& EDGE_EH
) == 0
1584 && (c
= s
->dest
) != EXIT_BLOCK_PTR
1585 && c
->pred
->pred_next
== NULL
1586 /* If the jump insn has side effects, we can't kill the edge. */
1587 && (GET_CODE (b
->end
) != JUMP_INSN
1588 || onlyjump_p (b
->end
))
1589 && merge_blocks (s
, b
, c
))
1592 /* Don't get confused by the index shift caused by deleting blocks. */
1596 /* If we deleted an exception handler, we may have EH region begin/end
1597 blocks to remove as well. */
1598 if (deleted_handler
)
1599 delete_eh_regions ();
1602 /* Find EH regions for which there is no longer a handler, and delete them. */
1605 delete_eh_regions ()
1609 update_rethrow_references ();
1611 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
1612 if (GET_CODE (insn
) == NOTE
)
1614 if ((NOTE_LINE_NUMBER (insn
) == NOTE_INSN_EH_REGION_BEG
) ||
1615 (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_EH_REGION_END
))
1617 int num
= CODE_LABEL_NUMBER (insn
);
1618 /* A NULL handler indicates a region is no longer needed,
1619 as long as it isn't the target of a rethrow. */
1620 if (get_first_handler (num
) == NULL
&& ! rethrow_used (num
))
1622 NOTE_LINE_NUMBER (insn
) = NOTE_INSN_DELETED
;
1623 NOTE_SOURCE_FILE (insn
) = 0;
1629 /* Return true if NOTE is not one of the ones that must be kept paired,
1630 so that we may simply delete them. */
1633 can_delete_note_p (note
)
1636 return (NOTE_LINE_NUMBER (note
) == NOTE_INSN_DELETED
1637 || NOTE_LINE_NUMBER (note
) == NOTE_INSN_BASIC_BLOCK
);
1640 /* Unlink a chain of insns between START and FINISH, leaving notes
1641 that must be paired. */
1644 delete_insn_chain (start
, finish
)
1647 /* Unchain the insns one by one. It would be quicker to delete all
1648 of these with a single unchaining, rather than one at a time, but
1649 we need to keep the NOTE's. */
1655 next
= NEXT_INSN (start
);
1656 if (GET_CODE (start
) == NOTE
&& !can_delete_note_p (start
))
1658 else if (GET_CODE (start
) == CODE_LABEL
&& !can_delete_label_p (start
))
1661 next
= flow_delete_insn (start
);
1663 if (start
== finish
)
1669 /* Delete the insns in a (non-live) block. We physically delete every
1670 non-deleted-note insn, and update the flow graph appropriately.
1672 Return nonzero if we deleted an exception handler. */
1674 /* ??? Preserving all such notes strikes me as wrong. It would be nice
1675 to post-process the stream to remove empty blocks, loops, ranges, etc. */
1681 int deleted_handler
= 0;
1684 /* If the head of this block is a CODE_LABEL, then it might be the
1685 label for an exception handler which can't be reached.
1687 We need to remove the label from the exception_handler_label list
1688 and remove the associated NOTE_INSN_EH_REGION_BEG and
1689 NOTE_INSN_EH_REGION_END notes. */
1693 never_reached_warning (insn
);
1695 if (GET_CODE (insn
) == CODE_LABEL
)
1697 rtx x
, *prev
= &exception_handler_labels
;
1699 for (x
= exception_handler_labels
; x
; x
= XEXP (x
, 1))
1701 if (XEXP (x
, 0) == insn
)
1703 /* Found a match, splice this label out of the EH label list. */
1704 *prev
= XEXP (x
, 1);
1705 XEXP (x
, 1) = NULL_RTX
;
1706 XEXP (x
, 0) = NULL_RTX
;
1708 /* Remove the handler from all regions */
1709 remove_handler (insn
);
1710 deleted_handler
= 1;
1713 prev
= &XEXP (x
, 1);
1716 /* This label may be referenced by code solely for its value, or
1717 referenced by static data, or something. We have determined
1718 that it is not reachable, but cannot delete the label itself.
1719 Save code space and continue to delete the balance of the block,
1720 along with properly updating the cfg. */
1721 if (!can_delete_label_p (insn
))
1723 /* If we've only got one of these, skip the whole deleting
1726 goto no_delete_insns
;
1727 insn
= NEXT_INSN (insn
);
1731 /* Selectively unlink the insn chain. Include any BARRIER that may
1732 follow the basic block. */
1733 end
= next_nonnote_insn (b
->end
);
1734 if (!end
|| GET_CODE (end
) != BARRIER
)
1736 delete_insn_chain (insn
, end
);
1740 /* Remove the edges into and out of this block. Note that there may
1741 indeed be edges in, if we are removing an unreachable loop. */
1745 for (e
= b
->pred
; e
; e
= next
)
1747 for (q
= &e
->src
->succ
; *q
!= e
; q
= &(*q
)->succ_next
)
1750 next
= e
->pred_next
;
1753 for (e
= b
->succ
; e
; e
= next
)
1755 for (q
= &e
->dest
->pred
; *q
!= e
; q
= &(*q
)->pred_next
)
1758 next
= e
->succ_next
;
1766 /* Remove the basic block from the array, and compact behind it. */
1769 return deleted_handler
;
1772 /* Remove block B from the basic block array and compact behind it. */
1778 int i
, n
= n_basic_blocks
;
1780 for (i
= b
->index
; i
+ 1 < n
; ++i
)
1782 basic_block x
= BASIC_BLOCK (i
+ 1);
1783 BASIC_BLOCK (i
) = x
;
1787 basic_block_info
->num_elements
--;
1791 /* Delete INSN by patching it out. Return the next insn. */
1794 flow_delete_insn (insn
)
1797 rtx prev
= PREV_INSN (insn
);
1798 rtx next
= NEXT_INSN (insn
);
1800 PREV_INSN (insn
) = NULL_RTX
;
1801 NEXT_INSN (insn
) = NULL_RTX
;
1804 NEXT_INSN (prev
) = next
;
1806 PREV_INSN (next
) = prev
;
1808 set_last_insn (prev
);
1810 if (GET_CODE (insn
) == CODE_LABEL
)
1811 remove_node_from_expr_list (insn
, &nonlocal_goto_handler_labels
);
1813 /* If deleting a jump, decrement the use count of the label. Deleting
1814 the label itself should happen in the normal course of block merging. */
1815 if (GET_CODE (insn
) == JUMP_INSN
&& JUMP_LABEL (insn
))
1816 LABEL_NUSES (JUMP_LABEL (insn
))--;
1821 /* True if a given label can be deleted. */
1824 can_delete_label_p (label
)
1829 if (LABEL_PRESERVE_P (label
))
1832 for (x
= forced_labels
; x
; x
= XEXP (x
, 1))
1833 if (label
== XEXP (x
, 0))
1835 for (x
= label_value_list
; x
; x
= XEXP (x
, 1))
1836 if (label
== XEXP (x
, 0))
1838 for (x
= exception_handler_labels
; x
; x
= XEXP (x
, 1))
1839 if (label
== XEXP (x
, 0))
1842 /* User declared labels must be preserved. */
1843 if (LABEL_NAME (label
) != 0)
1849 /* Blocks A and B are to be merged into a single block. The insns
1850 are already contiguous, hence `nomove'. */
1853 merge_blocks_nomove (a
, b
)
1857 rtx b_head
, b_end
, a_end
;
1860 /* If there was a CODE_LABEL beginning B, delete it. */
1863 if (GET_CODE (b_head
) == CODE_LABEL
)
1865 /* Detect basic blocks with nothing but a label. This can happen
1866 in particular at the end of a function. */
1867 if (b_head
== b_end
)
1869 b_head
= flow_delete_insn (b_head
);
1872 /* Delete the basic block note. */
1873 if (GET_CODE (b_head
) == NOTE
1874 && NOTE_LINE_NUMBER (b_head
) == NOTE_INSN_BASIC_BLOCK
)
1876 if (b_head
== b_end
)
1878 b_head
= flow_delete_insn (b_head
);
1881 /* If there was a jump out of A, delete it. */
1883 if (GET_CODE (a_end
) == JUMP_INSN
)
1887 prev
= prev_nonnote_insn (a_end
);
1892 /* If this was a conditional jump, we need to also delete
1893 the insn that set cc0. */
1895 if (prev
&& sets_cc0_p (prev
))
1898 prev
= prev_nonnote_insn (prev
);
1901 flow_delete_insn (tmp
);
1905 /* Note that a->head != a->end, since we should have at least a
1906 bb note plus the jump, so prev != insn. */
1907 flow_delete_insn (a_end
);
1911 /* By definition, there should only be one successor of A, and that is
1912 B. Free that edge struct. */
1915 /* Adjust the edges out of B for the new owner. */
1916 for (e
= b
->succ
; e
; e
= e
->succ_next
)
1920 /* Reassociate the insns of B with A. */
1923 BLOCK_FOR_INSN (b_head
) = a
;
1924 while (b_head
!= b_end
)
1926 b_head
= NEXT_INSN (b_head
);
1927 BLOCK_FOR_INSN (b_head
) = a
;
1933 /* Compact the basic block array. */
1937 /* Attempt to merge basic blocks that are potentially non-adjacent.
1938 Return true iff the attempt succeeded. */
1941 merge_blocks (e
, b
, c
)
1945 /* If B has a fallthru edge to C, no need to move anything. */
1946 if (!(e
->flags
& EDGE_FALLTHRU
))
1948 /* ??? From here on out we must make sure to not munge nesting
1949 of exception regions and lexical blocks. Need to think about
1950 these cases before this gets implemented. */
1953 /* If C has an outgoing fallthru, and B does not have an incoming
1954 fallthru, move B before C. The later clause is somewhat arbitrary,
1955 but avoids modifying blocks other than the two we've been given. */
1957 /* Otherwise, move C after B. If C had a fallthru, which doesn't
1958 happen to be the physical successor to B, insert an unconditional
1959 branch. If C already ended with a conditional branch, the new
1960 jump must go in a new basic block D. */
1963 /* If a label still appears somewhere and we cannot delete the label,
1964 then we cannot merge the blocks. The edge was tidied already. */
1966 rtx insn
, stop
= NEXT_INSN (c
->head
);
1967 for (insn
= NEXT_INSN (b
->end
); insn
!= stop
; insn
= NEXT_INSN (insn
))
1968 if (GET_CODE (insn
) == CODE_LABEL
&& !can_delete_label_p (insn
))
1972 merge_blocks_nomove (b
, c
);
/* The given edge should potentially be a fallthru edge.  If that is in
   fact true, delete the unconditional jump and barriers that are in
   the way.  */
1981 tidy_fallthru_edge (e
, b
, c
)
  /* ??? In a late-running flow pass, other folks may have deleted basic
     blocks by nopping out blocks, leaving multiple BARRIERs between here
     and the target label.  They ought to be chastised and fixed.

     We can also wind up with a sequence of undeletable labels between
     one block and the next.

     So search through a sequence of barriers, labels, and notes for
     the head of block C and assert that we really do fall through.  */
1997 if (next_real_insn (b
->end
) != next_real_insn (PREV_INSN (c
->head
)))
2000 /* Remove what will soon cease being the jump insn from the source block.
2001 If block B consisted only of this single jump, turn it into a deleted
2004 if (GET_CODE (q
) == JUMP_INSN
)
2007 /* If this was a conditional jump, we need to also delete
2008 the insn that set cc0. */
2009 if (! simplejump_p (q
) && condjump_p (q
) && sets_cc0_p (PREV_INSN (q
)))
2016 NOTE_LINE_NUMBER (q
) = NOTE_INSN_DELETED
;
2017 NOTE_SOURCE_FILE (q
) = 0;
2020 b
->end
= q
= PREV_INSN (q
);
2023 /* Selectively unlink the sequence. */
2024 if (q
!= PREV_INSN (c
->head
))
2025 delete_insn_chain (NEXT_INSN (q
), PREV_INSN (c
->head
));
2027 e
->flags
|= EDGE_FALLTHRU
;
2030 /* Discover and record the loop depth at the head of each basic block. */
2033 calculate_loop_depth (insns
)
2038 int i
= 0, depth
= 1;
2040 bb
= BASIC_BLOCK (i
);
2041 for (insn
= insns
; insn
; insn
= NEXT_INSN (insn
))
2043 if (insn
== bb
->head
)
2045 bb
->loop_depth
= depth
;
2046 if (++i
>= n_basic_blocks
)
2048 bb
= BASIC_BLOCK (i
);
2051 if (GET_CODE (insn
) == NOTE
)
2053 if (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_LOOP_BEG
)
2055 else if (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_LOOP_END
)
2058 /* If we have LOOP_DEPTH == 0, there has been a bookkeeping error. */
2065 /* Perform data flow analysis.
2066 F is the first insn of the function and NREGS the number of register numbers
2070 life_analysis (f
, nregs
, file
, remove_dead_code
)
2074 int remove_dead_code
;
2076 #ifdef ELIMINABLE_REGS
2078 static struct {int from
, to
; } eliminables
[] = ELIMINABLE_REGS
;
2081 /* Record which registers will be eliminated. We use this in
2084 CLEAR_HARD_REG_SET (elim_reg_set
);
2086 #ifdef ELIMINABLE_REGS
2087 for (i
= 0; i
< sizeof eliminables
/ sizeof eliminables
[0]; i
++)
2088 SET_HARD_REG_BIT (elim_reg_set
, eliminables
[i
].from
);
2090 SET_HARD_REG_BIT (elim_reg_set
, FRAME_POINTER_REGNUM
);
2093 /* Allocate a bitmap to be filled in by record_volatile_insns. */
2094 uid_volatile
= BITMAP_ALLOCA ();
2096 /* We want alias analysis information for local dead store elimination. */
2097 init_alias_analysis ();
2099 life_analysis_1 (f
, nregs
, remove_dead_code
);
2101 if (! reload_completed
)
2102 mark_constant_function ();
2104 end_alias_analysis ();
2107 dump_flow_info (file
);
2109 BITMAP_FREE (uid_volatile
);
2110 free_basic_block_vars (1);
2113 /* Free the variables allocated by find_basic_blocks.
2115 KEEP_HEAD_END_P is non-zero if basic_block_info is not to be freed. */
2118 free_basic_block_vars (keep_head_end_p
)
2119 int keep_head_end_p
;
2121 if (basic_block_for_insn
)
2123 VARRAY_FREE (basic_block_for_insn
);
2124 basic_block_for_insn
= NULL
;
2127 if (! keep_head_end_p
)
2130 VARRAY_FREE (basic_block_info
);
2133 ENTRY_BLOCK_PTR
->aux
= NULL
;
2134 ENTRY_BLOCK_PTR
->global_live_at_end
= NULL
;
2135 EXIT_BLOCK_PTR
->aux
= NULL
;
2136 EXIT_BLOCK_PTR
->global_live_at_start
= NULL
;
2140 /* Return nonzero if the destination of SET equals the source. */
2145 rtx src
= SET_SRC (set
);
2146 rtx dst
= SET_DEST (set
);
2147 if (GET_CODE (src
) == REG
&& GET_CODE (dst
) == REG
2148 && REGNO (src
) == REGNO (dst
))
2150 if (GET_CODE (src
) != SUBREG
|| GET_CODE (dst
) != SUBREG
2151 || SUBREG_WORD (src
) != SUBREG_WORD (dst
))
2153 src
= SUBREG_REG (src
);
2154 dst
= SUBREG_REG (dst
);
2155 if (GET_CODE (src
) == REG
&& GET_CODE (dst
) == REG
2156 && REGNO (src
) == REGNO (dst
))
2161 /* Return nonzero if an insn consists only of SETs, each of which only sets a
2167 rtx pat
= PATTERN (insn
);
2169 /* Insns carrying these notes are useful later on. */
2170 if (find_reg_note (insn
, REG_EQUAL
, NULL_RTX
))
2173 if (GET_CODE (pat
) == SET
&& set_noop_p (pat
))
2176 if (GET_CODE (pat
) == PARALLEL
)
2179 /* If nothing but SETs of registers to themselves,
2180 this insn can also be deleted. */
2181 for (i
= 0; i
< XVECLEN (pat
, 0); i
++)
2183 rtx tem
= XVECEXP (pat
, 0, i
);
2185 if (GET_CODE (tem
) == USE
2186 || GET_CODE (tem
) == CLOBBER
)
2189 if (GET_CODE (tem
) != SET
|| ! set_noop_p (tem
))
2199 notice_stack_pointer_modification (x
, pat
)
2201 rtx pat ATTRIBUTE_UNUSED
;
2203 if (x
== stack_pointer_rtx
2204 /* The stack pointer is only modified indirectly as the result
2205 of a push until later in flow. See the comments in rtl.texi
2206 regarding Embedded Side-Effects on Addresses. */
2207 || (GET_CODE (x
) == MEM
2208 && (GET_CODE (XEXP (x
, 0)) == PRE_DEC
2209 || GET_CODE (XEXP (x
, 0)) == PRE_INC
2210 || GET_CODE (XEXP (x
, 0)) == POST_DEC
2211 || GET_CODE (XEXP (x
, 0)) == POST_INC
)
2212 && XEXP (XEXP (x
, 0), 0) == stack_pointer_rtx
))
2213 current_function_sp_is_unchanging
= 0;
/* Record which insns refer to any volatile memory
   or for any reason can't be deleted just because they are dead stores.
   Also, delete any insns that copy a register to itself.
   And see if the stack pointer is modified.  */

static void
record_volatile_insns (f)
     rtx f;
{
  rtx insn;

  for (insn = f; insn; insn = NEXT_INSN (insn))
    {
      enum rtx_code code1 = GET_CODE (insn);

      if (code1 == CALL_INSN)
	SET_INSN_VOLATILE (insn);
      else if (code1 == INSN || code1 == JUMP_INSN)
	{
	  if (GET_CODE (PATTERN (insn)) != USE
	      && volatile_refs_p (PATTERN (insn)))
	    SET_INSN_VOLATILE (insn);

	  /* A SET that makes space on the stack cannot be dead.
	     (Such SETs occur only for allocating variable-size data,
	     so they will always have a PLUS or MINUS according to the
	     direction of stack growth.)
	     Even if this function never uses this stack pointer value,
	     signal handlers do!  */
	  else if (code1 == INSN && GET_CODE (PATTERN (insn)) == SET
		   && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
#ifdef STACK_GROWS_DOWNWARD
		   && GET_CODE (SET_SRC (PATTERN (insn))) == MINUS
#else
		   && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
#endif
		   && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx)
	    SET_INSN_VOLATILE (insn);

	  /* Delete (in effect) any obvious no-op moves.  */
	  else if (noop_move_p (insn))
	    {
	      PUT_CODE (insn, NOTE);
	      NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
	      NOTE_SOURCE_FILE (insn) = 0;
	    }
	}

      /* Check if insn modifies the stack pointer.  */
      if (current_function_sp_is_unchanging
	  && GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	note_stores (PATTERN (insn), notice_stack_pointer_modification);
    }
}
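
/* The three statements used above to "delete" a no-op move (PUT_CODE to
   NOTE, NOTE_LINE_NUMBER set to NOTE_INSN_DELETED, NOTE_SOURCE_FILE set to
   zero) are an idiom that recurs several times later in this file.  As a
   hedged illustration only, a hypothetical helper factoring it out might
   look like the sketch below; flow.c does not actually define such a
   function.  */

static void
toy_delete_insn_in_place (insn)
     rtx insn;
{
  /* Turn INSN into a deleted note so it keeps its place in the insn chain
     but no longer counts as a use or a set of anything.  */
  PUT_CODE (insn, NOTE);
  NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
  NOTE_SOURCE_FILE (insn) = 0;
}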
/* Mark those regs which are needed at the end of the function as live
   at the end of the last basic block.  */

static void
mark_regs_live_at_end (set)
     regset set;
{
  int i;

  /* If exiting needs the right stack value, consider the stack pointer
     live at the end of the function.  */
  if (! EXIT_IGNORE_STACK
      || (! FRAME_POINTER_REQUIRED
	  && ! current_function_calls_alloca
	  && flag_omit_frame_pointer)
      || current_function_sp_is_unchanging)
    SET_REGNO_REG_SET (set, STACK_POINTER_REGNUM);

  /* Mark the frame pointer if needed at the end of the function.  If
     we end up eliminating it, it will be removed from the live list
     of each basic block by reload.  */

  if (! reload_completed || frame_pointer_needed)
    {
      SET_REGNO_REG_SET (set, FRAME_POINTER_REGNUM);
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
      /* If they are different, also mark the hard frame pointer as live.  */
      SET_REGNO_REG_SET (set, HARD_FRAME_POINTER_REGNUM);
#endif
    }

  /* Mark all global registers, and all registers used by the epilogue
     as being live at the end of the function since they may be
     referenced by our caller.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (global_regs[i]
#ifdef EPILOGUE_USES
	|| EPILOGUE_USES (i)
#endif
	)
      SET_REGNO_REG_SET (set, i);

  /* ??? Mark function return value here rather than as uses.  */
}
2314 /* Determine which registers are live at the start of each
2315 basic block of the function whose first insn is F.
2316 NREGS is the number of registers used in F.
2317 We allocate the vector basic_block_live_at_start
2318 and the regsets that it points to, and fill them with the data.
2319 regset_size and regset_bytes are also set here. */
2322 life_analysis_1 (f
, nregs
, remove_dead_code
)
2325 int remove_dead_code
;
2330 char save_regs_ever_live
[FIRST_PSEUDO_REGISTER
];
2331 regset
*new_live_at_end
;
2333 struct obstack flow_obstack
;
2335 gcc_obstack_init (&flow_obstack
);
2339 /* Allocate and zero out many data structures
2340 that will record the data from lifetime analysis. */
2342 allocate_reg_life_data ();
2343 allocate_bb_life_data ();
2345 reg_next_use
= (rtx
*) alloca (nregs
* sizeof (rtx
));
2346 memset (reg_next_use
, 0, nregs
* sizeof (rtx
));
2348 /* Set up regset-vectors used internally within this function.
2349 Their meanings are documented above, with their declarations. */
2351 new_live_at_end
= (regset
*) alloca ((n_basic_blocks
+ 1) * sizeof (regset
));
2352 init_regset_vector (new_live_at_end
, n_basic_blocks
+ 1, &flow_obstack
);
2354 /* Stick these vectors into the AUX field of the basic block, so that
2355 we don't have to keep going through the index. */
2357 for (i
= 0; i
< n_basic_blocks
; ++i
)
2358 BASIC_BLOCK (i
)->aux
= new_live_at_end
[i
];
2359 ENTRY_BLOCK_PTR
->aux
= new_live_at_end
[i
];
2361 /* Assume that the stack pointer is unchanging if alloca hasn't been used.
2362 This will be cleared by record_volatile_insns if it encounters an insn
2363 which modifies the stack pointer. */
2364 current_function_sp_is_unchanging
= !current_function_calls_alloca
;
2366 record_volatile_insns (f
);
2368 if (n_basic_blocks
> 0)
2373 theend
= EXIT_BLOCK_PTR
->global_live_at_start
;
2374 mark_regs_live_at_end (theend
);
/* Propagate this exit data to each of EXIT's predecessors.  */
2377 for (e
= EXIT_BLOCK_PTR
->pred
; e
; e
= e
->pred_next
)
2379 COPY_REG_SET (e
->src
->global_live_at_end
, theend
);
2380 COPY_REG_SET ((regset
) e
->src
->aux
, theend
);
/* The post-reload life analysis must have (on a global basis) the same
   registers live as were computed by reload itself.

   Otherwise elimination offsets and such may be incorrect.

   Reload will mark some registers live even though they do not appear
   in the rtl.  */
2391 if (reload_completed
)
2392 memcpy (save_regs_ever_live
, regs_ever_live
, sizeof (regs_ever_live
));
2393 memset (regs_ever_live
, 0, sizeof regs_ever_live
);
2395 /* Propagate life info through the basic blocks
2396 around the graph of basic blocks.
2398 This is a relaxation process: each time a new register
2399 is live at the end of the basic block, we must scan the block
2400 to determine which registers are, as a consequence, live at the beginning
2401 of that block. These registers must then be marked live at the ends
2402 of all the blocks that can transfer control to that block.
2403 The process continues until it reaches a fixed point. */
2410 for (i
= n_basic_blocks
- 1; i
>= 0; i
--)
2412 basic_block bb
= BASIC_BLOCK (i
);
2413 int consider
= first_pass
;
2414 int must_rescan
= first_pass
;
2419 /* Set CONSIDER if this block needs thinking about at all
2420 (that is, if the regs live now at the end of it
2421 are not the same as were live at the end of it when
2422 we last thought about it).
2423 Set must_rescan if it needs to be thought about
2424 instruction by instruction (that is, if any additional
2425 reg that is live at the end now but was not live there before
2426 is one of the significant regs of this basic block). */
2428 EXECUTE_IF_AND_COMPL_IN_REG_SET
2429 ((regset
) bb
->aux
, bb
->global_live_at_end
, 0, j
,
2432 if (REGNO_REG_SET_P (bb
->local_set
, j
))
2443 /* The live_at_start of this block may be changing,
2444 so another pass will be required after this one. */
2449 /* No complete rescan needed;
2450 just record those variables newly known live at end
2451 as live at start as well. */
2452 IOR_AND_COMPL_REG_SET (bb
->global_live_at_start
,
2454 bb
->global_live_at_end
);
2456 IOR_AND_COMPL_REG_SET (bb
->global_live_at_end
,
2458 bb
->global_live_at_end
);
2462 /* Update the basic_block_live_at_start
2463 by propagation backwards through the block. */
2464 COPY_REG_SET (bb
->global_live_at_end
, (regset
) bb
->aux
);
2465 COPY_REG_SET (bb
->global_live_at_start
,
2466 bb
->global_live_at_end
);
2467 propagate_block (bb
->global_live_at_start
,
2468 bb
->head
, bb
->end
, 0,
2469 first_pass
? bb
->local_set
: (regset
) 0,
2470 i
, remove_dead_code
);
2473 /* Update the new_live_at_end's of the block's predecessors. */
2477 for (e
= bb
->pred
; e
; e
= e
->pred_next
)
2478 IOR_REG_SET ((regset
) e
->src
->aux
, bb
->global_live_at_start
);
/* The only pseudos that are live at the beginning of the function are
   those that were not set anywhere in the function.  local-alloc doesn't
   know how to handle these correctly, so mark them as not local to any
   one basic block.  */
2493 if (n_basic_blocks
> 0)
2494 EXECUTE_IF_SET_IN_REG_SET (BASIC_BLOCK (0)->global_live_at_start
,
2495 FIRST_PSEUDO_REGISTER
, i
,
2497 REG_BASIC_BLOCK (i
) = REG_BLOCK_GLOBAL
;
2500 /* Now the life information is accurate. Make one more pass over each
2501 basic block to delete dead stores, create autoincrement addressing
2502 and record how many times each register is used, is set, or dies. */
2504 for (i
= 0; i
< n_basic_blocks
; i
++)
2506 basic_block bb
= BASIC_BLOCK (i
);
2508 /* We start with global_live_at_end to determine which stores are
2509 dead. This process is destructive, and we wish to preserve the
2510 contents of global_live_at_end for posterity. Fortunately,
2511 new_live_at_end, due to the way we converged on a solution,
2512 contains a duplicate of global_live_at_end that we can kill. */
2513 propagate_block ((regset
) bb
->aux
, bb
->head
, bb
->end
, 1, (regset
) 0, i
, remove_dead_code
);
2520 /* We have a problem with any pseudoreg that lives across the setjmp.
2521 ANSI says that if a user variable does not change in value between
2522 the setjmp and the longjmp, then the longjmp preserves it. This
2523 includes longjmp from a place where the pseudo appears dead.
2524 (In principle, the value still exists if it is in scope.)
2525 If the pseudo goes in a hard reg, some other value may occupy
2526 that hard reg where this pseudo is dead, thus clobbering the pseudo.
2527 Conclusion: such a pseudo must not go in a hard reg. */
2528 EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp
,
2529 FIRST_PSEUDO_REGISTER
, i
,
2531 if (regno_reg_rtx
[i
] != 0)
2533 REG_LIVE_LENGTH (i
) = -1;
2534 REG_BASIC_BLOCK (i
) = -1;
2538 /* Restore regs_ever_live that was provided by reload. */
2539 if (reload_completed
)
2540 memcpy (regs_ever_live
, save_regs_ever_live
, sizeof (regs_ever_live
));
2542 free_regset_vector (new_live_at_end
, n_basic_blocks
);
2543 obstack_free (&flow_obstack
, NULL_PTR
);
2545 for (i
= 0; i
< n_basic_blocks
; ++i
)
2546 BASIC_BLOCK (i
)->aux
= NULL
;
2547 ENTRY_BLOCK_PTR
->aux
= NULL
;
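
/* The relaxation loop in life_analysis_1 above converges on global liveness
   by repeatedly re-scanning blocks whose live-at-end sets grew.  The
   following is a hedged, self-contained sketch of the same fixed-point
   computation over a toy CFG, using plain bit masks instead of regsets and
   per-block use/def summaries instead of rescanning insns.  Every toy_*
   name is hypothetical and exists only for illustration.  */

#define TOY_MAX_SUCCS 4

struct toy_block
{
  unsigned long use;		/* regs read before being written in the block */
  unsigned long def;		/* regs written in the block */
  unsigned long live_in;	/* result: live at block start */
  unsigned long live_out;	/* result: live at block end */
  int n_succ;
  int succ[TOY_MAX_SUCCS];	/* indices of successor blocks */
};

static void
toy_compute_liveness (blk, n)
     struct toy_block *blk;
     int n;
{
  int changed = 1;

  while (changed)
    {
      int i, j;

      changed = 0;
      /* Walk blocks in reverse order; a backward problem usually
	 converges in fewer passes that way.  */
      for (i = n - 1; i >= 0; i--)
	{
	  unsigned long out = 0, in;

	  for (j = 0; j < blk[i].n_succ; j++)
	    out |= blk[blk[i].succ[j]].live_in;

	  in = blk[i].use | (out & ~blk[i].def);
	  if (in != blk[i].live_in || out != blk[i].live_out)
	    {
	      blk[i].live_in = in;
	      blk[i].live_out = out;
	      changed = 1;
	    }
	}
    }
}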
2550 /* Subroutines of life analysis. */
2552 /* Allocate the permanent data structures that represent the results
2553 of life analysis. Not static since used also for stupid life analysis. */
2556 allocate_bb_life_data ()
2560 for (i
= 0; i
< n_basic_blocks
; i
++)
2562 basic_block bb
= BASIC_BLOCK (i
);
2564 bb
->local_set
= OBSTACK_ALLOC_REG_SET (function_obstack
);
2565 bb
->global_live_at_start
= OBSTACK_ALLOC_REG_SET (function_obstack
);
2566 bb
->global_live_at_end
= OBSTACK_ALLOC_REG_SET (function_obstack
);
2569 ENTRY_BLOCK_PTR
->global_live_at_end
2570 = OBSTACK_ALLOC_REG_SET (function_obstack
);
2571 EXIT_BLOCK_PTR
->global_live_at_start
2572 = OBSTACK_ALLOC_REG_SET (function_obstack
);
2574 regs_live_at_setjmp
= OBSTACK_ALLOC_REG_SET (function_obstack
);
2578 allocate_reg_life_data ()
2582 /* Recalculate the register space, in case it has grown. Old style
2583 vector oriented regsets would set regset_{size,bytes} here also. */
2584 allocate_reg_info (max_regno
, FALSE
, FALSE
);
2586 /* Because both reg_scan and flow_analysis want to set up the REG_N_SETS
2587 information, explicitly reset it here. The allocation should have
2588 already happened on the previous reg_scan pass. Make sure in case
2589 some more registers were allocated. */
2590 for (i
= 0; i
< max_regno
; i
++)
/* Make each element of VECTOR point at a regset.  The vector has
   NELTS elements, and space is allocated from the ALLOC_OBSTACK
   obstack.  */
2599 init_regset_vector (vector
, nelts
, alloc_obstack
)
2602 struct obstack
*alloc_obstack
;
2606 for (i
= 0; i
< nelts
; i
++)
2608 vector
[i
] = OBSTACK_ALLOC_REG_SET (alloc_obstack
);
2609 CLEAR_REG_SET (vector
[i
]);
/* Release any additional space allocated for each element that VECTOR points
   to, other than the regset header itself.  The vector has NELTS elements.  */
2617 free_regset_vector (vector
, nelts
)
2623 for (i
= 0; i
< nelts
; i
++)
2624 FREE_REG_SET (vector
[i
]);
2627 /* Compute the registers live at the beginning of a basic block
2628 from those live at the end.
2630 When called, OLD contains those live at the end.
2631 On return, it contains those live at the beginning.
2632 FIRST and LAST are the first and last insns of the basic block.
2634 FINAL is nonzero if we are doing the final pass which is not
2635 for computing the life info (since that has already been done)
2636 but for acting on it. On this pass, we delete dead stores,
2637 set up the logical links and dead-variables lists of instructions,
2638 and merge instructions for autoincrement and autodecrement addresses.
2640 SIGNIFICANT is nonzero only the first time for each basic block.
2641 If it is nonzero, it points to a regset in which we store
2642 a 1 for each register that is set within the block.
2644 BNUM is the number of the basic block. */
2647 propagate_block (old
, first
, last
, final
, significant
, bnum
, remove_dead_code
)
2648 register regset old
;
2654 int remove_dead_code
;
2661 /* Find the loop depth for this block. Ignore loop level changes in the
2662 middle of the basic block -- for register allocation purposes, the
important uses will be in the blocks wholly contained within the loop,
not in the loop pre-header or post-trailer.  */
2665 loop_depth
= BASIC_BLOCK (bnum
)->loop_depth
;
2667 dead
= ALLOCA_REG_SET ();
2668 live
= ALLOCA_REG_SET ();
2671 mem_set_list
= NULL_RTX
;
2677 /* Process the regs live at the end of the block.
2678 Mark them as not local to any one basic block. */
2679 EXECUTE_IF_SET_IN_REG_SET (old
, 0, i
,
2681 REG_BASIC_BLOCK (i
) = REG_BLOCK_GLOBAL
;
2685 /* Scan the block an insn at a time from end to beginning. */
2687 for (insn
= last
; ; insn
= prev
)
2689 prev
= PREV_INSN (insn
);
2691 if (GET_CODE (insn
) == NOTE
)
2693 /* If this is a call to `setjmp' et al,
2694 warn if any non-volatile datum is live. */
2696 if (final
&& NOTE_LINE_NUMBER (insn
) == NOTE_INSN_SETJMP
)
2697 IOR_REG_SET (regs_live_at_setjmp
, old
);
2700 /* Update the life-status of regs for this insn.
2701 First DEAD gets which regs are set in this insn
2702 then LIVE gets which regs are used in this insn.
2703 Then the regs live before the insn
2704 are those live after, with DEAD regs turned off,
2705 and then LIVE regs turned on. */
2707 else if (GET_RTX_CLASS (GET_CODE (insn
)) == 'i')
2710 rtx note
= find_reg_note (insn
, REG_RETVAL
, NULL_RTX
);
2711 int insn_is_dead
= 0;
2712 int libcall_is_dead
= 0;
2714 if (remove_dead_code
)
2716 insn_is_dead
= (insn_dead_p (PATTERN (insn
), old
, 0, REG_NOTES (insn
))
2717 /* Don't delete something that refers to volatile storage! */
2718 && ! INSN_VOLATILE (insn
));
2719 libcall_is_dead
= (insn_is_dead
&& note
!= 0
2720 && libcall_dead_p (PATTERN (insn
), old
, note
, insn
));
2723 /* If an instruction consists of just dead store(s) on final pass,
2724 "delete" it by turning it into a NOTE of type NOTE_INSN_DELETED.
2725 We could really delete it with delete_insn, but that
2726 can cause trouble for first or last insn in a basic block. */
2727 if (final
&& insn_is_dead
)
2729 PUT_CODE (insn
, NOTE
);
2730 NOTE_LINE_NUMBER (insn
) = NOTE_INSN_DELETED
;
2731 NOTE_SOURCE_FILE (insn
) = 0;
2733 /* CC0 is now known to be dead. Either this insn used it,
2734 in which case it doesn't anymore, or clobbered it,
2735 so the next insn can't use it. */
2738 /* If this insn is copying the return value from a library call,
2739 delete the entire library call. */
2740 if (libcall_is_dead
)
2742 rtx first
= XEXP (note
, 0);
2744 while (INSN_DELETED_P (first
))
2745 first
= NEXT_INSN (first
);
2750 NOTE_LINE_NUMBER (p
) = NOTE_INSN_DELETED
;
2751 NOTE_SOURCE_FILE (p
) = 0;
2757 CLEAR_REG_SET (dead
);
2758 CLEAR_REG_SET (live
);
2760 /* See if this is an increment or decrement that can be
2761 merged into a following memory address. */
2764 register rtx x
= single_set (insn
);
2766 /* Does this instruction increment or decrement a register? */
2767 if (!reload_completed
2769 && GET_CODE (SET_DEST (x
)) == REG
2770 && (GET_CODE (SET_SRC (x
)) == PLUS
2771 || GET_CODE (SET_SRC (x
)) == MINUS
)
2772 && XEXP (SET_SRC (x
), 0) == SET_DEST (x
)
2773 && GET_CODE (XEXP (SET_SRC (x
), 1)) == CONST_INT
2774 /* Ok, look for a following memory ref we can combine with.
2775 If one is found, change the memory ref to a PRE_INC
2776 or PRE_DEC, cancel this insn, and return 1.
2777 Return 0 if nothing has been done. */
2778 && try_pre_increment_1 (insn
))
2781 #endif /* AUTO_INC_DEC */
2783 /* If this is not the final pass, and this insn is copying the
2784 value of a library call and it's dead, don't scan the
2785 insns that perform the library call, so that the call's
2786 arguments are not marked live. */
2787 if (libcall_is_dead
)
2789 /* Mark the dest reg as `significant'. */
2790 mark_set_regs (old
, dead
, PATTERN (insn
), NULL_RTX
, significant
);
2792 insn
= XEXP (note
, 0);
2793 prev
= PREV_INSN (insn
);
2795 else if (GET_CODE (PATTERN (insn
)) == SET
2796 && SET_DEST (PATTERN (insn
)) == stack_pointer_rtx
2797 && GET_CODE (SET_SRC (PATTERN (insn
))) == PLUS
2798 && XEXP (SET_SRC (PATTERN (insn
)), 0) == stack_pointer_rtx
2799 && GET_CODE (XEXP (SET_SRC (PATTERN (insn
)), 1)) == CONST_INT
)
2800 /* We have an insn to pop a constant amount off the stack.
2801 (Such insns use PLUS regardless of the direction of the stack,
2802 and any insn to adjust the stack by a constant is always a pop.)
2803 These insns, if not dead stores, have no effect on life. */
2807 /* Any regs live at the time of a call instruction
2808 must not go in a register clobbered by calls.
2809 Find all regs now live and record this for them. */
2811 if (GET_CODE (insn
) == CALL_INSN
&& final
)
2812 EXECUTE_IF_SET_IN_REG_SET (old
, 0, i
,
2814 REG_N_CALLS_CROSSED (i
)++;
/* LIVE gets the regs used in INSN;
   DEAD gets those set by it.  Dead insns don't make anything live.  */
2821 mark_set_regs (old
, dead
, PATTERN (insn
),
2822 final
? insn
: NULL_RTX
, significant
);
2824 /* If an insn doesn't use CC0, it becomes dead since we
2825 assume that every insn clobbers it. So show it dead here;
2826 mark_used_regs will set it live if it is referenced. */
2830 mark_used_regs (old
, live
, PATTERN (insn
), final
, insn
);
/* Sometimes we may have inserted something before INSN (such as
   a move) when we make an auto-inc.  So ensure we will scan
   those insns.  */
2836 prev
= PREV_INSN (insn
);
2839 if (! insn_is_dead
&& GET_CODE (insn
) == CALL_INSN
)
2845 for (note
= CALL_INSN_FUNCTION_USAGE (insn
);
2847 note
= XEXP (note
, 1))
2848 if (GET_CODE (XEXP (note
, 0)) == USE
)
2849 mark_used_regs (old
, live
, SET_DEST (XEXP (note
, 0)),
2852 /* Each call clobbers all call-clobbered regs that are not
2853 global or fixed. Note that the function-value reg is a
2854 call-clobbered reg, and mark_set_regs has already had
2855 a chance to handle it. */
2857 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
2858 if (call_used_regs
[i
] && ! global_regs
[i
]
2860 SET_REGNO_REG_SET (dead
, i
);
2862 /* The stack ptr is used (honorarily) by a CALL insn. */
2863 SET_REGNO_REG_SET (live
, STACK_POINTER_REGNUM
);
2865 /* Calls may also reference any of the global registers,
2866 so they are made live. */
2867 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
2869 mark_used_regs (old
, live
,
2870 gen_rtx_REG (reg_raw_mode
[i
], i
),
2873 /* Calls also clobber memory. */
2874 mem_set_list
= NULL_RTX
;
2877 /* Update OLD for the registers used or set. */
2878 AND_COMPL_REG_SET (old
, dead
);
2879 IOR_REG_SET (old
, live
);
2883 /* On final pass, update counts of how many insns each reg is live
2886 EXECUTE_IF_SET_IN_REG_SET (old
, 0, i
,
2887 { REG_LIVE_LENGTH (i
)++; });
2894 FREE_REG_SET (dead
);
2895 FREE_REG_SET (live
);
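
/* propagate_block above walks a block's insns from last to first, updating
   the set of live regs at each step.  As a hedged illustration of just that
   backward transfer, here is the textbook update over a toy insn whose read
   and written registers are recorded as bit masks; the toy_* names are
   hypothetical and are not used by the pass.  */

struct toy_insn
{
  unsigned long def;		/* regs this insn sets */
  unsigned long use;		/* regs this insn reads */
};

/* Given LIVE = regs live just after INSN, return the regs live just before
   it: everything live after, minus what the insn sets, plus what it uses.  */
static unsigned long
toy_live_before (live, insn)
     unsigned long live;
     struct toy_insn *insn;
{
  return (live & ~insn->def) | insn->use;
}

/* An insn whose sets are all dead afterward is a dead-store candidate;
   compare insn_dead_p below, which additionally rejects volatile
   references, CALLs, frame-pointer sets and other special cases.  */
static int
toy_insn_is_dead (live, insn)
     unsigned long live;
     struct toy_insn *insn;
{
  return insn->def != 0 && (insn->def & live) == 0;
}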
2898 /* Return 1 if X (the body of an insn, or part of it) is just dead stores
2899 (SET expressions whose destinations are registers dead after the insn).
2900 NEEDED is the regset that says which regs are alive after the insn.
2902 Unless CALL_OK is non-zero, an insn is needed if it contains a CALL.
2904 If X is the entire body of an insn, NOTES contains the reg notes
2905 pertaining to the insn. */
2908 insn_dead_p (x
, needed
, call_ok
, notes
)
2912 rtx notes ATTRIBUTE_UNUSED
;
2914 enum rtx_code code
= GET_CODE (x
);
2917 /* If flow is invoked after reload, we must take existing AUTO_INC
expressions into account.  */
2919 if (reload_completed
)
2921 for ( ; notes
; notes
= XEXP (notes
, 1))
2923 if (REG_NOTE_KIND (notes
) == REG_INC
)
2925 int regno
= REGNO (XEXP (notes
, 0));
2927 /* Don't delete insns to set global regs. */
2928 if ((regno
< FIRST_PSEUDO_REGISTER
&& global_regs
[regno
])
2929 || REGNO_REG_SET_P (needed
, regno
))
2936 /* If setting something that's a reg or part of one,
2937 see if that register's altered value will be live. */
2941 rtx r
= SET_DEST (x
);
2943 /* A SET that is a subroutine call cannot be dead. */
2944 if (! call_ok
&& GET_CODE (SET_SRC (x
)) == CALL
)
2948 if (GET_CODE (r
) == CC0
)
2952 if (GET_CODE (r
) == MEM
&& ! MEM_VOLATILE_P (r
))
2955 /* Walk the set of memory locations we are currently tracking
2956 and see if one is an identical match to this memory location.
2957 If so, this memory write is dead (remember, we're walking
backwards from the end of the block to the start).  */
2959 temp
= mem_set_list
;
2962 if (rtx_equal_p (XEXP (temp
, 0), r
))
2964 temp
= XEXP (temp
, 1);
2968 while (GET_CODE (r
) == SUBREG
|| GET_CODE (r
) == STRICT_LOW_PART
2969 || GET_CODE (r
) == ZERO_EXTRACT
)
2972 if (GET_CODE (r
) == REG
)
2974 int regno
= REGNO (r
);
2976 /* Don't delete insns to set global regs. */
2977 if ((regno
< FIRST_PSEUDO_REGISTER
&& global_regs
[regno
])
2978 /* Make sure insns to set frame pointer aren't deleted. */
2979 || (regno
== FRAME_POINTER_REGNUM
2980 && (! reload_completed
|| frame_pointer_needed
))
2981 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2982 || (regno
== HARD_FRAME_POINTER_REGNUM
2983 && (! reload_completed
|| frame_pointer_needed
))
2985 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
2986 /* Make sure insns to set arg pointer are never deleted
2987 (if the arg pointer isn't fixed, there will be a USE for
2988 it, so we can treat it normally). */
2989 || (regno
== ARG_POINTER_REGNUM
&& fixed_regs
[regno
])
2991 || REGNO_REG_SET_P (needed
, regno
))
2994 /* If this is a hard register, verify that subsequent words are
2996 if (regno
< FIRST_PSEUDO_REGISTER
)
2998 int n
= HARD_REGNO_NREGS (regno
, GET_MODE (r
));
3001 if (REGNO_REG_SET_P (needed
, regno
+n
))
3009 /* If performing several activities,
3010 insn is dead if each activity is individually dead.
3011 Also, CLOBBERs and USEs can be ignored; a CLOBBER or USE
3012 that's inside a PARALLEL doesn't make the insn worth keeping. */
3013 else if (code
== PARALLEL
)
3015 int i
= XVECLEN (x
, 0);
3017 for (i
--; i
>= 0; i
--)
3018 if (GET_CODE (XVECEXP (x
, 0, i
)) != CLOBBER
3019 && GET_CODE (XVECEXP (x
, 0, i
)) != USE
3020 && ! insn_dead_p (XVECEXP (x
, 0, i
), needed
, call_ok
, NULL_RTX
))
3026 /* A CLOBBER of a pseudo-register that is dead serves no purpose. That
3027 is not necessarily true for hard registers. */
3028 else if (code
== CLOBBER
&& GET_CODE (XEXP (x
, 0)) == REG
3029 && REGNO (XEXP (x
, 0)) >= FIRST_PSEUDO_REGISTER
3030 && ! REGNO_REG_SET_P (needed
, REGNO (XEXP (x
, 0))))
3033 /* We do not check other CLOBBER or USE here. An insn consisting of just
3034 a CLOBBER or just a USE should not be deleted. */
3038 /* If X is the pattern of the last insn in a libcall, and assuming X is dead,
3039 return 1 if the entire library call is dead.
3040 This is true if X copies a register (hard or pseudo)
3041 and if the hard return reg of the call insn is dead.
3042 (The caller should have tested the destination of X already for death.)
3044 If this insn doesn't just copy a register, then we don't
3045 have an ordinary libcall. In that case, cse could not have
3046 managed to substitute the source for the dest later on,
3047 so we can assume the libcall is dead.
3049 NEEDED is the bit vector of pseudoregs live before this insn.
3050 NOTE is the REG_RETVAL note of the insn. INSN is the insn itself. */
3053 libcall_dead_p (x
, needed
, note
, insn
)
3059 register RTX_CODE code
= GET_CODE (x
);
3063 register rtx r
= SET_SRC (x
);
3064 if (GET_CODE (r
) == REG
)
3066 rtx call
= XEXP (note
, 0);
3070 /* Find the call insn. */
3071 while (call
!= insn
&& GET_CODE (call
) != CALL_INSN
)
3072 call
= NEXT_INSN (call
);
3074 /* If there is none, do nothing special,
3075 since ordinary death handling can understand these insns. */
3079 /* See if the hard reg holding the value is dead.
3080 If this is a PARALLEL, find the call within it. */
3081 call_pat
= PATTERN (call
);
3082 if (GET_CODE (call_pat
) == PARALLEL
)
3084 for (i
= XVECLEN (call_pat
, 0) - 1; i
>= 0; i
--)
3085 if (GET_CODE (XVECEXP (call_pat
, 0, i
)) == SET
3086 && GET_CODE (SET_SRC (XVECEXP (call_pat
, 0, i
))) == CALL
)
3089 /* This may be a library call that is returning a value
3090 via invisible pointer. Do nothing special, since
3091 ordinary death handling can understand these insns. */
3095 call_pat
= XVECEXP (call_pat
, 0, i
);
3098 return insn_dead_p (call_pat
, needed
, 1, REG_NOTES (call
));
3104 /* Return 1 if register REGNO was used before it was set, i.e. if it is
3105 live at function entry. Don't count global register variables, variables
3106 in registers that can be used for function arg passing, or variables in
3107 fixed hard registers. */
3110 regno_uninitialized (regno
)
3113 if (n_basic_blocks
== 0
3114 || (regno
< FIRST_PSEUDO_REGISTER
3115 && (global_regs
[regno
]
3116 || fixed_regs
[regno
]
3117 || FUNCTION_ARG_REGNO_P (regno
))))
3120 return REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start
, regno
);
3123 /* 1 if register REGNO was alive at a place where `setjmp' was called
3124 and was set more than once or is an argument.
3125 Such regs may be clobbered by `longjmp'. */
3128 regno_clobbered_at_setjmp (regno
)
3131 if (n_basic_blocks
== 0)
3134 return ((REG_N_SETS (regno
) > 1
3135 || REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start
, regno
))
3136 && REGNO_REG_SET_P (regs_live_at_setjmp
, regno
));
3139 /* INSN references memory, possibly using autoincrement addressing modes.
3140 Find any entries on the mem_set_list that need to be invalidated due
3141 to an address change. */
3143 invalidate_mems_from_autoinc (insn
)
3146 rtx note
= REG_NOTES (insn
);
3147 for (note
= REG_NOTES (insn
); note
; note
= XEXP (note
, 1))
3149 if (REG_NOTE_KIND (note
) == REG_INC
)
3151 rtx temp
= mem_set_list
;
3152 rtx prev
= NULL_RTX
;
3156 if (reg_overlap_mentioned_p (XEXP (note
, 0), XEXP (temp
, 0)))
3158 /* Splice temp out of list. */
3160 XEXP (prev
, 1) = XEXP (temp
, 1);
3162 mem_set_list
= XEXP (temp
, 1);
3166 temp
= XEXP (temp
, 1);
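
/* mark_set_1 and mark_used_regs below splice stale entries out of the
   singly linked mem_set_list exactly the way the loop above does, carrying
   a trailing PREV pointer.  A hedged, generic sketch of that idiom over a
   toy list type (all toy_* names are hypothetical):  */

struct toy_node
{
  int key;
  struct toy_node *next;
};

/* Remove from *HEAD every node whose key equals KEY, keeping the rest of
   the list intact.  */
static void
toy_splice_out (head, key)
     struct toy_node **head;
     int key;
{
  struct toy_node *temp = *head;
  struct toy_node *prev = 0;

  while (temp)
    {
      struct toy_node *next = temp->next;

      if (temp->key == key)
	{
	  /* Splice TEMP out of the list, as the pass does for MEMs
	     invalidated by an aliasing store or an address change.  */
	  if (prev)
	    prev->next = next;
	  else
	    *head = next;
	}
      else
	prev = temp;

      temp = next;
    }
}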
3172 /* Process the registers that are set within X.
3173 Their bits are set to 1 in the regset DEAD,
3174 because they are dead prior to this insn.
3176 If INSN is nonzero, it is the insn being processed
3177 and the fact that it is nonzero implies this is the FINAL pass
3178 in propagate_block. In this case, various info about register
3179 usage is stored, LOG_LINKS fields of insns are set up. */
3182 mark_set_regs (needed
, dead
, x
, insn
, significant
)
3189 register RTX_CODE code
= GET_CODE (x
);
3191 if (code
== SET
|| code
== CLOBBER
)
3192 mark_set_1 (needed
, dead
, x
, insn
, significant
);
3193 else if (code
== PARALLEL
)
3196 for (i
= XVECLEN (x
, 0) - 1; i
>= 0; i
--)
3198 code
= GET_CODE (XVECEXP (x
, 0, i
));
3199 if (code
== SET
|| code
== CLOBBER
)
3200 mark_set_1 (needed
, dead
, XVECEXP (x
, 0, i
), insn
, significant
);
3205 /* Process a single SET rtx, X. */
3208 mark_set_1 (needed
, dead
, x
, insn
, significant
)
3216 register rtx reg
= SET_DEST (x
);
3218 /* Some targets place small structures in registers for
3219 return values of functions. We have to detect this
3220 case specially here to get correct flow information. */
3221 if (GET_CODE (reg
) == PARALLEL
3222 && GET_MODE (reg
) == BLKmode
)
3226 for (i
= XVECLEN (reg
, 0) - 1; i
>= 0; i
--)
3227 mark_set_1 (needed
, dead
, XVECEXP (reg
, 0, i
), insn
, significant
);
/* Modifying just one hardware register of a multi-reg value
   or just a byte field of a register
   does not mean the value from before this insn is now dead.
   But it does mean liveness of that register at the end of the block
   is significant.

   Within mark_set_1, however, we treat it as if the register is
   indeed modified.  mark_used_regs will, however, also treat this
   register as being used.  Thus, we treat these insns as setting a
   new value for the register as a function of its old value.  This
   causes LOG_LINKS to be made appropriately and this will help combine.  */
3243 while (GET_CODE (reg
) == SUBREG
|| GET_CODE (reg
) == ZERO_EXTRACT
3244 || GET_CODE (reg
) == SIGN_EXTRACT
3245 || GET_CODE (reg
) == STRICT_LOW_PART
)
3246 reg
= XEXP (reg
, 0);
3248 /* If this set is a MEM, then it kills any aliased writes.
3249 If this set is a REG, then it kills any MEMs which use the reg. */
3250 if (GET_CODE (reg
) == MEM
3251 || GET_CODE (reg
) == REG
)
3253 rtx temp
= mem_set_list
;
3254 rtx prev
= NULL_RTX
;
3258 if ((GET_CODE (reg
) == MEM
3259 && output_dependence (XEXP (temp
, 0), reg
))
3260 || (GET_CODE (reg
) == REG
3261 && reg_overlap_mentioned_p (reg
, XEXP (temp
, 0))))
3263 /* Splice this entry out of the list. */
3265 XEXP (prev
, 1) = XEXP (temp
, 1);
3267 mem_set_list
= XEXP (temp
, 1);
3271 temp
= XEXP (temp
, 1);
/* If the memory reference had embedded side effects (autoincrement
   address modes), then we may need to kill some entries on the
   memory set list.  */
3278 if (insn
&& GET_CODE (reg
) == MEM
)
3279 invalidate_mems_from_autoinc (insn
);
3281 if (GET_CODE (reg
) == MEM
&& ! side_effects_p (reg
)
3282 /* We do not know the size of a BLKmode store, so we do not track
3283 them for redundant store elimination. */
3284 && GET_MODE (reg
) != BLKmode
3285 /* There are no REG_INC notes for SP, so we can't assume we'll see
3286 everything that invalidates it. To be safe, don't eliminate any
stores through SP; none of them should be redundant anyway. */
3288 && ! reg_mentioned_p (stack_pointer_rtx
, reg
))
3289 mem_set_list
= gen_rtx_EXPR_LIST (VOIDmode
, reg
, mem_set_list
);
3291 if (GET_CODE (reg
) == REG
3292 && (regno
= REGNO (reg
), ! (regno
== FRAME_POINTER_REGNUM
3293 && (! reload_completed
|| frame_pointer_needed
)))
3294 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3295 && ! (regno
== HARD_FRAME_POINTER_REGNUM
3296 && (! reload_completed
|| frame_pointer_needed
))
3298 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3299 && ! (regno
== ARG_POINTER_REGNUM
&& fixed_regs
[regno
])
3301 && ! (regno
< FIRST_PSEUDO_REGISTER
&& global_regs
[regno
]))
3302 /* && regno != STACK_POINTER_REGNUM) -- let's try without this. */
3304 int some_needed
= REGNO_REG_SET_P (needed
, regno
);
3305 int some_not_needed
= ! some_needed
;
3307 /* Mark it as a significant register for this basic block. */
3309 SET_REGNO_REG_SET (significant
, regno
);
3311 /* Mark it as dead before this insn. */
3312 SET_REGNO_REG_SET (dead
, regno
);
3314 /* A hard reg in a wide mode may really be multiple registers.
3315 If so, mark all of them just like the first. */
3316 if (regno
< FIRST_PSEUDO_REGISTER
)
3320 /* Nothing below is needed for the stack pointer; get out asap.
3321 Eg, log links aren't needed, since combine won't use them. */
3322 if (regno
== STACK_POINTER_REGNUM
)
3325 n
= HARD_REGNO_NREGS (regno
, GET_MODE (reg
));
3328 int regno_n
= regno
+ n
;
3329 int needed_regno
= REGNO_REG_SET_P (needed
, regno_n
);
3331 SET_REGNO_REG_SET (significant
, regno_n
);
3333 SET_REGNO_REG_SET (dead
, regno_n
);
3334 some_needed
|= needed_regno
;
3335 some_not_needed
|= ! needed_regno
;
3338 /* Additional data to record if this is the final pass. */
3341 register rtx y
= reg_next_use
[regno
];
3342 register int blocknum
= BLOCK_NUM (insn
);
3344 /* If this is a hard reg, record this function uses the reg. */
3346 if (regno
< FIRST_PSEUDO_REGISTER
)
3349 int endregno
= regno
+ HARD_REGNO_NREGS (regno
, GET_MODE (reg
));
3351 for (i
= regno
; i
< endregno
; i
++)
/* The next use is no longer "next", since a store intervenes.  */
3355 reg_next_use
[i
] = 0;
3357 regs_ever_live
[i
] = 1;
/* The next use is no longer "next", since a store intervenes.  */
3365 reg_next_use
[regno
] = 0;
3367 /* Keep track of which basic blocks each reg appears in. */
3369 if (REG_BASIC_BLOCK (regno
) == REG_BLOCK_UNKNOWN
)
3370 REG_BASIC_BLOCK (regno
) = blocknum
;
3371 else if (REG_BASIC_BLOCK (regno
) != blocknum
)
3372 REG_BASIC_BLOCK (regno
) = REG_BLOCK_GLOBAL
;
3374 /* Count (weighted) references, stores, etc. This counts a
3375 register twice if it is modified, but that is correct. */
3376 REG_N_SETS (regno
)++;
3378 REG_N_REFS (regno
) += loop_depth
;
3380 /* The insns where a reg is live are normally counted
3381 elsewhere, but we want the count to include the insn
3382 where the reg is set, and the normal counting mechanism
3383 would not count it. */
3384 REG_LIVE_LENGTH (regno
)++;
3387 if (! some_not_needed
)
3389 /* Make a logical link from the next following insn
3390 that uses this register, back to this insn.
3391 The following insns have already been processed.
We don't build a LOG_LINK for hard registers contained
in ASM_OPERANDs.  If these registers get replaced,
we might wind up changing the semantics of the insn,
even if reload can make what appear to be valid assignments
later.  */
3398 if (y
&& (BLOCK_NUM (y
) == blocknum
)
3399 && (regno
>= FIRST_PSEUDO_REGISTER
3400 || asm_noperands (PATTERN (y
)) < 0))
3402 = gen_rtx_INSN_LIST (VOIDmode
, insn
, LOG_LINKS (y
));
3404 else if (! some_needed
)
/* Note that dead stores have already been deleted when possible.
3407 If we get here, we have found a dead store that cannot
3408 be eliminated (because the same insn does something useful).
3409 Indicate this by marking the reg being set as dying here. */
3411 = gen_rtx_EXPR_LIST (REG_UNUSED
, reg
, REG_NOTES (insn
));
3412 REG_N_DEATHS (REGNO (reg
))++;
3416 /* This is a case where we have a multi-word hard register
3417 and some, but not all, of the words of the register are
3418 needed in subsequent insns. Write REG_UNUSED notes
for those parts that were not needed.  This case should be rare.  */
3424 for (i
= HARD_REGNO_NREGS (regno
, GET_MODE (reg
)) - 1;
3426 if (!REGNO_REG_SET_P (needed
, regno
+ i
))
3428 = gen_rtx_EXPR_LIST (REG_UNUSED
,
3429 gen_rtx_REG (reg_raw_mode
[regno
+ i
],
3435 else if (GET_CODE (reg
) == REG
)
3436 reg_next_use
[regno
] = 0;
3438 /* If this is the last pass and this is a SCRATCH, show it will be dying
3439 here and count it. */
3440 else if (GET_CODE (reg
) == SCRATCH
&& insn
!= 0)
3443 = gen_rtx_EXPR_LIST (REG_UNUSED
, reg
, REG_NOTES (insn
));
3449 /* X is a MEM found in INSN. See if we can convert it into an auto-increment
3453 find_auto_inc (needed
, x
, insn
)
3458 rtx addr
= XEXP (x
, 0);
3459 HOST_WIDE_INT offset
= 0;
3462 /* Here we detect use of an index register which might be good for
3463 postincrement, postdecrement, preincrement, or predecrement. */
3465 if (GET_CODE (addr
) == PLUS
&& GET_CODE (XEXP (addr
, 1)) == CONST_INT
)
3466 offset
= INTVAL (XEXP (addr
, 1)), addr
= XEXP (addr
, 0);
3468 if (GET_CODE (addr
) == REG
)
3471 register int size
= GET_MODE_SIZE (GET_MODE (x
));
3474 int regno
= REGNO (addr
);
3476 /* Is the next use an increment that might make auto-increment? */
3477 if ((incr
= reg_next_use
[regno
]) != 0
3478 && (set
= single_set (incr
)) != 0
3479 && GET_CODE (set
) == SET
3480 && BLOCK_NUM (incr
) == BLOCK_NUM (insn
)
3481 /* Can't add side effects to jumps; if reg is spilled and
3482 reloaded, there's no way to store back the altered value. */
3483 && GET_CODE (insn
) != JUMP_INSN
3484 && (y
= SET_SRC (set
), GET_CODE (y
) == PLUS
)
3485 && XEXP (y
, 0) == addr
3486 && GET_CODE (XEXP (y
, 1)) == CONST_INT
3487 && ((HAVE_POST_INCREMENT
3488 && (INTVAL (XEXP (y
, 1)) == size
&& offset
== 0))
3489 || (HAVE_POST_DECREMENT
3490 && (INTVAL (XEXP (y
, 1)) == - size
&& offset
== 0))
3491 || (HAVE_PRE_INCREMENT
3492 && (INTVAL (XEXP (y
, 1)) == size
&& offset
== size
))
3493 || (HAVE_PRE_DECREMENT
3494 && (INTVAL (XEXP (y
, 1)) == - size
&& offset
== - size
)))
3495 /* Make sure this reg appears only once in this insn. */
3496 && (use
= find_use_as_address (PATTERN (insn
), addr
, offset
),
3497 use
!= 0 && use
!= (rtx
) 1))
3499 rtx q
= SET_DEST (set
);
3500 enum rtx_code inc_code
= (INTVAL (XEXP (y
, 1)) == size
3501 ? (offset
? PRE_INC
: POST_INC
)
3502 : (offset
? PRE_DEC
: POST_DEC
));
3504 if (dead_or_set_p (incr
, addr
))
3506 /* This is the simple case. Try to make the auto-inc. If
3507 we can't, we are done. Otherwise, we will do any
3508 needed updates below. */
3509 if (! validate_change (insn
, &XEXP (x
, 0),
3510 gen_rtx_fmt_e (inc_code
, Pmode
, addr
),
3514 else if (GET_CODE (q
) == REG
/* PREV_INSN used here to check the semi-open interval [INSN, INCR).  */
3517 && ! reg_used_between_p (q
, PREV_INSN (insn
), incr
)
3518 /* We must also check for sets of q as q may be
3519 a call clobbered hard register and there may
3520 be a call between PREV_INSN (insn) and incr. */
3521 && ! reg_set_between_p (q
, PREV_INSN (insn
), incr
))
3523 /* We have *p followed sometime later by q = p+size.
3524 Both p and q must be live afterward,
3525 and q is not used between INSN and its assignment.
3526 Change it to q = p, ...*q..., q = q+size.
3527 Then fall into the usual case. */
3532 emit_move_insn (q
, addr
);
3533 insns
= get_insns ();
3536 bb
= BLOCK_FOR_INSN (insn
);
3537 for (temp
= insns
; temp
; temp
= NEXT_INSN (temp
))
3538 set_block_for_insn (temp
, bb
);
3540 /* If we can't make the auto-inc, or can't make the
3541 replacement into Y, exit. There's no point in making
3542 the change below if we can't do the auto-inc and doing
3543 so is not correct in the pre-inc case. */
3545 validate_change (insn
, &XEXP (x
, 0),
3546 gen_rtx_fmt_e (inc_code
, Pmode
, q
),
3548 validate_change (incr
, &XEXP (y
, 0), q
, 1);
3549 if (! apply_change_group ())
3552 /* We now know we'll be doing this change, so emit the
3553 new insn(s) and do the updates. */
3554 emit_insns_before (insns
, insn
);
3556 if (BLOCK_FOR_INSN (insn
)->head
== insn
)
3557 BLOCK_FOR_INSN (insn
)->head
= insns
;
3559 /* INCR will become a NOTE and INSN won't contain a
3560 use of ADDR. If a use of ADDR was just placed in
3561 the insn before INSN, make that the next use.
3562 Otherwise, invalidate it. */
3563 if (GET_CODE (PREV_INSN (insn
)) == INSN
3564 && GET_CODE (PATTERN (PREV_INSN (insn
))) == SET
3565 && SET_SRC (PATTERN (PREV_INSN (insn
))) == addr
)
3566 reg_next_use
[regno
] = PREV_INSN (insn
);
3568 reg_next_use
[regno
] = 0;
3573 /* REGNO is now used in INCR which is below INSN, but
3574 it previously wasn't live here. If we don't mark
3575 it as needed, we'll put a REG_DEAD note for it
3576 on this insn, which is incorrect. */
3577 SET_REGNO_REG_SET (needed
, regno
);
3579 /* If there are any calls between INSN and INCR, show
3580 that REGNO now crosses them. */
3581 for (temp
= insn
; temp
!= incr
; temp
= NEXT_INSN (temp
))
3582 if (GET_CODE (temp
) == CALL_INSN
)
3583 REG_N_CALLS_CROSSED (regno
)++;
3588 /* If we haven't returned, it means we were able to make the
3589 auto-inc, so update the status. First, record that this insn
3590 has an implicit side effect. */
3593 = gen_rtx_EXPR_LIST (REG_INC
, addr
, REG_NOTES (insn
));
3595 /* Modify the old increment-insn to simply copy
3596 the already-incremented value of our register. */
3597 if (! validate_change (incr
, &SET_SRC (set
), addr
, 0))
/* If that makes it a no-op (copying the register into itself), delete
   it so it won't appear to be a "use" and a "set" of this
   register.  */
3603 if (SET_DEST (set
) == addr
)
3605 PUT_CODE (incr
, NOTE
);
3606 NOTE_LINE_NUMBER (incr
) = NOTE_INSN_DELETED
;
3607 NOTE_SOURCE_FILE (incr
) = 0;
3610 if (regno
>= FIRST_PSEUDO_REGISTER
)
3612 /* Count an extra reference to the reg. When a reg is
3613 incremented, spilling it is worse, so we want to make
3614 that less likely. */
3615 REG_N_REFS (regno
) += loop_depth
;
3617 /* Count the increment as a setting of the register,
3618 even though it isn't a SET in rtl. */
3619 REG_N_SETS (regno
)++;
3624 #endif /* AUTO_INC_DEC */
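
/* find_auto_inc above accepts an (address register, constant offset,
   increment) combination only when it matches one of four shapes and the
   target supports the corresponding form (HAVE_POST_INCREMENT and friends).
   As a hedged summary of just the arithmetic tests, here is a hypothetical
   classifier over plain integers; SIZE is the access size in bytes and the
   toy_* names are illustrative only.  */

enum toy_inc_form
{
  TOY_NONE, TOY_POST_INC, TOY_POST_DEC, TOY_PRE_INC, TOY_PRE_DEC
};

static enum toy_inc_form
toy_classify_auto_inc (inc, offset, size)
     long inc, offset, size;
{
  if (inc == size && offset == 0)
    return TOY_POST_INC;	/* *r; r += size         =>  *(r++)  */
  if (inc == -size && offset == 0)
    return TOY_POST_DEC;	/* *r; r -= size         =>  *(r--)  */
  if (inc == size && offset == size)
    return TOY_PRE_INC;		/* *(r+size); r += size  =>  *(++r)  */
  if (inc == -size && offset == -size)
    return TOY_PRE_DEC;		/* *(r-size); r -= size  =>  *(--r)  */
  return TOY_NONE;		/* no auto-increment opportunity */
}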
3626 /* Scan expression X and store a 1-bit in LIVE for each reg it uses.
3627 This is done assuming the registers needed from X
3628 are those that have 1-bits in NEEDED.
3630 On the final pass, FINAL is 1. This means try for autoincrement
3631 and count the uses and deaths of each pseudo-reg.
INSN is the containing instruction.  If INSN is dead, this function is not
called.  */
3637 mark_used_regs (needed
, live
, x
, final
, insn
)
3644 register RTX_CODE code
;
3649 code
= GET_CODE (x
);
3669 /* If we are clobbering a MEM, mark any registers inside the address
3671 if (GET_CODE (XEXP (x
, 0)) == MEM
)
3672 mark_used_regs (needed
, live
, XEXP (XEXP (x
, 0), 0), final
, insn
);
3676 /* Invalidate the data for the last MEM stored, but only if MEM is
3677 something that can be stored into. */
3678 if (GET_CODE (XEXP (x
, 0)) == SYMBOL_REF
3679 && CONSTANT_POOL_ADDRESS_P (XEXP (x
, 0)))
3680 ; /* needn't clear the memory set list */
3683 rtx temp
= mem_set_list
;
3684 rtx prev
= NULL_RTX
;
3688 if (anti_dependence (XEXP (temp
, 0), x
))
3690 /* Splice temp out of the list. */
3692 XEXP (prev
, 1) = XEXP (temp
, 1);
3694 mem_set_list
= XEXP (temp
, 1);
3698 temp
= XEXP (temp
, 1);
/* If the memory reference had embedded side effects (autoincrement
   address modes), then we may need to kill some entries on the
   memory set list.  */
if (insn)
3706 invalidate_mems_from_autoinc (insn
);
3710 find_auto_inc (needed
, x
, insn
);
3715 if (GET_CODE (SUBREG_REG (x
)) == REG
3716 && REGNO (SUBREG_REG (x
)) >= FIRST_PSEUDO_REGISTER
3717 && (GET_MODE_SIZE (GET_MODE (x
))
3718 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
)))))
3719 REG_CHANGES_SIZE (REGNO (SUBREG_REG (x
))) = 1;
3721 /* While we're here, optimize this case. */
3724 /* In case the SUBREG is not of a register, don't optimize */
3725 if (GET_CODE (x
) != REG
)
3727 mark_used_regs (needed
, live
, x
, final
, insn
);
3731 /* ... fall through ... */
/* See a register used other than being set
   => mark it as needed.  */
3739 int some_needed
= REGNO_REG_SET_P (needed
, regno
);
3740 int some_not_needed
= ! some_needed
;
3742 SET_REGNO_REG_SET (live
, regno
);
3744 /* A hard reg in a wide mode may really be multiple registers.
3745 If so, mark all of them just like the first. */
3746 if (regno
< FIRST_PSEUDO_REGISTER
)
3750 /* For stack ptr or fixed arg pointer,
3751 nothing below can be necessary, so waste no more time. */
3752 if (regno
== STACK_POINTER_REGNUM
3753 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3754 || (regno
== HARD_FRAME_POINTER_REGNUM
3755 && (! reload_completed
|| frame_pointer_needed
))
3757 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3758 || (regno
== ARG_POINTER_REGNUM
&& fixed_regs
[regno
])
3760 || (regno
== FRAME_POINTER_REGNUM
3761 && (! reload_completed
|| frame_pointer_needed
)))
3763 /* If this is a register we are going to try to eliminate,
3764 don't mark it live here. If we are successful in
3765 eliminating it, it need not be live unless it is used for
3766 pseudos, in which case it will have been set live when
3767 it was allocated to the pseudos. If the register will not
3768 be eliminated, reload will set it live at that point. */
3770 if (! TEST_HARD_REG_BIT (elim_reg_set
, regno
))
3771 regs_ever_live
[regno
] = 1;
3774 /* No death notes for global register variables;
3775 their values are live after this function exits. */
3776 if (global_regs
[regno
])
3779 reg_next_use
[regno
] = insn
;
3783 n
= HARD_REGNO_NREGS (regno
, GET_MODE (x
));
3786 int regno_n
= regno
+ n
;
3787 int needed_regno
= REGNO_REG_SET_P (needed
, regno_n
);
3789 SET_REGNO_REG_SET (live
, regno_n
);
3790 some_needed
|= needed_regno
;
3791 some_not_needed
|= ! needed_regno
;
3796 /* Record where each reg is used, so when the reg
3797 is set we know the next insn that uses it. */
3799 reg_next_use
[regno
] = insn
;
3801 if (regno
< FIRST_PSEUDO_REGISTER
)
3803 /* If a hard reg is being used,
3804 record that this function does use it. */
3806 i
= HARD_REGNO_NREGS (regno
, GET_MODE (x
));
3810 regs_ever_live
[regno
+ --i
] = 1;
3815 /* Keep track of which basic block each reg appears in. */
3817 register int blocknum
= BLOCK_NUM (insn
);
3819 if (REG_BASIC_BLOCK (regno
) == REG_BLOCK_UNKNOWN
)
3820 REG_BASIC_BLOCK (regno
) = blocknum
;
3821 else if (REG_BASIC_BLOCK (regno
) != blocknum
)
3822 REG_BASIC_BLOCK (regno
) = REG_BLOCK_GLOBAL
;
3824 /* Count (weighted) number of uses of each reg. */
3826 REG_N_REFS (regno
) += loop_depth
;
3829 /* Record and count the insns in which a reg dies.
3830 If it is used in this insn and was dead below the insn
3831 then it dies in this insn. If it was set in this insn,
3832 we do not make a REG_DEAD note; likewise if we already
3833 made such a note. */
3836 && ! dead_or_set_p (insn
, x
)
3838 && (regno
>= FIRST_PSEUDO_REGISTER
|| ! fixed_regs
[regno
])
3842 /* Check for the case where the register dying partially
3843 overlaps the register set by this insn. */
3844 if (regno
< FIRST_PSEUDO_REGISTER
3845 && HARD_REGNO_NREGS (regno
, GET_MODE (x
)) > 1)
3847 int n
= HARD_REGNO_NREGS (regno
, GET_MODE (x
));
3849 some_needed
|= dead_or_set_regno_p (insn
, regno
+ n
);
3852 /* If none of the words in X is needed, make a REG_DEAD
3853 note. Otherwise, we must make partial REG_DEAD notes. */
3857 = gen_rtx_EXPR_LIST (REG_DEAD
, x
, REG_NOTES (insn
));
3858 REG_N_DEATHS (regno
)++;
3864 /* Don't make a REG_DEAD note for a part of a register
3865 that is set in the insn. */
3867 for (i
= HARD_REGNO_NREGS (regno
, GET_MODE (x
)) - 1;
3869 if (!REGNO_REG_SET_P (needed
, regno
+ i
)
3870 && ! dead_or_set_regno_p (insn
, regno
+ i
))
3872 = gen_rtx_EXPR_LIST (REG_DEAD
,
3873 gen_rtx_REG (reg_raw_mode
[regno
+ i
],
3884 register rtx testreg
= SET_DEST (x
);
3887 /* If storing into MEM, don't show it as being used. But do
3888 show the address as being used. */
3889 if (GET_CODE (testreg
) == MEM
)
3893 find_auto_inc (needed
, testreg
, insn
);
3895 mark_used_regs (needed
, live
, XEXP (testreg
, 0), final
, insn
);
3896 mark_used_regs (needed
, live
, SET_SRC (x
), final
, insn
);
3900 /* Storing in STRICT_LOW_PART is like storing in a reg
in that this SET might be dead, so ignore it in TESTREG,
but in some other ways it is like using the reg.
3904 Storing in a SUBREG or a bit field is like storing the entire
3905 register in that if the register's value is not used
3906 then this SET is not needed. */
3907 while (GET_CODE (testreg
) == STRICT_LOW_PART
3908 || GET_CODE (testreg
) == ZERO_EXTRACT
3909 || GET_CODE (testreg
) == SIGN_EXTRACT
3910 || GET_CODE (testreg
) == SUBREG
)
3912 if (GET_CODE (testreg
) == SUBREG
3913 && GET_CODE (SUBREG_REG (testreg
)) == REG
3914 && REGNO (SUBREG_REG (testreg
)) >= FIRST_PSEUDO_REGISTER
3915 && (GET_MODE_SIZE (GET_MODE (testreg
))
3916 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (testreg
)))))
3917 REG_CHANGES_SIZE (REGNO (SUBREG_REG (testreg
))) = 1;
3919 /* Modifying a single register in an alternate mode
3920 does not use any of the old value. But these other
3921 ways of storing in a register do use the old value. */
3922 if (GET_CODE (testreg
) == SUBREG
3923 && !(REG_SIZE (SUBREG_REG (testreg
)) > REG_SIZE (testreg
)))
3928 testreg
= XEXP (testreg
, 0);
3931 /* If this is a store into a register,
3932 recursively scan the value being stored. */
3934 if ((GET_CODE (testreg
) == PARALLEL
3935 && GET_MODE (testreg
) == BLKmode
)
3936 || (GET_CODE (testreg
) == REG
3937 && (regno
= REGNO (testreg
), ! (regno
== FRAME_POINTER_REGNUM
3938 && (! reload_completed
|| frame_pointer_needed
)))
3939 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3940 && ! (regno
== HARD_FRAME_POINTER_REGNUM
3941 && (! reload_completed
|| frame_pointer_needed
))
3943 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3944 && ! (regno
== ARG_POINTER_REGNUM
&& fixed_regs
[regno
])
3947 /* We used to exclude global_regs here, but that seems wrong.
3948 Storing in them is like storing in mem. */
3950 mark_used_regs (needed
, live
, SET_SRC (x
), final
, insn
);
3952 mark_used_regs (needed
, live
, SET_DEST (x
), final
, insn
);
3959 /* If exiting needs the right stack value, consider this insn as
3960 using the stack pointer. In any event, consider it as using
3961 all global registers and all registers used by return. */
3962 if (! EXIT_IGNORE_STACK
3963 || (! FRAME_POINTER_REQUIRED
3964 && ! current_function_calls_alloca
3965 && flag_omit_frame_pointer
)
3966 || current_function_sp_is_unchanging
)
3967 SET_REGNO_REG_SET (live
, STACK_POINTER_REGNUM
);
3969 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
3971 #ifdef EPILOGUE_USES
3972 || EPILOGUE_USES (i
)
3975 SET_REGNO_REG_SET (live
, i
);
3979 case UNSPEC_VOLATILE
:
3983 /* Traditional and volatile asm instructions must be considered to use
3984 and clobber all hard registers, all pseudo-registers and all of
3985 memory. So must TRAP_IF and UNSPEC_VOLATILE operations.
3987 Consider for instance a volatile asm that changes the fpu rounding
3988 mode. An insn should not be moved across this even if it only uses
3989 pseudo-regs because it might give an incorrectly rounded result.
3991 ?!? Unfortunately, marking all hard registers as live causes massive
3992 problems for the register allocator and marking all pseudos as live
3993 creates mountains of uninitialized variable warnings.
3995 So for now, just clear the memory set list and mark any regs
3996 we can find in ASM_OPERANDS as used. */
3997 if (code
!= ASM_OPERANDS
|| MEM_VOLATILE_P (x
))
3998 mem_set_list
= NULL_RTX
;
4000 /* For all ASM_OPERANDS, we must traverse the vector of input operands.
4001 We can not just fall through here since then we would be confused
4002 by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate
4003 traditional asms unlike their normal usage. */
4004 if (code
== ASM_OPERANDS
)
4008 for (j
= 0; j
< ASM_OPERANDS_INPUT_LENGTH (x
); j
++)
4009 mark_used_regs (needed
, live
, ASM_OPERANDS_INPUT (x
, j
),
4020 /* Recursively scan the operands of this expression. */
4023 register const char *fmt
= GET_RTX_FORMAT (code
);
4026 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
4030 /* Tail recursive case: save a function call level. */
4036 mark_used_regs (needed
, live
, XEXP (x
, i
), final
, insn
);
4038 else if (fmt
[i
] == 'E')
4041 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
4042 mark_used_regs (needed
, live
, XVECEXP (x
, i
, j
), final
, insn
);
4051 try_pre_increment_1 (insn
)
4054 /* Find the next use of this reg. If in same basic block,
4055 make it do pre-increment or pre-decrement if appropriate. */
4056 rtx x
= single_set (insn
);
4057 HOST_WIDE_INT amount
= ((GET_CODE (SET_SRC (x
)) == PLUS
? 1 : -1)
4058 * INTVAL (XEXP (SET_SRC (x
), 1)));
4059 int regno
= REGNO (SET_DEST (x
));
4060 rtx y
= reg_next_use
[regno
];
4062 && BLOCK_NUM (y
) == BLOCK_NUM (insn
)
4063 /* Don't do this if the reg dies, or gets set in y; a standard addressing
4064 mode would be better. */
4065 && ! dead_or_set_p (y
, SET_DEST (x
))
4066 && try_pre_increment (y
, SET_DEST (x
), amount
))
4068 /* We have found a suitable auto-increment
4069 and already changed insn Y to do it.
4070 So flush this increment-instruction. */
4071 PUT_CODE (insn
, NOTE
);
4072 NOTE_LINE_NUMBER (insn
) = NOTE_INSN_DELETED
;
4073 NOTE_SOURCE_FILE (insn
) = 0;
/* Count a reference to this reg for the increment
   insn we are deleting.  When a reg is incremented,
   spilling it is worse, so we want to make that
   less likely.  */
4078 if (regno
>= FIRST_PSEUDO_REGISTER
)
4080 REG_N_REFS (regno
) += loop_depth
;
4081 REG_N_SETS (regno
)++;
4088 /* Try to change INSN so that it does pre-increment or pre-decrement
4089 addressing on register REG in order to add AMOUNT to REG.
4090 AMOUNT is negative for pre-decrement.
4091 Returns 1 if the change could be made.
4092 This checks all about the validity of the result of modifying INSN. */
4095 try_pre_increment (insn
, reg
, amount
)
4097 HOST_WIDE_INT amount
;
4101 /* Nonzero if we can try to make a pre-increment or pre-decrement.
4102 For example, addl $4,r1; movl (r1),... can become movl +(r1),... */
4104 /* Nonzero if we can try to make a post-increment or post-decrement.
4105 For example, addl $4,r1; movl -4(r1),... can become movl (r1)+,...
4106 It is possible for both PRE_OK and POST_OK to be nonzero if the machine
4107 supports both pre-inc and post-inc, or both pre-dec and post-dec. */
4110 /* Nonzero if the opportunity actually requires post-inc or post-dec. */
4113 /* From the sign of increment, see which possibilities are conceivable
4114 on this target machine. */
4115 if (HAVE_PRE_INCREMENT
&& amount
> 0)
4117 if (HAVE_POST_INCREMENT
&& amount
> 0)
4120 if (HAVE_PRE_DECREMENT
&& amount
< 0)
4122 if (HAVE_POST_DECREMENT
&& amount
< 0)
4125 if (! (pre_ok
|| post_ok
))
4128 /* It is not safe to add a side effect to a jump insn
4129 because if the incremented register is spilled and must be reloaded
4130 there would be no way to store the incremented value back in memory. */
4132 if (GET_CODE (insn
) == JUMP_INSN
)
4137 use
= find_use_as_address (PATTERN (insn
), reg
, 0);
4138 if (post_ok
&& (use
== 0 || use
== (rtx
) 1))
4140 use
= find_use_as_address (PATTERN (insn
), reg
, -amount
);
4144 if (use
== 0 || use
== (rtx
) 1)
4147 if (GET_MODE_SIZE (GET_MODE (use
)) != (amount
> 0 ? amount
: - amount
))
4150 /* See if this combination of instruction and addressing mode exists. */
4151 if (! validate_change (insn
, &XEXP (use
, 0),
4152 gen_rtx_fmt_e (amount
> 0
4153 ? (do_post
? POST_INC
: PRE_INC
)
4154 : (do_post
? POST_DEC
: PRE_DEC
),
4158 /* Record that this insn now has an implicit side effect on X. */
4159 REG_NOTES (insn
) = gen_rtx_EXPR_LIST (REG_INC
, reg
, REG_NOTES (insn
));
4163 #endif /* AUTO_INC_DEC */
4165 /* Find the place in the rtx X where REG is used as a memory address.
4166 Return the MEM rtx that so uses it.
4167 If PLUSCONST is nonzero, search instead for a memory address equivalent to
4168 (plus REG (const_int PLUSCONST)).
4170 If such an address does not appear, return 0.
If REG appears more than once, or is used other than in such an address,
return (rtx) 1.  */
4175 find_use_as_address (x
, reg
, plusconst
)
4178 HOST_WIDE_INT plusconst
;
4180 enum rtx_code code
= GET_CODE (x
);
4181 const char *fmt
= GET_RTX_FORMAT (code
);
4183 register rtx value
= 0;
4186 if (code
== MEM
&& XEXP (x
, 0) == reg
&& plusconst
== 0)
4189 if (code
== MEM
&& GET_CODE (XEXP (x
, 0)) == PLUS
4190 && XEXP (XEXP (x
, 0), 0) == reg
4191 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == CONST_INT
4192 && INTVAL (XEXP (XEXP (x
, 0), 1)) == plusconst
)
4195 if (code
== SIGN_EXTRACT
|| code
== ZERO_EXTRACT
)
4197 /* If REG occurs inside a MEM used in a bit-field reference,
4198 that is unacceptable. */
4199 if (find_use_as_address (XEXP (x
, 0), reg
, 0) != 0)
4200 return (rtx
) (HOST_WIDE_INT
) 1;
4204 return (rtx
) (HOST_WIDE_INT
) 1;
4206 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
4210 tem
= find_use_as_address (XEXP (x
, i
), reg
, plusconst
);
4214 return (rtx
) (HOST_WIDE_INT
) 1;
4219 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
4221 tem
= find_use_as_address (XVECEXP (x
, i
, j
), reg
, plusconst
);
4225 return (rtx
) (HOST_WIDE_INT
) 1;
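
/* find_use_as_address above distinguishes three outcomes: a single valid
   use (the MEM itself), no use (0), and "more than one use or a use in an
   unacceptable context" ((rtx) 1).  A hedged, generic sketch of that
   sentinel convention, using hypothetical toy_* names:  */

#define TOY_AMBIGUOUS ((int *) 1)

/* Return the single element of VEC equal to KEY, 0 if there is none, or
   TOY_AMBIGUOUS if it occurs more than once.  */
static int *
toy_find_single (vec, n, key)
     int *vec;
     int n, key;
{
  int *found = 0;
  int i;

  for (i = 0; i < n; i++)
    if (vec[i] == key)
      {
	if (found)
	  return TOY_AMBIGUOUS;	/* second occurrence: ambiguous */
	found = &vec[i];
      }

  return found;
}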
/* Write information about registers and basic blocks into FILE.
   This is part of making a debugging dump.  */

dump_flow_info (file)
  static const char * const reg_class_names[] = REG_CLASS_NAMES;

  fprintf (file, "%d registers.\n", max_regno);
  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
      enum reg_class class, altclass;
      fprintf (file, "\nRegister %d used %d times across %d insns",
               i, REG_N_REFS (i), REG_LIVE_LENGTH (i));
      if (REG_BASIC_BLOCK (i) >= 0)
        fprintf (file, " in block %d", REG_BASIC_BLOCK (i));
      fprintf (file, "; set %d time%s", REG_N_SETS (i),
               (REG_N_SETS (i) == 1) ? "" : "s");
      if (REG_USERVAR_P (regno_reg_rtx[i]))
        fprintf (file, "; user var");
      if (REG_N_DEATHS (i) != 1)
        fprintf (file, "; dies in %d places", REG_N_DEATHS (i));
      if (REG_N_CALLS_CROSSED (i) == 1)
        fprintf (file, "; crosses 1 call");
      else if (REG_N_CALLS_CROSSED (i))
        fprintf (file, "; crosses %d calls", REG_N_CALLS_CROSSED (i));
      if (PSEUDO_REGNO_BYTES (i) != UNITS_PER_WORD)
        fprintf (file, "; %d bytes", PSEUDO_REGNO_BYTES (i));
      class = reg_preferred_class (i);
      altclass = reg_alternate_class (i);
      if (class != GENERAL_REGS || altclass != ALL_REGS)
          if (altclass == ALL_REGS || class == ALL_REGS)
            fprintf (file, "; pref %s", reg_class_names[(int) class]);
          else if (altclass == NO_REGS)
            fprintf (file, "; %s or none", reg_class_names[(int) class]);
            fprintf (file, "; pref %s, else %s",
                     reg_class_names[(int) class],
                     reg_class_names[(int) altclass]);
      if (REGNO_POINTER_FLAG (i))
        fprintf (file, "; pointer");
      fprintf (file, ".\n");

  fprintf (file, "\n%d basic blocks.\n", n_basic_blocks);
  for (i = 0; i < n_basic_blocks; i++)
      register basic_block bb = BASIC_BLOCK (i);

      fprintf (file, "\nBasic block %d: first insn %d, last %d.\n",
               i, INSN_UID (bb->head), INSN_UID (bb->end));
      fprintf (file, "Predecessors: ");
      for (e = bb->pred; e; e = e->pred_next)
        dump_edge_info (file, e, 0);
      fprintf (file, "\nSuccessors: ");
      for (e = bb->succ; e; e = e->succ_next)
        dump_edge_info (file, e, 1);
      fprintf (file, "\nRegisters live at start:");
      if (bb->global_live_at_start)
          for (regno = 0; regno < max_regno; regno++)
            if (REGNO_REG_SET_P (bb->global_live_at_start, regno))
              fprintf (file, " %d", regno);
        fprintf (file, " n/a");
      fprintf (file, "\nRegisters live at end:");
      if (bb->global_live_at_end)
          for (regno = 0; regno < max_regno; regno++)
            if (REGNO_REG_SET_P (bb->global_live_at_end, regno))
              fprintf (file, " %d", regno);
        fprintf (file, " n/a");
dump_edge_info (file, e, do_succ)
  basic_block side = (do_succ ? e->dest : e->src);

  if (side == ENTRY_BLOCK_PTR)
    fputs (" ENTRY", file);
  else if (side == EXIT_BLOCK_PTR)
    fputs (" EXIT", file);
    fprintf (file, " %d", side->index);

      static const char * const bitnames[] = {
        "fallthru", "crit", "ab", "abcall", "eh", "fake"
      int i, flags = e->flags;

      for (i = 0; flags; i++)
        if (flags & (1 << i))
            if (i < (int) (sizeof (bitnames) / sizeof (*bitnames)))
              fputs (bitnames[i], file);
              fprintf (file, "%d", i);
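/* Illustrative sketch, not part of the dumper itself: the loop above peels
   one set bit of E->flags at a time and prints bitnames[i] for bit i, or
   the raw bit number when the bit is beyond the table.  The same idea as a
   standalone helper (the helper name and its FILE argument are hypothetical):

       static void
       print_flag_names (FILE *f, int flags, const char *const *names, int n)
       {
         int i;
         for (i = 0; flags; i++)
           if (flags & (1 << i))
             {
               if (i < n)
                 fputs (names[i], f);
               else
                 fprintf (f, "%d", i);
               flags &= ~(1 << i);
             }
       }

   With the bitnames table above, a hypothetical flags value of
   (1 << 0) | (1 << 2) would print "fallthru" and "ab".  */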
/* Like print_rtl, but also print out live information for the start of each
   basic block.  */

print_rtl_with_bb (outf, rtx_first)
  register rtx tmp_rtx;

    fprintf (outf, "(nil)\n");

      enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
      int max_uid = get_max_uid ();
      basic_block *start = (basic_block *)
        alloca (max_uid * sizeof (basic_block));
      basic_block *end = (basic_block *)
        alloca (max_uid * sizeof (basic_block));
      enum bb_state *in_bb_p = (enum bb_state *)
        alloca (max_uid * sizeof (enum bb_state));

      memset (start, 0, max_uid * sizeof (basic_block));
      memset (end, 0, max_uid * sizeof (basic_block));
      memset (in_bb_p, 0, max_uid * sizeof (enum bb_state));

      for (i = n_basic_blocks - 1; i >= 0; i--)
          basic_block bb = BASIC_BLOCK (i);

          start[INSN_UID (bb->head)] = bb;
          end[INSN_UID (bb->end)] = bb;
          for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
              enum bb_state state = IN_MULTIPLE_BB;
              if (in_bb_p[INSN_UID (x)] == NOT_IN_BB)
              in_bb_p[INSN_UID (x)] = state;

      for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
          if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
              fprintf (outf, ";; Start of basic block %d, registers live:",
              EXECUTE_IF_SET_IN_REG_SET (bb->global_live_at_start, 0, i,
                  fprintf (outf, " %d", i);
                  if (i < FIRST_PSEUDO_REGISTER)
                    fprintf (outf, " [%s]",

          if (in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB
              && GET_CODE (tmp_rtx) != NOTE
              && GET_CODE (tmp_rtx) != BARRIER
            fprintf (outf, ";; Insn is not within a basic block\n");
          else if (in_bb_p[INSN_UID (tmp_rtx)] == IN_MULTIPLE_BB)
            fprintf (outf, ";; Insn is in multiple basic blocks\n");

          did_output = print_rtl_single (outf, tmp_rtx);

          if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
            fprintf (outf, ";; End of basic block %d\n", bb->index);
/* Integer list support.  */

/* Allocate a node from list *HEAD_PTR.  */

alloc_int_list_node (head_ptr)
     int_list_block **head_ptr;
  struct int_list_block *first_blk = *head_ptr;

  if (first_blk == NULL || first_blk->nodes_left <= 0)
      first_blk = (struct int_list_block *) xmalloc (sizeof (struct int_list_block));
      first_blk->nodes_left = INT_LIST_NODES_IN_BLK;
      first_blk->next = *head_ptr;
      *head_ptr = first_blk;

  first_blk->nodes_left--;
  return &first_blk->nodes[first_blk->nodes_left];

/* Pointer to head of predecessor/successor block list.  */
static int_list_block *pred_int_list_blocks;

/* Add a new node to integer list LIST with value VAL.
   LIST is a pointer to a list object to allow for different implementations.
   If *LIST is initially NULL, the list is empty.
   The caller must not care whether the element is added to the front or
   to the end of the list (to allow for different implementations).  */

add_int_list_node (blk_list, list, val)
     int_list_block **blk_list;
  int_list_ptr p = alloc_int_list_node (blk_list);
/* Free the blocks of lists at BLK_LIST.  */

free_int_list (blk_list)
     int_list_block **blk_list;
  int_list_block *p, *next;

  for (p = *blk_list; p != NULL; p = next)

  /* Mark list as empty for the next function we compile.  */
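/* Illustrative sketch, not part of the list implementation: nodes are
   carved out of INT_LIST_NODES_IN_BLK-sized blocks, so a client builds a
   list, walks it with INT_LIST_VAL, and frees the backing blocks in one
   call.  The block pointer and the values below are hypothetical:

       static int_list_block *blocks;
       int_list_ptr list = NULL, p;

       add_int_list_node (&blocks, &list, 3);
       add_int_list_node (&blocks, &list, 7);

       for (p = list; p != NULL; p = p->next)
         fprintf (stderr, " %d", INT_LIST_VAL (p));

       free_int_list (&blocks);
*/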
/* Predecessor/successor computation.  */

/* Mark PRED_BB a predecessor of SUCC_BB,
   and conversely SUCC_BB a successor of PRED_BB.  */

add_pred_succ (pred_bb, succ_bb, s_preds, s_succs, num_preds, num_succs)
     int_list_ptr *s_preds;
     int_list_ptr *s_succs;
  if (succ_bb != EXIT_BLOCK)
      add_int_list_node (&pred_int_list_blocks, &s_preds[succ_bb], pred_bb);
      num_preds[succ_bb]++;

  if (pred_bb != ENTRY_BLOCK)
      add_int_list_node (&pred_int_list_blocks, &s_succs[pred_bb], succ_bb);
      num_succs[pred_bb]++;

/* Convert edge lists into pred/succ lists for backward compatibility.  */

compute_preds_succs (s_preds, s_succs, num_preds, num_succs)
     int_list_ptr *s_preds;
     int_list_ptr *s_succs;
  int i, n = n_basic_blocks;

  memset (s_preds, 0, n_basic_blocks * sizeof (int_list_ptr));
  memset (s_succs, 0, n_basic_blocks * sizeof (int_list_ptr));
  memset (num_preds, 0, n_basic_blocks * sizeof (int));
  memset (num_succs, 0, n_basic_blocks * sizeof (int));

  for (i = 0; i < n; ++i)
      basic_block bb = BASIC_BLOCK (i);

      for (e = bb->succ; e; e = e->succ_next)
        add_pred_succ (i, e->dest->index, s_preds, s_succs,
                       num_preds, num_succs);

  for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
    add_pred_succ (ENTRY_BLOCK, e->dest->index, s_preds, s_succs,
                   num_preds, num_succs);
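/* Illustrative sketch, not part of the converter itself: a pass that still
   wants the old-style lists allocates four per-block arrays, lets
   compute_preds_succs fill them from the edge lists, and can then dump
   them.  The use of alloca, the stderr stream, and passing 0 for the
   live-info flag are hypothetical choices made only for this sketch:

       int_list_ptr *s_preds
         = (int_list_ptr *) alloca (n_basic_blocks * sizeof (int_list_ptr));
       int_list_ptr *s_succs
         = (int_list_ptr *) alloca (n_basic_blocks * sizeof (int_list_ptr));
       int *num_preds = (int *) alloca (n_basic_blocks * sizeof (int));
       int *num_succs = (int *) alloca (n_basic_blocks * sizeof (int));

       compute_preds_succs (s_preds, s_succs, num_preds, num_succs);
       dump_bb_data (stderr, s_preds, s_succs, 0);
*/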
dump_bb_data (file, preds, succs, live_info)
     int_list_ptr *preds;
     int_list_ptr *succs;
  fprintf (file, "BB data\n\n");
  for (bb = 0; bb < n_basic_blocks; bb++)
      fprintf (file, "BB %d, start %d, end %d\n", bb,
               INSN_UID (BLOCK_HEAD (bb)), INSN_UID (BLOCK_END (bb)));
      fprintf (file, " preds:");
      for (p = preds[bb]; p != NULL; p = p->next)
          int pred_bb = INT_LIST_VAL (p);
          if (pred_bb == ENTRY_BLOCK)
            fprintf (file, " entry");
            fprintf (file, " %d", pred_bb);
      fprintf (file, "\n");
      fprintf (file, " succs:");
      for (p = succs[bb]; p != NULL; p = p->next)
          int succ_bb = INT_LIST_VAL (p);
          if (succ_bb == EXIT_BLOCK)
            fprintf (file, " exit");
            fprintf (file, " %d", succ_bb);

          fprintf (file, "\nRegisters live at start:");
          for (regno = 0; regno < max_regno; regno++)
            if (REGNO_REG_SET_P (BASIC_BLOCK (bb)->global_live_at_start, regno))
              fprintf (file, " %d", regno);
          fprintf (file, "\n");
      fprintf (file, "\n");
  fprintf (file, "\n");

/* Free basic block data storage.  */

  free_int_list (&pred_int_list_blocks);
/* Compute dominator relationships.  */

compute_dominators (dominators, post_dominators, s_preds, s_succs)
     sbitmap *dominators;
     sbitmap *post_dominators;
     int_list_ptr *s_preds;
     int_list_ptr *s_succs;
  int bb, changed, passes;
  sbitmap *temp_bitmap;

  temp_bitmap = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
  sbitmap_vector_ones (dominators, n_basic_blocks);
  sbitmap_vector_ones (post_dominators, n_basic_blocks);
  sbitmap_vector_zero (temp_bitmap, n_basic_blocks);

  sbitmap_zero (dominators[0]);
  SET_BIT (dominators[0], 0);

  sbitmap_zero (post_dominators[n_basic_blocks - 1]);
  SET_BIT (post_dominators[n_basic_blocks - 1], 0);

      for (bb = 1; bb < n_basic_blocks; bb++)
          sbitmap_intersect_of_predecessors (temp_bitmap[bb], dominators,
          SET_BIT (temp_bitmap[bb], bb);
          changed |= sbitmap_a_and_b (dominators[bb],

          sbitmap_intersect_of_successors (temp_bitmap[bb], post_dominators,
          SET_BIT (temp_bitmap[bb], bb);
          changed |= sbitmap_a_and_b (post_dominators[bb],
                                      post_dominators[bb],
/* Compute dominator relationships using new flow graph structures.  */

compute_flow_dominators (dominators, post_dominators)
     sbitmap *dominators;
     sbitmap *post_dominators;
  int bb, changed, passes;
  sbitmap *temp_bitmap;

  temp_bitmap = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
  sbitmap_vector_ones (dominators, n_basic_blocks);
  sbitmap_vector_ones (post_dominators, n_basic_blocks);
  sbitmap_vector_zero (temp_bitmap, n_basic_blocks);

  sbitmap_zero (dominators[0]);
  SET_BIT (dominators[0], 0);

  sbitmap_zero (post_dominators[n_basic_blocks - 1]);
  SET_BIT (post_dominators[n_basic_blocks - 1], 0);

      for (bb = 1; bb < n_basic_blocks; bb++)
          sbitmap_intersection_of_preds (temp_bitmap[bb], dominators, bb);
          SET_BIT (temp_bitmap[bb], bb);
          changed |= sbitmap_a_and_b (dominators[bb],

          sbitmap_intersection_of_succs (temp_bitmap[bb], post_dominators, bb);
          SET_BIT (temp_bitmap[bb], bb);
          changed |= sbitmap_a_and_b (post_dominators[bb],
                                      post_dominators[bb],
/* Given DOMINATORS, compute the immediate dominators into IDOM.  */

compute_immediate_dominators (idom, dominators)
     sbitmap *dominators;
  tmp = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);

  /* Begin with tmp(n) = dom(n) - { n }.  */
  for (b = n_basic_blocks; --b >= 0; )
      sbitmap_copy (tmp[b], dominators[b]);
      RESET_BIT (tmp[b], b);

  /* Subtract out all of our dominator's dominators.  */
  for (b = n_basic_blocks; --b >= 0; )
      sbitmap tmp_b = tmp[b];

      for (s = n_basic_blocks; --s >= 0; )
        if (TEST_BIT (tmp_b, s))
          sbitmap_difference (tmp_b, tmp_b, tmp[s]);

  /* Find the one bit set in the bitmap and put it in the output array.  */
  for (b = n_basic_blocks; --b >= 0; )
      EXECUTE_IF_SET_IN_SBITMAP (tmp[b], 0, t, { idom[b] = t; });

  sbitmap_vector_free (tmp);
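/* Worked example, not part of the computation: for a straight-line CFG
   0 -> 1 -> 2, dom(2) = {0,1,2}, so tmp(2) starts as {0,1}; subtracting
   tmp(1) = {0} leaves {1}, and idom[2] becomes 1.  A caller combining the
   two routines above might look like this (treating IDOM as a plain int
   array is an assumption of this sketch):

       sbitmap *doms = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
       sbitmap *pdoms = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
       int *idom = (int *) xmalloc (n_basic_blocks * sizeof (int));

       compute_flow_dominators (doms, pdoms);
       compute_immediate_dominators (idom, doms);
*/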
/* Count for a single SET rtx, X.  */

count_reg_sets_1 (x)
  register rtx reg = SET_DEST (x);

  /* Find the register that's set/clobbered.  */
  while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
         || GET_CODE (reg) == SIGN_EXTRACT
         || GET_CODE (reg) == STRICT_LOW_PART)
    reg = XEXP (reg, 0);

  if (GET_CODE (reg) == PARALLEL
      && GET_MODE (reg) == BLKmode)
      for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
        count_reg_sets_1 (XVECEXP (reg, 0, i));

  if (GET_CODE (reg) == REG)
      regno = REGNO (reg);
      if (regno >= FIRST_PSEUDO_REGISTER)
          /* Count (weighted) references, stores, etc.  This counts a
             register twice if it is modified, but that is correct.  */
          REG_N_SETS (regno)++;
          REG_N_REFS (regno) += loop_depth;
/* Increment REG_N_SETS for each SET or CLOBBER found in X; also increment
   REG_N_REFS by the current loop depth for each SET or CLOBBER found.  */

  register RTX_CODE code = GET_CODE (x);

  if (code == SET || code == CLOBBER)
    count_reg_sets_1 (x);
  else if (code == PARALLEL)
      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
          code = GET_CODE (XVECEXP (x, 0, i));
          if (code == SET || code == CLOBBER)
            count_reg_sets_1 (XVECEXP (x, 0, i));
/* Increment REG_N_REFS by the current loop depth for each register reference
   found in X.  */

count_reg_references (x)
  register RTX_CODE code;

  code = GET_CODE (x);

      /* If we are clobbering a MEM, mark any registers inside the address
         as being used.  */
      if (GET_CODE (XEXP (x, 0)) == MEM)
        count_reg_references (XEXP (XEXP (x, 0), 0));

      /* While we're here, optimize this case.  */

      /* In case the SUBREG is not of a register, don't optimize.  */
      if (GET_CODE (x) != REG)
          count_reg_references (x);

      /* ... fall through ...  */

      if (REGNO (x) >= FIRST_PSEUDO_REGISTER)
        REG_N_REFS (REGNO (x)) += loop_depth;

        register rtx testreg = SET_DEST (x);

        /* If storing into MEM, don't show it as being used.  But do
           show the address as being used.  */
        if (GET_CODE (testreg) == MEM)
            count_reg_references (XEXP (testreg, 0));
            count_reg_references (SET_SRC (x));

        /* Storing in STRICT_LOW_PART is like storing in a reg
           in that this SET might be dead, so ignore it in TESTREG,
           but in some other ways it is like using the reg.

           Storing in a SUBREG or a bit field is like storing the entire
           register in that if the register's value is not used
           then this SET is not needed.  */
        while (GET_CODE (testreg) == STRICT_LOW_PART
               || GET_CODE (testreg) == ZERO_EXTRACT
               || GET_CODE (testreg) == SIGN_EXTRACT
               || GET_CODE (testreg) == SUBREG)
            /* Modifying a single register in an alternate mode
               does not use any of the old value.  But these other
               ways of storing in a register do use the old value.  */
            if (GET_CODE (testreg) == SUBREG
                && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))

            testreg = XEXP (testreg, 0);

        /* If this is a store into a register,
           recursively scan the value being stored.  */

        if ((GET_CODE (testreg) == PARALLEL
             && GET_MODE (testreg) == BLKmode)
            || GET_CODE (testreg) == REG)
            count_reg_references (SET_SRC (x));

        count_reg_references (SET_DEST (x));

  /* Recursively scan the operands of this expression.  */

    register const char *fmt = GET_RTX_FORMAT (code);

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)

            /* Tail recursive case: save a function call level.  */

          count_reg_references (XEXP (x, i));

        else if (fmt[i] == 'E')
            for (j = 0; j < XVECLEN (x, i); j++)
              count_reg_references (XVECEXP (x, i, j));
/* Recompute register set/reference counts immediately prior to register
   allocation.

   This avoids problems with set/reference counts changing to/from values
   which have special meanings to the register allocators.

   Additionally, the reference counts are the primary component used by the
   register allocators to prioritize pseudos for allocation to hard regs.
   More accurate reference counts generally lead to better register allocation.

   F is the first insn to be scanned.
   LOOP_STEP denotes how much loop_depth should be incremented per
   loop nesting level in order to increase the ref count more for references
   in loops.

   It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and
   possibly other information which is used by the register allocators.  */

recompute_reg_usage (f, loop_step)
  /* Clear out the old data.  */
  max_reg = max_reg_num ();
  for (i = FIRST_PSEUDO_REGISTER; i < max_reg; i++)

  /* Scan each insn in the chain and count how many times each register is
     set/used.  */
  for (insn = f; insn; insn = NEXT_INSN (insn))
      /* Keep track of loop depth.  */
      if (GET_CODE (insn) == NOTE)
          /* Look for loop boundaries.  */
          if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END)
            loop_depth -= loop_step;
          else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
            loop_depth += loop_step;

          /* If we have LOOP_DEPTH == 0, there has been a bookkeeping error.
             Abort now rather than setting register status incorrectly.  */
          if (loop_depth == 0)
      else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
          /* This call will increment REG_N_SETS for each SET or CLOBBER
             of a register in INSN.  It will also increment REG_N_REFS
             by the loop depth for each set of a register in INSN.  */
          count_reg_sets (PATTERN (insn));

          /* count_reg_sets does not detect autoincrement address modes, so
             detect them here by looking at the notes attached to INSN.  */
          for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
            if (REG_NOTE_KIND (links) == REG_INC)
              /* Count (weighted) references, stores, etc.  This counts a
                 register twice if it is modified, but that is correct.  */
              REG_N_SETS (REGNO (XEXP (links, 0)))++;

          /* This call will increment REG_N_REFS by the current loop depth for
             each reference to a register in INSN.  */
          count_reg_references (PATTERN (insn));

          /* count_reg_references will not include counts for arguments to
             function calls, so detect them here by examining the
             CALL_INSN_FUNCTION_USAGE data.  */
          if (GET_CODE (insn) == CALL_INSN)
              for (note = CALL_INSN_FUNCTION_USAGE (insn);
                   note = XEXP (note, 1))
                if (GET_CODE (XEXP (note, 0)) == USE)
                  count_reg_references (SET_DEST (XEXP (note, 0)));
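/* Illustrative note, not part of the function: because REG_N_REFS is bumped
   by the current loop depth, references inside loops weigh more.  With a
   hypothetical loop_step of 1, an insn nested two loops deep contributes
   (the initial depth plus 2) to REG_N_REFS of each register it mentions.
   A typical call site, just before register allocation, would be

       recompute_reg_usage (get_insns (), 1);

   where the loop_step argument of 1 is only an example value.  */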
/* Record INSN's block as BB.  */

set_block_for_insn (insn, bb)
  size_t uid = INSN_UID (insn);
  if (uid >= basic_block_for_insn->num_elements)
      /* Add one-eighth the size so we don't keep calling xrealloc.  */
      new_size = uid + (uid + 7) / 8;
      VARRAY_GROW (basic_block_for_insn, new_size);
  VARRAY_BB (basic_block_for_insn, uid) = bb;
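/* Worked example, not part of the function: the growth policy above
   enlarges the insn-to-block map by roughly one eighth of the requesting
   uid.  For instance a uid of 100 arriving when the map is smaller grows
   it to 100 + (100 + 7) / 8 = 113 elements, so repeated insertions cause
   only a logarithmic number of xrealloc calls rather than one per insn.  */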
/* Record INSN's block number as BB.  */
/* ??? This has got to go.  */

set_block_num (insn, bb)
  set_block_for_insn (insn, BASIC_BLOCK (bb));
/* Unlink a chain of insns between START and FINISH inclusive, leaving notes
   that must be paired, and return the new chain.  */

unlink_insn_chain (start, finish)
  rtx insert_point = PREV_INSN (start);
  rtx chain = NULL_RTX, curr;

  /* Unchain the insns one by one.  It would be quicker to delete all
     of these with a single unchaining, rather than one at a time, but
     we need to keep the NOTE's.  */

      rtx next = NEXT_INSN (start);

      remove_insn (start);

      /* ??? Despite the fact that we're patching out the insn, it's
         still referenced in LOG_LINKS.  Rather than try and track
         them all down and remove them, just mark the insn deleted.  */
      INSN_DELETED_P (start) = 1;

      if (GET_CODE (start) == NOTE && ! can_delete_note_p (start))
          add_insn_after (start, insert_point);
          insert_point = start;

          NEXT_INSN (curr) = start;
          PREV_INSN (start) = curr;

          PREV_INSN (chain) = NULL_RTX;

      if (start == finish)

  if (chain != NULL_RTX)
    NEXT_INSN (curr) = NULL_RTX;
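/* Illustrative sketch, not part of the function: unlink_insn_chain detaches
   START through FINISH from the insn stream, re-inserting any notes that
   must stay behind, and hands back the detached chain.  A hypothetical
   caller moving those insns elsewhere could do

       rtx chain = unlink_insn_chain (start, finish);
       if (chain != NULL_RTX)
         emit_insn_after (chain, place);

   where PLACE stands for whatever insn the chain should follow.  */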
/* Subroutine of update_life_info.  Determines whether multiple
   REG_NOTEs need to be distributed for the hard register mentioned in
   NOTE.  This can happen if a reference to a hard register in the
   original insns was split into several smaller hard register
   references in the new insns.  */

split_hard_reg_notes (curr_insn, note, first, last)
     rtx curr_insn, note, first, last;
  rtx reg, temp, link;
  int n_regs, i, new_reg;

  reg = XEXP (note, 0);

  if (REG_NOTE_KIND (note) != REG_DEAD
      || GET_CODE (reg) != REG
      || REGNO (reg) >= FIRST_PSEUDO_REGISTER
      || HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg)) == 1)
      XEXP (note, 1) = REG_NOTES (curr_insn);
      REG_NOTES (curr_insn) = note;

  n_regs = HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg));

  for (i = 0; i < n_regs; i++)
      new_reg = REGNO (reg) + i;

      /* Check for references to new_reg in the split insns.  */
      for (insn = last; ; insn = PREV_INSN (insn))
          if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
              && (temp = regno_use_in (new_reg, PATTERN (insn))))
              /* Create a new reg dead note here.  */
              link = rtx_alloc (EXPR_LIST);
              PUT_REG_NOTE_KIND (link, REG_DEAD);
              XEXP (link, 0) = temp;
              XEXP (link, 1) = REG_NOTES (insn);
              REG_NOTES (insn) = link;

              /* If killed multiple registers here, then add in the excess.  */
              i += HARD_REGNO_NREGS (REGNO (temp), GET_MODE (temp)) - 1;

      /* It isn't mentioned anywhere, so no new reg note is needed for
         this register.  */
/* SET_INSN kills REG; add a REG_DEAD note mentioning REG to the last
   use of REG in the insns after SET_INSN and before or including
   LAST.

   A non-zero value is returned if we added a REG_DEAD note, or if we
   determined that a REG_DEAD note was not necessary because of this
   particular SET.  */

maybe_add_dead_note (reg, set_insn, last)
     rtx reg, set_insn, last;
  for (insn = last; insn != set_insn; insn = PREV_INSN (insn))
      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
          && reg_overlap_mentioned_p (reg, PATTERN (insn))
          && (set = single_set (insn)))
          rtx insn_dest = SET_DEST (set);

          while (GET_CODE (insn_dest) == ZERO_EXTRACT
                 || GET_CODE (insn_dest) == SUBREG
                 || GET_CODE (insn_dest) == STRICT_LOW_PART
                 || GET_CODE (insn_dest) == SIGN_EXTRACT)
            insn_dest = XEXP (insn_dest, 0);

          if (! rtx_equal_p (insn_dest, reg))
              /* Use the same scheme as combine.c, don't put both REG_DEAD
                 and REG_UNUSED notes on the same insn.  */
              if (! find_regno_note (insn, REG_UNUSED, REGNO (reg))
                  && ! find_regno_note (insn, REG_DEAD, REGNO (reg)))
                  rtx note = rtx_alloc (EXPR_LIST);
                  PUT_REG_NOTE_KIND (note, REG_DEAD);
                  XEXP (note, 0) = reg;
                  XEXP (note, 1) = REG_NOTES (insn);
                  REG_NOTES (insn) = note;
          else if (reg_overlap_mentioned_p (reg, SET_SRC (set)))
              /* We found an instruction that both uses the register and
                 sets it, so no new REG_NOTE is needed for the previous
                 set.  */
maybe_add_dead_note_use (insn, dest)
  /* We need to add a REG_DEAD note to the last place DEST is
     referenced.  */

  if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
      && reg_mentioned_p (dest, PATTERN (insn))
      && (set = single_set (insn)))
      rtx insn_dest = SET_DEST (set);

      while (GET_CODE (insn_dest) == ZERO_EXTRACT
             || GET_CODE (insn_dest) == SUBREG
             || GET_CODE (insn_dest) == STRICT_LOW_PART
             || GET_CODE (insn_dest) == SIGN_EXTRACT)
        insn_dest = XEXP (insn_dest, 0);

      if (! rtx_equal_p (insn_dest, dest))
          /* Use the same scheme as combine.c, don't put both REG_DEAD
             and REG_UNUSED notes on the same insn.  */
          if (! find_regno_note (insn, REG_UNUSED, REGNO (dest))
              && ! find_regno_note (insn, REG_DEAD, REGNO (dest)))
              rtx note = rtx_alloc (EXPR_LIST);
              PUT_REG_NOTE_KIND (note, REG_DEAD);
              XEXP (note, 0) = dest;
              XEXP (note, 1) = REG_NOTES (insn);
              REG_NOTES (insn) = note;
/* Find the first insn in the set of insns from FIRST to LAST inclusive
   that contains the note NOTE.  */

find_insn_with_note (note, first, last)
     rtx note, first, last;
  for (insn = first; insn != NULL_RTX; insn = NEXT_INSN (insn))
      rtx temp = find_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
/* Subroutine of update_life_info.  Determines whether a SET or
   CLOBBER in an insn created by splitting needs a REG_DEAD or
   REG_UNUSED note added.  */

new_insn_dead_notes (pat, insn, first, last, orig_first_insn, orig_last_insn)
     rtx pat, insn, first, last, orig_first_insn, orig_last_insn;
  if (GET_CODE (pat) != CLOBBER && GET_CODE (pat) != SET)

  dest = XEXP (pat, 0);

  while (GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == SUBREG
         || GET_CODE (dest) == STRICT_LOW_PART
         || GET_CODE (dest) == SIGN_EXTRACT)
    dest = XEXP (dest, 0);

  if (GET_CODE (dest) == REG)
      /* If the original insns already used this register, we may not
         add new notes for it.  One example for a replacement that
         needs this test is when a multi-word memory access with
         register-indirect addressing is changed into multiple memory
         accesses with auto-increment and one adjusting add
         instruction for the address register.

         However, there is a problem with this code.  We're assuming
         that any registers that are set in the new insns are either
         set/referenced in the old insns (and thus "inherit" the
         liveness of the old insns), or are registers that are dead
         before we enter this part of the stream (and thus should be
         dead when we leave).

         To do this absolutely correctly, we must determine the actual
         liveness of the registers before we go randomly adding
         REG_DEAD notes.  This can probably be accurately done by
         calling mark_referenced_resources() on the old stream before
         replacing the old insns.  */

      for (tem = orig_first_insn; tem != NULL_RTX; tem = NEXT_INSN (tem))
          if (GET_RTX_CLASS (GET_CODE (tem)) == 'i'
              && reg_referenced_p (dest, PATTERN (tem)))
          if (tem == orig_last_insn)

      /* So it's a new register, presumably only used within this
         group of insns.  Find the last insn in the set of new insns
         that DEST is referenced in, and add a dead note to it.  */
      if (! maybe_add_dead_note (dest, insn, last))
          /* If this is a set, it must die somewhere, unless it is the
             dest of the original insn, and thus is live after the
             original insn.  Abort if it isn't supposed to be live after
             the original insn.

             If this is a clobber, then just add a REG_UNUSED note.  */
          if (GET_CODE (pat) == CLOBBER)
              rtx note = rtx_alloc (EXPR_LIST);
              PUT_REG_NOTE_KIND (note, REG_UNUSED);
              XEXP (note, 0) = dest;
              XEXP (note, 1) = REG_NOTES (insn);
              REG_NOTES (insn) = note;

              struct resources res;

              CLEAR_RESOURCE (&res);
              for (curr = orig_first_insn;
                   curr = NEXT_INSN (curr))
                  if (GET_RTX_CLASS (GET_CODE (curr)) == 'i')
                    mark_set_resources (PATTERN (curr), &res, 0, 0);
                  if (TEST_HARD_REG_BIT (res.regs, REGNO (dest)))
                  if (curr == orig_last_insn)

              /* In case the reg was not used later, it is a dead store;
                 add a REG_UNUSED note.  */
              if (! TEST_HARD_REG_BIT (res.regs, REGNO (dest)))
                  rtx note = rtx_alloc (EXPR_LIST);
                  PUT_REG_NOTE_KIND (note, REG_UNUSED);
                  XEXP (note, 0) = dest;
                  XEXP (note, 1) = REG_NOTES (insn);
                  REG_NOTES (insn) = note;

      rtx set = single_set (insn);

      /* If this is a set, scan backwards for a previous
         reference, and attach a REG_DEAD note to it.  But we don't
         want to do it if the insn is both using and setting the
         register.

         Global registers are always live.  */
      if (set && ! reg_overlap_mentioned_p (dest, SET_SRC (pat))
          && (REGNO (dest) >= FIRST_PSEUDO_REGISTER
              || ! global_regs[REGNO (dest)]))
          for (tem = PREV_INSN (insn);
               tem != NULL_RTX; tem = PREV_INSN (tem))
              if (maybe_add_dead_note_use (tem, dest))
/* Subroutine of update_life_info.  Update the value of reg_n_sets for all
   registers modified by X.  INC is -1 if the containing insn is being deleted,
   and is 1 if the containing insn is a newly generated insn.  */

update_n_sets (x, inc)
  rtx dest = SET_DEST (x);

  while (GET_CODE (dest) == STRICT_LOW_PART || GET_CODE (dest) == SUBREG
         || GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == SIGN_EXTRACT)
    dest = SUBREG_REG (dest);

  if (GET_CODE (dest) == REG)
      int regno = REGNO (dest);

      if (regno < FIRST_PSEUDO_REGISTER)
          int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (dest));

          for (i = regno; i < endregno; i++)
            REG_N_SETS (i) += inc;

        REG_N_SETS (regno) += inc;
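/* Worked example, not part of the function: deleting an insn that sets a
   two-word hard register (regno R in a mode spanning two hard regs) calls
   update_n_sets with INC == -1, which decrements both REG_N_SETS (R) and
   REG_N_SETS (R + 1), while a pseudo register only adjusts its single
   REG_N_SETS counter.  */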
/* Scan INSN for a SET that sets REG.  If it sets REG via a SUBREG,
   then return 2.  If it sets REG directly, return 1.  Otherwise, return
   0.  */

static int sets_reg_or_subreg_ret;
static rtx sets_reg_or_subreg_rtx;

sets_reg_or_subreg_1 (x, set)
  if (rtx_equal_p (x, sets_reg_or_subreg_rtx))
      if (x == XEXP (set, 0))
        sets_reg_or_subreg_ret = 1;
      else if (GET_CODE (XEXP (set, 0)) == SUBREG)
        sets_reg_or_subreg_ret = 2;

sets_reg_or_subreg (insn, reg)
  if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')

  sets_reg_or_subreg_ret = 0;
  sets_reg_or_subreg_rtx = reg;
  note_stores (PATTERN (insn), sets_reg_or_subreg_1);
  return sets_reg_or_subreg_ret;
/* If a replaced SET_INSN (which is part of the insns between
   OLD_FIRST_INSN and OLD_LAST_INSN inclusive) is modifying a multiple
   register target, and the original dest is now set in the new insns
   (between FIRST_INSN and LAST_INSN inclusive) by one or more subreg
   sets, then the new insns no longer kill the destination of the
   original insn.

   We may also be directly using the register in the new insns before
   it is set.

   In either case, if there exists an instruction in the same basic
   block before the replaced insns which uses the original dest (and
   contains a corresponding REG_DEAD note), then we must remove this
   note.

   SET_INSN is the insn that contains the SET; it may be a PARALLEL
   containing the SET insn.

   SET is the actual SET insn proper.  */

maybe_remove_dead_notes (set_insn, set, first_insn, last_insn,
                         old_first_insn, old_last_insn)
     rtx first_insn, last_insn;
     rtx old_first_insn, old_last_insn;
  rtx stop_insn = NEXT_INSN (last_insn);

  if (GET_RTX_CLASS (GET_CODE (set)) != 'i')

  set_pattern = PATTERN (set);

  if (GET_CODE (set_pattern) == PARALLEL)
      for (i = 0; i < XVECLEN (set_pattern, 0); i++)
          maybe_remove_dead_notes (set_insn, XVECEXP (set_pattern, 0, i),
                                   first_insn, last_insn,
                                   old_first_insn, old_last_insn);

  if (GET_CODE (set_pattern) != SET)

  set_dest = SET_DEST (set_pattern);

  if (GET_CODE (set_dest) != REG)

  /* We have a set of a REG.  First we need to determine if this set is
     both using and setting the register.  (FIXME: if this is in a
     PARALLEL, we will have to check the other exprs as well.)  */
  if (reg_overlap_mentioned_p (set_dest, SET_SRC (set_pattern)))

  /* Now determine if we used or set the register in the old insns
     previous to this one.  */

  for (insn = old_first_insn; insn != set_insn; insn = NEXT_INSN (insn))
      if (reg_overlap_mentioned_p (set_dest, insn))

  /* Now determine if we're setting it in the new insns, or using
     it.  */
  for (insn = first_insn; insn != stop_insn; insn = NEXT_INSN (insn))
      set_type = sets_reg_or_subreg (insn, set_dest);
      else if (reg_overlap_mentioned_p (set_dest, insn))
          /* Is the reg now used in this new insn?  -- This is probably an
             error.  */

      /* The register is being set via a SUBREG or is being used in
         some other way, so it's no longer dead.

         Search backwards from first_insn, looking for the first insn
         that uses the original dest.  Stop if we pass a CODE_LABEL or
         a JUMP_INSN.

         If we find such an insn and it has a REG_DEAD note referring
         to the original dest, then delete the note.  */

      for (insn = first_insn; insn != NULL_RTX; insn = PREV_INSN (insn))
          if (GET_CODE (insn) == CODE_LABEL
              || GET_CODE (insn) == JUMP_INSN)
          else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
                   && reg_mentioned_p (set_dest, insn))
              rtx note = find_regno_note (insn, REG_DEAD, REGNO (set_dest));
              if (note != NULL_RTX)
                  remove_note (insn, note);
                  /* ??? -- Is this right?  */

  else if (set_type == 0)
      /* The reg is not being set or used in the new insns at all.  */

      /* Should never reach here for a pseudo reg.  */
      if (REGNO (set_dest) >= FIRST_PSEUDO_REGISTER)

      /* This can happen for a hard register, if the new insns do not
         contain instructions which would be no-ops referring to the
         old registers.

         We try to verify that this is the case by checking to see if
         the original instruction uses all of the registers that it
         set.  This case is OK, because deleting a no-op can not affect
         REG_DEAD notes on other insns.  If this is not the case, then
         abort.  */

      regno = REGNO (set_dest);
      for (i = HARD_REGNO_NREGS (regno, GET_MODE (set_dest)) - 1;
        if (! refers_to_regno_p (regno + i, regno + i + 1, set,
/* Updates all flow-analysis related quantities (including REG_NOTES) for
   the insns from FIRST to LAST inclusive that were created by replacing
   the insns from ORIG_INSN_FIRST to ORIG_INSN_LAST inclusive.  NOTES
   are the original REG_NOTES.  */

update_life_info (notes, first, last, orig_first_insn, orig_last_insn)
     rtx orig_first_insn, orig_last_insn;
  rtx orig_dest, temp;

  /* Get and save the destination set by the original insn, if there
     was only one insn replaced.  */

  if (orig_first_insn == orig_last_insn)
      orig_insn = orig_first_insn;
      orig_dest = single_set (orig_insn);
      orig_dest = SET_DEST (orig_dest);

      orig_insn = NULL_RTX;
      orig_dest = NULL_RTX;

  /* Move REG_NOTES from the original insns to where they now belong.  */

  for (note = notes; note; note = next)
      next = XEXP (note, 1);
      switch (REG_NOTE_KIND (note))

          /* Move these notes from the original insn to the last new
             insn where the register is mentioned.  */

          for (insn = last; ; insn = PREV_INSN (insn))
              if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
                  && reg_mentioned_p (XEXP (note, 0), PATTERN (insn)))
                  /* Sometimes need to convert REG_UNUSED notes to
                     REG_DEAD notes.  */
                  if (REG_NOTE_KIND (note) == REG_UNUSED
                      && GET_CODE (XEXP (note, 0)) == REG
                      && ! dead_or_set_p (insn, XEXP (note, 0)))
                      PUT_REG_NOTE_KIND (note, REG_DEAD);

                  split_hard_reg_notes (insn, note, first, last);
                  /* The reg only dies in one insn, the last one that uses
                     it.  */

              /* It must die somewhere, fail if we couldn't find where it died.

                 We abort because otherwise the register will be live
                 longer than it should, and we'll probably take an
                 abort later.  What we should do instead is search back
                 and find the appropriate places to insert the note.  */

          if (REG_NOTE_KIND (note) == REG_DEAD)

          /* If the insn that set the register to 0 was deleted, this
             note cannot be relied on any longer.  The destination might
             even have been moved to memory.
             This was observed for SH4 with execute/920501-6.c compilation,
             -O2 -fomit-frame-pointer -finline-functions .  */

          if (GET_CODE (XEXP (note, 0)) == NOTE
              || INSN_DELETED_P (XEXP (note, 0)))
              if (orig_insn != NULL_RTX)
                  note_dest = orig_dest;
                  note_dest = find_insn_with_note (note, first, last);
                  if (note_dest != NULL_RTX)
                      note_dest = single_set (orig_dest);
                      if (note_dest != NULL_RTX)
                          note_dest = SET_DEST (orig_dest);

          /* This note applies to the dest of the original insn.  Find the
             first new insn that now has the same dest, and move the note
             there.  */

          for (insn = first; ; insn = NEXT_INSN (insn))
              if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
                  && (temp = single_set (insn))
                  && rtx_equal_p (SET_DEST (temp), note_dest))
                  XEXP (note, 1) = REG_NOTES (insn);
                  REG_NOTES (insn) = note;
                  /* The reg is only zero before one insn, the first that
                     uses it.  */

              /* If this note refers to a multiple word hard
                 register, it may have been split into several smaller
                 hard register references.  We could split the notes,
                 but simply dropping them is good enough.  */
              if (GET_CODE (note_dest) == REG
                  && REGNO (note_dest) < FIRST_PSEUDO_REGISTER
                  && HARD_REGNO_NREGS (REGNO (note_dest),
                                       GET_MODE (note_dest)) > 1)

              /* It must be set somewhere; fail if we couldn't find
                 where it was set.  */

          /* A REG_EQUIV or REG_EQUAL note on an insn with more than one
             set is meaningless.  Just drop the note.  */

        case REG_NO_CONFLICT:
          /* These notes apply to the dest of the original insn.  Find the last
             new insn that now has the same dest, and move the note there.

             If we are replacing multiple insns, just drop the note.  */

          for (insn = last; ; insn = PREV_INSN (insn))
              if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
                  && (temp = single_set (insn))
                  && rtx_equal_p (SET_DEST (temp), orig_dest))
                  XEXP (note, 1) = REG_NOTES (insn);
                  REG_NOTES (insn) = note;
                  /* Only put this note on one of the new insns.  */

              /* The original dest must still be set someplace.  Abort if we
                 couldn't find it.  */

              /* However, if this note refers to a multiple word hard
                 register, it may have been split into several smaller
                 hard register references.  We could split the notes,
                 but simply dropping them is good enough.  */
              if (GET_CODE (orig_dest) == REG
                  && REGNO (orig_dest) < FIRST_PSEUDO_REGISTER
                  && HARD_REGNO_NREGS (REGNO (orig_dest),
                                       GET_MODE (orig_dest)) > 1)
              /* Likewise for multi-word memory references.  */
              if (GET_CODE (orig_dest) == MEM
                  && GET_MODE_SIZE (GET_MODE (orig_dest)) > MOVE_MAX)

          /* Move a REG_LIBCALL note to the first insn created, and update
             the corresponding REG_RETVAL note.  */
          XEXP (note, 1) = REG_NOTES (first);
          REG_NOTES (first) = note;

          insn = XEXP (note, 0);
          note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
            XEXP (note, 0) = first;

        case REG_EXEC_COUNT:
          /* Move a REG_EXEC_COUNT note to the first insn created.  */
          XEXP (note, 1) = REG_NOTES (first);
          REG_NOTES (first) = note;

          /* Move a REG_RETVAL note to the last insn created, and update
             the corresponding REG_LIBCALL note.  */
          XEXP (note, 1) = REG_NOTES (last);
          REG_NOTES (last) = note;

          insn = XEXP (note, 0);
          note = find_reg_note (insn, REG_LIBCALL, NULL_RTX);
            XEXP (note, 0) = last;

          /* This should be moved to whichever instruction is a JUMP_INSN.  */

          for (insn = last; ; insn = PREV_INSN (insn))
              if (GET_CODE (insn) == JUMP_INSN)
                  XEXP (note, 1) = REG_NOTES (insn);
                  REG_NOTES (insn) = note;
                  /* Only put this note on one of the new insns.  */

              /* Fail if we couldn't find a JUMP_INSN.  */

          /* reload sometimes leaves obsolete REG_INC notes around.  */
          if (reload_completed)
          /* This should be moved to whichever instruction now has the
             increment operation.  */

          /* Should be moved to the new insn(s) which use the label.  */
          for (insn = first; insn != NEXT_INSN (last); insn = NEXT_INSN (insn))
            if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
                && reg_mentioned_p (XEXP (note, 0), PATTERN (insn)))
              REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_LABEL,

          /* These two notes will never appear until after reorg, so we don't
             have to handle them here.  */

  /* Each new insn created has a new set.  If the destination is a
     register, then this reg is now live across several insns, whereas
     previously the dest reg was born and died within the same insn.
     To reflect this, we now need a REG_DEAD note on the insn where
     it dies.

     Similarly, the new insns may have clobbers that need REG_UNUSED
     notes.  */

  for (insn = first; ; insn = NEXT_INSN (insn))
      pat = PATTERN (insn);
      if (GET_CODE (pat) == SET || GET_CODE (pat) == CLOBBER)
        new_insn_dead_notes (pat, insn, first, last,
                             orig_first_insn, orig_last_insn);
      else if (GET_CODE (pat) == PARALLEL)
          for (i = 0; i < XVECLEN (pat, 0); i++)
              if (GET_CODE (XVECEXP (pat, 0, i)) == SET
                  || GET_CODE (XVECEXP (pat, 0, i)) == CLOBBER)
                  rtx parpat = XVECEXP (pat, 0, i);

                  new_insn_dead_notes (parpat, insn, first, last,
                                       orig_first_insn, orig_last_insn);

  /* Check to see if we have any REG_DEAD notes on insns previous to
     the new ones that are now incorrect and need to be removed.  */

  for (insn = orig_first_insn; ; insn = NEXT_INSN (insn))
      maybe_remove_dead_notes (insn, insn, first, last,
                               orig_first_insn, orig_last_insn);

      if (insn == orig_last_insn)

  /* Update reg_n_sets.  This is necessary to prevent local alloc from
     converting REG_EQUAL notes to REG_EQUIV when the new insns are setting
     a reg multiple times instead of once.  */

  for (tem = orig_first_insn; tem != NULL_RTX; tem = NEXT_INSN (tem))
      if (GET_RTX_CLASS (GET_CODE (tem)) != 'i')

      code = GET_CODE (x);
      if (code == SET || code == CLOBBER)
        update_n_sets (x, -1);
      else if (code == PARALLEL)
          for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
              code = GET_CODE (XVECEXP (x, 0, i));
              if (code == SET || code == CLOBBER)
                update_n_sets (XVECEXP (x, 0, i), -1);

      if (tem == orig_last_insn)

  for (insn = first; ; insn = NEXT_INSN (insn))
      rtx x = PATTERN (insn);
      RTX_CODE code = GET_CODE (x);

      if (code == SET || code == CLOBBER)
        update_n_sets (x, 1);
      else if (code == PARALLEL)
          for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
              code = GET_CODE (XVECEXP (x, 0, i));
              if (code == SET || code == CLOBBER)
                update_n_sets (XVECEXP (x, 0, i), 1);
/* Prepends the set of REG_NOTES in NEW to NOTES, and returns NEW.  */

prepend_reg_notes (notes, new)
  if (new == NULL_RTX)

  if (notes == NULL_RTX)

  while (XEXP (end, 1) != NULL_RTX)
      end = XEXP (end, 1);

  XEXP (end, 1) = notes;
/* Replace the insns from FIRST to LAST inclusive with the set of insns in
   NEW, and update the life analysis info accordingly.  */

replace_insns (first, last, first_new, notes)
     rtx first, last, first_new, notes;
  rtx stop = NEXT_INSN (last);
  rtx prev = PREV_INSN (first);

  if (notes == NULL_RTX)
      for (curr = first; curr != stop; curr = NEXT_INSN (curr))
          notes = prepend_reg_notes (notes, REG_NOTES (curr));

  for (curr = first; curr; curr = next)
      next = NEXT_INSN (curr);

  last_new = emit_insn_after (first_new, prev);
  first_new = NEXT_INSN (prev);
  for (i = 0; i < n_basic_blocks; i++)
      if (BLOCK_HEAD (i) == first)
          BLOCK_HEAD (i) = first_new;
      if (BLOCK_END (i) == last)
          BLOCK_END (i) = last_new;

  /* This is probably bogus.  */
  if (first_new == last_new)
      if (GET_CODE (first_new) == SEQUENCE)
          first_new = XVECEXP (first_new, 0, 0);
          last_new = XVECEXP (last_new, 0, XVECLEN (last_new, 0) - 1);

  update_life_info (notes, first_new, last_new, first, last);
/* Verify the CFG consistency.  This function checks some CFG invariants and
   aborts when something is wrong.  Hopefully it will help to convert many
   optimization passes to keep the CFG consistent.

   Currently it does the following checks:

   - test head/end pointers
   - overlapping of basic blocks
   - edge list correctness
   - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
   - tails of basic blocks (ensure that boundary is necessary)
   - scans body of the basic block for JUMP_INSN, CODE_LABEL
     and NOTE_INSN_BASIC_BLOCK
   - check that all insns are in the basic blocks
     (except the switch handling code, barriers and notes)

   In the future it can be extended to check a lot of other stuff as well
   (reachability of basic blocks, life information, etc. etc.).  */

  const int max_uid = get_max_uid ();
  const rtx rtx_first = get_insns ();
  basic_block *bb_info;

  bb_info = (basic_block *) alloca (max_uid * sizeof (basic_block));
  memset (bb_info, 0, max_uid * sizeof (basic_block));

  /* First pass check head/end pointers and set bb_info array used by
     later passes.  */
  for (i = n_basic_blocks - 1; i >= 0; i--)
      basic_block bb = BASIC_BLOCK (i);

      /* Check the head pointer and make sure that it is pointing into
         the insn list.  */
      for (x = rtx_first; x != NULL_RTX; x = NEXT_INSN (x))

        fatal ("verify_flow_info: Head insn %d for block %d not found in the insn stream.\n",
               INSN_UID (bb->head), bb->index);

      /* Check the end pointer and make sure that it is pointing into
         the insn list.  */
      for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
          if (bb_info[INSN_UID (x)] != NULL)
              fatal ("verify_flow_info: Insn %d is in multiple basic blocks (%d and %d)",
                     INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
          bb_info[INSN_UID (x)] = bb;

        fatal ("verify_flow_info: End insn %d for block %d not found in the insn stream.\n",
               INSN_UID (bb->end), bb->index);

  /* Now check the basic blocks (boundaries etc.)  */
  for (i = n_basic_blocks - 1; i >= 0; i--)
      basic_block bb = BASIC_BLOCK (i);
      /* Check correctness of edge lists.  */

          fprintf (stderr, "verify_flow_info: Basic block %d succ edge is corrupted\n",
          fprintf (stderr, "Predecessor: ");
          dump_edge_info (stderr, e, 0);
          fprintf (stderr, "\nSuccessor: ");
          dump_edge_info (stderr, e, 1);

          if (e->dest != EXIT_BLOCK_PTR)
              edge e2 = e->dest->pred;
              while (e2 && e2 != e)
                fatal ("verify_flow_info: Basic block %i edge lists are corrupted\n",

          fprintf (stderr, "verify_flow_info: Basic block %d pred edge is corrupted\n",
          fprintf (stderr, "Predecessor: ");
          dump_edge_info (stderr, e, 0);
          fprintf (stderr, "\nSuccessor: ");
          dump_edge_info (stderr, e, 1);

          if (e->src != ENTRY_BLOCK_PTR)
              edge e2 = e->src->succ;
              while (e2 && e2 != e)
                fatal ("verify_flow_info: Basic block %i edge lists are corrupted\n",

      /* OK pointers are correct.  Now check the header of basic
         block.  It ought to contain optional CODE_LABEL followed
         by NOTE_BASIC_BLOCK.  */
      if (GET_CODE (x) == CODE_LABEL)
          fatal ("verify_flow_info: Basic block contains only CODE_LABEL and no NOTE_INSN_BASIC_BLOCK note\n");

      if (GET_CODE (x) != NOTE
          || NOTE_LINE_NUMBER (x) != NOTE_INSN_BASIC_BLOCK
          || NOTE_BASIC_BLOCK (x) != bb)
          fatal ("verify_flow_info: NOTE_INSN_BASIC_BLOCK is missing for block %d\n",

      /* Do checks for empty blocks here.  */

          if (GET_CODE (x) == NOTE
              && NOTE_LINE_NUMBER (x) == NOTE_INSN_BASIC_BLOCK)
              fatal ("verify_flow_info: NOTE_INSN_BASIC_BLOCK %d in the middle of basic block %d\n",
                     INSN_UID (x), bb->index);

          if (GET_CODE (x) == JUMP_INSN
              || GET_CODE (x) == CODE_LABEL
              || GET_CODE (x) == BARRIER)
              fatal_insn ("verify_flow_info: Incorrect insn in the middle of basic block %d\n",

      if (!bb_info[INSN_UID (x)])
          switch (GET_CODE (x))

              /* An addr_vec is placed outside any basic block.  */
              && GET_CODE (NEXT_INSN (x)) == JUMP_INSN
              && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
                  || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC))

              /* But in any case, non-deletable labels can appear anywhere.  */

              fatal_insn ("verify_flow_info: Insn outside basic block\n", x);
/* Functions to access an edge list with a vector representation.
   Enough data is kept such that given an index number, the
   pred and succ that edge represents can be determined, or
   given a pred and a succ, its index number can be returned.
   This allows algorithms which consume a lot of memory to
   represent the normally full matrix of edge (pred,succ) with a
   single indexed vector, edge (EDGE_INDEX (pred, succ)), with no
   wasted space in the client code due to sparse flow graphs.  */
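/* Illustrative sketch, not part of the edge-list code: once the list has
   been built by the initialization routine below, every edge can be
   visited by index and a (pred, succ) pair can be mapped back to an index;
   freeing the list releases the index vector.  The stderr stream is a
   hypothetical choice, and create_edge_list names the initialization
   routine described below (its definition line is not shown here, so the
   name is an assumption):

       struct edge_list *elist = create_edge_list ();
       int x;

       for (x = 0; x < NUM_EDGES (elist); x++)
         dump_edge_info (stderr, elist->index_to_edge[x], 1);

       free_edge_list (elist);
*/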
/* This function initializes the edge list.  Basically the entire
   flowgraph is processed, and all edges are assigned a number,
   and the data structure is filled in.  */

  struct edge_list *elist;

  block_count = n_basic_blocks + 2;	/* Include the entry and exit blocks.  */

  /* Determine the number of edges in the flow graph by counting successor
     edges on each basic block.  */
  for (x = 0; x < n_basic_blocks; x++)
      basic_block bb = BASIC_BLOCK (x);

      for (e = bb->succ; e; e = e->succ_next)

  /* Don't forget successors of the entry block.  */
  for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)

  elist = xmalloc (sizeof (struct edge_list));
  elist->num_blocks = block_count;
  elist->num_edges = num_edges;
  elist->index_to_edge = xmalloc (sizeof (edge) * num_edges);

  /* Follow successors of the entry block, and register these edges.  */
  for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
      elist->index_to_edge[num_edges] = e;

  for (x = 0; x < n_basic_blocks; x++)
      basic_block bb = BASIC_BLOCK (x);

      /* Follow all successors of blocks, and register these edges.  */
      for (e = bb->succ; e; e = e->succ_next)
          elist->index_to_edge[num_edges] = e;
/* This function frees memory associated with an edge list.  */

free_edge_list (elist)
     struct edge_list *elist;
  free (elist->index_to_edge);
/* This function provides debug output showing an edge list.  */

print_edge_list (f, elist)
     struct edge_list *elist;
  fprintf (f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
           elist->num_blocks - 2, elist->num_edges);

  for (x = 0; x < elist->num_edges; x++)
      fprintf (f, " %-4d - edge(", x);
      if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR)
        fprintf (f, "entry,");
        fprintf (f, "%d,", INDEX_EDGE_PRED_BB (elist, x)->index);

      if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR)
        fprintf (f, "exit)\n");
        fprintf (f, "%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
/* This function provides an internal consistency check of an edge list,
   verifying that all edges are present, and that there are no
   spurious edges.  */

verify_edge_list (f, elist)
     struct edge_list *elist;
  int x, pred, succ, index;

  for (x = 0; x < n_basic_blocks; x++)
      basic_block bb = BASIC_BLOCK (x);

      for (e = bb->succ; e; e = e->succ_next)
          pred = e->src->index;
          succ = e->dest->index;
          index = EDGE_INDEX (elist, pred, succ);
          if (index == EDGE_INDEX_NO_EDGE)
              fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);

          if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
            fprintf (f, "*p* Pred for index %d should be %d not %d\n",
                     index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
          if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
            fprintf (f, "*p* Succ for index %d should be %d not %d\n",
                     index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);

  for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
      pred = e->src->index;
      succ = e->dest->index;
      index = EDGE_INDEX (elist, pred, succ);
      if (index == EDGE_INDEX_NO_EDGE)
          fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);

      if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
        fprintf (f, "*p* Pred for index %d should be %d not %d\n",
                 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
      if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
        fprintf (f, "*p* Succ for index %d should be %d not %d\n",
                 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);

  /* We've verified that all the edges are in the list, now let's make sure
     there are no spurious edges in the list.  */

  for (pred = 0; pred < n_basic_blocks; pred++)
    for (succ = 0; succ < n_basic_blocks; succ++)
        basic_block p = BASIC_BLOCK (pred);
        basic_block s = BASIC_BLOCK (succ);

        for (e = p->succ; e; e = e->succ_next)

        for (e = s->pred; e; e = e->pred_next)

        if (EDGE_INDEX (elist, pred, succ) == EDGE_INDEX_NO_EDGE
          fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
        if (EDGE_INDEX (elist, pred, succ) != EDGE_INDEX_NO_EDGE
          fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
                   pred, succ, EDGE_INDEX (elist, pred, succ));

  for (succ = 0; succ < n_basic_blocks; succ++)
      basic_block p = ENTRY_BLOCK_PTR;
      basic_block s = BASIC_BLOCK (succ);

      for (e = p->succ; e; e = e->succ_next)

      for (e = s->pred; e; e = e->pred_next)

      if (EDGE_INDEX (elist, ENTRY_BLOCK, succ) == EDGE_INDEX_NO_EDGE
        fprintf (f, "*** Edge (entry, %d) appears to not have an index\n",
      if (EDGE_INDEX (elist, ENTRY_BLOCK, succ) != EDGE_INDEX_NO_EDGE
        fprintf (f, "*** Edge (entry, %d) has index %d, but no edge exists\n",
                 succ, EDGE_INDEX (elist, ENTRY_BLOCK, succ));

  for (pred = 0; pred < n_basic_blocks; pred++)
      basic_block p = BASIC_BLOCK (pred);
      basic_block s = EXIT_BLOCK_PTR;

      for (e = p->succ; e; e = e->succ_next)

      for (e = s->pred; e; e = e->pred_next)

      if (EDGE_INDEX (elist, pred, EXIT_BLOCK) == EDGE_INDEX_NO_EDGE
        fprintf (f, "*** Edge (%d, exit) appears to not have an index\n",
      if (EDGE_INDEX (elist, pred, EXIT_BLOCK) != EDGE_INDEX_NO_EDGE
        fprintf (f, "*** Edge (%d, exit) has index %d, but no edge exists\n",
                 pred, EDGE_INDEX (elist, pred, EXIT_BLOCK));
/* This routine will determine what, if any, edge there is between
   a specified predecessor and successor.  */

find_edge_index (edge_list, pred, succ)
     struct edge_list *edge_list;
  for (x = 0; x < NUM_EDGES (edge_list); x++)
      if (INDEX_EDGE_PRED_BB (edge_list, x)->index == pred
          && INDEX_EDGE_SUCC_BB (edge_list, x)->index == succ)

  return (EDGE_INDEX_NO_EDGE);
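/* Illustrative sketch, not part of the function: a client asking whether
   two blocks (the numbers 2 and 5 are hypothetical) are connected, and
   which compressed slot the edge occupies if so:

       int ix = find_edge_index (edge_list, 2, 5);

       if (ix == EDGE_INDEX_NO_EDGE)
         ;
       else
         use_edge (INDEX_EDGE_PRED_BB (edge_list, ix),
                   INDEX_EDGE_SUCC_BB (edge_list, ix));

   where use_edge stands for whatever the client wants to do with the
   blocks at either end (a hypothetical helper, named only for this
   sketch).  */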