1 /* Data flow analysis for GNU compiler.
2 Copyright (C) 1987, 88, 92-98, 1999 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
22 /* This file contains the data flow analysis pass of the compiler. It
23 computes data flow information which tells combine_instructions
24 which insns to consider combining and controls register allocation.
26 Additional data flow information that is too bulky to record is
27 generated during the analysis, and is used at that time to create
28 autoincrement and autodecrement addressing.
30 The first step is dividing the function into basic blocks.
31 find_basic_blocks does this. Then life_analysis determines
32 where each register is live and where it is dead.
34 ** find_basic_blocks **
36 find_basic_blocks divides the current function's rtl into basic
37 blocks and constructs the CFG. The blocks are recorded in the
38 basic_block_info array; the CFG exists in the edge structures
39 referenced by the blocks.
41 find_basic_blocks also finds any unreachable loops and deletes them.
45 life_analysis is called immediately after find_basic_blocks.
46 It uses the basic block information to determine where each
47 hard or pseudo register is live.
49 ** live-register info **
51 The information about where each register is live is in two parts:
52 the REG_NOTES of insns, and the vector basic_block->global_live_at_start.
54 basic_block->global_live_at_start has an element for each basic
55 block, and the element is a bit-vector with a bit for each hard or
56 pseudo register. The bit is 1 if the register is live at the
57 beginning of the basic block.
59 Two types of elements can be added to an insn's REG_NOTES.
60 A REG_DEAD note is added to an insn's REG_NOTES for any register
61 that meets both of two conditions: The value in the register is not
62 needed in subsequent insns and the insn does not replace the value in
63 the register (in the case of multi-word hard registers, the value in
64 each register must be replaced by the insn to avoid a REG_DEAD note).
66 In the vast majority of cases, an object in a REG_DEAD note will be
67 used somewhere in the insn. The (rare) exception to this is if an
68 insn uses a multi-word hard register and only some of the registers are
69 needed in subsequent insns. In that case, REG_DEAD notes will be
70 provided for those hard registers that are not subsequently needed.
71 Partial REG_DEAD notes of this type do not occur when an insn sets
72 only some of the hard registers used in such a multi-word operand;
73 omitting REG_DEAD notes for objects stored in an insn is optional and
74 the desire to do so does not justify the complexity of the partial
77 REG_UNUSED notes are added for each register that is set by the insn
78 but is unused subsequently (if every register set by the insn is unused
79 and the insn does not reference memory or have some other side-effect,
80 the insn is deleted instead). If only part of a multi-word hard
81 register is used in a subsequent insn, REG_UNUSED notes are made for
82 the parts that will not be used.
84 To determine which registers are live after any insn, one can
85 start from the beginning of the basic block and scan insns, noting
86 which registers are set by each insn and which die there.
88 ** Other actions of life_analysis **
90 life_analysis sets up the LOG_LINKS fields of insns because the
91 information needed to do so is readily available.
93 life_analysis deletes insns whose only effect is to store a value
96 life_analysis notices cases where a reference to a register as
97 a memory address can be combined with a preceding or following
98 incrementation or decrementation of the register. The separate
99 instruction to increment or decrement is deleted and the address
100 is changed to a POST_INC or similar rtx.
102 Each time an incrementing or decrementing address is created,
103 a REG_INC element is added to the insn's REG_NOTES list.
105 life_analysis fills in certain vectors containing information about
106 register usage: reg_n_refs, reg_n_deaths, reg_n_sets, reg_live_length,
107 reg_n_calls_crossed and reg_basic_block.
109 life_analysis sets current_function_sp_is_unchanging if the function
110 doesn't modify the stack pointer. */
114 Split out from life_analysis:
115 - local property discovery (bb->local_live, bb->local_set)
116 - global property computation
118 - pre/post modify transformation
126 #include "basic-block.h"
127 #include "insn-config.h"
129 #include "hard-reg-set.h"
132 #include "function.h"
136 #include "insn-flags.h"
140 #define obstack_chunk_alloc xmalloc
141 #define obstack_chunk_free free
144 /* EXIT_IGNORE_STACK should be nonzero if, when returning from a function,
145 the stack pointer does not matter. The value is tested only in
146 functions that have frame pointers.
147 No definition is equivalent to always zero. */
148 #ifndef EXIT_IGNORE_STACK
149 #define EXIT_IGNORE_STACK 0
152 #ifndef HAVE_epilogue
153 #define HAVE_epilogue 0
156 #ifndef HAVE_prologue
157 #define HAVE_prologue 0
160 /* The contents of the current function definition are allocated
161 in this obstack, and all are freed at the end of the function.
162 For top-level functions, this is temporary_obstack.
163 Separate obstacks are made for nested functions. */
165 extern struct obstack
*function_obstack
;
167 /* Number of basic blocks in the current function. */
171 /* Number of edges in the current function. */
175 /* The basic block array. */
177 varray_type basic_block_info
;
179 /* The special entry and exit blocks. */
181 struct basic_block_def entry_exit_blocks
[2] =
188 NULL
, /* local_set */
189 NULL
, /* global_live_at_start */
190 NULL
, /* global_live_at_end */
192 ENTRY_BLOCK
, /* index */
194 -1, -1 /* eh_beg, eh_end */
201 NULL
, /* local_set */
202 NULL
, /* global_live_at_start */
203 NULL
, /* global_live_at_end */
205 EXIT_BLOCK
, /* index */
207 -1, -1 /* eh_beg, eh_end */
211 /* Nonzero if the second flow pass has completed. */
214 /* Maximum register number used in this function, plus one. */
218 /* Indexed by n, giving various register information */
220 varray_type reg_n_info
;
222 /* Size of the reg_n_info table. */
224 unsigned int reg_n_max
;
226 /* Element N is the next insn that uses (hard or pseudo) register number N
227 within the current basic block; or zero, if there is no such insn.
228 This is valid only during the final backward scan in propagate_block. */
230 static rtx
*reg_next_use
;
232 /* Size of a regset for the current function,
233 in (1) bytes and (2) elements. */
238 /* Regset of regs live when calls to `setjmp'-like functions happen. */
239 /* ??? Does this exist only for the setjmp-clobbered warning message? */
241 regset regs_live_at_setjmp
;
243 /* List made of EXPR_LIST rtx's which gives pairs of pseudo registers
244 that have to go in the same hard reg.
245 The first two regs in the list are a pair, and the next two
246 are another pair, etc. */
249 /* Depth within loops of basic block being scanned for lifetime analysis,
250 plus one. This is the weight attached to references to registers. */
252 static int loop_depth
;
254 /* During propagate_block, this is non-zero if the value of CC0 is live. */
258 /* During propagate_block, this contains a list of all the MEMs we are
259 tracking for dead store elimination. */
261 static rtx mem_set_list
;
263 /* Set of registers that may be eliminable. These are handled specially
264 in updating regs_ever_live. */
266 static HARD_REG_SET elim_reg_set
;
268 /* The basic block structure for every insn, indexed by uid. */
270 varray_type basic_block_for_insn
;
272 /* The labels mentioned in non-jump rtl. Valid during find_basic_blocks. */
273 /* ??? Should probably be using LABEL_NUSES instead. It would take a
274 bit of surgery to be able to use or co-opt the routines in jump. */
276 static rtx label_value_list
;
278 /* INSN_VOLATILE (insn) is 1 if the insn refers to anything volatile. */
280 #define INSN_VOLATILE(INSN) bitmap_bit_p (uid_volatile, INSN_UID (INSN))
281 #define SET_INSN_VOLATILE(INSN) bitmap_set_bit (uid_volatile, INSN_UID (INSN))
282 static bitmap uid_volatile
;
284 /* Forward declarations */
285 static int count_basic_blocks
PROTO((rtx
));
286 static rtx find_basic_blocks_1
PROTO((rtx
));
287 static void create_basic_block
PROTO((int, rtx
, rtx
, rtx
));
288 static void clear_edges
PROTO((void));
289 static void make_edges
PROTO((rtx
));
290 static void make_edge
PROTO((sbitmap
*, basic_block
,
292 static void make_label_edge
PROTO((sbitmap
*, basic_block
,
294 static void make_eh_edge
PROTO((sbitmap
*, eh_nesting_info
*,
295 basic_block
, rtx
, int));
296 static void mark_critical_edges
PROTO((void));
297 static void move_stray_eh_region_notes
PROTO((void));
298 static void record_active_eh_regions
PROTO((rtx
));
300 static void commit_one_edge_insertion
PROTO((edge
));
302 static void delete_unreachable_blocks
PROTO((void));
303 static void delete_eh_regions
PROTO((void));
304 static int can_delete_note_p
PROTO((rtx
));
305 static int delete_block
PROTO((basic_block
));
306 static void expunge_block
PROTO((basic_block
));
307 static rtx flow_delete_insn
PROTO((rtx
));
308 static int can_delete_label_p
PROTO((rtx
));
309 static int merge_blocks_move_predecessor_nojumps
PROTO((basic_block
,
311 static int merge_blocks_move_successor_nojumps
PROTO((basic_block
,
313 static void merge_blocks_nomove
PROTO((basic_block
, basic_block
));
314 static int merge_blocks
PROTO((edge
,basic_block
,basic_block
));
315 static void try_merge_blocks
PROTO((void));
316 static void tidy_fallthru_edge
PROTO((edge
,basic_block
,basic_block
));
317 static void calculate_loop_depth
PROTO((rtx
));
319 static int verify_wide_reg_1
PROTO((rtx
*, void *));
320 static void verify_wide_reg
PROTO((int, rtx
, rtx
));
321 static void verify_local_live_at_start
PROTO((regset
, basic_block
));
322 static int set_noop_p
PROTO((rtx
));
323 static int noop_move_p
PROTO((rtx
));
324 static void notice_stack_pointer_modification
PROTO ((rtx
, rtx
, void *));
325 static void record_volatile_insns
PROTO((rtx
));
326 static void mark_reg
PROTO((regset
, rtx
));
327 static void mark_regs_live_at_end
PROTO((regset
));
328 static void life_analysis_1
PROTO((rtx
, int, int));
329 static void calculate_global_regs_live
PROTO((sbitmap
, sbitmap
, int));
330 static void propagate_block
PROTO((regset
, rtx
, rtx
,
332 static int insn_dead_p
PROTO((rtx
, regset
, int, rtx
));
333 static int libcall_dead_p
PROTO((rtx
, regset
, rtx
, rtx
));
334 static void mark_set_regs
PROTO((regset
, regset
, rtx
,
336 static void mark_set_1
PROTO((regset
, regset
, rtx
,
339 static void find_auto_inc
PROTO((regset
, rtx
, rtx
));
340 static int try_pre_increment_1
PROTO((rtx
));
341 static int try_pre_increment
PROTO((rtx
, rtx
, HOST_WIDE_INT
));
343 static void mark_used_regs
PROTO((regset
, regset
, rtx
, int, rtx
));
344 void dump_flow_info
PROTO((FILE *));
345 void debug_flow_info
PROTO((void));
346 static void dump_edge_info
PROTO((FILE *, edge
, int));
348 static int_list_ptr alloc_int_list_node
PROTO ((int_list_block
**));
349 static int_list_ptr add_int_list_node
PROTO ((int_list_block
**,
352 static void add_pred_succ
PROTO ((int, int, int_list_ptr
*,
353 int_list_ptr
*, int *, int *));
355 static void count_reg_sets_1
PROTO ((rtx
));
356 static void count_reg_sets
PROTO ((rtx
));
357 static void count_reg_references
PROTO ((rtx
));
358 static void invalidate_mems_from_autoinc
PROTO ((rtx
));
359 static void remove_edge
PROTO ((edge
));
360 static void remove_fake_successors
PROTO ((basic_block
));
362 /* This function is always defined so it can be called from the
363 debugger, and it is declared extern so we don't get warnings about
365 void verify_flow_info
PROTO ((void));
/* NOTE(review): this chunk is a lossy extraction -- interior source lines
   (per the embedded original line numbers) are missing from this function,
   so declarations/braces are incomplete here.  Code below is untouched;
   comments describe only what is visible.  */
/* Top-level CFG (re)construction driver: flushes any stale block data,
   counts blocks, sizes basic_block_info, delegates block discovery to
   find_basic_blocks_1, then builds edges and per-block loop depth.  */
368 /* Find basic blocks of the current function.
369 F is the first insn of the function and NREGS the number of register
373 find_basic_blocks (f
, nregs
, file
, do_cleanup
)
375 int nregs ATTRIBUTE_UNUSED
;
376 FILE *file ATTRIBUTE_UNUSED
;
381 /* Flush out existing data. */
382 if (basic_block_info
!= NULL
)
388 /* Clear bb->aux on all extant basic blocks. We'll use this as a
389 tag for reuse during create_basic_block, just in case some pass
390 copies around basic block notes improperly. */
391 for (i
= 0; i
< n_basic_blocks
; ++i
)
392 BASIC_BLOCK (i
)->aux
= NULL
;
394 VARRAY_FREE (basic_block_info
);
/* Recount blocks from scratch; count_basic_blocks mirrors the
   block-start conditions used by find_basic_blocks_1.  */
397 n_basic_blocks
= count_basic_blocks (f
);
399 /* Size the basic block table. The actual structures will be allocated
400 by find_basic_blocks_1, since we want to keep the structure pointers
401 stable across calls to find_basic_blocks. */
402 /* ??? This whole issue would be much simpler if we called find_basic_blocks
403 exactly once, and thereafter we don't have a single long chain of
404 instructions at all until close to the end of compilation when we
405 actually lay them out. */
407 VARRAY_BB_INIT (basic_block_info
, n_basic_blocks
, "basic_block_info");
409 label_value_list
= find_basic_blocks_1 (f
);
411 /* Record the block to which an insn belongs. */
412 /* ??? This should be done another way, by which (perhaps) a label is
413 tagged directly with the basic block that it starts. It is used for
414 more than that currently, but IMO that is the only valid use. */
416 max_uid
= get_max_uid ();
418 /* Leave space for insns life_analysis makes in some cases for auto-inc.
419 These cases are rare, so we don't need too much space. */
/* 10% headroom over the current max uid for later-created insns.  */
420 max_uid
+= max_uid
/ 10;
423 compute_bb_for_insn (max_uid
);
425 /* Discover the edges of our cfg. */
427 record_active_eh_regions (f
);
428 make_edges (label_value_list
);
430 /* Delete unreachable blocks, then merge blocks when possible. */
434 delete_unreachable_blocks ();
435 move_stray_eh_region_notes ();
436 record_active_eh_regions (f
);
440 /* Mark critical edges. */
442 mark_critical_edges ();
444 /* Discover the loop depth at the start of each basic block to aid
445 register allocation. */
446 calculate_loop_depth (f
);
448 /* Kill the data we won't maintain. */
449 label_value_list
= NULL_RTX
;
451 #ifdef ENABLE_CHECKING
/* NOTE(review): interior source lines are missing from this function in
   this extraction; code is left byte-identical, comments added only.  */
/* First pass over the insn chain: count how many basic blocks it will
   yield.  A block starts at a CODE_LABEL, or at an insn following a
   JUMP_INSN, BARRIER, or a CALL_INSN that had an abnormal edge.  */
456 /* Count the basic blocks of the function. */
459 count_basic_blocks (f
)
463 register RTX_CODE prev_code
;
464 register int count
= 0;
466 int call_had_abnormal_edge
= 0;
467 rtx prev_call
= NULL_RTX
;
/* Seed prev_code so the first real insn opens a block.  */
469 prev_code
= JUMP_INSN
;
470 for (insn
= f
; insn
; insn
= NEXT_INSN (insn
))
472 register RTX_CODE code
= GET_CODE (insn
);
474 if (code
== CODE_LABEL
475 || (GET_RTX_CLASS (code
) == 'i'
476 && (prev_code
== JUMP_INSN
477 || prev_code
== BARRIER
478 || (prev_code
== CALL_INSN
&& call_had_abnormal_edge
))))
482 /* If the previous insn was a call that did not create an
483 abnormal edge, we want to add a nop so that the CALL_INSN
484 itself is not at basic_block_end. This allows us to
485 easily distinguish between normal calls and those which
486 create abnormal edges in the flow graph. */
488 if (count
> 0 && prev_call
!= 0 && !call_had_abnormal_edge
)
490 rtx nop
= gen_rtx_USE (VOIDmode
, const0_rtx
);
491 emit_insn_after (nop
, prev_call
);
495 /* Record whether this call created an edge. */
496 if (code
== CALL_INSN
)
498 rtx note
= find_reg_note (insn
, REG_EH_REGION
, NULL_RTX
);
/* Region defaults to 1 when no REG_EH_REGION note is present.  */
499 int region
= (note
? XWINT (XEXP (note
, 0), 0) : 1);
501 call_had_abnormal_edge
= 0;
503 /* If there is a specified EH region, we have an edge. */
504 if (eh_region
&& region
> 0)
505 call_had_abnormal_edge
= 1;
508 /* If there is a nonlocal goto label and the specified
509 region number isn't -1, we have an edge. (0 means
510 no throw, but might have a nonlocal goto). */
511 if (nonlocal_goto_handler_labels
&& region
>= 0)
512 call_had_abnormal_edge
= 1;
515 else if (code
!= NOTE
)
516 prev_call
= NULL_RTX
;
520 else if (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_EH_REGION_BEG
)
522 else if (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_EH_REGION_END
)
527 /* The rest of the compiler works a bit smoother when we don't have to
528 check for the edge case of do-nothing functions with no basic blocks. */
531 emit_insn (gen_rtx_USE (VOIDmode
, const0_rtx
));
/* NOTE(review): interior source lines are missing from this function in
   this extraction (several conditions/braces are cut); code is left
   byte-identical, comments added only.  */
/* Second pass: walk the insn chain, carving it into basic blocks via
   create_basic_block, and collect labels referenced by REG_LABEL notes
   (used later by make_edges for computed gotos).  */
538 /* Find all basic blocks of the function whose first insn is F.
540 Collect and return a list of labels whose addresses are taken. This
541 will be used in make_edges for use with computed gotos. */
544 find_basic_blocks_1 (f
)
547 register rtx insn
, next
;
548 int call_has_abnormal_edge
= 0;
550 rtx bb_note
= NULL_RTX
;
551 rtx eh_list
= NULL_RTX
;
/* Shadows the file-scope label_value_list; this local is returned.  */
552 rtx label_value_list
= NULL_RTX
;
556 /* We process the instructions in a slightly different way than we did
557 previously. This is so that we see a NOTE_BASIC_BLOCK after we have
558 closed out the previous block, so that it gets attached at the proper
559 place. Since this form should be equivalent to the previous,
560 count_basic_blocks continues to use the old form as a check. */
562 for (insn
= f
; insn
; insn
= next
)
564 enum rtx_code code
= GET_CODE (insn
);
566 next
= NEXT_INSN (insn
);
568 if (code
== CALL_INSN
)
570 /* Record whether this call created an edge. */
571 rtx note
= find_reg_note (insn
, REG_EH_REGION
, NULL_RTX
);
572 int region
= (note
? XWINT (XEXP (note
, 0), 0) : 1);
573 call_has_abnormal_edge
= 0;
575 /* If there is an EH region, we have an edge. */
576 if (eh_list
&& region
> 0)
577 call_has_abnormal_edge
= 1;
580 /* If there is a nonlocal goto label and the specified
581 region number isn't -1, we have an edge. (0 means
582 no throw, but might have a nonlocal goto). */
583 if (nonlocal_goto_handler_labels
&& region
>= 0)
584 call_has_abnormal_edge
= 1;
592 int kind
= NOTE_LINE_NUMBER (insn
);
594 /* Keep a LIFO list of the currently active exception notes. */
595 if (kind
== NOTE_INSN_EH_REGION_BEG
)
596 eh_list
= alloc_INSN_LIST (insn
, eh_list
);
597 else if (kind
== NOTE_INSN_EH_REGION_END
)
600 eh_list
= XEXP (eh_list
, 1);
601 free_INSN_LIST_node (t
);
604 /* Look for basic block notes with which to keep the
605 basic_block_info pointers stable. Unthread the note now;
606 we'll put it back at the right place in create_basic_block.
607 Or not at all if we've already found a note in this block. */
608 else if (kind
== NOTE_INSN_BASIC_BLOCK
)
610 if (bb_note
== NULL_RTX
)
612 next
= flow_delete_insn (insn
);
619 /* A basic block starts at a label. If we've closed one off due
620 to a barrier or some such, no need to do it again. */
621 if (head
!= NULL_RTX
)
623 /* While we now have edge lists with which other portions of
624 the compiler might determine a call ending a basic block
625 does not imply an abnormal edge, it will be a bit before
626 everything can be updated. So continue to emit a noop at
627 the end of such a block. */
628 if (GET_CODE (end
) == CALL_INSN
)
630 rtx nop
= gen_rtx_USE (VOIDmode
, const0_rtx
);
631 end
= emit_insn_after (nop
, end
);
634 create_basic_block (i
++, head
, end
, bb_note
);
641 /* A basic block ends at a jump. */
642 if (head
== NULL_RTX
)
646 /* ??? Make a special check for table jumps. The way this
647 happens is truly and amazingly gross. We are about to
648 create a basic block that contains just a code label and
649 an addr*vec jump insn. Worse, an addr_diff_vec creates
650 its own natural loop.
652 Prevent this bit of brain damage, pasting things together
653 correctly in make_edges.
655 The correct solution involves emitting the table directly
656 on the tablejump instruction as a note, or JUMP_LABEL. */
658 if (GET_CODE (PATTERN (insn
)) == ADDR_VEC
659 || GET_CODE (PATTERN (insn
)) == ADDR_DIFF_VEC
)
667 goto new_bb_inclusive
;
670 /* A basic block ends at a barrier. It may be that an unconditional
671 jump already closed the basic block -- no need to do it again. */
672 if (head
== NULL_RTX
)
675 /* While we now have edge lists with which other portions of the
676 compiler might determine a call ending a basic block does not
677 imply an abnormal edge, it will be a bit before everything can
678 be updated. So continue to emit a noop at the end of such a
680 if (GET_CODE (end
) == CALL_INSN
)
682 rtx nop
= gen_rtx_USE (VOIDmode
, const0_rtx
);
683 end
= emit_insn_after (nop
, end
);
685 goto new_bb_exclusive
;
688 /* A basic block ends at a call that can either throw or
689 do a non-local goto. */
690 if (call_has_abnormal_edge
)
693 if (head
== NULL_RTX
)
698 create_basic_block (i
++, head
, end
, bb_note
);
699 head
= end
= NULL_RTX
;
706 if (GET_RTX_CLASS (code
) == 'i')
708 if (head
== NULL_RTX
)
715 if (GET_RTX_CLASS (code
) == 'i')
719 /* Make a list of all labels referred to other than by jumps
720 (which just don't have the REG_LABEL notes).
722 Make a special exception for labels followed by an ADDR*VEC,
723 as this would be a part of the tablejump setup code.
725 Make a special exception for the eh_return_stub_label, which
726 we know isn't part of any otherwise visible control flow. */
728 for (note
= REG_NOTES (insn
); note
; note
= XEXP (note
, 1))
729 if (REG_NOTE_KIND (note
) == REG_LABEL
)
731 rtx lab
= XEXP (note
, 0), next
;
733 if (lab
== eh_return_stub_label
)
735 else if ((next
= next_nonnote_insn (lab
)) != NULL
736 && GET_CODE (next
) == JUMP_INSN
737 && (GET_CODE (PATTERN (next
)) == ADDR_VEC
738 || GET_CODE (PATTERN (next
)) == ADDR_DIFF_VEC
))
742 = alloc_EXPR_LIST (0, XEXP (note
, 0), label_value_list
);
/* Flush the final partially-built block, if any.  */
747 if (head
!= NULL_RTX
)
748 create_basic_block (i
++, head
, end
, bb_note
);
/* Sanity check against count_basic_blocks (mismatch path not
   visible in this extraction).  */
750 if (i
!= n_basic_blocks
)
753 return label_value_list
;
/* NOTE(review): interior source lines are missing from this function in
   this extraction; code is left byte-identical, comments added only.  */
/* Build basic block INDEX spanning [HEAD, END], reusing BB_NOTE's
   NOTE_INSN_BASIC_BLOCK and its attached struct when possible so
   basic_block pointers stay stable across rebuilds.  */
756 /* Create a new basic block consisting of the instructions between
757 HEAD and END inclusive. Reuses the note and basic block struct
758 in BB_NOTE, if any. */
761 create_basic_block (index
, head
, end
, bb_note
)
763 rtx head
, end
, bb_note
;
768 && ! RTX_INTEGRATED_P (bb_note
)
769 && (bb
= NOTE_BASIC_BLOCK (bb_note
)) != NULL
772 /* If we found an existing note, thread it back onto the chain. */
/* A label stays first in the block, so the note goes after it;
   otherwise the note precedes HEAD.  */
774 if (GET_CODE (head
) == CODE_LABEL
)
775 add_insn_after (bb_note
, head
);
778 add_insn_before (bb_note
, head
);
784 /* Otherwise we must create a note and a basic block structure.
785 Since we allow basic block structs in rtl, give the struct
786 the same lifetime by allocating it off the function obstack
787 rather than using malloc. */
789 bb
= (basic_block
) obstack_alloc (function_obstack
, sizeof (*bb
));
790 memset (bb
, 0, sizeof (*bb
));
792 if (GET_CODE (head
) == CODE_LABEL
)
793 bb_note
= emit_note_after (NOTE_INSN_BASIC_BLOCK
, head
);
796 bb_note
= emit_note_before (NOTE_INSN_BASIC_BLOCK
, head
);
799 NOTE_BASIC_BLOCK (bb_note
) = bb
;
802 /* Always include the bb note in the block. */
803 if (NEXT_INSN (end
) == bb_note
)
809 BASIC_BLOCK (index
) = bb
;
811 /* Tag the block so that we know it has been used when considering
812 other basic block notes. */
/* NOTE(review): interior source lines are missing from this function in
   this extraction; code is left byte-identical, comments added only.  */
/* Rebuild the uid -> basic_block map: free any old varray, allocate a
   fresh one of MAX entries, then walk each block's insns recording
   their owning block.  */
816 /* Records the basic block struct in BB_FOR_INSN, for every instruction
817 indexed by INSN_UID. MAX is the size of the array. */
820 compute_bb_for_insn (max
)
825 if (basic_block_for_insn
)
826 VARRAY_FREE (basic_block_for_insn
);
827 VARRAY_BB_INIT (basic_block_for_insn
, max
, "basic_block_for_insn");
829 for (i
= 0; i
< n_basic_blocks
; ++i
)
831 basic_block bb
= BASIC_BLOCK (i
);
838 int uid
= INSN_UID (insn
);
840 VARRAY_BB (basic_block_for_insn
, uid
) = bb
;
843 insn
= NEXT_INSN (insn
);
/* NOTE(review): interior source lines are missing from this function in
   this extraction (the edge-freeing bodies are cut); code is left
   byte-identical, comments added only.  */
/* Tear down the CFG edge lists: walk every block's successor chain
   (and the entry block's), then clear entry's succ and exit's pred.  */
848 /* Free the memory associated with the edge structures. */
856 for (i
= 0; i
< n_basic_blocks
; ++i
)
858 basic_block bb
= BASIC_BLOCK (i
);
860 for (e
= bb
->succ
; e
; e
= n
)
870 for (e
= ENTRY_BLOCK_PTR
->succ
; e
; e
= n
)
876 ENTRY_BLOCK_PTR
->succ
= 0;
877 EXIT_BLOCK_PTR
->pred
= 0;
/* NOTE(review): interior source lines are missing from this function in
   this extraction (several branch bodies are cut); code is left
   byte-identical, comments added only.  */
/* Discover all CFG edges: per-block, inspect the final insn for jumps
   (tablejumps, computed gotos, returns, plain jumps), add EH and
   nonlocal-goto edges for calls, handle the libgcc __throw special
   case, and finally detect fallthru into the next block.  */
882 /* Identify the edges between basic blocks.
884 NONLOCAL_LABEL_LIST is a list of non-local labels in the function. Blocks
885 that are otherwise unreachable may be reachable with a non-local goto.
887 BB_EH_END is an array indexed by basic block number in which we record
888 the list of exception regions active at the end of the basic block. */
891 make_edges (label_value_list
)
892 rtx label_value_list
;
895 eh_nesting_info
*eh_nest_info
= init_eh_nesting_info ();
896 sbitmap
*edge_cache
= NULL
;
898 /* Assume no computed jump; revise as we create edges. */
899 current_function_has_computed_jump
= 0;
901 /* Heavy use of computed goto in machine-generated code can lead to
902 nearly fully-connected CFGs. In that case we spend a significant
903 amount of time searching the edge lists for duplicates. */
904 if (forced_labels
|| label_value_list
)
906 edge_cache
= sbitmap_vector_alloc (n_basic_blocks
, n_basic_blocks
);
907 sbitmap_vector_zero (edge_cache
, n_basic_blocks
);
910 /* By nature of the way these get numbered, block 0 is always the entry. */
911 make_edge (edge_cache
, ENTRY_BLOCK_PTR
, BASIC_BLOCK (0), EDGE_FALLTHRU
);
913 for (i
= 0; i
< n_basic_blocks
; ++i
)
915 basic_block bb
= BASIC_BLOCK (i
);
918 int force_fallthru
= 0;
920 /* Examine the last instruction of the block, and discover the
921 ways we can leave the block. */
924 code
= GET_CODE (insn
);
927 if (code
== JUMP_INSN
)
931 /* ??? Recognize a tablejump and do the right thing. */
932 if ((tmp
= JUMP_LABEL (insn
)) != NULL_RTX
933 && (tmp
= NEXT_INSN (tmp
)) != NULL_RTX
934 && GET_CODE (tmp
) == JUMP_INSN
935 && (GET_CODE (PATTERN (tmp
)) == ADDR_VEC
936 || GET_CODE (PATTERN (tmp
)) == ADDR_DIFF_VEC
))
/* ADDR_DIFF_VEC keeps its labels in operand 1, ADDR_VEC in 0.  */
941 if (GET_CODE (PATTERN (tmp
)) == ADDR_VEC
)
942 vec
= XVEC (PATTERN (tmp
), 0);
944 vec
= XVEC (PATTERN (tmp
), 1);
946 for (j
= GET_NUM_ELEM (vec
) - 1; j
>= 0; --j
)
947 make_label_edge (edge_cache
, bb
,
948 XEXP (RTVEC_ELT (vec
, j
), 0), 0);
950 /* Some targets (eg, ARM) emit a conditional jump that also
951 contains the out-of-range target. Scan for these and
952 add an edge if necessary. */
953 if ((tmp
= single_set (insn
)) != NULL
954 && SET_DEST (tmp
) == pc_rtx
955 && GET_CODE (SET_SRC (tmp
)) == IF_THEN_ELSE
956 && GET_CODE (XEXP (SET_SRC (tmp
), 2)) == LABEL_REF
)
957 make_label_edge (edge_cache
, bb
,
958 XEXP (XEXP (SET_SRC (tmp
), 2), 0), 0);
960 #ifdef CASE_DROPS_THROUGH
961 /* Silly VAXen. The ADDR_VEC is going to be in the way of
962 us naturally detecting fallthru into the next block. */
967 /* If this is a computed jump, then mark it as reaching
968 everything on the label_value_list and forced_labels list. */
969 else if (computed_jump_p (insn
))
971 current_function_has_computed_jump
= 1;
973 for (x
= label_value_list
; x
; x
= XEXP (x
, 1))
974 make_label_edge (edge_cache
, bb
, XEXP (x
, 0), EDGE_ABNORMAL
);
976 for (x
= forced_labels
; x
; x
= XEXP (x
, 1))
977 make_label_edge (edge_cache
, bb
, XEXP (x
, 0), EDGE_ABNORMAL
);
980 /* Returns create an exit out. */
981 else if (returnjump_p (insn
))
982 make_edge (edge_cache
, bb
, EXIT_BLOCK_PTR
, 0);
984 /* Otherwise, we have a plain conditional or unconditional jump. */
987 if (! JUMP_LABEL (insn
))
989 make_label_edge (edge_cache
, bb
, JUMP_LABEL (insn
), 0);
993 /* If this is a CALL_INSN, then mark it as reaching the active EH
994 handler for this CALL_INSN. If we're handling asynchronous
995 exceptions then any insn can reach any of the active handlers.
997 Also mark the CALL_INSN as reaching any nonlocal goto handler. */
999 if (code
== CALL_INSN
|| asynchronous_exceptions
)
1001 /* If there's an EH region active at the end of a block,
1002 add the appropriate edges. */
1003 if (bb
->eh_end
>= 0)
1004 make_eh_edge (edge_cache
, eh_nest_info
, bb
, insn
, bb
->eh_end
);
1006 /* If we have asynchronous exceptions, do the same for *all*
1007 exception regions active in the block. */
1008 if (asynchronous_exceptions
1009 && bb
->eh_beg
!= bb
->eh_end
)
1011 if (bb
->eh_beg
>= 0)
1012 make_eh_edge (edge_cache
, eh_nest_info
, bb
,
1013 NULL_RTX
, bb
->eh_beg
);
1015 for (x
= bb
->head
; x
!= bb
->end
; x
= NEXT_INSN (x
))
1016 if (GET_CODE (x
) == NOTE
1017 && (NOTE_LINE_NUMBER (x
) == NOTE_INSN_EH_REGION_BEG
1018 || NOTE_LINE_NUMBER (x
) == NOTE_INSN_EH_REGION_END
))
1020 int region
= NOTE_EH_HANDLER (x
);
1021 make_eh_edge (edge_cache
, eh_nest_info
, bb
,
1026 if (code
== CALL_INSN
&& nonlocal_goto_handler_labels
)
1028 /* ??? This could be made smarter: in some cases it's possible
1029 to tell that certain calls will not do a nonlocal goto.
1031 For example, if the nested functions that do the nonlocal
1032 gotos do not have their addresses taken, then only calls to
1033 those functions or to other nested functions that use them
1034 could possibly do nonlocal gotos. */
1035 /* We do know that a REG_EH_REGION note with a value less
1036 than 0 is guaranteed not to perform a non-local goto. */
1037 rtx note
= find_reg_note (insn
, REG_EH_REGION
, NULL_RTX
);
1038 if (!note
|| XINT (XEXP (note
, 0), 0) >= 0)
1039 for (x
= nonlocal_goto_handler_labels
; x
; x
= XEXP (x
, 1))
1040 make_label_edge (edge_cache
, bb
, XEXP (x
, 0),
1041 EDGE_ABNORMAL
| EDGE_ABNORMAL_CALL
);
1045 /* We know something about the structure of the function __throw in
1046 libgcc2.c. It is the only function that ever contains eh_stub
1047 labels. It modifies its return address so that the last block
1048 returns to one of the eh_stub labels within it. So we have to
1049 make additional edges in the flow graph. */
1050 if (i
+ 1 == n_basic_blocks
&& eh_return_stub_label
!= 0)
1051 make_label_edge (edge_cache
, bb
, eh_return_stub_label
, EDGE_EH
);
1053 /* Find out if we can drop through to the next block. */
1054 insn
= next_nonnote_insn (insn
);
1055 if (!insn
|| (i
+ 1 == n_basic_blocks
&& force_fallthru
))
1056 make_edge (edge_cache
, bb
, EXIT_BLOCK_PTR
, EDGE_FALLTHRU
);
1057 else if (i
+ 1 < n_basic_blocks
)
1059 rtx tmp
= BLOCK_HEAD (i
+ 1);
1060 if (GET_CODE (tmp
) == NOTE
)
1061 tmp
= next_nonnote_insn (tmp
);
1062 if (force_fallthru
|| insn
== tmp
)
1063 make_edge (edge_cache
, bb
, BASIC_BLOCK (i
+ 1), EDGE_FALLTHRU
);
1067 free_eh_nesting_info (eh_nest_info
);
1069 sbitmap_vector_free (edge_cache
);
/* NOTE(review): interior source lines are missing from this function in
   this extraction; code is left byte-identical, comments added only.  */
/* Add (or merge flags into) an edge SRC->DST.  Uses the sbitmap edge
   cache when available to avoid O(edges) duplicate scans; otherwise
   falls back to walking SRC's successor list.  */
1072 /* Create an edge between two basic blocks. FLAGS are auxiliary information
1073 about the edge that is accumulated between calls. */
1076 make_edge (edge_cache
, src
, dst
, flags
)
1077 sbitmap
*edge_cache
;
1078 basic_block src
, dst
;
1084 /* Don't bother with edge cache for ENTRY or EXIT; there aren't that
1085 many edges to them, and we didn't allocate memory for it. */
1086 use_edge_cache
= (edge_cache
1087 && src
!= ENTRY_BLOCK_PTR
1088 && dst
!= EXIT_BLOCK_PTR
);
1090 /* Make sure we don't add duplicate edges. */
1091 if (! use_edge_cache
|| TEST_BIT (edge_cache
[src
->index
], dst
->index
))
1092 for (e
= src
->succ
; e
; e
= e
->succ_next
)
/* No duplicate found: allocate and link a fresh edge at the head
   of both the successor and predecessor lists.  */
1099 e
= (edge
) xcalloc (1, sizeof (*e
));
1102 e
->succ_next
= src
->succ
;
1103 e
->pred_next
= dst
->pred
;
1112 SET_BIT (edge_cache
[src
->index
], dst
->index
);
/* NOTE(review): interior source lines are missing from this function in
   this extraction; code is left byte-identical, comments added only.  */
/* Edge from SRC to the block containing LABEL; bails out on non-labels
   and on labels that were never emitted (uid 0).  */
1115 /* Create an edge from a basic block to a label. */
1118 make_label_edge (edge_cache
, src
, label
, flags
)
1119 sbitmap
*edge_cache
;
1124 if (GET_CODE (label
) != CODE_LABEL
)
1127 /* If the label was never emitted, this insn is junk, but avoid a
1128 crash trying to refer to BLOCK_FOR_INSN (label). This can happen
1129 as a result of a syntax error and a diagnostic has already been
1132 if (INSN_UID (label
) == 0)
1135 make_edge (edge_cache
, src
, BLOCK_FOR_INSN (label
), flags
);
/* NOTE(review): interior source lines are missing from this function in
   this extraction (the loop over handlers is cut); code is left
   byte-identical, comments added only.  */
/* For each handler reachable from REGION (per eh_nest_info), add an
   abnormal EH edge from SRC to the handler's label; calls additionally
   get EDGE_ABNORMAL_CALL.  */
1138 /* Create the edges generated by INSN in REGION. */
1141 make_eh_edge (edge_cache
, eh_nest_info
, src
, insn
, region
)
1142 sbitmap
*edge_cache
;
1143 eh_nesting_info
*eh_nest_info
;
1148 handler_info
**handler_list
;
1151 is_call
= (insn
&& GET_CODE (insn
) == CALL_INSN
? EDGE_ABNORMAL_CALL
: 0);
1152 num
= reachable_handlers (region
, eh_nest_info
, insn
, &handler_list
);
1155 make_label_edge (edge_cache
, src
, handler_list
[num
]->handler_label
,
1156 EDGE_ABNORMAL
| EDGE_EH
| is_call
);
1160 /* EH_REGION notes appearing between basic blocks is ambiguous, and even
1161 dangerous if we intend to move basic blocks around. Move such notes
1162 into the following block. */
/* NOTE(review): extraction-shredded fragment; bytes kept identical.
   Purpose (per the original comment above and visible code): EH_REGION
   notes sitting between basic blocks are ambiguous, so unlink them from
   the gap after block I and re-insert them inside the following block
   (after its leading CODE_LABEL, if any).  Iterates blocks high-to-low.  */
1165 move_stray_eh_region_notes ()
1170 if (n_basic_blocks
< 2)
1173 b2
= BASIC_BLOCK (n_basic_blocks
- 1);
1174 for (i
= n_basic_blocks
- 2; i
>= 0; --i
, b2
= b1
)
1176 rtx insn
, next
, list
= NULL_RTX
;
1178 b1
= BASIC_BLOCK (i
);
/* Scan the insns strictly between b1's end and b2's head.  */
1179 for (insn
= NEXT_INSN (b1
->end
); insn
!= b2
->head
; insn
= next
)
1181 next
= NEXT_INSN (insn
);
1182 if (GET_CODE (insn
) == NOTE
1183 && (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_EH_REGION_BEG
1184 || NOTE_LINE_NUMBER (insn
) == NOTE_INSN_EH_REGION_END
))
1186 /* Unlink from the insn chain. */
1187 NEXT_INSN (PREV_INSN (insn
)) = next
;
1188 PREV_INSN (next
) = PREV_INSN (insn
);
/* NOTE(review): lines 1189-1190 elided; queue insn onto LIST.  */
1191 NEXT_INSN (insn
) = list
;
1196 if (list
== NULL_RTX
)
1199 /* Find where to insert these things. */
1201 if (GET_CODE (insn
) == CODE_LABEL
)
1202 insn
= NEXT_INSN (insn
);
1206 next
= NEXT_INSN (list
);
1207 add_insn_after (list
, insn
);
1213 /* Recompute eh_beg/eh_end for each basic block. */
/* NOTE(review): extraction-shredded fragment; bytes kept identical.
   Purpose (per the comment above and visible code): walk all insns from F,
   maintaining a stack (EH_LIST, an INSN_LIST) of open EH regions, and set
   each block's eh_beg/eh_end to the innermost active region handler at its
   head/end insn (-1 when no region is active).  */
1216 record_active_eh_regions (f
)
1219 rtx insn
, eh_list
= NULL_RTX
;
1221 basic_block bb
= BASIC_BLOCK (0);
1223 for (insn
= f
; insn
; insn
= NEXT_INSN (insn
))
1225 if (bb
->head
== insn
)
1226 bb
->eh_beg
= (eh_list
? NOTE_EH_HANDLER (XEXP (eh_list
, 0)) : -1);
1228 if (GET_CODE (insn
) == NOTE
)
1230 int kind
= NOTE_LINE_NUMBER (insn
);
/* REGION_BEG pushes; REGION_END pops the innermost region.  */
1231 if (kind
== NOTE_INSN_EH_REGION_BEG
)
1232 eh_list
= alloc_INSN_LIST (insn
, eh_list
);
1233 else if (kind
== NOTE_INSN_EH_REGION_END
)
1235 rtx t
= XEXP (eh_list
, 1);
1236 free_INSN_LIST_node (eh_list
);
1241 if (bb
->end
== insn
)
1243 bb
->eh_end
= (eh_list
? NOTE_EH_HANDLER (XEXP (eh_list
, 0)) : -1);
1245 if (i
== n_basic_blocks
)
1247 bb
= BASIC_BLOCK (i
);
1252 /* Identify critical edges and set the bits appropriately. */
/* NOTE(review): extraction-shredded fragment; bytes kept identical.
   Purpose (from visible code): for every block (starting with ENTRY), mark
   each successor edge EDGE_CRITICAL iff the source has multiple successors
   AND the destination has multiple predecessors; otherwise clear the bit.  */
1255 mark_critical_edges ()
1257 int i
, n
= n_basic_blocks
;
1260 /* We begin with the entry block. This is not terribly important now,
1261 but could be if a front end (Fortran) implemented alternate entry
1263 bb
= ENTRY_BLOCK_PTR
;
1270 /* (1) Critical edges must have a source with multiple successors. */
1271 if (bb
->succ
&& bb
->succ
->succ_next
)
1273 for (e
= bb
->succ
; e
; e
= e
->succ_next
)
1275 /* (2) Critical edges must have a destination with multiple
1276 predecessors. Note that we know there is at least one
1277 predecessor -- the edge we followed to get here. */
1278 if (e
->dest
->pred
->pred_next
)
1279 e
->flags
|= EDGE_CRITICAL
;
1281 e
->flags
&= ~EDGE_CRITICAL
;
/* Single-successor source: none of its edges can be critical.  */
1286 for (e
= bb
->succ
; e
; e
= e
->succ_next
)
1287 e
->flags
&= ~EDGE_CRITICAL
;
1292 bb
= BASIC_BLOCK (i
);
1296 /* Split a (typically critical) edge. Return the new block.
1297 Abort on abnormal edges.
1299 ??? The code generally expects to be called on critical edges.
1300 The case of a block ending in an unconditional jump to a
1301 block with multiple predecessors is not handled optimally. */
/* NOTE(review): extraction-shredded fragment; bytes kept identical; many
   interior lines elided (numbering jumps throughout).  Purpose (per the
   comment above and visible code): split EDGE_IN by inserting a fresh basic
   block between its source and destination.  Aborts on abnormal edges.
   Handles: rethreading pred lists, preserving a pre-existing fallthru into
   the destination by emitting a jump (possibly recursively splitting that
   fallthru edge too), renumbering the basic_block_info array, emitting the
   new block's NOTE_INSN_BASIC_BLOCK, and redirecting the predecessor's jump
   (including ADDR_VEC/ADDR_DIFF_VEC tablejumps and casesi dispatch) to a
   freshly generated label when EDGE_IN was not a fallthru edge.  */
1304 split_edge (edge_in
)
1307 basic_block old_pred
, bb
, old_succ
;
1312 /* Abnormal edges cannot be split. */
1313 if ((edge_in
->flags
& EDGE_ABNORMAL
) != 0)
1316 old_pred
= edge_in
->src
;
1317 old_succ
= edge_in
->dest
;
1319 /* Remove the existing edge from the destination's pred list. */
1322 for (pp
= &old_succ
->pred
; *pp
!= edge_in
; pp
= &(*pp
)->pred_next
)
1324 *pp
= edge_in
->pred_next
;
1325 edge_in
->pred_next
= NULL
;
1328 /* Create the new structures. */
1329 bb
= (basic_block
) obstack_alloc (function_obstack
, sizeof (*bb
));
1330 edge_out
= (edge
) xcalloc (1, sizeof (*edge_out
));
1333 memset (bb
, 0, sizeof (*bb
));
1334 bb
->global_live_at_start
= OBSTACK_ALLOC_REG_SET (function_obstack
);
1335 bb
->global_live_at_end
= OBSTACK_ALLOC_REG_SET (function_obstack
);
1337 /* ??? This info is likely going to be out of date very soon. */
1338 if (old_succ
->global_live_at_start
)
1340 COPY_REG_SET (bb
->global_live_at_start
, old_succ
->global_live_at_start
);
1341 COPY_REG_SET (bb
->global_live_at_end
, old_succ
->global_live_at_start
);
1345 CLEAR_REG_SET (bb
->global_live_at_start
);
1346 CLEAR_REG_SET (bb
->global_live_at_end
);
/* Wire the new block between old_pred and old_succ: edge_in now targets
   BB (elided), BB's lone successor edge is the new fallthru EDGE_OUT.  */
1351 bb
->succ
= edge_out
;
1354 edge_in
->flags
&= ~EDGE_CRITICAL
;
1356 edge_out
->pred_next
= old_succ
->pred
;
1357 edge_out
->succ_next
= NULL
;
1359 edge_out
->dest
= old_succ
;
1360 edge_out
->flags
= EDGE_FALLTHRU
;
1361 edge_out
->probability
= REG_BR_PROB_BASE
;
1363 old_succ
->pred
= edge_out
;
1365 /* Tricky case -- if there existed a fallthru into the successor
1366 (and we're not it) we must add a new unconditional jump around
1367 the new block we're actually interested in.
1369 Further, if that edge is critical, this means a second new basic
1370 block must be created to hold it. In order to simplify correct
1371 insn placement, do this before we touch the existing basic block
1372 ordering for the block we were really wanting. */
1373 if ((edge_in
->flags
& EDGE_FALLTHRU
) == 0)
1376 for (e
= edge_out
->pred_next
; e
; e
= e
->pred_next
)
1377 if (e
->flags
& EDGE_FALLTHRU
)
1382 basic_block jump_block
;
1385 if ((e
->flags
& EDGE_CRITICAL
) == 0)
1387 /* Non critical -- we can simply add a jump to the end
1388 of the existing predecessor. */
1389 jump_block
= e
->src
;
1393 /* We need a new block to hold the jump. The simplest
1394 way to do the bulk of the work here is to recursively
1396 jump_block
= split_edge (e
);
1397 e
= jump_block
->succ
;
1400 /* Now add the jump insn ... */
1401 pos
= emit_jump_insn_after (gen_jump (old_succ
->head
),
1403 jump_block
->end
= pos
;
1404 emit_barrier_after (pos
);
1406 /* ... let jump know that label is in use, ... */
1407 JUMP_LABEL (pos
) = old_succ
->head
;
1408 ++LABEL_NUSES (old_succ
->head
);
1410 /* ... and clear fallthru on the outgoing edge. */
1411 e
->flags
&= ~EDGE_FALLTHRU
;
1413 /* Continue splitting the interesting edge. */
1417 /* Place the new block just in front of the successor. */
1418 VARRAY_GROW (basic_block_info
, ++n_basic_blocks
);
1419 if (old_succ
== EXIT_BLOCK_PTR
)
1420 j
= n_basic_blocks
- 1;
1422 j
= old_succ
->index
;
/* Shift blocks j..n-2 up one slot to open index j for BB.  */
1423 for (i
= n_basic_blocks
- 1; i
> j
; --i
)
1425 basic_block tmp
= BASIC_BLOCK (i
- 1);
1426 BASIC_BLOCK (i
) = tmp
;
1429 BASIC_BLOCK (i
) = bb
;
1432 /* Create the basic block note. */
1433 if (old_succ
!= EXIT_BLOCK_PTR
)
1434 bb_note
= emit_note_before (NOTE_INSN_BASIC_BLOCK
, old_succ
->head
);
1436 bb_note
= emit_note_after (NOTE_INSN_BASIC_BLOCK
, get_last_insn ());
1437 NOTE_BASIC_BLOCK (bb_note
) = bb
;
1438 bb
->head
= bb
->end
= bb_note
;
1440 /* Not quite simple -- for non-fallthru edges, we must adjust the
1441 predecessor's jump instruction to target our new block. */
1442 if ((edge_in
->flags
& EDGE_FALLTHRU
) == 0)
1444 rtx tmp
, insn
= old_pred
->end
;
1445 rtx old_label
= old_succ
->head
;
1446 rtx new_label
= gen_label_rtx ();
1448 if (GET_CODE (insn
) != JUMP_INSN
)
1451 /* ??? Recognize a tablejump and adjust all matching cases. */
1452 if ((tmp
= JUMP_LABEL (insn
)) != NULL_RTX
1453 && (tmp
= NEXT_INSN (tmp
)) != NULL_RTX
1454 && GET_CODE (tmp
) == JUMP_INSN
1455 && (GET_CODE (PATTERN (tmp
)) == ADDR_VEC
1456 || GET_CODE (PATTERN (tmp
)) == ADDR_DIFF_VEC
))
/* ADDR_DIFF_VEC keeps its labels in operand 1, ADDR_VEC in 0.  */
1461 if (GET_CODE (PATTERN (tmp
)) == ADDR_VEC
)
1462 vec
= XVEC (PATTERN (tmp
), 0);
1464 vec
= XVEC (PATTERN (tmp
), 1);
1466 for (j
= GET_NUM_ELEM (vec
) - 1; j
>= 0; --j
)
1467 if (XEXP (RTVEC_ELT (vec
, j
), 0) == old_label
)
1469 RTVEC_ELT (vec
, j
) = gen_rtx_LABEL_REF (VOIDmode
, new_label
);
1470 --LABEL_NUSES (old_label
);
1471 ++LABEL_NUSES (new_label
);
1474 /* Handle casesi dispatch insns */
1475 if ((tmp
= single_set (insn
)) != NULL
1476 && SET_DEST (tmp
) == pc_rtx
1477 && GET_CODE (SET_SRC (tmp
)) == IF_THEN_ELSE
1478 && GET_CODE (XEXP (SET_SRC (tmp
), 2)) == LABEL_REF
1479 && XEXP (XEXP (SET_SRC (tmp
), 2), 0) == old_label
)
1481 XEXP (SET_SRC (tmp
), 2) = gen_rtx_LABEL_REF (VOIDmode
,
1483 --LABEL_NUSES (old_label
);
1484 ++LABEL_NUSES (new_label
);
1489 /* This would have indicated an abnormal edge. */
1490 if (computed_jump_p (insn
))
1493 /* A return instruction can't be redirected. */
1494 if (returnjump_p (insn
))
1497 /* If the insn doesn't go where we think, we're confused. */
1498 if (JUMP_LABEL (insn
) != old_label
)
1501 redirect_jump (insn
, new_label
);
/* The new label heads the new block.  */
1504 emit_label_before (new_label
, bb_note
);
1505 bb
->head
= new_label
;
1511 /* Queue instructions for insertion on an edge between two basic blocks.
1512 The new instructions and basic blocks (if any) will not appear in the
1513 CFG until commit_edge_insertions is called. */
/* NOTE(review): extraction-shredded fragment; bytes kept identical.
   Purpose (per the comment above and visible code): queue PATTERN on edge
   E's pending-insns sequence; the CFG is not updated until
   commit_edge_insertions runs.  Aborts (elided) on edges that are both
   abnormal and critical, since nothing can be inserted there.  */
1516 insert_insn_on_edge (pattern
, e
)
1520 /* We cannot insert instructions on an abnormal critical edge.
1521 It will be easier to find the culprit if we die now. */
1522 if ((e
->flags
& (EDGE_ABNORMAL
|EDGE_CRITICAL
))
1523 == (EDGE_ABNORMAL
|EDGE_CRITICAL
))
1526 if (e
->insns
== NULL_RTX
)
1529 push_to_sequence (e
->insns
);
1531 emit_insn (pattern
);
1533 e
->insns
= get_insns ();
1537 /* Update the CFG for the instructions queued on edge E. */
/* NOTE(review): extraction-shredded fragment; bytes kept identical; several
   interior lines elided.  Purpose (from visible code): materialize the insns
   queued on edge E.  Placement policy: into the destination (before its
   first real insn) when it has a single predecessor and isn't EXIT; else at
   the end of the source when the edge is its only, non-abnormal successor
   and the source isn't ENTRY; otherwise split the edge and insert into the
   new block.  Also updates BLOCK_FOR_INSN and the block head/end markers.  */
1540 commit_one_edge_insertion (e
)
1543 rtx before
= NULL_RTX
, after
= NULL_RTX
, tmp
;
1546 /* Figure out where to put these things. If the destination has
1547 one predecessor, insert there. Except for the exit block. */
1548 if (e
->dest
->pred
->pred_next
== NULL
1549 && e
->dest
!= EXIT_BLOCK_PTR
)
1553 /* Get the location correct wrt a code label, and "nice" wrt
1554 a basic block note, and before everything else. */
1556 if (GET_CODE (tmp
) == CODE_LABEL
)
1557 tmp
= NEXT_INSN (tmp
);
1558 if (GET_CODE (tmp
) == NOTE
1559 && NOTE_LINE_NUMBER (tmp
) == NOTE_INSN_BASIC_BLOCK
)
1560 tmp
= NEXT_INSN (tmp
);
1561 if (tmp
== bb
->head
)
1564 after
= PREV_INSN (tmp
);
1567 /* If the source has one successor and the edge is not abnormal,
1568 insert there. Except for the entry block. */
1569 else if ((e
->flags
& EDGE_ABNORMAL
) == 0
1570 && e
->src
->succ
->succ_next
== NULL
1571 && e
->src
!= ENTRY_BLOCK_PTR
)
1574 if (GET_CODE (bb
->end
) == JUMP_INSN
)
1576 /* ??? Is it possible to wind up with non-simple jumps? Perhaps
1577 a jump with delay slots already filled? */
1578 if (! simplejump_p (bb
->end
))
1585 /* We'd better be fallthru, or we've lost track of what's what. */
1586 if ((e
->flags
& EDGE_FALLTHRU
) == 0)
1593 /* Otherwise we must split the edge. */
1596 bb
= split_edge (e
);
1600 /* Now that we've found the spot, do the insertion. */
/* The queued sequence is consumed; clear it so it isn't emitted twice.  */
1602 e
->insns
= NULL_RTX
;
1604 /* Set the new block number for these insns, if structure is allocated. */
1605 if (basic_block_for_insn
)
1608 for (i
= tmp
; i
!= NULL_RTX
; i
= NEXT_INSN (i
))
1609 set_block_for_insn (i
, bb
);
1614 emit_insns_before (tmp
, before
);
1615 if (before
== bb
->head
)
1620 tmp
= emit_insns_after (tmp
, after
);
1621 if (after
== bb
->end
)
1626 /* Update the CFG for all queued instructions. */
/* NOTE(review): extraction-shredded fragment; bytes kept identical.
   Purpose (from visible code): walk every block starting at ENTRY and call
   commit_one_edge_insertion on each successor edge.  NEXT is cached because
   committing may rewrite the successor list (e.g. via split_edge).  */
1629 commit_edge_insertions ()
1635 bb
= ENTRY_BLOCK_PTR
;
1640 for (e
= bb
->succ
; e
; e
= next
)
1642 next
= e
->succ_next
;
1644 commit_one_edge_insertion (e
);
1647 if (++i
>= n_basic_blocks
)
1649 bb
= BASIC_BLOCK (i
);
1653 /* Delete all unreachable basic blocks. */
/* NOTE(review): extraction-shredded fragment; bytes kept identical; interior
   lines elided (e.g. the worklist push/mark bodies).  Purpose (from visible
   code): worklist DFS from ENTRY's successors marking reachable blocks via
   bb->aux, delete every unmarked block (high index to low, so renumbering in
   delete_block doesn't disturb the scan), then tidy newly-adjacent fallthru
   edges, and finally prune dead EH regions if a handler was deleted.  */
1656 delete_unreachable_blocks ()
1658 basic_block
*worklist
, *tos
;
1659 int deleted_handler
;
1664 tos
= worklist
= (basic_block
*) xmalloc (sizeof (basic_block
) * n
);
1666 /* Use basic_block->aux as a marker. Clear them all. */
1668 for (i
= 0; i
< n
; ++i
)
1669 BASIC_BLOCK (i
)->aux
= NULL
;
1671 /* Add our starting points to the worklist. Almost always there will
1672 be only one. It isn't inconcievable that we might one day directly
1673 support Fortran alternate entry points. */
1675 for (e
= ENTRY_BLOCK_PTR
->succ
; e
; e
= e
->succ_next
)
1679 /* Mark the block with a handy non-null value. */
1683 /* Iterate: find everything reachable from what we've already seen. */
1685 while (tos
!= worklist
)
1687 basic_block b
= *--tos
;
1689 for (e
= b
->succ
; e
; e
= e
->succ_next
)
1697 /* Delete all unreachable basic blocks. Count down so that we don't
1698 interfere with the block renumbering that happens in delete_block. */
1700 deleted_handler
= 0;
1702 for (i
= n
- 1; i
>= 0; --i
)
1704 basic_block b
= BASIC_BLOCK (i
);
1707 /* This block was found. Tidy up the mark. */
1710 deleted_handler
|= delete_block (b
);
1713 /* Fix up edges that now fall through, or rather should now fall through
1714 but previously required a jump around now deleted blocks. Simplify
1715 the search by only examining blocks numerically adjacent, since this
1716 is how find_basic_blocks created them. */
1718 for (i
= 1; i
< n_basic_blocks
; ++i
)
1720 basic_block b
= BASIC_BLOCK (i
- 1);
1721 basic_block c
= BASIC_BLOCK (i
);
1724 /* We care about simple conditional or unconditional jumps with
1727 If we had a conditional branch to the next instruction when
1728 find_basic_blocks was called, then there will only be one
1729 out edge for the block which ended with the conditional
1730 branch (since we do not create duplicate edges).
1732 Furthermore, the edge will be marked as a fallthru because we
1733 merge the flags for the duplicate edges. So we do not want to
1734 check that the edge is not a FALLTHRU edge. */
1735 if ((s
= b
->succ
) != NULL
1736 && s
->succ_next
== NULL
1738 /* If the jump insn has side effects, we can't tidy the edge. */
1739 && (GET_CODE (b
->end
) != JUMP_INSN
1740 || onlyjump_p (b
->end
)))
1741 tidy_fallthru_edge (s
, b
, c
);
1744 /* If we deleted an exception handler, we may have EH region begin/end
1745 blocks to remove as well. */
1746 if (deleted_handler
)
1747 delete_eh_regions ();
1752 /* Find EH regions for which there is no longer a handler, and delete them. */
/* NOTE(review): extraction-shredded fragment; bytes kept identical.
   Purpose (from visible code): after handler deletion, turn the
   EH_REGION_BEG/END notes of regions with no remaining handler (and no
   rethrow references) into NOTE_INSN_DELETED.  */
1755 delete_eh_regions ()
1759 update_rethrow_references ();
1761 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
1762 if (GET_CODE (insn
) == NOTE
)
1764 if ((NOTE_LINE_NUMBER (insn
) == NOTE_INSN_EH_REGION_BEG
) ||
1765 (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_EH_REGION_END
))
1767 int num
= NOTE_EH_HANDLER (insn
);
1768 /* A NULL handler indicates a region is no longer needed,
1769 as long as it isn't the target of a rethrow. */
1770 if (get_first_handler (num
) == NULL
&& ! rethrow_used (num
))
1772 NOTE_LINE_NUMBER (insn
) = NOTE_INSN_DELETED
;
1773 NOTE_SOURCE_FILE (insn
) = 0;
1779 /* Return true if NOTE is not one of the ones that must be kept paired,
1780 so that we may simply delete them. */
/* NOTE(review): extraction-shredded fragment; bytes kept identical.
   Predicate: a note is freely deletable only if it is already
   NOTE_INSN_DELETED or a NOTE_INSN_BASIC_BLOCK marker; all other notes
   must be kept (many come in BEG/END pairs).  */
1783 can_delete_note_p (note
)
1786 return (NOTE_LINE_NUMBER (note
) == NOTE_INSN_DELETED
1787 || NOTE_LINE_NUMBER (note
) == NOTE_INSN_BASIC_BLOCK
);
1790 /* Unlink a chain of insns between START and FINISH, leaving notes
1791 that must be paired. */
/* NOTE(review): extraction-shredded fragment; bytes kept identical.
   Purpose (per the comment above and visible code): unlink insns from START
   through FINISH inclusive, one at a time, skipping notes that
   can_delete_note_p rejects and labels that can_delete_label_p rejects.  */
1794 flow_delete_insn_chain (start
, finish
)
1797 /* Unchain the insns one by one. It would be quicker to delete all
1798 of these with a single unchaining, rather than one at a time, but
1799 we need to keep the NOTE's. */
1805 next
= NEXT_INSN (start
);
1806 if (GET_CODE (start
) == NOTE
&& !can_delete_note_p (start
))
1808 else if (GET_CODE (start
) == CODE_LABEL
&& !can_delete_label_p (start
))
1811 next
= flow_delete_insn (start
);
1813 if (start
== finish
)
1819 /* Delete the insns in a (non-live) block. We physically delete every
1820 non-deleted-note insn, and update the flow graph appropriately.
1822 Return nonzero if we deleted an exception handler. */
1824 /* ??? Preserving all such notes strikes me as wrong. It would be nice
1825 to post-process the stream to remove empty blocks, loops, ranges, etc. */
/* NOTE(review): the function header for this block-deletion routine was
   elided by the extraction (only the body from original line 1831 on is
   visible); presumably this is the delete_block called from
   delete_unreachable_blocks -- TODO confirm against a pristine flow.c.
   Bytes kept identical.  Visible behavior: warn about never-reached code,
   splice the block's label out of exception_handler_labels (removing the
   handler and setting the return flag), unlink the insn chain including a
   trailing BARRIER, detach all in/out edges, compact the block array, and
   return nonzero iff an exception handler was deleted.  */
1831 int deleted_handler
= 0;
1834 /* If the head of this block is a CODE_LABEL, then it might be the
1835 label for an exception handler which can't be reached.
1837 We need to remove the label from the exception_handler_label list
1838 and remove the associated NOTE_INSN_EH_REGION_BEG and
1839 NOTE_INSN_EH_REGION_END notes. */
1843 never_reached_warning (insn
);
1845 if (GET_CODE (insn
) == CODE_LABEL
)
1847 rtx x
, *prev
= &exception_handler_labels
;
1849 for (x
= exception_handler_labels
; x
; x
= XEXP (x
, 1))
1851 if (XEXP (x
, 0) == insn
)
1853 /* Found a match, splice this label out of the EH label list. */
1854 *prev
= XEXP (x
, 1);
1855 XEXP (x
, 1) = NULL_RTX
;
1856 XEXP (x
, 0) = NULL_RTX
;
1858 /* Remove the handler from all regions */
1859 remove_handler (insn
);
1860 deleted_handler
= 1;
1863 prev
= &XEXP (x
, 1);
1866 /* This label may be referenced by code solely for its value, or
1867 referenced by static data, or something. We have determined
1868 that it is not reachable, but cannot delete the label itself.
1869 Save code space and continue to delete the balance of the block,
1870 along with properly updating the cfg. */
1871 if (!can_delete_label_p (insn
))
1873 /* If we've only got one of these, skip the whole deleting
1876 goto no_delete_insns
;
1877 insn
= NEXT_INSN (insn
);
1881 /* Selectively unlink the insn chain. Include any BARRIER that may
1882 follow the basic block. */
1883 end
= next_nonnote_insn (b
->end
);
1884 if (!end
|| GET_CODE (end
) != BARRIER
)
1886 flow_delete_insn_chain (insn
, end
);
1890 /* Remove the edges into and out of this block. Note that there may
1891 indeed be edges in, if we are removing an unreachable loop. */
1895 for (e
= b
->pred
; e
; e
= next
)
/* Walk the source's succ list to find and unlink E (body elided).  */
1897 for (q
= &e
->src
->succ
; *q
!= e
; q
= &(*q
)->succ_next
)
1900 next
= e
->pred_next
;
1904 for (e
= b
->succ
; e
; e
= next
)
1906 for (q
= &e
->dest
->pred
; *q
!= e
; q
= &(*q
)->pred_next
)
1909 next
= e
->succ_next
;
1918 /* Remove the basic block from the array, and compact behind it. */
1921 return deleted_handler
;
1924 /* Remove block B from the basic block array and compact behind it. */
/* NOTE(review): this routine's header was elided by the extraction; per the
   comment above (original line 1924) it removes block B from the basic
   block array and compacts the tail down -- presumably also renumbering
   each shifted block's index (the assignment is elided); TODO confirm.
   Bytes kept identical.  */
1930 int i
, n
= n_basic_blocks
;
1932 for (i
= b
->index
; i
+ 1 < n
; ++i
)
1934 basic_block x
= BASIC_BLOCK (i
+ 1);
1935 BASIC_BLOCK (i
) = x
;
1939 basic_block_info
->num_elements
--;
1943 /* Delete INSN by patching it out. Return the next insn. */
/* NOTE(review): extraction-shredded fragment; bytes kept identical.
   Purpose (per the comment above and visible code): patch INSN out of the
   doubly linked insn chain and return the next insn.  Also drops the insn
   from nonlocal_goto_handler_labels when it is a CODE_LABEL, and decrements
   LABEL_NUSES of a deleted jump's target.  */
1946 flow_delete_insn (insn
)
1949 rtx prev
= PREV_INSN (insn
);
1950 rtx next
= NEXT_INSN (insn
);
1952 PREV_INSN (insn
) = NULL_RTX
;
1953 NEXT_INSN (insn
) = NULL_RTX
;
1956 NEXT_INSN (prev
) = next
;
1958 PREV_INSN (next
) = prev
;
/* INSN was the last insn; move the chain's tail back to PREV.  */
1960 set_last_insn (prev
);
1962 if (GET_CODE (insn
) == CODE_LABEL
)
1963 remove_node_from_expr_list (insn
, &nonlocal_goto_handler_labels
);
1965 /* If deleting a jump, decrement the use count of the label. Deleting
1966 the label itself should happen in the normal course of block merging. */
1967 if (GET_CODE (insn
) == JUMP_INSN
&& JUMP_LABEL (insn
))
1968 LABEL_NUSES (JUMP_LABEL (insn
))--;
1973 /* True if a given label can be deleted. */
/* NOTE(review): extraction-shredded fragment; bytes kept identical.
   Predicate: LABEL may be deleted only if it is not preserved, not in
   forced_labels, label_value_list, or exception_handler_labels, and has no
   user-declared name.  (The return statements are elided.)  */
1976 can_delete_label_p (label
)
1981 if (LABEL_PRESERVE_P (label
))
1984 for (x
= forced_labels
; x
; x
= XEXP (x
, 1))
1985 if (label
== XEXP (x
, 0))
1987 for (x
= label_value_list
; x
; x
= XEXP (x
, 1))
1988 if (label
== XEXP (x
, 0))
1990 for (x
= exception_handler_labels
; x
; x
= XEXP (x
, 1))
1991 if (label
== XEXP (x
, 0))
1994 /* User declared labels must be preserved. */
1995 if (LABEL_NAME (label
) != 0)
2001 /* Blocks A and B are to be merged into a single block. A has no incoming
2002 fallthru edge, so it can be moved before B without adding or modifying
2003 any jumps (aside from the jump from A to B). */
/* NOTE(review): extraction-shredded fragment; bytes kept identical.
   Purpose (per the comment above and visible code): A has no incoming
   fallthru, so physically move A's insns to just before B, swap the two
   records in the block array (so A occupies B's slot), and merge with
   merge_blocks_nomove.  Bails out (return elided) if the trailing BARRIER
   is missing or B's label can't be deleted.  */
2006 merge_blocks_move_predecessor_nojumps (a
, b
)
2009 rtx start
, end
, barrier
;
2015 /* We want to delete the BARRIER after the end of the insns we are
2016 going to move. If we don't find a BARRIER, then do nothing. This
2017 can happen in some cases if we have labels we can not delete.
2019 Similarly, do nothing if we can not delete the label at the start
2020 of the target block. */
2021 barrier
= next_nonnote_insn (end
);
2022 if (GET_CODE (barrier
) != BARRIER
2023 || (GET_CODE (b
->head
) == CODE_LABEL
2024 && ! can_delete_label_p (b
->head
)))
2027 flow_delete_insn (barrier
);
2029 /* Move block and loop notes out of the chain so that we do not
2030 disturb their order.
2032 ??? A better solution would be to squeeze out all the non-nested notes
2033 and adjust the block trees appropriately. Even better would be to have
2034 a tighter connection between block trees and rtl so that this is not
2036 start
= squeeze_notes (start
, end
);
2038 /* Scramble the insn chain. */
2039 if (end
!= PREV_INSN (b
->head
))
2040 reorder_insns (start
, end
, PREV_INSN (b
->head
));
2044 fprintf (rtl_dump_file
, "Moved block %d before %d and merged.\n",
2045 a
->index
, b
->index
);
2048 /* Swap the records for the two blocks around. Although we are deleting B,
2049 A is now where B was and we want to compact the BB array from where
2051 BASIC_BLOCK(a
->index
) = b
;
2052 BASIC_BLOCK(b
->index
) = a
;
2054 a
->index
= b
->index
;
2057 /* Now blocks A and B are contiguous. Merge them. */
2058 merge_blocks_nomove (a
, b
);
2063 /* Blocks A and B are to be merged into a single block. B has no outgoing
2064 fallthru edge, so it can be moved after A without adding or modifying
2065 any jumps (aside from the jump from A to B). */
/* NOTE(review): extraction-shredded fragment; bytes kept identical.
   Purpose (per the comment above and visible code): mirror of the
   predecessor variant -- B has no outgoing fallthru, so move B's insns to
   just after A's end and merge with merge_blocks_nomove.  Same BARRIER /
   undeletable-label bail-outs apply (returns elided).  */
2068 merge_blocks_move_successor_nojumps (a
, b
)
2071 rtx start
, end
, barrier
;
2076 /* We want to delete the BARRIER after the end of the insns we are
2077 going to move. If we don't find a BARRIER, then do nothing. This
2078 can happen in some cases if we have labels we can not delete.
2080 Similarly, do nothing if we can not delete the label at the start
2081 of the target block. */
2082 barrier
= next_nonnote_insn (end
);
2083 if (GET_CODE (barrier
) != BARRIER
2084 || (GET_CODE (b
->head
) == CODE_LABEL
2085 && ! can_delete_label_p (b
->head
)))
2088 flow_delete_insn (barrier
);
2090 /* Move block and loop notes out of the chain so that we do not
2091 disturb their order.
2093 ??? A better solution would be to squeeze out all the non-nested notes
2094 and adjust the block trees appropriately. Even better would be to have
2095 a tighter connection between block trees and rtl so that this is not
2097 start
= squeeze_notes (start
, end
);
2099 /* Scramble the insn chain. */
2100 reorder_insns (start
, end
, a
->end
);
2102 /* Now blocks A and B are contiguous. Merge them. */
2103 merge_blocks_nomove (a
, b
);
2107 fprintf (rtl_dump_file
, "Moved block %d after %d and merged.\n",
2108 b
->index
, a
->index
);
2114 /* Blocks A and B are to be merged into a single block. The insns
2115 are already contiguous, hence `nomove'. */
/* NOTE(review): extraction-shredded fragment; bytes kept identical; several
   interior lines elided (e.g. cc0 cleanup details at 2153-2166).  Purpose
   (per the comment above and visible code): A and B are already contiguous;
   delete B's leading CODE_LABEL and basic-block note, delete A's trailing
   jump (and on cc0 machines the cc0-setter feeding a conditional jump),
   splice B's insns onto A, re-own B's out-edges and insns, and compact the
   block array.  */
2118 merge_blocks_nomove (a
, b
)
2122 rtx b_head
, b_end
, a_end
;
2125 /* If there was a CODE_LABEL beginning B, delete it. */
2128 if (GET_CODE (b_head
) == CODE_LABEL
)
2130 /* Detect basic blocks with nothing but a label. This can happen
2131 in particular at the end of a function. */
2132 if (b_head
== b_end
)
2134 b_head
= flow_delete_insn (b_head
);
2137 /* Delete the basic block note. */
2138 if (GET_CODE (b_head
) == NOTE
2139 && NOTE_LINE_NUMBER (b_head
) == NOTE_INSN_BASIC_BLOCK
)
2141 if (b_head
== b_end
)
2143 b_head
= flow_delete_insn (b_head
);
2146 /* If there was a jump out of A, delete it. */
2148 if (GET_CODE (a_end
) == JUMP_INSN
)
2152 prev
= prev_nonnote_insn (a_end
);
2157 /* If this was a conditional jump, we need to also delete
2158 the insn that set cc0. */
2160 if (prev
&& sets_cc0_p (prev
))
2163 prev
= prev_nonnote_insn (prev
);
2166 flow_delete_insn (tmp
);
2170 /* Note that a->head != a->end, since we should have at least a
2171 bb note plus the jump, so prev != insn. */
2172 flow_delete_insn (a_end
);
2176 /* By definition, there should only be one successor of A, and that is
2177 B. Free that edge struct. */
2181 /* Adjust the edges out of B for the new owner. */
2182 for (e
= b
->succ
; e
; e
= e
->succ_next
)
2186 /* Reassociate the insns of B with A. */
2189 BLOCK_FOR_INSN (b_head
) = a
;
2190 while (b_head
!= b_end
)
2192 b_head
= NEXT_INSN (b_head
);
2193 BLOCK_FOR_INSN (b_head
) = a
;
2199 /* Compact the basic block array. */
2199 /* Compact the basic block array. */
2203 /* Attempt to merge basic blocks that are potentially non-adjacent.
2204 Return true iff the attempt succeeded. */
/* NOTE(review): extraction-shredded fragment; bytes kept identical; several
   interior lines and returns elided.  Purpose (per the comment above and
   visible code): attempt to merge possibly non-adjacent blocks B and C over
   edge E.  Fast path: E is a fallthru edge and no undeletable label sits
   between B's end and C's head -- merge in place.  Otherwise try moving B
   before C (needs no incoming fallthru into B and matching EH regions) or
   C after B (needs no outgoing fallthru from C and matching EH regions).
   Returns true iff a merge happened.  */
2207 merge_blocks (e
, b
, c
)
2211 /* If B has a fallthru edge to C, no need to move anything. */
2212 if (e
->flags
& EDGE_FALLTHRU
)
2214 /* If a label still appears somewhere and we cannot delete the label,
2215 then we cannot merge the blocks. The edge was tidied already. */
2217 rtx insn
, stop
= NEXT_INSN (c
->head
);
2218 for (insn
= NEXT_INSN (b
->end
); insn
!= stop
; insn
= NEXT_INSN (insn
))
2219 if (GET_CODE (insn
) == CODE_LABEL
&& !can_delete_label_p (insn
))
2222 merge_blocks_nomove (b
, c
);
2226 fprintf (rtl_dump_file
, "Merged %d and %d without moving.\n",
2227 b
->index
, c
->index
);
2236 int c_has_outgoing_fallthru
;
2237 int b_has_incoming_fallthru
;
2239 /* We must make sure to not munge nesting of exception regions,
2240 lexical blocks, and loop notes.
2242 The first is taken care of by requiring that the active eh
2243 region at the end of one block always matches the active eh
2244 region at the beginning of the next block.
2246 The later two are taken care of by squeezing out all the notes. */
2248 /* ??? A throw/catch edge (or any abnormal edge) should be rarely
2249 executed and we may want to treat blocks which have two out
2250 edges, one normal, one abnormal as only having one edge for
2251 block merging purposes. */
2253 for (tmp_edge
= c
->succ
; tmp_edge
; tmp_edge
= tmp_edge
->succ_next
)
2254 if (tmp_edge
->flags
& EDGE_FALLTHRU
)
2256 c_has_outgoing_fallthru
= (tmp_edge
!= NULL
);
2258 for (tmp_edge
= b
->pred
; tmp_edge
; tmp_edge
= tmp_edge
->pred_next
)
2259 if (tmp_edge
->flags
& EDGE_FALLTHRU
)
2261 b_has_incoming_fallthru
= (tmp_edge
!= NULL
);
2263 /* If B does not have an incoming fallthru, and the exception regions
2264 match, then it can be moved immediately before C without introducing
2267 C can not be the first block, so we do not have to worry about
2268 accessing a non-existent block. */
2269 d
= BASIC_BLOCK (c
->index
- 1);
2270 if (! b_has_incoming_fallthru
2271 && d
->eh_end
== b
->eh_beg
2272 && b
->eh_end
== c
->eh_beg
)
2273 return merge_blocks_move_predecessor_nojumps (b
, c
);
2275 /* Otherwise, we're going to try to move C after B. Make sure the
2276 exception regions match.
2278 If B is the last basic block, then we must not try to access the
2279 block structure for block B + 1. Luckily in that case we do not
2280 need to worry about matching exception regions. */
2281 d
= (b
->index
+ 1 < n_basic_blocks
? BASIC_BLOCK (b
->index
+ 1) : NULL
);
2282 if (b
->eh_end
== c
->eh_beg
2283 && (d
== NULL
|| c
->eh_end
== d
->eh_beg
))
2285 /* If C does not have an outgoing fallthru, then it can be moved
2286 immediately after B without introducing or modifying jumps. */
2287 if (! c_has_outgoing_fallthru
)
2288 return merge_blocks_move_successor_nojumps (b
, c
);
2290 /* Otherwise, we'll need to insert an extra jump, and possibly
2291 a new block to contain it. */
2292 /* ??? Not implemented yet. */
2299 /* Top level driver for merge_blocks. */
2306 /* Attempt to merge blocks as made possible by edge removal. If a block
2307 has only one successor, and the successor has only one predecessor,
2308 they may be combined. */
/* NOTE(review): the header of this top-level merge driver (original lines
   2299-2309) was elided by the extraction; presumably this is the
   try_merge_blocks loop referenced by the comment above -- TODO confirm.
   Bytes kept identical.  Visible behavior: for each block B, repeatedly
   merge it with its sole successor C while B has exactly one non-EH out
   edge, C has exactly one in edge, C is not EXIT, the jump (if any) has no
   side effects, and merge_blocks succeeds.  */
2310 for (i
= 0; i
< n_basic_blocks
; )
2312 basic_block c
, b
= BASIC_BLOCK (i
);
2315 /* A loop because chains of blocks might be combineable. */
2316 while ((s
= b
->succ
) != NULL
2317 && s
->succ_next
== NULL
2318 && (s
->flags
& EDGE_EH
) == 0
2319 && (c
= s
->dest
) != EXIT_BLOCK_PTR
2320 && c
->pred
->pred_next
== NULL
2321 /* If the jump insn has side effects, we can't kill the edge. */
2322 && (GET_CODE (b
->end
) != JUMP_INSN
2323 || onlyjump_p (b
->end
))
2324 && merge_blocks (s
, b
, c
))
2327 /* Don't get confused by the index shift caused by deleting blocks. */
2332 /* The given edge should potentially a fallthru edge. If that is in
2333 fact true, delete the unconditional jump and barriers that are in
/* NOTE(review): extraction-shredded fragment; bytes kept identical.
   Purpose (per the comment above and visible code): edge E from B to the
   numerically next block C should fall through; delete B's now-redundant
   unconditional jump (turning a jump-only B into a deleted note, per the
   elided else branch), unlink intervening barriers/labels, and set
   EDGE_FALLTHRU on E.  Aborts (elided) if B does not really reach C.  */
2337 tidy_fallthru_edge (e
, b
, c
)
2343 /* ??? In a late-running flow pass, other folks may have deleted basic
2344 blocks by nopping out blocks, leaving multiple BARRIERs between here
2345 and the target label. They ought to be chastized and fixed.
2347 We can also wind up with a sequence of undeletable labels between
2348 one block and the next.
2350 So search through a sequence of barriers, labels, and notes for
2351 the head of block C and assert that we really do fall through. */
2353 if (next_real_insn (b
->end
) != next_real_insn (PREV_INSN (c
->head
)))
2356 /* Remove what will soon cease being the jump insn from the source block.
2357 If block B consisted only of this single jump, turn it into a deleted
2360 if (GET_CODE (q
) == JUMP_INSN
)
2363 /* If this was a conditional jump, we need to also delete
2364 the insn that set cc0. */
2365 if (! simplejump_p (q
) && condjump_p (q
) && sets_cc0_p (PREV_INSN (q
)))
2372 NOTE_LINE_NUMBER (q
) = NOTE_INSN_DELETED
;
2373 NOTE_SOURCE_FILE (q
) = 0;
2376 b
->end
= q
= PREV_INSN (q
);
2379 /* Selectively unlink the sequence. */
2380 if (q
!= PREV_INSN (c
->head
))
2381 flow_delete_insn_chain (NEXT_INSN (q
), PREV_INSN (c
->head
));
2383 e
->flags
|= EDGE_FALLTHRU
;
2386 /* Discover and record the loop depth at the head of each basic block. */
/* NOTE(review): extraction-shredded fragment; bytes kept identical.
   Purpose (per the comment above and visible code): single pass over INSNS,
   tracking loop nesting via NOTE_INSN_LOOP_BEG/END (increment/decrement
   elided), recording the current DEPTH into each block's loop_depth when
   its head insn is reached.  DEPTH starts at 1 so 0 signals a bookkeeping
   error (per the original comment at line 2414).  */
2389 calculate_loop_depth (insns
)
2394 int i
= 0, depth
= 1;
2396 bb
= BASIC_BLOCK (i
);
2397 for (insn
= insns
; insn
; insn
= NEXT_INSN (insn
))
2399 if (insn
== bb
->head
)
2401 bb
->loop_depth
= depth
;
2402 if (++i
>= n_basic_blocks
)
2404 bb
= BASIC_BLOCK (i
);
2407 if (GET_CODE (insn
) == NOTE
)
2409 if (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_LOOP_BEG
)
2411 else if (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_LOOP_END
)
2414 /* If we have LOOP_DEPTH == 0, there has been a bookkeeping error. */
2421 /* Perform data flow analysis.
2422 F is the first insn of the function and NREGS the number of register numbers
/* NOTE(review): extraction-shredded fragment; bytes kept identical.
   Purpose (per the comment above and visible code): top-level driver for
   the liveness pass.  Records registers that will be eliminated
   (ELIMINABLE_REGS or the frame pointer) in elim_reg_set, sets up alias
   analysis and the uid_volatile bitmap, masks out dead-code flags when
   REMOVE_DEAD_CODE is false, runs life_analysis_1, marks constant
   functions pre-reload, then tears everything down and optionally dumps
   flow info to FILE.  */
2426 life_analysis (f
, nregs
, file
, remove_dead_code
)
2430 int remove_dead_code
;
2432 #ifdef ELIMINABLE_REGS
2434 static struct {int from
, to
; } eliminables
[] = ELIMINABLE_REGS
;
2438 /* Record which registers will be eliminated. We use this in
2441 CLEAR_HARD_REG_SET (elim_reg_set
);
2443 #ifdef ELIMINABLE_REGS
2444 for (i
= 0; i
< sizeof eliminables
/ sizeof eliminables
[0]; i
++)
2445 SET_HARD_REG_BIT (elim_reg_set
, eliminables
[i
].from
);
2447 SET_HARD_REG_BIT (elim_reg_set
, FRAME_POINTER_REGNUM
);
2450 /* Allocate a bitmap to be filled in by record_volatile_insns. */
2451 uid_volatile
= BITMAP_XMALLOC ();
2453 /* We want alias analysis information for local dead store elimination. */
2454 init_alias_analysis ();
2457 if (! remove_dead_code
)
2458 flags
&= ~(PROP_SCAN_DEAD_CODE
| PROP_KILL_DEAD_CODE
);
2459 life_analysis_1 (f
, nregs
, flags
);
2461 if (! reload_completed
)
2462 mark_constant_function ();
2464 end_alias_analysis ();
2467 dump_flow_info (file
);
2469 BITMAP_FREE (uid_volatile
);
2470 free (uid_volatile
);
2471 free_basic_block_vars (1);
2474 /* A subroutine of verify_wide_reg, called through for_each_rtx.
2475 Search for REGNO. If found, abort if it is not wider than word_mode. */
/* NOTE(review): extraction-shredded fragment; bytes kept identical.
   for_each_rtx callback: when *PX is the REG numbered *PREGNO, abort
   (elided) unless its mode is wider than a word -- see the comment at
   original line 2474.  */
2478 verify_wide_reg_1 (px
, pregno
)
2483 int regno
= *(int *) pregno
;
2485 if (GET_CODE (x
) == REG
&& REGNO (x
) == regno
)
2487 if (GET_MODE_BITSIZE (GET_MODE (x
)) <= BITS_PER_WORD
)
2494 /* A subroutine of verify_local_live_at_start. Search through insns
2495 between HEAD and END looking for register REGNO. */
/* NOTE(review): extraction-shredded fragment; bytes kept identical.
   Scan real insns from HEAD to END for REGNO using verify_wide_reg_1;
   abort (elided, per comment at original line 2512) if the register never
   appears at all.  */
2498 verify_wide_reg (regno
, head
, end
)
2504 if (GET_RTX_CLASS (GET_CODE (head
)) == 'i'
2505 && for_each_rtx (&PATTERN (head
), verify_wide_reg_1
, &regno
))
2509 head
= NEXT_INSN (head
);
2512 /* We didn't find the register at all. Something's way screwy. */
2516 /* A subroutine of update_life_info. Verify that there are no untoward
2517 changes in live_at_start during a local update. */
/* NOTE(review): extraction-shredded fragment; bytes kept identical.
   Sanity check for update_life_info: after reload the recomputed
   live-at-start set must equal the stored one exactly; before reload, any
   register that newly went live must be a multi-word value (checked via
   verify_wide_reg), and no register may have died.  */
2520 verify_local_live_at_start (new_live_at_start
, bb
)
2521 regset new_live_at_start
;
2524 if (reload_completed
)
2526 /* After reload, there are no pseudos, nor subregs of multi-word
2527 registers. The regsets should exactly match. */
2528 if (! REG_SET_EQUAL_P (new_live_at_start
, bb
->global_live_at_start
))
2535 /* Find the set of changed registers. */
2536 XOR_REG_SET (new_live_at_start
, bb
->global_live_at_start
);
2538 EXECUTE_IF_SET_IN_REG_SET (new_live_at_start
, 0, i
,
2540 /* No registers should die. */
2541 if (REGNO_REG_SET_P (bb
->global_live_at_start
, i
))
2543 /* Verify that the now-live register is wider than word_mode. */
2544 verify_wide_reg (i
, bb
->head
, bb
->end
);
2549 /* Updates death notes starting with the basic blocks set in BLOCKS.
2551 If LOCAL_ONLY, such as after splitting or peepholeing, we are only
2552 expecting local modifications to basic blocks. If we find extra
2553 registers live at the beginning of a block, then we either killed
2554 useful data, or we have a broken split that wants data not provided.
2555 If we find registers removed from live_at_start, that means we have
2556 a broken peephole that is killing a register it shouldn't.
2558 ??? This is not true in one situation -- when a pre-reload splitter
2559 generates subregs of a multi-word pseudo, current life analysis will
2560 lose the kill. So we _can_ have a pseudo go live. How irritating.
2562 BLOCK_FOR_INSN is assumed to be correct.
2564 ??? PROP_FLAGS should not contain PROP_LOG_LINKS. Need to set up
2565 reg_next_use for that. Including PROP_REG_INFO does not refresh
2566 regs_ever_live unless the caller resets it to zero. */
/* NOTE(review): mangled extraction -- return type, remaining parameter
   declarations, braces, the trailing PROP_FLAGS argument of the
   propagate_block call and the FREE_REG_SET cleanup were dropped.
   Comments only; code untouched.  */
2569 update_life_info (blocks
, extent
, prop_flags
)
2571 enum update_life_extent extent
;
/* Scratch regset reused per block for the live-at-end working copy.  */
2577 tmp
= ALLOCA_REG_SET ();
2579 /* For a global update, we go through the relaxation process again. */
2580 if (extent
!= UPDATE_LIFE_LOCAL
)
2582 calculate_global_regs_live (blocks
, blocks
,
2583 prop_flags
& PROP_SCAN_DEAD_CODE
);
2585 /* If asked, remove notes from the blocks we'll update. */
2586 if (extent
== UPDATE_LIFE_GLOBAL_RM_NOTES
)
2587 count_or_remove_death_notes (blocks
, 1);
/* Re-propagate each requested block backwards from its live-at-end set.  */
2590 EXECUTE_IF_SET_IN_SBITMAP (blocks
, 0, i
,
2592 basic_block bb
= BASIC_BLOCK (i
);
2594 COPY_REG_SET (tmp
, bb
->global_live_at_end
);
2595 propagate_block (tmp
, bb
->head
, bb
->end
, (regset
) NULL
, i
,
/* For a local-only update, TMP now holds the recomputed live-at-start;
   check it against the recorded one.  */
2598 if (extent
== UPDATE_LIFE_LOCAL
)
2599 verify_local_live_at_start (tmp
, bb
);
2605 /* Free the variables allocated by find_basic_blocks.
2607 KEEP_HEAD_END_P is non-zero if basic_block_info is not to be freed. */
/* NOTE(review): mangled extraction -- return type, braces and possibly
   other statements inside the "! keep_head_end_p" branch were dropped.
   Comments only; code untouched.  */
2610 free_basic_block_vars (keep_head_end_p
)
2611 int keep_head_end_p
;
/* Release the insn -> basic block map, if it was ever built.  */
2613 if (basic_block_for_insn
)
2615 VARRAY_FREE (basic_block_for_insn
);
2616 basic_block_for_insn
= NULL
;
/* Unless the caller still needs block head/end info, drop the block
   array and clear the per-CFG fields of the entry/exit pseudo-blocks.  */
2619 if (! keep_head_end_p
)
2622 VARRAY_FREE (basic_block_info
);
2625 ENTRY_BLOCK_PTR
->aux
= NULL
;
2626 ENTRY_BLOCK_PTR
->global_live_at_end
= NULL
;
2627 EXIT_BLOCK_PTR
->aux
= NULL
;
2628 EXIT_BLOCK_PTR
->global_live_at_start
= NULL
;
2632 /* Return nonzero if the destination of SET equals the source. */
/* NOTE(review): mangled extraction -- the function header (presumably
   "static int set_noop_p (set) rtx set;" -- TODO confirm), braces and the
   return statements were dropped.  Comments only; code untouched.  */
2637 rtx src
= SET_SRC (set
);
2638 rtx dst
= SET_DEST (set
);
/* Direct case: both sides are the very same register.  */
2639 if (GET_CODE (src
) == REG
&& GET_CODE (dst
) == REG
2640 && REGNO (src
) == REGNO (dst
))
/* Otherwise only a pair of SUBREGs selecting the same word can match.  */
2642 if (GET_CODE (src
) != SUBREG
|| GET_CODE (dst
) != SUBREG
2643 || SUBREG_WORD (src
) != SUBREG_WORD (dst
))
/* Strip the SUBREGs and compare the underlying registers.  */
2645 src
= SUBREG_REG (src
);
2646 dst
= SUBREG_REG (dst
);
2647 if (GET_CODE (src
) == REG
&& GET_CODE (dst
) == REG
2648 && REGNO (src
) == REGNO (dst
))
2653 /* Return nonzero if an insn consists only of SETs, each of which only sets a
/* NOTE(review): mangled extraction -- the rest of this header comment, the
   function signature (presumably "int noop_move_p (insn)" -- TODO confirm),
   braces and the return statements were dropped.  Comments only.  */
2659 rtx pat
= PATTERN (insn
);
2661 /* Insns carrying these notes are useful later on. */
2662 if (find_reg_note (insn
, REG_EQUAL
, NULL_RTX
))
/* Single SET: it is a no-op move iff source and destination agree.  */
2665 if (GET_CODE (pat
) == SET
&& set_noop_p (pat
))
2668 if (GET_CODE (pat
) == PARALLEL
)
2671 /* If nothing but SETs of registers to themselves,
2672 this insn can also be deleted. */
2673 for (i
= 0; i
< XVECLEN (pat
, 0); i
++)
2675 rtx tem
= XVECEXP (pat
, 0, i
);
/* USE and CLOBBER elements are harmless inside a noop PARALLEL.  */
2677 if (GET_CODE (tem
) == USE
2678 || GET_CODE (tem
) == CLOBBER
)
/* Any element that is not a self-set disqualifies the insn.  */
2681 if (GET_CODE (tem
) != SET
|| ! set_noop_p (tem
))
/* note_stores callback: record that this function changes the stack
   pointer, by clearing current_function_sp_is_unchanging.
   NOTE(review): mangled extraction -- the return type ("static void" --
   TODO confirm), the declaration of X and braces were dropped.  */
2691 notice_stack_pointer_modification (x
, pat
, data
)
2693 rtx pat ATTRIBUTE_UNUSED
;
2694 void *data ATTRIBUTE_UNUSED
;
/* Either a direct store to the stack pointer, or a push/pop addressing
   side effect through a MEM, counts as a modification.  */
2696 if (x
== stack_pointer_rtx
2697 /* The stack pointer is only modified indirectly as the result
2698 of a push until later in flow. See the comments in rtl.texi
2699 regarding Embedded Side-Effects on Addresses. */
2700 || (GET_CODE (x
) == MEM
2701 && (GET_CODE (XEXP (x
, 0)) == PRE_DEC
2702 || GET_CODE (XEXP (x
, 0)) == PRE_INC
2703 || GET_CODE (XEXP (x
, 0)) == POST_DEC
2704 || GET_CODE (XEXP (x
, 0)) == POST_INC
)
2705 && XEXP (XEXP (x
, 0), 0) == stack_pointer_rtx
))
2706 current_function_sp_is_unchanging
= 0;
2709 /* Record which insns refer to any volatile memory
2710 or for any reason can't be deleted just because they are dead stores.
2711 Also, delete any insns that copy a register to itself.
2712 And see if the stack pointer is modified. */
/* NOTE(review): mangled extraction -- the return type, the declaration of
   INSN, braces, the "#else" of the STACK_GROWS_DOWNWARD conditional and
   the final argument of the note_stores call were dropped.  Comments
   only; code untouched.  */
2714 record_volatile_insns (f
)
/* Scan every insn in the function, starting from first insn F.  */
2718 for (insn
= f
; insn
; insn
= NEXT_INSN (insn
))
2720 enum rtx_code code1
= GET_CODE (insn
);
/* Calls are never deletable as dead stores.  */
2721 if (code1
== CALL_INSN
)
2722 SET_INSN_VOLATILE (insn
);
2723 else if (code1
== INSN
|| code1
== JUMP_INSN
)
/* Anything touching volatile memory must be preserved.  */
2725 if (GET_CODE (PATTERN (insn
)) != USE
2726 && volatile_refs_p (PATTERN (insn
)))
2727 SET_INSN_VOLATILE (insn
);
2729 /* A SET that makes space on the stack cannot be dead.
2730 (Such SETs occur only for allocating variable-size data,
2731 so they will always have a PLUS or MINUS according to the
2732 direction of stack growth.)
2733 Even if this function never uses this stack pointer value,
2734 signal handlers do! */
2735 else if (code1
== INSN
&& GET_CODE (PATTERN (insn
)) == SET
2736 && SET_DEST (PATTERN (insn
)) == stack_pointer_rtx
2737 #ifdef STACK_GROWS_DOWNWARD
2738 && GET_CODE (SET_SRC (PATTERN (insn
))) == MINUS
2740 && GET_CODE (SET_SRC (PATTERN (insn
))) == PLUS
2742 && XEXP (SET_SRC (PATTERN (insn
)), 0) == stack_pointer_rtx
)
2743 SET_INSN_VOLATILE (insn
);
2745 /* Delete (in effect) any obvious no-op moves. */
2746 else if (noop_move_p (insn
))
/* "Delete" by demoting the insn to a NOTE_INSN_DELETED note.  */
2748 PUT_CODE (insn
, NOTE
);
2749 NOTE_LINE_NUMBER (insn
) = NOTE_INSN_DELETED
;
2750 NOTE_SOURCE_FILE (insn
) = 0;
2754 /* Check if insn modifies the stack pointer. */
2755 if ( current_function_sp_is_unchanging
2756 && GET_RTX_CLASS (GET_CODE (insn
)) == 'i')
2757 note_stores (PATTERN (insn
),
2758 notice_stack_pointer_modification
,
2763 /* Mark a register in SET. Hard registers in large modes get all
2764 of their component registers set as well. */
/* NOTE(review): mangled extraction -- the function header (presumably
   "static void mark_reg (set, reg)" -- TODO confirm), braces and the
   loop framing around the multi-word case were dropped.  Comments only.  */
2770 int regno
= REGNO (reg
);
2772 SET_REGNO_REG_SET (set
, regno
);
/* For a hard register, also mark every component hard register covered
   by REG's machine mode (loop over N dropped by the extraction).  */
2773 if (regno
< FIRST_PSEUDO_REGISTER
)
2775 int n
= HARD_REGNO_NREGS (regno
, GET_MODE (reg
));
2777 SET_REGNO_REG_SET (set
, regno
+ n
);
2781 /* Mark those regs which are needed at the end of the function as live
2782 at the end of the last basic block. */
2784 mark_regs_live_at_end (set
)
2790 /* If exiting needs the right stack value, consider the stack pointer
2791 live at the end of the function. */
2792 if ((HAVE_epilogue
&& reload_completed
)
2793 || ! EXIT_IGNORE_STACK
2794 || (! FRAME_POINTER_REQUIRED
2795 && ! current_function_calls_alloca
2796 && flag_omit_frame_pointer
)
2797 || current_function_sp_is_unchanging
)
2799 SET_REGNO_REG_SET (set
, STACK_POINTER_REGNUM
);
2802 /* Mark the frame pointer if needed at the end of the function. If
2803 we end up eliminating it, it will be removed from the live list
2804 of each basic block by reload. */
2806 if (! reload_completed
|| frame_pointer_needed
)
2808 SET_REGNO_REG_SET (set
, FRAME_POINTER_REGNUM
);
2809 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2810 /* If they are different, also mark the hard frame pointer as live */
2811 SET_REGNO_REG_SET (set
, HARD_FRAME_POINTER_REGNUM
);
2815 #ifdef PIC_OFFSET_TABLE_REGNUM
2816 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
2817 /* Many architectures have a GP register even without flag_pic.
2818 Assume the pic register is not in use, or will be handled by
2819 other means, if it is not fixed. */
2820 if (fixed_regs
[PIC_OFFSET_TABLE_REGNUM
])
2821 SET_REGNO_REG_SET (set
, PIC_OFFSET_TABLE_REGNUM
);
2825 /* Mark all global registers, and all registers used by the epilogue
2826 as being live at the end of the function since they may be
2827 referenced by our caller. */
2828 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
2830 #ifdef EPILOGUE_USES
2831 || EPILOGUE_USES (i
)
2834 SET_REGNO_REG_SET (set
, i
);
2836 /* Mark all call-saved registers that we actaully used. */
2837 if (HAVE_epilogue
&& reload_completed
)
2839 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
2840 if (! call_used_regs
[i
] && regs_ever_live
[i
])
2841 SET_REGNO_REG_SET (set
, i
);
2844 /* Mark function return value. */
2845 /* ??? Only do this after reload. Consider a non-void function that
2846 omits a return statement. Across that edge we'll have the return
2847 register live, and no set for it. Thus the return register will
2848 be live back through the CFG to the entry, and thus we die. A
2849 possible solution is to emit a clobber at exits without returns. */
2851 type
= TREE_TYPE (DECL_RESULT (current_function_decl
));
2852 if (reload_completed
2853 && type
!= void_type_node
)
2857 if (current_function_returns_struct
2858 || current_function_returns_pcc_struct
)
2859 type
= build_pointer_type (type
);
2861 #ifdef FUNCTION_OUTGOING_VALUE
2862 outgoing
= FUNCTION_OUTGOING_VALUE (type
, current_function_decl
);
2864 outgoing
= FUNCTION_VALUE (type
, current_function_decl
);
2867 if (GET_CODE (outgoing
) == REG
)
2868 mark_reg (set
, outgoing
);
2869 else if (GET_CODE (outgoing
) == PARALLEL
)
2871 int len
= XVECLEN (outgoing
, 0);
2873 /* Check for a NULL entry, used to indicate that the parameter
2874 goes on the stack and in registers. */
2875 i
= (XEXP (XVECEXP (outgoing
, 0, 0), 0) ? 0 : 1);
2877 for ( ; i
< len
; ++i
)
2879 rtx r
= XVECEXP (outgoing
, 0, i
);
2880 if (GET_CODE (r
) == REG
)
2889 /* Determine which registers are live at the start of each
2890 basic block of the function whose first insn is F.
2891 NREGS is the number of registers used in F.
2892 We allocate the vector basic_block_live_at_start
2893 and the regsets that it points to, and fill them with the data.
2894 regset_size and regset_bytes are also set here. */
2897 life_analysis_1 (f
, nregs
, flags
)
2902 char save_regs_ever_live
[FIRST_PSEUDO_REGISTER
];
2907 /* Allocate and zero out many data structures
2908 that will record the data from lifetime analysis. */
2910 allocate_reg_life_data ();
2911 allocate_bb_life_data ();
2913 reg_next_use
= (rtx
*) xcalloc (nregs
, sizeof (rtx
));
2915 /* Assume that the stack pointer is unchanging if alloca hasn't been used.
2916 This will be cleared by record_volatile_insns if it encounters an insn
2917 which modifies the stack pointer. */
2918 current_function_sp_is_unchanging
= !current_function_calls_alloca
;
2919 record_volatile_insns (f
);
2921 /* Find the set of registers live on function exit. Do this before
2922 zeroing regs_ever_live, as we use that data post-reload. */
2923 mark_regs_live_at_end (EXIT_BLOCK_PTR
->global_live_at_start
);
2925 /* The post-reload life analysis have (on a global basis) the same
2926 registers live as was computed by reload itself. elimination
2927 Otherwise offsets and such may be incorrect.
2929 Reload will make some registers as live even though they do not
2930 appear in the rtl. */
2931 if (reload_completed
)
2932 memcpy (save_regs_ever_live
, regs_ever_live
, sizeof (regs_ever_live
));
2933 memset (regs_ever_live
, 0, sizeof regs_ever_live
);
2935 /* Compute register life at block boundaries. It'd be nice to
2936 begin with just the exit and noreturn blocks, but that set
2937 is not immediately handy. */
2940 blocks
= sbitmap_alloc (n_basic_blocks
);
2941 sbitmap_ones (blocks
);
2942 calculate_global_regs_live (blocks
, blocks
, flags
& PROP_SCAN_DEAD_CODE
);
2945 /* The only pseudos that are live at the beginning of the function are
2946 those that were not set anywhere in the function. local-alloc doesn't
2947 know how to handle these correctly, so mark them as not local to any
2950 EXECUTE_IF_SET_IN_REG_SET (ENTRY_BLOCK_PTR
->global_live_at_end
,
2951 FIRST_PSEUDO_REGISTER
, i
,
2952 { REG_BASIC_BLOCK (i
) = REG_BLOCK_GLOBAL
; });
2954 /* Now the life information is accurate. Make one more pass over each
2955 basic block to delete dead stores, create autoincrement addressing
2956 and record how many times each register is used, is set, or dies. */
2959 tmp
= ALLOCA_REG_SET ();
2961 for (i
= n_basic_blocks
- 1; i
>= 0; --i
)
2963 basic_block bb
= BASIC_BLOCK (i
);
2965 COPY_REG_SET (tmp
, bb
->global_live_at_end
);
2966 propagate_block (tmp
, bb
->head
, bb
->end
, (regset
) NULL
, i
, flags
);
2972 /* We have a problem with any pseudoreg that lives across the setjmp.
2973 ANSI says that if a user variable does not change in value between
2974 the setjmp and the longjmp, then the longjmp preserves it. This
2975 includes longjmp from a place where the pseudo appears dead.
2976 (In principle, the value still exists if it is in scope.)
2977 If the pseudo goes in a hard reg, some other value may occupy
2978 that hard reg where this pseudo is dead, thus clobbering the pseudo.
2979 Conclusion: such a pseudo must not go in a hard reg. */
2980 EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp
,
2981 FIRST_PSEUDO_REGISTER
, i
,
2983 if (regno_reg_rtx
[i
] != 0)
2985 REG_LIVE_LENGTH (i
) = -1;
2986 REG_BASIC_BLOCK (i
) = REG_BLOCK_UNKNOWN
;
2990 /* Restore regs_ever_live that was provided by reload. */
2991 if (reload_completed
)
2992 memcpy (regs_ever_live
, save_regs_ever_live
, sizeof (regs_ever_live
));
2995 free (reg_next_use
);
2996 reg_next_use
= NULL
;
2999 /* Propagate global life info around the graph of basic blocks. Begin
3000 considering blocks with their corresponding bit set in BLOCKS_IN.
3001 BLOCKS_OUT is set for every block that was changed. */
3004 calculate_global_regs_live (blocks_in
, blocks_out
, flags
)
3005 sbitmap blocks_in
, blocks_out
;
3008 basic_block
*queue
, *qhead
, *qtail
, *qend
;
3009 regset tmp
, new_live_at_end
;
3012 tmp
= ALLOCA_REG_SET ();
3013 new_live_at_end
= ALLOCA_REG_SET ();
3015 /* Create a worklist. Allocate an extra slot for ENTRY_BLOCK, and one
3016 because the `head == tail' style test for an empty queue doesn't
3017 work with a full queue. */
3018 queue
= (basic_block
*) xmalloc ((n_basic_blocks
+ 2) * sizeof (*queue
));
3020 qhead
= qend
= queue
+ n_basic_blocks
+ 2;
3022 /* Queue the blocks set in the initial mask. Do this in reverse block
3023 number order so that we are more likely for the first round to do
3024 useful work. We use AUX non-null to flag that the block is queued. */
3025 EXECUTE_IF_SET_IN_SBITMAP (blocks_in
, 0, i
,
3027 basic_block bb
= BASIC_BLOCK (i
);
3032 sbitmap_zero (blocks_out
);
3034 while (qhead
!= qtail
)
3036 int rescan
, changed
;
3045 /* Begin by propogating live_at_start from the successor blocks. */
3046 CLEAR_REG_SET (new_live_at_end
);
3047 for (e
= bb
->succ
; e
; e
= e
->succ_next
)
3049 basic_block sb
= e
->dest
;
3050 IOR_REG_SET (new_live_at_end
, sb
->global_live_at_start
);
3053 if (bb
== ENTRY_BLOCK_PTR
)
3055 COPY_REG_SET (bb
->global_live_at_end
, new_live_at_end
);
3059 /* On our first pass through this block, we'll go ahead and continue.
3060 Recognize first pass by local_set NULL. On subsequent passes, we
3061 get to skip out early if live_at_end wouldn't have changed. */
3063 if (bb
->local_set
== NULL
)
3065 bb
->local_set
= OBSTACK_ALLOC_REG_SET (function_obstack
);
3070 /* If any bits were removed from live_at_end, we'll have to
3071 rescan the block. This wouldn't be necessary if we had
3072 precalculated local_live, however with PROP_SCAN_DEAD_CODE
3073 local_live is really dependant on live_at_end. */
3074 CLEAR_REG_SET (tmp
);
3075 rescan
= bitmap_operation (tmp
, bb
->global_live_at_end
,
3076 new_live_at_end
, BITMAP_AND_COMPL
);
3080 /* Find the set of changed bits. Take this opportunity
3081 to notice that this set is empty and early out. */
3082 CLEAR_REG_SET (tmp
);
3083 changed
= bitmap_operation (tmp
, bb
->global_live_at_end
,
3084 new_live_at_end
, BITMAP_XOR
);
3088 /* If any of the changed bits overlap with local_set,
3089 we'll have to rescan the block. Detect overlap by
3090 the AND with ~local_set turning off bits. */
3091 rescan
= bitmap_operation (tmp
, tmp
, bb
->local_set
,
3096 /* Let our caller know that BB changed enough to require its
3097 death notes updated. */
3098 SET_BIT (blocks_out
, bb
->index
);
3102 /* Add to live_at_start the set of all registers in
3103 new_live_at_end that aren't in the old live_at_end. */
3105 bitmap_operation (tmp
, new_live_at_end
, bb
->global_live_at_end
,
3107 COPY_REG_SET (bb
->global_live_at_end
, new_live_at_end
);
3109 changed
= bitmap_operation (bb
->global_live_at_start
,
3110 bb
->global_live_at_start
,
3117 COPY_REG_SET (bb
->global_live_at_end
, new_live_at_end
);
3119 /* Rescan the block insn by insn to turn (a copy of) live_at_end
3120 into live_at_start. */
3121 propagate_block (new_live_at_end
, bb
->head
, bb
->end
,
3122 bb
->local_set
, bb
->index
, flags
);
3124 /* If live_at start didn't change, no need to go farther. */
3125 if (REG_SET_EQUAL_P (bb
->global_live_at_start
, new_live_at_end
))
3128 COPY_REG_SET (bb
->global_live_at_start
, new_live_at_end
);
3131 /* Queue all predecessors of BB so that we may re-examine
3132 their live_at_end. */
3133 for (e
= bb
->pred
; e
; e
= e
->pred_next
)
3135 basic_block pb
= e
->src
;
3136 if (pb
->aux
== NULL
)
3147 FREE_REG_SET (new_live_at_end
);
3149 EXECUTE_IF_SET_IN_SBITMAP (blocks_out
, 0, i
,
3151 basic_block bb
= BASIC_BLOCK (i
);
3152 FREE_REG_SET (bb
->local_set
);
3158 /* Subroutines of life analysis. */
3160 /* Allocate the permanent data structures that represent the results
3161 of life analysis. Not static since used also for stupid life analysis. */
/* NOTE(review): mangled extraction -- the return type, the declaration of
   I and braces were dropped.  Comments only; code untouched.  */
3164 allocate_bb_life_data ()
/* Give every basic block a live-at-start and live-at-end regset, carved
   out of the function obstack.  */
3168 for (i
= 0; i
< n_basic_blocks
; i
++)
3170 basic_block bb
= BASIC_BLOCK (i
);
3172 bb
->global_live_at_start
= OBSTACK_ALLOC_REG_SET (function_obstack
);
3173 bb
->global_live_at_end
= OBSTACK_ALLOC_REG_SET (function_obstack
);
/* The entry/exit pseudo-blocks only need the side that faces the body.  */
3176 ENTRY_BLOCK_PTR
->global_live_at_end
3177 = OBSTACK_ALLOC_REG_SET (function_obstack
);
3178 EXIT_BLOCK_PTR
->global_live_at_start
3179 = OBSTACK_ALLOC_REG_SET (function_obstack
);
/* Regset accumulating registers live across any setjmp-like call.  */
3181 regs_live_at_setjmp
= OBSTACK_ALLOC_REG_SET (function_obstack
);
/* Allocate/reset the per-register life-analysis statistics for all
   registers up to max_regno.
   NOTE(review): mangled extraction -- the return type, the declaration of
   I, braces and part of the second comment were dropped.  Comments only.  */
3185 allocate_reg_life_data ()
3189 /* Recalculate the register space, in case it has grown. Old style
3190 vector oriented regsets would set regset_{size,bytes} here also. */
3191 allocate_reg_info (max_regno
, FALSE
, FALSE
);
3193 /* Reset all the data we'll collect in propagate_block and its
/* (tail of the comment above was dropped by the extraction).  */
3195 for (i
= 0; i
< max_regno
; i
++)
3199 REG_N_DEATHS (i
) = 0;
3200 REG_N_CALLS_CROSSED (i
) = 0;
3201 REG_LIVE_LENGTH (i
) = 0;
3202 REG_BASIC_BLOCK (i
) = REG_BLOCK_UNKNOWN
;
3206 /* Compute the registers live at the beginning of a basic block
3207 from those live at the end.
3209 When called, OLD contains those live at the end.
3210 On return, it contains those live at the beginning.
3211 FIRST and LAST are the first and last insns of the basic block.
3213 FINAL is nonzero if we are doing the final pass which is not
3214 for computing the life info (since that has already been done)
3215 but for acting on it. On this pass, we delete dead stores,
3216 set up the logical links and dead-variables lists of instructions,
3217 and merge instructions for autoincrement and autodecrement addresses.
3219 SIGNIFICANT is nonzero only the first time for each basic block.
3220 If it is nonzero, it points to a regset in which we store
3221 a 1 for each register that is set within the block.
3223 BNUM is the number of the basic block. */
3226 propagate_block (old
, first
, last
, significant
, bnum
, flags
)
3227 register regset old
;
3239 /* Find the loop depth for this block. Ignore loop level changes in the
3240 middle of the basic block -- for register allocation purposes, the
3241 important uses will be in the blocks wholely contained within the loop
3242 not in the loop pre-header or post-trailer. */
3243 loop_depth
= BASIC_BLOCK (bnum
)->loop_depth
;
3245 dead
= ALLOCA_REG_SET ();
3246 live
= ALLOCA_REG_SET ();
3250 if (flags
& PROP_REG_INFO
)
3254 /* Process the regs live at the end of the block.
3255 Mark them as not local to any one basic block. */
3256 EXECUTE_IF_SET_IN_REG_SET (old
, 0, i
,
3258 REG_BASIC_BLOCK (i
) = REG_BLOCK_GLOBAL
;
3262 /* Scan the block an insn at a time from end to beginning. */
3264 for (insn
= last
; ; insn
= prev
)
3266 prev
= PREV_INSN (insn
);
3268 if (GET_CODE (insn
) == NOTE
)
3270 /* If this is a call to `setjmp' et al,
3271 warn if any non-volatile datum is live. */
3273 if ((flags
& PROP_REG_INFO
)
3274 && NOTE_LINE_NUMBER (insn
) == NOTE_INSN_SETJMP
)
3275 IOR_REG_SET (regs_live_at_setjmp
, old
);
3278 /* Update the life-status of regs for this insn.
3279 First DEAD gets which regs are set in this insn
3280 then LIVE gets which regs are used in this insn.
3281 Then the regs live before the insn
3282 are those live after, with DEAD regs turned off,
3283 and then LIVE regs turned on. */
3285 else if (GET_RTX_CLASS (GET_CODE (insn
)) == 'i')
3288 rtx note
= find_reg_note (insn
, REG_RETVAL
, NULL_RTX
);
3289 int insn_is_dead
= 0;
3290 int libcall_is_dead
= 0;
3292 if (flags
& PROP_SCAN_DEAD_CODE
)
3294 insn_is_dead
= (insn_dead_p (PATTERN (insn
), old
, 0, REG_NOTES (insn
))
3295 /* Don't delete something that refers to volatile storage! */
3296 && ! INSN_VOLATILE (insn
));
3297 libcall_is_dead
= (insn_is_dead
&& note
!= 0
3298 && libcall_dead_p (PATTERN (insn
), old
, note
, insn
));
3301 /* We almost certainly don't want to delete prologue or epilogue
3302 instructions. Warn about probable compiler losage. */
3303 if ((flags
& PROP_KILL_DEAD_CODE
)
3306 && (HAVE_epilogue
|| HAVE_prologue
)
3307 && prologue_epilogue_contains (insn
))
3309 warning ("ICE: would have deleted prologue/epilogue insn");
3311 libcall_is_dead
= insn_is_dead
= 0;
3314 /* If an instruction consists of just dead store(s) on final pass,
3315 "delete" it by turning it into a NOTE of type NOTE_INSN_DELETED.
3316 We could really delete it with delete_insn, but that
3317 can cause trouble for first or last insn in a basic block. */
3318 if ((flags
& PROP_KILL_DEAD_CODE
) && insn_is_dead
)
3321 /* If the insn referred to a label, note that the label is
3323 for (inote
= REG_NOTES (insn
); inote
; inote
= XEXP (inote
, 1))
3325 if (REG_NOTE_KIND (inote
) == REG_LABEL
)
3327 rtx label
= XEXP (inote
, 0);
3329 LABEL_NUSES (label
)--;
3331 /* If this label was attached to an ADDR_VEC, it's
3332 safe to delete the ADDR_VEC. In fact, it's pretty much
3333 mandatory to delete it, because the ADDR_VEC may
3334 be referencing labels that no longer exist. */
3335 if (LABEL_NUSES (label
) == 0
3336 && (next
= next_nonnote_insn (label
)) != NULL
3337 && GET_CODE (next
) == JUMP_INSN
3338 && (GET_CODE (PATTERN (next
)) == ADDR_VEC
3339 || GET_CODE (PATTERN (next
)) == ADDR_DIFF_VEC
))
3341 rtx pat
= PATTERN (next
);
3342 int diff_vec_p
= GET_CODE (pat
) == ADDR_DIFF_VEC
;
3343 int len
= XVECLEN (pat
, diff_vec_p
);
3345 for (i
= 0; i
< len
; i
++)
3346 LABEL_NUSES (XEXP (XVECEXP (pat
, diff_vec_p
, i
), 0))--;
3347 PUT_CODE (next
, NOTE
);
3348 NOTE_LINE_NUMBER (next
) = NOTE_INSN_DELETED
;
3349 NOTE_SOURCE_FILE (next
) = 0;
3354 PUT_CODE (insn
, NOTE
);
3355 NOTE_LINE_NUMBER (insn
) = NOTE_INSN_DELETED
;
3356 NOTE_SOURCE_FILE (insn
) = 0;
3358 /* CC0 is now known to be dead. Either this insn used it,
3359 in which case it doesn't anymore, or clobbered it,
3360 so the next insn can't use it. */
3363 /* If this insn is copying the return value from a library call,
3364 delete the entire library call. */
3365 if (libcall_is_dead
)
3367 rtx first
= XEXP (note
, 0);
3369 while (INSN_DELETED_P (first
))
3370 first
= NEXT_INSN (first
);
3375 NOTE_LINE_NUMBER (p
) = NOTE_INSN_DELETED
;
3376 NOTE_SOURCE_FILE (p
) = 0;
3382 CLEAR_REG_SET (dead
);
3383 CLEAR_REG_SET (live
);
3385 /* See if this is an increment or decrement that can be
3386 merged into a following memory address. */
3389 register rtx x
= single_set (insn
);
3391 /* Does this instruction increment or decrement a register? */
3392 if (!reload_completed
3393 && (flags
& PROP_AUTOINC
)
3395 && GET_CODE (SET_DEST (x
)) == REG
3396 && (GET_CODE (SET_SRC (x
)) == PLUS
3397 || GET_CODE (SET_SRC (x
)) == MINUS
)
3398 && XEXP (SET_SRC (x
), 0) == SET_DEST (x
)
3399 && GET_CODE (XEXP (SET_SRC (x
), 1)) == CONST_INT
3400 /* Ok, look for a following memory ref we can combine with.
3401 If one is found, change the memory ref to a PRE_INC
3402 or PRE_DEC, cancel this insn, and return 1.
3403 Return 0 if nothing has been done. */
3404 && try_pre_increment_1 (insn
))
3407 #endif /* AUTO_INC_DEC */
3409 /* If this is not the final pass, and this insn is copying the
3410 value of a library call and it's dead, don't scan the
3411 insns that perform the library call, so that the call's
3412 arguments are not marked live. */
3413 if (libcall_is_dead
)
3415 /* Mark the dest reg as `significant'. */
3416 mark_set_regs (old
, dead
, PATTERN (insn
), NULL_RTX
,
3417 significant
, flags
);
3419 insn
= XEXP (note
, 0);
3420 prev
= PREV_INSN (insn
);
3422 else if (GET_CODE (PATTERN (insn
)) == SET
3423 && SET_DEST (PATTERN (insn
)) == stack_pointer_rtx
3424 && GET_CODE (SET_SRC (PATTERN (insn
))) == PLUS
3425 && XEXP (SET_SRC (PATTERN (insn
)), 0) == stack_pointer_rtx
3426 && GET_CODE (XEXP (SET_SRC (PATTERN (insn
)), 1)) == CONST_INT
)
3427 /* We have an insn to pop a constant amount off the stack.
3428 (Such insns use PLUS regardless of the direction of the stack,
3429 and any insn to adjust the stack by a constant is always a pop.)
3430 These insns, if not dead stores, have no effect on life. */
3434 /* Any regs live at the time of a call instruction
3435 must not go in a register clobbered by calls.
3436 Find all regs now live and record this for them. */
3438 if (GET_CODE (insn
) == CALL_INSN
3439 && (flags
& PROP_REG_INFO
))
3440 EXECUTE_IF_SET_IN_REG_SET (old
, 0, i
,
3442 REG_N_CALLS_CROSSED (i
)++;
3445 /* LIVE gets the regs used in INSN;
3446 DEAD gets those set by it. Dead insns don't make anything
3449 mark_set_regs (old
, dead
, PATTERN (insn
),
3450 insn
, significant
, flags
);
3452 /* If an insn doesn't use CC0, it becomes dead since we
3453 assume that every insn clobbers it. So show it dead here;
3454 mark_used_regs will set it live if it is referenced. */
3458 mark_used_regs (old
, live
, PATTERN (insn
), flags
, insn
);
3460 /* Sometimes we may have inserted something before INSN (such as
3461 a move) when we make an auto-inc. So ensure we will scan
3464 prev
= PREV_INSN (insn
);
3467 if (! insn_is_dead
&& GET_CODE (insn
) == CALL_INSN
)
3473 for (note
= CALL_INSN_FUNCTION_USAGE (insn
);
3475 note
= XEXP (note
, 1))
3476 if (GET_CODE (XEXP (note
, 0)) == USE
)
3477 mark_used_regs (old
, live
, XEXP (XEXP (note
, 0), 0),
3480 /* Each call clobbers all call-clobbered regs that are not
3481 global or fixed. Note that the function-value reg is a
3482 call-clobbered reg, and mark_set_regs has already had
3483 a chance to handle it. */
3485 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
3486 if (call_used_regs
[i
] && ! global_regs
[i
]
3489 SET_REGNO_REG_SET (dead
, i
);
3491 SET_REGNO_REG_SET (significant
, i
);
3494 /* The stack ptr is used (honorarily) by a CALL insn. */
3495 SET_REGNO_REG_SET (live
, STACK_POINTER_REGNUM
);
3497 /* Calls may also reference any of the global registers,
3498 so they are made live. */
3499 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
3501 mark_used_regs (old
, live
,
3502 gen_rtx_REG (reg_raw_mode
[i
], i
),
3505 /* Calls also clobber memory. */
3506 free_EXPR_LIST_list (&mem_set_list
);
3509 /* Update OLD for the registers used or set. */
3510 AND_COMPL_REG_SET (old
, dead
);
3511 IOR_REG_SET (old
, live
);
3515 /* On final pass, update counts of how many insns each reg is live
3517 if (flags
& PROP_REG_INFO
)
3518 EXECUTE_IF_SET_IN_REG_SET (old
, 0, i
,
3519 { REG_LIVE_LENGTH (i
)++; });
3526 FREE_REG_SET (dead
);
3527 FREE_REG_SET (live
);
3528 free_EXPR_LIST_list (&mem_set_list
);
3531 /* Return 1 if X (the body of an insn, or part of it) is just dead stores
3532 (SET expressions whose destinations are registers dead after the insn).
3533 NEEDED is the regset that says which regs are alive after the insn.
3535 Unless CALL_OK is non-zero, an insn is needed if it contains a CALL.
3537 If X is the entire body of an insn, NOTES contains the reg notes
3538 pertaining to the insn. */
3541 insn_dead_p (x
, needed
, call_ok
, notes
)
3545 rtx notes ATTRIBUTE_UNUSED
;
3547 enum rtx_code code
= GET_CODE (x
);
3550 /* If flow is invoked after reload, we must take existing AUTO_INC
3551 expresions into account. */
3552 if (reload_completed
)
3554 for ( ; notes
; notes
= XEXP (notes
, 1))
3556 if (REG_NOTE_KIND (notes
) == REG_INC
)
3558 int regno
= REGNO (XEXP (notes
, 0));
3560 /* Don't delete insns to set global regs. */
3561 if ((regno
< FIRST_PSEUDO_REGISTER
&& global_regs
[regno
])
3562 || REGNO_REG_SET_P (needed
, regno
))
3569 /* If setting something that's a reg or part of one,
3570 see if that register's altered value will be live. */
3574 rtx r
= SET_DEST (x
);
3576 /* A SET that is a subroutine call cannot be dead. */
3577 if (! call_ok
&& GET_CODE (SET_SRC (x
)) == CALL
)
3581 if (GET_CODE (r
) == CC0
)
3585 if (GET_CODE (r
) == MEM
&& ! MEM_VOLATILE_P (r
))
3588 /* Walk the set of memory locations we are currently tracking
3589 and see if one is an identical match to this memory location.
3590 If so, this memory write is dead (remember, we're walking
3591 backwards from the end of the block to the start. */
3592 temp
= mem_set_list
;
3595 if (rtx_equal_p (XEXP (temp
, 0), r
))
3597 temp
= XEXP (temp
, 1);
3601 while (GET_CODE (r
) == SUBREG
|| GET_CODE (r
) == STRICT_LOW_PART
3602 || GET_CODE (r
) == ZERO_EXTRACT
)
3605 if (GET_CODE (r
) == REG
)
3607 int regno
= REGNO (r
);
3609 /* Don't delete insns to set global regs. */
3610 if ((regno
< FIRST_PSEUDO_REGISTER
&& global_regs
[regno
])
3611 /* Make sure insns to set frame pointer aren't deleted. */
3612 || (regno
== FRAME_POINTER_REGNUM
3613 && (! reload_completed
|| frame_pointer_needed
))
3614 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3615 || (regno
== HARD_FRAME_POINTER_REGNUM
3616 && (! reload_completed
|| frame_pointer_needed
))
3618 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3619 /* Make sure insns to set arg pointer are never deleted
3620 (if the arg pointer isn't fixed, there will be a USE for
3621 it, so we can treat it normally). */
3622 || (regno
== ARG_POINTER_REGNUM
&& fixed_regs
[regno
])
3624 || REGNO_REG_SET_P (needed
, regno
))
3627 /* If this is a hard register, verify that subsequent words are
3629 if (regno
< FIRST_PSEUDO_REGISTER
)
3631 int n
= HARD_REGNO_NREGS (regno
, GET_MODE (r
));
3634 if (REGNO_REG_SET_P (needed
, regno
+n
))
3642 /* If performing several activities,
3643 insn is dead if each activity is individually dead.
3644 Also, CLOBBERs and USEs can be ignored; a CLOBBER or USE
3645 that's inside a PARALLEL doesn't make the insn worth keeping. */
3646 else if (code
== PARALLEL
)
3648 int i
= XVECLEN (x
, 0);
3650 for (i
--; i
>= 0; i
--)
3651 if (GET_CODE (XVECEXP (x
, 0, i
)) != CLOBBER
3652 && GET_CODE (XVECEXP (x
, 0, i
)) != USE
3653 && ! insn_dead_p (XVECEXP (x
, 0, i
), needed
, call_ok
, NULL_RTX
))
3659 /* A CLOBBER of a pseudo-register that is dead serves no purpose. That
3660 is not necessarily true for hard registers. */
3661 else if (code
== CLOBBER
&& GET_CODE (XEXP (x
, 0)) == REG
3662 && REGNO (XEXP (x
, 0)) >= FIRST_PSEUDO_REGISTER
3663 && ! REGNO_REG_SET_P (needed
, REGNO (XEXP (x
, 0))))
3666 /* We do not check other CLOBBER or USE here. An insn consisting of just
3667 a CLOBBER or just a USE should not be deleted. */
3671 /* If X is the pattern of the last insn in a libcall, and assuming X is dead,
3672 return 1 if the entire library call is dead.
3673 This is true if X copies a register (hard or pseudo)
3674 and if the hard return reg of the call insn is dead.
3675 (The caller should have tested the destination of X already for death.)
3677 If this insn doesn't just copy a register, then we don't
3678 have an ordinary libcall. In that case, cse could not have
3679 managed to substitute the source for the dest later on,
3680 so we can assume the libcall is dead.
3682 NEEDED is the bit vector of pseudoregs live before this insn.
3683 NOTE is the REG_RETVAL note of the insn. INSN is the insn itself. */
/* NOTE(review): mangled extraction -- the return type, most parameter
   declarations, braces, several early returns and the declarations of
   CALL_PAT and I were dropped.  Comments only; code untouched.  */
3686 libcall_dead_p (x
, needed
, note
, insn
)
3692 register RTX_CODE code
= GET_CODE (x
);
/* Only a plain register copy marks the end of an ordinary libcall.  */
3696 register rtx r
= SET_SRC (x
);
3697 if (GET_CODE (r
) == REG
)
/* The REG_RETVAL note points back at the first insn of the libcall
   sequence; scan forward from it for the CALL_INSN itself.  */
3699 rtx call
= XEXP (note
, 0);
3703 /* Find the call insn. */
3704 while (call
!= insn
&& GET_CODE (call
) != CALL_INSN
)
3705 call
= NEXT_INSN (call
);
3707 /* If there is none, do nothing special,
3708 since ordinary death handling can understand these insns. */
3712 /* See if the hard reg holding the value is dead.
3713 If this is a PARALLEL, find the call within it. */
3714 call_pat
= PATTERN (call
);
3715 if (GET_CODE (call_pat
) == PARALLEL
)
/* Search the PARALLEL backwards for the SET whose source is the CALL.  */
3717 for (i
= XVECLEN (call_pat
, 0) - 1; i
>= 0; i
--)
3718 if (GET_CODE (XVECEXP (call_pat
, 0, i
)) == SET
3719 && GET_CODE (SET_SRC (XVECEXP (call_pat
, 0, i
))) == CALL
)
3722 /* This may be a library call that is returning a value
3723 via invisible pointer. Do nothing special, since
3724 ordinary death handling can understand these insns. */
3728 call_pat
= XVECEXP (call_pat
, 0, i
);
/* The whole libcall is dead iff the call's own SET is dead (CALL_OK=1
   lets insn_dead_p accept the embedded CALL).  */
3731 return insn_dead_p (call_pat
, needed
, 1, REG_NOTES (call
));
3737 /* Return 1 if register REGNO was used before it was set, i.e. if it is
3738 live at function entry. Don't count global register variables, variables
3739 in registers that can be used for function arg passing, or variables in
3740 fixed hard registers. */
3743 regno_uninitialized (regno
)
3746 if (n_basic_blocks
== 0
3747 || (regno
< FIRST_PSEUDO_REGISTER
3748 && (global_regs
[regno
]
3749 || fixed_regs
[regno
]
3750 || FUNCTION_ARG_REGNO_P (regno
))))
3753 return REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start
, regno
);
3756 /* 1 if register REGNO was alive at a place where `setjmp' was called
3757 and was set more than once or is an argument.
3758 Such regs may be clobbered by `longjmp'. */
3761 regno_clobbered_at_setjmp (regno
)
3764 if (n_basic_blocks
== 0)
3767 return ((REG_N_SETS (regno
) > 1
3768 || REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start
, regno
))
3769 && REGNO_REG_SET_P (regs_live_at_setjmp
, regno
));
3772 /* INSN references memory, possibly using autoincrement addressing modes.
3773 Find any entries on the mem_set_list that need to be invalidated due
3774 to an address change. */
3776 invalidate_mems_from_autoinc (insn
)
3779 rtx note
= REG_NOTES (insn
);
3780 for (note
= REG_NOTES (insn
); note
; note
= XEXP (note
, 1))
3782 if (REG_NOTE_KIND (note
) == REG_INC
)
3784 rtx temp
= mem_set_list
;
3785 rtx prev
= NULL_RTX
;
3790 next
= XEXP (temp
, 1);
3791 if (reg_overlap_mentioned_p (XEXP (note
, 0), XEXP (temp
, 0)))
3793 /* Splice temp out of list. */
3795 XEXP (prev
, 1) = next
;
3797 mem_set_list
= next
;
3798 free_EXPR_LIST_node (temp
);
3808 /* Process the registers that are set within X. Their bits are set to
3809 1 in the regset DEAD, because they are dead prior to this insn.
3811 If INSN is nonzero, it is the insn being processed.
3813 FLAGS is the set of operations to perform. */
3816 mark_set_regs (needed
, dead
, x
, insn
, significant
, flags
)
3824 register RTX_CODE code
= GET_CODE (x
);
3826 if (code
== SET
|| code
== CLOBBER
)
3827 mark_set_1 (needed
, dead
, x
, insn
, significant
, flags
);
3828 else if (code
== PARALLEL
)
3831 for (i
= XVECLEN (x
, 0) - 1; i
>= 0; i
--)
3833 code
= GET_CODE (XVECEXP (x
, 0, i
));
3834 if (code
== SET
|| code
== CLOBBER
)
3835 mark_set_1 (needed
, dead
, XVECEXP (x
, 0, i
), insn
,
3836 significant
, flags
);
3841 /* Process a single SET rtx, X. */
3844 mark_set_1 (needed
, dead
, x
, insn
, significant
, flags
)
3852 register int regno
= -1;
3853 register rtx reg
= SET_DEST (x
);
3855 /* Some targets place small structures in registers for
3856 return values of functions. We have to detect this
3857 case specially here to get correct flow information. */
3858 if (GET_CODE (reg
) == PARALLEL
3859 && GET_MODE (reg
) == BLKmode
)
3863 for (i
= XVECLEN (reg
, 0) - 1; i
>= 0; i
--)
3864 mark_set_1 (needed
, dead
, XVECEXP (reg
, 0, i
), insn
,
3865 significant
, flags
);
3869 /* Modifying just one hardware register of a multi-reg value
3870 or just a byte field of a register
3871 does not mean the value from before this insn is now dead.
3872 But it does mean liveness of that register at the end of the block
3875 Within mark_set_1, however, we treat it as if the register is
3876 indeed modified. mark_used_regs will, however, also treat this
3877 register as being used. Thus, we treat these insns as setting a
3878 new value for the register as a function of its old value. This
3879 cases LOG_LINKS to be made appropriately and this will help combine. */
3881 while (GET_CODE (reg
) == SUBREG
|| GET_CODE (reg
) == ZERO_EXTRACT
3882 || GET_CODE (reg
) == SIGN_EXTRACT
3883 || GET_CODE (reg
) == STRICT_LOW_PART
)
3884 reg
= XEXP (reg
, 0);
3886 /* If this set is a MEM, then it kills any aliased writes.
3887 If this set is a REG, then it kills any MEMs which use the reg. */
3888 if (flags
& PROP_SCAN_DEAD_CODE
)
3890 if (GET_CODE (reg
) == MEM
3891 || GET_CODE (reg
) == REG
)
3893 rtx temp
= mem_set_list
;
3894 rtx prev
= NULL_RTX
;
3899 next
= XEXP (temp
, 1);
3900 if ((GET_CODE (reg
) == MEM
3901 && output_dependence (XEXP (temp
, 0), reg
))
3902 || (GET_CODE (reg
) == REG
3903 && reg_overlap_mentioned_p (reg
, XEXP (temp
, 0))))
3905 /* Splice this entry out of the list. */
3907 XEXP (prev
, 1) = next
;
3909 mem_set_list
= next
;
3910 free_EXPR_LIST_node (temp
);
3918 /* If the memory reference had embedded side effects (autoincrement
3919 address modes. Then we may need to kill some entries on the
3921 if (insn
&& GET_CODE (reg
) == MEM
)
3922 invalidate_mems_from_autoinc (insn
);
3924 if (GET_CODE (reg
) == MEM
&& ! side_effects_p (reg
)
3925 /* We do not know the size of a BLKmode store, so we do not track
3926 them for redundant store elimination. */
3927 && GET_MODE (reg
) != BLKmode
3928 /* There are no REG_INC notes for SP, so we can't assume we'll see
3929 everything that invalidates it. To be safe, don't eliminate any
3930 stores though SP; none of them should be redundant anyway. */
3931 && ! reg_mentioned_p (stack_pointer_rtx
, reg
))
3932 mem_set_list
= alloc_EXPR_LIST (0, reg
, mem_set_list
);
3935 if (GET_CODE (reg
) == REG
3936 && (regno
= REGNO (reg
),
3937 ! (regno
== FRAME_POINTER_REGNUM
3938 && (! reload_completed
|| frame_pointer_needed
)))
3939 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3940 && ! (regno
== HARD_FRAME_POINTER_REGNUM
3941 && (! reload_completed
|| frame_pointer_needed
))
3943 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3944 && ! (regno
== ARG_POINTER_REGNUM
&& fixed_regs
[regno
])
3946 && ! (regno
< FIRST_PSEUDO_REGISTER
&& global_regs
[regno
]))
3947 /* && regno != STACK_POINTER_REGNUM) -- let's try without this. */
3949 int some_needed
= REGNO_REG_SET_P (needed
, regno
);
3950 int some_not_needed
= ! some_needed
;
3952 /* Mark it as a significant register for this basic block. */
3954 SET_REGNO_REG_SET (significant
, regno
);
3956 /* Mark it as dead before this insn. */
3957 SET_REGNO_REG_SET (dead
, regno
);
3959 /* A hard reg in a wide mode may really be multiple registers.
3960 If so, mark all of them just like the first. */
3961 if (regno
< FIRST_PSEUDO_REGISTER
)
3965 /* Nothing below is needed for the stack pointer; get out asap.
3966 Eg, log links aren't needed, since combine won't use them. */
3967 if (regno
== STACK_POINTER_REGNUM
)
3970 n
= HARD_REGNO_NREGS (regno
, GET_MODE (reg
));
3973 int regno_n
= regno
+ n
;
3974 int needed_regno
= REGNO_REG_SET_P (needed
, regno_n
);
3976 SET_REGNO_REG_SET (significant
, regno_n
);
3978 SET_REGNO_REG_SET (dead
, regno_n
);
3979 some_needed
|= needed_regno
;
3980 some_not_needed
|= ! needed_regno
;
3984 /* Additional data to record if this is the final pass. */
3985 if (flags
& (PROP_LOG_LINKS
| PROP_REG_INFO
3986 | PROP_DEATH_NOTES
| PROP_AUTOINC
))
3989 register int blocknum
= BLOCK_NUM (insn
);
3992 if (flags
& (PROP_LOG_LINKS
| PROP_AUTOINC
))
3993 y
= reg_next_use
[regno
];
3995 /* If this is a hard reg, record this function uses the reg. */
3997 if (regno
< FIRST_PSEUDO_REGISTER
)
4000 int endregno
= regno
+ HARD_REGNO_NREGS (regno
, GET_MODE (reg
));
4002 if (flags
& (PROP_LOG_LINKS
| PROP_AUTOINC
))
4003 for (i
= regno
; i
< endregno
; i
++)
4005 /* The next use is no longer "next", since a store
4007 reg_next_use
[i
] = 0;
4010 if (flags
& PROP_REG_INFO
)
4011 for (i
= regno
; i
< endregno
; i
++)
4013 regs_ever_live
[i
] = 1;
4019 /* The next use is no longer "next", since a store
4021 if (flags
& (PROP_LOG_LINKS
| PROP_AUTOINC
))
4022 reg_next_use
[regno
] = 0;
4024 /* Keep track of which basic blocks each reg appears in. */
4026 if (flags
& PROP_REG_INFO
)
4028 if (REG_BASIC_BLOCK (regno
) == REG_BLOCK_UNKNOWN
)
4029 REG_BASIC_BLOCK (regno
) = blocknum
;
4030 else if (REG_BASIC_BLOCK (regno
) != blocknum
)
4031 REG_BASIC_BLOCK (regno
) = REG_BLOCK_GLOBAL
;
4033 /* Count (weighted) references, stores, etc. This counts a
4034 register twice if it is modified, but that is correct. */
4035 REG_N_SETS (regno
)++;
4036 REG_N_REFS (regno
) += loop_depth
;
4038 /* The insns where a reg is live are normally counted
4039 elsewhere, but we want the count to include the insn
4040 where the reg is set, and the normal counting mechanism
4041 would not count it. */
4042 REG_LIVE_LENGTH (regno
)++;
4046 if (! some_not_needed
)
4048 if (flags
& PROP_LOG_LINKS
)
4050 /* Make a logical link from the next following insn
4051 that uses this register, back to this insn.
4052 The following insns have already been processed.
4054 We don't build a LOG_LINK for hard registers containing
4055 in ASM_OPERANDs. If these registers get replaced,
4056 we might wind up changing the semantics of the insn,
4057 even if reload can make what appear to be valid
4058 assignments later. */
4059 if (y
&& (BLOCK_NUM (y
) == blocknum
)
4060 && (regno
>= FIRST_PSEUDO_REGISTER
4061 || asm_noperands (PATTERN (y
)) < 0))
4062 LOG_LINKS (y
) = alloc_INSN_LIST (insn
, LOG_LINKS (y
));
4065 else if (! some_needed
)
4067 if (flags
& PROP_REG_INFO
)
4068 REG_N_DEATHS (REGNO (reg
))++;
4070 if (flags
& PROP_DEATH_NOTES
)
4072 /* Note that dead stores have already been deleted
4073 when possible. If we get here, we have found a
4074 dead store that cannot be eliminated (because the
4075 same insn does something useful). Indicate this
4076 by marking the reg being set as dying here. */
4078 = alloc_EXPR_LIST (REG_UNUSED
, reg
, REG_NOTES (insn
));
4083 if (flags
& PROP_DEATH_NOTES
)
4085 /* This is a case where we have a multi-word hard register
4086 and some, but not all, of the words of the register are
4087 needed in subsequent insns. Write REG_UNUSED notes
4088 for those parts that were not needed. This case should
4093 for (i
= HARD_REGNO_NREGS (regno
, GET_MODE (reg
)) - 1;
4095 if (!REGNO_REG_SET_P (needed
, regno
+ i
))
4099 gen_rtx_REG (reg_raw_mode
[regno
+ i
], regno
+ i
),
4105 else if (GET_CODE (reg
) == REG
)
4107 if (flags
& (PROP_LOG_LINKS
| PROP_AUTOINC
))
4108 reg_next_use
[regno
] = 0;
4111 /* If this is the last pass and this is a SCRATCH, show it will be dying
4112 here and count it. */
4113 else if (GET_CODE (reg
) == SCRATCH
)
4115 if (flags
& PROP_DEATH_NOTES
)
4117 = alloc_EXPR_LIST (REG_UNUSED
, reg
, REG_NOTES (insn
));
4123 /* X is a MEM found in INSN. See if we can convert it into an auto-increment
4127 find_auto_inc (needed
, x
, insn
)
4132 rtx addr
= XEXP (x
, 0);
4133 HOST_WIDE_INT offset
= 0;
4136 /* Here we detect use of an index register which might be good for
4137 postincrement, postdecrement, preincrement, or predecrement. */
4139 if (GET_CODE (addr
) == PLUS
&& GET_CODE (XEXP (addr
, 1)) == CONST_INT
)
4140 offset
= INTVAL (XEXP (addr
, 1)), addr
= XEXP (addr
, 0);
4142 if (GET_CODE (addr
) == REG
)
4145 register int size
= GET_MODE_SIZE (GET_MODE (x
));
4148 int regno
= REGNO (addr
);
4150 /* Is the next use an increment that might make auto-increment? */
4151 if ((incr
= reg_next_use
[regno
]) != 0
4152 && (set
= single_set (incr
)) != 0
4153 && GET_CODE (set
) == SET
4154 && BLOCK_NUM (incr
) == BLOCK_NUM (insn
)
4155 /* Can't add side effects to jumps; if reg is spilled and
4156 reloaded, there's no way to store back the altered value. */
4157 && GET_CODE (insn
) != JUMP_INSN
4158 && (y
= SET_SRC (set
), GET_CODE (y
) == PLUS
)
4159 && XEXP (y
, 0) == addr
4160 && GET_CODE (XEXP (y
, 1)) == CONST_INT
4161 && ((HAVE_POST_INCREMENT
4162 && (INTVAL (XEXP (y
, 1)) == size
&& offset
== 0))
4163 || (HAVE_POST_DECREMENT
4164 && (INTVAL (XEXP (y
, 1)) == - size
&& offset
== 0))
4165 || (HAVE_PRE_INCREMENT
4166 && (INTVAL (XEXP (y
, 1)) == size
&& offset
== size
))
4167 || (HAVE_PRE_DECREMENT
4168 && (INTVAL (XEXP (y
, 1)) == - size
&& offset
== - size
)))
4169 /* Make sure this reg appears only once in this insn. */
4170 && (use
= find_use_as_address (PATTERN (insn
), addr
, offset
),
4171 use
!= 0 && use
!= (rtx
) 1))
4173 rtx q
= SET_DEST (set
);
4174 enum rtx_code inc_code
= (INTVAL (XEXP (y
, 1)) == size
4175 ? (offset
? PRE_INC
: POST_INC
)
4176 : (offset
? PRE_DEC
: POST_DEC
));
4178 if (dead_or_set_p (incr
, addr
))
4180 /* This is the simple case. Try to make the auto-inc. If
4181 we can't, we are done. Otherwise, we will do any
4182 needed updates below. */
4183 if (! validate_change (insn
, &XEXP (x
, 0),
4184 gen_rtx_fmt_e (inc_code
, Pmode
, addr
),
4188 else if (GET_CODE (q
) == REG
4189 /* PREV_INSN used here to check the semi-open interval
4191 && ! reg_used_between_p (q
, PREV_INSN (insn
), incr
)
4192 /* We must also check for sets of q as q may be
4193 a call clobbered hard register and there may
4194 be a call between PREV_INSN (insn) and incr. */
4195 && ! reg_set_between_p (q
, PREV_INSN (insn
), incr
))
4197 /* We have *p followed sometime later by q = p+size.
4198 Both p and q must be live afterward,
4199 and q is not used between INSN and its assignment.
4200 Change it to q = p, ...*q..., q = q+size.
4201 Then fall into the usual case. */
4206 emit_move_insn (q
, addr
);
4207 insns
= get_insns ();
4210 bb
= BLOCK_FOR_INSN (insn
);
4211 for (temp
= insns
; temp
; temp
= NEXT_INSN (temp
))
4212 set_block_for_insn (temp
, bb
);
4214 /* If we can't make the auto-inc, or can't make the
4215 replacement into Y, exit. There's no point in making
4216 the change below if we can't do the auto-inc and doing
4217 so is not correct in the pre-inc case. */
4219 validate_change (insn
, &XEXP (x
, 0),
4220 gen_rtx_fmt_e (inc_code
, Pmode
, q
),
4222 validate_change (incr
, &XEXP (y
, 0), q
, 1);
4223 if (! apply_change_group ())
4226 /* We now know we'll be doing this change, so emit the
4227 new insn(s) and do the updates. */
4228 emit_insns_before (insns
, insn
);
4230 if (BLOCK_FOR_INSN (insn
)->head
== insn
)
4231 BLOCK_FOR_INSN (insn
)->head
= insns
;
4233 /* INCR will become a NOTE and INSN won't contain a
4234 use of ADDR. If a use of ADDR was just placed in
4235 the insn before INSN, make that the next use.
4236 Otherwise, invalidate it. */
4237 if (GET_CODE (PREV_INSN (insn
)) == INSN
4238 && GET_CODE (PATTERN (PREV_INSN (insn
))) == SET
4239 && SET_SRC (PATTERN (PREV_INSN (insn
))) == addr
)
4240 reg_next_use
[regno
] = PREV_INSN (insn
);
4242 reg_next_use
[regno
] = 0;
4247 /* REGNO is now used in INCR which is below INSN, but
4248 it previously wasn't live here. If we don't mark
4249 it as needed, we'll put a REG_DEAD note for it
4250 on this insn, which is incorrect. */
4251 SET_REGNO_REG_SET (needed
, regno
);
4253 /* If there are any calls between INSN and INCR, show
4254 that REGNO now crosses them. */
4255 for (temp
= insn
; temp
!= incr
; temp
= NEXT_INSN (temp
))
4256 if (GET_CODE (temp
) == CALL_INSN
)
4257 REG_N_CALLS_CROSSED (regno
)++;
4262 /* If we haven't returned, it means we were able to make the
4263 auto-inc, so update the status. First, record that this insn
4264 has an implicit side effect. */
4267 = alloc_EXPR_LIST (REG_INC
, addr
, REG_NOTES (insn
));
4269 /* Modify the old increment-insn to simply copy
4270 the already-incremented value of our register. */
4271 if (! validate_change (incr
, &SET_SRC (set
), addr
, 0))
4274 /* If that makes it a no-op (copying the register into itself) delete
4275 it so it won't appear to be a "use" and a "set" of this
4277 if (SET_DEST (set
) == addr
)
4279 PUT_CODE (incr
, NOTE
);
4280 NOTE_LINE_NUMBER (incr
) = NOTE_INSN_DELETED
;
4281 NOTE_SOURCE_FILE (incr
) = 0;
4284 if (regno
>= FIRST_PSEUDO_REGISTER
)
4286 /* Count an extra reference to the reg. When a reg is
4287 incremented, spilling it is worse, so we want to make
4288 that less likely. */
4289 REG_N_REFS (regno
) += loop_depth
;
4291 /* Count the increment as a setting of the register,
4292 even though it isn't a SET in rtl. */
4293 REG_N_SETS (regno
)++;
4298 #endif /* AUTO_INC_DEC */
4300 /* Scan expression X and store a 1-bit in LIVE for each reg it uses.
4301 This is done assuming the registers needed from X
4302 are those that have 1-bits in NEEDED.
4304 FLAGS is the set of enabled operations.
4306 INSN is the containing instruction. If INSN is dead, this function is not
4310 mark_used_regs (needed
, live
, x
, flags
, insn
)
4317 register RTX_CODE code
;
4322 code
= GET_CODE (x
);
4342 /* If we are clobbering a MEM, mark any registers inside the address
4344 if (GET_CODE (XEXP (x
, 0)) == MEM
)
4345 mark_used_regs (needed
, live
, XEXP (XEXP (x
, 0), 0), flags
, insn
);
4349 /* Don't bother watching stores to mems if this is not the
4350 final pass. We'll not be deleting dead stores this round. */
4351 if (flags
& PROP_SCAN_DEAD_CODE
)
4353 /* Invalidate the data for the last MEM stored, but only if MEM is
4354 something that can be stored into. */
4355 if (GET_CODE (XEXP (x
, 0)) == SYMBOL_REF
4356 && CONSTANT_POOL_ADDRESS_P (XEXP (x
, 0)))
4357 ; /* needn't clear the memory set list */
4360 rtx temp
= mem_set_list
;
4361 rtx prev
= NULL_RTX
;
4366 next
= XEXP (temp
, 1);
4367 if (anti_dependence (XEXP (temp
, 0), x
))
4369 /* Splice temp out of the list. */
4371 XEXP (prev
, 1) = next
;
4373 mem_set_list
= next
;
4374 free_EXPR_LIST_node (temp
);
4382 /* If the memory reference had embedded side effects (autoincrement
4383 address modes. Then we may need to kill some entries on the
4386 invalidate_mems_from_autoinc (insn
);
4390 if (flags
& PROP_AUTOINC
)
4391 find_auto_inc (needed
, x
, insn
);
4396 if (GET_CODE (SUBREG_REG (x
)) == REG
4397 && REGNO (SUBREG_REG (x
)) >= FIRST_PSEUDO_REGISTER
4398 && (GET_MODE_SIZE (GET_MODE (x
))
4399 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
)))))
4400 REG_CHANGES_SIZE (REGNO (SUBREG_REG (x
))) = 1;
4402 /* While we're here, optimize this case. */
4405 /* In case the SUBREG is not of a register, don't optimize */
4406 if (GET_CODE (x
) != REG
)
4408 mark_used_regs (needed
, live
, x
, flags
, insn
);
4412 /* ... fall through ... */
4415 /* See a register other than being set
4416 => mark it as needed. */
4420 int some_needed
= REGNO_REG_SET_P (needed
, regno
);
4421 int some_not_needed
= ! some_needed
;
4423 SET_REGNO_REG_SET (live
, regno
);
4425 /* A hard reg in a wide mode may really be multiple registers.
4426 If so, mark all of them just like the first. */
4427 if (regno
< FIRST_PSEUDO_REGISTER
)
4431 /* For stack ptr or fixed arg pointer,
4432 nothing below can be necessary, so waste no more time. */
4433 if (regno
== STACK_POINTER_REGNUM
4434 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4435 || (regno
== HARD_FRAME_POINTER_REGNUM
4436 && (! reload_completed
|| frame_pointer_needed
))
4438 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4439 || (regno
== ARG_POINTER_REGNUM
&& fixed_regs
[regno
])
4441 || (regno
== FRAME_POINTER_REGNUM
4442 && (! reload_completed
|| frame_pointer_needed
)))
4444 /* If this is a register we are going to try to eliminate,
4445 don't mark it live here. If we are successful in
4446 eliminating it, it need not be live unless it is used for
4447 pseudos, in which case it will have been set live when
4448 it was allocated to the pseudos. If the register will not
4449 be eliminated, reload will set it live at that point. */
4451 if (! TEST_HARD_REG_BIT (elim_reg_set
, regno
))
4452 regs_ever_live
[regno
] = 1;
4455 /* No death notes for global register variables;
4456 their values are live after this function exits. */
4457 if (global_regs
[regno
])
4459 if (flags
& (PROP_LOG_LINKS
| PROP_AUTOINC
))
4460 reg_next_use
[regno
] = insn
;
4464 n
= HARD_REGNO_NREGS (regno
, GET_MODE (x
));
4467 int regno_n
= regno
+ n
;
4468 int needed_regno
= REGNO_REG_SET_P (needed
, regno_n
);
4470 SET_REGNO_REG_SET (live
, regno_n
);
4471 some_needed
|= needed_regno
;
4472 some_not_needed
|= ! needed_regno
;
4476 if (flags
& (PROP_LOG_LINKS
| PROP_AUTOINC
))
4478 /* Record where each reg is used, so when the reg
4479 is set we know the next insn that uses it. */
4481 reg_next_use
[regno
] = insn
;
4483 if (flags
& PROP_REG_INFO
)
4485 if (regno
< FIRST_PSEUDO_REGISTER
)
4487 /* If a hard reg is being used,
4488 record that this function does use it. */
4490 i
= HARD_REGNO_NREGS (regno
, GET_MODE (x
));
4494 regs_ever_live
[regno
+ --i
] = 1;
4499 /* Keep track of which basic block each reg appears in. */
4501 register int blocknum
= BLOCK_NUM (insn
);
4503 if (REG_BASIC_BLOCK (regno
) == REG_BLOCK_UNKNOWN
)
4504 REG_BASIC_BLOCK (regno
) = blocknum
;
4505 else if (REG_BASIC_BLOCK (regno
) != blocknum
)
4506 REG_BASIC_BLOCK (regno
) = REG_BLOCK_GLOBAL
;
4508 /* Count (weighted) number of uses of each reg. */
4510 REG_N_REFS (regno
) += loop_depth
;
4514 /* Record and count the insns in which a reg dies.
4515 If it is used in this insn and was dead below the insn
4516 then it dies in this insn. If it was set in this insn,
4517 we do not make a REG_DEAD note; likewise if we already
4518 made such a note. */
4520 if (flags
& PROP_DEATH_NOTES
)
4523 && ! dead_or_set_p (insn
, x
)
4525 && (regno
>= FIRST_PSEUDO_REGISTER
|| ! fixed_regs
[regno
])
4529 /* Check for the case where the register dying partially
4530 overlaps the register set by this insn. */
4531 if (regno
< FIRST_PSEUDO_REGISTER
4532 && HARD_REGNO_NREGS (regno
, GET_MODE (x
)) > 1)
4534 int n
= HARD_REGNO_NREGS (regno
, GET_MODE (x
));
4536 some_needed
|= dead_or_set_regno_p (insn
, regno
+ n
);
4539 /* If none of the words in X is needed, make a REG_DEAD
4540 note. Otherwise, we must make partial REG_DEAD notes. */
4544 = alloc_EXPR_LIST (REG_DEAD
, x
, REG_NOTES (insn
));
4545 REG_N_DEATHS (regno
)++;
4551 /* Don't make a REG_DEAD note for a part of a register
4552 that is set in the insn. */
4554 for (i
= HARD_REGNO_NREGS (regno
, GET_MODE (x
)) - 1;
4556 if (!REGNO_REG_SET_P (needed
, regno
+ i
)
4557 && ! dead_or_set_regno_p (insn
, regno
+ i
))
4560 (REG_DEAD
, gen_rtx_REG (reg_raw_mode
[regno
+ i
],
4571 register rtx testreg
= SET_DEST (x
);
4574 /* If storing into MEM, don't show it as being used. But do
4575 show the address as being used. */
4576 if (GET_CODE (testreg
) == MEM
)
4579 if (flags
& PROP_AUTOINC
)
4580 find_auto_inc (needed
, testreg
, insn
);
4582 mark_used_regs (needed
, live
, XEXP (testreg
, 0), flags
, insn
);
4583 mark_used_regs (needed
, live
, SET_SRC (x
), flags
, insn
);
4587 /* Storing in STRICT_LOW_PART is like storing in a reg
4588 in that this SET might be dead, so ignore it in TESTREG.
4589 but in some other ways it is like using the reg.
4591 Storing in a SUBREG or a bit field is like storing the entire
4592 register in that if the register's value is not used
4593 then this SET is not needed. */
4594 while (GET_CODE (testreg
) == STRICT_LOW_PART
4595 || GET_CODE (testreg
) == ZERO_EXTRACT
4596 || GET_CODE (testreg
) == SIGN_EXTRACT
4597 || GET_CODE (testreg
) == SUBREG
)
4599 if (GET_CODE (testreg
) == SUBREG
4600 && GET_CODE (SUBREG_REG (testreg
)) == REG
4601 && REGNO (SUBREG_REG (testreg
)) >= FIRST_PSEUDO_REGISTER
4602 && (GET_MODE_SIZE (GET_MODE (testreg
))
4603 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (testreg
)))))
4604 REG_CHANGES_SIZE (REGNO (SUBREG_REG (testreg
))) = 1;
4606 /* Modifying a single register in an alternate mode
4607 does not use any of the old value. But these other
4608 ways of storing in a register do use the old value. */
4609 if (GET_CODE (testreg
) == SUBREG
4610 && !(REG_SIZE (SUBREG_REG (testreg
)) > REG_SIZE (testreg
)))
4615 testreg
= XEXP (testreg
, 0);
4618 /* If this is a store into a register,
4619 recursively scan the value being stored. */
4621 if ((GET_CODE (testreg
) == PARALLEL
4622 && GET_MODE (testreg
) == BLKmode
)
4623 || (GET_CODE (testreg
) == REG
4624 && (regno
= REGNO (testreg
), ! (regno
== FRAME_POINTER_REGNUM
4625 && (! reload_completed
|| frame_pointer_needed
)))
4626 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4627 && ! (regno
== HARD_FRAME_POINTER_REGNUM
4628 && (! reload_completed
|| frame_pointer_needed
))
4630 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4631 && ! (regno
== ARG_POINTER_REGNUM
&& fixed_regs
[regno
])
4634 /* We used to exclude global_regs here, but that seems wrong.
4635 Storing in them is like storing in mem. */
4637 mark_used_regs (needed
, live
, SET_SRC (x
), flags
, insn
);
4639 mark_used_regs (needed
, live
, SET_DEST (x
), flags
, insn
);
4646 /* ??? This info should have been gotten from mark_regs_live_at_end,
4647 as applied to the EXIT block, and propagated along the edge that
4648 connects this block to the EXIT. */
4652 case UNSPEC_VOLATILE
:
4656 /* Traditional and volatile asm instructions must be considered to use
4657 and clobber all hard registers, all pseudo-registers and all of
4658 memory. So must TRAP_IF and UNSPEC_VOLATILE operations.
4660 Consider for instance a volatile asm that changes the fpu rounding
4661 mode. An insn should not be moved across this even if it only uses
4662 pseudo-regs because it might give an incorrectly rounded result.
4664 ?!? Unfortunately, marking all hard registers as live causes massive
4665 problems for the register allocator and marking all pseudos as live
4666 creates mountains of uninitialized variable warnings.
4668 So for now, just clear the memory set list and mark any regs
4669 we can find in ASM_OPERANDS as used. */
4670 if (code
!= ASM_OPERANDS
|| MEM_VOLATILE_P (x
))
4671 free_EXPR_LIST_list (&mem_set_list
);
4673 /* For all ASM_OPERANDS, we must traverse the vector of input operands.
4674 We can not just fall through here since then we would be confused
4675 by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate
4676 traditional asms unlike their normal usage. */
4677 if (code
== ASM_OPERANDS
)
4681 for (j
= 0; j
< ASM_OPERANDS_INPUT_LENGTH (x
); j
++)
4682 mark_used_regs (needed
, live
, ASM_OPERANDS_INPUT (x
, j
),
4693 /* Recursively scan the operands of this expression. */
4696 register const char *fmt
= GET_RTX_FORMAT (code
);
4699 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
4703 /* Tail recursive case: save a function call level. */
4709 mark_used_regs (needed
, live
, XEXP (x
, i
), flags
, insn
);
4711 else if (fmt
[i
] == 'E')
4714 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
4715 mark_used_regs (needed
, live
, XVECEXP (x
, i
, j
), flags
, insn
);
4724 try_pre_increment_1 (insn
)
4727 /* Find the next use of this reg. If in same basic block,
4728 make it do pre-increment or pre-decrement if appropriate. */
4729 rtx x
= single_set (insn
);
4730 HOST_WIDE_INT amount
= ((GET_CODE (SET_SRC (x
)) == PLUS
? 1 : -1)
4731 * INTVAL (XEXP (SET_SRC (x
), 1)));
4732 int regno
= REGNO (SET_DEST (x
));
4733 rtx y
= reg_next_use
[regno
];
4735 && BLOCK_NUM (y
) == BLOCK_NUM (insn
)
4736 /* Don't do this if the reg dies, or gets set in y; a standard addressing
4737 mode would be better. */
4738 && ! dead_or_set_p (y
, SET_DEST (x
))
4739 && try_pre_increment (y
, SET_DEST (x
), amount
))
4741 /* We have found a suitable auto-increment
4742 and already changed insn Y to do it.
4743 So flush this increment-instruction. */
4744 PUT_CODE (insn
, NOTE
);
4745 NOTE_LINE_NUMBER (insn
) = NOTE_INSN_DELETED
;
4746 NOTE_SOURCE_FILE (insn
) = 0;
4747 /* Count a reference to this reg for the increment
4748 insn we are deleting. When a reg is incremented.
4749 spilling it is worse, so we want to make that
4751 if (regno
>= FIRST_PSEUDO_REGISTER
)
4753 REG_N_REFS (regno
) += loop_depth
;
4754 REG_N_SETS (regno
)++;
4761 /* Try to change INSN so that it does pre-increment or pre-decrement
4762 addressing on register REG in order to add AMOUNT to REG.
4763 AMOUNT is negative for pre-decrement.
4764 Returns 1 if the change could be made.
4765 This checks all about the validity of the result of modifying INSN. */
4768 try_pre_increment (insn
, reg
, amount
)
4770 HOST_WIDE_INT amount
;
4774 /* Nonzero if we can try to make a pre-increment or pre-decrement.
4775 For example, addl $4,r1; movl (r1),... can become movl +(r1),... */
4777 /* Nonzero if we can try to make a post-increment or post-decrement.
4778 For example, addl $4,r1; movl -4(r1),... can become movl (r1)+,...
4779 It is possible for both PRE_OK and POST_OK to be nonzero if the machine
4780 supports both pre-inc and post-inc, or both pre-dec and post-dec. */
4783 /* Nonzero if the opportunity actually requires post-inc or post-dec. */
4786 /* From the sign of increment, see which possibilities are conceivable
4787 on this target machine. */
4788 if (HAVE_PRE_INCREMENT
&& amount
> 0)
4790 if (HAVE_POST_INCREMENT
&& amount
> 0)
4793 if (HAVE_PRE_DECREMENT
&& amount
< 0)
4795 if (HAVE_POST_DECREMENT
&& amount
< 0)
4798 if (! (pre_ok
|| post_ok
))
4801 /* It is not safe to add a side effect to a jump insn
4802 because if the incremented register is spilled and must be reloaded
4803 there would be no way to store the incremented value back in memory. */
4805 if (GET_CODE (insn
) == JUMP_INSN
)
4810 use
= find_use_as_address (PATTERN (insn
), reg
, 0);
4811 if (post_ok
&& (use
== 0 || use
== (rtx
) 1))
4813 use
= find_use_as_address (PATTERN (insn
), reg
, -amount
);
4817 if (use
== 0 || use
== (rtx
) 1)
4820 if (GET_MODE_SIZE (GET_MODE (use
)) != (amount
> 0 ? amount
: - amount
))
4823 /* See if this combination of instruction and addressing mode exists. */
4824 if (! validate_change (insn
, &XEXP (use
, 0),
4825 gen_rtx_fmt_e (amount
> 0
4826 ? (do_post
? POST_INC
: PRE_INC
)
4827 : (do_post
? POST_DEC
: PRE_DEC
),
4831 /* Record that this insn now has an implicit side effect on X. */
4832 REG_NOTES (insn
) = alloc_EXPR_LIST (REG_INC
, reg
, REG_NOTES (insn
));
4836 #endif /* AUTO_INC_DEC */
4838 /* Find the place in the rtx X where REG is used as a memory address.
4839 Return the MEM rtx that so uses it.
4840 If PLUSCONST is nonzero, search instead for a memory address equivalent to
4841 (plus REG (const_int PLUSCONST)).
4843 If such an address does not appear, return 0.
4844 If REG appears more than once, or is used other than in such an address,
4848 find_use_as_address (x
, reg
, plusconst
)
4851 HOST_WIDE_INT plusconst
;
4853 enum rtx_code code
= GET_CODE (x
);
4854 const char *fmt
= GET_RTX_FORMAT (code
);
4856 register rtx value
= 0;
4859 if (code
== MEM
&& XEXP (x
, 0) == reg
&& plusconst
== 0)
4862 if (code
== MEM
&& GET_CODE (XEXP (x
, 0)) == PLUS
4863 && XEXP (XEXP (x
, 0), 0) == reg
4864 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == CONST_INT
4865 && INTVAL (XEXP (XEXP (x
, 0), 1)) == plusconst
)
4868 if (code
== SIGN_EXTRACT
|| code
== ZERO_EXTRACT
)
4870 /* If REG occurs inside a MEM used in a bit-field reference,
4871 that is unacceptable. */
4872 if (find_use_as_address (XEXP (x
, 0), reg
, 0) != 0)
4873 return (rtx
) (HOST_WIDE_INT
) 1;
4877 return (rtx
) (HOST_WIDE_INT
) 1;
4879 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
4883 tem
= find_use_as_address (XEXP (x
, i
), reg
, plusconst
);
4887 return (rtx
) (HOST_WIDE_INT
) 1;
4892 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
4894 tem
= find_use_as_address (XVECEXP (x
, i
, j
), reg
, plusconst
);
4898 return (rtx
) (HOST_WIDE_INT
) 1;
4906 /* Write information about registers and basic blocks into FILE.
4907 This is part of making a debugging dump. */
4910 dump_flow_info (file
)
4914 static const char * const reg_class_names
[] = REG_CLASS_NAMES
;
4916 fprintf (file
, "%d registers.\n", max_regno
);
4917 for (i
= FIRST_PSEUDO_REGISTER
; i
< max_regno
; i
++)
4920 enum reg_class
class, altclass
;
4921 fprintf (file
, "\nRegister %d used %d times across %d insns",
4922 i
, REG_N_REFS (i
), REG_LIVE_LENGTH (i
));
4923 if (REG_BASIC_BLOCK (i
) >= 0)
4924 fprintf (file
, " in block %d", REG_BASIC_BLOCK (i
));
4926 fprintf (file
, "; set %d time%s", REG_N_SETS (i
),
4927 (REG_N_SETS (i
) == 1) ? "" : "s");
4928 if (REG_USERVAR_P (regno_reg_rtx
[i
]))
4929 fprintf (file
, "; user var");
4930 if (REG_N_DEATHS (i
) != 1)
4931 fprintf (file
, "; dies in %d places", REG_N_DEATHS (i
));
4932 if (REG_N_CALLS_CROSSED (i
) == 1)
4933 fprintf (file
, "; crosses 1 call");
4934 else if (REG_N_CALLS_CROSSED (i
))
4935 fprintf (file
, "; crosses %d calls", REG_N_CALLS_CROSSED (i
));
4936 if (PSEUDO_REGNO_BYTES (i
) != UNITS_PER_WORD
)
4937 fprintf (file
, "; %d bytes", PSEUDO_REGNO_BYTES (i
));
4938 class = reg_preferred_class (i
);
4939 altclass
= reg_alternate_class (i
);
4940 if (class != GENERAL_REGS
|| altclass
!= ALL_REGS
)
4942 if (altclass
== ALL_REGS
|| class == ALL_REGS
)
4943 fprintf (file
, "; pref %s", reg_class_names
[(int) class]);
4944 else if (altclass
== NO_REGS
)
4945 fprintf (file
, "; %s or none", reg_class_names
[(int) class]);
4947 fprintf (file
, "; pref %s, else %s",
4948 reg_class_names
[(int) class],
4949 reg_class_names
[(int) altclass
]);
4951 if (REGNO_POINTER_FLAG (i
))
4952 fprintf (file
, "; pointer");
4953 fprintf (file
, ".\n");
4956 fprintf (file
, "\n%d basic blocks, %d edges.\n", n_basic_blocks
, n_edges
);
4957 for (i
= 0; i
< n_basic_blocks
; i
++)
4959 register basic_block bb
= BASIC_BLOCK (i
);
4963 fprintf (file
, "\nBasic block %d: first insn %d, last %d.\n",
4964 i
, INSN_UID (bb
->head
), INSN_UID (bb
->end
));
4966 fprintf (file
, "Predecessors: ");
4967 for (e
= bb
->pred
; e
; e
= e
->pred_next
)
4968 dump_edge_info (file
, e
, 0);
4970 fprintf (file
, "\nSuccessors: ");
4971 for (e
= bb
->succ
; e
; e
= e
->succ_next
)
4972 dump_edge_info (file
, e
, 1);
4974 fprintf (file
, "\nRegisters live at start:");
4975 if (bb
->global_live_at_start
)
4977 for (regno
= 0; regno
< max_regno
; regno
++)
4978 if (REGNO_REG_SET_P (bb
->global_live_at_start
, regno
))
4979 fprintf (file
, " %d", regno
);
4982 fprintf (file
, " n/a");
4984 fprintf (file
, "\nRegisters live at end:");
4985 if (bb
->global_live_at_end
)
4987 for (regno
= 0; regno
< max_regno
; regno
++)
4988 if (REGNO_REG_SET_P (bb
->global_live_at_end
, regno
))
4989 fprintf (file
, " %d", regno
);
4992 fprintf (file
, " n/a");
5003 dump_flow_info (stderr
);
5007 dump_edge_info (file
, e
, do_succ
)
5012 basic_block side
= (do_succ
? e
->dest
: e
->src
);
5014 if (side
== ENTRY_BLOCK_PTR
)
5015 fputs (" ENTRY", file
);
5016 else if (side
== EXIT_BLOCK_PTR
)
5017 fputs (" EXIT", file
);
5019 fprintf (file
, " %d", side
->index
);
5023 static const char * const bitnames
[] = {
5024 "fallthru", "crit", "ab", "abcall", "eh", "fake"
5027 int i
, flags
= e
->flags
;
5031 for (i
= 0; flags
; i
++)
5032 if (flags
& (1 << i
))
5038 if (i
< (int)(sizeof (bitnames
) / sizeof (*bitnames
)))
5039 fputs (bitnames
[i
], file
);
5041 fprintf (file
, "%d", i
);
5049 /* Like print_rtl, but also print out live information for the start of each
5053 print_rtl_with_bb (outf
, rtx_first
)
5057 register rtx tmp_rtx
;
5060 fprintf (outf
, "(nil)\n");
5064 enum bb_state
{ NOT_IN_BB
, IN_ONE_BB
, IN_MULTIPLE_BB
};
5065 int max_uid
= get_max_uid ();
5066 basic_block
*start
= (basic_block
*)
5067 xcalloc (max_uid
, sizeof (basic_block
));
5068 basic_block
*end
= (basic_block
*)
5069 xcalloc (max_uid
, sizeof (basic_block
));
5070 enum bb_state
*in_bb_p
= (enum bb_state
*)
5071 xcalloc (max_uid
, sizeof (enum bb_state
));
5073 for (i
= n_basic_blocks
- 1; i
>= 0; i
--)
5075 basic_block bb
= BASIC_BLOCK (i
);
5078 start
[INSN_UID (bb
->head
)] = bb
;
5079 end
[INSN_UID (bb
->end
)] = bb
;
5080 for (x
= bb
->head
; x
!= NULL_RTX
; x
= NEXT_INSN (x
))
5082 enum bb_state state
= IN_MULTIPLE_BB
;
5083 if (in_bb_p
[INSN_UID(x
)] == NOT_IN_BB
)
5085 in_bb_p
[INSN_UID(x
)] = state
;
5092 for (tmp_rtx
= rtx_first
; NULL
!= tmp_rtx
; tmp_rtx
= NEXT_INSN (tmp_rtx
))
5097 if ((bb
= start
[INSN_UID (tmp_rtx
)]) != NULL
)
5099 fprintf (outf
, ";; Start of basic block %d, registers live:",
5102 EXECUTE_IF_SET_IN_REG_SET (bb
->global_live_at_start
, 0, i
,
5104 fprintf (outf
, " %d", i
);
5105 if (i
< FIRST_PSEUDO_REGISTER
)
5106 fprintf (outf
, " [%s]",
5112 if (in_bb_p
[INSN_UID(tmp_rtx
)] == NOT_IN_BB
5113 && GET_CODE (tmp_rtx
) != NOTE
5114 && GET_CODE (tmp_rtx
) != BARRIER
5116 fprintf (outf
, ";; Insn is not within a basic block\n");
5117 else if (in_bb_p
[INSN_UID(tmp_rtx
)] == IN_MULTIPLE_BB
)
5118 fprintf (outf
, ";; Insn is in multiple basic blocks\n");
5120 did_output
= print_rtl_single (outf
, tmp_rtx
);
5122 if ((bb
= end
[INSN_UID (tmp_rtx
)]) != NULL
)
5123 fprintf (outf
, ";; End of basic block %d\n", bb
->index
);
5134 if (current_function_epilogue_delay_list
!= 0)
5136 fprintf (outf
, "\n;; Insns in epilogue delay list:\n\n");
5137 for (tmp_rtx
= current_function_epilogue_delay_list
; tmp_rtx
!= 0;
5138 tmp_rtx
= XEXP (tmp_rtx
, 1))
5139 print_rtl_single (outf
, XEXP (tmp_rtx
, 0));
5144 /* Integer list support. */
5146 /* Allocate a node from list *HEAD_PTR. */
/* NOTE(review): this view of the file is elided -- the return type line,
   braces, and some statements between the numbered source lines are not
   visible here.  Only comments have been added; the code is unchanged.  */
5149 alloc_int_list_node (head_ptr
)
5150 int_list_block
**head_ptr
;
/* FIRST_BLK is the block currently at the head of the allocation pool.  */
5152 struct int_list_block
*first_blk
= *head_ptr
;
/* If there is no pool yet, or the head block has no free nodes left,
   allocate a fresh block and push it onto the front of the pool.  */
5154 if (first_blk
== NULL
|| first_blk
->nodes_left
<= 0)
5156 first_blk
= (struct int_list_block
*) xmalloc (sizeof (struct int_list_block
))
;
5157 first_blk
->nodes_left
= INT_LIST_NODES_IN_BLK
;
5158 first_blk
->next
= *head_ptr
;
5159 *head_ptr
= first_blk
;
/* Hand out nodes from the tail of the block's array, counting down.  */
5162 first_blk
->nodes_left
--;
5163 return &first_blk
->nodes
[first_blk
->nodes_left
];
5166 /* Pointer to head of predecessor/successor block list. */
5167 static int_list_block
*pred_int_list_blocks
;
5169 /* Add a new node to integer list LIST with value VAL.
5170 LIST is a pointer to a list object to allow for different implementations.
5171 If *LIST is initially NULL, the list is empty.
5172 The caller must not care whether the element is added to the front or
5173 to the end of the list (to allow for different implementations). */
/* NOTE(review): the statements that store VAL into the node and link it
   into *LIST are elided from this view; only the allocation survives.  */
5176 add_int_list_node (blk_list
, list
, val
)
5177 int_list_block
**blk_list
;
/* Grab a fresh node from the block allocator above.  */
5181 int_list_ptr p
= alloc_int_list_node (blk_list
)
;
5189 /* Free the blocks of lists at BLK_LIST. */
5192 free_int_list (blk_list
)
5193 int_list_block
**blk_list
;
5195 int_list_block
*p
, *next
;
/* Walk the chain of blocks.  NEXT must be captured before P is freed;
   the body statements are elided from this view.  */
5197 for (p
= *blk_list
; p
!= NULL
; p
= next
)
5203 /* Mark list as empty for the next function we compile. */
5207 /* Predecessor/successor computation. */
5209 /* Mark PRED_BB a predecessor of SUCC_BB,
5210 and conversely SUCC_BB a successor of PRED_BB. */
5213 add_pred_succ (pred_bb
, succ_bb
, s_preds
, s_succs
, num_preds
, num_succs
)
5216 int_list_ptr
*s_preds
;
5217 int_list_ptr
*s_succs
;
/* Don't record a predecessor entry for the exit block.  */
5221 if (succ_bb
!= EXIT_BLOCK
)
5223 add_int_list_node (&pred_int_list_blocks
, &s_preds
[succ_bb
], pred_bb
)
;
5224 num_preds
[succ_bb
]++;
/* Likewise, don't record a successor entry for the entry block.  */
5226 if (pred_bb
!= ENTRY_BLOCK
)
5228 add_int_list_node (&pred_int_list_blocks
, &s_succs
[pred_bb
], succ_bb
)
;
5229 num_succs
[pred_bb
]++;
5233 /* Convert edge lists into pred/succ lists for backward compatibility. */
5236 compute_preds_succs (s_preds
, s_succs
, num_preds
, num_succs
)
5237 int_list_ptr
*s_preds
;
5238 int_list_ptr
*s_succs
;
5242 int i
, n
= n_basic_blocks
;
5245 memset (s_preds
, 0, n_basic_blocks
* sizeof (int_list_ptr
));
5246 memset (s_succs
, 0, n_basic_blocks
* sizeof (int_list_ptr
));
5247 memset (num_preds
, 0, n_basic_blocks
* sizeof (int));
5248 memset (num_succs
, 0, n_basic_blocks
* sizeof (int));
5250 for (i
= 0; i
< n
; ++i
)
5252 basic_block bb
= BASIC_BLOCK (i
);
5254 for (e
= bb
->succ
; e
; e
= e
->succ_next
)
5255 add_pred_succ (i
, e
->dest
->index
, s_preds
, s_succs
,
5256 num_preds
, num_succs
);
5259 for (e
= ENTRY_BLOCK_PTR
->succ
; e
; e
= e
->succ_next
)
5260 add_pred_succ (ENTRY_BLOCK
, e
->dest
->index
, s_preds
, s_succs
,
5261 num_preds
, num_succs
);
5265 dump_bb_data (file
, preds
, succs
, live_info
)
5267 int_list_ptr
*preds
;
5268 int_list_ptr
*succs
;
5274 fprintf (file
, "BB data\n\n");
5275 for (bb
= 0; bb
< n_basic_blocks
; bb
++)
5277 fprintf (file
, "BB %d, start %d, end %d\n", bb
,
5278 INSN_UID (BLOCK_HEAD (bb
)), INSN_UID (BLOCK_END (bb
)));
5279 fprintf (file
, " preds:");
5280 for (p
= preds
[bb
]; p
!= NULL
; p
= p
->next
)
5282 int pred_bb
= INT_LIST_VAL (p
);
5283 if (pred_bb
== ENTRY_BLOCK
)
5284 fprintf (file
, " entry");
5286 fprintf (file
, " %d", pred_bb
);
5288 fprintf (file
, "\n");
5289 fprintf (file
, " succs:");
5290 for (p
= succs
[bb
]; p
!= NULL
; p
= p
->next
)
5292 int succ_bb
= INT_LIST_VAL (p
);
5293 if (succ_bb
== EXIT_BLOCK
)
5294 fprintf (file
, " exit");
5296 fprintf (file
, " %d", succ_bb
);
5301 fprintf (file
, "\nRegisters live at start:");
5302 for (regno
= 0; regno
< max_regno
; regno
++)
5303 if (REGNO_REG_SET_P (BASIC_BLOCK (bb
)->global_live_at_start
, regno
))
5304 fprintf (file
, " %d", regno
);
5305 fprintf (file
, "\n");
5307 fprintf (file
, "\n");
5309 fprintf (file
, "\n");
5312 /* Free basic block data storage. */
5317 free_int_list (&pred_int_list_blocks
);
5320 /* Compute dominator relationships. */
5322 compute_dominators (dominators
, post_dominators
, s_preds
, s_succs
)
5323 sbitmap
*dominators
;
5324 sbitmap
*post_dominators
;
5325 int_list_ptr
*s_preds
;
5326 int_list_ptr
*s_succs
;
5328 int bb
, changed
, passes
;
5329 sbitmap
*temp_bitmap
;
5331 temp_bitmap
= sbitmap_vector_alloc (n_basic_blocks
, n_basic_blocks
);
5332 sbitmap_vector_ones (dominators
, n_basic_blocks
);
5333 sbitmap_vector_ones (post_dominators
, n_basic_blocks
);
5334 sbitmap_vector_zero (temp_bitmap
, n_basic_blocks
);
5336 sbitmap_zero (dominators
[0]);
5337 SET_BIT (dominators
[0], 0);
5339 sbitmap_zero (post_dominators
[n_basic_blocks
- 1]);
5340 SET_BIT (post_dominators
[n_basic_blocks
- 1], 0);
5347 for (bb
= 1; bb
< n_basic_blocks
; bb
++)
5349 sbitmap_intersect_of_predecessors (temp_bitmap
[bb
], dominators
,
5351 SET_BIT (temp_bitmap
[bb
], bb
);
5352 changed
|= sbitmap_a_and_b (dominators
[bb
],
5355 sbitmap_intersect_of_successors (temp_bitmap
[bb
], post_dominators
,
5357 SET_BIT (temp_bitmap
[bb
], bb
);
5358 changed
|= sbitmap_a_and_b (post_dominators
[bb
],
5359 post_dominators
[bb
],
5368 /* Compute dominator relationships using new flow graph structures. */
5370 compute_flow_dominators (dominators
, post_dominators
)
5371 sbitmap
*dominators
;
5372 sbitmap
*post_dominators
;
5374 int bb
, changed
, passes
;
5375 sbitmap
*temp_bitmap
;
5377 temp_bitmap
= sbitmap_vector_alloc (n_basic_blocks
, n_basic_blocks
);
5378 sbitmap_vector_ones (dominators
, n_basic_blocks
);
5379 sbitmap_vector_ones (post_dominators
, n_basic_blocks
);
5380 sbitmap_vector_zero (temp_bitmap
, n_basic_blocks
);
5382 sbitmap_zero (dominators
[0]);
5383 SET_BIT (dominators
[0], 0);
5385 sbitmap_zero (post_dominators
[n_basic_blocks
- 1]);
5386 SET_BIT (post_dominators
[n_basic_blocks
- 1], 0);
5393 for (bb
= 1; bb
< n_basic_blocks
; bb
++)
5395 sbitmap_intersection_of_preds (temp_bitmap
[bb
], dominators
, bb
);
5396 SET_BIT (temp_bitmap
[bb
], bb
);
5397 changed
|= sbitmap_a_and_b (dominators
[bb
],
5400 sbitmap_intersection_of_succs (temp_bitmap
[bb
], post_dominators
, bb
);
5401 SET_BIT (temp_bitmap
[bb
], bb
);
5402 changed
|= sbitmap_a_and_b (post_dominators
[bb
],
5403 post_dominators
[bb
],
5412 /* Given DOMINATORS, compute the immediate dominators into IDOM. */
5415 compute_immediate_dominators (idom
, dominators
)
5417 sbitmap
*dominators
;
5422 tmp
= sbitmap_vector_alloc (n_basic_blocks
, n_basic_blocks
);
5424 /* Begin with tmp(n) = dom(n) - { n }. */
5425 for (b
= n_basic_blocks
; --b
>= 0; )
5427 sbitmap_copy (tmp
[b
], dominators
[b
]);
5428 RESET_BIT (tmp
[b
], b
);
5431 /* Subtract out all of our dominator's dominators. */
5432 for (b
= n_basic_blocks
; --b
>= 0; )
5434 sbitmap tmp_b
= tmp
[b
];
5437 for (s
= n_basic_blocks
; --s
>= 0; )
5438 if (TEST_BIT (tmp_b
, s
))
5439 sbitmap_difference (tmp_b
, tmp_b
, tmp
[s
]);
5442 /* Find the one bit set in the bitmap and put it in the output array. */
5443 for (b
= n_basic_blocks
; --b
>= 0; )
5446 EXECUTE_IF_SET_IN_SBITMAP (tmp
[b
], 0, t
, { idom
[b
] = t
; });
5449 sbitmap_vector_free (tmp
);
5452 /* Count for a single SET rtx, X. */
5455 count_reg_sets_1 (x
)
5459 register rtx reg
= SET_DEST (x
);
5461 /* Find the register that's set/clobbered. */
5462 while (GET_CODE (reg
) == SUBREG
|| GET_CODE (reg
) == ZERO_EXTRACT
5463 || GET_CODE (reg
) == SIGN_EXTRACT
5464 || GET_CODE (reg
) == STRICT_LOW_PART
)
5465 reg
= XEXP (reg
, 0);
5467 if (GET_CODE (reg
) == PARALLEL
5468 && GET_MODE (reg
) == BLKmode
)
5471 for (i
= XVECLEN (reg
, 0) - 1; i
>= 0; i
--)
5472 count_reg_sets_1 (XVECEXP (reg
, 0, i
));
5476 if (GET_CODE (reg
) == REG
)
5478 regno
= REGNO (reg
);
5479 if (regno
>= FIRST_PSEUDO_REGISTER
)
5481 /* Count (weighted) references, stores, etc. This counts a
5482 register twice if it is modified, but that is correct. */
5483 REG_N_SETS (regno
)++;
5485 REG_N_REFS (regno
) += loop_depth
;
5490 /* Increment REG_N_SETS for each SET or CLOBBER found in X; also increment
5491 REG_N_REFS by the current loop depth for each SET or CLOBBER found. */
5497 register RTX_CODE code
= GET_CODE (x
);
5499 if (code
== SET
|| code
== CLOBBER
)
5500 count_reg_sets_1 (x
);
5501 else if (code
== PARALLEL
)
5504 for (i
= XVECLEN (x
, 0) - 1; i
>= 0; i
--)
5506 code
= GET_CODE (XVECEXP (x
, 0, i
));
5507 if (code
== SET
|| code
== CLOBBER
)
5508 count_reg_sets_1 (XVECEXP (x
, 0, i
));
5513 /* Increment REG_N_REFS by the current loop depth each register reference
5517 count_reg_references (x
)
5520 register RTX_CODE code
;
5523 code
= GET_CODE (x
);
5543 /* If we are clobbering a MEM, mark any registers inside the address
5545 if (GET_CODE (XEXP (x
, 0)) == MEM
)
5546 count_reg_references (XEXP (XEXP (x
, 0), 0));
5550 /* While we're here, optimize this case. */
5553 /* In case the SUBREG is not of a register, don't optimize */
5554 if (GET_CODE (x
) != REG
)
5556 count_reg_references (x
);
5560 /* ... fall through ... */
5563 if (REGNO (x
) >= FIRST_PSEUDO_REGISTER
)
5564 REG_N_REFS (REGNO (x
)) += loop_depth
;
5569 register rtx testreg
= SET_DEST (x
);
5572 /* If storing into MEM, don't show it as being used. But do
5573 show the address as being used. */
5574 if (GET_CODE (testreg
) == MEM
)
5576 count_reg_references (XEXP (testreg
, 0));
5577 count_reg_references (SET_SRC (x
));
5581 /* Storing in STRICT_LOW_PART is like storing in a reg
5582 in that this SET might be dead, so ignore it in TESTREG.
5583 but in some other ways it is like using the reg.
5585 Storing in a SUBREG or a bit field is like storing the entire
5586 register in that if the register's value is not used
5587 then this SET is not needed. */
5588 while (GET_CODE (testreg
) == STRICT_LOW_PART
5589 || GET_CODE (testreg
) == ZERO_EXTRACT
5590 || GET_CODE (testreg
) == SIGN_EXTRACT
5591 || GET_CODE (testreg
) == SUBREG
)
5593 /* Modifying a single register in an alternate mode
5594 does not use any of the old value. But these other
5595 ways of storing in a register do use the old value. */
5596 if (GET_CODE (testreg
) == SUBREG
5597 && !(REG_SIZE (SUBREG_REG (testreg
)) > REG_SIZE (testreg
)))
5602 testreg
= XEXP (testreg
, 0);
5605 /* If this is a store into a register,
5606 recursively scan the value being stored. */
5608 if ((GET_CODE (testreg
) == PARALLEL
5609 && GET_MODE (testreg
) == BLKmode
)
5610 || GET_CODE (testreg
) == REG
)
5612 count_reg_references (SET_SRC (x
));
5614 count_reg_references (SET_DEST (x
));
5624 /* Recursively scan the operands of this expression. */
5627 register const char *fmt
= GET_RTX_FORMAT (code
);
5630 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
5634 /* Tail recursive case: save a function call level. */
5640 count_reg_references (XEXP (x
, i
));
5642 else if (fmt
[i
] == 'E')
5645 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
5646 count_reg_references (XVECEXP (x
, i
, j
));
5652 /* Recompute register set/reference counts immediately prior to register
5655 This avoids problems with set/reference counts changing to/from values
5656 which have special meanings to the register allocators.
5658 Additionally, the reference counts are the primary component used by the
5659 register allocators to prioritize pseudos for allocation to hard regs.
5660 More accurate reference counts generally lead to better register allocation.
5662 F is the first insn to be scanned.
5663 LOOP_STEP denotes how much loop_depth should be incremented per
5664 loop nesting level in order to increase the ref count more for references
5667 It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and
5668 possibly other information which is used by the register allocators. */
5671 recompute_reg_usage (f
, loop_step
)
5678 /* Clear out the old data. */
5679 max_reg
= max_reg_num ();
5680 for (i
= FIRST_PSEUDO_REGISTER
; i
< max_reg
; i
++)
5686 /* Scan each insn in the chain and count how many times each register is
5689 for (insn
= f
; insn
; insn
= NEXT_INSN (insn
))
5691 /* Keep track of loop depth. */
5692 if (GET_CODE (insn
) == NOTE
)
5694 /* Look for loop boundaries. */
5695 if (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_LOOP_END
)
5696 loop_depth
-= loop_step
;
5697 else if (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_LOOP_BEG
)
5698 loop_depth
+= loop_step
;
5700 /* If we have LOOP_DEPTH == 0, there has been a bookkeeping error.
5701 Abort now rather than setting register status incorrectly. */
5702 if (loop_depth
== 0)
5705 else if (GET_RTX_CLASS (GET_CODE (insn
)) == 'i')
5709 /* This call will increment REG_N_SETS for each SET or CLOBBER
5710 of a register in INSN. It will also increment REG_N_REFS
5711 by the loop depth for each set of a register in INSN. */
5712 count_reg_sets (PATTERN (insn
));
5714 /* count_reg_sets does not detect autoincrement address modes, so
5715 detect them here by looking at the notes attached to INSN. */
5716 for (links
= REG_NOTES (insn
); links
; links
= XEXP (links
, 1))
5718 if (REG_NOTE_KIND (links
) == REG_INC
)
5719 /* Count (weighted) references, stores, etc. This counts a
5720 register twice if it is modified, but that is correct. */
5721 REG_N_SETS (REGNO (XEXP (links
, 0)))++;
5724 /* This call will increment REG_N_REFS by the current loop depth for
5725 each reference to a register in INSN. */
5726 count_reg_references (PATTERN (insn
));
5728 /* count_reg_references will not include counts for arguments to
5729 function calls, so detect them here by examining the
5730 CALL_INSN_FUNCTION_USAGE data. */
5731 if (GET_CODE (insn
) == CALL_INSN
)
5735 for (note
= CALL_INSN_FUNCTION_USAGE (insn
);
5737 note
= XEXP (note
, 1))
5738 if (GET_CODE (XEXP (note
, 0)) == USE
)
5739 count_reg_references (XEXP (XEXP (note
, 0), 0));
5745 /* Optionally removes all the REG_DEAD and REG_UNUSED notes from a set of
5746 blocks. If BLOCKS is NULL, assume the universal set. Returns a count
5747 of the number of registers that died. */
5750 count_or_remove_death_notes (blocks
, kill
)
5756 for (i
= n_basic_blocks
- 1; i
>= 0; --i
)
5761 if (blocks
&& ! TEST_BIT (blocks
, i
))
5764 bb
= BASIC_BLOCK (i
);
5766 for (insn
= bb
->head
; ; insn
= NEXT_INSN (insn
))
5768 if (GET_RTX_CLASS (GET_CODE (insn
)) == 'i')
5770 rtx
*pprev
= ®_NOTES (insn
);
5775 switch (REG_NOTE_KIND (link
))
5778 if (GET_CODE (XEXP (link
, 0)) == REG
)
5780 rtx reg
= XEXP (link
, 0);
5783 if (REGNO (reg
) >= FIRST_PSEUDO_REGISTER
)
5786 n
= HARD_REGNO_NREGS (REGNO (reg
), GET_MODE (reg
));
5794 rtx next
= XEXP (link
, 1);
5795 free_EXPR_LIST_node (link
);
5796 *pprev
= link
= next
;
5802 pprev
= &XEXP (link
, 1);
5809 if (insn
== bb
->end
)
5817 /* Record INSN's block as BB. */
5820 set_block_for_insn (insn
, bb
)
5824 size_t uid
= INSN_UID (insn
)
;
/* Grow the uid -> basic_block map when this insn's uid is beyond the
   current vector size.  */
5825 if (uid
>= basic_block_for_insn
->num_elements
)
5829 /* Add one-eighth the size so we don't keep calling xrealloc. */
5830 new_size
= uid
+ (uid
+ 7) / 8;
5832 VARRAY_GROW (basic_block_for_insn
, new_size
)
;
5834 VARRAY_BB (basic_block_for_insn
, uid
) = bb
;
5837 /* Record INSN's block number as BB. */
5838 /* ??? This has got to go. */
/* Compatibility wrapper: translate a block number BB into a basic_block
   and delegate to set_block_for_insn.  */
5841 set_block_num (insn
, bb
)
5845 set_block_for_insn (insn
, BASIC_BLOCK (bb
));
5848 /* Verify the CFG consistency. This function checks some CFG invariants and
5849 aborts when something is wrong. Hope that this function will help to
5850 convert many optimization passes to preserve CFG consistency.
5852 Currently it does following checks:
5854 - test head/end pointers
5855 - overlapping of basic blocks
5856 - edge list correctness
5857 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
5858 - tails of basic blocks (ensure that boundary is necessary)
5859 - scans body of the basic block for JUMP_INSN, CODE_LABEL
5860 and NOTE_INSN_BASIC_BLOCK
5861 - check that all insns are in the basic blocks
5862 (except the switch handling code, barriers and notes)
5864 In future it can be extended check a lot of other stuff as well
5865 (reachability of basic blocks, life information, etc. etc.). */
5870 const int max_uid
= get_max_uid ();
5871 const rtx rtx_first
= get_insns ();
5872 basic_block
*bb_info
;
5876 bb_info
= (basic_block
*) xcalloc (max_uid
, sizeof (basic_block
));
5878 /* First pass check head/end pointers and set bb_info array used by
5880 for (i
= n_basic_blocks
- 1; i
>= 0; i
--)
5882 basic_block bb
= BASIC_BLOCK (i
);
5884 /* Check the head pointer and make sure that it is pointing into
5886 for (x
= rtx_first
; x
!= NULL_RTX
; x
= NEXT_INSN (x
))
5891 error ("Head insn %d for block %d not found in the insn stream.",
5892 INSN_UID (bb
->head
), bb
->index
);
5896 /* Check the end pointer and make sure that it is pointing into
5898 for (x
= bb
->head
; x
!= NULL_RTX
; x
= NEXT_INSN (x
))
5900 if (bb_info
[INSN_UID (x
)] != NULL
)
5902 error ("Insn %d is in multiple basic blocks (%d and %d)",
5903 INSN_UID (x
), bb
->index
, bb_info
[INSN_UID (x
)]->index
);
5906 bb_info
[INSN_UID (x
)] = bb
;
5913 error ("End insn %d for block %d not found in the insn stream.",
5914 INSN_UID (bb
->end
), bb
->index
);
5919 /* Now check the basic blocks (boundaries etc.) */
5920 for (i
= n_basic_blocks
- 1; i
>= 0; i
--)
5922 basic_block bb
= BASIC_BLOCK (i
);
5923 /* Check correctness of edge lists */
5931 fprintf (stderr
, "verify_flow_info: Basic block %d succ edge is corrupted\n",
5933 fprintf (stderr
, "Predecessor: ");
5934 dump_edge_info (stderr
, e
, 0);
5935 fprintf (stderr
, "\nSuccessor: ");
5936 dump_edge_info (stderr
, e
, 1);
5940 if (e
->dest
!= EXIT_BLOCK_PTR
)
5942 edge e2
= e
->dest
->pred
;
5943 while (e2
&& e2
!= e
)
5947 error ("Basic block %i edge lists are corrupted", bb
->index
);
5959 error ("Basic block %d pred edge is corrupted", bb
->index
);
5960 fputs ("Predecessor: ", stderr
);
5961 dump_edge_info (stderr
, e
, 0);
5962 fputs ("\nSuccessor: ", stderr
);
5963 dump_edge_info (stderr
, e
, 1);
5964 fputc ('\n', stderr
);
5967 if (e
->src
!= ENTRY_BLOCK_PTR
)
5969 edge e2
= e
->src
->succ
;
5970 while (e2
&& e2
!= e
)
5974 error ("Basic block %i edge lists are corrupted", bb
->index
);
5981 /* OK pointers are correct. Now check the header of basic
5982 block. It ought to contain optional CODE_LABEL followed
5983 by NOTE_BASIC_BLOCK. */
5985 if (GET_CODE (x
) == CODE_LABEL
)
5989 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
5995 if (GET_CODE (x
) != NOTE
5996 || NOTE_LINE_NUMBER (x
) != NOTE_INSN_BASIC_BLOCK
5997 || NOTE_BASIC_BLOCK (x
) != bb
)
5999 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d\n",
6006 /* Do checks for empty blocks here */
6013 if (GET_CODE (x
) == NOTE
6014 && NOTE_LINE_NUMBER (x
) == NOTE_INSN_BASIC_BLOCK
)
6016 error ("NOTE_INSN_BASIC_BLOCK %d in the middle of basic block %d",
6017 INSN_UID (x
), bb
->index
);
6024 if (GET_CODE (x
) == JUMP_INSN
6025 || GET_CODE (x
) == CODE_LABEL
6026 || GET_CODE (x
) == BARRIER
)
6028 error ("In basic block %d:", bb
->index
);
6029 fatal_insn ("Flow control insn inside a basic block", x
);
6040 if (!bb_info
[INSN_UID (x
)])
6042 switch (GET_CODE (x
))
6049 /* An addr_vec is placed outside any basic block. */
6051 && GET_CODE (NEXT_INSN (x
)) == JUMP_INSN
6052 && (GET_CODE (PATTERN (NEXT_INSN (x
))) == ADDR_DIFF_VEC
6053 || GET_CODE (PATTERN (NEXT_INSN (x
))) == ADDR_VEC
))
6058 /* But in any case, non-deletable labels can appear anywhere. */
6062 fatal_insn ("Insn outside basic block", x
);
6076 /* Functions to access an edge list with a vector representation.
6077 Enough data is kept such that given an index number, the
6078 pred and succ that edge represents can be determined, or
6079 given a pred and a succ, its index number can be returned.
6080 This allows algorithms which consume a lot of memory to
6081 represent the normally full matrix of edge (pred,succ) with a
6082 single indexed vector, edge (EDGE_INDEX (pred, succ)), with no
6083 wasted space in the client code due to sparse flow graphs. */
6085 /* This function initializes the edge list. Basically the entire
6086 flowgraph is processed, and all edges are assigned a number,
6087 and the data structure is filled in. */
6091 struct edge_list
*elist
;
6097 block_count
= n_basic_blocks
+ 2; /* Include the entry and exit blocks. */
6101 /* Determine the number of edges in the flow graph by counting successor
6102 edges on each basic block. */
6103 for (x
= 0; x
< n_basic_blocks
; x
++)
6105 basic_block bb
= BASIC_BLOCK (x
);
6107 for (e
= bb
->succ
; e
; e
= e
->succ_next
)
6110 /* Don't forget successors of the entry block. */
6111 for (e
= ENTRY_BLOCK_PTR
->succ
; e
; e
= e
->succ_next
)
6114 elist
= xmalloc (sizeof (struct edge_list
));
6115 elist
->num_blocks
= block_count
;
6116 elist
->num_edges
= num_edges
;
6117 elist
->index_to_edge
= xmalloc (sizeof (edge
) * num_edges
);
6121 /* Follow successors of the entry block, and register these edges. */
6122 for (e
= ENTRY_BLOCK_PTR
->succ
; e
; e
= e
->succ_next
)
6124 elist
->index_to_edge
[num_edges
] = e
;
6128 for (x
= 0; x
< n_basic_blocks
; x
++)
6130 basic_block bb
= BASIC_BLOCK (x
);
6132 /* Follow all successors of blocks, and register these edges. */
6133 for (e
= bb
->succ
; e
; e
= e
->succ_next
)
6135 elist
->index_to_edge
[num_edges
] = e
;
6142 /* This function frees memory associated with an edge list. */
6144 free_edge_list (elist
)
6145 struct edge_list
*elist
;
/* Release the index -> edge vector.  The free of ELIST itself is on
   lines elided from this view.  */
6149 free (elist
->index_to_edge
)
;
6154 /* This function provides debug output showing an edge list. */
6156 print_edge_list (f
, elist
)
6158 struct edge_list
*elist
;
6161 fprintf(f
, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
6162 elist
->num_blocks
- 2, elist
->num_edges
);
6164 for (x
= 0; x
< elist
->num_edges
; x
++)
6166 fprintf (f
, " %-4d - edge(", x
);
6167 if (INDEX_EDGE_PRED_BB (elist
, x
) == ENTRY_BLOCK_PTR
)
6168 fprintf (f
,"entry,");
6170 fprintf (f
,"%d,", INDEX_EDGE_PRED_BB (elist
, x
)->index
);
6172 if (INDEX_EDGE_SUCC_BB (elist
, x
) == EXIT_BLOCK_PTR
)
6173 fprintf (f
,"exit)\n");
6175 fprintf (f
,"%d)\n", INDEX_EDGE_SUCC_BB (elist
, x
)->index
);
6179 /* This function provides an internal consistency check of an edge list,
6180 verifying that all edges are present, and that there are no
6183 verify_edge_list (f
, elist
)
6185 struct edge_list
*elist
;
6187 int x
, pred
, succ
, index
;
6190 for (x
= 0; x
< n_basic_blocks
; x
++)
6192 basic_block bb
= BASIC_BLOCK (x
);
6194 for (e
= bb
->succ
; e
; e
= e
->succ_next
)
6196 pred
= e
->src
->index
;
6197 succ
= e
->dest
->index
;
6198 index
= EDGE_INDEX (elist
, e
->src
, e
->dest
);
6199 if (index
== EDGE_INDEX_NO_EDGE
)
6201 fprintf (f
, "*p* No index for edge from %d to %d\n",pred
, succ
);
6204 if (INDEX_EDGE_PRED_BB (elist
, index
)->index
!= pred
)
6205 fprintf (f
, "*p* Pred for index %d should be %d not %d\n",
6206 index
, pred
, INDEX_EDGE_PRED_BB (elist
, index
)->index
);
6207 if (INDEX_EDGE_SUCC_BB (elist
, index
)->index
!= succ
)
6208 fprintf (f
, "*p* Succ for index %d should be %d not %d\n",
6209 index
, succ
, INDEX_EDGE_SUCC_BB (elist
, index
)->index
);
6212 for (e
= ENTRY_BLOCK_PTR
->succ
; e
; e
= e
->succ_next
)
6214 pred
= e
->src
->index
;
6215 succ
= e
->dest
->index
;
6216 index
= EDGE_INDEX (elist
, e
->src
, e
->dest
);
6217 if (index
== EDGE_INDEX_NO_EDGE
)
6219 fprintf (f
, "*p* No index for edge from %d to %d\n",pred
, succ
);
6222 if (INDEX_EDGE_PRED_BB (elist
, index
)->index
!= pred
)
6223 fprintf (f
, "*p* Pred for index %d should be %d not %d\n",
6224 index
, pred
, INDEX_EDGE_PRED_BB (elist
, index
)->index
);
6225 if (INDEX_EDGE_SUCC_BB (elist
, index
)->index
!= succ
)
6226 fprintf (f
, "*p* Succ for index %d should be %d not %d\n",
6227 index
, succ
, INDEX_EDGE_SUCC_BB (elist
, index
)->index
);
6229 /* We've verified that all the edges are in the list, now let's make sure
6230 there are no spurious edges in the list. */
6232 for (pred
= 0 ; pred
< n_basic_blocks
; pred
++)
6233 for (succ
= 0 ; succ
< n_basic_blocks
; succ
++)
6235 basic_block p
= BASIC_BLOCK (pred
);
6236 basic_block s
= BASIC_BLOCK (succ
);
6240 for (e
= p
->succ
; e
; e
= e
->succ_next
)
6246 for (e
= s
->pred
; e
; e
= e
->pred_next
)
6252 if (EDGE_INDEX (elist
, BASIC_BLOCK (pred
), BASIC_BLOCK (succ
))
6253 == EDGE_INDEX_NO_EDGE
&& found_edge
!= 0)
6254 fprintf (f
, "*** Edge (%d, %d) appears to not have an index\n",
6256 if (EDGE_INDEX (elist
, BASIC_BLOCK (pred
), BASIC_BLOCK (succ
))
6257 != EDGE_INDEX_NO_EDGE
&& found_edge
== 0)
6258 fprintf (f
, "*** Edge (%d, %d) has index %d, but there is no edge\n",
6259 pred
, succ
, EDGE_INDEX (elist
, BASIC_BLOCK (pred
),
6260 BASIC_BLOCK (succ
)));
6262 for (succ
= 0 ; succ
< n_basic_blocks
; succ
++)
6264 basic_block p
= ENTRY_BLOCK_PTR
;
6265 basic_block s
= BASIC_BLOCK (succ
);
6269 for (e
= p
->succ
; e
; e
= e
->succ_next
)
6275 for (e
= s
->pred
; e
; e
= e
->pred_next
)
6281 if (EDGE_INDEX (elist
, ENTRY_BLOCK_PTR
, BASIC_BLOCK (succ
))
6282 == EDGE_INDEX_NO_EDGE
&& found_edge
!= 0)
6283 fprintf (f
, "*** Edge (entry, %d) appears to not have an index\n",
6285 if (EDGE_INDEX (elist
, ENTRY_BLOCK_PTR
, BASIC_BLOCK (succ
))
6286 != EDGE_INDEX_NO_EDGE
&& found_edge
== 0)
6287 fprintf (f
, "*** Edge (entry, %d) has index %d, but no edge exists\n",
6288 succ
, EDGE_INDEX (elist
, ENTRY_BLOCK_PTR
,
6289 BASIC_BLOCK (succ
)));
6291 for (pred
= 0 ; pred
< n_basic_blocks
; pred
++)
6293 basic_block p
= BASIC_BLOCK (pred
);
6294 basic_block s
= EXIT_BLOCK_PTR
;
6298 for (e
= p
->succ
; e
; e
= e
->succ_next
)
6304 for (e
= s
->pred
; e
; e
= e
->pred_next
)
6310 if (EDGE_INDEX (elist
, BASIC_BLOCK (pred
), EXIT_BLOCK_PTR
)
6311 == EDGE_INDEX_NO_EDGE
&& found_edge
!= 0)
6312 fprintf (f
, "*** Edge (%d, exit) appears to not have an index\n",
6314 if (EDGE_INDEX (elist
, BASIC_BLOCK (pred
), EXIT_BLOCK_PTR
)
6315 != EDGE_INDEX_NO_EDGE
&& found_edge
== 0)
6316 fprintf (f
, "*** Edge (%d, exit) has index %d, but no edge exists\n",
6317 pred
, EDGE_INDEX (elist
, BASIC_BLOCK (pred
),
6322 /* This routine will determine what, if any, edge there is between
6323 a specified predecessor and successor. */
/* Linear scan over the compressed edge list; falls through to return
   EDGE_INDEX_NO_EDGE when no (PRED, SUCC) edge is recorded.  The return
   of the matching index is on a line elided from this view.  */
6326 find_edge_index (edge_list
, pred
, succ
)
6327 struct edge_list
*edge_list
;
6328 basic_block pred
, succ
;
6331 for (x
= 0; x
< NUM_EDGES (edge_list
); x
++)
6333 if (INDEX_EDGE_PRED_BB (edge_list
, x
) == pred
6334 && INDEX_EDGE_SUCC_BB (edge_list
, x
) == succ
)
6337 return (EDGE_INDEX_NO_EDGE
)
;
6340 /* This function will remove an edge from the flow graph. */
6345 edge last_pred
= NULL
;
6346 edge last_succ
= NULL
;
6348 basic_block src
, dest
;
6351 for (tmp
= src
->succ
; tmp
&& tmp
!= e
; tmp
= tmp
->succ_next
)
6357 last_succ
->succ_next
= e
->succ_next
;
6359 src
->succ
= e
->succ_next
;
6361 for (tmp
= dest
->pred
; tmp
&& tmp
!= e
; tmp
= tmp
->pred_next
)
6367 last_pred
->pred_next
= e
->pred_next
;
6369 dest
->pred
= e
->pred_next
;
6375 /* This routine will remove any fake successor edges for a basic block.
6376 When the edge is removed, it is also removed from whatever predecessor
6379 remove_fake_successors (bb
)
/* E has no increment expression here: the loop body (elided from this
   view) advances E before the current edge can be unlinked.  */
6383 for (e
= bb
->succ
; e
; )
/* Only edges carrying the EDGE_FAKE flag are removed.  */
6387 if ((tmp
->flags
& EDGE_FAKE
) == EDGE_FAKE
)
6392 /* This routine will remove all fake edges from the flow graph. If
6393 we remove all fake successors, it will automatically remove all
6394 fake predecessors. */
6396 remove_fake_edges ()
/* Strip fake successor edges from every ordinary basic block.  */
6400 for (x
= 0; x
< n_basic_blocks
; x
++)
6401 remove_fake_successors (BASIC_BLOCK (x
));
6403 /* We've handled all successors except the entry block's. */
6404 remove_fake_successors (ENTRY_BLOCK_PTR
)
;
6407 /* This function will add a fake edge between any block which has no
6408 successors, and the exit block. Some data flow equations require these
6411 add_noreturn_fake_exit_edges ()
/* Any block whose successor list is NULL (e.g. one ending in a call to
   a noreturn function) gets a fake edge to the exit block.  */
6415 for (x
= 0; x
< n_basic_blocks
; x
++)
6416 if (BASIC_BLOCK (x
)->succ
== NULL
)
6417 make_edge (NULL
, BASIC_BLOCK (x
), EXIT_BLOCK_PTR
, EDGE_FAKE
)
;