/* Data flow analysis for GNU compiler.
   Copyright (C) 1987, 88, 92-98, 1999 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

/* This file contains the data flow analysis pass of the compiler.  It
   computes data flow information which tells combine_instructions
   which insns to consider combining and controls register allocation.

   Additional data flow information that is too bulky to record is
   generated during the analysis, and is used at that time to create
   autoincrement and autodecrement addressing.

   The first step is dividing the function into basic blocks.
   find_basic_blocks does this.  Then life_analysis determines
   where each register is live and where it is dead.

   ** find_basic_blocks **

   find_basic_blocks divides the current function's rtl into basic
   blocks and constructs the CFG.  The blocks are recorded in the
   basic_block_info array; the CFG exists in the edge structures
   referenced by the blocks.

   find_basic_blocks also finds any unreachable loops and deletes them.

   ** life_analysis **

   life_analysis is called immediately after find_basic_blocks.
   It uses the basic block information to determine where each
   hard or pseudo register is live.

   ** live-register info **

   The information about where each register is live is in two parts:
   the REG_NOTES of insns, and the vector basic_block->global_live_at_start.

   basic_block->global_live_at_start has an element for each basic
   block, and the element is a bit-vector with a bit for each hard or
   pseudo register.  The bit is 1 if the register is live at the
   beginning of the basic block.

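   For example, whether pseudo-register 42 is live on entry to basic block
   number I can be tested with the regset accessors used throughout this
   file (an illustrative sketch, not code taken from the pass itself):

	if (REGNO_REG_SET_P (BASIC_BLOCK (I)->global_live_at_start, 42))
	  ...register 42 is live at the start of block I...
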
   Two types of elements can be added to an insn's REG_NOTES.
   A REG_DEAD note is added to an insn's REG_NOTES for any register
   that meets both of two conditions:  The value in the register is not
   needed in subsequent insns and the insn does not replace the value in
   the register (in the case of multi-word hard registers, the value in
   each register must be replaced by the insn to avoid a REG_DEAD note).

   In the vast majority of cases, an object in a REG_DEAD note will be
   used somewhere in the insn.  The (rare) exception to this is if an
   insn uses a multi-word hard register and only some of the registers are
   needed in subsequent insns.  In that case, REG_DEAD notes will be
   provided for those hard registers that are not subsequently needed.
   Partial REG_DEAD notes of this type do not occur when an insn sets
   only some of the hard registers used in such a multi-word operand;
   omitting REG_DEAD notes for objects stored in an insn is optional and
   the desire to do so does not justify the complexity of the partial
   REG_DEAD notes.

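   A consumer can ask whether a particular register X dies in an insn by
   searching that insn's notes, e.g. (a sketch; find_regno_note is the
   generic rtl helper, not something defined in this file):

	if (find_regno_note (insn, REG_DEAD, REGNO (x)))
	  ...the value in x is not needed after this insn...
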
   REG_UNUSED notes are added for each register that is set by the insn
   but is unused subsequently (if every register set by the insn is unused
   and the insn does not reference memory or have some other side-effect,
   the insn is deleted instead).  If only part of a multi-word hard
   register is used in a subsequent insn, REG_UNUSED notes are made for
   the parts that will not be used.

   To determine which registers are live after any insn, one can
   start from the beginning of the basic block and scan insns, noting
   which registers are set by each insn and which die there.

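   In outline, such a forward scan might look like the following sketch
   (illustrative only; the bookkeeping actually performed by this pass is
   the backward scan in propagate_block).  Registers named in an insn's
   REG_DEAD notes die at that insn, and registers set by the insn and not
   marked REG_UNUSED become live after it:

	COPY_REG_SET (live, bb->global_live_at_start);
	for (insn = bb->head; ; insn = NEXT_INSN (insn))
	  {
	    if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	      {
		rtx note;
		for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
		  if (REG_NOTE_KIND (note) == REG_DEAD)
		    CLEAR_REGNO_REG_SET (live, REGNO (XEXP (note, 0)));
		note_stores (PATTERN (insn), record_one_set_live);
	      }
	    if (insn == bb->end)
	      break;
	  }

   where record_one_set_live stands for a hypothetical callback that sets
   the bit for each register stored by the insn.
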
   ** Other actions of life_analysis **

   life_analysis sets up the LOG_LINKS fields of insns because the
   information needed to do so is readily available.

   life_analysis deletes insns whose only effect is to store a value
   that is never used.

   life_analysis notices cases where a reference to a register as
   a memory address can be combined with a preceding or following
   incrementation or decrementation of the register.  The separate
   instruction to increment or decrement is deleted and the address
   is changed to a POST_INC or similar rtx.

   Each time an incrementing or decrementing address is created,
   a REG_INC element is added to the insn's REG_NOTES list.

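   For instance (a hand-written sketch of the idea, not a dump produced by
   this pass), the two insns

	(set (reg:SI 60) (mem:SI (reg:SI 70)))
	(set (reg:SI 70) (plus:SI (reg:SI 70) (const_int 4)))

   can be collapsed into the single insn

	(set (reg:SI 60) (mem:SI (post_inc:SI (reg:SI 70))))

   which then carries a REG_INC note naming (reg:SI 70).
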
   life_analysis fills in certain vectors containing information about
   register usage: reg_n_refs, reg_n_deaths, reg_n_sets, reg_live_length,
   reg_n_calls_crossed and reg_basic_block.

   life_analysis sets current_function_sp_is_unchanging if the function
   doesn't modify the stack pointer.  */

/* TODO:

   Split out from life_analysis:
	- local property discovery (bb->local_live, bb->local_set)
	- global property computation
	- pre/post modify transformation  */

124 #include "basic-block.h"
125 #include "insn-config.h"
127 #include "hard-reg-set.h"
133 #include "insn-flags.h"
136 #define obstack_chunk_alloc xmalloc
137 #define obstack_chunk_free free
/* EXIT_IGNORE_STACK should be nonzero if, when returning from a function,
   the stack pointer does not matter.  The value is tested only in
   functions that have frame pointers.
   No definition is equivalent to always zero.  */
#ifndef EXIT_IGNORE_STACK
#define EXIT_IGNORE_STACK 0
#endif

/* The contents of the current function definition are allocated
   in this obstack, and all are freed at the end of the function.
   For top-level functions, this is temporary_obstack.
   Separate obstacks are made for nested functions.  */

extern struct obstack *function_obstack;

/* List of labels that must never be deleted.  */
extern rtx forced_labels;

/* Number of basic blocks in the current function.  */

int n_basic_blocks;

/* The basic block array.  */

varray_type basic_block_info;

/* The special entry and exit blocks.  */

struct basic_block_def entry_exit_blocks[2] =
{
  {
    NULL,			/* local_set */
    NULL,			/* global_live_at_start */
    NULL,			/* global_live_at_end */
    ENTRY_BLOCK,		/* index */
  },
  {
    NULL,			/* local_set */
    NULL,			/* global_live_at_start */
    NULL,			/* global_live_at_end */
    EXIT_BLOCK,			/* index */
  }
};

/* Nonzero if the second flow pass has completed.  */

/* Maximum register number used in this function, plus one.  */

/* Indexed by n, giving various register information */

varray_type reg_n_info;

/* Size of the reg_n_info table.  */

unsigned int reg_n_max;

/* Element N is the next insn that uses (hard or pseudo) register number N
   within the current basic block; or zero, if there is no such insn.
   This is valid only during the final backward scan in propagate_block.  */

static rtx *reg_next_use;

/* Size of a regset for the current function,
   in (1) bytes and (2) elements.  */

/* Regset of regs live when calls to `setjmp'-like functions happen.  */
/* ??? Does this exist only for the setjmp-clobbered warning message?  */

regset regs_live_at_setjmp;

/* List made of EXPR_LIST rtx's which gives pairs of pseudo registers
   that have to go in the same hard reg.
   The first two regs in the list are a pair, and the next two
   are another pair, etc.  */

/* Depth within loops of basic block being scanned for lifetime analysis,
   plus one.  This is the weight attached to references to registers.  */

static int loop_depth;

/* During propagate_block, this is non-zero if the value of CC0 is live.  */

/* During propagate_block, this contains a list of all the MEMs we are
   tracking for dead store elimination.

   ?!? Note we leak memory by not free-ing items on this list.  We need to
   write some generic routines to operate on memory lists since cse, gcse,
   loop, sched, flow and possibly other passes all need to do basically the
   same operations on these lists.  */

static rtx mem_set_list;

/* Set of registers that may be eliminable.  These are handled specially
   in updating regs_ever_live.  */

static HARD_REG_SET elim_reg_set;

/* The basic block structure for every insn, indexed by uid.  */

varray_type basic_block_for_insn;

/* The labels mentioned in non-jump rtl.  Valid during find_basic_blocks.  */
/* ??? Should probably be using LABEL_NUSES instead.  It would take a
   bit of surgery to be able to use or co-opt the routines in jump.  */

static rtx label_value_list;

/* INSN_VOLATILE (insn) is 1 if the insn refers to anything volatile.  */

#define INSN_VOLATILE(INSN) bitmap_bit_p (uid_volatile, INSN_UID (INSN))
#define SET_INSN_VOLATILE(INSN) bitmap_set_bit (uid_volatile, INSN_UID (INSN))
static bitmap uid_volatile;

/* Forward declarations */
static int count_basic_blocks			PROTO((rtx));
static rtx find_basic_blocks_1			PROTO((rtx, rtx *));
static void create_basic_block			PROTO((int, rtx, rtx, rtx));
static void compute_bb_for_insn			PROTO((varray_type, int));
static void clear_edges				PROTO((void));
static void make_edges				PROTO((rtx, rtx *));
static void make_edge				PROTO((basic_block, basic_block, int));
static void make_label_edge			PROTO((basic_block, rtx, int));
static void mark_critical_edges			PROTO((void));
static void commit_one_edge_insertion		PROTO((edge));
static void delete_unreachable_blocks		PROTO((void));
static void delete_eh_regions			PROTO((void));
static int can_delete_note_p			PROTO((rtx));
static void delete_insn_chain			PROTO((rtx, rtx));
static int delete_block				PROTO((basic_block));
static void expunge_block			PROTO((basic_block));
static rtx flow_delete_insn			PROTO((rtx));
static int can_delete_label_p			PROTO((rtx));
static void merge_blocks_nomove			PROTO((basic_block, basic_block));
static int merge_blocks				PROTO((edge, basic_block, basic_block));
static void tidy_fallthru_edge			PROTO((edge, basic_block, basic_block));
static void calculate_loop_depth		PROTO((rtx));

static int set_noop_p				PROTO((rtx));
static int noop_move_p				PROTO((rtx));
static void notice_stack_pointer_modification	PROTO ((rtx, rtx));
static void record_volatile_insns		PROTO((rtx));
static void mark_regs_live_at_end		PROTO((regset));
static void life_analysis_1			PROTO((rtx, int, int));
static void init_regset_vector			PROTO ((regset *, int,
							struct obstack *));
static void propagate_block			PROTO((regset, rtx, rtx, int,
static int insn_dead_p				PROTO((rtx, regset, int, rtx));
static int libcall_dead_p			PROTO((rtx, regset, rtx, rtx));
static void mark_set_regs			PROTO((regset, regset, rtx,
static void mark_set_1				PROTO((regset, regset, rtx,
static void find_auto_inc			PROTO((regset, rtx, rtx));
static int try_pre_increment_1			PROTO((rtx));
static int try_pre_increment			PROTO((rtx, rtx, HOST_WIDE_INT));
static void mark_used_regs			PROTO((regset, regset, rtx, int, rtx));
void dump_flow_info				PROTO((FILE *));
static void dump_edge_info			PROTO((FILE *, edge, int));

static int_list_ptr alloc_int_list_node		PROTO ((int_list_block **));
static int_list_ptr add_int_list_node		PROTO ((int_list_block **,
static void add_pred_succ			PROTO ((int, int, int_list_ptr *,
							int_list_ptr *, int *, int *));

static void count_reg_sets_1			PROTO ((rtx));
static void count_reg_sets			PROTO ((rtx));
static void count_reg_references		PROTO ((rtx));
static void notice_stack_pointer_modification	PROTO ((rtx, rtx));
static void invalidate_mems_from_autoinc	PROTO ((rtx));
void verify_flow_info				PROTO ((void));

/* Find basic blocks of the current function.
   F is the first insn of the function and NREGS the number of register
   numbers in use.  */

find_basic_blocks (f, nregs, file, do_cleanup)
     int nregs ATTRIBUTE_UNUSED;
     FILE *file ATTRIBUTE_UNUSED;

  /* Flush out existing data.  */
  if (basic_block_info != NULL)
    {
      /* Clear bb->aux on all extant basic blocks.  We'll use this as a
	 tag for reuse during create_basic_block, just in case some pass
	 copies around basic block notes improperly.  */
      for (i = 0; i < n_basic_blocks; ++i)
	BASIC_BLOCK (i)->aux = NULL;

      VARRAY_FREE (basic_block_info);
    }

  n_basic_blocks = count_basic_blocks (f);

  /* Size the basic block table.  The actual structures will be allocated
     by find_basic_blocks_1, since we want to keep the structure pointers
     stable across calls to find_basic_blocks.  */
  /* ??? This whole issue would be much simpler if we called find_basic_blocks
     exactly once, and thereafter we don't have a single long chain of
     instructions at all until close to the end of compilation when we
     actually lay them out.  */

  VARRAY_BB_INIT (basic_block_info, n_basic_blocks, "basic_block_info");

  /* An array to record the active exception region at the end of each
     basic block.  It is filled in by find_basic_blocks_1 for make_edges.  */
  bb_eh_end = (rtx *) alloca (n_basic_blocks * sizeof (rtx));

  label_value_list = find_basic_blocks_1 (f, bb_eh_end);

  /* Record the block to which an insn belongs.  */
  /* ??? This should be done another way, by which (perhaps) a label is
     tagged directly with the basic block that it starts.  It is used for
     more than that currently, but IMO that is the only valid use.  */

  max_uid = get_max_uid ();

  /* Leave space for insns life_analysis makes in some cases for auto-inc.
     These cases are rare, so we don't need too much space.  */
  max_uid += max_uid / 10;

  VARRAY_BB_INIT (basic_block_for_insn, max_uid, "basic_block_for_insn");
  compute_bb_for_insn (basic_block_for_insn, max_uid);

  /* Discover the edges of our cfg.  */

  make_edges (label_value_list, bb_eh_end);

  /* Delete unreachable blocks.  */

    delete_unreachable_blocks ();

  /* Mark critical edges.  */

  mark_critical_edges ();

  /* Discover the loop depth at the start of each basic block to aid
     register allocation.  */
  calculate_loop_depth (f);

  /* Kill the data we won't maintain.  */
  label_value_list = 0;

#ifdef ENABLE_CHECKING
428 /* Count the basic blocks of the function. */
431 count_basic_blocks (f
)
435 register RTX_CODE prev_code
;
436 register int count
= 0;
438 int call_had_abnormal_edge
= 0;
439 rtx prev_call
= NULL_RTX
;
441 prev_code
= JUMP_INSN
;
442 for (insn
= f
; insn
; insn
= NEXT_INSN (insn
))
444 register RTX_CODE code
= GET_CODE (insn
);
446 if (code
== CODE_LABEL
447 || (GET_RTX_CLASS (code
) == 'i'
448 && (prev_code
== JUMP_INSN
449 || prev_code
== BARRIER
450 || (prev_code
== CALL_INSN
&& call_had_abnormal_edge
))))
454 /* If the previous insn was a call that did not create an
455 abnormal edge, we want to add a nop so that the CALL_INSN
456 itself is not at basic_block_end. This allows us to
457 easily distinguish between normal calls and those which
458 create abnormal edges in the flow graph. */
460 if (count
> 0 && prev_call
!= 0 && !call_had_abnormal_edge
)
462 rtx nop
= gen_rtx_USE (VOIDmode
, const0_rtx
);
463 emit_insn_after (nop
, prev_call
);
467 /* Record whether this call created an edge. */
468 if (code
== CALL_INSN
)
470 rtx note
= find_reg_note (insn
, REG_EH_REGION
, NULL_RTX
);
471 int region
= (note
? XINT (XEXP (note
, 0), 0) : 1);
473 call_had_abnormal_edge
= 0;
475 /* If there is a specified EH region, we have an edge. */
476 if (eh_region
&& region
> 0)
477 call_had_abnormal_edge
= 1;
480 /* If there is a nonlocal goto label and the specified
481 region number isn't -1, we have an edge. (0 means
482 no throw, but might have a nonlocal goto). */
483 if (nonlocal_goto_handler_labels
&& region
>= 0)
484 call_had_abnormal_edge
= 1;
487 else if (code
!= NOTE
)
488 prev_call
= NULL_RTX
;
492 else if (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_EH_REGION_BEG
)
494 else if (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_EH_REGION_END
)
  /* The rest of the compiler works a bit more smoothly when we don't have to
     check for the edge case of do-nothing functions with no basic blocks.  */
503 emit_insn (gen_rtx_USE (VOIDmode
, const0_rtx
));
510 /* Find all basic blocks of the function whose first insn is F.
511 Store the correct data in the tables that describe the basic blocks,
512 set up the chains of references for each CODE_LABEL, and
513 delete any entire basic blocks that cannot be reached.
515 NONLOCAL_LABEL_LIST is a list of non-local labels in the function. Blocks
516 that are otherwise unreachable may be reachable with a non-local goto.
518 BB_EH_END is an array in which we record the list of exception regions
519 active at the end of every basic block. */
522 find_basic_blocks_1 (f
, bb_eh_end
)
526 register rtx insn
, next
;
527 int call_has_abnormal_edge
= 0;
529 rtx bb_note
= NULL_RTX
;
530 rtx eh_list
= NULL_RTX
;
531 rtx label_value_list
= NULL_RTX
;
535 /* We process the instructions in a slightly different way than we did
536 previously. This is so that we see a NOTE_BASIC_BLOCK after we have
537 closed out the previous block, so that it gets attached at the proper
538 place. Since this form should be equivalent to the previous,
539 find_basic_blocks_0 continues to use the old form as a check. */
541 for (insn
= f
; insn
; insn
= next
)
543 enum rtx_code code
= GET_CODE (insn
);
545 next
= NEXT_INSN (insn
);
547 if (code
== CALL_INSN
)
549 /* Record whether this call created an edge. */
550 rtx note
= find_reg_note (insn
, REG_EH_REGION
, NULL_RTX
);
551 int region
= (note
? XINT (XEXP (note
, 0), 0) : 1);
552 call_has_abnormal_edge
= 0;
554 /* If there is an EH region, we have an edge. */
555 if (eh_list
&& region
> 0)
556 call_has_abnormal_edge
= 1;
559 /* If there is a nonlocal goto label and the specified
560 region number isn't -1, we have an edge. (0 means
561 no throw, but might have a nonlocal goto). */
562 if (nonlocal_goto_handler_labels
&& region
>= 0)
563 call_has_abnormal_edge
= 1;
571 int kind
= NOTE_LINE_NUMBER (insn
);
573 /* Keep a LIFO list of the currently active exception notes. */
574 if (kind
== NOTE_INSN_EH_REGION_BEG
)
575 eh_list
= gen_rtx_INSN_LIST (VOIDmode
, insn
, eh_list
);
576 else if (kind
== NOTE_INSN_EH_REGION_END
)
577 eh_list
= XEXP (eh_list
, 1);
579 /* Look for basic block notes with which to keep the
580 basic_block_info pointers stable. Unthread the note now;
581 we'll put it back at the right place in create_basic_block.
582 Or not at all if we've already found a note in this block. */
583 else if (kind
== NOTE_INSN_BASIC_BLOCK
)
585 if (bb_note
== NULL_RTX
)
587 next
= flow_delete_insn (insn
);
594 /* A basic block starts at a label. If we've closed one off due
595 to a barrier or some such, no need to do it again. */
596 if (head
!= NULL_RTX
)
598 /* While we now have edge lists with which other portions of
599 the compiler might determine a call ending a basic block
600 does not imply an abnormal edge, it will be a bit before
601 everything can be updated. So continue to emit a noop at
602 the end of such a block. */
603 if (GET_CODE (end
) == CALL_INSN
)
605 rtx nop
= gen_rtx_USE (VOIDmode
, const0_rtx
);
606 end
= emit_insn_after (nop
, end
);
609 bb_eh_end
[i
] = eh_list
;
610 create_basic_block (i
++, head
, end
, bb_note
);
617 /* A basic block ends at a jump. */
618 if (head
== NULL_RTX
)
	  /* ??? Make a special check for table jumps.  The way this
	     happens is truly and amazingly gross.  We are about to
	     create a basic block that contains just a code label and
	     an addr*vec jump insn.  Worse, an addr_diff_vec creates
	     its own natural loop.

	     Prevent this bit of brain damage, pasting things together
	     correctly in make_edges.

	     The correct solution involves emitting the table directly
	     on the tablejump instruction as a note, or JUMP_LABEL.  */
634 if (GET_CODE (PATTERN (insn
)) == ADDR_VEC
635 || GET_CODE (PATTERN (insn
)) == ADDR_DIFF_VEC
)
643 goto new_bb_inclusive
;
646 /* A basic block ends at a barrier. It may be that an unconditional
647 jump already closed the basic block -- no need to do it again. */
648 if (head
== NULL_RTX
)
651 /* While we now have edge lists with which other portions of the
652 compiler might determine a call ending a basic block does not
653 imply an abnormal edge, it will be a bit before everything can
654 be updated. So continue to emit a noop at the end of such a
656 if (GET_CODE (end
) == CALL_INSN
)
658 rtx nop
= gen_rtx_USE (VOIDmode
, const0_rtx
);
659 end
= emit_insn_after (nop
, end
);
661 goto new_bb_exclusive
;
664 /* A basic block ends at a call that can either throw or
665 do a non-local goto. */
666 if (call_has_abnormal_edge
)
669 if (head
== NULL_RTX
)
674 bb_eh_end
[i
] = eh_list
;
675 create_basic_block (i
++, head
, end
, bb_note
);
676 head
= end
= NULL_RTX
;
683 if (GET_RTX_CLASS (code
) == 'i')
685 if (head
== NULL_RTX
)
692 if (GET_RTX_CLASS (code
) == 'i')
696 /* Make a list of all labels referred to other than by jumps
697 (which just don't have the REG_LABEL notes).
699 Make a special exception for labels followed by an ADDR*VEC,
700 as this would be a part of the tablejump setup code.
702 Make a special exception for the eh_return_stub_label, which
703 we know isn't part of any otherwise visible control flow. */
705 for (note
= REG_NOTES (insn
); note
; note
= XEXP (note
, 1))
706 if (REG_NOTE_KIND (note
) == REG_LABEL
)
708 rtx lab
= XEXP (note
, 0), next
;
710 if (lab
== eh_return_stub_label
)
712 else if ((next
= next_nonnote_insn (lab
)) != NULL
713 && GET_CODE (next
) == JUMP_INSN
714 && (GET_CODE (PATTERN (next
)) == ADDR_VEC
715 || GET_CODE (PATTERN (next
)) == ADDR_DIFF_VEC
))
719 = gen_rtx_EXPR_LIST (VOIDmode
, XEXP (note
, 0),
725 if (head
!= NULL_RTX
)
727 bb_eh_end
[i
] = eh_list
;
728 create_basic_block (i
++, head
, end
, bb_note
);
731 if (i
!= n_basic_blocks
)
734 return label_value_list
;
737 /* Create a new basic block consisting of the instructions between
738 HEAD and END inclusive. Reuses the note and basic block struct
739 in BB_NOTE, if any. */
742 create_basic_block (index
, head
, end
, bb_note
)
744 rtx head
, end
, bb_note
;
749 && ! RTX_INTEGRATED_P (bb_note
)
750 && (bb
= NOTE_BASIC_BLOCK (bb_note
)) != NULL
753 /* If we found an existing note, thread it back onto the chain. */
755 if (GET_CODE (head
) == CODE_LABEL
)
756 add_insn_after (bb_note
, head
);
759 add_insn_before (bb_note
, head
);
765 /* Otherwise we must create a note and a basic block structure.
766 Since we allow basic block structs in rtl, give the struct
767 the same lifetime by allocating it off the function obstack
768 rather than using malloc. */
770 bb
= (basic_block
) obstack_alloc (function_obstack
, sizeof (*bb
));
771 memset (bb
, 0, sizeof (*bb
));
773 if (GET_CODE (head
) == CODE_LABEL
)
774 bb_note
= emit_note_after (NOTE_INSN_BASIC_BLOCK
, head
);
777 bb_note
= emit_note_before (NOTE_INSN_BASIC_BLOCK
, head
);
780 NOTE_BASIC_BLOCK (bb_note
) = bb
;
783 /* Always include the bb note in the block. */
784 if (NEXT_INSN (end
) == bb_note
)
790 BASIC_BLOCK (index
) = bb
;
792 /* Tag the block so that we know it has been used when considering
793 other basic block notes. */
797 /* Records the basic block struct in BB_FOR_INSN, for every instruction
798 indexed by INSN_UID. MAX is the size of the array. */
801 compute_bb_for_insn (bb_for_insn
, max
)
802 varray_type bb_for_insn
;
807 for (i
= 0; i
< n_basic_blocks
; ++i
)
809 basic_block bb
= BASIC_BLOCK (i
);
816 int uid
= INSN_UID (insn
);
818 VARRAY_BB (bb_for_insn
, uid
) = bb
;
821 insn
= NEXT_INSN (insn
);
826 /* Free the memory associated with the edge structures. */
834 for (i
= 0; i
< n_basic_blocks
; ++i
)
836 basic_block bb
= BASIC_BLOCK (i
);
838 for (e
= bb
->succ
; e
; e
= n
)
848 for (e
= ENTRY_BLOCK_PTR
->succ
; e
; e
= n
)
854 ENTRY_BLOCK_PTR
->succ
= 0;
855 EXIT_BLOCK_PTR
->pred
= 0;
858 /* Identify the edges between basic blocks.
860 NONLOCAL_LABEL_LIST is a list of non-local labels in the function. Blocks
861 that are otherwise unreachable may be reachable with a non-local goto.
863 BB_EH_END is an array indexed by basic block number in which we record
864 the list of exception regions active at the end of the basic block. */
867 make_edges (label_value_list
, bb_eh_end
)
868 rtx label_value_list
;
873 /* Assume no computed jump; revise as we create edges. */
874 current_function_has_computed_jump
= 0;
876 /* By nature of the way these get numbered, block 0 is always the entry. */
877 make_edge (ENTRY_BLOCK_PTR
, BASIC_BLOCK (0), EDGE_FALLTHRU
);
879 for (i
= 0; i
< n_basic_blocks
; ++i
)
881 basic_block bb
= BASIC_BLOCK (i
);
882 rtx insn
, x
, eh_list
;
884 int force_fallthru
= 0;
886 /* If we have asynchronous exceptions, scan the notes for all exception
887 regions active in the block. In the normal case, we only need the
888 one active at the end of the block, which is bb_eh_end[i]. */
890 eh_list
= bb_eh_end
[i
];
891 if (asynchronous_exceptions
)
893 for (insn
= bb
->end
; insn
!= bb
->head
; insn
= PREV_INSN (insn
))
895 if (GET_CODE (insn
) == NOTE
896 && NOTE_LINE_NUMBER (insn
) == NOTE_INSN_EH_REGION_END
)
897 eh_list
= gen_rtx_INSN_LIST (VOIDmode
, insn
, eh_list
);
901 /* Now examine the last instruction of the block, and discover the
902 ways we can leave the block. */
905 code
= GET_CODE (insn
);
908 if (code
== JUMP_INSN
)
912 /* ??? Recognize a tablejump and do the right thing. */
913 if ((tmp
= JUMP_LABEL (insn
)) != NULL_RTX
914 && (tmp
= NEXT_INSN (tmp
)) != NULL_RTX
915 && GET_CODE (tmp
) == JUMP_INSN
916 && (GET_CODE (PATTERN (tmp
)) == ADDR_VEC
917 || GET_CODE (PATTERN (tmp
)) == ADDR_DIFF_VEC
))
922 if (GET_CODE (PATTERN (tmp
)) == ADDR_VEC
)
923 vec
= XVEC (PATTERN (tmp
), 0);
925 vec
= XVEC (PATTERN (tmp
), 1);
927 for (j
= GET_NUM_ELEM (vec
) - 1; j
>= 0; --j
)
928 make_label_edge (bb
, XEXP (RTVEC_ELT (vec
, j
), 0), 0);
930 /* Some targets (eg, ARM) emit a conditional jump that also
931 contains the out-of-range target. Scan for these and
932 add an edge if necessary. */
933 if ((tmp
= single_set (insn
)) != NULL
934 && SET_DEST (tmp
) == pc_rtx
935 && GET_CODE (SET_SRC (tmp
)) == IF_THEN_ELSE
936 && GET_CODE (XEXP (SET_SRC (tmp
), 2)) == LABEL_REF
)
937 make_label_edge (bb
, XEXP (XEXP (SET_SRC (tmp
), 2), 0), 0);
939 #ifdef CASE_DROPS_THROUGH
940 /* Silly VAXen. The ADDR_VEC is going to be in the way of
941 us naturally detecting fallthru into the next block. */
946 /* If this is a computed jump, then mark it as reaching
947 everything on the label_value_list and forced_labels list. */
948 else if (computed_jump_p (insn
))
950 current_function_has_computed_jump
= 1;
952 for (x
= label_value_list
; x
; x
= XEXP (x
, 1))
953 make_label_edge (bb
, XEXP (x
, 0), EDGE_ABNORMAL
);
955 for (x
= forced_labels
; x
; x
= XEXP (x
, 1))
956 make_label_edge (bb
, XEXP (x
, 0), EDGE_ABNORMAL
);
      /* Return insns create an edge out to the exit block.  */
960 else if (returnjump_p (insn
))
961 make_edge (bb
, EXIT_BLOCK_PTR
, 0);
963 /* Otherwise, we have a plain conditional or unconditional jump. */
966 if (! JUMP_LABEL (insn
))
968 make_label_edge (bb
, JUMP_LABEL (insn
), 0);
972 /* If this is a CALL_INSN, then mark it as reaching the active EH
973 handler for this CALL_INSN. If we're handling asynchronous
974 exceptions then any insn can reach any of the active handlers.
976 Also mark the CALL_INSN as reaching any nonlocal goto handler. */
978 if (code
== CALL_INSN
|| asynchronous_exceptions
)
980 int is_call
= (code
== CALL_INSN
? EDGE_ABNORMAL_CALL
: 0);
	  /* Use REG_EH_RETHROW and REG_EH_REGION if available.  */
	  /* ??? REG_EH_REGION is not generated presently.  Is it
	     intended that there be multiple notes for the regions?
	     Or is my eh_list collection redundant with handler linking?  */
988 x
= find_reg_note (insn
, REG_EH_RETHROW
, 0);
990 x
= find_reg_note (insn
, REG_EH_REGION
, 0);
993 if (XINT (XEXP (x
, 0), 0) > 0)
995 ptr
= get_first_handler (XINT (XEXP (x
, 0), 0));
998 make_label_edge (bb
, ptr
->handler_label
,
999 EDGE_ABNORMAL
| EDGE_EH
| is_call
);
1006 for (x
= eh_list
; x
; x
= XEXP (x
, 1))
1008 ptr
= get_first_handler (NOTE_BLOCK_NUMBER (XEXP (x
, 0)));
1011 make_label_edge (bb
, ptr
->handler_label
,
1012 EDGE_ABNORMAL
| EDGE_EH
| is_call
);
1018 if (code
== CALL_INSN
&& nonlocal_goto_handler_labels
)
1020 /* ??? This could be made smarter: in some cases it's possible
1021 to tell that certain calls will not do a nonlocal goto.
1023 For example, if the nested functions that do the nonlocal
1024 gotos do not have their addresses taken, then only calls to
1025 those functions or to other nested functions that use them
1026 could possibly do nonlocal gotos. */
1028 for (x
= nonlocal_goto_handler_labels
; x
; x
= XEXP (x
, 1))
1029 make_label_edge (bb
, XEXP (x
, 0),
1030 EDGE_ABNORMAL
| EDGE_ABNORMAL_CALL
);
1034 /* We know something about the structure of the function __throw in
1035 libgcc2.c. It is the only function that ever contains eh_stub
1036 labels. It modifies its return address so that the last block
1037 returns to one of the eh_stub labels within it. So we have to
1038 make additional edges in the flow graph. */
1039 if (i
+ 1 == n_basic_blocks
&& eh_return_stub_label
!= 0)
1040 make_label_edge (bb
, eh_return_stub_label
, EDGE_EH
);
1042 /* Find out if we can drop through to the next block. */
1043 insn
= next_nonnote_insn (insn
);
1044 if (!insn
|| (i
+ 1 == n_basic_blocks
&& force_fallthru
))
1045 make_edge (bb
, EXIT_BLOCK_PTR
, EDGE_FALLTHRU
);
1046 else if (i
+ 1 < n_basic_blocks
)
1048 rtx tmp
= BLOCK_HEAD (i
+ 1);
1049 if (GET_CODE (tmp
) == NOTE
)
1050 tmp
= next_nonnote_insn (tmp
);
1051 if (force_fallthru
|| insn
== tmp
)
1052 make_edge (bb
, BASIC_BLOCK (i
+ 1), EDGE_FALLTHRU
);
1057 /* Create an edge between two basic blocks. FLAGS are auxiliary information
1058 about the edge that is accumulated between calls. */
1061 make_edge (src
, dst
, flags
)
1062 basic_block src
, dst
;
1067 /* Make sure we don't add duplicate edges. */
1069 for (e
= src
->succ
; e
; e
= e
->succ_next
)
1076 e
= (edge
) xcalloc (1, sizeof (*e
));
1078 e
->succ_next
= src
->succ
;
1079 e
->pred_next
= dst
->pred
;
1088 /* Create an edge from a basic block to a label. */
1091 make_label_edge (src
, label
, flags
)
1096 if (GET_CODE (label
) != CODE_LABEL
)
1099 /* If the label was never emitted, this insn is junk, but avoid a
1100 crash trying to refer to BLOCK_FOR_INSN (label). This can happen
1101 as a result of a syntax error and a diagnostic has already been
1104 if (INSN_UID (label
) == 0)
1107 make_edge (src
, BLOCK_FOR_INSN (label
), flags
);
1110 /* Identify critical edges and set the bits appropriately. */
1112 mark_critical_edges ()
1114 int i
, n
= n_basic_blocks
;
  /* We begin with the entry block.  This is not terribly important now,
     but could be if a front end (Fortran) implemented alternate entry
     points.  */
  bb = ENTRY_BLOCK_PTR;
1127 /* (1) Critical edges must have a source with multiple successors. */
1128 if (bb
->succ
&& bb
->succ
->succ_next
)
1130 for (e
= bb
->succ
; e
; e
= e
->succ_next
)
1132 /* (2) Critical edges must have a destination with multiple
1133 predecessors. Note that we know there is at least one
1134 predecessor -- the edge we followed to get here. */
1135 if (e
->dest
->pred
->pred_next
)
1136 e
->flags
|= EDGE_CRITICAL
;
1138 e
->flags
&= ~EDGE_CRITICAL
;
1143 for (e
= bb
->succ
; e
; e
= e
->succ_next
)
1144 e
->flags
&= ~EDGE_CRITICAL
;
1149 bb
= BASIC_BLOCK (i
);
1153 /* Split a (typically critical) edge. Return the new block.
1154 Abort on abnormal edges.
1156 ??? The code generally expects to be called on critical edges.
1157 The case of a block ending in an unconditional jump to a
1158 block with multiple predecessors is not handled optimally. */
1161 split_edge (edge_in
)
1164 basic_block old_pred
, bb
, old_succ
;
1169 /* Abnormal edges cannot be split. */
1170 if ((edge_in
->flags
& EDGE_ABNORMAL
) != 0)
1173 old_pred
= edge_in
->src
;
1174 old_succ
= edge_in
->dest
;
1176 /* Remove the existing edge from the destination's pred list. */
1179 for (pp
= &old_succ
->pred
; *pp
!= edge_in
; pp
= &(*pp
)->pred_next
)
1181 *pp
= edge_in
->pred_next
;
1182 edge_in
->pred_next
= NULL
;
1185 /* Create the new structures. */
1186 bb
= (basic_block
) obstack_alloc (function_obstack
, sizeof (*bb
));
1187 edge_out
= (edge
) xcalloc (1, sizeof (*edge_out
));
1189 memset (bb
, 0, sizeof (*bb
));
1190 bb
->local_set
= OBSTACK_ALLOC_REG_SET (function_obstack
);
1191 bb
->global_live_at_start
= OBSTACK_ALLOC_REG_SET (function_obstack
);
1192 bb
->global_live_at_end
= OBSTACK_ALLOC_REG_SET (function_obstack
);
1194 /* ??? This info is likely going to be out of date very soon. */
1195 CLEAR_REG_SET (bb
->local_set
);
1196 if (old_succ
->global_live_at_start
)
1198 COPY_REG_SET (bb
->global_live_at_start
, old_succ
->global_live_at_start
);
1199 COPY_REG_SET (bb
->global_live_at_end
, old_succ
->global_live_at_start
);
1203 CLEAR_REG_SET (bb
->global_live_at_start
);
1204 CLEAR_REG_SET (bb
->global_live_at_end
);
1209 bb
->succ
= edge_out
;
1212 edge_in
->flags
&= ~EDGE_CRITICAL
;
1214 edge_out
->pred_next
= old_succ
->pred
;
1215 edge_out
->succ_next
= NULL
;
1217 edge_out
->dest
= old_succ
;
1218 edge_out
->flags
= EDGE_FALLTHRU
;
1219 edge_out
->probability
= REG_BR_PROB_BASE
;
1221 old_succ
->pred
= edge_out
;
1223 /* Tricky case -- if there existed a fallthru into the successor
1224 (and we're not it) we must add a new unconditional jump around
1225 the new block we're actually interested in.
1227 Further, if that edge is critical, this means a second new basic
1228 block must be created to hold it. In order to simplify correct
1229 insn placement, do this before we touch the existing basic block
1230 ordering for the block we were really wanting. */
1231 if ((edge_in
->flags
& EDGE_FALLTHRU
) == 0)
1234 for (e
= edge_out
->pred_next
; e
; e
= e
->pred_next
)
1235 if (e
->flags
& EDGE_FALLTHRU
)
1240 basic_block jump_block
;
1243 if ((e
->flags
& EDGE_CRITICAL
) == 0)
1245 /* Non critical -- we can simply add a jump to the end
1246 of the existing predecessor. */
1247 jump_block
= e
->src
;
1251 /* We need a new block to hold the jump. The simplest
1252 way to do the bulk of the work here is to recursively
1254 jump_block
= split_edge (e
);
1255 e
= jump_block
->succ
;
1258 /* Now add the jump insn ... */
1259 pos
= emit_jump_insn_after (gen_jump (old_succ
->head
),
1261 jump_block
->end
= pos
;
1262 emit_barrier_after (pos
);
1264 /* ... let jump know that label is in use, ... */
1265 ++LABEL_NUSES (old_succ
->head
);
1267 /* ... and clear fallthru on the outgoing edge. */
1268 e
->flags
&= ~EDGE_FALLTHRU
;
1270 /* Continue splitting the interesting edge. */
1274 /* Place the new block just in front of the successor. */
1275 VARRAY_GROW (basic_block_info
, ++n_basic_blocks
);
1276 for (i
= n_basic_blocks
- 1; i
> old_succ
->index
; --i
)
1278 basic_block tmp
= BASIC_BLOCK (i
- 1);
1279 BASIC_BLOCK (i
) = tmp
;
1282 BASIC_BLOCK (i
) = bb
;
1285 /* Create the basic block note. */
1286 bb_note
= emit_note_before (NOTE_INSN_BASIC_BLOCK
, old_succ
->head
);
1287 NOTE_BASIC_BLOCK (bb_note
) = bb
;
1288 bb
->head
= bb
->end
= bb_note
;
1290 /* Not quite simple -- for non-fallthru edges, we must adjust the
1291 predecessor's jump instruction to target our new block. */
1292 if ((edge_in
->flags
& EDGE_FALLTHRU
) == 0)
1294 rtx tmp
, insn
= old_pred
->end
;
1295 rtx old_label
= old_succ
->head
;
1296 rtx new_label
= gen_label_rtx ();
1298 if (GET_CODE (insn
) != JUMP_INSN
)
1301 /* ??? Recognize a tablejump and adjust all matching cases. */
1302 if ((tmp
= JUMP_LABEL (insn
)) != NULL_RTX
1303 && (tmp
= NEXT_INSN (tmp
)) != NULL_RTX
1304 && GET_CODE (tmp
) == JUMP_INSN
1305 && (GET_CODE (PATTERN (tmp
)) == ADDR_VEC
1306 || GET_CODE (PATTERN (tmp
)) == ADDR_DIFF_VEC
))
1311 if (GET_CODE (PATTERN (tmp
)) == ADDR_VEC
)
1312 vec
= XVEC (PATTERN (tmp
), 0);
1314 vec
= XVEC (PATTERN (tmp
), 1);
1316 for (j
= GET_NUM_ELEM (vec
) - 1; j
>= 0; --j
)
1317 if (XEXP (RTVEC_ELT (vec
, j
), 0) == old_label
)
1319 RTVEC_ELT (vec
, j
) = gen_rtx_LABEL_REF (VOIDmode
, new_label
);
1320 --LABEL_NUSES (old_label
);
1321 ++LABEL_NUSES (new_label
);
1326 /* This would have indicated an abnormal edge. */
1327 if (computed_jump_p (insn
))
1330 /* A return instruction can't be redirected. */
1331 if (returnjump_p (insn
))
1334 /* If the insn doesn't go where we think, we're confused. */
1335 if (JUMP_LABEL (insn
) != old_label
)
1338 redirect_jump (insn
, new_label
);
1341 emit_label_before (new_label
, bb_note
);
1342 bb
->head
= new_label
;
/* Queue instructions for insertion on an edge between two basic blocks.
   The new instructions and basic blocks (if any) will not appear in the
   CFG until commit_edge_insertions is called.  */
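/* A typical use, sketched from the description above (the SET pattern and
   the register operands are purely illustrative), queues a pattern on an
   edge and later commits everything that has been queued:

	insert_insn_on_edge (gen_rtx_SET (VOIDmode, dest_reg, src_reg), e);
	...
	commit_edge_insertions ();  */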
1353 insert_insn_on_edge (pattern
, e
)
1357 /* We cannot insert instructions on an abnormal critical edge.
1358 It will be easier to find the culprit if we die now. */
1359 if ((e
->flags
& (EDGE_ABNORMAL
|EDGE_CRITICAL
))
1360 == (EDGE_ABNORMAL
|EDGE_CRITICAL
))
1363 if (e
->insns
== NULL_RTX
)
1366 push_to_sequence (e
->insns
);
1368 emit_insn (pattern
);
1370 e
->insns
= get_insns ();
1374 /* Update the CFG for the instructions queued on edge E. */
1377 commit_one_edge_insertion (e
)
1380 rtx before
= NULL_RTX
, after
= NULL_RTX
, tmp
;
1383 /* Figure out where to put these things. If the destination has
1384 one predecessor, insert there. Except for the exit block. */
1385 if (e
->dest
->pred
->pred_next
== NULL
1386 && e
->dest
!= EXIT_BLOCK_PTR
)
1390 /* Get the location correct wrt a code label, and "nice" wrt
1391 a basic block note, and before everything else. */
1393 if (GET_CODE (tmp
) == CODE_LABEL
)
1394 tmp
= NEXT_INSN (tmp
);
1395 if (GET_CODE (tmp
) == NOTE
1396 && NOTE_LINE_NUMBER (tmp
) == NOTE_INSN_BASIC_BLOCK
)
1397 tmp
= NEXT_INSN (tmp
);
1398 if (tmp
== bb
->head
)
1401 after
= PREV_INSN (tmp
);
1404 /* If the source has one successor and the edge is not abnormal,
1405 insert there. Except for the entry block. */
1406 else if ((e
->flags
& EDGE_ABNORMAL
) == 0
1407 && e
->src
->succ
->succ_next
== NULL
1408 && e
->src
!= ENTRY_BLOCK_PTR
)
1411 if (GET_CODE (bb
->end
) == JUMP_INSN
)
1413 /* ??? Is it possible to wind up with non-simple jumps? Perhaps
1414 a jump with delay slots already filled? */
1415 if (! simplejump_p (bb
->end
))
1422 /* We'd better be fallthru, or we've lost track of what's what. */
1423 if ((e
->flags
& EDGE_FALLTHRU
) == 0)
1430 /* Otherwise we must split the edge. */
1433 bb
= split_edge (e
);
1437 /* Now that we've found the spot, do the insertion. */
1439 e
->insns
= NULL_RTX
;
1442 emit_insns_before (tmp
, before
);
1443 if (before
== bb
->head
)
1448 tmp
= emit_insns_after (tmp
, after
);
1449 if (after
== bb
->end
)
1454 /* Update the CFG for all queued instructions. */
1457 commit_edge_insertions ()
1463 bb
= ENTRY_BLOCK_PTR
;
1468 for (e
= bb
->succ
; e
; e
= next
)
1470 next
= e
->succ_next
;
1472 commit_one_edge_insertion (e
);
1475 if (++i
>= n_basic_blocks
)
1477 bb
= BASIC_BLOCK (i
);
1481 /* Delete all unreachable basic blocks. */
1484 delete_unreachable_blocks ()
1486 basic_block
*worklist
, *tos
;
1487 int deleted_handler
;
1492 tos
= worklist
= (basic_block
*) alloca (sizeof (basic_block
) * n
);
1494 /* Use basic_block->aux as a marker. Clear them all. */
1496 for (i
= 0; i
< n
; ++i
)
1497 BASIC_BLOCK (i
)->aux
= NULL
;
  /* Add our starting points to the worklist.  Almost always there will
     be only one.  It isn't inconceivable that we might one day directly
     support Fortran alternate entry points.  */
1503 for (e
= ENTRY_BLOCK_PTR
->succ
; e
; e
= e
->succ_next
)
1507 /* Mark the block with a handy non-null value. */
1511 /* Iterate: find everything reachable from what we've already seen. */
1513 while (tos
!= worklist
)
1515 basic_block b
= *--tos
;
1517 for (e
= b
->succ
; e
; e
= e
->succ_next
)
1525 /* Delete all unreachable basic blocks. Count down so that we don't
1526 interfere with the block renumbering that happens in delete_block. */
1528 deleted_handler
= 0;
1530 for (i
= n
- 1; i
>= 0; --i
)
1532 basic_block b
= BASIC_BLOCK (i
);
1535 /* This block was found. Tidy up the mark. */
1538 deleted_handler
|= delete_block (b
);
1541 /* Fix up edges that now fall through, or rather should now fall through
1542 but previously required a jump around now deleted blocks. Simplify
1543 the search by only examining blocks numerically adjacent, since this
1544 is how find_basic_blocks created them. */
1546 for (i
= 1; i
< n_basic_blocks
; ++i
)
1548 basic_block b
= BASIC_BLOCK (i
- 1);
1549 basic_block c
= BASIC_BLOCK (i
);
1552 /* We care about simple conditional or unconditional jumps with
1555 If we had a conditional branch to the next instruction when
1556 find_basic_blocks was called, then there will only be one
1557 out edge for the block which ended with the conditional
1558 branch (since we do not create duplicate edges).
1560 Furthermore, the edge will be marked as a fallthru because we
1561 merge the flags for the duplicate edges. So we do not want to
1562 check that the edge is not a FALLTHRU edge. */
1563 if ((s
= b
->succ
) != NULL
1564 && s
->succ_next
== NULL
1566 tidy_fallthru_edge (s
, b
, c
);
1569 /* Attempt to merge blocks as made possible by edge removal. If a block
1570 has only one successor, and the successor has only one predecessor,
1571 they may be combined. */
1573 for (i
= 0; i
< n_basic_blocks
; )
1575 basic_block c
, b
= BASIC_BLOCK (i
);
      /* A loop because chains of blocks might be combinable.  */
1579 while ((s
= b
->succ
) != NULL
1580 && s
->succ_next
== NULL
1581 && (c
= s
->dest
) != EXIT_BLOCK_PTR
1582 && c
->pred
->pred_next
== NULL
1583 && merge_blocks (s
, b
, c
))
1586 /* Don't get confused by the index shift caused by deleting blocks. */
1590 /* If we deleted an exception handler, we may have EH region begin/end
1591 blocks to remove as well. */
1592 if (deleted_handler
)
1593 delete_eh_regions ();
1596 /* Find EH regions for which there is no longer a handler, and delete them. */
1599 delete_eh_regions ()
1603 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
1604 if (GET_CODE (insn
) == NOTE
)
1606 if ((NOTE_LINE_NUMBER (insn
) == NOTE_INSN_EH_REGION_BEG
) ||
1607 (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_EH_REGION_END
))
1609 int num
= CODE_LABEL_NUMBER (insn
);
1610 /* A NULL handler indicates a region is no longer needed */
1611 if (get_first_handler (num
) == NULL
)
1613 NOTE_LINE_NUMBER (insn
) = NOTE_INSN_DELETED
;
1614 NOTE_SOURCE_FILE (insn
) = 0;
1620 /* Return true if NOTE is not one of the ones that must be kept paired,
1621 so that we may simply delete them. */
1624 can_delete_note_p (note
)
1627 return (NOTE_LINE_NUMBER (note
) == NOTE_INSN_DELETED
1628 || NOTE_LINE_NUMBER (note
) == NOTE_INSN_BASIC_BLOCK
);
1631 /* Unlink a chain of insns between START and FINISH, leaving notes
1632 that must be paired. */
1635 delete_insn_chain (start
, finish
)
1638 /* Unchain the insns one by one. It would be quicker to delete all
1639 of these with a single unchaining, rather than one at a time, but
1640 we need to keep the NOTE's. */
1646 next
= NEXT_INSN (start
);
1647 if (GET_CODE (start
) == NOTE
&& !can_delete_note_p (start
))
1649 else if (GET_CODE (start
) == CODE_LABEL
&& !can_delete_label_p (start
))
1652 next
= flow_delete_insn (start
);
1654 if (start
== finish
)
1660 /* Delete the insns in a (non-live) block. We physically delete every
1661 non-deleted-note insn, and update the flow graph appropriately.
1663 Return nonzero if we deleted an exception handler. */
1665 /* ??? Preserving all such notes strikes me as wrong. It would be nice
1666 to post-process the stream to remove empty blocks, loops, ranges, etc. */
1672 int deleted_handler
= 0;
1675 /* If the head of this block is a CODE_LABEL, then it might be the
1676 label for an exception handler which can't be reached.
1678 We need to remove the label from the exception_handler_label list
1679 and remove the associated NOTE_EH_REGION_BEG and NOTE_EH_REGION_END
1684 if (GET_CODE (insn
) == CODE_LABEL
)
1686 rtx x
, *prev
= &exception_handler_labels
;
1688 for (x
= exception_handler_labels
; x
; x
= XEXP (x
, 1))
1690 if (XEXP (x
, 0) == insn
)
1692 /* Found a match, splice this label out of the EH label list. */
1693 *prev
= XEXP (x
, 1);
1694 XEXP (x
, 1) = NULL_RTX
;
1695 XEXP (x
, 0) = NULL_RTX
;
1697 /* Remove the handler from all regions */
1698 remove_handler (insn
);
1699 deleted_handler
= 1;
1702 prev
= &XEXP (x
, 1);
1705 /* This label may be referenced by code solely for its value, or
1706 referenced by static data, or something. We have determined
1707 that it is not reachable, but cannot delete the label itself.
1708 Save code space and continue to delete the balance of the block,
1709 along with properly updating the cfg. */
1710 if (!can_delete_label_p (insn
))
1712 /* If we've only got one of these, skip the whole deleting
1715 goto no_delete_insns
;
1716 insn
= NEXT_INSN (insn
);
1720 /* Selectively unlink the insn chain. Include any BARRIER that may
1721 follow the basic block. */
1722 end
= next_nonnote_insn (b
->end
);
1723 if (!end
|| GET_CODE (end
) != BARRIER
)
1725 delete_insn_chain (insn
, end
);
1729 /* Remove the edges into and out of this block. Note that there may
1730 indeed be edges in, if we are removing an unreachable loop. */
1734 for (e
= b
->pred
; e
; e
= next
)
1736 for (q
= &e
->src
->succ
; *q
!= e
; q
= &(*q
)->succ_next
)
1739 next
= e
->pred_next
;
1742 for (e
= b
->succ
; e
; e
= next
)
1744 for (q
= &e
->dest
->pred
; *q
!= e
; q
= &(*q
)->pred_next
)
1747 next
= e
->succ_next
;
1755 /* Remove the basic block from the array, and compact behind it. */
1758 return deleted_handler
;
1761 /* Remove block B from the basic block array and compact behind it. */
1767 int i
, n
= n_basic_blocks
;
1769 for (i
= b
->index
; i
+ 1 < n
; ++i
)
1771 basic_block x
= BASIC_BLOCK (i
+ 1);
1772 BASIC_BLOCK (i
) = x
;
1776 basic_block_info
->num_elements
--;
1780 /* Delete INSN by patching it out. Return the next insn. */
1783 flow_delete_insn (insn
)
1786 rtx prev
= PREV_INSN (insn
);
1787 rtx next
= NEXT_INSN (insn
);
1789 PREV_INSN (insn
) = NULL_RTX
;
1790 NEXT_INSN (insn
) = NULL_RTX
;
1793 NEXT_INSN (prev
) = next
;
1795 PREV_INSN (next
) = prev
;
1797 set_last_insn (prev
);
1799 if (GET_CODE (insn
) == CODE_LABEL
)
1800 remove_node_from_expr_list (insn
, &nonlocal_goto_handler_labels
);
1802 /* If deleting a jump, decrement the use count of the label. Deleting
1803 the label itself should happen in the normal course of block merging. */
1804 if (GET_CODE (insn
) == JUMP_INSN
&& JUMP_LABEL (insn
))
1805 LABEL_NUSES (JUMP_LABEL (insn
))--;
1810 /* True if a given label can be deleted. */
1813 can_delete_label_p (label
)
1818 if (LABEL_PRESERVE_P (label
))
1821 for (x
= forced_labels
; x
; x
= XEXP (x
, 1))
1822 if (label
== XEXP (x
, 0))
1824 for (x
= label_value_list
; x
; x
= XEXP (x
, 1))
1825 if (label
== XEXP (x
, 0))
1827 for (x
= exception_handler_labels
; x
; x
= XEXP (x
, 1))
1828 if (label
== XEXP (x
, 0))
1831 /* User declared labels must be preserved. */
1832 if (LABEL_NAME (label
) != 0)
1838 /* Blocks A and B are to be merged into a single block. The insns
1839 are already contiguous, hence `nomove'. */
1842 merge_blocks_nomove (a
, b
)
1846 rtx b_head
, b_end
, a_end
;
1849 /* If there was a CODE_LABEL beginning B, delete it. */
1852 if (GET_CODE (b_head
) == CODE_LABEL
)
1854 /* Detect basic blocks with nothing but a label. This can happen
1855 in particular at the end of a function. */
1856 if (b_head
== b_end
)
1858 b_head
= flow_delete_insn (b_head
);
1861 /* Delete the basic block note. */
1862 if (GET_CODE (b_head
) == NOTE
1863 && NOTE_LINE_NUMBER (b_head
) == NOTE_INSN_BASIC_BLOCK
)
1865 if (b_head
== b_end
)
1867 b_head
= flow_delete_insn (b_head
);
1870 /* If there was a jump out of A, delete it. */
1872 if (GET_CODE (a_end
) == JUMP_INSN
)
1876 prev
= prev_nonnote_insn (a_end
);
1881 /* If this was a conditional jump, we need to also delete
1882 the insn that set cc0. */
1884 if (prev
&& sets_cc0_p (prev
))
1887 prev
= prev_nonnote_insn (prev
);
1890 flow_delete_insn (tmp
);
1894 /* Note that a->head != a->end, since we should have at least a
1895 bb note plus the jump, so prev != insn. */
1896 flow_delete_insn (a_end
);
1900 /* By definition, there should only be one successor of A, and that is
1901 B. Free that edge struct. */
1904 /* Adjust the edges out of B for the new owner. */
1905 for (e
= b
->succ
; e
; e
= e
->succ_next
)
1909 /* Reassociate the insns of B with A. */
1912 BLOCK_FOR_INSN (b_head
) = a
;
1913 while (b_head
!= b_end
)
1915 b_head
= NEXT_INSN (b_head
);
1916 BLOCK_FOR_INSN (b_head
) = a
;
1922 /* Compact the basic block array. */
1926 /* Attempt to merge basic blocks that are potentially non-adjacent.
1927 Return true iff the attempt succeeded. */
1930 merge_blocks (e
, b
, c
)
1934 /* If B has a fallthru edge to C, no need to move anything. */
1935 if (!(e
->flags
& EDGE_FALLTHRU
))
1937 /* ??? From here on out we must make sure to not munge nesting
1938 of exception regions and lexical blocks. Need to think about
1939 these cases before this gets implemented. */
1942 /* If C has an outgoing fallthru, and B does not have an incoming
1943 fallthru, move B before C. The later clause is somewhat arbitrary,
1944 but avoids modifying blocks other than the two we've been given. */
1946 /* Otherwise, move C after B. If C had a fallthru, which doesn't
1947 happen to be the physical successor to B, insert an unconditional
1948 branch. If C already ended with a conditional branch, the new
1949 jump must go in a new basic block D. */
1952 /* If a label still appears somewhere and we cannot delete the label,
1953 then we cannot merge the blocks. The edge was tidied already. */
1955 rtx insn
, stop
= NEXT_INSN (c
->head
);
1956 for (insn
= NEXT_INSN (b
->end
); insn
!= stop
; insn
= NEXT_INSN (insn
))
1957 if (GET_CODE (insn
) == CODE_LABEL
&& !can_delete_label_p (insn
))
1961 merge_blocks_nomove (b
, c
);
/* The given edge should potentially be a fallthru edge.  If that is in
   fact true, delete the unconditional jump and barriers that are in
   the way.  */

tidy_fallthru_edge (e, b, c)
  /* ??? In a late-running flow pass, other folks may have deleted basic
     blocks by nopping out blocks, leaving multiple BARRIERs between here
     and the target label.  They ought to be chastised and fixed.

     We can also wind up with a sequence of undeletable labels between
     one block and the next.

     So search through a sequence of barriers, labels, and notes for
     the head of block C and assert that we really do fall through.  */
1986 if (next_real_insn (b
->end
) != next_real_insn (PREV_INSN (c
->head
)))
1989 /* Remove what will soon cease being the jump insn from the source block.
1990 If block B consisted only of this single jump, turn it into a deleted
1993 if (GET_CODE (q
) == JUMP_INSN
)
1996 /* If this was a conditional jump, we need to also delete
1997 the insn that set cc0. */
1998 if (! simplejump_p (q
) && condjump_p (q
))
2005 NOTE_LINE_NUMBER (q
) = NOTE_INSN_DELETED
;
2006 NOTE_SOURCE_FILE (q
) = 0;
2009 b
->end
= q
= PREV_INSN (q
);
2012 /* Selectively unlink the sequence. */
2013 if (q
!= PREV_INSN (c
->head
))
2014 delete_insn_chain (NEXT_INSN (q
), PREV_INSN (c
->head
));
2016 e
->flags
|= EDGE_FALLTHRU
;
2019 /* Discover and record the loop depth at the head of each basic block. */
2022 calculate_loop_depth (insns
)
2027 int i
= 0, depth
= 1;
2029 bb
= BASIC_BLOCK (i
);
2030 for (insn
= insns
; insn
; insn
= NEXT_INSN (insn
))
2032 if (insn
== bb
->head
)
2034 bb
->loop_depth
= depth
;
2035 if (++i
>= n_basic_blocks
)
2037 bb
= BASIC_BLOCK (i
);
2040 if (GET_CODE (insn
) == NOTE
)
2042 if (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_LOOP_BEG
)
2044 else if (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_LOOP_END
)
2047 /* If we have LOOP_DEPTH == 0, there has been a bookkeeping error. */
2054 /* Perform data flow analysis.
2055 F is the first insn of the function and NREGS the number of register numbers
2059 life_analysis (f
, nregs
, file
, remove_dead_code
)
2063 int remove_dead_code
;
2065 #ifdef ELIMINABLE_REGS
2067 static struct {int from
, to
; } eliminables
[] = ELIMINABLE_REGS
;
2070 /* Record which registers will be eliminated. We use this in
2073 CLEAR_HARD_REG_SET (elim_reg_set
);
2075 #ifdef ELIMINABLE_REGS
2076 for (i
= 0; i
< sizeof eliminables
/ sizeof eliminables
[0]; i
++)
2077 SET_HARD_REG_BIT (elim_reg_set
, eliminables
[i
].from
);
2079 SET_HARD_REG_BIT (elim_reg_set
, FRAME_POINTER_REGNUM
);
2082 /* Allocate a bitmap to be filled in by record_volatile_insns. */
2083 uid_volatile
= BITMAP_ALLOCA ();
2085 /* We want alias analysis information for local dead store elimination. */
2086 init_alias_analysis ();
2087 life_analysis_1 (f
, nregs
, remove_dead_code
);
2088 end_alias_analysis ();
2091 dump_flow_info (file
);
2093 BITMAP_FREE (uid_volatile
);
2094 free_basic_block_vars (1);
2097 /* Free the variables allocated by find_basic_blocks.
2099 KEEP_HEAD_END_P is non-zero if basic_block_info is not to be freed. */
2102 free_basic_block_vars (keep_head_end_p
)
2103 int keep_head_end_p
;
2105 if (basic_block_for_insn
)
2107 VARRAY_FREE (basic_block_for_insn
);
2108 basic_block_for_insn
= NULL
;
2111 if (! keep_head_end_p
)
2114 VARRAY_FREE (basic_block_info
);
2117 ENTRY_BLOCK_PTR
->aux
= NULL
;
2118 ENTRY_BLOCK_PTR
->global_live_at_end
= NULL
;
2119 EXIT_BLOCK_PTR
->aux
= NULL
;
2120 EXIT_BLOCK_PTR
->global_live_at_start
= NULL
;
/* Return nonzero if the destination of SET equals the source.  */
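/* For example, (set (reg:SI 60) (reg:SI 60)) is such a no-op, as is a move
   between SUBREGs of the same register with the same SUBREG_WORD; those are
   the two shapes tested below.  */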
2129 rtx src
= SET_SRC (set
);
2130 rtx dst
= SET_DEST (set
);
2131 if (GET_CODE (src
) == REG
&& GET_CODE (dst
) == REG
2132 && REGNO (src
) == REGNO (dst
))
2134 if (GET_CODE (src
) != SUBREG
|| GET_CODE (dst
) != SUBREG
2135 || SUBREG_WORD (src
) != SUBREG_WORD (dst
))
2137 src
= SUBREG_REG (src
);
2138 dst
= SUBREG_REG (dst
);
2139 if (GET_CODE (src
) == REG
&& GET_CODE (dst
) == REG
2140 && REGNO (src
) == REGNO (dst
))
/* Return nonzero if an insn consists only of SETs, each of which only sets a
   value to itself.  */
2151 rtx pat
= PATTERN (insn
);
2153 /* Insns carrying these notes are useful later on. */
2154 if (find_reg_note (insn
, REG_EQUAL
, NULL_RTX
))
2157 if (GET_CODE (pat
) == SET
&& set_noop_p (pat
))
2160 if (GET_CODE (pat
) == PARALLEL
)
2163 /* If nothing but SETs of registers to themselves,
2164 this insn can also be deleted. */
2165 for (i
= 0; i
< XVECLEN (pat
, 0); i
++)
2167 rtx tem
= XVECEXP (pat
, 0, i
);
2169 if (GET_CODE (tem
) == USE
2170 || GET_CODE (tem
) == CLOBBER
)
2173 if (GET_CODE (tem
) != SET
|| ! set_noop_p (tem
))
2183 notice_stack_pointer_modification (x
, pat
)
2185 rtx pat ATTRIBUTE_UNUSED
;
2187 if (x
== stack_pointer_rtx
2188 /* The stack pointer is only modified indirectly as the result
2189 of a push until later in flow. See the comments in rtl.texi
2190 regarding Embedded Side-Effects on Addresses. */
2191 || (GET_CODE (x
) == MEM
2192 && (GET_CODE (XEXP (x
, 0)) == PRE_DEC
2193 || GET_CODE (XEXP (x
, 0)) == PRE_INC
2194 || GET_CODE (XEXP (x
, 0)) == POST_DEC
2195 || GET_CODE (XEXP (x
, 0)) == POST_INC
)
2196 && XEXP (XEXP (x
, 0), 0) == stack_pointer_rtx
))
2197 current_function_sp_is_unchanging
= 0;
/* Record which insns refer to any volatile memory
   or for any reason can't be deleted just because they are dead stores.
   Also, delete any insns that copy a register to itself.
   And see if the stack pointer is modified.  */

static void
record_volatile_insns (f)
     rtx f;
{
  rtx insn;

  for (insn = f; insn; insn = NEXT_INSN (insn))
    {
      enum rtx_code code1 = GET_CODE (insn);
      if (code1 == CALL_INSN)
	SET_INSN_VOLATILE (insn);
      else if (code1 == INSN || code1 == JUMP_INSN)
	{
	  if (GET_CODE (PATTERN (insn)) != USE
	      && volatile_refs_p (PATTERN (insn)))
	    SET_INSN_VOLATILE (insn);

	  /* A SET that makes space on the stack cannot be dead.
	     (Such SETs occur only for allocating variable-size data,
	     so they will always have a PLUS or MINUS according to the
	     direction of stack growth.)
	     Even if this function never uses this stack pointer value,
	     signal handlers do!  */
	  else if (code1 == INSN && GET_CODE (PATTERN (insn)) == SET
		   && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
#ifdef STACK_GROWS_DOWNWARD
		   && GET_CODE (SET_SRC (PATTERN (insn))) == MINUS
#else
		   && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
#endif
		   && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx)
	    SET_INSN_VOLATILE (insn);

	  /* Delete (in effect) any obvious no-op moves.  */
	  else if (noop_move_p (insn))
	    {
	      PUT_CODE (insn, NOTE);
	      NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
	      NOTE_SOURCE_FILE (insn) = 0;
	    }
	}

      /* Check if insn modifies the stack pointer.  */
      if (current_function_sp_is_unchanging
	  && GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	note_stores (PATTERN (insn), notice_stack_pointer_modification);
    }
}
/* Mark those regs which are needed at the end of the function as live
   at the end of the last basic block.  */

void
mark_regs_live_at_end (set)
     regset set;
{
  int i;

  /* If exiting needs the right stack value, consider the stack pointer
     live at the end of the function.  */
  if (! EXIT_IGNORE_STACK
      || (! FRAME_POINTER_REQUIRED
	  && ! current_function_calls_alloca
	  && flag_omit_frame_pointer)
      || current_function_sp_is_unchanging)
    SET_REGNO_REG_SET (set, STACK_POINTER_REGNUM);

  /* Mark the frame pointer if needed at the end of the function.  If
     we end up eliminating it, it will be removed from the live list
     of each basic block by reload.  */

  SET_REGNO_REG_SET (set, FRAME_POINTER_REGNUM);
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
  /* If they are different, also mark the hard frame pointer as live.  */
  SET_REGNO_REG_SET (set, HARD_FRAME_POINTER_REGNUM);
#endif

  /* Mark all global registers, and all registers used by the epilogue
     as being live at the end of the function since they may be
     referenced by our caller.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (global_regs[i]
#ifdef EPILOGUE_USES
	|| EPILOGUE_USES (i)
#endif
	)
      SET_REGNO_REG_SET (set, i);

  /* ??? Mark function return value here rather than as uses.  */
}
2295 /* Determine which registers are live at the start of each
2296 basic block of the function whose first insn is F.
2297 NREGS is the number of registers used in F.
2298 We allocate the vector basic_block_live_at_start
2299 and the regsets that it points to, and fill them with the data.
2300 regset_size and regset_bytes are also set here. */
2303 life_analysis_1 (f
, nregs
, remove_dead_code
)
2306 int remove_dead_code
;
2311 char save_regs_ever_live
[FIRST_PSEUDO_REGISTER
];
2312 regset
*new_live_at_end
;
2314 struct obstack flow_obstack
;
2316 gcc_obstack_init (&flow_obstack
);
2320 /* Allocate and zero out many data structures
2321 that will record the data from lifetime analysis. */
2323 allocate_reg_life_data ();
2324 allocate_bb_life_data ();
2326 reg_next_use
= (rtx
*) alloca (nregs
* sizeof (rtx
));
2327 memset (reg_next_use
, 0, nregs
* sizeof (rtx
));
2329 /* Set up regset-vectors used internally within this function.
2330 Their meanings are documented above, with their declarations. */
2332 new_live_at_end
= (regset
*) alloca ((n_basic_blocks
+ 1) * sizeof (regset
));
2333 init_regset_vector (new_live_at_end
, n_basic_blocks
+ 1, &flow_obstack
);
2335 /* Stick these vectors into the AUX field of the basic block, so that
2336 we don't have to keep going through the index. */
2338 for (i
= 0; i
< n_basic_blocks
; ++i
)
2339 BASIC_BLOCK (i
)->aux
= new_live_at_end
[i
];
2340 ENTRY_BLOCK_PTR
->aux
= new_live_at_end
[i
];
2342 /* Assume that the stack pointer is unchanging if alloca hasn't been used.
2343 This will be cleared by record_volatile_insns if it encounters an insn
2344 which modifies the stack pointer. */
2345 current_function_sp_is_unchanging
= !current_function_calls_alloca
;
2347 record_volatile_insns (f
);
2349 if (n_basic_blocks
> 0)
2354 theend
= EXIT_BLOCK_PTR
->global_live_at_start
;
2355 mark_regs_live_at_end (theend
);
      /* Propagate this exit data to each of EXIT's predecessors.  */
2358 for (e
= EXIT_BLOCK_PTR
->pred
; e
; e
= e
->pred_next
)
2360 COPY_REG_SET (e
->src
->global_live_at_end
, theend
);
2361 COPY_REG_SET ((regset
) e
->src
->aux
, theend
);
  /* The post-reload life analysis must have (on a global basis) the same
     registers live as were computed by reload itself; otherwise elimination
     offsets and such may be incorrect.

     Reload will mark some registers as live even though they do not appear
     in the rtl.  */
2372 if (reload_completed
)
2373 memcpy (save_regs_ever_live
, regs_ever_live
, sizeof (regs_ever_live
));
2374 memset (regs_ever_live
, 0, sizeof regs_ever_live
);
2376 /* Propagate life info through the basic blocks
2377 around the graph of basic blocks.
2379 This is a relaxation process: each time a new register
2380 is live at the end of the basic block, we must scan the block
2381 to determine which registers are, as a consequence, live at the beginning
2382 of that block. These registers must then be marked live at the ends
2383 of all the blocks that can transfer control to that block.
2384 The process continues until it reaches a fixed point. */
2391 for (i
= n_basic_blocks
- 1; i
>= 0; i
--)
2393 basic_block bb
= BASIC_BLOCK (i
);
2394 int consider
= first_pass
;
2395 int must_rescan
= first_pass
;
2400 /* Set CONSIDER if this block needs thinking about at all
2401 (that is, if the regs live now at the end of it
2402 are not the same as were live at the end of it when
2403 we last thought about it).
2404 Set must_rescan if it needs to be thought about
2405 instruction by instruction (that is, if any additional
2406 reg that is live at the end now but was not live there before
2407 is one of the significant regs of this basic block). */
2409 EXECUTE_IF_AND_COMPL_IN_REG_SET
2410 ((regset
) bb
->aux
, bb
->global_live_at_end
, 0, j
,
2413 if (REGNO_REG_SET_P (bb
->local_set
, j
))
2424 /* The live_at_start of this block may be changing,
2425 so another pass will be required after this one. */
2430 /* No complete rescan needed;
2431 just record those variables newly known live at end
2432 as live at start as well. */
2433 IOR_AND_COMPL_REG_SET (bb
->global_live_at_start
,
2435 bb
->global_live_at_end
);
2437 IOR_AND_COMPL_REG_SET (bb
->global_live_at_end
,
2439 bb
->global_live_at_end
);
2443 /* Update the basic_block_live_at_start
2444 by propagation backwards through the block. */
2445 COPY_REG_SET (bb
->global_live_at_end
, (regset
) bb
->aux
);
2446 COPY_REG_SET (bb
->global_live_at_start
,
2447 bb
->global_live_at_end
);
2448 propagate_block (bb
->global_live_at_start
,
2449 bb
->head
, bb
->end
, 0,
2450 first_pass
? bb
->local_set
: (regset
) 0,
2451 i
, remove_dead_code
);
2454 /* Update the new_live_at_end's of the block's predecessors. */
2458 for (e
= bb
->pred
; e
; e
= e
->pred_next
)
2459 IOR_REG_SET ((regset
) e
->src
->aux
, bb
->global_live_at_start
);
  /* The only pseudos that are live at the beginning of the function are
     those that were not set anywhere in the function.  local-alloc doesn't
     know how to handle these correctly, so mark them as not local to any
     one basic block.  */
2474 if (n_basic_blocks
> 0)
2475 EXECUTE_IF_SET_IN_REG_SET (BASIC_BLOCK (0)->global_live_at_start
,
2476 FIRST_PSEUDO_REGISTER
, i
,
2478 REG_BASIC_BLOCK (i
) = REG_BLOCK_GLOBAL
;
2481 /* Now the life information is accurate. Make one more pass over each
2482 basic block to delete dead stores, create autoincrement addressing
2483 and record how many times each register is used, is set, or dies. */
2485 for (i
= 0; i
< n_basic_blocks
; i
++)
2487 basic_block bb
= BASIC_BLOCK (i
);
2489 /* We start with global_live_at_end to determine which stores are
2490 dead. This process is destructive, and we wish to preserve the
2491 contents of global_live_at_end for posterity. Fortunately,
2492 new_live_at_end, due to the way we converged on a solution,
2493 contains a duplicate of global_live_at_end that we can kill. */
2494 propagate_block ((regset
) bb
->aux
, bb
->head
, bb
->end
, 1, (regset
) 0, i
, remove_dead_code
);
2501 /* We have a problem with any pseudoreg that lives across the setjmp.
2502 ANSI says that if a user variable does not change in value between
2503 the setjmp and the longjmp, then the longjmp preserves it. This
2504 includes longjmp from a place where the pseudo appears dead.
2505 (In principle, the value still exists if it is in scope.)
2506 If the pseudo goes in a hard reg, some other value may occupy
2507 that hard reg where this pseudo is dead, thus clobbering the pseudo.
2508 Conclusion: such a pseudo must not go in a hard reg. */
2509 EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp
,
2510 FIRST_PSEUDO_REGISTER
, i
,
2512 if (regno_reg_rtx
[i
] != 0)
2514 REG_LIVE_LENGTH (i
) = -1;
2515 REG_BASIC_BLOCK (i
) = -1;
2519 /* Restore regs_ever_live that was provided by reload. */
2520 if (reload_completed
)
2521 memcpy (regs_ever_live
, save_regs_ever_live
, sizeof (regs_ever_live
));
2523 free_regset_vector (new_live_at_end
, n_basic_blocks
);
2524 obstack_free (&flow_obstack
, NULL_PTR
);
2526 for (i
= 0; i
< n_basic_blocks
; ++i
)
2527 BASIC_BLOCK (i
)->aux
= NULL
;
2528 ENTRY_BLOCK_PTR
->aux
= NULL
;
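/* Illustrative sketch only (not used by the compiler): a self-contained
   version of the relaxation described in the comments above, with plain
   bit masks standing in for regsets, at most 32 registers, and a
   hypothetical CFG given as per-block USE/DEF summaries plus predecessor
   lists terminated by -1.  All toy_ names are invented for the example.  */

#define TOY_NBLOCKS 8

/* Backward transfer function of one block: a register is live at the
   start if it is read before being written in the block, or live at the
   end and not written at all.  */
static unsigned int
toy_live_at_start (use, def, live_at_end)
     unsigned int use, def, live_at_end;
{
  return use | (live_at_end & ~def);
}

/* Iterate until no live-at-start or live-at-end set changes.  */
static void
toy_life_relaxation (use, def, pred, live_in, live_out)
     unsigned int use[TOY_NBLOCKS], def[TOY_NBLOCKS];
     int pred[TOY_NBLOCKS][TOY_NBLOCKS + 1];
     unsigned int live_in[TOY_NBLOCKS], live_out[TOY_NBLOCKS];
{
  int changed = 1;

  while (changed)
    {
      int b;

      changed = 0;
      for (b = TOY_NBLOCKS - 1; b >= 0; b--)
	{
	  unsigned int new_in = toy_live_at_start (use[b], def[b], live_out[b]);
	  int p;

	  if (new_in != live_in[b])
	    changed = 1;
	  live_in[b] = new_in;

	  /* Registers live at the start of B become live at the end of
	     every block that can transfer control to B.  */
	  for (p = 0; pred[b][p] >= 0; p++)
	    {
	      unsigned int old = live_out[pred[b][p]];

	      live_out[pred[b][p]] = old | new_in;
	      if (live_out[pred[b][p]] != old)
		changed = 1;
	    }
	}
    }
}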
/* Subroutines of life analysis.  */

/* Allocate the permanent data structures that represent the results
   of life analysis.  Not static since used also for stupid life analysis.  */

void
allocate_bb_life_data ()
{
  register int i;

  for (i = 0; i < n_basic_blocks; i++)
    {
      basic_block bb = BASIC_BLOCK (i);

      bb->local_set = OBSTACK_ALLOC_REG_SET (function_obstack);
      bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
      bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);
    }

  ENTRY_BLOCK_PTR->global_live_at_end
    = OBSTACK_ALLOC_REG_SET (function_obstack);
  EXIT_BLOCK_PTR->global_live_at_start
    = OBSTACK_ALLOC_REG_SET (function_obstack);

  regs_live_at_setjmp = OBSTACK_ALLOC_REG_SET (function_obstack);
}
void
allocate_reg_life_data ()
{
  int i;

  /* Recalculate the register space, in case it has grown.  Old style
     vector oriented regsets would set regset_{size,bytes} here also.  */
  allocate_reg_info (max_regno, FALSE, FALSE);

  /* Because both reg_scan and flow_analysis want to set up the REG_N_SETS
     information, explicitly reset it here.  The allocation should have
     already happened on the previous reg_scan pass.  Make sure in case
     some more registers were allocated.  */
  for (i = 0; i < max_regno; i++)
    REG_N_SETS (i) = 0;
}
/* Make each element of VECTOR point at a regset.  The vector has
   NELTS elements, and space is allocated from the ALLOC_OBSTACK
   obstack.  */

static void
init_regset_vector (vector, nelts, alloc_obstack)
     regset *vector;
     int nelts;
     struct obstack *alloc_obstack;
{
  register int i;

  for (i = 0; i < nelts; i++)
    {
      vector[i] = OBSTACK_ALLOC_REG_SET (alloc_obstack);
      CLEAR_REG_SET (vector[i]);
    }
}
/* Release any additional space allocated for each element of VECTOR point
   other than the regset header itself.  The vector has NELTS elements.  */

static void
free_regset_vector (vector, nelts)
     regset *vector;
     int nelts;
{
  register int i;

  for (i = 0; i < nelts; i++)
    FREE_REG_SET (vector[i]);
}
2608 /* Compute the registers live at the beginning of a basic block
2609 from those live at the end.
2611 When called, OLD contains those live at the end.
2612 On return, it contains those live at the beginning.
2613 FIRST and LAST are the first and last insns of the basic block.
2615 FINAL is nonzero if we are doing the final pass which is not
2616 for computing the life info (since that has already been done)
2617 but for acting on it. On this pass, we delete dead stores,
2618 set up the logical links and dead-variables lists of instructions,
2619 and merge instructions for autoincrement and autodecrement addresses.
2621 SIGNIFICANT is nonzero only the first time for each basic block.
2622 If it is nonzero, it points to a regset in which we store
2623 a 1 for each register that is set within the block.
2625 BNUM is the number of the basic block. */
2628 propagate_block (old
, first
, last
, final
, significant
, bnum
, remove_dead_code
)
2629 register regset old
;
2635 int remove_dead_code
;
  /* Find the loop depth for this block.  Ignore loop level changes in the
     middle of the basic block -- for register allocation purposes, the
     important uses will be in the blocks wholly contained within the loop,
     not in the loop pre-header or post-trailer.  */
2646 loop_depth
= BASIC_BLOCK (bnum
)->loop_depth
;
2648 dead
= ALLOCA_REG_SET ();
2649 live
= ALLOCA_REG_SET ();
2652 mem_set_list
= NULL_RTX
;
2658 /* Process the regs live at the end of the block.
2659 Mark them as not local to any one basic block. */
2660 EXECUTE_IF_SET_IN_REG_SET (old
, 0, i
,
2662 REG_BASIC_BLOCK (i
) = REG_BLOCK_GLOBAL
;
2666 /* Scan the block an insn at a time from end to beginning. */
2668 for (insn
= last
; ; insn
= prev
)
2670 prev
= PREV_INSN (insn
);
2672 if (GET_CODE (insn
) == NOTE
)
2674 /* If this is a call to `setjmp' et al,
2675 warn if any non-volatile datum is live. */
2677 if (final
&& NOTE_LINE_NUMBER (insn
) == NOTE_INSN_SETJMP
)
2678 IOR_REG_SET (regs_live_at_setjmp
, old
);
2681 /* Update the life-status of regs for this insn.
2682 First DEAD gets which regs are set in this insn
2683 then LIVE gets which regs are used in this insn.
2684 Then the regs live before the insn
2685 are those live after, with DEAD regs turned off,
2686 and then LIVE regs turned on. */
2688 else if (GET_RTX_CLASS (GET_CODE (insn
)) == 'i')
2691 rtx note
= find_reg_note (insn
, REG_RETVAL
, NULL_RTX
);
2692 int insn_is_dead
= 0;
2693 int libcall_is_dead
= 0;
2695 if (remove_dead_code
)
2697 insn_is_dead
= (insn_dead_p (PATTERN (insn
), old
, 0, REG_NOTES (insn
))
2698 /* Don't delete something that refers to volatile storage! */
2699 && ! INSN_VOLATILE (insn
));
2700 libcall_is_dead
= (insn_is_dead
&& note
!= 0
2701 && libcall_dead_p (PATTERN (insn
), old
, note
, insn
));
2704 /* If an instruction consists of just dead store(s) on final pass,
2705 "delete" it by turning it into a NOTE of type NOTE_INSN_DELETED.
2706 We could really delete it with delete_insn, but that
2707 can cause trouble for first or last insn in a basic block. */
2708 if (final
&& insn_is_dead
)
2710 PUT_CODE (insn
, NOTE
);
2711 NOTE_LINE_NUMBER (insn
) = NOTE_INSN_DELETED
;
2712 NOTE_SOURCE_FILE (insn
) = 0;
2714 /* CC0 is now known to be dead. Either this insn used it,
2715 in which case it doesn't anymore, or clobbered it,
2716 so the next insn can't use it. */
2719 /* If this insn is copying the return value from a library call,
2720 delete the entire library call. */
2721 if (libcall_is_dead
)
2723 rtx first
= XEXP (note
, 0);
2725 while (INSN_DELETED_P (first
))
2726 first
= NEXT_INSN (first
);
2731 NOTE_LINE_NUMBER (p
) = NOTE_INSN_DELETED
;
2732 NOTE_SOURCE_FILE (p
) = 0;
2738 CLEAR_REG_SET (dead
);
2739 CLEAR_REG_SET (live
);
2741 /* See if this is an increment or decrement that can be
2742 merged into a following memory address. */
2745 register rtx x
= single_set (insn
);
2747 /* Does this instruction increment or decrement a register? */
2748 if (!reload_completed
2750 && GET_CODE (SET_DEST (x
)) == REG
2751 && (GET_CODE (SET_SRC (x
)) == PLUS
2752 || GET_CODE (SET_SRC (x
)) == MINUS
)
2753 && XEXP (SET_SRC (x
), 0) == SET_DEST (x
)
2754 && GET_CODE (XEXP (SET_SRC (x
), 1)) == CONST_INT
2755 /* Ok, look for a following memory ref we can combine with.
2756 If one is found, change the memory ref to a PRE_INC
2757 or PRE_DEC, cancel this insn, and return 1.
2758 Return 0 if nothing has been done. */
2759 && try_pre_increment_1 (insn
))
2762 #endif /* AUTO_INC_DEC */
2764 /* If this is not the final pass, and this insn is copying the
2765 value of a library call and it's dead, don't scan the
2766 insns that perform the library call, so that the call's
2767 arguments are not marked live. */
2768 if (libcall_is_dead
)
2770 /* Mark the dest reg as `significant'. */
2771 mark_set_regs (old
, dead
, PATTERN (insn
), NULL_RTX
, significant
);
2773 insn
= XEXP (note
, 0);
2774 prev
= PREV_INSN (insn
);
2776 else if (GET_CODE (PATTERN (insn
)) == SET
2777 && SET_DEST (PATTERN (insn
)) == stack_pointer_rtx
2778 && GET_CODE (SET_SRC (PATTERN (insn
))) == PLUS
2779 && XEXP (SET_SRC (PATTERN (insn
)), 0) == stack_pointer_rtx
2780 && GET_CODE (XEXP (SET_SRC (PATTERN (insn
)), 1)) == CONST_INT
)
2781 /* We have an insn to pop a constant amount off the stack.
2782 (Such insns use PLUS regardless of the direction of the stack,
2783 and any insn to adjust the stack by a constant is always a pop.)
2784 These insns, if not dead stores, have no effect on life. */
2788 /* Any regs live at the time of a call instruction
2789 must not go in a register clobbered by calls.
2790 Find all regs now live and record this for them. */
2792 if (GET_CODE (insn
) == CALL_INSN
&& final
)
2793 EXECUTE_IF_SET_IN_REG_SET (old
, 0, i
,
2795 REG_N_CALLS_CROSSED (i
)++;
2798 /* LIVE gets the regs used in INSN;
2799 DEAD gets those set by it. Dead insns don't make anything
2802 mark_set_regs (old
, dead
, PATTERN (insn
),
2803 final
? insn
: NULL_RTX
, significant
);
2805 /* If an insn doesn't use CC0, it becomes dead since we
2806 assume that every insn clobbers it. So show it dead here;
2807 mark_used_regs will set it live if it is referenced. */
2811 mark_used_regs (old
, live
, PATTERN (insn
), final
, insn
);
2813 /* Sometimes we may have inserted something before INSN (such as
2814 a move) when we make an auto-inc. So ensure we will scan
2817 prev
= PREV_INSN (insn
);
2820 if (! insn_is_dead
&& GET_CODE (insn
) == CALL_INSN
)
2826 for (note
= CALL_INSN_FUNCTION_USAGE (insn
);
2828 note
= XEXP (note
, 1))
2829 if (GET_CODE (XEXP (note
, 0)) == USE
)
2830 mark_used_regs (old
, live
, SET_DEST (XEXP (note
, 0)),
2833 /* Each call clobbers all call-clobbered regs that are not
2834 global or fixed. Note that the function-value reg is a
2835 call-clobbered reg, and mark_set_regs has already had
2836 a chance to handle it. */
2838 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
2839 if (call_used_regs
[i
] && ! global_regs
[i
]
2841 SET_REGNO_REG_SET (dead
, i
);
2843 /* The stack ptr is used (honorarily) by a CALL insn. */
2844 SET_REGNO_REG_SET (live
, STACK_POINTER_REGNUM
);
2846 /* Calls may also reference any of the global registers,
2847 so they are made live. */
2848 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
2850 mark_used_regs (old
, live
,
2851 gen_rtx_REG (reg_raw_mode
[i
], i
),
2854 /* Calls also clobber memory. */
2855 mem_set_list
= NULL_RTX
;
2858 /* Update OLD for the registers used or set. */
2859 AND_COMPL_REG_SET (old
, dead
);
2860 IOR_REG_SET (old
, live
);
2864 /* On final pass, update counts of how many insns each reg is live
2867 EXECUTE_IF_SET_IN_REG_SET (old
, 0, i
,
2868 { REG_LIVE_LENGTH (i
)++; });
2875 FREE_REG_SET (dead
);
2876 FREE_REG_SET (live
);
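/* Illustrative sketch, not part of the pass: the per-insn update that
   propagate_block performs above, restated with plain bit masks.  The
   arrays DEF and USE (one element per insn, indexed in program order)
   are hypothetical stand-ins for what mark_set_regs and mark_used_regs
   compute into the DEAD and LIVE regsets.  */

static unsigned int
toy_propagate_block (live_at_end, def, use, num_insns)
     unsigned int live_at_end;
     unsigned int def[], use[];
     int num_insns;
{
  unsigned int live = live_at_end;
  int i;

  /* Scan the block an insn at a time from end to beginning.  */
  for (i = num_insns - 1; i >= 0; i--)
    {
      live &= ~def[i];	/* regs set here are dead before this insn ...  */
      live |= use[i];	/* ... and regs used here are live before it.  */
    }

  return live;		/* registers live at the start of the block */
}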
/* Return 1 if X (the body of an insn, or part of it) is just dead stores
   (SET expressions whose destinations are registers dead after the insn).
   NEEDED is the regset that says which regs are alive after the insn.

   Unless CALL_OK is non-zero, an insn is needed if it contains a CALL.

   If X is the entire body of an insn, NOTES contains the reg notes
   pertaining to the insn.  */

static int
insn_dead_p (x, needed, call_ok, notes)
     rtx x;
     regset needed;
     int call_ok;
     rtx notes ATTRIBUTE_UNUSED;
{
  enum rtx_code code = GET_CODE (x);

#ifdef AUTO_INC_DEC
  /* If flow is invoked after reload, we must take existing AUTO_INC
     expressions into account.  */
  if (reload_completed)
    {
      for ( ; notes; notes = XEXP (notes, 1))
	{
	  if (REG_NOTE_KIND (notes) == REG_INC)
	    {
	      int regno = REGNO (XEXP (notes, 0));

	      /* Don't delete insns to set global regs.  */
	      if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
		  || REGNO_REG_SET_P (needed, regno))
		return 0;
	    }
	}
    }
#endif

  /* If setting something that's a reg or part of one,
     see if that register's altered value will be live.  */

  if (code == SET)
    {
      rtx r = SET_DEST (x);

      /* A SET that is a subroutine call cannot be dead.  */
      if (! call_ok && GET_CODE (SET_SRC (x)) == CALL)
	return 0;

#ifdef HAVE_cc0
      if (GET_CODE (r) == CC0)
	return ! cc0_live;
#endif

      if (GET_CODE (r) == MEM && ! MEM_VOLATILE_P (r))
	{
	  rtx temp;

	  /* Walk the set of memory locations we are currently tracking
	     and see if one is an identical match to this memory location.
	     If so, this memory write is dead (remember, we're walking
	     backwards from the end of the block to the start).  */
	  temp = mem_set_list;
	  while (temp)
	    {
	      if (rtx_equal_p (XEXP (temp, 0), r))
		return 1;
	      temp = XEXP (temp, 1);
	    }
	}

      while (GET_CODE (r) == SUBREG || GET_CODE (r) == STRICT_LOW_PART
	     || GET_CODE (r) == ZERO_EXTRACT)
	r = XEXP (r, 0);

      if (GET_CODE (r) == REG)
	{
	  int regno = REGNO (r);

	  /* Don't delete insns to set global regs.  */
	  if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
	      /* Make sure insns to set frame pointer aren't deleted.  */
	      || regno == FRAME_POINTER_REGNUM
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
	      || regno == HARD_FRAME_POINTER_REGNUM
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
	      /* Make sure insns to set arg pointer are never deleted
		 (if the arg pointer isn't fixed, there will be a USE for
		 it, so we can treat it normally).  */
	      || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
#endif
	      || REGNO_REG_SET_P (needed, regno))
	    return 0;

	  /* If this is a hard register, verify that subsequent words are
	     not needed.  */
	  if (regno < FIRST_PSEUDO_REGISTER)
	    {
	      int n = HARD_REGNO_NREGS (regno, GET_MODE (r));

	      while (--n > 0)
		if (REGNO_REG_SET_P (needed, regno + n))
		  return 0;
	    }

	  return 1;
	}
    }

  /* If performing several activities,
     insn is dead if each activity is individually dead.
     Also, CLOBBERs and USEs can be ignored; a CLOBBER or USE
     that's inside a PARALLEL doesn't make the insn worth keeping.  */
  else if (code == PARALLEL)
    {
      int i = XVECLEN (x, 0);

      for (i--; i >= 0; i--)
	if (GET_CODE (XVECEXP (x, 0, i)) != CLOBBER
	    && GET_CODE (XVECEXP (x, 0, i)) != USE
	    && ! insn_dead_p (XVECEXP (x, 0, i), needed, call_ok, NULL_RTX))
	  return 0;

      return 1;
    }

  /* A CLOBBER of a pseudo-register that is dead serves no purpose.  That
     is not necessarily true for hard registers.  */
  else if (code == CLOBBER && GET_CODE (XEXP (x, 0)) == REG
	   && REGNO (XEXP (x, 0)) >= FIRST_PSEUDO_REGISTER
	   && ! REGNO_REG_SET_P (needed, REGNO (XEXP (x, 0))))
    return 1;

  /* We do not check other CLOBBER or USE here.  An insn consisting of just
     a CLOBBER or just a USE should not be deleted.  */

  return 0;
}
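/* A worked example of the test above, with invented register numbers:
   if the registers needed after an insn are 0 and 2 (binary mask 0101),
   then (set (reg 1) (plus (reg 0) (reg 2))) is a dead store -- reg 1 is
   not needed -- and the final pass will turn the insn into a
   NOTE_INSN_DELETED note.  The helper below restates just that core check
   on a bit mask; the real insn_dead_p must in addition reject volatile
   references, CALLs, global registers and the frame/arg pointers, as
   shown above.  The toy_ name is hypothetical.  */

static int
toy_store_dead_p (regno, needed)
     int regno;
     unsigned int needed;
{
  return (needed & (1U << regno)) == 0;
}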
/* If X is the pattern of the last insn in a libcall, and assuming X is dead,
   return 1 if the entire library call is dead.
   This is true if X copies a register (hard or pseudo)
   and if the hard return reg of the call insn is dead.
   (The caller should have tested the destination of X already for death.)

   If this insn doesn't just copy a register, then we don't
   have an ordinary libcall.  In that case, cse could not have
   managed to substitute the source for the dest later on,
   so we can assume the libcall is dead.

   NEEDED is the bit vector of pseudoregs live before this insn.
   NOTE is the REG_RETVAL note of the insn.  INSN is the insn itself.  */

static int
libcall_dead_p (x, needed, note, insn)
     rtx x;
     regset needed;
     rtx note;
     rtx insn;
{
  register RTX_CODE code = GET_CODE (x);

  if (code == SET)
    {
      register rtx r = SET_SRC (x);
      if (GET_CODE (r) == REG)
	{
	  rtx call = XEXP (note, 0);
	  rtx call_pat;
	  register int i;

	  /* Find the call insn.  */
	  while (call != insn && GET_CODE (call) != CALL_INSN)
	    call = NEXT_INSN (call);

	  /* If there is none, do nothing special,
	     since ordinary death handling can understand these insns.  */
	  if (call == insn)
	    return 0;

	  /* See if the hard reg holding the value is dead.
	     If this is a PARALLEL, find the call within it.  */
	  call_pat = PATTERN (call);
	  if (GET_CODE (call_pat) == PARALLEL)
	    {
	      for (i = XVECLEN (call_pat, 0) - 1; i >= 0; i--)
		if (GET_CODE (XVECEXP (call_pat, 0, i)) == SET
		    && GET_CODE (SET_SRC (XVECEXP (call_pat, 0, i))) == CALL)
		  break;

	      /* This may be a library call that is returning a value
		 via invisible pointer.  Do nothing special, since
		 ordinary death handling can understand these insns.  */
	      if (i < 0)
		return 0;

	      call_pat = XVECEXP (call_pat, 0, i);
	    }

	  return insn_dead_p (call_pat, needed, 1, REG_NOTES (call));
	}
    }
  return 1;
}
/* Return 1 if register REGNO was used before it was set, i.e. if it is
   live at function entry.  Don't count global register variables, variables
   in registers that can be used for function arg passing, or variables in
   fixed hard registers.  */

int
regno_uninitialized (regno)
     int regno;
{
  if (n_basic_blocks == 0
      || (regno < FIRST_PSEUDO_REGISTER
	  && (global_regs[regno]
	      || fixed_regs[regno]
	      || FUNCTION_ARG_REGNO_P (regno))))
    return 0;

  return REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno);
}
/* 1 if register REGNO was alive at a place where `setjmp' was called
   and was set more than once or is an argument.
   Such regs may be clobbered by `longjmp'.  */

int
regno_clobbered_at_setjmp (regno)
     int regno;
{
  if (n_basic_blocks == 0)
    return 0;

  return ((REG_N_SETS (regno) > 1
	   || REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno))
	  && REGNO_REG_SET_P (regs_live_at_setjmp, regno));
}
/* INSN references memory, possibly using autoincrement addressing modes.
   Find any entries on the mem_set_list that need to be invalidated due
   to an address change.  */

static void
invalidate_mems_from_autoinc (insn)
     rtx insn;
{
  rtx note = REG_NOTES (insn);
  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    {
      if (REG_NOTE_KIND (note) == REG_INC)
	{
	  rtx temp = mem_set_list;
	  rtx prev = NULL_RTX;

	  while (temp)
	    {
	      if (reg_overlap_mentioned_p (XEXP (note, 0), XEXP (temp, 0)))
		{
		  /* Splice temp out of list.  */
		  if (prev)
		    XEXP (prev, 1) = XEXP (temp, 1);
		  else
		    mem_set_list = XEXP (temp, 1);
		}
	      else
		prev = temp;
	      temp = XEXP (temp, 1);
	    }
	}
    }
}
/* Process the registers that are set within X.
   Their bits are set to 1 in the regset DEAD,
   because they are dead prior to this insn.

   If INSN is nonzero, it is the insn being processed
   and the fact that it is nonzero implies this is the FINAL pass
   in propagate_block.  In this case, various info about register
   usage is stored, LOG_LINKS fields of insns are set up.  */

static void
mark_set_regs (needed, dead, x, insn, significant)
     regset needed;
     regset dead;
     rtx x;
     rtx insn;
     regset significant;
{
  register RTX_CODE code = GET_CODE (x);

  if (code == SET || code == CLOBBER)
    mark_set_1 (needed, dead, x, insn, significant);
  else if (code == PARALLEL)
    {
      register int i;

      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
	{
	  code = GET_CODE (XVECEXP (x, 0, i));
	  if (code == SET || code == CLOBBER)
	    mark_set_1 (needed, dead, XVECEXP (x, 0, i), insn, significant);
	}
    }
}
3184 /* Process a single SET rtx, X. */
3187 mark_set_1 (needed
, dead
, x
, insn
, significant
)
3195 register rtx reg
= SET_DEST (x
);
3197 /* Some targets place small structures in registers for
3198 return values of functions. We have to detect this
3199 case specially here to get correct flow information. */
3200 if (GET_CODE (reg
) == PARALLEL
3201 && GET_MODE (reg
) == BLKmode
)
3205 for (i
= XVECLEN (reg
, 0) - 1; i
>= 0; i
--)
3206 mark_set_1 (needed
, dead
, XVECEXP (reg
, 0, i
), insn
, significant
);
3210 /* Modifying just one hardware register of a multi-reg value
3211 or just a byte field of a register
3212 does not mean the value from before this insn is now dead.
3213 But it does mean liveness of that register at the end of the block
3216 Within mark_set_1, however, we treat it as if the register is
3217 indeed modified. mark_used_regs will, however, also treat this
3218 register as being used. Thus, we treat these insns as setting a
3219 new value for the register as a function of its old value. This
     causes LOG_LINKS to be made appropriately and this will help combine.  */
3222 while (GET_CODE (reg
) == SUBREG
|| GET_CODE (reg
) == ZERO_EXTRACT
3223 || GET_CODE (reg
) == SIGN_EXTRACT
3224 || GET_CODE (reg
) == STRICT_LOW_PART
)
3225 reg
= XEXP (reg
, 0);
3227 /* If this set is a MEM, then it kills any aliased writes.
3228 If this set is a REG, then it kills any MEMs which use the reg. */
3229 if (GET_CODE (reg
) == MEM
3230 || GET_CODE (reg
) == REG
)
3232 rtx temp
= mem_set_list
;
3233 rtx prev
= NULL_RTX
;
3237 if ((GET_CODE (reg
) == MEM
3238 && output_dependence (XEXP (temp
, 0), reg
))
3239 || (GET_CODE (reg
) == REG
3240 && reg_overlap_mentioned_p (reg
, XEXP (temp
, 0))))
3242 /* Splice this entry out of the list. */
3244 XEXP (prev
, 1) = XEXP (temp
, 1);
3246 mem_set_list
= XEXP (temp
, 1);
3250 temp
= XEXP (temp
, 1);
      /* If the memory reference had embedded side effects (autoincrement
	 address modes), then we may need to kill some entries on the
	 memory set list.  */
3257 if (insn
&& GET_CODE (reg
) == MEM
)
3258 invalidate_mems_from_autoinc (insn
);
3260 if (GET_CODE (reg
) == MEM
&& ! side_effects_p (reg
)
3261 /* There are no REG_INC notes for SP, so we can't assume we'll see
3262 everything that invalidates it. To be safe, don't eliminate any
3263 stores though SP; none of them should be redundant anyway. */
3264 && ! reg_mentioned_p (stack_pointer_rtx
, reg
))
3265 mem_set_list
= gen_rtx_EXPR_LIST (VOIDmode
, reg
, mem_set_list
);
3267 if (GET_CODE (reg
) == REG
3268 && (regno
= REGNO (reg
), regno
!= FRAME_POINTER_REGNUM
)
3269 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3270 && regno
!= HARD_FRAME_POINTER_REGNUM
3272 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3273 && ! (regno
== ARG_POINTER_REGNUM
&& fixed_regs
[regno
])
3275 && ! (regno
< FIRST_PSEUDO_REGISTER
&& global_regs
[regno
]))
3276 /* && regno != STACK_POINTER_REGNUM) -- let's try without this. */
3278 int some_needed
= REGNO_REG_SET_P (needed
, regno
);
3279 int some_not_needed
= ! some_needed
;
3281 /* Mark it as a significant register for this basic block. */
3283 SET_REGNO_REG_SET (significant
, regno
);
3285 /* Mark it as dead before this insn. */
3286 SET_REGNO_REG_SET (dead
, regno
);
3288 /* A hard reg in a wide mode may really be multiple registers.
3289 If so, mark all of them just like the first. */
3290 if (regno
< FIRST_PSEUDO_REGISTER
)
3294 /* Nothing below is needed for the stack pointer; get out asap.
3295 Eg, log links aren't needed, since combine won't use them. */
3296 if (regno
== STACK_POINTER_REGNUM
)
3299 n
= HARD_REGNO_NREGS (regno
, GET_MODE (reg
));
3302 int regno_n
= regno
+ n
;
3303 int needed_regno
= REGNO_REG_SET_P (needed
, regno_n
);
3305 SET_REGNO_REG_SET (significant
, regno_n
);
3307 SET_REGNO_REG_SET (dead
, regno_n
);
3308 some_needed
|= needed_regno
;
3309 some_not_needed
|= ! needed_regno
;
3312 /* Additional data to record if this is the final pass. */
3315 register rtx y
= reg_next_use
[regno
];
3316 register int blocknum
= BLOCK_NUM (insn
);
3318 /* If this is a hard reg, record this function uses the reg. */
3320 if (regno
< FIRST_PSEUDO_REGISTER
)
3323 int endregno
= regno
+ HARD_REGNO_NREGS (regno
, GET_MODE (reg
));
3325 for (i
= regno
; i
< endregno
; i
++)
3327 /* The next use is no longer "next", since a store
3329 reg_next_use
[i
] = 0;
3331 regs_ever_live
[i
] = 1;
3337 /* The next use is no longer "next", since a store
3339 reg_next_use
[regno
] = 0;
3341 /* Keep track of which basic blocks each reg appears in. */
3343 if (REG_BASIC_BLOCK (regno
) == REG_BLOCK_UNKNOWN
)
3344 REG_BASIC_BLOCK (regno
) = blocknum
;
3345 else if (REG_BASIC_BLOCK (regno
) != blocknum
)
3346 REG_BASIC_BLOCK (regno
) = REG_BLOCK_GLOBAL
;
3348 /* Count (weighted) references, stores, etc. This counts a
3349 register twice if it is modified, but that is correct. */
3350 REG_N_SETS (regno
)++;
3352 REG_N_REFS (regno
) += loop_depth
;
3354 /* The insns where a reg is live are normally counted
3355 elsewhere, but we want the count to include the insn
3356 where the reg is set, and the normal counting mechanism
3357 would not count it. */
3358 REG_LIVE_LENGTH (regno
)++;
3361 if (! some_not_needed
)
3363 /* Make a logical link from the next following insn
3364 that uses this register, back to this insn.
3365 The following insns have already been processed.
3367 We don't build a LOG_LINK for hard registers containing
3368 in ASM_OPERANDs. If these registers get replaced,
3369 we might wind up changing the semantics of the insn,
3370 even if reload can make what appear to be valid assignments
3372 if (y
&& (BLOCK_NUM (y
) == blocknum
)
3373 && (regno
>= FIRST_PSEUDO_REGISTER
3374 || asm_noperands (PATTERN (y
)) < 0))
3376 = gen_rtx_INSN_LIST (VOIDmode
, insn
, LOG_LINKS (y
));
3378 else if (! some_needed
)
3380 /* Note that dead stores have already been deleted when possible
3381 If we get here, we have found a dead store that cannot
3382 be eliminated (because the same insn does something useful).
3383 Indicate this by marking the reg being set as dying here. */
3385 = gen_rtx_EXPR_LIST (REG_UNUSED
, reg
, REG_NOTES (insn
));
3386 REG_N_DEATHS (REGNO (reg
))++;
3390 /* This is a case where we have a multi-word hard register
3391 and some, but not all, of the words of the register are
3392 needed in subsequent insns. Write REG_UNUSED notes
3393 for those parts that were not needed. This case should
3398 for (i
= HARD_REGNO_NREGS (regno
, GET_MODE (reg
)) - 1;
3400 if (!REGNO_REG_SET_P (needed
, regno
+ i
))
3402 = gen_rtx_EXPR_LIST (REG_UNUSED
,
3403 gen_rtx_REG (reg_raw_mode
[regno
+ i
],
3409 else if (GET_CODE (reg
) == REG
)
3410 reg_next_use
[regno
] = 0;
3412 /* If this is the last pass and this is a SCRATCH, show it will be dying
3413 here and count it. */
3414 else if (GET_CODE (reg
) == SCRATCH
&& insn
!= 0)
3417 = gen_rtx_EXPR_LIST (REG_UNUSED
, reg
, REG_NOTES (insn
));
3423 /* X is a MEM found in INSN. See if we can convert it into an auto-increment
3427 find_auto_inc (needed
, x
, insn
)
3432 rtx addr
= XEXP (x
, 0);
3433 HOST_WIDE_INT offset
= 0;
3436 /* Here we detect use of an index register which might be good for
3437 postincrement, postdecrement, preincrement, or predecrement. */
3439 if (GET_CODE (addr
) == PLUS
&& GET_CODE (XEXP (addr
, 1)) == CONST_INT
)
3440 offset
= INTVAL (XEXP (addr
, 1)), addr
= XEXP (addr
, 0);
3442 if (GET_CODE (addr
) == REG
)
3445 register int size
= GET_MODE_SIZE (GET_MODE (x
));
3448 int regno
= REGNO (addr
);
3450 /* Is the next use an increment that might make auto-increment? */
3451 if ((incr
= reg_next_use
[regno
]) != 0
3452 && (set
= single_set (incr
)) != 0
3453 && GET_CODE (set
) == SET
3454 && BLOCK_NUM (incr
) == BLOCK_NUM (insn
)
3455 /* Can't add side effects to jumps; if reg is spilled and
3456 reloaded, there's no way to store back the altered value. */
3457 && GET_CODE (insn
) != JUMP_INSN
3458 && (y
= SET_SRC (set
), GET_CODE (y
) == PLUS
)
3459 && XEXP (y
, 0) == addr
3460 && GET_CODE (XEXP (y
, 1)) == CONST_INT
3461 && ((HAVE_POST_INCREMENT
3462 && (INTVAL (XEXP (y
, 1)) == size
&& offset
== 0))
3463 || (HAVE_POST_DECREMENT
3464 && (INTVAL (XEXP (y
, 1)) == - size
&& offset
== 0))
3465 || (HAVE_PRE_INCREMENT
3466 && (INTVAL (XEXP (y
, 1)) == size
&& offset
== size
))
3467 || (HAVE_PRE_DECREMENT
3468 && (INTVAL (XEXP (y
, 1)) == - size
&& offset
== - size
)))
3469 /* Make sure this reg appears only once in this insn. */
3470 && (use
= find_use_as_address (PATTERN (insn
), addr
, offset
),
3471 use
!= 0 && use
!= (rtx
) 1))
3473 rtx q
= SET_DEST (set
);
3474 enum rtx_code inc_code
= (INTVAL (XEXP (y
, 1)) == size
3475 ? (offset
? PRE_INC
: POST_INC
)
3476 : (offset
? PRE_DEC
: POST_DEC
));
3478 if (dead_or_set_p (incr
, addr
))
3480 /* This is the simple case. Try to make the auto-inc. If
3481 we can't, we are done. Otherwise, we will do any
3482 needed updates below. */
3483 if (! validate_change (insn
, &XEXP (x
, 0),
3484 gen_rtx_fmt_e (inc_code
, Pmode
, addr
),
3488 else if (GET_CODE (q
) == REG
3489 /* PREV_INSN used here to check the semi-open interval
3491 && ! reg_used_between_p (q
, PREV_INSN (insn
), incr
)
3492 /* We must also check for sets of q as q may be
3493 a call clobbered hard register and there may
3494 be a call between PREV_INSN (insn) and incr. */
3495 && ! reg_set_between_p (q
, PREV_INSN (insn
), incr
))
3497 /* We have *p followed sometime later by q = p+size.
3498 Both p and q must be live afterward,
3499 and q is not used between INSN and its assignment.
3500 Change it to q = p, ...*q..., q = q+size.
3501 Then fall into the usual case. */
3506 emit_move_insn (q
, addr
);
3507 insns
= get_insns ();
3510 bb
= BLOCK_FOR_INSN (insn
);
3511 for (temp
= insns
; temp
; temp
= NEXT_INSN (temp
))
3512 set_block_for_insn (temp
, bb
);
3514 /* If we can't make the auto-inc, or can't make the
3515 replacement into Y, exit. There's no point in making
3516 the change below if we can't do the auto-inc and doing
3517 so is not correct in the pre-inc case. */
3519 validate_change (insn
, &XEXP (x
, 0),
3520 gen_rtx_fmt_e (inc_code
, Pmode
, q
),
3522 validate_change (incr
, &XEXP (y
, 0), q
, 1);
3523 if (! apply_change_group ())
3526 /* We now know we'll be doing this change, so emit the
3527 new insn(s) and do the updates. */
3528 emit_insns_before (insns
, insn
);
3530 if (BLOCK_FOR_INSN (insn
)->head
== insn
)
3531 BLOCK_FOR_INSN (insn
)->head
= insns
;
3533 /* INCR will become a NOTE and INSN won't contain a
3534 use of ADDR. If a use of ADDR was just placed in
3535 the insn before INSN, make that the next use.
3536 Otherwise, invalidate it. */
3537 if (GET_CODE (PREV_INSN (insn
)) == INSN
3538 && GET_CODE (PATTERN (PREV_INSN (insn
))) == SET
3539 && SET_SRC (PATTERN (PREV_INSN (insn
))) == addr
)
3540 reg_next_use
[regno
] = PREV_INSN (insn
);
3542 reg_next_use
[regno
] = 0;
3547 /* REGNO is now used in INCR which is below INSN, but
3548 it previously wasn't live here. If we don't mark
3549 it as needed, we'll put a REG_DEAD note for it
3550 on this insn, which is incorrect. */
3551 SET_REGNO_REG_SET (needed
, regno
);
3553 /* If there are any calls between INSN and INCR, show
3554 that REGNO now crosses them. */
3555 for (temp
= insn
; temp
!= incr
; temp
= NEXT_INSN (temp
))
3556 if (GET_CODE (temp
) == CALL_INSN
)
3557 REG_N_CALLS_CROSSED (regno
)++;
3562 /* If we haven't returned, it means we were able to make the
3563 auto-inc, so update the status. First, record that this insn
3564 has an implicit side effect. */
3567 = gen_rtx_EXPR_LIST (REG_INC
, addr
, REG_NOTES (insn
));
3569 /* Modify the old increment-insn to simply copy
3570 the already-incremented value of our register. */
3571 if (! validate_change (incr
, &SET_SRC (set
), addr
, 0))
3574 /* If that makes it a no-op (copying the register into itself) delete
3575 it so it won't appear to be a "use" and a "set" of this
3577 if (SET_DEST (set
) == addr
)
3579 PUT_CODE (incr
, NOTE
);
3580 NOTE_LINE_NUMBER (incr
) = NOTE_INSN_DELETED
;
3581 NOTE_SOURCE_FILE (incr
) = 0;
3584 if (regno
>= FIRST_PSEUDO_REGISTER
)
3586 /* Count an extra reference to the reg. When a reg is
3587 incremented, spilling it is worse, so we want to make
3588 that less likely. */
3589 REG_N_REFS (regno
) += loop_depth
;
3591 /* Count the increment as a setting of the register,
3592 even though it isn't a SET in rtl. */
3593 REG_N_SETS (regno
)++;
3598 #endif /* AUTO_INC_DEC */
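/* Illustrative sketch (hypothetical names, not used by the compiler) of
   the form selection performed above when an insn `reg = reg +/- size'
   is combined with a nearby memory reference SIZE bytes wide: DELTA is
   the constant added to the register, OFFSET the displacement used in
   the memory reference.  The HAVE_{PRE,POST}_{INC,DEC}REMENT availability
   checks of the real code are omitted here.  */

enum toy_inc_form
{
  TOY_NO_INC, TOY_POST_INC, TOY_POST_DEC, TOY_PRE_INC, TOY_PRE_DEC
};

static enum toy_inc_form
toy_choose_inc_form (delta, offset, size)
     long delta, offset, size;
{
  if (delta == size && offset == 0)
    return TOY_POST_INC;	/* use *p, then p += size  */
  if (delta == -size && offset == 0)
    return TOY_POST_DEC;	/* use *p, then p -= size  */
  if (delta == size && offset == size)
    return TOY_PRE_INC;		/* p += size, then use *p  */
  if (delta == -size && offset == -size)
    return TOY_PRE_DEC;		/* p -= size, then use *p  */
  return TOY_NO_INC;
}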
3600 /* Scan expression X and store a 1-bit in LIVE for each reg it uses.
3601 This is done assuming the registers needed from X
3602 are those that have 1-bits in NEEDED.
3604 On the final pass, FINAL is 1. This means try for autoincrement
3605 and count the uses and deaths of each pseudo-reg.
3607 INSN is the containing instruction. If INSN is dead, this function is not
3611 mark_used_regs (needed
, live
, x
, final
, insn
)
3618 register RTX_CODE code
;
3623 code
= GET_CODE (x
);
3643 /* If we are clobbering a MEM, mark any registers inside the address
3645 if (GET_CODE (XEXP (x
, 0)) == MEM
)
3646 mark_used_regs (needed
, live
, XEXP (XEXP (x
, 0), 0), final
, insn
);
3650 /* Invalidate the data for the last MEM stored, but only if MEM is
3651 something that can be stored into. */
3652 if (GET_CODE (XEXP (x
, 0)) == SYMBOL_REF
3653 && CONSTANT_POOL_ADDRESS_P (XEXP (x
, 0)))
3654 ; /* needn't clear the memory set list */
3657 rtx temp
= mem_set_list
;
3658 rtx prev
= NULL_RTX
;
3662 if (anti_dependence (XEXP (temp
, 0), x
))
3664 /* Splice temp out of the list. */
3666 XEXP (prev
, 1) = XEXP (temp
, 1);
3668 mem_set_list
= XEXP (temp
, 1);
3672 temp
= XEXP (temp
, 1);
      /* If the memory reference had embedded side effects (autoincrement
	 address modes), then we may need to kill some entries on the
	 memory set list.  */
3680 invalidate_mems_from_autoinc (insn
);
3684 find_auto_inc (needed
, x
, insn
);
3689 if (GET_CODE (SUBREG_REG (x
)) == REG
3690 && REGNO (SUBREG_REG (x
)) >= FIRST_PSEUDO_REGISTER
3691 && (GET_MODE_SIZE (GET_MODE (x
))
3692 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
)))))
3693 REG_CHANGES_SIZE (REGNO (SUBREG_REG (x
))) = 1;
3695 /* While we're here, optimize this case. */
3698 /* In case the SUBREG is not of a register, don't optimize */
3699 if (GET_CODE (x
) != REG
)
3701 mark_used_regs (needed
, live
, x
, final
, insn
);
3705 /* ... fall through ... */
3708 /* See a register other than being set
3709 => mark it as needed. */
3713 int some_needed
= REGNO_REG_SET_P (needed
, regno
);
3714 int some_not_needed
= ! some_needed
;
3716 SET_REGNO_REG_SET (live
, regno
);
3718 /* A hard reg in a wide mode may really be multiple registers.
3719 If so, mark all of them just like the first. */
3720 if (regno
< FIRST_PSEUDO_REGISTER
)
3724 /* For stack ptr or fixed arg pointer,
3725 nothing below can be necessary, so waste no more time. */
3726 if (regno
== STACK_POINTER_REGNUM
3727 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3728 || regno
== HARD_FRAME_POINTER_REGNUM
3730 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3731 || (regno
== ARG_POINTER_REGNUM
&& fixed_regs
[regno
])
3733 || regno
== FRAME_POINTER_REGNUM
)
3735 /* If this is a register we are going to try to eliminate,
3736 don't mark it live here. If we are successful in
3737 eliminating it, it need not be live unless it is used for
3738 pseudos, in which case it will have been set live when
3739 it was allocated to the pseudos. If the register will not
3740 be eliminated, reload will set it live at that point. */
3742 if (! TEST_HARD_REG_BIT (elim_reg_set
, regno
))
3743 regs_ever_live
[regno
] = 1;
3746 /* No death notes for global register variables;
3747 their values are live after this function exits. */
3748 if (global_regs
[regno
])
3751 reg_next_use
[regno
] = insn
;
3755 n
= HARD_REGNO_NREGS (regno
, GET_MODE (x
));
3758 int regno_n
= regno
+ n
;
3759 int needed_regno
= REGNO_REG_SET_P (needed
, regno_n
);
3761 SET_REGNO_REG_SET (live
, regno_n
);
3762 some_needed
|= needed_regno
;
3763 some_not_needed
|= ! needed_regno
;
3768 /* Record where each reg is used, so when the reg
3769 is set we know the next insn that uses it. */
3771 reg_next_use
[regno
] = insn
;
3773 if (regno
< FIRST_PSEUDO_REGISTER
)
3775 /* If a hard reg is being used,
3776 record that this function does use it. */
3778 i
= HARD_REGNO_NREGS (regno
, GET_MODE (x
));
3782 regs_ever_live
[regno
+ --i
] = 1;
3787 /* Keep track of which basic block each reg appears in. */
3789 register int blocknum
= BLOCK_NUM (insn
);
3791 if (REG_BASIC_BLOCK (regno
) == REG_BLOCK_UNKNOWN
)
3792 REG_BASIC_BLOCK (regno
) = blocknum
;
3793 else if (REG_BASIC_BLOCK (regno
) != blocknum
)
3794 REG_BASIC_BLOCK (regno
) = REG_BLOCK_GLOBAL
;
3796 /* Count (weighted) number of uses of each reg. */
3798 REG_N_REFS (regno
) += loop_depth
;
3801 /* Record and count the insns in which a reg dies.
3802 If it is used in this insn and was dead below the insn
3803 then it dies in this insn. If it was set in this insn,
3804 we do not make a REG_DEAD note; likewise if we already
3805 made such a note. */
3808 && ! dead_or_set_p (insn
, x
)
3810 && (regno
>= FIRST_PSEUDO_REGISTER
|| ! fixed_regs
[regno
])
3814 /* Check for the case where the register dying partially
3815 overlaps the register set by this insn. */
3816 if (regno
< FIRST_PSEUDO_REGISTER
3817 && HARD_REGNO_NREGS (regno
, GET_MODE (x
)) > 1)
3819 int n
= HARD_REGNO_NREGS (regno
, GET_MODE (x
));
3821 some_needed
|= dead_or_set_regno_p (insn
, regno
+ n
);
3824 /* If none of the words in X is needed, make a REG_DEAD
3825 note. Otherwise, we must make partial REG_DEAD notes. */
3829 = gen_rtx_EXPR_LIST (REG_DEAD
, x
, REG_NOTES (insn
));
3830 REG_N_DEATHS (regno
)++;
3836 /* Don't make a REG_DEAD note for a part of a register
3837 that is set in the insn. */
3839 for (i
= HARD_REGNO_NREGS (regno
, GET_MODE (x
)) - 1;
3841 if (!REGNO_REG_SET_P (needed
, regno
+ i
)
3842 && ! dead_or_set_regno_p (insn
, regno
+ i
))
3844 = gen_rtx_EXPR_LIST (REG_DEAD
,
3845 gen_rtx_REG (reg_raw_mode
[regno
+ i
],
3856 register rtx testreg
= SET_DEST (x
);
3859 /* If storing into MEM, don't show it as being used. But do
3860 show the address as being used. */
3861 if (GET_CODE (testreg
) == MEM
)
3865 find_auto_inc (needed
, testreg
, insn
);
3867 mark_used_regs (needed
, live
, XEXP (testreg
, 0), final
, insn
);
3868 mark_used_regs (needed
, live
, SET_SRC (x
), final
, insn
);
3872 /* Storing in STRICT_LOW_PART is like storing in a reg
3873 in that this SET might be dead, so ignore it in TESTREG.
3874 but in some other ways it is like using the reg.
3876 Storing in a SUBREG or a bit field is like storing the entire
3877 register in that if the register's value is not used
3878 then this SET is not needed. */
3879 while (GET_CODE (testreg
) == STRICT_LOW_PART
3880 || GET_CODE (testreg
) == ZERO_EXTRACT
3881 || GET_CODE (testreg
) == SIGN_EXTRACT
3882 || GET_CODE (testreg
) == SUBREG
)
3884 if (GET_CODE (testreg
) == SUBREG
3885 && GET_CODE (SUBREG_REG (testreg
)) == REG
3886 && REGNO (SUBREG_REG (testreg
)) >= FIRST_PSEUDO_REGISTER
3887 && (GET_MODE_SIZE (GET_MODE (testreg
))
3888 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (testreg
)))))
3889 REG_CHANGES_SIZE (REGNO (SUBREG_REG (testreg
))) = 1;
3891 /* Modifying a single register in an alternate mode
3892 does not use any of the old value. But these other
3893 ways of storing in a register do use the old value. */
3894 if (GET_CODE (testreg
) == SUBREG
3895 && !(REG_SIZE (SUBREG_REG (testreg
)) > REG_SIZE (testreg
)))
3900 testreg
= XEXP (testreg
, 0);
3903 /* If this is a store into a register,
3904 recursively scan the value being stored. */
3906 if ((GET_CODE (testreg
) == PARALLEL
3907 && GET_MODE (testreg
) == BLKmode
)
3908 || (GET_CODE (testreg
) == REG
3909 && (regno
= REGNO (testreg
), regno
!= FRAME_POINTER_REGNUM
)
3910 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3911 && regno
!= HARD_FRAME_POINTER_REGNUM
3913 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3914 && ! (regno
== ARG_POINTER_REGNUM
&& fixed_regs
[regno
])
3917 /* We used to exclude global_regs here, but that seems wrong.
3918 Storing in them is like storing in mem. */
3920 mark_used_regs (needed
, live
, SET_SRC (x
), final
, insn
);
3922 mark_used_regs (needed
, live
, SET_DEST (x
), final
, insn
);
3929 /* If exiting needs the right stack value, consider this insn as
3930 using the stack pointer. In any event, consider it as using
3931 all global registers and all registers used by return. */
3932 if (! EXIT_IGNORE_STACK
3933 || (! FRAME_POINTER_REQUIRED
3934 && ! current_function_calls_alloca
3935 && flag_omit_frame_pointer
)
3936 || current_function_sp_is_unchanging
)
3937 SET_REGNO_REG_SET (live
, STACK_POINTER_REGNUM
);
3939 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
3941 #ifdef EPILOGUE_USES
3942 || EPILOGUE_USES (i
)
3945 SET_REGNO_REG_SET (live
, i
);
3949 case UNSPEC_VOLATILE
:
3953 /* Traditional and volatile asm instructions must be considered to use
3954 and clobber all hard registers, all pseudo-registers and all of
3955 memory. So must TRAP_IF and UNSPEC_VOLATILE operations.
3957 Consider for instance a volatile asm that changes the fpu rounding
3958 mode. An insn should not be moved across this even if it only uses
3959 pseudo-regs because it might give an incorrectly rounded result.
3961 ?!? Unfortunately, marking all hard registers as live causes massive
3962 problems for the register allocator and marking all pseudos as live
3963 creates mountains of uninitialized variable warnings.
3965 So for now, just clear the memory set list and mark any regs
3966 we can find in ASM_OPERANDS as used. */
3967 if (code
!= ASM_OPERANDS
|| MEM_VOLATILE_P (x
))
3968 mem_set_list
= NULL_RTX
;
3970 /* For all ASM_OPERANDS, we must traverse the vector of input operands.
3971 We can not just fall through here since then we would be confused
3972 by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate
3973 traditional asms unlike their normal usage. */
3974 if (code
== ASM_OPERANDS
)
3978 for (j
= 0; j
< ASM_OPERANDS_INPUT_LENGTH (x
); j
++)
3979 mark_used_regs (needed
, live
, ASM_OPERANDS_INPUT (x
, j
),
3990 /* Recursively scan the operands of this expression. */
3993 register char *fmt
= GET_RTX_FORMAT (code
);
3996 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
4000 /* Tail recursive case: save a function call level. */
4006 mark_used_regs (needed
, live
, XEXP (x
, i
), final
, insn
);
4008 else if (fmt
[i
] == 'E')
4011 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
4012 mark_used_regs (needed
, live
, XVECEXP (x
, i
, j
), final
, insn
);
#ifdef AUTO_INC_DEC

static int
try_pre_increment_1 (insn)
     rtx insn;
{
  /* Find the next use of this reg.  If in same basic block,
     make it do pre-increment or pre-decrement if appropriate.  */
  rtx x = single_set (insn);
  HOST_WIDE_INT amount = ((GET_CODE (SET_SRC (x)) == PLUS ? 1 : -1)
			  * INTVAL (XEXP (SET_SRC (x), 1)));
  int regno = REGNO (SET_DEST (x));
  rtx y = reg_next_use[regno];
  if (y != 0
      && BLOCK_NUM (y) == BLOCK_NUM (insn)
      /* Don't do this if the reg dies, or gets set in y; a standard addressing
	 mode would be better.  */
      && ! dead_or_set_p (y, SET_DEST (x))
      && try_pre_increment (y, SET_DEST (x), amount))
    {
      /* We have found a suitable auto-increment
	 and already changed insn Y to do it.
	 So flush this increment-instruction.  */
      PUT_CODE (insn, NOTE);
      NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
      NOTE_SOURCE_FILE (insn) = 0;

      /* Count a reference to this reg for the increment
	 insn we are deleting.  When a reg is incremented,
	 spilling it is worse, so we want to make that
	 less likely.  */
      if (regno >= FIRST_PSEUDO_REGISTER)
	{
	  REG_N_REFS (regno) += loop_depth;
	  REG_N_SETS (regno)++;
	}
      return 1;
    }
  return 0;
}
/* Try to change INSN so that it does pre-increment or pre-decrement
   addressing on register REG in order to add AMOUNT to REG.
   AMOUNT is negative for pre-decrement.
   Returns 1 if the change could be made.
   This checks all about the validity of the result of modifying INSN.  */

static int
try_pre_increment (insn, reg, amount)
     rtx insn, reg;
     HOST_WIDE_INT amount;
{
  register rtx use;

  /* Nonzero if we can try to make a pre-increment or pre-decrement.
     For example, addl $4,r1; movl (r1),... can become movl +(r1),...  */
  int pre_ok = 0;
  /* Nonzero if we can try to make a post-increment or post-decrement.
     For example, addl $4,r1; movl -4(r1),... can become movl (r1)+,...
     It is possible for both PRE_OK and POST_OK to be nonzero if the machine
     supports both pre-inc and post-inc, or both pre-dec and post-dec.  */
  int post_ok = 0;

  /* Nonzero if the opportunity actually requires post-inc or post-dec.  */
  int do_post = 0;

  /* From the sign of increment, see which possibilities are conceivable
     on this target machine.  */
  if (HAVE_PRE_INCREMENT && amount > 0)
    pre_ok = 1;
  if (HAVE_POST_INCREMENT && amount > 0)
    post_ok = 1;

  if (HAVE_PRE_DECREMENT && amount < 0)
    pre_ok = 1;
  if (HAVE_POST_DECREMENT && amount < 0)
    post_ok = 1;

  if (! (pre_ok || post_ok))
    return 0;

  /* It is not safe to add a side effect to a jump insn
     because if the incremented register is spilled and must be reloaded
     there would be no way to store the incremented value back in memory.  */

  if (GET_CODE (insn) == JUMP_INSN)
    return 0;

  use = 0;
  if (pre_ok)
    use = find_use_as_address (PATTERN (insn), reg, 0);
  if (post_ok && (use == 0 || use == (rtx) 1))
    {
      use = find_use_as_address (PATTERN (insn), reg, -amount);
      do_post = 1;
    }

  if (use == 0 || use == (rtx) 1)
    return 0;

  if (GET_MODE_SIZE (GET_MODE (use)) != (amount > 0 ? amount : - amount))
    return 0;

  /* See if this combination of instruction and addressing mode exists.  */
  if (! validate_change (insn, &XEXP (use, 0),
			 gen_rtx_fmt_e (amount > 0
					? (do_post ? POST_INC : PRE_INC)
					: (do_post ? POST_DEC : PRE_DEC),
					Pmode, reg), 0))
    return 0;

  /* Record that this insn now has an implicit side effect on X.  */
  REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_INC, reg, REG_NOTES (insn));
  return 1;
}

#endif /* AUTO_INC_DEC */
/* Find the place in the rtx X where REG is used as a memory address.
   Return the MEM rtx that so uses it.
   If PLUSCONST is nonzero, search instead for a memory address equivalent to
   (plus REG (const_int PLUSCONST)).

   If such an address does not appear, return 0.
   If REG appears more than once, or is used other than in such an address,
   return (rtx) 1.  */

static rtx
find_use_as_address (x, reg, plusconst)
     register rtx x;
     rtx reg;
     HOST_WIDE_INT plusconst;
{
  enum rtx_code code = GET_CODE (x);
  char *fmt = GET_RTX_FORMAT (code);
  register int i;
  register rtx value = 0;
  register rtx tem;

  if (code == MEM && XEXP (x, 0) == reg && plusconst == 0)
    return x;

  if (code == MEM && GET_CODE (XEXP (x, 0)) == PLUS
      && XEXP (XEXP (x, 0), 0) == reg
      && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
      && INTVAL (XEXP (XEXP (x, 0), 1)) == plusconst)
    return x;

  if (code == SIGN_EXTRACT || code == ZERO_EXTRACT)
    {
      /* If REG occurs inside a MEM used in a bit-field reference,
	 that is unacceptable.  */
      if (find_use_as_address (XEXP (x, 0), reg, 0) != 0)
	return (rtx) (HOST_WIDE_INT) 1;
    }

  if (x == reg)
    return (rtx) (HOST_WIDE_INT) 1;

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  tem = find_use_as_address (XEXP (x, i), reg, plusconst);
	  if (value == 0)
	    value = tem;
	  else if (tem != 0)
	    return (rtx) (HOST_WIDE_INT) 1;
	}
      if (fmt[i] == 'E')
	{
	  register int j;
	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    {
	      tem = find_use_as_address (XVECEXP (x, i, j), reg, plusconst);
	      if (value == 0)
		value = tem;
	      else if (tem != 0)
		return (rtx) (HOST_WIDE_INT) 1;
	    }
	}
    }

  return value;
}
4203 /* Write information about registers and basic blocks into FILE.
4204 This is part of making a debugging dump. */
4207 dump_flow_info (file
)
4211 static char *reg_class_names
[] = REG_CLASS_NAMES
;
4213 fprintf (file
, "%d registers.\n", max_regno
);
4214 for (i
= FIRST_PSEUDO_REGISTER
; i
< max_regno
; i
++)
4217 enum reg_class
class, altclass
;
4218 fprintf (file
, "\nRegister %d used %d times across %d insns",
4219 i
, REG_N_REFS (i
), REG_LIVE_LENGTH (i
));
4220 if (REG_BASIC_BLOCK (i
) >= 0)
4221 fprintf (file
, " in block %d", REG_BASIC_BLOCK (i
));
4223 fprintf (file
, "; set %d time%s", REG_N_SETS (i
),
4224 (REG_N_SETS (i
) == 1) ? "" : "s");
4225 if (REG_USERVAR_P (regno_reg_rtx
[i
]))
4226 fprintf (file
, "; user var");
4227 if (REG_N_DEATHS (i
) != 1)
4228 fprintf (file
, "; dies in %d places", REG_N_DEATHS (i
));
4229 if (REG_N_CALLS_CROSSED (i
) == 1)
4230 fprintf (file
, "; crosses 1 call");
4231 else if (REG_N_CALLS_CROSSED (i
))
4232 fprintf (file
, "; crosses %d calls", REG_N_CALLS_CROSSED (i
));
4233 if (PSEUDO_REGNO_BYTES (i
) != UNITS_PER_WORD
)
4234 fprintf (file
, "; %d bytes", PSEUDO_REGNO_BYTES (i
));
4235 class = reg_preferred_class (i
);
4236 altclass
= reg_alternate_class (i
);
4237 if (class != GENERAL_REGS
|| altclass
!= ALL_REGS
)
4239 if (altclass
== ALL_REGS
|| class == ALL_REGS
)
4240 fprintf (file
, "; pref %s", reg_class_names
[(int) class]);
4241 else if (altclass
== NO_REGS
)
4242 fprintf (file
, "; %s or none", reg_class_names
[(int) class]);
4244 fprintf (file
, "; pref %s, else %s",
4245 reg_class_names
[(int) class],
4246 reg_class_names
[(int) altclass
]);
4248 if (REGNO_POINTER_FLAG (i
))
4249 fprintf (file
, "; pointer");
4250 fprintf (file
, ".\n");
4253 fprintf (file
, "\n%d basic blocks.\n", n_basic_blocks
);
4254 for (i
= 0; i
< n_basic_blocks
; i
++)
4256 register basic_block bb
= BASIC_BLOCK (i
);
4260 fprintf (file
, "\nBasic block %d: first insn %d, last %d.\n",
4261 i
, INSN_UID (bb
->head
), INSN_UID (bb
->end
));
4263 fprintf (file
, "Predecessors: ");
4264 for (e
= bb
->pred
; e
; e
= e
->pred_next
)
4265 dump_edge_info (file
, e
, 0);
4267 fprintf (file
, "\nSuccessors: ");
4268 for (e
= bb
->succ
; e
; e
= e
->succ_next
)
4269 dump_edge_info (file
, e
, 1);
4271 fprintf (file
, "\nRegisters live at start:");
4272 if (bb
->global_live_at_start
)
4274 for (regno
= 0; regno
< max_regno
; regno
++)
4275 if (REGNO_REG_SET_P (bb
->global_live_at_start
, regno
))
4276 fprintf (file
, " %d", regno
);
4279 fprintf (file
, " n/a");
4281 fprintf (file
, "\nRegisters live at end:");
4282 if (bb
->global_live_at_end
)
4284 for (regno
= 0; regno
< max_regno
; regno
++)
4285 if (REGNO_REG_SET_P (bb
->global_live_at_end
, regno
))
4286 fprintf (file
, " %d", regno
);
4289 fprintf (file
, " n/a");
void
dump_edge_info (file, e, do_succ)
     FILE *file;
     edge e;
     int do_succ;
{
  basic_block side = (do_succ ? e->dest : e->src);

  if (side == ENTRY_BLOCK_PTR)
    fputs (" ENTRY", file);
  else if (side == EXIT_BLOCK_PTR)
    fputs (" EXIT", file);
  else
    fprintf (file, " %d", side->index);

  if (e->flags)
    {
      static char * bitnames[] = {
	"fallthru", "crit", "ab", "abcall", "eh", "fake"
      };
      int comma = 0;
      int i, flags = e->flags;

      fputc (' ', file);
      fputc ('(', file);
      for (i = 0; flags; i++)
	if (flags & (1 << i))
	  {
	    flags &= ~(1 << i);

	    if (comma)
	      fputc (',', file);
	    if (i < (int)(sizeof (bitnames) / sizeof (*bitnames)))
	      fputs (bitnames[i], file);
	    else
	      fprintf (file, "%d", i);
	    comma = 1;
	  }
      fputc (')', file);
    }
}

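/* The bit names above decode E->flags in bit order, so an edge that both
   falls through and can take an EH path is printed as "(fallthru,eh)".
   A caller-side sketch, for illustration only (DUMP is an assumed FILE *
   already opened by the caller):  */
#if 0
static void
example_dump_block_0_succs (dump)
     FILE *dump;
{
  edge e;

  for (e = BASIC_BLOCK (0)->succ; e; e = e->succ_next)
    dump_edge_info (dump, e, 1);
  fputc ('\n', dump);
}
#endif
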
/* Like print_rtl, but also print out live information for the start of each
   basic block.  */

void
print_rtl_with_bb (outf, rtx_first)
     FILE *outf;
     rtx rtx_first;
{
  register rtx tmp_rtx;

  if (rtx_first == 0)
    fprintf (outf, "(nil)\n");
  else
    {
      int i;
      enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
      int max_uid = get_max_uid ();
      basic_block *start = (basic_block *)
	alloca (max_uid * sizeof (basic_block));
      basic_block *end = (basic_block *)
	alloca (max_uid * sizeof (basic_block));
      enum bb_state *in_bb_p = (enum bb_state *)
	alloca (max_uid * sizeof (enum bb_state));

      memset (start, 0, max_uid * sizeof (basic_block));
      memset (end, 0, max_uid * sizeof (basic_block));
      memset (in_bb_p, 0, max_uid * sizeof (enum bb_state));

      for (i = n_basic_blocks - 1; i >= 0; i--)
	{
	  basic_block bb = BASIC_BLOCK (i);
	  rtx x;

	  start[INSN_UID (bb->head)] = bb;
	  end[INSN_UID (bb->end)] = bb;
	  for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
	    {
	      enum bb_state state = IN_MULTIPLE_BB;
	      if (in_bb_p[INSN_UID (x)] == NOT_IN_BB)
		state = IN_ONE_BB;
	      in_bb_p[INSN_UID (x)] = state;

	      if (x == bb->end)
		break;
	    }
	}

      for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
	{
	  int did_output;
	  basic_block bb;

	  if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
	    {
	      fprintf (outf, ";; Start of basic block %d, registers live:",
		       bb->index);

	      EXECUTE_IF_SET_IN_REG_SET (bb->global_live_at_start, 0, i,
					 {
					   fprintf (outf, " %d", i);
					   if (i < FIRST_PSEUDO_REGISTER)
					     fprintf (outf, " [%s]",
						      reg_names[i]);
					 });
	      putc ('\n', outf);
	    }

	  if (in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB
	      && GET_CODE (tmp_rtx) != NOTE
	      && GET_CODE (tmp_rtx) != BARRIER)
	    fprintf (outf, ";; Insn is not within a basic block\n");
	  else if (in_bb_p[INSN_UID (tmp_rtx)] == IN_MULTIPLE_BB)
	    fprintf (outf, ";; Insn is in multiple basic blocks\n");

	  did_output = print_rtl_single (outf, tmp_rtx);

	  if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
	    fprintf (outf, ";; End of basic block %d\n", bb->index);

	  if (did_output)
	    putc ('\n', outf);
	}
    }
}

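/* Typical debugging use, for illustration only (DUMP stands for whatever
   FILE * the calling pass has open for its RTL dump):  */
#if 0
  print_rtl_with_bb (dump, get_insns ());
#endif
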
/* Integer list support.  */

/* Allocate a node from list *HEAD_PTR.  */

static int_list_ptr
alloc_int_list_node (head_ptr)
     int_list_block **head_ptr;
{
  struct int_list_block *first_blk = *head_ptr;

  if (first_blk == NULL || first_blk->nodes_left <= 0)
    {
      first_blk = (struct int_list_block *) xmalloc (sizeof (struct int_list_block));
      first_blk->nodes_left = INT_LIST_NODES_IN_BLK;
      first_blk->next = *head_ptr;
      *head_ptr = first_blk;
    }

  first_blk->nodes_left--;
  return &first_blk->nodes[first_blk->nodes_left];
}

/* Pointer to head of predecessor/successor block list.  */
static int_list_block *pred_int_list_blocks;

/* Add a new node to integer list LIST with value VAL.
   LIST is a pointer to a list object to allow for different implementations.
   If *LIST is initially NULL, the list is empty.
   The caller must not care whether the element is added to the front or
   to the end of the list (to allow for different implementations).  */

static int_list_ptr
add_int_list_node (blk_list, list, val)
     int_list_block **blk_list;
     int_list_ptr *list;
     int val;
{
  int_list_ptr p = alloc_int_list_node (blk_list);

  INT_LIST_VAL (p) = val;
  INT_LIST_NEXT (p) = *list;
  *list = p;
  return p;
}

/* Free the blocks of lists at BLK_LIST.  */

void
free_int_list (blk_list)
     int_list_block **blk_list;
{
  int_list_block *p, *next;

  for (p = *blk_list; p != NULL; p = next)
    {
      next = p->next;
      free (p);
    }

  /* Mark list as empty for the next function we compile.  */
  *blk_list = NULL;
}

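/* Life cycle of an integer list, for illustration only; the node storage is
   pooled in blocks, so individual nodes are never freed one by one:  */
#if 0
static void
example_int_list_usage ()
{
  int_list_block *blocks = NULL;	/* backing storage, grown in blocks */
  int_list_ptr list = NULL;		/* the list itself, initially empty */
  int_list_ptr p;

  add_int_list_node (&blocks, &list, 3);
  add_int_list_node (&blocks, &list, 7);

  for (p = list; p != NULL; p = p->next)
    printf ("%d\n", INT_LIST_VAL (p));

  free_int_list (&blocks);		/* releases every node at once */
}
#endif
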
/* Predecessor/successor computation.  */

/* Mark PRED_BB a predecessor of SUCC_BB,
   and conversely SUCC_BB a successor of PRED_BB.  */

static void
add_pred_succ (pred_bb, succ_bb, s_preds, s_succs, num_preds, num_succs)
     int pred_bb;
     int succ_bb;
     int_list_ptr *s_preds;
     int_list_ptr *s_succs;
     int *num_preds;
     int *num_succs;
{
  if (succ_bb != EXIT_BLOCK)
    {
      add_int_list_node (&pred_int_list_blocks, &s_preds[succ_bb], pred_bb);
      num_preds[succ_bb]++;
    }

  if (pred_bb != ENTRY_BLOCK)
    {
      add_int_list_node (&pred_int_list_blocks, &s_succs[pred_bb], succ_bb);
      num_succs[pred_bb]++;
    }
}

/* Convert edge lists into pred/succ lists for backward compatibility.  */

void
compute_preds_succs (s_preds, s_succs, num_preds, num_succs)
     int_list_ptr *s_preds;
     int_list_ptr *s_succs;
     int *num_preds;
     int *num_succs;
{
  int i, n = n_basic_blocks;
  edge e;

  memset (s_preds, 0, n_basic_blocks * sizeof (int_list_ptr));
  memset (s_succs, 0, n_basic_blocks * sizeof (int_list_ptr));
  memset (num_preds, 0, n_basic_blocks * sizeof (int));
  memset (num_succs, 0, n_basic_blocks * sizeof (int));

  for (i = 0; i < n; ++i)
    {
      basic_block bb = BASIC_BLOCK (i);

      for (e = bb->succ; e; e = e->succ_next)
	add_pred_succ (i, e->dest->index, s_preds, s_succs,
		       num_preds, num_succs);
    }

  for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
    add_pred_succ (ENTRY_BLOCK, e->dest->index, s_preds, s_succs,
		   num_preds, num_succs);
}

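/* Caller-side sketch, for illustration only: a pass that still wants the
   old-style lists allocates one slot per basic block, fills them here, and
   releases the pooled nodes through free_bb_mem when it is done.  */
#if 0
static void
example_build_pred_succ_lists ()
{
  int_list_ptr *s_preds
    = (int_list_ptr *) alloca (n_basic_blocks * sizeof (int_list_ptr));
  int_list_ptr *s_succs
    = (int_list_ptr *) alloca (n_basic_blocks * sizeof (int_list_ptr));
  int *num_preds = (int *) alloca (n_basic_blocks * sizeof (int));
  int *num_succs = (int *) alloca (n_basic_blocks * sizeof (int));

  compute_preds_succs (s_preds, s_succs, num_preds, num_succs);

  /* ... use the lists, e.g. dump_bb_data (stderr, s_preds, s_succs, 0) ... */

  free_bb_mem ();
}
#endif
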
void
dump_bb_data (file, preds, succs, live_info)
     FILE *file;
     int_list_ptr *preds;
     int_list_ptr *succs;
     int live_info;
{
  int bb;
  int_list_ptr p;

  fprintf (file, "BB data\n\n");
  for (bb = 0; bb < n_basic_blocks; bb++)
    {
      fprintf (file, "BB %d, start %d, end %d\n", bb,
	       INSN_UID (BLOCK_HEAD (bb)), INSN_UID (BLOCK_END (bb)));
      fprintf (file, " preds:");
      for (p = preds[bb]; p != NULL; p = p->next)
	{
	  int pred_bb = INT_LIST_VAL (p);
	  if (pred_bb == ENTRY_BLOCK)
	    fprintf (file, " entry");
	  else
	    fprintf (file, " %d", pred_bb);
	}
      fprintf (file, "\n");
      fprintf (file, " succs:");
      for (p = succs[bb]; p != NULL; p = p->next)
	{
	  int succ_bb = INT_LIST_VAL (p);
	  if (succ_bb == EXIT_BLOCK)
	    fprintf (file, " exit");
	  else
	    fprintf (file, " %d", succ_bb);
	}
      if (live_info)
	{
	  int regno;

	  fprintf (file, "\nRegisters live at start:");
	  for (regno = 0; regno < max_regno; regno++)
	    if (REGNO_REG_SET_P (BASIC_BLOCK (bb)->global_live_at_start, regno))
	      fprintf (file, " %d", regno);
	  fprintf (file, "\n");
	}
      fprintf (file, "\n");
    }
  fprintf (file, "\n");
}

/* Free basic block data storage.  */

void
free_bb_mem ()
{
  free_int_list (&pred_int_list_blocks);
}

/* Compute dominator relationships.  */

void
compute_dominators (dominators, post_dominators, s_preds, s_succs)
     sbitmap *dominators;
     sbitmap *post_dominators;
     int_list_ptr *s_preds;
     int_list_ptr *s_succs;
{
  int bb, changed, passes;
  sbitmap *temp_bitmap;

  temp_bitmap = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
  sbitmap_vector_ones (dominators, n_basic_blocks);
  sbitmap_vector_ones (post_dominators, n_basic_blocks);
  sbitmap_vector_zero (temp_bitmap, n_basic_blocks);

  sbitmap_zero (dominators[0]);
  SET_BIT (dominators[0], 0);

  sbitmap_zero (post_dominators[n_basic_blocks - 1]);
  SET_BIT (post_dominators[n_basic_blocks - 1], 0);

  passes = 0;
  changed = 1;
  while (changed)
    {
      changed = 0;
      for (bb = 1; bb < n_basic_blocks; bb++)
	{
	  sbitmap_intersect_of_predecessors (temp_bitmap[bb], dominators,
					     bb, s_preds);
	  SET_BIT (temp_bitmap[bb], bb);
	  changed |= sbitmap_a_and_b (dominators[bb],
				      dominators[bb],
				      temp_bitmap[bb]);
	  sbitmap_intersect_of_successors (temp_bitmap[bb], post_dominators,
					   bb, s_succs);
	  SET_BIT (temp_bitmap[bb], bb);
	  changed |= sbitmap_a_and_b (post_dominators[bb],
				      post_dominators[bb],
				      temp_bitmap[bb]);
	}
      passes++;
    }

  sbitmap_vector_free (temp_bitmap);
}

/* Given DOMINATORS, compute the immediate dominators into IDOM.  */

void
compute_immediate_dominators (idom, dominators)
     int *idom;
     sbitmap *dominators;
{
  sbitmap *tmp;
  int b;

  tmp = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);

  /* Begin with tmp(n) = dom(n) - { n }.  */
  for (b = n_basic_blocks; --b >= 0; )
    {
      sbitmap_copy (tmp[b], dominators[b]);
      RESET_BIT (tmp[b], b);
    }

  /* Subtract out all of our dominator's dominators.  */
  for (b = n_basic_blocks; --b >= 0; )
    {
      sbitmap tmp_b = tmp[b];
      int s;

      for (s = n_basic_blocks; --s >= 0; )
	if (TEST_BIT (tmp_b, s))
	  sbitmap_difference (tmp_b, tmp_b, tmp[s]);
    }

  /* Find the one bit set in the bitmap and put it in the output array.  */
  for (b = n_basic_blocks; --b >= 0; )
    {
      int t;
      EXECUTE_IF_SET_IN_SBITMAP (tmp[b], 0, t, { idom[b] = t; });
    }

  sbitmap_vector_free (tmp);
}

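/* The two functions above implement the classic iterative scheme: for each
   block B other than the entry block,

     dom (B) = { B } union ( intersection over all predecessors P of dom (P) )

   iterated until no bitmap changes; post-dominators are the same computation
   over successors.  The immediate dominator of B is then the unique member of
   dom (B) - { B } that is not dominated by another member of that set.
   A caller-side sketch, for illustration only:  */
#if 0
static void
example_compute_idoms ()
{
  int_list_ptr *s_preds
    = (int_list_ptr *) alloca (n_basic_blocks * sizeof (int_list_ptr));
  int_list_ptr *s_succs
    = (int_list_ptr *) alloca (n_basic_blocks * sizeof (int_list_ptr));
  int *num_preds = (int *) alloca (n_basic_blocks * sizeof (int));
  int *num_succs = (int *) alloca (n_basic_blocks * sizeof (int));
  sbitmap *dominators = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
  sbitmap *post_dominators
    = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
  int *idom = (int *) alloca (n_basic_blocks * sizeof (int));

  compute_preds_succs (s_preds, s_succs, num_preds, num_succs);
  compute_dominators (dominators, post_dominators, s_preds, s_succs);
  compute_immediate_dominators (idom, dominators);

  sbitmap_vector_free (dominators);
  sbitmap_vector_free (post_dominators);
  free_bb_mem ();
}
#endif
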
/* Count for a single SET rtx, X.  */

static void
count_reg_sets_1 (x)
     rtx x;
{
  register int regno;
  register rtx reg = SET_DEST (x);

  /* Find the register that's set/clobbered.  */
  while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
	 || GET_CODE (reg) == SIGN_EXTRACT
	 || GET_CODE (reg) == STRICT_LOW_PART)
    reg = XEXP (reg, 0);

  if (GET_CODE (reg) == PARALLEL
      && GET_MODE (reg) == BLKmode)
    {
      register int i;

      for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
	count_reg_sets_1 (XVECEXP (reg, 0, i));
      return;
    }

  if (GET_CODE (reg) == REG)
    {
      regno = REGNO (reg);
      if (regno >= FIRST_PSEUDO_REGISTER)
	{
	  /* Count (weighted) references, stores, etc.  This counts a
	     register twice if it is modified, but that is correct.  */
	  REG_N_SETS (regno)++;
	  REG_N_REFS (regno) += loop_depth;
	}
    }
}

/* Increment REG_N_SETS for each SET or CLOBBER found in X; also increment
   REG_N_REFS by the current loop depth for each SET or CLOBBER found.  */

static void
count_reg_sets (x)
     rtx x;
{
  register RTX_CODE code = GET_CODE (x);

  if (code == SET || code == CLOBBER)
    count_reg_sets_1 (x);
  else if (code == PARALLEL)
    {
      register int i;

      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
	{
	  code = GET_CODE (XVECEXP (x, 0, i));
	  if (code == SET || code == CLOBBER)
	    count_reg_sets_1 (XVECEXP (x, 0, i));
	}
    }
}

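/* For example, an insn whose pattern is

     (parallel [(set (reg:SI 101) (plus:SI (reg:SI 102) (reg:SI 103)))
		(clobber (reg:SI 104))])

   bumps REG_N_SETS for 101 and 104 (assuming those numbers are above
   FIRST_PSEUDO_REGISTER) and adds the current loop_depth to their
   REG_N_REFS; the uses of 102 and 103 are counted separately by
   count_reg_references below.  */
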
/* Increment REG_N_REFS by the current loop depth for each register reference
   found in X.  */

static void
count_reg_references (x)
     rtx x;
{
  register RTX_CODE code;

 retry:
  code = GET_CODE (x);
  switch (code)
    {
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_INT:
    case CONST:
    case CONST_DOUBLE:
    case PC:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case ASM_INPUT:
      return;

#ifdef HAVE_cc0
    case CC0:
      return;
#endif

    case CLOBBER:
      /* If we are clobbering a MEM, mark any registers inside the address
	 as being used.  */
      if (GET_CODE (XEXP (x, 0)) == MEM)
	count_reg_references (XEXP (XEXP (x, 0), 0));
      return;

    case SUBREG:
      /* While we're here, optimize this case.  */
      x = SUBREG_REG (x);

      /* In case the SUBREG is not of a register, don't optimize.  */
      if (GET_CODE (x) != REG)
	{
	  count_reg_references (x);
	  return;
	}

      /* ... fall through ...  */

    case REG:
      if (REGNO (x) >= FIRST_PSEUDO_REGISTER)
	REG_N_REFS (REGNO (x)) += loop_depth;
      return;

    case SET:
      {
	register rtx testreg = SET_DEST (x);
	int mark_dest = 0;

	/* If storing into MEM, don't show it as being used.  But do
	   show the address as being used.  */
	if (GET_CODE (testreg) == MEM)
	  {
	    count_reg_references (XEXP (testreg, 0));
	    count_reg_references (SET_SRC (x));
	    return;
	  }

	/* Storing in STRICT_LOW_PART is like storing in a reg
	   in that this SET might be dead, so ignore it in TESTREG,
	   but in some other ways it is like using the reg.

	   Storing in a SUBREG or a bit field is like storing the entire
	   register in that if the register's value is not used
	   then this SET is not needed.  */
	while (GET_CODE (testreg) == STRICT_LOW_PART
	       || GET_CODE (testreg) == ZERO_EXTRACT
	       || GET_CODE (testreg) == SIGN_EXTRACT
	       || GET_CODE (testreg) == SUBREG)
	  {
	    /* Modifying a single register in an alternate mode
	       does not use any of the old value.  But these other
	       ways of storing in a register do use the old value.  */
	    if (GET_CODE (testreg) == SUBREG
		&& !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
	      ;
	    else
	      mark_dest = 1;

	    testreg = XEXP (testreg, 0);
	  }

	/* If this is a store into a register,
	   recursively scan the value being stored.  */

	if ((GET_CODE (testreg) == PARALLEL
	     && GET_MODE (testreg) == BLKmode)
	    || GET_CODE (testreg) == REG)
	  {
	    count_reg_references (SET_SRC (x));
	    if (mark_dest)
	      count_reg_references (SET_DEST (x));
	    return;
	  }
      }
      break;

    default:
      break;
    }

  /* Recursively scan the operands of this expression.  */

  {
    register char *fmt = GET_RTX_FORMAT (code);
    register int i;

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
	if (fmt[i] == 'e')
	  {
	    /* Tail recursive case: save a function call level.  */
	    if (i == 0)
	      {
		x = XEXP (x, 0);
		goto retry;
	      }
	    count_reg_references (XEXP (x, i));
	  }
	else if (fmt[i] == 'E')
	  {
	    register int j;
	    for (j = 0; j < XVECLEN (x, i); j++)
	      count_reg_references (XVECEXP (x, i, j));
	  }
      }
  }
}

/* Recompute register set/reference counts immediately prior to register
   allocation.

   This avoids problems with set/reference counts changing to/from values
   which have special meanings to the register allocators.

   Additionally, the reference counts are the primary component used by the
   register allocators to prioritize pseudos for allocation to hard regs.
   More accurate reference counts generally lead to better register allocation.

   F is the first insn to be scanned.
   LOOP_STEP denotes how much loop_depth should be incremented per
   loop nesting level in order to increase the ref count more for references
   in a loop.

   It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and
   possibly other information which is used by the register allocators.  */

void
recompute_reg_usage (f, loop_step)
     rtx f;
     int loop_step;
{
  rtx insn;
  int i, max_reg;

  /* Clear out the old data.  */
  max_reg = max_reg_num ();
  for (i = FIRST_PSEUDO_REGISTER; i < max_reg; i++)
    {
      REG_N_SETS (i) = 0;
      REG_N_REFS (i) = 0;
    }

  /* Scan each insn in the chain and count how many times each register is
     set/used.  */
  loop_depth = 1;
  for (insn = f; insn; insn = NEXT_INSN (insn))
    {
      /* Keep track of loop depth.  */
      if (GET_CODE (insn) == NOTE)
	{
	  /* Look for loop boundaries.  */
	  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END)
	    loop_depth -= loop_step;
	  else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
	    loop_depth += loop_step;

	  /* If we have LOOP_DEPTH == 0, there has been a bookkeeping error.
	     Abort now rather than setting register status incorrectly.  */
	  if (loop_depth == 0)
	    abort ();
	}
      else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	{
	  rtx links;

	  /* This call will increment REG_N_SETS for each SET or CLOBBER
	     of a register in INSN.  It will also increment REG_N_REFS
	     by the loop depth for each set of a register in INSN.  */
	  count_reg_sets (PATTERN (insn));

	  /* count_reg_sets does not detect autoincrement address modes, so
	     detect them here by looking at the notes attached to INSN.  */
	  for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
	    {
	      if (REG_NOTE_KIND (links) == REG_INC)
		/* Count (weighted) references, stores, etc.  This counts a
		   register twice if it is modified, but that is correct.  */
		REG_N_SETS (REGNO (XEXP (links, 0)))++;
	    }

	  /* This call will increment REG_N_REFS by the current loop depth for
	     each reference to a register in INSN.  */
	  count_reg_references (PATTERN (insn));

	  /* count_reg_references will not include counts for arguments to
	     function calls, so detect them here by examining the
	     CALL_INSN_FUNCTION_USAGE data.  */
	  if (GET_CODE (insn) == CALL_INSN)
	    {
	      rtx note;

	      for (note = CALL_INSN_FUNCTION_USAGE (insn);
		   note;
		   note = XEXP (note, 1))
		if (GET_CODE (XEXP (note, 0)) == USE)
		  count_reg_references (SET_DEST (XEXP (note, 0)));
	    }
	}
    }
}

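/* With the counting above, a reference nested K loops deep is weighted by
   1 + K * LOOP_STEP: loop_depth starts at 1 and is bumped by LOOP_STEP at
   each NOTE_INSN_LOOP_BEG and dropped again at the matching
   NOTE_INSN_LOOP_END.  So with LOOP_STEP == 2, a use inside a doubly nested
   loop adds 5 to its register's REG_N_REFS instead of 1.  */
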
/* Record INSN's block as BB.  */

void
set_block_for_insn (insn, bb)
     rtx insn;
     basic_block bb;
{
  size_t uid = INSN_UID (insn);
  if (uid >= basic_block_for_insn->num_elements)
    {
      int new_size;

      /* Add one-eighth the size so we don't keep calling xrealloc.  */
      new_size = uid + (uid + 7) / 8;

      VARRAY_GROW (basic_block_for_insn, new_size);
    }
  VARRAY_BB (basic_block_for_insn, uid) = bb;
}

/* Record INSN's block number as BB.  */
/* ??? This has got to go.  */

void
set_block_num (insn, bb)
     rtx insn;
     int bb;
{
  set_block_for_insn (insn, BASIC_BLOCK (bb));
}

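/* Sketch of the intended use, for illustration only (PAT, AFTER and BB are
   assumed locals of some pass that edits the insn stream): a newly emitted
   insn must be registered so later lookups through basic_block_for_insn
   stay consistent.  */
#if 0
  {
    rtx new_insn = emit_insn_after (pat, after);
    set_block_for_insn (new_insn, bb);
  }
#endif
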
/* Verify CFG consistency.  This function checks some CFG invariants and
   aborts when something is wrong.  The hope is that it will help convert
   many optimization passes to preserve a consistent CFG.

   Currently it does the following checks:

   - test head/end pointers
   - overlapping of basic blocks
   - edge list correctness
   - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
   - tails of basic blocks (ensure that the boundary is necessary)
   - scan the body of each basic block for JUMP_INSN, CODE_LABEL
     and NOTE_INSN_BASIC_BLOCK
   - check that all insns are in the basic blocks
     (except the switch handling code, barriers and notes)

   In the future it can be extended to check a lot of other stuff as well
   (reachability of basic blocks, life information, etc. etc.).  */

void
verify_flow_info ()
{
  const int max_uid = get_max_uid ();
  const rtx rtx_first = get_insns ();
  basic_block *bb_info;
  rtx x;
  int i;

  bb_info = (basic_block *) alloca (max_uid * sizeof (basic_block));
  memset (bb_info, 0, max_uid * sizeof (basic_block));

  /* First pass: check head/end pointers and set the bb_info array used by
     the later passes.  */
  for (i = n_basic_blocks - 1; i >= 0; i--)
    {
      basic_block bb = BASIC_BLOCK (i);

      /* Check the head pointer and make sure that it is pointing into
	 the insn list.  */
      for (x = rtx_first; x != NULL_RTX; x = NEXT_INSN (x))
	if (x == bb->head)
	  break;
      if (!x)
	{
	  fatal ("verify_flow_info: Head insn %d for block %d not found in the insn stream.\n",
		 INSN_UID (bb->head), bb->index);
	}

      /* Check the end pointer and make sure that it is pointing into
	 the insn list.  */
      for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
	{
	  if (bb_info[INSN_UID (x)] != NULL)
	    {
	      fatal ("verify_flow_info: Insn %d is in multiple basic blocks (%d and %d)",
		     INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
	    }
	  bb_info[INSN_UID (x)] = bb;

	  if (x == bb->end)
	    break;
	}
      if (!x)
	{
	  fatal ("verify_flow_info: End insn %d for block %d not found in the insn stream.\n",
		 INSN_UID (bb->end), bb->index);
	}
    }

  /* Now check the basic blocks (boundaries etc.)  */
  for (i = n_basic_blocks - 1; i >= 0; i--)
    {
      basic_block bb = BASIC_BLOCK (i);
      /* Check correctness of edge lists.  */
      edge e;

      e = bb->succ;
      while (e)
	{
	  if (e->src != bb)
	    {
	      fprintf (stderr, "verify_flow_info: Basic block %d succ edge is corrupted\n",
		       bb->index);
	      fprintf (stderr, "Predecessor: ");
	      dump_edge_info (stderr, e, 0);
	      fprintf (stderr, "\nSuccessor: ");
	      dump_edge_info (stderr, e, 1);
	      fflush (stderr);
	      abort ();
	    }
	  if (e->dest != EXIT_BLOCK_PTR)
	    {
	      edge e2 = e->dest->pred;
	      while (e2 && e2 != e)
		e2 = e2->pred_next;
	      if (!e2)
		{
		  fatal ("verify_flow_info: Basic block %i edge lists are corrupted\n",
			 bb->index);
		}
	    }
	  e = e->succ_next;
	}

      e = bb->pred;
      while (e)
	{
	  if (e->dest != bb)
	    {
	      fprintf (stderr, "verify_flow_info: Basic block %d pred edge is corrupted\n",
		       bb->index);
	      fprintf (stderr, "Predecessor: ");
	      dump_edge_info (stderr, e, 0);
	      fprintf (stderr, "\nSuccessor: ");
	      dump_edge_info (stderr, e, 1);
	      fflush (stderr);
	      abort ();
	    }
	  if (e->src != ENTRY_BLOCK_PTR)
	    {
	      edge e2 = e->src->succ;
	      while (e2 && e2 != e)
		e2 = e2->succ_next;
	      if (!e2)
		{
		  fatal ("verify_flow_info: Basic block %i edge lists are corrupted\n",
			 bb->index);
		}
	    }
	  e = e->pred_next;
	}

      /* OK, the pointers are correct.  Now check the header of the basic
	 block.  It ought to contain an optional CODE_LABEL followed
	 by NOTE_BASIC_BLOCK.  */
      x = bb->head;
      if (GET_CODE (x) == CODE_LABEL)
	{
	  if (bb->end == x)
	    {
	      fatal ("verify_flow_info: Basic block contains only CODE_LABEL and no NOTE_INSN_BASIC_BLOCK note\n");
	    }
	  x = NEXT_INSN (x);
	}
      if (GET_CODE (x) != NOTE
	  || NOTE_LINE_NUMBER (x) != NOTE_INSN_BASIC_BLOCK
	  || NOTE_BASIC_BLOCK (x) != bb)
	{
	  fatal ("verify_flow_info: NOTE_INSN_BASIC_BLOCK is missing for block %d\n",
		 bb->index);
	}

      if (bb->end == x)
	{
	  /* Do checks for empty blocks here.  */
	}
      else
	{
	  x = NEXT_INSN (x);
	  while (x)
	    {
	      if (GET_CODE (x) == NOTE
		  && NOTE_LINE_NUMBER (x) == NOTE_INSN_BASIC_BLOCK)
		{
		  fatal ("verify_flow_info: NOTE_INSN_BASIC_BLOCK %d in the middle of basic block %d\n",
			 INSN_UID (x), bb->index);
		}

	      if (x == bb->end)
		break;

	      if (GET_CODE (x) == JUMP_INSN
		  || GET_CODE (x) == CODE_LABEL
		  || GET_CODE (x) == BARRIER)
		{
		  fatal_insn ("verify_flow_info: Incorrect insn in the middle of basic block %d\n",
			      x);
		}

	      x = NEXT_INSN (x);
	    }
	}
    }

  x = rtx_first;
  while (x)
    {
      if (!bb_info[INSN_UID (x)])
	{
	  switch (GET_CODE (x))
	    {
	    case BARRIER:
	    case NOTE:
	      break;

	    case CODE_LABEL:
	      /* An addr_vec is placed outside any basic block.  */
	      if (NEXT_INSN (x)
		  && GET_CODE (NEXT_INSN (x)) == JUMP_INSN
		  && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
		      || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC))
		{
		  x = NEXT_INSN (x);
		}

	      /* But in any case, non-deletable labels can appear anywhere.  */
	      break;

	    default:
	      fatal_insn ("verify_flow_info: Insn outside basic block\n", x);
	    }
	}

      x = NEXT_INSN (x);
    }
}

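/* A pass that edits the insn stream or the CFG can sanity-check its result
   by calling verify_flow_info () when it finishes; the call either returns
   silently or stops the compiler at the first inconsistency it finds.  */
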