* cp-tree.h (DECL_LOCAL_FUNCTION_P): New macro.
[official-gcc.git] / gcc / flow.c
blob9ed2b35646a8eca4633a2c961c6761122328c072
1 /* Data flow analysis for GNU compiler.
2 Copyright (C) 1987, 88, 92-98, 1999 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
22 /* This file contains the data flow analysis pass of the compiler. It
23 computes data flow information which tells combine_instructions
24 which insns to consider combining and controls register allocation.
26 Additional data flow information that is too bulky to record is
27 generated during the analysis, and is used at that time to create
28 autoincrement and autodecrement addressing.
30 The first step is dividing the function into basic blocks.
31 find_basic_blocks does this. Then life_analysis determines
32 where each register is live and where it is dead.
34 ** find_basic_blocks **
36 find_basic_blocks divides the current function's rtl into basic
37 blocks and constructs the CFG. The blocks are recorded in the
38 basic_block_info array; the CFG exists in the edge structures
39 referenced by the blocks.
41 find_basic_blocks also finds any unreachable loops and deletes them.
43 ** life_analysis **
45 life_analysis is called immediately after find_basic_blocks.
46 It uses the basic block information to determine where each
47 hard or pseudo register is live.
49 ** live-register info **
51 The information about where each register is live is in two parts:
52 the REG_NOTES of insns, and the vector basic_block->global_live_at_start.
54 basic_block->global_live_at_start has an element for each basic
55 block, and the element is a bit-vector with a bit for each hard or
56 pseudo register. The bit is 1 if the register is live at the
57 beginning of the basic block.
59 Two types of elements can be added to an insn's REG_NOTES.
60 A REG_DEAD note is added to an insn's REG_NOTES for any register
61 that meets both of two conditions: The value in the register is not
62 needed in subsequent insns and the insn does not replace the value in
63 the register (in the case of multi-word hard registers, the value in
64 each register must be replaced by the insn to avoid a REG_DEAD note).
66 In the vast majority of cases, an object in a REG_DEAD note will be
67 used somewhere in the insn. The (rare) exception to this is if an
68 insn uses a multi-word hard register and only some of the registers are
69 needed in subsequent insns. In that case, REG_DEAD notes will be
70 provided for those hard registers that are not subsequently needed.
71 Partial REG_DEAD notes of this type do not occur when an insn sets
72 only some of the hard registers used in such a multi-word operand;
73 omitting REG_DEAD notes for objects stored in an insn is optional and
74 the desire to do so does not justify the complexity of the partial
75 REG_DEAD notes.
77 REG_UNUSED notes are added for each register that is set by the insn
78 but is unused subsequently (if every register set by the insn is unused
79 and the insn does not reference memory or have some other side-effect,
80 the insn is deleted instead). If only part of a multi-word hard
81 register is used in a subsequent insn, REG_UNUSED notes are made for
82 the parts that will not be used.
84 To determine which registers are live after any insn, one can
85 start from the beginning of the basic block and scan insns, noting
86 which registers are set by each insn and which die there.
88 ** Other actions of life_analysis **
90 life_analysis sets up the LOG_LINKS fields of insns because the
91 information needed to do so is readily available.
93 life_analysis deletes insns whose only effect is to store a value
94 that is never used.
96 life_analysis notices cases where a reference to a register as
97 a memory address can be combined with a preceding or following
98 incrementation or decrementation of the register. The separate
99 instruction to increment or decrement is deleted and the address
100 is changed to a POST_INC or similar rtx.
102 Each time an incrementing or decrementing address is created,
103 a REG_INC element is added to the insn's REG_NOTES list.
105 life_analysis fills in certain vectors containing information about
106 register usage: reg_n_refs, reg_n_deaths, reg_n_sets, reg_live_length,
107    reg_n_calls_crossed and reg_basic_block.
109 life_analysis sets current_function_sp_is_unchanging if the function
110 doesn't modify the stack pointer. */
112 /* TODO:
114 Split out from life_analysis:
115 - local property discovery (bb->local_live, bb->local_set)
116 - global property computation
117 - log links creation
118 - pre/post modify transformation
121 #include "config.h"
122 #include "system.h"
123 #include "tree.h"
124 #include "rtl.h"
125 #include "tm_p.h"
126 #include "basic-block.h"
127 #include "insn-config.h"
128 #include "regs.h"
129 #include "hard-reg-set.h"
130 #include "flags.h"
131 #include "output.h"
132 #include "function.h"
133 #include "except.h"
134 #include "toplev.h"
135 #include "recog.h"
136 #include "insn-flags.h"
138 #include "obstack.h"
140 #define obstack_chunk_alloc xmalloc
141 #define obstack_chunk_free free
144 /* EXIT_IGNORE_STACK should be nonzero if, when returning from a function,
145 the stack pointer does not matter. The value is tested only in
146 functions that have frame pointers.
147 No definition is equivalent to always zero. */
148 #ifndef EXIT_IGNORE_STACK
149 #define EXIT_IGNORE_STACK 0
150 #endif
152 #ifndef HAVE_epilogue
153 #define HAVE_epilogue 0
154 #endif
156 #ifndef HAVE_prologue
157 #define HAVE_prologue 0
158 #endif
160 /* The contents of the current function definition are allocated
161 in this obstack, and all are freed at the end of the function.
162 For top-level functions, this is temporary_obstack.
163 Separate obstacks are made for nested functions. */
165 extern struct obstack *function_obstack;
167 /* Number of basic blocks in the current function. */
169 int n_basic_blocks;
171 /* Number of edges in the current function. */
173 int n_edges;
175 /* The basic block array. */
177 varray_type basic_block_info;
179 /* The special entry and exit blocks. */
181 struct basic_block_def entry_exit_blocks[2] =
184 NULL, /* head */
185 NULL, /* end */
186 NULL, /* pred */
187 NULL, /* succ */
188 NULL, /* local_set */
189 NULL, /* global_live_at_start */
190 NULL, /* global_live_at_end */
191 NULL, /* aux */
192 ENTRY_BLOCK, /* index */
193 0, /* loop_depth */
194 -1, -1 /* eh_beg, eh_end */
197 NULL, /* head */
198 NULL, /* end */
199 NULL, /* pred */
200 NULL, /* succ */
201 NULL, /* local_set */
202 NULL, /* global_live_at_start */
203 NULL, /* global_live_at_end */
204 NULL, /* aux */
205 EXIT_BLOCK, /* index */
206 0, /* loop_depth */
207 -1, -1 /* eh_beg, eh_end */
211 /* Nonzero if the second flow pass has completed. */
212 int flow2_completed;
214 /* Maximum register number used in this function, plus one. */
216 int max_regno;
218 /* Indexed by n, giving various register information */
220 varray_type reg_n_info;
222 /* Size of the reg_n_info table. */
224 unsigned int reg_n_max;
226 /* Element N is the next insn that uses (hard or pseudo) register number N
227 within the current basic block; or zero, if there is no such insn.
228 This is valid only during the final backward scan in propagate_block. */
230 static rtx *reg_next_use;
232 /* Size of a regset for the current function,
233 in (1) bytes and (2) elements. */
235 int regset_bytes;
236 int regset_size;
238 /* Regset of regs live when calls to `setjmp'-like functions happen. */
239 /* ??? Does this exist only for the setjmp-clobbered warning message? */
241 regset regs_live_at_setjmp;
243 /* List made of EXPR_LIST rtx's which gives pairs of pseudo registers
244 that have to go in the same hard reg.
245 The first two regs in the list are a pair, and the next two
246 are another pair, etc. */
247 rtx regs_may_share;
249 /* Depth within loops of basic block being scanned for lifetime analysis,
250 plus one. This is the weight attached to references to registers. */
252 static int loop_depth;
254 /* During propagate_block, this is non-zero if the value of CC0 is live. */
256 static int cc0_live;
258 /* During propagate_block, this contains a list of all the MEMs we are
259 tracking for dead store elimination. */
261 static rtx mem_set_list;
263 /* Set of registers that may be eliminable. These are handled specially
264 in updating regs_ever_live. */
266 static HARD_REG_SET elim_reg_set;
268 /* The basic block structure for every insn, indexed by uid. */
270 varray_type basic_block_for_insn;
272 /* The labels mentioned in non-jump rtl. Valid during find_basic_blocks. */
273 /* ??? Should probably be using LABEL_NUSES instead. It would take a
274 bit of surgery to be able to use or co-opt the routines in jump. */
276 static rtx label_value_list;
278 /* INSN_VOLATILE (insn) is 1 if the insn refers to anything volatile. */
280 #define INSN_VOLATILE(INSN) bitmap_bit_p (uid_volatile, INSN_UID (INSN))
281 #define SET_INSN_VOLATILE(INSN) bitmap_set_bit (uid_volatile, INSN_UID (INSN))
282 static bitmap uid_volatile;
284 /* Forward declarations */
285 static int count_basic_blocks PROTO((rtx));
286 static rtx find_basic_blocks_1 PROTO((rtx));
287 static void create_basic_block PROTO((int, rtx, rtx, rtx));
288 static void clear_edges PROTO((void));
289 static void make_edges PROTO((rtx));
290 static void make_edge PROTO((sbitmap *, basic_block,
291 basic_block, int));
292 static void make_label_edge PROTO((sbitmap *, basic_block,
293 rtx, int));
294 static void make_eh_edge PROTO((sbitmap *, eh_nesting_info *,
295 basic_block, rtx, int));
296 static void mark_critical_edges PROTO((void));
297 static void move_stray_eh_region_notes PROTO((void));
298 static void record_active_eh_regions PROTO((rtx));
300 static void commit_one_edge_insertion PROTO((edge));
302 static void delete_unreachable_blocks PROTO((void));
303 static void delete_eh_regions PROTO((void));
304 static int can_delete_note_p PROTO((rtx));
305 static int delete_block PROTO((basic_block));
306 static void expunge_block PROTO((basic_block));
307 static rtx flow_delete_insn PROTO((rtx));
308 static int can_delete_label_p PROTO((rtx));
309 static int merge_blocks_move_predecessor_nojumps PROTO((basic_block,
310 basic_block));
311 static int merge_blocks_move_successor_nojumps PROTO((basic_block,
312 basic_block));
313 static void merge_blocks_nomove PROTO((basic_block, basic_block));
314 static int merge_blocks PROTO((edge,basic_block,basic_block));
315 static void try_merge_blocks PROTO((void));
316 static void tidy_fallthru_edge PROTO((edge,basic_block,basic_block));
317 static void calculate_loop_depth PROTO((rtx));
319 static int verify_wide_reg_1 PROTO((rtx *, void *));
320 static void verify_wide_reg PROTO((int, rtx, rtx));
321 static void verify_local_live_at_start PROTO((regset, basic_block));
322 static int set_noop_p PROTO((rtx));
323 static int noop_move_p PROTO((rtx));
324 static void notice_stack_pointer_modification PROTO ((rtx, rtx, void *));
325 static void record_volatile_insns PROTO((rtx));
326 static void mark_reg PROTO((regset, rtx));
327 static void mark_regs_live_at_end PROTO((regset));
328 static void life_analysis_1 PROTO((rtx, int, int));
329 static void calculate_global_regs_live PROTO((sbitmap, sbitmap, int));
330 static void propagate_block PROTO((regset, rtx, rtx,
331 regset, int, int));
332 static int insn_dead_p PROTO((rtx, regset, int, rtx));
333 static int libcall_dead_p PROTO((rtx, regset, rtx, rtx));
334 static void mark_set_regs PROTO((regset, regset, rtx,
335 rtx, regset, int));
336 static void mark_set_1 PROTO((regset, regset, rtx,
337 rtx, regset, int));
338 #ifdef AUTO_INC_DEC
339 static void find_auto_inc PROTO((regset, rtx, rtx));
340 static int try_pre_increment_1 PROTO((rtx));
341 static int try_pre_increment PROTO((rtx, rtx, HOST_WIDE_INT));
342 #endif
343 static void mark_used_regs PROTO((regset, regset, rtx, int, rtx));
344 void dump_flow_info PROTO((FILE *));
345 void debug_flow_info PROTO((void));
346 static void dump_edge_info PROTO((FILE *, edge, int));
348 static int_list_ptr alloc_int_list_node PROTO ((int_list_block **));
349 static int_list_ptr add_int_list_node PROTO ((int_list_block **,
350 int_list **, int));
352 static void add_pred_succ PROTO ((int, int, int_list_ptr *,
353 int_list_ptr *, int *, int *));
355 static void count_reg_sets_1 PROTO ((rtx));
356 static void count_reg_sets PROTO ((rtx));
357 static void count_reg_references PROTO ((rtx));
358 static void invalidate_mems_from_autoinc PROTO ((rtx));
359 static void remove_edge PROTO ((edge));
360 static void remove_fake_successors PROTO ((basic_block));
362 /* This function is always defined so it can be called from the
363 debugger, and it is declared extern so we don't get warnings about
364 it being unused. */
365 void verify_flow_info PROTO ((void));
368 /* Find basic blocks of the current function.
369 F is the first insn of the function and NREGS the number of register
370 numbers in use. */
372 void
373 find_basic_blocks (f, nregs, file, do_cleanup)
374 rtx f;
375 int nregs ATTRIBUTE_UNUSED;
376 FILE *file ATTRIBUTE_UNUSED;
377 int do_cleanup;
379 int max_uid;
381 /* Flush out existing data. */
382 if (basic_block_info != NULL)
384 int i;
386 clear_edges ();
388 /* Clear bb->aux on all extant basic blocks. We'll use this as a
389 tag for reuse during create_basic_block, just in case some pass
390 copies around basic block notes improperly. */
391 for (i = 0; i < n_basic_blocks; ++i)
392 BASIC_BLOCK (i)->aux = NULL;
394 VARRAY_FREE (basic_block_info);
397 n_basic_blocks = count_basic_blocks (f);
399 /* Size the basic block table. The actual structures will be allocated
400 by find_basic_blocks_1, since we want to keep the structure pointers
401 stable across calls to find_basic_blocks. */
402 /* ??? This whole issue would be much simpler if we called find_basic_blocks
403 exactly once, and thereafter we don't have a single long chain of
404 instructions at all until close to the end of compilation when we
405 actually lay them out. */
407 VARRAY_BB_INIT (basic_block_info, n_basic_blocks, "basic_block_info");
409 label_value_list = find_basic_blocks_1 (f);
411 /* Record the block to which an insn belongs. */
412 /* ??? This should be done another way, by which (perhaps) a label is
413 tagged directly with the basic block that it starts. It is used for
414 more than that currently, but IMO that is the only valid use. */
416 max_uid = get_max_uid ();
417 #ifdef AUTO_INC_DEC
418 /* Leave space for insns life_analysis makes in some cases for auto-inc.
419 These cases are rare, so we don't need too much space. */
420 max_uid += max_uid / 10;
421 #endif
423 compute_bb_for_insn (max_uid);
425 /* Discover the edges of our cfg. */
427 record_active_eh_regions (f);
428 make_edges (label_value_list);
430 /* Delete unreachable blocks, then merge blocks when possible. */
432 if (do_cleanup)
434 delete_unreachable_blocks ();
435 move_stray_eh_region_notes ();
436 record_active_eh_regions (f);
437 try_merge_blocks ();
440 /* Mark critical edges. */
442 mark_critical_edges ();
444 /* Discover the loop depth at the start of each basic block to aid
445 register allocation. */
446 calculate_loop_depth (f);
448 /* Kill the data we won't maintain. */
449 label_value_list = NULL_RTX;
451 #ifdef ENABLE_CHECKING
452 verify_flow_info ();
453 #endif
456 /* Count the basic blocks of the function. */
458 static int
459 count_basic_blocks (f)
460 rtx f;
462 register rtx insn;
463 register RTX_CODE prev_code;
464 register int count = 0;
465 int eh_region = 0;
466 int call_had_abnormal_edge = 0;
467 rtx prev_call = NULL_RTX;
469 prev_code = JUMP_INSN;
470 for (insn = f; insn; insn = NEXT_INSN (insn))
472 register RTX_CODE code = GET_CODE (insn);
474 if (code == CODE_LABEL
475 || (GET_RTX_CLASS (code) == 'i'
476 && (prev_code == JUMP_INSN
477 || prev_code == BARRIER
478 || (prev_code == CALL_INSN && call_had_abnormal_edge))))
480 count++;
482 /* If the previous insn was a call that did not create an
483 abnormal edge, we want to add a nop so that the CALL_INSN
484 itself is not at basic_block_end. This allows us to
485 easily distinguish between normal calls and those which
486 create abnormal edges in the flow graph. */
488 if (count > 0 && prev_call != 0 && !call_had_abnormal_edge)
490 rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
491 emit_insn_after (nop, prev_call);
495 /* Record whether this call created an edge. */
496 if (code == CALL_INSN)
498 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
499 int region = (note ? XWINT (XEXP (note, 0), 0) : 1);
500 prev_call = insn;
501 call_had_abnormal_edge = 0;
503 /* If there is a specified EH region, we have an edge. */
504 if (eh_region && region > 0)
505 call_had_abnormal_edge = 1;
506 else
508 /* If there is a nonlocal goto label and the specified
509 region number isn't -1, we have an edge. (0 means
510 no throw, but might have a nonlocal goto). */
511 if (nonlocal_goto_handler_labels && region >= 0)
512 call_had_abnormal_edge = 1;
515 else if (code != NOTE)
516 prev_call = NULL_RTX;
518 if (code != NOTE)
519 prev_code = code;
520 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
521 ++eh_region;
522 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
523 --eh_region;
527 /* The rest of the compiler works a bit smoother when we don't have to
528 check for the edge case of do-nothing functions with no basic blocks. */
529 if (count == 0)
531 emit_insn (gen_rtx_USE (VOIDmode, const0_rtx));
532 count = 1;
535 return count;
538 /* Find all basic blocks of the function whose first insn is F.
540 Collect and return a list of labels whose addresses are taken. This
541 will be used in make_edges for use with computed gotos. */
543 static rtx
544 find_basic_blocks_1 (f)
545 rtx f;
547 register rtx insn, next;
548 int call_has_abnormal_edge = 0;
549 int i = 0;
550 rtx bb_note = NULL_RTX;
551 rtx eh_list = NULL_RTX;
552 rtx label_value_list = NULL_RTX;
553 rtx head = NULL_RTX;
554 rtx end = NULL_RTX;
556 /* We process the instructions in a slightly different way than we did
557 previously. This is so that we see a NOTE_BASIC_BLOCK after we have
558 closed out the previous block, so that it gets attached at the proper
559 place. Since this form should be equivalent to the previous,
560 count_basic_blocks continues to use the old form as a check. */
562 for (insn = f; insn; insn = next)
564 enum rtx_code code = GET_CODE (insn);
566 next = NEXT_INSN (insn);
568 if (code == CALL_INSN)
570 /* Record whether this call created an edge. */
571 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
572 int region = (note ? XWINT (XEXP (note, 0), 0) : 1);
573 call_has_abnormal_edge = 0;
575 /* If there is an EH region, we have an edge. */
576 if (eh_list && region > 0)
577 call_has_abnormal_edge = 1;
578 else
580 /* If there is a nonlocal goto label and the specified
581 region number isn't -1, we have an edge. (0 means
582 no throw, but might have a nonlocal goto). */
583 if (nonlocal_goto_handler_labels && region >= 0)
584 call_has_abnormal_edge = 1;
588 switch (code)
590 case NOTE:
592 int kind = NOTE_LINE_NUMBER (insn);
594 /* Keep a LIFO list of the currently active exception notes. */
595 if (kind == NOTE_INSN_EH_REGION_BEG)
596 eh_list = alloc_INSN_LIST (insn, eh_list);
597 else if (kind == NOTE_INSN_EH_REGION_END)
599 rtx t = eh_list;
600 eh_list = XEXP (eh_list, 1);
601 free_INSN_LIST_node (t);
604 /* Look for basic block notes with which to keep the
605 basic_block_info pointers stable. Unthread the note now;
606 we'll put it back at the right place in create_basic_block.
607 Or not at all if we've already found a note in this block. */
608 else if (kind == NOTE_INSN_BASIC_BLOCK)
610 if (bb_note == NULL_RTX)
611 bb_note = insn;
612 next = flow_delete_insn (insn);
615 break;
618 case CODE_LABEL:
619 /* A basic block starts at a label. If we've closed one off due
620 to a barrier or some such, no need to do it again. */
621 if (head != NULL_RTX)
623 /* While we now have edge lists with which other portions of
624 the compiler might determine a call ending a basic block
625 does not imply an abnormal edge, it will be a bit before
626 everything can be updated. So continue to emit a noop at
627 the end of such a block. */
628 if (GET_CODE (end) == CALL_INSN)
630 rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
631 end = emit_insn_after (nop, end);
634 create_basic_block (i++, head, end, bb_note);
635 bb_note = NULL_RTX;
637 head = end = insn;
638 break;
640 case JUMP_INSN:
641 /* A basic block ends at a jump. */
642 if (head == NULL_RTX)
643 head = insn;
644 else
646 /* ??? Make a special check for table jumps. The way this
647 happens is truly and amazingly gross. We are about to
648 create a basic block that contains just a code label and
649 an addr*vec jump insn. Worse, an addr_diff_vec creates
650 its own natural loop.
652 Prevent this bit of brain damage, pasting things together
653 correctly in make_edges.
655 The correct solution involves emitting the table directly
656 on the tablejump instruction as a note, or JUMP_LABEL. */
658 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
659 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
661 head = end = NULL;
662 n_basic_blocks--;
663 break;
666 end = insn;
667 goto new_bb_inclusive;
669 case BARRIER:
670 /* A basic block ends at a barrier. It may be that an unconditional
671 jump already closed the basic block -- no need to do it again. */
672 if (head == NULL_RTX)
673 break;
675 /* While we now have edge lists with which other portions of the
676 compiler might determine a call ending a basic block does not
677 imply an abnormal edge, it will be a bit before everything can
678 be updated. So continue to emit a noop at the end of such a
679 block. */
680 if (GET_CODE (end) == CALL_INSN)
682 rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
683 end = emit_insn_after (nop, end);
685 goto new_bb_exclusive;
687 case CALL_INSN:
688 /* A basic block ends at a call that can either throw or
689 do a non-local goto. */
690 if (call_has_abnormal_edge)
692 new_bb_inclusive:
693 if (head == NULL_RTX)
694 head = insn;
695 end = insn;
697 new_bb_exclusive:
698 create_basic_block (i++, head, end, bb_note);
699 head = end = NULL_RTX;
700 bb_note = NULL_RTX;
701 break;
703 /* FALLTHRU */
705 default:
706 if (GET_RTX_CLASS (code) == 'i')
708 if (head == NULL_RTX)
709 head = insn;
710 end = insn;
712 break;
715 if (GET_RTX_CLASS (code) == 'i')
717 rtx note;
719 /* Make a list of all labels referred to other than by jumps
720 (which just don't have the REG_LABEL notes).
722 Make a special exception for labels followed by an ADDR*VEC,
723 as this would be a part of the tablejump setup code.
725 Make a special exception for the eh_return_stub_label, which
726 we know isn't part of any otherwise visible control flow. */
728 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
729 if (REG_NOTE_KIND (note) == REG_LABEL)
731 rtx lab = XEXP (note, 0), next;
733 if (lab == eh_return_stub_label)
735 else if ((next = next_nonnote_insn (lab)) != NULL
736 && GET_CODE (next) == JUMP_INSN
737 && (GET_CODE (PATTERN (next)) == ADDR_VEC
738 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
740 else
741 label_value_list
742 = alloc_EXPR_LIST (0, XEXP (note, 0), label_value_list);
747 if (head != NULL_RTX)
748 create_basic_block (i++, head, end, bb_note);
750 if (i != n_basic_blocks)
751 abort ();
753 return label_value_list;
756 /* Create a new basic block consisting of the instructions between
757 HEAD and END inclusive. Reuses the note and basic block struct
758 in BB_NOTE, if any. */
760 static void
761 create_basic_block (index, head, end, bb_note)
762 int index;
763 rtx head, end, bb_note;
765 basic_block bb;
767 if (bb_note
768 && ! RTX_INTEGRATED_P (bb_note)
769 && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
770 && bb->aux == NULL)
772 /* If we found an existing note, thread it back onto the chain. */
774 if (GET_CODE (head) == CODE_LABEL)
775 add_insn_after (bb_note, head);
776 else
778 add_insn_before (bb_note, head);
779 head = bb_note;
782 else
784 /* Otherwise we must create a note and a basic block structure.
785 Since we allow basic block structs in rtl, give the struct
786 the same lifetime by allocating it off the function obstack
787 rather than using malloc. */
789 bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
790 memset (bb, 0, sizeof (*bb));
792 if (GET_CODE (head) == CODE_LABEL)
793 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
794 else
796 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
797 head = bb_note;
799 NOTE_BASIC_BLOCK (bb_note) = bb;
802 /* Always include the bb note in the block. */
803 if (NEXT_INSN (end) == bb_note)
804 end = bb_note;
806 bb->head = head;
807 bb->end = end;
808 bb->index = index;
809 BASIC_BLOCK (index) = bb;
811 /* Tag the block so that we know it has been used when considering
812 other basic block notes. */
813 bb->aux = bb;
816 /* Records the basic block struct in BB_FOR_INSN, for every instruction
817 indexed by INSN_UID. MAX is the size of the array. */
819 void
820 compute_bb_for_insn (max)
821 int max;
823 int i;
825 if (basic_block_for_insn)
826 VARRAY_FREE (basic_block_for_insn);
827 VARRAY_BB_INIT (basic_block_for_insn, max, "basic_block_for_insn");
829 for (i = 0; i < n_basic_blocks; ++i)
831 basic_block bb = BASIC_BLOCK (i);
832 rtx insn, end;
834 end = bb->end;
835 insn = bb->head;
836 while (1)
838 int uid = INSN_UID (insn);
839 if (uid < max)
840 VARRAY_BB (basic_block_for_insn, uid) = bb;
841 if (insn == end)
842 break;
843 insn = NEXT_INSN (insn);
848 /* Free the memory associated with the edge structures. */
850 static void
851 clear_edges ()
853 int i;
854 edge n, e;
856 for (i = 0; i < n_basic_blocks; ++i)
858 basic_block bb = BASIC_BLOCK (i);
860 for (e = bb->succ; e ; e = n)
862 n = e->succ_next;
863 free (e);
866 bb->succ = 0;
867 bb->pred = 0;
870 for (e = ENTRY_BLOCK_PTR->succ; e ; e = n)
872 n = e->succ_next;
873 free (e);
876 ENTRY_BLOCK_PTR->succ = 0;
877 EXIT_BLOCK_PTR->pred = 0;
879 n_edges = 0;
882 /* Identify the edges between basic blocks.
884 NONLOCAL_LABEL_LIST is a list of non-local labels in the function. Blocks
885 that are otherwise unreachable may be reachable with a non-local goto.
887 BB_EH_END is an array indexed by basic block number in which we record
888 the list of exception regions active at the end of the basic block. */
890 static void
891 make_edges (label_value_list)
892 rtx label_value_list;
894 int i;
895 eh_nesting_info *eh_nest_info = init_eh_nesting_info ();
896 sbitmap *edge_cache = NULL;
898 /* Assume no computed jump; revise as we create edges. */
899 current_function_has_computed_jump = 0;
901 /* Heavy use of computed goto in machine-generated code can lead to
902 nearly fully-connected CFGs. In that case we spend a significant
903 amount of time searching the edge lists for duplicates. */
904 if (forced_labels || label_value_list)
906 edge_cache = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
907 sbitmap_vector_zero (edge_cache, n_basic_blocks);
910 /* By nature of the way these get numbered, block 0 is always the entry. */
911 make_edge (edge_cache, ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU);
913 for (i = 0; i < n_basic_blocks; ++i)
915 basic_block bb = BASIC_BLOCK (i);
916 rtx insn, x;
917 enum rtx_code code;
918 int force_fallthru = 0;
920 /* Examine the last instruction of the block, and discover the
921 ways we can leave the block. */
923 insn = bb->end;
924 code = GET_CODE (insn);
926 /* A branch. */
927 if (code == JUMP_INSN)
929 rtx tmp;
931 /* ??? Recognize a tablejump and do the right thing. */
932 if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
933 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
934 && GET_CODE (tmp) == JUMP_INSN
935 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
936 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
938 rtvec vec;
939 int j;
941 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
942 vec = XVEC (PATTERN (tmp), 0);
943 else
944 vec = XVEC (PATTERN (tmp), 1);
946 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
947 make_label_edge (edge_cache, bb,
948 XEXP (RTVEC_ELT (vec, j), 0), 0);
950 /* Some targets (eg, ARM) emit a conditional jump that also
951 contains the out-of-range target. Scan for these and
952 add an edge if necessary. */
953 if ((tmp = single_set (insn)) != NULL
954 && SET_DEST (tmp) == pc_rtx
955 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
956 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
957 make_label_edge (edge_cache, bb,
958 XEXP (XEXP (SET_SRC (tmp), 2), 0), 0);
960 #ifdef CASE_DROPS_THROUGH
961 /* Silly VAXen. The ADDR_VEC is going to be in the way of
962 us naturally detecting fallthru into the next block. */
963 force_fallthru = 1;
964 #endif
967 /* If this is a computed jump, then mark it as reaching
968 everything on the label_value_list and forced_labels list. */
969 else if (computed_jump_p (insn))
971 current_function_has_computed_jump = 1;
973 for (x = label_value_list; x; x = XEXP (x, 1))
974 make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
976 for (x = forced_labels; x; x = XEXP (x, 1))
977 make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
980 /* Returns create an exit out. */
981 else if (returnjump_p (insn))
982 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);
984 /* Otherwise, we have a plain conditional or unconditional jump. */
985 else
987 if (! JUMP_LABEL (insn))
988 abort ();
989 make_label_edge (edge_cache, bb, JUMP_LABEL (insn), 0);
993 /* If this is a CALL_INSN, then mark it as reaching the active EH
994 handler for this CALL_INSN. If we're handling asynchronous
995 exceptions then any insn can reach any of the active handlers.
997 Also mark the CALL_INSN as reaching any nonlocal goto handler. */
999 if (code == CALL_INSN || asynchronous_exceptions)
1001 /* If there's an EH region active at the end of a block,
1002 add the appropriate edges. */
1003 if (bb->eh_end >= 0)
1004 make_eh_edge (edge_cache, eh_nest_info, bb, insn, bb->eh_end);
1006 /* If we have asynchronous exceptions, do the same for *all*
1007 exception regions active in the block. */
1008 if (asynchronous_exceptions
1009 && bb->eh_beg != bb->eh_end)
1011 if (bb->eh_beg >= 0)
1012 make_eh_edge (edge_cache, eh_nest_info, bb,
1013 NULL_RTX, bb->eh_beg);
1015 for (x = bb->head; x != bb->end; x = NEXT_INSN (x))
1016 if (GET_CODE (x) == NOTE
1017 && (NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_BEG
1018 || NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_END))
1020 int region = NOTE_EH_HANDLER (x);
1021 make_eh_edge (edge_cache, eh_nest_info, bb,
1022 NULL_RTX, region);
1026 if (code == CALL_INSN && nonlocal_goto_handler_labels)
1028 /* ??? This could be made smarter: in some cases it's possible
1029 to tell that certain calls will not do a nonlocal goto.
1031 For example, if the nested functions that do the nonlocal
1032 gotos do not have their addresses taken, then only calls to
1033 those functions or to other nested functions that use them
1034 could possibly do nonlocal gotos. */
1035 /* We do know that a REG_EH_REGION note with a value less
1036 than 0 is guaranteed not to perform a non-local goto. */
1037 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1038 if (!note || XINT (XEXP (note, 0), 0) >= 0)
1039 for (x = nonlocal_goto_handler_labels; x ; x = XEXP (x, 1))
1040 make_label_edge (edge_cache, bb, XEXP (x, 0),
1041 EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
1045 /* We know something about the structure of the function __throw in
1046 libgcc2.c. It is the only function that ever contains eh_stub
1047 labels. It modifies its return address so that the last block
1048 returns to one of the eh_stub labels within it. So we have to
1049 make additional edges in the flow graph. */
1050 if (i + 1 == n_basic_blocks && eh_return_stub_label != 0)
1051 make_label_edge (edge_cache, bb, eh_return_stub_label, EDGE_EH);
1053 /* Find out if we can drop through to the next block. */
1054 insn = next_nonnote_insn (insn);
1055 if (!insn || (i + 1 == n_basic_blocks && force_fallthru))
1056 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
1057 else if (i + 1 < n_basic_blocks)
1059 rtx tmp = BLOCK_HEAD (i + 1);
1060 if (GET_CODE (tmp) == NOTE)
1061 tmp = next_nonnote_insn (tmp);
1062 if (force_fallthru || insn == tmp)
1063 make_edge (edge_cache, bb, BASIC_BLOCK (i + 1), EDGE_FALLTHRU);
1067 free_eh_nesting_info (eh_nest_info);
1068 if (edge_cache)
1069 sbitmap_vector_free (edge_cache);
1072 /* Create an edge between two basic blocks. FLAGS are auxiliary information
1073 about the edge that is accumulated between calls. */
1075 static void
1076 make_edge (edge_cache, src, dst, flags)
1077 sbitmap *edge_cache;
1078 basic_block src, dst;
1079 int flags;
1081 int use_edge_cache;
1082 edge e;
1084 /* Don't bother with edge cache for ENTRY or EXIT; there aren't that
1085 many edges to them, and we didn't allocate memory for it. */
1086 use_edge_cache = (edge_cache
1087 && src != ENTRY_BLOCK_PTR
1088 && dst != EXIT_BLOCK_PTR);
1090 /* Make sure we don't add duplicate edges. */
1091 if (! use_edge_cache || TEST_BIT (edge_cache[src->index], dst->index))
1092 for (e = src->succ; e ; e = e->succ_next)
1093 if (e->dest == dst)
1095 e->flags |= flags;
1096 return;
1099 e = (edge) xcalloc (1, sizeof (*e));
1100 n_edges++;
1102 e->succ_next = src->succ;
1103 e->pred_next = dst->pred;
1104 e->src = src;
1105 e->dest = dst;
1106 e->flags = flags;
1108 src->succ = e;
1109 dst->pred = e;
1111 if (use_edge_cache)
1112 SET_BIT (edge_cache[src->index], dst->index);
1115 /* Create an edge from a basic block to a label. */
1117 static void
1118 make_label_edge (edge_cache, src, label, flags)
1119 sbitmap *edge_cache;
1120 basic_block src;
1121 rtx label;
1122 int flags;
1124 if (GET_CODE (label) != CODE_LABEL)
1125 abort ();
1127 /* If the label was never emitted, this insn is junk, but avoid a
1128 crash trying to refer to BLOCK_FOR_INSN (label). This can happen
1129 as a result of a syntax error and a diagnostic has already been
1130 printed. */
1132 if (INSN_UID (label) == 0)
1133 return;
1135 make_edge (edge_cache, src, BLOCK_FOR_INSN (label), flags);
1138 /* Create the edges generated by INSN in REGION. */
1140 static void
1141 make_eh_edge (edge_cache, eh_nest_info, src, insn, region)
1142 sbitmap *edge_cache;
1143 eh_nesting_info *eh_nest_info;
1144 basic_block src;
1145 rtx insn;
1146 int region;
1148 handler_info **handler_list;
1149 int num, is_call;
1151 is_call = (insn && GET_CODE (insn) == CALL_INSN ? EDGE_ABNORMAL_CALL : 0);
1152 num = reachable_handlers (region, eh_nest_info, insn, &handler_list);
1153 while (--num >= 0)
1155 make_label_edge (edge_cache, src, handler_list[num]->handler_label,
1156 EDGE_ABNORMAL | EDGE_EH | is_call);
1160 /* EH_REGION notes appearing between basic blocks is ambiguous, and even
1161    dangerous if we intend to move basic blocks around.  Move such notes
1162    into the following block.  */
1164 static void
1165 move_stray_eh_region_notes ()
1167   int i;
1168   basic_block b1, b2;
1170   if (n_basic_blocks < 2)
1171     return;
     /* Walk each gap between consecutive blocks, from last to first.
	B1 is the earlier block, B2 the one that follows it.  */
1173   b2 = BASIC_BLOCK (n_basic_blocks - 1);
1174   for (i = n_basic_blocks - 2; i >= 0; --i, b2 = b1)
1176       rtx insn, next, list = NULL_RTX;
1178       b1 = BASIC_BLOCK (i);
       /* Collect every EH region begin/end note sitting in the gap
	  between B1's end and B2's head.  */
1179       for (insn = NEXT_INSN (b1->end); insn != b2->head; insn = next)
1181 	  next = NEXT_INSN (insn);
1182 	  if (GET_CODE (insn) == NOTE
1183 	      && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG
1184 		  || NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
1186 	      /* Unlink from the insn chain.  */
1187 	      NEXT_INSN (PREV_INSN (insn)) = next;
1188 	      PREV_INSN (next) = PREV_INSN (insn);
1190 	      /* Queue it.  NEXT_INSN is reused as the queue link, so
		 LIST ends up in reverse order of discovery.  */
1191 	      NEXT_INSN (insn) = list;
1192 	      list = insn;
1196       if (list == NULL_RTX)
1197 	continue;
1199       /* Find where to insert these things.  Skip past B2's leading
	  CODE_LABEL, if any, so the notes land inside the block.  */
1200       insn = b2->head;
1201       if (GET_CODE (insn) == CODE_LABEL)
1202 	insn = NEXT_INSN (insn);
       /* Emptying the queue re-reverses the notes, restoring their
	  original relative order after INSN.  */
1204       while (list)
1206 	  next = NEXT_INSN (list);
1207 	  add_insn_after (list, insn);
1208 	  list = next;
1213 /* Recompute eh_beg/eh_end for each basic block.  F is the first insn
     of the function.  EH_LIST acts as a stack of the currently-open
     NOTE_INSN_EH_REGION_BEG notes; its head is the innermost active
     region.  A value of -1 in eh_beg/eh_end means no region is active
     at the block boundary.  */
1215 static void
1216 record_active_eh_regions (f)
1217      rtx f;
1219   rtx insn, eh_list = NULL_RTX;
1220   int i = 0;
1221   basic_block bb = BASIC_BLOCK (0);
1223   for (insn = f; insn ; insn = NEXT_INSN (insn))
       /* Record the innermost region open at the start of each block.  */
1225       if (bb->head == insn)
1226 	bb->eh_beg = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);
       /* Push on region begin, pop on region end.  */
1228       if (GET_CODE (insn) == NOTE)
1230 	  int kind = NOTE_LINE_NUMBER (insn);
1231 	  if (kind == NOTE_INSN_EH_REGION_BEG)
1232 	    eh_list = alloc_INSN_LIST (insn, eh_list);
1233 	  else if (kind == NOTE_INSN_EH_REGION_END)
1235 	      rtx t = XEXP (eh_list, 1);
1236 	      free_INSN_LIST_node (eh_list);
1237 	      eh_list = t;
       /* Record the innermost region open at the end of each block, and
	  advance to the next block; stop once all blocks are done.  */
1241       if (bb->end == insn)
1243 	  bb->eh_end = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);
1244 	  i += 1;
1245 	  if (i == n_basic_blocks)
1246 	    break;
1247 	  bb = BASIC_BLOCK (i);
1252 /* Identify critical edges and set the bits appropriately. */
1254 static void
1255 mark_critical_edges ()
1257 int i, n = n_basic_blocks;
1258 basic_block bb;
1260 /* We begin with the entry block. This is not terribly important now,
1261 but could be if a front end (Fortran) implemented alternate entry
1262 points. */
1263 bb = ENTRY_BLOCK_PTR;
1264 i = -1;
1266 while (1)
1268 edge e;
1270 /* (1) Critical edges must have a source with multiple successors. */
1271 if (bb->succ && bb->succ->succ_next)
1273 for (e = bb->succ; e ; e = e->succ_next)
1275 /* (2) Critical edges must have a destination with multiple
1276 predecessors. Note that we know there is at least one
1277 predecessor -- the edge we followed to get here. */
1278 if (e->dest->pred->pred_next)
1279 e->flags |= EDGE_CRITICAL;
1280 else
1281 e->flags &= ~EDGE_CRITICAL;
1284 else
1286 for (e = bb->succ; e ; e = e->succ_next)
1287 e->flags &= ~EDGE_CRITICAL;
1290 if (++i >= n)
1291 break;
1292 bb = BASIC_BLOCK (i);
1296 /* Split a (typically critical) edge.  Return the new block.
1297    Abort on abnormal edges.
1299    ??? The code generally expects to be called on critical edges.
1300    The case of a block ending in an unconditional jump to a
1301    block with multiple predecessors is not handled optimally.  */
1303 basic_block
1304 split_edge (edge_in)
1305      edge edge_in;
1307   basic_block old_pred, bb, old_succ;
1308   edge edge_out;
1309   rtx bb_note;
1310   int i, j;
1312   /* Abnormal edges cannot be split.  */
1313   if ((edge_in->flags & EDGE_ABNORMAL) != 0)
1314     abort ();
1316   old_pred = edge_in->src;
1317   old_succ = edge_in->dest;
1319   /* Remove the existing edge from the destination's pred list.  */
     /* Walk the pred list by address so the edge can be spliced out
	without tracking a separate "previous" pointer.  */
1321     edge *pp;
1322     for (pp = &old_succ->pred; *pp != edge_in; pp = &(*pp)->pred_next)
1323       continue;
1324     *pp = edge_in->pred_next;
1325     edge_in->pred_next = NULL;
1328   /* Create the new structures.  */
1329   bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
1330   edge_out = (edge) xcalloc (1, sizeof (*edge_out));
1331   n_edges++;
1333   memset (bb, 0, sizeof (*bb));
1334   bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
1335   bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);
1337   /* ??? This info is likely going to be out of date very soon.  */
1338   if (old_succ->global_live_at_start)
1340       COPY_REG_SET (bb->global_live_at_start, old_succ->global_live_at_start);
1341       COPY_REG_SET (bb->global_live_at_end, old_succ->global_live_at_start);
1343   else
1345       CLEAR_REG_SET (bb->global_live_at_start);
1346       CLEAR_REG_SET (bb->global_live_at_end);
1349   /* Wire them up.  EDGE_IN now enters the new block, and EDGE_OUT
     is a fallthru from the new block to the old successor.  */
1350   bb->pred = edge_in;
1351   bb->succ = edge_out;
1353   edge_in->dest = bb;
     /* The new block has exactly one predecessor, so the split edge can
	no longer be critical.  */
1354   edge_in->flags &= ~EDGE_CRITICAL;
1356   edge_out->pred_next = old_succ->pred;
1357   edge_out->succ_next = NULL;
1358   edge_out->src = bb;
1359   edge_out->dest = old_succ;
1360   edge_out->flags = EDGE_FALLTHRU;
1361   edge_out->probability = REG_BR_PROB_BASE;
1363   old_succ->pred = edge_out;
1365   /* Tricky case -- if there existed a fallthru into the successor
1366      (and we're not it) we must add a new unconditional jump around
1367      the new block we're actually interested in.
1369      Further, if that edge is critical, this means a second new basic
1370      block must be created to hold it.  In order to simplify correct
1371      insn placement, do this before we touch the existing basic block
1372      ordering for the block we were really wanting.  */
1373   if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1375       edge e;
       /* Look for some other fallthru predecessor of OLD_SUCC.  */
1376       for (e = edge_out->pred_next; e ; e = e->pred_next)
1377 	if (e->flags & EDGE_FALLTHRU)
1378 	  break;
1380       if (e)
1382 	  basic_block jump_block;
1383 	  rtx pos;
1385 	  if ((e->flags & EDGE_CRITICAL) == 0)
1387 	    /* Non critical -- we can simply add a jump to the end
1388 	       of the existing predecessor.  */
1389 	    jump_block = e->src;
1391 	  else
1393 	      /* We need a new block to hold the jump.  The simplest
1394 	         way to do the bulk of the work here is to recursively
1395 	         call ourselves.  */
1396 	      jump_block = split_edge (e);
1397 	      e = jump_block->succ;
1400 	  /* Now add the jump insn ...  */
1401 	  pos = emit_jump_insn_after (gen_jump (old_succ->head),
1402 				      jump_block->end);
1403 	  jump_block->end = pos;
1404 	  emit_barrier_after (pos);
1406 	  /* ... let jump know that label is in use, ...  */
1407 	  JUMP_LABEL (pos) = old_succ->head;
1408 	  ++LABEL_NUSES (old_succ->head);
1410 	  /* ... and clear fallthru on the outgoing edge.  */
1411 	  e->flags &= ~EDGE_FALLTHRU;
1413 	  /* Continue splitting the interesting edge.  */
1417   /* Place the new block just in front of the successor.  Shift every
     later block up one slot and renumber.  */
1418   VARRAY_GROW (basic_block_info, ++n_basic_blocks);
1419   if (old_succ == EXIT_BLOCK_PTR)
1420     j = n_basic_blocks - 1;
1421   else
1422     j = old_succ->index;
1423   for (i = n_basic_blocks - 1; i > j; --i)
1425       basic_block tmp = BASIC_BLOCK (i - 1);
1426       BASIC_BLOCK (i) = tmp;
1427       tmp->index = i;
1429   BASIC_BLOCK (i) = bb;
1430   bb->index = i;
1432   /* Create the basic block note.  Until a jump or label is emitted,
     the new block consists of just this note.  */
1433   if (old_succ != EXIT_BLOCK_PTR)
1434     bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, old_succ->head);
1435   else
1436     bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
1437   NOTE_BASIC_BLOCK (bb_note) = bb;
1438   bb->head = bb->end = bb_note;
1440   /* Not quite simple -- for non-fallthru edges, we must adjust the
1441      predecessor's jump instruction to target our new block.  */
1442   if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1444       rtx tmp, insn = old_pred->end;
1445       rtx old_label = old_succ->head;
1446       rtx new_label = gen_label_rtx ();
1448       if (GET_CODE (insn) != JUMP_INSN)
1449 	abort ();
1451       /* ??? Recognize a tablejump and adjust all matching cases.  */
1452       if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
1453 	  && (tmp = NEXT_INSN (tmp)) != NULL_RTX
1454 	  && GET_CODE (tmp) == JUMP_INSN
1455 	  && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
1456 	      || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
1458 	  rtvec vec;
1459 	  int j;
	   /* ADDR_VEC stores the labels in operand 0; ADDR_DIFF_VEC in
	      operand 1.  */
1461 	  if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
1462 	    vec = XVEC (PATTERN (tmp), 0);
1463 	  else
1464 	    vec = XVEC (PATTERN (tmp), 1);
	   /* Retarget every table entry that pointed at the old label.  */
1466 	  for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
1467 	    if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
1469 		RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (VOIDmode, new_label);
1470 		--LABEL_NUSES (old_label);
1471 		++LABEL_NUSES (new_label);
1474 	  /* Handle casesi dispatch insns */
1475 	  if ((tmp = single_set (insn)) != NULL
1476 	      && SET_DEST (tmp) == pc_rtx
1477 	      && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
1478 	      && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
1479 	      && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
1481 	      XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (VOIDmode,
1482 							   new_label);
1483 	      --LABEL_NUSES (old_label);
1484 	      ++LABEL_NUSES (new_label);
1487       else
1489 	  /* This would have indicated an abnormal edge.  */
1490 	  if (computed_jump_p (insn))
1491 	    abort ();
1493 	  /* A return instruction can't be redirected.  */
1494 	  if (returnjump_p (insn))
1495 	    abort ();
1497 	  /* If the insn doesn't go where we think, we're confused.  */
1498 	  if (JUMP_LABEL (insn) != old_label)
1499 	    abort ();
1501 	  redirect_jump (insn, new_label);
       /* The new label becomes the head of the new block.  */
1504       emit_label_before (new_label, bb_note);
1505       bb->head = new_label;
1508   return bb;
1511 /* Queue instructions for insertion on an edge between two basic blocks.
1512 The new instructions and basic blocks (if any) will not appear in the
1513 CFG until commit_edge_insertions is called. */
1515 void
1516 insert_insn_on_edge (pattern, e)
1517 rtx pattern;
1518 edge e;
1520 /* We cannot insert instructions on an abnormal critical edge.
1521 It will be easier to find the culprit if we die now. */
1522 if ((e->flags & (EDGE_ABNORMAL|EDGE_CRITICAL))
1523 == (EDGE_ABNORMAL|EDGE_CRITICAL))
1524 abort ();
1526 if (e->insns == NULL_RTX)
1527 start_sequence ();
1528 else
1529 push_to_sequence (e->insns);
1531 emit_insn (pattern);
1533 e->insns = get_insns ();
1534 end_sequence();
1537 /* Update the CFG for the instructions queued on edge E.  The insns
     are placed (a) at the head of E's destination when it is the sole
     predecessor, (b) at the tail of E's source when it is the sole
     (non-abnormal) successor, or (c) in a freshly split block
     otherwise.  Exactly one of BEFORE/AFTER ends up non-null and
     determines how the insns are spliced in.  */
1539 static void
1540 commit_one_edge_insertion (e)
1541      edge e;
1543   rtx before = NULL_RTX, after = NULL_RTX, tmp;
1544   basic_block bb;
1546   /* Figure out where to put these things.  If the destination has
1547      one predecessor, insert there.  Except for the exit block.  */
1548   if (e->dest->pred->pred_next == NULL
1549       && e->dest != EXIT_BLOCK_PTR)
1551       bb = e->dest;
1553       /* Get the location correct wrt a code label, and "nice" wrt
1554 	 a basic block note, and before everything else.  */
1555       tmp = bb->head;
1556       if (GET_CODE (tmp) == CODE_LABEL)
1557 	tmp = NEXT_INSN (tmp);
1558       if (GET_CODE (tmp) == NOTE
1559 	  && NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BASIC_BLOCK)
1560 	tmp = NEXT_INSN (tmp);
1561       if (tmp == bb->head)
1562 	before = tmp;
1563       else
1564 	after = PREV_INSN (tmp);
1567   /* If the source has one successor and the edge is not abnormal,
1568      insert there.  Except for the entry block.  */
1569   else if ((e->flags & EDGE_ABNORMAL) == 0
1570 	   && e->src->succ->succ_next == NULL
1571 	   && e->src != ENTRY_BLOCK_PTR)
1573       bb = e->src;
       /* When the block ends in a jump, the insns go just before it so
	  they execute on the path through E.  */
1574       if (GET_CODE (bb->end) == JUMP_INSN)
1576 	  /* ??? Is it possible to wind up with non-simple jumps?  Perhaps
1577 	     a jump with delay slots already filled?  */
1578 	  if (! simplejump_p (bb->end))
1579 	    abort ();
1581 	  before = bb->end;
1583       else
1585 	  /* We'd better be fallthru, or we've lost track of what's what.  */
1586 	  if ((e->flags & EDGE_FALLTHRU) == 0)
1587 	    abort ();
1589 	  after = bb->end;
1593   /* Otherwise we must split the edge.  */
1594   else
1596       bb = split_edge (e);
1597       after = bb->end;
1600   /* Now that we've found the spot, do the insertion.  Take the queued
     sequence off the edge first so the edge is left clean.  */
1601   tmp = e->insns;
1602   e->insns = NULL_RTX;
1604   /* Set the new block number for these insns, if structure is allocated.  */
1605   if (basic_block_for_insn)
1607       rtx i;
1608       for (i = tmp; i != NULL_RTX; i = NEXT_INSN (i))
1609 	set_block_for_insn (i, bb);
     /* Splice in the insns and keep BB's head/end pointers accurate.  */
1612   if (before)
1614       emit_insns_before (tmp, before);
1615       if (before == bb->head)
1616 	bb->head = tmp;
1618   else
1620       tmp = emit_insns_after (tmp, after);
1621       if (after == bb->end)
1622 	bb->end = tmp;
1626 /* Update the CFG for all queued instructions. */
1628 void
1629 commit_edge_insertions ()
1631 int i;
1632 basic_block bb;
1634 i = -1;
1635 bb = ENTRY_BLOCK_PTR;
1636 while (1)
1638 edge e, next;
1640 for (e = bb->succ; e ; e = next)
1642 next = e->succ_next;
1643 if (e->insns)
1644 commit_one_edge_insertion (e);
1647 if (++i >= n_basic_blocks)
1648 break;
1649 bb = BASIC_BLOCK (i);
1653 /* Delete all unreachable basic blocks.  Reachability is computed by a
     worklist walk from the ENTRY block's successors; bb->aux doubles
     as the "seen" marker.  Afterwards, edges that have become simple
     fallthrus are tidied, and EH regions whose handlers were deleted
     are cleaned up.  */
1655 static void
1656 delete_unreachable_blocks ()
1658   basic_block *worklist, *tos;
1659   int deleted_handler;
1660   edge e;
1661   int i, n;
1663   n = n_basic_blocks;
1664   tos = worklist = (basic_block *) xmalloc (sizeof (basic_block) * n);
1666   /* Use basic_block->aux as a marker.  Clear them all.  */
1668   for (i = 0; i < n; ++i)
1669     BASIC_BLOCK (i)->aux = NULL;
1671   /* Add our starting points to the worklist.  Almost always there will
1672      be only one.  It isn't inconcievable that we might one day directly
1673      support Fortran alternate entry points.  */
1675   for (e = ENTRY_BLOCK_PTR->succ; e ; e = e->succ_next)
1677       *tos++ = e->dest;
1679       /* Mark the block with a handy non-null value.  */
1680       e->dest->aux = e;
1683   /* Iterate: find everything reachable from what we've already seen.  */
1685   while (tos != worklist)
1687       basic_block b = *--tos;
1689       for (e = b->succ; e ; e = e->succ_next)
1690 	if (!e->dest->aux)
1692 	    *tos++ = e->dest;
1693 	    e->dest->aux = e;
1697   /* Delete all unreachable basic blocks.  Count down so that we don't
1698      interfere with the block renumbering that happens in delete_block.  */
1700   deleted_handler = 0;
1702   for (i = n - 1; i >= 0; --i)
1704       basic_block b = BASIC_BLOCK (i);
1706       if (b->aux != NULL)
1707 	/* This block was found.  Tidy up the mark.  */
1708 	b->aux = NULL;
1709       else
1710 	deleted_handler |= delete_block (b);
1713   /* Fix up edges that now fall through, or rather should now fall through
1714      but previously required a jump around now deleted blocks.  Simplify
1715      the search by only examining blocks numerically adjacent, since this
1716      is how find_basic_blocks created them.  */
1718   for (i = 1; i < n_basic_blocks; ++i)
1720       basic_block b = BASIC_BLOCK (i - 1);
1721       basic_block c = BASIC_BLOCK (i);
1722       edge s;
1724       /* We care about simple conditional or unconditional jumps with
1725 	 a single successor.
1727 	 If we had a conditional branch to the next instruction when
1728 	 find_basic_blocks was called, then there will only be one
1729 	 out edge for the block which ended with the conditional
1730 	 branch (since we do not create duplicate edges).
1732 	 Furthermore, the edge will be marked as a fallthru because we
1733 	 merge the flags for the duplicate edges.  So we do not want to
1734 	 check that the edge is not a FALLTHRU edge.  */
1735       if ((s = b->succ) != NULL
1736 	  && s->succ_next == NULL
1737 	  && s->dest == c
1738 	  /* If the jump insn has side effects, we can't tidy the edge.  */
1739 	  && (GET_CODE (b->end) != JUMP_INSN
1740 	      || onlyjump_p (b->end)))
1741 	tidy_fallthru_edge (s, b, c);
1744   /* If we deleted an exception handler, we may have EH region begin/end
1745      blocks to remove as well.  */
1746   if (deleted_handler)
1747     delete_eh_regions ();
1749   free (worklist);
1752 /* Find EH regions for which there is no longer a handler, and delete them.
     The region notes are not physically removed; they are turned into
     NOTE_INSN_DELETED, which later passes ignore.  */
1754 static void
1755 delete_eh_regions ()
1757   rtx insn;
     /* Refresh rethrow bookkeeping first, so rethrow_used below is
	accurate.  */
1759   update_rethrow_references ();
1761   for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1762     if (GET_CODE (insn) == NOTE)
1764 	if ((NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG) ||
1765 	    (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
1767 	    int num = NOTE_EH_HANDLER (insn);
1768 	    /* A NULL handler indicates a region is no longer needed,
1769 	       as long as it isn't the target of a rethrow.  */
1770 	    if (get_first_handler (num) == NULL && ! rethrow_used (num))
1772 		NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1773 		NOTE_SOURCE_FILE (insn) = 0;
1779 /* Return true if NOTE is not one of the ones that must be kept paired,
1780 so that we may simply delete them. */
1782 static int
1783 can_delete_note_p (note)
1784 rtx note;
1786 return (NOTE_LINE_NUMBER (note) == NOTE_INSN_DELETED
1787 || NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK);
1790 /* Unlink a chain of insns between START and FINISH, leaving notes
1791 that must be paired. */
1793 void
1794 flow_delete_insn_chain (start, finish)
1795 rtx start, finish;
1797 /* Unchain the insns one by one. It would be quicker to delete all
1798 of these with a single unchaining, rather than one at a time, but
1799 we need to keep the NOTE's. */
1801 rtx next;
1803 while (1)
1805 next = NEXT_INSN (start);
1806 if (GET_CODE (start) == NOTE && !can_delete_note_p (start))
1808 else if (GET_CODE (start) == CODE_LABEL && !can_delete_label_p (start))
1810 else
1811 next = flow_delete_insn (start);
1813 if (start == finish)
1814 break;
1815 start = next;
1819 /* Delete the insns in a (non-live) block.  We physically delete every
1820    non-deleted-note insn, and update the flow graph appropriately.
1822    Return nonzero if we deleted an exception handler.  */
1824 /* ??? Preserving all such notes strikes me as wrong.  It would be nice
1825    to post-process the stream to remove empty blocks, loops, ranges, etc.  */
1827 static int
1828 delete_block (b)
1829      basic_block b;
1831   int deleted_handler = 0;
1832   rtx insn, end;
1834   /* If the head of this block is a CODE_LABEL, then it might be the
1835      label for an exception handler which can't be reached.
1837      We need to remove the label from the exception_handler_label list
1838      and remove the associated NOTE_INSN_EH_REGION_BEG and
1839      NOTE_INSN_EH_REGION_END notes.  */
1841   insn = b->head;
1843   never_reached_warning (insn);
1845   if (GET_CODE (insn) == CODE_LABEL)
1847       rtx x, *prev = &exception_handler_labels;
       /* Scan the EH label list for this label; PREV tracks the link to
	  patch so the entry can be spliced out in place.  */
1849       for (x = exception_handler_labels; x; x = XEXP (x, 1))
1851 	  if (XEXP (x, 0) == insn)
1853 	      /* Found a match, splice this label out of the EH label list.  */
1854 	      *prev = XEXP (x, 1);
1855 	      XEXP (x, 1) = NULL_RTX;
1856 	      XEXP (x, 0) = NULL_RTX;
1858 	      /* Remove the handler from all regions */
1859 	      remove_handler (insn);
1860 	      deleted_handler = 1;
1861 	      break;
1863 	  prev = &XEXP (x, 1);
1866       /* This label may be referenced by code solely for its value, or
1867 	 referenced by static data, or something.  We have determined
1868 	 that it is not reachable, but cannot delete the label itself.
1869 	 Save code space and continue to delete the balance of the block,
1870 	 along with properly updating the cfg.  */
1871       if (!can_delete_label_p (insn))
1873 	  /* If we've only got one of these, skip the whole deleting
1874 	     insns thing.  */
1875 	  if (insn == b->end)
1876 	    goto no_delete_insns;
1877 	  insn = NEXT_INSN (insn);
1881   /* Selectively unlink the insn chain.  Include any BARRIER that may
1882      follow the basic block.  */
1883   end = next_nonnote_insn (b->end);
1884   if (!end || GET_CODE (end) != BARRIER)
1885     end = b->end;
1886   flow_delete_insn_chain (insn, end);
1888  no_delete_insns:
1890   /* Remove the edges into and out of this block.  Note that there may
1891      indeed be edges in, if we are removing an unreachable loop.  */
     /* Each incoming edge is unlinked from its source's successor list
	by address-walking, then freed; symmetrically for outgoing
	edges and their destinations' predecessor lists.  */
1893     edge e, next, *q;
1895     for (e = b->pred; e ; e = next)
1897 	for (q = &e->src->succ; *q != e; q = &(*q)->succ_next)
1898 	  continue;
1899 	*q = e->succ_next;
1900 	next = e->pred_next;
1901 	n_edges--;
1902 	free (e);
1904     for (e = b->succ; e ; e = next)
1906 	for (q = &e->dest->pred; *q != e; q = &(*q)->pred_next)
1907 	  continue;
1908 	*q = e->pred_next;
1909 	next = e->succ_next;
1910 	n_edges--;
1911 	free (e);
1914     b->pred = NULL;
1915     b->succ = NULL;
1918   /* Remove the basic block from the array, and compact behind it.  */
1919   expunge_block (b);
1921   return deleted_handler;
1924 /* Remove block B from the basic block array and compact behind it. */
1926 static void
1927 expunge_block (b)
1928 basic_block b;
1930 int i, n = n_basic_blocks;
1932 for (i = b->index; i + 1 < n; ++i)
1934 basic_block x = BASIC_BLOCK (i + 1);
1935 BASIC_BLOCK (i) = x;
1936 x->index = i;
1939 basic_block_info->num_elements--;
1940 n_basic_blocks--;
1943 /* Delete INSN by patching it out. Return the next insn. */
1945 static rtx
1946 flow_delete_insn (insn)
1947 rtx insn;
1949 rtx prev = PREV_INSN (insn);
1950 rtx next = NEXT_INSN (insn);
1952 PREV_INSN (insn) = NULL_RTX;
1953 NEXT_INSN (insn) = NULL_RTX;
1955 if (prev)
1956 NEXT_INSN (prev) = next;
1957 if (next)
1958 PREV_INSN (next) = prev;
1959 else
1960 set_last_insn (prev);
1962 if (GET_CODE (insn) == CODE_LABEL)
1963 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
1965 /* If deleting a jump, decrement the use count of the label. Deleting
1966 the label itself should happen in the normal course of block merging. */
1967 if (GET_CODE (insn) == JUMP_INSN && JUMP_LABEL (insn))
1968 LABEL_NUSES (JUMP_LABEL (insn))--;
1970 return next;
1973 /* True if a given label can be deleted. */
1975 static int
1976 can_delete_label_p (label)
1977 rtx label;
1979 rtx x;
1981 if (LABEL_PRESERVE_P (label))
1982 return 0;
1984 for (x = forced_labels; x ; x = XEXP (x, 1))
1985 if (label == XEXP (x, 0))
1986 return 0;
1987 for (x = label_value_list; x ; x = XEXP (x, 1))
1988 if (label == XEXP (x, 0))
1989 return 0;
1990 for (x = exception_handler_labels; x ; x = XEXP (x, 1))
1991 if (label == XEXP (x, 0))
1992 return 0;
1994 /* User declared labels must be preserved. */
1995 if (LABEL_NAME (label) != 0)
1996 return 0;
1998 return 1;
2001 /* Blocks A and B are to be merged into a single block.  A has no incoming
2002    fallthru edge, so it can be moved before B without adding or modifying
2003    any jumps (aside from the jump from A to B).  Returns nonzero on
     success, zero if the merge was not possible.  */
2005 static int
2006 merge_blocks_move_predecessor_nojumps (a, b)
2007      basic_block a, b;
2009   rtx start, end, barrier;
2010   int index;
2012   start = a->head;
2013   end = a->end;
2015   /* We want to delete the BARRIER after the end of the insns we are
2016      going to move.  If we don't find a BARRIER, then do nothing.  This
2017      can happen in some cases if we have labels we can not delete.
2019      Similarly, do nothing if we can not delete the label at the start
2020      of the target block.  */
     /* NOTE(review): next_nonnote_insn can return NULL when END is the
	last insn of the function; GET_CODE would then dereference a
	null rtx.  Presumably A always has a following BARRIER here
	because it has an outgoing jump -- TODO confirm.  */
2021   barrier = next_nonnote_insn (end);
2022   if (GET_CODE (barrier) != BARRIER
2023       || (GET_CODE (b->head) == CODE_LABEL
2024 	  && ! can_delete_label_p (b->head)))
2025     return 0;
2026   else
2027     flow_delete_insn (barrier);
2029   /* Move block and loop notes out of the chain so that we do not
2030      disturb their order.
2032      ??? A better solution would be to squeeze out all the non-nested notes
2033      and adjust the block trees appropriately.   Even better would be to have
2034      a tighter connection between block trees and rtl so that this is not
2035      necessary.  */
2036   start = squeeze_notes (start, end);
2038   /* Scramble the insn chain.  */
2039   if (end != PREV_INSN (b->head))
2040     reorder_insns (start, end, PREV_INSN (b->head));
2042   if (rtl_dump_file)
2044       fprintf (rtl_dump_file, "Moved block %d before %d and merged.\n",
2045 	       a->index, b->index);
2048   /* Swap the records for the two blocks around.  Although we are deleting B,
2049      A is now where B was and we want to compact the BB array from where
2050      A used to be.  */
2051   BASIC_BLOCK(a->index) = b;
2052   BASIC_BLOCK(b->index) = a;
2053   index = a->index;
2054   a->index = b->index;
2055   b->index = index;
2057   /* Now blocks A and B are contiguous.  Merge them.  */
2058   merge_blocks_nomove (a, b);
2060   return 1;
2063 /* Blocks A and B are to be merged into a single block.  B has no outgoing
2064    fallthru edge, so it can be moved after A without adding or modifying
2065    any jumps (aside from the jump from A to B).  Returns nonzero on
     success, zero if the merge was not possible.  */
2067 static int
2068 merge_blocks_move_successor_nojumps (a, b)
2069      basic_block a, b;
2071   rtx start, end, barrier;
2073   start = b->head;
2074   end = b->end;
2076   /* We want to delete the BARRIER after the end of the insns we are
2077      going to move.  If we don't find a BARRIER, then do nothing.  This
2078      can happen in some cases if we have labels we can not delete.
2080      Similarly, do nothing if we can not delete the label at the start
2081      of the target block.  */
     /* NOTE(review): as in the predecessor variant, next_nonnote_insn
	may return NULL at the end of the function and GET_CODE would
	then dereference a null rtx -- TODO confirm B always has a
	trailing BARRIER on this path.  */
2082   barrier = next_nonnote_insn (end);
2083   if (GET_CODE (barrier) != BARRIER
2084       || (GET_CODE (b->head) == CODE_LABEL
2085 	  && ! can_delete_label_p (b->head)))
2086     return 0;
2087   else
2088     flow_delete_insn (barrier);
2090   /* Move block and loop notes out of the chain so that we do not
2091      disturb their order.
2093      ??? A better solution would be to squeeze out all the non-nested notes
2094      and adjust the block trees appropriately.   Even better would be to have
2095      a tighter connection between block trees and rtl so that this is not
2096      necessary.  */
2097   start = squeeze_notes (start, end);
2099   /* Scramble the insn chain.  */
2100   reorder_insns (start, end, a->end);
2102   /* Now blocks A and B are contiguous.  Merge them.  */
2103   merge_blocks_nomove (a, b);
2105   if (rtl_dump_file)
2107       fprintf (rtl_dump_file, "Moved block %d after %d and merged.\n",
2108 	       b->index, a->index);
2111   return 1;
2114 /* Blocks A and B are to be merged into a single block.  The insns
2115    are already contiguous, hence `nomove'.  A's jump to B (if any) and
     B's label/basic-block note are deleted; B's insns and outgoing
     edges are reassigned to A, and B is expunged from the block array.  */
2117 static void
2118 merge_blocks_nomove (a, b)
2119      basic_block a, b;
2121   edge e;
2122   rtx b_head, b_end, a_end;
2123   int b_empty = 0;
2125   /* If there was a CODE_LABEL beginning B, delete it.  */
2126   b_head = b->head;
2127   b_end = b->end;
2128   if (GET_CODE (b_head) == CODE_LABEL)
2130       /* Detect basic blocks with nothing but a label.  This can happen
2131 	 in particular at the end of a function.  */
2132       if (b_head == b_end)
2133 	b_empty = 1;
2134       b_head = flow_delete_insn (b_head);
2137   /* Delete the basic block note.  */
2138   if (GET_CODE (b_head) == NOTE
2139       && NOTE_LINE_NUMBER (b_head) == NOTE_INSN_BASIC_BLOCK)
2141       if (b_head == b_end)
2142 	b_empty = 1;
2143       b_head = flow_delete_insn (b_head);
2146   /* If there was a jump out of A, delete it.  */
2147   a_end = a->end;
2148   if (GET_CODE (a_end) == JUMP_INSN)
2150       rtx prev;
       /* PREV becomes A's new last insn once the jump is gone.  */
2152       prev = prev_nonnote_insn (a_end);
2153       if (!prev)
2154 	prev = a->head;
2156 #ifdef HAVE_cc0
2157       /* If this was a conditional jump, we need to also delete
2158 	 the insn that set cc0.  */
2160       if (prev && sets_cc0_p (prev))
2162 	  rtx tmp = prev;
2163 	  prev = prev_nonnote_insn (prev);
2164 	  if (!prev)
2165 	    prev = a->head;
2166 	  flow_delete_insn (tmp);
2168 #endif
2170       /* Note that a->head != a->end, since we should have at least a
2171 	 bb note plus the jump, so prev != insn.  */
2172       flow_delete_insn (a_end);
2173       a_end = prev;
2176   /* By definition, there should only be one successor of A, and that is
2177      B.  Free that edge struct.  */
2178   n_edges--;
2179   free (a->succ);
2181   /* Adjust the edges out of B for the new owner.  */
2182   for (e = b->succ; e ; e = e->succ_next)
2183     e->src = a;
2184   a->succ = b->succ;
2186   /* Reassociate the insns of B with A.  */
2187   if (!b_empty)
2189       BLOCK_FOR_INSN (b_head) = a;
2190       while (b_head != b_end)
2192 	  b_head = NEXT_INSN (b_head);
2193 	  BLOCK_FOR_INSN (b_head) = a;
2195       a_end = b_head;
2197   a->end = a_end;
2199   /* Compact the basic block array.  */
2200   expunge_block (b);
2203 /* Attempt to merge basic blocks that are potentially non-adjacent.
2204    E is the edge from B to C.  Return true iff the attempt succeeded.  */
2206 static int
2207 merge_blocks (e, b, c)
2208      edge e;
2209      basic_block b, c;
2211   /* If B has a fallthru edge to C, no need to move anything.  */
2212   if (e->flags & EDGE_FALLTHRU)
2214       /* If a label still appears somewhere and we cannot delete the label,
2215 	 then we cannot merge the blocks.  The edge was tidied already.  */
2217       rtx insn, stop = NEXT_INSN (c->head);
2218       for (insn = NEXT_INSN (b->end); insn != stop; insn = NEXT_INSN (insn))
2219 	if (GET_CODE (insn) == CODE_LABEL && !can_delete_label_p (insn))
2220 	  return 0;
2222       merge_blocks_nomove (b, c);
2224       if (rtl_dump_file)
2226 	  fprintf (rtl_dump_file, "Merged %d and %d without moving.\n",
2227 		   b->index, c->index);
2230       return 1;
2232   else
2234       edge tmp_edge;
2235       basic_block d;
2236       int c_has_outgoing_fallthru;
2237       int b_has_incoming_fallthru;
2239       /* We must make sure to not munge nesting of exception regions,
2240 	 lexical blocks, and loop notes.
2242 	 The first is taken care of by requiring that the active eh
2243 	 region at the end of one block always matches the active eh
2244 	 region at the beginning of the next block.
2246 	 The later two are taken care of by squeezing out all the notes.  */
2248       /* ???  A throw/catch edge (or any abnormal edge) should be rarely
2249 	 executed and we may want to treat blocks which have two out
2250 	 edges, one normal, one abnormal as only having one edge for
2251 	 block merging purposes.  */
2253       for (tmp_edge = c->succ; tmp_edge ; tmp_edge = tmp_edge->succ_next)
2254 	if (tmp_edge->flags & EDGE_FALLTHRU)
2255 	  break;
2256       c_has_outgoing_fallthru = (tmp_edge != NULL);
2258       for (tmp_edge = b->pred; tmp_edge ; tmp_edge = tmp_edge->pred_next)
2259 	if (tmp_edge->flags & EDGE_FALLTHRU)
2260 	  break;
2261       b_has_incoming_fallthru = (tmp_edge != NULL);
2263       /* If B does not have an incoming fallthru, and the exception regions
2264 	 match, then it can be moved immediately before C without introducing
2265 	 or modifying jumps.
2267 	 C can not be the first block, so we do not have to worry about
2268 	 accessing a non-existent block.  */
2269       d = BASIC_BLOCK (c->index - 1);
2270       if (! b_has_incoming_fallthru
2271 	  && d->eh_end == b->eh_beg
2272 	  && b->eh_end == c->eh_beg)
2273 	return merge_blocks_move_predecessor_nojumps (b, c);
2275       /* Otherwise, we're going to try to move C after B.  Make sure the
2276 	 exception regions match.
2278 	 If B is the last basic block, then we must not try to access the
2279 	 block structure for block B + 1.  Luckily in that case we do not
2280 	 need to worry about matching exception regions.  */
2281       d = (b->index + 1 < n_basic_blocks ? BASIC_BLOCK (b->index + 1) : NULL);
2282       if (b->eh_end == c->eh_beg
2283 	  && (d == NULL || c->eh_end == d->eh_beg))
2285 	  /* If C does not have an outgoing fallthru, then it can be moved
2286 	     immediately after B without introducing or modifying jumps.  */
2287 	  if (! c_has_outgoing_fallthru)
2288 	    return merge_blocks_move_successor_nojumps (b, c);
2290 	  /* Otherwise, we'll need to insert an extra jump, and possibly
2291 	     a new block to contain it.  */
2292 	  /* ??? Not implemented yet.  */
2295       return 0;
/* Top level driver for merge_blocks.  Walks all blocks and merges each
   with its sole successor whenever merge_blocks allows it.  */

static void
try_merge_blocks ()
{
  int i;

  /* Attempt to merge blocks as made possible by edge removal.  If a block
     has only one successor, and the successor has only one predecessor,
     they may be combined.  */

  for (i = 0; i < n_basic_blocks; )
    {
      basic_block c, b = BASIC_BLOCK (i);
      edge s;

      /* A loop because chains of blocks might be combinable.  */
      while ((s = b->succ) != NULL
	     && s->succ_next == NULL
	     && (s->flags & EDGE_EH) == 0
	     && (c = s->dest) != EXIT_BLOCK_PTR
	     && c->pred->pred_next == NULL
	     /* If the jump insn has side effects, we can't kill the edge.  */
	     && (GET_CODE (b->end) != JUMP_INSN
		 || onlyjump_p (b->end))
	     && merge_blocks (s, b, c))
	continue;

      /* Don't get confused by the index shift caused by deleting blocks.  */
      i = b->index + 1;
    }
}
/* The given edge E from B to C should potentially be a fallthru edge.
   If that is in fact true, delete the unconditional jump and barriers
   that are in the way, and mark E as a fallthru edge.  */

static void
tidy_fallthru_edge (e, b, c)
     edge e;
     basic_block b, c;
{
  rtx q;

  /* ??? In a late-running flow pass, other folks may have deleted basic
     blocks by nopping out blocks, leaving multiple BARRIERs between here
     and the target label.  They ought to be chastized and fixed.

     We can also wind up with a sequence of undeletable labels between
     one block and the next.

     So search through a sequence of barriers, labels, and notes for
     the head of block C and assert that we really do fall through.  */

  if (next_real_insn (b->end) != next_real_insn (PREV_INSN (c->head)))
    return;

  /* Remove what will soon cease being the jump insn from the source block.
     If block B consisted only of this single jump, turn it into a deleted
     note.  */
  q = b->end;
  if (GET_CODE (q) == JUMP_INSN)
    {
#ifdef HAVE_cc0
      /* If this was a conditional jump, we need to also delete
	 the insn that set cc0.  */
      if (! simplejump_p (q) && condjump_p (q) && sets_cc0_p (PREV_INSN (q)))
	q = PREV_INSN (q);
#endif

      if (b->head == q)
	{
	  /* B was only the jump; turn it into a deleted note so the
	     block is not left with no insns at all.  */
	  PUT_CODE (q, NOTE);
	  NOTE_LINE_NUMBER (q) = NOTE_INSN_DELETED;
	  NOTE_SOURCE_FILE (q) = 0;
	}
      else
	b->end = q = PREV_INSN (q);
    }

  /* Selectively unlink the sequence.  */
  if (q != PREV_INSN (c->head))
    flow_delete_insn_chain (NEXT_INSN (q), PREV_INSN (c->head));

  e->flags |= EDGE_FALLTHRU;
}
/* Discover and record the loop depth at the head of each basic block,
   by counting NOTE_INSN_LOOP_BEG/NOTE_INSN_LOOP_END notes while walking
   the insn chain INSNS from the beginning.  */

static void
calculate_loop_depth (insns)
     rtx insns;
{
  basic_block bb;
  rtx insn;
  /* Depth starts at 1 so that a depth of 0 always signals an error.  */
  int i = 0, depth = 1;

  bb = BASIC_BLOCK (i);
  for (insn = insns; insn ; insn = NEXT_INSN (insn))
    {
      /* Record the current depth when we reach the next block head.  */
      if (insn == bb->head)
	{
	  bb->loop_depth = depth;
	  if (++i >= n_basic_blocks)
	    break;
	  bb = BASIC_BLOCK (i);
	}

      if (GET_CODE (insn) == NOTE)
	{
	  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
	    depth++;
	  else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END)
	    depth--;

	  /* If we have LOOP_DEPTH == 0, there has been a bookkeeping
	     error -- more LOOP_END than LOOP_BEG notes.  */
	  if (depth == 0)
	    abort ();
	}
    }
}
/* Perform data flow analysis.
   F is the first insn of the function and NREGS the number of register
   numbers in use.  FILE, if nonzero, receives a dump of the flow info.
   REMOVE_DEAD_CODE is nonzero if dead code elimination may be done.  */

void
life_analysis (f, nregs, file, remove_dead_code)
     rtx f;
     int nregs;
     FILE *file;
     int remove_dead_code;
{
#ifdef ELIMINABLE_REGS
  register size_t i;
  static struct {int from, to; } eliminables[] = ELIMINABLE_REGS;
#endif
  int flags;

  /* Record which registers will be eliminated.  We use this in
     mark_used_regs.  */

  CLEAR_HARD_REG_SET (elim_reg_set);

#ifdef ELIMINABLE_REGS
  for (i = 0; i < sizeof eliminables / sizeof eliminables[0]; i++)
    SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
#else
  /* Without ELIMINABLE_REGS, only the frame pointer is eliminable.  */
  SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
#endif

  /* Allocate a bitmap to be filled in by record_volatile_insns.  */
  uid_volatile = BITMAP_XMALLOC ();

  /* We want alias analysis information for local dead store elimination.  */
  init_alias_analysis ();

  flags = PROP_FINAL;
  if (! remove_dead_code)
    flags &= ~(PROP_SCAN_DEAD_CODE | PROP_KILL_DEAD_CODE);
  life_analysis_1 (f, nregs, flags);

  if (! reload_completed)
    mark_constant_function ();

  end_alias_analysis ();

  if (file)
    dump_flow_info (file);

  BITMAP_XFREE (uid_volatile);
  free_basic_block_vars (1);
}
2473 /* A subroutine of verify_wide_reg, called through for_each_rtx.
2474 Search for REGNO. If found, abort if it is not wider than word_mode. */
2476 static int
2477 verify_wide_reg_1 (px, pregno)
2478 rtx *px;
2479 void *pregno;
2481 rtx x = *px;
2482 int regno = *(int *) pregno;
2484 if (GET_CODE (x) == REG && REGNO (x) == regno)
2486 if (GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD)
2487 abort ();
2488 return 1;
2490 return 0;
2493 /* A subroutine of verify_local_live_at_start. Search through insns
2494 between HEAD and END looking for register REGNO. */
2496 static void
2497 verify_wide_reg (regno, head, end)
2498 int regno;
2499 rtx head, end;
2501 while (1)
2503 if (GET_RTX_CLASS (GET_CODE (head)) == 'i'
2504 && for_each_rtx (&PATTERN (head), verify_wide_reg_1, &regno))
2505 return;
2506 if (head == end)
2507 break;
2508 head = NEXT_INSN (head);
2511 /* We didn't find the register at all. Something's way screwy. */
2512 abort ();
/* A subroutine of update_life_info.  Verify that there are no untoward
   changes in live_at_start during a local update.

   NEW_LIVE_AT_START is the freshly recomputed live-at-start set for
   block BB; note that it is clobbered (XORed) in the pre-reload case.  */

static void
verify_local_live_at_start (new_live_at_start, bb)
     regset new_live_at_start;
     basic_block bb;
{
  if (reload_completed)
    {
      /* After reload, there are no pseudos, nor subregs of multi-word
	 registers.  The regsets should exactly match.  */
      if (! REG_SET_EQUAL_P (new_live_at_start, bb->global_live_at_start))
	abort ();
    }
  else
    {
      int i;

      /* Find the set of changed registers.  */
      XOR_REG_SET (new_live_at_start, bb->global_live_at_start);

      EXECUTE_IF_SET_IN_REG_SET (new_live_at_start, 0, i,
	{
	  /* No registers should die.  */
	  if (REGNO_REG_SET_P (bb->global_live_at_start, i))
	    abort ();
	  /* Verify that the now-live register is wider than word_mode.  */
	  verify_wide_reg (i, bb->head, bb->end);
	});
    }
}
/* Updates death notes starting with the basic blocks set in BLOCKS.
   EXTENT selects a purely local refresh or a global recomputation;
   PROP_FLAGS are passed through to propagate_block.

   If LOCAL_ONLY, such as after splitting or peepholeing, we are only
   expecting local modifications to basic blocks.  If we find extra
   registers live at the beginning of a block, then we either killed
   useful data, or we have a broken split that wants data not provided.
   If we find registers removed from live_at_start, that means we have
   a broken peephole that is killing a register it shouldn't.

   ??? This is not true in one situation -- when a pre-reload splitter
   generates subregs of a multi-word pseudo, current life analysis will
   lose the kill.  So we _can_ have a pseudo go live.  How irritating.

   BLOCK_FOR_INSN is assumed to be correct.

   ??? PROP_FLAGS should not contain PROP_LOG_LINKS.  Need to set up
   reg_next_use for that.  Including PROP_REG_INFO does not refresh
   regs_ever_live unless the caller resets it to zero.  */

void
update_life_info (blocks, extent, prop_flags)
     sbitmap blocks;
     enum update_life_extent extent;
     int prop_flags;
{
  regset tmp;
  int i;

  tmp = ALLOCA_REG_SET ();

  /* For a global update, we go through the relaxation process again.  */
  if (extent != UPDATE_LIFE_LOCAL)
    {
      calculate_global_regs_live (blocks, blocks,
				  prop_flags & PROP_SCAN_DEAD_CODE);

      /* If asked, remove notes from the blocks we'll update.  */
      if (extent == UPDATE_LIFE_GLOBAL_RM_NOTES)
	count_or_remove_death_notes (blocks, 1);
    }

  /* Rescan each requested block to refresh its notes.  */
  EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
    {
      basic_block bb = BASIC_BLOCK (i);

      COPY_REG_SET (tmp, bb->global_live_at_end);
      propagate_block (tmp, bb->head, bb->end, (regset) NULL, i,
		       prop_flags);

      if (extent == UPDATE_LIFE_LOCAL)
	verify_local_live_at_start (tmp, bb);
    });

  FREE_REG_SET (tmp);
}
2604 /* Free the variables allocated by find_basic_blocks.
2606 KEEP_HEAD_END_P is non-zero if basic_block_info is not to be freed. */
2608 void
2609 free_basic_block_vars (keep_head_end_p)
2610 int keep_head_end_p;
2612 if (basic_block_for_insn)
2614 VARRAY_FREE (basic_block_for_insn);
2615 basic_block_for_insn = NULL;
2618 if (! keep_head_end_p)
2620 clear_edges ();
2621 VARRAY_FREE (basic_block_info);
2622 n_basic_blocks = 0;
2624 ENTRY_BLOCK_PTR->aux = NULL;
2625 ENTRY_BLOCK_PTR->global_live_at_end = NULL;
2626 EXIT_BLOCK_PTR->aux = NULL;
2627 EXIT_BLOCK_PTR->global_live_at_start = NULL;
2631 /* Return nonzero if the destination of SET equals the source. */
2632 static int
2633 set_noop_p (set)
2634 rtx set;
2636 rtx src = SET_SRC (set);
2637 rtx dst = SET_DEST (set);
2638 if (GET_CODE (src) == REG && GET_CODE (dst) == REG
2639 && REGNO (src) == REGNO (dst))
2640 return 1;
2641 if (GET_CODE (src) != SUBREG || GET_CODE (dst) != SUBREG
2642 || SUBREG_WORD (src) != SUBREG_WORD (dst))
2643 return 0;
2644 src = SUBREG_REG (src);
2645 dst = SUBREG_REG (dst);
2646 if (GET_CODE (src) == REG && GET_CODE (dst) == REG
2647 && REGNO (src) == REGNO (dst))
2648 return 1;
2649 return 0;
2652 /* Return nonzero if an insn consists only of SETs, each of which only sets a
2653 value to itself. */
2654 static int
2655 noop_move_p (insn)
2656 rtx insn;
2658 rtx pat = PATTERN (insn);
2660 /* Insns carrying these notes are useful later on. */
2661 if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
2662 return 0;
2664 if (GET_CODE (pat) == SET && set_noop_p (pat))
2665 return 1;
2667 if (GET_CODE (pat) == PARALLEL)
2669 int i;
2670 /* If nothing but SETs of registers to themselves,
2671 this insn can also be deleted. */
2672 for (i = 0; i < XVECLEN (pat, 0); i++)
2674 rtx tem = XVECEXP (pat, 0, i);
2676 if (GET_CODE (tem) == USE
2677 || GET_CODE (tem) == CLOBBER)
2678 continue;
2680 if (GET_CODE (tem) != SET || ! set_noop_p (tem))
2681 return 0;
2684 return 1;
2686 return 0;
2689 static void
2690 notice_stack_pointer_modification (x, pat, data)
2691 rtx x;
2692 rtx pat ATTRIBUTE_UNUSED;
2693 void *data ATTRIBUTE_UNUSED;
2695 if (x == stack_pointer_rtx
2696 /* The stack pointer is only modified indirectly as the result
2697 of a push until later in flow. See the comments in rtl.texi
2698 regarding Embedded Side-Effects on Addresses. */
2699 || (GET_CODE (x) == MEM
2700 && (GET_CODE (XEXP (x, 0)) == PRE_DEC
2701 || GET_CODE (XEXP (x, 0)) == PRE_INC
2702 || GET_CODE (XEXP (x, 0)) == POST_DEC
2703 || GET_CODE (XEXP (x, 0)) == POST_INC)
2704 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx))
2705 current_function_sp_is_unchanging = 0;
/* Record which insns refer to any volatile memory
   or for any reason can't be deleted just because they are dead stores.
   Also, delete any insns that copy a register to itself.
   And see if the stack pointer is modified.
   F is the first insn of the function.  */

static void
record_volatile_insns (f)
     rtx f;
{
  rtx insn;
  for (insn = f; insn; insn = NEXT_INSN (insn))
    {
      enum rtx_code code1 = GET_CODE (insn);
      /* Calls are never deletable as dead stores.  */
      if (code1 == CALL_INSN)
	SET_INSN_VOLATILE (insn);
      else if (code1 == INSN || code1 == JUMP_INSN)
	{
	  if (GET_CODE (PATTERN (insn)) != USE
	      && volatile_refs_p (PATTERN (insn)))
	    SET_INSN_VOLATILE (insn);

	  /* A SET that makes space on the stack cannot be dead.
	     (Such SETs occur only for allocating variable-size data,
	     so they will always have a PLUS or MINUS according to the
	     direction of stack growth.)
	     Even if this function never uses this stack pointer value,
	     signal handlers do!  */
	  else if (code1 == INSN && GET_CODE (PATTERN (insn)) == SET
		   && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
#ifdef STACK_GROWS_DOWNWARD
		   && GET_CODE (SET_SRC (PATTERN (insn))) == MINUS
#else
		   && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
#endif
		   && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx)
	    SET_INSN_VOLATILE (insn);

	  /* Delete (in effect) any obvious no-op moves.  */
	  else if (noop_move_p (insn))
	    {
	      PUT_CODE (insn, NOTE);
	      NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
	      NOTE_SOURCE_FILE (insn) = 0;
	    }
	}

      /* Check if insn modifies the stack pointer.  */
      if ( current_function_sp_is_unchanging
	   && GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	note_stores (PATTERN (insn),
		     notice_stack_pointer_modification,
		     NULL);
    }
}
2762 /* Mark a register in SET. Hard registers in large modes get all
2763 of their component registers set as well. */
2764 static void
2765 mark_reg (set, reg)
2766 regset set;
2767 rtx reg;
2769 int regno = REGNO (reg);
2771 SET_REGNO_REG_SET (set, regno);
2772 if (regno < FIRST_PSEUDO_REGISTER)
2774 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
2775 while (--n > 0)
2776 SET_REGNO_REG_SET (set, regno + n);
/* Mark those regs which are needed at the end of the function as live
   at the end of the last basic block, by setting them in the regset SET.  */

static void
mark_regs_live_at_end (set)
     regset set;
{
  tree type;
  int i;

  /* If exiting needs the right stack value, consider the stack pointer
     live at the end of the function.  */
  if ((HAVE_epilogue && reload_completed)
      || ! EXIT_IGNORE_STACK
      || (! FRAME_POINTER_REQUIRED
	  && ! current_function_calls_alloca
	  && flag_omit_frame_pointer)
      || current_function_sp_is_unchanging)
    {
      SET_REGNO_REG_SET (set, STACK_POINTER_REGNUM);
    }

  /* Mark the frame pointer if needed at the end of the function.  If
     we end up eliminating it, it will be removed from the live list
     of each basic block by reload.  */

  if (! reload_completed || frame_pointer_needed)
    {
      SET_REGNO_REG_SET (set, FRAME_POINTER_REGNUM);
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
      /* If they are different, also mark the hard frame pointer as live.  */
      SET_REGNO_REG_SET (set, HARD_FRAME_POINTER_REGNUM);
#endif
    }

#ifdef PIC_OFFSET_TABLE_REGNUM
#ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
  /* Many architectures have a GP register even without flag_pic.
     Assume the pic register is not in use, or will be handled by
     other means, if it is not fixed.  */
  if (fixed_regs[PIC_OFFSET_TABLE_REGNUM])
    SET_REGNO_REG_SET (set, PIC_OFFSET_TABLE_REGNUM);
#endif
#endif

  /* Mark all global registers, and all registers used by the epilogue
     as being live at the end of the function since they may be
     referenced by our caller.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (global_regs[i]
#ifdef EPILOGUE_USES
	|| EPILOGUE_USES (i)
#endif
	)
      SET_REGNO_REG_SET (set, i);

  /* Mark all call-saved registers that we actually used.  */
  if (HAVE_epilogue && reload_completed)
    {
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	if (! call_used_regs[i] && regs_ever_live[i])
	  SET_REGNO_REG_SET (set, i);
    }

  /* Mark function return value.  */
  /* ??? Only do this after reload.  Consider a non-void function that
     omits a return statement.  Across that edge we'll have the return
     register live, and no set for it.  Thus the return register will
     be live back through the CFG to the entry, and thus we die.  A
     possible solution is to emit a clobber at exits without returns.  */

  type = TREE_TYPE (DECL_RESULT (current_function_decl));
  if (reload_completed
      && type != void_type_node)
    {
      rtx outgoing;

      /* Structure returns are passed back through a pointer.  */
      if (current_function_returns_struct
	  || current_function_returns_pcc_struct)
	type = build_pointer_type (type);

#ifdef FUNCTION_OUTGOING_VALUE
      outgoing = FUNCTION_OUTGOING_VALUE (type, current_function_decl);
#else
      outgoing = FUNCTION_VALUE (type, current_function_decl);
#endif

      if (GET_CODE (outgoing) == REG)
	mark_reg (set, outgoing);
      else if (GET_CODE (outgoing) == PARALLEL)
	{
	  int len = XVECLEN (outgoing, 0);

	  /* Check for a NULL entry, used to indicate that the parameter
	     goes on the stack and in registers.  */
	  i = (XEXP (XVECEXP (outgoing, 0, 0), 0) ? 0 : 1);

	  for ( ; i < len; ++i)
	    {
	      rtx r = XVECEXP (outgoing, 0, i);
	      if (GET_CODE (r) == REG)
		mark_reg (set, r);
	    }
	}
      else
	abort ();
    }
}
/* Determine which registers are live at the start of each
   basic block of the function whose first insn is F.
   NREGS is the number of registers used in F; FLAGS is a set of
   PROP_* flags passed on to propagate_block.
   We allocate the vector basic_block_live_at_start
   and the regsets that it points to, and fill them with the data.
   regset_size and regset_bytes are also set here.  */

static void
life_analysis_1 (f, nregs, flags)
     rtx f;
     int nregs;
     int flags;
{
  char save_regs_ever_live[FIRST_PSEUDO_REGISTER];
  register int i;

  max_regno = nregs;

  /* Allocate and zero out many data structures
     that will record the data from lifetime analysis.  */

  allocate_reg_life_data ();
  allocate_bb_life_data ();

  reg_next_use = (rtx *) xcalloc (nregs, sizeof (rtx));

  /* Assume that the stack pointer is unchanging if alloca hasn't been used.
     This will be cleared by record_volatile_insns if it encounters an insn
     which modifies the stack pointer.  */
  current_function_sp_is_unchanging = !current_function_calls_alloca;
  record_volatile_insns (f);

  /* Find the set of registers live on function exit.  Do this before
     zeroing regs_ever_live, as we use that data post-reload.  */
  mark_regs_live_at_end (EXIT_BLOCK_PTR->global_live_at_start);

  /* The post-reload life analysis must have (on a global basis) the same
     registers live as were computed by reload itself; otherwise the
     register elimination offsets and such may be incorrect.

     Reload will make some registers as live even though they do not
     appear in the rtl.  */
  if (reload_completed)
    memcpy (save_regs_ever_live, regs_ever_live, sizeof (regs_ever_live));
  memset (regs_ever_live, 0, sizeof regs_ever_live);

  /* Compute register life at block boundaries.  It'd be nice to
     begin with just the exit and noreturn blocks, but that set
     is not immediately handy.  */
  {
    sbitmap blocks;
    blocks = sbitmap_alloc (n_basic_blocks);
    sbitmap_ones (blocks);
    calculate_global_regs_live (blocks, blocks, flags & PROP_SCAN_DEAD_CODE);
    sbitmap_free (blocks);
  }

  /* The only pseudos that are live at the beginning of the function are
     those that were not set anywhere in the function.  local-alloc doesn't
     know how to handle these correctly, so mark them as not local to any
     one basic block.  */

  EXECUTE_IF_SET_IN_REG_SET (ENTRY_BLOCK_PTR->global_live_at_end,
			     FIRST_PSEUDO_REGISTER, i,
			     { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });

  /* Now the life information is accurate.  Make one more pass over each
     basic block to delete dead stores, create autoincrement addressing
     and record how many times each register is used, is set, or dies.  */
  {
    regset tmp;
    tmp = ALLOCA_REG_SET ();

    for (i = n_basic_blocks - 1; i >= 0; --i)
      {
	basic_block bb = BASIC_BLOCK (i);

	COPY_REG_SET (tmp, bb->global_live_at_end);
	propagate_block (tmp, bb->head, bb->end, (regset) NULL, i, flags);
      }

    FREE_REG_SET (tmp);
  }

  /* We have a problem with any pseudoreg that lives across the setjmp.
     ANSI says that if a user variable does not change in value between
     the setjmp and the longjmp, then the longjmp preserves it.  This
     includes longjmp from a place where the pseudo appears dead.
     (In principle, the value still exists if it is in scope.)
     If the pseudo goes in a hard reg, some other value may occupy
     that hard reg where this pseudo is dead, thus clobbering the pseudo.
     Conclusion: such a pseudo must not go in a hard reg.  */
  EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
			     FIRST_PSEUDO_REGISTER, i,
			     {
			       if (regno_reg_rtx[i] != 0)
				 {
				   REG_LIVE_LENGTH (i) = -1;
				   REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
				 }
			     });

  /* Restore regs_ever_live that was provided by reload.  */
  if (reload_completed)
    memcpy (regs_ever_live, save_regs_ever_live, sizeof (regs_ever_live));

  /* Clean up.  */
  free (reg_next_use);
  reg_next_use = NULL;
}
/* Propagate global life info around the graph of basic blocks.  Begin
   considering blocks with their corresponding bit set in BLOCKS_IN.
   BLOCKS_OUT is set for every block that was changed.  FLAGS is the
   set of PROP_* flags passed on to propagate_block.

   This is a standard backward dataflow fixed-point iteration driven by
   a worklist of blocks whose live_at_end may need updating.  */

static void
calculate_global_regs_live (blocks_in, blocks_out, flags)
     sbitmap blocks_in, blocks_out;
     int flags;
{
  basic_block *queue, *qhead, *qtail, *qend;
  regset tmp, new_live_at_end;
  int i;

  tmp = ALLOCA_REG_SET ();
  new_live_at_end = ALLOCA_REG_SET ();

  /* Create a worklist.  Allocate an extra slot for ENTRY_BLOCK, and one
     because the `head == tail' style test for an empty queue doesn't
     work with a full queue.  */
  queue = (basic_block *) xmalloc ((n_basic_blocks + 2) * sizeof (*queue));
  qtail = queue;
  qhead = qend = queue + n_basic_blocks + 2;

  /* Queue the blocks set in the initial mask.  Do this in reverse block
     number order so that we are more likely for the first round to do
     useful work.  We use AUX non-null to flag that the block is queued.  */
  EXECUTE_IF_SET_IN_SBITMAP (blocks_in, 0, i,
    {
      basic_block bb = BASIC_BLOCK (i);
      *--qhead = bb;
      bb->aux = bb;
    });

  sbitmap_zero (blocks_out);

  while (qhead != qtail)
    {
      int rescan, changed;
      basic_block bb;
      edge e;

      /* Dequeue the next block (the queue is circular).  */
      bb = *qhead++;
      if (qhead == qend)
	qhead = queue;
      bb->aux = NULL;

      /* Begin by propagating live_at_start from the successor blocks.  */
      CLEAR_REG_SET (new_live_at_end);
      for (e = bb->succ; e ; e = e->succ_next)
	{
	  basic_block sb = e->dest;
	  IOR_REG_SET (new_live_at_end, sb->global_live_at_start);
	}

      if (bb == ENTRY_BLOCK_PTR)
	{
	  COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
	  continue;
	}

      /* On our first pass through this block, we'll go ahead and continue.
	 Recognize first pass by local_set NULL.  On subsequent passes, we
	 get to skip out early if live_at_end wouldn't have changed.  */

      if (bb->local_set == NULL)
	{
	  bb->local_set = OBSTACK_ALLOC_REG_SET (function_obstack);
	  rescan = 1;
	}
      else
	{
	  /* If any bits were removed from live_at_end, we'll have to
	     rescan the block.  This wouldn't be necessary if we had
	     precalculated local_live, however with PROP_SCAN_DEAD_CODE
	     local_live is really dependent on live_at_end.  */
	  CLEAR_REG_SET (tmp);
	  rescan = bitmap_operation (tmp, bb->global_live_at_end,
				     new_live_at_end, BITMAP_AND_COMPL);

	  if (! rescan)
	    {
	      /* Find the set of changed bits.  Take this opportunity
		 to notice that this set is empty and early out.  */
	      CLEAR_REG_SET (tmp);
	      changed = bitmap_operation (tmp, bb->global_live_at_end,
					  new_live_at_end, BITMAP_XOR);
	      if (! changed)
		continue;

	      /* If any of the changed bits overlap with local_set,
		 we'll have to rescan the block.  Detect overlap by
		 the AND with ~local_set turning off bits.  */
	      rescan = bitmap_operation (tmp, tmp, bb->local_set,
					 BITMAP_AND_COMPL);
	    }
	}

      /* Let our caller know that BB changed enough to require its
	 death notes updated.  */
      SET_BIT (blocks_out, bb->index);

      if (! rescan)
	{
	  /* Add to live_at_start the set of all registers in
	     new_live_at_end that aren't in the old live_at_end.  */

	  bitmap_operation (tmp, new_live_at_end, bb->global_live_at_end,
			    BITMAP_AND_COMPL);
	  COPY_REG_SET (bb->global_live_at_end, new_live_at_end);

	  changed = bitmap_operation (bb->global_live_at_start,
				      bb->global_live_at_start,
				      tmp, BITMAP_IOR);
	  if (! changed)
	    continue;
	}
      else
	{
	  COPY_REG_SET (bb->global_live_at_end, new_live_at_end);

	  /* Rescan the block insn by insn to turn (a copy of) live_at_end
	     into live_at_start.  */
	  propagate_block (new_live_at_end, bb->head, bb->end,
			   bb->local_set, bb->index, flags);

	  /* If live_at start didn't change, no need to go farther.  */
	  if (REG_SET_EQUAL_P (bb->global_live_at_start, new_live_at_end))
	    continue;

	  COPY_REG_SET (bb->global_live_at_start, new_live_at_end);
	}

      /* Queue all predecessors of BB so that we may re-examine
	 their live_at_end.  */
      for (e = bb->pred; e ; e = e->pred_next)
	{
	  basic_block pb = e->src;
	  if (pb->aux == NULL)
	    {
	      *qtail++ = pb;
	      if (qtail == qend)
		qtail = queue;
	      pb->aux = pb;
	    }
	}
    }

  FREE_REG_SET (tmp);
  FREE_REG_SET (new_live_at_end);

  /* Release the per-block local_set regsets now that the fixed point
     has been reached.  */
  EXECUTE_IF_SET_IN_SBITMAP (blocks_out, 0, i,
    {
      basic_block bb = BASIC_BLOCK (i);
      FREE_REG_SET (bb->local_set);
    });

  free (queue);
}
3158 /* Subroutines of life analysis. */
3160 /* Allocate the permanent data structures that represent the results
3161 of life analysis. Not static since used also for stupid life analysis. */
3163 void
3164 allocate_bb_life_data ()
3166 register int i;
3168 for (i = 0; i < n_basic_blocks; i++)
3170 basic_block bb = BASIC_BLOCK (i);
3172 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
3173 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);
3176 ENTRY_BLOCK_PTR->global_live_at_end
3177 = OBSTACK_ALLOC_REG_SET (function_obstack);
3178 EXIT_BLOCK_PTR->global_live_at_start
3179 = OBSTACK_ALLOC_REG_SET (function_obstack);
3181 regs_live_at_setjmp = OBSTACK_ALLOC_REG_SET (function_obstack);
3184 void
3185 allocate_reg_life_data ()
3187 int i;
3189 /* Recalculate the register space, in case it has grown. Old style
3190 vector oriented regsets would set regset_{size,bytes} here also. */
3191 allocate_reg_info (max_regno, FALSE, FALSE);
3193 /* Reset all the data we'll collect in propagate_block and its
3194 subroutines. */
3195 for (i = 0; i < max_regno; i++)
3197 REG_N_SETS (i) = 0;
3198 REG_N_REFS (i) = 0;
3199 REG_N_DEATHS (i) = 0;
3200 REG_N_CALLS_CROSSED (i) = 0;
3201 REG_LIVE_LENGTH (i) = 0;
3202 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
3206 /* Compute the registers live at the beginning of a basic block
3207 from those live at the end.
3209 When called, OLD contains those live at the end.
3210 On return, it contains those live at the beginning.
3211 FIRST and LAST are the first and last insns of the basic block.
3213 FINAL is nonzero if we are doing the final pass which is not
3214 for computing the life info (since that has already been done)
3215 but for acting on it. On this pass, we delete dead stores,
3216 set up the logical links and dead-variables lists of instructions,
3217 and merge instructions for autoincrement and autodecrement addresses.
3219 SIGNIFICANT is nonzero only the first time for each basic block.
3220 If it is nonzero, it points to a regset in which we store
3221 a 1 for each register that is set within the block.
3223 BNUM is the number of the basic block. */
3225 static void
3226 propagate_block (old, first, last, significant, bnum, flags)
3227 register regset old;
3228 rtx first;
3229 rtx last;
3230 regset significant;
3231 int bnum;
3232 int flags;
3234 register rtx insn;
3235 rtx prev;
3236 regset live;
3237 regset dead;
3239 /* Find the loop depth for this block. Ignore loop level changes in the
3240 middle of the basic block -- for register allocation purposes, the
3241 important uses will be in the blocks wholely contained within the loop
3242 not in the loop pre-header or post-trailer. */
3243 loop_depth = BASIC_BLOCK (bnum)->loop_depth;
3245 dead = ALLOCA_REG_SET ();
3246 live = ALLOCA_REG_SET ();
3248 cc0_live = 0;
3250 if (flags & PROP_REG_INFO)
3252 register int i;
3254 /* Process the regs live at the end of the block.
3255 Mark them as not local to any one basic block. */
3256 EXECUTE_IF_SET_IN_REG_SET (old, 0, i,
3258 REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL;
3262 /* Scan the block an insn at a time from end to beginning. */
3264 for (insn = last; ; insn = prev)
3266 prev = PREV_INSN (insn);
3268 if (GET_CODE (insn) == NOTE)
3270 /* If this is a call to `setjmp' et al,
3271 warn if any non-volatile datum is live. */
3273 if ((flags & PROP_REG_INFO)
3274 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
3275 IOR_REG_SET (regs_live_at_setjmp, old);
3278 /* Update the life-status of regs for this insn.
3279 First DEAD gets which regs are set in this insn
3280 then LIVE gets which regs are used in this insn.
3281 Then the regs live before the insn
3282 are those live after, with DEAD regs turned off,
3283 and then LIVE regs turned on. */
3285 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
3287 register int i;
3288 rtx note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
3289 int insn_is_dead = 0;
3290 int libcall_is_dead = 0;
3292 if (flags & PROP_SCAN_DEAD_CODE)
3294 insn_is_dead = (insn_dead_p (PATTERN (insn), old, 0, REG_NOTES (insn))
3295 /* Don't delete something that refers to volatile storage! */
3296 && ! INSN_VOLATILE (insn));
3297 libcall_is_dead = (insn_is_dead && note != 0
3298 && libcall_dead_p (PATTERN (insn), old, note, insn));
3301 /* We almost certainly don't want to delete prologue or epilogue
3302 instructions. Warn about probable compiler losage. */
3303 if ((flags & PROP_KILL_DEAD_CODE)
3304 && insn_is_dead
3305 && reload_completed
3306 && (HAVE_epilogue || HAVE_prologue)
3307 && prologue_epilogue_contains (insn))
3309 warning ("ICE: would have deleted prologue/epilogue insn");
3310 debug_rtx (insn);
3311 libcall_is_dead = insn_is_dead = 0;
3314 /* If an instruction consists of just dead store(s) on final pass,
3315 "delete" it by turning it into a NOTE of type NOTE_INSN_DELETED.
3316 We could really delete it with delete_insn, but that
3317 can cause trouble for first or last insn in a basic block. */
3318 if ((flags & PROP_KILL_DEAD_CODE) && insn_is_dead)
3320 rtx inote;
3321 /* If the insn referred to a label, note that the label is
3322 now less used. */
3323 for (inote = REG_NOTES (insn); inote; inote = XEXP (inote, 1))
3325 if (REG_NOTE_KIND (inote) == REG_LABEL)
3327 rtx label = XEXP (inote, 0);
3328 rtx next;
3329 LABEL_NUSES (label)--;
3331 /* If this label was attached to an ADDR_VEC, it's
3332 safe to delete the ADDR_VEC. In fact, it's pretty much
3333 mandatory to delete it, because the ADDR_VEC may
3334 be referencing labels that no longer exist. */
3335 if (LABEL_NUSES (label) == 0
3336 && (next = next_nonnote_insn (label)) != NULL
3337 && GET_CODE (next) == JUMP_INSN
3338 && (GET_CODE (PATTERN (next)) == ADDR_VEC
3339 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
3341 rtx pat = PATTERN (next);
3342 int diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
3343 int len = XVECLEN (pat, diff_vec_p);
3344 int i;
3345 for (i = 0; i < len; i++)
3346 LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0))--;
3347 PUT_CODE (next, NOTE);
3348 NOTE_LINE_NUMBER (next) = NOTE_INSN_DELETED;
3349 NOTE_SOURCE_FILE (next) = 0;
3354 PUT_CODE (insn, NOTE);
3355 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
3356 NOTE_SOURCE_FILE (insn) = 0;
3358 /* CC0 is now known to be dead. Either this insn used it,
3359 in which case it doesn't anymore, or clobbered it,
3360 so the next insn can't use it. */
3361 cc0_live = 0;
3363 /* If this insn is copying the return value from a library call,
3364 delete the entire library call. */
3365 if (libcall_is_dead)
3367 rtx first = XEXP (note, 0);
3368 rtx p = insn;
3369 while (INSN_DELETED_P (first))
3370 first = NEXT_INSN (first);
3371 while (p != first)
3373 p = PREV_INSN (p);
3374 PUT_CODE (p, NOTE);
3375 NOTE_LINE_NUMBER (p) = NOTE_INSN_DELETED;
3376 NOTE_SOURCE_FILE (p) = 0;
3379 goto flushed;
3382 CLEAR_REG_SET (dead);
3383 CLEAR_REG_SET (live);
3385 /* See if this is an increment or decrement that can be
3386 merged into a following memory address. */
3387 #ifdef AUTO_INC_DEC
3389 register rtx x = single_set (insn);
3391 /* Does this instruction increment or decrement a register? */
3392 if (!reload_completed
3393 && (flags & PROP_AUTOINC)
3394 && x != 0
3395 && GET_CODE (SET_DEST (x)) == REG
3396 && (GET_CODE (SET_SRC (x)) == PLUS
3397 || GET_CODE (SET_SRC (x)) == MINUS)
3398 && XEXP (SET_SRC (x), 0) == SET_DEST (x)
3399 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
3400 /* Ok, look for a following memory ref we can combine with.
3401 If one is found, change the memory ref to a PRE_INC
3402 or PRE_DEC, cancel this insn, and return 1.
3403 Return 0 if nothing has been done. */
3404 && try_pre_increment_1 (insn))
3405 goto flushed;
3407 #endif /* AUTO_INC_DEC */
3409 /* If this is not the final pass, and this insn is copying the
3410 value of a library call and it's dead, don't scan the
3411 insns that perform the library call, so that the call's
3412 arguments are not marked live. */
3413 if (libcall_is_dead)
3415 /* Mark the dest reg as `significant'. */
3416 mark_set_regs (old, dead, PATTERN (insn), NULL_RTX,
3417 significant, flags);
3419 insn = XEXP (note, 0);
3420 prev = PREV_INSN (insn);
3422 else if (GET_CODE (PATTERN (insn)) == SET
3423 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
3424 && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
3425 && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx
3426 && GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == CONST_INT)
3427 /* We have an insn to pop a constant amount off the stack.
3428 (Such insns use PLUS regardless of the direction of the stack,
3429 and any insn to adjust the stack by a constant is always a pop.)
3430 These insns, if not dead stores, have no effect on life. */
3432 else
3434 /* Any regs live at the time of a call instruction
3435 must not go in a register clobbered by calls.
3436 Find all regs now live and record this for them. */
3438 if (GET_CODE (insn) == CALL_INSN
3439 && (flags & PROP_REG_INFO))
3440 EXECUTE_IF_SET_IN_REG_SET (old, 0, i,
3442 REG_N_CALLS_CROSSED (i)++;
3445 /* LIVE gets the regs used in INSN;
3446 DEAD gets those set by it. Dead insns don't make anything
3447 live. */
3449 mark_set_regs (old, dead, PATTERN (insn),
3450 insn, significant, flags);
3452 /* If an insn doesn't use CC0, it becomes dead since we
3453 assume that every insn clobbers it. So show it dead here;
3454 mark_used_regs will set it live if it is referenced. */
3455 cc0_live = 0;
3457 if (! insn_is_dead)
3458 mark_used_regs (old, live, PATTERN (insn), flags, insn);
3460 /* Sometimes we may have inserted something before INSN (such as
3461 a move) when we make an auto-inc. So ensure we will scan
3462 those insns. */
3463 #ifdef AUTO_INC_DEC
3464 prev = PREV_INSN (insn);
3465 #endif
3467 if (! insn_is_dead && GET_CODE (insn) == CALL_INSN)
3469 register int i;
3471 rtx note;
3473 for (note = CALL_INSN_FUNCTION_USAGE (insn);
3474 note;
3475 note = XEXP (note, 1))
3476 if (GET_CODE (XEXP (note, 0)) == USE)
3477 mark_used_regs (old, live, XEXP (XEXP (note, 0), 0),
3478 flags, insn);
3480 /* Each call clobbers all call-clobbered regs that are not
3481 global or fixed. Note that the function-value reg is a
3482 call-clobbered reg, and mark_set_regs has already had
3483 a chance to handle it. */
3485 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3486 if (call_used_regs[i] && ! global_regs[i]
3487 && ! fixed_regs[i])
3489 SET_REGNO_REG_SET (dead, i);
3490 if (significant)
3491 SET_REGNO_REG_SET (significant, i);
3494 /* The stack ptr is used (honorarily) by a CALL insn. */
3495 SET_REGNO_REG_SET (live, STACK_POINTER_REGNUM);
3497 /* Calls may also reference any of the global registers,
3498 so they are made live. */
3499 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3500 if (global_regs[i])
3501 mark_used_regs (old, live,
3502 gen_rtx_REG (reg_raw_mode[i], i),
3503 flags, insn);
3505 /* Calls also clobber memory. */
3506 free_EXPR_LIST_list (&mem_set_list);
3509 /* Update OLD for the registers used or set. */
3510 AND_COMPL_REG_SET (old, dead);
3511 IOR_REG_SET (old, live);
3515 /* On final pass, update counts of how many insns each reg is live
3516 at. */
3517 if (flags & PROP_REG_INFO)
3518 EXECUTE_IF_SET_IN_REG_SET (old, 0, i,
3519 { REG_LIVE_LENGTH (i)++; });
3521 flushed: ;
3522 if (insn == first)
3523 break;
3526 FREE_REG_SET (dead);
3527 FREE_REG_SET (live);
3528 free_EXPR_LIST_list (&mem_set_list);
/* Return 1 if X (the body of an insn, or part of it) is just dead stores
   (SET expressions whose destinations are registers dead after the insn).
   NEEDED is the regset that says which regs are alive after the insn.

   Unless CALL_OK is non-zero, an insn is needed if it contains a CALL.

   If X is the entire body of an insn, NOTES contains the reg notes
   pertaining to the insn.  */

static int
insn_dead_p (x, needed, call_ok, notes)
     rtx x;
     regset needed;
     int call_ok;
     rtx notes ATTRIBUTE_UNUSED;
{
  enum rtx_code code = GET_CODE (x);

#ifdef AUTO_INC_DEC
  /* If flow is invoked after reload, we must take existing AUTO_INC
     expresions into account.  */
  if (reload_completed)
    {
      /* A REG_INC note means the insn has an embedded side effect on
	 that register; the insn stays live if the incremented register
	 is needed afterward (or is a global reg).  */
      for ( ; notes; notes = XEXP (notes, 1))
	{
	  if (REG_NOTE_KIND (notes) == REG_INC)
	    {
	      int regno = REGNO (XEXP (notes, 0));

	      /* Don't delete insns to set global regs.  */
	      if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
		  || REGNO_REG_SET_P (needed, regno))
		return 0;
	    }
	}
    }
#endif

  /* If setting something that's a reg or part of one,
     see if that register's altered value will be live.  */

  if (code == SET)
    {
      rtx r = SET_DEST (x);

      /* A SET that is a subroutine call cannot be dead.  */
      if (! call_ok && GET_CODE (SET_SRC (x)) == CALL)
	return 0;

#ifdef HAVE_cc0
      /* A SET of CC0 is dead only if no following insn reads CC0.  */
      if (GET_CODE (r) == CC0)
	return ! cc0_live;
#endif

      if (GET_CODE (r) == MEM && ! MEM_VOLATILE_P (r))
	{
	  rtx temp;
	  /* Walk the set of memory locations we are currently tracking
	     and see if one is an identical match to this memory location.
	     If so, this memory write is dead (remember, we're walking
	     backwards from the end of the block to the start.  */
	  temp = mem_set_list;
	  while (temp)
	    {
	      if (rtx_equal_p (XEXP (temp, 0), r))
		return 1;
	      temp = XEXP (temp, 1);
	    }
	}

      /* Look through wrappers to find the register actually stored.  */
      while (GET_CODE (r) == SUBREG || GET_CODE (r) == STRICT_LOW_PART
	     || GET_CODE (r) == ZERO_EXTRACT)
	r = XEXP (r, 0);

      if (GET_CODE (r) == REG)
	{
	  int regno = REGNO (r);

	  /* Don't delete insns to set global regs.  */
	  if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
	      /* Make sure insns to set frame pointer aren't deleted.  */
	      || (regno == FRAME_POINTER_REGNUM
		  && (! reload_completed || frame_pointer_needed))
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
	      || (regno == HARD_FRAME_POINTER_REGNUM
		  && (! reload_completed || frame_pointer_needed))
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
	      /* Make sure insns to set arg pointer are never deleted
		 (if the arg pointer isn't fixed, there will be a USE for
		 it, so we can treat it normally).  */
	      || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
#endif
	      || REGNO_REG_SET_P (needed, regno))
	    return 0;

	  /* If this is a hard register, verify that subsequent words are
	     not needed.  */
	  if (regno < FIRST_PSEUDO_REGISTER)
	    {
	      int n = HARD_REGNO_NREGS (regno, GET_MODE (r));

	      while (--n > 0)
		if (REGNO_REG_SET_P (needed, regno+n))
		  return 0;
	    }

	  return 1;
	}
    }

  /* If performing several activities,
     insn is dead if each activity is individually dead.
     Also, CLOBBERs and USEs can be ignored; a CLOBBER or USE
     that's inside a PARALLEL doesn't make the insn worth keeping.  */
  else if (code == PARALLEL)
    {
      int i = XVECLEN (x, 0);

      for (i--; i >= 0; i--)
	if (GET_CODE (XVECEXP (x, 0, i)) != CLOBBER
	    && GET_CODE (XVECEXP (x, 0, i)) != USE
	    && ! insn_dead_p (XVECEXP (x, 0, i), needed, call_ok, NULL_RTX))
	  return 0;

      return 1;
    }

  /* A CLOBBER of a pseudo-register that is dead serves no purpose.  That
     is not necessarily true for hard registers.  */
  else if (code == CLOBBER && GET_CODE (XEXP (x, 0)) == REG
	   && REGNO (XEXP (x, 0)) >= FIRST_PSEUDO_REGISTER
	   && ! REGNO_REG_SET_P (needed, REGNO (XEXP (x, 0))))
    return 1;

  /* We do not check other CLOBBER or USE here.  An insn consisting of just
     a CLOBBER or just a USE should not be deleted.  */
  return 0;
}
3671 /* If X is the pattern of the last insn in a libcall, and assuming X is dead,
3672 return 1 if the entire library call is dead.
3673 This is true if X copies a register (hard or pseudo)
3674 and if the hard return reg of the call insn is dead.
3675 (The caller should have tested the destination of X already for death.)
3677 If this insn doesn't just copy a register, then we don't
3678 have an ordinary libcall. In that case, cse could not have
3679 managed to substitute the source for the dest later on,
3680 so we can assume the libcall is dead.
3682 NEEDED is the bit vector of pseudoregs live before this insn.
3683 NOTE is the REG_RETVAL note of the insn. INSN is the insn itself. */
3685 static int
3686 libcall_dead_p (x, needed, note, insn)
3687 rtx x;
3688 regset needed;
3689 rtx note;
3690 rtx insn;
3692 register RTX_CODE code = GET_CODE (x);
3694 if (code == SET)
3696 register rtx r = SET_SRC (x);
3697 if (GET_CODE (r) == REG)
3699 rtx call = XEXP (note, 0);
3700 rtx call_pat;
3701 register int i;
3703 /* Find the call insn. */
3704 while (call != insn && GET_CODE (call) != CALL_INSN)
3705 call = NEXT_INSN (call);
3707 /* If there is none, do nothing special,
3708 since ordinary death handling can understand these insns. */
3709 if (call == insn)
3710 return 0;
3712 /* See if the hard reg holding the value is dead.
3713 If this is a PARALLEL, find the call within it. */
3714 call_pat = PATTERN (call);
3715 if (GET_CODE (call_pat) == PARALLEL)
3717 for (i = XVECLEN (call_pat, 0) - 1; i >= 0; i--)
3718 if (GET_CODE (XVECEXP (call_pat, 0, i)) == SET
3719 && GET_CODE (SET_SRC (XVECEXP (call_pat, 0, i))) == CALL)
3720 break;
3722 /* This may be a library call that is returning a value
3723 via invisible pointer. Do nothing special, since
3724 ordinary death handling can understand these insns. */
3725 if (i < 0)
3726 return 0;
3728 call_pat = XVECEXP (call_pat, 0, i);
3731 return insn_dead_p (call_pat, needed, 1, REG_NOTES (call));
3734 return 1;
3737 /* Return 1 if register REGNO was used before it was set, i.e. if it is
3738 live at function entry. Don't count global register variables, variables
3739 in registers that can be used for function arg passing, or variables in
3740 fixed hard registers. */
3743 regno_uninitialized (regno)
3744 int regno;
3746 if (n_basic_blocks == 0
3747 || (regno < FIRST_PSEUDO_REGISTER
3748 && (global_regs[regno]
3749 || fixed_regs[regno]
3750 || FUNCTION_ARG_REGNO_P (regno))))
3751 return 0;
3753 return REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno);
3756 /* 1 if register REGNO was alive at a place where `setjmp' was called
3757 and was set more than once or is an argument.
3758 Such regs may be clobbered by `longjmp'. */
3761 regno_clobbered_at_setjmp (regno)
3762 int regno;
3764 if (n_basic_blocks == 0)
3765 return 0;
3767 return ((REG_N_SETS (regno) > 1
3768 || REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno))
3769 && REGNO_REG_SET_P (regs_live_at_setjmp, regno));
3772 /* INSN references memory, possibly using autoincrement addressing modes.
3773 Find any entries on the mem_set_list that need to be invalidated due
3774 to an address change. */
3775 static void
3776 invalidate_mems_from_autoinc (insn)
3777 rtx insn;
3779 rtx note = REG_NOTES (insn);
3780 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
3782 if (REG_NOTE_KIND (note) == REG_INC)
3784 rtx temp = mem_set_list;
3785 rtx prev = NULL_RTX;
3786 rtx next;
3788 while (temp)
3790 next = XEXP (temp, 1);
3791 if (reg_overlap_mentioned_p (XEXP (note, 0), XEXP (temp, 0)))
3793 /* Splice temp out of list. */
3794 if (prev)
3795 XEXP (prev, 1) = next;
3796 else
3797 mem_set_list = next;
3798 free_EXPR_LIST_node (temp);
3800 else
3801 prev = temp;
3802 temp = next;
3808 /* Process the registers that are set within X. Their bits are set to
3809 1 in the regset DEAD, because they are dead prior to this insn.
3811 If INSN is nonzero, it is the insn being processed.
3813 FLAGS is the set of operations to perform. */
3815 static void
3816 mark_set_regs (needed, dead, x, insn, significant, flags)
3817 regset needed;
3818 regset dead;
3819 rtx x;
3820 rtx insn;
3821 regset significant;
3822 int flags;
3824 register RTX_CODE code = GET_CODE (x);
3826 if (code == SET || code == CLOBBER)
3827 mark_set_1 (needed, dead, x, insn, significant, flags);
3828 else if (code == PARALLEL)
3830 register int i;
3831 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
3833 code = GET_CODE (XVECEXP (x, 0, i));
3834 if (code == SET || code == CLOBBER)
3835 mark_set_1 (needed, dead, XVECEXP (x, 0, i), insn,
3836 significant, flags);
/* Process a single SET rtx, X.

   NEEDED is the regset of registers live after this insn; DEAD collects
   the registers X sets (dead before the insn); SIGNIFICANT, if nonzero,
   collects every register set in the block; INSN is the containing insn
   (or zero); FLAGS selects which bookkeeping (log links, reg info,
   death notes, autoinc) to perform.  */

static void
mark_set_1 (needed, dead, x, insn, significant, flags)
     regset needed;
     regset dead;
     rtx x;
     rtx insn;
     regset significant;
     int flags;
{
  register int regno = -1;
  register rtx reg = SET_DEST (x);

  /* Some targets place small structures in registers for
     return values of functions.  We have to detect this
     case specially here to get correct flow information.  */
  if (GET_CODE (reg) == PARALLEL
      && GET_MODE (reg) == BLKmode)
    {
      register int i;

      /* Recurse on each element of the group; no further work needed.  */
      for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
	mark_set_1 (needed, dead, XVECEXP (reg, 0, i), insn,
		    significant, flags);
      return;
    }

  /* Modifying just one hardware register of a multi-reg value
     or just a byte field of a register
     does not mean the value from before this insn is now dead.
     But it does mean liveness of that register at the end of the block
     is significant.

     Within mark_set_1, however, we treat it as if the register is
     indeed modified.  mark_used_regs will, however, also treat this
     register as being used.  Thus, we treat these insns as setting a
     new value for the register as a function of its old value.  This
     cases LOG_LINKS to be made appropriately and this will help combine.  */
  while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
	 || GET_CODE (reg) == SIGN_EXTRACT
	 || GET_CODE (reg) == STRICT_LOW_PART)
    reg = XEXP (reg, 0);

  /* If this set is a MEM, then it kills any aliased writes.
     If this set is a REG, then it kills any MEMs which use the reg.  */
  if (flags & PROP_SCAN_DEAD_CODE)
    {
      if (GET_CODE (reg) == MEM
	  || GET_CODE (reg) == REG)
	{
	  rtx temp = mem_set_list;
	  rtx prev = NULL_RTX;
	  rtx next;

	  while (temp)
	    {
	      next = XEXP (temp, 1);
	      if ((GET_CODE (reg) == MEM
		   && output_dependence (XEXP (temp, 0), reg))
		  || (GET_CODE (reg) == REG
		      && reg_overlap_mentioned_p (reg, XEXP (temp, 0))))
		{
		  /* Splice this entry out of the list.  */
		  if (prev)
		    XEXP (prev, 1) = next;
		  else
		    mem_set_list = next;
		  free_EXPR_LIST_node (temp);
		}
	      else
		prev = temp;
	      temp = next;
	    }
	}

      /* If the memory reference had embedded side effects (autoincrement
	 address modes.  Then we may need to kill some entries on the
	 memory set list.  */
      if (insn && GET_CODE (reg) == MEM)
	invalidate_mems_from_autoinc (insn);

      if (GET_CODE (reg) == MEM && ! side_effects_p (reg)
	  /* We do not know the size of a BLKmode store, so we do not track
	     them for redundant store elimination.  */
	  && GET_MODE (reg) != BLKmode
	  /* There are no REG_INC notes for SP, so we can't assume we'll see
	     everything that invalidates it.  To be safe, don't eliminate any
	     stores though SP; none of them should be redundant anyway.  */
	  && ! reg_mentioned_p (stack_pointer_rtx, reg))
	mem_set_list = alloc_EXPR_LIST (0, reg, mem_set_list);
    }

  /* Only track a plain REG destination that isn't a frame/arg pointer
     still in use, and isn't a global register variable.  */
  if (GET_CODE (reg) == REG
      && (regno = REGNO (reg),
	  ! (regno == FRAME_POINTER_REGNUM
	     && (! reload_completed || frame_pointer_needed)))
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
      && ! (regno == HARD_FRAME_POINTER_REGNUM
	    && (! reload_completed || frame_pointer_needed))
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
      && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
#endif
      && ! (regno < FIRST_PSEUDO_REGISTER && global_regs[regno]))
    /* && regno != STACK_POINTER_REGNUM) -- let's try without this.  */
    {
      int some_needed = REGNO_REG_SET_P (needed, regno);
      int some_not_needed = ! some_needed;

      /* Mark it as a significant register for this basic block.  */
      if (significant)
	SET_REGNO_REG_SET (significant, regno);

      /* Mark it as dead before this insn.  */
      SET_REGNO_REG_SET (dead, regno);

      /* A hard reg in a wide mode may really be multiple registers.
	 If so, mark all of them just like the first.  */
      if (regno < FIRST_PSEUDO_REGISTER)
	{
	  int n;

	  /* Nothing below is needed for the stack pointer; get out asap.
	     Eg, log links aren't needed, since combine won't use them.  */
	  if (regno == STACK_POINTER_REGNUM)
	    return;

	  /* Accumulate neededness over every word of the register.  */
	  n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
	  while (--n > 0)
	    {
	      int regno_n = regno + n;
	      int needed_regno = REGNO_REG_SET_P (needed, regno_n);
	      if (significant)
		SET_REGNO_REG_SET (significant, regno_n);

	      SET_REGNO_REG_SET (dead, regno_n);
	      some_needed |= needed_regno;
	      some_not_needed |= ! needed_regno;
	    }
	}

      /* Additional data to record if this is the final pass.  */
      if (flags & (PROP_LOG_LINKS | PROP_REG_INFO
		   | PROP_DEATH_NOTES | PROP_AUTOINC))
	{
	  register rtx y;
	  register int blocknum = BLOCK_NUM (insn);

	  /* Y is the next insn (if any) recorded as using this reg.  */
	  y = NULL_RTX;
	  if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
	    y = reg_next_use[regno];

	  /* If this is a hard reg, record this function uses the reg.  */

	  if (regno < FIRST_PSEUDO_REGISTER)
	    {
	      register int i;
	      int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (reg));

	      if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
		for (i = regno; i < endregno; i++)
		  {
		    /* The next use is no longer "next", since a store
		       intervenes.  */
		    reg_next_use[i] = 0;
		  }

	      if (flags & PROP_REG_INFO)
		for (i = regno; i < endregno; i++)
		  {
		    regs_ever_live[i] = 1;
		    REG_N_SETS (i)++;
		  }
	    }
	  else
	    {
	      /* The next use is no longer "next", since a store
		 intervenes.  */
	      if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
		reg_next_use[regno] = 0;

	      /* Keep track of which basic blocks each reg appears in.  */

	      if (flags & PROP_REG_INFO)
		{
		  if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
		    REG_BASIC_BLOCK (regno) = blocknum;
		  else if (REG_BASIC_BLOCK (regno) != blocknum)
		    REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;

		  /* Count (weighted) references, stores, etc.  This counts a
		     register twice if it is modified, but that is correct.  */
		  REG_N_SETS (regno)++;
		  REG_N_REFS (regno) += loop_depth;

		  /* The insns where a reg is live are normally counted
		     elsewhere, but we want the count to include the insn
		     where the reg is set, and the normal counting mechanism
		     would not count it.  */
		  REG_LIVE_LENGTH (regno)++;
		}
	    }

	  if (! some_not_needed)
	    {
	      /* Every word of the reg is live after this store.  */
	      if (flags & PROP_LOG_LINKS)
		{
		  /* Make a logical link from the next following insn
		     that uses this register, back to this insn.
		     The following insns have already been processed.

		     We don't build a LOG_LINK for hard registers containing
		     in ASM_OPERANDs.  If these registers get replaced,
		     we might wind up changing the semantics of the insn,
		     even if reload can make what appear to be valid
		     assignments later.  */
		  if (y && (BLOCK_NUM (y) == blocknum)
		      && (regno >= FIRST_PSEUDO_REGISTER
			  || asm_noperands (PATTERN (y)) < 0))
		    LOG_LINKS (y) = alloc_INSN_LIST (insn, LOG_LINKS (y));
		}
	    }
	  else if (! some_needed)
	    {
	      /* No word of the reg is live after this store.  */
	      if (flags & PROP_REG_INFO)
		REG_N_DEATHS (REGNO (reg))++;

	      if (flags & PROP_DEATH_NOTES)
		{
		  /* Note that dead stores have already been deleted
		     when possible.  If we get here, we have found a
		     dead store that cannot be eliminated (because the
		     same insn does something useful).  Indicate this
		     by marking the reg being set as dying here.  */
		  REG_NOTES (insn)
		    = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
		}
	    }
	  else
	    {
	      if (flags & PROP_DEATH_NOTES)
		{
		  /* This is a case where we have a multi-word hard register
		     and some, but not all, of the words of the register are
		     needed in subsequent insns.  Write REG_UNUSED notes
		     for those parts that were not needed.  This case should
		     be rare.  */

		  int i;

		  for (i = HARD_REGNO_NREGS (regno, GET_MODE (reg)) - 1;
		       i >= 0; i--)
		    if (!REGNO_REG_SET_P (needed, regno + i))
		      REG_NOTES (insn)
			= (alloc_EXPR_LIST
			   (REG_UNUSED,
			    gen_rtx_REG (reg_raw_mode[regno + i], regno + i),
			    REG_NOTES (insn)));
		}
	    }
	}
    }
  else if (GET_CODE (reg) == REG)
    {
      /* A frame/arg-pointer or global-reg store: just invalidate the
	 recorded next use.  */
      if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
	reg_next_use[regno] = 0;
    }

  /* If this is the last pass and this is a SCRATCH, show it will be dying
     here and count it.  */
  else if (GET_CODE (reg) == SCRATCH)
    {
      if (flags & PROP_DEATH_NOTES)
	REG_NOTES (insn)
	  = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
    }
}
#ifdef AUTO_INC_DEC

/* X is a MEM found in INSN.  See if we can convert it into an auto-increment
   reference.

   NEEDED is the regset of registers live after INSN; it may be updated
   if the transformation makes a new register live at this point.  */

static void
find_auto_inc (needed, x, insn)
     regset needed;
     rtx x;
     rtx insn;
{
  rtx addr = XEXP (x, 0);
  HOST_WIDE_INT offset = 0;
  rtx set;

  /* Here we detect use of an index register which might be good for
     postincrement, postdecrement, preincrement, or predecrement.  */

  if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
    offset = INTVAL (XEXP (addr, 1)), addr = XEXP (addr, 0);

  if (GET_CODE (addr) == REG)
    {
      register rtx y;
      register int size = GET_MODE_SIZE (GET_MODE (x));
      rtx use;
      rtx incr;
      int regno = REGNO (addr);

      /* Is the next use an increment that might make auto-increment? */
      if ((incr = reg_next_use[regno]) != 0
	  && (set = single_set (incr)) != 0
	  && GET_CODE (set) == SET
	  && BLOCK_NUM (incr) == BLOCK_NUM (insn)
	  /* Can't add side effects to jumps; if reg is spilled and
	     reloaded, there's no way to store back the altered value.  */
	  && GET_CODE (insn) != JUMP_INSN
	  && (y = SET_SRC (set), GET_CODE (y) == PLUS)
	  && XEXP (y, 0) == addr
	  && GET_CODE (XEXP (y, 1)) == CONST_INT
	  /* The increment amount and the offset must match one of the
	     auto-inc forms the target supports.  */
	  && ((HAVE_POST_INCREMENT
	       && (INTVAL (XEXP (y, 1)) == size && offset == 0))
	      || (HAVE_POST_DECREMENT
		  && (INTVAL (XEXP (y, 1)) == - size && offset == 0))
	      || (HAVE_PRE_INCREMENT
		  && (INTVAL (XEXP (y, 1)) == size && offset == size))
	      || (HAVE_PRE_DECREMENT
		  && (INTVAL (XEXP (y, 1)) == - size && offset == - size)))
	  /* Make sure this reg appears only once in this insn.  */
	  && (use = find_use_as_address (PATTERN (insn), addr, offset),
	      use != 0 && use != (rtx) 1))
	{
	  rtx q = SET_DEST (set);
	  enum rtx_code inc_code = (INTVAL (XEXP (y, 1)) == size
				    ? (offset ? PRE_INC : POST_INC)
				    : (offset ? PRE_DEC : POST_DEC));

	  if (dead_or_set_p (incr, addr))
	    {
	      /* This is the simple case.  Try to make the auto-inc.  If
		 we can't, we are done.  Otherwise, we will do any
		 needed updates below.  */
	      /* NOTE(review): the final `0' argument line of this call was
		 lost in extraction; restored from context — confirm against
		 the repository copy.  */
	      if (! validate_change (insn, &XEXP (x, 0),
				     gen_rtx_fmt_e (inc_code, Pmode, addr),
				     0))
		return;
	    }
	  else if (GET_CODE (q) == REG
		   /* PREV_INSN used here to check the semi-open interval
		      [insn,incr).  */
		   && ! reg_used_between_p (q,  PREV_INSN (insn), incr)
		   /* We must also check for sets of q as q may be
		      a call clobbered hard register and there may
		      be a call between PREV_INSN (insn) and incr.  */
		   && ! reg_set_between_p (q,  PREV_INSN (insn), incr))
	    {
	      /* We have *p followed sometime later by q = p+size.
		 Both p and q must be live afterward,
		 and q is not used between INSN and its assignment.
		 Change it to q = p, ...*q..., q = q+size.
		 Then fall into the usual case.  */
	      rtx insns, temp;
	      basic_block bb;

	      start_sequence ();
	      emit_move_insn (q, addr);
	      insns = get_insns ();
	      end_sequence ();

	      bb = BLOCK_FOR_INSN (insn);
	      for (temp = insns; temp; temp = NEXT_INSN (temp))
		set_block_for_insn (temp, bb);

	      /* If we can't make the auto-inc, or can't make the
		 replacement into Y, exit.  There's no point in making
		 the change below if we can't do the auto-inc and doing
		 so is not correct in the pre-inc case.  */

	      /* NOTE(review): the trailing `1' (in-group) argument line of
		 this call was lost in extraction; restored from context.  */
	      validate_change (insn, &XEXP (x, 0),
			       gen_rtx_fmt_e (inc_code, Pmode, q),
			       1);
	      validate_change (incr, &XEXP (y, 0), q, 1);
	      if (! apply_change_group ())
		return;

	      /* We now know we'll be doing this change, so emit the
		 new insn(s) and do the updates.  */
	      emit_insns_before (insns, insn);

	      if (BLOCK_FOR_INSN (insn)->head == insn)
		BLOCK_FOR_INSN (insn)->head = insns;

	      /* INCR will become a NOTE and INSN won't contain a
		 use of ADDR.  If a use of ADDR was just placed in
		 the insn before INSN, make that the next use.
		 Otherwise, invalidate it.  */
	      if (GET_CODE (PREV_INSN (insn)) == INSN
		  && GET_CODE (PATTERN (PREV_INSN (insn))) == SET
		  && SET_SRC (PATTERN (PREV_INSN (insn))) == addr)
		reg_next_use[regno] = PREV_INSN (insn);
	      else
		reg_next_use[regno] = 0;

	      addr = q;
	      regno = REGNO (q);

	      /* REGNO is now used in INCR which is below INSN, but
		 it previously wasn't live here.  If we don't mark
		 it as needed, we'll put a REG_DEAD note for it
		 on this insn, which is incorrect.  */
	      SET_REGNO_REG_SET (needed, regno);

	      /* If there are any calls between INSN and INCR, show
		 that REGNO now crosses them.  */
	      for (temp = insn; temp != incr; temp = NEXT_INSN (temp))
		if (GET_CODE (temp) == CALL_INSN)
		  REG_N_CALLS_CROSSED (regno)++;
	    }
	  else
	    return;

	  /* If we haven't returned, it means we were able to make the
	     auto-inc, so update the status.  First, record that this insn
	     has an implicit side effect.  */

	  REG_NOTES (insn)
	    = alloc_EXPR_LIST (REG_INC, addr, REG_NOTES (insn));

	  /* Modify the old increment-insn to simply copy
	     the already-incremented value of our register.  */
	  if (! validate_change (incr, &SET_SRC (set), addr, 0))
	    abort ();

	  /* If that makes it a no-op (copying the register into itself) delete
	     it so it won't appear to be a "use" and a "set" of this
	     register.  */
	  if (SET_DEST (set) == addr)
	    {
	      PUT_CODE (incr, NOTE);
	      NOTE_LINE_NUMBER (incr) = NOTE_INSN_DELETED;
	      NOTE_SOURCE_FILE (incr) = 0;
	    }

	  if (regno >= FIRST_PSEUDO_REGISTER)
	    {
	      /* Count an extra reference to the reg.  When a reg is
		 incremented, spilling it is worse, so we want to make
		 that less likely.  */
	      REG_N_REFS (regno) += loop_depth;

	      /* Count the increment as a setting of the register,
		 even though it isn't a SET in rtl.  */
	      REG_N_SETS (regno)++;
	    }
	}
    }
}

#endif /* AUTO_INC_DEC */
4300 /* Scan expression X and store a 1-bit in LIVE for each reg it uses.
4301 This is done assuming the registers needed from X
4302 are those that have 1-bits in NEEDED.
4304 FLAGS is the set of enabled operations.
4306 INSN is the containing instruction. If INSN is dead, this function is not
4307 called. */
4309 static void
4310 mark_used_regs (needed, live, x, flags, insn)
4311 regset needed;
4312 regset live;
4313 rtx x;
4314 int flags;
4315 rtx insn;
4317 register RTX_CODE code;
4318 register int regno;
4319 int i;
4321 retry:
4322 code = GET_CODE (x);
4323 switch (code)
4325 case LABEL_REF:
4326 case SYMBOL_REF:
4327 case CONST_INT:
4328 case CONST:
4329 case CONST_DOUBLE:
4330 case PC:
4331 case ADDR_VEC:
4332 case ADDR_DIFF_VEC:
4333 return;
4335 #ifdef HAVE_cc0
4336 case CC0:
4337 cc0_live = 1;
4338 return;
4339 #endif
4341 case CLOBBER:
4342 /* If we are clobbering a MEM, mark any registers inside the address
4343 as being used. */
4344 if (GET_CODE (XEXP (x, 0)) == MEM)
4345 mark_used_regs (needed, live, XEXP (XEXP (x, 0), 0), flags, insn);
4346 return;
4348 case MEM:
4349 /* Don't bother watching stores to mems if this is not the
4350 final pass. We'll not be deleting dead stores this round. */
4351 if (flags & PROP_SCAN_DEAD_CODE)
4353 /* Invalidate the data for the last MEM stored, but only if MEM is
4354 something that can be stored into. */
4355 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
4356 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
4357 ; /* needn't clear the memory set list */
4358 else
4360 rtx temp = mem_set_list;
4361 rtx prev = NULL_RTX;
4362 rtx next;
4364 while (temp)
4366 next = XEXP (temp, 1);
4367 if (anti_dependence (XEXP (temp, 0), x))
4369 /* Splice temp out of the list. */
4370 if (prev)
4371 XEXP (prev, 1) = next;
4372 else
4373 mem_set_list = next;
4374 free_EXPR_LIST_node (temp);
4376 else
4377 prev = temp;
4378 temp = next;
4382 /* If the memory reference had embedded side effects (autoincrement
4383 address modes. Then we may need to kill some entries on the
4384 memory set list. */
4385 if (insn)
4386 invalidate_mems_from_autoinc (insn);
4389 #ifdef AUTO_INC_DEC
4390 if (flags & PROP_AUTOINC)
4391 find_auto_inc (needed, x, insn);
4392 #endif
4393 break;
4395 case SUBREG:
4396 if (GET_CODE (SUBREG_REG (x)) == REG
4397 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER
4398 && (GET_MODE_SIZE (GET_MODE (x))
4399 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
4400 REG_CHANGES_SIZE (REGNO (SUBREG_REG (x))) = 1;
4402 /* While we're here, optimize this case. */
4403 x = SUBREG_REG (x);
4405 /* In case the SUBREG is not of a register, don't optimize */
4406 if (GET_CODE (x) != REG)
4408 mark_used_regs (needed, live, x, flags, insn);
4409 return;
4412 /* ... fall through ... */
4414 case REG:
4415 /* See a register other than being set
4416 => mark it as needed. */
4418 regno = REGNO (x);
4420 int some_needed = REGNO_REG_SET_P (needed, regno);
4421 int some_not_needed = ! some_needed;
4423 SET_REGNO_REG_SET (live, regno);
4425 /* A hard reg in a wide mode may really be multiple registers.
4426 If so, mark all of them just like the first. */
4427 if (regno < FIRST_PSEUDO_REGISTER)
4429 int n;
4431 /* For stack ptr or fixed arg pointer,
4432 nothing below can be necessary, so waste no more time. */
4433 if (regno == STACK_POINTER_REGNUM
4434 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4435 || (regno == HARD_FRAME_POINTER_REGNUM
4436 && (! reload_completed || frame_pointer_needed))
4437 #endif
4438 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4439 || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
4440 #endif
4441 || (regno == FRAME_POINTER_REGNUM
4442 && (! reload_completed || frame_pointer_needed)))
4444 /* If this is a register we are going to try to eliminate,
4445 don't mark it live here. If we are successful in
4446 eliminating it, it need not be live unless it is used for
4447 pseudos, in which case it will have been set live when
4448 it was allocated to the pseudos. If the register will not
4449 be eliminated, reload will set it live at that point. */
4451 if (! TEST_HARD_REG_BIT (elim_reg_set, regno))
4452 regs_ever_live[regno] = 1;
4453 return;
4455 /* No death notes for global register variables;
4456 their values are live after this function exits. */
4457 if (global_regs[regno])
4459 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4460 reg_next_use[regno] = insn;
4461 return;
4464 n = HARD_REGNO_NREGS (regno, GET_MODE (x));
4465 while (--n > 0)
4467 int regno_n = regno + n;
4468 int needed_regno = REGNO_REG_SET_P (needed, regno_n);
4470 SET_REGNO_REG_SET (live, regno_n);
4471 some_needed |= needed_regno;
4472 some_not_needed |= ! needed_regno;
4476 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4478 /* Record where each reg is used, so when the reg
4479 is set we know the next insn that uses it. */
4481 reg_next_use[regno] = insn;
4483 if (flags & PROP_REG_INFO)
4485 if (regno < FIRST_PSEUDO_REGISTER)
4487 /* If a hard reg is being used,
4488 record that this function does use it. */
4490 i = HARD_REGNO_NREGS (regno, GET_MODE (x));
4491 if (i == 0)
4492 i = 1;
4494 regs_ever_live[regno + --i] = 1;
4495 while (i > 0);
4497 else
4499 /* Keep track of which basic block each reg appears in. */
4501 register int blocknum = BLOCK_NUM (insn);
4503 if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
4504 REG_BASIC_BLOCK (regno) = blocknum;
4505 else if (REG_BASIC_BLOCK (regno) != blocknum)
4506 REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
4508 /* Count (weighted) number of uses of each reg. */
4510 REG_N_REFS (regno) += loop_depth;
4514 /* Record and count the insns in which a reg dies.
4515 If it is used in this insn and was dead below the insn
4516 then it dies in this insn. If it was set in this insn,
4517 we do not make a REG_DEAD note; likewise if we already
4518 made such a note. */
4520 if (flags & PROP_DEATH_NOTES)
4522 if (some_not_needed
4523 && ! dead_or_set_p (insn, x)
4524 #if 0
4525 && (regno >= FIRST_PSEUDO_REGISTER || ! fixed_regs[regno])
4526 #endif
4529 /* Check for the case where the register dying partially
4530 overlaps the register set by this insn. */
4531 if (regno < FIRST_PSEUDO_REGISTER
4532 && HARD_REGNO_NREGS (regno, GET_MODE (x)) > 1)
4534 int n = HARD_REGNO_NREGS (regno, GET_MODE (x));
4535 while (--n >= 0)
4536 some_needed |= dead_or_set_regno_p (insn, regno + n);
4539 /* If none of the words in X is needed, make a REG_DEAD
4540 note. Otherwise, we must make partial REG_DEAD notes. */
4541 if (! some_needed)
4543 REG_NOTES (insn)
4544 = alloc_EXPR_LIST (REG_DEAD, x, REG_NOTES (insn));
4545 REG_N_DEATHS (regno)++;
4547 else
4549 int i;
4551 /* Don't make a REG_DEAD note for a part of a register
4552 that is set in the insn. */
4554 for (i = HARD_REGNO_NREGS (regno, GET_MODE (x)) - 1;
4555 i >= 0; i--)
4556 if (!REGNO_REG_SET_P (needed, regno + i)
4557 && ! dead_or_set_regno_p (insn, regno + i))
4558 REG_NOTES (insn)
4559 = (alloc_EXPR_LIST
4560 (REG_DEAD, gen_rtx_REG (reg_raw_mode[regno + i],
4561 regno + i),
4562 REG_NOTES (insn)));
4567 return;
4569 case SET:
4571 register rtx testreg = SET_DEST (x);
4572 int mark_dest = 0;
4574 /* If storing into MEM, don't show it as being used. But do
4575 show the address as being used. */
4576 if (GET_CODE (testreg) == MEM)
4578 #ifdef AUTO_INC_DEC
4579 if (flags & PROP_AUTOINC)
4580 find_auto_inc (needed, testreg, insn);
4581 #endif
4582 mark_used_regs (needed, live, XEXP (testreg, 0), flags, insn);
4583 mark_used_regs (needed, live, SET_SRC (x), flags, insn);
4584 return;
4587 /* Storing in STRICT_LOW_PART is like storing in a reg
4588 in that this SET might be dead, so ignore it in TESTREG.
4589 but in some other ways it is like using the reg.
4591 Storing in a SUBREG or a bit field is like storing the entire
4592 register in that if the register's value is not used
4593 then this SET is not needed. */
4594 while (GET_CODE (testreg) == STRICT_LOW_PART
4595 || GET_CODE (testreg) == ZERO_EXTRACT
4596 || GET_CODE (testreg) == SIGN_EXTRACT
4597 || GET_CODE (testreg) == SUBREG)
4599 if (GET_CODE (testreg) == SUBREG
4600 && GET_CODE (SUBREG_REG (testreg)) == REG
4601 && REGNO (SUBREG_REG (testreg)) >= FIRST_PSEUDO_REGISTER
4602 && (GET_MODE_SIZE (GET_MODE (testreg))
4603 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (testreg)))))
4604 REG_CHANGES_SIZE (REGNO (SUBREG_REG (testreg))) = 1;
4606 /* Modifying a single register in an alternate mode
4607 does not use any of the old value. But these other
4608 ways of storing in a register do use the old value. */
4609 if (GET_CODE (testreg) == SUBREG
4610 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
4612 else
4613 mark_dest = 1;
4615 testreg = XEXP (testreg, 0);
4618 /* If this is a store into a register,
4619 recursively scan the value being stored. */
4621 if ((GET_CODE (testreg) == PARALLEL
4622 && GET_MODE (testreg) == BLKmode)
4623 || (GET_CODE (testreg) == REG
4624 && (regno = REGNO (testreg), ! (regno == FRAME_POINTER_REGNUM
4625 && (! reload_completed || frame_pointer_needed)))
4626 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4627 && ! (regno == HARD_FRAME_POINTER_REGNUM
4628 && (! reload_completed || frame_pointer_needed))
4629 #endif
4630 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4631 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
4632 #endif
4634 /* We used to exclude global_regs here, but that seems wrong.
4635 Storing in them is like storing in mem. */
4637 mark_used_regs (needed, live, SET_SRC (x), flags, insn);
4638 if (mark_dest)
4639 mark_used_regs (needed, live, SET_DEST (x), flags, insn);
4640 return;
4643 break;
4645 case RETURN:
4646 /* ??? This info should have been gotten from mark_regs_live_at_end,
4647 as applied to the EXIT block, and propagated along the edge that
4648 connects this block to the EXIT. */
4649 break;
4651 case ASM_OPERANDS:
4652 case UNSPEC_VOLATILE:
4653 case TRAP_IF:
4654 case ASM_INPUT:
4656 /* Traditional and volatile asm instructions must be considered to use
4657 and clobber all hard registers, all pseudo-registers and all of
4658 memory. So must TRAP_IF and UNSPEC_VOLATILE operations.
4660 Consider for instance a volatile asm that changes the fpu rounding
4661 mode. An insn should not be moved across this even if it only uses
4662 pseudo-regs because it might give an incorrectly rounded result.
4664 ?!? Unfortunately, marking all hard registers as live causes massive
4665 problems for the register allocator and marking all pseudos as live
4666 creates mountains of uninitialized variable warnings.
4668 So for now, just clear the memory set list and mark any regs
4669 we can find in ASM_OPERANDS as used. */
4670 if (code != ASM_OPERANDS || MEM_VOLATILE_P (x))
4671 free_EXPR_LIST_list (&mem_set_list);
4673 /* For all ASM_OPERANDS, we must traverse the vector of input operands.
4674 We can not just fall through here since then we would be confused
4675 by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate
4676 traditional asms unlike their normal usage. */
4677 if (code == ASM_OPERANDS)
4679 int j;
4681 for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
4682 mark_used_regs (needed, live, ASM_OPERANDS_INPUT (x, j),
4683 flags, insn);
4685 break;
4689 default:
4690 break;
4693 /* Recursively scan the operands of this expression. */
4696 register const char *fmt = GET_RTX_FORMAT (code);
4697 register int i;
4699 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4701 if (fmt[i] == 'e')
4703 /* Tail recursive case: save a function call level. */
4704 if (i == 0)
4706 x = XEXP (x, 0);
4707 goto retry;
4709 mark_used_regs (needed, live, XEXP (x, i), flags, insn);
4711 else if (fmt[i] == 'E')
4713 register int j;
4714 for (j = 0; j < XVECLEN (x, i); j++)
4715 mark_used_regs (needed, live, XVECEXP (x, i, j), flags, insn);
4721 #ifdef AUTO_INC_DEC
/* Try to turn the increment insn INSN into a pre-increment or
   pre-decrement on the next insn (in the same basic block) that uses
   the incremented register.  On success, INSN itself is converted to a
   deleted note and 1 is returned; otherwise 0.
   NOTE(review): assumes single_set (insn) is non-null and of the form
   (set reg (plus/minus reg (const_int N))) -- confirm at the caller.  */
4723 static int
4724 try_pre_increment_1 (insn)
4725 rtx insn;
4727 /* Find the next use of this reg. If in same basic block,
4728 make it do pre-increment or pre-decrement if appropriate. */
4729 rtx x = single_set (insn);
4730 HOST_WIDE_INT amount = ((GET_CODE (SET_SRC (x)) == PLUS ? 1 : -1)
4731 * INTVAL (XEXP (SET_SRC (x), 1)));
4732 int regno = REGNO (SET_DEST (x));
/* reg_next_use was filled in by the life-analysis pass above.  */
4733 rtx y = reg_next_use[regno];
4734 if (y != 0
4735 && BLOCK_NUM (y) == BLOCK_NUM (insn)
4736 /* Don't do this if the reg dies, or gets set in y; a standard addressing
4737 mode would be better. */
4738 && ! dead_or_set_p (y, SET_DEST (x))
4739 && try_pre_increment (y, SET_DEST (x), amount))
4741 /* We have found a suitable auto-increment
4742 and already changed insn Y to do it.
4743 So flush this increment-instruction. */
4744 PUT_CODE (insn, NOTE);
4745 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
4746 NOTE_SOURCE_FILE (insn) = 0;
4747 /* Count a reference to this reg for the increment
4748 insn we are deleting. When a reg is incremented,
4749 spilling it is worse, so we want to make that
4750 less likely. */
4751 if (regno >= FIRST_PSEUDO_REGISTER)
4753 REG_N_REFS (regno) += loop_depth;
4754 REG_N_SETS (regno)++;
4756 return 1;
4758 return 0;
4761 /* Try to change INSN so that it does pre-increment or pre-decrement
4762 addressing on register REG in order to add AMOUNT to REG.
4763 AMOUNT is negative for pre-decrement.
4764 Returns 1 if the change could be made.
4765 This checks all about the validity of the result of modifying INSN. */
4767 static int
4768 try_pre_increment (insn, reg, amount)
4769 rtx insn, reg;
4770 HOST_WIDE_INT amount;
4772 register rtx use;
4774 /* Nonzero if we can try to make a pre-increment or pre-decrement.
4775 For example, addl $4,r1; movl (r1),... can become movl +(r1),... */
4776 int pre_ok = 0;
4777 /* Nonzero if we can try to make a post-increment or post-decrement.
4778 For example, addl $4,r1; movl -4(r1),... can become movl (r1)+,...
4779 It is possible for both PRE_OK and POST_OK to be nonzero if the machine
4780 supports both pre-inc and post-inc, or both pre-dec and post-dec. */
4781 int post_ok = 0;
4783 /* Nonzero if the opportunity actually requires post-inc or post-dec. */
4784 int do_post = 0;
4786 /* From the sign of increment, see which possibilities are conceivable
4787 on this target machine. */
4788 if (HAVE_PRE_INCREMENT && amount > 0)
4789 pre_ok = 1;
4790 if (HAVE_POST_INCREMENT && amount > 0)
4791 post_ok = 1;
4793 if (HAVE_PRE_DECREMENT && amount < 0)
4794 pre_ok = 1;
4795 if (HAVE_POST_DECREMENT && amount < 0)
4796 post_ok = 1;
4798 if (! (pre_ok || post_ok))
4799 return 0;
4801 /* It is not safe to add a side effect to a jump insn
4802 because if the incremented register is spilled and must be reloaded
4803 there would be no way to store the incremented value back in memory. */
4805 if (GET_CODE (insn) == JUMP_INSN)
4806 return 0;
4808 use = 0;
4809 if (pre_ok)
4810 use = find_use_as_address (PATTERN (insn), reg, 0);
/* (rtx) 1 is find_use_as_address's sentinel for "REG is used, but not
   (only) as a suitable memory address" -- treat it like "not found".  */
4811 if (post_ok && (use == 0 || use == (rtx) 1))
4813 use = find_use_as_address (PATTERN (insn), reg, -amount);
4814 do_post = 1;
4817 if (use == 0 || use == (rtx) 1)
4818 return 0;
/* The access width must match the increment amount exactly, or the
   side-effect address would step by the wrong distance.  */
4820 if (GET_MODE_SIZE (GET_MODE (use)) != (amount > 0 ? amount : - amount))
4821 return 0;
4823 /* See if this combination of instruction and addressing mode exists. */
4824 if (! validate_change (insn, &XEXP (use, 0),
4825 gen_rtx_fmt_e (amount > 0
4826 ? (do_post ? POST_INC : PRE_INC)
4827 : (do_post ? POST_DEC : PRE_DEC),
4828 Pmode, reg), 0))
4829 return 0;
4831 /* Record that this insn now has an implicit side effect on X. */
4832 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, reg, REG_NOTES (insn));
4833 return 1;
4836 #endif /* AUTO_INC_DEC */
4838 /* Find the place in the rtx X where REG is used as a memory address.
4839 Return the MEM rtx that so uses it.
4840 If PLUSCONST is nonzero, search instead for a memory address equivalent to
4841 (plus REG (const_int PLUSCONST)).
4843 If such an address does not appear, return 0.
4844 If REG appears more than once, or is used other than in such an address,
4845 return (rtx)1. */
/* NOTE(review): the return-type line ("rtx") appears to have been lost
   from this extract; the function returns an rtx per the comment above.  */
4848 find_use_as_address (x, reg, plusconst)
4849 register rtx x;
4850 rtx reg;
4851 HOST_WIDE_INT plusconst;
4853 enum rtx_code code = GET_CODE (x);
4854 const char *fmt = GET_RTX_FORMAT (code);
4855 register int i;
4856 register rtx value = 0;
4857 register rtx tem;
/* Exact match: (mem (reg)) with no offset wanted.  */
4859 if (code == MEM && XEXP (x, 0) == reg && plusconst == 0)
4860 return x;
/* Offset match: (mem (plus (reg) (const_int PLUSCONST))).  */
4862 if (code == MEM && GET_CODE (XEXP (x, 0)) == PLUS
4863 && XEXP (XEXP (x, 0), 0) == reg
4864 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4865 && INTVAL (XEXP (XEXP (x, 0), 1)) == plusconst
4866 return x;
4868 if (code == SIGN_EXTRACT || code == ZERO_EXTRACT)
4870 /* If REG occurs inside a MEM used in a bit-field reference,
4871 that is unacceptable. */
4872 if (find_use_as_address (XEXP (x, 0), reg, 0) != 0)
4873 return (rtx) (HOST_WIDE_INT) 1;
/* A bare use of REG outside an address disqualifies it.  */
4876 if (x == reg)
4877 return (rtx) (HOST_WIDE_INT) 1;
/* Recurse over all operands; more than one hit means "ambiguous".  */
4879 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4881 if (fmt[i] == 'e')
4883 tem = find_use_as_address (XEXP (x, i), reg, plusconst);
4884 if (value == 0)
4885 value = tem;
4886 else if (tem != 0)
4887 return (rtx) (HOST_WIDE_INT) 1;
/* Not "else if": harmless, since a format code is never both 'e' and 'E'.  */
4889 if (fmt[i] == 'E')
4891 register int j;
4892 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4894 tem = find_use_as_address (XVECEXP (x, i, j), reg, plusconst);
4895 if (value == 0)
4896 value = tem;
4897 else if (tem != 0)
4898 return (rtx) (HOST_WIDE_INT) 1;
4903 return value;
4906 /* Write information about registers and basic blocks into FILE.
4907 This is part of making a debugging dump. */
4909 void
4910 dump_flow_info (file)
4911 FILE *file;
4913 register int i;
4914 static const char * const reg_class_names[] = REG_CLASS_NAMES;
/* First, one line of statistics per pseudo register that is used.  */
4916 fprintf (file, "%d registers.\n", max_regno);
4917 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
4918 if (REG_N_REFS (i))
4920 enum reg_class class, altclass;
4921 fprintf (file, "\nRegister %d used %d times across %d insns",
4922 i, REG_N_REFS (i), REG_LIVE_LENGTH (i));
4923 if (REG_BASIC_BLOCK (i) >= 0)
4924 fprintf (file, " in block %d", REG_BASIC_BLOCK (i));
4925 if (REG_N_SETS (i))
4926 fprintf (file, "; set %d time%s", REG_N_SETS (i),
4927 (REG_N_SETS (i) == 1) ? "" : "s");
4928 if (REG_USERVAR_P (regno_reg_rtx[i]))
4929 fprintf (file, "; user var");
/* Note: this deliberately also reports a reg that dies in 0 places.  */
4930 if (REG_N_DEATHS (i) != 1)
4931 fprintf (file, "; dies in %d places", REG_N_DEATHS (i));
4932 if (REG_N_CALLS_CROSSED (i) == 1)
4933 fprintf (file, "; crosses 1 call");
4934 else if (REG_N_CALLS_CROSSED (i))
4935 fprintf (file, "; crosses %d calls", REG_N_CALLS_CROSSED (i));
4936 if (PSEUDO_REGNO_BYTES (i) != UNITS_PER_WORD)
4937 fprintf (file, "; %d bytes", PSEUDO_REGNO_BYTES (i));
4938 class = reg_preferred_class (i);
4939 altclass = reg_alternate_class (i);
4940 if (class != GENERAL_REGS || altclass != ALL_REGS)
4942 if (altclass == ALL_REGS || class == ALL_REGS)
4943 fprintf (file, "; pref %s", reg_class_names[(int) class]);
4944 else if (altclass == NO_REGS)
4945 fprintf (file, "; %s or none", reg_class_names[(int) class]);
4946 else
4947 fprintf (file, "; pref %s, else %s",
4948 reg_class_names[(int) class],
4949 reg_class_names[(int) altclass]);
4951 if (REGNO_POINTER_FLAG (i))
4952 fprintf (file, "; pointer");
4953 fprintf (file, ".\n");
/* Then the CFG: per-block predecessors, successors and live reg sets.  */
4956 fprintf (file, "\n%d basic blocks, %d edges.\n", n_basic_blocks, n_edges);
4957 for (i = 0; i < n_basic_blocks; i++)
4959 register basic_block bb = BASIC_BLOCK (i);
4960 register int regno;
4961 register edge e;
4963 fprintf (file, "\nBasic block %d: first insn %d, last %d.\n",
4964 i, INSN_UID (bb->head), INSN_UID (bb->end));
4966 fprintf (file, "Predecessors: ");
4967 for (e = bb->pred; e ; e = e->pred_next)
4968 dump_edge_info (file, e, 0);
4970 fprintf (file, "\nSuccessors: ");
4971 for (e = bb->succ; e ; e = e->succ_next)
4972 dump_edge_info (file, e, 1);
4974 fprintf (file, "\nRegisters live at start:");
4975 if (bb->global_live_at_start)
4977 for (regno = 0; regno < max_regno; regno++)
4978 if (REGNO_REG_SET_P (bb->global_live_at_start, regno))
4979 fprintf (file, " %d", regno);
4981 else
4982 fprintf (file, " n/a");
4984 fprintf (file, "\nRegisters live at end:");
4985 if (bb->global_live_at_end)
4987 for (regno = 0; regno < max_regno; regno++)
4988 if (REGNO_REG_SET_P (bb->global_live_at_end, regno))
4989 fprintf (file, " %d", regno);
4991 else
4992 fprintf (file, " n/a");
4994 putc('\n', file);
4997 putc('\n', file);
/* Dump the flow information to stderr; convenient to call from a
   debugger.  */
5000 void
5001 debug_flow_info ()
5003 dump_flow_info (stderr);
/* Print one edge E of the CFG to FILE.  DO_SUCC nonzero means E is
   being printed from the successor side (print its destination);
   zero means the predecessor side (print its source).  Also prints
   the edge's flag bits symbolically.  */
5006 static void
5007 dump_edge_info (file, e, do_succ)
5008 FILE *file;
5009 edge e;
5010 int do_succ;
5012 basic_block side = (do_succ ? e->dest : e->src);
5014 if (side == ENTRY_BLOCK_PTR)
5015 fputs (" ENTRY", file);
5016 else if (side == EXIT_BLOCK_PTR)
5017 fputs (" EXIT", file);
5018 else
5019 fprintf (file, " %d", side->index);
5021 if (e->flags)
/* Names for the EDGE_* flag bits, in bit order; unknown bits are
   printed numerically below.  */
5023 static const char * const bitnames[] = {
5024 "fallthru", "crit", "ab", "abcall", "eh", "fake"
5026 int comma = 0;
5027 int i, flags = e->flags;
5029 fputc (' ', file);
5030 fputc ('(', file);
5031 for (i = 0; flags; i++)
5032 if (flags & (1 << i))
5034 flags &= ~(1 << i);
5036 if (comma)
5037 fputc (',', file);
5038 if (i < (int)(sizeof (bitnames) / sizeof (*bitnames)))
5039 fputs (bitnames[i], file);
5040 else
5041 fprintf (file, "%d", i);
5042 comma = 1;
5044 fputc (')', file);
5049 /* Like print_rtl, but also print out live information for the start of each
5050 basic block. */
5052 void
5053 print_rtl_with_bb (outf, rtx_first)
5054 FILE *outf;
5055 rtx rtx_first;
5057 register rtx tmp_rtx;
5059 if (rtx_first == 0)
5060 fprintf (outf, "(nil)\n");
5061 else
5063 int i;
5064 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
5065 int max_uid = get_max_uid ();
/* Per-insn-uid maps: which block starts/ends at this insn, and how
   many blocks each insn belongs to.  */
5066 basic_block *start = (basic_block *)
5067 xcalloc (max_uid, sizeof (basic_block));
5068 basic_block *end = (basic_block *)
5069 xcalloc (max_uid, sizeof (basic_block));
5070 enum bb_state *in_bb_p = (enum bb_state *)
5071 xcalloc (max_uid, sizeof (enum bb_state));
/* First pass: record block boundaries and membership per insn.  */
5073 for (i = n_basic_blocks - 1; i >= 0; i--)
5075 basic_block bb = BASIC_BLOCK (i);
5076 rtx x;
5078 start[INSN_UID (bb->head)] = bb;
5079 end[INSN_UID (bb->end)] = bb;
5080 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
5082 enum bb_state state = IN_MULTIPLE_BB;
5083 if (in_bb_p[INSN_UID(x)] == NOT_IN_BB)
5084 state = IN_ONE_BB;
5085 in_bb_p[INSN_UID(x)] = state;
5087 if (x == bb->end)
5088 break;
/* Second pass: print the insn stream with block annotations.  */
5092 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
5094 int did_output;
5095 basic_block bb;
5097 if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
5099 fprintf (outf, ";; Start of basic block %d, registers live:",
5100 bb->index);
5102 EXECUTE_IF_SET_IN_REG_SET (bb->global_live_at_start, 0, i,
5104 fprintf (outf, " %d", i);
5105 if (i < FIRST_PSEUDO_REGISTER)
5106 fprintf (outf, " [%s]",
5107 reg_names[i]);
5109 putc ('\n', outf);
5112 if (in_bb_p[INSN_UID(tmp_rtx)] == NOT_IN_BB
5113 && GET_CODE (tmp_rtx) != NOTE
5114 && GET_CODE (tmp_rtx) != BARRIER
5115 && ! obey_regdecls)
5116 fprintf (outf, ";; Insn is not within a basic block\n");
5117 else if (in_bb_p[INSN_UID(tmp_rtx)] == IN_MULTIPLE_BB)
5118 fprintf (outf, ";; Insn is in multiple basic blocks\n");
5120 did_output = print_rtl_single (outf, tmp_rtx);
5122 if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
5123 fprintf (outf, ";; End of basic block %d\n", bb->index);
5125 if (did_output)
5126 putc ('\n', outf);
5129 free (start);
5130 free (end);
5131 free (in_bb_p);
/* Finally, print any insns queued on the epilogue delay list.  */
5134 if (current_function_epilogue_delay_list != 0)
5136 fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
5137 for (tmp_rtx = current_function_epilogue_delay_list; tmp_rtx != 0;
5138 tmp_rtx = XEXP (tmp_rtx, 1))
5139 print_rtl_single (outf, XEXP (tmp_rtx, 0));
5144 /* Integer list support. */
5146 /* Allocate a node from list *HEAD_PTR. */
/* Nodes are carved out of block-allocated arrays (INT_LIST_NODES_IN_BLK
   nodes per block) to avoid one malloc per node; a fresh block is
   pushed onto *HEAD_PTR when the current one is exhausted.  */
5148 static int_list_ptr
5149 alloc_int_list_node (head_ptr)
5150 int_list_block **head_ptr;
5152 struct int_list_block *first_blk = *head_ptr;
5154 if (first_blk == NULL || first_blk->nodes_left <= 0)
5156 first_blk = (struct int_list_block *) xmalloc (sizeof (struct int_list_block));
5157 first_blk->nodes_left = INT_LIST_NODES_IN_BLK;
5158 first_blk->next = *head_ptr;
5159 *head_ptr = first_blk;
/* Hand out nodes from the end of the block toward the start.  */
5162 first_blk->nodes_left--;
5163 return &first_blk->nodes[first_blk->nodes_left];
5166 /* Pointer to head of predecessor/successor block list. */
5167 static int_list_block *pred_int_list_blocks;
5169 /* Add a new node to integer list LIST with value VAL.
5170 LIST is a pointer to a list object to allow for different implementations.
5171 If *LIST is initially NULL, the list is empty.
5172 The caller must not care whether the element is added to the front or
5173 to the end of the list (to allow for different implementations). */
5175 static int_list_ptr
5176 add_int_list_node (blk_list, list, val)
5177 int_list_block **blk_list;
5178 int_list **list;
5179 int val;
5181 int_list_ptr p = alloc_int_list_node (blk_list);
/* This implementation pushes onto the front of the list.  */
5183 p->val = val;
5184 p->next = *list;
5185 *list = p;
5186 return p;
5189 /* Free the blocks of lists at BLK_LIST. */
/* Note this releases whole allocation blocks, not individual nodes;
   every list carved from these blocks becomes invalid.  */
5191 void
5192 free_int_list (blk_list)
5193 int_list_block **blk_list;
5195 int_list_block *p, *next;
5197 for (p = *blk_list; p != NULL; p = next)
5199 next = p->next;
5200 free (p);
5203 /* Mark list as empty for the next function we compile. */
5204 *blk_list = NULL;
5207 /* Predecessor/successor computation. */
5209 /* Mark PRED_BB a predecessor of SUCC_BB,
5210 and conversely SUCC_BB a successor of PRED_BB. */
5212 static void
5213 add_pred_succ (pred_bb, succ_bb, s_preds, s_succs, num_preds, num_succs)
5214 int pred_bb;
5215 int succ_bb;
5216 int_list_ptr *s_preds;
5217 int_list_ptr *s_succs;
5218 int *num_preds;
5219 int *num_succs;
/* The EXIT block keeps no pred list and the ENTRY block keeps no succ
   list, so edges touching them are recorded on one side only.  */
5221 if (succ_bb != EXIT_BLOCK)
5223 add_int_list_node (&pred_int_list_blocks, &s_preds[succ_bb], pred_bb);
5224 num_preds[succ_bb]++;
5226 if (pred_bb != ENTRY_BLOCK)
5228 add_int_list_node (&pred_int_list_blocks, &s_succs[pred_bb], succ_bb);
5229 num_succs[pred_bb]++;
5233 /* Convert edge lists into pred/succ lists for backward compatibility. */
/* S_PREDS, S_SUCCS, NUM_PREDS and NUM_SUCCS are caller-provided arrays
   of length n_basic_blocks; they are cleared here and then filled from
   the edge structures hanging off each basic block.  */
5235 void
5236 compute_preds_succs (s_preds, s_succs, num_preds, num_succs)
5237 int_list_ptr *s_preds;
5238 int_list_ptr *s_succs;
5239 int *num_preds;
5240 int *num_succs;
5242 int i, n = n_basic_blocks;
5243 edge e;
5245 memset (s_preds, 0, n_basic_blocks * sizeof (int_list_ptr));
5246 memset (s_succs, 0, n_basic_blocks * sizeof (int_list_ptr));
5247 memset (num_preds, 0, n_basic_blocks * sizeof (int));
5248 memset (num_succs, 0, n_basic_blocks * sizeof (int));
/* Walking only successor edges covers every edge exactly once.  */
5250 for (i = 0; i < n; ++i)
5252 basic_block bb = BASIC_BLOCK (i);
5254 for (e = bb->succ; e ; e = e->succ_next)
5255 add_pred_succ (i, e->dest->index, s_preds, s_succs,
5256 num_preds, num_succs);
/* The ENTRY block's edges are not reached by the loop above.  */
5259 for (e = ENTRY_BLOCK_PTR->succ; e ; e = e->succ_next)
5260 add_pred_succ (ENTRY_BLOCK, e->dest->index, s_preds, s_succs,
5261 num_preds, num_succs);
/* Dump the pred/succ lists PREDS and SUCCS for every basic block to
   FILE.  If LIVE_INFO is nonzero, also dump each block's
   global_live_at_start register set.  Debugging aid.  */
5264 void
5265 dump_bb_data (file, preds, succs, live_info)
5266 FILE *file;
5267 int_list_ptr *preds;
5268 int_list_ptr *succs;
5269 int live_info;
5271 int bb;
5272 int_list_ptr p;
5274 fprintf (file, "BB data\n\n");
5275 for (bb = 0; bb < n_basic_blocks; bb++)
5277 fprintf (file, "BB %d, start %d, end %d\n", bb,
5278 INSN_UID (BLOCK_HEAD (bb)), INSN_UID (BLOCK_END (bb)));
5279 fprintf (file, " preds:");
5280 for (p = preds[bb]; p != NULL; p = p->next)
5282 int pred_bb = INT_LIST_VAL (p);
5283 if (pred_bb == ENTRY_BLOCK)
5284 fprintf (file, " entry");
5285 else
5286 fprintf (file, " %d", pred_bb);
5288 fprintf (file, "\n");
5289 fprintf (file, " succs:");
5290 for (p = succs[bb]; p != NULL; p = p->next)
5292 int succ_bb = INT_LIST_VAL (p);
5293 if (succ_bb == EXIT_BLOCK)
5294 fprintf (file, " exit");
5295 else
5296 fprintf (file, " %d", succ_bb);
5298 if (live_info)
5300 int regno;
5301 fprintf (file, "\nRegisters live at start:");
5302 for (regno = 0; regno < max_regno; regno++)
5303 if (REGNO_REG_SET_P (BASIC_BLOCK (bb)->global_live_at_start, regno))
5304 fprintf (file, " %d", regno);
5305 fprintf (file, "\n");
5307 fprintf (file, "\n");
5309 fprintf (file, "\n");
5312 /* Free basic block data storage. */
/* Releases the block-allocated pred/succ integer lists; all lists
   built by compute_preds_succs become invalid afterwards.  */
5314 void
5315 free_bb_mem ()
5317 free_int_list (&pred_int_list_blocks);
5320 /* Compute dominator relationships using new flow graph structures. */
5321 void
5322 compute_flow_dominators (dominators, post_dominators)
5323 sbitmap *dominators;
5324 sbitmap *post_dominators;
5326 int bb;
5327 sbitmap *temp_bitmap;
5328 edge e;
5329 basic_block *worklist, *tos;
5331 /* Allocate a worklist array/queue. Entries are only added to the
5332 list if they were not already on the list. So the size is
5333 bounded by the number of basic blocks. */
5334 tos = worklist = (basic_block *) xmalloc (sizeof (basic_block)
5335 * n_basic_blocks);
5337 temp_bitmap = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
5338 sbitmap_vector_zero (temp_bitmap, n_basic_blocks);
5340 if (dominators)
5342 /* The optimistic setting of dominators requires us to put every
5343 block on the work list initially. */
5344 for (bb = 0; bb < n_basic_blocks; bb++)
5346 *tos++ = BASIC_BLOCK (bb);
5347 BASIC_BLOCK (bb)->aux = BASIC_BLOCK (bb);
5350 /* We want a maximal solution, so initially assume everything dominates
5351 everything else. */
5352 sbitmap_vector_ones (dominators, n_basic_blocks);
5354 /* Mark successors of the entry block so we can identify them below. */
5355 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
5356 e->dest->aux = ENTRY_BLOCK_PTR;
5358 /* Iterate until the worklist is empty. */
5359 while (tos != worklist)
5361 /* Take the first entry off the worklist. */
5362 basic_block b = *--tos;
5363 bb = b->index;
5365 /* Compute the intersection of the dominators of all the
5366 predecessor blocks.
5368 If one of the predecessor blocks is the ENTRY block, then the
5369 intersection of the dominators of the predecessor blocks is
5370 defined as the null set. We can identify such blocks by the
5371 special value in the AUX field in the block structure. */
5372 if (b->aux == ENTRY_BLOCK_PTR)
5374 /* Do not clear the aux field for blocks which are
5375 successors of the ENTRY block. That way we we never
5376 add them to the worklist again.
5378 The intersect of dominators of the preds of this block is
5379 defined as the null set. */
5380 sbitmap_zero (temp_bitmap[bb]);
5382 else
5384 /* Clear the aux field of this block so it can be added to
5385 the worklist again if necessary. */
5386 b->aux = NULL;
5387 sbitmap_intersection_of_preds (temp_bitmap[bb], dominators, bb);
5390 /* Make sure each block always dominates itself. */
5391 SET_BIT (temp_bitmap[bb], bb);
5393 /* If the out state of this block changed, then we need to
5394 add the successors of this block to the worklist if they
5395 are not already on the worklist. */
5396 if (sbitmap_a_and_b (dominators[bb], dominators[bb], temp_bitmap[bb]))
5398 for (e = b->succ; e; e = e->succ_next)
5400 if (!e->dest->aux && e->dest != EXIT_BLOCK_PTR)
5402 *tos++ = e->dest;
5403 e->dest->aux = e;
5410 if (post_dominators)
5412 /* The optimistic setting of dominators requires us to put every
5413 block on the work list initially. */
5414 for (bb = 0; bb < n_basic_blocks; bb++)
5416 *tos++ = BASIC_BLOCK (bb);
5417 BASIC_BLOCK (bb)->aux = BASIC_BLOCK (bb);
5420 /* We want a maximal solution, so initially assume everything post
5421 dominates everything else. */
5422 sbitmap_vector_ones (post_dominators, n_basic_blocks);
5424 /* Mark predecessors of the exit block so we can identify them below. */
5425 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
5426 e->src->aux = EXIT_BLOCK_PTR;
5428 /* Iterate until the worklist is empty. */
5429 while (tos != worklist)
5431 /* Take the first entry off the worklist. */
5432 basic_block b = *--tos;
5433 bb = b->index;
5435 /* Compute the intersection of the post dominators of all the
5436 successor blocks.
5438 If one of the successor blocks is the EXIT block, then the
5439 intersection of the dominators of the successor blocks is
5440 defined as the null set. We can identify such blocks by the
5441 special value in the AUX field in the block structure. */
5442 if (b->aux == EXIT_BLOCK_PTR)
5444 /* Do not clear the aux field for blocks which are
5445 predecessors of the EXIT block. That way we we never
5446 add them to the worklist again.
5448 The intersect of dominators of the succs of this block is
5449 defined as the null set. */
5450 sbitmap_zero (temp_bitmap[bb]);
5452 else
5454 /* Clear the aux field of this block so it can be added to
5455 the worklist again if necessary. */
5456 b->aux = NULL;
5457 sbitmap_intersection_of_succs (temp_bitmap[bb],
5458 post_dominators, bb);
5461 /* Make sure each block always post dominates itself. */
5462 SET_BIT (temp_bitmap[bb], bb);
5464 /* If the out state of this block changed, then we need to
5465 add the successors of this block to the worklist if they
5466 are not already on the worklist. */
5467 if (sbitmap_a_and_b (post_dominators[bb],
5468 post_dominators[bb],
5469 temp_bitmap[bb]))
5471 for (e = b->pred; e; e = e->pred_next)
5473 if (!e->src->aux && e->src != ENTRY_BLOCK_PTR)
5475 *tos++ = e->src;
5476 e->src->aux = e;
5482 free (temp_bitmap);
5485 /* Given DOMINATORS, compute the immediate dominators into IDOM. */
/* IDOM is a caller-allocated int array of n_basic_blocks entries.
   NOTE(review): a block whose stripped dominator set ends up empty
   (e.g. block 0) leaves its IDOM entry unwritten -- callers should
   pre-initialize IDOM if they rely on such entries.  */
5487 void
5488 compute_immediate_dominators (idom, dominators)
5489 int *idom;
5490 sbitmap *dominators;
5492 sbitmap *tmp;
5493 int b;
5495 tmp = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
5497 /* Begin with tmp(n) = dom(n) - { n }. */
5498 for (b = n_basic_blocks; --b >= 0; )
5500 sbitmap_copy (tmp[b], dominators[b]);
5501 RESET_BIT (tmp[b], b);
5504 /* Subtract out all of our dominator's dominators. */
5505 for (b = n_basic_blocks; --b >= 0; )
5507 sbitmap tmp_b = tmp[b];
5508 int s;
5510 for (s = n_basic_blocks; --s >= 0; )
5511 if (TEST_BIT (tmp_b, s))
5512 sbitmap_difference (tmp_b, tmp_b, tmp[s]);
5515 /* Find the one bit set in the bitmap and put it in the output array. */
5516 for (b = n_basic_blocks; --b >= 0; )
5518 int t;
5519 EXECUTE_IF_SET_IN_SBITMAP (tmp[b], 0, t, { idom[b] = t; });
5522 sbitmap_vector_free (tmp);
5525 /* Count for a single SET rtx, X. */
/* Bumps REG_N_SETS and REG_N_REFS (weighted by loop_depth) for the
   pseudo register(s) stored into by X.  Hard registers are ignored.  */
5527 static void
5528 count_reg_sets_1 (x)
5529 rtx x;
5531 register int regno;
5532 register rtx reg = SET_DEST (x);
5534 /* Find the register that's set/clobbered. */
5535 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
5536 || GET_CODE (reg) == SIGN_EXTRACT
5537 || GET_CODE (reg) == STRICT_LOW_PART)
5538 reg = XEXP (reg, 0);
/* A BLKmode PARALLEL destination sets several registers at once;
   count each element separately.  */
5540 if (GET_CODE (reg) == PARALLEL
5541 && GET_MODE (reg) == BLKmode)
5543 register int i;
5544 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
5545 count_reg_sets_1 (XVECEXP (reg, 0, i));
5546 return;
5549 if (GET_CODE (reg) == REG)
5551 regno = REGNO (reg);
5552 if (regno >= FIRST_PSEUDO_REGISTER)
5554 /* Count (weighted) references, stores, etc. This counts a
5555 register twice if it is modified, but that is correct. */
5556 REG_N_SETS (regno)++;
5558 REG_N_REFS (regno) += loop_depth;
5563 /* Increment REG_N_SETS for each SET or CLOBBER found in X; also increment
5564 REG_N_REFS by the current loop depth for each SET or CLOBBER found. */
5566 static void
5567 count_reg_sets (x)
5568 rtx x;
5570 register RTX_CODE code = GET_CODE (x);
5572 if (code == SET || code == CLOBBER)
5573 count_reg_sets_1 (x);
5574 else if (code == PARALLEL)
5576 register int i;
5577 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
5579 code = GET_CODE (XVECEXP (x, 0, i));
5580 if (code == SET || code == CLOBBER)
5581 count_reg_sets_1 (XVECEXP (x, 0, i));
5586 /* Increment REG_N_REFS by the current loop depth each register reference
5587 found in X. */
5589 static void
5590 count_reg_references (x)
5591 rtx x;
5593 register RTX_CODE code;
5595 retry:
5596 code = GET_CODE (x);
5597 switch (code)
5599 case LABEL_REF:
5600 case SYMBOL_REF:
5601 case CONST_INT:
5602 case CONST:
5603 case CONST_DOUBLE:
5604 case PC:
5605 case ADDR_VEC:
5606 case ADDR_DIFF_VEC:
5607 case ASM_INPUT:
5608 return;
5610 #ifdef HAVE_cc0
5611 case CC0:
5612 return;
5613 #endif
5615 case CLOBBER:
5616 /* If we are clobbering a MEM, mark any registers inside the address
5617 as being used. */
5618 if (GET_CODE (XEXP (x, 0)) == MEM)
5619 count_reg_references (XEXP (XEXP (x, 0), 0));
5620 return;
5622 case SUBREG:
5623 /* While we're here, optimize this case. */
5624 x = SUBREG_REG (x);
5626 /* In case the SUBREG is not of a register, don't optimize */
5627 if (GET_CODE (x) != REG)
5629 count_reg_references (x);
5630 return;
5633 /* ... fall through ... */
5635 case REG:
5636 if (REGNO (x) >= FIRST_PSEUDO_REGISTER)
5637 REG_N_REFS (REGNO (x)) += loop_depth;
5638 return;
5640 case SET:
5642 register rtx testreg = SET_DEST (x);
5643 int mark_dest = 0;
5645 /* If storing into MEM, don't show it as being used. But do
5646 show the address as being used. */
5647 if (GET_CODE (testreg) == MEM)
5649 count_reg_references (XEXP (testreg, 0));
5650 count_reg_references (SET_SRC (x));
5651 return;
5654 /* Storing in STRICT_LOW_PART is like storing in a reg
5655 in that this SET might be dead, so ignore it in TESTREG.
5656 but in some other ways it is like using the reg.
5658 Storing in a SUBREG or a bit field is like storing the entire
5659 register in that if the register's value is not used
5660 then this SET is not needed. */
5661 while (GET_CODE (testreg) == STRICT_LOW_PART
5662 || GET_CODE (testreg) == ZERO_EXTRACT
5663 || GET_CODE (testreg) == SIGN_EXTRACT
5664 || GET_CODE (testreg) == SUBREG)
5666 /* Modifying a single register in an alternate mode
5667 does not use any of the old value. But these other
5668 ways of storing in a register do use the old value. */
5669 if (GET_CODE (testreg) == SUBREG
5670 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
5672 else
5673 mark_dest = 1;
5675 testreg = XEXP (testreg, 0);
5678 /* If this is a store into a register,
5679 recursively scan the value being stored. */
5681 if ((GET_CODE (testreg) == PARALLEL
5682 && GET_MODE (testreg) == BLKmode)
5683 || GET_CODE (testreg) == REG)
5685 count_reg_references (SET_SRC (x));
5686 if (mark_dest)
5687 count_reg_references (SET_DEST (x));
5688 return;
5691 break;
5693 default:
5694 break;
5697 /* Recursively scan the operands of this expression. */
5700 register const char *fmt = GET_RTX_FORMAT (code);
5701 register int i;
5703 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5705 if (fmt[i] == 'e')
5707 /* Tail recursive case: save a function call level. */
5708 if (i == 0)
5710 x = XEXP (x, 0);
5711 goto retry;
5713 count_reg_references (XEXP (x, i));
5715 else if (fmt[i] == 'E')
5717 register int j;
5718 for (j = 0; j < XVECLEN (x, i); j++)
5719 count_reg_references (XVECEXP (x, i, j));
5725 /* Recompute register set/reference counts immediately prior to register
5726 allocation.
5728 This avoids problems with set/reference counts changing to/from values
5729 which have special meanings to the register allocators.
5731 Additionally, the reference counts are the primary component used by the
5732 register allocators to prioritize pseudos for allocation to hard regs.
5733 More accurate reference counts generally lead to better register allocation.
5735 F is the first insn to be scanned.
5736 LOOP_STEP denotes how much loop_depth should be incremented per
5737 loop nesting level in order to increase the ref count more for references
5738 in a loop.
5740 It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and
5741 possibly other information which is used by the register allocators. */
5743 void
5744 recompute_reg_usage (f, loop_step)
5745 rtx f;
5746 int loop_step;
5748 rtx insn;
5749 int i, max_reg;
5751 /* Clear out the old data. */
5752 max_reg = max_reg_num ();
5753 for (i = FIRST_PSEUDO_REGISTER; i < max_reg; i++)
5755 REG_N_SETS (i) = 0;
5756 REG_N_REFS (i) = 0;
5759 /* Scan each insn in the chain and count how many times each register is
5760 set/used. */
5761 loop_depth = 1;
5762 for (insn = f; insn; insn = NEXT_INSN (insn))
5764 /* Keep track of loop depth. */
5765 if (GET_CODE (insn) == NOTE)
5767 /* Look for loop boundaries. */
5768 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END)
5769 loop_depth -= loop_step;
5770 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
5771 loop_depth += loop_step;
5773 /* If we have LOOP_DEPTH == 0, there has been a bookkeeping error.
5774 Abort now rather than setting register status incorrectly. */
5775 if (loop_depth == 0)
5776 abort ();
5778 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
5780 rtx links;
5782 /* This call will increment REG_N_SETS for each SET or CLOBBER
5783 of a register in INSN. It will also increment REG_N_REFS
5784 by the loop depth for each set of a register in INSN. */
5785 count_reg_sets (PATTERN (insn));
5787 /* count_reg_sets does not detect autoincrement address modes, so
5788 detect them here by looking at the notes attached to INSN. */
5789 for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
5791 if (REG_NOTE_KIND (links) == REG_INC)
5792 /* Count (weighted) references, stores, etc. This counts a
5793 register twice if it is modified, but that is correct. */
5794 REG_N_SETS (REGNO (XEXP (links, 0)))++;
5797 /* This call will increment REG_N_REFS by the current loop depth for
5798 each reference to a register in INSN. */
5799 count_reg_references (PATTERN (insn));
5801 /* count_reg_references will not include counts for arguments to
5802 function calls, so detect them here by examining the
5803 CALL_INSN_FUNCTION_USAGE data. */
5804 if (GET_CODE (insn) == CALL_INSN)
5806 rtx note;
5808 for (note = CALL_INSN_FUNCTION_USAGE (insn);
5809 note;
5810 note = XEXP (note, 1))
5811 if (GET_CODE (XEXP (note, 0)) == USE)
5812 count_reg_references (XEXP (XEXP (note, 0), 0));
5818 /* Optionally removes all the REG_DEAD and REG_UNUSED notes from a set of
5819 blocks. If BLOCKS is NULL, assume the universal set. Returns a count
5820 of the number of registers that died. */
5823 count_or_remove_death_notes (blocks, kill)
5824 sbitmap blocks;
5825 int kill;
5827 int i, count = 0;
5829 for (i = n_basic_blocks - 1; i >= 0; --i)
5831 basic_block bb;
5832 rtx insn;
5834 if (blocks && ! TEST_BIT (blocks, i))
5835 continue;
5837 bb = BASIC_BLOCK (i);
5839 for (insn = bb->head; ; insn = NEXT_INSN (insn))
5841 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
5843 rtx *pprev = &REG_NOTES (insn);
5844 rtx link = *pprev;
5846 while (link)
5848 switch (REG_NOTE_KIND (link))
5850 case REG_DEAD:
5851 if (GET_CODE (XEXP (link, 0)) == REG)
5853 rtx reg = XEXP (link, 0);
5854 int n;
5856 if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
5857 n = 1;
5858 else
5859 n = HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg));
5860 count += n;
5862 /* FALLTHRU */
5864 case REG_UNUSED:
5865 if (kill)
5867 rtx next = XEXP (link, 1);
5868 free_EXPR_LIST_node (link);
5869 *pprev = link = next;
5870 break;
5872 /* FALLTHRU */
5874 default:
5875 pprev = &XEXP (link, 1);
5876 link = *pprev;
5877 break;
5882 if (insn == bb->end)
5883 break;
5887 return count;
5890 /* Record INSN's block as BB. */
5892 void
5893 set_block_for_insn (insn, bb)
5894 rtx insn;
5895 basic_block bb;
5897 size_t uid = INSN_UID (insn);
5898 if (uid >= basic_block_for_insn->num_elements)
5900 int new_size;
5902 /* Add one-eighth the size so we don't keep calling xrealloc. */
5903 new_size = uid + (uid + 7) / 8;
5905 VARRAY_GROW (basic_block_for_insn, new_size);
5907 VARRAY_BB (basic_block_for_insn, uid) = bb;
5910 /* Record INSN's block number as BB. */
5911 /* ??? This has got to go. */
5913 void
5914 set_block_num (insn, bb)
5915 rtx insn;
5916 int bb;
5918 set_block_for_insn (insn, BASIC_BLOCK (bb));
5921 /* Verify the CFG consistency. This function check some CFG invariants and
5922 aborts when something is wrong. Hope that this function will help to
5923 convert many optimization passes to preserve CFG consistent.
5925 Currently it does following checks:
5927 - test head/end pointers
5928 - overlapping of basic blocks
5929 - edge list corectness
5930 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
5931 - tails of basic blocks (ensure that boundary is necesary)
5932 - scans body of the basic block for JUMP_INSN, CODE_LABEL
5933 and NOTE_INSN_BASIC_BLOCK
5934 - check that all insns are in the basic blocks
5935 (except the switch handling code, barriers and notes)
5937 In future it can be extended check a lot of other stuff as well
5938 (reachability of basic blocks, life information, etc. etc.). */
5940 void
5941 verify_flow_info ()
5943 const int max_uid = get_max_uid ();
5944 const rtx rtx_first = get_insns ();
5945 basic_block *bb_info;
5946 rtx x;
5947 int i, err = 0;
5949 bb_info = (basic_block *) xcalloc (max_uid, sizeof (basic_block));
5951 /* First pass check head/end pointers and set bb_info array used by
5952 later passes. */
5953 for (i = n_basic_blocks - 1; i >= 0; i--)
5955 basic_block bb = BASIC_BLOCK (i);
5957 /* Check the head pointer and make sure that it is pointing into
5958 insn list. */
5959 for (x = rtx_first; x != NULL_RTX; x = NEXT_INSN (x))
5960 if (x == bb->head)
5961 break;
5962 if (!x)
5964 error ("Head insn %d for block %d not found in the insn stream.",
5965 INSN_UID (bb->head), bb->index);
5966 err = 1;
5969 /* Check the end pointer and make sure that it is pointing into
5970 insn list. */
5971 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
5973 if (bb_info[INSN_UID (x)] != NULL)
5975 error ("Insn %d is in multiple basic blocks (%d and %d)",
5976 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
5977 err = 1;
5979 bb_info[INSN_UID (x)] = bb;
5981 if (x == bb->end)
5982 break;
5984 if (!x)
5986 error ("End insn %d for block %d not found in the insn stream.",
5987 INSN_UID (bb->end), bb->index);
5988 err = 1;
5992 /* Now check the basic blocks (boundaries etc.) */
5993 for (i = n_basic_blocks - 1; i >= 0; i--)
5995 basic_block bb = BASIC_BLOCK (i);
5996 /* Check corectness of edge lists */
5997 edge e;
5999 e = bb->succ;
6000 while (e)
6002 if (e->src != bb)
6004 fprintf (stderr, "verify_flow_info: Basic block %d succ edge is corrupted\n",
6005 bb->index);
6006 fprintf (stderr, "Predecessor: ");
6007 dump_edge_info (stderr, e, 0);
6008 fprintf (stderr, "\nSuccessor: ");
6009 dump_edge_info (stderr, e, 1);
6010 fflush (stderr);
6011 err = 1;
6013 if (e->dest != EXIT_BLOCK_PTR)
6015 edge e2 = e->dest->pred;
6016 while (e2 && e2 != e)
6017 e2 = e2->pred_next;
6018 if (!e2)
6020 error ("Basic block %i edge lists are corrupted", bb->index);
6021 err = 1;
6024 e = e->succ_next;
6027 e = bb->pred;
6028 while (e)
6030 if (e->dest != bb)
6032 error ("Basic block %d pred edge is corrupted", bb->index);
6033 fputs ("Predecessor: ", stderr);
6034 dump_edge_info (stderr, e, 0);
6035 fputs ("\nSuccessor: ", stderr);
6036 dump_edge_info (stderr, e, 1);
6037 fputc ('\n', stderr);
6038 err = 1;
6040 if (e->src != ENTRY_BLOCK_PTR)
6042 edge e2 = e->src->succ;
6043 while (e2 && e2 != e)
6044 e2 = e2->succ_next;
6045 if (!e2)
6047 error ("Basic block %i edge lists are corrupted", bb->index);
6048 err = 1;
6051 e = e->pred_next;
6054 /* OK pointers are correct. Now check the header of basic
6055 block. It ought to contain optional CODE_LABEL followed
6056 by NOTE_BASIC_BLOCK. */
6057 x = bb->head;
6058 if (GET_CODE (x) == CODE_LABEL)
6060 if (bb->end == x)
6062 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
6063 bb->index);
6064 err = 1;
6066 x = NEXT_INSN (x);
6068 if (GET_CODE (x) != NOTE
6069 || NOTE_LINE_NUMBER (x) != NOTE_INSN_BASIC_BLOCK
6070 || NOTE_BASIC_BLOCK (x) != bb)
6072 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d\n",
6073 bb->index);
6074 err = 1;
6077 if (bb->end == x)
6079 /* Do checks for empty blocks here */
6081 else
6083 x = NEXT_INSN (x);
6084 while (x)
6086 if (GET_CODE (x) == NOTE
6087 && NOTE_LINE_NUMBER (x) == NOTE_INSN_BASIC_BLOCK)
6089 error ("NOTE_INSN_BASIC_BLOCK %d in the middle of basic block %d",
6090 INSN_UID (x), bb->index);
6091 err = 1;
6094 if (x == bb->end)
6095 break;
6097 if (GET_CODE (x) == JUMP_INSN
6098 || GET_CODE (x) == CODE_LABEL
6099 || GET_CODE (x) == BARRIER)
6101 error ("In basic block %d:", bb->index);
6102 fatal_insn ("Flow control insn inside a basic block", x);
6105 x = NEXT_INSN (x);
6110 x = rtx_first;
6111 while (x)
6113 if (!bb_info[INSN_UID (x)])
6115 switch (GET_CODE (x))
6117 case BARRIER:
6118 case NOTE:
6119 break;
6121 case CODE_LABEL:
6122 /* An addr_vec is placed outside any block block. */
6123 if (NEXT_INSN (x)
6124 && GET_CODE (NEXT_INSN (x)) == JUMP_INSN
6125 && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
6126 || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC))
6128 x = NEXT_INSN (x);
6131 /* But in any case, non-deletable labels can appear anywhere. */
6132 break;
6134 default:
6135 fatal_insn ("Insn outside basic block", x);
6139 x = NEXT_INSN (x);
6142 if (err)
6143 abort ();
6145 /* Clean up. */
6146 free (bb_info);
6149 /* Functions to access an edge list with a vector representation.
6150 Enough data is kept such that given an index number, the
6151 pred and succ that edge reprsents can be determined, or
6152 given a pred and a succ, it's index number can be returned.
6153 This allows algorithms which comsume a lot of memory to
6154 represent the normally full matrix of edge (pred,succ) with a
6155 single indexed vector, edge (EDGE_INDEX (pred, succ)), with no
6156 wasted space in the client code due to sparse flow graphs. */
6158 /* This functions initializes the edge list. Basically the entire
6159 flowgraph is processed, and all edges are assigned a number,
6160 and the data structure is filed in. */
6161 struct edge_list *
6162 create_edge_list ()
6164 struct edge_list *elist;
6165 edge e;
6166 int num_edges;
6167 int x;
6168 int block_count;
6170 block_count = n_basic_blocks + 2; /* Include the entry and exit blocks. */
6172 num_edges = 0;
6174 /* Determine the number of edges in the flow graph by counting successor
6175 edges on each basic block. */
6176 for (x = 0; x < n_basic_blocks; x++)
6178 basic_block bb = BASIC_BLOCK (x);
6180 for (e = bb->succ; e; e = e->succ_next)
6181 num_edges++;
6183 /* Don't forget successors of the entry block. */
6184 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6185 num_edges++;
6187 elist = xmalloc (sizeof (struct edge_list));
6188 elist->num_blocks = block_count;
6189 elist->num_edges = num_edges;
6190 elist->index_to_edge = xmalloc (sizeof (edge) * num_edges);
6192 num_edges = 0;
6194 /* Follow successors of the entry block, and register these edges. */
6195 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6197 elist->index_to_edge[num_edges] = e;
6198 num_edges++;
6201 for (x = 0; x < n_basic_blocks; x++)
6203 basic_block bb = BASIC_BLOCK (x);
6205 /* Follow all successors of blocks, and register these edges. */
6206 for (e = bb->succ; e; e = e->succ_next)
6208 elist->index_to_edge[num_edges] = e;
6209 num_edges++;
6212 return elist;
6215 /* This function free's memory associated with an edge list. */
6216 void
6217 free_edge_list (elist)
6218 struct edge_list *elist;
6220 if (elist)
6222 free (elist->index_to_edge);
6223 free (elist);
6227 /* This function provides debug output showing an edge list. */
6228 void
6229 print_edge_list (f, elist)
6230 FILE *f;
6231 struct edge_list *elist;
6233 int x;
6234 fprintf(f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
6235 elist->num_blocks - 2, elist->num_edges);
6237 for (x = 0; x < elist->num_edges; x++)
6239 fprintf (f, " %-4d - edge(", x);
6240 if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR)
6241 fprintf (f,"entry,");
6242 else
6243 fprintf (f,"%d,", INDEX_EDGE_PRED_BB (elist, x)->index);
6245 if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR)
6246 fprintf (f,"exit)\n");
6247 else
6248 fprintf (f,"%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
6252 /* This function provides an internal consistancy check of an edge list,
6253 verifying that all edges are present, and that there are no
6254 extra edges. */
6255 void
6256 verify_edge_list (f, elist)
6257 FILE *f;
6258 struct edge_list *elist;
6260 int x, pred, succ, index;
6261 edge e;
6263 for (x = 0; x < n_basic_blocks; x++)
6265 basic_block bb = BASIC_BLOCK (x);
6267 for (e = bb->succ; e; e = e->succ_next)
6269 pred = e->src->index;
6270 succ = e->dest->index;
6271 index = EDGE_INDEX (elist, e->src, e->dest);
6272 if (index == EDGE_INDEX_NO_EDGE)
6274 fprintf (f, "*p* No index for edge from %d to %d\n",pred, succ);
6275 continue;
6277 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
6278 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
6279 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
6280 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
6281 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
6282 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
6285 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6287 pred = e->src->index;
6288 succ = e->dest->index;
6289 index = EDGE_INDEX (elist, e->src, e->dest);
6290 if (index == EDGE_INDEX_NO_EDGE)
6292 fprintf (f, "*p* No index for edge from %d to %d\n",pred, succ);
6293 continue;
6295 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
6296 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
6297 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
6298 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
6299 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
6300 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
6302 /* We've verified that all the edges are in the list, no lets make sure
6303 there are no spurious edges in the list. */
6305 for (pred = 0 ; pred < n_basic_blocks; pred++)
6306 for (succ = 0 ; succ < n_basic_blocks; succ++)
6308 basic_block p = BASIC_BLOCK (pred);
6309 basic_block s = BASIC_BLOCK (succ);
6311 int found_edge = 0;
6313 for (e = p->succ; e; e = e->succ_next)
6314 if (e->dest == s)
6316 found_edge = 1;
6317 break;
6319 for (e = s->pred; e; e = e->pred_next)
6320 if (e->src == p)
6322 found_edge = 1;
6323 break;
6325 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
6326 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6327 fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
6328 pred, succ);
6329 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
6330 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6331 fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
6332 pred, succ, EDGE_INDEX (elist, BASIC_BLOCK (pred),
6333 BASIC_BLOCK (succ)));
6335 for (succ = 0 ; succ < n_basic_blocks; succ++)
6337 basic_block p = ENTRY_BLOCK_PTR;
6338 basic_block s = BASIC_BLOCK (succ);
6340 int found_edge = 0;
6342 for (e = p->succ; e; e = e->succ_next)
6343 if (e->dest == s)
6345 found_edge = 1;
6346 break;
6348 for (e = s->pred; e; e = e->pred_next)
6349 if (e->src == p)
6351 found_edge = 1;
6352 break;
6354 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
6355 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6356 fprintf (f, "*** Edge (entry, %d) appears to not have an index\n",
6357 succ);
6358 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
6359 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6360 fprintf (f, "*** Edge (entry, %d) has index %d, but no edge exists\n",
6361 succ, EDGE_INDEX (elist, ENTRY_BLOCK_PTR,
6362 BASIC_BLOCK (succ)));
6364 for (pred = 0 ; pred < n_basic_blocks; pred++)
6366 basic_block p = BASIC_BLOCK (pred);
6367 basic_block s = EXIT_BLOCK_PTR;
6369 int found_edge = 0;
6371 for (e = p->succ; e; e = e->succ_next)
6372 if (e->dest == s)
6374 found_edge = 1;
6375 break;
6377 for (e = s->pred; e; e = e->pred_next)
6378 if (e->src == p)
6380 found_edge = 1;
6381 break;
6383 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
6384 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6385 fprintf (f, "*** Edge (%d, exit) appears to not have an index\n",
6386 pred);
6387 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
6388 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6389 fprintf (f, "*** Edge (%d, exit) has index %d, but no edge exists\n",
6390 pred, EDGE_INDEX (elist, BASIC_BLOCK (pred),
6391 EXIT_BLOCK_PTR));
6395 /* This routine will determine what, if any, edge there is between
6396 a specified predecessor and successor. */
6399 find_edge_index (edge_list, pred, succ)
6400 struct edge_list *edge_list;
6401 basic_block pred, succ;
6403 int x;
6404 for (x = 0; x < NUM_EDGES (edge_list); x++)
6406 if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
6407 && INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
6408 return x;
6410 return (EDGE_INDEX_NO_EDGE);
6413 /* This function will remove an edge from the flow graph. */
6414 static void
6415 remove_edge (e)
6416 edge e;
6418 edge last_pred = NULL;
6419 edge last_succ = NULL;
6420 edge tmp;
6421 basic_block src, dest;
6422 src = e->src;
6423 dest = e->dest;
6424 for (tmp = src->succ; tmp && tmp != e; tmp = tmp->succ_next)
6425 last_succ = tmp;
6427 if (!tmp)
6428 abort ();
6429 if (last_succ)
6430 last_succ->succ_next = e->succ_next;
6431 else
6432 src->succ = e->succ_next;
6434 for (tmp = dest->pred; tmp && tmp != e; tmp = tmp->pred_next)
6435 last_pred = tmp;
6437 if (!tmp)
6438 abort ();
6439 if (last_pred)
6440 last_pred->pred_next = e->pred_next;
6441 else
6442 dest->pred = e->pred_next;
6444 n_edges--;
6445 free (e);
6448 /* This routine will remove any fake successor edges for a basic block.
6449 When the edge is removed, it is also removed from whatever predecessor
6450 list it is in. */
6451 static void
6452 remove_fake_successors (bb)
6453 basic_block bb;
6455 edge e;
6456 for (e = bb->succ; e ; )
6458 edge tmp = e;
6459 e = e->succ_next;
6460 if ((tmp->flags & EDGE_FAKE) == EDGE_FAKE)
6461 remove_edge (tmp);
6465 /* This routine will remove all fake edges from the flow graph. If
6466 we remove all fake successors, it will automatically remove all
6467 fake predecessors. */
6468 void
6469 remove_fake_edges ()
6471 int x;
6473 for (x = 0; x < n_basic_blocks; x++)
6474 remove_fake_successors (BASIC_BLOCK (x));
6476 /* We've handled all successors except the entry block's. */
6477 remove_fake_successors (ENTRY_BLOCK_PTR);
6480 /* This functions will add a fake edge between any block which has no
6481 successors, and the exit block. Some data flow equations require these
6482 edges to exist. */
6483 void
6484 add_noreturn_fake_exit_edges ()
6486 int x;
6488 for (x = 0; x < n_basic_blocks; x++)
6489 if (BASIC_BLOCK (x)->succ == NULL)
6490 make_edge (NULL, BASIC_BLOCK (x), EXIT_BLOCK_PTR, EDGE_FAKE);