gcc/flow.c
/* Data flow analysis for GNU compiler.
   Copyright (C) 1987, 88, 92-97, 1998 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

/* This file contains the data flow analysis pass of the compiler.
   It computes data flow information which tells combine_instructions
   which insns to consider combining and controls register allocation.

   Additional data flow information that is too bulky to record
   is generated during the analysis, and is used at that time to
   create autoincrement and autodecrement addressing.

   The first step is dividing the function into basic blocks.
   find_basic_blocks does this.  Then life_analysis determines
   where each register is live and where it is dead.

   ** find_basic_blocks **

   find_basic_blocks divides the current function's rtl
   into basic blocks.  It records the beginnings and ends of the
   basic blocks in the vectors basic_block_head and basic_block_end,
   and the number of blocks in n_basic_blocks.

   find_basic_blocks also finds any unreachable loops
   and deletes them.

   ** life_analysis **

   life_analysis is called immediately after find_basic_blocks.
   It uses the basic block information to determine where each
   hard or pseudo register is live.

   ** live-register info **

   The information about where each register is live is in two parts:
   the REG_NOTES of insns, and the vector basic_block_live_at_start.

   basic_block_live_at_start has an element for each basic block,
   and the element is a bit-vector with a bit for each hard or pseudo
   register.  The bit is 1 if the register is live at the beginning
   of the basic block.

   Two types of elements can be added to an insn's REG_NOTES.
   A REG_DEAD note is added to an insn's REG_NOTES for any register
   that meets both of two conditions:  The value in the register is not
   needed in subsequent insns and the insn does not replace the value in
   the register (in the case of multi-word hard registers, the value in
   each register must be replaced by the insn to avoid a REG_DEAD note).

   In the vast majority of cases, an object in a REG_DEAD note will be
   used somewhere in the insn.  The (rare) exception to this is if an
   insn uses a multi-word hard register and only some of the registers are
   needed in subsequent insns.  In that case, REG_DEAD notes will be
   provided for those hard registers that are not subsequently needed.
   Partial REG_DEAD notes of this type do not occur when an insn sets
   only some of the hard registers used in such a multi-word operand;
   omitting REG_DEAD notes for objects stored in an insn is optional and
   the desire to do so does not justify the complexity of the partial
   REG_DEAD notes.

   REG_UNUSED notes are added for each register that is set by the insn
   but is unused subsequently (if every register set by the insn is unused
   and the insn does not reference memory or have some other side-effect,
   the insn is deleted instead).  If only part of a multi-word hard
   register is used in a subsequent insn, REG_UNUSED notes are made for
   the parts that will not be used.

   To determine which registers are live after any insn, one can
   start from the beginning of the basic block and scan insns, noting
   which registers are set by each insn and which die there.
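
   For instance (an illustrative sequence using made-up pseudo register
   numbers), suppose reg 101 is live at the start of a block containing

	(set (reg:SI 102) (plus:SI (reg:SI 101) (const_int 1)))
					;; REG_DEAD note for reg 101
	(set (mem:SI (reg:SI 103)) (reg:SI 102))
					;; REG_DEAD note for reg 102

   Scanning forward, the first insn sets reg 102 and is the last use of
   reg 101, so after it reg 102 is live and reg 101 is not; after the
   second insn reg 102 is dead as well.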

   ** Other actions of life_analysis **

   life_analysis sets up the LOG_LINKS fields of insns because the
   information needed to do so is readily available.

   life_analysis deletes insns whose only effect is to store a value
   that is never used.

   life_analysis notices cases where a reference to a register as
   a memory address can be combined with a preceding or following
   incrementation or decrementation of the register.  The separate
   instruction to increment or decrement is deleted and the address
   is changed to a POST_INC or similar rtx.

   Each time an incrementing or decrementing address is created,
   a REG_INC element is added to the insn's REG_NOTES list.
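
   For example (an illustrative, target-independent sketch; the exact
   RTL and increment depend on the machine and the access size):

	(set (mem:SI (reg:SI 100)) (reg:SI 101))
	(set (reg:SI 100) (plus:SI (reg:SI 100) (const_int 4)))

   can be combined, when the intermediate value of reg 100 has no other
   use, into the single insn

	(set (mem:SI (post_inc:SI (reg:SI 100))) (reg:SI 101))

   which then carries a REG_INC note for reg 100.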

   life_analysis fills in certain vectors containing information about
   register usage: reg_n_refs, reg_n_deaths, reg_n_sets, reg_live_length,
   reg_n_calls_crossed and reg_basic_block.

   life_analysis sets current_function_sp_is_unchanging if the function
   doesn't modify the stack pointer.  */
114 #include "config.h"
115 #include "system.h"
116 #include "rtl.h"
117 #include "basic-block.h"
118 #include "insn-config.h"
119 #include "regs.h"
120 #include "hard-reg-set.h"
121 #include "flags.h"
122 #include "output.h"
123 #include "except.h"
124 #include "toplev.h"
125 #include "recog.h"
127 #include "obstack.h"
128 #define obstack_chunk_alloc xmalloc
129 #define obstack_chunk_free free
131 /* The contents of the current function definition are allocated
132 in this obstack, and all are freed at the end of the function.
133 For top-level functions, this is temporary_obstack.
134 Separate obstacks are made for nested functions. */
136 extern struct obstack *function_obstack;
138 /* List of labels that must never be deleted. */
139 extern rtx forced_labels;
141 /* Get the basic block number of an insn.
142 This info should not be expected to remain available
143 after the end of life_analysis. */
145 /* This is the limit of the allocated space in the following two arrays. */
147 static int max_uid_for_flow;
149 #define BLOCK_NUM(INSN) uid_block_number[INSN_UID (INSN)]
151 /* This is where the BLOCK_NUM values are really stored.
152 This is set up by find_basic_blocks and used there and in life_analysis,
153 and then freed. */
155 int *uid_block_number;
157 /* INSN_VOLATILE (insn) is 1 if the insn refers to anything volatile. */
159 #define INSN_VOLATILE(INSN) uid_volatile[INSN_UID (INSN)]
160 static char *uid_volatile;
162 /* Number of basic blocks in the current function. */
164 int n_basic_blocks;
166 /* Maximum register number used in this function, plus one. */
168 int max_regno;
170 /* Maximum number of SCRATCH rtx's used in any basic block of this
171 function. */
173 int max_scratch;
175 /* Number of SCRATCH rtx's in the current block. */
177 static int num_scratch;
179 /* Indexed by n, giving various register information */
181 varray_type reg_n_info;
183 /* Size of the reg_n_info table. */
185 unsigned int reg_n_max;
187 /* Element N is the next insn that uses (hard or pseudo) register number N
188 within the current basic block; or zero, if there is no such insn.
189 This is valid only during the final backward scan in propagate_block. */
191 static rtx *reg_next_use;
193 /* Size of a regset for the current function,
194 in (1) bytes and (2) elements. */
196 int regset_bytes;
197 int regset_size;
199 /* Element N is first insn in basic block N.
200 This info lasts until we finish compiling the function. */
202 rtx *basic_block_head;
204 /* Element N is last insn in basic block N.
205 This info lasts until we finish compiling the function. */
207 rtx *basic_block_end;
209 /* Element N indicates whether basic block N can be reached through a
210 computed jump. */
212 char *basic_block_computed_jump_target;
214 /* Element N is a regset describing the registers live
215 at the start of basic block N.
216 This info lasts until we finish compiling the function. */
218 regset *basic_block_live_at_start;
220 /* Regset of regs live when calls to `setjmp'-like functions happen. */
222 regset regs_live_at_setjmp;
224 /* List made of EXPR_LIST rtx's which gives pairs of pseudo registers
225 that have to go in the same hard reg.
226 The first two regs in the list are a pair, and the next two
227 are another pair, etc. */
228 rtx regs_may_share;
230 /* Element N is nonzero if control can drop into basic block N
231 from the preceding basic block. Freed after life_analysis. */
233 static char *basic_block_drops_in;
235 /* Element N is depth within loops of the last insn in basic block number N.
236 Freed after life_analysis. */
238 static short *basic_block_loop_depth;
240 /* Depth within loops of basic block being scanned for lifetime analysis,
241 plus one. This is the weight attached to references to registers. */
243 static int loop_depth;
245 /* During propagate_block, this is non-zero if the value of CC0 is live. */
247 static int cc0_live;
249 /* During propagate_block, this contains the last MEM stored into. It
250 is used to eliminate consecutive stores to the same location. */
252 static rtx last_mem_set;
254 /* Set of registers that may be eliminable. These are handled specially
255 in updating regs_ever_live. */
257 static HARD_REG_SET elim_reg_set;
259 /* Forward declarations */
260 static void find_basic_blocks_1 PROTO((rtx, rtx));
261 static void make_edges PROTO((int));
262 static void mark_label_ref PROTO((rtx, rtx, int));
263 static int delete_unreachable_blocks PROTO((void));
264 static int delete_block PROTO((int));
265 static void life_analysis_1 PROTO((rtx, int));
266 static void propagate_block PROTO((regset, rtx, rtx, int,
267 regset, int));
268 static rtx flow_delete_insn PROTO((rtx));
269 static int set_noop_p PROTO((rtx));
270 static int noop_move_p PROTO((rtx));
271 static void record_volatile_insns PROTO((rtx));
272 static void mark_regs_live_at_end PROTO((regset));
273 static int insn_dead_p PROTO((rtx, regset, int));
274 static int libcall_dead_p PROTO((rtx, regset, rtx, rtx));
275 static void mark_set_regs PROTO((regset, regset, rtx,
276 rtx, regset));
277 static void mark_set_1 PROTO((regset, regset, rtx,
278 rtx, regset));
279 #ifdef AUTO_INC_DEC
280 static void find_auto_inc PROTO((regset, rtx, rtx));
281 static int try_pre_increment_1 PROTO((rtx));
282 static int try_pre_increment PROTO((rtx, rtx, HOST_WIDE_INT));
283 #endif
284 static void mark_used_regs PROTO((regset, regset, rtx, int, rtx));
285 void dump_flow_info PROTO((FILE *));
286 static void add_pred_succ PROTO ((int, int, int_list_ptr *,
287 int_list_ptr *, int *, int *));
288 static int_list_ptr alloc_int_list_node PROTO ((int_list_block **));
289 static int_list_ptr add_int_list_node PROTO ((int_list_block **,
290 int_list **, int));
291 static void init_regset_vector PROTO ((regset *, int,
292 struct obstack *));
293 static void count_reg_sets_1 PROTO ((rtx));
294 static void count_reg_sets PROTO ((rtx));
295 static void count_reg_references PROTO ((rtx));
296 static void notice_stack_pointer_modification PROTO ((rtx, rtx));
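
/* The following is an illustrative, self-contained sketch of the
   relaxation scheme life_analysis uses (see the comments in
   life_analysis_1 below): the registers live at the start of a block
   are obtained by scanning the block backwards from those live at its
   end, and the live-at-end sets are widened until nothing changes.  It
   is not part of flow.c proper and is kept inside #if 0.  It uses a
   single `unsigned long' as a toy register set instead of GCC's regset,
   and a toy block layout with explicit successor lists, whereas flow.c
   instead pushes each new live-at-start set into the predecessors'
   live-at-end sets; the fixed point reached is the same.  All names
   below are hypothetical.  */
#if 0
#define TOY_NINSNS 8

struct toy_insn { unsigned long use, def; };

struct toy_block
{
  struct toy_insn insns[TOY_NINSNS];
  int n_insns;
  int succ[2];			/* Successor block numbers, -1 if none.  */
  unsigned long live_at_start, live_at_end;
};

static void
toy_life_analysis (blocks, n_blocks)
     struct toy_block *blocks;
     int n_blocks;
{
  int changed = 1;

  while (changed)
    {
      int i;

      changed = 0;
      for (i = n_blocks - 1; i >= 0; i--)
	{
	  unsigned long live = 0;
	  int j;

	  /* live_at_end is the union of the successors' live_at_start.  */
	  for (j = 0; j < 2; j++)
	    if (blocks[i].succ[j] >= 0)
	      live |= blocks[blocks[i].succ[j]].live_at_start;
	  blocks[i].live_at_end = live;

	  /* Scan the block backwards: a def kills a register,
	     a use makes it live.  */
	  for (j = blocks[i].n_insns - 1; j >= 0; j--)
	    live = (live & ~blocks[i].insns[j].def) | blocks[i].insns[j].use;

	  if (live != blocks[i].live_at_start)
	    {
	      blocks[i].live_at_start = live;
	      changed = 1;
	    }
	}
    }
}
#endif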
298 /* Find basic blocks of the current function.
299 F is the first insn of the function and NREGS the number of register numbers
300 in use.
301 LIVE_REACHABLE_P is non-zero if the caller needs all live blocks to
302 be reachable. This turns on a kludge that causes the control flow
303 information to be inaccurate and not suitable for passes like GCSE. */
305 void
306 find_basic_blocks (f, nregs, file)
307 rtx f;
308 int nregs;
309 FILE *file;
311 register rtx insn;
312 register int i;
313 rtx nonlocal_label_list = nonlocal_label_rtx_list ();
314 int in_libcall_block = 0;
316 /* Count the basic blocks. Also find maximum insn uid value used. */
319 rtx prev_call = 0;
320 register RTX_CODE prev_code = JUMP_INSN;
321 register RTX_CODE code;
322 int eh_region = 0;
323 int call_had_abnormal_edge = 0;
325 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
327 /* Track when we are inside a LIBCALL block. */
328 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
329 && find_reg_note (insn, REG_LIBCALL, NULL_RTX))
330 in_libcall_block = 1;
332 code = GET_CODE (insn);
334 /* A basic block starts at label, or after something that can jump. */
335 if (code == CODE_LABEL
336 || (GET_RTX_CLASS (code) == 'i'
337 && (prev_code == JUMP_INSN
338 || (prev_code == CALL_INSN && call_had_abnormal_edge)
339 || prev_code == BARRIER)))
341 i++;
343 /* If the previous insn was a call that did not create an
344 abnormal edge, we want to add a nop so that the CALL_INSN
345 itself is not at basic_block_end. This allows us to easily
346 distinguish between normal calls and those which create
347 abnormal edges in the flow graph. */
349 if (i > 0 && !call_had_abnormal_edge && prev_call != 0)
351 rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
352 emit_insn_after (nop, prev_call);
355 /* We change the code of the CALL_INSN, so that it won't start a
356 new block. */
357 if (code == CALL_INSN && in_libcall_block)
358 code = INSN;
360 /* Record whether this call created an edge. */
361 if (code == CALL_INSN)
363 prev_call = insn;
364 call_had_abnormal_edge = (nonlocal_label_list != 0 || eh_region);
366 else if (code != NOTE && code != BARRIER)
367 prev_call = 0;
369 if (code != NOTE)
370 prev_code = code;
371 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
372 ++eh_region;
373 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
374 --eh_region;
376 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
377 && find_reg_note (insn, REG_RETVAL, NULL_RTX))
378 in_libcall_block = 0;
382 n_basic_blocks = i;
384 max_uid_for_flow = get_max_uid ();
385 #ifdef AUTO_INC_DEC
386 /* Leave space for insns life_analysis makes in some cases for auto-inc.
387 These cases are rare, so we don't need too much space. */
388 max_uid_for_flow += max_uid_for_flow / 10;
389 #endif
391 /* Allocate some tables that last till end of compiling this function
392 and some needed only in find_basic_blocks and life_analysis. */
394 basic_block_head = (rtx *) xmalloc (n_basic_blocks * sizeof (rtx));
395 basic_block_end = (rtx *) xmalloc (n_basic_blocks * sizeof (rtx));
396 basic_block_drops_in = (char *) xmalloc (n_basic_blocks);
397 basic_block_computed_jump_target = (char *) oballoc (n_basic_blocks);
398 basic_block_loop_depth = (short *) xmalloc (n_basic_blocks * sizeof (short));
399 uid_block_number
400 = (int *) xmalloc ((max_uid_for_flow + 1) * sizeof (int));
401 uid_volatile = (char *) xmalloc (max_uid_for_flow + 1);
402 bzero (uid_volatile, max_uid_for_flow + 1);
404 find_basic_blocks_1 (f, nonlocal_label_list);
407 /* For communication between find_basic_blocks_1 and its subroutines. */
409 /* An array of EH region numbers, indexed by insn UID, giving the active
410 EH region for each insn in F. */
411 static int *active_eh_region;
412 static int *nested_eh_region;
414 /* Element N nonzero if basic block N can actually be reached. */
416 static char *block_live_static;
418 /* List of label_refs to all labels whose addresses are taken
419 and used as data. */
420 static rtx label_value_list;
422 /* a list of non-local labels in the function. */
423 static rtx nonlocal_label_list;
425 /* Find all basic blocks of the function whose first insn is F.
426 Store the correct data in the tables that describe the basic blocks,
427 set up the chains of references for each CODE_LABEL, and
428 delete any entire basic blocks that cannot be reached.
430 NONLOCAL_LABELS is a list of non-local labels in the function.
431 Blocks that are otherwise unreachable may be reachable with a non-local
432 goto.
433 LIVE_REACHABLE_P is non-zero if the caller needs all live blocks to
434 be reachable. This turns on a kludge that causes the control flow
435 information to be inaccurate and not suitable for passes like GCSE. */
437 static void
438 find_basic_blocks_1 (f, nonlocal_labels)
439 rtx f, nonlocal_labels;
441 register rtx insn;
442 register int i;
443 register char *block_live = (char *) alloca (n_basic_blocks);
444 register char *block_marked = (char *) alloca (n_basic_blocks);
445 rtx note, eh_note;
446 enum rtx_code prev_code, code;
447 int depth, pass;
448 int in_libcall_block = 0;
449 int call_had_abnormal_edge = 0;
451 pass = 1;
452 active_eh_region = (int *) alloca ((max_uid_for_flow + 1) * sizeof (int));
453 nested_eh_region = (int *) alloca ((max_label_num () + 1) * sizeof (int));
454 nonlocal_label_list = nonlocal_labels;
455 restart:
457 label_value_list = 0;
458 block_live_static = block_live;
459 bzero (block_live, n_basic_blocks);
460 bzero (block_marked, n_basic_blocks);
461 bzero (basic_block_computed_jump_target, n_basic_blocks);
462 bzero ((char *) active_eh_region, (max_uid_for_flow + 1) * sizeof (int));
463 bzero ((char *) nested_eh_region, (max_label_num () + 1) * sizeof (int));
464 current_function_has_computed_jump = 0;
466 /* Initialize with just block 0 reachable and no blocks marked. */
467 if (n_basic_blocks > 0)
468 block_live[0] = 1;
470 /* Initialize the ref chain of each label to 0. Record where all the
471 blocks start and end and their depth in loops. For each insn, record
472 the block it is in. Also mark as reachable any blocks headed by labels
473 that must not be deleted. */
475 for (eh_note = NULL_RTX, insn = f, i = -1, prev_code = JUMP_INSN, depth = 1;
476 insn; insn = NEXT_INSN (insn))
479 /* Track when we are inside a LIBCALL block. */
480 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
481 && find_reg_note (insn, REG_LIBCALL, NULL_RTX))
482 in_libcall_block = 1;
484 code = GET_CODE (insn);
485 if (code == NOTE)
487 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
488 depth++;
489 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END)
490 depth--;
493 /* A basic block starts at label, or after something that can jump. */
494 else if (code == CODE_LABEL
495 || (GET_RTX_CLASS (code) == 'i'
496 && (prev_code == JUMP_INSN
497 || (prev_code == CALL_INSN && call_had_abnormal_edge)
498 || prev_code == BARRIER)))
500 basic_block_head[++i] = insn;
501 basic_block_end[i] = insn;
502 basic_block_loop_depth[i] = depth;
504 if (code == CODE_LABEL)
506 LABEL_REFS (insn) = insn;
507 /* Any label that cannot be deleted
508 is considered to start a reachable block. */
509 if (LABEL_PRESERVE_P (insn))
510 block_live[i] = 1;
514 else if (GET_RTX_CLASS (code) == 'i')
516 basic_block_end[i] = insn;
517 basic_block_loop_depth[i] = depth;
520 if (GET_RTX_CLASS (code) == 'i')
522 /* Make a list of all labels referred to other than by jumps. */
523 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
524 if (REG_NOTE_KIND (note) == REG_LABEL
525 && XEXP (note, 0) != eh_return_stub_label)
526 label_value_list = gen_rtx_EXPR_LIST (VOIDmode, XEXP (note, 0),
527 label_value_list);
530 /* Keep a lifo list of the currently active exception notes. */
531 if (GET_CODE (insn) == NOTE)
533 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
535 if (eh_note)
536 nested_eh_region [NOTE_BLOCK_NUMBER (insn)] =
537 NOTE_BLOCK_NUMBER (XEXP (eh_note, 0));
538 else
539 nested_eh_region [NOTE_BLOCK_NUMBER (insn)] = 0;
540 eh_note = gen_rtx_EXPR_LIST (VOIDmode,
541 insn, eh_note);
543 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
544 eh_note = XEXP (eh_note, 1);
546 /* If we encounter a CALL_INSN, note which exception handler it
547 might pass control to.
549 If doing asynchronous exceptions, record the active EH handler
550 for every insn, since most insns can throw. */
551 else if (eh_note
552 && (asynchronous_exceptions
553 || (GET_CODE (insn) == CALL_INSN
554 && ! in_libcall_block)))
555 active_eh_region[INSN_UID (insn)] =
556 NOTE_BLOCK_NUMBER (XEXP (eh_note, 0));
557 BLOCK_NUM (insn) = i;
559 /* We change the code of the CALL_INSN, so that it won't start a
560 new block. */
561 if (code == CALL_INSN && in_libcall_block)
562 code = INSN;
564 /* Record whether this call created an edge. */
565 if (code == CALL_INSN)
566 call_had_abnormal_edge = (nonlocal_label_list != 0 || eh_note);
568 if (code != NOTE)
569 prev_code = code;
571 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
572 && find_reg_note (insn, REG_RETVAL, NULL_RTX))
573 in_libcall_block = 0;
576 /* During the second pass, `n_basic_blocks' is only an upper bound.
577 Only perform the sanity check for the first pass, and on the second
578 pass ensure `n_basic_blocks' is set to the correct value. */
579 if (pass == 1 && i + 1 != n_basic_blocks)
580 abort ();
581 n_basic_blocks = i + 1;
583 /* Record which basic blocks control can drop in to. */
585 for (i = 0; i < n_basic_blocks; i++)
587 for (insn = PREV_INSN (basic_block_head[i]);
588 insn && GET_CODE (insn) == NOTE; insn = PREV_INSN (insn))
591 basic_block_drops_in[i] = insn && GET_CODE (insn) != BARRIER;
594 /* Now find which basic blocks can actually be reached
595 and put all jump insns' LABEL_REFS onto the ref-chains
596 of their target labels. */
598 if (n_basic_blocks > 0)
600 int something_marked = 1;
601 int deleted = 0;
603 /* Pass over all blocks, marking each block that is reachable
604 and has not yet been marked.
605 Keep doing this until, in one pass, no blocks have been marked.
606 Then blocks_live and blocks_marked are identical and correct.
607 In addition, all jumps actually reachable have been marked. */
609 while (something_marked)
611 something_marked = 0;
612 for (i = 0; i < n_basic_blocks; i++)
613 if (block_live[i] && !block_marked[i])
615 block_marked[i] = 1;
616 something_marked = 1;
618 make_edges (i);
622 /* This should never happen. If it does that means we've computed an
623 incorrect flow graph, which can lead to aborts/crashes later in the
624 compiler or incorrect code generation.
626 We used to try and continue here, but that's just asking for trouble
627 later during the compile or at runtime. It's easier to debug the
628 problem here than later! */
629 for (i = 1; i < n_basic_blocks; i++)
630 if (block_live[i] && ! basic_block_drops_in[i]
631 && GET_CODE (basic_block_head[i]) == CODE_LABEL
632 && LABEL_REFS (basic_block_head[i]) == basic_block_head[i])
633 abort ();
635 if (! reload_completed)
636 deleted = delete_unreachable_blocks ();
638 /* There are pathological cases where one function calling hundreds of
639 nested inline functions can generate lots and lots of unreachable
640 blocks that jump can't delete. Since we don't use sparse matrices
641 a lot of memory will be needed to compile such functions.
642 Implementing sparse matrices is a fair bit of work and it is not
643 clear that they win more than they lose (we don't want to
644 unnecessarily slow down compilation of normal code). By making
645 another pass for the pathological case, we can greatly speed up
646 their compilation without hurting normal code. This works because
647 all the insns in the unreachable blocks have either been deleted or
648 turned into notes.
649 Note that we're talking about reducing memory usage by 10's of
650 megabytes and reducing compilation time by several minutes. */
651 /* ??? The choice of when to make another pass is a bit arbitrary,
652 and was derived from empirical data. */
653 if (pass == 1
654 && deleted > 200)
656 pass++;
657 n_basic_blocks -= deleted;
658 /* `n_basic_blocks' may not be correct at this point: two previously
659 separate blocks may now be merged. That's ok though as we
660 recalculate it during the second pass. It certainly can't be
661 any larger than the current value. */
662 goto restart;
667 /* Record INSN's block number as BB. */
669 void
670 set_block_num (insn, bb)
671 rtx insn;
672 int bb;
674 if (INSN_UID (insn) >= max_uid_for_flow)
676 /* Add one-eighth the size so we don't keep calling xrealloc. */
677 max_uid_for_flow = INSN_UID (insn) + (INSN_UID (insn) + 7) / 8;
678 uid_block_number = (int *)
679 xrealloc (uid_block_number, (max_uid_for_flow + 1) * sizeof (int));
681 BLOCK_NUM (insn) = bb;
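  /* Illustrative arithmetic for the growth policy above (not from the
     original source): if INSN_UID (insn) is 800, max_uid_for_flow becomes
     800 + (800 + 7) / 8 == 900, leaving roughly one-eighth slack beyond
     the uid that overflowed, so repeated calls need not xrealloc for
     every new insn.  */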
685 /* Subroutines of find_basic_blocks. */
687 /* For basic block I, make edges and mark live all blocks which are reachable
688 from it. */
689 static void
690 make_edges (i)
691 int i;
693 rtx insn, x;
695 if (i + 1 < n_basic_blocks && basic_block_drops_in[i + 1])
696 block_live_static[i + 1] = 1;
697 insn = basic_block_end[i];
698 if (GET_CODE (insn) == JUMP_INSN)
699 mark_label_ref (PATTERN (insn), insn, 0);
701 /* If we have any forced labels, mark them as potentially reachable from
702 this block. */
703 for (x = forced_labels; x; x = XEXP (x, 1))
704 if (! LABEL_REF_NONLOCAL_P (x))
705 mark_label_ref (gen_rtx_LABEL_REF (VOIDmode, XEXP (x, 0)),
706 insn, 0);
708 /* Now scan the insns for this block, we may need to make edges for some of
709 them to various non-obvious locations (exception handlers, nonlocal
710 labels, etc). */
711 for (insn = basic_block_head[i];
712 insn != NEXT_INSN (basic_block_end[i]);
713 insn = NEXT_INSN (insn))
715 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
717 rtx note;
718 /* References to labels in non-jumping insns have REG_LABEL notes
719 attached to them.
721 This can happen for computed gotos; we don't care about them
722 here since the values are also on the label_value_list and will
723 be marked live if we find a live computed goto.
725 This can also happen when we take the address of a label to pass
726 as an argument to __throw. Note throw only uses the value to
727 determine what handler should be called -- ie the label is not
728 used as a jump target, it just marks regions in the code.
730 In theory we should be able to ignore the REG_LABEL notes, but
731 we have to make sure that the label and associated insns aren't
732 marked dead, so we make the block in question live and create an
733 edge from this insn to the label. This is not strictly correct,
734 but it is close enough for now.
736 See below for code that handles the eh_stub label specially. */
737 for (note = REG_NOTES (insn);
738 note;
739 note = XEXP (note, 1))
741 if (REG_NOTE_KIND (note) == REG_LABEL
742 && XEXP (note, 0) != eh_return_stub_label)
744 x = XEXP (note, 0);
745 block_live_static[BLOCK_NUM (x)] = 1;
746 mark_label_ref (gen_rtx_LABEL_REF (VOIDmode, x),
747 insn, 0);
751 /* If this is a computed jump, then mark it as reaching everything
752 on the label_value_list and forced_labels list. */
753 if (computed_jump_p (insn))
755 current_function_has_computed_jump = 1;
756 for (x = label_value_list; x; x = XEXP (x, 1))
758 int b = BLOCK_NUM (XEXP (x, 0));
759 basic_block_computed_jump_target[b] = 1;
760 mark_label_ref (gen_rtx_LABEL_REF (VOIDmode, XEXP (x, 0)),
761 insn, 0);
764 for (x = forced_labels; x; x = XEXP (x, 1))
766 int b = BLOCK_NUM (XEXP (x, 0));
767 basic_block_computed_jump_target[b] = 1;
768 mark_label_ref (gen_rtx_LABEL_REF (VOIDmode, XEXP (x, 0)),
769 insn, 0);
773 /* If this is a CALL_INSN, then mark it as reaching the active EH
774 handler for this CALL_INSN. If we're handling asynchronous
775 exceptions mark every insn as reaching the active EH handler.
777 Also mark the CALL_INSN as reaching any nonlocal goto sites. */
778 else if (asynchronous_exceptions
779 || (GET_CODE (insn) == CALL_INSN
780 && ! find_reg_note (insn, REG_RETVAL, NULL_RTX)))
782 if (active_eh_region[INSN_UID (insn)])
784 int region;
785 handler_info *ptr;
786 region = active_eh_region[INSN_UID (insn)];
787 for ( ; region;
788 region = nested_eh_region[region])
790 ptr = get_first_handler (region);
791 for ( ; ptr ; ptr = ptr->next)
792 mark_label_ref (gen_rtx_LABEL_REF (VOIDmode,
793 ptr->handler_label),
794 insn, 0);
797 if (! asynchronous_exceptions)
799 for (x = nonlocal_label_list; x; x = XEXP (x, 1))
800 mark_label_ref (gen_rtx_LABEL_REF (VOIDmode, XEXP (x, 0)),
801 insn, 0);
803 /* ??? This could be made smarter: in some cases it's possible
804 to tell that certain calls will not do a nonlocal goto.
806 For example, if the nested functions that do the nonlocal
807 gotos do not have their addresses taken, then only calls to
808 those functions or to other nested functions that use them
809 could possibly do nonlocal gotos. */
813 /* We know something about the structure of the function __throw in
814 libgcc2.c. It is the only function that ever contains eh_stub labels.
815 It modifies its return address so that the last block returns to one of
816 the eh_stub labels within it. So we have to make additional edges in
817 the flow graph. */
818 if (i + 1 == n_basic_blocks && eh_return_stub_label != 0)
820 mark_label_ref (gen_rtx_LABEL_REF (VOIDmode, eh_return_stub_label),
821 basic_block_end[i], 0);
825 /* Check expression X for label references;
826 if one is found, add INSN to the label's chain of references.
828 CHECKDUP means check for and avoid creating duplicate references
829 from the same insn. Such duplicates do no serious harm but
830 can slow life analysis. CHECKDUP is set only when duplicates
831 are likely. */
833 static void
834 mark_label_ref (x, insn, checkdup)
835 rtx x, insn;
836 int checkdup;
838 register RTX_CODE code;
839 register int i;
840 register char *fmt;
842 /* We can be called with NULL when scanning label_value_list. */
843 if (x == 0)
844 return;
846 code = GET_CODE (x);
847 if (code == LABEL_REF)
849 register rtx label = XEXP (x, 0);
850 register rtx y;
851 if (GET_CODE (label) != CODE_LABEL)
852 abort ();
853 /* If the label was never emitted, this insn is junk,
854 but avoid a crash trying to refer to BLOCK_NUM (label).
855 This can happen as a result of a syntax error
856 and a diagnostic has already been printed. */
857 if (INSN_UID (label) == 0)
858 return;
859 CONTAINING_INSN (x) = insn;
860 /* if CHECKDUP is set, check for duplicate ref from same insn
861 and don't insert. */
862 if (checkdup)
863 for (y = LABEL_REFS (label); y != label; y = LABEL_NEXTREF (y))
864 if (CONTAINING_INSN (y) == insn)
865 return;
866 LABEL_NEXTREF (x) = LABEL_REFS (label);
867 LABEL_REFS (label) = x;
868 block_live_static[BLOCK_NUM (label)] = 1;
869 return;
872 fmt = GET_RTX_FORMAT (code);
873 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
875 if (fmt[i] == 'e')
876 mark_label_ref (XEXP (x, i), insn, 0);
877 if (fmt[i] == 'E')
879 register int j;
880 for (j = 0; j < XVECLEN (x, i); j++)
881 mark_label_ref (XVECEXP (x, i, j), insn, 1);
886 /* Now delete the code for any basic blocks that can't be reached.
887 They can occur because jump_optimize does not recognize unreachable loops
888 as unreachable.
889 Return the number of deleted blocks. */
890 static int
891 delete_unreachable_blocks ()
893 int deleted_handler = 0;
894 int deleted = 0;
895 int i;
896 rtx insn;
898 for (i = 0; i < n_basic_blocks; i++)
899 if (! block_live_static[i])
901 deleted++;
903 deleted_handler |= delete_block (i);
906 /* If we deleted an exception handler, we may have EH region
907 begin/end blocks to remove as well. */
908 if (deleted_handler)
909 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
910 if (GET_CODE (insn) == NOTE)
912 if ((NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG) ||
913 (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
915 int num = CODE_LABEL_NUMBER (insn);
916 /* A NULL handler indicates a region is no longer needed */
917 if (get_first_handler (num) == NULL)
919 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
920 NOTE_SOURCE_FILE (insn) = 0;
924 return deleted;
927 /* Delete the insns in a (non-live) block. We physically delete every
928 non-note insn except the start and end (so basic_block_head/end needn't
929 be updated), we turn the latter into NOTE_INSN_DELETED notes.
931 We used to "delete" the insns by turning them into notes, but we may be
932 deleting lots of insns that subsequent passes would otherwise have to
933 process. Secondly, lots of deleted blocks in a row can really slow down
934 propagate_block since it will otherwise process insn-turned-notes multiple
935 times when it looks for loop begin/end notes.
937 Return nonzero if we deleted an exception handler. */
938 static int
939 delete_block (i)
940 int i;
942 int deleted_handler = 0;
943 rtx insn;
945 if (basic_block_head[i] != basic_block_end[i])
947 /* It would be quicker to delete all of these with a single
948 unchaining, rather than one at a time, but we need to keep
949 the NOTE's. */
950 insn = NEXT_INSN (basic_block_head[i]);
951 while (insn != basic_block_end[i])
953 if (GET_CODE (insn) == BARRIER)
954 abort ();
955 else if (GET_CODE (insn) != NOTE)
956 insn = flow_delete_insn (insn);
957 else
958 insn = NEXT_INSN (insn);
962 insn = basic_block_head[i];
963 if (GET_CODE (insn) != NOTE)
965 /* Turn the head into a deleted insn note. */
966 if (GET_CODE (insn) == BARRIER)
967 abort ();
969 /* If the head of this block is a CODE_LABEL, then it might
970 be the label for an exception handler which can't be
971 reached.
973 We need to remove the label from the exception_handler_label
974 list and remove the associated NOTE_EH_REGION_BEG and
975 NOTE_EH_REGION_END notes. */
976 if (GET_CODE (insn) == CODE_LABEL)
978 rtx x, *prev = &exception_handler_labels;
980 for (x = exception_handler_labels; x; x = XEXP (x, 1))
982 if (XEXP (x, 0) == insn)
984 /* Found a match, splice this label out of the
985 EH label list. */
986 *prev = XEXP (x, 1);
987 XEXP (x, 1) = NULL_RTX;
988 XEXP (x, 0) = NULL_RTX;
990 /* Remove the handler from all regions */
991 remove_handler (insn);
992 deleted_handler = 1;
993 break;
995 prev = &XEXP (x, 1);
999 PUT_CODE (insn, NOTE);
1000 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1001 NOTE_SOURCE_FILE (insn) = 0;
1003 insn = basic_block_end[i];
1004 if (GET_CODE (insn) != NOTE)
1006 /* Turn the tail into a deleted insn note. */
1007 if (GET_CODE (insn) == BARRIER)
1008 abort ();
1009 PUT_CODE (insn, NOTE);
1010 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1011 NOTE_SOURCE_FILE (insn) = 0;
1013 /* BARRIERs are between basic blocks, not part of one.
1014 Delete a BARRIER if the preceding jump is deleted.
1015 We cannot alter a BARRIER into a NOTE
1016 because it is too short; but we can really delete
1017 it because it is not part of a basic block. */
1018 if (NEXT_INSN (insn) != 0
1019 && GET_CODE (NEXT_INSN (insn)) == BARRIER)
1020 delete_insn (NEXT_INSN (insn));
1022 /* Each time we delete some basic blocks,
1023 see if there is a jump around them that is
1024 being turned into a no-op. If so, delete it. */
1026 if (block_live_static[i - 1])
1028 register int j;
1029 for (j = i + 1; j < n_basic_blocks; j++)
1030 if (block_live_static[j])
1032 rtx label;
1033 insn = basic_block_end[i - 1];
1034 if (GET_CODE (insn) == JUMP_INSN
1035 /* An unconditional jump is the only possibility
1036 we must check for, since a conditional one
1037 would make these blocks live. */
1038 && simplejump_p (insn)
1039 && (label = XEXP (SET_SRC (PATTERN (insn)), 0), 1)
1040 && INSN_UID (label) != 0
1041 && BLOCK_NUM (label) == j)
1043 int k;
1045 /* The deleted blocks still show up in the cfg,
1046 so we must set basic_block_drops_in for blocks
1047 I to J inclusive to keep the cfg accurate. */
1048 for (k = i; k <= j; k++)
1049 basic_block_drops_in[k] = 1;
1051 PUT_CODE (insn, NOTE);
1052 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1053 NOTE_SOURCE_FILE (insn) = 0;
1054 if (GET_CODE (NEXT_INSN (insn)) != BARRIER)
1055 abort ();
1056 delete_insn (NEXT_INSN (insn));
1058 break;
1062 return deleted_handler;
1065 /* Delete INSN by patching it out.
1066 Return the next insn. */
1068 static rtx
1069 flow_delete_insn (insn)
1070 rtx insn;
1072 /* ??? For the moment we assume we don't have to watch for NULLs here
1073 since the start/end of basic blocks aren't deleted like this. */
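  /* This simply splices INSN out of the doubly-linked insn chain:
     PREV <-> INSN <-> NEXT becomes PREV <-> NEXT.  INSN's own pointers
     are left untouched, and the caller continues from NEXT.  */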
1074 NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
1075 PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
1076 return NEXT_INSN (insn);
1079 /* Perform data flow analysis.
1080 F is the first insn of the function and NREGS the number of register numbers
1081 in use. */
1083 void
1084 life_analysis (f, nregs, file)
1085 rtx f;
1086 int nregs;
1087 FILE *file;
1089 #ifdef ELIMINABLE_REGS
1090 register size_t i;
1091 static struct {int from, to; } eliminables[] = ELIMINABLE_REGS;
1092 #endif
1094 /* Record which registers will be eliminated. We use this in
1095 mark_used_regs. */
1097 CLEAR_HARD_REG_SET (elim_reg_set);
1099 #ifdef ELIMINABLE_REGS
1100 for (i = 0; i < sizeof eliminables / sizeof eliminables[0]; i++)
1101 SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
1102 #else
1103 SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
1104 #endif
1106 life_analysis_1 (f, nregs);
1107 if (file)
1108 dump_flow_info (file);
1110 free_basic_block_vars (1);
1113 /* Free the variables allocated by find_basic_blocks.
1115 KEEP_HEAD_END_P is non-zero if basic_block_head and basic_block_end
1116 are not to be freed. */
1118 void
1119 free_basic_block_vars (keep_head_end_p)
1120 int keep_head_end_p;
1122 if (basic_block_drops_in)
1124 free (basic_block_drops_in);
1125 /* Tell dump_flow_info this isn't available anymore. */
1126 basic_block_drops_in = 0;
1128 if (basic_block_loop_depth)
1130 free (basic_block_loop_depth);
1131 basic_block_loop_depth = 0;
1133 if (uid_block_number)
1135 free (uid_block_number);
1136 uid_block_number = 0;
1138 if (uid_volatile)
1140 free (uid_volatile);
1141 uid_volatile = 0;
1144 if (! keep_head_end_p && basic_block_head)
1146 free (basic_block_head);
1147 basic_block_head = 0;
1148 free (basic_block_end);
1149 basic_block_end = 0;
1153 /* Return nonzero if the destination of SET equals the source. */
1154 static int
1155 set_noop_p (set)
1156 rtx set;
1158 rtx src = SET_SRC (set);
1159 rtx dst = SET_DEST (set);
1160 if (GET_CODE (src) == REG && GET_CODE (dst) == REG
1161 && REGNO (src) == REGNO (dst))
1162 return 1;
1163 if (GET_CODE (src) != SUBREG || GET_CODE (dst) != SUBREG
1164 || SUBREG_WORD (src) != SUBREG_WORD (dst))
1165 return 0;
1166 src = SUBREG_REG (src);
1167 dst = SUBREG_REG (dst);
1168 if (GET_CODE (src) == REG && GET_CODE (dst) == REG
1169 && REGNO (src) == REGNO (dst))
1170 return 1;
1171 return 0;
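
/* Illustrative examples for set_noop_p above (hypothetical pseudo
   register numbers): it accepts both the plain register case and the
   matching-SUBREG case,

	(set (reg:SI 105) (reg:SI 105))
	(set (subreg:SI (reg:DI 106) 0) (subreg:SI (reg:DI 106) 0))

   but rejects a copy between different registers or between different
   words of the same multi-word register.  */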
1174 /* Return nonzero if an insn consists only of SETs, each of which only sets a
1175 value to itself. */
1176 static int
1177 noop_move_p (insn)
1178 rtx insn;
1180 rtx pat = PATTERN (insn);
1182 /* Insns carrying these notes are useful later on. */
1183 if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
1184 return 0;
1186 if (GET_CODE (pat) == SET && set_noop_p (pat))
1187 return 1;
1189 if (GET_CODE (pat) == PARALLEL)
1191 int i;
1192 /* If nothing but SETs of registers to themselves,
1193 this insn can also be deleted. */
1194 for (i = 0; i < XVECLEN (pat, 0); i++)
1196 rtx tem = XVECEXP (pat, 0, i);
1198 if (GET_CODE (tem) == USE
1199 || GET_CODE (tem) == CLOBBER)
1200 continue;
1202 if (GET_CODE (tem) != SET || ! set_noop_p (tem))
1203 return 0;
1206 return 1;
1208 return 0;
1211 static void
1212 notice_stack_pointer_modification (x, pat)
1213 rtx x;
1214 rtx pat ATTRIBUTE_UNUSED;
1216 if (x == stack_pointer_rtx
1217 /* The stack pointer is only modified indirectly as the result
1218 of a push until later in flow. See the comments in rtl.texi
1219 regarding Embedded Side-Effects on Addresses. */
1220 || (GET_CODE (x) == MEM
1221 && (GET_CODE (XEXP (x, 0)) == PRE_DEC
1222 || GET_CODE (XEXP (x, 0)) == PRE_INC
1223 || GET_CODE (XEXP (x, 0)) == POST_DEC
1224 || GET_CODE (XEXP (x, 0)) == POST_INC)
1225 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx))
1226 current_function_sp_is_unchanging = 0;
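
/* As an illustration of the MEM cases checked in
   notice_stack_pointer_modification above (schematic RTL, not from the
   original source): a push such as

	(set (mem:SI (pre_dec:SI (reg:SI sp))) (reg:SI 107))

   changes the stack pointer only through the embedded side effect in the
   address, so looking for a direct SET of stack_pointer_rtx alone would
   miss it.  */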
1229 /* Record which insns refer to any volatile memory
1230 or for any reason can't be deleted just because they are dead stores.
1231 Also, delete any insns that copy a register to itself.
1232 And see if the stack pointer is modified. */
1233 static void
1234 record_volatile_insns (f)
1235 rtx f;
1237 rtx insn;
1238 for (insn = f; insn; insn = NEXT_INSN (insn))
1240 enum rtx_code code1 = GET_CODE (insn);
1241 if (code1 == CALL_INSN)
1242 INSN_VOLATILE (insn) = 1;
1243 else if (code1 == INSN || code1 == JUMP_INSN)
1245 if (GET_CODE (PATTERN (insn)) != USE
1246 && volatile_refs_p (PATTERN (insn)))
1247 INSN_VOLATILE (insn) = 1;
1249 /* A SET that makes space on the stack cannot be dead.
1250 (Such SETs occur only for allocating variable-size data,
1251 so they will always have a PLUS or MINUS according to the
1252 direction of stack growth.)
1253 Even if this function never uses this stack pointer value,
1254 signal handlers do! */
1255 else if (code1 == INSN && GET_CODE (PATTERN (insn)) == SET
1256 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
1257 #ifdef STACK_GROWS_DOWNWARD
1258 && GET_CODE (SET_SRC (PATTERN (insn))) == MINUS
1259 #else
1260 && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
1261 #endif
1262 && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx)
1263 INSN_VOLATILE (insn) = 1;
1265 /* Delete (in effect) any obvious no-op moves. */
1266 else if (noop_move_p (insn))
1268 PUT_CODE (insn, NOTE);
1269 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1270 NOTE_SOURCE_FILE (insn) = 0;
1274 /* Check if insn modifies the stack pointer. */
1275 if ( current_function_sp_is_unchanging
1276 && GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1277 note_stores (PATTERN (insn), notice_stack_pointer_modification);
1281 /* Mark those regs which are needed at the end of the function as live
1282 at the end of the last basic block. */
1283 static void
1284 mark_regs_live_at_end (set)
1285 regset set;
1287 int i;
1289 #ifdef EXIT_IGNORE_STACK
1290 if (! EXIT_IGNORE_STACK
1291 || (! FRAME_POINTER_REQUIRED
1292 && ! current_function_calls_alloca
1293 && flag_omit_frame_pointer)
1294 || current_function_sp_is_unchanging)
1295 #endif
1296 /* If exiting needs the right stack value,
1297 consider the stack pointer live at the end of the function. */
1298 SET_REGNO_REG_SET (set, STACK_POINTER_REGNUM);
1300 /* Mark the frame pointer is needed at the end of the function. If
1301 we end up eliminating it, it will be removed from the live list
1302 of each basic block by reload. */
1304 SET_REGNO_REG_SET (set, FRAME_POINTER_REGNUM);
1305 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
1306 /* If they are different, also mark the hard frame pointer as live */
1307 SET_REGNO_REG_SET (set, HARD_FRAME_POINTER_REGNUM);
1308 #endif
1311 /* Mark all global registers and all registers used by the epilogue
1312 as being live at the end of the function since they may be
1313 referenced by our caller. */
1314 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1315 if (global_regs[i]
1316 #ifdef EPILOGUE_USES
1317 || EPILOGUE_USES (i)
1318 #endif
1320 SET_REGNO_REG_SET (set, i);
1323 /* Determine which registers are live at the start of each
1324 basic block of the function whose first insn is F.
1325 NREGS is the number of registers used in F.
1326 We allocate the vector basic_block_live_at_start
1327 and the regsets that it points to, and fill them with the data.
1328 regset_size and regset_bytes are also set here. */
1330 static void
1331 life_analysis_1 (f, nregs)
1332 rtx f;
1333 int nregs;
1335 int first_pass;
1336 int changed;
1337 /* For each basic block, a bitmask of regs
1338 live on exit from the block. */
1339 regset *basic_block_live_at_end;
1340 /* For each basic block, a bitmask of regs
1341 live on entry to a successor-block of this block.
1342 If this does not match basic_block_live_at_end,
1343 that must be updated, and the block must be rescanned. */
1344 regset *basic_block_new_live_at_end;
1345 /* For each basic block, a bitmask of regs
1346 whose liveness at the end of the basic block
1347 can make a difference in which regs are live on entry to the block.
1348 These are the regs that are set within the basic block,
1349 possibly excluding those that are used after they are set. */
1350 regset *basic_block_significant;
1351 register int i;
1352 char save_regs_ever_live[FIRST_PSEUDO_REGISTER];
1354 struct obstack flow_obstack;
1356 gcc_obstack_init (&flow_obstack);
1358 max_regno = nregs;
1360 /* The post-reload life analysis must have (on a global basis) the same
1361 registers live as were computed by reload itself.
1363 Otherwise elimination offsets and such may be incorrect.
1365 Reload marks some registers as live even though they do not appear
1366 in the rtl. */
1367 if (reload_completed)
1368 bcopy (regs_ever_live, save_regs_ever_live, (sizeof (regs_ever_live)));
1370 bzero (regs_ever_live, sizeof regs_ever_live);
1372 /* Allocate and zero out many data structures
1373 that will record the data from lifetime analysis. */
1375 allocate_for_life_analysis ();
1377 reg_next_use = (rtx *) alloca (nregs * sizeof (rtx));
1378 bzero ((char *) reg_next_use, nregs * sizeof (rtx));
1380 /* Set up several regset-vectors used internally within this function.
1381 Their meanings are documented above, with their declarations. */
1383 basic_block_live_at_end
1384 = (regset *) alloca (n_basic_blocks * sizeof (regset));
1386 /* Don't use alloca since that leads to a crash rather than an error message
1387 if there isn't enough space.
1388 Don't use oballoc since we may need to allocate other things during
1389 this function on the temporary obstack. */
1390 init_regset_vector (basic_block_live_at_end, n_basic_blocks, &flow_obstack);
1392 basic_block_new_live_at_end
1393 = (regset *) alloca (n_basic_blocks * sizeof (regset));
1394 init_regset_vector (basic_block_new_live_at_end, n_basic_blocks,
1395 &flow_obstack);
1397 basic_block_significant
1398 = (regset *) alloca (n_basic_blocks * sizeof (regset));
1399 init_regset_vector (basic_block_significant, n_basic_blocks, &flow_obstack);
1401 /* Assume that the stack pointer is unchanging if alloca hasn't been used.
1402 This will be cleared by record_volatile_insns if it encounters an insn
1403 which modifies the stack pointer. */
1404 current_function_sp_is_unchanging = !current_function_calls_alloca;
1406 record_volatile_insns (f);
1408 if (n_basic_blocks > 0)
1410 mark_regs_live_at_end (basic_block_live_at_end[n_basic_blocks - 1]);
1411 COPY_REG_SET (basic_block_new_live_at_end[n_basic_blocks - 1],
1412 basic_block_live_at_end[n_basic_blocks - 1]);
1415 /* Propagate life info through the basic blocks
1416 around the graph of basic blocks.
1418 This is a relaxation process: each time a new register
1419 is live at the end of the basic block, we must scan the block
1420 to determine which registers are, as a consequence, live at the beginning
1421 of that block. These registers must then be marked live at the ends
1422 of all the blocks that can transfer control to that block.
1423 The process continues until it reaches a fixed point. */
1425 first_pass = 1;
1426 changed = 1;
1427 while (changed)
1429 changed = 0;
1430 for (i = n_basic_blocks - 1; i >= 0; i--)
1432 int consider = first_pass;
1433 int must_rescan = first_pass;
1434 register int j;
1436 if (!first_pass)
1438 /* Set CONSIDER if this block needs thinking about at all
1439 (that is, if the regs live now at the end of it
1440 are not the same as were live at the end of it when
1441 we last thought about it).
1442 Set must_rescan if it needs to be thought about
1443 instruction by instruction (that is, if any additional
1444 reg that is live at the end now but was not live there before
1445 is one of the significant regs of this basic block). */
1447 EXECUTE_IF_AND_COMPL_IN_REG_SET
1448 (basic_block_new_live_at_end[i],
1449 basic_block_live_at_end[i], 0, j,
1451 consider = 1;
1452 if (!reload_completed
1453 && REGNO_REG_SET_P (basic_block_significant[i], j))
1455 must_rescan = 1;
1456 goto done;
1459 done:
1460 if (! consider)
1461 continue;
1464 /* The live_at_start of this block may be changing,
1465 so another pass will be required after this one. */
1466 changed = 1;
1468 if (! must_rescan)
1470 /* No complete rescan needed;
1471 just record those variables newly known live at end
1472 as live at start as well. */
1473 IOR_AND_COMPL_REG_SET (basic_block_live_at_start[i],
1474 basic_block_new_live_at_end[i],
1475 basic_block_live_at_end[i]);
1477 IOR_AND_COMPL_REG_SET (basic_block_live_at_end[i],
1478 basic_block_new_live_at_end[i],
1479 basic_block_live_at_end[i]);
1481 else
1483 /* Update the basic_block_live_at_start
1484 by propagation backwards through the block. */
1485 COPY_REG_SET (basic_block_live_at_end[i],
1486 basic_block_new_live_at_end[i]);
1487 COPY_REG_SET (basic_block_live_at_start[i],
1488 basic_block_live_at_end[i]);
1489 propagate_block (basic_block_live_at_start[i],
1490 basic_block_head[i], basic_block_end[i], 0,
1491 first_pass ? basic_block_significant[i]
1492 : (regset) 0,
1497 register rtx jump, head;
1499 /* Update the basic_block_new_live_at_end's of the block
1500 that falls through into this one (if any). */
1501 head = basic_block_head[i];
1502 if (basic_block_drops_in[i])
1503 IOR_REG_SET (basic_block_new_live_at_end[i-1],
1504 basic_block_live_at_start[i]);
1506 /* Update the basic_block_new_live_at_end's of
1507 all the blocks that jump to this one. */
1508 if (GET_CODE (head) == CODE_LABEL)
1509 for (jump = LABEL_REFS (head);
1510 jump != head;
1511 jump = LABEL_NEXTREF (jump))
1513 register int from_block = BLOCK_NUM (CONTAINING_INSN (jump));
1514 IOR_REG_SET (basic_block_new_live_at_end[from_block],
1515 basic_block_live_at_start[i]);
1518 #ifdef USE_C_ALLOCA
1519 alloca (0);
1520 #endif
1522 first_pass = 0;
1525 /* The only pseudos that are live at the beginning of the function are
1526 those that were not set anywhere in the function. local-alloc doesn't
1527 know how to handle these correctly, so mark them as not local to any
1528 one basic block. */
1530 if (n_basic_blocks > 0)
1531 EXECUTE_IF_SET_IN_REG_SET (basic_block_live_at_start[0],
1532 FIRST_PSEUDO_REGISTER, i,
1534 REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL;
1537 /* Now the life information is accurate.
1538 Make one more pass over each basic block
1539 to delete dead stores, create autoincrement addressing
1540 and record how many times each register is used, is set, or dies.
1542 To save time, we operate directly in basic_block_live_at_end[i],
1543 thus destroying it (in fact, converting it into a copy of
1544 basic_block_live_at_start[i]). This is ok now because
1545 basic_block_live_at_end[i] is no longer used past this point. */
1547 max_scratch = 0;
1549 for (i = 0; i < n_basic_blocks; i++)
1551 propagate_block (basic_block_live_at_end[i],
1552 basic_block_head[i], basic_block_end[i], 1,
1553 (regset) 0, i);
1554 #ifdef USE_C_ALLOCA
1555 alloca (0);
1556 #endif
1559 #if 0
1560 /* Something live during a setjmp should not be put in a register
1561 on certain machines which restore regs from stack frames
1562 rather than from the jmpbuf.
1563 But we don't need to do this for the user's variables, since
1564 ANSI says only volatile variables need this. */
1565 #ifdef LONGJMP_RESTORE_FROM_STACK
1566 EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
1567 FIRST_PSEUDO_REGISTER, i,
1569 if (regno_reg_rtx[i] != 0
1570 && ! REG_USERVAR_P (regno_reg_rtx[i]))
1572 REG_LIVE_LENGTH (i) = -1;
1573 REG_BASIC_BLOCK (i) = -1;
1576 #endif
1577 #endif
1579 /* We have a problem with any pseudoreg that
1580 lives across the setjmp. ANSI says that if a
1581 user variable does not change in value
1582 between the setjmp and the longjmp, then the longjmp preserves it.
1583 This includes longjmp from a place where the pseudo appears dead.
1584 (In principle, the value still exists if it is in scope.)
1585 If the pseudo goes in a hard reg, some other value may occupy
1586 that hard reg where this pseudo is dead, thus clobbering the pseudo.
1587 Conclusion: such a pseudo must not go in a hard reg. */
1588 EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
1589 FIRST_PSEUDO_REGISTER, i,
1591 if (regno_reg_rtx[i] != 0)
1593 REG_LIVE_LENGTH (i) = -1;
1594 REG_BASIC_BLOCK (i) = -1;
1598 /* Restore regs_ever_live that was provided by reload. */
1599 if (reload_completed)
1600 bcopy (save_regs_ever_live, regs_ever_live, (sizeof (regs_ever_live)));
1602 free_regset_vector (basic_block_live_at_end, n_basic_blocks);
1603 free_regset_vector (basic_block_new_live_at_end, n_basic_blocks);
1604 free_regset_vector (basic_block_significant, n_basic_blocks);
1605 basic_block_live_at_end = (regset *)0;
1606 basic_block_new_live_at_end = (regset *)0;
1607 basic_block_significant = (regset *)0;
1609 obstack_free (&flow_obstack, NULL_PTR);
1612 /* Subroutines of life analysis. */
1614 /* Allocate the permanent data structures that represent the results
1615 of life analysis. Not static since used also for stupid life analysis. */
1617 void
1618 allocate_for_life_analysis ()
1620 register int i;
1622 /* Recalculate the register space, in case it has grown. Old style
1623 vector oriented regsets would set regset_{size,bytes} here also. */
1624 allocate_reg_info (max_regno, FALSE, FALSE);
1626 /* Because both reg_scan and flow_analysis want to set up the REG_N_SETS
1627 information, explicitly reset it here. The allocation should have
1628 already happened on the previous reg_scan pass. Make sure in case
1629 some more registers were allocated. */
1630 for (i = 0; i < max_regno; i++)
1631 REG_N_SETS (i) = 0;
1633 basic_block_live_at_start
1634 = (regset *) oballoc (n_basic_blocks * sizeof (regset));
1635 init_regset_vector (basic_block_live_at_start, n_basic_blocks,
1636 function_obstack);
1638 regs_live_at_setjmp = OBSTACK_ALLOC_REG_SET (function_obstack);
1639 CLEAR_REG_SET (regs_live_at_setjmp);
1642 /* Make each element of VECTOR point at a regset. The vector has
1643 NELTS elements, and space is allocated from the ALLOC_OBSTACK
1644 obstack. */
1646 static void
1647 init_regset_vector (vector, nelts, alloc_obstack)
1648 regset *vector;
1649 int nelts;
1650 struct obstack *alloc_obstack;
1652 register int i;
1654 for (i = 0; i < nelts; i++)
1656 vector[i] = OBSTACK_ALLOC_REG_SET (alloc_obstack);
1657 CLEAR_REG_SET (vector[i]);
1661 /* Release any additional space allocated for each element of VECTOR point
1662 other than the regset header itself. The vector has NELTS elements. */
1664 void
1665 free_regset_vector (vector, nelts)
1666 regset *vector;
1667 int nelts;
1669 register int i;
1671 for (i = 0; i < nelts; i++)
1672 FREE_REG_SET (vector[i]);
1675 /* Compute the registers live at the beginning of a basic block
1676 from those live at the end.
1678 When called, OLD contains those live at the end.
1679 On return, it contains those live at the beginning.
1680 FIRST and LAST are the first and last insns of the basic block.
1682 FINAL is nonzero if we are doing the final pass which is not
1683 for computing the life info (since that has already been done)
1684 but for acting on it. On this pass, we delete dead stores,
1685 set up the logical links and dead-variables lists of instructions,
1686 and merge instructions for autoincrement and autodecrement addresses.
1688 SIGNIFICANT is nonzero only the first time for each basic block.
1689 If it is nonzero, it points to a regset in which we store
1690 a 1 for each register that is set within the block.
1692 BNUM is the number of the basic block. */
1694 static void
1695 propagate_block (old, first, last, final, significant, bnum)
1696 register regset old;
1697 rtx first;
1698 rtx last;
1699 int final;
1700 regset significant;
1701 int bnum;
1703 register rtx insn;
1704 rtx prev;
1705 regset live;
1706 regset dead;
1708 /* The loop depth may change in the middle of a basic block. Since we
1709 scan from end to beginning, we start with the depth at the end of the
1710 current basic block, and adjust as we pass ends and starts of loops. */
1711 loop_depth = basic_block_loop_depth[bnum];
1713 dead = ALLOCA_REG_SET ();
1714 live = ALLOCA_REG_SET ();
1716 cc0_live = 0;
1717 last_mem_set = 0;
1719 /* Include any notes at the end of the block in the scan.
1720 This is in case the block ends with a call to setjmp. */
1722 while (NEXT_INSN (last) != 0 && GET_CODE (NEXT_INSN (last)) == NOTE)
1724 /* Look for loop boundaries, we are going forward here. */
1725 last = NEXT_INSN (last);
1726 if (NOTE_LINE_NUMBER (last) == NOTE_INSN_LOOP_BEG)
1727 loop_depth++;
1728 else if (NOTE_LINE_NUMBER (last) == NOTE_INSN_LOOP_END)
1729 loop_depth--;
1732 if (final)
1734 register int i;
1736 num_scratch = 0;
1738 /* Process the regs live at the end of the block.
1739 Mark them as not local to any one basic block. */
1740 EXECUTE_IF_SET_IN_REG_SET (old, 0, i,
1742 REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL;
1746 /* Scan the block an insn at a time from end to beginning. */
1748 for (insn = last; ; insn = prev)
1750 prev = PREV_INSN (insn);
1752 if (GET_CODE (insn) == NOTE)
1754 /* Look for loop boundaries, remembering that we are going
1755 backwards. */
1756 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END)
1757 loop_depth++;
1758 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
1759 loop_depth--;
1761 /* If we have LOOP_DEPTH == 0, there has been a bookkeeping error.
1762 Abort now rather than setting register status incorrectly. */
1763 if (loop_depth == 0)
1764 abort ();
1766 /* If this is a call to `setjmp' et al,
1767 warn if any non-volatile datum is live. */
1769 if (final && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
1770 IOR_REG_SET (regs_live_at_setjmp, old);
1773 /* Update the life-status of regs for this insn.
1774 First DEAD gets which regs are set in this insn
1775 then LIVE gets which regs are used in this insn.
1776 Then the regs live before the insn
1777 are those live after, with DEAD regs turned off,
1778 and then LIVE regs turned on. */
1780 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1782 register int i;
1783 rtx note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
1784 int insn_is_dead
1785 = (insn_dead_p (PATTERN (insn), old, 0)
1786 /* Don't delete something that refers to volatile storage! */
1787 && ! INSN_VOLATILE (insn));
1788 int libcall_is_dead
1789 = (insn_is_dead && note != 0
1790 && libcall_dead_p (PATTERN (insn), old, note, insn));
1792 /* If an instruction consists of just dead store(s) on final pass,
1793 "delete" it by turning it into a NOTE of type NOTE_INSN_DELETED.
1794 We could really delete it with delete_insn, but that
1795 can cause trouble for first or last insn in a basic block. */
1796 if (!reload_completed && final && insn_is_dead)
1798 PUT_CODE (insn, NOTE);
1799 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1800 NOTE_SOURCE_FILE (insn) = 0;
1802 /* CC0 is now known to be dead. Either this insn used it,
1803 in which case it doesn't anymore, or clobbered it,
1804 so the next insn can't use it. */
1805 cc0_live = 0;
1807 /* If this insn is copying the return value from a library call,
1808 delete the entire library call. */
1809 if (libcall_is_dead)
1811 rtx first = XEXP (note, 0);
1812 rtx p = insn;
1813 while (INSN_DELETED_P (first))
1814 first = NEXT_INSN (first);
1815 while (p != first)
1817 p = PREV_INSN (p);
1818 PUT_CODE (p, NOTE);
1819 NOTE_LINE_NUMBER (p) = NOTE_INSN_DELETED;
1820 NOTE_SOURCE_FILE (p) = 0;
1823 goto flushed;
1826 CLEAR_REG_SET (dead);
1827 CLEAR_REG_SET (live);
1829 /* See if this is an increment or decrement that can be
1830 merged into a following memory address. */
1831 #ifdef AUTO_INC_DEC
1833 register rtx x = single_set (insn);
1835 /* Does this instruction increment or decrement a register? */
1836 if (!reload_completed
1837 && final && x != 0
1838 && GET_CODE (SET_DEST (x)) == REG
1839 && (GET_CODE (SET_SRC (x)) == PLUS
1840 || GET_CODE (SET_SRC (x)) == MINUS)
1841 && XEXP (SET_SRC (x), 0) == SET_DEST (x)
1842 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
1843 /* Ok, look for a following memory ref we can combine with.
1844 If one is found, change the memory ref to a PRE_INC
1845 or PRE_DEC, cancel this insn, and return 1.
1846 Return 0 if nothing has been done. */
1847 && try_pre_increment_1 (insn))
1848 goto flushed;
1850 #endif /* AUTO_INC_DEC */
1852 /* If this is not the final pass, and this insn is copying the
1853 value of a library call and it's dead, don't scan the
1854 insns that perform the library call, so that the call's
1855 arguments are not marked live. */
1856 if (libcall_is_dead)
1858 /* Mark the dest reg as `significant'. */
1859 mark_set_regs (old, dead, PATTERN (insn), NULL_RTX, significant);
1861 insn = XEXP (note, 0);
1862 prev = PREV_INSN (insn);
1864 else if (GET_CODE (PATTERN (insn)) == SET
1865 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
1866 && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
1867 && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx
1868 && GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == CONST_INT)
1869 /* We have an insn to pop a constant amount off the stack.
1870 (Such insns use PLUS regardless of the direction of the stack,
1871 and any insn to adjust the stack by a constant is always a pop.)
1872 These insns, if not dead stores, have no effect on life. */
1874 else
1876 /* Any regs live at the time of a call instruction
1877 must not go in a register clobbered by calls.
1878 Find all regs now live and record this for them. */
1880 if (GET_CODE (insn) == CALL_INSN && final)
1881 EXECUTE_IF_SET_IN_REG_SET (old, 0, i,
1883 REG_N_CALLS_CROSSED (i)++;
1886 /* LIVE gets the regs used in INSN;
1887 DEAD gets those set by it. Dead insns don't make anything
1888 live. */
1890 mark_set_regs (old, dead, PATTERN (insn),
1891 final ? insn : NULL_RTX, significant);
1893 /* If an insn doesn't use CC0, it becomes dead since we
1894 assume that every insn clobbers it. So show it dead here;
1895 mark_used_regs will set it live if it is referenced. */
1896 cc0_live = 0;
1898 if (! insn_is_dead)
1899 mark_used_regs (old, live, PATTERN (insn), final, insn);
1901 /* Sometimes we may have inserted something before INSN (such as
1902 a move) when we make an auto-inc. So ensure we will scan
1903 those insns. */
1904 #ifdef AUTO_INC_DEC
1905 prev = PREV_INSN (insn);
1906 #endif
1908 if (! insn_is_dead && GET_CODE (insn) == CALL_INSN)
1910 register int i;
1912 rtx note;
1914 for (note = CALL_INSN_FUNCTION_USAGE (insn);
1915 note;
1916 note = XEXP (note, 1))
1917 if (GET_CODE (XEXP (note, 0)) == USE)
1918 mark_used_regs (old, live, SET_DEST (XEXP (note, 0)),
1919 final, insn);
1921 /* Each call clobbers all call-clobbered regs that are not
1922 global or fixed. Note that the function-value reg is a
1923 call-clobbered reg, and mark_set_regs has already had
1924 a chance to handle it. */
1926 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1927 if (call_used_regs[i] && ! global_regs[i]
1928 && ! fixed_regs[i])
1929 SET_REGNO_REG_SET (dead, i);
1931 /* The stack ptr is used (honorarily) by a CALL insn. */
1932 SET_REGNO_REG_SET (live, STACK_POINTER_REGNUM);
1934 /* Calls may also reference any of the global registers,
1935 so they are made live. */
1936 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1937 if (global_regs[i])
1938 mark_used_regs (old, live,
1939 gen_rtx_REG (reg_raw_mode[i], i),
1940 final, insn);
1942 /* Calls also clobber memory. */
1943 last_mem_set = 0;
1946 /* Update OLD for the registers used or set. */
1947 AND_COMPL_REG_SET (old, dead);
1948 IOR_REG_SET (old, live);
1952 /* On final pass, update counts of how many insns each reg is live
1953 at. */
1954 if (final)
1955 EXECUTE_IF_SET_IN_REG_SET (old, 0, i,
1956 { REG_LIVE_LENGTH (i)++; });
1958 flushed: ;
1959 if (insn == first)
1960 break;
1963 FREE_REG_SET (dead);
1964 FREE_REG_SET (live);
1966 if (num_scratch > max_scratch)
1967 max_scratch = num_scratch;
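/* To restate the loop above: for each insn, DEAD collects the regs the insn
   sets and LIVE the regs it uses, and the update applied to OLD is

       OLD = (OLD & ~DEAD) | LIVE

   For example, given (set (reg 101) (plus (reg 100) (const_int 1))) with
   reg 101 live afterward, reg 101 drops out of OLD and reg 100 is added, so
   only reg 100 is live before the insn.  (Register numbers here are
   arbitrary.)  */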
1970 /* Return 1 if X (the body of an insn, or part of it) is just dead stores
1971 (SET expressions whose destinations are registers dead after the insn).
1972 NEEDED is the regset that says which regs are alive after the insn.
1974 Unless CALL_OK is non-zero, an insn is needed if it contains a CALL. */
1976 static int
1977 insn_dead_p (x, needed, call_ok)
1978 rtx x;
1979 regset needed;
1980 int call_ok;
1982 enum rtx_code code = GET_CODE (x);
1984 /* If setting something that's a reg or part of one,
1985 see if that register's altered value will be live. */
1987 if (code == SET)
1989 rtx r = SET_DEST (x);
1991 /* A SET that is a subroutine call cannot be dead. */
1992 if (! call_ok && GET_CODE (SET_SRC (x)) == CALL)
1993 return 0;
1995 #ifdef HAVE_cc0
1996 if (GET_CODE (r) == CC0)
1997 return ! cc0_live;
1998 #endif
2000 if (GET_CODE (r) == MEM && last_mem_set && ! MEM_VOLATILE_P (r)
2001 && rtx_equal_p (r, last_mem_set))
2002 return 1;
2004 while (GET_CODE (r) == SUBREG || GET_CODE (r) == STRICT_LOW_PART
2005 || GET_CODE (r) == ZERO_EXTRACT)
2006 r = SUBREG_REG (r);
2008 if (GET_CODE (r) == REG)
2010 int regno = REGNO (r);
2012 /* Don't delete insns to set global regs. */
2013 if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
2014 /* Make sure insns to set frame pointer aren't deleted. */
2015 || regno == FRAME_POINTER_REGNUM
2016 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2017 || regno == HARD_FRAME_POINTER_REGNUM
2018 #endif
2019 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
2020 /* Make sure insns to set arg pointer are never deleted
2021 (if the arg pointer isn't fixed, there will be a USE for
2022 it, so we can treat it normally). */
2023 || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
2024 #endif
2025 || REGNO_REG_SET_P (needed, regno))
2026 return 0;
2028 /* If this is a hard register, verify that subsequent words are
2029 not needed. */
2030 if (regno < FIRST_PSEUDO_REGISTER)
2032 int n = HARD_REGNO_NREGS (regno, GET_MODE (r));
2034 while (--n > 0)
2035 if (REGNO_REG_SET_P (needed, regno+n))
2036 return 0;
2039 return 1;
2043 /* If performing several activities,
2044 insn is dead if each activity is individually dead.
2045 Also, CLOBBERs and USEs can be ignored; a CLOBBER or USE
2046 that's inside a PARALLEL doesn't make the insn worth keeping. */
2047 else if (code == PARALLEL)
2049 int i = XVECLEN (x, 0);
2051 for (i--; i >= 0; i--)
2052 if (GET_CODE (XVECEXP (x, 0, i)) != CLOBBER
2053 && GET_CODE (XVECEXP (x, 0, i)) != USE
2054 && ! insn_dead_p (XVECEXP (x, 0, i), needed, call_ok))
2055 return 0;
2057 return 1;
2060 /* A CLOBBER of a pseudo-register that is dead serves no purpose. That
2061 is not necessarily true for hard registers. */
2062 else if (code == CLOBBER && GET_CODE (XEXP (x, 0)) == REG
2063 && REGNO (XEXP (x, 0)) >= FIRST_PSEUDO_REGISTER
2064 && ! REGNO_REG_SET_P (needed, REGNO (XEXP (x, 0))))
2065 return 1;
2067 /* We do not check other CLOBBER or USE here. An insn consisting of just
2068 a CLOBBER or just a USE should not be deleted. */
2069 return 0;
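/* For example, (set (reg 117) (const_int 0)) is dead when reg 117 has no bit
   set in NEEDED, while a store such as (set (mem ...) ...) is only considered
   dead if it exactly repeats the address recorded in last_mem_set and the MEM
   is not volatile.  (Reg 117 is arbitrary.)  */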
2072 /* If X is the pattern of the last insn in a libcall, and assuming X is dead,
2073 return 1 if the entire library call is dead.
2074 This is true if X copies a register (hard or pseudo)
2075 and if the hard return reg of the call insn is dead.
2076 (The caller should have tested the destination of X already for death.)
2078 If this insn doesn't just copy a register, then we don't
2079 have an ordinary libcall. In that case, cse could not have
2080 managed to substitute the source for the dest later on,
2081 so we can assume the libcall is dead.
2083 NEEDED is the bit vector of pseudoregs live before this insn.
2084 NOTE is the REG_RETVAL note of the insn. INSN is the insn itself. */
2086 static int
2087 libcall_dead_p (x, needed, note, insn)
2088 rtx x;
2089 regset needed;
2090 rtx note;
2091 rtx insn;
2093 register RTX_CODE code = GET_CODE (x);
2095 if (code == SET)
2097 register rtx r = SET_SRC (x);
2098 if (GET_CODE (r) == REG)
2100 rtx call = XEXP (note, 0);
2101 register int i;
2103 /* Find the call insn. */
2104 while (call != insn && GET_CODE (call) != CALL_INSN)
2105 call = NEXT_INSN (call);
2107 /* If there is none, do nothing special,
2108 since ordinary death handling can understand these insns. */
2109 if (call == insn)
2110 return 0;
2112 /* See if the hard reg holding the value is dead.
2113 If this is a PARALLEL, find the call within it. */
2114 call = PATTERN (call);
2115 if (GET_CODE (call) == PARALLEL)
2117 for (i = XVECLEN (call, 0) - 1; i >= 0; i--)
2118 if (GET_CODE (XVECEXP (call, 0, i)) == SET
2119 && GET_CODE (SET_SRC (XVECEXP (call, 0, i))) == CALL)
2120 break;
2122 /* This may be a library call that is returning a value
2123 via invisible pointer. Do nothing special, since
2124 ordinary death handling can understand these insns. */
2125 if (i < 0)
2126 return 0;
2128 call = XVECEXP (call, 0, i);
2131 return insn_dead_p (call, needed, 1);
2134 return 1;
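/* A dead libcall typically looks something like

       (insn ... (set (reg:SI 4) ...))                  ; REG_LIBCALL note
       (call_insn ... (set (reg:SI 0) (call ...)))
       (insn ... (set (reg:SI 110) (reg:SI 0)))         ; REG_RETVAL note

   If the copy into reg 110 is dead, the test above asks whether the hard
   return register (reg 0 in this sketch) is dead as well; if so the whole
   sequence can be removed.  Register numbers and the choice of return
   register are illustrative only.  */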
2137 /* Return 1 if register REGNO was used before it was set, i.e. if it is
2138 live at function entry. Don't count global register variables, variables
2139 in registers that can be used for function arg passing, or variables in
2140 fixed hard registers. */
2143 regno_uninitialized (regno)
2144 int regno;
2146 if (n_basic_blocks == 0
2147 || (regno < FIRST_PSEUDO_REGISTER
2148 && (global_regs[regno]
2149 || fixed_regs[regno]
2150 || FUNCTION_ARG_REGNO_P (regno))))
2151 return 0;
2153 return REGNO_REG_SET_P (basic_block_live_at_start[0], regno);
2156 /* 1 if register REGNO was alive at a place where `setjmp' was called
2157 and was set more than once or is an argument.
2158 Such regs may be clobbered by `longjmp'. */
2161 regno_clobbered_at_setjmp (regno)
2162 int regno;
2164 if (n_basic_blocks == 0)
2165 return 0;
2167 return ((REG_N_SETS (regno) > 1
2168 || REGNO_REG_SET_P (basic_block_live_at_start[0], regno))
2169 && REGNO_REG_SET_P (regs_live_at_setjmp, regno));
2172 /* Process the registers that are set within X.
2173 Their bits are set to 1 in the regset DEAD,
2174 because they are dead prior to this insn.
2176 If INSN is nonzero, it is the insn being processed
2177 and the fact that it is nonzero implies this is the FINAL pass
2178 in propagate_block. In this case, various info about register
2179 usage is stored, and LOG_LINKS fields of insns are set up. */
2181 static void
2182 mark_set_regs (needed, dead, x, insn, significant)
2183 regset needed;
2184 regset dead;
2185 rtx x;
2186 rtx insn;
2187 regset significant;
2189 register RTX_CODE code = GET_CODE (x);
2191 if (code == SET || code == CLOBBER)
2192 mark_set_1 (needed, dead, x, insn, significant);
2193 else if (code == PARALLEL)
2195 register int i;
2196 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
2198 code = GET_CODE (XVECEXP (x, 0, i));
2199 if (code == SET || code == CLOBBER)
2200 mark_set_1 (needed, dead, XVECEXP (x, 0, i), insn, significant);
2205 /* Process a single SET rtx, X. */
2207 static void
2208 mark_set_1 (needed, dead, x, insn, significant)
2209 regset needed;
2210 regset dead;
2211 rtx x;
2212 rtx insn;
2213 regset significant;
2215 register int regno;
2216 register rtx reg = SET_DEST (x);
2218 /* Some targets place small structures in registers for
2219 return values of functions. We have to detect this
2220 case specially here to get correct flow information. */
2221 if (GET_CODE (reg) == PARALLEL
2222 && GET_MODE (reg) == BLKmode)
2224 register int i;
2226 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
2227 mark_set_1 (needed, dead, XVECEXP (reg, 0, i), insn, significant);
2228 return;
2231 /* Modifying just one hardware register of a multi-reg value
2232 or just a byte field of a register
2233 does not mean the value from before this insn is now dead.
2234 But it does mean liveness of that register at the end of the block
2235 is significant.
2237 Within mark_set_1, however, we treat it as if the register is
2238 indeed modified. mark_used_regs will, however, also treat this
2239 register as being used. Thus, we treat these insns as setting a
2240 new value for the register as a function of its old value. This
2241 causes LOG_LINKS to be made appropriately and this will help combine. */
2243 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
2244 || GET_CODE (reg) == SIGN_EXTRACT
2245 || GET_CODE (reg) == STRICT_LOW_PART)
2246 reg = XEXP (reg, 0);
2248 /* If we are writing into memory or into a register mentioned in the
2249 address of the last thing stored into memory, show we don't know
2250 what the last store was. If we are writing memory, save the address
2251 unless it is volatile. */
2252 if (GET_CODE (reg) == MEM
2253 || (GET_CODE (reg) == REG
2254 && last_mem_set != 0 && reg_overlap_mentioned_p (reg, last_mem_set)))
2255 last_mem_set = 0;
2257 if (GET_CODE (reg) == MEM && ! side_effects_p (reg)
2258 /* There are no REG_INC notes for SP, so we can't assume we'll see
2259 everything that invalidates it. To be safe, don't eliminate any
2260 stores through SP; none of them should be redundant anyway. */
2261 && ! reg_mentioned_p (stack_pointer_rtx, reg))
2262 last_mem_set = reg;
2264 if (GET_CODE (reg) == REG
2265 && (regno = REGNO (reg), regno != FRAME_POINTER_REGNUM)
2266 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2267 && regno != HARD_FRAME_POINTER_REGNUM
2268 #endif
2269 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
2270 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
2271 #endif
2272 && ! (regno < FIRST_PSEUDO_REGISTER && global_regs[regno]))
2273 /* && regno != STACK_POINTER_REGNUM) -- let's try without this. */
2275 int some_needed = REGNO_REG_SET_P (needed, regno);
2276 int some_not_needed = ! some_needed;
2278 /* Mark it as a significant register for this basic block. */
2279 if (significant)
2280 SET_REGNO_REG_SET (significant, regno);
2282 /* Mark it as dead before this insn. */
2283 SET_REGNO_REG_SET (dead, regno);
2285 /* A hard reg in a wide mode may really be multiple registers.
2286 If so, mark all of them just like the first. */
2287 if (regno < FIRST_PSEUDO_REGISTER)
2289 int n;
2291 /* Nothing below is needed for the stack pointer; get out asap.
2292 Eg, log links aren't needed, since combine won't use them. */
2293 if (regno == STACK_POINTER_REGNUM)
2294 return;
2296 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
2297 while (--n > 0)
2299 int regno_n = regno + n;
2300 int needed_regno = REGNO_REG_SET_P (needed, regno_n);
2301 if (significant)
2302 SET_REGNO_REG_SET (significant, regno_n);
2304 SET_REGNO_REG_SET (dead, regno_n);
2305 some_needed |= needed_regno;
2306 some_not_needed |= ! needed_regno;
2309 /* Additional data to record if this is the final pass. */
2310 if (insn)
2312 register rtx y = reg_next_use[regno];
2313 register int blocknum = BLOCK_NUM (insn);
2315 /* If this is a hard reg, record this function uses the reg. */
2317 if (regno < FIRST_PSEUDO_REGISTER)
2319 register int i;
2320 int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (reg));
2322 for (i = regno; i < endregno; i++)
2324 /* The next use is no longer "next", since a store
2325 intervenes. */
2326 reg_next_use[i] = 0;
2328 regs_ever_live[i] = 1;
2329 REG_N_SETS (i)++;
2332 else
2334 /* The next use is no longer "next", since a store
2335 intervenes. */
2336 reg_next_use[regno] = 0;
2338 /* Keep track of which basic blocks each reg appears in. */
2340 if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
2341 REG_BASIC_BLOCK (regno) = blocknum;
2342 else if (REG_BASIC_BLOCK (regno) != blocknum)
2343 REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
2345 /* Count (weighted) references, stores, etc. This counts a
2346 register twice if it is modified, but that is correct. */
2347 REG_N_SETS (regno)++;
2349 REG_N_REFS (regno) += loop_depth;
2351 /* The insns where a reg is live are normally counted
2352 elsewhere, but we want the count to include the insn
2353 where the reg is set, and the normal counting mechanism
2354 would not count it. */
2355 REG_LIVE_LENGTH (regno)++;
2358 if (! some_not_needed)
2360 /* Make a logical link from the next following insn
2361 that uses this register, back to this insn.
2362 The following insns have already been processed.
2364 We don't build a LOG_LINK for hard registers contained
2365 in ASM_OPERANDs. If these registers get replaced,
2366 we might wind up changing the semantics of the insn,
2367 even if reload can make what appear to be valid assignments
2368 later. */
2369 if (y && (BLOCK_NUM (y) == blocknum)
2370 && (regno >= FIRST_PSEUDO_REGISTER
2371 || asm_noperands (PATTERN (y)) < 0))
2372 LOG_LINKS (y)
2373 = gen_rtx_INSN_LIST (VOIDmode, insn, LOG_LINKS (y));
2375 else if (! some_needed)
2377 /* Note that dead stores have already been deleted when possible.
2378 If we get here, we have found a dead store that cannot
2379 be eliminated (because the same insn does something useful).
2380 Indicate this by marking the reg being set as dying here. */
2381 REG_NOTES (insn)
2382 = gen_rtx_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
2383 REG_N_DEATHS (REGNO (reg))++;
2385 else
2387 /* This is a case where we have a multi-word hard register
2388 and some, but not all, of the words of the register are
2389 needed in subsequent insns. Write REG_UNUSED notes
2390 for those parts that were not needed. This case should
2391 be rare. */
2393 int i;
2395 for (i = HARD_REGNO_NREGS (regno, GET_MODE (reg)) - 1;
2396 i >= 0; i--)
2397 if (!REGNO_REG_SET_P (needed, regno + i))
2398 REG_NOTES (insn)
2399 = gen_rtx_EXPR_LIST (REG_UNUSED,
2400 gen_rtx_REG (reg_raw_mode[regno + i],
2401 regno + i),
2402 REG_NOTES (insn));
2406 else if (GET_CODE (reg) == REG)
2407 reg_next_use[regno] = 0;
2409 /* If this is the last pass and this is a SCRATCH, show it will be dying
2410 here and count it. */
2411 else if (GET_CODE (reg) == SCRATCH && insn != 0)
2413 REG_NOTES (insn)
2414 = gen_rtx_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
2415 num_scratch++;
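/* Example of the REG_UNUSED case handled above: in a PARALLEL that sets both
   reg 120 and reg 121, where only reg 121 is needed afterwards, the insn
   itself is not dead, so mark_set_1 attaches
   (expr_list:REG_UNUSED (reg 120) ...) to the insn's REG_NOTES and bumps
   REG_N_DEATHS (120).  The register numbers are made up.  */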
2419 #ifdef AUTO_INC_DEC
2421 /* X is a MEM found in INSN. See if we can convert it into an auto-increment
2422 reference. */
2424 static void
2425 find_auto_inc (needed, x, insn)
2426 regset needed;
2427 rtx x;
2428 rtx insn;
2430 rtx addr = XEXP (x, 0);
2431 HOST_WIDE_INT offset = 0;
2432 rtx set;
2434 /* Here we detect use of an index register which might be good for
2435 postincrement, postdecrement, preincrement, or predecrement. */
2437 if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
2438 offset = INTVAL (XEXP (addr, 1)), addr = XEXP (addr, 0);
2440 if (GET_CODE (addr) == REG)
2442 register rtx y;
2443 register int size = GET_MODE_SIZE (GET_MODE (x));
2444 rtx use;
2445 rtx incr;
2446 int regno = REGNO (addr);
2448 /* Is the next use an increment that might make auto-increment? */
2449 if ((incr = reg_next_use[regno]) != 0
2450 && (set = single_set (incr)) != 0
2451 && GET_CODE (set) == SET
2452 && BLOCK_NUM (incr) == BLOCK_NUM (insn)
2453 /* Can't add side effects to jumps; if reg is spilled and
2454 reloaded, there's no way to store back the altered value. */
2455 && GET_CODE (insn) != JUMP_INSN
2456 && (y = SET_SRC (set), GET_CODE (y) == PLUS)
2457 && XEXP (y, 0) == addr
2458 && GET_CODE (XEXP (y, 1)) == CONST_INT
2459 && (0
2460 #ifdef HAVE_POST_INCREMENT
2461 || (INTVAL (XEXP (y, 1)) == size && offset == 0)
2462 #endif
2463 #ifdef HAVE_POST_DECREMENT
2464 || (INTVAL (XEXP (y, 1)) == - size && offset == 0)
2465 #endif
2466 #ifdef HAVE_PRE_INCREMENT
2467 || (INTVAL (XEXP (y, 1)) == size && offset == size)
2468 #endif
2469 #ifdef HAVE_PRE_DECREMENT
2470 || (INTVAL (XEXP (y, 1)) == - size && offset == - size)
2471 #endif
2473 /* Make sure this reg appears only once in this insn. */
2474 && (use = find_use_as_address (PATTERN (insn), addr, offset),
2475 use != 0 && use != (rtx) 1))
2477 rtx q = SET_DEST (set);
2478 enum rtx_code inc_code = (INTVAL (XEXP (y, 1)) == size
2479 ? (offset ? PRE_INC : POST_INC)
2480 : (offset ? PRE_DEC : POST_DEC));
2482 if (dead_or_set_p (incr, addr))
2484 /* This is the simple case. Try to make the auto-inc. If
2485 we can't, we are done. Otherwise, we will do any
2486 needed updates below. */
2487 if (! validate_change (insn, &XEXP (x, 0),
2488 gen_rtx_fmt_e (inc_code, Pmode, addr),
2489 0))
2490 return;
2492 else if (GET_CODE (q) == REG
2493 /* PREV_INSN used here to check the semi-open interval
2494 [insn,incr). */
2495 && ! reg_used_between_p (q, PREV_INSN (insn), incr)
2496 /* We must also check for sets of q as q may be
2497 a call clobbered hard register and there may
2498 be a call between PREV_INSN (insn) and incr. */
2499 && ! reg_set_between_p (q, PREV_INSN (insn), incr))
2501 /* We have *p followed sometime later by q = p+size.
2502 Both p and q must be live afterward,
2503 and q is not used between INSN and its assignment.
2504 Change it to q = p, ...*q..., q = q+size.
2505 Then fall into the usual case. */
2506 rtx insns, temp;
2508 start_sequence ();
2509 emit_move_insn (q, addr);
2510 insns = get_insns ();
2511 end_sequence ();
2513 /* If anything in INSNS has a UID that doesn't fit within the
2514 extra space we allocated earlier, we can't make this auto-inc.
2515 This should never happen. */
2516 for (temp = insns; temp; temp = NEXT_INSN (temp))
2518 if (INSN_UID (temp) > max_uid_for_flow)
2519 return;
2520 BLOCK_NUM (temp) = BLOCK_NUM (insn);
2523 /* If we can't make the auto-inc, or can't make the
2524 replacement into Y, exit. There's no point in making
2525 the change below if we can't do the auto-inc and doing
2526 so is not correct in the pre-inc case. */
2528 validate_change (insn, &XEXP (x, 0),
2529 gen_rtx_fmt_e (inc_code, Pmode, q),
2530 1);
2531 validate_change (incr, &XEXP (y, 0), q, 1);
2532 if (! apply_change_group ())
2533 return;
2535 /* We now know we'll be doing this change, so emit the
2536 new insn(s) and do the updates. */
2537 emit_insns_before (insns, insn);
2539 if (basic_block_head[BLOCK_NUM (insn)] == insn)
2540 basic_block_head[BLOCK_NUM (insn)] = insns;
2542 /* INCR will become a NOTE and INSN won't contain a
2543 use of ADDR. If a use of ADDR was just placed in
2544 the insn before INSN, make that the next use.
2545 Otherwise, invalidate it. */
2546 if (GET_CODE (PREV_INSN (insn)) == INSN
2547 && GET_CODE (PATTERN (PREV_INSN (insn))) == SET
2548 && SET_SRC (PATTERN (PREV_INSN (insn))) == addr)
2549 reg_next_use[regno] = PREV_INSN (insn);
2550 else
2551 reg_next_use[regno] = 0;
2553 addr = q;
2554 regno = REGNO (q);
2556 /* REGNO is now used in INCR which is below INSN, but
2557 it previously wasn't live here. If we don't mark
2558 it as needed, we'll put a REG_DEAD note for it
2559 on this insn, which is incorrect. */
2560 SET_REGNO_REG_SET (needed, regno);
2562 /* If there are any calls between INSN and INCR, show
2563 that REGNO now crosses them. */
2564 for (temp = insn; temp != incr; temp = NEXT_INSN (temp))
2565 if (GET_CODE (temp) == CALL_INSN)
2566 REG_N_CALLS_CROSSED (regno)++;
2568 else
2569 return;
2571 /* If we haven't returned, it means we were able to make the
2572 auto-inc, so update the status. First, record that this insn
2573 has an implicit side effect. */
2575 REG_NOTES (insn)
2576 = gen_rtx_EXPR_LIST (REG_INC, addr, REG_NOTES (insn));
2578 /* Modify the old increment-insn to simply copy
2579 the already-incremented value of our register. */
2580 if (! validate_change (incr, &SET_SRC (set), addr, 0))
2581 abort ();
2583 /* If that makes it a no-op (copying the register into itself) delete
2584 it so it won't appear to be a "use" and a "set" of this
2585 register. */
2586 if (SET_DEST (set) == addr)
2588 PUT_CODE (incr, NOTE);
2589 NOTE_LINE_NUMBER (incr) = NOTE_INSN_DELETED;
2590 NOTE_SOURCE_FILE (incr) = 0;
2593 if (regno >= FIRST_PSEUDO_REGISTER)
2595 /* Count an extra reference to the reg. When a reg is
2596 incremented, spilling it is worse, so we want to make
2597 that less likely. */
2598 REG_N_REFS (regno) += loop_depth;
2600 /* Count the increment as a setting of the register,
2601 even though it isn't a SET in rtl. */
2602 REG_N_SETS (regno)++;
2607 #endif /* AUTO_INC_DEC */
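/* To summarize the transformation made by find_auto_inc: a use such as

       (set ... (mem (reg 130)) ...)                      in INSN
       (set (reg 130) (plus (reg 130) (const_int 4)))     some insns later

   becomes a single post-increment address, (mem (post_inc (reg 130))), and
   the increment insn is deleted (or reduced to a plain register copy in the
   q = p + size case described in the code).  The register number and
   increment size are illustrative.  */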
2609 /* Scan expression X and store a 1-bit in LIVE for each reg it uses.
2610 This is done assuming the registers needed from X
2611 are those that have 1-bits in NEEDED.
2613 On the final pass, FINAL is 1. This means try for autoincrement
2614 and count the uses and deaths of each pseudo-reg.
2616 INSN is the containing instruction. If INSN is dead, this function is not
2617 called. */
2619 static void
2620 mark_used_regs (needed, live, x, final, insn)
2621 regset needed;
2622 regset live;
2623 rtx x;
2624 int final;
2625 rtx insn;
2627 register RTX_CODE code;
2628 register int regno;
2629 int i;
2631 retry:
2632 code = GET_CODE (x);
2633 switch (code)
2635 case LABEL_REF:
2636 case SYMBOL_REF:
2637 case CONST_INT:
2638 case CONST:
2639 case CONST_DOUBLE:
2640 case PC:
2641 case ADDR_VEC:
2642 case ADDR_DIFF_VEC:
2643 case ASM_INPUT:
2644 return;
2646 #ifdef HAVE_cc0
2647 case CC0:
2648 cc0_live = 1;
2649 return;
2650 #endif
2652 case CLOBBER:
2653 /* If we are clobbering a MEM, mark any registers inside the address
2654 as being used. */
2655 if (GET_CODE (XEXP (x, 0)) == MEM)
2656 mark_used_regs (needed, live, XEXP (XEXP (x, 0), 0), final, insn);
2657 return;
2659 case MEM:
2660 /* Invalidate the data for the last MEM stored, but only if MEM is
2661 something that can be stored into. */
2662 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
2663 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
2664 ; /* needn't clear last_mem_set */
2665 else
2666 last_mem_set = 0;
2668 #ifdef AUTO_INC_DEC
2669 if (final)
2670 find_auto_inc (needed, x, insn);
2671 #endif
2672 break;
2674 case SUBREG:
2675 if (GET_CODE (SUBREG_REG (x)) == REG
2676 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER
2677 && (GET_MODE_SIZE (GET_MODE (x))
2678 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
2679 REG_CHANGES_SIZE (REGNO (SUBREG_REG (x))) = 1;
2681 /* While we're here, optimize this case. */
2682 x = SUBREG_REG (x);
2684 /* In case the SUBREG is not of a register, don't optimize. */
2685 if (GET_CODE (x) != REG)
2687 mark_used_regs (needed, live, x, final, insn);
2688 return;
2691 /* ... fall through ... */
2693 case REG:
2694 /* We are looking at a use of a register (other than it being set):
2695 mark it as needed. */
2697 regno = REGNO (x);
2699 int some_needed = REGNO_REG_SET_P (needed, regno);
2700 int some_not_needed = ! some_needed;
2702 SET_REGNO_REG_SET (live, regno);
2704 /* A hard reg in a wide mode may really be multiple registers.
2705 If so, mark all of them just like the first. */
2706 if (regno < FIRST_PSEUDO_REGISTER)
2708 int n;
2710 /* For stack ptr or fixed arg pointer,
2711 nothing below can be necessary, so waste no more time. */
2712 if (regno == STACK_POINTER_REGNUM
2713 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2714 || regno == HARD_FRAME_POINTER_REGNUM
2715 #endif
2716 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
2717 || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
2718 #endif
2719 || regno == FRAME_POINTER_REGNUM)
2721 /* If this is a register we are going to try to eliminate,
2722 don't mark it live here. If we are successful in
2723 eliminating it, it need not be live unless it is used for
2724 pseudos, in which case it will have been set live when
2725 it was allocated to the pseudos. If the register will not
2726 be eliminated, reload will set it live at that point. */
2728 if (! TEST_HARD_REG_BIT (elim_reg_set, regno))
2729 regs_ever_live[regno] = 1;
2730 return;
2732 /* No death notes for global register variables;
2733 their values are live after this function exits. */
2734 if (global_regs[regno])
2736 if (final)
2737 reg_next_use[regno] = insn;
2738 return;
2741 n = HARD_REGNO_NREGS (regno, GET_MODE (x));
2742 while (--n > 0)
2744 int regno_n = regno + n;
2745 int needed_regno = REGNO_REG_SET_P (needed, regno_n);
2747 SET_REGNO_REG_SET (live, regno_n);
2748 some_needed |= needed_regno;
2749 some_not_needed |= ! needed_regno;
2752 if (final)
2754 /* Record where each reg is used, so when the reg
2755 is set we know the next insn that uses it. */
2757 reg_next_use[regno] = insn;
2759 if (regno < FIRST_PSEUDO_REGISTER)
2761 /* If a hard reg is being used,
2762 record that this function does use it. */
2764 i = HARD_REGNO_NREGS (regno, GET_MODE (x));
2765 if (i == 0)
2766 i = 1;
2767 do
2768 regs_ever_live[regno + --i] = 1;
2769 while (i > 0);
2771 else
2773 /* Keep track of which basic block each reg appears in. */
2775 register int blocknum = BLOCK_NUM (insn);
2777 if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
2778 REG_BASIC_BLOCK (regno) = blocknum;
2779 else if (REG_BASIC_BLOCK (regno) != blocknum)
2780 REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
2782 /* Count (weighted) number of uses of each reg. */
2784 REG_N_REFS (regno) += loop_depth;
2787 /* Record and count the insns in which a reg dies.
2788 If it is used in this insn and was dead below the insn
2789 then it dies in this insn. If it was set in this insn,
2790 we do not make a REG_DEAD note; likewise if we already
2791 made such a note. */
2793 if (some_not_needed
2794 && ! dead_or_set_p (insn, x)
2795 #if 0
2796 && (regno >= FIRST_PSEUDO_REGISTER || ! fixed_regs[regno])
2797 #endif
2800 /* Check for the case where the register dying partially
2801 overlaps the register set by this insn. */
2802 if (regno < FIRST_PSEUDO_REGISTER
2803 && HARD_REGNO_NREGS (regno, GET_MODE (x)) > 1)
2805 int n = HARD_REGNO_NREGS (regno, GET_MODE (x));
2806 while (--n >= 0)
2807 some_needed |= dead_or_set_regno_p (insn, regno + n);
2810 /* If none of the words in X is needed, make a REG_DEAD
2811 note. Otherwise, we must make partial REG_DEAD notes. */
2812 if (! some_needed)
2814 REG_NOTES (insn)
2815 = gen_rtx_EXPR_LIST (REG_DEAD, x, REG_NOTES (insn));
2816 REG_N_DEATHS (regno)++;
2818 else
2820 int i;
2822 /* Don't make a REG_DEAD note for a part of a register
2823 that is set in the insn. */
2825 for (i = HARD_REGNO_NREGS (regno, GET_MODE (x)) - 1;
2826 i >= 0; i--)
2827 if (!REGNO_REG_SET_P (needed, regno + i)
2828 && ! dead_or_set_regno_p (insn, regno + i))
2829 REG_NOTES (insn)
2830 = gen_rtx_EXPR_LIST (REG_DEAD,
2831 gen_rtx_REG (reg_raw_mode[regno + i],
2832 regno + i),
2833 REG_NOTES (insn));
2838 return;
2840 case SET:
2842 register rtx testreg = SET_DEST (x);
2843 int mark_dest = 0;
2845 /* If storing into MEM, don't show it as being used. But do
2846 show the address as being used. */
2847 if (GET_CODE (testreg) == MEM)
2849 #ifdef AUTO_INC_DEC
2850 if (final)
2851 find_auto_inc (needed, testreg, insn);
2852 #endif
2853 mark_used_regs (needed, live, XEXP (testreg, 0), final, insn);
2854 mark_used_regs (needed, live, SET_SRC (x), final, insn);
2855 return;
2858 /* Storing in STRICT_LOW_PART is like storing in a reg
2859 in that this SET might be dead, so ignore it in TESTREG,
2860 but in some other ways it is like using the reg.
2862 Storing in a SUBREG or a bit field is like storing the entire
2863 register in that if the register's value is not used
2864 then this SET is not needed. */
2865 while (GET_CODE (testreg) == STRICT_LOW_PART
2866 || GET_CODE (testreg) == ZERO_EXTRACT
2867 || GET_CODE (testreg) == SIGN_EXTRACT
2868 || GET_CODE (testreg) == SUBREG)
2870 if (GET_CODE (testreg) == SUBREG
2871 && GET_CODE (SUBREG_REG (testreg)) == REG
2872 && REGNO (SUBREG_REG (testreg)) >= FIRST_PSEUDO_REGISTER
2873 && (GET_MODE_SIZE (GET_MODE (testreg))
2874 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (testreg)))))
2875 REG_CHANGES_SIZE (REGNO (SUBREG_REG (testreg))) = 1;
2877 /* Modifying a single register in an alternate mode
2878 does not use any of the old value. But these other
2879 ways of storing in a register do use the old value. */
2880 if (GET_CODE (testreg) == SUBREG
2881 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
2883 else
2884 mark_dest = 1;
2886 testreg = XEXP (testreg, 0);
2889 /* If this is a store into a register,
2890 recursively scan the value being stored. */
2892 if ((GET_CODE (testreg) == PARALLEL
2893 && GET_MODE (testreg) == BLKmode)
2894 || (GET_CODE (testreg) == REG
2895 && (regno = REGNO (testreg), regno != FRAME_POINTER_REGNUM)
2896 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2897 && regno != HARD_FRAME_POINTER_REGNUM
2898 #endif
2899 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
2900 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
2901 #endif
2903 /* We used to exclude global_regs here, but that seems wrong.
2904 Storing in them is like storing in mem. */
2906 mark_used_regs (needed, live, SET_SRC (x), final, insn);
2907 if (mark_dest)
2908 mark_used_regs (needed, live, SET_DEST (x), final, insn);
2909 return;
2912 break;
2914 case RETURN:
2915 /* If exiting needs the right stack value, consider this insn as
2916 using the stack pointer. In any event, consider it as using
2917 all global registers and all registers used by return. */
2919 #ifdef EXIT_IGNORE_STACK
2920 if (! EXIT_IGNORE_STACK
2921 || (! FRAME_POINTER_REQUIRED
2922 && ! current_function_calls_alloca
2923 && flag_omit_frame_pointer))
2924 #endif
2925 SET_REGNO_REG_SET (live, STACK_POINTER_REGNUM);
2927 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2928 if (global_regs[i]
2929 #ifdef EPILOGUE_USES
2930 || EPILOGUE_USES (i)
2931 #endif
2933 SET_REGNO_REG_SET (live, i);
2934 break;
2936 default:
2937 break;
2940 /* Recursively scan the operands of this expression. */
2943 register char *fmt = GET_RTX_FORMAT (code);
2944 register int i;
2946 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2948 if (fmt[i] == 'e')
2950 /* Tail recursive case: save a function call level. */
2951 if (i == 0)
2953 x = XEXP (x, 0);
2954 goto retry;
2956 mark_used_regs (needed, live, XEXP (x, i), final, insn);
2958 else if (fmt[i] == 'E')
2960 register int j;
2961 for (j = 0; j < XVECLEN (x, i); j++)
2962 mark_used_regs (needed, live, XVECEXP (x, i, j), final, insn);
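/* Example of the REG_DEAD bookkeeping performed above: if INSN is
   (set (reg 140) (plus (reg 141) (const_int 8))), reg 141 has no bit in
   NEEDED, and reg 141 is not also set by INSN, then the final pass attaches
   (expr_list:REG_DEAD (reg 141) ...) to INSN and increments
   REG_N_DEATHS (141).  Register numbers are only for illustration.  */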
2968 #ifdef AUTO_INC_DEC
2970 static int
2971 try_pre_increment_1 (insn)
2972 rtx insn;
2974 /* Find the next use of this reg. If in same basic block,
2975 make it do pre-increment or pre-decrement if appropriate. */
2976 rtx x = single_set (insn);
2977 HOST_WIDE_INT amount = ((GET_CODE (SET_SRC (x)) == PLUS ? 1 : -1)
2978 * INTVAL (XEXP (SET_SRC (x), 1)));
2979 int regno = REGNO (SET_DEST (x));
2980 rtx y = reg_next_use[regno];
2981 if (y != 0
2982 && BLOCK_NUM (y) == BLOCK_NUM (insn)
2983 /* Don't do this if the reg dies, or gets set in y; a standard addressing
2984 mode would be better. */
2985 && ! dead_or_set_p (y, SET_DEST (x))
2986 && try_pre_increment (y, SET_DEST (x), amount))
2988 /* We have found a suitable auto-increment
2989 and already changed insn Y to do it.
2990 So flush this increment-instruction. */
2991 PUT_CODE (insn, NOTE);
2992 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2993 NOTE_SOURCE_FILE (insn) = 0;
2994 /* Count a reference to this reg for the increment
2995 insn we are deleting. When a reg is incremented,
2996 spilling it is worse, so we want to make that
2997 less likely. */
2998 if (regno >= FIRST_PSEUDO_REGISTER)
3000 REG_N_REFS (regno) += loop_depth;
3001 REG_N_SETS (regno)++;
3003 return 1;
3005 return 0;
3008 /* Try to change INSN so that it does pre-increment or pre-decrement
3009 addressing on register REG in order to add AMOUNT to REG.
3010 AMOUNT is negative for pre-decrement.
3011 Returns 1 if the change could be made.
3012 This checks all about the validity of the result of modifying INSN. */
3014 static int
3015 try_pre_increment (insn, reg, amount)
3016 rtx insn, reg;
3017 HOST_WIDE_INT amount;
3019 register rtx use;
3021 /* Nonzero if we can try to make a pre-increment or pre-decrement.
3022 For example, addl $4,r1; movl (r1),... can become movl +(r1),... */
3023 int pre_ok = 0;
3024 /* Nonzero if we can try to make a post-increment or post-decrement.
3025 For example, addl $4,r1; movl -4(r1),... can become movl (r1)+,...
3026 It is possible for both PRE_OK and POST_OK to be nonzero if the machine
3027 supports both pre-inc and post-inc, or both pre-dec and post-dec. */
3028 int post_ok = 0;
3030 /* Nonzero if the opportunity actually requires post-inc or post-dec. */
3031 int do_post = 0;
3033 /* From the sign of increment, see which possibilities are conceivable
3034 on this target machine. */
3035 #ifdef HAVE_PRE_INCREMENT
3036 if (amount > 0)
3037 pre_ok = 1;
3038 #endif
3039 #ifdef HAVE_POST_INCREMENT
3040 if (amount > 0)
3041 post_ok = 1;
3042 #endif
3044 #ifdef HAVE_PRE_DECREMENT
3045 if (amount < 0)
3046 pre_ok = 1;
3047 #endif
3048 #ifdef HAVE_POST_DECREMENT
3049 if (amount < 0)
3050 post_ok = 1;
3051 #endif
3053 if (! (pre_ok || post_ok))
3054 return 0;
3056 /* It is not safe to add a side effect to a jump insn
3057 because if the incremented register is spilled and must be reloaded
3058 there would be no way to store the incremented value back in memory. */
3060 if (GET_CODE (insn) == JUMP_INSN)
3061 return 0;
3063 use = 0;
3064 if (pre_ok)
3065 use = find_use_as_address (PATTERN (insn), reg, 0);
3066 if (post_ok && (use == 0 || use == (rtx) 1))
3068 use = find_use_as_address (PATTERN (insn), reg, -amount);
3069 do_post = 1;
3072 if (use == 0 || use == (rtx) 1)
3073 return 0;
3075 if (GET_MODE_SIZE (GET_MODE (use)) != (amount > 0 ? amount : - amount))
3076 return 0;
3078 /* See if this combination of instruction and addressing mode exists. */
3079 if (! validate_change (insn, &XEXP (use, 0),
3080 gen_rtx_fmt_e (amount > 0
3081 ? (do_post ? POST_INC : PRE_INC)
3082 : (do_post ? POST_DEC : PRE_DEC),
3083 Pmode, reg), 0))
3084 return 0;
3086 /* Record that this insn now has an implicit side effect on REG. */
3087 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_INC, reg, REG_NOTES (insn));
3088 return 1;
3091 #endif /* AUTO_INC_DEC */
3093 /* Find the place in the rtx X where REG is used as a memory address.
3094 Return the MEM rtx that so uses it.
3095 If PLUSCONST is nonzero, search instead for a memory address equivalent to
3096 (plus REG (const_int PLUSCONST)).
3098 If such an address does not appear, return 0.
3099 If REG appears more than once, or is used other than in such an address,
3100 return (rtx)1. */
3103 find_use_as_address (x, reg, plusconst)
3104 register rtx x;
3105 rtx reg;
3106 HOST_WIDE_INT plusconst;
3108 enum rtx_code code = GET_CODE (x);
3109 char *fmt = GET_RTX_FORMAT (code);
3110 register int i;
3111 register rtx value = 0;
3112 register rtx tem;
3114 if (code == MEM && XEXP (x, 0) == reg && plusconst == 0)
3115 return x;
3117 if (code == MEM && GET_CODE (XEXP (x, 0)) == PLUS
3118 && XEXP (XEXP (x, 0), 0) == reg
3119 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3120 && INTVAL (XEXP (XEXP (x, 0), 1)) == plusconst)
3121 return x;
3123 if (code == SIGN_EXTRACT || code == ZERO_EXTRACT)
3125 /* If REG occurs inside a MEM used in a bit-field reference,
3126 that is unacceptable. */
3127 if (find_use_as_address (XEXP (x, 0), reg, 0) != 0)
3128 return (rtx) (HOST_WIDE_INT) 1;
3131 if (x == reg)
3132 return (rtx) (HOST_WIDE_INT) 1;
3134 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3136 if (fmt[i] == 'e')
3138 tem = find_use_as_address (XEXP (x, i), reg, plusconst);
3139 if (value == 0)
3140 value = tem;
3141 else if (tem != 0)
3142 return (rtx) (HOST_WIDE_INT) 1;
3144 if (fmt[i] == 'E')
3146 register int j;
3147 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3149 tem = find_use_as_address (XVECEXP (x, i, j), reg, plusconst);
3150 if (value == 0)
3151 value = tem;
3152 else if (tem != 0)
3153 return (rtx) (HOST_WIDE_INT) 1;
3158 return value;
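/* For instance, with REG = (reg 150) and PLUSCONST = 4, the pattern
   (set (reg 151) (mem (plus (reg 150) (const_int 4)))) yields that MEM,
   while a pattern in which reg 150 also appears outside such an address
   yields (rtx) 1.  The register numbers are arbitrary.  */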
3161 /* Write information about registers and basic blocks into FILE.
3162 This is part of making a debugging dump. */
3164 void
3165 dump_flow_info (file)
3166 FILE *file;
3168 register int i;
3169 static char *reg_class_names[] = REG_CLASS_NAMES;
3171 fprintf (file, "%d registers.\n", max_regno);
3173 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3174 if (REG_N_REFS (i))
3176 enum reg_class class, altclass;
3177 fprintf (file, "\nRegister %d used %d times across %d insns",
3178 i, REG_N_REFS (i), REG_LIVE_LENGTH (i));
3179 if (REG_BASIC_BLOCK (i) >= 0)
3180 fprintf (file, " in block %d", REG_BASIC_BLOCK (i));
3181 if (REG_N_SETS (i))
3182 fprintf (file, "; set %d time%s", REG_N_SETS (i),
3183 (REG_N_SETS (i) == 1) ? "" : "s");
3184 if (REG_USERVAR_P (regno_reg_rtx[i]))
3185 fprintf (file, "; user var");
3186 if (REG_N_DEATHS (i) != 1)
3187 fprintf (file, "; dies in %d places", REG_N_DEATHS (i));
3188 if (REG_N_CALLS_CROSSED (i) == 1)
3189 fprintf (file, "; crosses 1 call");
3190 else if (REG_N_CALLS_CROSSED (i))
3191 fprintf (file, "; crosses %d calls", REG_N_CALLS_CROSSED (i));
3192 if (PSEUDO_REGNO_BYTES (i) != UNITS_PER_WORD)
3193 fprintf (file, "; %d bytes", PSEUDO_REGNO_BYTES (i));
3194 class = reg_preferred_class (i);
3195 altclass = reg_alternate_class (i);
3196 if (class != GENERAL_REGS || altclass != ALL_REGS)
3198 if (altclass == ALL_REGS || class == ALL_REGS)
3199 fprintf (file, "; pref %s", reg_class_names[(int) class]);
3200 else if (altclass == NO_REGS)
3201 fprintf (file, "; %s or none", reg_class_names[(int) class]);
3202 else
3203 fprintf (file, "; pref %s, else %s",
3204 reg_class_names[(int) class],
3205 reg_class_names[(int) altclass]);
3207 if (REGNO_POINTER_FLAG (i))
3208 fprintf (file, "; pointer");
3209 fprintf (file, ".\n");
3211 fprintf (file, "\n%d basic blocks.\n", n_basic_blocks);
3212 for (i = 0; i < n_basic_blocks; i++)
3214 register rtx head, jump;
3215 register int regno;
3216 fprintf (file, "\nBasic block %d: first insn %d, last %d.\n",
3218 INSN_UID (basic_block_head[i]),
3219 INSN_UID (basic_block_end[i]));
3220 /* The control flow graph's storage is freed
3221 as soon as flow_analysis returns.
3222 Don't try to print it if it is gone. */
3223 if (basic_block_drops_in)
3225 fprintf (file, "Reached from blocks: ");
3226 head = basic_block_head[i];
3227 if (GET_CODE (head) == CODE_LABEL)
3228 for (jump = LABEL_REFS (head);
3229 jump != head;
3230 jump = LABEL_NEXTREF (jump))
3232 register int from_block = BLOCK_NUM (CONTAINING_INSN (jump));
3233 fprintf (file, " %d", from_block);
3235 if (basic_block_drops_in[i])
3236 fprintf (file, " previous");
3238 fprintf (file, "\nRegisters live at start:");
3239 for (regno = 0; regno < max_regno; regno++)
3240 if (REGNO_REG_SET_P (basic_block_live_at_start[i], regno))
3241 fprintf (file, " %d", regno);
3242 fprintf (file, "\n");
3244 fprintf (file, "\n");
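/* A line of the register dump produced above looks roughly like

       Register 105 used 3 times across 12 insns in block 2; set 1 time; user var.

   with the exact set of clauses depending on which counters and flags are
   set for that register.  */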
3248 /* Like print_rtl, but also print out live information for the start of each
3249 basic block. */
3251 void
3252 print_rtl_with_bb (outf, rtx_first)
3253 FILE *outf;
3254 rtx rtx_first;
3256 register rtx tmp_rtx;
3258 if (rtx_first == 0)
3259 fprintf (outf, "(nil)\n");
3261 else
3263 int i, bb;
3264 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
3265 int max_uid = get_max_uid ();
3266 int *start = (int *) alloca (max_uid * sizeof (int));
3267 int *end = (int *) alloca (max_uid * sizeof (int));
3268 enum bb_state *in_bb_p = (enum bb_state *)
3269 alloca (max_uid * sizeof (enum bb_state));
3271 for (i = 0; i < max_uid; i++)
3273 start[i] = end[i] = -1;
3274 in_bb_p[i] = NOT_IN_BB;
3277 for (i = n_basic_blocks-1; i >= 0; i--)
3279 rtx x;
3280 start[INSN_UID (basic_block_head[i])] = i;
3281 end[INSN_UID (basic_block_end[i])] = i;
3282 for (x = basic_block_head[i]; x != NULL_RTX; x = NEXT_INSN (x))
3284 in_bb_p[ INSN_UID(x)]
3285 = (in_bb_p[ INSN_UID(x)] == NOT_IN_BB)
3286 ? IN_ONE_BB : IN_MULTIPLE_BB;
3287 if (x == basic_block_end[i])
3288 break;
3292 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
3294 int did_output;
3296 if ((bb = start[INSN_UID (tmp_rtx)]) >= 0)
3298 fprintf (outf, ";; Start of basic block %d, registers live:",
3299 bb);
3301 EXECUTE_IF_SET_IN_REG_SET (basic_block_live_at_start[bb], 0, i,
3303 fprintf (outf, " %d", i);
3304 if (i < FIRST_PSEUDO_REGISTER)
3305 fprintf (outf, " [%s]",
3306 reg_names[i]);
3308 putc ('\n', outf);
3311 if (in_bb_p[ INSN_UID(tmp_rtx)] == NOT_IN_BB
3312 && GET_CODE (tmp_rtx) != NOTE
3313 && GET_CODE (tmp_rtx) != BARRIER)
3314 fprintf (outf, ";; Insn is not within a basic block\n");
3315 else if (in_bb_p[ INSN_UID(tmp_rtx)] == IN_MULTIPLE_BB)
3316 fprintf (outf, ";; Insn is in multiple basic blocks\n");
3318 did_output = print_rtl_single (outf, tmp_rtx);
3320 if ((bb = end[INSN_UID (tmp_rtx)]) >= 0)
3321 fprintf (outf, ";; End of basic block %d\n", bb);
3323 if (did_output)
3324 putc ('\n', outf);
3330 /* Integer list support. */
3332 /* Allocate a node from list *HEAD_PTR. */
3334 static int_list_ptr
3335 alloc_int_list_node (head_ptr)
3336 int_list_block **head_ptr;
3338 struct int_list_block *first_blk = *head_ptr;
3340 if (first_blk == NULL || first_blk->nodes_left <= 0)
3342 first_blk = (struct int_list_block *) xmalloc (sizeof (struct int_list_block));
3343 first_blk->nodes_left = INT_LIST_NODES_IN_BLK;
3344 first_blk->next = *head_ptr;
3345 *head_ptr = first_blk;
3348 first_blk->nodes_left--;
3349 return &first_blk->nodes[first_blk->nodes_left];
3352 /* Pointer to head of predecessor/successor block list. */
3353 static int_list_block *pred_int_list_blocks;
3355 /* Add a new node to integer list LIST with value VAL.
3356 LIST is a pointer to a list object to allow for different implementations.
3357 If *LIST is initially NULL, the list is empty.
3358 The caller must not care whether the element is added to the front or
3359 to the end of the list (to allow for different implementations). */
3361 static int_list_ptr
3362 add_int_list_node (blk_list, list, val)
3363 int_list_block **blk_list;
3364 int_list **list;
3365 int val;
3367 int_list_ptr p = alloc_int_list_node (blk_list);
3369 p->val = val;
3370 p->next = *list;
3371 *list = p;
3372 return p;
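/* Typical use of these integer-list primitives (a sketch only):

       int_list *succs_of_3 = NULL;
       int_list_ptr p;

       add_int_list_node (&pred_int_list_blocks, &succs_of_3, 7);
       for (p = succs_of_3; p != NULL; p = p->next)
         ... INT_LIST_VAL (p) ...

   Each list is simply a NULL-terminated chain of nodes carved out of the
   shared block allocator, so individual lists are never freed;
   free_int_list below releases all of them at once.  */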
3375 /* Free the blocks of lists at BLK_LIST. */
3377 void
3378 free_int_list (blk_list)
3379 int_list_block **blk_list;
3381 int_list_block *p, *next;
3383 for (p = *blk_list; p != NULL; p = next)
3385 next = p->next;
3386 free (p);
3389 /* Mark list as empty for the next function we compile. */
3390 *blk_list = NULL;
3393 /* Predecessor/successor computation. */
3395 /* Mark PRED_BB a predecessor of SUCC_BB,
3396 and conversely SUCC_BB a successor of PRED_BB. */
3398 static void
3399 add_pred_succ (pred_bb, succ_bb, s_preds, s_succs, num_preds, num_succs)
3400 int pred_bb;
3401 int succ_bb;
3402 int_list_ptr *s_preds;
3403 int_list_ptr *s_succs;
3404 int *num_preds;
3405 int *num_succs;
3407 if (succ_bb != EXIT_BLOCK)
3409 add_int_list_node (&pred_int_list_blocks, &s_preds[succ_bb], pred_bb);
3410 num_preds[succ_bb]++;
3412 if (pred_bb != ENTRY_BLOCK)
3414 add_int_list_node (&pred_int_list_blocks, &s_succs[pred_bb], succ_bb);
3415 num_succs[pred_bb]++;
3419 /* Compute the predecessors and successors for each block. */
3420 void
3421 compute_preds_succs (s_preds, s_succs, num_preds, num_succs)
3422 int_list_ptr *s_preds;
3423 int_list_ptr *s_succs;
3424 int *num_preds;
3425 int *num_succs;
3427 int bb, clear_local_bb_vars = 0;
3429 bzero ((char *) s_preds, n_basic_blocks * sizeof (int_list_ptr));
3430 bzero ((char *) s_succs, n_basic_blocks * sizeof (int_list_ptr));
3431 bzero ((char *) num_preds, n_basic_blocks * sizeof (int));
3432 bzero ((char *) num_succs, n_basic_blocks * sizeof (int));
3434 /* This routine can be called after life analysis; in that case
3435 basic_block_drops_in and uid_block_number will not be available
3436 and we must recompute their values. */
3437 if (basic_block_drops_in == NULL || uid_block_number == NULL)
3439 clear_local_bb_vars = 1;
3440 basic_block_drops_in = (char *) alloca (n_basic_blocks);
3441 uid_block_number = (int *) alloca ((get_max_uid () + 1) * sizeof (int));
3443 bzero ((char *) basic_block_drops_in, n_basic_blocks * sizeof (char));
3444 bzero ((char *) uid_block_number, (get_max_uid () + 1) * sizeof (int));
3446 /* Scan each basic block setting basic_block_drops_in and
3447 uid_block_number as needed. */
3448 for (bb = 0; bb < n_basic_blocks; bb++)
3450 rtx insn, stop_insn;
3452 if (bb == 0)
3453 stop_insn = NULL_RTX;
3454 else
3455 stop_insn = basic_block_end[bb-1];
3457 /* Look backwards from the start of this block. Stop if we
3458 hit the start of the function or the end of a previous
3459 block. Don't walk backwards through blocks that are just
3460 deleted insns! */
3461 for (insn = PREV_INSN (basic_block_head[bb]);
3462 insn && insn != stop_insn && GET_CODE (insn) == NOTE;
3463 insn = PREV_INSN (insn))
3466 /* Never set basic_block_drops_in for the first block. It is
3467 implicit.
3469 If we stopped on anything other than a BARRIER, then this
3470 block drops in. */
3471 if (bb != 0)
3472 basic_block_drops_in[bb] = (insn ? GET_CODE (insn) != BARRIER : 1);
3474 insn = basic_block_head[bb];
3475 while (insn)
3477 BLOCK_NUM (insn) = bb;
3478 if (insn == basic_block_end[bb])
3479 break;
3480 insn = NEXT_INSN (insn);
3485 for (bb = 0; bb < n_basic_blocks; bb++)
3487 rtx head;
3488 rtx jump;
3490 head = BLOCK_HEAD (bb);
3492 if (GET_CODE (head) == CODE_LABEL)
3493 for (jump = LABEL_REFS (head);
3494 jump != head;
3495 jump = LABEL_NEXTREF (jump))
3497 if (! INSN_DELETED_P (CONTAINING_INSN (jump))
3498 && (GET_CODE (CONTAINING_INSN (jump)) != NOTE
3499 || (NOTE_LINE_NUMBER (CONTAINING_INSN (jump))
3500 != NOTE_INSN_DELETED)))
3501 add_pred_succ (BLOCK_NUM (CONTAINING_INSN (jump)), bb,
3502 s_preds, s_succs, num_preds, num_succs);
3505 jump = BLOCK_END (bb);
3506 /* If this is a RETURN insn or a conditional jump in the last
3507 basic block, or a non-jump insn in the last basic block, then
3508 this block reaches the exit block. */
3509 if ((GET_CODE (jump) == JUMP_INSN && GET_CODE (PATTERN (jump)) == RETURN)
3510 || (((GET_CODE (jump) == JUMP_INSN
3511 && condjump_p (jump) && !simplejump_p (jump))
3512 || GET_CODE (jump) != JUMP_INSN)
3513 && (bb == n_basic_blocks - 1)))
3514 add_pred_succ (bb, EXIT_BLOCK, s_preds, s_succs, num_preds, num_succs);
3516 if (basic_block_drops_in[bb])
3517 add_pred_succ (bb - 1, bb, s_preds, s_succs, num_preds, num_succs);
3520 add_pred_succ (ENTRY_BLOCK, 0, s_preds, s_succs, num_preds, num_succs);
3523 /* If we allocated any variables in temporary storage, clear out the
3524 pointer to the local storage to avoid dangling pointers. */
3525 if (clear_local_bb_vars)
3527 basic_block_drops_in = NULL;
3528 uid_block_number = NULL;
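/* As a small example of the result: for a function with two basic blocks,
   where block 0 falls through into block 1 and block 1 ends in a RETURN,
   the lists come out as

       preds[0] = { ENTRY_BLOCK }      succs[0] = { 1 }
       preds[1] = { 0 }                succs[1] = { EXIT_BLOCK }

   (written as sets here only for readability; the actual data are the
   integer lists and counts filled in above).  */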
3533 void
3534 dump_bb_data (file, preds, succs)
3535 FILE *file;
3536 int_list_ptr *preds;
3537 int_list_ptr *succs;
3539 int bb;
3540 int_list_ptr p;
3542 fprintf (file, "BB data\n\n");
3543 for (bb = 0; bb < n_basic_blocks; bb++)
3545 fprintf (file, "BB %d, start %d, end %d\n", bb,
3546 INSN_UID (BLOCK_HEAD (bb)), INSN_UID (BLOCK_END (bb)));
3547 fprintf (file, " preds:");
3548 for (p = preds[bb]; p != NULL; p = p->next)
3550 int pred_bb = INT_LIST_VAL (p);
3551 if (pred_bb == ENTRY_BLOCK)
3552 fprintf (file, " entry");
3553 else
3554 fprintf (file, " %d", pred_bb);
3556 fprintf (file, "\n");
3557 fprintf (file, " succs:");
3558 for (p = succs[bb]; p != NULL; p = p->next)
3560 int succ_bb = INT_LIST_VAL (p);
3561 if (succ_bb == EXIT_BLOCK)
3562 fprintf (file, " exit");
3563 else
3564 fprintf (file, " %d", succ_bb);
3566 fprintf (file, "\n");
3568 fprintf (file, "\n");
3571 void
3572 dump_sbitmap (file, bmap)
3573 FILE *file;
3574 sbitmap bmap;
3576 int i,j,n;
3577 int set_size = bmap->size;
3578 int total_bits = bmap->n_bits;
3580 fprintf (file, " ");
3581 for (i = n = 0; i < set_size && n < total_bits; i++)
3583 for (j = 0; j < SBITMAP_ELT_BITS && n < total_bits; j++, n++)
3585 if (n != 0 && n % 10 == 0)
3586 fprintf (file, " ");
3587 fprintf (file, "%d", (bmap->elms[i] & (1L << j)) != 0);
3590 fprintf (file, "\n");
3593 void
3594 dump_sbitmap_vector (file, title, subtitle, bmaps, n_maps)
3595 FILE *file;
3596 char *title, *subtitle;
3597 sbitmap *bmaps;
3598 int n_maps;
3600 int bb;
3602 fprintf (file, "%s\n", title);
3603 for (bb = 0; bb < n_maps; bb++)
3605 fprintf (file, "%s %d\n", subtitle, bb);
3606 dump_sbitmap (file, bmaps[bb]);
3608 fprintf (file, "\n");
3611 /* Free basic block data storage. */
3613 void
3614 free_bb_mem ()
3616 free_int_list (&pred_int_list_blocks);
3619 /* Bitmap manipulation routines. */
3621 /* Allocate a simple bitmap of N_ELMS bits. */
3623 sbitmap
3624 sbitmap_alloc (n_elms)
3625 int n_elms;
3627 int bytes, size, amt;
3628 sbitmap bmap;
3630 size = SBITMAP_SET_SIZE (n_elms);
3631 bytes = size * sizeof (SBITMAP_ELT_TYPE);
3632 amt = (sizeof (struct simple_bitmap_def)
3633 + bytes - sizeof (SBITMAP_ELT_TYPE));
3634 bmap = (sbitmap) xmalloc (amt);
3635 bmap->n_bits = n_elms;
3636 bmap->size = size;
3637 bmap->bytes = bytes;
3638 return bmap;
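/* SBITMAP_SET_SIZE rounds the requested number of bits up to a whole number
   of SBITMAP_ELT_TYPE elements.  With 32-bit elements, for instance, a
   100-bit map gets size == 4 and bytes == 16; the element width actually
   used depends on the host.  */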
3641 /* Allocate a vector of N_VECS bitmaps of N_ELMS bits. */
3643 sbitmap *
3644 sbitmap_vector_alloc (n_vecs, n_elms)
3645 int n_vecs, n_elms;
3647 int i, bytes, offset, elm_bytes, size, amt, vector_bytes;
3648 sbitmap *bitmap_vector;
3650 size = SBITMAP_SET_SIZE (n_elms);
3651 bytes = size * sizeof (SBITMAP_ELT_TYPE);
3652 elm_bytes = (sizeof (struct simple_bitmap_def)
3653 + bytes - sizeof (SBITMAP_ELT_TYPE));
3654 vector_bytes = n_vecs * sizeof (sbitmap *);
3656 /* Round up `vector_bytes' to account for the alignment requirements
3657 of an sbitmap. One could allocate the vector-table and set of sbitmaps
3658 separately, but that requires maintaining two pointers or creating
3659 a cover struct to hold both pointers (so our result is still just
3660 one pointer). Neither is a bad idea, but this is simpler for now. */
3662 /* Based on DEFAULT_ALIGNMENT computation in obstack.c. */
3663 struct { char x; SBITMAP_ELT_TYPE y; } align;
3664 int alignment = (char *) & align.y - & align.x;
3665 vector_bytes = (vector_bytes + alignment - 1) & ~ (alignment - 1);
3668 amt = vector_bytes + (n_vecs * elm_bytes);
3669 bitmap_vector = (sbitmap *) xmalloc (amt);
3671 for (i = 0, offset = vector_bytes;
3672 i < n_vecs;
3673 i++, offset += elm_bytes)
3675 sbitmap b = (sbitmap) ((char *) bitmap_vector + offset);
3676 bitmap_vector[i] = b;
3677 b->n_bits = n_elms;
3678 b->size = size;
3679 b->bytes = bytes;
3682 return bitmap_vector;
3685 /* Copy sbitmap SRC to DST. */
3687 void
3688 sbitmap_copy (dst, src)
3689 sbitmap dst, src;
3691 bcopy ((PTR) src->elms, (PTR) dst->elms, sizeof (SBITMAP_ELT_TYPE) * dst->size);
3694 /* Zero all elements in a bitmap. */
3696 void
3697 sbitmap_zero (bmap)
3698 sbitmap bmap;
3700 bzero ((char *) bmap->elms, bmap->bytes);
3703 /* Set to ones all elements in a bitmap. */
3705 void
3706 sbitmap_ones (bmap)
3707 sbitmap bmap;
3709 memset (bmap->elms, -1, bmap->bytes);
3712 /* Zero a vector of N_VECS bitmaps. */
3714 void
3715 sbitmap_vector_zero (bmap, n_vecs)
3716 sbitmap *bmap;
3717 int n_vecs;
3719 int i;
3721 for (i = 0; i < n_vecs; i++)
3722 sbitmap_zero (bmap[i]);
3725 /* Set to ones a vector of N_VECS bitmaps. */
3727 void
3728 sbitmap_vector_ones (bmap, n_vecs)
3729 sbitmap *bmap;
3730 int n_vecs;
3731 {
3732 int i;
3734 for (i = 0; i < n_vecs; i++)
3735 sbitmap_ones (bmap[i]);
3736 }
3738 /* Set DST to be A union (B - C).
3739 DST = A | (B & ~C).
3740 Return non-zero if any change is made. */
3742 int
3743 sbitmap_union_of_diff (dst, a, b, c)
3744 sbitmap dst, a, b, c;
3745 {
3746 int i,changed;
3747 sbitmap_ptr dstp, ap, bp, cp;
3749 changed = 0;
3750 dstp = dst->elms;
3751 ap = a->elms;
3752 bp = b->elms;
3753 cp = c->elms;
3754 for (i = 0; i < dst->size; i++)
3755 {
3756 SBITMAP_ELT_TYPE tmp = *ap | (*bp & ~*cp);
3757 if (*dstp != tmp)
3758 changed = 1;
3759 *dstp = tmp;
3760 dstp++; ap++; bp++; cp++;
3761 }
3762 return changed;
3763 }
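/* The `changed' result of this and the sbitmap_a_* routines below is what
   lets an iterative dataflow solver detect that it has reached a fixed
   point.  A sketch (the in/out/gen/kill names are only illustrative):

       changed = 1;
       while (changed)
         {
           changed = 0;
           for (bb = 0; bb < n_basic_blocks; bb++)
             changed |= sbitmap_union_of_diff (out[bb], gen[bb],
                                               in[bb], kill[bb]);
         }

   compute_dominators below uses sbitmap_a_and_b in exactly this way.  */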
3765 /* Set bitmap DST to the bitwise negation of the bitmap SRC. */
3767 void
3768 sbitmap_not (dst, src)
3769 sbitmap dst, src;
3770 {
3771 int i;
3772 sbitmap_ptr dstp, ap;
3774 dstp = dst->elms;
3775 ap = src->elms;
3776 for (i = 0; i < dst->size; i++)
3777 {
3778 SBITMAP_ELT_TYPE tmp = ~(*ap);
3779 *dstp = tmp;
3780 dstp++; ap++;
3781 }
3782 }
3784 /* Set the bits in DST to be the difference between the bits
3785 in A and the bits in B. i.e. dst = a - b.
3786 The - operator is implemented as a & (~b). */
3788 void
3789 sbitmap_difference (dst, a, b)
3790 sbitmap dst, a, b;
3791 {
3792 int i;
3793 sbitmap_ptr dstp, ap, bp;
3795 dstp = dst->elms;
3796 ap = a->elms;
3797 bp = b->elms;
3798 for (i = 0; i < dst->size; i++)
3799 *dstp++ = *ap++ & (~*bp++);
3800 }
3802 /* Set DST to be (A and B).
3803 Return non-zero if any change is made. */
3805 int
3806 sbitmap_a_and_b (dst, a, b)
3807 sbitmap dst, a, b;
3808 {
3809 int i,changed;
3810 sbitmap_ptr dstp, ap, bp;
3812 changed = 0;
3813 dstp = dst->elms;
3814 ap = a->elms;
3815 bp = b->elms;
3816 for (i = 0; i < dst->size; i++)
3817 {
3818 SBITMAP_ELT_TYPE tmp = *ap & *bp;
3819 if (*dstp != tmp)
3820 changed = 1;
3821 *dstp = tmp;
3822 dstp++; ap++; bp++;
3823 }
3824 return changed;
3825 }
3826 /* Set DST to be (A or B).
3827 Return non-zero if any change is made. */
3829 int
3830 sbitmap_a_or_b (dst, a, b)
3831 sbitmap dst, a, b;
3832 {
3833 int i,changed;
3834 sbitmap_ptr dstp, ap, bp;
3836 changed = 0;
3837 dstp = dst->elms;
3838 ap = a->elms;
3839 bp = b->elms;
3840 for (i = 0; i < dst->size; i++)
3841 {
3842 SBITMAP_ELT_TYPE tmp = *ap | *bp;
3843 if (*dstp != tmp)
3844 changed = 1;
3845 *dstp = tmp;
3846 dstp++; ap++; bp++;
3847 }
3848 return changed;
3849 }
3851 /* Set DST to be (A or (B and C)).
3852 Return non-zero if any change is made. */
3854 int
3855 sbitmap_a_or_b_and_c (dst, a, b, c)
3856 sbitmap dst, a, b, c;
3857 {
3858 int i,changed;
3859 sbitmap_ptr dstp, ap, bp, cp;
3861 changed = 0;
3862 dstp = dst->elms;
3863 ap = a->elms;
3864 bp = b->elms;
3865 cp = c->elms;
3866 for (i = 0; i < dst->size; i++)
3867 {
3868 SBITMAP_ELT_TYPE tmp = *ap | (*bp & *cp);
3869 if (*dstp != tmp)
3870 changed = 1;
3871 *dstp = tmp;
3872 dstp++; ap++; bp++; cp++;
3873 }
3874 return changed;
3875 }
3877 /* Set DST to be (A and (B or C)).
3878 Return non-zero if any change is made. */
3880 int
3881 sbitmap_a_and_b_or_c (dst, a, b, c)
3882 sbitmap dst, a, b, c;
3883 {
3884 int i,changed;
3885 sbitmap_ptr dstp, ap, bp, cp;
3887 changed = 0;
3888 dstp = dst->elms;
3889 ap = a->elms;
3890 bp = b->elms;
3891 cp = c->elms;
3892 for (i = 0; i < dst->size; i++)
3893 {
3894 SBITMAP_ELT_TYPE tmp = *ap & (*bp | *cp);
3895 if (*dstp != tmp)
3896 changed = 1;
3897 *dstp = tmp;
3898 dstp++; ap++; bp++; cp++;
3899 }
3900 return changed;
3901 }
3903 /* Set the bitmap DST to the intersection of SRC of all predecessors or
3904 successors of block number BB (PRED_SUCC says which). */
3906 void
3907 sbitmap_intersect_of_predsucc (dst, src, bb, pred_succ)
3908 sbitmap dst;
3909 sbitmap *src;
3910 int bb;
3911 int_list_ptr *pred_succ;
3912 {
3913 int_list_ptr ps;
3914 int ps_bb;
3915 int set_size = dst->size;
3917 ps = pred_succ[bb];
3919 /* It is possible that there are no predecessors(/successors).
3920 This can happen for example in unreachable code. */
3922 if (ps == NULL)
3923 {
3924 /* In APL-speak this is the `and' reduction of the empty set and thus
3925 the result is the identity for `and'. */
3926 sbitmap_ones (dst);
3927 return;
3928 }
3930 /* Set result to first predecessor/successor. */
3932 for ( ; ps != NULL; ps = ps->next)
3933 {
3934 ps_bb = INT_LIST_VAL (ps);
3935 if (ps_bb == ENTRY_BLOCK || ps_bb == EXIT_BLOCK)
3936 continue;
3937 sbitmap_copy (dst, src[ps_bb]);
3938 /* Break out since we're only doing first predecessor. */
3939 break;
3940 }
3941 if (ps == NULL)
3942 return;
3944 /* Now do the remaining predecessors/successors. */
3946 for (ps = ps->next; ps != NULL; ps = ps->next)
3947 {
3948 int i;
3949 sbitmap_ptr p,r;
3951 ps_bb = INT_LIST_VAL (ps);
3952 if (ps_bb == ENTRY_BLOCK || ps_bb == EXIT_BLOCK)
3953 continue;
3955 p = src[ps_bb]->elms;
3956 r = dst->elms;
3958 for (i = 0; i < set_size; i++)
3959 *r++ &= *p++;
3960 }
3961 }
3963 /* Set the bitmap DST to the intersection of SRC of all predecessors
3964 of block number BB. */
3966 void
3967 sbitmap_intersect_of_predecessors (dst, src, bb, s_preds)
3968 sbitmap dst;
3969 sbitmap *src;
3970 int bb;
3971 int_list_ptr *s_preds;
3972 {
3973 sbitmap_intersect_of_predsucc (dst, src, bb, s_preds);
3974 }
3976 /* Set the bitmap DST to the intersection of SRC of all successors
3977 of block number BB. */
3979 void
3980 sbitmap_intersect_of_successors (dst, src, bb, s_succs)
3981 sbitmap dst;
3982 sbitmap *src;
3983 int bb;
3984 int_list_ptr *s_succs;
3985 {
3986 sbitmap_intersect_of_predsucc (dst, src, bb, s_succs);
3987 }
3989 /* Set the bitmap DST to the union of SRC of all predecessors/successors of
3990 block number BB. */
3992 void
3993 sbitmap_union_of_predsucc (dst, src, bb, pred_succ)
3994 sbitmap dst;
3995 sbitmap *src;
3996 int bb;
3997 int_list_ptr *pred_succ;
3998 {
3999 int_list_ptr ps;
4000 int ps_bb;
4001 int set_size = dst->size;
4003 ps = pred_succ[bb];
4005 /* It is possible that there are no predecessors(/successors).
4006 This can happen for example in unreachable code. */
4008 if (ps == NULL)
4009 {
4010 /* In APL-speak this is the `or' reduction of the empty set and thus
4011 the result is the identity for `or'. */
4012 sbitmap_zero (dst);
4013 return;
4014 }
4016 /* Set result to first predecessor/successor. */
4018 for ( ; ps != NULL; ps = ps->next)
4019 {
4020 ps_bb = INT_LIST_VAL (ps);
4021 if (ps_bb == ENTRY_BLOCK || ps_bb == EXIT_BLOCK)
4022 continue;
4023 sbitmap_copy (dst, src[ps_bb]);
4024 /* Break out since we're only doing first predecessor. */
4025 break;
4026 }
4027 if (ps == NULL)
4028 return;
4030 /* Now do the remaining predecessors/successors. */
4032 for (ps = ps->next; ps != NULL; ps = ps->next)
4033 {
4034 int i;
4035 sbitmap_ptr p,r;
4037 ps_bb = INT_LIST_VAL (ps);
4038 if (ps_bb == ENTRY_BLOCK || ps_bb == EXIT_BLOCK)
4039 continue;
4041 p = src[ps_bb]->elms;
4042 r = dst->elms;
4044 for (i = 0; i < set_size; i++)
4045 *r++ |= *p++;
4046 }
4047 }
4049 /* Set the bitmap DST to the union of SRC of all predecessors of
4050 block number BB. */
4052 void
4053 sbitmap_union_of_predecessors (dst, src, bb, s_preds)
4054 sbitmap dst;
4055 sbitmap *src;
4056 int bb;
4057 int_list_ptr *s_preds;
4058 {
4059 sbitmap_union_of_predsucc (dst, src, bb, s_preds);
4060 }
4062 /* Set the bitmap DST to the union of SRC of all successors of
4063 block number BB. */
4065 void
4066 sbitmap_union_of_successors (dst, src, bb, s_succ)
4067 sbitmap dst;
4068 sbitmap *src;
4069 int bb;
4070 int_list_ptr *s_succ;
4071 {
4072 sbitmap_union_of_predsucc (dst, src, bb, s_succ);
4073 }
4075 /* Compute dominator relationships. */
4076 void
4077 compute_dominators (dominators, post_dominators, s_preds, s_succs)
4078 sbitmap *dominators;
4079 sbitmap *post_dominators;
4080 int_list_ptr *s_preds;
4081 int_list_ptr *s_succs;
4082 {
4083 int bb, changed, passes;
4084 sbitmap *temp_bitmap;
4086 temp_bitmap = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
4087 sbitmap_vector_ones (dominators, n_basic_blocks);
4088 sbitmap_vector_ones (post_dominators, n_basic_blocks);
4089 sbitmap_vector_zero (temp_bitmap, n_basic_blocks);
4091 sbitmap_zero (dominators[0]);
4092 SET_BIT (dominators[0], 0);
4094 sbitmap_zero (post_dominators[n_basic_blocks-1]);
4095 SET_BIT (post_dominators[n_basic_blocks-1], 0);
4097 passes = 0;
4098 changed = 1;
4099 while (changed)
4100 {
4101 changed = 0;
4102 for (bb = 1; bb < n_basic_blocks; bb++)
4103 {
4104 sbitmap_intersect_of_predecessors (temp_bitmap[bb], dominators,
4105 bb, s_preds);
4106 SET_BIT (temp_bitmap[bb], bb);
4107 changed |= sbitmap_a_and_b (dominators[bb],
4108 dominators[bb],
4109 temp_bitmap[bb]);
4110 sbitmap_intersect_of_successors (temp_bitmap[bb], post_dominators,
4111 bb, s_succs);
4112 SET_BIT (temp_bitmap[bb], bb);
4113 changed |= sbitmap_a_and_b (post_dominators[bb],
4114 post_dominators[bb],
4115 temp_bitmap[bb]);
4116 }
4117 passes++;
4118 }
4120 free (temp_bitmap);
4121 }
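/* The loop above is the standard iterative formulation:

       dom (B)  = {B} union (intersection of dom (P)  over predecessors P of B)
       pdom (B) = {B} union (intersection of pdom (S) over successors S of B)

   All sets start out as the universal set (sbitmap_vector_ones) except for
   the specially initialized entry block and last block, and the equations
   are re-evaluated until no bitmap changes; termination follows because
   each step can only clear bits.  A caller might use it roughly like this
   (a sketch; the `s_preds'/`s_succs' edge lists are assumed to have been
   built beforehand, e.g. by compute_preds_succs):

       sbitmap *dom = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
       sbitmap *pdom = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
       compute_dominators (dom, pdom, s_preds, s_succs);
       ... TEST_BIT (dom[b], d) is non-zero when block D dominates block B ...
       free (dom);
       free (pdom);
*/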
4123 /* Count for a single SET rtx, X. */
4125 static void
4126 count_reg_sets_1 (x)
4127 rtx x;
4128 {
4129 register int regno;
4130 register rtx reg = SET_DEST (x);
4132 /* Find the register that's set/clobbered. */
4133 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
4134 || GET_CODE (reg) == SIGN_EXTRACT
4135 || GET_CODE (reg) == STRICT_LOW_PART)
4136 reg = XEXP (reg, 0);
4138 if (GET_CODE (reg) == PARALLEL
4139 && GET_MODE (reg) == BLKmode)
4140 {
4141 register int i;
4142 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
4143 count_reg_sets_1 (XVECEXP (reg, 0, i));
4144 return;
4145 }
4147 if (GET_CODE (reg) == REG)
4148 {
4149 regno = REGNO (reg);
4150 if (regno >= FIRST_PSEUDO_REGISTER)
4151 {
4152 /* Count (weighted) references, stores, etc. This counts a
4153 register twice if it is modified, but that is correct. */
4154 REG_N_SETS (regno)++;
4156 REG_N_REFS (regno) += loop_depth;
4157 }
4158 }
4159 }
4161 /* Increment REG_N_SETS for each SET or CLOBBER found in X; also increment
4162 REG_N_REFS by the current loop depth for each SET or CLOBBER found. */
4164 static void
4165 count_reg_sets (x)
4166 rtx x;
4167 {
4168 register RTX_CODE code = GET_CODE (x);
4170 if (code == SET || code == CLOBBER)
4171 count_reg_sets_1 (x);
4172 else if (code == PARALLEL)
4173 {
4174 register int i;
4175 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
4176 {
4177 code = GET_CODE (XVECEXP (x, 0, i));
4178 if (code == SET || code == CLOBBER)
4179 count_reg_sets_1 (XVECEXP (x, 0, i));
4180 }
4181 }
4182 }
4184 /* Increment REG_N_REFS by the current loop depth for each register reference
4185 found in X. */
4187 static void
4188 count_reg_references (x)
4189 rtx x;
4190 {
4191 register RTX_CODE code;
4193 retry:
4194 code = GET_CODE (x);
4195 switch (code)
4196 {
4197 case LABEL_REF:
4198 case SYMBOL_REF:
4199 case CONST_INT:
4200 case CONST:
4201 case CONST_DOUBLE:
4202 case PC:
4203 case ADDR_VEC:
4204 case ADDR_DIFF_VEC:
4205 case ASM_INPUT:
4206 return;
4208 #ifdef HAVE_cc0
4209 case CC0:
4210 return;
4211 #endif
4213 case CLOBBER:
4214 /* If we are clobbering a MEM, mark any registers inside the address
4215 as being used. */
4216 if (GET_CODE (XEXP (x, 0)) == MEM)
4217 count_reg_references (XEXP (XEXP (x, 0), 0));
4218 return;
4220 case SUBREG:
4221 /* While we're here, optimize this case. */
4222 x = SUBREG_REG (x);
4224 /* In case the SUBREG is not of a register, don't optimize.  */
4225 if (GET_CODE (x) != REG)
4226 {
4227 count_reg_references (x);
4228 return;
4229 }
4231 /* ... fall through ... */
4233 case REG:
4234 if (REGNO (x) >= FIRST_PSEUDO_REGISTER)
4235 REG_N_REFS (REGNO (x)) += loop_depth;
4236 return;
4238 case SET:
4239 {
4240 register rtx testreg = SET_DEST (x);
4241 int mark_dest = 0;
4243 /* If storing into MEM, don't show it as being used. But do
4244 show the address as being used. */
4245 if (GET_CODE (testreg) == MEM)
4246 {
4247 count_reg_references (XEXP (testreg, 0));
4248 count_reg_references (SET_SRC (x));
4249 return;
4250 }
4252 /* Storing in STRICT_LOW_PART is like storing in a reg
4253 in that this SET might be dead, so ignore it in TESTREG,
4254 but in some other ways it is like using the reg.
4256 Storing in a SUBREG or a bit field is like storing the entire
4257 register in that if the register's value is not used
4258 then this SET is not needed. */
4259 while (GET_CODE (testreg) == STRICT_LOW_PART
4260 || GET_CODE (testreg) == ZERO_EXTRACT
4261 || GET_CODE (testreg) == SIGN_EXTRACT
4262 || GET_CODE (testreg) == SUBREG)
4263 {
4264 /* Modifying a single register in an alternate mode
4265 does not use any of the old value. But these other
4266 ways of storing in a register do use the old value. */
4267 if (GET_CODE (testreg) == SUBREG
4268 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
4269 ;
4270 else
4271 mark_dest = 1;
4273 testreg = XEXP (testreg, 0);
4274 }
4276 /* If this is a store into a register,
4277 recursively scan the value being stored. */
4279 if ((GET_CODE (testreg) == PARALLEL
4280 && GET_MODE (testreg) == BLKmode)
4281 || GET_CODE (testreg) == REG)
4282 {
4283 count_reg_references (SET_SRC (x));
4284 if (mark_dest)
4285 count_reg_references (SET_DEST (x));
4286 return;
4287 }
4288 }
4289 break;
4291 default:
4292 break;
4293 }
4295 /* Recursively scan the operands of this expression. */
4297 {
4298 register char *fmt = GET_RTX_FORMAT (code);
4299 register int i;
4301 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4302 {
4303 if (fmt[i] == 'e')
4304 {
4305 /* Tail recursive case: save a function call level. */
4306 if (i == 0)
4307 {
4308 x = XEXP (x, 0);
4309 goto retry;
4310 }
4311 count_reg_references (XEXP (x, i));
4312 }
4313 else if (fmt[i] == 'E')
4314 {
4315 register int j;
4316 for (j = 0; j < XVECLEN (x, i); j++)
4317 count_reg_references (XVECEXP (x, i, j));
4318 }
4319 }
4320 }
4321 }
4323 /* Recompute register set/reference counts immediately prior to register
4324 allocation.
4326 This avoids problems with set/reference counts changing to/from values
4327 which have special meanings to the register allocators.
4329 Additionally, the reference counts are the primary component used by the
4330 register allocators to prioritize pseudos for allocation to hard regs.
4331 More accurate reference counts generally lead to better register allocation.
4333 It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and
4334 possibly other information which is used by the register allocators. */
4336 void
4337 recompute_reg_usage (f)
4338 rtx f;
4339 {
4340 rtx insn;
4341 int i, max_reg;
4343 /* Clear out the old data. */
4344 max_reg = max_reg_num ();
4345 for (i = FIRST_PSEUDO_REGISTER; i < max_reg; i++)
4346 {
4347 REG_N_SETS (i) = 0;
4348 REG_N_REFS (i) = 0;
4349 }
4351 /* Scan each insn in the chain and count how many times each register is
4352 set/used. */
4353 loop_depth = 1;
4354 for (insn = f; insn; insn = NEXT_INSN (insn))
4355 {
4356 /* Keep track of loop depth. */
4357 if (GET_CODE (insn) == NOTE)
4358 {
4359 /* Look for loop boundaries. */
4360 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END)
4361 loop_depth--;
4362 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
4363 loop_depth++;
4365 /* If we have LOOP_DEPTH == 0, there has been a bookkeeping error.
4366 Abort now rather than setting register status incorrectly. */
4367 if (loop_depth == 0)
4368 abort ();
4369 }
4370 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
4371 {
4372 rtx links;
4374 /* This call will increment REG_N_SETS for each SET or CLOBBER
4375 of a register in INSN. It will also increment REG_N_REFS
4376 by the loop depth for each set of a register in INSN. */
4377 count_reg_sets (PATTERN (insn));
4379 /* count_reg_sets does not detect autoincrement address modes, so
4380 detect them here by looking at the notes attached to INSN. */
4381 for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
4382 {
4383 if (REG_NOTE_KIND (links) == REG_INC)
4384 /* Count (weighted) references, stores, etc. This counts a
4385 register twice if it is modified, but that is correct. */
4386 REG_N_SETS (REGNO (XEXP (links, 0)))++;
4387 }
4389 /* This call will increment REG_N_REFS by the current loop depth for
4390 each reference to a register in INSN. */
4391 count_reg_references (PATTERN (insn));
4393 /* count_reg_references will not include counts for arguments to
4394 function calls, so detect them here by examining the
4395 CALL_INSN_FUNCTION_USAGE data. */
4396 if (GET_CODE (insn) == CALL_INSN)
4397 {
4398 rtx note;
4400 for (note = CALL_INSN_FUNCTION_USAGE (insn);
4401 note;
4402 note = XEXP (note, 1))
4403 if (GET_CODE (XEXP (note, 0)) == USE)
4404 count_reg_references (SET_DEST (XEXP (note, 0)));