[official-gcc.git] / gcc / flow.c
blob0161d94be64c47161296fb77fa7d2b9747a8210d
1 /* Data flow analysis for GNU compiler.
2 Copyright (C) 1987, 88, 92-97, 1998 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
22 /* This file contains the data flow analysis pass of the compiler.
23 It computes data flow information
24 which tells combine_instructions which insns to consider combining
25 and controls register allocation.
27 Additional data flow information that is too bulky to record
28 is generated during the analysis, and is used at that time to
29 create autoincrement and autodecrement addressing.
31 The first step is dividing the function into basic blocks.
32 find_basic_blocks does this. Then life_analysis determines
33 where each register is live and where it is dead.
35 ** find_basic_blocks **
37 find_basic_blocks divides the current function's rtl
38 into basic blocks. It records the beginnings and ends of the
39 basic blocks in the vectors basic_block_head and basic_block_end,
40 and the number of blocks in n_basic_blocks.
42 find_basic_blocks also finds any unreachable loops
43 and deletes them.
45 ** life_analysis **
47 life_analysis is called immediately after find_basic_blocks.
48 It uses the basic block information to determine where each
49 hard or pseudo register is live.
51 ** live-register info **
53 The information about where each register is live is in two parts:
54 the REG_NOTES of insns, and the vector basic_block_live_at_start.
56 basic_block_live_at_start has an element for each basic block,
57 and the element is a bit-vector with a bit for each hard or pseudo
58 register. The bit is 1 if the register is live at the beginning
59 of the basic block.
61 Two types of elements can be added to an insn's REG_NOTES.
62 A REG_DEAD note is added to an insn's REG_NOTES for any register
63 that meets both of two conditions: The value in the register is not
64 needed in subsequent insns and the insn does not replace the value in
65 the register (in the case of multi-word hard registers, the value in
66 each register must be replaced by the insn to avoid a REG_DEAD note).
68 In the vast majority of cases, an object in a REG_DEAD note will be
69 used somewhere in the insn. The (rare) exception to this is if an
70 insn uses a multi-word hard register and only some of the registers are
71 needed in subsequent insns. In that case, REG_DEAD notes will be
72 provided for those hard registers that are not subsequently needed.
73 Partial REG_DEAD notes of this type do not occur when an insn sets
74 only some of the hard registers used in such a multi-word operand;
75 omitting REG_DEAD notes for objects stored in an insn is optional and
76 the desire to do so does not justify the complexity of the partial
77 REG_DEAD notes.
79 REG_UNUSED notes are added for each register that is set by the insn
80 but is unused subsequently (if every register set by the insn is unused
81 and the insn does not reference memory or have some other side-effect,
82 the insn is deleted instead). If only part of a multi-word hard
83 register is used in a subsequent insn, REG_UNUSED notes are made for
84 the parts that will not be used.
86 To determine which registers are live after any insn, one can
87 start from the beginning of the basic block and scan insns, noting
88 which registers are set by each insn and which die there.
90 ** Other actions of life_analysis **
92 life_analysis sets up the LOG_LINKS fields of insns because the
93 information needed to do so is readily available.
95 life_analysis deletes insns whose only effect is to store a value
96 that is never used.
98 life_analysis notices cases where a reference to a register as
99 a memory address can be combined with a preceding or following
100 incrementation or decrementation of the register. The separate
101 instruction to increment or decrement is deleted and the address
102 is changed to a POST_INC or similar rtx.
104 Each time an incrementing or decrementing address is created,
105 a REG_INC element is added to the insn's REG_NOTES list.
107 life_analysis fills in certain vectors containing information about
108 register usage: reg_n_refs, reg_n_deaths, reg_n_sets, reg_live_length,
109 reg_n_calls_crosses and reg_basic_block.
111 life_analysis sets current_function_sp_is_unchanging if the function
112 doesn't modify the stack pointer. */
114 #include "config.h"
115 #include "system.h"
116 #include "rtl.h"
117 #include "basic-block.h"
118 #include "insn-config.h"
119 #include "regs.h"
120 #include "hard-reg-set.h"
121 #include "flags.h"
122 #include "output.h"
123 #include "except.h"
124 #include "toplev.h"
126 #include "obstack.h"
127 #define obstack_chunk_alloc xmalloc
128 #define obstack_chunk_free free
130 /* The contents of the current function definition are allocated
131 in this obstack, and all are freed at the end of the function.
132 For top-level functions, this is temporary_obstack.
133 Separate obstacks are made for nested functions. */
135 extern struct obstack *function_obstack;
137 /* List of labels that must never be deleted. */
138 extern rtx forced_labels;
140 /* Get the basic block number of an insn.
141 This info should not be expected to remain available
142 after the end of life_analysis. */
144 /* This is the limit of the allocated space in the following two arrays. */
146 static int max_uid_for_flow;
148 #define BLOCK_NUM(INSN) uid_block_number[INSN_UID (INSN)]
150 /* This is where the BLOCK_NUM values are really stored.
151 This is set up by find_basic_blocks and used there and in life_analysis,
152 and then freed. */
154 int *uid_block_number;
156 /* INSN_VOLATILE (insn) is 1 if the insn refers to anything volatile. */
158 #define INSN_VOLATILE(INSN) uid_volatile[INSN_UID (INSN)]
159 static char *uid_volatile;
161 /* Number of basic blocks in the current function. */
163 int n_basic_blocks;
165 /* Maximum register number used in this function, plus one. */
167 int max_regno;
169 /* Maximum number of SCRATCH rtx's used in any basic block of this
170 function. */
172 int max_scratch;
174 /* Number of SCRATCH rtx's in the current block. */
176 static int num_scratch;
178 /* Indexed by n, giving various register information */
180 varray_type reg_n_info;
182 /* Size of the reg_n_info table. */
184 unsigned int reg_n_max;
186 /* Element N is the next insn that uses (hard or pseudo) register number N
187 within the current basic block; or zero, if there is no such insn.
188 This is valid only during the final backward scan in propagate_block. */
190 static rtx *reg_next_use;
192 /* Size of a regset for the current function,
193 in (1) bytes and (2) elements. */
195 int regset_bytes;
196 int regset_size;
198 /* Element N is first insn in basic block N.
199 This info lasts until we finish compiling the function. */
201 rtx *basic_block_head;
203 /* Element N is last insn in basic block N.
204 This info lasts until we finish compiling the function. */
206 rtx *basic_block_end;
208 /* Element N indicates whether basic block N can be reached through a
209 computed jump. */
211 char *basic_block_computed_jump_target;
213 /* Element N is a regset describing the registers live
214 at the start of basic block N.
215 This info lasts until we finish compiling the function. */
217 regset *basic_block_live_at_start;
219 /* Regset of regs live when calls to `setjmp'-like functions happen. */
221 regset regs_live_at_setjmp;
223 /* List made of EXPR_LIST rtx's which gives pairs of pseudo registers
224 that have to go in the same hard reg.
225 The first two regs in the list are a pair, and the next two
226 are another pair, etc. */
227 rtx regs_may_share;
229 /* Element N is nonzero if control can drop into basic block N
230 from the preceding basic block. Freed after life_analysis. */
232 static char *basic_block_drops_in;
234 /* Element N is depth within loops of the last insn in basic block number N.
235 Freed after life_analysis. */
237 static short *basic_block_loop_depth;
239 /* Depth within loops of basic block being scanned for lifetime analysis,
240 plus one. This is the weight attached to references to registers. */
242 static int loop_depth;
244 /* During propagate_block, this is non-zero if the value of CC0 is live. */
246 static int cc0_live;
248 /* During propagate_block, this contains the last MEM stored into. It
249 is used to eliminate consecutive stores to the same location. */
251 static rtx last_mem_set;
253 /* Set of registers that may be eliminable. These are handled specially
254 in updating regs_ever_live. */
256 static HARD_REG_SET elim_reg_set;
258 /* Forward declarations */
259 static void find_basic_blocks_1 PROTO((rtx, rtx));
260 static void make_edges PROTO((int));
261 static void mark_label_ref PROTO((rtx, rtx, int));
262 static int delete_unreachable_blocks PROTO((void));
263 static int delete_block PROTO((int));
264 static void life_analysis_1 PROTO((rtx, int));
265 static void propagate_block PROTO((regset, rtx, rtx, int,
266 regset, int));
267 static rtx flow_delete_insn PROTO((rtx));
268 static int set_noop_p PROTO((rtx));
269 static int noop_move_p PROTO((rtx));
270 static void record_volatile_insns PROTO((rtx));
271 static void mark_regs_live_at_end PROTO((regset));
272 static int insn_dead_p PROTO((rtx, regset, int));
273 static int libcall_dead_p PROTO((rtx, regset, rtx, rtx));
274 static void mark_set_regs PROTO((regset, regset, rtx,
275 rtx, regset));
276 static void mark_set_1 PROTO((regset, regset, rtx,
277 rtx, regset));
278 #ifdef AUTO_INC_DEC
279 static void find_auto_inc PROTO((regset, rtx, rtx));
280 static int try_pre_increment_1 PROTO((rtx));
281 static int try_pre_increment PROTO((rtx, rtx, HOST_WIDE_INT));
282 #endif
283 static void mark_used_regs PROTO((regset, regset, rtx, int, rtx));
284 void dump_flow_info PROTO((FILE *));
285 static void add_pred_succ PROTO ((int, int, int_list_ptr *,
286 int_list_ptr *, int *, int *));
287 static int_list_ptr alloc_int_list_node PROTO ((int_list_block **));
288 static int_list_ptr add_int_list_node PROTO ((int_list_block **,
289 int_list **, int));
290 static void init_regset_vector PROTO ((regset *, int,
291 struct obstack *));
292 static void count_reg_sets_1 PROTO ((rtx));
293 static void count_reg_sets PROTO ((rtx));
294 static void count_reg_references PROTO ((rtx));
295 static void notice_stack_pointer_modification PROTO ((rtx, rtx));
297 /* Find basic blocks of the current function.
298 F is the first insn of the function and NREGS the number of register numbers
299 in use.
300 LIVE_REACHABLE_P is non-zero if the caller needs all live blocks to
301 be reachable. This turns on a kludge that causes the control flow
302 information to be inaccurate and not suitable for passes like GCSE. */
304 void
305 find_basic_blocks (f, nregs, file)
306 rtx f;
307 int nregs;
308 FILE *file;
310 register rtx insn;
311 register int i;
312 rtx nonlocal_label_list = nonlocal_label_rtx_list ();
313 int in_libcall_block = 0;
314 int extra_uids_for_flow = 0;
316 /* Count the basic blocks. Also find maximum insn uid value used. */
319 register RTX_CODE prev_code = JUMP_INSN;
320 register RTX_CODE code;
321 int eh_region = 0;
322 int call_had_abnormal_edge = 0;
324 max_uid_for_flow = 0;
326 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
328 /* Track when we are inside in LIBCALL block. */
329 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
330 && find_reg_note (insn, REG_LIBCALL, NULL_RTX))
331 in_libcall_block = 1;
333 code = GET_CODE (insn);
334 if (INSN_UID (insn) > max_uid_for_flow)
335 max_uid_for_flow = INSN_UID (insn);
336 if (code == CODE_LABEL)
337 i++;
338 else if (GET_RTX_CLASS (code) == 'i')
340 if (prev_code == JUMP_INSN || prev_code == BARRIER)
341 i++;
342 else if (prev_code == CALL_INSN)
344 if (call_had_abnormal_edge)
345 i++;
346 else
348 /* Else this call does not force a new block to be
349 created. However, it may still be the end of a basic
350 block if it is followed by a CODE_LABEL or a BARRIER.
352 To disambiguate calls which force new blocks to be
353 created from those which just happen to be at the end
354 of a block we insert nops during find_basic_blocks_1
355 after calls which are the last insn in a block by
356 chance. We must account for such insns in
357 max_uid_for_flow. */
359 extra_uids_for_flow++;
364 /* We change the code of the CALL_INSN, so that it won't start a
365 new block. */
366 if (code == CALL_INSN && in_libcall_block)
367 code = INSN;
369 /* Record whether this call created an edge. */
370 if (code == CALL_INSN)
371 call_had_abnormal_edge = (nonlocal_label_list != 0 || eh_region);
373 if (code != NOTE)
374 prev_code = code;
375 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
376 ++eh_region;
377 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
378 --eh_region;
380 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
381 && find_reg_note (insn, REG_RETVAL, NULL_RTX))
382 in_libcall_block = 0;
386 n_basic_blocks = i;
388 #ifdef AUTO_INC_DEC
389 /* Leave space for insns life_analysis makes in some cases for auto-inc.
390 These cases are rare, so we don't need too much space. */
391 max_uid_for_flow += max_uid_for_flow / 10;
392 #endif
393 max_uid_for_flow += extra_uids_for_flow;
395 /* Allocate some tables that last till end of compiling this function
396 and some needed only in find_basic_blocks and life_analysis. */
398 basic_block_head = (rtx *) xmalloc (n_basic_blocks * sizeof (rtx));
399 basic_block_end = (rtx *) xmalloc (n_basic_blocks * sizeof (rtx));
400 basic_block_drops_in = (char *) xmalloc (n_basic_blocks);
401 basic_block_computed_jump_target = (char *) oballoc (n_basic_blocks);
402 basic_block_loop_depth = (short *) xmalloc (n_basic_blocks * sizeof (short));
403 uid_block_number
404 = (int *) xmalloc ((max_uid_for_flow + 1) * sizeof (int));
405 uid_volatile = (char *) xmalloc (max_uid_for_flow + 1);
406 bzero (uid_volatile, max_uid_for_flow + 1);
408 find_basic_blocks_1 (f, nonlocal_label_list);
411 /* For communication between find_basic_blocks_1 and its subroutines. */
413 /* An array of CODE_LABELs, indexed by UID for the start of the active
414 EH handler for each insn in F. */
415 static int *active_eh_region;
416 static int *nested_eh_region;
418 /* Element N nonzero if basic block N can actually be reached. */
420 static char *block_live_static;
422 /* List of label_refs to all labels whose addresses are taken
423 and used as data. */
424 static rtx label_value_list;
426 /* a list of non-local labels in the function. */
427 static rtx nonlocal_label_list;
429 /* Find all basic blocks of the function whose first insn is F.
430 Store the correct data in the tables that describe the basic blocks,
431 set up the chains of references for each CODE_LABEL, and
432 delete any entire basic blocks that cannot be reached.
434 NONLOCAL_LABELS is a list of non-local labels in the function.
435 Blocks that are otherwise unreachable may be reachable with a non-local
436 goto.
437 LIVE_REACHABLE_P is non-zero if the caller needs all live blocks to
438 be reachable. This turns on a kludge that causes the control flow
439 information to be inaccurate and not suitable for passes like GCSE. */
441 static void
442 find_basic_blocks_1 (f, nonlocal_labels)
443 rtx f, nonlocal_labels;
445 register rtx insn;
446 register int i;
447 register char *block_live = (char *) alloca (n_basic_blocks);
448 register char *block_marked = (char *) alloca (n_basic_blocks);
449 rtx note, eh_note;
450 enum rtx_code prev_code, code;
451 int depth, pass;
452 int in_libcall_block = 0;
453 int call_had_abnormal_edge = 0;
455 pass = 1;
456 active_eh_region = (int *) alloca ((max_uid_for_flow + 1) * sizeof (int));
457 nested_eh_region = (int *) alloca ((max_label_num () + 1) * sizeof (int));
458 nonlocal_label_list = nonlocal_labels;
459 restart:
461 label_value_list = 0;
462 block_live_static = block_live;
463 bzero (block_live, n_basic_blocks);
464 bzero (block_marked, n_basic_blocks);
465 bzero (basic_block_computed_jump_target, n_basic_blocks);
466 bzero ((char *) active_eh_region, (max_uid_for_flow + 1) * sizeof (int));
467 bzero ((char *) nested_eh_region, (max_label_num () + 1) * sizeof (int));
468 current_function_has_computed_jump = 0;
470 /* Initialize with just block 0 reachable and no blocks marked. */
471 if (n_basic_blocks > 0)
472 block_live[0] = 1;
474 /* Initialize the ref chain of each label to 0. Record where all the
475 blocks start and end and their depth in loops. For each insn, record
476 the block it is in. Also mark as reachable any blocks headed by labels
477 that must not be deleted. */
479 for (eh_note = NULL_RTX, insn = f, i = -1, prev_code = JUMP_INSN, depth = 1;
480 insn; insn = NEXT_INSN (insn))
483 /* Track when we are inside in LIBCALL block. */
484 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
485 && find_reg_note (insn, REG_LIBCALL, NULL_RTX))
486 in_libcall_block = 1;
488 code = GET_CODE (insn);
489 if (code == NOTE)
491 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
492 depth++;
493 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END)
494 depth--;
497 /* A basic block starts at label, or after something that can jump. */
498 else if (code == CODE_LABEL
499 || (GET_RTX_CLASS (code) == 'i'
500 && (prev_code == JUMP_INSN
501 || (prev_code == CALL_INSN && call_had_abnormal_edge)
502 || prev_code == BARRIER)))
504 basic_block_head[++i] = insn;
505 basic_block_end[i] = insn;
506 basic_block_loop_depth[i] = depth;
508 if (code == CODE_LABEL)
510 LABEL_REFS (insn) = insn;
511 /* Any label that cannot be deleted
512 is considered to start a reachable block. */
513 if (LABEL_PRESERVE_P (insn))
514 block_live[i] = 1;
517 /* If the previous insn was a call that did not create an
518 abnormal edge, we want to add a nop so that the CALL_INSN
519 itself is not at basic_block_end. This allows us to easily
520 distinguish between normal calls and those which create
521 abnormal edges in the flow graph. */
523 if (i > 0 && !call_had_abnormal_edge
524 && GET_CODE (basic_block_end[i-1]) == CALL_INSN)
526 rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
527 nop = emit_insn_after (nop, basic_block_end[i-1]);
528 basic_block_end[i-1] = nop;
532 else if (GET_RTX_CLASS (code) == 'i')
534 basic_block_end[i] = insn;
535 basic_block_loop_depth[i] = depth;
538 if (GET_RTX_CLASS (code) == 'i')
540 /* Make a list of all labels referred to other than by jumps. */
541 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
542 if (REG_NOTE_KIND (note) == REG_LABEL
543 && XEXP (note, 0) != eh_return_stub_label)
544 label_value_list = gen_rtx_EXPR_LIST (VOIDmode, XEXP (note, 0),
545 label_value_list);
548 /* Keep a lifo list of the currently active exception notes. */
549 if (GET_CODE (insn) == NOTE)
551 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
553 if (eh_note)
554 nested_eh_region [NOTE_BLOCK_NUMBER (insn)] =
555 NOTE_BLOCK_NUMBER (XEXP (eh_note, 0));
556 else
557 nested_eh_region [NOTE_BLOCK_NUMBER (insn)] = 0;
558 eh_note = gen_rtx_EXPR_LIST (VOIDmode,
559 insn, eh_note);
561 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
562 eh_note = XEXP (eh_note, 1);
564 /* If we encounter a CALL_INSN, note which exception handler it
565 might pass control to.
567 If doing asynchronous exceptions, record the active EH handler
568 for every insn, since most insns can throw. */
569 else if (eh_note
570 && (asynchronous_exceptions
571 || (GET_CODE (insn) == CALL_INSN
572 && ! in_libcall_block)))
573 active_eh_region[INSN_UID (insn)] =
574 NOTE_BLOCK_NUMBER (XEXP (eh_note, 0));
575 BLOCK_NUM (insn) = i;
577 /* We change the code of the CALL_INSN, so that it won't start a
578 new block. */
579 if (code == CALL_INSN && in_libcall_block)
580 code = INSN;
582 /* Record whether this call created an edge. */
583 if (code == CALL_INSN)
584 call_had_abnormal_edge = (nonlocal_label_list != 0 || eh_note);
586 if (code != NOTE)
587 prev_code = code;
589 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
590 && find_reg_note (insn, REG_RETVAL, NULL_RTX))
591 in_libcall_block = 0;
594 /* During the second pass, `n_basic_blocks' is only an upper bound.
595 Only perform the sanity check for the first pass, and on the second
596 pass ensure `n_basic_blocks' is set to the correct value. */
597 if (pass == 1 && i + 1 != n_basic_blocks)
598 abort ();
599 n_basic_blocks = i + 1;
601 /* Record which basic blocks control can drop in to. */
603 for (i = 0; i < n_basic_blocks; i++)
605 for (insn = PREV_INSN (basic_block_head[i]);
606 insn && GET_CODE (insn) == NOTE; insn = PREV_INSN (insn))
609 basic_block_drops_in[i] = insn && GET_CODE (insn) != BARRIER;
612 /* Now find which basic blocks can actually be reached
613 and put all jump insns' LABEL_REFS onto the ref-chains
614 of their target labels. */
616 if (n_basic_blocks > 0)
618 int something_marked = 1;
619 int deleted;
621 /* Pass over all blocks, marking each block that is reachable
622 and has not yet been marked.
623 Keep doing this until, in one pass, no blocks have been marked.
624 Then blocks_live and blocks_marked are identical and correct.
625 In addition, all jumps actually reachable have been marked. */
627 while (something_marked)
629 something_marked = 0;
630 for (i = 0; i < n_basic_blocks; i++)
631 if (block_live[i] && !block_marked[i])
633 block_marked[i] = 1;
634 something_marked = 1;
636 make_edges (i);
640 /* This should never happen. If it does that means we've computed an
641 incorrect flow graph, which can lead to aborts/crashes later in the
642 compiler or incorrect code generation.
644 We used to try and continue here, but that's just asking for trouble
645 later during the compile or at runtime. It's easier to debug the
646 problem here than later! */
647 for (i = 1; i < n_basic_blocks; i++)
648 if (block_live[i] && ! basic_block_drops_in[i]
649 && GET_CODE (basic_block_head[i]) == CODE_LABEL
650 && LABEL_REFS (basic_block_head[i]) == basic_block_head[i])
651 abort ();
653 deleted = delete_unreachable_blocks ();
655 /* There are pathological cases where one function calling hundreds of
656 nested inline functions can generate lots and lots of unreachable
657 blocks that jump can't delete. Since we don't use sparse matrices
658 a lot of memory will be needed to compile such functions.
659 Implementing sparse matrices is a fair bit of work and it is not
660 clear that they win more than they lose (we don't want to
661 unnecessarily slow down compilation of normal code). By making
662 another pass for the pathological case, we can greatly speed up
663 their compilation without hurting normal code. This works because
664 all the insns in the unreachable blocks have either been deleted or
665 turned into notes.
666 Note that we're talking about reducing memory usage by 10's of
667 megabytes and reducing compilation time by several minutes. */
668 /* ??? The choice of when to make another pass is a bit arbitrary,
669 and was derived from empirical data. */
670 if (pass == 1
671 && deleted > 200)
673 pass++;
674 n_basic_blocks -= deleted;
675 /* `n_basic_blocks' may not be correct at this point: two previously
676 separate blocks may now be merged. That's ok though as we
677 recalculate it during the second pass. It certainly can't be
678 any larger than the current value. */
679 goto restart;
684 /* Record INSN's block number as BB. */
686 void
687 set_block_num (insn, bb)
688 rtx insn;
689 int bb;
691 if (INSN_UID (insn) >= max_uid_for_flow)
693 /* Add one-eighth the size so we don't keep calling xrealloc. */
694 max_uid_for_flow = INSN_UID (insn) + (INSN_UID (insn) + 7) / 8;
695 uid_block_number = (int *)
696 xrealloc (uid_block_number, (max_uid_for_flow + 1) * sizeof (int));
698 BLOCK_NUM (insn) = bb;
702 /* Subroutines of find_basic_blocks. */
704 /* For basic block I, make edges and mark live all blocks which are reachable
705 from it. */
706 static void
707 make_edges (i)
708 int i;
710 rtx insn, x;
712 if (i + 1 < n_basic_blocks && basic_block_drops_in[i + 1])
713 block_live_static[i + 1] = 1;
714 insn = basic_block_end[i];
715 if (GET_CODE (insn) == JUMP_INSN)
716 mark_label_ref (PATTERN (insn), insn, 0);
718 /* If we have any forced labels, mark them as potentially reachable from
719 this block. */
720 for (x = forced_labels; x; x = XEXP (x, 1))
721 if (! LABEL_REF_NONLOCAL_P (x))
722 mark_label_ref (gen_rtx_LABEL_REF (VOIDmode, XEXP (x, 0)),
723 insn, 0);
725 /* Now scan the insns for this block, we may need to make edges for some of
726 them to various non-obvious locations (exception handlers, nonlocal
727 labels, etc). */
728 for (insn = basic_block_head[i];
729 insn != NEXT_INSN (basic_block_end[i]);
730 insn = NEXT_INSN (insn))
732 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
734 rtx note;
735 /* References to labels in non-jumping insns have REG_LABEL notes
736 attached to them.
738 This can happen for computed gotos; we don't care about them
739 here since the values are also on the label_value_list and will
740 be marked live if we find a live computed goto.
742 This can also happen when we take the address of a label to pass
743 as an argument to __throw. Note throw only uses the value to
744 determine what handler should be called -- ie the label is not
745 used as a jump target, it just marks regions in the code.
747 In theory we should be able to ignore the REG_LABEL notes, but
748 we have to make sure that the label and associated insns aren't
749 marked dead, so we make the block in question live and create an
750 edge from this insn to the label. This is not strictly correct,
751 but it is close enough for now.
753 See below for code that handles the eh_stub label specially. */
754 for (note = REG_NOTES (insn);
755 note;
756 note = XEXP (note, 1))
758 if (REG_NOTE_KIND (note) == REG_LABEL
759 && XEXP (note, 0) != eh_return_stub_label)
761 x = XEXP (note, 0);
762 block_live_static[BLOCK_NUM (x)] = 1;
763 mark_label_ref (gen_rtx_LABEL_REF (VOIDmode, x),
764 insn, 0);
768 /* If this is a computed jump, then mark it as reaching everything
769 on the label_value_list and forced_labels list. */
770 if (computed_jump_p (insn))
772 current_function_has_computed_jump = 1;
773 for (x = label_value_list; x; x = XEXP (x, 1))
775 int b = BLOCK_NUM (XEXP (x, 0));
776 basic_block_computed_jump_target[b] = 1;
777 mark_label_ref (gen_rtx_LABEL_REF (VOIDmode, XEXP (x, 0)),
778 insn, 0);
781 for (x = forced_labels; x; x = XEXP (x, 1))
783 int b = BLOCK_NUM (XEXP (x, 0));
784 basic_block_computed_jump_target[b] = 1;
785 mark_label_ref (gen_rtx_LABEL_REF (VOIDmode, XEXP (x, 0)),
786 insn, 0);
790 /* If this is a CALL_INSN, then mark it as reaching the active EH
791 handler for this CALL_INSN. If we're handling asynchronous
792 exceptions mark every insn as reaching the active EH handler.
794 Also mark the CALL_INSN as reaching any nonlocal goto sites. */
795 else if (asynchronous_exceptions
796 || (GET_CODE (insn) == CALL_INSN
797 && ! find_reg_note (insn, REG_RETVAL, NULL_RTX)))
799 if (active_eh_region[INSN_UID (insn)])
801 int region;
802 handler_info *ptr;
803 region = active_eh_region[INSN_UID (insn)];
804 for ( ; region;
805 region = nested_eh_region[region])
807 ptr = get_first_handler (region);
808 for ( ; ptr ; ptr = ptr->next)
809 mark_label_ref (gen_rtx_LABEL_REF (VOIDmode,
810 ptr->handler_label),
811 insn, 0);
814 if (! asynchronous_exceptions)
816 for (x = nonlocal_label_list; x; x = XEXP (x, 1))
817 mark_label_ref (gen_rtx_LABEL_REF (VOIDmode, XEXP (x, 0)),
818 insn, 0);
820 /* ??? This could be made smarter: in some cases it's possible
821 to tell that certain calls will not do a nonlocal goto.
823 For example, if the nested functions that do the nonlocal
824 gotos do not have their addresses taken, then only calls to
825 those functions or to other nested functions that use them
826 could possibly do nonlocal gotos. */
830 /* We know something about the structure of the function __throw in
831 libgcc2.c. It is the only function that ever contains eh_stub labels.
832 It modifies its return address so that the last block returns to one of
833 the eh_stub labels within it. So we have to make additional edges in
834 the flow graph. */
835 if (i + 1 == n_basic_blocks && eh_return_stub_label != 0)
837 mark_label_ref (gen_rtx_LABEL_REF (VOIDmode, eh_return_stub_label),
838 basic_block_end[i], 0);
842 /* Check expression X for label references;
843 if one is found, add INSN to the label's chain of references.
845 CHECKDUP means check for and avoid creating duplicate references
846 from the same insn. Such duplicates do no serious harm but
847 can slow life analysis. CHECKDUP is set only when duplicates
848 are likely. */
850 static void
851 mark_label_ref (x, insn, checkdup)
852 rtx x, insn;
853 int checkdup;
855 register RTX_CODE code;
856 register int i;
857 register char *fmt;
859 /* We can be called with NULL when scanning label_value_list. */
860 if (x == 0)
861 return;
863 code = GET_CODE (x);
864 if (code == LABEL_REF)
866 register rtx label = XEXP (x, 0);
867 register rtx y;
868 if (GET_CODE (label) != CODE_LABEL)
869 abort ();
870 /* If the label was never emitted, this insn is junk,
871 but avoid a crash trying to refer to BLOCK_NUM (label).
872 This can happen as a result of a syntax error
873 and a diagnostic has already been printed. */
874 if (INSN_UID (label) == 0)
875 return;
876 CONTAINING_INSN (x) = insn;
877 /* if CHECKDUP is set, check for duplicate ref from same insn
878 and don't insert. */
879 if (checkdup)
880 for (y = LABEL_REFS (label); y != label; y = LABEL_NEXTREF (y))
881 if (CONTAINING_INSN (y) == insn)
882 return;
883 LABEL_NEXTREF (x) = LABEL_REFS (label);
884 LABEL_REFS (label) = x;
885 block_live_static[BLOCK_NUM (label)] = 1;
886 return;
889 fmt = GET_RTX_FORMAT (code);
890 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
892 if (fmt[i] == 'e')
893 mark_label_ref (XEXP (x, i), insn, 0);
894 if (fmt[i] == 'E')
896 register int j;
897 for (j = 0; j < XVECLEN (x, i); j++)
898 mark_label_ref (XVECEXP (x, i, j), insn, 1);
903 /* Now delete the code for any basic blocks that can't be reached.
904 They can occur because jump_optimize does not recognize unreachable loops
905 as unreachable.
906 Return the number of deleted blocks. */
907 static int
908 delete_unreachable_blocks ()
910 int deleted_handler = 0;
911 int deleted = 0;
912 int i;
913 rtx insn;
915 for (i = 0; i < n_basic_blocks; i++)
916 if (! block_live_static[i])
918 deleted++;
920 deleted_handler |= delete_block (i);
923 /* If we deleted an exception handler, we may have EH region
924 begin/end blocks to remove as well. */
925 if (deleted_handler)
926 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
927 if (GET_CODE (insn) == NOTE)
929 if ((NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG) ||
930 (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
932 int num = CODE_LABEL_NUMBER (insn);
933 /* A NULL handler indicates a region is no longer needed */
934 if (get_first_handler (num) == NULL)
936 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
937 NOTE_SOURCE_FILE (insn) = 0;
941 return deleted;
944 /* Delete the insns in a (non-live) block. We physically delete every
945 non-note insn except the start and end (so basic_block_head/end needn't
946 be updated), we turn the latter into NOTE_INSN_DELETED notes.
948 We use to "delete" the insns by turning them into notes, but we may be
949 deleting lots of insns that subsequent passes would otherwise have to
950 process. Secondly, lots of deleted blocks in a row can really slow down
951 propagate_block since it will otherwise process insn-turned-notes multiple
952 times when it looks for loop begin/end notes.
954 Return nonzero if we deleted an exception handler. */
955 static int
956 delete_block (i)
957 int i;
959 int deleted_handler = 0;
960 rtx insn;
962 if (basic_block_head[i] != basic_block_end[i])
964 /* It would be quicker to delete all of these with a single
965 unchaining, rather than one at a time, but we need to keep
966 the NOTE's. */
967 insn = NEXT_INSN (basic_block_head[i]);
968 while (insn != basic_block_end[i])
970 if (GET_CODE (insn) == BARRIER)
971 abort ();
972 else if (GET_CODE (insn) != NOTE)
973 insn = flow_delete_insn (insn);
974 else
975 insn = NEXT_INSN (insn);
979 insn = basic_block_head[i];
980 if (GET_CODE (insn) != NOTE)
982 /* Turn the head into a deleted insn note. */
983 if (GET_CODE (insn) == BARRIER)
984 abort ();
986 /* If the head of this block is a CODE_LABEL, then it might
987 be the label for an exception handler which can't be
988 reached.
990 We need to remove the label from the exception_handler_label
991 list and remove the associated NOTE_EH_REGION_BEG and
992 NOTE_EH_REGION_END notes. */
993 if (GET_CODE (insn) == CODE_LABEL)
995 rtx x, *prev = &exception_handler_labels;
997 for (x = exception_handler_labels; x; x = XEXP (x, 1))
999 if (XEXP (x, 0) == insn)
1001 /* Found a match, splice this label out of the
1002 EH label list. */
1003 *prev = XEXP (x, 1);
1004 XEXP (x, 1) = NULL_RTX;
1005 XEXP (x, 0) = NULL_RTX;
1007 /* Remove the handler from all regions */
1008 remove_handler (insn);
1009 deleted_handler = 1;
1010 break;
1012 prev = &XEXP (x, 1);
1016 PUT_CODE (insn, NOTE);
1017 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1018 NOTE_SOURCE_FILE (insn) = 0;
1020 insn = basic_block_end[i];
1021 if (GET_CODE (insn) != NOTE)
1023 /* Turn the tail into a deleted insn note. */
1024 if (GET_CODE (insn) == BARRIER)
1025 abort ();
1026 PUT_CODE (insn, NOTE);
1027 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1028 NOTE_SOURCE_FILE (insn) = 0;
1030 /* BARRIERs are between basic blocks, not part of one.
1031 Delete a BARRIER if the preceding jump is deleted.
1032 We cannot alter a BARRIER into a NOTE
1033 because it is too short; but we can really delete
1034 it because it is not part of a basic block. */
1035 if (NEXT_INSN (insn) != 0
1036 && GET_CODE (NEXT_INSN (insn)) == BARRIER)
1037 delete_insn (NEXT_INSN (insn));
1039 /* Each time we delete some basic blocks,
1040 see if there is a jump around them that is
1041 being turned into a no-op. If so, delete it. */
1043 if (block_live_static[i - 1])
1045 register int j;
1046 for (j = i + 1; j < n_basic_blocks; j++)
1047 if (block_live_static[j])
1049 rtx label;
1050 insn = basic_block_end[i - 1];
1051 if (GET_CODE (insn) == JUMP_INSN
1052 /* An unconditional jump is the only possibility
1053 we must check for, since a conditional one
1054 would make these blocks live. */
1055 && simplejump_p (insn)
1056 && (label = XEXP (SET_SRC (PATTERN (insn)), 0), 1)
1057 && INSN_UID (label) != 0
1058 && BLOCK_NUM (label) == j)
1060 int k;
1062 /* The deleted blocks still show up in the cfg,
1063 so we must set basic_block_drops_in for blocks
1064 I to J inclusive to keep the cfg accurate. */
1065 for (k = i; k <= j; k++)
1066 basic_block_drops_in[k] = 1;
1068 PUT_CODE (insn, NOTE);
1069 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1070 NOTE_SOURCE_FILE (insn) = 0;
1071 if (GET_CODE (NEXT_INSN (insn)) != BARRIER)
1072 abort ();
1073 delete_insn (NEXT_INSN (insn));
1075 break;
1079 return deleted_handler;
1082 /* Delete INSN by patching it out.
1083 Return the next insn. */
1085 static rtx
1086 flow_delete_insn (insn)
1087 rtx insn;
1089 /* ??? For the moment we assume we don't have to watch for NULLs here
1090 since the start/end of basic blocks aren't deleted like this. */
1091 NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
1092 PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
1093 return NEXT_INSN (insn);
1096 /* Perform data flow analysis.
1097 F is the first insn of the function and NREGS the number of register numbers
1098 in use. */
1100 void
1101 life_analysis (f, nregs, file)
1102 rtx f;
1103 int nregs;
1104 FILE *file;
1106 #ifdef ELIMINABLE_REGS
1107 register size_t i;
1108 static struct {int from, to; } eliminables[] = ELIMINABLE_REGS;
1109 #endif
1111 /* Record which registers will be eliminated. We use this in
1112 mark_used_regs. */
1114 CLEAR_HARD_REG_SET (elim_reg_set);
1116 #ifdef ELIMINABLE_REGS
1117 for (i = 0; i < sizeof eliminables / sizeof eliminables[0]; i++)
1118 SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
1119 #else
1120 SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
1121 #endif
1123 life_analysis_1 (f, nregs);
1124 if (file)
1125 dump_flow_info (file);
1127 free_basic_block_vars (1);
1130 /* Free the variables allocated by find_basic_blocks.
1132 KEEP_HEAD_END_P is non-zero if basic_block_head and basic_block_end
1133 are not to be freed. */
1135 void
1136 free_basic_block_vars (keep_head_end_p)
1137 int keep_head_end_p;
1139 if (basic_block_drops_in)
1141 free (basic_block_drops_in);
1142 /* Tell dump_flow_info this isn't available anymore. */
1143 basic_block_drops_in = 0;
1145 if (basic_block_loop_depth)
1147 free (basic_block_loop_depth);
1148 basic_block_loop_depth = 0;
1150 if (uid_block_number)
1152 free (uid_block_number);
1153 uid_block_number = 0;
1155 if (uid_volatile)
1157 free (uid_volatile);
1158 uid_volatile = 0;
1161 if (! keep_head_end_p && basic_block_head)
1163 free (basic_block_head);
1164 basic_block_head = 0;
1165 free (basic_block_end);
1166 basic_block_end = 0;
1170 /* Return nonzero if the destination of SET equals the source. */
1171 static int
1172 set_noop_p (set)
1173 rtx set;
1175 rtx src = SET_SRC (set);
1176 rtx dst = SET_DEST (set);
1177 if (GET_CODE (src) == REG && GET_CODE (dst) == REG
1178 && REGNO (src) == REGNO (dst))
1179 return 1;
1180 if (GET_CODE (src) != SUBREG || GET_CODE (dst) != SUBREG
1181 || SUBREG_WORD (src) != SUBREG_WORD (dst))
1182 return 0;
1183 src = SUBREG_REG (src);
1184 dst = SUBREG_REG (dst);
1185 if (GET_CODE (src) == REG && GET_CODE (dst) == REG
1186 && REGNO (src) == REGNO (dst))
1187 return 1;
1188 return 0;
1191 /* Return nonzero if an insn consists only of SETs, each of which only sets a
1192 value to itself. */
1193 static int
1194 noop_move_p (insn)
1195 rtx insn;
1197 rtx pat = PATTERN (insn);
1199 /* Insns carrying these notes are useful later on. */
1200 if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
1201 return 0;
1203 if (GET_CODE (pat) == SET && set_noop_p (pat))
1204 return 1;
1206 if (GET_CODE (pat) == PARALLEL)
1208 int i;
1209 /* If nothing but SETs of registers to themselves,
1210 this insn can also be deleted. */
1211 for (i = 0; i < XVECLEN (pat, 0); i++)
1213 rtx tem = XVECEXP (pat, 0, i);
1215 if (GET_CODE (tem) == USE
1216 || GET_CODE (tem) == CLOBBER)
1217 continue;
1219 if (GET_CODE (tem) != SET || ! set_noop_p (tem))
1220 return 0;
1223 return 1;
1225 return 0;
1228 static void
1229 notice_stack_pointer_modification (x, pat)
1230 rtx x;
1231 rtx pat ATTRIBUTE_UNUSED;
1233 if (x == stack_pointer_rtx
1234 /* The stack pointer is only modified indirectly as the result
1235 of a push until later in flow. See the comments in rtl.texi
1236 regarding Embedded Side-Effects on Addresses. */
1237 || (GET_CODE (x) == MEM
1238 && (GET_CODE (XEXP (x, 0)) == PRE_DEC
1239 || GET_CODE (XEXP (x, 0)) == PRE_INC
1240 || GET_CODE (XEXP (x, 0)) == POST_DEC
1241 || GET_CODE (XEXP (x, 0)) == POST_INC)
1242 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx))
1243 current_function_sp_is_unchanging = 0;
1246 /* Record which insns refer to any volatile memory
1247 or for any reason can't be deleted just because they are dead stores.
1248 Also, delete any insns that copy a register to itself.
1249 And see if the stack pointer is modified. */
1250 static void
1251 record_volatile_insns (f)
1252 rtx f;
1254 rtx insn;
1255 for (insn = f; insn; insn = NEXT_INSN (insn))
1257 enum rtx_code code1 = GET_CODE (insn);
1258 if (code1 == CALL_INSN)
1259 INSN_VOLATILE (insn) = 1;
1260 else if (code1 == INSN || code1 == JUMP_INSN)
1262 if (GET_CODE (PATTERN (insn)) != USE
1263 && volatile_refs_p (PATTERN (insn)))
1264 INSN_VOLATILE (insn) = 1;
1266 /* A SET that makes space on the stack cannot be dead.
1267 (Such SETs occur only for allocating variable-size data,
1268 so they will always have a PLUS or MINUS according to the
1269 direction of stack growth.)
1270 Even if this function never uses this stack pointer value,
1271 signal handlers do! */
1272 else if (code1 == INSN && GET_CODE (PATTERN (insn)) == SET
1273 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
1274 #ifdef STACK_GROWS_DOWNWARD
1275 && GET_CODE (SET_SRC (PATTERN (insn))) == MINUS
1276 #else
1277 && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
1278 #endif
1279 && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx)
1280 INSN_VOLATILE (insn) = 1;
1282 /* Delete (in effect) any obvious no-op moves. */
1283 else if (noop_move_p (insn))
1285 PUT_CODE (insn, NOTE);
1286 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1287 NOTE_SOURCE_FILE (insn) = 0;
1291 /* Check if insn modifies the stack pointer. */
1292 if ( current_function_sp_is_unchanging
1293 && GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1294 note_stores (PATTERN (insn), notice_stack_pointer_modification);
1298 /* Mark those regs which are needed at the end of the function as live
1299 at the end of the last basic block. */
1300 static void
1301 mark_regs_live_at_end (set)
1302 regset set;
1304 int i;
1306 #ifdef EXIT_IGNORE_STACK
1307 if (! EXIT_IGNORE_STACK
1308 || (! FRAME_POINTER_REQUIRED
1309 && ! current_function_calls_alloca
1310 && flag_omit_frame_pointer)
1311 || current_function_sp_is_unchanging)
1312 #endif
1313 /* If exiting needs the right stack value,
1314 consider the stack pointer live at the end of the function. */
1315 SET_REGNO_REG_SET (set, STACK_POINTER_REGNUM);
1317 /* Mark the frame pointer is needed at the end of the function. If
1318 we end up eliminating it, it will be removed from the live list
1319 of each basic block by reload. */
1321 SET_REGNO_REG_SET (set, FRAME_POINTER_REGNUM);
1322 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
1323 /* If they are different, also mark the hard frame pointer as live */
1324 SET_REGNO_REG_SET (set, HARD_FRAME_POINTER_REGNUM);
1325 #endif
1328 /* Mark all global registers and all registers used by the epilogue
1329 as being live at the end of the function since they may be
1330 referenced by our caller. */
1331 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1332 if (global_regs[i]
1333 #ifdef EPILOGUE_USES
1334 || EPILOGUE_USES (i)
1335 #endif
1337 SET_REGNO_REG_SET (set, i);
1340 /* Determine which registers are live at the start of each
1341 basic block of the function whose first insn is F.
1342 NREGS is the number of registers used in F.
1343 We allocate the vector basic_block_live_at_start
1344 and the regsets that it points to, and fill them with the data.
1345 regset_size and regset_bytes are also set here. */
1347 static void
1348 life_analysis_1 (f, nregs)
1349 rtx f;
1350 int nregs;
1352 int first_pass;
1353 int changed;
1354 /* For each basic block, a bitmask of regs
1355 live on exit from the block. */
1356 regset *basic_block_live_at_end;
1357 /* For each basic block, a bitmask of regs
1358 live on entry to a successor-block of this block.
1359 If this does not match basic_block_live_at_end,
1360 that must be updated, and the block must be rescanned. */
1361 regset *basic_block_new_live_at_end;
1362 /* For each basic block, a bitmask of regs
1363 whose liveness at the end of the basic block
1364 can make a difference in which regs are live on entry to the block.
1365 These are the regs that are set within the basic block,
1366 possibly excluding those that are used after they are set. */
1367 regset *basic_block_significant;
1368 register int i;
1369 rtx insn;
1371 struct obstack flow_obstack;
1373 gcc_obstack_init (&flow_obstack);
1375 max_regno = nregs;
1377 bzero (regs_ever_live, sizeof regs_ever_live);
1379 /* Allocate and zero out many data structures
1380 that will record the data from lifetime analysis. */
1382 allocate_for_life_analysis ();
1384 reg_next_use = (rtx *) alloca (nregs * sizeof (rtx));
1385 bzero ((char *) reg_next_use, nregs * sizeof (rtx));
1387 /* Set up several regset-vectors used internally within this function.
1388 Their meanings are documented above, with their declarations. */
1390 basic_block_live_at_end
1391 = (regset *) alloca (n_basic_blocks * sizeof (regset));
1393 /* Don't use alloca since that leads to a crash rather than an error message
1394 if there isn't enough space.
1395 Don't use oballoc since we may need to allocate other things during
1396 this function on the temporary obstack. */
1397 init_regset_vector (basic_block_live_at_end, n_basic_blocks, &flow_obstack);
1399 basic_block_new_live_at_end
1400 = (regset *) alloca (n_basic_blocks * sizeof (regset));
1401 init_regset_vector (basic_block_new_live_at_end, n_basic_blocks,
1402 &flow_obstack);
1404 basic_block_significant
1405 = (regset *) alloca (n_basic_blocks * sizeof (regset));
1406 init_regset_vector (basic_block_significant, n_basic_blocks, &flow_obstack);
1408 /* Assume that the stack pointer is unchanging if alloca hasn't been used.
1409 This will be cleared by record_volatile_insns if it encounters an insn
1410 which modifies the stack pointer. */
1411 current_function_sp_is_unchanging = !current_function_calls_alloca;
1413 record_volatile_insns (f);
1415 if (n_basic_blocks > 0)
1417 mark_regs_live_at_end (basic_block_live_at_end[n_basic_blocks - 1]);
1418 COPY_REG_SET (basic_block_new_live_at_end[n_basic_blocks - 1],
1419 basic_block_live_at_end[n_basic_blocks - 1]);
1422 /* Propagate life info through the basic blocks
1423 around the graph of basic blocks.
1425 This is a relaxation process: each time a new register
1426 is live at the end of the basic block, we must scan the block
1427 to determine which registers are, as a consequence, live at the beginning
1428 of that block. These registers must then be marked live at the ends
1429 of all the blocks that can transfer control to that block.
1430 The process continues until it reaches a fixed point. */
1432 first_pass = 1;
1433 changed = 1;
1434 while (changed)
1436 changed = 0;
1437 for (i = n_basic_blocks - 1; i >= 0; i--)
1439 int consider = first_pass;
1440 int must_rescan = first_pass;
1441 register int j;
1443 if (!first_pass)
1445 /* Set CONSIDER if this block needs thinking about at all
1446 (that is, if the regs live now at the end of it
1447 are not the same as were live at the end of it when
1448 we last thought about it).
1449 Set must_rescan if it needs to be thought about
1450 instruction by instruction (that is, if any additional
1451 reg that is live at the end now but was not live there before
1452 is one of the significant regs of this basic block). */
1454 EXECUTE_IF_AND_COMPL_IN_REG_SET
1455 (basic_block_new_live_at_end[i],
1456 basic_block_live_at_end[i], 0, j,
1458 consider = 1;
1459 if (REGNO_REG_SET_P (basic_block_significant[i], j))
1461 must_rescan = 1;
1462 goto done;
1465 done:
1466 if (! consider)
1467 continue;
1470 /* The live_at_start of this block may be changing,
1471 so another pass will be required after this one. */
1472 changed = 1;
1474 if (! must_rescan)
1476 /* No complete rescan needed;
1477 just record those variables newly known live at end
1478 as live at start as well. */
1479 IOR_AND_COMPL_REG_SET (basic_block_live_at_start[i],
1480 basic_block_new_live_at_end[i],
1481 basic_block_live_at_end[i]);
1483 IOR_AND_COMPL_REG_SET (basic_block_live_at_end[i],
1484 basic_block_new_live_at_end[i],
1485 basic_block_live_at_end[i]);
1487 else
1489 /* Update the basic_block_live_at_start
1490 by propagation backwards through the block. */
1491 COPY_REG_SET (basic_block_live_at_end[i],
1492 basic_block_new_live_at_end[i]);
1493 COPY_REG_SET (basic_block_live_at_start[i],
1494 basic_block_live_at_end[i]);
1495 propagate_block (basic_block_live_at_start[i],
1496 basic_block_head[i], basic_block_end[i], 0,
1497 first_pass ? basic_block_significant[i]
1498 : (regset) 0,
1503 register rtx jump, head;
1505 /* Update the basic_block_new_live_at_end's of the block
1506 that falls through into this one (if any). */
1507 head = basic_block_head[i];
1508 if (basic_block_drops_in[i])
1509 IOR_REG_SET (basic_block_new_live_at_end[i-1],
1510 basic_block_live_at_start[i]);
1512 /* Update the basic_block_new_live_at_end's of
1513 all the blocks that jump to this one. */
1514 if (GET_CODE (head) == CODE_LABEL)
1515 for (jump = LABEL_REFS (head);
1516 jump != head;
1517 jump = LABEL_NEXTREF (jump))
1519 register int from_block = BLOCK_NUM (CONTAINING_INSN (jump));
1520 IOR_REG_SET (basic_block_new_live_at_end[from_block],
1521 basic_block_live_at_start[i]);
1524 #ifdef USE_C_ALLOCA
1525 alloca (0);
1526 #endif
1528 first_pass = 0;
1531 /* The only pseudos that are live at the beginning of the function are
1532 those that were not set anywhere in the function. local-alloc doesn't
1533 know how to handle these correctly, so mark them as not local to any
1534 one basic block. */
1536 if (n_basic_blocks > 0)
1537 EXECUTE_IF_SET_IN_REG_SET (basic_block_live_at_start[0],
1538 FIRST_PSEUDO_REGISTER, i,
1540 REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL;
1543 /* Now the life information is accurate.
1544 Make one more pass over each basic block
1545 to delete dead stores, create autoincrement addressing
1546 and record how many times each register is used, is set, or dies.
1548 To save time, we operate directly in basic_block_live_at_end[i],
1549 thus destroying it (in fact, converting it into a copy of
1550 basic_block_live_at_start[i]). This is ok now because
1551 basic_block_live_at_end[i] is no longer used past this point. */
1553 max_scratch = 0;
1555 for (i = 0; i < n_basic_blocks; i++)
1557 propagate_block (basic_block_live_at_end[i],
1558 basic_block_head[i], basic_block_end[i], 1,
1559 (regset) 0, i);
1560 #ifdef USE_C_ALLOCA
1561 alloca (0);
1562 #endif
1565 #if 0
1566 /* Something live during a setjmp should not be put in a register
1567 on certain machines which restore regs from stack frames
1568 rather than from the jmpbuf.
1569 But we don't need to do this for the user's variables, since
1570 ANSI says only volatile variables need this. */
1571 #ifdef LONGJMP_RESTORE_FROM_STACK
1572 EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
1573 FIRST_PSEUDO_REGISTER, i,
1575 if (regno_reg_rtx[i] != 0
1576 && ! REG_USERVAR_P (regno_reg_rtx[i]))
1578 REG_LIVE_LENGTH (i) = -1;
1579 REG_BASIC_BLOCK (i) = -1;
1582 #endif
1583 #endif
1585 /* We have a problem with any pseudoreg that
1586 lives across the setjmp. ANSI says that if a
1587 user variable does not change in value
1588 between the setjmp and the longjmp, then the longjmp preserves it.
1589 This includes longjmp from a place where the pseudo appears dead.
1590 (In principle, the value still exists if it is in scope.)
1591 If the pseudo goes in a hard reg, some other value may occupy
1592 that hard reg where this pseudo is dead, thus clobbering the pseudo.
1593 Conclusion: such a pseudo must not go in a hard reg. */
1594 EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
1595 FIRST_PSEUDO_REGISTER, i,
1597 if (regno_reg_rtx[i] != 0)
1599 REG_LIVE_LENGTH (i) = -1;
1600 REG_BASIC_BLOCK (i) = -1;
1605 free_regset_vector (basic_block_live_at_end, n_basic_blocks);
1606 free_regset_vector (basic_block_new_live_at_end, n_basic_blocks);
1607 free_regset_vector (basic_block_significant, n_basic_blocks);
1608 basic_block_live_at_end = (regset *)0;
1609 basic_block_new_live_at_end = (regset *)0;
1610 basic_block_significant = (regset *)0;
1612 obstack_free (&flow_obstack, NULL_PTR);
1615 /* Subroutines of life analysis. */
1617 /* Allocate the permanent data structures that represent the results
1618 of life analysis. Not static since used also for stupid life analysis. */
1620 void
1621 allocate_for_life_analysis ()
1623 register int i;
1625 /* Recalculate the register space, in case it has grown. Old style
1626 vector oriented regsets would set regset_{size,bytes} here also. */
1627 allocate_reg_info (max_regno, FALSE, FALSE);
1629 /* Because both reg_scan and flow_analysis want to set up the REG_N_SETS
1630 information, explicitly reset it here. The allocation should have
1631 already happened on the previous reg_scan pass. Make sure in case
1632 some more registers were allocated. */
1633 for (i = 0; i < max_regno; i++)
1634 REG_N_SETS (i) = 0;
1636 basic_block_live_at_start
1637 = (regset *) oballoc (n_basic_blocks * sizeof (regset));
1638 init_regset_vector (basic_block_live_at_start, n_basic_blocks,
1639 function_obstack);
1641 regs_live_at_setjmp = OBSTACK_ALLOC_REG_SET (function_obstack);
1642 CLEAR_REG_SET (regs_live_at_setjmp);
1645 /* Make each element of VECTOR point at a regset. The vector has
1646 NELTS elements, and space is allocated from the ALLOC_OBSTACK
1647 obstack. */
1649 static void
1650 init_regset_vector (vector, nelts, alloc_obstack)
1651 regset *vector;
1652 int nelts;
1653 struct obstack *alloc_obstack;
1655 register int i;
1657 for (i = 0; i < nelts; i++)
1659 vector[i] = OBSTACK_ALLOC_REG_SET (alloc_obstack);
1660 CLEAR_REG_SET (vector[i]);
1664 /* Release any additional space allocated for each element of VECTOR point
1665 other than the regset header itself. The vector has NELTS elements. */
1667 void
1668 free_regset_vector (vector, nelts)
1669 regset *vector;
1670 int nelts;
1672 register int i;
1674 for (i = 0; i < nelts; i++)
1675 FREE_REG_SET (vector[i]);
1678 /* Compute the registers live at the beginning of a basic block
1679 from those live at the end.
1681 When called, OLD contains those live at the end.
1682 On return, it contains those live at the beginning.
1683 FIRST and LAST are the first and last insns of the basic block.
1685 FINAL is nonzero if we are doing the final pass which is not
1686 for computing the life info (since that has already been done)
1687 but for acting on it. On this pass, we delete dead stores,
1688 set up the logical links and dead-variables lists of instructions,
1689 and merge instructions for autoincrement and autodecrement addresses.
1691 SIGNIFICANT is nonzero only the first time for each basic block.
1692 If it is nonzero, it points to a regset in which we store
1693 a 1 for each register that is set within the block.
1695 BNUM is the number of the basic block. */
1697 static void
1698 propagate_block (old, first, last, final, significant, bnum)
1699 register regset old;
1700 rtx first;
1701 rtx last;
1702 int final;
1703 regset significant;
1704 int bnum;
1706 register rtx insn;
1707 rtx prev;
1708 regset live;
1709 regset dead;
1711 /* The following variables are used only if FINAL is nonzero. */
1712 /* This vector gets one element for each reg that has been live
1713 at any point in the basic block that has been scanned so far.
1714 SOMETIMES_MAX says how many elements are in use so far. */
1715 register int *regs_sometimes_live;
1716 int sometimes_max = 0;
1717 /* This regset has 1 for each reg that we have seen live so far.
1718 It and REGS_SOMETIMES_LIVE are updated together. */
1719 regset maxlive;
1721 /* The loop depth may change in the middle of a basic block. Since we
1722 scan from end to beginning, we start with the depth at the end of the
1723 current basic block, and adjust as we pass ends and starts of loops. */
1724 loop_depth = basic_block_loop_depth[bnum];
1726 dead = ALLOCA_REG_SET ();
1727 live = ALLOCA_REG_SET ();
1729 cc0_live = 0;
1730 last_mem_set = 0;
1732 /* Include any notes at the end of the block in the scan.
1733 This is in case the block ends with a call to setjmp. */
1735 while (NEXT_INSN (last) != 0 && GET_CODE (NEXT_INSN (last)) == NOTE)
1737 /* Look for loop boundaries, we are going forward here. */
1738 last = NEXT_INSN (last);
1739 if (NOTE_LINE_NUMBER (last) == NOTE_INSN_LOOP_BEG)
1740 loop_depth++;
1741 else if (NOTE_LINE_NUMBER (last) == NOTE_INSN_LOOP_END)
1742 loop_depth--;
1745 if (final)
1747 register int i;
1749 num_scratch = 0;
1750 maxlive = ALLOCA_REG_SET ();
1751 COPY_REG_SET (maxlive, old);
1752 regs_sometimes_live = (int *) alloca (max_regno * sizeof (int));
1754 /* Process the regs live at the end of the block.
1755 Enter them in MAXLIVE and REGS_SOMETIMES_LIVE.
1756 Also mark them as not local to any one basic block. */
1757 EXECUTE_IF_SET_IN_REG_SET (old, 0, i,
1759 REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL;
1760 regs_sometimes_live[sometimes_max] = i;
1761 sometimes_max++;
1765 /* Scan the block an insn at a time from end to beginning. */
1767 for (insn = last; ; insn = prev)
1769 prev = PREV_INSN (insn);
1771 if (GET_CODE (insn) == NOTE)
1773 /* Look for loop boundaries, remembering that we are going
1774 backwards. */
1775 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END)
1776 loop_depth++;
1777 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
1778 loop_depth--;
1780 /* If we have LOOP_DEPTH == 0, there has been a bookkeeping error.
1781 Abort now rather than setting register status incorrectly. */
1782 if (loop_depth == 0)
1783 abort ();
1785 /* If this is a call to `setjmp' et al,
1786 warn if any non-volatile datum is live. */
1788 if (final && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
1789 IOR_REG_SET (regs_live_at_setjmp, old);
1792 /* Update the life-status of regs for this insn.
1793 First DEAD gets which regs are set in this insn
1794 then LIVE gets which regs are used in this insn.
1795 Then the regs live before the insn
1796 are those live after, with DEAD regs turned off,
1797 and then LIVE regs turned on. */
1799 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1801 register int i;
1802 rtx note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
1803 int insn_is_dead
1804 = (insn_dead_p (PATTERN (insn), old, 0)
1805 /* Don't delete something that refers to volatile storage! */
1806 && ! INSN_VOLATILE (insn));
1807 int libcall_is_dead
1808 = (insn_is_dead && note != 0
1809 && libcall_dead_p (PATTERN (insn), old, note, insn));
1811 /* If an instruction consists of just dead store(s) on final pass,
1812 "delete" it by turning it into a NOTE of type NOTE_INSN_DELETED.
1813 We could really delete it with delete_insn, but that
1814 can cause trouble for first or last insn in a basic block. */
1815 if (final && insn_is_dead)
1817 PUT_CODE (insn, NOTE);
1818 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1819 NOTE_SOURCE_FILE (insn) = 0;
1821 /* CC0 is now known to be dead. Either this insn used it,
1822 in which case it doesn't anymore, or clobbered it,
1823 so the next insn can't use it. */
1824 cc0_live = 0;
1826 /* If this insn is copying the return value from a library call,
1827 delete the entire library call. */
1828 if (libcall_is_dead)
1830 rtx first = XEXP (note, 0);
1831 rtx p = insn;
1832 while (INSN_DELETED_P (first))
1833 first = NEXT_INSN (first);
1834 while (p != first)
1836 p = PREV_INSN (p);
1837 PUT_CODE (p, NOTE);
1838 NOTE_LINE_NUMBER (p) = NOTE_INSN_DELETED;
1839 NOTE_SOURCE_FILE (p) = 0;
1842 goto flushed;
1845 CLEAR_REG_SET (dead);
1846 CLEAR_REG_SET (live);
1848 /* See if this is an increment or decrement that can be
1849 merged into a following memory address. */
1850 #ifdef AUTO_INC_DEC
1852 register rtx x = single_set (insn);
1854 /* Does this instruction increment or decrement a register? */
1855 if (final && x != 0
1856 && GET_CODE (SET_DEST (x)) == REG
1857 && (GET_CODE (SET_SRC (x)) == PLUS
1858 || GET_CODE (SET_SRC (x)) == MINUS)
1859 && XEXP (SET_SRC (x), 0) == SET_DEST (x)
1860 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
1861 /* Ok, look for a following memory ref we can combine with.
1862 If one is found, change the memory ref to a PRE_INC
1863 or PRE_DEC, cancel this insn, and return 1.
1864 Return 0 if nothing has been done. */
1865 && try_pre_increment_1 (insn))
1866 goto flushed;
1868 #endif /* AUTO_INC_DEC */
1870 /* If this is not the final pass, and this insn is copying the
1871 value of a library call and it's dead, don't scan the
1872 insns that perform the library call, so that the call's
1873 arguments are not marked live. */
1874 if (libcall_is_dead)
1876 /* Mark the dest reg as `significant'. */
1877 mark_set_regs (old, dead, PATTERN (insn), NULL_RTX, significant);
1879 insn = XEXP (note, 0);
1880 prev = PREV_INSN (insn);
1882 else if (GET_CODE (PATTERN (insn)) == SET
1883 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
1884 && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
1885 && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx
1886 && GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == CONST_INT)
1887 /* We have an insn to pop a constant amount off the stack.
1888 (Such insns use PLUS regardless of the direction of the stack,
1889 and any insn to adjust the stack by a constant is always a pop.)
1890 These insns, if not dead stores, have no effect on life. */
1892 else
1894 /* LIVE gets the regs used in INSN;
1895 DEAD gets those set by it. Dead insns don't make anything
1896 live. */
1898 mark_set_regs (old, dead, PATTERN (insn),
1899 final ? insn : NULL_RTX, significant);
1901 /* If an insn doesn't use CC0, it becomes dead since we
1902 assume that every insn clobbers it. So show it dead here;
1903 mark_used_regs will set it live if it is referenced. */
1904 cc0_live = 0;
1906 if (! insn_is_dead)
1907 mark_used_regs (old, live, PATTERN (insn), final, insn);
1909 /* Sometimes we may have inserted something before INSN (such as
1910 a move) when we make an auto-inc. So ensure we will scan
1911 those insns. */
1912 #ifdef AUTO_INC_DEC
1913 prev = PREV_INSN (insn);
1914 #endif
1916 if (! insn_is_dead && GET_CODE (insn) == CALL_INSN)
1918 register int i;
1920 rtx note;
1922 for (note = CALL_INSN_FUNCTION_USAGE (insn);
1923 note;
1924 note = XEXP (note, 1))
1925 if (GET_CODE (XEXP (note, 0)) == USE)
1926 mark_used_regs (old, live, SET_DEST (XEXP (note, 0)),
1927 final, insn);
1929 /* Each call clobbers all call-clobbered regs that are not
1930 global or fixed. Note that the function-value reg is a
1931 call-clobbered reg, and mark_set_regs has already had
1932 a chance to handle it. */
1934 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1935 if (call_used_regs[i] && ! global_regs[i]
1936 && ! fixed_regs[i])
1937 SET_REGNO_REG_SET (dead, i);
1939 /* The stack ptr is used (honorarily) by a CALL insn. */
1940 SET_REGNO_REG_SET (live, STACK_POINTER_REGNUM);
1942 /* Calls may also reference any of the global registers,
1943 so they are made live. */
1944 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1945 if (global_regs[i])
1946 mark_used_regs (old, live,
1947 gen_rtx_REG (reg_raw_mode[i], i),
1948 final, insn);
1950 /* Calls also clobber memory. */
1951 last_mem_set = 0;
1954 /* Update OLD for the registers used or set. */
1955 AND_COMPL_REG_SET (old, dead);
1956 IOR_REG_SET (old, live);
1958 if (GET_CODE (insn) == CALL_INSN && final)
1960 /* Any regs live at the time of a call instruction
1961 must not go in a register clobbered by calls.
1962 Find all regs now live and record this for them. */
1964 register int *p = regs_sometimes_live;
1966 for (i = 0; i < sometimes_max; i++, p++)
1967 if (REGNO_REG_SET_P (old, *p))
1968 REG_N_CALLS_CROSSED (*p)++;
1972 /* On final pass, add any additional sometimes-live regs
1973 into MAXLIVE and REGS_SOMETIMES_LIVE.
1974 Also update counts of how many insns each reg is live at. */
1976 if (final)
1978 register int regno;
1979 register int *p;
1981 EXECUTE_IF_AND_COMPL_IN_REG_SET
1982 (live, maxlive, 0, regno,
1984 regs_sometimes_live[sometimes_max++] = regno;
1985 SET_REGNO_REG_SET (maxlive, regno);
1988 p = regs_sometimes_live;
1989 for (i = 0; i < sometimes_max; i++)
1991 regno = *p++;
1992 if (REGNO_REG_SET_P (old, regno))
1993 REG_LIVE_LENGTH (regno)++;
1997 flushed: ;
1998 if (insn == first)
1999 break;
2002 FREE_REG_SET (dead);
2003 FREE_REG_SET (live);
2004 if (final)
2005 FREE_REG_SET (maxlive);
2007 if (num_scratch > max_scratch)
2008 max_scratch = num_scratch;
2011 /* Return 1 if X (the body of an insn, or part of it) is just dead stores
2012 (SET expressions whose destinations are registers dead after the insn).
2013 NEEDED is the regset that says which regs are alive after the insn.
2015 Unless CALL_OK is non-zero, an insn is needed if it contains a CALL. */
2017 static int
2018 insn_dead_p (x, needed, call_ok)
2019 rtx x;
2020 regset needed;
2021 int call_ok;
2023 enum rtx_code code = GET_CODE (x);
2025 /* If setting something that's a reg or part of one,
2026 see if that register's altered value will be live. */
2028 if (code == SET)
2030 rtx r = SET_DEST (x);
2032 /* A SET that is a subroutine call cannot be dead. */
2033 if (! call_ok && GET_CODE (SET_SRC (x)) == CALL)
2034 return 0;
2036 #ifdef HAVE_cc0
2037 if (GET_CODE (r) == CC0)
2038 return ! cc0_live;
2039 #endif
2041 if (GET_CODE (r) == MEM && last_mem_set && ! MEM_VOLATILE_P (r)
2042 && rtx_equal_p (r, last_mem_set))
2043 return 1;
2045 while (GET_CODE (r) == SUBREG || GET_CODE (r) == STRICT_LOW_PART
2046 || GET_CODE (r) == ZERO_EXTRACT)
2047 r = SUBREG_REG (r);
2049 if (GET_CODE (r) == REG)
2051 int regno = REGNO (r);
2053 /* Don't delete insns to set global regs. */
2054 if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
2055 /* Make sure insns to set frame pointer aren't deleted. */
2056 || regno == FRAME_POINTER_REGNUM
2057 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2058 || regno == HARD_FRAME_POINTER_REGNUM
2059 #endif
2060 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
2061 /* Make sure insns to set arg pointer are never deleted
2062 (if the arg pointer isn't fixed, there will be a USE for
2063 it, so we can treat it normally). */
2064 || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
2065 #endif
2066 || REGNO_REG_SET_P (needed, regno))
2067 return 0;
2069 /* If this is a hard register, verify that subsequent words are
2070 not needed. */
2071 if (regno < FIRST_PSEUDO_REGISTER)
2073 int n = HARD_REGNO_NREGS (regno, GET_MODE (r));
2075 while (--n > 0)
2076 if (REGNO_REG_SET_P (needed, regno+n))
2077 return 0;
2080 return 1;
2084 /* If performing several activities,
2085 insn is dead if each activity is individually dead.
2086 Also, CLOBBERs and USEs can be ignored; a CLOBBER or USE
2087 that's inside a PARALLEL doesn't make the insn worth keeping. */
2088 else if (code == PARALLEL)
2090 int i = XVECLEN (x, 0);
2092 for (i--; i >= 0; i--)
2093 if (GET_CODE (XVECEXP (x, 0, i)) != CLOBBER
2094 && GET_CODE (XVECEXP (x, 0, i)) != USE
2095 && ! insn_dead_p (XVECEXP (x, 0, i), needed, call_ok))
2096 return 0;
2098 return 1;
2101 /* A CLOBBER of a pseudo-register that is dead serves no purpose. That
2102 is not necessarily true for hard registers. */
2103 else if (code == CLOBBER && GET_CODE (XEXP (x, 0)) == REG
2104 && REGNO (XEXP (x, 0)) >= FIRST_PSEUDO_REGISTER
2105 && ! REGNO_REG_SET_P (needed, REGNO (XEXP (x, 0))))
2106 return 1;
2108 /* We do not check other CLOBBER or USE here. An insn consisting of just
2109 a CLOBBER or just a USE should not be deleted. */
2110 return 0;
2113 /* If X is the pattern of the last insn in a libcall, and assuming X is dead,
2114 return 1 if the entire library call is dead.
2115 This is true if X copies a register (hard or pseudo)
2116 and if the hard return reg of the call insn is dead.
2117 (The caller should have tested the destination of X already for death.)
2119 If this insn doesn't just copy a register, then we don't
2120 have an ordinary libcall. In that case, cse could not have
2121 managed to substitute the source for the dest later on,
2122 so we can assume the libcall is dead.
2124 NEEDED is the bit vector of pseudoregs live before this insn.
2125 NOTE is the REG_RETVAL note of the insn. INSN is the insn itself. */
2127 static int
2128 libcall_dead_p (x, needed, note, insn)
2129 rtx x;
2130 regset needed;
2131 rtx note;
2132 rtx insn;
2134 register RTX_CODE code = GET_CODE (x);
2136 if (code == SET)
2138 register rtx r = SET_SRC (x);
2139 if (GET_CODE (r) == REG)
2141 rtx call = XEXP (note, 0);
2142 register int i;
2144 /* Find the call insn. */
2145 while (call != insn && GET_CODE (call) != CALL_INSN)
2146 call = NEXT_INSN (call);
2148 /* If there is none, do nothing special,
2149 since ordinary death handling can understand these insns. */
2150 if (call == insn)
2151 return 0;
2153 /* See if the hard reg holding the value is dead.
2154 If this is a PARALLEL, find the call within it. */
2155 call = PATTERN (call);
2156 if (GET_CODE (call) == PARALLEL)
2158 for (i = XVECLEN (call, 0) - 1; i >= 0; i--)
2159 if (GET_CODE (XVECEXP (call, 0, i)) == SET
2160 && GET_CODE (SET_SRC (XVECEXP (call, 0, i))) == CALL)
2161 break;
2163 /* This may be a library call that is returning a value
2164 via invisible pointer. Do nothing special, since
2165 ordinary death handling can understand these insns. */
2166 if (i < 0)
2167 return 0;
2169 call = XVECEXP (call, 0, i);
2172 return insn_dead_p (call, needed, 1);
2175 return 1;
2178 /* Return 1 if register REGNO was used before it was set, i.e. if it is
2179 live at function entry. Don't count global register variables, variables
2180 in registers that can be used for function arg passing, or variables in
2181 fixed hard registers. */
2184 regno_uninitialized (regno)
2185 int regno;
2187 if (n_basic_blocks == 0
2188 || (regno < FIRST_PSEUDO_REGISTER
2189 && (global_regs[regno]
2190 || fixed_regs[regno]
2191 || FUNCTION_ARG_REGNO_P (regno))))
2192 return 0;
2194 return REGNO_REG_SET_P (basic_block_live_at_start[0], regno);
2197 /* 1 if register REGNO was alive at a place where `setjmp' was called
2198 and was set more than once or is an argument.
2199 Such regs may be clobbered by `longjmp'. */
2202 regno_clobbered_at_setjmp (regno)
2203 int regno;
2205 if (n_basic_blocks == 0)
2206 return 0;
2208 return ((REG_N_SETS (regno) > 1
2209 || REGNO_REG_SET_P (basic_block_live_at_start[0], regno))
2210 && REGNO_REG_SET_P (regs_live_at_setjmp, regno));
2213 /* Process the registers that are set within X.
2214 Their bits are set to 1 in the regset DEAD,
2215 because they are dead prior to this insn.
2217 If INSN is nonzero, it is the insn being processed
2218 and the fact that it is nonzero implies this is the FINAL pass
2219 in propagate_block. In this case, various info about register
2220 usage is stored, LOG_LINKS fields of insns are set up. */
2222 static void
2223 mark_set_regs (needed, dead, x, insn, significant)
2224 regset needed;
2225 regset dead;
2226 rtx x;
2227 rtx insn;
2228 regset significant;
2230 register RTX_CODE code = GET_CODE (x);
2232 if (code == SET || code == CLOBBER)
2233 mark_set_1 (needed, dead, x, insn, significant);
2234 else if (code == PARALLEL)
2236 register int i;
2237 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
2239 code = GET_CODE (XVECEXP (x, 0, i));
2240 if (code == SET || code == CLOBBER)
2241 mark_set_1 (needed, dead, XVECEXP (x, 0, i), insn, significant);
2246 /* Process a single SET rtx, X. */
2248 static void
2249 mark_set_1 (needed, dead, x, insn, significant)
2250 regset needed;
2251 regset dead;
2252 rtx x;
2253 rtx insn;
2254 regset significant;
2256 register int regno;
2257 register rtx reg = SET_DEST (x);
2259 /* Some targets place small structures in registers for
2260 return values of functions. We have to detect this
2261 case specially here to get correct flow information. */
2262 if (GET_CODE (reg) == PARALLEL
2263 && GET_MODE (reg) == BLKmode)
2265 register int i;
2267 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
2268 mark_set_1 (needed, dead, XVECEXP (reg, 0, i), insn, significant);
2269 return;
2272 /* Modifying just one hardware register of a multi-reg value
2273 or just a byte field of a register
2274 does not mean the value from before this insn is now dead.
2275 But it does mean liveness of that register at the end of the block
2276 is significant.
2278 Within mark_set_1, however, we treat it as if the register is
2279 indeed modified. mark_used_regs will, however, also treat this
2280 register as being used. Thus, we treat these insns as setting a
2281 new value for the register as a function of its old value. This
2282 cases LOG_LINKS to be made appropriately and this will help combine. */
2284 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
2285 || GET_CODE (reg) == SIGN_EXTRACT
2286 || GET_CODE (reg) == STRICT_LOW_PART)
2287 reg = XEXP (reg, 0);
2289 /* If we are writing into memory or into a register mentioned in the
2290 address of the last thing stored into memory, show we don't know
2291 what the last store was. If we are writing memory, save the address
2292 unless it is volatile. */
2293 if (GET_CODE (reg) == MEM
2294 || (GET_CODE (reg) == REG
2295 && last_mem_set != 0 && reg_overlap_mentioned_p (reg, last_mem_set)))
2296 last_mem_set = 0;
2298 if (GET_CODE (reg) == MEM && ! side_effects_p (reg)
2299 /* There are no REG_INC notes for SP, so we can't assume we'll see
2300 everything that invalidates it. To be safe, don't eliminate any
2301 stores though SP; none of them should be redundant anyway. */
2302 && ! reg_mentioned_p (stack_pointer_rtx, reg))
2303 last_mem_set = reg;
2305 if (GET_CODE (reg) == REG
2306 && (regno = REGNO (reg), regno != FRAME_POINTER_REGNUM)
2307 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2308 && regno != HARD_FRAME_POINTER_REGNUM
2309 #endif
2310 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
2311 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
2312 #endif
2313 && ! (regno < FIRST_PSEUDO_REGISTER && global_regs[regno]))
2314 /* && regno != STACK_POINTER_REGNUM) -- let's try without this. */
2316 int some_needed = REGNO_REG_SET_P (needed, regno);
2317 int some_not_needed = ! some_needed;
2319 /* Mark it as a significant register for this basic block. */
2320 if (significant)
2321 SET_REGNO_REG_SET (significant, regno);
2323 /* Mark it as dead before this insn. */
2324 SET_REGNO_REG_SET (dead, regno);
2326 /* A hard reg in a wide mode may really be multiple registers.
2327 If so, mark all of them just like the first. */
2328 if (regno < FIRST_PSEUDO_REGISTER)
2330 int n;
2332 /* Nothing below is needed for the stack pointer; get out asap.
2333 Eg, log links aren't needed, since combine won't use them. */
2334 if (regno == STACK_POINTER_REGNUM)
2335 return;
2337 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
2338 while (--n > 0)
2340 int regno_n = regno + n;
2341 int needed_regno = REGNO_REG_SET_P (needed, regno_n);
2342 if (significant)
2343 SET_REGNO_REG_SET (significant, regno_n);
2345 SET_REGNO_REG_SET (dead, regno_n);
2346 some_needed |= needed_regno;
2347 some_not_needed |= ! needed_regno;
2350 /* Additional data to record if this is the final pass. */
2351 if (insn)
2353 register rtx y = reg_next_use[regno];
2354 register int blocknum = BLOCK_NUM (insn);
2356 /* If this is a hard reg, record this function uses the reg. */
2358 if (regno < FIRST_PSEUDO_REGISTER)
2360 register int i;
2361 int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (reg));
2363 for (i = regno; i < endregno; i++)
2365 /* The next use is no longer "next", since a store
2366 intervenes. */
2367 reg_next_use[i] = 0;
2369 regs_ever_live[i] = 1;
2370 REG_N_SETS (i)++;
2373 else
2375 /* The next use is no longer "next", since a store
2376 intervenes. */
2377 reg_next_use[regno] = 0;
2379 /* Keep track of which basic blocks each reg appears in. */
2381 if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
2382 REG_BASIC_BLOCK (regno) = blocknum;
2383 else if (REG_BASIC_BLOCK (regno) != blocknum)
2384 REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
2386 /* Count (weighted) references, stores, etc. This counts a
2387 register twice if it is modified, but that is correct. */
2388 REG_N_SETS (regno)++;
2390 REG_N_REFS (regno) += loop_depth;
2392 /* The insns where a reg is live are normally counted
2393 elsewhere, but we want the count to include the insn
2394 where the reg is set, and the normal counting mechanism
2395 would not count it. */
2396 REG_LIVE_LENGTH (regno)++;
2399 if (! some_not_needed)
2401 /* Make a logical link from the next following insn
2402 that uses this register, back to this insn.
2403 The following insns have already been processed.
2405 We don't build a LOG_LINK for hard registers containing
2406 in ASM_OPERANDs. If these registers get replaced,
2407 we might wind up changing the semantics of the insn,
2408 even if reload can make what appear to be valid assignments
2409 later. */
2410 if (y && (BLOCK_NUM (y) == blocknum)
2411 && (regno >= FIRST_PSEUDO_REGISTER
2412 || asm_noperands (PATTERN (y)) < 0))
2413 LOG_LINKS (y)
2414 = gen_rtx_INSN_LIST (VOIDmode, insn, LOG_LINKS (y));
2416 else if (! some_needed)
2418 /* Note that dead stores have already been deleted when possible
2419 If we get here, we have found a dead store that cannot
2420 be eliminated (because the same insn does something useful).
2421 Indicate this by marking the reg being set as dying here. */
2422 REG_NOTES (insn)
2423 = gen_rtx_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
2424 REG_N_DEATHS (REGNO (reg))++;
2426 else
2428 /* This is a case where we have a multi-word hard register
2429 and some, but not all, of the words of the register are
2430 needed in subsequent insns. Write REG_UNUSED notes
2431 for those parts that were not needed. This case should
2432 be rare. */
2434 int i;
2436 for (i = HARD_REGNO_NREGS (regno, GET_MODE (reg)) - 1;
2437 i >= 0; i--)
2438 if (!REGNO_REG_SET_P (needed, regno + i))
2439 REG_NOTES (insn)
2440 = gen_rtx_EXPR_LIST (REG_UNUSED,
2441 gen_rtx_REG (reg_raw_mode[regno + i],
2442 regno + i),
2443 REG_NOTES (insn));
2447 else if (GET_CODE (reg) == REG)
2448 reg_next_use[regno] = 0;
2450 /* If this is the last pass and this is a SCRATCH, show it will be dying
2451 here and count it. */
2452 else if (GET_CODE (reg) == SCRATCH && insn != 0)
2454 REG_NOTES (insn)
2455 = gen_rtx_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
2456 num_scratch++;
2460 #ifdef AUTO_INC_DEC
2462 /* X is a MEM found in INSN. See if we can convert it into an auto-increment
2463 reference. */
2465 static void
2466 find_auto_inc (needed, x, insn)
2467 regset needed;
2468 rtx x;
2469 rtx insn;
2471 rtx addr = XEXP (x, 0);
2472 HOST_WIDE_INT offset = 0;
2473 rtx set;
2475 /* Here we detect use of an index register which might be good for
2476 postincrement, postdecrement, preincrement, or predecrement. */
2478 if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
2479 offset = INTVAL (XEXP (addr, 1)), addr = XEXP (addr, 0);
2481 if (GET_CODE (addr) == REG)
2483 register rtx y;
2484 register int size = GET_MODE_SIZE (GET_MODE (x));
2485 rtx use;
2486 rtx incr;
2487 int regno = REGNO (addr);
2489 /* Is the next use an increment that might make auto-increment? */
2490 if ((incr = reg_next_use[regno]) != 0
2491 && (set = single_set (incr)) != 0
2492 && GET_CODE (set) == SET
2493 && BLOCK_NUM (incr) == BLOCK_NUM (insn)
2494 /* Can't add side effects to jumps; if reg is spilled and
2495 reloaded, there's no way to store back the altered value. */
2496 && GET_CODE (insn) != JUMP_INSN
2497 && (y = SET_SRC (set), GET_CODE (y) == PLUS)
2498 && XEXP (y, 0) == addr
2499 && GET_CODE (XEXP (y, 1)) == CONST_INT
2500 && (0
2501 #ifdef HAVE_POST_INCREMENT
2502 || (INTVAL (XEXP (y, 1)) == size && offset == 0)
2503 #endif
2504 #ifdef HAVE_POST_DECREMENT
2505 || (INTVAL (XEXP (y, 1)) == - size && offset == 0)
2506 #endif
2507 #ifdef HAVE_PRE_INCREMENT
2508 || (INTVAL (XEXP (y, 1)) == size && offset == size)
2509 #endif
2510 #ifdef HAVE_PRE_DECREMENT
2511 || (INTVAL (XEXP (y, 1)) == - size && offset == - size)
2512 #endif
2514 /* Make sure this reg appears only once in this insn. */
2515 && (use = find_use_as_address (PATTERN (insn), addr, offset),
2516 use != 0 && use != (rtx) 1))
2518 rtx q = SET_DEST (set);
2519 enum rtx_code inc_code = (INTVAL (XEXP (y, 1)) == size
2520 ? (offset ? PRE_INC : POST_INC)
2521 : (offset ? PRE_DEC : POST_DEC));
2523 if (dead_or_set_p (incr, addr))
2525 /* This is the simple case. Try to make the auto-inc. If
2526 we can't, we are done. Otherwise, we will do any
2527 needed updates below. */
2528 if (! validate_change (insn, &XEXP (x, 0),
2529 gen_rtx_fmt_e (inc_code, Pmode, addr),
2531 return;
2533 else if (GET_CODE (q) == REG
2534 /* PREV_INSN used here to check the semi-open interval
2535 [insn,incr). */
2536 && ! reg_used_between_p (q, PREV_INSN (insn), incr)
2537 /* We must also check for sets of q as q may be
2538 a call clobbered hard register and there may
2539 be a call between PREV_INSN (insn) and incr. */
2540 && ! reg_set_between_p (q, PREV_INSN (insn), incr))
2542 /* We have *p followed sometime later by q = p+size.
2543 Both p and q must be live afterward,
2544 and q is not used between INSN and its assignment.
2545 Change it to q = p, ...*q..., q = q+size.
2546 Then fall into the usual case. */
2547 rtx insns, temp;
2549 start_sequence ();
2550 emit_move_insn (q, addr);
2551 insns = get_insns ();
2552 end_sequence ();
2554 /* If anything in INSNS have UID's that don't fit within the
2555 extra space we allocate earlier, we can't make this auto-inc.
2556 This should never happen. */
2557 for (temp = insns; temp; temp = NEXT_INSN (temp))
2559 if (INSN_UID (temp) > max_uid_for_flow)
2560 return;
2561 BLOCK_NUM (temp) = BLOCK_NUM (insn);
2564 /* If we can't make the auto-inc, or can't make the
2565 replacement into Y, exit. There's no point in making
2566 the change below if we can't do the auto-inc and doing
2567 so is not correct in the pre-inc case. */
2569 validate_change (insn, &XEXP (x, 0),
2570 gen_rtx_fmt_e (inc_code, Pmode, q),
2572 validate_change (incr, &XEXP (y, 0), q, 1);
2573 if (! apply_change_group ())
2574 return;
2576 /* We now know we'll be doing this change, so emit the
2577 new insn(s) and do the updates. */
2578 emit_insns_before (insns, insn);
2580 if (basic_block_head[BLOCK_NUM (insn)] == insn)
2581 basic_block_head[BLOCK_NUM (insn)] = insns;
2583 /* INCR will become a NOTE and INSN won't contain a
2584 use of ADDR. If a use of ADDR was just placed in
2585 the insn before INSN, make that the next use.
2586 Otherwise, invalidate it. */
2587 if (GET_CODE (PREV_INSN (insn)) == INSN
2588 && GET_CODE (PATTERN (PREV_INSN (insn))) == SET
2589 && SET_SRC (PATTERN (PREV_INSN (insn))) == addr)
2590 reg_next_use[regno] = PREV_INSN (insn);
2591 else
2592 reg_next_use[regno] = 0;
2594 addr = q;
2595 regno = REGNO (q);
2597 /* REGNO is now used in INCR which is below INSN, but
2598 it previously wasn't live here. If we don't mark
2599 it as needed, we'll put a REG_DEAD note for it
2600 on this insn, which is incorrect. */
2601 SET_REGNO_REG_SET (needed, regno);
2603 /* If there are any calls between INSN and INCR, show
2604 that REGNO now crosses them. */
2605 for (temp = insn; temp != incr; temp = NEXT_INSN (temp))
2606 if (GET_CODE (temp) == CALL_INSN)
2607 REG_N_CALLS_CROSSED (regno)++;
2609 else
2610 return;
2612 /* If we haven't returned, it means we were able to make the
2613 auto-inc, so update the status. First, record that this insn
2614 has an implicit side effect. */
2616 REG_NOTES (insn)
2617 = gen_rtx_EXPR_LIST (REG_INC, addr, REG_NOTES (insn));
2619 /* Modify the old increment-insn to simply copy
2620 the already-incremented value of our register. */
2621 if (! validate_change (incr, &SET_SRC (set), addr, 0))
2622 abort ();
2624 /* If that makes it a no-op (copying the register into itself) delete
2625 it so it won't appear to be a "use" and a "set" of this
2626 register. */
2627 if (SET_DEST (set) == addr)
2629 PUT_CODE (incr, NOTE);
2630 NOTE_LINE_NUMBER (incr) = NOTE_INSN_DELETED;
2631 NOTE_SOURCE_FILE (incr) = 0;
2634 if (regno >= FIRST_PSEUDO_REGISTER)
2636 /* Count an extra reference to the reg. When a reg is
2637 incremented, spilling it is worse, so we want to make
2638 that less likely. */
2639 REG_N_REFS (regno) += loop_depth;
2641 /* Count the increment as a setting of the register,
2642 even though it isn't a SET in rtl. */
2643 REG_N_SETS (regno)++;
2648 #endif /* AUTO_INC_DEC */
2650 /* Scan expression X and store a 1-bit in LIVE for each reg it uses.
2651 This is done assuming the registers needed from X
2652 are those that have 1-bits in NEEDED.
2654 On the final pass, FINAL is 1. This means try for autoincrement
2655 and count the uses and deaths of each pseudo-reg.
2657 INSN is the containing instruction. If INSN is dead, this function is not
2658 called. */
2660 static void
2661 mark_used_regs (needed, live, x, final, insn)
2662 regset needed;
2663 regset live;
2664 rtx x;
2665 int final;
2666 rtx insn;
2668 register RTX_CODE code;
2669 register int regno;
2670 int i;
2672 retry:
2673 code = GET_CODE (x);
2674 switch (code)
2676 case LABEL_REF:
2677 case SYMBOL_REF:
2678 case CONST_INT:
2679 case CONST:
2680 case CONST_DOUBLE:
2681 case PC:
2682 case ADDR_VEC:
2683 case ADDR_DIFF_VEC:
2684 case ASM_INPUT:
2685 return;
2687 #ifdef HAVE_cc0
2688 case CC0:
2689 cc0_live = 1;
2690 return;
2691 #endif
2693 case CLOBBER:
2694 /* If we are clobbering a MEM, mark any registers inside the address
2695 as being used. */
2696 if (GET_CODE (XEXP (x, 0)) == MEM)
2697 mark_used_regs (needed, live, XEXP (XEXP (x, 0), 0), final, insn);
2698 return;
2700 case MEM:
2701 /* Invalidate the data for the last MEM stored, but only if MEM is
2702 something that can be stored into. */
2703 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
2704 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
2705 ; /* needn't clear last_mem_set */
2706 else
2707 last_mem_set = 0;
2709 #ifdef AUTO_INC_DEC
2710 if (final)
2711 find_auto_inc (needed, x, insn);
2712 #endif
2713 break;
2715 case SUBREG:
2716 if (GET_CODE (SUBREG_REG (x)) == REG
2717 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER
2718 && (GET_MODE_SIZE (GET_MODE (x))
2719 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
2720 REG_CHANGES_SIZE (REGNO (SUBREG_REG (x))) = 1;
2722 /* While we're here, optimize this case. */
2723 x = SUBREG_REG (x);
2725 /* In case the SUBREG is not of a register, don't optimize */
2726 if (GET_CODE (x) != REG)
2728 mark_used_regs (needed, live, x, final, insn);
2729 return;
2732 /* ... fall through ... */
2734 case REG:
2735 /* See a register other than being set
2736 => mark it as needed. */
2738 regno = REGNO (x);
2740 int some_needed = REGNO_REG_SET_P (needed, regno);
2741 int some_not_needed = ! some_needed;
2743 SET_REGNO_REG_SET (live, regno);
2745 /* A hard reg in a wide mode may really be multiple registers.
2746 If so, mark all of them just like the first. */
2747 if (regno < FIRST_PSEUDO_REGISTER)
2749 int n;
2751 /* For stack ptr or fixed arg pointer,
2752 nothing below can be necessary, so waste no more time. */
2753 if (regno == STACK_POINTER_REGNUM
2754 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2755 || regno == HARD_FRAME_POINTER_REGNUM
2756 #endif
2757 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
2758 || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
2759 #endif
2760 || regno == FRAME_POINTER_REGNUM)
2762 /* If this is a register we are going to try to eliminate,
2763 don't mark it live here. If we are successful in
2764 eliminating it, it need not be live unless it is used for
2765 pseudos, in which case it will have been set live when
2766 it was allocated to the pseudos. If the register will not
2767 be eliminated, reload will set it live at that point. */
2769 if (! TEST_HARD_REG_BIT (elim_reg_set, regno))
2770 regs_ever_live[regno] = 1;
2771 return;
2773 /* No death notes for global register variables;
2774 their values are live after this function exits. */
2775 if (global_regs[regno])
2777 if (final)
2778 reg_next_use[regno] = insn;
2779 return;
2782 n = HARD_REGNO_NREGS (regno, GET_MODE (x));
2783 while (--n > 0)
2785 int regno_n = regno + n;
2786 int needed_regno = REGNO_REG_SET_P (needed, regno_n);
2788 SET_REGNO_REG_SET (live, regno_n);
2789 some_needed |= needed_regno;
2790 some_not_needed |= ! needed_regno;
2793 if (final)
2795 /* Record where each reg is used, so when the reg
2796 is set we know the next insn that uses it. */
2798 reg_next_use[regno] = insn;
2800 if (regno < FIRST_PSEUDO_REGISTER)
2802 /* If a hard reg is being used,
2803 record that this function does use it. */
2805 i = HARD_REGNO_NREGS (regno, GET_MODE (x));
2806 if (i == 0)
2807 i = 1;
2809 regs_ever_live[regno + --i] = 1;
2810 while (i > 0);
2812 else
2814 /* Keep track of which basic block each reg appears in. */
2816 register int blocknum = BLOCK_NUM (insn);
2818 if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
2819 REG_BASIC_BLOCK (regno) = blocknum;
2820 else if (REG_BASIC_BLOCK (regno) != blocknum)
2821 REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
2823 /* Count (weighted) number of uses of each reg. */
2825 REG_N_REFS (regno) += loop_depth;
2828 /* Record and count the insns in which a reg dies.
2829 If it is used in this insn and was dead below the insn
2830 then it dies in this insn. If it was set in this insn,
2831 we do not make a REG_DEAD note; likewise if we already
2832 made such a note. */
2834 if (some_not_needed
2835 && ! dead_or_set_p (insn, x)
2836 #if 0
2837 && (regno >= FIRST_PSEUDO_REGISTER || ! fixed_regs[regno])
2838 #endif
2841 /* Check for the case where the register dying partially
2842 overlaps the register set by this insn. */
2843 if (regno < FIRST_PSEUDO_REGISTER
2844 && HARD_REGNO_NREGS (regno, GET_MODE (x)) > 1)
2846 int n = HARD_REGNO_NREGS (regno, GET_MODE (x));
2847 while (--n >= 0)
2848 some_needed |= dead_or_set_regno_p (insn, regno + n);
2851 /* If none of the words in X is needed, make a REG_DEAD
2852 note. Otherwise, we must make partial REG_DEAD notes. */
2853 if (! some_needed)
2855 REG_NOTES (insn)
2856 = gen_rtx_EXPR_LIST (REG_DEAD, x, REG_NOTES (insn));
2857 REG_N_DEATHS (regno)++;
2859 else
2861 int i;
2863 /* Don't make a REG_DEAD note for a part of a register
2864 that is set in the insn. */
2866 for (i = HARD_REGNO_NREGS (regno, GET_MODE (x)) - 1;
2867 i >= 0; i--)
2868 if (!REGNO_REG_SET_P (needed, regno + i)
2869 && ! dead_or_set_regno_p (insn, regno + i))
2870 REG_NOTES (insn)
2871 = gen_rtx_EXPR_LIST (REG_DEAD,
2872 gen_rtx_REG (reg_raw_mode[regno + i],
2873 regno + i),
2874 REG_NOTES (insn));
2879 return;
2881 case SET:
2883 register rtx testreg = SET_DEST (x);
2884 int mark_dest = 0;
2886 /* If storing into MEM, don't show it as being used. But do
2887 show the address as being used. */
2888 if (GET_CODE (testreg) == MEM)
2890 #ifdef AUTO_INC_DEC
2891 if (final)
2892 find_auto_inc (needed, testreg, insn);
2893 #endif
2894 mark_used_regs (needed, live, XEXP (testreg, 0), final, insn);
2895 mark_used_regs (needed, live, SET_SRC (x), final, insn);
2896 return;
2899 /* Storing in STRICT_LOW_PART is like storing in a reg
2900 in that this SET might be dead, so ignore it in TESTREG.
2901 but in some other ways it is like using the reg.
2903 Storing in a SUBREG or a bit field is like storing the entire
2904 register in that if the register's value is not used
2905 then this SET is not needed. */
2906 while (GET_CODE (testreg) == STRICT_LOW_PART
2907 || GET_CODE (testreg) == ZERO_EXTRACT
2908 || GET_CODE (testreg) == SIGN_EXTRACT
2909 || GET_CODE (testreg) == SUBREG)
2911 if (GET_CODE (testreg) == SUBREG
2912 && GET_CODE (SUBREG_REG (testreg)) == REG
2913 && REGNO (SUBREG_REG (testreg)) >= FIRST_PSEUDO_REGISTER
2914 && (GET_MODE_SIZE (GET_MODE (testreg))
2915 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (testreg)))))
2916 REG_CHANGES_SIZE (REGNO (SUBREG_REG (testreg))) = 1;
2918 /* Modifying a single register in an alternate mode
2919 does not use any of the old value. But these other
2920 ways of storing in a register do use the old value. */
2921 if (GET_CODE (testreg) == SUBREG
2922 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
2924 else
2925 mark_dest = 1;
2927 testreg = XEXP (testreg, 0);
2930 /* If this is a store into a register,
2931 recursively scan the value being stored. */
2933 if ((GET_CODE (testreg) == PARALLEL
2934 && GET_MODE (testreg) == BLKmode)
2935 || (GET_CODE (testreg) == REG
2936 && (regno = REGNO (testreg), regno != FRAME_POINTER_REGNUM)
2937 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2938 && regno != HARD_FRAME_POINTER_REGNUM
2939 #endif
2940 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
2941 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
2942 #endif
2944 /* We used to exclude global_regs here, but that seems wrong.
2945 Storing in them is like storing in mem. */
2947 mark_used_regs (needed, live, SET_SRC (x), final, insn);
2948 if (mark_dest)
2949 mark_used_regs (needed, live, SET_DEST (x), final, insn);
2950 return;
2953 break;
2955 case RETURN:
2956 /* If exiting needs the right stack value, consider this insn as
2957 using the stack pointer. In any event, consider it as using
2958 all global registers and all registers used by return. */
2960 #ifdef EXIT_IGNORE_STACK
2961 if (! EXIT_IGNORE_STACK
2962 || (! FRAME_POINTER_REQUIRED
2963 && ! current_function_calls_alloca
2964 && flag_omit_frame_pointer))
2965 #endif
2966 SET_REGNO_REG_SET (live, STACK_POINTER_REGNUM);
2968 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2969 if (global_regs[i]
2970 #ifdef EPILOGUE_USES
2971 || EPILOGUE_USES (i)
2972 #endif
2974 SET_REGNO_REG_SET (live, i);
2975 break;
2977 default:
2978 break;
2981 /* Recursively scan the operands of this expression. */
2984 register char *fmt = GET_RTX_FORMAT (code);
2985 register int i;
2987 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2989 if (fmt[i] == 'e')
2991 /* Tail recursive case: save a function call level. */
2992 if (i == 0)
2994 x = XEXP (x, 0);
2995 goto retry;
2997 mark_used_regs (needed, live, XEXP (x, i), final, insn);
2999 else if (fmt[i] == 'E')
3001 register int j;
3002 for (j = 0; j < XVECLEN (x, i); j++)
3003 mark_used_regs (needed, live, XVECEXP (x, i, j), final, insn);
3009 #ifdef AUTO_INC_DEC
3011 static int
3012 try_pre_increment_1 (insn)
3013 rtx insn;
3015 /* Find the next use of this reg. If in same basic block,
3016 make it do pre-increment or pre-decrement if appropriate. */
3017 rtx x = single_set (insn);
3018 HOST_WIDE_INT amount = ((GET_CODE (SET_SRC (x)) == PLUS ? 1 : -1)
3019 * INTVAL (XEXP (SET_SRC (x), 1)));
3020 int regno = REGNO (SET_DEST (x));
3021 rtx y = reg_next_use[regno];
3022 if (y != 0
3023 && BLOCK_NUM (y) == BLOCK_NUM (insn)
3024 /* Don't do this if the reg dies, or gets set in y; a standard addressing
3025 mode would be better. */
3026 && ! dead_or_set_p (y, SET_DEST (x))
3027 && try_pre_increment (y, SET_DEST (x), amount))
3029 /* We have found a suitable auto-increment
3030 and already changed insn Y to do it.
3031 So flush this increment-instruction. */
3032 PUT_CODE (insn, NOTE);
3033 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
3034 NOTE_SOURCE_FILE (insn) = 0;
3035 /* Count a reference to this reg for the increment
3036 insn we are deleting. When a reg is incremented.
3037 spilling it is worse, so we want to make that
3038 less likely. */
3039 if (regno >= FIRST_PSEUDO_REGISTER)
3041 REG_N_REFS (regno) += loop_depth;
3042 REG_N_SETS (regno)++;
3044 return 1;
3046 return 0;
3049 /* Try to change INSN so that it does pre-increment or pre-decrement
3050 addressing on register REG in order to add AMOUNT to REG.
3051 AMOUNT is negative for pre-decrement.
3052 Returns 1 if the change could be made.
3053 This checks all about the validity of the result of modifying INSN. */
3055 static int
3056 try_pre_increment (insn, reg, amount)
3057 rtx insn, reg;
3058 HOST_WIDE_INT amount;
3060 register rtx use;
3062 /* Nonzero if we can try to make a pre-increment or pre-decrement.
3063 For example, addl $4,r1; movl (r1),... can become movl +(r1),... */
3064 int pre_ok = 0;
3065 /* Nonzero if we can try to make a post-increment or post-decrement.
3066 For example, addl $4,r1; movl -4(r1),... can become movl (r1)+,...
3067 It is possible for both PRE_OK and POST_OK to be nonzero if the machine
3068 supports both pre-inc and post-inc, or both pre-dec and post-dec. */
3069 int post_ok = 0;
3071 /* Nonzero if the opportunity actually requires post-inc or post-dec. */
3072 int do_post = 0;
3074 /* From the sign of increment, see which possibilities are conceivable
3075 on this target machine. */
3076 #ifdef HAVE_PRE_INCREMENT
3077 if (amount > 0)
3078 pre_ok = 1;
3079 #endif
3080 #ifdef HAVE_POST_INCREMENT
3081 if (amount > 0)
3082 post_ok = 1;
3083 #endif
3085 #ifdef HAVE_PRE_DECREMENT
3086 if (amount < 0)
3087 pre_ok = 1;
3088 #endif
3089 #ifdef HAVE_POST_DECREMENT
3090 if (amount < 0)
3091 post_ok = 1;
3092 #endif
3094 if (! (pre_ok || post_ok))
3095 return 0;
3097 /* It is not safe to add a side effect to a jump insn
3098 because if the incremented register is spilled and must be reloaded
3099 there would be no way to store the incremented value back in memory. */
3101 if (GET_CODE (insn) == JUMP_INSN)
3102 return 0;
3104 use = 0;
3105 if (pre_ok)
3106 use = find_use_as_address (PATTERN (insn), reg, 0);
3107 if (post_ok && (use == 0 || use == (rtx) 1))
3109 use = find_use_as_address (PATTERN (insn), reg, -amount);
3110 do_post = 1;
3113 if (use == 0 || use == (rtx) 1)
3114 return 0;
3116 if (GET_MODE_SIZE (GET_MODE (use)) != (amount > 0 ? amount : - amount))
3117 return 0;
3119 /* See if this combination of instruction and addressing mode exists. */
3120 if (! validate_change (insn, &XEXP (use, 0),
3121 gen_rtx_fmt_e (amount > 0
3122 ? (do_post ? POST_INC : PRE_INC)
3123 : (do_post ? POST_DEC : PRE_DEC),
3124 Pmode, reg), 0))
3125 return 0;
3127 /* Record that this insn now has an implicit side effect on X. */
3128 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_INC, reg, REG_NOTES (insn));
3129 return 1;
3132 #endif /* AUTO_INC_DEC */
3134 /* Find the place in the rtx X where REG is used as a memory address.
3135 Return the MEM rtx that so uses it.
3136 If PLUSCONST is nonzero, search instead for a memory address equivalent to
3137 (plus REG (const_int PLUSCONST)).
3139 If such an address does not appear, return 0.
3140 If REG appears more than once, or is used other than in such an address,
3141 return (rtx)1. */
3144 find_use_as_address (x, reg, plusconst)
3145 register rtx x;
3146 rtx reg;
3147 HOST_WIDE_INT plusconst;
3149 enum rtx_code code = GET_CODE (x);
3150 char *fmt = GET_RTX_FORMAT (code);
3151 register int i;
3152 register rtx value = 0;
3153 register rtx tem;
3155 if (code == MEM && XEXP (x, 0) == reg && plusconst == 0)
3156 return x;
3158 if (code == MEM && GET_CODE (XEXP (x, 0)) == PLUS
3159 && XEXP (XEXP (x, 0), 0) == reg
3160 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3161 && INTVAL (XEXP (XEXP (x, 0), 1)) == plusconst)
3162 return x;
3164 if (code == SIGN_EXTRACT || code == ZERO_EXTRACT)
3166 /* If REG occurs inside a MEM used in a bit-field reference,
3167 that is unacceptable. */
3168 if (find_use_as_address (XEXP (x, 0), reg, 0) != 0)
3169 return (rtx) (HOST_WIDE_INT) 1;
3172 if (x == reg)
3173 return (rtx) (HOST_WIDE_INT) 1;
3175 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3177 if (fmt[i] == 'e')
3179 tem = find_use_as_address (XEXP (x, i), reg, plusconst);
3180 if (value == 0)
3181 value = tem;
3182 else if (tem != 0)
3183 return (rtx) (HOST_WIDE_INT) 1;
3185 if (fmt[i] == 'E')
3187 register int j;
3188 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3190 tem = find_use_as_address (XVECEXP (x, i, j), reg, plusconst);
3191 if (value == 0)
3192 value = tem;
3193 else if (tem != 0)
3194 return (rtx) (HOST_WIDE_INT) 1;
3199 return value;
3202 /* Write information about registers and basic blocks into FILE.
3203 This is part of making a debugging dump. */
3205 void
3206 dump_flow_info (file)
3207 FILE *file;
3209 register int i;
3210 static char *reg_class_names[] = REG_CLASS_NAMES;
3212 fprintf (file, "%d registers.\n", max_regno);
3214 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3215 if (REG_N_REFS (i))
3217 enum reg_class class, altclass;
3218 fprintf (file, "\nRegister %d used %d times across %d insns",
3219 i, REG_N_REFS (i), REG_LIVE_LENGTH (i));
3220 if (REG_BASIC_BLOCK (i) >= 0)
3221 fprintf (file, " in block %d", REG_BASIC_BLOCK (i));
3222 if (REG_N_SETS (i))
3223 fprintf (file, "; set %d time%s", REG_N_SETS (i),
3224 (REG_N_SETS (i) == 1) ? "" : "s");
3225 if (REG_USERVAR_P (regno_reg_rtx[i]))
3226 fprintf (file, "; user var");
3227 if (REG_N_DEATHS (i) != 1)
3228 fprintf (file, "; dies in %d places", REG_N_DEATHS (i));
3229 if (REG_N_CALLS_CROSSED (i) == 1)
3230 fprintf (file, "; crosses 1 call");
3231 else if (REG_N_CALLS_CROSSED (i))
3232 fprintf (file, "; crosses %d calls", REG_N_CALLS_CROSSED (i));
3233 if (PSEUDO_REGNO_BYTES (i) != UNITS_PER_WORD)
3234 fprintf (file, "; %d bytes", PSEUDO_REGNO_BYTES (i));
3235 class = reg_preferred_class (i);
3236 altclass = reg_alternate_class (i);
3237 if (class != GENERAL_REGS || altclass != ALL_REGS)
3239 if (altclass == ALL_REGS || class == ALL_REGS)
3240 fprintf (file, "; pref %s", reg_class_names[(int) class]);
3241 else if (altclass == NO_REGS)
3242 fprintf (file, "; %s or none", reg_class_names[(int) class]);
3243 else
3244 fprintf (file, "; pref %s, else %s",
3245 reg_class_names[(int) class],
3246 reg_class_names[(int) altclass]);
3248 if (REGNO_POINTER_FLAG (i))
3249 fprintf (file, "; pointer");
3250 fprintf (file, ".\n");
3252 fprintf (file, "\n%d basic blocks.\n", n_basic_blocks);
3253 for (i = 0; i < n_basic_blocks; i++)
3255 register rtx head, jump;
3256 register int regno;
3257 fprintf (file, "\nBasic block %d: first insn %d, last %d.\n",
3259 INSN_UID (basic_block_head[i]),
3260 INSN_UID (basic_block_end[i]));
3261 /* The control flow graph's storage is freed
3262 now when flow_analysis returns.
3263 Don't try to print it if it is gone. */
3264 if (basic_block_drops_in)
3266 fprintf (file, "Reached from blocks: ");
3267 head = basic_block_head[i];
3268 if (GET_CODE (head) == CODE_LABEL)
3269 for (jump = LABEL_REFS (head);
3270 jump != head;
3271 jump = LABEL_NEXTREF (jump))
3273 register int from_block = BLOCK_NUM (CONTAINING_INSN (jump));
3274 fprintf (file, " %d", from_block);
3276 if (basic_block_drops_in[i])
3277 fprintf (file, " previous");
3279 fprintf (file, "\nRegisters live at start:");
3280 for (regno = 0; regno < max_regno; regno++)
3281 if (REGNO_REG_SET_P (basic_block_live_at_start[i], regno))
3282 fprintf (file, " %d", regno);
3283 fprintf (file, "\n");
3285 fprintf (file, "\n");
3289 /* Like print_rtl, but also print out live information for the start of each
3290 basic block. */
3292 void
3293 print_rtl_with_bb (outf, rtx_first)
3294 FILE *outf;
3295 rtx rtx_first;
3297 register rtx tmp_rtx;
3299 if (rtx_first == 0)
3300 fprintf (outf, "(nil)\n");
3302 else
3304 int i, bb;
3305 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
3306 int max_uid = get_max_uid ();
3307 int *start = (int *) alloca (max_uid * sizeof (int));
3308 int *end = (int *) alloca (max_uid * sizeof (int));
3309 enum bb_state *in_bb_p = (enum bb_state *)
3310 alloca (max_uid * sizeof (enum bb_state));
3312 for (i = 0; i < max_uid; i++)
3314 start[i] = end[i] = -1;
3315 in_bb_p[i] = NOT_IN_BB;
3318 for (i = n_basic_blocks-1; i >= 0; i--)
3320 rtx x;
3321 start[INSN_UID (basic_block_head[i])] = i;
3322 end[INSN_UID (basic_block_end[i])] = i;
3323 for (x = basic_block_head[i]; x != NULL_RTX; x = NEXT_INSN (x))
3325 in_bb_p[ INSN_UID(x)]
3326 = (in_bb_p[ INSN_UID(x)] == NOT_IN_BB)
3327 ? IN_ONE_BB : IN_MULTIPLE_BB;
3328 if (x == basic_block_end[i])
3329 break;
3333 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
3335 int did_output;
3337 if ((bb = start[INSN_UID (tmp_rtx)]) >= 0)
3339 fprintf (outf, ";; Start of basic block %d, registers live:",
3340 bb);
3342 EXECUTE_IF_SET_IN_REG_SET (basic_block_live_at_start[bb], 0, i,
3344 fprintf (outf, " %d", i);
3345 if (i < FIRST_PSEUDO_REGISTER)
3346 fprintf (outf, " [%s]",
3347 reg_names[i]);
3349 putc ('\n', outf);
3352 if (in_bb_p[ INSN_UID(tmp_rtx)] == NOT_IN_BB
3353 && GET_CODE (tmp_rtx) != NOTE
3354 && GET_CODE (tmp_rtx) != BARRIER)
3355 fprintf (outf, ";; Insn is not within a basic block\n");
3356 else if (in_bb_p[ INSN_UID(tmp_rtx)] == IN_MULTIPLE_BB)
3357 fprintf (outf, ";; Insn is in multiple basic blocks\n");
3359 did_output = print_rtl_single (outf, tmp_rtx);
3361 if ((bb = end[INSN_UID (tmp_rtx)]) >= 0)
3362 fprintf (outf, ";; End of basic block %d\n", bb);
3364 if (did_output)
3365 putc ('\n', outf);
3371 /* Integer list support. */
3373 /* Allocate a node from list *HEAD_PTR. */
3375 static int_list_ptr
3376 alloc_int_list_node (head_ptr)
3377 int_list_block **head_ptr;
3379 struct int_list_block *first_blk = *head_ptr;
3381 if (first_blk == NULL || first_blk->nodes_left <= 0)
3383 first_blk = (struct int_list_block *) xmalloc (sizeof (struct int_list_block));
3384 first_blk->nodes_left = INT_LIST_NODES_IN_BLK;
3385 first_blk->next = *head_ptr;
3386 *head_ptr = first_blk;
3389 first_blk->nodes_left--;
3390 return &first_blk->nodes[first_blk->nodes_left];
3393 /* Pointer to head of predecessor/successor block list. */
3394 static int_list_block *pred_int_list_blocks;
3396 /* Add a new node to integer list LIST with value VAL.
3397 LIST is a pointer to a list object to allow for different implementations.
3398 If *LIST is initially NULL, the list is empty.
3399 The caller must not care whether the element is added to the front or
3400 to the end of the list (to allow for different implementations). */
3402 static int_list_ptr
3403 add_int_list_node (blk_list, list, val)
3404 int_list_block **blk_list;
3405 int_list **list;
3406 int val;
3408 int_list_ptr p = alloc_int_list_node (blk_list);
3410 p->val = val;
3411 p->next = *list;
3412 *list = p;
3413 return p;
3416 /* Free the blocks of lists at BLK_LIST. */
3418 void
3419 free_int_list (blk_list)
3420 int_list_block **blk_list;
3422 int_list_block *p, *next;
3424 for (p = *blk_list; p != NULL; p = next)
3426 next = p->next;
3427 free (p);
3430 /* Mark list as empty for the next function we compile. */
3431 *blk_list = NULL;
3434 /* Predecessor/successor computation. */
3436 /* Mark PRED_BB a precessor of SUCC_BB,
3437 and conversely SUCC_BB a successor of PRED_BB. */
3439 static void
3440 add_pred_succ (pred_bb, succ_bb, s_preds, s_succs, num_preds, num_succs)
3441 int pred_bb;
3442 int succ_bb;
3443 int_list_ptr *s_preds;
3444 int_list_ptr *s_succs;
3445 int *num_preds;
3446 int *num_succs;
3448 if (succ_bb != EXIT_BLOCK)
3450 add_int_list_node (&pred_int_list_blocks, &s_preds[succ_bb], pred_bb);
3451 num_preds[succ_bb]++;
3453 if (pred_bb != ENTRY_BLOCK)
3455 add_int_list_node (&pred_int_list_blocks, &s_succs[pred_bb], succ_bb);
3456 num_succs[pred_bb]++;
3460 /* Compute the predecessors and successors for each block. */
3461 void
3462 compute_preds_succs (s_preds, s_succs, num_preds, num_succs)
3463 int_list_ptr *s_preds;
3464 int_list_ptr *s_succs;
3465 int *num_preds;
3466 int *num_succs;
3468 int bb, clear_local_bb_vars = 0;
3470 bzero ((char *) s_preds, n_basic_blocks * sizeof (int_list_ptr));
3471 bzero ((char *) s_succs, n_basic_blocks * sizeof (int_list_ptr));
3472 bzero ((char *) num_preds, n_basic_blocks * sizeof (int));
3473 bzero ((char *) num_succs, n_basic_blocks * sizeof (int));
3475 /* This routine can be called after life analysis; in that case
3476 basic_block_drops_in and uid_block_number will not be available
3477 and we must recompute their values. */
3478 if (basic_block_drops_in == NULL || uid_block_number == NULL)
3480 clear_local_bb_vars = 1;
3481 basic_block_drops_in = (char *) alloca (n_basic_blocks);
3482 uid_block_number = (int *) alloca ((get_max_uid () + 1) * sizeof (int));
3484 bzero ((char *) basic_block_drops_in, n_basic_blocks * sizeof (char));
3485 bzero ((char *) uid_block_number, n_basic_blocks * sizeof (int));
3487 /* Scan each basic block setting basic_block_drops_in and
3488 uid_block_number as needed. */
3489 for (bb = 0; bb < n_basic_blocks; bb++)
3491 rtx insn, stop_insn;
3493 if (bb == 0)
3494 stop_insn = NULL_RTX;
3495 else
3496 stop_insn = basic_block_end[bb-1];
3498 /* Look backwards from the start of this block. Stop if we
3499 hit the start of the function or the end of a previous
3500 block. Don't walk backwards through blocks that are just
3501 deleted insns! */
3502 for (insn = PREV_INSN (basic_block_head[bb]);
3503 insn && insn != stop_insn && GET_CODE (insn) == NOTE;
3504 insn = PREV_INSN (insn))
3507 /* Never set basic_block_drops_in for the first block. It is
3508 implicit.
3510 If we stopped on anything other than a BARRIER, then this
3511 block drops in. */
3512 if (bb != 0)
3513 basic_block_drops_in[bb] = (insn ? GET_CODE (insn) != BARRIER : 1);
3515 insn = basic_block_head[bb];
3516 while (insn)
3518 BLOCK_NUM (insn) = bb;
3519 if (insn == basic_block_end[bb])
3520 break;
3521 insn = NEXT_INSN (insn);
3526 for (bb = 0; bb < n_basic_blocks; bb++)
3528 rtx head;
3529 rtx jump;
3531 head = BLOCK_HEAD (bb);
3533 if (GET_CODE (head) == CODE_LABEL)
3534 for (jump = LABEL_REFS (head);
3535 jump != head;
3536 jump = LABEL_NEXTREF (jump))
3538 if (! INSN_DELETED_P (CONTAINING_INSN (jump))
3539 && (GET_CODE (CONTAINING_INSN (jump)) != NOTE
3540 || (NOTE_LINE_NUMBER (CONTAINING_INSN (jump))
3541 != NOTE_INSN_DELETED)))
3542 add_pred_succ (BLOCK_NUM (CONTAINING_INSN (jump)), bb,
3543 s_preds, s_succs, num_preds, num_succs);
3546 jump = BLOCK_END (bb);
3547 /* If this is a RETURN insn or a conditional jump in the last
3548 basic block, or a non-jump insn in the last basic block, then
3549 this block reaches the exit block. */
3550 if ((GET_CODE (jump) == JUMP_INSN && GET_CODE (PATTERN (jump)) == RETURN)
3551 || (((GET_CODE (jump) == JUMP_INSN
3552 && condjump_p (jump) && !simplejump_p (jump))
3553 || GET_CODE (jump) != JUMP_INSN)
3554 && (bb == n_basic_blocks - 1)))
3555 add_pred_succ (bb, EXIT_BLOCK, s_preds, s_succs, num_preds, num_succs);
3557 if (basic_block_drops_in[bb])
3558 add_pred_succ (bb - 1, bb, s_preds, s_succs, num_preds, num_succs);
3561 add_pred_succ (ENTRY_BLOCK, 0, s_preds, s_succs, num_preds, num_succs);
3564 /* If we allocated any variables in temporary storage, clear out the
3565 pointer to the local storage to avoid dangling pointers. */
3566 if (clear_local_bb_vars)
3568 basic_block_drops_in = NULL;
3569 uid_block_number = NULL;
3574 void
3575 dump_bb_data (file, preds, succs)
3576 FILE *file;
3577 int_list_ptr *preds;
3578 int_list_ptr *succs;
3580 int bb;
3581 int_list_ptr p;
3583 fprintf (file, "BB data\n\n");
3584 for (bb = 0; bb < n_basic_blocks; bb++)
3586 fprintf (file, "BB %d, start %d, end %d\n", bb,
3587 INSN_UID (BLOCK_HEAD (bb)), INSN_UID (BLOCK_END (bb)));
3588 fprintf (file, " preds:");
3589 for (p = preds[bb]; p != NULL; p = p->next)
3591 int pred_bb = INT_LIST_VAL (p);
3592 if (pred_bb == ENTRY_BLOCK)
3593 fprintf (file, " entry");
3594 else
3595 fprintf (file, " %d", pred_bb);
3597 fprintf (file, "\n");
3598 fprintf (file, " succs:");
3599 for (p = succs[bb]; p != NULL; p = p->next)
3601 int succ_bb = INT_LIST_VAL (p);
3602 if (succ_bb == EXIT_BLOCK)
3603 fprintf (file, " exit");
3604 else
3605 fprintf (file, " %d", succ_bb);
3607 fprintf (file, "\n");
3609 fprintf (file, "\n");
3612 void
3613 dump_sbitmap (file, bmap)
3614 FILE *file;
3615 sbitmap bmap;
3617 int i,j,n;
3618 int set_size = bmap->size;
3619 int total_bits = bmap->n_bits;
3621 fprintf (file, " ");
3622 for (i = n = 0; i < set_size && n < total_bits; i++)
3624 for (j = 0; j < SBITMAP_ELT_BITS && n < total_bits; j++, n++)
3626 if (n != 0 && n % 10 == 0)
3627 fprintf (file, " ");
3628 fprintf (file, "%d", (bmap->elms[i] & (1L << j)) != 0);
3631 fprintf (file, "\n");
3634 void
3635 dump_sbitmap_vector (file, title, subtitle, bmaps, n_maps)
3636 FILE *file;
3637 char *title, *subtitle;
3638 sbitmap *bmaps;
3639 int n_maps;
3641 int bb;
3643 fprintf (file, "%s\n", title);
3644 for (bb = 0; bb < n_maps; bb++)
3646 fprintf (file, "%s %d\n", subtitle, bb);
3647 dump_sbitmap (file, bmaps[bb]);
3649 fprintf (file, "\n");
3652 /* Free basic block data storage. */
3654 void
3655 free_bb_mem ()
3657 free_int_list (&pred_int_list_blocks);
3660 /* Bitmap manipulation routines. */
3662 /* Allocate a simple bitmap of N_ELMS bits. */
3664 sbitmap
3665 sbitmap_alloc (n_elms)
3666 int n_elms;
3668 int bytes, size, amt;
3669 sbitmap bmap;
3671 size = SBITMAP_SET_SIZE (n_elms);
3672 bytes = size * sizeof (SBITMAP_ELT_TYPE);
3673 amt = (sizeof (struct simple_bitmap_def)
3674 + bytes - sizeof (SBITMAP_ELT_TYPE));
3675 bmap = (sbitmap) xmalloc (amt);
3676 bmap->n_bits = n_elms;
3677 bmap->size = size;
3678 bmap->bytes = bytes;
3679 return bmap;
3682 /* Allocate a vector of N_VECS bitmaps of N_ELMS bits. */
3684 sbitmap *
3685 sbitmap_vector_alloc (n_vecs, n_elms)
3686 int n_vecs, n_elms;
3688 int i, bytes, offset, elm_bytes, size, amt, vector_bytes;
3689 sbitmap *bitmap_vector;
3691 size = SBITMAP_SET_SIZE (n_elms);
3692 bytes = size * sizeof (SBITMAP_ELT_TYPE);
3693 elm_bytes = (sizeof (struct simple_bitmap_def)
3694 + bytes - sizeof (SBITMAP_ELT_TYPE));
3695 vector_bytes = n_vecs * sizeof (sbitmap *);
3697 /* Round up `vector_bytes' to account for the alignment requirements
3698 of an sbitmap. One could allocate the vector-table and set of sbitmaps
3699 separately, but that requires maintaining two pointers or creating
3700 a cover struct to hold both pointers (so our result is still just
3701 one pointer). Neither is a bad idea, but this is simpler for now. */
3703 /* Based on DEFAULT_ALIGNMENT computation in obstack.c. */
3704 struct { char x; SBITMAP_ELT_TYPE y; } align;
3705 int alignment = (char *) & align.y - & align.x;
3706 vector_bytes = (vector_bytes + alignment - 1) & ~ (alignment - 1);
3709 amt = vector_bytes + (n_vecs * elm_bytes);
3710 bitmap_vector = (sbitmap *) xmalloc (amt);
3712 for (i = 0, offset = vector_bytes;
3713 i < n_vecs;
3714 i++, offset += elm_bytes)
3716 sbitmap b = (sbitmap) ((char *) bitmap_vector + offset);
3717 bitmap_vector[i] = b;
3718 b->n_bits = n_elms;
3719 b->size = size;
3720 b->bytes = bytes;
3723 return bitmap_vector;
3726 /* Copy sbitmap SRC to DST. */
3728 void
3729 sbitmap_copy (dst, src)
3730 sbitmap dst, src;
3732 bcopy (src->elms, dst->elms, sizeof (SBITMAP_ELT_TYPE) * dst->size);
3735 /* Zero all elements in a bitmap. */
3737 void
3738 sbitmap_zero (bmap)
3739 sbitmap bmap;
3741 bzero ((char *) bmap->elms, bmap->bytes);
3744 /* Set to ones all elements in a bitmap. */
3746 void
3747 sbitmap_ones (bmap)
3748 sbitmap bmap;
3750 memset (bmap->elms, -1, bmap->bytes);
3753 /* Zero a vector of N_VECS bitmaps. */
3755 void
3756 sbitmap_vector_zero (bmap, n_vecs)
3757 sbitmap *bmap;
3758 int n_vecs;
3760 int i;
3762 for (i = 0; i < n_vecs; i++)
3763 sbitmap_zero (bmap[i]);
3766 /* Set to ones a vector of N_VECS bitmaps. */
3768 void
3769 sbitmap_vector_ones (bmap, n_vecs)
3770 sbitmap *bmap;
3771 int n_vecs;
3773 int i;
3775 for (i = 0; i < n_vecs; i++)
3776 sbitmap_ones (bmap[i]);
3779 /* Set DST to be A union (B - C).
3780 DST = A | (B & ~C).
3781 Return non-zero if any change is made. */
3784 sbitmap_union_of_diff (dst, a, b, c)
3785 sbitmap dst, a, b, c;
3787 int i,changed;
3788 sbitmap_ptr dstp, ap, bp, cp;
3790 changed = 0;
3791 dstp = dst->elms;
3792 ap = a->elms;
3793 bp = b->elms;
3794 cp = c->elms;
3795 for (i = 0; i < dst->size; i++)
3797 SBITMAP_ELT_TYPE tmp = *ap | (*bp & ~*cp);
3798 if (*dstp != tmp)
3799 changed = 1;
3800 *dstp = tmp;
3801 dstp++; ap++; bp++; cp++;
3803 return changed;
3806 /* Set bitmap DST to the bitwise negation of the bitmap SRC. */
3808 void
3809 sbitmap_not (dst, src)
3810 sbitmap dst, src;
3812 int i;
3813 sbitmap_ptr dstp, ap;
3815 dstp = dst->elms;
3816 ap = src->elms;
3817 for (i = 0; i < dst->size; i++)
3819 SBITMAP_ELT_TYPE tmp = ~(*ap);
3820 *dstp = tmp;
3821 dstp++; ap++;
3825 /* Set the bits in DST to be the difference between the bits
3826 in A and the bits in B. i.e. dst = a - b.
3827 The - operator is implemented as a & (~b). */
3829 void
3830 sbitmap_difference (dst, a, b)
3831 sbitmap dst, a, b;
3833 int i;
3834 sbitmap_ptr dstp, ap, bp;
3836 dstp = dst->elms;
3837 ap = a->elms;
3838 bp = b->elms;
3839 for (i = 0; i < dst->size; i++)
3840 *dstp++ = *ap++ & (~*bp++);
3843 /* Set DST to be (A and B)).
3844 Return non-zero if any change is made. */
3847 sbitmap_a_and_b (dst, a, b)
3848 sbitmap dst, a, b;
3850 int i,changed;
3851 sbitmap_ptr dstp, ap, bp;
3853 changed = 0;
3854 dstp = dst->elms;
3855 ap = a->elms;
3856 bp = b->elms;
3857 for (i = 0; i < dst->size; i++)
3859 SBITMAP_ELT_TYPE tmp = *ap & *bp;
3860 if (*dstp != tmp)
3861 changed = 1;
3862 *dstp = tmp;
3863 dstp++; ap++; bp++;
3865 return changed;
3867 /* Set DST to be (A or B)).
3868 Return non-zero if any change is made. */
3871 sbitmap_a_or_b (dst, a, b)
3872 sbitmap dst, a, b;
3874 int i,changed;
3875 sbitmap_ptr dstp, ap, bp;
3877 changed = 0;
3878 dstp = dst->elms;
3879 ap = a->elms;
3880 bp = b->elms;
3881 for (i = 0; i < dst->size; i++)
3883 SBITMAP_ELT_TYPE tmp = *ap | *bp;
3884 if (*dstp != tmp)
3885 changed = 1;
3886 *dstp = tmp;
3887 dstp++; ap++; bp++;
3889 return changed;
3892 /* Set DST to be (A or (B and C)).
3893 Return non-zero if any change is made. */
3896 sbitmap_a_or_b_and_c (dst, a, b, c)
3897 sbitmap dst, a, b, c;
3899 int i,changed;
3900 sbitmap_ptr dstp, ap, bp, cp;
3902 changed = 0;
3903 dstp = dst->elms;
3904 ap = a->elms;
3905 bp = b->elms;
3906 cp = c->elms;
3907 for (i = 0; i < dst->size; i++)
3909 SBITMAP_ELT_TYPE tmp = *ap | (*bp & *cp);
3910 if (*dstp != tmp)
3911 changed = 1;
3912 *dstp = tmp;
3913 dstp++; ap++; bp++; cp++;
3915 return changed;
3918 /* Set DST to be (A ann (B or C)).
3919 Return non-zero if any change is made. */
3922 sbitmap_a_and_b_or_c (dst, a, b, c)
3923 sbitmap dst, a, b, c;
3925 int i,changed;
3926 sbitmap_ptr dstp, ap, bp, cp;
3928 changed = 0;
3929 dstp = dst->elms;
3930 ap = a->elms;
3931 bp = b->elms;
3932 cp = c->elms;
3933 for (i = 0; i < dst->size; i++)
3935 SBITMAP_ELT_TYPE tmp = *ap & (*bp | *cp);
3936 if (*dstp != tmp)
3937 changed = 1;
3938 *dstp = tmp;
3939 dstp++; ap++; bp++; cp++;
3941 return changed;
3944 /* Set the bitmap DST to the intersection of SRC of all predecessors or
3945 successors of block number BB (PRED_SUCC says which). */
3947 void
3948 sbitmap_intersect_of_predsucc (dst, src, bb, pred_succ)
3949 sbitmap dst;
3950 sbitmap *src;
3951 int bb;
3952 int_list_ptr *pred_succ;
3954 int_list_ptr ps;
3955 int ps_bb;
3956 int set_size = dst->size;
3958 ps = pred_succ[bb];
3960 /* It is possible that there are no predecessors(/successors).
3961 This can happen for example in unreachable code. */
3963 if (ps == NULL)
3965 /* In APL-speak this is the `and' reduction of the empty set and thus
3966 the result is the identity for `and'. */
3967 sbitmap_ones (dst);
3968 return;
3971 /* Set result to first predecessor/successor. */
3973 for ( ; ps != NULL; ps = ps->next)
3975 ps_bb = INT_LIST_VAL (ps);
3976 if (ps_bb == ENTRY_BLOCK || ps_bb == EXIT_BLOCK)
3977 continue;
3978 sbitmap_copy (dst, src[ps_bb]);
3979 /* Break out since we're only doing first predecessor. */
3980 break;
3982 if (ps == NULL)
3983 return;
3985 /* Now do the remaining predecessors/successors. */
3987 for (ps = ps->next; ps != NULL; ps = ps->next)
3989 int i;
3990 sbitmap_ptr p,r;
3992 ps_bb = INT_LIST_VAL (ps);
3993 if (ps_bb == ENTRY_BLOCK || ps_bb == EXIT_BLOCK)
3994 continue;
3996 p = src[ps_bb]->elms;
3997 r = dst->elms;
3999 for (i = 0; i < set_size; i++)
4000 *r++ &= *p++;
4004 /* Set the bitmap DST to the intersection of SRC of all predecessors
4005 of block number BB. */
4007 void
4008 sbitmap_intersect_of_predecessors (dst, src, bb, s_preds)
4009 sbitmap dst;
4010 sbitmap *src;
4011 int bb;
4012 int_list_ptr *s_preds;
4014 sbitmap_intersect_of_predsucc (dst, src, bb, s_preds);
4017 /* Set the bitmap DST to the intersection of SRC of all successors
4018 of block number BB. */
4020 void
4021 sbitmap_intersect_of_successors (dst, src, bb, s_succs)
4022 sbitmap dst;
4023 sbitmap *src;
4024 int bb;
4025 int_list_ptr *s_succs;
4027 sbitmap_intersect_of_predsucc (dst, src, bb, s_succs);
4030 /* Set the bitmap DST to the union of SRC of all predecessors/successors of
4031 block number BB. */
4033 void
4034 sbitmap_union_of_predsucc (dst, src, bb, pred_succ)
4035 sbitmap dst;
4036 sbitmap *src;
4037 int bb;
4038 int_list_ptr *pred_succ;
4040 int_list_ptr ps;
4041 int ps_bb;
4042 int set_size = dst->size;
4044 ps = pred_succ[bb];
4046 /* It is possible that there are no predecessors(/successors).
4047 This can happen for example in unreachable code. */
4049 if (ps == NULL)
4051 /* In APL-speak this is the `or' reduction of the empty set and thus
4052 the result is the identity for `or'. */
4053 sbitmap_zero (dst);
4054 return;
4057 /* Set result to first predecessor/successor. */
4059 for ( ; ps != NULL; ps = ps->next)
4061 ps_bb = INT_LIST_VAL (ps);
4062 if (ps_bb == ENTRY_BLOCK || ps_bb == EXIT_BLOCK)
4063 continue;
4064 sbitmap_copy (dst, src[ps_bb]);
4065 /* Break out since we're only doing first predecessor. */
4066 break;
4068 if (ps == NULL)
4069 return;
4071 /* Now do the remaining predecessors/successors. */
4073 for (ps = ps->next; ps != NULL; ps = ps->next)
4075 int i;
4076 sbitmap_ptr p,r;
4078 ps_bb = INT_LIST_VAL (ps);
4079 if (ps_bb == ENTRY_BLOCK || ps_bb == EXIT_BLOCK)
4080 continue;
4082 p = src[ps_bb]->elms;
4083 r = dst->elms;
4085 for (i = 0; i < set_size; i++)
4086 *r++ |= *p++;
4090 /* Set the bitmap DST to the union of SRC of all predecessors of
4091 block number BB. */
4093 void
4094 sbitmap_union_of_predecessors (dst, src, bb, s_preds)
4095 sbitmap dst;
4096 sbitmap *src;
4097 int bb;
4098 int_list_ptr *s_preds;
4100 sbitmap_union_of_predsucc (dst, src, bb, s_preds);
4103 /* Set the bitmap DST to the union of SRC of all predecessors of
4104 block number BB. */
4106 void
4107 sbitmap_union_of_successors (dst, src, bb, s_succ)
4108 sbitmap dst;
4109 sbitmap *src;
4110 int bb;
4111 int_list_ptr *s_succ;
4113 sbitmap_union_of_predsucc (dst, src, bb, s_succ);
4116 /* Compute dominator relationships. */
4117 void
4118 compute_dominators (dominators, post_dominators, s_preds, s_succs)
4119 sbitmap *dominators;
4120 sbitmap *post_dominators;
4121 int_list_ptr *s_preds;
4122 int_list_ptr *s_succs;
4124 int bb, changed, passes;
4125 sbitmap *temp_bitmap;
4127 temp_bitmap = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
4128 sbitmap_vector_ones (dominators, n_basic_blocks);
4129 sbitmap_vector_ones (post_dominators, n_basic_blocks);
4130 sbitmap_vector_zero (temp_bitmap, n_basic_blocks);
4132 sbitmap_zero (dominators[0]);
4133 SET_BIT (dominators[0], 0);
4135 sbitmap_zero (post_dominators[n_basic_blocks-1]);
4136 SET_BIT (post_dominators[n_basic_blocks-1], 0);
4138 passes = 0;
4139 changed = 1;
4140 while (changed)
4142 changed = 0;
4143 for (bb = 1; bb < n_basic_blocks; bb++)
4145 sbitmap_intersect_of_predecessors (temp_bitmap[bb], dominators,
4146 bb, s_preds);
4147 SET_BIT (temp_bitmap[bb], bb);
4148 changed |= sbitmap_a_and_b (dominators[bb],
4149 dominators[bb],
4150 temp_bitmap[bb]);
4151 sbitmap_intersect_of_successors (temp_bitmap[bb], post_dominators,
4152 bb, s_succs);
4153 SET_BIT (temp_bitmap[bb], bb);
4154 changed |= sbitmap_a_and_b (post_dominators[bb],
4155 post_dominators[bb],
4156 temp_bitmap[bb]);
4158 passes++;
4161 free (temp_bitmap);
4164 /* Count for a single SET rtx, X. */
4166 static void
4167 count_reg_sets_1 (x)
4168 rtx x;
4170 register int regno;
4171 register rtx reg = SET_DEST (x);
4173 /* Find the register that's set/clobbered. */
4174 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
4175 || GET_CODE (reg) == SIGN_EXTRACT
4176 || GET_CODE (reg) == STRICT_LOW_PART)
4177 reg = XEXP (reg, 0);
4179 if (GET_CODE (reg) == PARALLEL
4180 && GET_MODE (reg) == BLKmode)
4182 register int i;
4183 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
4184 count_reg_sets_1 (XVECEXP (reg, 0, i));
4185 return;
4188 if (GET_CODE (reg) == REG)
4190 regno = REGNO (reg);
4191 if (regno >= FIRST_PSEUDO_REGISTER)
4193 /* Count (weighted) references, stores, etc. This counts a
4194 register twice if it is modified, but that is correct. */
4195 REG_N_SETS (regno)++;
4197 REG_N_REFS (regno) += loop_depth;
4202 /* Increment REG_N_SETS for each SET or CLOBBER found in X; also increment
4203 REG_N_REFS by the current loop depth for each SET or CLOBBER found. */
4205 static void
4206 count_reg_sets (x)
4207 rtx x;
4209 register RTX_CODE code = GET_CODE (x);
4211 if (code == SET || code == CLOBBER)
4212 count_reg_sets_1 (x);
4213 else if (code == PARALLEL)
4215 register int i;
4216 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
4218 code = GET_CODE (XVECEXP (x, 0, i));
4219 if (code == SET || code == CLOBBER)
4220 count_reg_sets_1 (XVECEXP (x, 0, i));
4225 /* Increment REG_N_REFS by the current loop depth each register reference
4226 found in X. */
4228 static void
4229 count_reg_references (x)
4230 rtx x;
4232 register RTX_CODE code;
4234 retry:
4235 code = GET_CODE (x);
4236 switch (code)
4238 case LABEL_REF:
4239 case SYMBOL_REF:
4240 case CONST_INT:
4241 case CONST:
4242 case CONST_DOUBLE:
4243 case PC:
4244 case ADDR_VEC:
4245 case ADDR_DIFF_VEC:
4246 case ASM_INPUT:
4247 return;
4249 #ifdef HAVE_cc0
4250 case CC0:
4251 return;
4252 #endif
4254 case CLOBBER:
4255 /* If we are clobbering a MEM, mark any registers inside the address
4256 as being used. */
4257 if (GET_CODE (XEXP (x, 0)) == MEM)
4258 count_reg_references (XEXP (XEXP (x, 0), 0));
4259 return;
4261 case SUBREG:
4262 /* While we're here, optimize this case. */
4263 x = SUBREG_REG (x);
4265 /* In case the SUBREG is not of a register, don't optimize */
4266 if (GET_CODE (x) != REG)
4268 count_reg_references (x);
4269 return;
4272 /* ... fall through ... */
4274 case REG:
4275 if (REGNO (x) >= FIRST_PSEUDO_REGISTER)
4276 REG_N_REFS (REGNO (x)) += loop_depth;
4277 return;
4279 case SET:
4281 register rtx testreg = SET_DEST (x);
4282 int mark_dest = 0;
4284 /* If storing into MEM, don't show it as being used. But do
4285 show the address as being used. */
4286 if (GET_CODE (testreg) == MEM)
4288 count_reg_references (XEXP (testreg, 0));
4289 count_reg_references (SET_SRC (x));
4290 return;
4293 /* Storing in STRICT_LOW_PART is like storing in a reg
4294 in that this SET might be dead, so ignore it in TESTREG.
4295 but in some other ways it is like using the reg.
4297 Storing in a SUBREG or a bit field is like storing the entire
4298 register in that if the register's value is not used
4299 then this SET is not needed. */
4300 while (GET_CODE (testreg) == STRICT_LOW_PART
4301 || GET_CODE (testreg) == ZERO_EXTRACT
4302 || GET_CODE (testreg) == SIGN_EXTRACT
4303 || GET_CODE (testreg) == SUBREG)
4305 /* Modifying a single register in an alternate mode
4306 does not use any of the old value. But these other
4307 ways of storing in a register do use the old value. */
4308 if (GET_CODE (testreg) == SUBREG
4309 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
4311 else
4312 mark_dest = 1;
4314 testreg = XEXP (testreg, 0);
4317 /* If this is a store into a register,
4318 recursively scan the value being stored. */
4320 if ((GET_CODE (testreg) == PARALLEL
4321 && GET_MODE (testreg) == BLKmode)
4322 || GET_CODE (testreg) == REG)
4324 count_reg_references (SET_SRC (x));
4325 if (mark_dest)
4326 count_reg_references (SET_DEST (x));
4327 return;
4330 break;
4332 default:
4333 break;
4336 /* Recursively scan the operands of this expression. */
4339 register char *fmt = GET_RTX_FORMAT (code);
4340 register int i;
4342 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4344 if (fmt[i] == 'e')
4346 /* Tail recursive case: save a function call level. */
4347 if (i == 0)
4349 x = XEXP (x, 0);
4350 goto retry;
4352 count_reg_references (XEXP (x, i));
4354 else if (fmt[i] == 'E')
4356 register int j;
4357 for (j = 0; j < XVECLEN (x, i); j++)
4358 count_reg_references (XVECEXP (x, i, j));
4364 /* Recompute register set/reference counts immediately prior to register
4365 allocation.
4367 This avoids problems with set/reference counts changing to/from values
4368 which have special meanings to the register allocators.
4370 Additionally, the reference counts are the primary component used by the
4371 register allocators to prioritize pseudos for allocation to hard regs.
4372 More accurate reference counts generally lead to better register allocation.
4374 It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and
4375 possibly other information which is used by the register allocators. */
4377 void
4378 recompute_reg_usage (f)
4379 rtx f;
4381 rtx insn;
4382 int i, max_reg;
4384 /* Clear out the old data. */
4385 max_reg = max_reg_num ();
4386 for (i = FIRST_PSEUDO_REGISTER; i < max_reg; i++)
4388 REG_N_SETS (i) = 0;
4389 REG_N_REFS (i) = 0;
4392 /* Scan each insn in the chain and count how many times each register is
4393 set/used. */
4394 loop_depth = 1;
4395 for (insn = f; insn; insn = NEXT_INSN (insn))
4397 /* Keep track of loop depth. */
4398 if (GET_CODE (insn) == NOTE)
4400 /* Look for loop boundaries. */
4401 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END)
4402 loop_depth--;
4403 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
4404 loop_depth++;
4406 /* If we have LOOP_DEPTH == 0, there has been a bookkeeping error.
4407 Abort now rather than setting register status incorrectly. */
4408 if (loop_depth == 0)
4409 abort ();
4411 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
4413 rtx links;
4415 /* This call will increment REG_N_SETS for each SET or CLOBBER
4416 of a register in INSN. It will also increment REG_N_REFS
4417 by the loop depth for each set of a register in INSN. */
4418 count_reg_sets (PATTERN (insn));
4420 /* count_reg_sets does not detect autoincrement address modes, so
4421 detect them here by looking at the notes attached to INSN. */
4422 for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
4424 if (REG_NOTE_KIND (links) == REG_INC)
4425 /* Count (weighted) references, stores, etc. This counts a
4426 register twice if it is modified, but that is correct. */
4427 REG_N_SETS (REGNO (XEXP (links, 0)))++;
4430 /* This call will increment REG_N_REFS by the current loop depth for
4431 each reference to a register in INSN. */
4432 count_reg_references (PATTERN (insn));
4434 /* count_reg_references will not include counts for arguments to
4435 function calls, so detect them here by examining the
4436 CALL_INSN_FUNCTION_USAGE data. */
4437 if (GET_CODE (insn) == CALL_INSN)
4439 rtx note;
4441 for (note = CALL_INSN_FUNCTION_USAGE (insn);
4442 note;
4443 note = XEXP (note, 1))
4444 if (GET_CODE (XEXP (note, 0)) == USE)
4445 count_reg_references (SET_DEST (XEXP (note, 0)));