H
[official-gcc.git] / gcc / flow.c
bloba7865730d98ba5beb9219f54b6ba0af117499869
1 /* Data flow analysis for GNU compiler.
2 Copyright (C) 1987, 88, 92-97, 1998 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
22 /* This file contains the data flow analysis pass of the compiler.
23 It computes data flow information
24 which tells combine_instructions which insns to consider combining
25 and controls register allocation.
27 Additional data flow information that is too bulky to record
28 is generated during the analysis, and is used at that time to
29 create autoincrement and autodecrement addressing.
31 The first step is dividing the function into basic blocks.
32 find_basic_blocks does this. Then life_analysis determines
33 where each register is live and where it is dead.
35 ** find_basic_blocks **
37 find_basic_blocks divides the current function's rtl
38 into basic blocks. It records the beginnings and ends of the
39 basic blocks in the vectors basic_block_head and basic_block_end,
40 and the number of blocks in n_basic_blocks.
42 find_basic_blocks also finds any unreachable loops
43 and deletes them.
45 ** life_analysis **
47 life_analysis is called immediately after find_basic_blocks.
48 It uses the basic block information to determine where each
49 hard or pseudo register is live.
51 ** live-register info **
53 The information about where each register is live is in two parts:
54 the REG_NOTES of insns, and the vector basic_block_live_at_start.
56 basic_block_live_at_start has an element for each basic block,
57 and the element is a bit-vector with a bit for each hard or pseudo
58 register. The bit is 1 if the register is live at the beginning
59 of the basic block.
61 Two types of elements can be added to an insn's REG_NOTES.
62 A REG_DEAD note is added to an insn's REG_NOTES for any register
63 that meets both of two conditions: The value in the register is not
64 needed in subsequent insns and the insn does not replace the value in
65 the register (in the case of multi-word hard registers, the value in
66 each register must be replaced by the insn to avoid a REG_DEAD note).
68 In the vast majority of cases, an object in a REG_DEAD note will be
69 used somewhere in the insn. The (rare) exception to this is if an
70 insn uses a multi-word hard register and only some of the registers are
71 needed in subsequent insns. In that case, REG_DEAD notes will be
72 provided for those hard registers that are not subsequently needed.
73 Partial REG_DEAD notes of this type do not occur when an insn sets
74 only some of the hard registers used in such a multi-word operand;
75 omitting REG_DEAD notes for objects stored in an insn is optional and
76 the desire to do so does not justify the complexity of the partial
77 REG_DEAD notes.
79 REG_UNUSED notes are added for each register that is set by the insn
80 but is unused subsequently (if every register set by the insn is unused
81 and the insn does not reference memory or have some other side-effect,
82 the insn is deleted instead). If only part of a multi-word hard
83 register is used in a subsequent insn, REG_UNUSED notes are made for
84 the parts that will not be used.
86 To determine which registers are live after any insn, one can
87 start from the beginning of the basic block and scan insns, noting
88 which registers are set by each insn and which die there.
90 ** Other actions of life_analysis **
92 life_analysis sets up the LOG_LINKS fields of insns because the
93 information needed to do so is readily available.
95 life_analysis deletes insns whose only effect is to store a value
96 that is never used.
98 life_analysis notices cases where a reference to a register as
99 a memory address can be combined with a preceding or following
100 incrementation or decrementation of the register. The separate
101 instruction to increment or decrement is deleted and the address
102 is changed to a POST_INC or similar rtx.
104 Each time an incrementing or decrementing address is created,
105 a REG_INC element is added to the insn's REG_NOTES list.
107 life_analysis fills in certain vectors containing information about
108 register usage: reg_n_refs, reg_n_deaths, reg_n_sets, reg_live_length,
109 reg_n_calls_crosses and reg_basic_block.
111 life_analysis sets current_function_sp_is_unchanging if the function
112 doesn't modify the stack pointer. */
114 #include "config.h"
115 #include "system.h"
116 #include "rtl.h"
117 #include "basic-block.h"
118 #include "insn-config.h"
119 #include "regs.h"
120 #include "hard-reg-set.h"
121 #include "flags.h"
122 #include "output.h"
123 #include "except.h"
124 #include "toplev.h"
126 #include "obstack.h"
127 #define obstack_chunk_alloc xmalloc
128 #define obstack_chunk_free free
130 /* The contents of the current function definition are allocated
131 in this obstack, and all are freed at the end of the function.
132 For top-level functions, this is temporary_obstack.
133 Separate obstacks are made for nested functions. */
135 extern struct obstack *function_obstack;
137 /* List of labels that must never be deleted. */
138 extern rtx forced_labels;
140 /* Get the basic block number of an insn.
141 This info should not be expected to remain available
142 after the end of life_analysis. */
144 /* This is the limit of the allocated space in the following two arrays. */
146 static int max_uid_for_flow;
148 #define BLOCK_NUM(INSN) uid_block_number[INSN_UID (INSN)]
150 /* This is where the BLOCK_NUM values are really stored.
151 This is set up by find_basic_blocks and used there and in life_analysis,
152 and then freed. */
154 int *uid_block_number;
156 /* INSN_VOLATILE (insn) is 1 if the insn refers to anything volatile. */
158 #define INSN_VOLATILE(INSN) uid_volatile[INSN_UID (INSN)]
159 static char *uid_volatile;
161 /* Number of basic blocks in the current function. */
163 int n_basic_blocks;
165 /* Maximum register number used in this function, plus one. */
167 int max_regno;
169 /* Maximum number of SCRATCH rtx's used in any basic block of this
170 function. */
172 int max_scratch;
174 /* Number of SCRATCH rtx's in the current block. */
176 static int num_scratch;
178 /* Indexed by n, giving various register information */
180 varray_type reg_n_info;
182 /* Size of the reg_n_info table. */
184 unsigned int reg_n_max;
186 /* Element N is the next insn that uses (hard or pseudo) register number N
187 within the current basic block; or zero, if there is no such insn.
188 This is valid only during the final backward scan in propagate_block. */
190 static rtx *reg_next_use;
192 /* Size of a regset for the current function,
193 in (1) bytes and (2) elements. */
195 int regset_bytes;
196 int regset_size;
198 /* Element N is first insn in basic block N.
199 This info lasts until we finish compiling the function. */
201 rtx *basic_block_head;
203 /* Element N is last insn in basic block N.
204 This info lasts until we finish compiling the function. */
206 rtx *basic_block_end;
208 /* Element N indicates whether basic block N can be reached through a
209 computed jump. */
211 char *basic_block_computed_jump_target;
213 /* Element N is a regset describing the registers live
214 at the start of basic block N.
215 This info lasts until we finish compiling the function. */
217 regset *basic_block_live_at_start;
219 /* Regset of regs live when calls to `setjmp'-like functions happen. */
221 regset regs_live_at_setjmp;
223 /* List made of EXPR_LIST rtx's which gives pairs of pseudo registers
224 that have to go in the same hard reg.
225 The first two regs in the list are a pair, and the next two
226 are another pair, etc. */
227 rtx regs_may_share;
229 /* Element N is nonzero if control can drop into basic block N
230 from the preceding basic block. Freed after life_analysis. */
232 static char *basic_block_drops_in;
234 /* Element N is depth within loops of the last insn in basic block number N.
235 Freed after life_analysis. */
237 static short *basic_block_loop_depth;
239 /* Depth within loops of basic block being scanned for lifetime analysis,
240 plus one. This is the weight attached to references to registers. */
242 static int loop_depth;
244 /* During propagate_block, this is non-zero if the value of CC0 is live. */
246 static int cc0_live;
248 /* During propagate_block, this contains the last MEM stored into. It
249 is used to eliminate consecutive stores to the same location. */
251 static rtx last_mem_set;
253 /* Set of registers that may be eliminable. These are handled specially
254 in updating regs_ever_live. */
256 static HARD_REG_SET elim_reg_set;
258 /* Forward declarations */
259 static void find_basic_blocks_1 PROTO((rtx, rtx));
260 static void make_edges PROTO((int));
261 static void mark_label_ref PROTO((rtx, rtx, int));
262 static int delete_unreachable_blocks PROTO((void));
263 static int delete_block PROTO((int));
264 static void life_analysis_1 PROTO((rtx, int));
265 static void propagate_block PROTO((regset, rtx, rtx, int,
266 regset, int));
267 static rtx flow_delete_insn PROTO((rtx));
268 static int set_noop_p PROTO((rtx));
269 static int noop_move_p PROTO((rtx));
270 static void record_volatile_insns PROTO((rtx));
271 static void mark_regs_live_at_end PROTO((regset));
272 static int insn_dead_p PROTO((rtx, regset, int));
273 static int libcall_dead_p PROTO((rtx, regset, rtx, rtx));
274 static void mark_set_regs PROTO((regset, regset, rtx,
275 rtx, regset));
276 static void mark_set_1 PROTO((regset, regset, rtx,
277 rtx, regset));
278 #ifdef AUTO_INC_DEC
279 static void find_auto_inc PROTO((regset, rtx, rtx));
280 static int try_pre_increment_1 PROTO((rtx));
281 static int try_pre_increment PROTO((rtx, rtx, HOST_WIDE_INT));
282 #endif
283 static void mark_used_regs PROTO((regset, regset, rtx, int, rtx));
284 void dump_flow_info PROTO((FILE *));
285 static void add_pred_succ PROTO ((int, int, int_list_ptr *,
286 int_list_ptr *, int *, int *));
287 static int_list_ptr alloc_int_list_node PROTO ((int_list_block **));
288 static int_list_ptr add_int_list_node PROTO ((int_list_block **,
289 int_list **, int));
290 static void init_regset_vector PROTO ((regset *, int,
291 struct obstack *));
292 static void count_reg_sets_1 PROTO ((rtx));
293 static void count_reg_sets PROTO ((rtx));
294 static void count_reg_references PROTO ((rtx));
295 static void notice_stack_pointer_modification PROTO ((rtx, rtx));
297 /* Find basic blocks of the current function.
298 F is the first insn of the function and NREGS the number of register numbers
299 in use.
300 LIVE_REACHABLE_P is non-zero if the caller needs all live blocks to
301 be reachable. This turns on a kludge that causes the control flow
302 information to be inaccurate and not suitable for passes like GCSE. */
304 void
305 find_basic_blocks (f, nregs, file)
306 rtx f;
307 int nregs;
308 FILE *file;
310 register rtx insn;
311 register int i;
312 rtx nonlocal_label_list = nonlocal_label_rtx_list ();
313 int in_libcall_block = 0;
315 /* Count the basic blocks. Also find maximum insn uid value used. */
318 rtx prev_call = 0;
319 register RTX_CODE prev_code = JUMP_INSN;
320 register RTX_CODE code;
321 int eh_region = 0;
322 int call_had_abnormal_edge = 0;
324 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
326 /* Track when we are inside in LIBCALL block. */
327 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
328 && find_reg_note (insn, REG_LIBCALL, NULL_RTX))
329 in_libcall_block = 1;
331 code = GET_CODE (insn);
333 /* A basic block starts at label, or after something that can jump. */
334 if (code == CODE_LABEL
335 || (GET_RTX_CLASS (code) == 'i'
336 && (prev_code == JUMP_INSN
337 || (prev_code == CALL_INSN && call_had_abnormal_edge)
338 || prev_code == BARRIER)))
340 i++;
342 /* If the previous insn was a call that did not create an
343 abnormal edge, we want to add a nop so that the CALL_INSN
344 itself is not at basic_block_end. This allows us to easily
345 distinguish between normal calls and those which create
346 abnormal edges in the flow graph. */
348 if (i > 0 && !call_had_abnormal_edge && prev_call != 0)
350 rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
351 emit_insn_after (nop, prev_call);
354 /* We change the code of the CALL_INSN, so that it won't start a
355 new block. */
356 if (code == CALL_INSN && in_libcall_block)
357 code = INSN;
359 /* Record whether this call created an edge. */
360 if (code == CALL_INSN)
362 prev_call = insn;
363 call_had_abnormal_edge = (nonlocal_label_list != 0 || eh_region);
365 else if (code != NOTE && code != BARRIER)
366 prev_call = 0;
368 if (code != NOTE)
369 prev_code = code;
370 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
371 ++eh_region;
372 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
373 --eh_region;
375 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
376 && find_reg_note (insn, REG_RETVAL, NULL_RTX))
377 in_libcall_block = 0;
381 n_basic_blocks = i;
383 max_uid_for_flow = get_max_uid ();
384 #ifdef AUTO_INC_DEC
385 /* Leave space for insns life_analysis makes in some cases for auto-inc.
386 These cases are rare, so we don't need too much space. */
387 max_uid_for_flow += max_uid_for_flow / 10;
388 #endif
390 /* Allocate some tables that last till end of compiling this function
391 and some needed only in find_basic_blocks and life_analysis. */
393 basic_block_head = (rtx *) xmalloc (n_basic_blocks * sizeof (rtx));
394 basic_block_end = (rtx *) xmalloc (n_basic_blocks * sizeof (rtx));
395 basic_block_drops_in = (char *) xmalloc (n_basic_blocks);
396 basic_block_computed_jump_target = (char *) oballoc (n_basic_blocks);
397 basic_block_loop_depth = (short *) xmalloc (n_basic_blocks * sizeof (short));
398 uid_block_number
399 = (int *) xmalloc ((max_uid_for_flow + 1) * sizeof (int));
400 uid_volatile = (char *) xmalloc (max_uid_for_flow + 1);
401 bzero (uid_volatile, max_uid_for_flow + 1);
403 find_basic_blocks_1 (f, nonlocal_label_list);
406 /* For communication between find_basic_blocks_1 and its subroutines. */
408 /* An array of CODE_LABELs, indexed by UID for the start of the active
409 EH handler for each insn in F. */
410 static int *active_eh_region;
411 static int *nested_eh_region;
413 /* Element N nonzero if basic block N can actually be reached. */
415 static char *block_live_static;
417 /* List of label_refs to all labels whose addresses are taken
418 and used as data. */
419 static rtx label_value_list;
421 /* a list of non-local labels in the function. */
422 static rtx nonlocal_label_list;
424 /* Find all basic blocks of the function whose first insn is F.
425 Store the correct data in the tables that describe the basic blocks,
426 set up the chains of references for each CODE_LABEL, and
427 delete any entire basic blocks that cannot be reached.
429 NONLOCAL_LABELS is a list of non-local labels in the function.
430 Blocks that are otherwise unreachable may be reachable with a non-local
431 goto.
432 LIVE_REACHABLE_P is non-zero if the caller needs all live blocks to
433 be reachable. This turns on a kludge that causes the control flow
434 information to be inaccurate and not suitable for passes like GCSE. */
436 static void
437 find_basic_blocks_1 (f, nonlocal_labels)
438 rtx f, nonlocal_labels;
440 register rtx insn;
441 register int i;
442 register char *block_live = (char *) alloca (n_basic_blocks);
443 register char *block_marked = (char *) alloca (n_basic_blocks);
444 rtx note, eh_note;
445 enum rtx_code prev_code, code;
446 int depth, pass;
447 int in_libcall_block = 0;
448 int call_had_abnormal_edge = 0;
450 pass = 1;
451 active_eh_region = (int *) alloca ((max_uid_for_flow + 1) * sizeof (int));
452 nested_eh_region = (int *) alloca ((max_label_num () + 1) * sizeof (int));
453 nonlocal_label_list = nonlocal_labels;
454 restart:
456 label_value_list = 0;
457 block_live_static = block_live;
458 bzero (block_live, n_basic_blocks);
459 bzero (block_marked, n_basic_blocks);
460 bzero (basic_block_computed_jump_target, n_basic_blocks);
461 bzero ((char *) active_eh_region, (max_uid_for_flow + 1) * sizeof (int));
462 bzero ((char *) nested_eh_region, (max_label_num () + 1) * sizeof (int));
463 current_function_has_computed_jump = 0;
465 /* Initialize with just block 0 reachable and no blocks marked. */
466 if (n_basic_blocks > 0)
467 block_live[0] = 1;
469 /* Initialize the ref chain of each label to 0. Record where all the
470 blocks start and end and their depth in loops. For each insn, record
471 the block it is in. Also mark as reachable any blocks headed by labels
472 that must not be deleted. */
474 for (eh_note = NULL_RTX, insn = f, i = -1, prev_code = JUMP_INSN, depth = 1;
475 insn; insn = NEXT_INSN (insn))
478 /* Track when we are inside in LIBCALL block. */
479 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
480 && find_reg_note (insn, REG_LIBCALL, NULL_RTX))
481 in_libcall_block = 1;
483 code = GET_CODE (insn);
484 if (code == NOTE)
486 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
487 depth++;
488 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END)
489 depth--;
492 /* A basic block starts at label, or after something that can jump. */
493 else if (code == CODE_LABEL
494 || (GET_RTX_CLASS (code) == 'i'
495 && (prev_code == JUMP_INSN
496 || (prev_code == CALL_INSN && call_had_abnormal_edge)
497 || prev_code == BARRIER)))
499 basic_block_head[++i] = insn;
500 basic_block_end[i] = insn;
501 basic_block_loop_depth[i] = depth;
503 if (code == CODE_LABEL)
505 LABEL_REFS (insn) = insn;
506 /* Any label that cannot be deleted
507 is considered to start a reachable block. */
508 if (LABEL_PRESERVE_P (insn))
509 block_live[i] = 1;
513 else if (GET_RTX_CLASS (code) == 'i')
515 basic_block_end[i] = insn;
516 basic_block_loop_depth[i] = depth;
519 if (GET_RTX_CLASS (code) == 'i')
521 /* Make a list of all labels referred to other than by jumps. */
522 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
523 if (REG_NOTE_KIND (note) == REG_LABEL
524 && XEXP (note, 0) != eh_return_stub_label)
525 label_value_list = gen_rtx_EXPR_LIST (VOIDmode, XEXP (note, 0),
526 label_value_list);
529 /* Keep a lifo list of the currently active exception notes. */
530 if (GET_CODE (insn) == NOTE)
532 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
534 if (eh_note)
535 nested_eh_region [NOTE_BLOCK_NUMBER (insn)] =
536 NOTE_BLOCK_NUMBER (XEXP (eh_note, 0));
537 else
538 nested_eh_region [NOTE_BLOCK_NUMBER (insn)] = 0;
539 eh_note = gen_rtx_EXPR_LIST (VOIDmode,
540 insn, eh_note);
542 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
543 eh_note = XEXP (eh_note, 1);
545 /* If we encounter a CALL_INSN, note which exception handler it
546 might pass control to.
548 If doing asynchronous exceptions, record the active EH handler
549 for every insn, since most insns can throw. */
550 else if (eh_note
551 && (asynchronous_exceptions
552 || (GET_CODE (insn) == CALL_INSN
553 && ! in_libcall_block)))
554 active_eh_region[INSN_UID (insn)] =
555 NOTE_BLOCK_NUMBER (XEXP (eh_note, 0));
556 BLOCK_NUM (insn) = i;
558 /* We change the code of the CALL_INSN, so that it won't start a
559 new block. */
560 if (code == CALL_INSN && in_libcall_block)
561 code = INSN;
563 /* Record whether this call created an edge. */
564 if (code == CALL_INSN)
565 call_had_abnormal_edge = (nonlocal_label_list != 0 || eh_note);
567 if (code != NOTE)
568 prev_code = code;
570 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
571 && find_reg_note (insn, REG_RETVAL, NULL_RTX))
572 in_libcall_block = 0;
575 /* During the second pass, `n_basic_blocks' is only an upper bound.
576 Only perform the sanity check for the first pass, and on the second
577 pass ensure `n_basic_blocks' is set to the correct value. */
578 if (pass == 1 && i + 1 != n_basic_blocks)
579 abort ();
580 n_basic_blocks = i + 1;
582 /* Record which basic blocks control can drop in to. */
584 for (i = 0; i < n_basic_blocks; i++)
586 for (insn = PREV_INSN (basic_block_head[i]);
587 insn && GET_CODE (insn) == NOTE; insn = PREV_INSN (insn))
590 basic_block_drops_in[i] = insn && GET_CODE (insn) != BARRIER;
593 /* Now find which basic blocks can actually be reached
594 and put all jump insns' LABEL_REFS onto the ref-chains
595 of their target labels. */
597 if (n_basic_blocks > 0)
599 int something_marked = 1;
600 int deleted;
602 /* Pass over all blocks, marking each block that is reachable
603 and has not yet been marked.
604 Keep doing this until, in one pass, no blocks have been marked.
605 Then blocks_live and blocks_marked are identical and correct.
606 In addition, all jumps actually reachable have been marked. */
608 while (something_marked)
610 something_marked = 0;
611 for (i = 0; i < n_basic_blocks; i++)
612 if (block_live[i] && !block_marked[i])
614 block_marked[i] = 1;
615 something_marked = 1;
617 make_edges (i);
621 /* This should never happen. If it does that means we've computed an
622 incorrect flow graph, which can lead to aborts/crashes later in the
623 compiler or incorrect code generation.
625 We used to try and continue here, but that's just asking for trouble
626 later during the compile or at runtime. It's easier to debug the
627 problem here than later! */
628 for (i = 1; i < n_basic_blocks; i++)
629 if (block_live[i] && ! basic_block_drops_in[i]
630 && GET_CODE (basic_block_head[i]) == CODE_LABEL
631 && LABEL_REFS (basic_block_head[i]) == basic_block_head[i])
632 abort ();
634 deleted = delete_unreachable_blocks ();
636 /* There are pathological cases where one function calling hundreds of
637 nested inline functions can generate lots and lots of unreachable
638 blocks that jump can't delete. Since we don't use sparse matrices
639 a lot of memory will be needed to compile such functions.
640 Implementing sparse matrices is a fair bit of work and it is not
641 clear that they win more than they lose (we don't want to
642 unnecessarily slow down compilation of normal code). By making
643 another pass for the pathological case, we can greatly speed up
644 their compilation without hurting normal code. This works because
645 all the insns in the unreachable blocks have either been deleted or
646 turned into notes.
647 Note that we're talking about reducing memory usage by 10's of
648 megabytes and reducing compilation time by several minutes. */
649 /* ??? The choice of when to make another pass is a bit arbitrary,
650 and was derived from empirical data. */
651 if (pass == 1
652 && deleted > 200)
654 pass++;
655 n_basic_blocks -= deleted;
656 /* `n_basic_blocks' may not be correct at this point: two previously
657 separate blocks may now be merged. That's ok though as we
658 recalculate it during the second pass. It certainly can't be
659 any larger than the current value. */
660 goto restart;
665 /* Record INSN's block number as BB. */
667 void
668 set_block_num (insn, bb)
669 rtx insn;
670 int bb;
672 if (INSN_UID (insn) >= max_uid_for_flow)
674 /* Add one-eighth the size so we don't keep calling xrealloc. */
675 max_uid_for_flow = INSN_UID (insn) + (INSN_UID (insn) + 7) / 8;
676 uid_block_number = (int *)
677 xrealloc (uid_block_number, (max_uid_for_flow + 1) * sizeof (int));
679 BLOCK_NUM (insn) = bb;
683 /* Subroutines of find_basic_blocks. */
685 /* For basic block I, make edges and mark live all blocks which are reachable
686 from it. */
687 static void
688 make_edges (i)
689 int i;
691 rtx insn, x;
693 if (i + 1 < n_basic_blocks && basic_block_drops_in[i + 1])
694 block_live_static[i + 1] = 1;
695 insn = basic_block_end[i];
696 if (GET_CODE (insn) == JUMP_INSN)
697 mark_label_ref (PATTERN (insn), insn, 0);
699 /* If we have any forced labels, mark them as potentially reachable from
700 this block. */
701 for (x = forced_labels; x; x = XEXP (x, 1))
702 if (! LABEL_REF_NONLOCAL_P (x))
703 mark_label_ref (gen_rtx_LABEL_REF (VOIDmode, XEXP (x, 0)),
704 insn, 0);
706 /* Now scan the insns for this block, we may need to make edges for some of
707 them to various non-obvious locations (exception handlers, nonlocal
708 labels, etc). */
709 for (insn = basic_block_head[i];
710 insn != NEXT_INSN (basic_block_end[i]);
711 insn = NEXT_INSN (insn))
713 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
715 rtx note;
716 /* References to labels in non-jumping insns have REG_LABEL notes
717 attached to them.
719 This can happen for computed gotos; we don't care about them
720 here since the values are also on the label_value_list and will
721 be marked live if we find a live computed goto.
723 This can also happen when we take the address of a label to pass
724 as an argument to __throw. Note throw only uses the value to
725 determine what handler should be called -- ie the label is not
726 used as a jump target, it just marks regions in the code.
728 In theory we should be able to ignore the REG_LABEL notes, but
729 we have to make sure that the label and associated insns aren't
730 marked dead, so we make the block in question live and create an
731 edge from this insn to the label. This is not strictly correct,
732 but it is close enough for now.
734 See below for code that handles the eh_stub label specially. */
735 for (note = REG_NOTES (insn);
736 note;
737 note = XEXP (note, 1))
739 if (REG_NOTE_KIND (note) == REG_LABEL
740 && XEXP (note, 0) != eh_return_stub_label)
742 x = XEXP (note, 0);
743 block_live_static[BLOCK_NUM (x)] = 1;
744 mark_label_ref (gen_rtx_LABEL_REF (VOIDmode, x),
745 insn, 0);
749 /* If this is a computed jump, then mark it as reaching everything
750 on the label_value_list and forced_labels list. */
751 if (computed_jump_p (insn))
753 current_function_has_computed_jump = 1;
754 for (x = label_value_list; x; x = XEXP (x, 1))
756 int b = BLOCK_NUM (XEXP (x, 0));
757 basic_block_computed_jump_target[b] = 1;
758 mark_label_ref (gen_rtx_LABEL_REF (VOIDmode, XEXP (x, 0)),
759 insn, 0);
762 for (x = forced_labels; x; x = XEXP (x, 1))
764 int b = BLOCK_NUM (XEXP (x, 0));
765 basic_block_computed_jump_target[b] = 1;
766 mark_label_ref (gen_rtx_LABEL_REF (VOIDmode, XEXP (x, 0)),
767 insn, 0);
771 /* If this is a CALL_INSN, then mark it as reaching the active EH
772 handler for this CALL_INSN. If we're handling asynchronous
773 exceptions mark every insn as reaching the active EH handler.
775 Also mark the CALL_INSN as reaching any nonlocal goto sites. */
776 else if (asynchronous_exceptions
777 || (GET_CODE (insn) == CALL_INSN
778 && ! find_reg_note (insn, REG_RETVAL, NULL_RTX)))
780 if (active_eh_region[INSN_UID (insn)])
782 int region;
783 handler_info *ptr;
784 region = active_eh_region[INSN_UID (insn)];
785 for ( ; region;
786 region = nested_eh_region[region])
788 ptr = get_first_handler (region);
789 for ( ; ptr ; ptr = ptr->next)
790 mark_label_ref (gen_rtx_LABEL_REF (VOIDmode,
791 ptr->handler_label),
792 insn, 0);
795 if (! asynchronous_exceptions)
797 for (x = nonlocal_label_list; x; x = XEXP (x, 1))
798 mark_label_ref (gen_rtx_LABEL_REF (VOIDmode, XEXP (x, 0)),
799 insn, 0);
801 /* ??? This could be made smarter: in some cases it's possible
802 to tell that certain calls will not do a nonlocal goto.
804 For example, if the nested functions that do the nonlocal
805 gotos do not have their addresses taken, then only calls to
806 those functions or to other nested functions that use them
807 could possibly do nonlocal gotos. */
811 /* We know something about the structure of the function __throw in
812 libgcc2.c. It is the only function that ever contains eh_stub labels.
813 It modifies its return address so that the last block returns to one of
814 the eh_stub labels within it. So we have to make additional edges in
815 the flow graph. */
816 if (i + 1 == n_basic_blocks && eh_return_stub_label != 0)
818 mark_label_ref (gen_rtx_LABEL_REF (VOIDmode, eh_return_stub_label),
819 basic_block_end[i], 0);
823 /* Check expression X for label references;
824 if one is found, add INSN to the label's chain of references.
826 CHECKDUP means check for and avoid creating duplicate references
827 from the same insn. Such duplicates do no serious harm but
828 can slow life analysis. CHECKDUP is set only when duplicates
829 are likely. */
831 static void
832 mark_label_ref (x, insn, checkdup)
833 rtx x, insn;
834 int checkdup;
836 register RTX_CODE code;
837 register int i;
838 register char *fmt;
840 /* We can be called with NULL when scanning label_value_list. */
841 if (x == 0)
842 return;
844 code = GET_CODE (x);
845 if (code == LABEL_REF)
847 register rtx label = XEXP (x, 0);
848 register rtx y;
849 if (GET_CODE (label) != CODE_LABEL)
850 abort ();
851 /* If the label was never emitted, this insn is junk,
852 but avoid a crash trying to refer to BLOCK_NUM (label).
853 This can happen as a result of a syntax error
854 and a diagnostic has already been printed. */
855 if (INSN_UID (label) == 0)
856 return;
857 CONTAINING_INSN (x) = insn;
858 /* if CHECKDUP is set, check for duplicate ref from same insn
859 and don't insert. */
860 if (checkdup)
861 for (y = LABEL_REFS (label); y != label; y = LABEL_NEXTREF (y))
862 if (CONTAINING_INSN (y) == insn)
863 return;
864 LABEL_NEXTREF (x) = LABEL_REFS (label);
865 LABEL_REFS (label) = x;
866 block_live_static[BLOCK_NUM (label)] = 1;
867 return;
870 fmt = GET_RTX_FORMAT (code);
871 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
873 if (fmt[i] == 'e')
874 mark_label_ref (XEXP (x, i), insn, 0);
875 if (fmt[i] == 'E')
877 register int j;
878 for (j = 0; j < XVECLEN (x, i); j++)
879 mark_label_ref (XVECEXP (x, i, j), insn, 1);
884 /* Now delete the code for any basic blocks that can't be reached.
885 They can occur because jump_optimize does not recognize unreachable loops
886 as unreachable.
887 Return the number of deleted blocks. */
888 static int
889 delete_unreachable_blocks ()
891 int deleted_handler = 0;
892 int deleted = 0;
893 int i;
894 rtx insn;
896 for (i = 0; i < n_basic_blocks; i++)
897 if (! block_live_static[i])
899 deleted++;
901 deleted_handler |= delete_block (i);
904 /* If we deleted an exception handler, we may have EH region
905 begin/end blocks to remove as well. */
906 if (deleted_handler)
907 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
908 if (GET_CODE (insn) == NOTE)
910 if ((NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG) ||
911 (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
913 int num = CODE_LABEL_NUMBER (insn);
914 /* A NULL handler indicates a region is no longer needed */
915 if (get_first_handler (num) == NULL)
917 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
918 NOTE_SOURCE_FILE (insn) = 0;
922 return deleted;
925 /* Delete the insns in a (non-live) block. We physically delete every
926 non-note insn except the start and end (so basic_block_head/end needn't
927 be updated), we turn the latter into NOTE_INSN_DELETED notes.
929 We use to "delete" the insns by turning them into notes, but we may be
930 deleting lots of insns that subsequent passes would otherwise have to
931 process. Secondly, lots of deleted blocks in a row can really slow down
932 propagate_block since it will otherwise process insn-turned-notes multiple
933 times when it looks for loop begin/end notes.
935 Return nonzero if we deleted an exception handler. */
936 static int
937 delete_block (i)
938 int i;
940 int deleted_handler = 0;
941 rtx insn;
943 if (basic_block_head[i] != basic_block_end[i])
945 /* It would be quicker to delete all of these with a single
946 unchaining, rather than one at a time, but we need to keep
947 the NOTE's. */
948 insn = NEXT_INSN (basic_block_head[i]);
949 while (insn != basic_block_end[i])
951 if (GET_CODE (insn) == BARRIER)
952 abort ();
953 else if (GET_CODE (insn) != NOTE)
954 insn = flow_delete_insn (insn);
955 else
956 insn = NEXT_INSN (insn);
960 insn = basic_block_head[i];
961 if (GET_CODE (insn) != NOTE)
963 /* Turn the head into a deleted insn note. */
964 if (GET_CODE (insn) == BARRIER)
965 abort ();
967 /* If the head of this block is a CODE_LABEL, then it might
968 be the label for an exception handler which can't be
969 reached.
971 We need to remove the label from the exception_handler_label
972 list and remove the associated NOTE_EH_REGION_BEG and
973 NOTE_EH_REGION_END notes. */
974 if (GET_CODE (insn) == CODE_LABEL)
976 rtx x, *prev = &exception_handler_labels;
978 for (x = exception_handler_labels; x; x = XEXP (x, 1))
980 if (XEXP (x, 0) == insn)
982 /* Found a match, splice this label out of the
983 EH label list. */
984 *prev = XEXP (x, 1);
985 XEXP (x, 1) = NULL_RTX;
986 XEXP (x, 0) = NULL_RTX;
988 /* Remove the handler from all regions */
989 remove_handler (insn);
990 deleted_handler = 1;
991 break;
993 prev = &XEXP (x, 1);
997 PUT_CODE (insn, NOTE);
998 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
999 NOTE_SOURCE_FILE (insn) = 0;
1001 insn = basic_block_end[i];
1002 if (GET_CODE (insn) != NOTE)
1004 /* Turn the tail into a deleted insn note. */
1005 if (GET_CODE (insn) == BARRIER)
1006 abort ();
1007 PUT_CODE (insn, NOTE);
1008 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1009 NOTE_SOURCE_FILE (insn) = 0;
1011 /* BARRIERs are between basic blocks, not part of one.
1012 Delete a BARRIER if the preceding jump is deleted.
1013 We cannot alter a BARRIER into a NOTE
1014 because it is too short; but we can really delete
1015 it because it is not part of a basic block. */
1016 if (NEXT_INSN (insn) != 0
1017 && GET_CODE (NEXT_INSN (insn)) == BARRIER)
1018 delete_insn (NEXT_INSN (insn));
1020 /* Each time we delete some basic blocks,
1021 see if there is a jump around them that is
1022 being turned into a no-op. If so, delete it. */
1024 if (block_live_static[i - 1])
1026 register int j;
1027 for (j = i + 1; j < n_basic_blocks; j++)
1028 if (block_live_static[j])
1030 rtx label;
1031 insn = basic_block_end[i - 1];
1032 if (GET_CODE (insn) == JUMP_INSN
1033 /* An unconditional jump is the only possibility
1034 we must check for, since a conditional one
1035 would make these blocks live. */
1036 && simplejump_p (insn)
1037 && (label = XEXP (SET_SRC (PATTERN (insn)), 0), 1)
1038 && INSN_UID (label) != 0
1039 && BLOCK_NUM (label) == j)
1041 int k;
1043 /* The deleted blocks still show up in the cfg,
1044 so we must set basic_block_drops_in for blocks
1045 I to J inclusive to keep the cfg accurate. */
1046 for (k = i; k <= j; k++)
1047 basic_block_drops_in[k] = 1;
1049 PUT_CODE (insn, NOTE);
1050 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1051 NOTE_SOURCE_FILE (insn) = 0;
1052 if (GET_CODE (NEXT_INSN (insn)) != BARRIER)
1053 abort ();
1054 delete_insn (NEXT_INSN (insn));
1056 break;
1060 return deleted_handler;
1063 /* Delete INSN by patching it out.
1064 Return the next insn. */
1066 static rtx
1067 flow_delete_insn (insn)
1068 rtx insn;
1070 /* ??? For the moment we assume we don't have to watch for NULLs here
1071 since the start/end of basic blocks aren't deleted like this. */
1072 NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
1073 PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
1074 return NEXT_INSN (insn);
1077 /* Perform data flow analysis.
1078 F is the first insn of the function and NREGS the number of register numbers
1079 in use. */
1081 void
1082 life_analysis (f, nregs, file)
1083 rtx f;
1084 int nregs;
1085 FILE *file;
1087 #ifdef ELIMINABLE_REGS
1088 register size_t i;
1089 static struct {int from, to; } eliminables[] = ELIMINABLE_REGS;
1090 #endif
1092 /* Record which registers will be eliminated. We use this in
1093 mark_used_regs. */
1095 CLEAR_HARD_REG_SET (elim_reg_set);
1097 #ifdef ELIMINABLE_REGS
1098 for (i = 0; i < sizeof eliminables / sizeof eliminables[0]; i++)
1099 SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
1100 #else
1101 SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
1102 #endif
1104 life_analysis_1 (f, nregs);
1105 if (file)
1106 dump_flow_info (file);
1108 free_basic_block_vars (1);
1111 /* Free the variables allocated by find_basic_blocks.
1113 KEEP_HEAD_END_P is non-zero if basic_block_head and basic_block_end
1114 are not to be freed. */
1116 void
1117 free_basic_block_vars (keep_head_end_p)
1118 int keep_head_end_p;
1120 if (basic_block_drops_in)
1122 free (basic_block_drops_in);
1123 /* Tell dump_flow_info this isn't available anymore. */
1124 basic_block_drops_in = 0;
1126 if (basic_block_loop_depth)
1128 free (basic_block_loop_depth);
1129 basic_block_loop_depth = 0;
1131 if (uid_block_number)
1133 free (uid_block_number);
1134 uid_block_number = 0;
1136 if (uid_volatile)
1138 free (uid_volatile);
1139 uid_volatile = 0;
1142 if (! keep_head_end_p && basic_block_head)
1144 free (basic_block_head);
1145 basic_block_head = 0;
1146 free (basic_block_end);
1147 basic_block_end = 0;
1151 /* Return nonzero if the destination of SET equals the source. */
1152 static int
1153 set_noop_p (set)
1154 rtx set;
1156 rtx src = SET_SRC (set);
1157 rtx dst = SET_DEST (set);
1158 if (GET_CODE (src) == REG && GET_CODE (dst) == REG
1159 && REGNO (src) == REGNO (dst))
1160 return 1;
1161 if (GET_CODE (src) != SUBREG || GET_CODE (dst) != SUBREG
1162 || SUBREG_WORD (src) != SUBREG_WORD (dst))
1163 return 0;
1164 src = SUBREG_REG (src);
1165 dst = SUBREG_REG (dst);
1166 if (GET_CODE (src) == REG && GET_CODE (dst) == REG
1167 && REGNO (src) == REGNO (dst))
1168 return 1;
1169 return 0;
1172 /* Return nonzero if an insn consists only of SETs, each of which only sets a
1173 value to itself. */
1174 static int
1175 noop_move_p (insn)
1176 rtx insn;
1178 rtx pat = PATTERN (insn);
1180 /* Insns carrying these notes are useful later on. */
1181 if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
1182 return 0;
1184 if (GET_CODE (pat) == SET && set_noop_p (pat))
1185 return 1;
1187 if (GET_CODE (pat) == PARALLEL)
1189 int i;
1190 /* If nothing but SETs of registers to themselves,
1191 this insn can also be deleted. */
1192 for (i = 0; i < XVECLEN (pat, 0); i++)
1194 rtx tem = XVECEXP (pat, 0, i);
1196 if (GET_CODE (tem) == USE
1197 || GET_CODE (tem) == CLOBBER)
1198 continue;
1200 if (GET_CODE (tem) != SET || ! set_noop_p (tem))
1201 return 0;
1204 return 1;
1206 return 0;
1209 static void
1210 notice_stack_pointer_modification (x, pat)
1211 rtx x;
1212 rtx pat ATTRIBUTE_UNUSED;
1214 if (x == stack_pointer_rtx
1215 /* The stack pointer is only modified indirectly as the result
1216 of a push until later in flow. See the comments in rtl.texi
1217 regarding Embedded Side-Effects on Addresses. */
1218 || (GET_CODE (x) == MEM
1219 && (GET_CODE (XEXP (x, 0)) == PRE_DEC
1220 || GET_CODE (XEXP (x, 0)) == PRE_INC
1221 || GET_CODE (XEXP (x, 0)) == POST_DEC
1222 || GET_CODE (XEXP (x, 0)) == POST_INC)
1223 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx))
1224 current_function_sp_is_unchanging = 0;
1227 /* Record which insns refer to any volatile memory
1228 or for any reason can't be deleted just because they are dead stores.
1229 Also, delete any insns that copy a register to itself.
1230 And see if the stack pointer is modified. */
1231 static void
1232 record_volatile_insns (f)
1233 rtx f;
1235 rtx insn;
1236 for (insn = f; insn; insn = NEXT_INSN (insn))
1238 enum rtx_code code1 = GET_CODE (insn);
1239 if (code1 == CALL_INSN)
1240 INSN_VOLATILE (insn) = 1;
1241 else if (code1 == INSN || code1 == JUMP_INSN)
1243 if (GET_CODE (PATTERN (insn)) != USE
1244 && volatile_refs_p (PATTERN (insn)))
1245 INSN_VOLATILE (insn) = 1;
1247 /* A SET that makes space on the stack cannot be dead.
1248 (Such SETs occur only for allocating variable-size data,
1249 so they will always have a PLUS or MINUS according to the
1250 direction of stack growth.)
1251 Even if this function never uses this stack pointer value,
1252 signal handlers do! */
1253 else if (code1 == INSN && GET_CODE (PATTERN (insn)) == SET
1254 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
1255 #ifdef STACK_GROWS_DOWNWARD
1256 && GET_CODE (SET_SRC (PATTERN (insn))) == MINUS
1257 #else
1258 && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
1259 #endif
1260 && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx)
1261 INSN_VOLATILE (insn) = 1;
1263 /* Delete (in effect) any obvious no-op moves. */
1264 else if (noop_move_p (insn))
1266 PUT_CODE (insn, NOTE);
1267 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1268 NOTE_SOURCE_FILE (insn) = 0;
1272 /* Check if insn modifies the stack pointer. */
1273 if ( current_function_sp_is_unchanging
1274 && GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1275 note_stores (PATTERN (insn), notice_stack_pointer_modification);
1279 /* Mark those regs which are needed at the end of the function as live
1280 at the end of the last basic block. */
1281 static void
1282 mark_regs_live_at_end (set)
1283 regset set;
1285 int i;
1287 #ifdef EXIT_IGNORE_STACK
1288 if (! EXIT_IGNORE_STACK
1289 || (! FRAME_POINTER_REQUIRED
1290 && ! current_function_calls_alloca
1291 && flag_omit_frame_pointer)
1292 || current_function_sp_is_unchanging)
1293 #endif
1294 /* If exiting needs the right stack value,
1295 consider the stack pointer live at the end of the function. */
1296 SET_REGNO_REG_SET (set, STACK_POINTER_REGNUM);
1298 /* Mark the frame pointer is needed at the end of the function. If
1299 we end up eliminating it, it will be removed from the live list
1300 of each basic block by reload. */
1302 SET_REGNO_REG_SET (set, FRAME_POINTER_REGNUM);
1303 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
1304 /* If they are different, also mark the hard frame pointer as live */
1305 SET_REGNO_REG_SET (set, HARD_FRAME_POINTER_REGNUM);
1306 #endif
1309 /* Mark all global registers and all registers used by the epilogue
1310 as being live at the end of the function since they may be
1311 referenced by our caller. */
1312 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1313 if (global_regs[i]
1314 #ifdef EPILOGUE_USES
1315 || EPILOGUE_USES (i)
1316 #endif
1318 SET_REGNO_REG_SET (set, i);
1321 /* Determine which registers are live at the start of each
1322 basic block of the function whose first insn is F.
1323 NREGS is the number of registers used in F.
1324 We allocate the vector basic_block_live_at_start
1325 and the regsets that it points to, and fill them with the data.
1326 regset_size and regset_bytes are also set here. */
1328 static void
1329 life_analysis_1 (f, nregs)
1330 rtx f;
1331 int nregs;
1333 int first_pass;
1334 int changed;
1335 /* For each basic block, a bitmask of regs
1336 live on exit from the block. */
1337 regset *basic_block_live_at_end;
1338 /* For each basic block, a bitmask of regs
1339 live on entry to a successor-block of this block.
1340 If this does not match basic_block_live_at_end,
1341 that must be updated, and the block must be rescanned. */
1342 regset *basic_block_new_live_at_end;
1343 /* For each basic block, a bitmask of regs
1344 whose liveness at the end of the basic block
1345 can make a difference in which regs are live on entry to the block.
1346 These are the regs that are set within the basic block,
1347 possibly excluding those that are used after they are set. */
1348 regset *basic_block_significant;
1349 register int i;
1351 struct obstack flow_obstack;
1353 gcc_obstack_init (&flow_obstack);
1355 max_regno = nregs;
1357 bzero (regs_ever_live, sizeof regs_ever_live);
1359 /* Allocate and zero out many data structures
1360 that will record the data from lifetime analysis. */
1362 allocate_for_life_analysis ();
1364 reg_next_use = (rtx *) alloca (nregs * sizeof (rtx));
1365 bzero ((char *) reg_next_use, nregs * sizeof (rtx));
1367 /* Set up several regset-vectors used internally within this function.
1368 Their meanings are documented above, with their declarations. */
1370 basic_block_live_at_end
1371 = (regset *) alloca (n_basic_blocks * sizeof (regset));
1373 /* Don't use alloca since that leads to a crash rather than an error message
1374 if there isn't enough space.
1375 Don't use oballoc since we may need to allocate other things during
1376 this function on the temporary obstack. */
1377 init_regset_vector (basic_block_live_at_end, n_basic_blocks, &flow_obstack);
1379 basic_block_new_live_at_end
1380 = (regset *) alloca (n_basic_blocks * sizeof (regset));
1381 init_regset_vector (basic_block_new_live_at_end, n_basic_blocks,
1382 &flow_obstack);
1384 basic_block_significant
1385 = (regset *) alloca (n_basic_blocks * sizeof (regset));
1386 init_regset_vector (basic_block_significant, n_basic_blocks, &flow_obstack);
1388 /* Assume that the stack pointer is unchanging if alloca hasn't been used.
1389 This will be cleared by record_volatile_insns if it encounters an insn
1390 which modifies the stack pointer. */
1391 current_function_sp_is_unchanging = !current_function_calls_alloca;
1393 record_volatile_insns (f);
1395 if (n_basic_blocks > 0)
1397 mark_regs_live_at_end (basic_block_live_at_end[n_basic_blocks - 1]);
1398 COPY_REG_SET (basic_block_new_live_at_end[n_basic_blocks - 1],
1399 basic_block_live_at_end[n_basic_blocks - 1]);
1402 /* Propagate life info through the basic blocks
1403 around the graph of basic blocks.
1405 This is a relaxation process: each time a new register
1406 is live at the end of the basic block, we must scan the block
1407 to determine which registers are, as a consequence, live at the beginning
1408 of that block. These registers must then be marked live at the ends
1409 of all the blocks that can transfer control to that block.
1410 The process continues until it reaches a fixed point. */
1412 first_pass = 1;
1413 changed = 1;
1414 while (changed)
1416 changed = 0;
1417 for (i = n_basic_blocks - 1; i >= 0; i--)
1419 int consider = first_pass;
1420 int must_rescan = first_pass;
1421 register int j;
1423 if (!first_pass)
1425 /* Set CONSIDER if this block needs thinking about at all
1426 (that is, if the regs live now at the end of it
1427 are not the same as were live at the end of it when
1428 we last thought about it).
1429 Set must_rescan if it needs to be thought about
1430 instruction by instruction (that is, if any additional
1431 reg that is live at the end now but was not live there before
1432 is one of the significant regs of this basic block). */
1434 EXECUTE_IF_AND_COMPL_IN_REG_SET
1435 (basic_block_new_live_at_end[i],
1436 basic_block_live_at_end[i], 0, j,
1438 consider = 1;
1439 if (REGNO_REG_SET_P (basic_block_significant[i], j))
1441 must_rescan = 1;
1442 goto done;
1445 done:
1446 if (! consider)
1447 continue;
1450 /* The live_at_start of this block may be changing,
1451 so another pass will be required after this one. */
1452 changed = 1;
1454 if (! must_rescan)
1456 /* No complete rescan needed;
1457 just record those variables newly known live at end
1458 as live at start as well. */
1459 IOR_AND_COMPL_REG_SET (basic_block_live_at_start[i],
1460 basic_block_new_live_at_end[i],
1461 basic_block_live_at_end[i]);
1463 IOR_AND_COMPL_REG_SET (basic_block_live_at_end[i],
1464 basic_block_new_live_at_end[i],
1465 basic_block_live_at_end[i]);
1467 else
1469 /* Update the basic_block_live_at_start
1470 by propagation backwards through the block. */
1471 COPY_REG_SET (basic_block_live_at_end[i],
1472 basic_block_new_live_at_end[i]);
1473 COPY_REG_SET (basic_block_live_at_start[i],
1474 basic_block_live_at_end[i]);
1475 propagate_block (basic_block_live_at_start[i],
1476 basic_block_head[i], basic_block_end[i], 0,
1477 first_pass ? basic_block_significant[i]
1478 : (regset) 0,
1483 register rtx jump, head;
1485 /* Update the basic_block_new_live_at_end's of the block
1486 that falls through into this one (if any). */
1487 head = basic_block_head[i];
1488 if (basic_block_drops_in[i])
1489 IOR_REG_SET (basic_block_new_live_at_end[i-1],
1490 basic_block_live_at_start[i]);
1492 /* Update the basic_block_new_live_at_end's of
1493 all the blocks that jump to this one. */
1494 if (GET_CODE (head) == CODE_LABEL)
1495 for (jump = LABEL_REFS (head);
1496 jump != head;
1497 jump = LABEL_NEXTREF (jump))
1499 register int from_block = BLOCK_NUM (CONTAINING_INSN (jump));
1500 IOR_REG_SET (basic_block_new_live_at_end[from_block],
1501 basic_block_live_at_start[i]);
1504 #ifdef USE_C_ALLOCA
1505 alloca (0);
1506 #endif
1508 first_pass = 0;
1511 /* The only pseudos that are live at the beginning of the function are
1512 those that were not set anywhere in the function. local-alloc doesn't
1513 know how to handle these correctly, so mark them as not local to any
1514 one basic block. */
1516 if (n_basic_blocks > 0)
1517 EXECUTE_IF_SET_IN_REG_SET (basic_block_live_at_start[0],
1518 FIRST_PSEUDO_REGISTER, i,
1520 REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL;
1523 /* Now the life information is accurate.
1524 Make one more pass over each basic block
1525 to delete dead stores, create autoincrement addressing
1526 and record how many times each register is used, is set, or dies.
1528 To save time, we operate directly in basic_block_live_at_end[i],
1529 thus destroying it (in fact, converting it into a copy of
1530 basic_block_live_at_start[i]). This is ok now because
1531 basic_block_live_at_end[i] is no longer used past this point. */
1533 max_scratch = 0;
1535 for (i = 0; i < n_basic_blocks; i++)
1537 propagate_block (basic_block_live_at_end[i],
1538 basic_block_head[i], basic_block_end[i], 1,
1539 (regset) 0, i);
1540 #ifdef USE_C_ALLOCA
1541 alloca (0);
1542 #endif
1545 #if 0
1546 /* Something live during a setjmp should not be put in a register
1547 on certain machines which restore regs from stack frames
1548 rather than from the jmpbuf.
1549 But we don't need to do this for the user's variables, since
1550 ANSI says only volatile variables need this. */
1551 #ifdef LONGJMP_RESTORE_FROM_STACK
1552 EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
1553 FIRST_PSEUDO_REGISTER, i,
1555 if (regno_reg_rtx[i] != 0
1556 && ! REG_USERVAR_P (regno_reg_rtx[i]))
1558 REG_LIVE_LENGTH (i) = -1;
1559 REG_BASIC_BLOCK (i) = -1;
1562 #endif
1563 #endif
1565 /* We have a problem with any pseudoreg that
1566 lives across the setjmp. ANSI says that if a
1567 user variable does not change in value
1568 between the setjmp and the longjmp, then the longjmp preserves it.
1569 This includes longjmp from a place where the pseudo appears dead.
1570 (In principle, the value still exists if it is in scope.)
1571 If the pseudo goes in a hard reg, some other value may occupy
1572 that hard reg where this pseudo is dead, thus clobbering the pseudo.
1573 Conclusion: such a pseudo must not go in a hard reg. */
1574 EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
1575 FIRST_PSEUDO_REGISTER, i,
1577 if (regno_reg_rtx[i] != 0)
1579 REG_LIVE_LENGTH (i) = -1;
1580 REG_BASIC_BLOCK (i) = -1;
1585 free_regset_vector (basic_block_live_at_end, n_basic_blocks);
1586 free_regset_vector (basic_block_new_live_at_end, n_basic_blocks);
1587 free_regset_vector (basic_block_significant, n_basic_blocks);
1588 basic_block_live_at_end = (regset *)0;
1589 basic_block_new_live_at_end = (regset *)0;
1590 basic_block_significant = (regset *)0;
1592 obstack_free (&flow_obstack, NULL_PTR);
1595 /* Subroutines of life analysis. */
1597 /* Allocate the permanent data structures that represent the results
1598 of life analysis. Not static since used also for stupid life analysis. */
1600 void
1601 allocate_for_life_analysis ()
1603 register int i;
1605 /* Recalculate the register space, in case it has grown. Old style
1606 vector oriented regsets would set regset_{size,bytes} here also. */
1607 allocate_reg_info (max_regno, FALSE, FALSE);
1609 /* Because both reg_scan and flow_analysis want to set up the REG_N_SETS
1610 information, explicitly reset it here. The allocation should have
1611 already happened on the previous reg_scan pass. Make sure in case
1612 some more registers were allocated. */
1613 for (i = 0; i < max_regno; i++)
1614 REG_N_SETS (i) = 0;
1616 basic_block_live_at_start
1617 = (regset *) oballoc (n_basic_blocks * sizeof (regset));
1618 init_regset_vector (basic_block_live_at_start, n_basic_blocks,
1619 function_obstack);
1621 regs_live_at_setjmp = OBSTACK_ALLOC_REG_SET (function_obstack);
1622 CLEAR_REG_SET (regs_live_at_setjmp);
1625 /* Make each element of VECTOR point at a regset. The vector has
1626 NELTS elements, and space is allocated from the ALLOC_OBSTACK
1627 obstack. */
1629 static void
1630 init_regset_vector (vector, nelts, alloc_obstack)
1631 regset *vector;
1632 int nelts;
1633 struct obstack *alloc_obstack;
1635 register int i;
1637 for (i = 0; i < nelts; i++)
1639 vector[i] = OBSTACK_ALLOC_REG_SET (alloc_obstack);
1640 CLEAR_REG_SET (vector[i]);
1644 /* Release any additional space allocated for each element of VECTOR point
1645 other than the regset header itself. The vector has NELTS elements. */
1647 void
1648 free_regset_vector (vector, nelts)
1649 regset *vector;
1650 int nelts;
1652 register int i;
1654 for (i = 0; i < nelts; i++)
1655 FREE_REG_SET (vector[i]);
1658 /* Compute the registers live at the beginning of a basic block
1659 from those live at the end.
1661 When called, OLD contains those live at the end.
1662 On return, it contains those live at the beginning.
1663 FIRST and LAST are the first and last insns of the basic block.
1665 FINAL is nonzero if we are doing the final pass which is not
1666 for computing the life info (since that has already been done)
1667 but for acting on it. On this pass, we delete dead stores,
1668 set up the logical links and dead-variables lists of instructions,
1669 and merge instructions for autoincrement and autodecrement addresses.
1671 SIGNIFICANT is nonzero only the first time for each basic block.
1672 If it is nonzero, it points to a regset in which we store
1673 a 1 for each register that is set within the block.
1675 BNUM is the number of the basic block. */
1677 static void
1678 propagate_block (old, first, last, final, significant, bnum)
1679 register regset old;
1680 rtx first;
1681 rtx last;
1682 int final;
1683 regset significant;
1684 int bnum;
1686 register rtx insn;
1687 rtx prev;
1688 regset live;
1689 regset dead;
1691 /* The following variables are used only if FINAL is nonzero. */
1692 /* This vector gets one element for each reg that has been live
1693 at any point in the basic block that has been scanned so far.
1694 SOMETIMES_MAX says how many elements are in use so far. */
1695 register int *regs_sometimes_live;
1696 int sometimes_max = 0;
1697 /* This regset has 1 for each reg that we have seen live so far.
1698 It and REGS_SOMETIMES_LIVE are updated together. */
1699 regset maxlive;
1701 /* The loop depth may change in the middle of a basic block. Since we
1702 scan from end to beginning, we start with the depth at the end of the
1703 current basic block, and adjust as we pass ends and starts of loops. */
1704 loop_depth = basic_block_loop_depth[bnum];
1706 dead = ALLOCA_REG_SET ();
1707 live = ALLOCA_REG_SET ();
1709 cc0_live = 0;
1710 last_mem_set = 0;
1712 /* Include any notes at the end of the block in the scan.
1713 This is in case the block ends with a call to setjmp. */
1715 while (NEXT_INSN (last) != 0 && GET_CODE (NEXT_INSN (last)) == NOTE)
1717 /* Look for loop boundaries, we are going forward here. */
1718 last = NEXT_INSN (last);
1719 if (NOTE_LINE_NUMBER (last) == NOTE_INSN_LOOP_BEG)
1720 loop_depth++;
1721 else if (NOTE_LINE_NUMBER (last) == NOTE_INSN_LOOP_END)
1722 loop_depth--;
1725 if (final)
1727 register int i;
1729 num_scratch = 0;
1730 maxlive = ALLOCA_REG_SET ();
1731 COPY_REG_SET (maxlive, old);
1732 regs_sometimes_live = (int *) alloca (max_regno * sizeof (int));
1734 /* Process the regs live at the end of the block.
1735 Enter them in MAXLIVE and REGS_SOMETIMES_LIVE.
1736 Also mark them as not local to any one basic block. */
1737 EXECUTE_IF_SET_IN_REG_SET (old, 0, i,
1739 REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL;
1740 regs_sometimes_live[sometimes_max] = i;
1741 sometimes_max++;
1745 /* Scan the block an insn at a time from end to beginning. */
1747 for (insn = last; ; insn = prev)
1749 prev = PREV_INSN (insn);
1751 if (GET_CODE (insn) == NOTE)
1753 /* Look for loop boundaries, remembering that we are going
1754 backwards. */
1755 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END)
1756 loop_depth++;
1757 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
1758 loop_depth--;
1760 /* If we have LOOP_DEPTH == 0, there has been a bookkeeping error.
1761 Abort now rather than setting register status incorrectly. */
1762 if (loop_depth == 0)
1763 abort ();
1765 /* If this is a call to `setjmp' et al,
1766 warn if any non-volatile datum is live. */
1768 if (final && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
1769 IOR_REG_SET (regs_live_at_setjmp, old);
1772 /* Update the life-status of regs for this insn.
1773 First DEAD gets which regs are set in this insn
1774 then LIVE gets which regs are used in this insn.
1775 Then the regs live before the insn
1776 are those live after, with DEAD regs turned off,
1777 and then LIVE regs turned on. */
1779 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1781 register int i;
1782 rtx note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
1783 int insn_is_dead
1784 = (insn_dead_p (PATTERN (insn), old, 0)
1785 /* Don't delete something that refers to volatile storage! */
1786 && ! INSN_VOLATILE (insn));
1787 int libcall_is_dead
1788 = (insn_is_dead && note != 0
1789 && libcall_dead_p (PATTERN (insn), old, note, insn));
1791 /* If an instruction consists of just dead store(s) on final pass,
1792 "delete" it by turning it into a NOTE of type NOTE_INSN_DELETED.
1793 We could really delete it with delete_insn, but that
1794 can cause trouble for first or last insn in a basic block. */
1795 if (final && insn_is_dead)
1797 PUT_CODE (insn, NOTE);
1798 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1799 NOTE_SOURCE_FILE (insn) = 0;
1801 /* CC0 is now known to be dead. Either this insn used it,
1802 in which case it doesn't anymore, or clobbered it,
1803 so the next insn can't use it. */
1804 cc0_live = 0;
1806 /* If this insn is copying the return value from a library call,
1807 delete the entire library call. */
1808 if (libcall_is_dead)
1810 rtx first = XEXP (note, 0);
1811 rtx p = insn;
1812 while (INSN_DELETED_P (first))
1813 first = NEXT_INSN (first);
1814 while (p != first)
1816 p = PREV_INSN (p);
1817 PUT_CODE (p, NOTE);
1818 NOTE_LINE_NUMBER (p) = NOTE_INSN_DELETED;
1819 NOTE_SOURCE_FILE (p) = 0;
1822 goto flushed;
1825 CLEAR_REG_SET (dead);
1826 CLEAR_REG_SET (live);
1828 /* See if this is an increment or decrement that can be
1829 merged into a following memory address. */
1830 #ifdef AUTO_INC_DEC
1832 register rtx x = single_set (insn);
1834 /* Does this instruction increment or decrement a register? */
1835 if (final && x != 0
1836 && GET_CODE (SET_DEST (x)) == REG
1837 && (GET_CODE (SET_SRC (x)) == PLUS
1838 || GET_CODE (SET_SRC (x)) == MINUS)
1839 && XEXP (SET_SRC (x), 0) == SET_DEST (x)
1840 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
1841 /* Ok, look for a following memory ref we can combine with.
1842 If one is found, change the memory ref to a PRE_INC
1843 or PRE_DEC, cancel this insn, and return 1.
1844 Return 0 if nothing has been done. */
1845 && try_pre_increment_1 (insn))
1846 goto flushed;
1848 #endif /* AUTO_INC_DEC */
1850 /* If this is not the final pass, and this insn is copying the
1851 value of a library call and it's dead, don't scan the
1852 insns that perform the library call, so that the call's
1853 arguments are not marked live. */
1854 if (libcall_is_dead)
1856 /* Mark the dest reg as `significant'. */
1857 mark_set_regs (old, dead, PATTERN (insn), NULL_RTX, significant);
1859 insn = XEXP (note, 0);
1860 prev = PREV_INSN (insn);
1862 else if (GET_CODE (PATTERN (insn)) == SET
1863 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
1864 && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
1865 && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx
1866 && GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == CONST_INT)
1867 /* We have an insn to pop a constant amount off the stack.
1868 (Such insns use PLUS regardless of the direction of the stack,
1869 and any insn to adjust the stack by a constant is always a pop.)
1870 These insns, if not dead stores, have no effect on life. */
1872 else
1874 /* LIVE gets the regs used in INSN;
1875 DEAD gets those set by it. Dead insns don't make anything
1876 live. */
1878 mark_set_regs (old, dead, PATTERN (insn),
1879 final ? insn : NULL_RTX, significant);
1881 /* If an insn doesn't use CC0, it becomes dead since we
1882 assume that every insn clobbers it. So show it dead here;
1883 mark_used_regs will set it live if it is referenced. */
1884 cc0_live = 0;
1886 if (! insn_is_dead)
1887 mark_used_regs (old, live, PATTERN (insn), final, insn);
1889 /* Sometimes we may have inserted something before INSN (such as
1890 a move) when we make an auto-inc. So ensure we will scan
1891 those insns. */
1892 #ifdef AUTO_INC_DEC
1893 prev = PREV_INSN (insn);
1894 #endif
1896 if (! insn_is_dead && GET_CODE (insn) == CALL_INSN)
1898 register int i;
1900 rtx note;
1902 for (note = CALL_INSN_FUNCTION_USAGE (insn);
1903 note;
1904 note = XEXP (note, 1))
1905 if (GET_CODE (XEXP (note, 0)) == USE)
1906 mark_used_regs (old, live, SET_DEST (XEXP (note, 0)),
1907 final, insn);
1909 /* Each call clobbers all call-clobbered regs that are not
1910 global or fixed. Note that the function-value reg is a
1911 call-clobbered reg, and mark_set_regs has already had
1912 a chance to handle it. */
1914 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1915 if (call_used_regs[i] && ! global_regs[i]
1916 && ! fixed_regs[i])
1917 SET_REGNO_REG_SET (dead, i);
1919 /* The stack ptr is used (honorarily) by a CALL insn. */
1920 SET_REGNO_REG_SET (live, STACK_POINTER_REGNUM);
1922 /* Calls may also reference any of the global registers,
1923 so they are made live. */
1924 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1925 if (global_regs[i])
1926 mark_used_regs (old, live,
1927 gen_rtx_REG (reg_raw_mode[i], i),
1928 final, insn);
1930 /* Calls also clobber memory. */
1931 last_mem_set = 0;
1934 /* Update OLD for the registers used or set. */
1935 AND_COMPL_REG_SET (old, dead);
1936 IOR_REG_SET (old, live);
1938 if (GET_CODE (insn) == CALL_INSN && final)
1940 /* Any regs live at the time of a call instruction
1941 must not go in a register clobbered by calls.
1942 Find all regs now live and record this for them. */
1944 register int *p = regs_sometimes_live;
1946 for (i = 0; i < sometimes_max; i++, p++)
1947 if (REGNO_REG_SET_P (old, *p))
1948 REG_N_CALLS_CROSSED (*p)++;
1952 /* On final pass, add any additional sometimes-live regs
1953 into MAXLIVE and REGS_SOMETIMES_LIVE.
1954 Also update counts of how many insns each reg is live at. */
1956 if (final)
1958 register int regno;
1959 register int *p;
1961 EXECUTE_IF_AND_COMPL_IN_REG_SET
1962 (live, maxlive, 0, regno,
1964 regs_sometimes_live[sometimes_max++] = regno;
1965 SET_REGNO_REG_SET (maxlive, regno);
1968 p = regs_sometimes_live;
1969 for (i = 0; i < sometimes_max; i++)
1971 regno = *p++;
1972 if (REGNO_REG_SET_P (old, regno))
1973 REG_LIVE_LENGTH (regno)++;
1977 flushed: ;
1978 if (insn == first)
1979 break;
1982 FREE_REG_SET (dead);
1983 FREE_REG_SET (live);
1984 if (final)
1985 FREE_REG_SET (maxlive);
1987 if (num_scratch > max_scratch)
1988 max_scratch = num_scratch;
1991 /* Return 1 if X (the body of an insn, or part of it) is just dead stores
1992 (SET expressions whose destinations are registers dead after the insn).
1993 NEEDED is the regset that says which regs are alive after the insn.
1995 Unless CALL_OK is non-zero, an insn is needed if it contains a CALL. */
1997 static int
1998 insn_dead_p (x, needed, call_ok)
1999 rtx x;
2000 regset needed;
2001 int call_ok;
2003 enum rtx_code code = GET_CODE (x);
2005 /* If setting something that's a reg or part of one,
2006 see if that register's altered value will be live. */
2008 if (code == SET)
2010 rtx r = SET_DEST (x);
2012 /* A SET that is a subroutine call cannot be dead. */
2013 if (! call_ok && GET_CODE (SET_SRC (x)) == CALL)
2014 return 0;
2016 #ifdef HAVE_cc0
2017 if (GET_CODE (r) == CC0)
2018 return ! cc0_live;
2019 #endif
2021 if (GET_CODE (r) == MEM && last_mem_set && ! MEM_VOLATILE_P (r)
2022 && rtx_equal_p (r, last_mem_set))
2023 return 1;
2025 while (GET_CODE (r) == SUBREG || GET_CODE (r) == STRICT_LOW_PART
2026 || GET_CODE (r) == ZERO_EXTRACT)
2027 r = SUBREG_REG (r);
2029 if (GET_CODE (r) == REG)
2031 int regno = REGNO (r);
2033 /* Don't delete insns to set global regs. */
2034 if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
2035 /* Make sure insns to set frame pointer aren't deleted. */
2036 || regno == FRAME_POINTER_REGNUM
2037 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2038 || regno == HARD_FRAME_POINTER_REGNUM
2039 #endif
2040 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
2041 /* Make sure insns to set arg pointer are never deleted
2042 (if the arg pointer isn't fixed, there will be a USE for
2043 it, so we can treat it normally). */
2044 || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
2045 #endif
2046 || REGNO_REG_SET_P (needed, regno))
2047 return 0;
2049 /* If this is a hard register, verify that subsequent words are
2050 not needed. */
2051 if (regno < FIRST_PSEUDO_REGISTER)
2053 int n = HARD_REGNO_NREGS (regno, GET_MODE (r));
2055 while (--n > 0)
2056 if (REGNO_REG_SET_P (needed, regno+n))
2057 return 0;
2060 return 1;
2064 /* If performing several activities,
2065 insn is dead if each activity is individually dead.
2066 Also, CLOBBERs and USEs can be ignored; a CLOBBER or USE
2067 that's inside a PARALLEL doesn't make the insn worth keeping. */
2068 else if (code == PARALLEL)
2070 int i = XVECLEN (x, 0);
2072 for (i--; i >= 0; i--)
2073 if (GET_CODE (XVECEXP (x, 0, i)) != CLOBBER
2074 && GET_CODE (XVECEXP (x, 0, i)) != USE
2075 && ! insn_dead_p (XVECEXP (x, 0, i), needed, call_ok))
2076 return 0;
2078 return 1;
2081 /* A CLOBBER of a pseudo-register that is dead serves no purpose. That
2082 is not necessarily true for hard registers. */
2083 else if (code == CLOBBER && GET_CODE (XEXP (x, 0)) == REG
2084 && REGNO (XEXP (x, 0)) >= FIRST_PSEUDO_REGISTER
2085 && ! REGNO_REG_SET_P (needed, REGNO (XEXP (x, 0))))
2086 return 1;
2088 /* We do not check other CLOBBER or USE here. An insn consisting of just
2089 a CLOBBER or just a USE should not be deleted. */
2090 return 0;
2093 /* If X is the pattern of the last insn in a libcall, and assuming X is dead,
2094 return 1 if the entire library call is dead.
2095 This is true if X copies a register (hard or pseudo)
2096 and if the hard return reg of the call insn is dead.
2097 (The caller should have tested the destination of X already for death.)
2099 If this insn doesn't just copy a register, then we don't
2100 have an ordinary libcall. In that case, cse could not have
2101 managed to substitute the source for the dest later on,
2102 so we can assume the libcall is dead.
2104 NEEDED is the bit vector of pseudoregs live before this insn.
2105 NOTE is the REG_RETVAL note of the insn. INSN is the insn itself. */
2107 static int
2108 libcall_dead_p (x, needed, note, insn)
2109 rtx x;
2110 regset needed;
2111 rtx note;
2112 rtx insn;
2114 register RTX_CODE code = GET_CODE (x);
2116 if (code == SET)
2118 register rtx r = SET_SRC (x);
2119 if (GET_CODE (r) == REG)
2121 rtx call = XEXP (note, 0);
2122 register int i;
2124 /* Find the call insn. */
2125 while (call != insn && GET_CODE (call) != CALL_INSN)
2126 call = NEXT_INSN (call);
2128 /* If there is none, do nothing special,
2129 since ordinary death handling can understand these insns. */
2130 if (call == insn)
2131 return 0;
2133 /* See if the hard reg holding the value is dead.
2134 If this is a PARALLEL, find the call within it. */
2135 call = PATTERN (call);
2136 if (GET_CODE (call) == PARALLEL)
2138 for (i = XVECLEN (call, 0) - 1; i >= 0; i--)
2139 if (GET_CODE (XVECEXP (call, 0, i)) == SET
2140 && GET_CODE (SET_SRC (XVECEXP (call, 0, i))) == CALL)
2141 break;
2143 /* This may be a library call that is returning a value
2144 via invisible pointer. Do nothing special, since
2145 ordinary death handling can understand these insns. */
2146 if (i < 0)
2147 return 0;
2149 call = XVECEXP (call, 0, i);
2152 return insn_dead_p (call, needed, 1);
2155 return 1;
2158 /* Return 1 if register REGNO was used before it was set, i.e. if it is
2159 live at function entry. Don't count global register variables, variables
2160 in registers that can be used for function arg passing, or variables in
2161 fixed hard registers. */
2164 regno_uninitialized (regno)
2165 int regno;
2167 if (n_basic_blocks == 0
2168 || (regno < FIRST_PSEUDO_REGISTER
2169 && (global_regs[regno]
2170 || fixed_regs[regno]
2171 || FUNCTION_ARG_REGNO_P (regno))))
2172 return 0;
2174 return REGNO_REG_SET_P (basic_block_live_at_start[0], regno);
2177 /* 1 if register REGNO was alive at a place where `setjmp' was called
2178 and was set more than once or is an argument.
2179 Such regs may be clobbered by `longjmp'. */
2182 regno_clobbered_at_setjmp (regno)
2183 int regno;
2185 if (n_basic_blocks == 0)
2186 return 0;
2188 return ((REG_N_SETS (regno) > 1
2189 || REGNO_REG_SET_P (basic_block_live_at_start[0], regno))
2190 && REGNO_REG_SET_P (regs_live_at_setjmp, regno));
2193 /* Process the registers that are set within X.
2194 Their bits are set to 1 in the regset DEAD,
2195 because they are dead prior to this insn.
2197 If INSN is nonzero, it is the insn being processed
2198 and the fact that it is nonzero implies this is the FINAL pass
2199 in propagate_block. In this case, various info about register
2200 usage is stored, LOG_LINKS fields of insns are set up. */
2202 static void
2203 mark_set_regs (needed, dead, x, insn, significant)
2204 regset needed;
2205 regset dead;
2206 rtx x;
2207 rtx insn;
2208 regset significant;
2210 register RTX_CODE code = GET_CODE (x);
2212 if (code == SET || code == CLOBBER)
2213 mark_set_1 (needed, dead, x, insn, significant);
2214 else if (code == PARALLEL)
2216 register int i;
2217 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
2219 code = GET_CODE (XVECEXP (x, 0, i));
2220 if (code == SET || code == CLOBBER)
2221 mark_set_1 (needed, dead, XVECEXP (x, 0, i), insn, significant);
2226 /* Process a single SET rtx, X. */
2228 static void
2229 mark_set_1 (needed, dead, x, insn, significant)
2230 regset needed;
2231 regset dead;
2232 rtx x;
2233 rtx insn;
2234 regset significant;
2236 register int regno;
2237 register rtx reg = SET_DEST (x);
2239 /* Some targets place small structures in registers for
2240 return values of functions. We have to detect this
2241 case specially here to get correct flow information. */
2242 if (GET_CODE (reg) == PARALLEL
2243 && GET_MODE (reg) == BLKmode)
2245 register int i;
2247 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
2248 mark_set_1 (needed, dead, XVECEXP (reg, 0, i), insn, significant);
2249 return;
2252 /* Modifying just one hardware register of a multi-reg value
2253 or just a byte field of a register
2254 does not mean the value from before this insn is now dead.
2255 But it does mean liveness of that register at the end of the block
2256 is significant.
2258 Within mark_set_1, however, we treat it as if the register is
2259 indeed modified. mark_used_regs will, however, also treat this
2260 register as being used. Thus, we treat these insns as setting a
2261 new value for the register as a function of its old value. This
2262 cases LOG_LINKS to be made appropriately and this will help combine. */
2264 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
2265 || GET_CODE (reg) == SIGN_EXTRACT
2266 || GET_CODE (reg) == STRICT_LOW_PART)
2267 reg = XEXP (reg, 0);
2269 /* If we are writing into memory or into a register mentioned in the
2270 address of the last thing stored into memory, show we don't know
2271 what the last store was. If we are writing memory, save the address
2272 unless it is volatile. */
2273 if (GET_CODE (reg) == MEM
2274 || (GET_CODE (reg) == REG
2275 && last_mem_set != 0 && reg_overlap_mentioned_p (reg, last_mem_set)))
2276 last_mem_set = 0;
2278 if (GET_CODE (reg) == MEM && ! side_effects_p (reg)
2279 /* There are no REG_INC notes for SP, so we can't assume we'll see
2280 everything that invalidates it. To be safe, don't eliminate any
2281 stores though SP; none of them should be redundant anyway. */
2282 && ! reg_mentioned_p (stack_pointer_rtx, reg))
2283 last_mem_set = reg;
2285 if (GET_CODE (reg) == REG
2286 && (regno = REGNO (reg), regno != FRAME_POINTER_REGNUM)
2287 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2288 && regno != HARD_FRAME_POINTER_REGNUM
2289 #endif
2290 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
2291 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
2292 #endif
2293 && ! (regno < FIRST_PSEUDO_REGISTER && global_regs[regno]))
2294 /* && regno != STACK_POINTER_REGNUM) -- let's try without this. */
2296 int some_needed = REGNO_REG_SET_P (needed, regno);
2297 int some_not_needed = ! some_needed;
2299 /* Mark it as a significant register for this basic block. */
2300 if (significant)
2301 SET_REGNO_REG_SET (significant, regno);
2303 /* Mark it as dead before this insn. */
2304 SET_REGNO_REG_SET (dead, regno);
2306 /* A hard reg in a wide mode may really be multiple registers.
2307 If so, mark all of them just like the first. */
2308 if (regno < FIRST_PSEUDO_REGISTER)
2310 int n;
2312 /* Nothing below is needed for the stack pointer; get out asap.
2313 Eg, log links aren't needed, since combine won't use them. */
2314 if (regno == STACK_POINTER_REGNUM)
2315 return;
2317 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
2318 while (--n > 0)
2320 int regno_n = regno + n;
2321 int needed_regno = REGNO_REG_SET_P (needed, regno_n);
2322 if (significant)
2323 SET_REGNO_REG_SET (significant, regno_n);
2325 SET_REGNO_REG_SET (dead, regno_n);
2326 some_needed |= needed_regno;
2327 some_not_needed |= ! needed_regno;
2330 /* Additional data to record if this is the final pass. */
2331 if (insn)
2333 register rtx y = reg_next_use[regno];
2334 register int blocknum = BLOCK_NUM (insn);
2336 /* If this is a hard reg, record this function uses the reg. */
2338 if (regno < FIRST_PSEUDO_REGISTER)
2340 register int i;
2341 int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (reg));
2343 for (i = regno; i < endregno; i++)
2345 /* The next use is no longer "next", since a store
2346 intervenes. */
2347 reg_next_use[i] = 0;
2349 regs_ever_live[i] = 1;
2350 REG_N_SETS (i)++;
2353 else
2355 /* The next use is no longer "next", since a store
2356 intervenes. */
2357 reg_next_use[regno] = 0;
2359 /* Keep track of which basic blocks each reg appears in. */
2361 if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
2362 REG_BASIC_BLOCK (regno) = blocknum;
2363 else if (REG_BASIC_BLOCK (regno) != blocknum)
2364 REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
2366 /* Count (weighted) references, stores, etc. This counts a
2367 register twice if it is modified, but that is correct. */
2368 REG_N_SETS (regno)++;
2370 REG_N_REFS (regno) += loop_depth;
2372 /* The insns where a reg is live are normally counted
2373 elsewhere, but we want the count to include the insn
2374 where the reg is set, and the normal counting mechanism
2375 would not count it. */
2376 REG_LIVE_LENGTH (regno)++;
2379 if (! some_not_needed)
2381 /* Make a logical link from the next following insn
2382 that uses this register, back to this insn.
2383 The following insns have already been processed.
2385 We don't build a LOG_LINK for hard registers containing
2386 in ASM_OPERANDs. If these registers get replaced,
2387 we might wind up changing the semantics of the insn,
2388 even if reload can make what appear to be valid assignments
2389 later. */
2390 if (y && (BLOCK_NUM (y) == blocknum)
2391 && (regno >= FIRST_PSEUDO_REGISTER
2392 || asm_noperands (PATTERN (y)) < 0))
2393 LOG_LINKS (y)
2394 = gen_rtx_INSN_LIST (VOIDmode, insn, LOG_LINKS (y));
2396 else if (! some_needed)
2398 /* Note that dead stores have already been deleted when possible
2399 If we get here, we have found a dead store that cannot
2400 be eliminated (because the same insn does something useful).
2401 Indicate this by marking the reg being set as dying here. */
2402 REG_NOTES (insn)
2403 = gen_rtx_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
2404 REG_N_DEATHS (REGNO (reg))++;
2406 else
2408 /* This is a case where we have a multi-word hard register
2409 and some, but not all, of the words of the register are
2410 needed in subsequent insns. Write REG_UNUSED notes
2411 for those parts that were not needed. This case should
2412 be rare. */
2414 int i;
2416 for (i = HARD_REGNO_NREGS (regno, GET_MODE (reg)) - 1;
2417 i >= 0; i--)
2418 if (!REGNO_REG_SET_P (needed, regno + i))
2419 REG_NOTES (insn)
2420 = gen_rtx_EXPR_LIST (REG_UNUSED,
2421 gen_rtx_REG (reg_raw_mode[regno + i],
2422 regno + i),
2423 REG_NOTES (insn));
2427 else if (GET_CODE (reg) == REG)
2428 reg_next_use[regno] = 0;
2430 /* If this is the last pass and this is a SCRATCH, show it will be dying
2431 here and count it. */
2432 else if (GET_CODE (reg) == SCRATCH && insn != 0)
2434 REG_NOTES (insn)
2435 = gen_rtx_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
2436 num_scratch++;
2440 #ifdef AUTO_INC_DEC
2442 /* X is a MEM found in INSN. See if we can convert it into an auto-increment
2443 reference. */
2445 static void
2446 find_auto_inc (needed, x, insn)
2447 regset needed;
2448 rtx x;
2449 rtx insn;
2451 rtx addr = XEXP (x, 0);
2452 HOST_WIDE_INT offset = 0;
2453 rtx set;
2455 /* Here we detect use of an index register which might be good for
2456 postincrement, postdecrement, preincrement, or predecrement. */
2458 if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
2459 offset = INTVAL (XEXP (addr, 1)), addr = XEXP (addr, 0);
2461 if (GET_CODE (addr) == REG)
2463 register rtx y;
2464 register int size = GET_MODE_SIZE (GET_MODE (x));
2465 rtx use;
2466 rtx incr;
2467 int regno = REGNO (addr);
2469 /* Is the next use an increment that might make auto-increment? */
2470 if ((incr = reg_next_use[regno]) != 0
2471 && (set = single_set (incr)) != 0
2472 && GET_CODE (set) == SET
2473 && BLOCK_NUM (incr) == BLOCK_NUM (insn)
2474 /* Can't add side effects to jumps; if reg is spilled and
2475 reloaded, there's no way to store back the altered value. */
2476 && GET_CODE (insn) != JUMP_INSN
2477 && (y = SET_SRC (set), GET_CODE (y) == PLUS)
2478 && XEXP (y, 0) == addr
2479 && GET_CODE (XEXP (y, 1)) == CONST_INT
2480 && (0
2481 #ifdef HAVE_POST_INCREMENT
2482 || (INTVAL (XEXP (y, 1)) == size && offset == 0)
2483 #endif
2484 #ifdef HAVE_POST_DECREMENT
2485 || (INTVAL (XEXP (y, 1)) == - size && offset == 0)
2486 #endif
2487 #ifdef HAVE_PRE_INCREMENT
2488 || (INTVAL (XEXP (y, 1)) == size && offset == size)
2489 #endif
2490 #ifdef HAVE_PRE_DECREMENT
2491 || (INTVAL (XEXP (y, 1)) == - size && offset == - size)
2492 #endif
2494 /* Make sure this reg appears only once in this insn. */
2495 && (use = find_use_as_address (PATTERN (insn), addr, offset),
2496 use != 0 && use != (rtx) 1))
2498 rtx q = SET_DEST (set);
2499 enum rtx_code inc_code = (INTVAL (XEXP (y, 1)) == size
2500 ? (offset ? PRE_INC : POST_INC)
2501 : (offset ? PRE_DEC : POST_DEC));
2503 if (dead_or_set_p (incr, addr))
2505 /* This is the simple case. Try to make the auto-inc. If
2506 we can't, we are done. Otherwise, we will do any
2507 needed updates below. */
2508 if (! validate_change (insn, &XEXP (x, 0),
2509 gen_rtx_fmt_e (inc_code, Pmode, addr),
2511 return;
2513 else if (GET_CODE (q) == REG
2514 /* PREV_INSN used here to check the semi-open interval
2515 [insn,incr). */
2516 && ! reg_used_between_p (q, PREV_INSN (insn), incr)
2517 /* We must also check for sets of q as q may be
2518 a call clobbered hard register and there may
2519 be a call between PREV_INSN (insn) and incr. */
2520 && ! reg_set_between_p (q, PREV_INSN (insn), incr))
2522 /* We have *p followed sometime later by q = p+size.
2523 Both p and q must be live afterward,
2524 and q is not used between INSN and its assignment.
2525 Change it to q = p, ...*q..., q = q+size.
2526 Then fall into the usual case. */
2527 rtx insns, temp;
2529 start_sequence ();
2530 emit_move_insn (q, addr);
2531 insns = get_insns ();
2532 end_sequence ();
2534 /* If anything in INSNS have UID's that don't fit within the
2535 extra space we allocate earlier, we can't make this auto-inc.
2536 This should never happen. */
2537 for (temp = insns; temp; temp = NEXT_INSN (temp))
2539 if (INSN_UID (temp) > max_uid_for_flow)
2540 return;
2541 BLOCK_NUM (temp) = BLOCK_NUM (insn);
2544 /* If we can't make the auto-inc, or can't make the
2545 replacement into Y, exit. There's no point in making
2546 the change below if we can't do the auto-inc and doing
2547 so is not correct in the pre-inc case. */
2549 validate_change (insn, &XEXP (x, 0),
2550 gen_rtx_fmt_e (inc_code, Pmode, q),
2552 validate_change (incr, &XEXP (y, 0), q, 1);
2553 if (! apply_change_group ())
2554 return;
2556 /* We now know we'll be doing this change, so emit the
2557 new insn(s) and do the updates. */
2558 emit_insns_before (insns, insn);
2560 if (basic_block_head[BLOCK_NUM (insn)] == insn)
2561 basic_block_head[BLOCK_NUM (insn)] = insns;
2563 /* INCR will become a NOTE and INSN won't contain a
2564 use of ADDR. If a use of ADDR was just placed in
2565 the insn before INSN, make that the next use.
2566 Otherwise, invalidate it. */
2567 if (GET_CODE (PREV_INSN (insn)) == INSN
2568 && GET_CODE (PATTERN (PREV_INSN (insn))) == SET
2569 && SET_SRC (PATTERN (PREV_INSN (insn))) == addr)
2570 reg_next_use[regno] = PREV_INSN (insn);
2571 else
2572 reg_next_use[regno] = 0;
2574 addr = q;
2575 regno = REGNO (q);
2577 /* REGNO is now used in INCR which is below INSN, but
2578 it previously wasn't live here. If we don't mark
2579 it as needed, we'll put a REG_DEAD note for it
2580 on this insn, which is incorrect. */
2581 SET_REGNO_REG_SET (needed, regno);
2583 /* If there are any calls between INSN and INCR, show
2584 that REGNO now crosses them. */
2585 for (temp = insn; temp != incr; temp = NEXT_INSN (temp))
2586 if (GET_CODE (temp) == CALL_INSN)
2587 REG_N_CALLS_CROSSED (regno)++;
2589 else
2590 return;
2592 /* If we haven't returned, it means we were able to make the
2593 auto-inc, so update the status. First, record that this insn
2594 has an implicit side effect. */
2596 REG_NOTES (insn)
2597 = gen_rtx_EXPR_LIST (REG_INC, addr, REG_NOTES (insn));
2599 /* Modify the old increment-insn to simply copy
2600 the already-incremented value of our register. */
2601 if (! validate_change (incr, &SET_SRC (set), addr, 0))
2602 abort ();
2604 /* If that makes it a no-op (copying the register into itself) delete
2605 it so it won't appear to be a "use" and a "set" of this
2606 register. */
2607 if (SET_DEST (set) == addr)
2609 PUT_CODE (incr, NOTE);
2610 NOTE_LINE_NUMBER (incr) = NOTE_INSN_DELETED;
2611 NOTE_SOURCE_FILE (incr) = 0;
2614 if (regno >= FIRST_PSEUDO_REGISTER)
2616 /* Count an extra reference to the reg. When a reg is
2617 incremented, spilling it is worse, so we want to make
2618 that less likely. */
2619 REG_N_REFS (regno) += loop_depth;
2621 /* Count the increment as a setting of the register,
2622 even though it isn't a SET in rtl. */
2623 REG_N_SETS (regno)++;
2628 #endif /* AUTO_INC_DEC */
2630 /* Scan expression X and store a 1-bit in LIVE for each reg it uses.
2631 This is done assuming the registers needed from X
2632 are those that have 1-bits in NEEDED.
2634 On the final pass, FINAL is 1. This means try for autoincrement
2635 and count the uses and deaths of each pseudo-reg.
2637 INSN is the containing instruction. If INSN is dead, this function is not
2638 called. */
2640 static void
2641 mark_used_regs (needed, live, x, final, insn)
2642 regset needed;
2643 regset live;
2644 rtx x;
2645 int final;
2646 rtx insn;
2648 register RTX_CODE code;
2649 register int regno;
2650 int i;
2652 retry:
2653 code = GET_CODE (x);
2654 switch (code)
2656 case LABEL_REF:
2657 case SYMBOL_REF:
2658 case CONST_INT:
2659 case CONST:
2660 case CONST_DOUBLE:
2661 case PC:
2662 case ADDR_VEC:
2663 case ADDR_DIFF_VEC:
2664 case ASM_INPUT:
2665 return;
2667 #ifdef HAVE_cc0
2668 case CC0:
2669 cc0_live = 1;
2670 return;
2671 #endif
2673 case CLOBBER:
2674 /* If we are clobbering a MEM, mark any registers inside the address
2675 as being used. */
2676 if (GET_CODE (XEXP (x, 0)) == MEM)
2677 mark_used_regs (needed, live, XEXP (XEXP (x, 0), 0), final, insn);
2678 return;
2680 case MEM:
2681 /* Invalidate the data for the last MEM stored, but only if MEM is
2682 something that can be stored into. */
2683 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
2684 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
2685 ; /* needn't clear last_mem_set */
2686 else
2687 last_mem_set = 0;
2689 #ifdef AUTO_INC_DEC
2690 if (final)
2691 find_auto_inc (needed, x, insn);
2692 #endif
2693 break;
2695 case SUBREG:
2696 if (GET_CODE (SUBREG_REG (x)) == REG
2697 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER
2698 && (GET_MODE_SIZE (GET_MODE (x))
2699 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
2700 REG_CHANGES_SIZE (REGNO (SUBREG_REG (x))) = 1;
2702 /* While we're here, optimize this case. */
2703 x = SUBREG_REG (x);
2705 /* In case the SUBREG is not of a register, don't optimize */
2706 if (GET_CODE (x) != REG)
2708 mark_used_regs (needed, live, x, final, insn);
2709 return;
2712 /* ... fall through ... */
2714 case REG:
2715 /* See a register other than being set
2716 => mark it as needed. */
2718 regno = REGNO (x);
2720 int some_needed = REGNO_REG_SET_P (needed, regno);
2721 int some_not_needed = ! some_needed;
2723 SET_REGNO_REG_SET (live, regno);
2725 /* A hard reg in a wide mode may really be multiple registers.
2726 If so, mark all of them just like the first. */
2727 if (regno < FIRST_PSEUDO_REGISTER)
2729 int n;
2731 /* For stack ptr or fixed arg pointer,
2732 nothing below can be necessary, so waste no more time. */
2733 if (regno == STACK_POINTER_REGNUM
2734 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2735 || regno == HARD_FRAME_POINTER_REGNUM
2736 #endif
2737 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
2738 || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
2739 #endif
2740 || regno == FRAME_POINTER_REGNUM)
2742 /* If this is a register we are going to try to eliminate,
2743 don't mark it live here. If we are successful in
2744 eliminating it, it need not be live unless it is used for
2745 pseudos, in which case it will have been set live when
2746 it was allocated to the pseudos. If the register will not
2747 be eliminated, reload will set it live at that point. */
2749 if (! TEST_HARD_REG_BIT (elim_reg_set, regno))
2750 regs_ever_live[regno] = 1;
2751 return;
2753 /* No death notes for global register variables;
2754 their values are live after this function exits. */
2755 if (global_regs[regno])
2757 if (final)
2758 reg_next_use[regno] = insn;
2759 return;
2762 n = HARD_REGNO_NREGS (regno, GET_MODE (x));
2763 while (--n > 0)
2765 int regno_n = regno + n;
2766 int needed_regno = REGNO_REG_SET_P (needed, regno_n);
2768 SET_REGNO_REG_SET (live, regno_n);
2769 some_needed |= needed_regno;
2770 some_not_needed |= ! needed_regno;
2773 if (final)
2775 /* Record where each reg is used, so when the reg
2776 is set we know the next insn that uses it. */
2778 reg_next_use[regno] = insn;
2780 if (regno < FIRST_PSEUDO_REGISTER)
2782 /* If a hard reg is being used,
2783 record that this function does use it. */
2785 i = HARD_REGNO_NREGS (regno, GET_MODE (x));
2786 if (i == 0)
2787 i = 1;
2789 regs_ever_live[regno + --i] = 1;
2790 while (i > 0);
2792 else
2794 /* Keep track of which basic block each reg appears in. */
2796 register int blocknum = BLOCK_NUM (insn);
2798 if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
2799 REG_BASIC_BLOCK (regno) = blocknum;
2800 else if (REG_BASIC_BLOCK (regno) != blocknum)
2801 REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
2803 /* Count (weighted) number of uses of each reg. */
2805 REG_N_REFS (regno) += loop_depth;
2808 /* Record and count the insns in which a reg dies.
2809 If it is used in this insn and was dead below the insn
2810 then it dies in this insn. If it was set in this insn,
2811 we do not make a REG_DEAD note; likewise if we already
2812 made such a note. */
2814 if (some_not_needed
2815 && ! dead_or_set_p (insn, x)
2816 #if 0
2817 && (regno >= FIRST_PSEUDO_REGISTER || ! fixed_regs[regno])
2818 #endif
2821 /* Check for the case where the register dying partially
2822 overlaps the register set by this insn. */
2823 if (regno < FIRST_PSEUDO_REGISTER
2824 && HARD_REGNO_NREGS (regno, GET_MODE (x)) > 1)
2826 int n = HARD_REGNO_NREGS (regno, GET_MODE (x));
2827 while (--n >= 0)
2828 some_needed |= dead_or_set_regno_p (insn, regno + n);
2831 /* If none of the words in X is needed, make a REG_DEAD
2832 note. Otherwise, we must make partial REG_DEAD notes. */
2833 if (! some_needed)
2835 REG_NOTES (insn)
2836 = gen_rtx_EXPR_LIST (REG_DEAD, x, REG_NOTES (insn));
2837 REG_N_DEATHS (regno)++;
2839 else
2841 int i;
2843 /* Don't make a REG_DEAD note for a part of a register
2844 that is set in the insn. */
2846 for (i = HARD_REGNO_NREGS (regno, GET_MODE (x)) - 1;
2847 i >= 0; i--)
2848 if (!REGNO_REG_SET_P (needed, regno + i)
2849 && ! dead_or_set_regno_p (insn, regno + i))
2850 REG_NOTES (insn)
2851 = gen_rtx_EXPR_LIST (REG_DEAD,
2852 gen_rtx_REG (reg_raw_mode[regno + i],
2853 regno + i),
2854 REG_NOTES (insn));
2859 return;
2861 case SET:
2863 register rtx testreg = SET_DEST (x);
2864 int mark_dest = 0;
2866 /* If storing into MEM, don't show it as being used. But do
2867 show the address as being used. */
2868 if (GET_CODE (testreg) == MEM)
2870 #ifdef AUTO_INC_DEC
2871 if (final)
2872 find_auto_inc (needed, testreg, insn);
2873 #endif
2874 mark_used_regs (needed, live, XEXP (testreg, 0), final, insn);
2875 mark_used_regs (needed, live, SET_SRC (x), final, insn);
2876 return;
2879 /* Storing in STRICT_LOW_PART is like storing in a reg
2880 in that this SET might be dead, so ignore it in TESTREG.
2881 but in some other ways it is like using the reg.
2883 Storing in a SUBREG or a bit field is like storing the entire
2884 register in that if the register's value is not used
2885 then this SET is not needed. */
2886 while (GET_CODE (testreg) == STRICT_LOW_PART
2887 || GET_CODE (testreg) == ZERO_EXTRACT
2888 || GET_CODE (testreg) == SIGN_EXTRACT
2889 || GET_CODE (testreg) == SUBREG)
2891 if (GET_CODE (testreg) == SUBREG
2892 && GET_CODE (SUBREG_REG (testreg)) == REG
2893 && REGNO (SUBREG_REG (testreg)) >= FIRST_PSEUDO_REGISTER
2894 && (GET_MODE_SIZE (GET_MODE (testreg))
2895 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (testreg)))))
2896 REG_CHANGES_SIZE (REGNO (SUBREG_REG (testreg))) = 1;
2898 /* Modifying a single register in an alternate mode
2899 does not use any of the old value. But these other
2900 ways of storing in a register do use the old value. */
2901 if (GET_CODE (testreg) == SUBREG
2902 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
2904 else
2905 mark_dest = 1;
2907 testreg = XEXP (testreg, 0);
2910 /* If this is a store into a register,
2911 recursively scan the value being stored. */
2913 if ((GET_CODE (testreg) == PARALLEL
2914 && GET_MODE (testreg) == BLKmode)
2915 || (GET_CODE (testreg) == REG
2916 && (regno = REGNO (testreg), regno != FRAME_POINTER_REGNUM)
2917 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2918 && regno != HARD_FRAME_POINTER_REGNUM
2919 #endif
2920 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
2921 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
2922 #endif
2924 /* We used to exclude global_regs here, but that seems wrong.
2925 Storing in them is like storing in mem. */
2927 mark_used_regs (needed, live, SET_SRC (x), final, insn);
2928 if (mark_dest)
2929 mark_used_regs (needed, live, SET_DEST (x), final, insn);
2930 return;
2933 break;
2935 case RETURN:
2936 /* If exiting needs the right stack value, consider this insn as
2937 using the stack pointer. In any event, consider it as using
2938 all global registers and all registers used by return. */
2940 #ifdef EXIT_IGNORE_STACK
2941 if (! EXIT_IGNORE_STACK
2942 || (! FRAME_POINTER_REQUIRED
2943 && ! current_function_calls_alloca
2944 && flag_omit_frame_pointer))
2945 #endif
2946 SET_REGNO_REG_SET (live, STACK_POINTER_REGNUM);
2948 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2949 if (global_regs[i]
2950 #ifdef EPILOGUE_USES
2951 || EPILOGUE_USES (i)
2952 #endif
2954 SET_REGNO_REG_SET (live, i);
2955 break;
2957 default:
2958 break;
2961 /* Recursively scan the operands of this expression. */
2964 register char *fmt = GET_RTX_FORMAT (code);
2965 register int i;
2967 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2969 if (fmt[i] == 'e')
2971 /* Tail recursive case: save a function call level. */
2972 if (i == 0)
2974 x = XEXP (x, 0);
2975 goto retry;
2977 mark_used_regs (needed, live, XEXP (x, i), final, insn);
2979 else if (fmt[i] == 'E')
2981 register int j;
2982 for (j = 0; j < XVECLEN (x, i); j++)
2983 mark_used_regs (needed, live, XVECEXP (x, i, j), final, insn);
2989 #ifdef AUTO_INC_DEC
2991 static int
2992 try_pre_increment_1 (insn)
2993 rtx insn;
2995 /* Find the next use of this reg. If in same basic block,
2996 make it do pre-increment or pre-decrement if appropriate. */
2997 rtx x = single_set (insn);
2998 HOST_WIDE_INT amount = ((GET_CODE (SET_SRC (x)) == PLUS ? 1 : -1)
2999 * INTVAL (XEXP (SET_SRC (x), 1)));
3000 int regno = REGNO (SET_DEST (x));
3001 rtx y = reg_next_use[regno];
3002 if (y != 0
3003 && BLOCK_NUM (y) == BLOCK_NUM (insn)
3004 /* Don't do this if the reg dies, or gets set in y; a standard addressing
3005 mode would be better. */
3006 && ! dead_or_set_p (y, SET_DEST (x))
3007 && try_pre_increment (y, SET_DEST (x), amount))
3009 /* We have found a suitable auto-increment
3010 and already changed insn Y to do it.
3011 So flush this increment-instruction. */
3012 PUT_CODE (insn, NOTE);
3013 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
3014 NOTE_SOURCE_FILE (insn) = 0;
3015 /* Count a reference to this reg for the increment
3016 insn we are deleting. When a reg is incremented.
3017 spilling it is worse, so we want to make that
3018 less likely. */
3019 if (regno >= FIRST_PSEUDO_REGISTER)
3021 REG_N_REFS (regno) += loop_depth;
3022 REG_N_SETS (regno)++;
3024 return 1;
3026 return 0;
3029 /* Try to change INSN so that it does pre-increment or pre-decrement
3030 addressing on register REG in order to add AMOUNT to REG.
3031 AMOUNT is negative for pre-decrement.
3032 Returns 1 if the change could be made.
3033 This checks all about the validity of the result of modifying INSN. */
3035 static int
3036 try_pre_increment (insn, reg, amount)
3037 rtx insn, reg;
3038 HOST_WIDE_INT amount;
3040 register rtx use;
3042 /* Nonzero if we can try to make a pre-increment or pre-decrement.
3043 For example, addl $4,r1; movl (r1),... can become movl +(r1),... */
3044 int pre_ok = 0;
3045 /* Nonzero if we can try to make a post-increment or post-decrement.
3046 For example, addl $4,r1; movl -4(r1),... can become movl (r1)+,...
3047 It is possible for both PRE_OK and POST_OK to be nonzero if the machine
3048 supports both pre-inc and post-inc, or both pre-dec and post-dec. */
3049 int post_ok = 0;
3051 /* Nonzero if the opportunity actually requires post-inc or post-dec. */
3052 int do_post = 0;
3054 /* From the sign of increment, see which possibilities are conceivable
3055 on this target machine. */
3056 #ifdef HAVE_PRE_INCREMENT
3057 if (amount > 0)
3058 pre_ok = 1;
3059 #endif
3060 #ifdef HAVE_POST_INCREMENT
3061 if (amount > 0)
3062 post_ok = 1;
3063 #endif
3065 #ifdef HAVE_PRE_DECREMENT
3066 if (amount < 0)
3067 pre_ok = 1;
3068 #endif
3069 #ifdef HAVE_POST_DECREMENT
3070 if (amount < 0)
3071 post_ok = 1;
3072 #endif
3074 if (! (pre_ok || post_ok))
3075 return 0;
3077 /* It is not safe to add a side effect to a jump insn
3078 because if the incremented register is spilled and must be reloaded
3079 there would be no way to store the incremented value back in memory. */
3081 if (GET_CODE (insn) == JUMP_INSN)
3082 return 0;
3084 use = 0;
3085 if (pre_ok)
3086 use = find_use_as_address (PATTERN (insn), reg, 0);
3087 if (post_ok && (use == 0 || use == (rtx) 1))
3089 use = find_use_as_address (PATTERN (insn), reg, -amount);
3090 do_post = 1;
3093 if (use == 0 || use == (rtx) 1)
3094 return 0;
3096 if (GET_MODE_SIZE (GET_MODE (use)) != (amount > 0 ? amount : - amount))
3097 return 0;
3099 /* See if this combination of instruction and addressing mode exists. */
3100 if (! validate_change (insn, &XEXP (use, 0),
3101 gen_rtx_fmt_e (amount > 0
3102 ? (do_post ? POST_INC : PRE_INC)
3103 : (do_post ? POST_DEC : PRE_DEC),
3104 Pmode, reg), 0))
3105 return 0;
3107 /* Record that this insn now has an implicit side effect on X. */
3108 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_INC, reg, REG_NOTES (insn));
3109 return 1;
3112 #endif /* AUTO_INC_DEC */
3114 /* Find the place in the rtx X where REG is used as a memory address.
3115 Return the MEM rtx that so uses it.
3116 If PLUSCONST is nonzero, search instead for a memory address equivalent to
3117 (plus REG (const_int PLUSCONST)).
3119 If such an address does not appear, return 0.
3120 If REG appears more than once, or is used other than in such an address,
3121 return (rtx)1. */
3124 find_use_as_address (x, reg, plusconst)
3125 register rtx x;
3126 rtx reg;
3127 HOST_WIDE_INT plusconst;
3129 enum rtx_code code = GET_CODE (x);
3130 char *fmt = GET_RTX_FORMAT (code);
3131 register int i;
3132 register rtx value = 0;
3133 register rtx tem;
3135 if (code == MEM && XEXP (x, 0) == reg && plusconst == 0)
3136 return x;
3138 if (code == MEM && GET_CODE (XEXP (x, 0)) == PLUS
3139 && XEXP (XEXP (x, 0), 0) == reg
3140 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3141 && INTVAL (XEXP (XEXP (x, 0), 1)) == plusconst)
3142 return x;
3144 if (code == SIGN_EXTRACT || code == ZERO_EXTRACT)
3146 /* If REG occurs inside a MEM used in a bit-field reference,
3147 that is unacceptable. */
3148 if (find_use_as_address (XEXP (x, 0), reg, 0) != 0)
3149 return (rtx) (HOST_WIDE_INT) 1;
3152 if (x == reg)
3153 return (rtx) (HOST_WIDE_INT) 1;
3155 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3157 if (fmt[i] == 'e')
3159 tem = find_use_as_address (XEXP (x, i), reg, plusconst);
3160 if (value == 0)
3161 value = tem;
3162 else if (tem != 0)
3163 return (rtx) (HOST_WIDE_INT) 1;
3165 if (fmt[i] == 'E')
3167 register int j;
3168 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3170 tem = find_use_as_address (XVECEXP (x, i, j), reg, plusconst);
3171 if (value == 0)
3172 value = tem;
3173 else if (tem != 0)
3174 return (rtx) (HOST_WIDE_INT) 1;
3179 return value;
3182 /* Write information about registers and basic blocks into FILE.
3183 This is part of making a debugging dump. */
3185 void
3186 dump_flow_info (file)
3187 FILE *file;
3189 register int i;
3190 static char *reg_class_names[] = REG_CLASS_NAMES;
3192 fprintf (file, "%d registers.\n", max_regno);
3194 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3195 if (REG_N_REFS (i))
3197 enum reg_class class, altclass;
3198 fprintf (file, "\nRegister %d used %d times across %d insns",
3199 i, REG_N_REFS (i), REG_LIVE_LENGTH (i));
3200 if (REG_BASIC_BLOCK (i) >= 0)
3201 fprintf (file, " in block %d", REG_BASIC_BLOCK (i));
3202 if (REG_N_SETS (i))
3203 fprintf (file, "; set %d time%s", REG_N_SETS (i),
3204 (REG_N_SETS (i) == 1) ? "" : "s");
3205 if (REG_USERVAR_P (regno_reg_rtx[i]))
3206 fprintf (file, "; user var");
3207 if (REG_N_DEATHS (i) != 1)
3208 fprintf (file, "; dies in %d places", REG_N_DEATHS (i));
3209 if (REG_N_CALLS_CROSSED (i) == 1)
3210 fprintf (file, "; crosses 1 call");
3211 else if (REG_N_CALLS_CROSSED (i))
3212 fprintf (file, "; crosses %d calls", REG_N_CALLS_CROSSED (i));
3213 if (PSEUDO_REGNO_BYTES (i) != UNITS_PER_WORD)
3214 fprintf (file, "; %d bytes", PSEUDO_REGNO_BYTES (i));
3215 class = reg_preferred_class (i);
3216 altclass = reg_alternate_class (i);
3217 if (class != GENERAL_REGS || altclass != ALL_REGS)
3219 if (altclass == ALL_REGS || class == ALL_REGS)
3220 fprintf (file, "; pref %s", reg_class_names[(int) class]);
3221 else if (altclass == NO_REGS)
3222 fprintf (file, "; %s or none", reg_class_names[(int) class]);
3223 else
3224 fprintf (file, "; pref %s, else %s",
3225 reg_class_names[(int) class],
3226 reg_class_names[(int) altclass]);
3228 if (REGNO_POINTER_FLAG (i))
3229 fprintf (file, "; pointer");
3230 fprintf (file, ".\n");
3232 fprintf (file, "\n%d basic blocks.\n", n_basic_blocks);
3233 for (i = 0; i < n_basic_blocks; i++)
3235 register rtx head, jump;
3236 register int regno;
3237 fprintf (file, "\nBasic block %d: first insn %d, last %d.\n",
3239 INSN_UID (basic_block_head[i]),
3240 INSN_UID (basic_block_end[i]));
3241 /* The control flow graph's storage is freed
3242 now when flow_analysis returns.
3243 Don't try to print it if it is gone. */
3244 if (basic_block_drops_in)
3246 fprintf (file, "Reached from blocks: ");
3247 head = basic_block_head[i];
3248 if (GET_CODE (head) == CODE_LABEL)
3249 for (jump = LABEL_REFS (head);
3250 jump != head;
3251 jump = LABEL_NEXTREF (jump))
3253 register int from_block = BLOCK_NUM (CONTAINING_INSN (jump));
3254 fprintf (file, " %d", from_block);
3256 if (basic_block_drops_in[i])
3257 fprintf (file, " previous");
3259 fprintf (file, "\nRegisters live at start:");
3260 for (regno = 0; regno < max_regno; regno++)
3261 if (REGNO_REG_SET_P (basic_block_live_at_start[i], regno))
3262 fprintf (file, " %d", regno);
3263 fprintf (file, "\n");
3265 fprintf (file, "\n");
3269 /* Like print_rtl, but also print out live information for the start of each
3270 basic block. */
3272 void
3273 print_rtl_with_bb (outf, rtx_first)
3274 FILE *outf;
3275 rtx rtx_first;
3277 register rtx tmp_rtx;
3279 if (rtx_first == 0)
3280 fprintf (outf, "(nil)\n");
3282 else
3284 int i, bb;
3285 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
3286 int max_uid = get_max_uid ();
3287 int *start = (int *) alloca (max_uid * sizeof (int));
3288 int *end = (int *) alloca (max_uid * sizeof (int));
3289 enum bb_state *in_bb_p = (enum bb_state *)
3290 alloca (max_uid * sizeof (enum bb_state));
3292 for (i = 0; i < max_uid; i++)
3294 start[i] = end[i] = -1;
3295 in_bb_p[i] = NOT_IN_BB;
3298 for (i = n_basic_blocks-1; i >= 0; i--)
3300 rtx x;
3301 start[INSN_UID (basic_block_head[i])] = i;
3302 end[INSN_UID (basic_block_end[i])] = i;
3303 for (x = basic_block_head[i]; x != NULL_RTX; x = NEXT_INSN (x))
3305 in_bb_p[ INSN_UID(x)]
3306 = (in_bb_p[ INSN_UID(x)] == NOT_IN_BB)
3307 ? IN_ONE_BB : IN_MULTIPLE_BB;
3308 if (x == basic_block_end[i])
3309 break;
3313 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
3315 int did_output;
3317 if ((bb = start[INSN_UID (tmp_rtx)]) >= 0)
3319 fprintf (outf, ";; Start of basic block %d, registers live:",
3320 bb);
3322 EXECUTE_IF_SET_IN_REG_SET (basic_block_live_at_start[bb], 0, i,
3324 fprintf (outf, " %d", i);
3325 if (i < FIRST_PSEUDO_REGISTER)
3326 fprintf (outf, " [%s]",
3327 reg_names[i]);
3329 putc ('\n', outf);
3332 if (in_bb_p[ INSN_UID(tmp_rtx)] == NOT_IN_BB
3333 && GET_CODE (tmp_rtx) != NOTE
3334 && GET_CODE (tmp_rtx) != BARRIER)
3335 fprintf (outf, ";; Insn is not within a basic block\n");
3336 else if (in_bb_p[ INSN_UID(tmp_rtx)] == IN_MULTIPLE_BB)
3337 fprintf (outf, ";; Insn is in multiple basic blocks\n");
3339 did_output = print_rtl_single (outf, tmp_rtx);
3341 if ((bb = end[INSN_UID (tmp_rtx)]) >= 0)
3342 fprintf (outf, ";; End of basic block %d\n", bb);
3344 if (did_output)
3345 putc ('\n', outf);
3351 /* Integer list support. */
3353 /* Allocate a node from list *HEAD_PTR. */
3355 static int_list_ptr
3356 alloc_int_list_node (head_ptr)
3357 int_list_block **head_ptr;
3359 struct int_list_block *first_blk = *head_ptr;
3361 if (first_blk == NULL || first_blk->nodes_left <= 0)
3363 first_blk = (struct int_list_block *) xmalloc (sizeof (struct int_list_block));
3364 first_blk->nodes_left = INT_LIST_NODES_IN_BLK;
3365 first_blk->next = *head_ptr;
3366 *head_ptr = first_blk;
3369 first_blk->nodes_left--;
3370 return &first_blk->nodes[first_blk->nodes_left];
3373 /* Pointer to head of predecessor/successor block list. */
3374 static int_list_block *pred_int_list_blocks;
3376 /* Add a new node to integer list LIST with value VAL.
3377 LIST is a pointer to a list object to allow for different implementations.
3378 If *LIST is initially NULL, the list is empty.
3379 The caller must not care whether the element is added to the front or
3380 to the end of the list (to allow for different implementations). */
3382 static int_list_ptr
3383 add_int_list_node (blk_list, list, val)
3384 int_list_block **blk_list;
3385 int_list **list;
3386 int val;
3388 int_list_ptr p = alloc_int_list_node (blk_list);
3390 p->val = val;
3391 p->next = *list;
3392 *list = p;
3393 return p;
3396 /* Free the blocks of lists at BLK_LIST. */
3398 void
3399 free_int_list (blk_list)
3400 int_list_block **blk_list;
3402 int_list_block *p, *next;
3404 for (p = *blk_list; p != NULL; p = next)
3406 next = p->next;
3407 free (p);
3410 /* Mark list as empty for the next function we compile. */
3411 *blk_list = NULL;
3414 /* Predecessor/successor computation. */
3416 /* Mark PRED_BB a precessor of SUCC_BB,
3417 and conversely SUCC_BB a successor of PRED_BB. */
3419 static void
3420 add_pred_succ (pred_bb, succ_bb, s_preds, s_succs, num_preds, num_succs)
3421 int pred_bb;
3422 int succ_bb;
3423 int_list_ptr *s_preds;
3424 int_list_ptr *s_succs;
3425 int *num_preds;
3426 int *num_succs;
3428 if (succ_bb != EXIT_BLOCK)
3430 add_int_list_node (&pred_int_list_blocks, &s_preds[succ_bb], pred_bb);
3431 num_preds[succ_bb]++;
3433 if (pred_bb != ENTRY_BLOCK)
3435 add_int_list_node (&pred_int_list_blocks, &s_succs[pred_bb], succ_bb);
3436 num_succs[pred_bb]++;
3440 /* Compute the predecessors and successors for each block. */
3441 void
3442 compute_preds_succs (s_preds, s_succs, num_preds, num_succs)
3443 int_list_ptr *s_preds;
3444 int_list_ptr *s_succs;
3445 int *num_preds;
3446 int *num_succs;
3448 int bb, clear_local_bb_vars = 0;
3450 bzero ((char *) s_preds, n_basic_blocks * sizeof (int_list_ptr));
3451 bzero ((char *) s_succs, n_basic_blocks * sizeof (int_list_ptr));
3452 bzero ((char *) num_preds, n_basic_blocks * sizeof (int));
3453 bzero ((char *) num_succs, n_basic_blocks * sizeof (int));
3455 /* This routine can be called after life analysis; in that case
3456 basic_block_drops_in and uid_block_number will not be available
3457 and we must recompute their values. */
3458 if (basic_block_drops_in == NULL || uid_block_number == NULL)
3460 clear_local_bb_vars = 1;
3461 basic_block_drops_in = (char *) alloca (n_basic_blocks);
3462 uid_block_number = (int *) alloca ((get_max_uid () + 1) * sizeof (int));
3464 bzero ((char *) basic_block_drops_in, n_basic_blocks * sizeof (char));
3465 bzero ((char *) uid_block_number, n_basic_blocks * sizeof (int));
3467 /* Scan each basic block setting basic_block_drops_in and
3468 uid_block_number as needed. */
3469 for (bb = 0; bb < n_basic_blocks; bb++)
3471 rtx insn, stop_insn;
3473 if (bb == 0)
3474 stop_insn = NULL_RTX;
3475 else
3476 stop_insn = basic_block_end[bb-1];
3478 /* Look backwards from the start of this block. Stop if we
3479 hit the start of the function or the end of a previous
3480 block. Don't walk backwards through blocks that are just
3481 deleted insns! */
3482 for (insn = PREV_INSN (basic_block_head[bb]);
3483 insn && insn != stop_insn && GET_CODE (insn) == NOTE;
3484 insn = PREV_INSN (insn))
3487 /* Never set basic_block_drops_in for the first block. It is
3488 implicit.
3490 If we stopped on anything other than a BARRIER, then this
3491 block drops in. */
3492 if (bb != 0)
3493 basic_block_drops_in[bb] = (insn ? GET_CODE (insn) != BARRIER : 1);
3495 insn = basic_block_head[bb];
3496 while (insn)
3498 BLOCK_NUM (insn) = bb;
3499 if (insn == basic_block_end[bb])
3500 break;
3501 insn = NEXT_INSN (insn);
3506 for (bb = 0; bb < n_basic_blocks; bb++)
3508 rtx head;
3509 rtx jump;
3511 head = BLOCK_HEAD (bb);
3513 if (GET_CODE (head) == CODE_LABEL)
3514 for (jump = LABEL_REFS (head);
3515 jump != head;
3516 jump = LABEL_NEXTREF (jump))
3518 if (! INSN_DELETED_P (CONTAINING_INSN (jump))
3519 && (GET_CODE (CONTAINING_INSN (jump)) != NOTE
3520 || (NOTE_LINE_NUMBER (CONTAINING_INSN (jump))
3521 != NOTE_INSN_DELETED)))
3522 add_pred_succ (BLOCK_NUM (CONTAINING_INSN (jump)), bb,
3523 s_preds, s_succs, num_preds, num_succs);
3526 jump = BLOCK_END (bb);
3527 /* If this is a RETURN insn or a conditional jump in the last
3528 basic block, or a non-jump insn in the last basic block, then
3529 this block reaches the exit block. */
3530 if ((GET_CODE (jump) == JUMP_INSN && GET_CODE (PATTERN (jump)) == RETURN)
3531 || (((GET_CODE (jump) == JUMP_INSN
3532 && condjump_p (jump) && !simplejump_p (jump))
3533 || GET_CODE (jump) != JUMP_INSN)
3534 && (bb == n_basic_blocks - 1)))
3535 add_pred_succ (bb, EXIT_BLOCK, s_preds, s_succs, num_preds, num_succs);
3537 if (basic_block_drops_in[bb])
3538 add_pred_succ (bb - 1, bb, s_preds, s_succs, num_preds, num_succs);
3541 add_pred_succ (ENTRY_BLOCK, 0, s_preds, s_succs, num_preds, num_succs);
3544 /* If we allocated any variables in temporary storage, clear out the
3545 pointer to the local storage to avoid dangling pointers. */
3546 if (clear_local_bb_vars)
3548 basic_block_drops_in = NULL;
3549 uid_block_number = NULL;
3554 void
3555 dump_bb_data (file, preds, succs)
3556 FILE *file;
3557 int_list_ptr *preds;
3558 int_list_ptr *succs;
3560 int bb;
3561 int_list_ptr p;
3563 fprintf (file, "BB data\n\n");
3564 for (bb = 0; bb < n_basic_blocks; bb++)
3566 fprintf (file, "BB %d, start %d, end %d\n", bb,
3567 INSN_UID (BLOCK_HEAD (bb)), INSN_UID (BLOCK_END (bb)));
3568 fprintf (file, " preds:");
3569 for (p = preds[bb]; p != NULL; p = p->next)
3571 int pred_bb = INT_LIST_VAL (p);
3572 if (pred_bb == ENTRY_BLOCK)
3573 fprintf (file, " entry");
3574 else
3575 fprintf (file, " %d", pred_bb);
3577 fprintf (file, "\n");
3578 fprintf (file, " succs:");
3579 for (p = succs[bb]; p != NULL; p = p->next)
3581 int succ_bb = INT_LIST_VAL (p);
3582 if (succ_bb == EXIT_BLOCK)
3583 fprintf (file, " exit");
3584 else
3585 fprintf (file, " %d", succ_bb);
3587 fprintf (file, "\n");
3589 fprintf (file, "\n");
3592 void
3593 dump_sbitmap (file, bmap)
3594 FILE *file;
3595 sbitmap bmap;
3597 int i,j,n;
3598 int set_size = bmap->size;
3599 int total_bits = bmap->n_bits;
3601 fprintf (file, " ");
3602 for (i = n = 0; i < set_size && n < total_bits; i++)
3604 for (j = 0; j < SBITMAP_ELT_BITS && n < total_bits; j++, n++)
3606 if (n != 0 && n % 10 == 0)
3607 fprintf (file, " ");
3608 fprintf (file, "%d", (bmap->elms[i] & (1L << j)) != 0);
3611 fprintf (file, "\n");
3614 void
3615 dump_sbitmap_vector (file, title, subtitle, bmaps, n_maps)
3616 FILE *file;
3617 char *title, *subtitle;
3618 sbitmap *bmaps;
3619 int n_maps;
3621 int bb;
3623 fprintf (file, "%s\n", title);
3624 for (bb = 0; bb < n_maps; bb++)
3626 fprintf (file, "%s %d\n", subtitle, bb);
3627 dump_sbitmap (file, bmaps[bb]);
3629 fprintf (file, "\n");
3632 /* Free basic block data storage. */
3634 void
3635 free_bb_mem ()
3637 free_int_list (&pred_int_list_blocks);
3640 /* Bitmap manipulation routines. */
3642 /* Allocate a simple bitmap of N_ELMS bits. */
3644 sbitmap
3645 sbitmap_alloc (n_elms)
3646 int n_elms;
3648 int bytes, size, amt;
3649 sbitmap bmap;
3651 size = SBITMAP_SET_SIZE (n_elms);
3652 bytes = size * sizeof (SBITMAP_ELT_TYPE);
3653 amt = (sizeof (struct simple_bitmap_def)
3654 + bytes - sizeof (SBITMAP_ELT_TYPE));
3655 bmap = (sbitmap) xmalloc (amt);
3656 bmap->n_bits = n_elms;
3657 bmap->size = size;
3658 bmap->bytes = bytes;
3659 return bmap;
3662 /* Allocate a vector of N_VECS bitmaps of N_ELMS bits. */
3664 sbitmap *
3665 sbitmap_vector_alloc (n_vecs, n_elms)
3666 int n_vecs, n_elms;
3668 int i, bytes, offset, elm_bytes, size, amt, vector_bytes;
3669 sbitmap *bitmap_vector;
3671 size = SBITMAP_SET_SIZE (n_elms);
3672 bytes = size * sizeof (SBITMAP_ELT_TYPE);
3673 elm_bytes = (sizeof (struct simple_bitmap_def)
3674 + bytes - sizeof (SBITMAP_ELT_TYPE));
3675 vector_bytes = n_vecs * sizeof (sbitmap *);
3677 /* Round up `vector_bytes' to account for the alignment requirements
3678 of an sbitmap. One could allocate the vector-table and set of sbitmaps
3679 separately, but that requires maintaining two pointers or creating
3680 a cover struct to hold both pointers (so our result is still just
3681 one pointer). Neither is a bad idea, but this is simpler for now. */
3683 /* Based on DEFAULT_ALIGNMENT computation in obstack.c. */
3684 struct { char x; SBITMAP_ELT_TYPE y; } align;
3685 int alignment = (char *) & align.y - & align.x;
3686 vector_bytes = (vector_bytes + alignment - 1) & ~ (alignment - 1);
3689 amt = vector_bytes + (n_vecs * elm_bytes);
3690 bitmap_vector = (sbitmap *) xmalloc (amt);
3692 for (i = 0, offset = vector_bytes;
3693 i < n_vecs;
3694 i++, offset += elm_bytes)
3696 sbitmap b = (sbitmap) ((char *) bitmap_vector + offset);
3697 bitmap_vector[i] = b;
3698 b->n_bits = n_elms;
3699 b->size = size;
3700 b->bytes = bytes;
3703 return bitmap_vector;
3706 /* Copy sbitmap SRC to DST. */
3708 void
3709 sbitmap_copy (dst, src)
3710 sbitmap dst, src;
3712 bcopy (src->elms, dst->elms, sizeof (SBITMAP_ELT_TYPE) * dst->size);
3715 /* Zero all elements in a bitmap. */
3717 void
3718 sbitmap_zero (bmap)
3719 sbitmap bmap;
3721 bzero ((char *) bmap->elms, bmap->bytes);
3724 /* Set to ones all elements in a bitmap. */
3726 void
3727 sbitmap_ones (bmap)
3728 sbitmap bmap;
3730 memset (bmap->elms, -1, bmap->bytes);
3733 /* Zero a vector of N_VECS bitmaps. */
3735 void
3736 sbitmap_vector_zero (bmap, n_vecs)
3737 sbitmap *bmap;
3738 int n_vecs;
3740 int i;
3742 for (i = 0; i < n_vecs; i++)
3743 sbitmap_zero (bmap[i]);
3746 /* Set to ones a vector of N_VECS bitmaps. */
3748 void
3749 sbitmap_vector_ones (bmap, n_vecs)
3750 sbitmap *bmap;
3751 int n_vecs;
3753 int i;
3755 for (i = 0; i < n_vecs; i++)
3756 sbitmap_ones (bmap[i]);
3759 /* Set DST to be A union (B - C).
3760 DST = A | (B & ~C).
3761 Return non-zero if any change is made. */
3764 sbitmap_union_of_diff (dst, a, b, c)
3765 sbitmap dst, a, b, c;
3767 int i,changed;
3768 sbitmap_ptr dstp, ap, bp, cp;
3770 changed = 0;
3771 dstp = dst->elms;
3772 ap = a->elms;
3773 bp = b->elms;
3774 cp = c->elms;
3775 for (i = 0; i < dst->size; i++)
3777 SBITMAP_ELT_TYPE tmp = *ap | (*bp & ~*cp);
3778 if (*dstp != tmp)
3779 changed = 1;
3780 *dstp = tmp;
3781 dstp++; ap++; bp++; cp++;
3783 return changed;
3786 /* Set bitmap DST to the bitwise negation of the bitmap SRC. */
3788 void
3789 sbitmap_not (dst, src)
3790 sbitmap dst, src;
3792 int i;
3793 sbitmap_ptr dstp, ap;
3795 dstp = dst->elms;
3796 ap = src->elms;
3797 for (i = 0; i < dst->size; i++)
3799 SBITMAP_ELT_TYPE tmp = ~(*ap);
3800 *dstp = tmp;
3801 dstp++; ap++;
3805 /* Set the bits in DST to be the difference between the bits
3806 in A and the bits in B. i.e. dst = a - b.
3807 The - operator is implemented as a & (~b). */
3809 void
3810 sbitmap_difference (dst, a, b)
3811 sbitmap dst, a, b;
3813 int i;
3814 sbitmap_ptr dstp, ap, bp;
3816 dstp = dst->elms;
3817 ap = a->elms;
3818 bp = b->elms;
3819 for (i = 0; i < dst->size; i++)
3820 *dstp++ = *ap++ & (~*bp++);
3823 /* Set DST to be (A and B)).
3824 Return non-zero if any change is made. */
3827 sbitmap_a_and_b (dst, a, b)
3828 sbitmap dst, a, b;
3830 int i,changed;
3831 sbitmap_ptr dstp, ap, bp;
3833 changed = 0;
3834 dstp = dst->elms;
3835 ap = a->elms;
3836 bp = b->elms;
3837 for (i = 0; i < dst->size; i++)
3839 SBITMAP_ELT_TYPE tmp = *ap & *bp;
3840 if (*dstp != tmp)
3841 changed = 1;
3842 *dstp = tmp;
3843 dstp++; ap++; bp++;
3845 return changed;
3847 /* Set DST to be (A or B)).
3848 Return non-zero if any change is made. */
3851 sbitmap_a_or_b (dst, a, b)
3852 sbitmap dst, a, b;
3854 int i,changed;
3855 sbitmap_ptr dstp, ap, bp;
3857 changed = 0;
3858 dstp = dst->elms;
3859 ap = a->elms;
3860 bp = b->elms;
3861 for (i = 0; i < dst->size; i++)
3863 SBITMAP_ELT_TYPE tmp = *ap | *bp;
3864 if (*dstp != tmp)
3865 changed = 1;
3866 *dstp = tmp;
3867 dstp++; ap++; bp++;
3869 return changed;
3872 /* Set DST to be (A or (B and C)).
3873 Return non-zero if any change is made. */
3876 sbitmap_a_or_b_and_c (dst, a, b, c)
3877 sbitmap dst, a, b, c;
3879 int i,changed;
3880 sbitmap_ptr dstp, ap, bp, cp;
3882 changed = 0;
3883 dstp = dst->elms;
3884 ap = a->elms;
3885 bp = b->elms;
3886 cp = c->elms;
3887 for (i = 0; i < dst->size; i++)
3889 SBITMAP_ELT_TYPE tmp = *ap | (*bp & *cp);
3890 if (*dstp != tmp)
3891 changed = 1;
3892 *dstp = tmp;
3893 dstp++; ap++; bp++; cp++;
3895 return changed;
3898 /* Set DST to be (A ann (B or C)).
3899 Return non-zero if any change is made. */
3902 sbitmap_a_and_b_or_c (dst, a, b, c)
3903 sbitmap dst, a, b, c;
3905 int i,changed;
3906 sbitmap_ptr dstp, ap, bp, cp;
3908 changed = 0;
3909 dstp = dst->elms;
3910 ap = a->elms;
3911 bp = b->elms;
3912 cp = c->elms;
3913 for (i = 0; i < dst->size; i++)
3915 SBITMAP_ELT_TYPE tmp = *ap & (*bp | *cp);
3916 if (*dstp != tmp)
3917 changed = 1;
3918 *dstp = tmp;
3919 dstp++; ap++; bp++; cp++;
3921 return changed;
3924 /* Set the bitmap DST to the intersection of SRC of all predecessors or
3925 successors of block number BB (PRED_SUCC says which). */
3927 void
3928 sbitmap_intersect_of_predsucc (dst, src, bb, pred_succ)
3929 sbitmap dst;
3930 sbitmap *src;
3931 int bb;
3932 int_list_ptr *pred_succ;
3934 int_list_ptr ps;
3935 int ps_bb;
3936 int set_size = dst->size;
3938 ps = pred_succ[bb];
3940 /* It is possible that there are no predecessors(/successors).
3941 This can happen for example in unreachable code. */
3943 if (ps == NULL)
3945 /* In APL-speak this is the `and' reduction of the empty set and thus
3946 the result is the identity for `and'. */
3947 sbitmap_ones (dst);
3948 return;
3951 /* Set result to first predecessor/successor. */
3953 for ( ; ps != NULL; ps = ps->next)
3955 ps_bb = INT_LIST_VAL (ps);
3956 if (ps_bb == ENTRY_BLOCK || ps_bb == EXIT_BLOCK)
3957 continue;
3958 sbitmap_copy (dst, src[ps_bb]);
3959 /* Break out since we're only doing first predecessor. */
3960 break;
3962 if (ps == NULL)
3963 return;
3965 /* Now do the remaining predecessors/successors. */
3967 for (ps = ps->next; ps != NULL; ps = ps->next)
3969 int i;
3970 sbitmap_ptr p,r;
3972 ps_bb = INT_LIST_VAL (ps);
3973 if (ps_bb == ENTRY_BLOCK || ps_bb == EXIT_BLOCK)
3974 continue;
3976 p = src[ps_bb]->elms;
3977 r = dst->elms;
3979 for (i = 0; i < set_size; i++)
3980 *r++ &= *p++;
3984 /* Set the bitmap DST to the intersection of SRC of all predecessors
3985 of block number BB. */
3987 void
3988 sbitmap_intersect_of_predecessors (dst, src, bb, s_preds)
3989 sbitmap dst;
3990 sbitmap *src;
3991 int bb;
3992 int_list_ptr *s_preds;
3994 sbitmap_intersect_of_predsucc (dst, src, bb, s_preds);
3997 /* Set the bitmap DST to the intersection of SRC of all successors
3998 of block number BB. */
4000 void
4001 sbitmap_intersect_of_successors (dst, src, bb, s_succs)
4002 sbitmap dst;
4003 sbitmap *src;
4004 int bb;
4005 int_list_ptr *s_succs;
4007 sbitmap_intersect_of_predsucc (dst, src, bb, s_succs);
4010 /* Set the bitmap DST to the union of SRC of all predecessors/successors of
4011 block number BB. */
4013 void
4014 sbitmap_union_of_predsucc (dst, src, bb, pred_succ)
4015 sbitmap dst;
4016 sbitmap *src;
4017 int bb;
4018 int_list_ptr *pred_succ;
4020 int_list_ptr ps;
4021 int ps_bb;
4022 int set_size = dst->size;
4024 ps = pred_succ[bb];
4026 /* It is possible that there are no predecessors(/successors).
4027 This can happen for example in unreachable code. */
4029 if (ps == NULL)
4031 /* In APL-speak this is the `or' reduction of the empty set and thus
4032 the result is the identity for `or'. */
4033 sbitmap_zero (dst);
4034 return;
4037 /* Set result to first predecessor/successor. */
4039 for ( ; ps != NULL; ps = ps->next)
4041 ps_bb = INT_LIST_VAL (ps);
4042 if (ps_bb == ENTRY_BLOCK || ps_bb == EXIT_BLOCK)
4043 continue;
4044 sbitmap_copy (dst, src[ps_bb]);
4045 /* Break out since we're only doing first predecessor. */
4046 break;
4048 if (ps == NULL)
4049 return;
4051 /* Now do the remaining predecessors/successors. */
4053 for (ps = ps->next; ps != NULL; ps = ps->next)
4055 int i;
4056 sbitmap_ptr p,r;
4058 ps_bb = INT_LIST_VAL (ps);
4059 if (ps_bb == ENTRY_BLOCK || ps_bb == EXIT_BLOCK)
4060 continue;
4062 p = src[ps_bb]->elms;
4063 r = dst->elms;
4065 for (i = 0; i < set_size; i++)
4066 *r++ |= *p++;
4070 /* Set the bitmap DST to the union of SRC of all predecessors of
4071 block number BB. */
4073 void
4074 sbitmap_union_of_predecessors (dst, src, bb, s_preds)
4075 sbitmap dst;
4076 sbitmap *src;
4077 int bb;
4078 int_list_ptr *s_preds;
4080 sbitmap_union_of_predsucc (dst, src, bb, s_preds);
4083 /* Set the bitmap DST to the union of SRC of all predecessors of
4084 block number BB. */
4086 void
4087 sbitmap_union_of_successors (dst, src, bb, s_succ)
4088 sbitmap dst;
4089 sbitmap *src;
4090 int bb;
4091 int_list_ptr *s_succ;
4093 sbitmap_union_of_predsucc (dst, src, bb, s_succ);
4096 /* Compute dominator relationships. */
4097 void
4098 compute_dominators (dominators, post_dominators, s_preds, s_succs)
4099 sbitmap *dominators;
4100 sbitmap *post_dominators;
4101 int_list_ptr *s_preds;
4102 int_list_ptr *s_succs;
4104 int bb, changed, passes;
4105 sbitmap *temp_bitmap;
4107 temp_bitmap = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
4108 sbitmap_vector_ones (dominators, n_basic_blocks);
4109 sbitmap_vector_ones (post_dominators, n_basic_blocks);
4110 sbitmap_vector_zero (temp_bitmap, n_basic_blocks);
4112 sbitmap_zero (dominators[0]);
4113 SET_BIT (dominators[0], 0);
4115 sbitmap_zero (post_dominators[n_basic_blocks-1]);
4116 SET_BIT (post_dominators[n_basic_blocks-1], 0);
4118 passes = 0;
4119 changed = 1;
4120 while (changed)
4122 changed = 0;
4123 for (bb = 1; bb < n_basic_blocks; bb++)
4125 sbitmap_intersect_of_predecessors (temp_bitmap[bb], dominators,
4126 bb, s_preds);
4127 SET_BIT (temp_bitmap[bb], bb);
4128 changed |= sbitmap_a_and_b (dominators[bb],
4129 dominators[bb],
4130 temp_bitmap[bb]);
4131 sbitmap_intersect_of_successors (temp_bitmap[bb], post_dominators,
4132 bb, s_succs);
4133 SET_BIT (temp_bitmap[bb], bb);
4134 changed |= sbitmap_a_and_b (post_dominators[bb],
4135 post_dominators[bb],
4136 temp_bitmap[bb]);
4138 passes++;
4141 free (temp_bitmap);
4144 /* Count for a single SET rtx, X. */
4146 static void
4147 count_reg_sets_1 (x)
4148 rtx x;
4150 register int regno;
4151 register rtx reg = SET_DEST (x);
4153 /* Find the register that's set/clobbered. */
4154 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
4155 || GET_CODE (reg) == SIGN_EXTRACT
4156 || GET_CODE (reg) == STRICT_LOW_PART)
4157 reg = XEXP (reg, 0);
4159 if (GET_CODE (reg) == PARALLEL
4160 && GET_MODE (reg) == BLKmode)
4162 register int i;
4163 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
4164 count_reg_sets_1 (XVECEXP (reg, 0, i));
4165 return;
4168 if (GET_CODE (reg) == REG)
4170 regno = REGNO (reg);
4171 if (regno >= FIRST_PSEUDO_REGISTER)
4173 /* Count (weighted) references, stores, etc. This counts a
4174 register twice if it is modified, but that is correct. */
4175 REG_N_SETS (regno)++;
4177 REG_N_REFS (regno) += loop_depth;
4182 /* Increment REG_N_SETS for each SET or CLOBBER found in X; also increment
4183 REG_N_REFS by the current loop depth for each SET or CLOBBER found. */
4185 static void
4186 count_reg_sets (x)
4187 rtx x;
4189 register RTX_CODE code = GET_CODE (x);
4191 if (code == SET || code == CLOBBER)
4192 count_reg_sets_1 (x);
4193 else if (code == PARALLEL)
4195 register int i;
4196 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
4198 code = GET_CODE (XVECEXP (x, 0, i));
4199 if (code == SET || code == CLOBBER)
4200 count_reg_sets_1 (XVECEXP (x, 0, i));
4205 /* Increment REG_N_REFS by the current loop depth each register reference
4206 found in X. */
4208 static void
4209 count_reg_references (x)
4210 rtx x;
4212 register RTX_CODE code;
4214 retry:
4215 code = GET_CODE (x);
4216 switch (code)
4218 case LABEL_REF:
4219 case SYMBOL_REF:
4220 case CONST_INT:
4221 case CONST:
4222 case CONST_DOUBLE:
4223 case PC:
4224 case ADDR_VEC:
4225 case ADDR_DIFF_VEC:
4226 case ASM_INPUT:
4227 return;
4229 #ifdef HAVE_cc0
4230 case CC0:
4231 return;
4232 #endif
4234 case CLOBBER:
4235 /* If we are clobbering a MEM, mark any registers inside the address
4236 as being used. */
4237 if (GET_CODE (XEXP (x, 0)) == MEM)
4238 count_reg_references (XEXP (XEXP (x, 0), 0));
4239 return;
4241 case SUBREG:
4242 /* While we're here, optimize this case. */
4243 x = SUBREG_REG (x);
4245 /* In case the SUBREG is not of a register, don't optimize */
4246 if (GET_CODE (x) != REG)
4248 count_reg_references (x);
4249 return;
4252 /* ... fall through ... */
4254 case REG:
4255 if (REGNO (x) >= FIRST_PSEUDO_REGISTER)
4256 REG_N_REFS (REGNO (x)) += loop_depth;
4257 return;
4259 case SET:
4261 register rtx testreg = SET_DEST (x);
4262 int mark_dest = 0;
4264 /* If storing into MEM, don't show it as being used. But do
4265 show the address as being used. */
4266 if (GET_CODE (testreg) == MEM)
4268 count_reg_references (XEXP (testreg, 0));
4269 count_reg_references (SET_SRC (x));
4270 return;
4273 /* Storing in STRICT_LOW_PART is like storing in a reg
4274 in that this SET might be dead, so ignore it in TESTREG.
4275 but in some other ways it is like using the reg.
4277 Storing in a SUBREG or a bit field is like storing the entire
4278 register in that if the register's value is not used
4279 then this SET is not needed. */
4280 while (GET_CODE (testreg) == STRICT_LOW_PART
4281 || GET_CODE (testreg) == ZERO_EXTRACT
4282 || GET_CODE (testreg) == SIGN_EXTRACT
4283 || GET_CODE (testreg) == SUBREG)
4285 /* Modifying a single register in an alternate mode
4286 does not use any of the old value. But these other
4287 ways of storing in a register do use the old value. */
4288 if (GET_CODE (testreg) == SUBREG
4289 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
4291 else
4292 mark_dest = 1;
4294 testreg = XEXP (testreg, 0);
4297 /* If this is a store into a register,
4298 recursively scan the value being stored. */
4300 if ((GET_CODE (testreg) == PARALLEL
4301 && GET_MODE (testreg) == BLKmode)
4302 || GET_CODE (testreg) == REG)
4304 count_reg_references (SET_SRC (x));
4305 if (mark_dest)
4306 count_reg_references (SET_DEST (x));
4307 return;
4310 break;
4312 default:
4313 break;
4316 /* Recursively scan the operands of this expression. */
4319 register char *fmt = GET_RTX_FORMAT (code);
4320 register int i;
4322 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4324 if (fmt[i] == 'e')
4326 /* Tail recursive case: save a function call level. */
4327 if (i == 0)
4329 x = XEXP (x, 0);
4330 goto retry;
4332 count_reg_references (XEXP (x, i));
4334 else if (fmt[i] == 'E')
4336 register int j;
4337 for (j = 0; j < XVECLEN (x, i); j++)
4338 count_reg_references (XVECEXP (x, i, j));
4344 /* Recompute register set/reference counts immediately prior to register
4345 allocation.
4347 This avoids problems with set/reference counts changing to/from values
4348 which have special meanings to the register allocators.
4350 Additionally, the reference counts are the primary component used by the
4351 register allocators to prioritize pseudos for allocation to hard regs.
4352 More accurate reference counts generally lead to better register allocation.
4354 It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and
4355 possibly other information which is used by the register allocators. */
4357 void
4358 recompute_reg_usage (f)
4359 rtx f;
4361 rtx insn;
4362 int i, max_reg;
4364 /* Clear out the old data. */
4365 max_reg = max_reg_num ();
4366 for (i = FIRST_PSEUDO_REGISTER; i < max_reg; i++)
4368 REG_N_SETS (i) = 0;
4369 REG_N_REFS (i) = 0;
4372 /* Scan each insn in the chain and count how many times each register is
4373 set/used. */
4374 loop_depth = 1;
4375 for (insn = f; insn; insn = NEXT_INSN (insn))
4377 /* Keep track of loop depth. */
4378 if (GET_CODE (insn) == NOTE)
4380 /* Look for loop boundaries. */
4381 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END)
4382 loop_depth--;
4383 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
4384 loop_depth++;
4386 /* If we have LOOP_DEPTH == 0, there has been a bookkeeping error.
4387 Abort now rather than setting register status incorrectly. */
4388 if (loop_depth == 0)
4389 abort ();
4391 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
4393 rtx links;
4395 /* This call will increment REG_N_SETS for each SET or CLOBBER
4396 of a register in INSN. It will also increment REG_N_REFS
4397 by the loop depth for each set of a register in INSN. */
4398 count_reg_sets (PATTERN (insn));
4400 /* count_reg_sets does not detect autoincrement address modes, so
4401 detect them here by looking at the notes attached to INSN. */
4402 for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
4404 if (REG_NOTE_KIND (links) == REG_INC)
4405 /* Count (weighted) references, stores, etc. This counts a
4406 register twice if it is modified, but that is correct. */
4407 REG_N_SETS (REGNO (XEXP (links, 0)))++;
4410 /* This call will increment REG_N_REFS by the current loop depth for
4411 each reference to a register in INSN. */
4412 count_reg_references (PATTERN (insn));
4414 /* count_reg_references will not include counts for arguments to
4415 function calls, so detect them here by examining the
4416 CALL_INSN_FUNCTION_USAGE data. */
4417 if (GET_CODE (insn) == CALL_INSN)
4419 rtx note;
4421 for (note = CALL_INSN_FUNCTION_USAGE (insn);
4422 note;
4423 note = XEXP (note, 1))
4424 if (GET_CODE (XEXP (note, 0)) == USE)
4425 count_reg_references (SET_DEST (XEXP (note, 0)));