/* Data flow analysis for GNU compiler.
   Copyright (C) 1987, 88, 92-97, 1998 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

/* This file contains the data flow analysis pass of the compiler.
   It computes data flow information
   which tells combine_instructions which insns to consider combining
   and controls register allocation.

   Additional data flow information that is too bulky to record
   is generated during the analysis, and is used at that time to
   create autoincrement and autodecrement addressing.

   The first step is dividing the function into basic blocks.
   find_basic_blocks does this.  Then life_analysis determines
   where each register is live and where it is dead.

   ** find_basic_blocks **

   find_basic_blocks divides the current function's rtl
   into basic blocks.  It records the beginnings and ends of the
   basic blocks in the vectors basic_block_head and basic_block_end,
   and the number of blocks in n_basic_blocks.

   find_basic_blocks also finds any unreachable loops
   and deletes them.

   ** life_analysis **

   life_analysis is called immediately after find_basic_blocks.
   It uses the basic block information to determine where each
   hard or pseudo register is live.

   ** live-register info **

   The information about where each register is live is in two parts:
   the REG_NOTES of insns, and the vector basic_block_live_at_start.

   basic_block_live_at_start has an element for each basic block,
   and the element is a bit-vector with a bit for each hard or pseudo
   register.  The bit is 1 if the register is live at the beginning
   of the basic block.

   Two types of elements can be added to an insn's REG_NOTES.
   A REG_DEAD note is added to an insn's REG_NOTES for any register
   that meets both of two conditions:  The value in the register is not
   needed in subsequent insns and the insn does not replace the value in
   the register (in the case of multi-word hard registers, the value in
   each register must be replaced by the insn to avoid a REG_DEAD note).

   In the vast majority of cases, an object in a REG_DEAD note will be
   used somewhere in the insn.  The (rare) exception to this is if an
   insn uses a multi-word hard register and only some of the registers are
   needed in subsequent insns.  In that case, REG_DEAD notes will be
   provided for those hard registers that are not subsequently needed.
   Partial REG_DEAD notes of this type do not occur when an insn sets
   only some of the hard registers used in such a multi-word operand;
   omitting REG_DEAD notes for objects stored in an insn is optional and
   the desire to do so does not justify the complexity of the partial
   REG_DEAD notes.

   REG_UNUSED notes are added for each register that is set by the insn
   but is unused subsequently (if every register set by the insn is unused
   and the insn does not reference memory or have some other side-effect,
   the insn is deleted instead).  If only part of a multi-word hard
   register is used in a subsequent insn, REG_UNUSED notes are made for
   the parts that will not be used.

   To determine which registers are live after any insn, one can
   start from the beginning of the basic block and scan insns, noting
   which registers are set by each insn and which die there
   (see the illustrative sketch just after this comment).

   ** Other actions of life_analysis **

   life_analysis sets up the LOG_LINKS fields of insns because the
   information needed to do so is readily available.

   life_analysis deletes insns whose only effect is to store a value
   that is never used.

   life_analysis notices cases where a reference to a register as
   a memory address can be combined with a preceding or following
   incrementation or decrementation of the register.  The separate
   instruction to increment or decrement is deleted and the address
   is changed to a POST_INC or similar rtx.

   Each time an incrementing or decrementing address is created,
   a REG_INC element is added to the insn's REG_NOTES list.

   life_analysis fills in certain vectors containing information about
   register usage: reg_n_refs, reg_n_deaths, reg_n_sets, reg_live_length,
   reg_n_calls_crosses and reg_basic_block.  */
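/* Illustrative sketch (not part of this pass): one way to consume the
   live-register info described above.  Starting from a regset LIVE that was
   initialized from basic_block_live_at_start of BLOCK, a forward walk of the
   block applies REG_DEAD and REG_UNUSED notes so that after each insn LIVE
   holds the registers live just after it.  The helper name and its
   single-set-only handling are hypothetical simplifications.  */
#if 0
static void
sketch_regs_live_after (target, live, block)
     rtx target;
     regset live;
     int block;
{
  rtx insn, link, set;

  for (insn = basic_block_head[block]; ; insn = NEXT_INSN (insn))
    {
      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
        {
          /* Registers named in REG_DEAD notes die in this insn.  */
          for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
            if (REG_NOTE_KIND (link) == REG_DEAD
                && GET_CODE (XEXP (link, 0)) == REG)
              CLEAR_REGNO_REG_SET (live, REGNO (XEXP (link, 0)));

          /* A register set here is live afterwards, unless the insn
             carries a REG_UNUSED note for it.  */
          set = single_set (insn);
          if (set != 0 && GET_CODE (SET_DEST (set)) == REG
              && ! find_reg_note (insn, REG_UNUSED, SET_DEST (set)))
            SET_REGNO_REG_SET (live, REGNO (SET_DEST (set)));
        }
      if (insn == target)
        break;
    }
}
#endif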
114 #include "basic-block.h"
115 #include "insn-config.h"
117 #include "hard-reg-set.h"
124 #define obstack_chunk_alloc xmalloc
125 #define obstack_chunk_free free
/* The contents of the current function definition are allocated
   in this obstack, and all are freed at the end of the function.
   For top-level functions, this is temporary_obstack.
   Separate obstacks are made for nested functions.  */

extern struct obstack *function_obstack;

/* List of labels that must never be deleted.  */
extern rtx forced_labels;
/* Get the basic block number of an insn.
   This info should not be expected to remain available
   after the end of life_analysis.  */

/* This is the limit of the allocated space in the following two arrays.  */

static int max_uid_for_flow;

#define BLOCK_NUM(INSN)  uid_block_number[INSN_UID (INSN)]

/* This is where the BLOCK_NUM values are really stored.
   This is set up by find_basic_blocks and used there and in life_analysis,
   and then freed.  */

int *uid_block_number;

/* INSN_VOLATILE (insn) is 1 if the insn refers to anything volatile.  */

#define INSN_VOLATILE(INSN) uid_volatile[INSN_UID (INSN)]
static char *uid_volatile;
/* Number of basic blocks in the current function.  */

int n_basic_blocks;

/* Maximum register number used in this function, plus one.  */

int max_regno;

/* Maximum number of SCRATCH rtx's used in any basic block of this
   function.  */

int max_scratch;

/* Number of SCRATCH rtx's in the current block.  */

static int num_scratch;

/* Indexed by n, giving various register information.  */

reg_info *reg_n_info;

/* Size of the reg_n_info table.  */

unsigned int reg_n_max;
/* Element N is the next insn that uses (hard or pseudo) register number N
   within the current basic block; or zero, if there is no such insn.
   This is valid only during the final backward scan in propagate_block.  */

static rtx *reg_next_use;

/* Size of a regset for the current function,
   in (1) bytes and (2) elements.  */

int regset_bytes;
int regset_size;
/* Element N is first insn in basic block N.
   This info lasts until we finish compiling the function.  */

rtx *basic_block_head;

/* Element N is last insn in basic block N.
   This info lasts until we finish compiling the function.  */

rtx *basic_block_end;

/* Element N indicates whether basic block N can be reached through a
   computed jump.  */

char *basic_block_computed_jump_target;

/* Element N is a regset describing the registers live
   at the start of basic block N.
   This info lasts until we finish compiling the function.  */

regset *basic_block_live_at_start;
/* Regset of regs live when calls to `setjmp'-like functions happen.  */

regset regs_live_at_setjmp;

/* List made of EXPR_LIST rtx's which gives pairs of pseudo registers
   that have to go in the same hard reg.
   The first two regs in the list are a pair, and the next two
   are another pair, etc.  */

rtx regs_may_share;
/* Element N is nonzero if control can drop into basic block N
   from the preceding basic block.  Freed after life_analysis.  */

static char *basic_block_drops_in;

/* Element N is depth within loops of the last insn in basic block number N.
   Freed after life_analysis.  */

static short *basic_block_loop_depth;

/* Element N nonzero if basic block N can actually be reached.
   Vector exists only during find_basic_blocks.  */

static char *block_live_static;

/* Depth within loops of basic block being scanned for lifetime analysis,
   plus one.  This is the weight attached to references to registers.  */

static int loop_depth;

/* During propagate_block, this is non-zero if the value of CC0 is live.  */

static int cc0_live;

/* During propagate_block, this contains the last MEM stored into.  It
   is used to eliminate consecutive stores to the same location.  */

static rtx last_mem_set;

/* Set of registers that may be eliminable.  These are handled specially
   in updating regs_ever_live.  */

static HARD_REG_SET elim_reg_set;
/* Forward declarations.  */
static void find_basic_blocks_1		PROTO((rtx, rtx, int));
static void mark_label_ref		PROTO((rtx, rtx, int));
static void life_analysis_1		PROTO((rtx, int));
static void propagate_block		PROTO((regset, rtx, rtx, int,
					       regset, int));
static rtx flow_delete_insn		PROTO((rtx));
static int insn_dead_p			PROTO((rtx, regset, int));
static int libcall_dead_p		PROTO((rtx, regset, rtx, rtx));
static void mark_set_regs		PROTO((regset, regset, rtx,
					       rtx, regset));
static void mark_set_1			PROTO((regset, regset, rtx,
					       rtx, regset));
static void find_auto_inc		PROTO((regset, rtx, rtx));
static int try_pre_increment_1		PROTO((rtx));
static int try_pre_increment		PROTO((rtx, rtx, HOST_WIDE_INT));
static void mark_used_regs		PROTO((regset, regset, rtx, int, rtx));
void dump_flow_info			PROTO((FILE *));
static void add_pred_succ		PROTO ((int, int, int_list_ptr *,
						int_list_ptr *, int *, int *));
static int_list_ptr alloc_int_list_node	PROTO ((int_list_block **));
static int_list_ptr add_int_list_node	PROTO ((int_list_block **,
						int_list_ptr *, int));
static void init_regset_vector		PROTO ((regset *, int,
						struct obstack *));
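/* Illustrative sketch (not part of this pass) of the rule that
   find_basic_blocks and find_basic_blocks_1 below use to decide whether an
   insn begins a new basic block: a label always does, and so does any real
   insn that follows a jump, a barrier, or a call that might transfer control
   elsewhere (via a nonlocal label or an exception region).  The helper name
   and its third argument are hypothetical.  */
#if 0
static int
sketch_begins_block_p (code, prev_code, call_may_leave_block)
     enum rtx_code code, prev_code;
     int call_may_leave_block;
{
  return (code == CODE_LABEL
          || (GET_RTX_CLASS (code) == 'i'
              && (prev_code == JUMP_INSN
                  || prev_code == BARRIER
                  || (prev_code == CALL_INSN && call_may_leave_block))));
}
#endif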
/* Find basic blocks of the current function.
   F is the first insn of the function and NREGS the number of register numbers
   in use.

   LIVE_REACHABLE_P is non-zero if the caller needs all live blocks to
   be reachable.  This turns on a kludge that causes the control flow
   information to be inaccurate and not suitable for passes like GCSE.  */

void
find_basic_blocks (f, nregs, file, live_reachable_p)
     rtx f;
     int nregs;
     FILE *file;
     int live_reachable_p;
  rtx nonlocal_label_list = nonlocal_label_rtx_list ();
  int in_libcall_block = 0;

  /* Count the basic blocks.  Also find maximum insn uid value used.  */

  register RTX_CODE prev_code = JUMP_INSN;
  register RTX_CODE code;

  max_uid_for_flow = 0;

  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
      /* Track when we are inside a LIBCALL block.  */
      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
          && find_reg_note (insn, REG_LIBCALL, NULL_RTX))
        in_libcall_block = 1;

      code = GET_CODE (insn);
      if (INSN_UID (insn) > max_uid_for_flow)
        max_uid_for_flow = INSN_UID (insn);
      if (code == CODE_LABEL
          || (GET_RTX_CLASS (code) == 'i'
              && (prev_code == JUMP_INSN
                  || (prev_code == CALL_INSN
                      && (nonlocal_label_list != 0 || eh_region)
                      && ! in_libcall_block)
                  || prev_code == BARRIER)))
        i++;

      if (code == CALL_INSN && find_reg_note (insn, REG_RETVAL, NULL_RTX))

      else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)

      else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)

      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
          && find_reg_note (insn, REG_RETVAL, NULL_RTX))
        in_libcall_block = 0;
  /* Leave space for insns life_analysis makes in some cases for auto-inc.
     These cases are rare, so we don't need too much space.  */
  max_uid_for_flow += max_uid_for_flow / 10;

  /* Allocate some tables that last till end of compiling this function
     and some needed only in find_basic_blocks and life_analysis.  */

  basic_block_head = (rtx *) xmalloc (n_basic_blocks * sizeof (rtx));
  basic_block_end = (rtx *) xmalloc (n_basic_blocks * sizeof (rtx));
  basic_block_drops_in = (char *) xmalloc (n_basic_blocks);
  basic_block_computed_jump_target = (char *) oballoc (n_basic_blocks);
  basic_block_loop_depth = (short *) xmalloc (n_basic_blocks * sizeof (short));
  uid_block_number
    = (int *) xmalloc ((max_uid_for_flow + 1) * sizeof (int));
  uid_volatile = (char *) xmalloc (max_uid_for_flow + 1);
  bzero (uid_volatile, max_uid_for_flow + 1);

  find_basic_blocks_1 (f, nonlocal_label_list, live_reachable_p);
/* Find all basic blocks of the function whose first insn is F.
   Store the correct data in the tables that describe the basic blocks,
   set up the chains of references for each CODE_LABEL, and
   delete any entire basic blocks that cannot be reached.

   NONLOCAL_LABEL_LIST is a list of non-local labels in the function.
   Blocks that are otherwise unreachable may be reachable with a non-local
   goto.

   LIVE_REACHABLE_P is non-zero if the caller needs all live blocks to
   be reachable.  This turns on a kludge that causes the control flow
   information to be inaccurate and not suitable for passes like GCSE.  */

static void
find_basic_blocks_1 (f, nonlocal_label_list, live_reachable_p)
     rtx f, nonlocal_label_list;
     int live_reachable_p;
  register char *block_live = (char *) alloca (n_basic_blocks);
  register char *block_marked = (char *) alloca (n_basic_blocks);
  /* An array of CODE_LABELs, indexed by UID for the start of the active
     EH handler for each insn in F.  */
  int *active_eh_region;
  int *nested_eh_region;
  /* List of label_refs to all labels whose addresses are taken
     and used as data.  */
  rtx label_value_list;
  rtx x, note, eh_note;
  enum rtx_code prev_code, code;
  int in_libcall_block = 0;
  int deleted_handler = 0;
  active_eh_region = (int *) alloca ((max_uid_for_flow + 1) * sizeof (int));
  nested_eh_region = (int *) alloca ((max_label_num () + 1) * sizeof (int));

  label_value_list = 0;
  block_live_static = block_live;
  bzero (block_live, n_basic_blocks);
  bzero (block_marked, n_basic_blocks);
  bzero (basic_block_computed_jump_target, n_basic_blocks);
  bzero ((char *) active_eh_region, (max_uid_for_flow + 1) * sizeof (int));
  bzero ((char *) nested_eh_region, (max_label_num () + 1) * sizeof (int));
  current_function_has_computed_jump = 0;
  /* Initialize with just block 0 reachable and no blocks marked.  */
  if (n_basic_blocks > 0)
    block_live[0] = 1;

  /* Initialize the ref chain of each label to 0.  Record where all the
     blocks start and end and their depth in loops.  For each insn, record
     the block it is in.  Also mark as reachable any blocks headed by labels
     that must not be deleted.  */

  for (eh_note = NULL_RTX, insn = f, i = -1, prev_code = JUMP_INSN, depth = 1;
       insn; insn = NEXT_INSN (insn))
      /* Track when we are inside a LIBCALL block.  */
      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
          && find_reg_note (insn, REG_LIBCALL, NULL_RTX))
        in_libcall_block = 1;

      code = GET_CODE (insn);

      if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
        depth++;
      else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END)
        depth--;
      /* A basic block starts at a label, or after something that can jump.  */
      else if (code == CODE_LABEL
               || (GET_RTX_CLASS (code) == 'i'
                   && (prev_code == JUMP_INSN
                       || (prev_code == CALL_INSN
                           && (nonlocal_label_list != 0 || eh_note)
                           && ! in_libcall_block)
                       || prev_code == BARRIER)))
        {
          basic_block_head[++i] = insn;
          basic_block_end[i] = insn;
          basic_block_loop_depth[i] = depth;

          if (code == CODE_LABEL)
            {
              LABEL_REFS (insn) = insn;
              /* Any label that cannot be deleted
                 is considered to start a reachable block.  */
              if (LABEL_PRESERVE_P (insn))
                block_live[i] = 1;
            }
        }
      else if (GET_RTX_CLASS (code) == 'i')
        {
          basic_block_end[i] = insn;
          basic_block_loop_depth[i] = depth;
        }
      if (GET_RTX_CLASS (code) == 'i')
        {
          /* Make a list of all labels referred to other than by jumps.  */
          for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
            if (REG_NOTE_KIND (note) == REG_LABEL)
              label_value_list = gen_rtx_EXPR_LIST (VOIDmode, XEXP (note, 0),
                                                    label_value_list);
        }
      /* Keep a lifo list of the currently active exception notes.  */
      if (GET_CODE (insn) == NOTE)
        {
          if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
            {
              if (eh_note)
                nested_eh_region[NOTE_BLOCK_NUMBER (insn)] =
                  NOTE_BLOCK_NUMBER (XEXP (eh_note, 0));
              else
                nested_eh_region[NOTE_BLOCK_NUMBER (insn)] = 0;
              eh_note = gen_rtx_EXPR_LIST (VOIDmode, insn, eh_note);
            }
          else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
            eh_note = XEXP (eh_note, 1);
        }
      /* If we encounter a CALL_INSN, note which exception handler it
         might pass control to.

         If doing asynchronous exceptions, record the active EH handler
         for every insn, since most insns can throw.  */
      if (eh_note
          && (asynchronous_exceptions
              || (GET_CODE (insn) == CALL_INSN
                  && ! in_libcall_block)))
        active_eh_region[INSN_UID (insn)] =
          NOTE_BLOCK_NUMBER (XEXP (eh_note, 0));
      BLOCK_NUM (insn) = i;
      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
          && find_reg_note (insn, REG_RETVAL, NULL_RTX))
        in_libcall_block = 0;
  /* During the second pass, `n_basic_blocks' is only an upper bound.
     Only perform the sanity check for the first pass, and on the second
     pass ensure `n_basic_blocks' is set to the correct value.  */
  if (pass == 1 && i + 1 != n_basic_blocks)
    abort ();
  n_basic_blocks = i + 1;

  /* Record which basic blocks control can drop in to.  */

  for (i = 0; i < n_basic_blocks; i++)
    {
      for (insn = PREV_INSN (basic_block_head[i]);
           insn && GET_CODE (insn) == NOTE; insn = PREV_INSN (insn))
        ;

      basic_block_drops_in[i] = insn && GET_CODE (insn) != BARRIER;
    }
  /* Now find which basic blocks can actually be reached
     and put all jump insns' LABEL_REFS onto the ref-chains
     of their target labels.  */

  if (n_basic_blocks > 0)
    {
      int something_marked = 1;

      /* Pass over all blocks, marking each block that is reachable
         and has not yet been marked.
         Keep doing this until, in one pass, no blocks have been marked.
         Then blocks_live and blocks_marked are identical and correct.
         In addition, all jumps actually reachable have been marked.  */

      while (something_marked)
        {
          something_marked = 0;
          for (i = 0; i < n_basic_blocks; i++)
            if (block_live[i] && !block_marked[i])
              {
                block_marked[i] = 1;
                something_marked = 1;
                if (i + 1 < n_basic_blocks && basic_block_drops_in[i + 1])
                  block_live[i + 1] = 1;
                insn = basic_block_end[i];
                if (GET_CODE (insn) == JUMP_INSN)
                  mark_label_ref (PATTERN (insn), insn, 0);
                /* If we have any forced labels, mark them as potentially
                   reachable from this block.  */
                for (x = forced_labels; x; x = XEXP (x, 1))
                  if (! LABEL_REF_NONLOCAL_P (x))
                    mark_label_ref (gen_rtx_LABEL_REF (VOIDmode, XEXP (x, 0)),
                                    insn, 0);
                /* Now scan the insns for this block; we may need to make
                   edges for some of them to various non-obvious locations
                   (exception handlers, nonlocal labels, etc).  */
                for (insn = basic_block_head[i];
                     insn != NEXT_INSN (basic_block_end[i]);
                     insn = NEXT_INSN (insn))
                  {
                    if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
                      {
                        /* References to labels in non-jumping insns have
                           REG_LABEL notes attached to them.

                           This can happen for computed gotos; we don't care
                           about them here since the values are also on the
                           label_value_list and will be marked live if we find
                           a live computed goto.

                           This can also happen when we take the address of
                           a label to pass as an argument to __throw.  Note
                           throw only uses the value to determine what handler
                           should be called -- ie the label is not used as
                           a jump target, it just marks regions in the code.

                           In theory we should be able to ignore the REG_LABEL
                           notes, but we have to make sure that the label and
                           associated insns aren't marked dead, so we make
                           the block in question live and create an edge from
                           this insn to the label.  This is not strictly
                           correct, but it is close enough for now.  */
                        for (note = REG_NOTES (insn);
                             note;
                             note = XEXP (note, 1))
                          if (REG_NOTE_KIND (note) == REG_LABEL)
                            {
                              x = XEXP (note, 0);
                              block_live[BLOCK_NUM (x)] = 1;
                              mark_label_ref (gen_rtx_LABEL_REF (VOIDmode, x),
                                              insn, 0);
                            }
                        /* If this is a computed jump, then mark it as
                           reaching everything on the label_value_list
                           and forced_labels list.  */
                        if (computed_jump_p (insn))
                          {
                            current_function_has_computed_jump = 1;
                            for (x = label_value_list; x; x = XEXP (x, 1))
                              {
                                int b = BLOCK_NUM (XEXP (x, 0));
                                basic_block_computed_jump_target[b] = 1;
                                mark_label_ref (gen_rtx_LABEL_REF (VOIDmode,
                                                                   XEXP (x, 0)),
                                                insn, 0);
                              }

                            for (x = forced_labels; x; x = XEXP (x, 1))
                              {
                                int b = BLOCK_NUM (XEXP (x, 0));
                                basic_block_computed_jump_target[b] = 1;
                                mark_label_ref (gen_rtx_LABEL_REF (VOIDmode,
                                                                   XEXP (x, 0)),
                                                insn, 0);
                              }
                          }
                        /* If this is a CALL_INSN, then mark it as reaching
                           the active EH handler for this CALL_INSN.  If
                           we're handling asynchronous exceptions mark every
                           insn as reaching the active EH handler.

                           Also mark the CALL_INSN as reaching any nonlocal
                           labels.  */
                        else if (asynchronous_exceptions
                                 || (GET_CODE (insn) == CALL_INSN
                                     && ! find_reg_note (insn, REG_RETVAL,
                                                         NULL_RTX)))
                          {
                            if (active_eh_region[INSN_UID (insn)])
                              {
                                region = active_eh_region[INSN_UID (insn)];
                                for ( ; region;
                                      region = nested_eh_region[region])
                                  {
                                    ptr = get_first_handler (region);
                                    for ( ; ptr; ptr = ptr->next)
                                      mark_label_ref (gen_rtx_LABEL_REF
                                                      (VOIDmode, ptr->handler_label), insn, 0);
                                  }
                              }

                            if (!asynchronous_exceptions)
                              for (x = nonlocal_label_list;
                                   x;
                                   x = XEXP (x, 1))
                                mark_label_ref (gen_rtx_LABEL_REF (VOIDmode,
                                                                   XEXP (x, 0)),
                                                insn, 0);

                            /* ??? This could be made smarter:
                               in some cases it's possible to tell that
                               certain calls will not do a nonlocal goto.

                               For example, if the nested functions that
                               do the nonlocal gotos do not have their
                               addresses taken, then only calls to those
                               functions or to other nested functions that
                               use them could possibly do nonlocal gotos.  */
      /* This should never happen.  If it does that means we've computed an
         incorrect flow graph, which can lead to aborts/crashes later in the
         compiler or incorrect code generation.

         We used to try and continue here, but that's just asking for trouble
         later during the compile or at runtime.  It's easier to debug the
         problem here than later!  */
      for (i = 1; i < n_basic_blocks; i++)
        if (block_live[i] && ! basic_block_drops_in[i]
            && GET_CODE (basic_block_head[i]) == CODE_LABEL
            && LABEL_REFS (basic_block_head[i]) == basic_block_head[i])
          abort ();
      /* Now delete the code for any basic blocks that can't be reached.
         They can occur because jump_optimize does not recognize
         unreachable loops as unreachable.  */

      for (i = 0; i < n_basic_blocks; i++)
        if (! block_live[i])
          {
            deleted++;

            /* Delete the insns in a (non-live) block.  We physically delete
               every non-note insn except the start and end (so
               basic_block_head/end needn't be updated), and we turn the latter
               into NOTE_INSN_DELETED notes.
               We used to "delete" the insns by turning them into notes, but
               we may be deleting lots of insns that subsequent passes would
               otherwise have to process.  Secondly, lots of deleted blocks in
               a row can really slow down propagate_block since it will
               otherwise process insn-turned-notes multiple times when it
               looks for loop begin/end notes.  */
            if (basic_block_head[i] != basic_block_end[i])
              {
                /* It would be quicker to delete all of these with a single
                   unchaining, rather than one at a time, but we need to keep
                   the NOTE's.  */
                insn = NEXT_INSN (basic_block_head[i]);
                while (insn != basic_block_end[i])
                  {
                    if (GET_CODE (insn) == BARRIER)
                      abort ();
                    else if (GET_CODE (insn) != NOTE)
                      insn = flow_delete_insn (insn);
                    else
                      insn = NEXT_INSN (insn);
                  }
              }
            insn = basic_block_head[i];
            if (GET_CODE (insn) != NOTE)
              {
                /* Turn the head into a deleted insn note.  */
                if (GET_CODE (insn) == BARRIER)
                  abort ();

                /* If the head of this block is a CODE_LABEL, then it might
                   be the label for an exception handler which can't be
                   reached.

                   We need to remove the label from the exception_handler_label
                   list and remove the associated NOTE_EH_REGION_BEG and
                   NOTE_EH_REGION_END notes.  */
                if (GET_CODE (insn) == CODE_LABEL)
                  {
                    rtx x, *prev = &exception_handler_labels;

                    for (x = exception_handler_labels; x; x = XEXP (x, 1))
                      {
                        if (XEXP (x, 0) == insn)
                          {
                            /* Found a match, splice this label out of the
                               handler list.  */
                            *prev = XEXP (x, 1);
                            XEXP (x, 1) = NULL_RTX;
                            XEXP (x, 0) = NULL_RTX;

                            /* Remove the handler from all regions.  */
                            remove_handler (insn);
                            deleted_handler = 1;
                            break;
                          }
                        prev = &XEXP (x, 1);
                      }
                  }

                PUT_CODE (insn, NOTE);
                NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
                NOTE_SOURCE_FILE (insn) = 0;
              }
            insn = basic_block_end[i];
            if (GET_CODE (insn) != NOTE)
              {
                /* Turn the tail into a deleted insn note.  */
                if (GET_CODE (insn) == BARRIER)
                  abort ();
                PUT_CODE (insn, NOTE);
                NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
                NOTE_SOURCE_FILE (insn) = 0;
              }
            /* BARRIERs are between basic blocks, not part of one.
               Delete a BARRIER if the preceding jump is deleted.
               We cannot alter a BARRIER into a NOTE
               because it is too short; but we can really delete
               it because it is not part of a basic block.  */
            if (NEXT_INSN (insn) != 0
                && GET_CODE (NEXT_INSN (insn)) == BARRIER)
              delete_insn (NEXT_INSN (insn));
            /* Each time we delete some basic blocks,
               see if there is a jump around them that is
               being turned into a no-op.  If so, delete it.  */

            if (block_live[i - 1])
              {
                register int j;
                register rtx label;

                for (j = i + 1; j < n_basic_blocks; j++)
                  if (block_live[j])
                    break;

                insn = basic_block_end[i - 1];
                if (GET_CODE (insn) == JUMP_INSN
                    /* An unconditional jump is the only possibility
                       we must check for, since a conditional one
                       would make these blocks live.  */
                    && simplejump_p (insn)
                    && (label = XEXP (SET_SRC (PATTERN (insn)), 0), 1)
                    && INSN_UID (label) != 0
                    && BLOCK_NUM (label) == j)
                  {
                    register int k;

                    /* The deleted blocks still show up in the cfg,
                       so we must set basic_block_drops_in for blocks
                       I to J inclusive to keep the cfg accurate.  */
                    for (k = i; k <= j; k++)
                      basic_block_drops_in[k] = 1;

                    PUT_CODE (insn, NOTE);
                    NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
                    NOTE_SOURCE_FILE (insn) = 0;
                    if (GET_CODE (NEXT_INSN (insn)) != BARRIER)
                      abort ();
                    delete_insn (NEXT_INSN (insn));
                  }
              }
      /* If we deleted an exception handler, we may have EH region
         begin/end blocks to remove as well.  */
      if (deleted_handler)
        for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
          if (GET_CODE (insn) == NOTE)
            {
              if ((NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG) ||
                  (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
                {
                  int num = CODE_LABEL_NUMBER (insn);
                  /* A NULL handler indicates a region is no longer needed.  */
                  if (get_first_handler (num) == NULL)
                    {
                      NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
                      NOTE_SOURCE_FILE (insn) = 0;
                    }
                }
            }
      /* There are pathological cases where one function calling hundreds of
         nested inline functions can generate lots and lots of unreachable
         blocks that jump can't delete.  Since we don't use sparse matrices
         a lot of memory will be needed to compile such functions.
         Implementing sparse matrices is a fair bit of work and it is not
         clear that they win more than they lose (we don't want to
         unnecessarily slow down compilation of normal code).  By making
         another pass for the pathological case, we can greatly speed up
         their compilation without hurting normal code.  This works because
         all the insns in the unreachable blocks have either been deleted or
         turned into notes.
         Note that we're talking about reducing memory usage by 10's of
         megabytes and reducing compilation time by several minutes.  */
      /* ??? The choice of when to make another pass is a bit arbitrary,
         and was derived from empirical data.  */

      n_basic_blocks -= deleted;
      /* `n_basic_blocks' may not be correct at this point: two previously
         separate blocks may now be merged.  That's ok though as we
         recalculate it during the second pass.  It certainly can't be
         any larger than the current value.  */
/* Record INSN's block number as BB.  */

void
set_block_num (insn, bb)
     rtx insn;
     int bb;
{
  if (INSN_UID (insn) >= max_uid_for_flow)
    {
      /* Add one-eighth the size so we don't keep calling xrealloc.  */
      max_uid_for_flow = INSN_UID (insn) + (INSN_UID (insn) + 7) / 8;
      uid_block_number = (int *)
        xrealloc (uid_block_number, (max_uid_for_flow + 1) * sizeof (int));
    }
  BLOCK_NUM (insn) = bb;
}
/* Subroutines of find_basic_blocks.  */

/* Check expression X for label references;
   if one is found, add INSN to the label's chain of references.

   CHECKDUP means check for and avoid creating duplicate references
   from the same insn.  Such duplicates do no serious harm but
   can slow life analysis.  CHECKDUP is set only when duplicates
   are likely.  */

static void
mark_label_ref (x, insn, checkdup)
     rtx x, insn;
     int checkdup;
{
  register RTX_CODE code;
  /* We can be called with NULL when scanning label_value_list.  */
  if (x == 0)
    return;

  code = GET_CODE (x);
  if (code == LABEL_REF)
    {
      register rtx label = XEXP (x, 0);
      register rtx y;

      if (GET_CODE (label) != CODE_LABEL)
        abort ();
      /* If the label was never emitted, this insn is junk,
         but avoid a crash trying to refer to BLOCK_NUM (label).
         This can happen as a result of a syntax error
         and a diagnostic has already been printed.  */
      if (INSN_UID (label) == 0)
        return;
      CONTAINING_INSN (x) = insn;
      /* If CHECKDUP is set, check for a duplicate ref from the same insn
         and don't insert.  */
      if (checkdup)
        for (y = LABEL_REFS (label); y != label; y = LABEL_NEXTREF (y))
          if (CONTAINING_INSN (y) == insn)
            return;
      LABEL_NEXTREF (x) = LABEL_REFS (label);
      LABEL_REFS (label) = x;
      block_live_static[BLOCK_NUM (label)] = 1;
      return;
    }
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        mark_label_ref (XEXP (x, i), insn, 0);
      if (fmt[i] == 'E')
        {
          register int j;
          for (j = 0; j < XVECLEN (x, i); j++)
            mark_label_ref (XVECEXP (x, i, j), insn, 1);
        }
    }
}
/* Delete INSN by patching it out.
   Return the next insn.  */

static rtx
flow_delete_insn (insn)
     rtx insn;
{
  /* ??? For the moment we assume we don't have to watch for NULLs here
     since the start/end of basic blocks aren't deleted like this.  */
  NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
  PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
  return NEXT_INSN (insn);
}
/* Perform data flow analysis.
   F is the first insn of the function and NREGS the number of register numbers
   in use.  */

void
life_analysis (f, nregs, file)
     rtx f;
     int nregs;
     FILE *file;
{
#ifdef ELIMINABLE_REGS
  register int i;
  static struct {int from, to; } eliminables[] = ELIMINABLE_REGS;
#endif

  /* Record which registers will be eliminated.  We use this in
     mark_used_regs.  */

  CLEAR_HARD_REG_SET (elim_reg_set);

#ifdef ELIMINABLE_REGS
  for (i = 0; i < sizeof eliminables / sizeof eliminables[0]; i++)
    SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
#else
  SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
#endif

  life_analysis_1 (f, nregs);
  if (file)
    dump_flow_info (file);

  free_basic_block_vars (1);
}
/* Free the variables allocated by find_basic_blocks.

   KEEP_HEAD_END_P is non-zero if basic_block_head and basic_block_end
   are not to be freed.  */

void
free_basic_block_vars (keep_head_end_p)
     int keep_head_end_p;
{
  if (basic_block_drops_in)
    {
      free (basic_block_drops_in);
      /* Tell dump_flow_info this isn't available anymore.  */
      basic_block_drops_in = 0;
    }
  if (basic_block_loop_depth)
    {
      free (basic_block_loop_depth);
      basic_block_loop_depth = 0;
    }
  if (uid_block_number)
    {
      free (uid_block_number);
      uid_block_number = 0;
    }
  if (uid_volatile)
    {
      free (uid_volatile);
      uid_volatile = 0;
    }

  if (! keep_head_end_p && basic_block_head)
    {
      free (basic_block_head);
      basic_block_head = 0;
      free (basic_block_end);
      basic_block_end = 0;
    }
}
/* Determine which registers are live at the start of each
   basic block of the function whose first insn is F.
   NREGS is the number of registers used in F.
   We allocate the vector basic_block_live_at_start
   and the regsets that it points to, and fill them with the data.
   regset_size and regset_bytes are also set here.  */

static void
life_analysis_1 (f, nregs)
     rtx f;
     int nregs;
{
  /* For each basic block, a bitmask of regs
     live on exit from the block.  */
  regset *basic_block_live_at_end;
  /* For each basic block, a bitmask of regs
     live on entry to a successor-block of this block.
     If this does not match basic_block_live_at_end,
     that must be updated, and the block must be rescanned.  */
  regset *basic_block_new_live_at_end;
  /* For each basic block, a bitmask of regs
     whose liveness at the end of the basic block
     can make a difference in which regs are live on entry to the block.
     These are the regs that are set within the basic block,
     possibly excluding those that are used after they are set.  */
  regset *basic_block_significant;
  struct obstack flow_obstack;

  gcc_obstack_init (&flow_obstack);

  bzero (regs_ever_live, sizeof regs_ever_live);
  /* Allocate and zero out many data structures
     that will record the data from lifetime analysis.  */

  allocate_for_life_analysis ();

  reg_next_use = (rtx *) alloca (nregs * sizeof (rtx));
  bzero ((char *) reg_next_use, nregs * sizeof (rtx));

  /* Set up several regset-vectors used internally within this function.
     Their meanings are documented above, with their declarations.  */

  basic_block_live_at_end
    = (regset *) alloca (n_basic_blocks * sizeof (regset));

  /* Don't use alloca since that leads to a crash rather than an error message
     if there isn't enough space.
     Don't use oballoc since we may need to allocate other things during
     this function on the temporary obstack.  */
  init_regset_vector (basic_block_live_at_end, n_basic_blocks, &flow_obstack);

  basic_block_new_live_at_end
    = (regset *) alloca (n_basic_blocks * sizeof (regset));
  init_regset_vector (basic_block_new_live_at_end, n_basic_blocks,
                      &flow_obstack);

  basic_block_significant
    = (regset *) alloca (n_basic_blocks * sizeof (regset));
  init_regset_vector (basic_block_significant, n_basic_blocks, &flow_obstack);
  /* Record which insns refer to any volatile memory
     or for any reason can't be deleted just because they are dead stores.
     Also, delete any insns that copy a register to itself.  */

  for (insn = f; insn; insn = NEXT_INSN (insn))
    {
      enum rtx_code code1 = GET_CODE (insn);
      if (code1 == CALL_INSN)
        INSN_VOLATILE (insn) = 1;
      else if (code1 == INSN || code1 == JUMP_INSN)
        {
          /* Delete (in effect) any obvious no-op moves.  */
          if (GET_CODE (PATTERN (insn)) == SET
              && GET_CODE (SET_DEST (PATTERN (insn))) == REG
              && GET_CODE (SET_SRC (PATTERN (insn))) == REG
              && (REGNO (SET_DEST (PATTERN (insn)))
                  == REGNO (SET_SRC (PATTERN (insn))))
              /* Insns carrying these notes are useful later on.  */
              && ! find_reg_note (insn, REG_EQUAL, NULL_RTX))
            {
              PUT_CODE (insn, NOTE);
              NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
              NOTE_SOURCE_FILE (insn) = 0;
            }
          /* Delete (in effect) any obvious no-op moves between SUBREGs
             of the same register.  */
          else if (GET_CODE (PATTERN (insn)) == SET
                   && GET_CODE (SET_DEST (PATTERN (insn))) == SUBREG
                   && GET_CODE (SUBREG_REG (SET_DEST (PATTERN (insn)))) == REG
                   && GET_CODE (SET_SRC (PATTERN (insn))) == SUBREG
                   && GET_CODE (SUBREG_REG (SET_SRC (PATTERN (insn)))) == REG
                   && (REGNO (SUBREG_REG (SET_DEST (PATTERN (insn))))
                       == REGNO (SUBREG_REG (SET_SRC (PATTERN (insn)))))
                   && SUBREG_WORD (SET_DEST (PATTERN (insn))) ==
                      SUBREG_WORD (SET_SRC (PATTERN (insn)))
                   /* Insns carrying these notes are useful later on.  */
                   && ! find_reg_note (insn, REG_EQUAL, NULL_RTX))
            {
              PUT_CODE (insn, NOTE);
              NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
              NOTE_SOURCE_FILE (insn) = 0;
            }
          else if (GET_CODE (PATTERN (insn)) == PARALLEL)
            {
              /* If nothing but SETs of registers to themselves,
                 this insn can also be deleted.  */
              for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
                {
                  rtx tem = XVECEXP (PATTERN (insn), 0, i);

                  if (GET_CODE (tem) == USE
                      || GET_CODE (tem) == CLOBBER)
                    continue;

                  if (GET_CODE (tem) != SET
                      || GET_CODE (SET_DEST (tem)) != REG
                      || GET_CODE (SET_SRC (tem)) != REG
                      || REGNO (SET_DEST (tem)) != REGNO (SET_SRC (tem)))
                    break;
                }

              if (i == XVECLEN (PATTERN (insn), 0)
                  /* Insns carrying these notes are useful later on.  */
                  && ! find_reg_note (insn, REG_EQUAL, NULL_RTX))
                {
                  PUT_CODE (insn, NOTE);
                  NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
                  NOTE_SOURCE_FILE (insn) = 0;
                }
              else
                INSN_VOLATILE (insn) = volatile_refs_p (PATTERN (insn));
            }
          else if (GET_CODE (PATTERN (insn)) != USE)
            INSN_VOLATILE (insn) = volatile_refs_p (PATTERN (insn));
          /* A SET that makes space on the stack cannot be dead.
             (Such SETs occur only for allocating variable-size data,
             so they will always have a PLUS or MINUS according to the
             direction of stack growth.)
             Even if this function never uses this stack pointer value,
             signal handlers do!  */
          else if (code1 == INSN && GET_CODE (PATTERN (insn)) == SET
                   && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
#ifdef STACK_GROWS_DOWNWARD
                   && GET_CODE (SET_SRC (PATTERN (insn))) == MINUS
#else
                   && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
#endif
                   && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx)
            INSN_VOLATILE (insn) = 1;
        }
    }
  if (n_basic_blocks > 0)
#ifdef EXIT_IGNORE_STACK
    if (! EXIT_IGNORE_STACK
        || (! FRAME_POINTER_REQUIRED
            && ! current_function_calls_alloca
            && flag_omit_frame_pointer))
#endif
      {
        /* If exiting needs the right stack value,
           consider the stack pointer live at the end of the function.  */
        SET_REGNO_REG_SET (basic_block_live_at_end[n_basic_blocks - 1],
                           STACK_POINTER_REGNUM);
        SET_REGNO_REG_SET (basic_block_new_live_at_end[n_basic_blocks - 1],
                           STACK_POINTER_REGNUM);
      }

  /* Mark the frame pointer as needed at the end of the function.  If
     we end up eliminating it, it will be removed from the live list
     of each basic block by reload.  */

  if (n_basic_blocks > 0)
    {
      SET_REGNO_REG_SET (basic_block_live_at_end[n_basic_blocks - 1],
                         FRAME_POINTER_REGNUM);
      SET_REGNO_REG_SET (basic_block_new_live_at_end[n_basic_blocks - 1],
                         FRAME_POINTER_REGNUM);
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
      /* If they are different, also mark the hard frame pointer as live.  */
      SET_REGNO_REG_SET (basic_block_live_at_end[n_basic_blocks - 1],
                         HARD_FRAME_POINTER_REGNUM);
      SET_REGNO_REG_SET (basic_block_new_live_at_end[n_basic_blocks - 1],
                         HARD_FRAME_POINTER_REGNUM);
#endif
    }

  /* Mark all global registers and all registers used by the epilogue
     as being live at the end of the function since they may be
     referenced by our caller.  */

  if (n_basic_blocks > 0)
    for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
      if (global_regs[i]
#ifdef EPILOGUE_USES
          || EPILOGUE_USES (i)
#endif
          )
        {
          SET_REGNO_REG_SET (basic_block_live_at_end[n_basic_blocks - 1], i);
          SET_REGNO_REG_SET (basic_block_new_live_at_end[n_basic_blocks - 1], i);
        }
  /* Propagate life info through the basic blocks
     around the graph of basic blocks.

     This is a relaxation process: each time a new register
     is live at the end of the basic block, we must scan the block
     to determine which registers are, as a consequence, live at the beginning
     of that block.  These registers must then be marked live at the ends
     of all the blocks that can transfer control to that block.
     The process continues until it reaches a fixed point.  */
  for (i = n_basic_blocks - 1; i >= 0; i--)
    {
      int consider = first_pass;
      int must_rescan = first_pass;
      register int j;

      /* Set CONSIDER if this block needs thinking about at all
         (that is, if the regs live now at the end of it
         are not the same as were live at the end of it when
         we last thought about it).
         Set must_rescan if it needs to be thought about
         instruction by instruction (that is, if any additional
         reg that is live at the end now but was not live there before
         is one of the significant regs of this basic block).  */

      EXECUTE_IF_AND_COMPL_IN_REG_SET
        (basic_block_new_live_at_end[i],
         basic_block_live_at_end[i], 0, j,
         {
           consider = 1;
           if (REGNO_REG_SET_P (basic_block_significant[i], j))
             must_rescan = 1;
         });

      if (! consider)
        continue;

      /* The live_at_start of this block may be changing,
         so another pass will be required after this one.  */

      if (! must_rescan)
        {
          /* No complete rescan needed;
             just record those variables newly known live at end
             as live at start as well.  */
          IOR_AND_COMPL_REG_SET (basic_block_live_at_start[i],
                                 basic_block_new_live_at_end[i],
                                 basic_block_live_at_end[i]);

          IOR_AND_COMPL_REG_SET (basic_block_live_at_end[i],
                                 basic_block_new_live_at_end[i],
                                 basic_block_live_at_end[i]);
        }
      else
        {
          /* Update the basic_block_live_at_start
             by propagation backwards through the block.  */
          COPY_REG_SET (basic_block_live_at_end[i],
                        basic_block_new_live_at_end[i]);
          COPY_REG_SET (basic_block_live_at_start[i],
                        basic_block_live_at_end[i]);
          propagate_block (basic_block_live_at_start[i],
                           basic_block_head[i], basic_block_end[i], 0,
                           first_pass ? basic_block_significant[i]
                           : (regset) 0,
                           i);
        }
      {
        register rtx jump, head;

        /* Update the basic_block_new_live_at_end's of the block
           that falls through into this one (if any).  */
        head = basic_block_head[i];
        if (basic_block_drops_in[i])
          IOR_REG_SET (basic_block_new_live_at_end[i-1],
                       basic_block_live_at_start[i]);

        /* Update the basic_block_new_live_at_end's of
           all the blocks that jump to this one.  */
        if (GET_CODE (head) == CODE_LABEL)
          for (jump = LABEL_REFS (head);
               jump != head;
               jump = LABEL_NEXTREF (jump))
            {
              register int from_block = BLOCK_NUM (CONTAINING_INSN (jump));
              IOR_REG_SET (basic_block_new_live_at_end[from_block],
                           basic_block_live_at_start[i]);
            }
      }
  /* The only pseudos that are live at the beginning of the function are
     those that were not set anywhere in the function.  local-alloc doesn't
     know how to handle these correctly, so mark them as not local to any
     one basic block.  */

  if (n_basic_blocks > 0)
    EXECUTE_IF_SET_IN_REG_SET (basic_block_live_at_start[0],
                               FIRST_PSEUDO_REGISTER, i,
                               {
                                 REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL;
                               });
  /* Now the life information is accurate.
     Make one more pass over each basic block
     to delete dead stores, create autoincrement addressing
     and record how many times each register is used, is set, or dies.

     To save time, we operate directly in basic_block_live_at_end[i],
     thus destroying it (in fact, converting it into a copy of
     basic_block_live_at_start[i]).  This is ok now because
     basic_block_live_at_end[i] is no longer used past this point.  */

  for (i = 0; i < n_basic_blocks; i++)
    propagate_block (basic_block_live_at_end[i],
                     basic_block_head[i], basic_block_end[i], 1,
                     (regset) 0, i);
  /* Something live during a setjmp should not be put in a register
     on certain machines which restore regs from stack frames
     rather than from the jmpbuf.
     But we don't need to do this for the user's variables, since
     ANSI says only volatile variables need this.  */
#ifdef LONGJMP_RESTORE_FROM_STACK
  EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
                             FIRST_PSEUDO_REGISTER, i,
                             {
                               if (regno_reg_rtx[i] != 0
                                   && ! REG_USERVAR_P (regno_reg_rtx[i]))
                                 {
                                   REG_LIVE_LENGTH (i) = -1;
                                   REG_BASIC_BLOCK (i) = -1;
                                 }
                             });
#endif
  /* We have a problem with any pseudoreg that
     lives across the setjmp.  ANSI says that if a
     user variable does not change in value
     between the setjmp and the longjmp, then the longjmp preserves it.
     This includes longjmp from a place where the pseudo appears dead.
     (In principle, the value still exists if it is in scope.)
     If the pseudo goes in a hard reg, some other value may occupy
     that hard reg where this pseudo is dead, thus clobbering the pseudo.
     Conclusion: such a pseudo must not go in a hard reg.  */
  EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
                             FIRST_PSEUDO_REGISTER, i,
                             {
                               if (regno_reg_rtx[i] != 0)
                                 {
                                   REG_LIVE_LENGTH (i) = -1;
                                   REG_BASIC_BLOCK (i) = -1;
                                 }
                             });
  free_regset_vector (basic_block_live_at_end, n_basic_blocks);
  free_regset_vector (basic_block_new_live_at_end, n_basic_blocks);
  free_regset_vector (basic_block_significant, n_basic_blocks);
  basic_block_live_at_end = (regset *) 0;
  basic_block_new_live_at_end = (regset *) 0;
  basic_block_significant = (regset *) 0;

  obstack_free (&flow_obstack, NULL_PTR);
}
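/* Illustrative sketch (not part of this pass) of the relaxation performed by
   life_analysis_1 above, with all the bookkeeping stripped out: rescan any
   block whose live-at-end set has grown and keep going until a whole pass
   changes nothing.  AT_START, AT_END and NEW_AT_END stand for the
   basic_block_live_at_start/_live_at_end/_new_live_at_end vectors; the
   helper name is hypothetical.  */
#if 0
static void
sketch_relaxation (at_start, at_end, new_at_end)
     regset *at_start, *at_end, *new_at_end;
{
  int i, j, changed = 1;

  while (changed)
    {
      changed = 0;
      for (i = n_basic_blocks - 1; i >= 0; i--)
        {
          int grown = 0;

          /* Did anything become live at the end of block I since the last
             time it was scanned?  */
          EXECUTE_IF_AND_COMPL_IN_REG_SET (new_at_end[i], at_end[i], 0, j,
                                           { grown = 1; });
          if (! grown)
            continue;

          COPY_REG_SET (at_end[i], new_at_end[i]);
          COPY_REG_SET (at_start[i], at_end[i]);
          /* propagate_block turns live-at-end into live-at-start.  */
          propagate_block (at_start[i], basic_block_head[i],
                           basic_block_end[i], 0, (regset) 0, i);
          /* Predecessors of block I would now merge at_start[i] into their
             NEW_AT_END sets; that is what forces another pass.  */
          changed = 1;
        }
    }
}
#endif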
/* Subroutines of life analysis.  */

/* Allocate the permanent data structures that represent the results
   of life analysis.  Not static since used also for stupid life analysis.  */

void
allocate_for_life_analysis ()
{
  register int i;

  /* Recalculate the register space, in case it has grown.  Old style
     vector oriented regsets would set regset_{size,bytes} here also.  */
  allocate_reg_info (max_regno, FALSE, FALSE);

  /* Because both reg_scan and flow_analysis want to set up the REG_N_SETS
     information, explicitly reset it here.  The allocation should have
     already happened on the previous reg_scan pass.  Make sure in case
     some more registers were allocated.  */
  for (i = 0; i < max_regno; i++)
    REG_N_SETS (i) = 0;

  basic_block_live_at_start
    = (regset *) oballoc (n_basic_blocks * sizeof (regset));
  init_regset_vector (basic_block_live_at_start, n_basic_blocks,
                      function_obstack);

  regs_live_at_setjmp = OBSTACK_ALLOC_REG_SET (function_obstack);
  CLEAR_REG_SET (regs_live_at_setjmp);
}
/* Make each element of VECTOR point at a regset.  The vector has
   NELTS elements, and space is allocated from the ALLOC_OBSTACK
   obstack.  */

static void
init_regset_vector (vector, nelts, alloc_obstack)
     regset *vector;
     int nelts;
     struct obstack *alloc_obstack;
{
  register int i;

  for (i = 0; i < nelts; i++)
    {
      vector[i] = OBSTACK_ALLOC_REG_SET (alloc_obstack);
      CLEAR_REG_SET (vector[i]);
    }
}
/* Release any additional space allocated for each element of VECTOR,
   other than the regset header itself.  The vector has NELTS elements.  */

void
free_regset_vector (vector, nelts)
     regset *vector;
     int nelts;
{
  register int i;

  for (i = 0; i < nelts; i++)
    FREE_REG_SET (vector[i]);
}
/* Compute the registers live at the beginning of a basic block
   from those live at the end.

   When called, OLD contains those live at the end.
   On return, it contains those live at the beginning.
   FIRST and LAST are the first and last insns of the basic block.

   FINAL is nonzero if we are doing the final pass which is not
   for computing the life info (since that has already been done)
   but for acting on it.  On this pass, we delete dead stores,
   set up the logical links and dead-variables lists of instructions,
   and merge instructions for autoincrement and autodecrement addresses.

   SIGNIFICANT is nonzero only the first time for each basic block.
   If it is nonzero, it points to a regset in which we store
   a 1 for each register that is set within the block.

   BNUM is the number of the basic block.  */

static void
propagate_block (old, first, last, final, significant, bnum)
     register regset old;
     rtx first;
     rtx last;
     int final;
     regset significant;
     int bnum;
{
  register rtx insn, prev;
  regset dead, live;
  register int i;

  /* The following variables are used only if FINAL is nonzero.  */
  /* This vector gets one element for each reg that has been live
     at any point in the basic block that has been scanned so far.
     SOMETIMES_MAX says how many elements are in use so far.  */
  register int *regs_sometimes_live;
  int sometimes_max = 0;
  /* This regset has 1 for each reg that we have seen live so far.
     It and REGS_SOMETIMES_LIVE are updated together.  */
  regset maxlive;

  /* The loop depth may change in the middle of a basic block.  Since we
     scan from end to beginning, we start with the depth at the end of the
     current basic block, and adjust as we pass ends and starts of loops.  */
  loop_depth = basic_block_loop_depth[bnum];

  dead = ALLOCA_REG_SET ();
  live = ALLOCA_REG_SET ();
  /* Include any notes at the end of the block in the scan.
     This is in case the block ends with a call to setjmp.  */

  while (NEXT_INSN (last) != 0 && GET_CODE (NEXT_INSN (last)) == NOTE)
    {
      /* Look for loop boundaries; we are going forward here.  */
      last = NEXT_INSN (last);
      if (NOTE_LINE_NUMBER (last) == NOTE_INSN_LOOP_BEG)
        loop_depth++;
      else if (NOTE_LINE_NUMBER (last) == NOTE_INSN_LOOP_END)
        loop_depth--;
    }
  if (final)
    {
      maxlive = ALLOCA_REG_SET ();
      COPY_REG_SET (maxlive, old);
      regs_sometimes_live = (int *) alloca (max_regno * sizeof (int));

      /* Process the regs live at the end of the block.
         Enter them in MAXLIVE and REGS_SOMETIMES_LIVE.
         Also mark them as not local to any one basic block.  */
      EXECUTE_IF_SET_IN_REG_SET (old, 0, i,
                                 {
                                   REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL;
                                   regs_sometimes_live[sometimes_max] = i;
                                   sometimes_max++;
                                 });
    }
  /* Scan the block an insn at a time from end to beginning.  */

  for (insn = last; ; insn = prev)
    {
      prev = PREV_INSN (insn);

      if (GET_CODE (insn) == NOTE)
        {
          /* Look for loop boundaries, remembering that we are going
             backwards.  */
          if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END)
            loop_depth++;
          else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
            loop_depth--;

          /* If we have LOOP_DEPTH == 0, there has been a bookkeeping error.
             Abort now rather than setting register status incorrectly.  */
          if (loop_depth == 0)
            abort ();

          /* If this is a call to `setjmp' et al,
             warn if any non-volatile datum is live.  */

          if (final && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
            IOR_REG_SET (regs_live_at_setjmp, old);
        }
      /* Update the life-status of regs for this insn.
         First DEAD gets which regs are set in this insn
         then LIVE gets which regs are used in this insn.
         Then the regs live before the insn
         are those live after, with DEAD regs turned off,
         and then LIVE regs turned on.  */

      else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
        {
          rtx note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
          int insn_is_dead
            = (insn_dead_p (PATTERN (insn), old, 0)
               /* Don't delete something that refers to volatile storage!  */
               && ! INSN_VOLATILE (insn));
          int libcall_is_dead
            = (insn_is_dead && note != 0
               && libcall_dead_p (PATTERN (insn), old, note, insn));

          /* If an instruction consists of just dead store(s) on final pass,
             "delete" it by turning it into a NOTE of type NOTE_INSN_DELETED.
             We could really delete it with delete_insn, but that
             can cause trouble for first or last insn in a basic block.  */
          if (final && insn_is_dead)
            {
              PUT_CODE (insn, NOTE);
              NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
              NOTE_SOURCE_FILE (insn) = 0;
              /* CC0 is now known to be dead.  Either this insn used it,
                 in which case it doesn't anymore, or clobbered it,
                 so the next insn can't use it.  */

              /* If this insn is copying the return value from a library call,
                 delete the entire library call.  */
              if (libcall_is_dead)
                {
                  rtx first = XEXP (note, 0);

                  while (INSN_DELETED_P (first))
                    first = NEXT_INSN (first);

                  NOTE_LINE_NUMBER (p) = NOTE_INSN_DELETED;
                  NOTE_SOURCE_FILE (p) = 0;
                }
            }

          CLEAR_REG_SET (dead);
          CLEAR_REG_SET (live);
          /* See if this is an increment or decrement that can be
             merged into a following memory address.  */
#ifdef AUTO_INC_DEC
          {
            register rtx x = single_set (insn);

            /* Does this instruction increment or decrement a register?  */
            if (x != 0
                && GET_CODE (SET_DEST (x)) == REG
                && (GET_CODE (SET_SRC (x)) == PLUS
                    || GET_CODE (SET_SRC (x)) == MINUS)
                && XEXP (SET_SRC (x), 0) == SET_DEST (x)
                && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
                /* Ok, look for a following memory ref we can combine with.
                   If one is found, change the memory ref to a PRE_INC
                   or PRE_DEC, cancel this insn, and return 1.
                   Return 0 if nothing has been done.  */
                && try_pre_increment_1 (insn))
              continue;
          }
#endif /* AUTO_INC_DEC */
          /* If this is not the final pass, and this insn is copying the
             value of a library call and it's dead, don't scan the
             insns that perform the library call, so that the call's
             arguments are not marked live.  */
          if (libcall_is_dead)
            {
              /* Mark the dest reg as `significant'.  */
              mark_set_regs (old, dead, PATTERN (insn), NULL_RTX, significant);

              insn = XEXP (note, 0);
              prev = PREV_INSN (insn);
            }
          else if (GET_CODE (PATTERN (insn)) == SET
                   && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
                   && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
                   && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx
                   && GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == CONST_INT)
            /* We have an insn to pop a constant amount off the stack.
               (Such insns use PLUS regardless of the direction of the stack,
               and any insn to adjust the stack by a constant is always a pop.)
               These insns, if not dead stores, have no effect on life.  */
            ;
          else
            {
              /* LIVE gets the regs used in INSN;
                 DEAD gets those set by it.  Dead insns don't make anything
                 live.  */

              mark_set_regs (old, dead, PATTERN (insn),
                             final ? insn : NULL_RTX, significant);

              /* If an insn doesn't use CC0, it becomes dead since we
                 assume that every insn clobbers it.  So show it dead here;
                 mark_used_regs will set it live if it is referenced.  */

              mark_used_regs (old, live, PATTERN (insn), final, insn);
              /* Sometimes we may have inserted something before INSN (such as
                 a move) when we make an auto-inc.  So ensure we will scan
                 those insns.  */
              prev = PREV_INSN (insn);
              if (! insn_is_dead && GET_CODE (insn) == CALL_INSN)
                {
                  register int i;

                  for (note = CALL_INSN_FUNCTION_USAGE (insn);
                       note;
                       note = XEXP (note, 1))
                    if (GET_CODE (XEXP (note, 0)) == USE)
                      mark_used_regs (old, live, SET_DEST (XEXP (note, 0)),
                                      final, insn);

                  /* Each call clobbers all call-clobbered regs that are not
                     global or fixed.  Note that the function-value reg is a
                     call-clobbered reg, and mark_set_regs has already had
                     a chance to handle it.  */

                  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
                    if (call_used_regs[i] && ! global_regs[i]
                        && ! fixed_regs[i])
                      SET_REGNO_REG_SET (dead, i);

                  /* The stack ptr is used (honorarily) by a CALL insn.  */
                  SET_REGNO_REG_SET (live, STACK_POINTER_REGNUM);

                  /* Calls may also reference any of the global registers,
                     so they are made live.  */
                  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
                    if (global_regs[i])
                      mark_used_regs (old, live,
                                      gen_rtx_REG (reg_raw_mode[i], i),
                                      final, insn);

                  /* Calls also clobber memory.  */
                  last_mem_set = 0;
                }
              /* Update OLD for the registers used or set.  */
              AND_COMPL_REG_SET (old, dead);
              IOR_REG_SET (old, live);

              if (GET_CODE (insn) == CALL_INSN && final)
                {
                  /* Any regs live at the time of a call instruction
                     must not go in a register clobbered by calls.
                     Find all regs now live and record this for them.  */

                  register int *p = regs_sometimes_live;

                  for (i = 0; i < sometimes_max; i++, p++)
                    if (REGNO_REG_SET_P (old, *p))
                      REG_N_CALLS_CROSSED (*p)++;
                }
          /* On final pass, add any additional sometimes-live regs
             into MAXLIVE and REGS_SOMETIMES_LIVE.
             Also update counts of how many insns each reg is live at.  */

          if (final)
            {
              register int regno;
              register int *p;

              EXECUTE_IF_AND_COMPL_IN_REG_SET
                (live, maxlive, 0, regno,
                 {
                   regs_sometimes_live[sometimes_max++] = regno;
                   SET_REGNO_REG_SET (maxlive, regno);
                 });

              p = regs_sometimes_live;
              for (i = 0; i < sometimes_max; i++)
                {
                  register int regno = *p++;
                  if (REGNO_REG_SET_P (old, regno))
                    REG_LIVE_LENGTH (regno)++;
                }
            }
        }
    }

  FREE_REG_SET (dead);
  FREE_REG_SET (live);
  if (final)
    FREE_REG_SET (maxlive);

  if (num_scratch > max_scratch)
    max_scratch = num_scratch;
}
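/* Illustrative sketch (not part of this pass) of the shape of insn that the
   AUTO_INC_DEC code in propagate_block above tries to merge into a following
   memory reference: a single set that adds a constant to a register, i.e.
   (set (reg R) (plus (reg R) (const_int N))), or the MINUS form.  The helper
   name is hypothetical.  */
#if 0
static int
sketch_inc_dec_candidate_p (insn)
     rtx insn;
{
  rtx x = single_set (insn);

  return (x != 0
          && GET_CODE (SET_DEST (x)) == REG
          && (GET_CODE (SET_SRC (x)) == PLUS
              || GET_CODE (SET_SRC (x)) == MINUS)
          && XEXP (SET_SRC (x), 0) == SET_DEST (x)
          && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT);
}
#endif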
/* Return 1 if X (the body of an insn, or part of it) is just dead stores
   (SET expressions whose destinations are registers dead after the insn).
   NEEDED is the regset that says which regs are alive after the insn.

   Unless CALL_OK is non-zero, an insn is needed if it contains a CALL.  */

static int
insn_dead_p (x, needed, call_ok)
     rtx x;
     regset needed;
     int call_ok;
{
  enum rtx_code code = GET_CODE (x);

  /* If setting something that's a reg or part of one,
     see if that register's altered value will be live.  */
1882 rtx r
= SET_DEST (x
);
1884 /* A SET that is a subroutine call cannot be dead. */
1885 if (! call_ok
&& GET_CODE (SET_SRC (x
)) == CALL
)
1889 if (GET_CODE (r
) == CC0
)
1893 if (GET_CODE (r
) == MEM
&& last_mem_set
&& ! MEM_VOLATILE_P (r
)
1894 && rtx_equal_p (r
, last_mem_set
))
1897 while (GET_CODE (r
) == SUBREG
|| GET_CODE (r
) == STRICT_LOW_PART
1898 || GET_CODE (r
) == ZERO_EXTRACT
)
1901 if (GET_CODE (r
) == REG
)
1903 int regno
= REGNO (r
);
1905 /* Don't delete insns to set global regs. */
1906 if ((regno
< FIRST_PSEUDO_REGISTER
&& global_regs
[regno
])
1907 /* Make sure insns to set frame pointer aren't deleted. */
1908 || regno
== FRAME_POINTER_REGNUM
1909 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
1910 || regno
== HARD_FRAME_POINTER_REGNUM
1912 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1913 /* Make sure insns to set arg pointer are never deleted
1914 (if the arg pointer isn't fixed, there will be a USE for
1915 it, so we can treat it normally). */
1916 || (regno
== ARG_POINTER_REGNUM
&& fixed_regs
[regno
])
1918 || REGNO_REG_SET_P (needed
, regno
))
1921 /* If this is a hard register, verify that subsequent words are
1923 if (regno
< FIRST_PSEUDO_REGISTER
)
1925 int n
= HARD_REGNO_NREGS (regno
, GET_MODE (r
));
1928 if (REGNO_REG_SET_P (needed
, regno
+n
))
1936 /* If performing several activities,
1937 insn is dead if each activity is individually dead.
1938 Also, CLOBBERs and USEs can be ignored; a CLOBBER or USE
1939 that's inside a PARALLEL doesn't make the insn worth keeping. */
1940 else if (code
== PARALLEL
)
1942 int i
= XVECLEN (x
, 0);
1944 for (i
--; i
>= 0; i
--)
1945 if (GET_CODE (XVECEXP (x
, 0, i
)) != CLOBBER
1946 && GET_CODE (XVECEXP (x
, 0, i
)) != USE
1947 && ! insn_dead_p (XVECEXP (x
, 0, i
), needed
, call_ok
))
1953 /* A CLOBBER of a pseudo-register that is dead serves no purpose. That
1954 is not necessarily true for hard registers. */
1955 else if (code
== CLOBBER
&& GET_CODE (XEXP (x
, 0)) == REG
1956 && REGNO (XEXP (x
, 0)) >= FIRST_PSEUDO_REGISTER
1957 && ! REGNO_REG_SET_P (needed
, REGNO (XEXP (x
, 0))))
1960 /* We do not check other CLOBBER or USE here. An insn consisting of just
1961 a CLOBBER or just a USE should not be deleted. */
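
/* Example (register numbers are hypothetical): for
   (set (reg 117) (plus (reg 118) (const_int 4)))
   with bit 117 clear in NEEDED and 117 being none of the special hard
   registers checked above, insn_dead_p returns 1 and the caller is free to
   delete the whole insn as a dead store.  */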
/* If X is the pattern of the last insn in a libcall, and assuming X is dead,
   return 1 if the entire library call is dead.
   This is true if X copies a register (hard or pseudo)
   and if the hard return reg of the call insn is dead.
   (The caller should have tested the destination of X already for death.)

   If this insn doesn't just copy a register, then we don't
   have an ordinary libcall.  In that case, cse could not have
   managed to substitute the source for the dest later on,
   so we can assume the libcall is dead.

   NEEDED is the bit vector of pseudoregs live before this insn.
   NOTE is the REG_RETVAL note of the insn.  INSN is the insn itself.  */

static int
libcall_dead_p (x, needed, note, insn)
     rtx x;
     regset needed;
     rtx note;
     rtx insn;
{
  register RTX_CODE code = GET_CODE (x);

  if (code == SET)
    {
      register rtx r = SET_SRC (x);
      if (GET_CODE (r) == REG)
	{
	  rtx call = XEXP (note, 0);
	  register int i;

	  /* Find the call insn.  */
	  while (call != insn && GET_CODE (call) != CALL_INSN)
	    call = NEXT_INSN (call);

	  /* If there is none, do nothing special,
	     since ordinary death handling can understand these insns.  */
	  if (call == insn)
	    return 0;

	  /* See if the hard reg holding the value is dead.
	     If this is a PARALLEL, find the call within it.  */
	  call = PATTERN (call);
	  if (GET_CODE (call) == PARALLEL)
	    {
	      for (i = XVECLEN (call, 0) - 1; i >= 0; i--)
		if (GET_CODE (XVECEXP (call, 0, i)) == SET
		    && GET_CODE (SET_SRC (XVECEXP (call, 0, i))) == CALL)
		  break;

	      /* This may be a library call that is returning a value
		 via invisible pointer.  Do nothing special, since
		 ordinary death handling can understand these insns.  */
	      if (i < 0)
		return 0;

	      call = XVECEXP (call, 0, i);
	    }

	  return insn_dead_p (call, needed, 1);
	}
    }
  return 1;
}
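
/* A libcall sequence is bracketed by a REG_LIBCALL note on its first insn
   and a REG_RETVAL note on its last insn; XEXP of the REG_RETVAL note
   points back at that first insn, which is where the search for the
   CALL_INSN above starts.  If the value-copying insn and the call's return
   register are both dead, the whole bracketed sequence can be removed.  */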
/* Return 1 if register REGNO was used before it was set.
   In other words, if it is live at function entry.
   Don't count global register variables or variables in registers
   that can be used for function arg passing, though.  */

int
regno_uninitialized (regno)
     int regno;
{
  if (n_basic_blocks == 0
      || (regno < FIRST_PSEUDO_REGISTER
	  && (global_regs[regno] || FUNCTION_ARG_REGNO_P (regno))))
    return 0;

  return REGNO_REG_SET_P (basic_block_live_at_start[0], regno);
}
/* 1 if register REGNO was alive at a place where `setjmp' was called
   and was set more than once or is an argument.
   Such regs may be clobbered by `longjmp'.  */

int
regno_clobbered_at_setjmp (regno)
     int regno;
{
  if (n_basic_blocks == 0)
    return 0;

  return ((REG_N_SETS (regno) > 1
	   || REGNO_REG_SET_P (basic_block_live_at_start[0], regno))
	  && REGNO_REG_SET_P (regs_live_at_setjmp, regno));
}
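
/* Rationale: `longjmp' restores call-saved registers to the values they
   had when `setjmp' was called, so a register that was live at the setjmp
   and is assigned again afterwards (or holds an incoming argument) may
   silently lose its newer value.  Callers use this predicate to warn about
   such variables.  */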
/* Process the registers that are set within X.
   Their bits are set to 1 in the regset DEAD,
   because they are dead prior to this insn.

   If INSN is nonzero, it is the insn being processed
   and the fact that it is nonzero implies this is the FINAL pass
   in propagate_block.  In this case, various info about register
   usage is stored, LOG_LINKS fields of insns are set up.  */

static void
mark_set_regs (needed, dead, x, insn, significant)
     regset needed;
     regset dead;
     rtx x;
     rtx insn;
     regset significant;
{
  register RTX_CODE code = GET_CODE (x);

  if (code == SET || code == CLOBBER)
    mark_set_1 (needed, dead, x, insn, significant);
  else if (code == PARALLEL)
    {
      register int i;

      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
	{
	  code = GET_CODE (XVECEXP (x, 0, i));
	  if (code == SET || code == CLOBBER)
	    mark_set_1 (needed, dead, XVECEXP (x, 0, i), insn, significant);
	}
    }
}
/* Process a single SET rtx, X.  */

static void
mark_set_1 (needed, dead, x, insn, significant)
     regset needed;
     regset dead;
     rtx x;
     rtx insn;
     regset significant;
{
  register int regno;
  register rtx reg = SET_DEST (x);

  /* Modifying just one hardware register of a multi-reg value
     or just a byte field of a register
     does not mean the value from before this insn is now dead.
     But it does mean liveness of that register at the end of the block
     is significant.

     Within mark_set_1, however, we treat it as if the register is
     indeed modified.  mark_used_regs will, however, also treat this
     register as being used.  Thus, we treat these insns as setting a
     new value for the register as a function of its old value.  This
     causes LOG_LINKS to be made appropriately and this will help combine.  */

  while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
	 || GET_CODE (reg) == SIGN_EXTRACT
	 || GET_CODE (reg) == STRICT_LOW_PART)
    reg = XEXP (reg, 0);

  /* If we are writing into memory or into a register mentioned in the
     address of the last thing stored into memory, show we don't know
     what the last store was.  If we are writing memory, save the address
     unless it is volatile.  */
  if (GET_CODE (reg) == MEM
      || (GET_CODE (reg) == REG
	  && last_mem_set != 0 && reg_overlap_mentioned_p (reg, last_mem_set)))
    last_mem_set = 0;

  if (GET_CODE (reg) == MEM && ! side_effects_p (reg)
      /* There are no REG_INC notes for SP, so we can't assume we'll see
	 everything that invalidates it.  To be safe, don't eliminate any
	 stores through SP; none of them should be redundant anyway.  */
      && ! reg_mentioned_p (stack_pointer_rtx, reg))
    last_mem_set = reg;

  if (GET_CODE (reg) == REG
      && (regno = REGNO (reg), regno != FRAME_POINTER_REGNUM)
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
      && regno != HARD_FRAME_POINTER_REGNUM
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
      && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
#endif
      && ! (regno < FIRST_PSEUDO_REGISTER && global_regs[regno]))
    /* && regno != STACK_POINTER_REGNUM)  -- let's try without this.  */
    {
      int some_needed = REGNO_REG_SET_P (needed, regno);
      int some_not_needed = ! some_needed;

      /* Mark it as a significant register for this basic block.  */
      if (significant)
	SET_REGNO_REG_SET (significant, regno);

      /* Mark it as dead before this insn.  */
      SET_REGNO_REG_SET (dead, regno);

      /* A hard reg in a wide mode may really be multiple registers.
	 If so, mark all of them just like the first.  */
      if (regno < FIRST_PSEUDO_REGISTER)
	{
	  int n;

	  /* Nothing below is needed for the stack pointer; get out asap.
	     Eg, log links aren't needed, since combine won't use them.  */
	  if (regno == STACK_POINTER_REGNUM)
	    return;

	  n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
	  while (--n > 0)
	    {
	      int regno_n = regno + n;
	      int needed_regno = REGNO_REG_SET_P (needed, regno_n);

	      if (significant)
		SET_REGNO_REG_SET (significant, regno_n);
	      SET_REGNO_REG_SET (dead, regno_n);
	      some_needed |= needed_regno;
	      some_not_needed |= ! needed_regno;
	    }
	}

      /* Additional data to record if this is the final pass.  */
      if (insn)
	{
	  register rtx y = reg_next_use[regno];
	  register int blocknum = BLOCK_NUM (insn);

	  /* If this is a hard reg, record this function uses the reg.  */

	  if (regno < FIRST_PSEUDO_REGISTER)
	    {
	      register int i;
	      int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (reg));

	      for (i = regno; i < endregno; i++)
		{
		  /* The next use is no longer "next", since a store
		     intervenes.  */
		  reg_next_use[i] = 0;

		  regs_ever_live[i] = 1;
		}
	    }
	  else
	    {
	      /* The next use is no longer "next", since a store
		 intervenes.  */
	      reg_next_use[regno] = 0;

	      /* Keep track of which basic blocks each reg appears in.  */

	      if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
		REG_BASIC_BLOCK (regno) = blocknum;
	      else if (REG_BASIC_BLOCK (regno) != blocknum)
		REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;

	      /* Count (weighted) references, stores, etc.  This counts a
		 register twice if it is modified, but that is correct.  */
	      REG_N_SETS (regno)++;

	      REG_N_REFS (regno) += loop_depth;

	      /* The insns where a reg is live are normally counted
		 elsewhere, but we want the count to include the insn
		 where the reg is set, and the normal counting mechanism
		 would not count it.  */
	      REG_LIVE_LENGTH (regno)++;
	    }

	  if (! some_not_needed)
	    {
	      /* Make a logical link from the next following insn
		 that uses this register, back to this insn.
		 The following insns have already been processed.

		 We don't build a LOG_LINK for hard registers contained
		 in ASM_OPERANDs.  If these registers get replaced,
		 we might wind up changing the semantics of the insn,
		 even if reload can make what appear to be valid assignments
		 later.  */
	      if (y && (BLOCK_NUM (y) == blocknum)
		  && (regno >= FIRST_PSEUDO_REGISTER
		      || asm_noperands (PATTERN (y)) < 0))
		LOG_LINKS (y)
		  = gen_rtx_INSN_LIST (VOIDmode, insn, LOG_LINKS (y));
	    }
	  else if (! some_needed)
	    {
	      /* Note that dead stores have already been deleted when possible.
		 If we get here, we have found a dead store that cannot
		 be eliminated (because the same insn does something useful).
		 Indicate this by marking the reg being set as dying here.  */
	      REG_NOTES (insn)
		= gen_rtx_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
	      REG_N_DEATHS (REGNO (reg))++;
	    }
	  else
	    {
	      /* This is a case where we have a multi-word hard register
		 and some, but not all, of the words of the register are
		 needed in subsequent insns.  Write REG_UNUSED notes
		 for those parts that were not needed.  This case should
		 be rare.  */

	      int i;

	      for (i = HARD_REGNO_NREGS (regno, GET_MODE (reg)) - 1;
		   i >= 0; i--)
		if (!REGNO_REG_SET_P (needed, regno + i))
		  REG_NOTES (insn)
		    = gen_rtx_EXPR_LIST (REG_UNUSED,
					 gen_rtx_REG (reg_raw_mode[regno + i],
						      regno + i),
					 REG_NOTES (insn));
	    }
	}
    }
  else if (GET_CODE (reg) == REG)
    reg_next_use[regno] = 0;

  /* If this is the last pass and this is a SCRATCH, show it will be dying
     here and count it.  */
  else if (GET_CODE (reg) == SCRATCH && insn != 0)
    {
      REG_NOTES (insn)
	= gen_rtx_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
      num_scratch++;
    }
}
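
/* Example of the LOG_LINKS bookkeeping above (insn numbers are
   hypothetical): if insn 10 sets (reg 70) and insn 14, already scanned and
   in the same basic block, is the next insn that uses (reg 70), then
   LOG_LINKS (insn 14) gains an entry pointing back at insn 10.  This is the
   information combine_instructions consults when looking for insn pairs to
   merge.  */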
#ifdef AUTO_INC_DEC

/* X is a MEM found in INSN.  See if we can convert it into an auto-increment
   reference.  */

static void
find_auto_inc (needed, x, insn)
     regset needed;
     rtx x;
     rtx insn;
{
  rtx addr = XEXP (x, 0);
  HOST_WIDE_INT offset = 0;
  rtx set;

  /* Here we detect use of an index register which might be good for
     postincrement, postdecrement, preincrement, or predecrement.  */

  if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
    offset = INTVAL (XEXP (addr, 1)), addr = XEXP (addr, 0);

  if (GET_CODE (addr) == REG)
    {
      register rtx y;
      register int size = GET_MODE_SIZE (GET_MODE (x));
      rtx use;
      rtx incr;
      int regno = REGNO (addr);

      /* Is the next use an increment that might make auto-increment? */
      if ((incr = reg_next_use[regno]) != 0
	  && (set = single_set (incr)) != 0
	  && GET_CODE (set) == SET
	  && BLOCK_NUM (incr) == BLOCK_NUM (insn)
	  /* Can't add side effects to jumps; if reg is spilled and
	     reloaded, there's no way to store back the altered value.  */
	  && GET_CODE (insn) != JUMP_INSN
	  && (y = SET_SRC (set), GET_CODE (y) == PLUS)
	  && XEXP (y, 0) == addr
	  && GET_CODE (XEXP (y, 1)) == CONST_INT
	  && (0
#ifdef HAVE_POST_INCREMENT
	      || (INTVAL (XEXP (y, 1)) == size && offset == 0)
#endif
#ifdef HAVE_POST_DECREMENT
	      || (INTVAL (XEXP (y, 1)) == - size && offset == 0)
#endif
#ifdef HAVE_PRE_INCREMENT
	      || (INTVAL (XEXP (y, 1)) == size && offset == size)
#endif
#ifdef HAVE_PRE_DECREMENT
	      || (INTVAL (XEXP (y, 1)) == - size && offset == - size)
#endif
	      )
	  /* Make sure this reg appears only once in this insn.  */
	  && (use = find_use_as_address (PATTERN (insn), addr, offset),
	      use != 0 && use != (rtx) 1))
	{
	  rtx q = SET_DEST (set);
	  enum rtx_code inc_code = (INTVAL (XEXP (y, 1)) == size
				    ? (offset ? PRE_INC : POST_INC)
				    : (offset ? PRE_DEC : POST_DEC));

	  if (dead_or_set_p (incr, addr))
	    {
	      /* This is the simple case.  Try to make the auto-inc.  If
		 we can't, we are done.  Otherwise, we will do any
		 needed updates below.  */
	      if (! validate_change (insn, &XEXP (x, 0),
				     gen_rtx_fmt_e (inc_code, Pmode, addr),
				     0))
		return;
	    }
	  else if (GET_CODE (q) == REG
		   /* PREV_INSN used here to check the semi-open interval
		      [insn,incr).  */
		   && ! reg_used_between_p (q, PREV_INSN (insn), incr)
		   /* We must also check for sets of q as q may be
		      a call clobbered hard register and there may
		      be a call between PREV_INSN (insn) and incr.  */
		   && ! reg_set_between_p (q, PREV_INSN (insn), incr))
	    {
	      /* We have *p followed sometime later by q = p+size.
		 Both p and q must be live afterward,
		 and q is not used between INSN and its assignment.
		 Change it to q = p, ...*q..., q = q+size.
		 Then fall into the usual case.  */
	      rtx insns, temp;

	      start_sequence ();
	      emit_move_insn (q, addr);
	      insns = get_insns ();
	      end_sequence ();

	      /* If anything in INSNS have UID's that don't fit within the
		 extra space we allocate earlier, we can't make this auto-inc.
		 This should never happen.  */
	      for (temp = insns; temp; temp = NEXT_INSN (temp))
		{
		  if (INSN_UID (temp) > max_uid_for_flow)
		    return;
		  BLOCK_NUM (temp) = BLOCK_NUM (insn);
		}

	      /* If we can't make the auto-inc, or can't make the
		 replacement into Y, exit.  There's no point in making
		 the change below if we can't do the auto-inc and doing
		 so is not correct in the pre-inc case.  */

	      validate_change (insn, &XEXP (x, 0),
			       gen_rtx_fmt_e (inc_code, Pmode, q),
			       1);
	      validate_change (incr, &XEXP (y, 0), q, 1);
	      if (! apply_change_group ())
		return;

	      /* We now know we'll be doing this change, so emit the
		 new insn(s) and do the updates.  */
	      emit_insns_before (insns, insn);

	      if (basic_block_head[BLOCK_NUM (insn)] == insn)
		basic_block_head[BLOCK_NUM (insn)] = insns;

	      /* INCR will become a NOTE and INSN won't contain a
		 use of ADDR.  If a use of ADDR was just placed in
		 the insn before INSN, make that the next use.
		 Otherwise, invalidate it.  */
	      if (GET_CODE (PREV_INSN (insn)) == INSN
		  && GET_CODE (PATTERN (PREV_INSN (insn))) == SET
		  && SET_SRC (PATTERN (PREV_INSN (insn))) == addr)
		reg_next_use[regno] = PREV_INSN (insn);
	      else
		reg_next_use[regno] = 0;

	      addr = q;
	      regno = REGNO (q);

	      /* REGNO is now used in INCR which is below INSN, but
		 it previously wasn't live here.  If we don't mark
		 it as needed, we'll put a REG_DEAD note for it
		 on this insn, which is incorrect.  */
	      SET_REGNO_REG_SET (needed, regno);

	      /* If there are any calls between INSN and INCR, show
		 that REGNO now crosses them.  */
	      for (temp = insn; temp != incr; temp = NEXT_INSN (temp))
		if (GET_CODE (temp) == CALL_INSN)
		  REG_N_CALLS_CROSSED (regno)++;
	    }
	  else
	    return;

	  /* If we haven't returned, it means we were able to make the
	     auto-inc, so update the status.  First, record that this insn
	     has an implicit side effect.  */

	  REG_NOTES (insn)
	    = gen_rtx_EXPR_LIST (REG_INC, addr, REG_NOTES (insn));

	  /* Modify the old increment-insn to simply copy
	     the already-incremented value of our register.  */
	  if (! validate_change (incr, &SET_SRC (set), addr, 0))
	    abort ();

	  /* If that makes it a no-op (copying the register into itself) delete
	     it so it won't appear to be a "use" and a "set" of this
	     register.  */
	  if (SET_DEST (set) == addr)
	    {
	      PUT_CODE (incr, NOTE);
	      NOTE_LINE_NUMBER (incr) = NOTE_INSN_DELETED;
	      NOTE_SOURCE_FILE (incr) = 0;
	    }

	  if (regno >= FIRST_PSEUDO_REGISTER)
	    {
	      /* Count an extra reference to the reg.  When a reg is
		 incremented, spilling it is worse, so we want to make
		 that less likely.  */
	      REG_N_REFS (regno) += loop_depth;

	      /* Count the increment as a setting of the register,
		 even though it isn't a SET in rtl.  */
	      REG_N_SETS (regno)++;
	    }
	}
    }
}

#endif /* AUTO_INC_DEC */
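
/* Sketch of the transformation find_auto_inc performs (rtl abridged, the
   offsets assume a 4-byte access):

       ... (mem (reg p)) ...                           ;; INSN uses *p
       (set (reg p) (plus (reg p) (const_int 4)))      ;; INCR

   becomes, when HAVE_POST_INCREMENT holds and p dies in INCR,

       ... (mem (post_inc (reg p))) ...

   and INCR is turned into a plain copy or a deleted note.  The pre-inc and
   pre-dec cases are analogous; the constant offset already present in the
   address decides between the PRE_ and POST_ forms.  */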
/* Scan expression X and store a 1-bit in LIVE for each reg it uses.
   This is done assuming the registers needed from X
   are those that have 1-bits in NEEDED.

   On the final pass, FINAL is 1.  This means try for autoincrement
   and count the uses and deaths of each pseudo-reg.

   INSN is the containing instruction.  If INSN is dead, this function is not
   called.  */

static void
mark_used_regs (needed, live, x, final, insn)
     regset needed;
     regset live;
     rtx x;
     int final;
     rtx insn;
{
  register RTX_CODE code;
  register int regno;
  int i;

 retry:
  code = GET_CODE (x);
  switch (code)
    {
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_INT:
    case CONST:
    case CONST_DOUBLE:
    case PC:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return;

#ifdef HAVE_cc0
    case CC0:
      cc0_live = 1;
      return;
#endif

    case CLOBBER:
      /* If we are clobbering a MEM, mark any registers inside the address
	 as being used.  */
      if (GET_CODE (XEXP (x, 0)) == MEM)
	mark_used_regs (needed, live, XEXP (XEXP (x, 0), 0), final, insn);
      return;

    case MEM:
      /* Invalidate the data for the last MEM stored, but only if MEM is
	 something that can be stored into.  */
      if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
	  && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
	; /* needn't clear last_mem_set */
      else
	last_mem_set = 0;

#ifdef AUTO_INC_DEC
      if (final)
	find_auto_inc (needed, x, insn);
#endif
      break;

    case SUBREG:
      if (GET_CODE (SUBREG_REG (x)) == REG
	  && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER
	  && (GET_MODE_SIZE (GET_MODE (x))
	      != GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
	REG_CHANGES_SIZE (REGNO (SUBREG_REG (x))) = 1;

      /* While we're here, optimize this case.  */
      x = SUBREG_REG (x);

      /* In case the SUBREG is not of a register, don't optimize.  */
      if (GET_CODE (x) != REG)
	{
	  mark_used_regs (needed, live, x, final, insn);
	  return;
	}

      /* ... fall through ...  */

    case REG:
      /* We see a register being used (other than being set);
	 mark it as needed.  */

      regno = REGNO (x);
      {
	int some_needed = REGNO_REG_SET_P (needed, regno);
	int some_not_needed = ! some_needed;

	SET_REGNO_REG_SET (live, regno);

	/* A hard reg in a wide mode may really be multiple registers.
	   If so, mark all of them just like the first.  */
	if (regno < FIRST_PSEUDO_REGISTER)
	  {
	    int n;

	    /* For stack ptr or fixed arg pointer,
	       nothing below can be necessary, so waste no more time.  */
	    if (regno == STACK_POINTER_REGNUM
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
		|| regno == HARD_FRAME_POINTER_REGNUM
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
		|| (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
#endif
		|| regno == FRAME_POINTER_REGNUM)
	      {
		/* If this is a register we are going to try to eliminate,
		   don't mark it live here.  If we are successful in
		   eliminating it, it need not be live unless it is used for
		   pseudos, in which case it will have been set live when
		   it was allocated to the pseudos.  If the register will not
		   be eliminated, reload will set it live at that point.  */

		if (! TEST_HARD_REG_BIT (elim_reg_set, regno))
		  regs_ever_live[regno] = 1;
		return;
	      }
	    /* No death notes for global register variables;
	       their values are live after this function exits.  */
	    if (global_regs[regno])
	      {
		if (final)
		  reg_next_use[regno] = insn;
		return;
	      }

	    n = HARD_REGNO_NREGS (regno, GET_MODE (x));
	    while (--n > 0)
	      {
		int regno_n = regno + n;
		int needed_regno = REGNO_REG_SET_P (needed, regno_n);

		SET_REGNO_REG_SET (live, regno_n);
		some_needed |= needed_regno;
		some_not_needed |= ! needed_regno;
	      }
	  }

	if (final)
	  {
	    /* Record where each reg is used, so when the reg
	       is set we know the next insn that uses it.  */

	    reg_next_use[regno] = insn;

	    if (regno < FIRST_PSEUDO_REGISTER)
	      {
		/* If a hard reg is being used,
		   record that this function does use it.  */

		i = HARD_REGNO_NREGS (regno, GET_MODE (x));
		do
		  regs_ever_live[regno + --i] = 1;
		while (i > 0);
	      }
	    else
	      {
		/* Keep track of which basic block each reg appears in.  */

		register int blocknum = BLOCK_NUM (insn);

		if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
		  REG_BASIC_BLOCK (regno) = blocknum;
		else if (REG_BASIC_BLOCK (regno) != blocknum)
		  REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;

		/* Count (weighted) number of uses of each reg.  */

		REG_N_REFS (regno) += loop_depth;
	      }

	    /* Record and count the insns in which a reg dies.
	       If it is used in this insn and was dead below the insn
	       then it dies in this insn.  If it was set in this insn,
	       we do not make a REG_DEAD note; likewise if we already
	       made such a note.  */

	    if (some_not_needed
		&& ! dead_or_set_p (insn, x)
		&& (regno >= FIRST_PSEUDO_REGISTER || ! fixed_regs[regno]))
	      {
		/* Check for the case where the register dying partially
		   overlaps the register set by this insn.  */
		if (regno < FIRST_PSEUDO_REGISTER
		    && HARD_REGNO_NREGS (regno, GET_MODE (x)) > 1)
		  {
		    int n = HARD_REGNO_NREGS (regno, GET_MODE (x));
		    while (--n >= 0)
		      some_needed |= dead_or_set_regno_p (insn, regno + n);
		  }

		/* If none of the words in X is needed, make a REG_DEAD
		   note.  Otherwise, we must make partial REG_DEAD notes.  */
		if (! some_needed)
		  {
		    REG_NOTES (insn)
		      = gen_rtx_EXPR_LIST (REG_DEAD, x, REG_NOTES (insn));
		    REG_N_DEATHS (regno)++;
		  }
		else
		  {
		    int i;

		    /* Don't make a REG_DEAD note for a part of a register
		       that is set in the insn.  */

		    for (i = HARD_REGNO_NREGS (regno, GET_MODE (x)) - 1;
			 i >= 0; i--)
		      if (!REGNO_REG_SET_P (needed, regno + i)
			  && ! dead_or_set_regno_p (insn, regno + i))
			REG_NOTES (insn)
			  = gen_rtx_EXPR_LIST (REG_DEAD,
					       gen_rtx_REG (reg_raw_mode[regno + i],
							    regno + i),
					       REG_NOTES (insn));
		  }
	      }
	  }
      }
      return;

    case SET:
      {
	register rtx testreg = SET_DEST (x);
	int mark_dest = 0;

	/* If storing into MEM, don't show it as being used.  But do
	   show the address as being used.  */
	if (GET_CODE (testreg) == MEM)
	  {
#ifdef AUTO_INC_DEC
	    if (final)
	      find_auto_inc (needed, testreg, insn);
#endif
	    mark_used_regs (needed, live, XEXP (testreg, 0), final, insn);
	    mark_used_regs (needed, live, SET_SRC (x), final, insn);
	    return;
	  }

	/* Storing in STRICT_LOW_PART is like storing in a reg
	   in that this SET might be dead, so ignore it in TESTREG,
	   but in some other ways it is like using the reg.

	   Storing in a SUBREG or a bit field is like storing the entire
	   register in that if the register's value is not used
	   then this SET is not needed.  */
	while (GET_CODE (testreg) == STRICT_LOW_PART
	       || GET_CODE (testreg) == ZERO_EXTRACT
	       || GET_CODE (testreg) == SIGN_EXTRACT
	       || GET_CODE (testreg) == SUBREG)
	  {
	    if (GET_CODE (testreg) == SUBREG
		&& GET_CODE (SUBREG_REG (testreg)) == REG
		&& REGNO (SUBREG_REG (testreg)) >= FIRST_PSEUDO_REGISTER
		&& (GET_MODE_SIZE (GET_MODE (testreg))
		    != GET_MODE_SIZE (GET_MODE (SUBREG_REG (testreg)))))
	      REG_CHANGES_SIZE (REGNO (SUBREG_REG (testreg))) = 1;

	    /* Modifying a single register in an alternate mode
	       does not use any of the old value.  But these other
	       ways of storing in a register do use the old value.  */
	    if (GET_CODE (testreg) == SUBREG
		&& !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
	      ;
	    else
	      mark_dest = 1;

	    testreg = XEXP (testreg, 0);
	  }

	/* If this is a store into a register,
	   recursively scan the value being stored.  */

	if (GET_CODE (testreg) == REG
	    && (regno = REGNO (testreg), regno != FRAME_POINTER_REGNUM)
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
	    && regno != HARD_FRAME_POINTER_REGNUM
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
	    && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
#endif
	    )
	  /* We used to exclude global_regs here, but that seems wrong.
	     Storing in them is like storing in mem.  */
	  {
	    mark_used_regs (needed, live, SET_SRC (x), final, insn);
	    if (mark_dest)
	      mark_used_regs (needed, live, SET_DEST (x), final, insn);
	    return;
	  }
      }
      break;

    case RETURN:
      /* If exiting needs the right stack value, consider this insn as
	 using the stack pointer.  In any event, consider it as using
	 all global registers and all registers used by return.  */

#ifdef EXIT_IGNORE_STACK
      if (! EXIT_IGNORE_STACK
	  || (! FRAME_POINTER_REQUIRED
	      && ! current_function_calls_alloca
	      && flag_omit_frame_pointer))
#endif
	SET_REGNO_REG_SET (live, STACK_POINTER_REGNUM);

      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	if (global_regs[i]
#ifdef EPILOGUE_USES
	    || EPILOGUE_USES (i)
#endif
	    )
	  SET_REGNO_REG_SET (live, i);
      break;

    default:
      break;
    }

  /* Recursively scan the operands of this expression.  */

  {
    register char *fmt = GET_RTX_FORMAT (code);
    register int i;

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
	if (fmt[i] == 'e')
	  {
	    /* Tail recursive case: save a function call level.  */
	    if (i == 0)
	      {
		x = XEXP (x, 0);
		goto retry;
	      }
	    mark_used_regs (needed, live, XEXP (x, i), final, insn);
	  }
	else if (fmt[i] == 'E')
	  {
	    register int j;
	    for (j = 0; j < XVECLEN (x, i); j++)
	      mark_used_regs (needed, live, XVECEXP (x, i, j), final, insn);
	  }
      }
  }
}
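
/* Note on the recursion above: GET_RTX_FORMAT yields a string such as "ee"
   or "eE" describing the operands of the rtx code.  Operands tagged 'e' are
   single expressions and are scanned directly (the 0th one via the `retry'
   tail call to avoid a recursion level), while operands tagged 'E' are
   vectors whose elements are scanned one by one.  */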
#ifdef AUTO_INC_DEC

static void
try_pre_increment_1 (insn)
     rtx insn;
{
  /* Find the next use of this reg.  If in same basic block,
     make it do pre-increment or pre-decrement if appropriate.  */
  rtx x = single_set (insn);
  HOST_WIDE_INT amount = ((GET_CODE (SET_SRC (x)) == PLUS ? 1 : -1)
			  * INTVAL (XEXP (SET_SRC (x), 1)));
  int regno = REGNO (SET_DEST (x));
  rtx y = reg_next_use[regno];

  if (y != 0
      && BLOCK_NUM (y) == BLOCK_NUM (insn)
      /* Don't do this if the reg dies, or gets set in y; a standard addressing
	 mode would be better.  */
      && ! dead_or_set_p (y, SET_DEST (x))
      && try_pre_increment (y, SET_DEST (x), amount))
    {
      /* We have found a suitable auto-increment
	 and already changed insn Y to do it.
	 So flush this increment-instruction.  */
      PUT_CODE (insn, NOTE);
      NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
      NOTE_SOURCE_FILE (insn) = 0;

      /* Count a reference to this reg for the increment
	 insn we are deleting.  When a reg is incremented,
	 spilling it is worse, so we want to make that
	 less likely.  */
      if (regno >= FIRST_PSEUDO_REGISTER)
	{
	  REG_N_REFS (regno) += loop_depth;
	  REG_N_SETS (regno)++;
	}
    }
}
/* Try to change INSN so that it does pre-increment or pre-decrement
   addressing on register REG in order to add AMOUNT to REG.
   AMOUNT is negative for pre-decrement.
   Returns 1 if the change could be made.
   This checks all about the validity of the result of modifying INSN.  */

static int
try_pre_increment (insn, reg, amount)
     rtx insn, reg;
     HOST_WIDE_INT amount;
{
  register rtx use;

  /* Nonzero if we can try to make a pre-increment or pre-decrement.
     For example, addl $4,r1; movl (r1),... can become movl +(r1),...  */
  int pre_ok = 0;
  /* Nonzero if we can try to make a post-increment or post-decrement.
     For example, addl $4,r1; movl -4(r1),... can become movl (r1)+,...
     It is possible for both PRE_OK and POST_OK to be nonzero if the machine
     supports both pre-inc and post-inc, or both pre-dec and post-dec.  */
  int post_ok = 0;

  /* Nonzero if the opportunity actually requires post-inc or post-dec.  */
  int do_post = 0;

  /* From the sign of increment, see which possibilities are conceivable
     on this target machine.  */
#ifdef HAVE_PRE_INCREMENT
  if (amount > 0)
    pre_ok = 1;
#endif
#ifdef HAVE_POST_INCREMENT
  if (amount > 0)
    post_ok = 1;
#endif
#ifdef HAVE_PRE_DECREMENT
  if (amount < 0)
    pre_ok = 1;
#endif
#ifdef HAVE_POST_DECREMENT
  if (amount < 0)
    post_ok = 1;
#endif

  if (! (pre_ok || post_ok))
    return 0;

  /* It is not safe to add a side effect to a jump insn
     because if the incremented register is spilled and must be reloaded
     there would be no way to store the incremented value back in memory.  */

  if (GET_CODE (insn) == JUMP_INSN)
    return 0;

  use = 0;
  if (pre_ok)
    use = find_use_as_address (PATTERN (insn), reg, 0);
  if (post_ok && (use == 0 || use == (rtx) 1))
    {
      use = find_use_as_address (PATTERN (insn), reg, -amount);
      do_post = 1;
    }

  if (use == 0 || use == (rtx) 1)
    return 0;

  if (GET_MODE_SIZE (GET_MODE (use)) != (amount > 0 ? amount : - amount))
    return 0;

  /* See if this combination of instruction and addressing mode exists.  */
  if (! validate_change (insn, &XEXP (use, 0),
			 gen_rtx_fmt_e (amount > 0
					? (do_post ? POST_INC : PRE_INC)
					: (do_post ? POST_DEC : PRE_DEC),
					Pmode, reg), 0))
    return 0;

  /* Record that this insn now has an implicit side effect on X.  */
  REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_INC, reg, REG_NOTES (insn));
  return 1;
}

#endif /* AUTO_INC_DEC */
/* Find the place in the rtx X where REG is used as a memory address.
   Return the MEM rtx that so uses it.
   If PLUSCONST is nonzero, search instead for a memory address equivalent to
   (plus REG (const_int PLUSCONST)).

   If such an address does not appear, return 0.
   If REG appears more than once, or is used other than in such an address,
   return (rtx) 1.  */

rtx
find_use_as_address (x, reg, plusconst)
     register rtx x;
     rtx reg;
     HOST_WIDE_INT plusconst;
{
  enum rtx_code code = GET_CODE (x);
  char *fmt = GET_RTX_FORMAT (code);
  register int i;
  register rtx value = 0;
  register rtx tem;

  if (code == MEM && XEXP (x, 0) == reg && plusconst == 0)
    return x;

  if (code == MEM && GET_CODE (XEXP (x, 0)) == PLUS
      && XEXP (XEXP (x, 0), 0) == reg
      && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
      && INTVAL (XEXP (XEXP (x, 0), 1)) == plusconst)
    return x;

  if (code == SIGN_EXTRACT || code == ZERO_EXTRACT)
    {
      /* If REG occurs inside a MEM used in a bit-field reference,
	 that is unacceptable.  */
      if (find_use_as_address (XEXP (x, 0), reg, 0) != 0)
	return (rtx) (HOST_WIDE_INT) 1;
    }

  if (x == reg)
    return (rtx) (HOST_WIDE_INT) 1;

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  tem = find_use_as_address (XEXP (x, i), reg, plusconst);
	  if (value == 0)
	    value = tem;
	  else if (tem != 0)
	    return (rtx) (HOST_WIDE_INT) 1;
	}
      if (fmt[i] == 'E')
	{
	  register int j;
	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    {
	      tem = find_use_as_address (XVECEXP (x, i, j), reg, plusconst);
	      if (value == 0)
		value = tem;
	      else if (tem != 0)
		return (rtx) (HOST_WIDE_INT) 1;
	    }
	}
    }

  return value;
}
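
/* Return-value convention used above and relied on by the auto-inc code:
   0 means REG is not used as the requested kind of address anywhere in X,
   a MEM pointer is the single such use found, and the literal value (rtx) 1
   means the use is unsuitable (REG appears more than once, appears outside
   an address, or appears inside a bit-field MEM).  */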
/* Write information about registers and basic blocks into FILE.
   This is part of making a debugging dump.  */

void
dump_flow_info (file)
     FILE *file;
{
  register int i;
  static char *reg_class_names[] = REG_CLASS_NAMES;

  fprintf (file, "%d registers.\n", max_regno);

  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    if (REG_N_REFS (i))
      {
	enum reg_class class, altclass;
	fprintf (file, "\nRegister %d used %d times across %d insns",
		 i, REG_N_REFS (i), REG_LIVE_LENGTH (i));
	if (REG_BASIC_BLOCK (i) >= 0)
	  fprintf (file, " in block %d", REG_BASIC_BLOCK (i));
	fprintf (file, "; set %d time%s", REG_N_SETS (i),
		 (REG_N_SETS (i) == 1) ? "" : "s");
	if (REG_USERVAR_P (regno_reg_rtx[i]))
	  fprintf (file, "; user var");
	if (REG_N_DEATHS (i) != 1)
	  fprintf (file, "; dies in %d places", REG_N_DEATHS (i));
	if (REG_N_CALLS_CROSSED (i) == 1)
	  fprintf (file, "; crosses 1 call");
	else if (REG_N_CALLS_CROSSED (i))
	  fprintf (file, "; crosses %d calls", REG_N_CALLS_CROSSED (i));
	if (PSEUDO_REGNO_BYTES (i) != UNITS_PER_WORD)
	  fprintf (file, "; %d bytes", PSEUDO_REGNO_BYTES (i));
	class = reg_preferred_class (i);
	altclass = reg_alternate_class (i);
	if (class != GENERAL_REGS || altclass != ALL_REGS)
	  {
	    if (altclass == ALL_REGS || class == ALL_REGS)
	      fprintf (file, "; pref %s", reg_class_names[(int) class]);
	    else if (altclass == NO_REGS)
	      fprintf (file, "; %s or none", reg_class_names[(int) class]);
	    else
	      fprintf (file, "; pref %s, else %s",
		       reg_class_names[(int) class],
		       reg_class_names[(int) altclass]);
	  }
	if (REGNO_POINTER_FLAG (i))
	  fprintf (file, "; pointer");
	fprintf (file, ".\n");
      }

  fprintf (file, "\n%d basic blocks.\n", n_basic_blocks);
  for (i = 0; i < n_basic_blocks; i++)
    {
      register rtx head, jump;
      register int regno;

      fprintf (file, "\nBasic block %d: first insn %d, last %d.\n",
	       i,
	       INSN_UID (basic_block_head[i]),
	       INSN_UID (basic_block_end[i]));
      /* The control flow graph's storage is freed
	 now when flow_analysis returns.
	 Don't try to print it if it is gone.  */
      if (basic_block_drops_in)
	{
	  fprintf (file, "Reached from blocks: ");
	  head = basic_block_head[i];
	  if (GET_CODE (head) == CODE_LABEL)
	    for (jump = LABEL_REFS (head);
		 jump != head;
		 jump = LABEL_NEXTREF (jump))
	      {
		register int from_block = BLOCK_NUM (CONTAINING_INSN (jump));
		fprintf (file, " %d", from_block);
	      }
	  if (basic_block_drops_in[i])
	    fprintf (file, " previous");
	}
      fprintf (file, "\nRegisters live at start:");
      for (regno = 0; regno < max_regno; regno++)
	if (REGNO_REG_SET_P (basic_block_live_at_start[i], regno))
	  fprintf (file, " %d", regno);
      fprintf (file, "\n");
    }
  fprintf (file, "\n");
}
/* Like print_rtl, but also print out live information for the start of each
   basic block.  */

void
print_rtl_with_bb (outf, rtx_first)
     FILE *outf;
     rtx rtx_first;
{
  register rtx tmp_rtx;

  if (rtx_first == 0)
    fprintf (outf, "(nil)\n");
  else
    {
      int i, bb;
      enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
      int max_uid = get_max_uid ();
      int *start = (int *) alloca (max_uid * sizeof (int));
      int *end = (int *) alloca (max_uid * sizeof (int));
      char *in_bb_p = (char *) alloca (max_uid * sizeof (enum bb_state));

      for (i = 0; i < max_uid; i++)
	{
	  start[i] = end[i] = -1;
	  in_bb_p[i] = NOT_IN_BB;
	}

      for (i = n_basic_blocks-1; i >= 0; i--)
	{
	  rtx x;

	  start[INSN_UID (basic_block_head[i])] = i;
	  end[INSN_UID (basic_block_end[i])] = i;
	  for (x = basic_block_head[i]; x != NULL_RTX; x = NEXT_INSN (x))
	    {
	      in_bb_p[ INSN_UID(x)]
		= (in_bb_p[ INSN_UID(x)] == NOT_IN_BB)
		  ? IN_ONE_BB : IN_MULTIPLE_BB;
	      if (x == basic_block_end[i])
		break;
	    }
	}

      for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
	{
	  if ((bb = start[INSN_UID (tmp_rtx)]) >= 0)
	    {
	      fprintf (outf, ";; Start of basic block %d, registers live:",
		       bb);

	      EXECUTE_IF_SET_IN_REG_SET (basic_block_live_at_start[bb], 0, i,
					 {
					   fprintf (outf, " %d", i);
					   if (i < FIRST_PSEUDO_REGISTER)
					     fprintf (outf, " [%s]",
						      reg_names[i]);
					 });
	      fprintf (outf, "\n");
	    }

	  if (in_bb_p[ INSN_UID(tmp_rtx)] == NOT_IN_BB
	      && GET_CODE (tmp_rtx) != NOTE
	      && GET_CODE (tmp_rtx) != BARRIER)
	    fprintf (outf, ";; Insn is not within a basic block\n");
	  else if (in_bb_p[ INSN_UID(tmp_rtx)] == IN_MULTIPLE_BB)
	    fprintf (outf, ";; Insn is in multiple basic blocks\n");

	  print_rtl_single (outf, tmp_rtx);

	  if ((bb = end[INSN_UID (tmp_rtx)]) >= 0)
	    fprintf (outf, ";; End of basic block %d\n", bb);
	}
    }
}
/* Integer list support.  */

/* Allocate a node from list *HEAD_PTR.  */

static int_list_ptr
alloc_int_list_node (head_ptr)
     int_list_block **head_ptr;
{
  struct int_list_block *first_blk = *head_ptr;

  if (first_blk == NULL || first_blk->nodes_left <= 0)
    {
      first_blk = (struct int_list_block *) xmalloc (sizeof (struct int_list_block));
      first_blk->nodes_left = INT_LIST_NODES_IN_BLK;
      first_blk->next = *head_ptr;
      *head_ptr = first_blk;
    }

  first_blk->nodes_left--;
  return &first_blk->nodes[first_blk->nodes_left];
}

/* Pointer to head of predecessor/successor block list.  */
static int_list_block *pred_int_list_blocks;

/* Add a new node to integer list LIST with value VAL.
   LIST is a pointer to a list object to allow for different implementations.
   If *LIST is initially NULL, the list is empty.
   The caller must not care whether the element is added to the front or
   to the end of the list (to allow for different implementations).  */

static int_list_ptr
add_int_list_node (blk_list, list, val)
     int_list_block **blk_list;
     int_list_ptr *list;
     int val;
{
  int_list_ptr p = alloc_int_list_node (blk_list);

  INT_LIST_VAL (p) = val;
  p->next = *list;
  *list = p;
  return p;
}

/* Free the blocks of lists at BLK_LIST.  */

void
free_int_list (blk_list)
     int_list_block **blk_list;
{
  int_list_block *p, *next;

  for (p = *blk_list; p != NULL; p = next)
    {
      next = p->next;
      free (p);
    }

  /* Mark list as empty for the next function we compile.  */
  *blk_list = NULL;
}
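
/* Typical use of the integer-list primitives above (the local names in this
   fragment are illustrative only):

       int_list_ptr edges = NULL, p;
       add_int_list_node (&pred_int_list_blocks, &edges, 3);
       add_int_list_node (&pred_int_list_blocks, &edges, 7);
       for (p = edges; p != NULL; p = p->next)
	 ... INT_LIST_VAL (p) ...

   Nodes come out of the shared block allocator, so individual lists are
   never freed; free_int_list releases every block at once.  */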
/* Predecessor/successor computation.  */

/* Mark PRED_BB a predecessor of SUCC_BB,
   and conversely SUCC_BB a successor of PRED_BB.  */

static void
add_pred_succ (pred_bb, succ_bb, s_preds, s_succs, num_preds, num_succs)
     int pred_bb;
     int succ_bb;
     int_list_ptr *s_preds;
     int_list_ptr *s_succs;
     int *num_preds;
     int *num_succs;
{
  if (succ_bb != EXIT_BLOCK)
    {
      add_int_list_node (&pred_int_list_blocks, &s_preds[succ_bb], pred_bb);
      num_preds[succ_bb]++;
    }
  if (pred_bb != ENTRY_BLOCK)
    {
      add_int_list_node (&pred_int_list_blocks, &s_succs[pred_bb], succ_bb);
      num_succs[pred_bb]++;
    }
}
/* Compute the predecessors and successors for each block.  */

void
compute_preds_succs (s_preds, s_succs, num_preds, num_succs)
     int_list_ptr *s_preds;
     int_list_ptr *s_succs;
     int *num_preds;
     int *num_succs;
{
  int bb, clear_local_bb_vars = 0;

  bzero ((char *) s_preds, n_basic_blocks * sizeof (int_list_ptr));
  bzero ((char *) s_succs, n_basic_blocks * sizeof (int_list_ptr));
  bzero ((char *) num_preds, n_basic_blocks * sizeof (int));
  bzero ((char *) num_succs, n_basic_blocks * sizeof (int));

  /* This routine can be called after life analysis; in that case
     basic_block_drops_in and uid_block_number will not be available
     and we must recompute their values.  */
  if (basic_block_drops_in == NULL || uid_block_number == NULL)
    {
      clear_local_bb_vars = 1;
      basic_block_drops_in = (char *) alloca (n_basic_blocks);
      uid_block_number = (int *) alloca ((get_max_uid () + 1) * sizeof (int));

      bzero ((char *) basic_block_drops_in, n_basic_blocks * sizeof (char));
      bzero ((char *) uid_block_number, n_basic_blocks * sizeof (int));

      /* Scan each basic block setting basic_block_drops_in and
	 uid_block_number as needed.  */
      for (bb = 0; bb < n_basic_blocks; bb++)
	{
	  rtx insn, stop_insn;

	  if (bb == 0)
	    stop_insn = NULL_RTX;
	  else
	    stop_insn = basic_block_end[bb-1];

	  /* Look backwards from the start of this block.  Stop if we
	     hit the start of the function or the end of a previous
	     block.  Don't walk backwards through blocks that are just
	     notes and such.  */
	  for (insn = PREV_INSN (basic_block_head[bb]);
	       insn && insn != stop_insn && GET_CODE (insn) == NOTE;
	       insn = PREV_INSN (insn))
	    ;

	  /* Never set basic_block_drops_in for the first block.  It is
	     implicit.

	     If we stopped on anything other than a BARRIER, then this
	     block drops in.  */
	  if (bb != 0)
	    basic_block_drops_in[bb] = (insn ? GET_CODE (insn) != BARRIER : 1);

	  insn = basic_block_head[bb];
	  while (insn)
	    {
	      BLOCK_NUM (insn) = bb;
	      if (insn == basic_block_end[bb])
		break;
	      insn = NEXT_INSN (insn);
	    }
	}
    }

  for (bb = 0; bb < n_basic_blocks; bb++)
    {
      rtx head;
      rtx jump;

      head = BLOCK_HEAD (bb);

      if (GET_CODE (head) == CODE_LABEL)
	for (jump = LABEL_REFS (head);
	     jump != head;
	     jump = LABEL_NEXTREF (jump))
	  {
	    if (! INSN_DELETED_P (CONTAINING_INSN (jump))
		&& (GET_CODE (CONTAINING_INSN (jump)) != NOTE
		    || (NOTE_LINE_NUMBER (CONTAINING_INSN (jump))
			!= NOTE_INSN_DELETED)))
	      add_pred_succ (BLOCK_NUM (CONTAINING_INSN (jump)), bb,
			     s_preds, s_succs, num_preds, num_succs);
	  }

      jump = BLOCK_END (bb);
      /* If this is a RETURN insn or a conditional jump in the last
	 basic block, or a non-jump insn in the last basic block, then
	 this block reaches the exit block.  */
      if ((GET_CODE (jump) == JUMP_INSN && GET_CODE (PATTERN (jump)) == RETURN)
	  || (((GET_CODE (jump) == JUMP_INSN
		&& condjump_p (jump) && !simplejump_p (jump))
	       || GET_CODE (jump) != JUMP_INSN)
	      && (bb == n_basic_blocks - 1)))
	add_pred_succ (bb, EXIT_BLOCK, s_preds, s_succs, num_preds, num_succs);

      if (basic_block_drops_in[bb])
	add_pred_succ (bb - 1, bb, s_preds, s_succs, num_preds, num_succs);
    }

  add_pred_succ (ENTRY_BLOCK, 0, s_preds, s_succs, num_preds, num_succs);

  /* If we allocated any variables in temporary storage, clear out the
     pointer to the local storage to avoid dangling pointers.  */
  if (clear_local_bb_vars)
    {
      basic_block_drops_in = NULL;
      uid_block_number = NULL;
    }
}
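
/* The edges recorded above are: ENTRY_BLOCK -> block 0, one edge for every
   CODE_LABEL reference that reaches a block head, a fall-through edge from
   block BB-1 whenever basic_block_drops_in[BB] is set, and an edge to
   EXIT_BLOCK from RETURN insns and from the last basic block.  */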
void
dump_bb_data (file, preds, succs)
     FILE *file;
     int_list_ptr *preds;
     int_list_ptr *succs;
{
  int bb;
  int_list_ptr p;

  fprintf (file, "BB data\n\n");
  for (bb = 0; bb < n_basic_blocks; bb++)
    {
      fprintf (file, "BB %d, start %d, end %d\n", bb,
	       INSN_UID (BLOCK_HEAD (bb)), INSN_UID (BLOCK_END (bb)));
      fprintf (file, " preds:");
      for (p = preds[bb]; p != NULL; p = p->next)
	{
	  int pred_bb = INT_LIST_VAL (p);
	  if (pred_bb == ENTRY_BLOCK)
	    fprintf (file, " entry");
	  else
	    fprintf (file, " %d", pred_bb);
	}
      fprintf (file, "\n");
      fprintf (file, " succs:");
      for (p = succs[bb]; p != NULL; p = p->next)
	{
	  int succ_bb = INT_LIST_VAL (p);
	  if (succ_bb == EXIT_BLOCK)
	    fprintf (file, " exit");
	  else
	    fprintf (file, " %d", succ_bb);
	}
      fprintf (file, "\n");
    }
  fprintf (file, "\n");
}

void
dump_sbitmap (file, bmap)
     FILE *file;
     sbitmap bmap;
{
  int i, j, n;
  int set_size = bmap->size;
  int total_bits = bmap->n_bits;

  fprintf (file, "  ");
  for (i = n = 0; i < set_size && n < total_bits; i++)
    {
      for (j = 0; j < SBITMAP_ELT_BITS && n < total_bits; j++, n++)
	{
	  if (n != 0 && n % 10 == 0)
	    fprintf (file, " ");
	  fprintf (file, "%d", (bmap->elms[i] & (1L << j)) != 0);
	}
    }
  fprintf (file, "\n");
}

void
dump_sbitmap_vector (file, title, subtitle, bmaps, n_maps)
     FILE *file;
     char *title, *subtitle;
     sbitmap *bmaps;
     int n_maps;
{
  int bb;

  fprintf (file, "%s\n", title);
  for (bb = 0; bb < n_maps; bb++)
    {
      fprintf (file, "%s %d\n", subtitle, bb);
      dump_sbitmap (file, bmaps[bb]);
    }
  fprintf (file, "\n");
}

/* Free basic block data storage.  */

void
free_bb_mem ()
{
  free_int_list (&pred_int_list_blocks);
}
/* Bitmap manipulation routines.  */

/* Allocate a simple bitmap of N_ELMS bits.  */

sbitmap
sbitmap_alloc (n_elms)
     int n_elms;
{
  int bytes, size, amt;
  sbitmap bmap;

  size = SBITMAP_SET_SIZE (n_elms);
  bytes = size * sizeof (SBITMAP_ELT_TYPE);
  amt = (sizeof (struct simple_bitmap_def)
	 + bytes - sizeof (SBITMAP_ELT_TYPE));
  bmap = (sbitmap) xmalloc (amt);
  bmap->n_bits = n_elms;
  bmap->size = size;
  bmap->bytes = bytes;
  return bmap;
}

/* Allocate a vector of N_VECS bitmaps of N_ELMS bits.  */

sbitmap *
sbitmap_vector_alloc (n_vecs, n_elms)
     int n_vecs, n_elms;
{
  int i, bytes, offset, elm_bytes, size, amt, vector_bytes;
  sbitmap *bitmap_vector;

  size = SBITMAP_SET_SIZE (n_elms);
  bytes = size * sizeof (SBITMAP_ELT_TYPE);
  elm_bytes = (sizeof (struct simple_bitmap_def)
	       + bytes - sizeof (SBITMAP_ELT_TYPE));
  vector_bytes = n_vecs * sizeof (sbitmap *);

  /* Round up `vector_bytes' to account for the alignment requirements
     of an sbitmap.  One could allocate the vector-table and set of sbitmaps
     separately, but that requires maintaining two pointers or creating
     a cover struct to hold both pointers (so our result is still just
     one pointer).  Neither is a bad idea, but this is simpler for now.  */
  {
    /* Based on DEFAULT_ALIGNMENT computation in obstack.c.  */
    struct { char x; SBITMAP_ELT_TYPE y; } align;
    int alignment = (char *) & align.y - & align.x;
    vector_bytes = (vector_bytes + alignment - 1) & ~ (alignment - 1);
  }

  amt = vector_bytes + (n_vecs * elm_bytes);
  bitmap_vector = (sbitmap *) xmalloc (amt);

  for (i = 0, offset = vector_bytes;
       i < n_vecs;
       i++, offset += elm_bytes)
    {
      sbitmap b = (sbitmap) ((char *) bitmap_vector + offset);
      bitmap_vector[i] = b;
      b->n_bits = n_elms;
      b->size = size;
      b->bytes = bytes;
    }

  return bitmap_vector;
}
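
/* Illustrative use of the two allocators above (sizes are arbitrary):

       sbitmap *in = sbitmap_vector_alloc (n_basic_blocks, max_regno);
       sbitmap_vector_zero (in, n_basic_blocks);
       SET_BIT (in[bb], regno);

   The vector and all of its bitmaps live in a single xmalloc'd chunk, so
   one free of the vector pointer releases everything.  */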
/* Copy sbitmap SRC to DST.  */

void
sbitmap_copy (dst, src)
     sbitmap dst, src;
{
  int i;

  for (i = 0; i < dst->size; i++)
    dst->elms[i] = src->elms[i];
}

/* Zero all elements in a bitmap.  */

void
sbitmap_zero (bmap)
     sbitmap bmap;
{
  bzero ((char *) bmap->elms, bmap->bytes);
}

/* Set to ones all elements in a bitmap.  */

void
sbitmap_ones (bmap)
     sbitmap bmap;
{
  memset (bmap->elms, -1, bmap->bytes);
}

/* Zero a vector of N_VECS bitmaps.  */

void
sbitmap_vector_zero (bmap, n_vecs)
     sbitmap *bmap;
     int n_vecs;
{
  int i;

  for (i = 0; i < n_vecs; i++)
    sbitmap_zero (bmap[i]);
}

/* Set to ones a vector of N_VECS bitmaps.  */

void
sbitmap_vector_ones (bmap, n_vecs)
     sbitmap *bmap;
     int n_vecs;
{
  int i;

  for (i = 0; i < n_vecs; i++)
    sbitmap_ones (bmap[i]);
}

/* Set DST to be A union (B - C).
   Return non-zero if any change is made.  */

int
sbitmap_union_of_diff (dst, a, b, c)
     sbitmap dst, a, b, c;
{
  int i, changed = 0;
  sbitmap_ptr dstp, ap, bp, cp;

  dstp = dst->elms;
  ap = a->elms;
  bp = b->elms;
  cp = c->elms;

  for (i = 0; i < dst->size; i++)
    {
      SBITMAP_ELT_TYPE tmp = *ap | (*bp & ~*cp);
      if (*dstp != tmp)
	changed = 1;
      *dstp = tmp;
      dstp++; ap++; bp++; cp++;
    }

  return changed;
}

/* Set bitmap DST to the bitwise negation of the bitmap SRC.  */

void
sbitmap_not (dst, src)
     sbitmap dst, src;
{
  int i;
  sbitmap_ptr dstp, ap;

  dstp = dst->elms;
  ap = src->elms;

  for (i = 0; i < dst->size; i++)
    {
      SBITMAP_ELT_TYPE tmp = ~(*ap);
      *dstp++ = tmp;
      ap++;
    }
}

/* Set the bits in DST to be the difference between the bits
   in A and the bits in B.  i.e. dst = a - b.
   The - operator is implemented as a & (~b).  */

void
sbitmap_difference (dst, a, b)
     sbitmap dst, a, b;
{
  int i;
  sbitmap_ptr dstp, ap, bp;

  dstp = dst->elms;
  ap = a->elms;
  bp = b->elms;

  for (i = 0; i < dst->size; i++)
    *dstp++ = *ap++ & (~*bp++);
}

/* Set DST to be (A and B).
   Return non-zero if any change is made.  */

int
sbitmap_a_and_b (dst, a, b)
     sbitmap dst, a, b;
{
  int i, changed = 0;
  sbitmap_ptr dstp, ap, bp;

  dstp = dst->elms;
  ap = a->elms;
  bp = b->elms;

  for (i = 0; i < dst->size; i++)
    {
      SBITMAP_ELT_TYPE tmp = *ap & *bp;
      if (*dstp != tmp)
	changed = 1;
      *dstp = tmp;
      dstp++; ap++; bp++;
    }

  return changed;
}

/* Set DST to be (A or B).
   Return non-zero if any change is made.  */

int
sbitmap_a_or_b (dst, a, b)
     sbitmap dst, a, b;
{
  int i, changed = 0;
  sbitmap_ptr dstp, ap, bp;

  dstp = dst->elms;
  ap = a->elms;
  bp = b->elms;

  for (i = 0; i < dst->size; i++)
    {
      SBITMAP_ELT_TYPE tmp = *ap | *bp;
      if (*dstp != tmp)
	changed = 1;
      *dstp = tmp;
      dstp++; ap++; bp++;
    }

  return changed;
}

/* Set DST to be (A or (B and C)).
   Return non-zero if any change is made.  */

int
sbitmap_a_or_b_and_c (dst, a, b, c)
     sbitmap dst, a, b, c;
{
  int i, changed = 0;
  sbitmap_ptr dstp, ap, bp, cp;

  dstp = dst->elms;
  ap = a->elms;
  bp = b->elms;
  cp = c->elms;

  for (i = 0; i < dst->size; i++)
    {
      SBITMAP_ELT_TYPE tmp = *ap | (*bp & *cp);
      if (*dstp != tmp)
	changed = 1;
      *dstp = tmp;
      dstp++; ap++; bp++; cp++;
    }

  return changed;
}

/* Set DST to be (A and (B or C)).
   Return non-zero if any change is made.  */

int
sbitmap_a_and_b_or_c (dst, a, b, c)
     sbitmap dst, a, b, c;
{
  int i, changed = 0;
  sbitmap_ptr dstp, ap, bp, cp;

  dstp = dst->elms;
  ap = a->elms;
  bp = b->elms;
  cp = c->elms;

  for (i = 0; i < dst->size; i++)
    {
      SBITMAP_ELT_TYPE tmp = *ap & (*bp | *cp);
      if (*dstp != tmp)
	changed = 1;
      *dstp = tmp;
      dstp++; ap++; bp++; cp++;
    }

  return changed;
}
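
/* The nonzero-if-changed return value of the binary operations above is
   what drives iterative dataflow solvers to a fixed point.  A rough sketch
   (OUT, GEN and IN are illustrative names, not variables defined here):

       changed = 1;
       while (changed)
	 {
	   changed = 0;
	   for (bb = 0; bb < n_basic_blocks; bb++)
	     changed |= sbitmap_a_or_b (out[bb], gen[bb], in[bb]);
	 }
 */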
/* Set the bitmap DST to the intersection of SRC of all predecessors or
   successors of block number BB (PRED_SUCC says which).  */

void
sbitmap_intersect_of_predsucc (dst, src, bb, pred_succ)
     sbitmap dst;
     sbitmap *src;
     int bb;
     int_list_ptr *pred_succ;
{
  int i;
  int ps_bb;
  int_list_ptr ps;
  int set_size = dst->size;

  ps = pred_succ[bb];

  /* It is possible that there are no predecessors(/successors).
     This can happen for example in unreachable code.  */
  if (ps == NULL)
    {
      /* In APL-speak this is the `and' reduction of the empty set and thus
	 the result is the identity for `and'.  */
      sbitmap_ones (dst);
      return;
    }

  /* Set result to first predecessor/successor.  */

  for ( ; ps != NULL; ps = ps->next)
    {
      ps_bb = INT_LIST_VAL (ps);
      if (ps_bb == ENTRY_BLOCK || ps_bb == EXIT_BLOCK)
	continue;
      sbitmap_copy (dst, src[ps_bb]);
      /* Break out since we're only doing first predecessor.  */
      break;
    }

  /* Now do the remaining predecessors/successors.  */

  for (ps = ps->next; ps != NULL; ps = ps->next)
    {
      sbitmap_ptr p;

      ps_bb = INT_LIST_VAL (ps);
      if (ps_bb == ENTRY_BLOCK || ps_bb == EXIT_BLOCK)
	continue;

      p = src[ps_bb]->elms;
      for (i = 0; i < set_size; i++)
	dst->elms[i] &= p[i];
    }
}

/* Set the bitmap DST to the intersection of SRC of all predecessors
   of block number BB.  */

void
sbitmap_intersect_of_predecessors (dst, src, bb, s_preds)
     sbitmap dst;
     sbitmap *src;
     int bb;
     int_list_ptr *s_preds;
{
  sbitmap_intersect_of_predsucc (dst, src, bb, s_preds);
}

/* Set the bitmap DST to the intersection of SRC of all successors
   of block number BB.  */

void
sbitmap_intersect_of_successors (dst, src, bb, s_succs)
     sbitmap dst;
     sbitmap *src;
     int bb;
     int_list_ptr *s_succs;
{
  sbitmap_intersect_of_predsucc (dst, src, bb, s_succs);
}

/* Set the bitmap DST to the union of SRC of all predecessors/successors of
   block number BB (PRED_SUCC says which).  */

void
sbitmap_union_of_predsucc (dst, src, bb, pred_succ)
     sbitmap dst;
     sbitmap *src;
     int bb;
     int_list_ptr *pred_succ;
{
  int i;
  int ps_bb;
  int_list_ptr ps;
  int set_size = dst->size;

  ps = pred_succ[bb];

  /* It is possible that there are no predecessors(/successors).
     This can happen for example in unreachable code.  */
  if (ps == NULL)
    {
      /* In APL-speak this is the `or' reduction of the empty set and thus
	 the result is the identity for `or'.  */
      sbitmap_zero (dst);
      return;
    }

  /* Set result to first predecessor/successor.  */

  for ( ; ps != NULL; ps = ps->next)
    {
      ps_bb = INT_LIST_VAL (ps);
      if (ps_bb == ENTRY_BLOCK || ps_bb == EXIT_BLOCK)
	continue;
      sbitmap_copy (dst, src[ps_bb]);
      /* Break out since we're only doing first predecessor.  */
      break;
    }

  /* Now do the remaining predecessors/successors.  */

  for (ps = ps->next; ps != NULL; ps = ps->next)
    {
      sbitmap_ptr p;

      ps_bb = INT_LIST_VAL (ps);
      if (ps_bb == ENTRY_BLOCK || ps_bb == EXIT_BLOCK)
	continue;

      p = src[ps_bb]->elms;
      for (i = 0; i < set_size; i++)
	dst->elms[i] |= p[i];
    }
}

/* Set the bitmap DST to the union of SRC of all predecessors of
   block number BB.  */

void
sbitmap_union_of_predecessors (dst, src, bb, s_preds)
     sbitmap dst;
     sbitmap *src;
     int bb;
     int_list_ptr *s_preds;
{
  sbitmap_union_of_predsucc (dst, src, bb, s_preds);
}

/* Set the bitmap DST to the union of SRC of all successors of
   block number BB.  */

void
sbitmap_union_of_successors (dst, src, bb, s_succ)
     sbitmap dst;
     sbitmap *src;
     int bb;
     int_list_ptr *s_succ;
{
  sbitmap_union_of_predsucc (dst, src, bb, s_succ);
}
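
/* compute_dominators below solves the classic forward dataflow problem

       dom (0)  = { 0 }
       dom (bb) = { bb } union ( intersection over p in preds (bb) of dom (p) )

   and the mirror-image problem over successors for post-dominators,
   starting every set at "all blocks" and iterating until nothing changes.  */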
/* Compute dominator relationships.  */
void
compute_dominators (dominators, post_dominators, s_preds, s_succs)
     sbitmap *dominators;
     sbitmap *post_dominators;
     int_list_ptr *s_preds;
     int_list_ptr *s_succs;
{
  int bb, changed, passes;
  sbitmap *temp_bitmap;

  temp_bitmap = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
  sbitmap_vector_ones (dominators, n_basic_blocks);
  sbitmap_vector_ones (post_dominators, n_basic_blocks);
  sbitmap_vector_zero (temp_bitmap, n_basic_blocks);

  sbitmap_zero (dominators[0]);
  SET_BIT (dominators[0], 0);

  sbitmap_zero (post_dominators[n_basic_blocks-1]);
  SET_BIT (post_dominators[n_basic_blocks-1], 0);

  passes = 0;
  changed = 1;
  while (changed)
    {
      changed = 0;
      for (bb = 1; bb < n_basic_blocks; bb++)
	{
	  sbitmap_intersect_of_predecessors (temp_bitmap[bb], dominators,
					     bb, s_preds);
	  SET_BIT (temp_bitmap[bb], bb);
	  changed |= sbitmap_a_and_b (dominators[bb],
				      dominators[bb], temp_bitmap[bb]);

	  sbitmap_intersect_of_successors (temp_bitmap[bb], post_dominators,
					   bb, s_succs);
	  SET_BIT (temp_bitmap[bb], bb);
	  changed |= sbitmap_a_and_b (post_dominators[bb],
				      post_dominators[bb],
				      temp_bitmap[bb]);
	}
    }