2002-11-07 Jason Thorpe <thorpej@wasabisystems.com>
[official-gcc.git] / gcc / flow.c
1 /* Data flow analysis for GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 /* This file contains the data flow analysis pass of the compiler. It
23 computes data flow information which tells combine_instructions
24 which insns to consider combining and controls register allocation.
26 Additional data flow information that is too bulky to record is
27 generated during the analysis, and is used at that time to create
28 autoincrement and autodecrement addressing.
30 The first step is dividing the function into basic blocks.
31 find_basic_blocks does this. Then life_analysis determines
32 where each register is live and where it is dead.
34 ** find_basic_blocks **
36 find_basic_blocks divides the current function's rtl into basic
37 blocks and constructs the CFG. The blocks are recorded in the
38 basic_block_info array; the CFG exists in the edge structures
39 referenced by the blocks.
41 find_basic_blocks also finds any unreachable loops and deletes them.
43 ** life_analysis **
45 life_analysis is called immediately after find_basic_blocks.
46 It uses the basic block information to determine where each
47 hard or pseudo register is live.
49 ** live-register info **
51 The information about where each register is live is in two parts:
52 the REG_NOTES of insns, and the vector basic_block->global_live_at_start.
54 basic_block->global_live_at_start has an element for each basic
55 block, and the element is a bit-vector with a bit for each hard or
56 pseudo register. The bit is 1 if the register is live at the
57 beginning of the basic block.
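For illustration only (a minimal sketch, assuming the regset accessors
declared in basic-block.h), a client can test liveness at a block
boundary like this:

    int live_on_entry = REGNO_REG_SET_P (bb->global_live_at_start, regno);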
59 Two types of elements can be added to an insn's REG_NOTES.
60 A REG_DEAD note is added to an insn's REG_NOTES for any register
61 that meets both of two conditions: The value in the register is not
62 needed in subsequent insns and the insn does not replace the value in
63 the register (in the case of multi-word hard registers, the value in
64 each register must be replaced by the insn to avoid a REG_DEAD note).
66 In the vast majority of cases, an object in a REG_DEAD note will be
67 used somewhere in the insn. The (rare) exception to this is if an
68 insn uses a multi-word hard register and only some of the registers are
69 needed in subsequent insns. In that case, REG_DEAD notes will be
70 provided for those hard registers that are not subsequently needed.
71 Partial REG_DEAD notes of this type do not occur when an insn sets
72 only some of the hard registers used in such a multi-word operand;
73 omitting REG_DEAD notes for objects stored in an insn is optional and
74 the desire to do so does not justify the complexity of the partial
75 REG_DEAD notes.
77 REG_UNUSED notes are added for each register that is set by the insn
78 but is unused subsequently (if every register set by the insn is unused
79 and the insn does not reference memory or have some other side-effect,
80 the insn is deleted instead). If only part of a multi-word hard
81 register is used in a subsequent insn, REG_UNUSED notes are made for
82 the parts that will not be used.
84 To determine which registers are live after any insn, one can
85 start from the beginning of the basic block and scan insns, noting
86 which registers are set by each insn and which die there.
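As an illustration only (a minimal sketch, not code used by this pass;
it assumes the usual rtl and regset accessors and ignores CLOBBERs,
multiple SETs and conditional execution), such a forward scan could
look like this, where LIVE starts out as a copy of
bb->global_live_at_start:

    rtx insn, link, set;

    for (insn = bb->head; ; insn = NEXT_INSN (insn))
      {
        if (INSN_P (insn))
          {
            for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
              if (REG_NOTE_KIND (link) == REG_DEAD)
                CLEAR_REGNO_REG_SET (live, REGNO (XEXP (link, 0)));

            set = single_set (insn);
            if (set && GET_CODE (SET_DEST (set)) == REG
                && ! find_regno_note (insn, REG_UNUSED,
                                      REGNO (SET_DEST (set))))
              SET_REGNO_REG_SET (live, REGNO (SET_DEST (set)));
          }
        if (insn == bb->end)
          break;
      }

After each iteration LIVE then approximates the set of registers live
just after INSN.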
88 ** Other actions of life_analysis **
90 life_analysis sets up the LOG_LINKS fields of insns because the
91 information needed to do so is readily available.
93 life_analysis deletes insns whose only effect is to store a value
94 that is never used.
96 life_analysis notices cases where a reference to a register as
97 a memory address can be combined with a preceding or following
98 incrementation or decrementation of the register. The separate
99 instruction to increment or decrement is deleted and the address
100 is changed to a POST_INC or similar rtx.
102 Each time an incrementing or decrementing address is created,
103 a REG_INC element is added to the insn's REG_NOTES list.
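For example (illustrative only; R and X stand for arbitrary registers,
and this assumes a 4-byte SImode store on a target that supports the
addressing mode), the pair

    (set (mem:SI (reg R)) (reg X))
    (set (reg R) (plus (reg R) (const_int 4)))

can be rewritten as the single insn

    (set (mem:SI (post_inc (reg R))) (reg X))

with a REG_INC note for R added to its REG_NOTES.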
105 life_analysis fills in certain vectors containing information about
106 register usage: REG_N_REFS, REG_N_DEATHS, REG_N_SETS, REG_LIVE_LENGTH,
107 REG_N_CALLS_CROSSED and REG_BASIC_BLOCK.
109 life_analysis sets current_function_sp_is_unchanging if the function
110 doesn't modify the stack pointer. */
112 /* TODO:
114 Split out from life_analysis:
115 - local property discovery (bb->local_live, bb->local_set)
116 - global property computation
117 - log links creation
118 - pre/post modify transformation
121 #include "config.h"
122 #include "system.h"
123 #include "tree.h"
124 #include "rtl.h"
125 #include "tm_p.h"
126 #include "hard-reg-set.h"
127 #include "basic-block.h"
128 #include "insn-config.h"
129 #include "regs.h"
130 #include "flags.h"
131 #include "output.h"
132 #include "function.h"
133 #include "except.h"
134 #include "toplev.h"
135 #include "recog.h"
136 #include "expr.h"
137 #include "ssa.h"
138 #include "timevar.h"
140 #include "obstack.h"
141 #include "splay-tree.h"
143 /* EXIT_IGNORE_STACK should be nonzero if, when returning from a function,
144 the stack pointer does not matter. The value is tested only in
145 functions that have frame pointers.
146 No definition is equivalent to always zero. */
147 #ifndef EXIT_IGNORE_STACK
148 #define EXIT_IGNORE_STACK 0
149 #endif
151 #ifndef HAVE_epilogue
152 #define HAVE_epilogue 0
153 #endif
154 #ifndef HAVE_prologue
155 #define HAVE_prologue 0
156 #endif
157 #ifndef HAVE_sibcall_epilogue
158 #define HAVE_sibcall_epilogue 0
159 #endif
161 #ifndef LOCAL_REGNO
162 #define LOCAL_REGNO(REGNO) 0
163 #endif
164 #ifndef EPILOGUE_USES
165 #define EPILOGUE_USES(REGNO) 0
166 #endif
167 #ifndef EH_USES
168 #define EH_USES(REGNO) 0
169 #endif
171 #ifdef HAVE_conditional_execution
172 #ifndef REVERSE_CONDEXEC_PREDICATES_P
173 #define REVERSE_CONDEXEC_PREDICATES_P(x, y) ((x) == reverse_condition (y))
174 #endif
175 #endif
177 /* Nonzero if the second flow pass has completed. */
178 int flow2_completed;
180 /* Maximum register number used in this function, plus one. */
182 int max_regno;
184 /* Indexed by n, giving various register information */
186 varray_type reg_n_info;
188 /* Size of a regset for the current function,
189 in (1) bytes and (2) elements. */
191 int regset_bytes;
192 int regset_size;
194 /* Regset of regs live when calls to `setjmp'-like functions happen. */
195 /* ??? Does this exist only for the setjmp-clobbered warning message? */
197 regset regs_live_at_setjmp;
199 /* List made of EXPR_LIST rtx's which gives pairs of pseudo registers
200 that have to go in the same hard reg.
201 The first two regs in the list are a pair, and the next two
202 are another pair, etc. */
203 rtx regs_may_share;
205 /* Callback that determines if it's ok for a function to have no
206 noreturn attribute. */
207 int (*lang_missing_noreturn_ok_p) PARAMS ((tree));
209 /* Set of registers that may be eliminable. These are handled specially
210 in updating regs_ever_live. */
212 static HARD_REG_SET elim_reg_set;
214 /* Holds information for tracking conditional register life information. */
215 struct reg_cond_life_info
217 /* A boolean expression of conditions under which a register is dead. */
218 rtx condition;
219 /* Conditions under which a register is dead at the basic block end. */
220 rtx orig_condition;
222 /* A boolean expression of conditions under which a register has been
223 stored into. */
224 rtx stores;
226 /* ??? Could store mask of bytes that are dead, so that we could finally
227 track lifetimes of multi-word registers accessed via subregs. */
230 /* For use in communicating between propagate_block and its subroutines.
231 Holds all information needed to compute life and def-use information. */
233 struct propagate_block_info
235 /* The basic block we're considering. */
236 basic_block bb;
238 /* Bit N is set if register N is conditionally or unconditionally live. */
239 regset reg_live;
241 /* Bit N is set if register N is set this insn. */
242 regset new_set;
244 /* Element N is the next insn that uses (hard or pseudo) register N
245 within the current basic block; or zero, if there is no such insn. */
246 rtx *reg_next_use;
248 /* Contains a list of all the MEMs we are tracking for dead store
249 elimination. */
250 rtx mem_set_list;
252 /* If non-null, record the set of registers set unconditionally in the
253 basic block. */
254 regset local_set;
256 /* If non-null, record the set of registers set conditionally in the
257 basic block. */
258 regset cond_local_set;
260 #ifdef HAVE_conditional_execution
261 /* Indexed by register number, holds a reg_cond_life_info for each
262 register that is not unconditionally live or dead. */
263 splay_tree reg_cond_dead;
265 /* Bit N is set if register N is in an expression in reg_cond_dead. */
266 regset reg_cond_reg;
267 #endif
269 /* The length of mem_set_list. */
270 int mem_set_list_len;
272 /* Nonzero if the value of CC0 is live. */
273 int cc0_live;
275 /* Flags controlling the set of information propagate_block collects. */
276 int flags;
279 /* Number of dead insns removed. */
280 static int ndead;
282 /* Maximum length of pbi->mem_set_list before we start dropping
283 new elements on the floor. */
284 #define MAX_MEM_SET_LIST_LEN 100
286 /* Forward declarations */
287 static int verify_wide_reg_1 PARAMS ((rtx *, void *));
288 static void verify_wide_reg PARAMS ((int, basic_block));
289 static void verify_local_live_at_start PARAMS ((regset, basic_block));
290 static void notice_stack_pointer_modification_1 PARAMS ((rtx, rtx, void *));
291 static void notice_stack_pointer_modification PARAMS ((rtx));
292 static void mark_reg PARAMS ((rtx, void *));
293 static void mark_regs_live_at_end PARAMS ((regset));
294 static int set_phi_alternative_reg PARAMS ((rtx, int, int, void *));
295 static void calculate_global_regs_live PARAMS ((sbitmap, sbitmap, int));
296 static void propagate_block_delete_insn PARAMS ((rtx));
297 static rtx propagate_block_delete_libcall PARAMS ((rtx, rtx));
298 static int insn_dead_p PARAMS ((struct propagate_block_info *,
299 rtx, int, rtx));
300 static int libcall_dead_p PARAMS ((struct propagate_block_info *,
301 rtx, rtx));
302 static void mark_set_regs PARAMS ((struct propagate_block_info *,
303 rtx, rtx));
304 static void mark_set_1 PARAMS ((struct propagate_block_info *,
305 enum rtx_code, rtx, rtx,
306 rtx, int));
307 static int find_regno_partial PARAMS ((rtx *, void *));
309 #ifdef HAVE_conditional_execution
310 static int mark_regno_cond_dead PARAMS ((struct propagate_block_info *,
311 int, rtx));
312 static void free_reg_cond_life_info PARAMS ((splay_tree_value));
313 static int flush_reg_cond_reg_1 PARAMS ((splay_tree_node, void *));
314 static void flush_reg_cond_reg PARAMS ((struct propagate_block_info *,
315 int));
316 static rtx elim_reg_cond PARAMS ((rtx, unsigned int));
317 static rtx ior_reg_cond PARAMS ((rtx, rtx, int));
318 static rtx not_reg_cond PARAMS ((rtx));
319 static rtx and_reg_cond PARAMS ((rtx, rtx, int));
320 #endif
321 #ifdef AUTO_INC_DEC
322 static void attempt_auto_inc PARAMS ((struct propagate_block_info *,
323 rtx, rtx, rtx, rtx, rtx));
324 static void find_auto_inc PARAMS ((struct propagate_block_info *,
325 rtx, rtx));
326 static int try_pre_increment_1 PARAMS ((struct propagate_block_info *,
327 rtx));
328 static int try_pre_increment PARAMS ((rtx, rtx, HOST_WIDE_INT));
329 #endif
330 static void mark_used_reg PARAMS ((struct propagate_block_info *,
331 rtx, rtx, rtx));
332 static void mark_used_regs PARAMS ((struct propagate_block_info *,
333 rtx, rtx, rtx));
334 void dump_flow_info PARAMS ((FILE *));
335 void debug_flow_info PARAMS ((void));
336 static void add_to_mem_set_list PARAMS ((struct propagate_block_info *,
337 rtx));
338 static int invalidate_mems_from_autoinc PARAMS ((rtx *, void *));
339 static void invalidate_mems_from_set PARAMS ((struct propagate_block_info *,
340 rtx));
341 static void clear_log_links PARAMS ((sbitmap));
344 void
345 check_function_return_warnings ()
347 if (warn_missing_noreturn
348 && !TREE_THIS_VOLATILE (cfun->decl)
349 && EXIT_BLOCK_PTR->pred == NULL
350 && (lang_missing_noreturn_ok_p
351 && !lang_missing_noreturn_ok_p (cfun->decl)))
352 warning ("function might be possible candidate for attribute `noreturn'");
354 /* If we have a path to EXIT, then we do return. */
355 if (TREE_THIS_VOLATILE (cfun->decl)
356 && EXIT_BLOCK_PTR->pred != NULL)
357 warning ("`noreturn' function does return");
359 /* If the clobber_return_insn appears in some basic block, then we
360 do reach the end without returning a value. */
361 else if (warn_return_type
362 && cfun->x_clobber_return_insn != NULL
363 && EXIT_BLOCK_PTR->pred != NULL)
365 int max_uid = get_max_uid ();
367 /* If clobber_return_insn was excised by jump1, then renumber_insns
368 can make max_uid smaller than the number still recorded in our rtx.
369 That's fine, since this is a quick way of verifying that the insn
370 is no longer in the chain. */
371 if (INSN_UID (cfun->x_clobber_return_insn) < max_uid)
373 rtx insn;
375 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
376 if (insn == cfun->x_clobber_return_insn)
378 warning ("control reaches end of non-void function");
379 break;
385 /* Return the INSN immediately following the NOTE_INSN_BASIC_BLOCK
386 note associated with the BLOCK. */
389 first_insn_after_basic_block_note (block)
390 basic_block block;
392 rtx insn;
394 /* Get the first instruction in the block. */
395 insn = block->head;
397 if (insn == NULL_RTX)
398 return NULL_RTX;
399 if (GET_CODE (insn) == CODE_LABEL)
400 insn = NEXT_INSN (insn);
401 if (!NOTE_INSN_BASIC_BLOCK_P (insn))
402 abort ();
404 return NEXT_INSN (insn);
407 /* Perform data flow analysis.
408 F is the first insn of the function; FLAGS is a set of PROP_* flags
409 to be used in accumulating flow info. */
411 void
412 life_analysis (f, file, flags)
413 rtx f;
414 FILE *file;
415 int flags;
417 int i;
418 #ifdef ELIMINABLE_REGS
419 static const struct {const int from, to; } eliminables[] = ELIMINABLE_REGS;
420 #endif
422 /* Record which registers will be eliminated. We use this in
423 mark_used_regs. */
425 CLEAR_HARD_REG_SET (elim_reg_set);
427 #ifdef ELIMINABLE_REGS
428 for (i = 0; i < (int) ARRAY_SIZE (eliminables); i++)
429 SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
430 #else
431 SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
432 #endif
435 #ifdef CANNOT_CHANGE_MODE_CLASS
436 if (flags & PROP_REG_INFO)
437 for (i=0; i < NUM_MACHINE_MODES; ++i)
438 INIT_REG_SET (&subregs_of_mode[i]);
439 #endif
441 if (! optimize)
442 flags &= ~(PROP_LOG_LINKS | PROP_AUTOINC | PROP_ALLOW_CFG_CHANGES);
444 /* The post-reload life analysis has (on a global basis) the same
445 registers live as were computed by reload itself; otherwise the
446 elimination offsets and such may be incorrect.
448 Reload will mark some registers as live even though they do not
449 appear in the rtl.
451 We don't want to create new auto-incs after reload, since they
452 are unlikely to be useful and can cause problems with shared
453 stack slots. */
454 if (reload_completed)
455 flags &= ~(PROP_REG_INFO | PROP_AUTOINC);
457 /* We want alias analysis information for local dead store elimination. */
458 if (optimize && (flags & PROP_SCAN_DEAD_STORES))
459 init_alias_analysis ();
461 /* Always remove no-op moves. Do this before other processing so
462 that we don't have to keep re-scanning them. */
463 delete_noop_moves (f);
465 /* Some targets can emit simpler epilogues if they know that sp was
466 not ever modified during the function. After reload, of course,
467 we've already emitted the epilogue so there's no sense searching. */
468 if (! reload_completed)
469 notice_stack_pointer_modification (f);
471 /* Allocate and zero out data structures that will record the
472 data from lifetime analysis. */
473 allocate_reg_life_data ();
474 allocate_bb_life_data ();
476 /* Find the set of registers live on function exit. */
477 mark_regs_live_at_end (EXIT_BLOCK_PTR->global_live_at_start);
479 /* "Update" life info from zero. It'd be nice to begin the
480 relaxation with just the exit and noreturn blocks, but that set
481 is not immediately handy. */
483 if (flags & PROP_REG_INFO)
484 memset (regs_ever_live, 0, sizeof (regs_ever_live));
485 update_life_info (NULL, UPDATE_LIFE_GLOBAL, flags);
487 /* Clean up. */
488 if (optimize && (flags & PROP_SCAN_DEAD_STORES))
489 end_alias_analysis ();
491 if (file)
492 dump_flow_info (file);
494 free_basic_block_vars (1);
496 /* Removing dead insns should've made jumptables really dead. */
497 delete_dead_jumptables ();
500 /* A subroutine of verify_wide_reg, called through for_each_rtx.
501 Search for REGNO. If found, return 2 if it is not wider than
502 word_mode, or 1 if it is wider.
504 static int
505 verify_wide_reg_1 (px, pregno)
506 rtx *px;
507 void *pregno;
509 rtx x = *px;
510 unsigned int regno = *(int *) pregno;
512 if (GET_CODE (x) == REG && REGNO (x) == regno)
514 if (GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD)
515 return 2;
516 return 1;
518 return 0;
521 /* A subroutine of verify_local_live_at_start. Search through insns
522 of BB looking for register REGNO. */
524 static void
525 verify_wide_reg (regno, bb)
526 int regno;
527 basic_block bb;
529 rtx head = bb->head, end = bb->end;
531 while (1)
533 if (INSN_P (head))
535 int r = for_each_rtx (&PATTERN (head), verify_wide_reg_1, &regno);
536 if (r == 1)
537 return;
538 if (r == 2)
539 break;
541 if (head == end)
542 break;
543 head = NEXT_INSN (head);
546 if (rtl_dump_file)
548 fprintf (rtl_dump_file, "Register %d died unexpectedly.\n", regno);
549 dump_bb (bb, rtl_dump_file);
551 abort ();
554 /* A subroutine of update_life_info. Verify that there are no untoward
555 changes in live_at_start during a local update. */
557 static void
558 verify_local_live_at_start (new_live_at_start, bb)
559 regset new_live_at_start;
560 basic_block bb;
562 if (reload_completed)
564 /* After reload, there are no pseudos, nor subregs of multi-word
565 registers. The regsets should exactly match. */
566 if (! REG_SET_EQUAL_P (new_live_at_start, bb->global_live_at_start))
568 if (rtl_dump_file)
570 fprintf (rtl_dump_file,
571 "live_at_start mismatch in bb %d, aborting\nNew:\n",
572 bb->index);
573 debug_bitmap_file (rtl_dump_file, new_live_at_start);
574 fputs ("Old:\n", rtl_dump_file);
575 dump_bb (bb, rtl_dump_file);
577 abort ();
580 else
582 int i;
584 /* Find the set of changed registers. */
585 XOR_REG_SET (new_live_at_start, bb->global_live_at_start);
587 EXECUTE_IF_SET_IN_REG_SET (new_live_at_start, 0, i,
589 /* No registers should die. */
590 if (REGNO_REG_SET_P (bb->global_live_at_start, i))
592 if (rtl_dump_file)
594 fprintf (rtl_dump_file,
595 "Register %d died unexpectedly.\n", i);
596 dump_bb (bb, rtl_dump_file);
598 abort ();
601 /* Verify that the now-live register is wider than word_mode. */
602 verify_wide_reg (i, bb);
607 /* Updates life information starting with the basic blocks set in BLOCKS.
608 If BLOCKS is null, consider it to be the universal set.
610 If EXTENT is UPDATE_LIFE_LOCAL, such as after splitting or peepholeing,
611 we are only expecting local modifications to basic blocks. If we find
612 extra registers live at the beginning of a block, then we either killed
613 useful data, or we have a broken split that wants data not provided.
614 If we find registers removed from live_at_start, that means we have
615 a broken peephole that is killing a register it shouldn't.
617 ??? This is not true in one situation -- when a pre-reload splitter
618 generates subregs of a multi-word pseudo, current life analysis will
619 lose the kill. So we _can_ have a pseudo go live. How irritating.
621 Including PROP_REG_INFO does not properly refresh regs_ever_live
622 unless the caller resets it to zero. */
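/* For example (a sketch mirroring the call in life_analysis above), a
   caller doing a full recomputation would write:

       if (flags & PROP_REG_INFO)
         memset (regs_ever_live, 0, sizeof (regs_ever_live));
       update_life_info (NULL, UPDATE_LIFE_GLOBAL, flags);

   whereas a pass that only made local changes would pass
   UPDATE_LIFE_LOCAL and an sbitmap of just the blocks it touched. */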
625 update_life_info (blocks, extent, prop_flags)
626 sbitmap blocks;
627 enum update_life_extent extent;
628 int prop_flags;
630 regset tmp;
631 regset_head tmp_head;
632 int i;
633 int stabilized_prop_flags = prop_flags;
634 basic_block bb;
636 tmp = INITIALIZE_REG_SET (tmp_head);
637 ndead = 0;
639 timevar_push ((extent == UPDATE_LIFE_LOCAL || blocks)
640 ? TV_LIFE_UPDATE : TV_LIFE);
642 /* Changes to the CFG are only allowed when
643 doing a global update for the entire CFG. */
644 if ((prop_flags & PROP_ALLOW_CFG_CHANGES)
645 && (extent == UPDATE_LIFE_LOCAL || blocks))
646 abort ();
648 /* For a global update, we go through the relaxation process again. */
649 if (extent != UPDATE_LIFE_LOCAL)
651 for ( ; ; )
653 int changed = 0;
655 calculate_global_regs_live (blocks, blocks,
656 prop_flags & (PROP_SCAN_DEAD_CODE
657 | PROP_SCAN_DEAD_STORES
658 | PROP_ALLOW_CFG_CHANGES));
660 if ((prop_flags & (PROP_KILL_DEAD_CODE | PROP_ALLOW_CFG_CHANGES))
661 != (PROP_KILL_DEAD_CODE | PROP_ALLOW_CFG_CHANGES))
662 break;
664 /* Removing dead code may allow the CFG to be simplified which
665 in turn may allow for further dead code detection / removal. */
666 FOR_EACH_BB_REVERSE (bb)
668 COPY_REG_SET (tmp, bb->global_live_at_end);
669 changed |= propagate_block (bb, tmp, NULL, NULL,
670 prop_flags & (PROP_SCAN_DEAD_CODE
671 | PROP_SCAN_DEAD_STORES
672 | PROP_KILL_DEAD_CODE));
675 /* Don't pass PROP_SCAN_DEAD_CODE or PROP_KILL_DEAD_CODE to
676 subsequent propagate_block calls, since removing or acting as
677 removing dead code can affect global register liveness, which
678 is supposed to be finalized for this call after this loop. */
679 stabilized_prop_flags
680 &= ~(PROP_SCAN_DEAD_CODE | PROP_SCAN_DEAD_STORES
681 | PROP_KILL_DEAD_CODE);
683 if (! changed)
684 break;
686 /* We repeat regardless of what cleanup_cfg says. If there were
687 instructions deleted above, that might have been only a
688 partial improvement (see MAX_MEM_SET_LIST_LEN usage).
689 Further improvement may be possible. */
690 cleanup_cfg (CLEANUP_EXPENSIVE);
693 /* If asked, remove notes from the blocks we'll update. */
694 if (extent == UPDATE_LIFE_GLOBAL_RM_NOTES)
695 count_or_remove_death_notes (blocks, 1);
698 /* Clear log links in case we are asked to (re)compute them. */
699 if (prop_flags & PROP_LOG_LINKS)
700 clear_log_links (blocks);
702 if (blocks)
704 EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
706 bb = BASIC_BLOCK (i);
708 COPY_REG_SET (tmp, bb->global_live_at_end);
709 propagate_block (bb, tmp, NULL, NULL, stabilized_prop_flags);
711 if (extent == UPDATE_LIFE_LOCAL)
712 verify_local_live_at_start (tmp, bb);
715 else
717 FOR_EACH_BB_REVERSE (bb)
719 COPY_REG_SET (tmp, bb->global_live_at_end);
721 propagate_block (bb, tmp, NULL, NULL, stabilized_prop_flags);
723 if (extent == UPDATE_LIFE_LOCAL)
724 verify_local_live_at_start (tmp, bb);
728 FREE_REG_SET (tmp);
730 if (prop_flags & PROP_REG_INFO)
732 /* The only pseudos that are live at the beginning of the function
733 are those that were not set anywhere in the function. local-alloc
734 doesn't know how to handle these correctly, so mark them as not
735 local to any one basic block. */
736 EXECUTE_IF_SET_IN_REG_SET (ENTRY_BLOCK_PTR->global_live_at_end,
737 FIRST_PSEUDO_REGISTER, i,
738 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
740 /* We have a problem with any pseudoreg that lives across the setjmp.
741 ANSI says that if a user variable does not change in value between
742 the setjmp and the longjmp, then the longjmp preserves it. This
743 includes longjmp from a place where the pseudo appears dead.
744 (In principle, the value still exists if it is in scope.)
745 If the pseudo goes in a hard reg, some other value may occupy
746 that hard reg where this pseudo is dead, thus clobbering the pseudo.
747 Conclusion: such a pseudo must not go in a hard reg. */
748 EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
749 FIRST_PSEUDO_REGISTER, i,
751 if (regno_reg_rtx[i] != 0)
753 REG_LIVE_LENGTH (i) = -1;
754 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
758 timevar_pop ((extent == UPDATE_LIFE_LOCAL || blocks)
759 ? TV_LIFE_UPDATE : TV_LIFE);
760 if (ndead && rtl_dump_file)
761 fprintf (rtl_dump_file, "deleted %i dead insns\n", ndead);
762 return ndead;
765 /* Update life information in all blocks where BB_DIRTY is set. */
768 update_life_info_in_dirty_blocks (extent, prop_flags)
769 enum update_life_extent extent;
770 int prop_flags;
772 sbitmap update_life_blocks = sbitmap_alloc (last_basic_block);
773 int n = 0;
774 basic_block bb;
775 int retval = 0;
777 sbitmap_zero (update_life_blocks);
778 FOR_EACH_BB (bb)
780 if (extent == UPDATE_LIFE_LOCAL)
782 if (bb->flags & BB_DIRTY)
784 SET_BIT (update_life_blocks, bb->index);
785 n++;
788 else
790 /* ??? Bootstrap with -march=pentium4 fails to terminate
791 with only a partial life update. */
792 SET_BIT (update_life_blocks, bb->index);
793 if (bb->flags & BB_DIRTY)
794 n++;
798 if (n)
799 retval = update_life_info (update_life_blocks, extent, prop_flags);
801 sbitmap_free (update_life_blocks);
802 return retval;
805 /* Free the variables allocated by find_basic_blocks.
807 KEEP_HEAD_END_P is nonzero if basic_block_info is not to be freed. */
809 void
810 free_basic_block_vars (keep_head_end_p)
811 int keep_head_end_p;
813 if (! keep_head_end_p)
815 if (basic_block_info)
817 clear_edges ();
818 VARRAY_FREE (basic_block_info);
820 n_basic_blocks = 0;
821 last_basic_block = 0;
823 ENTRY_BLOCK_PTR->aux = NULL;
824 ENTRY_BLOCK_PTR->global_live_at_end = NULL;
825 EXIT_BLOCK_PTR->aux = NULL;
826 EXIT_BLOCK_PTR->global_live_at_start = NULL;
830 /* Delete any insns that copy a register to itself. */
833 delete_noop_moves (f)
834 rtx f ATTRIBUTE_UNUSED;
836 rtx insn, next;
837 basic_block bb;
838 int nnoops = 0;
840 FOR_EACH_BB (bb)
842 for (insn = bb->head; insn != NEXT_INSN (bb->end); insn = next)
844 next = NEXT_INSN (insn);
845 if (INSN_P (insn) && noop_move_p (insn))
847 rtx note;
849 /* If we're about to remove the first insn of a libcall
850 then move the libcall note to the next real insn and
851 update the retval note. */
852 if ((note = find_reg_note (insn, REG_LIBCALL, NULL_RTX))
853 && XEXP (note, 0) != insn)
855 rtx new_libcall_insn = next_real_insn (insn);
856 rtx retval_note = find_reg_note (XEXP (note, 0),
857 REG_RETVAL, NULL_RTX);
858 REG_NOTES (new_libcall_insn)
859 = gen_rtx_INSN_LIST (REG_LIBCALL, XEXP (note, 0),
860 REG_NOTES (new_libcall_insn));
861 XEXP (retval_note, 0) = new_libcall_insn;
864 delete_insn_and_edges (insn);
865 nnoops++;
869 if (nnoops && rtl_dump_file)
870 fprintf (rtl_dump_file, "deleted %i noop moves", nnoops);
871 return nnoops;
874 /* Delete any jump tables never referenced. We can't delete them at the
875 time of removing the tablejump insn, as they are referenced by the preceding
876 insns computing the destination, so we delay deleting and garbage-collect
877 them once life information is computed. */
878 void
879 delete_dead_jumptables ()
881 rtx insn, next;
882 for (insn = get_insns (); insn; insn = next)
884 next = NEXT_INSN (insn);
885 if (GET_CODE (insn) == CODE_LABEL
886 && LABEL_NUSES (insn) == LABEL_PRESERVE_P (insn)
887 && GET_CODE (next) == JUMP_INSN
888 && (GET_CODE (PATTERN (next)) == ADDR_VEC
889 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
891 if (rtl_dump_file)
892 fprintf (rtl_dump_file, "Dead jumptable %i removed\n", INSN_UID (insn));
893 delete_insn (NEXT_INSN (insn));
894 delete_insn (insn);
895 next = NEXT_INSN (next);
900 /* Determine if the stack pointer is constant over the life of the function.
901 Only useful before prologues have been emitted. */
903 static void
904 notice_stack_pointer_modification_1 (x, pat, data)
905 rtx x;
906 rtx pat ATTRIBUTE_UNUSED;
907 void *data ATTRIBUTE_UNUSED;
909 if (x == stack_pointer_rtx
910 /* The stack pointer is only modified indirectly as the result
911 of a push until later in flow. See the comments in rtl.texi
912 regarding Embedded Side-Effects on Addresses. */
913 || (GET_CODE (x) == MEM
914 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == 'a'
915 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx))
916 current_function_sp_is_unchanging = 0;
919 static void
920 notice_stack_pointer_modification (f)
921 rtx f;
923 rtx insn;
925 /* Assume that the stack pointer is unchanging if alloca hasn't
926 been used. */
927 current_function_sp_is_unchanging = !current_function_calls_alloca;
928 if (! current_function_sp_is_unchanging)
929 return;
931 for (insn = f; insn; insn = NEXT_INSN (insn))
933 if (INSN_P (insn))
935 /* Check if insn modifies the stack pointer. */
936 note_stores (PATTERN (insn), notice_stack_pointer_modification_1,
937 NULL);
938 if (! current_function_sp_is_unchanging)
939 return;
944 /* Mark a register in SET. Hard registers in large modes get all
945 of their component registers set as well. */
947 static void
948 mark_reg (reg, xset)
949 rtx reg;
950 void *xset;
952 regset set = (regset) xset;
953 int regno = REGNO (reg);
955 if (GET_MODE (reg) == BLKmode)
956 abort ();
958 SET_REGNO_REG_SET (set, regno);
959 if (regno < FIRST_PSEUDO_REGISTER)
961 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
962 while (--n > 0)
963 SET_REGNO_REG_SET (set, regno + n);
967 /* Mark those regs which are needed at the end of the function as live
968 at the end of the last basic block. */
970 static void
971 mark_regs_live_at_end (set)
972 regset set;
974 unsigned int i;
976 /* If exiting needs the right stack value, consider the stack pointer
977 live at the end of the function. */
978 if ((HAVE_epilogue && reload_completed)
979 || ! EXIT_IGNORE_STACK
980 || (! FRAME_POINTER_REQUIRED
981 && ! current_function_calls_alloca
982 && flag_omit_frame_pointer)
983 || current_function_sp_is_unchanging)
985 SET_REGNO_REG_SET (set, STACK_POINTER_REGNUM);
988 /* Mark the frame pointer if needed at the end of the function. If
989 we end up eliminating it, it will be removed from the live list
990 of each basic block by reload. */
992 if (! reload_completed || frame_pointer_needed)
994 SET_REGNO_REG_SET (set, FRAME_POINTER_REGNUM);
995 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
996 /* If they are different, also mark the hard frame pointer as live. */
997 if (! LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
998 SET_REGNO_REG_SET (set, HARD_FRAME_POINTER_REGNUM);
999 #endif
1002 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
1003 /* Many architectures have a GP register even without flag_pic.
1004 Assume the pic register is not in use, or will be handled by
1005 other means, if it is not fixed. */
1006 if (PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
1007 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
1008 SET_REGNO_REG_SET (set, PIC_OFFSET_TABLE_REGNUM);
1009 #endif
1011 /* Mark all global registers, and all registers used by the epilogue
1012 as being live at the end of the function since they may be
1013 referenced by our caller. */
1014 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1015 if (global_regs[i] || EPILOGUE_USES (i))
1016 SET_REGNO_REG_SET (set, i);
1018 if (HAVE_epilogue && reload_completed)
1020 /* Mark all call-saved registers that we actually used. */
1021 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1022 if (regs_ever_live[i] && ! LOCAL_REGNO (i)
1023 && ! TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
1024 SET_REGNO_REG_SET (set, i);
1027 #ifdef EH_RETURN_DATA_REGNO
1028 /* Mark the registers that will contain data for the handler. */
1029 if (reload_completed && current_function_calls_eh_return)
1030 for (i = 0; ; ++i)
1032 unsigned regno = EH_RETURN_DATA_REGNO(i);
1033 if (regno == INVALID_REGNUM)
1034 break;
1035 SET_REGNO_REG_SET (set, regno);
1037 #endif
1038 #ifdef EH_RETURN_STACKADJ_RTX
1039 if ((! HAVE_epilogue || ! reload_completed)
1040 && current_function_calls_eh_return)
1042 rtx tmp = EH_RETURN_STACKADJ_RTX;
1043 if (tmp && REG_P (tmp))
1044 mark_reg (tmp, set);
1046 #endif
1047 #ifdef EH_RETURN_HANDLER_RTX
1048 if ((! HAVE_epilogue || ! reload_completed)
1049 && current_function_calls_eh_return)
1051 rtx tmp = EH_RETURN_HANDLER_RTX;
1052 if (tmp && REG_P (tmp))
1053 mark_reg (tmp, set);
1055 #endif
1057 /* Mark function return value. */
1058 diddle_return_value (mark_reg, set);
1061 /* Callback function for for_each_successor_phi. DATA is a regset.
1062 Sets the SRC_REGNO, the regno of the phi alternative for phi node
1063 INSN, in the regset. */
1065 static int
1066 set_phi_alternative_reg (insn, dest_regno, src_regno, data)
1067 rtx insn ATTRIBUTE_UNUSED;
1068 int dest_regno ATTRIBUTE_UNUSED;
1069 int src_regno;
1070 void *data;
1072 regset live = (regset) data;
1073 SET_REGNO_REG_SET (live, src_regno);
1074 return 0;
1077 /* Propagate global life info around the graph of basic blocks. Begin
1078 considering blocks with their corresponding bit set in BLOCKS_IN.
1079 If BLOCKS_IN is null, consider it the universal set.
1081 BLOCKS_OUT is set for every block that was changed. */
1083 static void
1084 calculate_global_regs_live (blocks_in, blocks_out, flags)
1085 sbitmap blocks_in, blocks_out;
1086 int flags;
1088 basic_block *queue, *qhead, *qtail, *qend, bb;
1089 regset tmp, new_live_at_end, invalidated_by_call;
1090 regset_head tmp_head, invalidated_by_call_head;
1091 regset_head new_live_at_end_head;
1092 int i;
1094 /* Some passes used to forget to clear the aux field of basic blocks,
1095 causing sick behavior here. */
1096 #ifdef ENABLE_CHECKING
1097 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
1098 if (bb->aux)
1099 abort ();
1100 #endif
1102 tmp = INITIALIZE_REG_SET (tmp_head);
1103 new_live_at_end = INITIALIZE_REG_SET (new_live_at_end_head);
1104 invalidated_by_call = INITIALIZE_REG_SET (invalidated_by_call_head);
1106 /* Inconveniently, this is only readily available in hard reg set form. */
1107 for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
1108 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
1109 SET_REGNO_REG_SET (invalidated_by_call, i);
1111 /* Create a worklist. Allocate an extra slot for ENTRY_BLOCK, and one
1112 because the `head == tail' style test for an empty queue doesn't
1113 work with a full queue. */
1114 queue = (basic_block *) xmalloc ((n_basic_blocks + 2) * sizeof (*queue));
1115 qtail = queue;
1116 qhead = qend = queue + n_basic_blocks + 2;
1118 /* Queue the blocks set in the initial mask. Do this in reverse block
1119 number order so that the first round is more likely to do
1120 useful work. We use AUX non-null to flag that the block is queued. */
1121 if (blocks_in)
1123 FOR_EACH_BB (bb)
1124 if (TEST_BIT (blocks_in, bb->index))
1126 *--qhead = bb;
1127 bb->aux = bb;
1130 else
1132 FOR_EACH_BB (bb)
1134 *--qhead = bb;
1135 bb->aux = bb;
1139 /* We clean aux when we remove the initially-enqueued bbs, but we
1140 don't enqueue ENTRY and EXIT initially, so clean them upfront and
1141 unconditionally. */
1142 ENTRY_BLOCK_PTR->aux = EXIT_BLOCK_PTR->aux = NULL;
1144 if (blocks_out)
1145 sbitmap_zero (blocks_out);
1147 /* We work through the queue until there are no more blocks. What
1148 is live at the end of this block is precisely the union of what
1149 is live at the beginning of all its successors. So, we set its
1150 GLOBAL_LIVE_AT_END field based on the GLOBAL_LIVE_AT_START field
1151 for its successors. Then, we compute GLOBAL_LIVE_AT_START for
1152 this block by walking through the instructions in this block in
1153 reverse order and updating as we go. If that changed
1154 GLOBAL_LIVE_AT_START, we add the predecessors of the block to the
1155 queue; they will now need to recalculate GLOBAL_LIVE_AT_END.
1157 We are guaranteed to terminate, because GLOBAL_LIVE_AT_START
1158 never shrinks. If a register appears in GLOBAL_LIVE_AT_START, it
1159 must either be live at the end of the block, or used within the
1160 block. In the latter case, it will certainly never disappear
1161 from GLOBAL_LIVE_AT_START. In the former case, the register
1162 could go away only if it disappeared from GLOBAL_LIVE_AT_START
1163 for one of the successor blocks. By induction, that cannot
1164 occur. */
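/* Expressed as standard dataflow equations, the fixed point computed
   below is (a sketch; USE and DEF stand for the local use/def
   properties that propagate_block derives while scanning a block):

       live_at_end (B)   = union over successors S of live_at_start (S)
       live_at_start (B) = USE (B) | (live_at_end (B) & ~DEF (B))

   iterated until no live_at_start set changes. */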
1165 while (qhead != qtail)
1167 int rescan, changed;
1168 basic_block bb;
1169 edge e;
1171 bb = *qhead++;
1172 if (qhead == qend)
1173 qhead = queue;
1174 bb->aux = NULL;
1176 /* Begin by propagating live_at_start from the successor blocks. */
1177 CLEAR_REG_SET (new_live_at_end);
1179 if (bb->succ)
1180 for (e = bb->succ; e; e = e->succ_next)
1182 basic_block sb = e->dest;
1184 /* Call-clobbered registers die across exception and
1185 call edges. */
1186 /* ??? Abnormal call edges ignored for the moment, as this gets
1187 confused by sibling call edges, which crashes reg-stack. */
1188 if (e->flags & EDGE_EH)
1190 bitmap_operation (tmp, sb->global_live_at_start,
1191 invalidated_by_call, BITMAP_AND_COMPL);
1192 IOR_REG_SET (new_live_at_end, tmp);
1194 else
1195 IOR_REG_SET (new_live_at_end, sb->global_live_at_start);
1197 /* If a target saves one register in another (instead of on
1198 the stack) the save register will need to be live for EH. */
1199 if (e->flags & EDGE_EH)
1200 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1201 if (EH_USES (i))
1202 SET_REGNO_REG_SET (new_live_at_end, i);
1204 else
1206 /* This might be a noreturn function that throws. And
1207 even if it isn't, getting the unwind info right helps
1208 debugging. */
1209 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1210 if (EH_USES (i))
1211 SET_REGNO_REG_SET (new_live_at_end, i);
1214 /* The all-important stack pointer must always be live. */
1215 SET_REGNO_REG_SET (new_live_at_end, STACK_POINTER_REGNUM);
1217 /* Before reload, there are a few registers that must be forced
1218 live everywhere -- which might not already be the case for
1219 blocks within infinite loops. */
1220 if (! reload_completed)
1222 /* Any reference to any pseudo before reload is a potential
1223 reference of the frame pointer. */
1224 SET_REGNO_REG_SET (new_live_at_end, FRAME_POINTER_REGNUM);
1226 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1227 /* Pseudos with argument area equivalences may require
1228 reloading via the argument pointer. */
1229 if (fixed_regs[ARG_POINTER_REGNUM])
1230 SET_REGNO_REG_SET (new_live_at_end, ARG_POINTER_REGNUM);
1231 #endif
1233 /* Any constant, or pseudo with constant equivalences, may
1234 require reloading from memory using the pic register. */
1235 if (PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
1236 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
1237 SET_REGNO_REG_SET (new_live_at_end, PIC_OFFSET_TABLE_REGNUM);
1240 /* Regs used in phi nodes are not included in
1241 global_live_at_start, since they are live only along a
1242 particular edge. Set those regs that are live because of a
1243 phi node alternative corresponding to this particular block. */
1244 if (in_ssa_form)
1245 for_each_successor_phi (bb, &set_phi_alternative_reg,
1246 new_live_at_end);
1248 if (bb == ENTRY_BLOCK_PTR)
1250 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
1251 continue;
1254 /* On our first pass through this block, we'll go ahead and continue.
1255 Recognize first pass by local_set NULL. On subsequent passes, we
1256 get to skip out early if live_at_end wouldn't have changed. */
1258 if (bb->local_set == NULL)
1260 bb->local_set = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1261 bb->cond_local_set = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1262 rescan = 1;
1264 else
1266 /* If any bits were removed from live_at_end, we'll have to
1267 rescan the block. This wouldn't be necessary if we had
1268 precalculated local_live, however with PROP_SCAN_DEAD_CODE
1269 local_live is really dependent on live_at_end. */
1270 CLEAR_REG_SET (tmp);
1271 rescan = bitmap_operation (tmp, bb->global_live_at_end,
1272 new_live_at_end, BITMAP_AND_COMPL);
1274 if (! rescan)
1276 /* If any of the registers in the new live_at_end set are
1277 conditionally set in this basic block, we must rescan.
1278 This is because conditional lifetimes at the end of the
1279 block do not just take the live_at_end set into account,
1280 but also the liveness at the start of each successor
1281 block. We can miss changes in those sets if we only
1282 compare the new live_at_end against the previous one. */
1283 CLEAR_REG_SET (tmp);
1284 rescan = bitmap_operation (tmp, new_live_at_end,
1285 bb->cond_local_set, BITMAP_AND);
1288 if (! rescan)
1290 /* Find the set of changed bits. Take this opportunity
1291 to notice when this set is empty and exit early. */
1292 CLEAR_REG_SET (tmp);
1293 changed = bitmap_operation (tmp, bb->global_live_at_end,
1294 new_live_at_end, BITMAP_XOR);
1295 if (! changed)
1296 continue;
1298 /* If any of the changed bits overlap with local_set,
1299 we'll have to rescan the block. Detect overlap by
1300 the AND with ~local_set turning off bits. */
1301 rescan = bitmap_operation (tmp, tmp, bb->local_set,
1302 BITMAP_AND_COMPL);
1306 /* Let our caller know that BB changed enough to require its
1307 death notes updated. */
1308 if (blocks_out)
1309 SET_BIT (blocks_out, bb->index);
1311 if (! rescan)
1313 /* Add to live_at_start the set of all registers in
1314 new_live_at_end that aren't in the old live_at_end. */
1316 bitmap_operation (tmp, new_live_at_end, bb->global_live_at_end,
1317 BITMAP_AND_COMPL);
1318 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
1320 changed = bitmap_operation (bb->global_live_at_start,
1321 bb->global_live_at_start,
1322 tmp, BITMAP_IOR);
1323 if (! changed)
1324 continue;
1326 else
1328 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
1330 /* Rescan the block insn by insn to turn (a copy of) live_at_end
1331 into live_at_start. */
1332 propagate_block (bb, new_live_at_end, bb->local_set,
1333 bb->cond_local_set, flags);
1335 /* If live_at_start didn't change, no need to go further. */
1336 if (REG_SET_EQUAL_P (bb->global_live_at_start, new_live_at_end))
1337 continue;
1339 COPY_REG_SET (bb->global_live_at_start, new_live_at_end);
1342 /* Queue all predecessors of BB so that we may re-examine
1343 their live_at_end. */
1344 for (e = bb->pred; e; e = e->pred_next)
1346 basic_block pb = e->src;
1347 if (pb->aux == NULL)
1349 *qtail++ = pb;
1350 if (qtail == qend)
1351 qtail = queue;
1352 pb->aux = pb;
1357 FREE_REG_SET (tmp);
1358 FREE_REG_SET (new_live_at_end);
1359 FREE_REG_SET (invalidated_by_call);
1361 if (blocks_out)
1363 EXECUTE_IF_SET_IN_SBITMAP (blocks_out, 0, i,
1365 basic_block bb = BASIC_BLOCK (i);
1366 FREE_REG_SET (bb->local_set);
1367 FREE_REG_SET (bb->cond_local_set);
1370 else
1372 FOR_EACH_BB (bb)
1374 FREE_REG_SET (bb->local_set);
1375 FREE_REG_SET (bb->cond_local_set);
1379 free (queue);
1383 /* This structure is used to pass parameters to and from the
1384 function find_regno_partial(). It is used to pass in the
1385 register number we are looking for, as well as to return any rtx
1386 we find. */
1388 typedef struct {
1389 unsigned regno_to_find;
1390 rtx retval;
1391 } find_regno_partial_param;
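/* A typical use (a sketch mirroring initialize_uninitialized_subregs
   below; REGNO and SEARCH_INSN stand for whatever register and insn
   the caller is interested in, and ZERO is where the caller would
   build a clearing move before inserting it wherever it wants):

       find_regno_partial_param param;
       rtx zero;

       param.regno_to_find = regno;
       for_each_rtx (&search_insn, find_regno_partial, &param);
       if (param.retval != NULL_RTX)
         zero = gen_move_insn (param.retval,
                               CONST0_RTX (GET_MODE (param.retval)));
*/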
1394 /* Find the rtx for the reg number specified in 'data' if it is
1395 part of an expression which only uses part of the register. Return
1396 it in the structure passed in. */
1397 static int
1398 find_regno_partial (ptr, data)
1399 rtx *ptr;
1400 void *data;
1402 find_regno_partial_param *param = (find_regno_partial_param *)data;
1403 unsigned reg = param->regno_to_find;
1404 param->retval = NULL_RTX;
1406 if (*ptr == NULL_RTX)
1407 return 0;
1409 switch (GET_CODE (*ptr))
1411 case ZERO_EXTRACT:
1412 case SIGN_EXTRACT:
1413 case STRICT_LOW_PART:
1414 if (GET_CODE (XEXP (*ptr, 0)) == REG && REGNO (XEXP (*ptr, 0)) == reg)
1416 param->retval = XEXP (*ptr, 0);
1417 return 1;
1419 break;
1421 case SUBREG:
1422 if (GET_CODE (SUBREG_REG (*ptr)) == REG
1423 && REGNO (SUBREG_REG (*ptr)) == reg)
1425 param->retval = SUBREG_REG (*ptr);
1426 return 1;
1428 break;
1430 default:
1431 break;
1434 return 0;
1437 /* Process all immediate successors of the entry block looking for pseudo
1438 registers which are live on entry. Find all of those whose first
1439 instance is a partial register reference of some kind, and initialize
1440 them to 0 after the entry block. This prevents setting bits within
1441 registers whose value is unknown and which may contain some kind of
1442 sticky bits we don't want.
1445 initialize_uninitialized_subregs ()
1447 rtx insn;
1448 edge e;
1449 int reg, did_something = 0;
1450 find_regno_partial_param param;
1452 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
1454 basic_block bb = e->dest;
1455 regset map = bb->global_live_at_start;
1456 EXECUTE_IF_SET_IN_REG_SET (map,
1457 FIRST_PSEUDO_REGISTER, reg,
1459 int uid = REGNO_FIRST_UID (reg);
1460 rtx i;
1462 /* Find an insn which mentions the register we are looking for.
1463 It's preferable to have an instance of the register's rtl since
1464 there may be various flags set which we need to duplicate.
1465 If we can't find it, it's probably an automatic whose initial
1466 value doesn't matter, or hopefully something we don't care about. */
1467 for (i = get_insns (); i && INSN_UID (i) != uid; i = NEXT_INSN (i))
1469 if (i != NULL_RTX)
1471 /* Found the insn, now get the REG rtx, if we can. */
1472 param.regno_to_find = reg;
1473 for_each_rtx (&i, find_regno_partial, &param);
1474 if (param.retval != NULL_RTX)
1476 insn = gen_move_insn (param.retval,
1477 CONST0_RTX (GET_MODE (param.retval)));
1478 insert_insn_on_edge (insn, e);
1479 did_something = 1;
1485 if (did_something)
1486 commit_edge_insertions ();
1487 return did_something;
1491 /* Subroutines of life analysis. */
1493 /* Allocate the permanent data structures that represent the results
1494 of life analysis. Not static since used also for stupid life analysis. */
1496 void
1497 allocate_bb_life_data ()
1499 basic_block bb;
1501 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
1503 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1504 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1507 regs_live_at_setjmp = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1510 void
1511 allocate_reg_life_data ()
1513 int i;
1515 max_regno = max_reg_num ();
1517 /* Recalculate the register space, in case it has grown. Old style
1518 vector oriented regsets would set regset_{size,bytes} here also. */
1519 allocate_reg_info (max_regno, FALSE, FALSE);
1521 /* Reset all the data we'll collect in propagate_block and its
1522 subroutines. */
1523 for (i = 0; i < max_regno; i++)
1525 REG_N_SETS (i) = 0;
1526 REG_N_REFS (i) = 0;
1527 REG_N_DEATHS (i) = 0;
1528 REG_N_CALLS_CROSSED (i) = 0;
1529 REG_LIVE_LENGTH (i) = 0;
1530 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
1534 /* Delete dead instructions for propagate_block. */
1536 static void
1537 propagate_block_delete_insn (insn)
1538 rtx insn;
1540 rtx inote = find_reg_note (insn, REG_LABEL, NULL_RTX);
1542 /* If the insn referred to a label, and that label was attached to
1543 an ADDR_VEC, it's safe to delete the ADDR_VEC. In fact, it's
1544 pretty much mandatory to delete it, because the ADDR_VEC may be
1545 referencing labels that no longer exist.
1547 INSN may reference a deleted label, particularly when a jump
1548 table has been optimized into a direct jump. There's no
1549 real good way to fix up the reference to the deleted label
1550 when the label is deleted, so we just allow it here. */
1552 if (inote && GET_CODE (inote) == CODE_LABEL)
1554 rtx label = XEXP (inote, 0);
1555 rtx next;
1557 /* The label may be forced if it has been put in the constant
1558 pool. If that is the only use we must discard the table
1559 jump following it, but not the label itself. */
1560 if (LABEL_NUSES (label) == 1 + LABEL_PRESERVE_P (label)
1561 && (next = next_nonnote_insn (label)) != NULL
1562 && GET_CODE (next) == JUMP_INSN
1563 && (GET_CODE (PATTERN (next)) == ADDR_VEC
1564 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
1566 rtx pat = PATTERN (next);
1567 int diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
1568 int len = XVECLEN (pat, diff_vec_p);
1569 int i;
1571 for (i = 0; i < len; i++)
1572 LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0))--;
1574 delete_insn_and_edges (next);
1575 ndead++;
1579 delete_insn_and_edges (insn);
1580 ndead++;
1583 /* Delete dead libcalls for propagate_block. Return the insn
1584 before the libcall. */
1586 static rtx
1587 propagate_block_delete_libcall (insn, note)
1588 rtx insn, note;
1590 rtx first = XEXP (note, 0);
1591 rtx before = PREV_INSN (first);
1593 delete_insn_chain_and_edges (first, insn);
1594 ndead++;
1595 return before;
1598 /* Update the life-status of regs for one insn. Return the previous insn. */
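/* A sketch of a typical caller (such as propagate_block) walking the
   block from its last insn backwards, using the returned insn so that
   the walk resumes correctly after deletions or auto-inc insertions:

       rtx insn, prev;

       for (insn = bb->end; ; insn = prev)
         {
           prev = propagate_one_insn (pbi, insn);
           if (insn == bb->head)
             break;
         }
*/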
1601 propagate_one_insn (pbi, insn)
1602 struct propagate_block_info *pbi;
1603 rtx insn;
1605 rtx prev = PREV_INSN (insn);
1606 int flags = pbi->flags;
1607 int insn_is_dead = 0;
1608 int libcall_is_dead = 0;
1609 rtx note;
1610 int i;
1612 if (! INSN_P (insn))
1613 return prev;
1615 note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
1616 if (flags & PROP_SCAN_DEAD_CODE)
1618 insn_is_dead = insn_dead_p (pbi, PATTERN (insn), 0, REG_NOTES (insn));
1619 libcall_is_dead = (insn_is_dead && note != 0
1620 && libcall_dead_p (pbi, note, insn));
1623 /* If an instruction consists of just dead store(s) on final pass,
1624 delete it. */
1625 if ((flags & PROP_KILL_DEAD_CODE) && insn_is_dead)
1627 /* If we're trying to delete a prologue or epilogue instruction
1628 that isn't flagged as possibly being dead, something is wrong.
1629 But if we are keeping the stack pointer depressed, we might well
1630 be deleting insns that are used to compute the amount to update
1631 it by, so they are fine. */
1632 if (reload_completed
1633 && !(TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
1634 && (TYPE_RETURNS_STACK_DEPRESSED
1635 (TREE_TYPE (current_function_decl))))
1636 && (((HAVE_epilogue || HAVE_prologue)
1637 && prologue_epilogue_contains (insn))
1638 || (HAVE_sibcall_epilogue
1639 && sibcall_epilogue_contains (insn)))
1640 && find_reg_note (insn, REG_MAYBE_DEAD, NULL_RTX) == 0)
1641 fatal_insn ("Attempt to delete prologue/epilogue insn:", insn);
1643 /* Record sets. Do this even for dead instructions, since they
1644 would have killed the values if they hadn't been deleted. */
1645 mark_set_regs (pbi, PATTERN (insn), insn);
1647 /* CC0 is now known to be dead. Either this insn used it,
1648 in which case it doesn't anymore, or clobbered it,
1649 so the next insn can't use it. */
1650 pbi->cc0_live = 0;
1652 if (libcall_is_dead)
1653 prev = propagate_block_delete_libcall (insn, note);
1654 else
1657 /* If INSN contains a RETVAL note and is dead, but the libcall
1658 as a whole is not dead, then we want to remove INSN, but
1659 not the whole libcall sequence.
1661 However, we need to also remove the dangling REG_LIBCALL
1662 note so that we do not have mis-matched LIBCALL/RETVAL
1663 notes. In theory we could find a new location for the
1664 REG_RETVAL note, but it hardly seems worth the effort.
1666 NOTE at this point will be the RETVAL note if it exists. */
1667 if (note)
1669 rtx libcall_note;
1671 libcall_note
1672 = find_reg_note (XEXP (note, 0), REG_LIBCALL, NULL_RTX);
1673 remove_note (XEXP (note, 0), libcall_note);
1676 /* Similarly if INSN contains a LIBCALL note, remove the
1677 dangling REG_RETVAL note. */
1678 note = find_reg_note (insn, REG_LIBCALL, NULL_RTX);
1679 if (note)
1681 rtx retval_note;
1683 retval_note
1684 = find_reg_note (XEXP (note, 0), REG_RETVAL, NULL_RTX);
1685 remove_note (XEXP (note, 0), retval_note);
1688 /* Now delete INSN. */
1689 propagate_block_delete_insn (insn);
1692 return prev;
1695 /* See if this is an increment or decrement that can be merged into
1696 a following memory address. */
1697 #ifdef AUTO_INC_DEC
1699 rtx x = single_set (insn);
1701 /* Does this instruction increment or decrement a register? */
1702 if ((flags & PROP_AUTOINC)
1703 && x != 0
1704 && GET_CODE (SET_DEST (x)) == REG
1705 && (GET_CODE (SET_SRC (x)) == PLUS
1706 || GET_CODE (SET_SRC (x)) == MINUS)
1707 && XEXP (SET_SRC (x), 0) == SET_DEST (x)
1708 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
1709 /* Ok, look for a following memory ref we can combine with.
1710 If one is found, change the memory ref to a PRE_INC
1711 or PRE_DEC, cancel this insn, and return 1.
1712 Return 0 if nothing has been done. */
1713 && try_pre_increment_1 (pbi, insn))
1714 return prev;
1716 #endif /* AUTO_INC_DEC */
1718 CLEAR_REG_SET (pbi->new_set);
1720 /* If this is not the final pass, and this insn is copying the value of
1721 a library call and it's dead, don't scan the insns that perform the
1722 library call, so that the call's arguments are not marked live. */
1723 if (libcall_is_dead)
1725 /* Record the death of the dest reg. */
1726 mark_set_regs (pbi, PATTERN (insn), insn);
1728 insn = XEXP (note, 0);
1729 return PREV_INSN (insn);
1731 else if (GET_CODE (PATTERN (insn)) == SET
1732 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
1733 && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
1734 && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx
1735 && GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == CONST_INT)
1736 /* We have an insn to pop a constant amount off the stack.
1737 (Such insns use PLUS regardless of the direction of the stack,
1738 and any insn to adjust the stack by a constant is always a pop.)
1739 These insns, if not dead stores, have no effect on life, though
1740 they do have an effect on the memory stores we are tracking. */
1741 invalidate_mems_from_set (pbi, stack_pointer_rtx);
1742 else
1744 rtx note;
1745 /* Any regs live at the time of a call instruction must not go
1746 in a register clobbered by calls. Find all regs now live and
1747 record this for them. */
1749 if (GET_CODE (insn) == CALL_INSN && (flags & PROP_REG_INFO))
1750 EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i,
1751 { REG_N_CALLS_CROSSED (i)++; });
1753 /* Record sets. Do this even for dead instructions, since they
1754 would have killed the values if they hadn't been deleted. */
1755 mark_set_regs (pbi, PATTERN (insn), insn);
1757 if (GET_CODE (insn) == CALL_INSN)
1759 int i;
1760 rtx note, cond;
1762 cond = NULL_RTX;
1763 if (GET_CODE (PATTERN (insn)) == COND_EXEC)
1764 cond = COND_EXEC_TEST (PATTERN (insn));
1766 /* Non-constant calls clobber memory, constant calls do not
1767 clobber memory, though they may clobber outgoing arguments
1768 on the stack. */
1769 if (! CONST_OR_PURE_CALL_P (insn))
1771 free_EXPR_LIST_list (&pbi->mem_set_list);
1772 pbi->mem_set_list_len = 0;
1774 else
1775 invalidate_mems_from_set (pbi, stack_pointer_rtx);
1777 /* There may be extra registers to be clobbered. */
1778 for (note = CALL_INSN_FUNCTION_USAGE (insn);
1779 note;
1780 note = XEXP (note, 1))
1781 if (GET_CODE (XEXP (note, 0)) == CLOBBER)
1782 mark_set_1 (pbi, CLOBBER, XEXP (XEXP (note, 0), 0),
1783 cond, insn, pbi->flags);
1785 /* Calls change all call-used and global registers. */
1786 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1787 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
1789 /* We do not want REG_UNUSED notes for these registers. */
1790 mark_set_1 (pbi, CLOBBER, regno_reg_rtx[i], cond, insn,
1791 pbi->flags & ~(PROP_DEATH_NOTES | PROP_REG_INFO));
1795 /* If an insn doesn't use CC0, it becomes dead since we assume
1796 that every insn clobbers it. So show it dead here;
1797 mark_used_regs will set it live if it is referenced. */
1798 pbi->cc0_live = 0;
1800 /* Record uses. */
1801 if (! insn_is_dead)
1802 mark_used_regs (pbi, PATTERN (insn), NULL_RTX, insn);
1803 if ((flags & PROP_EQUAL_NOTES)
1804 && ((note = find_reg_note (insn, REG_EQUAL, NULL_RTX))
1805 || (note = find_reg_note (insn, REG_EQUIV, NULL_RTX))))
1806 mark_used_regs (pbi, XEXP (note, 0), NULL_RTX, insn);
1808 /* Sometimes we may have inserted something before INSN (such as a move)
1809 when we make an auto-inc. So ensure we will scan those insns. */
1810 #ifdef AUTO_INC_DEC
1811 prev = PREV_INSN (insn);
1812 #endif
1814 if (! insn_is_dead && GET_CODE (insn) == CALL_INSN)
1816 int i;
1817 rtx note, cond;
1819 cond = NULL_RTX;
1820 if (GET_CODE (PATTERN (insn)) == COND_EXEC)
1821 cond = COND_EXEC_TEST (PATTERN (insn));
1823 /* Calls use their arguments. */
1824 for (note = CALL_INSN_FUNCTION_USAGE (insn);
1825 note;
1826 note = XEXP (note, 1))
1827 if (GET_CODE (XEXP (note, 0)) == USE)
1828 mark_used_regs (pbi, XEXP (XEXP (note, 0), 0),
1829 cond, insn);
1831 /* The stack ptr is used (honorarily) by a CALL insn. */
1832 SET_REGNO_REG_SET (pbi->reg_live, STACK_POINTER_REGNUM);
1834 /* Calls may also reference any of the global registers,
1835 so they are made live. */
1836 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1837 if (global_regs[i])
1838 mark_used_reg (pbi, regno_reg_rtx[i], cond, insn);
1842 /* On the final pass, update counts of the number of insns in which
1843 each reg is live. */
1844 if (flags & PROP_REG_INFO)
1845 EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i,
1846 { REG_LIVE_LENGTH (i)++; });
1848 return prev;
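/* Illustrative worked example (not part of the original sources): because
   the scan runs backwards, for an insn such as

       (set (reg 100) (plus (reg 100) (reg 101)))

   propagate_one_insn first processes the set, clearing reg 100 from
   PBI->REG_LIVE, and then the uses, setting regs 100 and 101 live again.
   A register that is both used and set by an insn therefore remains live
   on entry to it.  The register numbers here are invented.  */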
1851 /* Initialize a propagate_block_info struct for public consumption.
1852 Note that the structure itself is opaque to this file, but that
1853 the user can use the regsets provided here. */
1855 struct propagate_block_info *
1856 init_propagate_block_info (bb, live, local_set, cond_local_set, flags)
1857 basic_block bb;
1858 regset live, local_set, cond_local_set;
1859 int flags;
1861 struct propagate_block_info *pbi = xmalloc (sizeof (*pbi));
1863 pbi->bb = bb;
1864 pbi->reg_live = live;
1865 pbi->mem_set_list = NULL_RTX;
1866 pbi->mem_set_list_len = 0;
1867 pbi->local_set = local_set;
1868 pbi->cond_local_set = cond_local_set;
1869 pbi->cc0_live = 0;
1870 pbi->flags = flags;
1872 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
1873 pbi->reg_next_use = (rtx *) xcalloc (max_reg_num (), sizeof (rtx));
1874 else
1875 pbi->reg_next_use = NULL;
1877 pbi->new_set = BITMAP_XMALLOC ();
1879 #ifdef HAVE_conditional_execution
1880 pbi->reg_cond_dead = splay_tree_new (splay_tree_compare_ints, NULL,
1881 free_reg_cond_life_info);
1882 pbi->reg_cond_reg = BITMAP_XMALLOC ();
1884 /* If this block ends in a conditional branch, for each register live
1885 from one side of the branch and not the other, record the register
1886 as conditionally dead. */
1887 if (GET_CODE (bb->end) == JUMP_INSN
1888 && any_condjump_p (bb->end))
1890 regset_head diff_head;
1891 regset diff = INITIALIZE_REG_SET (diff_head);
1892 basic_block bb_true, bb_false;
1893 rtx cond_true, cond_false, set_src;
1894 int i;
1896 /* Identify the successor blocks. */
1897 bb_true = bb->succ->dest;
1898 if (bb->succ->succ_next != NULL)
1900 bb_false = bb->succ->succ_next->dest;
1902 if (bb->succ->flags & EDGE_FALLTHRU)
1904 basic_block t = bb_false;
1905 bb_false = bb_true;
1906 bb_true = t;
1908 else if (! (bb->succ->succ_next->flags & EDGE_FALLTHRU))
1909 abort ();
1911 else
1913 /* This can happen with a conditional jump to the next insn. */
1914 if (JUMP_LABEL (bb->end) != bb_true->head)
1915 abort ();
1917 /* Simplest way to do nothing. */
1918 bb_false = bb_true;
1921 /* Extract the condition from the branch. */
1922 set_src = SET_SRC (pc_set (bb->end));
1923 cond_true = XEXP (set_src, 0);
1924 cond_false = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond_true)),
1925 GET_MODE (cond_true), XEXP (cond_true, 0),
1926 XEXP (cond_true, 1));
1927 if (GET_CODE (XEXP (set_src, 1)) == PC)
1929 rtx t = cond_false;
1930 cond_false = cond_true;
1931 cond_true = t;
2934 /* Compute which registers lead different lives in the successors. */
1935 if (bitmap_operation (diff, bb_true->global_live_at_start,
1936 bb_false->global_live_at_start, BITMAP_XOR))
1938 rtx reg = XEXP (cond_true, 0);
1940 if (GET_CODE (reg) == SUBREG)
1941 reg = SUBREG_REG (reg);
1943 if (GET_CODE (reg) != REG)
1944 abort ();
1946 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (reg));
1948 /* For each such register, mark it conditionally dead. */
1949 EXECUTE_IF_SET_IN_REG_SET
1950 (diff, 0, i,
1952 struct reg_cond_life_info *rcli;
1953 rtx cond;
1955 rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
1957 if (REGNO_REG_SET_P (bb_true->global_live_at_start, i))
1958 cond = cond_false;
1959 else
1960 cond = cond_true;
1961 rcli->condition = cond;
1962 rcli->stores = const0_rtx;
1963 rcli->orig_condition = cond;
1965 splay_tree_insert (pbi->reg_cond_dead, i,
1966 (splay_tree_value) rcli);
1970 FREE_REG_SET (diff);
1972 #endif
1974 /* If this block has no successors, any stores to the frame that aren't
1975 used later in the block are dead. So make a pass over the block
1976 recording any such that are made and show them dead at the end. We do
1977 a very conservative and simple job here. */
1978 if (optimize
1979 && ! (TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
1980 && (TYPE_RETURNS_STACK_DEPRESSED
1981 (TREE_TYPE (current_function_decl))))
1982 && (flags & PROP_SCAN_DEAD_STORES)
1983 && (bb->succ == NULL
1984 || (bb->succ->succ_next == NULL
1985 && bb->succ->dest == EXIT_BLOCK_PTR
1986 && ! current_function_calls_eh_return)))
1988 rtx insn, set;
1989 for (insn = bb->end; insn != bb->head; insn = PREV_INSN (insn))
1990 if (GET_CODE (insn) == INSN
1991 && (set = single_set (insn))
1992 && GET_CODE (SET_DEST (set)) == MEM)
1994 rtx mem = SET_DEST (set);
1995 rtx canon_mem = canon_rtx (mem);
1997 /* This optimization is performed by faking a store to the
1998 memory at the end of the block. This doesn't work for
1999 unchanging memories because multiple stores to unchanging
2000 memory are illegal and alias analysis doesn't consider it. */
2001 if (RTX_UNCHANGING_P (canon_mem))
2002 continue;
2004 if (XEXP (canon_mem, 0) == frame_pointer_rtx
2005 || (GET_CODE (XEXP (canon_mem, 0)) == PLUS
2006 && XEXP (XEXP (canon_mem, 0), 0) == frame_pointer_rtx
2007 && GET_CODE (XEXP (XEXP (canon_mem, 0), 1)) == CONST_INT))
2008 add_to_mem_set_list (pbi, canon_mem);
2012 return pbi;
2015 /* Release a propagate_block_info struct. */
2017 void
2018 free_propagate_block_info (pbi)
2019 struct propagate_block_info *pbi;
2021 free_EXPR_LIST_list (&pbi->mem_set_list);
2023 BITMAP_XFREE (pbi->new_set);
2025 #ifdef HAVE_conditional_execution
2026 splay_tree_delete (pbi->reg_cond_dead);
2027 BITMAP_XFREE (pbi->reg_cond_reg);
2028 #endif
2030 if (pbi->reg_next_use)
2031 free (pbi->reg_next_use);
2033 free (pbi);
2036 /* Compute the registers live at the beginning of a basic block BB from
2037 those live at the end.
2039 When called, REG_LIVE contains those live at the end. On return, it
2040 contains those live at the beginning.
2042 LOCAL_SET, if non-null, will be set with all registers killed
2043 unconditionally by this basic block.
2044 Likewise, COND_LOCAL_SET, if non-null, will be set with all registers
2045 killed conditionally by this basic block. If there is any unconditional
2046 set of a register, then the corresponding bit will be set in LOCAL_SET
2047 and cleared in COND_LOCAL_SET.
2048 It is valid for LOCAL_SET and COND_LOCAL_SET to be the same set. In this
2049 case, the resulting set will be equal to the union of the two sets that
2050 would otherwise be computed.
2052 Return nonzero if an INSN is deleted (i.e. by dead code removal). */
2054 int
2055 propagate_block (bb, live, local_set, cond_local_set, flags)
2056 basic_block bb;
2057 regset live;
2058 regset local_set;
2059 regset cond_local_set;
2060 int flags;
2062 struct propagate_block_info *pbi;
2063 rtx insn, prev;
2064 int changed;
2066 pbi = init_propagate_block_info (bb, live, local_set, cond_local_set, flags);
2068 if (flags & PROP_REG_INFO)
2070 int i;
2072 /* Process the regs live at the end of the block.
2073 Mark them as not local to any one basic block. */
2074 EXECUTE_IF_SET_IN_REG_SET (live, 0, i,
2075 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
2078 /* Scan the block an insn at a time from end to beginning. */
2080 changed = 0;
2081 for (insn = bb->end;; insn = prev)
2083 /* If this is a call to `setjmp' et al, warn if any
2084 non-volatile datum is live. */
2085 if ((flags & PROP_REG_INFO)
2086 && GET_CODE (insn) == CALL_INSN
2087 && find_reg_note (insn, REG_SETJMP, NULL))
2088 IOR_REG_SET (regs_live_at_setjmp, pbi->reg_live);
2090 prev = propagate_one_insn (pbi, insn);
2091 changed |= NEXT_INSN (prev) != insn;
2093 if (insn == bb->head)
2094 break;
2097 free_propagate_block_info (pbi);
2099 return changed;
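/* A minimal usage sketch (illustrative only, not part of flow.c): a caller
   that wants the registers live at the head of one block could drive
   propagate_block like this.  The regset macros are the ones already used
   in this file; the choice of PROP_DEATH_NOTES is just an example.  */
#if 0
static void
example_block_liveness (bb)
     basic_block bb;
{
  regset_head live_head;
  regset live = INITIALIZE_REG_SET (live_head);

  COPY_REG_SET (live, bb->global_live_at_end);
  propagate_block (bb, live, NULL, NULL, PROP_DEATH_NOTES);
  /* LIVE now holds the registers live at bb->head.  */
  FREE_REG_SET (live);
}
#endif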
2102 /* Return 1 if X (the body of an insn, or part of it) is just dead stores
2103 (SET expressions whose destinations are registers dead after the insn).
2104 PBI->REG_LIVE is the regset that says which regs are alive after the insn.
2106 Unless CALL_OK is nonzero, an insn is needed if it contains a CALL.
2108 If X is the entire body of an insn, NOTES contains the reg notes
2109 pertaining to the insn. */
2111 static int
2112 insn_dead_p (pbi, x, call_ok, notes)
2113 struct propagate_block_info *pbi;
2114 rtx x;
2115 int call_ok;
2116 rtx notes ATTRIBUTE_UNUSED;
2118 enum rtx_code code = GET_CODE (x);
2120 /* Don't eliminate insns that may trap. */
2121 if (flag_non_call_exceptions && may_trap_p (x))
2122 return 0;
2124 #ifdef AUTO_INC_DEC
2125 /* As flow is invoked after combine, we must take existing AUTO_INC
2126 expressions into account. */
2127 for (; notes; notes = XEXP (notes, 1))
2129 if (REG_NOTE_KIND (notes) == REG_INC)
2131 int regno = REGNO (XEXP (notes, 0));
2133 /* Don't delete insns to set global regs. */
2134 if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
2135 || REGNO_REG_SET_P (pbi->reg_live, regno))
2136 return 0;
2139 #endif
2141 /* If setting something that's a reg or part of one,
2142 see if that register's altered value will be live. */
2144 if (code == SET)
2146 rtx r = SET_DEST (x);
2148 #ifdef HAVE_cc0
2149 if (GET_CODE (r) == CC0)
2150 return ! pbi->cc0_live;
2151 #endif
2153 /* A SET that is a subroutine call cannot be dead. */
2154 if (GET_CODE (SET_SRC (x)) == CALL)
2156 if (! call_ok)
2157 return 0;
2160 /* Don't eliminate loads from volatile memory or volatile asms. */
2161 else if (volatile_refs_p (SET_SRC (x)))
2162 return 0;
2164 if (GET_CODE (r) == MEM)
2166 rtx temp, canon_r;
2168 if (MEM_VOLATILE_P (r) || GET_MODE (r) == BLKmode)
2169 return 0;
2171 canon_r = canon_rtx (r);
2173 /* Walk the set of memory locations we are currently tracking
2174 and see if one is an identical match to this memory location.
2175 If so, this memory write is dead (remember, we're walking
2176 backwards from the end of the block to the start). Since
2177 rtx_equal_p does not check the alias set or flags, we also
2178 must have the potential for them to conflict (anti_dependence). */
2179 for (temp = pbi->mem_set_list; temp != 0; temp = XEXP (temp, 1))
2180 if (anti_dependence (r, XEXP (temp, 0)))
2182 rtx mem = XEXP (temp, 0);
2184 if (rtx_equal_p (XEXP (canon_r, 0), XEXP (mem, 0))
2185 && (GET_MODE_SIZE (GET_MODE (canon_r))
2186 <= GET_MODE_SIZE (GET_MODE (mem))))
2187 return 1;
2189 #ifdef AUTO_INC_DEC
2190 /* Check if memory reference matches an auto increment. Only
2191 post increment/decrement or modify are valid. */
2192 if (GET_MODE (mem) == GET_MODE (r)
2193 && (GET_CODE (XEXP (mem, 0)) == POST_DEC
2194 || GET_CODE (XEXP (mem, 0)) == POST_INC
2195 || GET_CODE (XEXP (mem, 0)) == POST_MODIFY)
2196 && GET_MODE (XEXP (mem, 0)) == GET_MODE (r)
2197 && rtx_equal_p (XEXP (XEXP (mem, 0), 0), XEXP (r, 0)))
2198 return 1;
2199 #endif
2202 else
2204 while (GET_CODE (r) == SUBREG
2205 || GET_CODE (r) == STRICT_LOW_PART
2206 || GET_CODE (r) == ZERO_EXTRACT)
2207 r = XEXP (r, 0);
2209 if (GET_CODE (r) == REG)
2211 int regno = REGNO (r);
2213 /* Obvious. */
2214 if (REGNO_REG_SET_P (pbi->reg_live, regno))
2215 return 0;
2217 /* If this is a hard register, verify that subsequent
2218 words are not needed. */
2219 if (regno < FIRST_PSEUDO_REGISTER)
2221 int n = HARD_REGNO_NREGS (regno, GET_MODE (r));
2223 while (--n > 0)
2224 if (REGNO_REG_SET_P (pbi->reg_live, regno+n))
2225 return 0;
2228 /* Don't delete insns to set global regs. */
2229 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
2230 return 0;
2232 /* Make sure insns to set the stack pointer aren't deleted. */
2233 if (regno == STACK_POINTER_REGNUM)
2234 return 0;
2236 /* ??? These bits might be redundant with the force live bits
2237 in calculate_global_regs_live. We would delete from
2238 sequential sets; whether this actually affects real code
2239 for anything but the stack pointer I don't know. */
2240 /* Make sure insns to set the frame pointer aren't deleted. */
2241 if (regno == FRAME_POINTER_REGNUM
2242 && (! reload_completed || frame_pointer_needed))
2243 return 0;
2244 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2245 if (regno == HARD_FRAME_POINTER_REGNUM
2246 && (! reload_completed || frame_pointer_needed))
2247 return 0;
2248 #endif
2250 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
2251 /* Make sure insns to set arg pointer are never deleted
2252 (if the arg pointer isn't fixed, there will be a USE
2253 for it, so we can treat it normally). */
2254 if (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
2255 return 0;
2256 #endif
2258 /* Otherwise, the set is dead. */
2259 return 1;
2264 /* If performing several activities, insn is dead if each activity
2265 is individually dead. Also, CLOBBERs and USEs can be ignored; a
2266 CLOBBER or USE that's inside a PARALLEL doesn't make the insn
2267 worth keeping. */
2268 else if (code == PARALLEL)
2270 int i = XVECLEN (x, 0);
2272 for (i--; i >= 0; i--)
2273 if (GET_CODE (XVECEXP (x, 0, i)) != CLOBBER
2274 && GET_CODE (XVECEXP (x, 0, i)) != USE
2275 && ! insn_dead_p (pbi, XVECEXP (x, 0, i), call_ok, NULL_RTX))
2276 return 0;
2278 return 1;
2281 /* A CLOBBER of a pseudo-register that is dead serves no purpose. That
2282 is not necessarily true for hard registers. */
2283 else if (code == CLOBBER && GET_CODE (XEXP (x, 0)) == REG
2284 && REGNO (XEXP (x, 0)) >= FIRST_PSEUDO_REGISTER
2285 && ! REGNO_REG_SET_P (pbi->reg_live, REGNO (XEXP (x, 0))))
2286 return 1;
2288 /* We do not check other CLOBBER or USE here. An insn consisting of just
2289 a CLOBBER or just a USE should not be deleted. */
2290 return 0;
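/* Illustrative example (not from the original sources): given the checks
   above, an insn whose entire pattern is

       (set (reg 120) (const_int 0))

   is reported dead when reg 120 is a pseudo with no bit set in
   PBI->REG_LIVE, the insn carries no REG_INC note for a live or global
   register, and the insn cannot trap.  The register number is invented.  */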
2293 /* If INSN is the last insn in a libcall, and assuming INSN is dead,
2294 return 1 if the entire library call is dead.
2295 This is true if INSN copies a register (hard or pseudo)
2296 and if the hard return reg of the call insn is dead.
2297 (The caller should have tested the destination of the SET inside
2298 INSN already for death.)
2300 If this insn doesn't just copy a register, then we don't
2301 have an ordinary libcall. In that case, cse could not have
2302 managed to substitute the source for the dest later on,
2303 so we can assume the libcall is dead.
2305 PBI is the block info giving pseudoregs live before this insn.
2306 NOTE is the REG_RETVAL note of the insn. */
2308 static int
2309 libcall_dead_p (pbi, note, insn)
2310 struct propagate_block_info *pbi;
2311 rtx note;
2312 rtx insn;
2314 rtx x = single_set (insn);
2316 if (x)
2318 rtx r = SET_SRC (x);
2320 if (GET_CODE (r) == REG)
2322 rtx call = XEXP (note, 0);
2323 rtx call_pat;
2324 int i;
2326 /* Find the call insn. */
2327 while (call != insn && GET_CODE (call) != CALL_INSN)
2328 call = NEXT_INSN (call);
2330 /* If there is none, do nothing special,
2331 since ordinary death handling can understand these insns. */
2332 if (call == insn)
2333 return 0;
2335 /* See if the hard reg holding the value is dead.
2336 If this is a PARALLEL, find the call within it. */
2337 call_pat = PATTERN (call);
2338 if (GET_CODE (call_pat) == PARALLEL)
2340 for (i = XVECLEN (call_pat, 0) - 1; i >= 0; i--)
2341 if (GET_CODE (XVECEXP (call_pat, 0, i)) == SET
2342 && GET_CODE (SET_SRC (XVECEXP (call_pat, 0, i))) == CALL)
2343 break;
2345 /* This may be a library call that is returning a value
2346 via invisible pointer. Do nothing special, since
2347 ordinary death handling can understand these insns. */
2348 if (i < 0)
2349 return 0;
2351 call_pat = XVECEXP (call_pat, 0, i);
2354 return insn_dead_p (pbi, call_pat, 1, REG_NOTES (call));
2357 return 1;
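/* Illustrative example (not from the original sources): a libcall sequence
   typically ends with an insn such as

       (set (reg 150) (reg:SI 0))

   carrying a REG_RETVAL note that points back at the first insn of the
   sequence.  The caller has already determined that reg 150 is dead; if
   insn_dead_p also reports the call's SET of the hard return register as
   dead, the entire sequence can be removed.  The register numbers (150 and
   hard reg 0) are invented for the example.  */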
2360 /* Return 1 if register REGNO was used before it was set, i.e. if it is
2361 live at function entry. Don't count global register variables, variables
2362 in registers that can be used for function arg passing, or variables in
2363 fixed hard registers. */
2365 int
2366 regno_uninitialized (regno)
2367 unsigned int regno;
2369 if (n_basic_blocks == 0
2370 || (regno < FIRST_PSEUDO_REGISTER
2371 && (global_regs[regno]
2372 || fixed_regs[regno]
2373 || FUNCTION_ARG_REGNO_P (regno))))
2374 return 0;
2376 return REGNO_REG_SET_P (ENTRY_BLOCK_PTR->next_bb->global_live_at_start, regno);
2379 /* 1 if register REGNO was alive at a place where `setjmp' was called
2380 and was set more than once or is an argument.
2381 Such regs may be clobbered by `longjmp'. */
2383 int
2384 regno_clobbered_at_setjmp (regno)
2385 int regno;
2387 if (n_basic_blocks == 0)
2388 return 0;
2390 return ((REG_N_SETS (regno) > 1
2391 || REGNO_REG_SET_P (ENTRY_BLOCK_PTR->next_bb->global_live_at_start, regno))
2392 && REGNO_REG_SET_P (regs_live_at_setjmp, regno));
2395 /* Add MEM to PBI->MEM_SET_LIST. MEM should be canonical. Respect the
2396 maximal list size; look for overlaps in mode and select the largest. */
2397 static void
2398 add_to_mem_set_list (pbi, mem)
2399 struct propagate_block_info *pbi;
2400 rtx mem;
2402 rtx i;
2404 /* We don't know how large a BLKmode store is, so we must not
2405 take it into consideration. */
2406 if (GET_MODE (mem) == BLKmode)
2407 return;
2409 for (i = pbi->mem_set_list; i ; i = XEXP (i, 1))
2411 rtx e = XEXP (i, 0);
2412 if (rtx_equal_p (XEXP (mem, 0), XEXP (e, 0)))
2414 if (GET_MODE_SIZE (GET_MODE (mem)) > GET_MODE_SIZE (GET_MODE (e)))
2416 #ifdef AUTO_INC_DEC
2417 /* If we must store a copy of the mem, we can just modify
2418 the mode of the stored copy. */
2419 if (pbi->flags & PROP_AUTOINC)
2420 PUT_MODE (e, GET_MODE (mem));
2421 else
2422 #endif
2423 XEXP (i, 0) = mem;
2425 return;
2429 if (pbi->mem_set_list_len < MAX_MEM_SET_LIST_LEN)
2431 #ifdef AUTO_INC_DEC
2432 /* Store a copy of mem, otherwise the address may be
2433 scrogged by find_auto_inc. */
2434 if (pbi->flags & PROP_AUTOINC)
2435 mem = shallow_copy_rtx (mem);
2436 #endif
2437 pbi->mem_set_list = alloc_EXPR_LIST (0, mem, pbi->mem_set_list);
2438 pbi->mem_set_list_len++;
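/* Worked example (illustrative, not from the original sources): scanning a
   block backwards, suppose the store

       (set (mem:SI (plus (reg fp) (const_int -4))) (reg 130))

   is recorded on the mem_set_list.  When the scan later reaches an earlier
   store to the same canonical address,

       (set (mem:SI (plus (reg fp) (const_int -4))) (reg 131))

   insn_dead_p finds the recorded entry and the earlier store is dead,
   provided no insn in between read that slot or modified the frame pointer
   (either of which would have flushed the entry).  Register numbers are
   invented.  */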
2442 /* INSN references memory, possibly using autoincrement addressing modes.
2443 Find any entries on the mem_set_list that need to be invalidated due
2444 to an address change. */
2446 static int
2447 invalidate_mems_from_autoinc (px, data)
2448 rtx *px;
2449 void *data;
2451 rtx x = *px;
2452 struct propagate_block_info *pbi = data;
2454 if (GET_RTX_CLASS (GET_CODE (x)) == 'a')
2456 invalidate_mems_from_set (pbi, XEXP (x, 0));
2457 return -1;
2460 return 0;
2463 /* EXP is a REG. Remove any dependent entries from pbi->mem_set_list. */
2465 static void
2466 invalidate_mems_from_set (pbi, exp)
2467 struct propagate_block_info *pbi;
2468 rtx exp;
2470 rtx temp = pbi->mem_set_list;
2471 rtx prev = NULL_RTX;
2472 rtx next;
2474 while (temp)
2476 next = XEXP (temp, 1);
2477 if (reg_overlap_mentioned_p (exp, XEXP (temp, 0)))
2479 /* Splice this entry out of the list. */
2480 if (prev)
2481 XEXP (prev, 1) = next;
2482 else
2483 pbi->mem_set_list = next;
2484 free_EXPR_LIST_node (temp);
2485 pbi->mem_set_list_len--;
2487 else
2488 prev = temp;
2489 temp = next;
2493 /* Process the registers that are set within X. Their bits are cleared
2494 in PBI->REG_LIVE, because they are dead prior to this insn.
2496 If INSN is nonzero, it is the insn being processed.
2498 FLAGS is the set of operations to perform. */
2500 static void
2501 mark_set_regs (pbi, x, insn)
2502 struct propagate_block_info *pbi;
2503 rtx x, insn;
2505 rtx cond = NULL_RTX;
2506 rtx link;
2507 enum rtx_code code;
2509 if (insn)
2510 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
2512 if (REG_NOTE_KIND (link) == REG_INC)
2513 mark_set_1 (pbi, SET, XEXP (link, 0),
2514 (GET_CODE (x) == COND_EXEC
2515 ? COND_EXEC_TEST (x) : NULL_RTX),
2516 insn, pbi->flags);
2518 retry:
2519 switch (code = GET_CODE (x))
2521 case SET:
2522 case CLOBBER:
2523 mark_set_1 (pbi, code, SET_DEST (x), cond, insn, pbi->flags);
2524 return;
2526 case COND_EXEC:
2527 cond = COND_EXEC_TEST (x);
2528 x = COND_EXEC_CODE (x);
2529 goto retry;
2531 case PARALLEL:
2533 int i;
2535 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
2537 rtx sub = XVECEXP (x, 0, i);
2538 switch (code = GET_CODE (sub))
2540 case COND_EXEC:
2541 if (cond != NULL_RTX)
2542 abort ();
2544 cond = COND_EXEC_TEST (sub);
2545 sub = COND_EXEC_CODE (sub);
2546 if (GET_CODE (sub) != SET && GET_CODE (sub) != CLOBBER)
2547 break;
2548 /* Fall through. */
2550 case SET:
2551 case CLOBBER:
2552 mark_set_1 (pbi, code, SET_DEST (sub), cond, insn, pbi->flags);
2553 break;
2555 default:
2556 break;
2559 break;
2562 default:
2563 break;
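/* Illustrative example (not from the original sources): for a predicated
   pattern such as

       (cond_exec (ne (reg p6) (const_int 0))
                  (set (reg 200) (reg 201)))

   mark_set_regs peels off the COND_EXEC and calls mark_set_1 on the SET
   destination with COND = (ne (reg p6) (const_int 0)), so the store is
   recorded as a conditional set rather than an unconditional kill.  The
   register names are invented.  */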
2567 /* Process a single set, which appears in INSN. REG (which may not
2568 actually be a REG, it may also be a SUBREG, PARALLEL, etc.) is
2569 being set using the CODE (which may be SET, CLOBBER, or COND_EXEC).
2570 If the set is conditional (because it appears in a COND_EXEC), COND
2571 will be the condition. */
2573 static void
2574 mark_set_1 (pbi, code, reg, cond, insn, flags)
2575 struct propagate_block_info *pbi;
2576 enum rtx_code code;
2577 rtx reg, cond, insn;
2578 int flags;
2580 int regno_first = -1, regno_last = -1;
2581 unsigned long not_dead = 0;
2582 int i;
2584 /* Modifying just one hardware register of a multi-reg value or just a
2585 byte field of a register does not mean the value from before this insn
2586 is now dead. Of course, if it was dead after, it's unused now. */
2588 switch (GET_CODE (reg))
2590 case PARALLEL:
2591 /* Some targets place small structures in registers for return values of
2592 functions. We have to detect this case specially here to get correct
2593 flow information. */
2594 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
2595 if (XEXP (XVECEXP (reg, 0, i), 0) != 0)
2596 mark_set_1 (pbi, code, XEXP (XVECEXP (reg, 0, i), 0), cond, insn,
2597 flags);
2598 return;
2600 case ZERO_EXTRACT:
2601 case SIGN_EXTRACT:
2602 case STRICT_LOW_PART:
2603 /* ??? Assumes STRICT_LOW_PART not used on multi-word registers. */
2604 do
2605 reg = XEXP (reg, 0);
2606 while (GET_CODE (reg) == SUBREG
2607 || GET_CODE (reg) == ZERO_EXTRACT
2608 || GET_CODE (reg) == SIGN_EXTRACT
2609 || GET_CODE (reg) == STRICT_LOW_PART);
2610 if (GET_CODE (reg) == MEM)
2611 break;
2612 not_dead = (unsigned long) REGNO_REG_SET_P (pbi->reg_live, REGNO (reg));
2613 /* Fall through. */
2615 case REG:
2616 regno_last = regno_first = REGNO (reg);
2617 if (regno_first < FIRST_PSEUDO_REGISTER)
2618 regno_last += HARD_REGNO_NREGS (regno_first, GET_MODE (reg)) - 1;
2619 break;
2621 case SUBREG:
2622 if (GET_CODE (SUBREG_REG (reg)) == REG)
2624 enum machine_mode outer_mode = GET_MODE (reg);
2625 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (reg));
2627 /* Identify the range of registers affected. This is moderately
2628 tricky for hard registers. See alter_subreg. */
2630 regno_last = regno_first = REGNO (SUBREG_REG (reg));
2631 if (regno_first < FIRST_PSEUDO_REGISTER)
2633 regno_first += subreg_regno_offset (regno_first, inner_mode,
2634 SUBREG_BYTE (reg),
2635 outer_mode);
2636 regno_last = (regno_first
2637 + HARD_REGNO_NREGS (regno_first, outer_mode) - 1);
2639 /* Since we've just adjusted the register number ranges, make
2640 sure REG matches. Otherwise some_was_live will be clear
2641 when it shouldn't have been, and we'll create incorrect
2642 REG_UNUSED notes. */
2643 reg = gen_rtx_REG (outer_mode, regno_first);
2645 else
2647 /* If the number of words in the subreg is less than the number
2648 of words in the full register, we have a well-defined partial
2649 set. Otherwise the high bits are undefined.
2651 This is only really applicable to pseudos, since we just took
2652 care of multi-word hard registers. */
2653 if (((GET_MODE_SIZE (outer_mode)
2654 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
2655 < ((GET_MODE_SIZE (inner_mode)
2656 + UNITS_PER_WORD - 1) / UNITS_PER_WORD))
2657 not_dead = (unsigned long) REGNO_REG_SET_P (pbi->reg_live,
2658 regno_first);
2660 reg = SUBREG_REG (reg);
2663 else
2664 reg = SUBREG_REG (reg);
2665 break;
2667 default:
2668 break;
2671 /* If this set is a MEM, then it kills any aliased writes.
2672 If this set is a REG, then it kills any MEMs which use the reg. */
2673 if (optimize && (flags & PROP_SCAN_DEAD_STORES))
2675 if (GET_CODE (reg) == REG)
2676 invalidate_mems_from_set (pbi, reg);
2678 /* If the memory reference had embedded side effects (autoincrement
2679 address modes), then we may need to kill some entries on the
2680 memory set list. */
2681 if (insn && GET_CODE (reg) == MEM)
2682 for_each_rtx (&PATTERN (insn), invalidate_mems_from_autoinc, pbi);
2684 if (GET_CODE (reg) == MEM && ! side_effects_p (reg)
2685 /* ??? With more effort we could track conditional memory life. */
2686 && ! cond)
2687 add_to_mem_set_list (pbi, canon_rtx (reg));
2690 if (GET_CODE (reg) == REG
2691 && ! (regno_first == FRAME_POINTER_REGNUM
2692 && (! reload_completed || frame_pointer_needed))
2693 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2694 && ! (regno_first == HARD_FRAME_POINTER_REGNUM
2695 && (! reload_completed || frame_pointer_needed))
2696 #endif
2697 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
2698 && ! (regno_first == ARG_POINTER_REGNUM && fixed_regs[regno_first])
2699 #endif
2702 int some_was_live = 0, some_was_dead = 0;
2704 for (i = regno_first; i <= regno_last; ++i)
2706 int needed_regno = REGNO_REG_SET_P (pbi->reg_live, i);
2707 if (pbi->local_set)
2709 /* Order of the set operation matters here since both
2710 sets may be the same. */
2711 CLEAR_REGNO_REG_SET (pbi->cond_local_set, i);
2712 if (cond != NULL_RTX
2713 && ! REGNO_REG_SET_P (pbi->local_set, i))
2714 SET_REGNO_REG_SET (pbi->cond_local_set, i);
2715 else
2716 SET_REGNO_REG_SET (pbi->local_set, i);
2718 if (code != CLOBBER)
2719 SET_REGNO_REG_SET (pbi->new_set, i);
2721 some_was_live |= needed_regno;
2722 some_was_dead |= ! needed_regno;
2725 #ifdef HAVE_conditional_execution
2726 /* Consider conditional death in deciding that the register needs
2727 a death note. */
2728 if (some_was_live && ! not_dead
2729 /* The stack pointer is never dead. Well, not strictly true,
2730 but it's very difficult to tell from here. Hopefully
2731 combine_stack_adjustments will fix up the most egregious
2732 errors. */
2733 && regno_first != STACK_POINTER_REGNUM)
2735 for (i = regno_first; i <= regno_last; ++i)
2736 if (! mark_regno_cond_dead (pbi, i, cond))
2737 not_dead |= ((unsigned long) 1) << (i - regno_first);
2739 #endif
2741 /* Additional data to record if this is the final pass. */
2742 if (flags & (PROP_LOG_LINKS | PROP_REG_INFO
2743 | PROP_DEATH_NOTES | PROP_AUTOINC))
2745 rtx y;
2746 int blocknum = pbi->bb->index;
2748 y = NULL_RTX;
2749 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
2751 y = pbi->reg_next_use[regno_first];
2753 /* The next use is no longer next, since a store intervenes. */
2754 for (i = regno_first; i <= regno_last; ++i)
2755 pbi->reg_next_use[i] = 0;
2758 if (flags & PROP_REG_INFO)
2760 for (i = regno_first; i <= regno_last; ++i)
2762 /* Count (weighted) references, stores, etc. This counts a
2763 register twice if it is modified, but that is correct. */
2764 REG_N_SETS (i) += 1;
2765 REG_N_REFS (i) += 1;
2766 REG_FREQ (i) += REG_FREQ_FROM_BB (pbi->bb);
2768 /* The insns where a reg is live are normally counted
2769 elsewhere, but we want the count to include the insn
2770 where the reg is set, and the normal counting mechanism
2771 would not count it. */
2772 REG_LIVE_LENGTH (i) += 1;
2775 /* If this is a hard reg, record that this function uses the reg. */
2776 if (regno_first < FIRST_PSEUDO_REGISTER)
2778 for (i = regno_first; i <= regno_last; i++)
2779 regs_ever_live[i] = 1;
2781 else
2783 /* Keep track of which basic blocks each reg appears in. */
2784 if (REG_BASIC_BLOCK (regno_first) == REG_BLOCK_UNKNOWN)
2785 REG_BASIC_BLOCK (regno_first) = blocknum;
2786 else if (REG_BASIC_BLOCK (regno_first) != blocknum)
2787 REG_BASIC_BLOCK (regno_first) = REG_BLOCK_GLOBAL;
2791 if (! some_was_dead)
2793 if (flags & PROP_LOG_LINKS)
2795 /* Make a logical link from the next following insn
2796 that uses this register, back to this insn.
2797 The following insns have already been processed.
2799 We don't build a LOG_LINK for hard registers contained
2800 in ASM_OPERANDs. If these registers get replaced,
2801 we might wind up changing the semantics of the insn,
2802 even if reload can make what appear to be valid
2803 assignments later. */
2804 if (y && (BLOCK_NUM (y) == blocknum)
2805 && (regno_first >= FIRST_PSEUDO_REGISTER
2806 || asm_noperands (PATTERN (y)) < 0))
2807 LOG_LINKS (y) = alloc_INSN_LIST (insn, LOG_LINKS (y));
2810 else if (not_dead)
2811 ;
2812 else if (! some_was_live)
2814 if (flags & PROP_REG_INFO)
2815 REG_N_DEATHS (regno_first) += 1;
2817 if (flags & PROP_DEATH_NOTES)
2819 /* Note that dead stores have already been deleted
2820 when possible. If we get here, we have found a
2821 dead store that cannot be eliminated (because the
2822 same insn does something useful). Indicate this
2823 by marking the reg being set as dying here. */
2824 REG_NOTES (insn)
2825 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
2828 else
2830 if (flags & PROP_DEATH_NOTES)
2832 /* This is a case where we have a multi-word hard register
2833 and some, but not all, of the words of the register are
2834 needed in subsequent insns. Write REG_UNUSED notes
2835 for those parts that were not needed. This case should
2836 be rare. */
2838 for (i = regno_first; i <= regno_last; ++i)
2839 if (! REGNO_REG_SET_P (pbi->reg_live, i))
2840 REG_NOTES (insn)
2841 = alloc_EXPR_LIST (REG_UNUSED,
2842 regno_reg_rtx[i],
2843 REG_NOTES (insn));
2848 /* Mark the register as being dead. */
2849 if (some_was_live
2850 /* The stack pointer is never dead. Well, not strictly true,
2851 but it's very difficult to tell from here. Hopefully
2852 combine_stack_adjustments will fix up the most egregious
2853 errors. */
2854 && regno_first != STACK_POINTER_REGNUM)
2856 for (i = regno_first; i <= regno_last; ++i)
2857 if (!(not_dead & (((unsigned long) 1) << (i - regno_first))))
2858 CLEAR_REGNO_REG_SET (pbi->reg_live, i);
2861 else if (GET_CODE (reg) == REG)
2863 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
2864 pbi->reg_next_use[regno_first] = 0;
2867 /* If this is the last pass and this is a SCRATCH, show it will be dying
2868 here and count it. */
2869 else if (GET_CODE (reg) == SCRATCH)
2871 if (flags & PROP_DEATH_NOTES)
2872 REG_NOTES (insn)
2873 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
2877 #ifdef HAVE_conditional_execution
2878 /* Mark REGNO conditionally dead.
2879 Return true if the register is now unconditionally dead. */
2881 static int
2882 mark_regno_cond_dead (pbi, regno, cond)
2883 struct propagate_block_info *pbi;
2884 int regno;
2885 rtx cond;
2887 /* If this is a store to a predicate register, the value of the
2888 predicate is changing; we don't know that the predicate as seen
2889 before is the same as that seen after. Flush all dependent
2890 conditions from reg_cond_dead. This will make all such
2891 conditionally live registers unconditionally live. */
2892 if (REGNO_REG_SET_P (pbi->reg_cond_reg, regno))
2893 flush_reg_cond_reg (pbi, regno);
2895 /* If this is an unconditional store, remove any conditional
2896 life that may have existed. */
2897 if (cond == NULL_RTX)
2898 splay_tree_remove (pbi->reg_cond_dead, regno);
2899 else
2901 splay_tree_node node;
2902 struct reg_cond_life_info *rcli;
2903 rtx ncond;
2905 /* Otherwise this is a conditional set. Record that fact.
2906 It may have been conditionally used, or there may be a
2907 subsequent set with a complementary condition. */
2909 node = splay_tree_lookup (pbi->reg_cond_dead, regno);
2910 if (node == NULL)
2912 /* The register was unconditionally live previously.
2913 Record the current condition as the condition under
2914 which it is dead. */
2915 rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
2916 rcli->condition = cond;
2917 rcli->stores = cond;
2918 rcli->orig_condition = const0_rtx;
2919 splay_tree_insert (pbi->reg_cond_dead, regno,
2920 (splay_tree_value) rcli);
2922 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));
2924 /* Not unconditionally dead. */
2925 return 0;
2927 else
2929 /* The register was conditionally live previously.
2930 Add the new condition to the old. */
2931 rcli = (struct reg_cond_life_info *) node->value;
2932 ncond = rcli->condition;
2933 ncond = ior_reg_cond (ncond, cond, 1);
2934 if (rcli->stores == const0_rtx)
2935 rcli->stores = cond;
2936 else if (rcli->stores != const1_rtx)
2937 rcli->stores = ior_reg_cond (rcli->stores, cond, 1);
2939 /* If the register is now unconditionally dead, remove the entry
2940 in the splay_tree. A register is unconditionally dead if the
2941 dead condition ncond is true. A register is also unconditionally
2942 dead if the sum of all conditional stores is an unconditional
2943 store (stores is true), and the dead condition is identically the
2944 same as the original dead condition initialized at the end of
2945 the block. This is a pointer compare, not an rtx_equal_p
2946 compare. */
2947 if (ncond == const1_rtx
2948 || (ncond == rcli->orig_condition && rcli->stores == const1_rtx))
2949 splay_tree_remove (pbi->reg_cond_dead, regno);
2950 else
2952 rcli->condition = ncond;
2954 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));
2956 /* Not unconditionally dead. */
2957 return 0;
2962 return 1;
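/* Worked example (illustrative, not from the original sources): scanning
   backwards, suppose reg 300 is first seen being set under the condition
   (eq (reg p6) (const_int 0)).  mark_regno_cond_dead records that condition
   in reg_cond_dead and answers "not unconditionally dead".  If an earlier
   insn sets reg 300 under (ne (reg p6) (const_int 0)), ior_reg_cond of the
   two conditions simplifies to const1_rtx, the splay-tree entry is removed,
   and reg 300 is treated as unconditionally dead above that point.  The
   register numbers are invented.  */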
2965 /* Called from splay_tree_delete for pbi->reg_cond_dead. */
2967 static void
2968 free_reg_cond_life_info (value)
2969 splay_tree_value value;
2971 struct reg_cond_life_info *rcli = (struct reg_cond_life_info *) value;
2972 free (rcli);
2975 /* Helper function for flush_reg_cond_reg. */
2977 static int
2978 flush_reg_cond_reg_1 (node, data)
2979 splay_tree_node node;
2980 void *data;
2982 struct reg_cond_life_info *rcli;
2983 int *xdata = (int *) data;
2984 unsigned int regno = xdata[0];
2986 /* Don't need to search if last flushed value was farther on in
2987 the in-order traversal. */
2988 if (xdata[1] >= (int) node->key)
2989 return 0;
2991 /* Splice out portions of the expression that refer to regno. */
2992 rcli = (struct reg_cond_life_info *) node->value;
2993 rcli->condition = elim_reg_cond (rcli->condition, regno);
2994 if (rcli->stores != const0_rtx && rcli->stores != const1_rtx)
2995 rcli->stores = elim_reg_cond (rcli->stores, regno);
2997 /* If the entire condition is now false, signal the node to be removed. */
2998 if (rcli->condition == const0_rtx)
3000 xdata[1] = node->key;
3001 return -1;
3003 else if (rcli->condition == const1_rtx)
3004 abort ();
3006 return 0;
3009 /* Flush all (sub) expressions referring to REGNO from PBI->REG_COND_DEAD. */
3011 static void
3012 flush_reg_cond_reg (pbi, regno)
3013 struct propagate_block_info *pbi;
3014 int regno;
3016 int pair[2];
3018 pair[0] = regno;
3019 pair[1] = -1;
3020 while (splay_tree_foreach (pbi->reg_cond_dead,
3021 flush_reg_cond_reg_1, pair) == -1)
3022 splay_tree_remove (pbi->reg_cond_dead, pair[1]);
3024 CLEAR_REGNO_REG_SET (pbi->reg_cond_reg, regno);
3027 /* Logical arithmetic on predicate conditions. IOR, NOT and AND.
3028 For ior/and, the ADD flag determines whether we want to add the new
3029 condition X to the old one unconditionally. If it is zero, we will
3030 only return a new expression if X allows us to simplify part of
3031 OLD, otherwise we return NULL to the caller.
3032 If ADD is nonzero, we will return a new condition in all cases. The
3033 toplevel caller of one of these functions should always pass 1 for
3034 ADD. */
3036 static rtx
3037 ior_reg_cond (old, x, add)
3038 rtx old, x;
3039 int add;
3041 rtx op0, op1;
3043 if (GET_RTX_CLASS (GET_CODE (old)) == '<')
3045 if (GET_RTX_CLASS (GET_CODE (x)) == '<'
3046 && REVERSE_CONDEXEC_PREDICATES_P (GET_CODE (x), GET_CODE (old))
3047 && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
3048 return const1_rtx;
3049 if (GET_CODE (x) == GET_CODE (old)
3050 && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
3051 return old;
3052 if (! add)
3053 return NULL;
3054 return gen_rtx_IOR (0, old, x);
3057 switch (GET_CODE (old))
3059 case IOR:
3060 op0 = ior_reg_cond (XEXP (old, 0), x, 0);
3061 op1 = ior_reg_cond (XEXP (old, 1), x, 0);
3062 if (op0 != NULL || op1 != NULL)
3064 if (op0 == const0_rtx)
3065 return op1 ? op1 : gen_rtx_IOR (0, XEXP (old, 1), x);
3066 if (op1 == const0_rtx)
3067 return op0 ? op0 : gen_rtx_IOR (0, XEXP (old, 0), x);
3068 if (op0 == const1_rtx || op1 == const1_rtx)
3069 return const1_rtx;
3070 if (op0 == NULL)
3071 op0 = gen_rtx_IOR (0, XEXP (old, 0), x);
3072 else if (rtx_equal_p (x, op0))
3073 /* (x | A) | x ~ (x | A). */
3074 return old;
3075 if (op1 == NULL)
3076 op1 = gen_rtx_IOR (0, XEXP (old, 1), x);
3077 else if (rtx_equal_p (x, op1))
3078 /* (A | x) | x ~ (A | x). */
3079 return old;
3080 return gen_rtx_IOR (0, op0, op1);
3082 if (! add)
3083 return NULL;
3084 return gen_rtx_IOR (0, old, x);
3086 case AND:
3087 op0 = ior_reg_cond (XEXP (old, 0), x, 0);
3088 op1 = ior_reg_cond (XEXP (old, 1), x, 0);
3089 if (op0 != NULL || op1 != NULL)
3091 if (op0 == const1_rtx)
3092 return op1 ? op1 : gen_rtx_IOR (0, XEXP (old, 1), x);
3093 if (op1 == const1_rtx)
3094 return op0 ? op0 : gen_rtx_IOR (0, XEXP (old, 0), x);
3095 if (op0 == const0_rtx || op1 == const0_rtx)
3096 return const0_rtx;
3097 if (op0 == NULL)
3098 op0 = gen_rtx_IOR (0, XEXP (old, 0), x);
3099 else if (rtx_equal_p (x, op0))
3100 /* (x & A) | x ~ x. */
3101 return op0;
3102 if (op1 == NULL)
3103 op1 = gen_rtx_IOR (0, XEXP (old, 1), x);
3104 else if (rtx_equal_p (x, op1))
3105 /* (A & x) | x ~ x. */
3106 return op1;
3107 return gen_rtx_AND (0, op0, op1);
3109 if (! add)
3110 return NULL;
3111 return gen_rtx_IOR (0, old, x);
3113 case NOT:
3114 op0 = and_reg_cond (XEXP (old, 0), not_reg_cond (x), 0);
3115 if (op0 != NULL)
3116 return not_reg_cond (op0);
3117 if (! add)
3118 return NULL;
3119 return gen_rtx_IOR (0, old, x);
3121 default:
3122 abort ();
3126 static rtx
3127 not_reg_cond (x)
3128 rtx x;
3130 enum rtx_code x_code;
3132 if (x == const0_rtx)
3133 return const1_rtx;
3134 else if (x == const1_rtx)
3135 return const0_rtx;
3136 x_code = GET_CODE (x);
3137 if (x_code == NOT)
3138 return XEXP (x, 0);
3139 if (GET_RTX_CLASS (x_code) == '<'
3140 && GET_CODE (XEXP (x, 0)) == REG)
3142 if (XEXP (x, 1) != const0_rtx)
3143 abort ();
3145 return gen_rtx_fmt_ee (reverse_condition (x_code),
3146 VOIDmode, XEXP (x, 0), const0_rtx);
3148 return gen_rtx_NOT (0, x);
3151 static rtx
3152 and_reg_cond (old, x, add)
3153 rtx old, x;
3154 int add;
3156 rtx op0, op1;
3158 if (GET_RTX_CLASS (GET_CODE (old)) == '<')
3160 if (GET_RTX_CLASS (GET_CODE (x)) == '<'
3161 && GET_CODE (x) == reverse_condition (GET_CODE (old))
3162 && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
3163 return const0_rtx;
3164 if (GET_CODE (x) == GET_CODE (old)
3165 && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
3166 return old;
3167 if (! add)
3168 return NULL;
3169 return gen_rtx_AND (0, old, x);
3172 switch (GET_CODE (old))
3174 case IOR:
3175 op0 = and_reg_cond (XEXP (old, 0), x, 0);
3176 op1 = and_reg_cond (XEXP (old, 1), x, 0);
3177 if (op0 != NULL || op1 != NULL)
3179 if (op0 == const0_rtx)
3180 return op1 ? op1 : gen_rtx_AND (0, XEXP (old, 1), x);
3181 if (op1 == const0_rtx)
3182 return op0 ? op0 : gen_rtx_AND (0, XEXP (old, 0), x);
3183 if (op0 == const1_rtx || op1 == const1_rtx)
3184 return const1_rtx;
3185 if (op0 == NULL)
3186 op0 = gen_rtx_AND (0, XEXP (old, 0), x);
3187 else if (rtx_equal_p (x, op0))
3188 /* (x | A) & x ~ x. */
3189 return op0;
3190 if (op1 == NULL)
3191 op1 = gen_rtx_AND (0, XEXP (old, 1), x);
3192 else if (rtx_equal_p (x, op1))
3193 /* (A | x) & x ~ x. */
3194 return op1;
3195 return gen_rtx_IOR (0, op0, op1);
3197 if (! add)
3198 return NULL;
3199 return gen_rtx_AND (0, old, x);
3201 case AND:
3202 op0 = and_reg_cond (XEXP (old, 0), x, 0);
3203 op1 = and_reg_cond (XEXP (old, 1), x, 0);
3204 if (op0 != NULL || op1 != NULL)
3206 if (op0 == const1_rtx)
3207 return op1 ? op1 : gen_rtx_AND (0, XEXP (old, 1), x);
3208 if (op1 == const1_rtx)
3209 return op0 ? op0 : gen_rtx_AND (0, XEXP (old, 0), x);
3210 if (op0 == const0_rtx || op1 == const0_rtx)
3211 return const0_rtx;
3212 if (op0 == NULL)
3213 op0 = gen_rtx_AND (0, XEXP (old, 0), x);
3214 else if (rtx_equal_p (x, op0))
3215 /* (x & A) & x ~ (x & A). */
3216 return old;
3217 if (op1 == NULL)
3218 op1 = gen_rtx_AND (0, XEXP (old, 1), x);
3219 else if (rtx_equal_p (x, op1))
3220 /* (A & x) & x ~ (A & x). */
3221 return old;
3222 return gen_rtx_AND (0, op0, op1);
3224 if (! add)
3225 return NULL;
3226 return gen_rtx_AND (0, old, x);
3228 case NOT:
3229 op0 = ior_reg_cond (XEXP (old, 0), not_reg_cond (x), 0);
3230 if (op0 != NULL)
3231 return not_reg_cond (op0);
3232 if (! add)
3233 return NULL;
3234 return gen_rtx_AND (0, old, x);
3236 default:
3237 abort ();
3241 /* Given a condition X, remove references to reg REGNO and return the
3242 new condition. The removal will be done so that all conditions
3243 involving REGNO are considered to evaluate to false. This function
3244 is used when the value of REGNO changes. */
3246 static rtx
3247 elim_reg_cond (x, regno)
3248 rtx x;
3249 unsigned int regno;
3251 rtx op0, op1;
3253 if (GET_RTX_CLASS (GET_CODE (x)) == '<')
3255 if (REGNO (XEXP (x, 0)) == regno)
3256 return const0_rtx;
3257 return x;
3260 switch (GET_CODE (x))
3262 case AND:
3263 op0 = elim_reg_cond (XEXP (x, 0), regno);
3264 op1 = elim_reg_cond (XEXP (x, 1), regno);
3265 if (op0 == const0_rtx || op1 == const0_rtx)
3266 return const0_rtx;
3267 if (op0 == const1_rtx)
3268 return op1;
3269 if (op1 == const1_rtx)
3270 return op0;
3271 if (op0 == XEXP (x, 0) && op1 == XEXP (x, 1))
3272 return x;
3273 return gen_rtx_AND (0, op0, op1);
3275 case IOR:
3276 op0 = elim_reg_cond (XEXP (x, 0), regno);
3277 op1 = elim_reg_cond (XEXP (x, 1), regno);
3278 if (op0 == const1_rtx || op1 == const1_rtx)
3279 return const1_rtx;
3280 if (op0 == const0_rtx)
3281 return op1;
3282 if (op1 == const0_rtx)
3283 return op0;
3284 if (op0 == XEXP (x, 0) && op1 == XEXP (x, 1))
3285 return x;
3286 return gen_rtx_IOR (0, op0, op1);
3288 case NOT:
3289 op0 = elim_reg_cond (XEXP (x, 0), regno);
3290 if (op0 == const0_rtx)
3291 return const1_rtx;
3292 if (op0 == const1_rtx)
3293 return const0_rtx;
3294 if (op0 != XEXP (x, 0))
3295 return not_reg_cond (op0);
3296 return x;
3298 default:
3299 abort ();
3302 #endif /* HAVE_conditional_execution */
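/* Illustrative example (not from the original sources): if the recorded
   death condition of some register is

       (ior (eq (reg p6) (const_int 0)) (ne (reg p7) (const_int 0)))

   and p6 is then stored to, elim_reg_cond treats every comparison involving
   p6 as false, so the condition collapses to (ne (reg p7) (const_int 0)).
   The predicate register names are invented.  */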
3304 #ifdef AUTO_INC_DEC
3306 /* Try to substitute the auto-inc expression INC as the address inside
3307 MEM which occurs in INSN. Currently, the address of MEM is an expression
3308 involving INCR_REG, and INCR is the next use of INCR_REG; it is an insn
3309 that has a single set whose source is a PLUS of INCR_REG and something
3310 else. */
3312 static void
3313 attempt_auto_inc (pbi, inc, insn, mem, incr, incr_reg)
3314 struct propagate_block_info *pbi;
3315 rtx inc, insn, mem, incr, incr_reg;
3317 int regno = REGNO (incr_reg);
3318 rtx set = single_set (incr);
3319 rtx q = SET_DEST (set);
3320 rtx y = SET_SRC (set);
3321 int opnum = XEXP (y, 0) == incr_reg ? 0 : 1;
3323 /* Make sure this reg appears only once in this insn. */
3324 if (count_occurrences (PATTERN (insn), incr_reg, 1) != 1)
3325 return;
3327 if (dead_or_set_p (incr, incr_reg)
3328 /* Mustn't autoinc an eliminable register. */
3329 && (regno >= FIRST_PSEUDO_REGISTER
3330 || ! TEST_HARD_REG_BIT (elim_reg_set, regno)))
3332 /* This is the simple case. Try to make the auto-inc. If
3333 we can't, we are done. Otherwise, we will do any
3334 needed updates below. */
3335 if (! validate_change (insn, &XEXP (mem, 0), inc, 0))
3336 return;
3338 else if (GET_CODE (q) == REG
3339 /* PREV_INSN used here to check the semi-open interval
3340 [insn,incr). */
3341 && ! reg_used_between_p (q, PREV_INSN (insn), incr)
3342 /* We must also check for sets of q as q may be
3343 a call clobbered hard register and there may
3344 be a call between PREV_INSN (insn) and incr. */
3345 && ! reg_set_between_p (q, PREV_INSN (insn), incr))
3347 /* We have *p followed sometime later by q = p+size.
3348 Both p and q must be live afterward,
3349 and q is not used between INSN and its assignment.
3350 Change it to q = p, ...*q..., q = q+size.
3351 Then fall into the usual case. */
3352 rtx insns, temp;
3354 start_sequence ();
3355 emit_move_insn (q, incr_reg);
3356 insns = get_insns ();
3357 end_sequence ();
3359 /* If we can't make the auto-inc, or can't make the
3360 replacement into Y, exit. There's no point in making
3361 the change below if we can't do the auto-inc and doing
3362 so is not correct in the pre-inc case. */
3364 XEXP (inc, 0) = q;
3365 validate_change (insn, &XEXP (mem, 0), inc, 1);
3366 validate_change (incr, &XEXP (y, opnum), q, 1);
3367 if (! apply_change_group ())
3368 return;
3370 /* We now know we'll be doing this change, so emit the
3371 new insn(s) and do the updates. */
3372 emit_insn_before (insns, insn);
3374 if (pbi->bb->head == insn)
3375 pbi->bb->head = insns;
3377 /* INCR will become a NOTE and INSN won't contain a
3378 use of INCR_REG. If a use of INCR_REG was just placed in
3379 the insn before INSN, make that the next use.
3380 Otherwise, invalidate it. */
3381 if (GET_CODE (PREV_INSN (insn)) == INSN
3382 && GET_CODE (PATTERN (PREV_INSN (insn))) == SET
3383 && SET_SRC (PATTERN (PREV_INSN (insn))) == incr_reg)
3384 pbi->reg_next_use[regno] = PREV_INSN (insn);
3385 else
3386 pbi->reg_next_use[regno] = 0;
3388 incr_reg = q;
3389 regno = REGNO (q);
3391 /* REGNO is now used in INCR which is below INSN, but
3392 it previously wasn't live here. If we don't mark
3393 it as live, we'll put a REG_DEAD note for it
3394 on this insn, which is incorrect. */
3395 SET_REGNO_REG_SET (pbi->reg_live, regno);
3397 /* If there are any calls between INSN and INCR, show
3398 that REGNO now crosses them. */
3399 for (temp = insn; temp != incr; temp = NEXT_INSN (temp))
3400 if (GET_CODE (temp) == CALL_INSN)
3401 REG_N_CALLS_CROSSED (regno)++;
3403 /* Invalidate alias info for Q since we just changed its value. */
3404 clear_reg_alias_info (q);
3406 else
3407 return;
3409 /* If we haven't returned, it means we were able to make the
3410 auto-inc, so update the status. First, record that this insn
3411 has an implicit side effect. */
3413 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, incr_reg, REG_NOTES (insn));
3415 /* Modify the old increment-insn to simply copy
3416 the already-incremented value of our register. */
3417 if (! validate_change (incr, &SET_SRC (set), incr_reg, 0))
3418 abort ();
3420 /* If that makes it a no-op (copying the register into itself) delete
3421 it so it won't appear to be a "use" and a "set" of this
3422 register. */
3423 if (REGNO (SET_DEST (set)) == REGNO (incr_reg))
3425 /* If the original source was dead, it's dead now. */
3426 rtx note;
3428 while ((note = find_reg_note (incr, REG_DEAD, NULL_RTX)) != NULL_RTX)
3430 remove_note (incr, note);
3431 if (XEXP (note, 0) != incr_reg)
3432 CLEAR_REGNO_REG_SET (pbi->reg_live, REGNO (XEXP (note, 0)));
3435 PUT_CODE (incr, NOTE);
3436 NOTE_LINE_NUMBER (incr) = NOTE_INSN_DELETED;
3437 NOTE_SOURCE_FILE (incr) = 0;
3440 if (regno >= FIRST_PSEUDO_REGISTER)
3442 /* Count an extra reference to the reg. When a reg is
3443 incremented, spilling it is worse, so we want to make
3444 that less likely. */
3445 REG_FREQ (regno) += REG_FREQ_FROM_BB (pbi->bb);
3447 /* Count the increment as a setting of the register,
3448 even though it isn't a SET in rtl. */
3449 REG_N_SETS (regno)++;
3453 /* X is a MEM found in INSN. See if we can convert it into an auto-increment
3454 reference. */
3456 static void
3457 find_auto_inc (pbi, x, insn)
3458 struct propagate_block_info *pbi;
3459 rtx x;
3460 rtx insn;
3462 rtx addr = XEXP (x, 0);
3463 HOST_WIDE_INT offset = 0;
3464 rtx set, y, incr, inc_val;
3465 int regno;
3466 int size = GET_MODE_SIZE (GET_MODE (x));
3468 if (GET_CODE (insn) == JUMP_INSN)
3469 return;
3471 /* Here we detect use of an index register which might be good for
3472 postincrement, postdecrement, preincrement, or predecrement. */
3474 if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
3475 offset = INTVAL (XEXP (addr, 1)), addr = XEXP (addr, 0);
3477 if (GET_CODE (addr) != REG)
3478 return;
3480 regno = REGNO (addr);
3482 /* Is the next use an increment that might make auto-increment? */
3483 incr = pbi->reg_next_use[regno];
3484 if (incr == 0 || BLOCK_NUM (incr) != BLOCK_NUM (insn))
3485 return;
3486 set = single_set (incr);
3487 if (set == 0 || GET_CODE (set) != SET)
3488 return;
3489 y = SET_SRC (set);
3491 if (GET_CODE (y) != PLUS)
3492 return;
3494 if (REG_P (XEXP (y, 0)) && REGNO (XEXP (y, 0)) == REGNO (addr))
3495 inc_val = XEXP (y, 1);
3496 else if (REG_P (XEXP (y, 1)) && REGNO (XEXP (y, 1)) == REGNO (addr))
3497 inc_val = XEXP (y, 0);
3498 else
3499 return;
3501 if (GET_CODE (inc_val) == CONST_INT)
3503 if (HAVE_POST_INCREMENT
3504 && (INTVAL (inc_val) == size && offset == 0))
3505 attempt_auto_inc (pbi, gen_rtx_POST_INC (Pmode, addr), insn, x,
3506 incr, addr);
3507 else if (HAVE_POST_DECREMENT
3508 && (INTVAL (inc_val) == -size && offset == 0))
3509 attempt_auto_inc (pbi, gen_rtx_POST_DEC (Pmode, addr), insn, x,
3510 incr, addr);
3511 else if (HAVE_PRE_INCREMENT
3512 && (INTVAL (inc_val) == size && offset == size))
3513 attempt_auto_inc (pbi, gen_rtx_PRE_INC (Pmode, addr), insn, x,
3514 incr, addr);
3515 else if (HAVE_PRE_DECREMENT
3516 && (INTVAL (inc_val) == -size && offset == -size))
3517 attempt_auto_inc (pbi, gen_rtx_PRE_DEC (Pmode, addr), insn, x,
3518 incr, addr);
3519 else if (HAVE_POST_MODIFY_DISP && offset == 0)
3520 attempt_auto_inc (pbi, gen_rtx_POST_MODIFY (Pmode, addr,
3521 gen_rtx_PLUS (Pmode,
3522 addr,
3523 inc_val)),
3524 insn, x, incr, addr);
3526 else if (GET_CODE (inc_val) == REG
3527 && ! reg_set_between_p (inc_val, PREV_INSN (insn),
3528 NEXT_INSN (incr)))
3531 if (HAVE_POST_MODIFY_REG && offset == 0)
3532 attempt_auto_inc (pbi, gen_rtx_POST_MODIFY (Pmode, addr,
3533 gen_rtx_PLUS (Pmode,
3534 addr,
3535 inc_val)),
3536 insn, x, incr, addr);
3540 #endif /* AUTO_INC_DEC */
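/* Illustrative example of the transformation above (not from the original
   sources).  On a target with HAVE_POST_INCREMENT, the pair

       (set (mem:SI (reg 140)) (reg 141))                ;; *p = x
       ...
       (set (reg 140) (plus:SI (reg 140) (const_int 4))) ;; p += 4

   is rewritten by find_auto_inc/attempt_auto_inc into

       (set (mem:SI (post_inc (reg 140))) (reg 141))

   the increment insn is reduced to a copy of reg 140 into itself and then
   deleted (turned into a NOTE_INSN_DELETED), and a REG_INC note for reg 140
   is added to the combined insn.  Register numbers are invented.  */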
3542 static void
3543 mark_used_reg (pbi, reg, cond, insn)
3544 struct propagate_block_info *pbi;
3545 rtx reg;
3546 rtx cond ATTRIBUTE_UNUSED;
3547 rtx insn;
3549 unsigned int regno_first, regno_last, i;
3550 int some_was_live, some_was_dead, some_not_set;
3552 regno_last = regno_first = REGNO (reg);
3553 if (regno_first < FIRST_PSEUDO_REGISTER)
3554 regno_last += HARD_REGNO_NREGS (regno_first, GET_MODE (reg)) - 1;
3556 /* Find out if any of this register is live after this instruction. */
3557 some_was_live = some_was_dead = 0;
3558 for (i = regno_first; i <= regno_last; ++i)
3560 int needed_regno = REGNO_REG_SET_P (pbi->reg_live, i);
3561 some_was_live |= needed_regno;
3562 some_was_dead |= ! needed_regno;
3565 /* Find out if any of the register was set this insn. */
3566 some_not_set = 0;
3567 for (i = regno_first; i <= regno_last; ++i)
3568 some_not_set |= ! REGNO_REG_SET_P (pbi->new_set, i);
3570 if (pbi->flags & (PROP_LOG_LINKS | PROP_AUTOINC))
3572 /* Record where each reg is used, so when the reg is set we know
3573 the next insn that uses it. */
3574 pbi->reg_next_use[regno_first] = insn;
3577 if (pbi->flags & PROP_REG_INFO)
3579 if (regno_first < FIRST_PSEUDO_REGISTER)
3581 /* If this is a register we are going to try to eliminate,
3582 don't mark it live here. If we are successful in
3583 eliminating it, it need not be live unless it is used for
3584 pseudos, in which case it will have been set live when it
3585 was allocated to the pseudos. If the register will not
3586 be eliminated, reload will set it live at that point.
3588 Otherwise, record that this function uses this register. */
3589 /* ??? The PPC backend tries to "eliminate" on the pic
3590 register to itself. This should be fixed. In the mean
3591 time, hack around it. */
3593 if (! (TEST_HARD_REG_BIT (elim_reg_set, regno_first)
3594 && (regno_first == FRAME_POINTER_REGNUM
3595 || regno_first == ARG_POINTER_REGNUM)))
3596 for (i = regno_first; i <= regno_last; ++i)
3597 regs_ever_live[i] = 1;
3599 else
3601 /* Keep track of which basic block each reg appears in. */
3603 int blocknum = pbi->bb->index;
3604 if (REG_BASIC_BLOCK (regno_first) == REG_BLOCK_UNKNOWN)
3605 REG_BASIC_BLOCK (regno_first) = blocknum;
3606 else if (REG_BASIC_BLOCK (regno_first) != blocknum)
3607 REG_BASIC_BLOCK (regno_first) = REG_BLOCK_GLOBAL;
3609 /* Count (weighted) number of uses of each reg. */
3610 REG_FREQ (regno_first) += REG_FREQ_FROM_BB (pbi->bb);
3611 REG_N_REFS (regno_first)++;
3615 /* Record and count the insns in which a reg dies. If it is used in
3616 this insn and was dead below the insn then it dies in this insn.
3617 If it was set in this insn, we do not make a REG_DEAD note;
3618 likewise if we already made such a note. */
3619 if ((pbi->flags & (PROP_DEATH_NOTES | PROP_REG_INFO))
3620 && some_was_dead
3621 && some_not_set)
3623 /* Check for the case where the register dying partially
3624 overlaps the register set by this insn. */
3625 if (regno_first != regno_last)
3626 for (i = regno_first; i <= regno_last; ++i)
3627 some_was_live |= REGNO_REG_SET_P (pbi->new_set, i);
3629 /* If none of the words in X is needed, make a REG_DEAD note.
3630 Otherwise, we must make partial REG_DEAD notes. */
3631 if (! some_was_live)
3633 if ((pbi->flags & PROP_DEATH_NOTES)
3634 && ! find_regno_note (insn, REG_DEAD, regno_first))
3635 REG_NOTES (insn)
3636 = alloc_EXPR_LIST (REG_DEAD, reg, REG_NOTES (insn));
3638 if (pbi->flags & PROP_REG_INFO)
3639 REG_N_DEATHS (regno_first)++;
3641 else
3643 /* Don't make a REG_DEAD note for a part of a register
3644 that is set in the insn. */
3645 for (i = regno_first; i <= regno_last; ++i)
3646 if (! REGNO_REG_SET_P (pbi->reg_live, i)
3647 && ! dead_or_set_regno_p (insn, i))
3648 REG_NOTES (insn)
3649 = alloc_EXPR_LIST (REG_DEAD,
3650 regno_reg_rtx[i],
3651 REG_NOTES (insn));
3655 /* Mark the register as being live. */
3656 for (i = regno_first; i <= regno_last; ++i)
3658 #ifdef HAVE_conditional_execution
3659 int this_was_live = REGNO_REG_SET_P (pbi->reg_live, i);
3660 #endif
3662 SET_REGNO_REG_SET (pbi->reg_live, i);
3664 #ifdef HAVE_conditional_execution
3665 /* If this is a conditional use, record that fact. If it is later
3666 conditionally set, we'll know to kill the register. */
3667 if (cond != NULL_RTX)
3669 splay_tree_node node;
3670 struct reg_cond_life_info *rcli;
3671 rtx ncond;
3673 if (this_was_live)
3675 node = splay_tree_lookup (pbi->reg_cond_dead, i);
3676 if (node == NULL)
3678 /* The register was unconditionally live previously.
3679 No need to do anything. */
3681 else
3683 /* The register was conditionally live previously.
3684 Subtract the new life cond from the old death cond. */
3685 rcli = (struct reg_cond_life_info *) node->value;
3686 ncond = rcli->condition;
3687 ncond = and_reg_cond (ncond, not_reg_cond (cond), 1);
3689 /* If the register is now unconditionally live,
3690 remove the entry in the splay_tree. */
3691 if (ncond == const0_rtx)
3692 splay_tree_remove (pbi->reg_cond_dead, i);
3693 else
3695 rcli->condition = ncond;
3696 SET_REGNO_REG_SET (pbi->reg_cond_reg,
3697 REGNO (XEXP (cond, 0)));
3701 else
3703 /* The register was not previously live at all. Record
3704 the condition under which it is still dead. */
3705 rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
3706 rcli->condition = not_reg_cond (cond);
3707 rcli->stores = const0_rtx;
3708 rcli->orig_condition = const0_rtx;
3709 splay_tree_insert (pbi->reg_cond_dead, i,
3710 (splay_tree_value) rcli);
3712 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));
3715 else if (this_was_live)
3717 /* The register may have been conditionally live previously, but
3718 is now unconditionally live. Remove it from the conditionally
3719 dead list, so that a conditional set won't cause us to think
3720 it dead. */
3721 splay_tree_remove (pbi->reg_cond_dead, i);
3723 #endif
3727 /* Scan expression X and set a 1-bit in PBI->REG_LIVE for each reg it
3728 uses, i.e. mark those registers as live before this insn.
3731 INSN is the containing instruction. If INSN is dead, this function
3732 is not called. */
3734 static void
3735 mark_used_regs (pbi, x, cond, insn)
3736 struct propagate_block_info *pbi;
3737 rtx x, cond, insn;
3739 RTX_CODE code;
3740 int regno;
3741 int flags = pbi->flags;
3743 retry:
3744 if (!x)
3745 return;
3746 code = GET_CODE (x);
3747 switch (code)
3749 case LABEL_REF:
3750 case SYMBOL_REF:
3751 case CONST_INT:
3752 case CONST:
3753 case CONST_DOUBLE:
3754 case CONST_VECTOR:
3755 case PC:
3756 case ADDR_VEC:
3757 case ADDR_DIFF_VEC:
3758 return;
3760 #ifdef HAVE_cc0
3761 case CC0:
3762 pbi->cc0_live = 1;
3763 return;
3764 #endif
3766 case CLOBBER:
3767 /* If we are clobbering a MEM, mark any registers inside the address
3768 as being used. */
3769 if (GET_CODE (XEXP (x, 0)) == MEM)
3770 mark_used_regs (pbi, XEXP (XEXP (x, 0), 0), cond, insn);
3771 return;
3773 case MEM:
3774 /* Don't bother watching stores to mems if this is not the
3775 final pass. We'll not be deleting dead stores this round. */
3776 if (optimize && (flags & PROP_SCAN_DEAD_STORES))
3778 /* Invalidate the data for the last MEM stored, but only if MEM is
3779 something that can be stored into. */
3780 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
3781 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
3782 /* Needn't clear the memory set list. */
3784 else
3786 rtx temp = pbi->mem_set_list;
3787 rtx prev = NULL_RTX;
3788 rtx next;
3790 while (temp)
3792 next = XEXP (temp, 1);
3793 if (anti_dependence (XEXP (temp, 0), x))
3795 /* Splice temp out of the list. */
3796 if (prev)
3797 XEXP (prev, 1) = next;
3798 else
3799 pbi->mem_set_list = next;
3800 free_EXPR_LIST_node (temp);
3801 pbi->mem_set_list_len--;
3803 else
3804 prev = temp;
3805 temp = next;
3809 /* If the memory reference had embedded side effects (autoincrement
3810 address modes), then we may need to kill some entries on the
3811 memory set list. */
3812 if (insn)
3813 for_each_rtx (&PATTERN (insn), invalidate_mems_from_autoinc, pbi);
3816 #ifdef AUTO_INC_DEC
3817 if (flags & PROP_AUTOINC)
3818 find_auto_inc (pbi, x, insn);
3819 #endif
3820 break;
3822 case SUBREG:
3823 #ifdef CANNOT_CHANGE_MODE_CLASS
3824 if (GET_CODE (SUBREG_REG (x)) == REG
3825 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
3826 SET_REGNO_REG_SET (&subregs_of_mode[GET_MODE (x)],
3827 REGNO (SUBREG_REG (x)));
3828 #endif
3830 /* While we're here, optimize this case. */
3831 x = SUBREG_REG (x);
3832 if (GET_CODE (x) != REG)
3833 goto retry;
3834 /* Fall through. */
3836 case REG:
3837 /* Seeing a register used (other than being set) => mark it as needed. */
3838 mark_used_reg (pbi, x, cond, insn);
3839 return;
3841 case SET:
3843 rtx testreg = SET_DEST (x);
3844 int mark_dest = 0;
3846 /* If storing into MEM, don't show it as being used. But do
3847 show the address as being used. */
3848 if (GET_CODE (testreg) == MEM)
3850 #ifdef AUTO_INC_DEC
3851 if (flags & PROP_AUTOINC)
3852 find_auto_inc (pbi, testreg, insn);
3853 #endif
3854 mark_used_regs (pbi, XEXP (testreg, 0), cond, insn);
3855 mark_used_regs (pbi, SET_SRC (x), cond, insn);
3856 return;
3859 /* Storing in STRICT_LOW_PART is like storing in a reg
3860 in that this SET might be dead, so ignore it in TESTREG,
3861 but in some other ways it is like using the reg.
3863 Storing in a SUBREG or a bit field is like storing the entire
3864 register in that if the register's value is not used
3865 then this SET is not needed. */
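/* Two illustrative cases (register numbers invented): a store to
   (subreg:SF (reg:SI 100) 0) replaces the whole single-word register,
   so it does not count as a use of the old value and the SET may be
   dead if reg 100 is not otherwise needed; a store to
   (strict_low_part (subreg:QI (reg:SI 100) 0)) leaves the upper bits
   intact, so MARK_DEST is set and the destination is scanned as a use
   as well.  */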
3866 while (GET_CODE (testreg) == STRICT_LOW_PART
3867 || GET_CODE (testreg) == ZERO_EXTRACT
3868 || GET_CODE (testreg) == SIGN_EXTRACT
3869 || GET_CODE (testreg) == SUBREG)
3871 #ifdef CANNOT_CHANGE_MODE_CLASS
3872 if (GET_CODE (testreg) == SUBREG
3873 && GET_CODE (SUBREG_REG (testreg)) == REG
3874 && REGNO (SUBREG_REG (testreg)) >= FIRST_PSEUDO_REGISTER)
3875 SET_REGNO_REG_SET (&subregs_of_mode[GET_MODE (testreg)],
3876 REGNO (SUBREG_REG (testreg)));
3877 #endif
3879 /* Modifying a single register in an alternate mode
3880 does not use any of the old value. But these other
3881 ways of storing in a register do use the old value. */
3882 if (GET_CODE (testreg) == SUBREG
3883 && !((REG_BYTES (SUBREG_REG (testreg))
3884 + UNITS_PER_WORD - 1) / UNITS_PER_WORD
3885 > (REG_BYTES (testreg)
3886 + UNITS_PER_WORD - 1) / UNITS_PER_WORD))
3888 else
3889 mark_dest = 1;
3891 testreg = XEXP (testreg, 0);
3894 /* If this is a store into a register or group of registers,
3895 recursively scan the value being stored. */
3897 if ((GET_CODE (testreg) == PARALLEL
3898 && GET_MODE (testreg) == BLKmode)
3899 || (GET_CODE (testreg) == REG
3900 && (regno = REGNO (testreg),
3901 ! (regno == FRAME_POINTER_REGNUM
3902 && (! reload_completed || frame_pointer_needed)))
3903 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3904 && ! (regno == HARD_FRAME_POINTER_REGNUM
3905 && (! reload_completed || frame_pointer_needed))
3906 #endif
3907 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3908 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
3909 #endif
3912 if (mark_dest)
3913 mark_used_regs (pbi, SET_DEST (x), cond, insn);
3914 mark_used_regs (pbi, SET_SRC (x), cond, insn);
3915 return;
3918 break;
3920 case ASM_OPERANDS:
3921 case UNSPEC_VOLATILE:
3922 case TRAP_IF:
3923 case ASM_INPUT:
3925 /* Traditional and volatile asm instructions must be considered to use
3926 and clobber all hard registers, all pseudo-registers and all of
3927 memory. So must TRAP_IF and UNSPEC_VOLATILE operations.
3929 Consider for instance a volatile asm that changes the fpu rounding
3930 mode. An insn should not be moved across this even if it only uses
3931 pseudo-regs because it might give an incorrectly rounded result.
3933 ?!? Unfortunately, marking all hard registers as live causes massive
3934 problems for the register allocator and marking all pseudos as live
3935 creates mountains of uninitialized variable warnings.
3937 So for now, just clear the memory set list and mark any regs
3938 we can find in ASM_OPERANDS as used. */
3939 if (code != ASM_OPERANDS || MEM_VOLATILE_P (x))
3941 free_EXPR_LIST_list (&pbi->mem_set_list);
3942 pbi->mem_set_list_len = 0;
3945 /* For all ASM_OPERANDS, we must traverse the vector of input operands.
3946 We cannot just fall through here since then we would be confused
3947 by the ASM_INPUT rtxs inside ASM_OPERANDS, which do not indicate
3948 traditional asms as they do in their normal usage. */
3949 if (code == ASM_OPERANDS)
3951 int j;
3953 for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
3954 mark_used_regs (pbi, ASM_OPERANDS_INPUT (x, j), cond, insn);
3956 break;
3959 case COND_EXEC:
3960 if (cond != NULL_RTX)
3961 abort ();
3963 mark_used_regs (pbi, COND_EXEC_TEST (x), NULL_RTX, insn);
3965 cond = COND_EXEC_TEST (x);
3966 x = COND_EXEC_CODE (x);
3967 goto retry;
3969 case PHI:
3970 /* We _do_not_ want to scan operands of phi nodes. Operands of
3971 a phi function are evaluated only when control reaches this
3972 block along a particular edge. Therefore, regs that appear
3973 as arguments to a phi should not be added to the block's
3974 global_live_at_start set. */
3975 return;
3977 default:
3978 break;
3981 /* Recursively scan the operands of this expression. */
3984 const char * const fmt = GET_RTX_FORMAT (code);
3985 int i;
3987 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3989 if (fmt[i] == 'e')
3991 /* Tail recursive case: save a function call level. */
3992 if (i == 0)
3994 x = XEXP (x, 0);
3995 goto retry;
3997 mark_used_regs (pbi, XEXP (x, i), cond, insn);
3999 else if (fmt[i] == 'E')
4001 int j;
4002 for (j = 0; j < XVECLEN (x, i); j++)
4003 mark_used_regs (pbi, XVECEXP (x, i, j), cond, insn);
4009 #ifdef AUTO_INC_DEC
4011 static int
4012 try_pre_increment_1 (pbi, insn)
4013 struct propagate_block_info *pbi;
4014 rtx insn;
4016 /* Find the next use of this reg. If it is in the same basic block,
4017 make it do pre-increment or pre-decrement if appropriate. */
4018 rtx x = single_set (insn);
4019 HOST_WIDE_INT amount = ((GET_CODE (SET_SRC (x)) == PLUS ? 1 : -1)
4020 * INTVAL (XEXP (SET_SRC (x), 1)));
4021 int regno = REGNO (SET_DEST (x));
4022 rtx y = pbi->reg_next_use[regno];
4023 if (y != 0
4024 && SET_DEST (x) != stack_pointer_rtx
4025 && BLOCK_NUM (y) == BLOCK_NUM (insn)
4026 /* Don't do this if the reg dies, or gets set in y; a standard addressing
4027 mode would be better. */
4028 && ! dead_or_set_p (y, SET_DEST (x))
4029 && try_pre_increment (y, SET_DEST (x), amount))
4031 /* We have found a suitable auto-increment and already changed
4032 insn Y to do it. So flush this increment instruction. */
4033 propagate_block_delete_insn (insn);
4035 /* Count a reference to this reg for the increment insn we are
4036 deleting. When a reg is incremented, spilling it is worse,
4037 so we want to make that less likely. */
4038 if (regno >= FIRST_PSEUDO_REGISTER)
4040 REG_FREQ (regno) += REG_FREQ_FROM_BB (pbi->bb);
4041 REG_N_SETS (regno)++;
4044 /* Flush any remembered memories depending on the value of
4045 the incremented register. */
4046 invalidate_mems_from_set (pbi, SET_DEST (x));
4048 return 1;
4050 return 0;
4053 /* Try to change INSN so that it does pre-increment or pre-decrement
4054 addressing on register REG in order to add AMOUNT to REG.
4055 AMOUNT is negative for pre-decrement.
4056 Returns 1 if the change could be made.
4057 This checks every aspect of the validity of the result of modifying INSN. */
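/* As a sketch of the intended transformation (the register numbers and
   the increment are invented): given

       (set (reg:SI 100) (plus:SI (reg:SI 100) (const_int 4)))
       ... (mem:SI (reg:SI 100)) ...

   on a target with HAVE_PRE_INCREMENT, the later memory reference can be
   rewritten to use (mem:SI (pre_inc:SI (reg:SI 100))), the separate add
   is deleted by try_pre_increment_1, and a REG_INC note is added to the
   rewritten insn to record the side effect on reg 100.  */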
4059 static int
4060 try_pre_increment (insn, reg, amount)
4061 rtx insn, reg;
4062 HOST_WIDE_INT amount;
4064 rtx use;
4066 /* Nonzero if we can try to make a pre-increment or pre-decrement.
4067 For example, addl $4,r1; movl (r1),... can become movl +(r1),... */
4068 int pre_ok = 0;
4069 /* Nonzero if we can try to make a post-increment or post-decrement.
4070 For example, addl $4,r1; movl -4(r1),... can become movl (r1)+,...
4071 It is possible for both PRE_OK and POST_OK to be nonzero if the machine
4072 supports both pre-inc and post-inc, or both pre-dec and post-dec. */
4073 int post_ok = 0;
4075 /* Nonzero if the opportunity actually requires post-inc or post-dec. */
4076 int do_post = 0;
4078 /* From the sign of increment, see which possibilities are conceivable
4079 on this target machine. */
4080 if (HAVE_PRE_INCREMENT && amount > 0)
4081 pre_ok = 1;
4082 if (HAVE_POST_INCREMENT && amount > 0)
4083 post_ok = 1;
4085 if (HAVE_PRE_DECREMENT && amount < 0)
4086 pre_ok = 1;
4087 if (HAVE_POST_DECREMENT && amount < 0)
4088 post_ok = 1;
4090 if (! (pre_ok || post_ok))
4091 return 0;
4093 /* It is not safe to add a side effect to a jump insn
4094 because if the incremented register is spilled and must be reloaded
4095 there would be no way to store the incremented value back in memory. */
4097 if (GET_CODE (insn) == JUMP_INSN)
4098 return 0;
4100 use = 0;
4101 if (pre_ok)
4102 use = find_use_as_address (PATTERN (insn), reg, 0);
4103 if (post_ok && (use == 0 || use == (rtx) (size_t) 1))
4105 use = find_use_as_address (PATTERN (insn), reg, -amount);
4106 do_post = 1;
4109 if (use == 0 || use == (rtx) (size_t) 1)
4110 return 0;
4112 if (GET_MODE_SIZE (GET_MODE (use)) != (amount > 0 ? amount : - amount))
4113 return 0;
4115 /* See if this combination of instruction and addressing mode exists. */
4116 if (! validate_change (insn, &XEXP (use, 0),
4117 gen_rtx_fmt_e (amount > 0
4118 ? (do_post ? POST_INC : PRE_INC)
4119 : (do_post ? POST_DEC : PRE_DEC),
4120 Pmode, reg), 0))
4121 return 0;
4123 /* Record that this insn now has an implicit side effect on X. */
4124 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, reg, REG_NOTES (insn));
4125 return 1;
4128 #endif /* AUTO_INC_DEC */
4130 /* Find the place in the rtx X where REG is used as a memory address.
4131 Return the MEM rtx that so uses it.
4132 If PLUSCONST is nonzero, search instead for a memory address equivalent to
4133 (plus REG (const_int PLUSCONST)).
4135 If such an address does not appear, return 0.
4136 If REG appears more than once, or is used other than in such an address,
4137 return (rtx) 1. */
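/* For instance (an invented pattern): if X is
   (set (reg:SI 3) (mem:SI (plus:SI (reg:SI 2) (const_int 4)))), then
   find_use_as_address (X, <reg 2>, 4) returns that MEM, whereas
   find_use_as_address (X, <reg 2>, 0) returns (rtx) 1, because reg 2
   does appear but not as a plain memory address.  */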
4139 rtx
4140 find_use_as_address (x, reg, plusconst)
4141 rtx x;
4142 rtx reg;
4143 HOST_WIDE_INT plusconst;
4145 enum rtx_code code = GET_CODE (x);
4146 const char * const fmt = GET_RTX_FORMAT (code);
4147 int i;
4148 rtx value = 0;
4149 rtx tem;
4151 if (code == MEM && XEXP (x, 0) == reg && plusconst == 0)
4152 return x;
4154 if (code == MEM && GET_CODE (XEXP (x, 0)) == PLUS
4155 && XEXP (XEXP (x, 0), 0) == reg
4156 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4157 && INTVAL (XEXP (XEXP (x, 0), 1)) == plusconst)
4158 return x;
4160 if (code == SIGN_EXTRACT || code == ZERO_EXTRACT)
4162 /* If REG occurs inside a MEM used in a bit-field reference,
4163 that is unacceptable. */
4164 if (find_use_as_address (XEXP (x, 0), reg, 0) != 0)
4165 return (rtx) (size_t) 1;
4168 if (x == reg)
4169 return (rtx) (size_t) 1;
4171 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4173 if (fmt[i] == 'e')
4175 tem = find_use_as_address (XEXP (x, i), reg, plusconst);
4176 if (value == 0)
4177 value = tem;
4178 else if (tem != 0)
4179 return (rtx) (size_t) 1;
4181 else if (fmt[i] == 'E')
4183 int j;
4184 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4186 tem = find_use_as_address (XVECEXP (x, i, j), reg, plusconst);
4187 if (value == 0)
4188 value = tem;
4189 else if (tem != 0)
4190 return (rtx) (size_t) 1;
4195 return value;
4198 /* Write the contents of the register set R into file OUTF.
4199 This is part of making a debugging dump. */
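/* The output is one entry per register, e.g. " 0 [r0] 1 [r1] 66 104"
   on a hypothetical target whose first hard registers are named r0 and
   r1; hard registers get their name in brackets, pseudos are printed
   by number only.  */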
4201 void
4202 dump_regset (r, outf)
4203 regset r;
4204 FILE *outf;
4206 int i;
4207 if (r == NULL)
4209 fputs (" (nil)", outf);
4210 return;
4213 EXECUTE_IF_SET_IN_REG_SET (r, 0, i,
4215 fprintf (outf, " %d", i);
4216 if (i < FIRST_PSEUDO_REGISTER)
4217 fprintf (outf, " [%s]",
4218 reg_names[i]);
4222 /* Print a human-readable representation of R on the standard error
4223 stream. This function is designed to be used from within the
4224 debugger. */
4226 void
4227 debug_regset (r)
4228 regset r;
4230 dump_regset (r, stderr);
4231 putc ('\n', stderr);
4234 /* Recompute register set/reference counts immediately prior to register
4235 allocation.
4237 This avoids problems with set/reference counts changing to/from values
4238 which have special meanings to the register allocators.
4240 Additionally, the reference counts are the primary component used by the
4241 register allocators to prioritize pseudos for allocation to hard regs.
4242 More accurate reference counts generally lead to better register allocation.
4244 F is the first insn to be scanned.
4246 LOOP_STEP denotes how much loop_depth should be incremented per
4247 loop nesting level in order to increase the ref count more for
4248 references in a loop.
4250 It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and
4251 possibly other information which is used by the register allocators. */
4253 void
4254 recompute_reg_usage (f, loop_step)
4255 rtx f ATTRIBUTE_UNUSED;
4256 int loop_step ATTRIBUTE_UNUSED;
4258 allocate_reg_life_data ();
4259 update_life_info (NULL, UPDATE_LIFE_LOCAL, PROP_REG_INFO);
4262 /* Optionally removes all the REG_DEAD and REG_UNUSED notes from a set of
4263 blocks. If BLOCKS is NULL, assume the universal set. Returns a count
4264 of the number of registers that died. */
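/* Illustrative calls: count_or_remove_death_notes (NULL, 0) only counts
   the registers mentioned in REG_DEAD notes over the whole function,
   while count_or_remove_death_notes (blocks, 1) additionally deletes the
   REG_DEAD and REG_UNUSED notes in the blocks whose bits are set in
   BLOCKS, returning the same count.  */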
4266 int
4267 count_or_remove_death_notes (blocks, kill)
4268 sbitmap blocks;
4269 int kill;
4271 int count = 0;
4272 basic_block bb;
4274 FOR_EACH_BB_REVERSE (bb)
4276 rtx insn;
4278 if (blocks && ! TEST_BIT (blocks, bb->index))
4279 continue;
4281 for (insn = bb->head;; insn = NEXT_INSN (insn))
4283 if (INSN_P (insn))
4285 rtx *pprev = &REG_NOTES (insn);
4286 rtx link = *pprev;
4288 while (link)
4290 switch (REG_NOTE_KIND (link))
4292 case REG_DEAD:
4293 if (GET_CODE (XEXP (link, 0)) == REG)
4295 rtx reg = XEXP (link, 0);
4296 int n;
4298 if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
4299 n = 1;
4300 else
4301 n = HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg));
4302 count += n;
4304 /* Fall through. */
4306 case REG_UNUSED:
4307 if (kill)
4309 rtx next = XEXP (link, 1);
4310 free_EXPR_LIST_node (link);
4311 *pprev = link = next;
4312 break;
4314 /* Fall through. */
4316 default:
4317 pprev = &XEXP (link, 1);
4318 link = *pprev;
4319 break;
4324 if (insn == bb->end)
4325 break;
4329 return count;
4331 /* Clear the LOG_LINKS fields of insns in the selected blocks, or in the
4332 whole chain if BLOCKS is NULL. */
4334 static void
4335 clear_log_links (blocks)
4336 sbitmap blocks;
4338 rtx insn;
4339 int i;
4341 if (!blocks)
4343 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4344 if (INSN_P (insn))
4345 free_INSN_LIST_list (&LOG_LINKS (insn));
4347 else
4348 EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
4350 basic_block bb = BASIC_BLOCK (i);
4352 for (insn = bb->head; insn != NEXT_INSN (bb->end);
4353 insn = NEXT_INSN (insn))
4354 if (INSN_P (insn))
4355 free_INSN_LIST_list (&LOG_LINKS (insn));
4359 /* Given a register bitmap, turn on the bits in a HARD_REG_SET that
4360 correspond to the hard registers, if any, set in that map. This
4361 could be done far more efficiently by having all sorts of special cases
4362 for moving single words, but probably isn't worth the trouble. */
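/* A typical use (a sketch; the caller and variable are hypothetical):

       HARD_REG_SET hard_live;
       CLEAR_HARD_REG_SET (hard_live);
       reg_set_to_hard_reg_set (&hard_live, bb->global_live_at_start);

   afterwards HARD_LIVE contains exactly the hard registers live on
   entry to BB; any pseudo registers in the bitmap are skipped.  */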
4364 void
4365 reg_set_to_hard_reg_set (to, from)
4366 HARD_REG_SET *to;
4367 bitmap from;
4369 int i;
4371 EXECUTE_IF_SET_IN_BITMAP
4372 (from, 0, i,
4374 if (i >= FIRST_PSEUDO_REGISTER)
4375 return;
4376 SET_HARD_REG_BIT (*to, i);