PR rtl-optimization/20070 / part1
[official-gcc.git] / gcc / flow.c
blobf958bfdb66e7880a088c9d9b9b28251007c40970
1 /* Data flow analysis for GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
20 02110-1301, USA. */
22 /* This file contains the data flow analysis pass of the compiler. It
23 computes data flow information which tells combine_instructions
24 which insns to consider combining and controls register allocation.
26 Additional data flow information that is too bulky to record is
27 generated during the analysis, and is used at that time to create
28 autoincrement and autodecrement addressing.
30 The first step is dividing the function into basic blocks.
31 find_basic_blocks does this. Then life_analysis determines
32 where each register is live and where it is dead.
34 ** find_basic_blocks **
36 find_basic_blocks divides the current function's rtl into basic
37 blocks and constructs the CFG. The blocks are recorded in the
38 basic_block_info array; the CFG exists in the edge structures
39 referenced by the blocks.
41 find_basic_blocks also finds any unreachable loops and deletes them.
43 ** life_analysis **
45 life_analysis is called immediately after find_basic_blocks.
46 It uses the basic block information to determine where each
47 hard or pseudo register is live.
49 ** live-register info **
51 The information about where each register is live is in two parts:
52 the REG_NOTES of insns, and the vector basic_block->global_live_at_start.
54 basic_block->global_live_at_start has an element for each basic
55 block, and the element is a bit-vector with a bit for each hard or
56 pseudo register. The bit is 1 if the register is live at the
57 beginning of the basic block.
59 Two types of elements can be added to an insn's REG_NOTES.
60 A REG_DEAD note is added to an insn's REG_NOTES for any register
61 that meets both of two conditions: The value in the register is not
62 needed in subsequent insns and the insn does not replace the value in
63 the register (in the case of multi-word hard registers, the value in
64 each register must be replaced by the insn to avoid a REG_DEAD note).
66 In the vast majority of cases, an object in a REG_DEAD note will be
67 used somewhere in the insn. The (rare) exception to this is if an
68 insn uses a multi-word hard register and only some of the registers are
69 needed in subsequent insns. In that case, REG_DEAD notes will be
70 provided for those hard registers that are not subsequently needed.
71 Partial REG_DEAD notes of this type do not occur when an insn sets
72 only some of the hard registers used in such a multi-word operand;
73 omitting REG_DEAD notes for objects stored in an insn is optional and
74 the desire to do so does not justify the complexity of the partial
75 REG_DEAD notes.
77 REG_UNUSED notes are added for each register that is set by the insn
78 but is unused subsequently (if every register set by the insn is unused
79 and the insn does not reference memory or have some other side-effect,
80 the insn is deleted instead). If only part of a multi-word hard
81 register is used in a subsequent insn, REG_UNUSED notes are made for
82 the parts that will not be used.
84 To determine which registers are live after any insn, one can
85 start from the beginning of the basic block and scan insns, noting
86 which registers are set by each insn and which die there.
88 ** Other actions of life_analysis **
90 life_analysis sets up the LOG_LINKS fields of insns because the
91 information needed to do so is readily available.
93 life_analysis deletes insns whose only effect is to store a value
94 that is never used.
96 life_analysis notices cases where a reference to a register as
97 a memory address can be combined with a preceding or following
98 incrementation or decrementation of the register. The separate
99 instruction to increment or decrement is deleted and the address
100 is changed to a POST_INC or similar rtx.
102 Each time an incrementing or decrementing address is created,
103 a REG_INC element is added to the insn's REG_NOTES list.
105 life_analysis fills in certain vectors containing information about
106 register usage: REG_N_REFS, REG_N_DEATHS, REG_N_SETS, REG_LIVE_LENGTH,
107 REG_N_CALLS_CROSSED, REG_N_THROWING_CALLS_CROSSED and REG_BASIC_BLOCK.
109 life_analysis sets current_function_sp_is_unchanging if the function
110 doesn't modify the stack pointer. */
112 /* TODO:
114 Split out from life_analysis:
115 - local property discovery
116 - global property computation
117 - log links creation
118 - pre/post modify transformation
121 #include "config.h"
122 #include "system.h"
123 #include "coretypes.h"
124 #include "tm.h"
125 #include "tree.h"
126 #include "rtl.h"
127 #include "tm_p.h"
128 #include "hard-reg-set.h"
129 #include "basic-block.h"
130 #include "insn-config.h"
131 #include "regs.h"
132 #include "flags.h"
133 #include "output.h"
134 #include "function.h"
135 #include "except.h"
136 #include "toplev.h"
137 #include "recog.h"
138 #include "expr.h"
139 #include "timevar.h"
141 #include "obstack.h"
142 #include "splay-tree.h"
143 #include "tree-pass.h"
144 #include "params.h"
146 #ifndef HAVE_epilogue
147 #define HAVE_epilogue 0
148 #endif
149 #ifndef HAVE_prologue
150 #define HAVE_prologue 0
151 #endif
152 #ifndef HAVE_sibcall_epilogue
153 #define HAVE_sibcall_epilogue 0
154 #endif
156 #ifndef EPILOGUE_USES
157 #define EPILOGUE_USES(REGNO) 0
158 #endif
159 #ifndef EH_USES
160 #define EH_USES(REGNO) 0
161 #endif
163 #ifdef HAVE_conditional_execution
164 #ifndef REVERSE_CONDEXEC_PREDICATES_P
165 #define REVERSE_CONDEXEC_PREDICATES_P(x, y) \
166 (GET_CODE ((x)) == reversed_comparison_code ((y), NULL))
167 #endif
168 #endif
170 /* This is the maximum number of times we process any given block if the
171 latest loop depth count is smaller than this number. Only used for the
172 failure strategy to avoid infinite loops in calculate_global_regs_live. */
173 #define MAX_LIVENESS_ROUNDS 20
175 /* Nonzero if the second flow pass has completed. */
176 int flow2_completed;
178 /* Maximum register number used in this function, plus one. */
180 int max_regno;
182 /* Indexed by n, giving various register information */
184 varray_type reg_n_info;
186 /* Regset of regs live when calls to `setjmp'-like functions happen. */
187 /* ??? Does this exist only for the setjmp-clobbered warning message? */
189 static regset regs_live_at_setjmp;
191 /* List made of EXPR_LIST rtx's which gives pairs of pseudo registers
192 that have to go in the same hard reg.
193 The first two regs in the list are a pair, and the next two
194 are another pair, etc. */
195 rtx regs_may_share;
197 /* Set of registers that may be eliminable. These are handled specially
198 in updating regs_ever_live. */
200 static HARD_REG_SET elim_reg_set;
202 /* Holds information for tracking conditional register life information. */
203 struct reg_cond_life_info
205 /* A boolean expression of conditions under which a register is dead. */
206 rtx condition;
207 /* Conditions under which a register is dead at the basic block end. */
208 rtx orig_condition;
210 /* A boolean expression of conditions under which a register has been
211 stored into. */
212 rtx stores;
214 /* ??? Could store mask of bytes that are dead, so that we could finally
215 track lifetimes of multi-word registers accessed via subregs. */
218 /* For use in communicating between propagate_block and its subroutines.
219 Holds all information needed to compute life and def-use information. */
221 struct propagate_block_info
223 /* The basic block we're considering. */
224 basic_block bb;
226 /* Bit N is set if register N is conditionally or unconditionally live. */
227 regset reg_live;
229 /* Bit N is set if register N is set this insn. */
230 regset new_set;
232 /* Element N is the next insn that uses (hard or pseudo) register N
233 within the current basic block; or zero, if there is no such insn. */
234 rtx *reg_next_use;
236 /* Contains a list of all the MEMs we are tracking for dead store
237 elimination. */
238 rtx mem_set_list;
240 /* If non-null, record the set of registers set unconditionally in the
241 basic block. */
242 regset local_set;
244 /* If non-null, record the set of registers set conditionally in the
245 basic block. */
246 regset cond_local_set;
248 #ifdef HAVE_conditional_execution
249 /* Indexed by register number, holds a reg_cond_life_info for each
250 register that is not unconditionally live or dead. */
251 splay_tree reg_cond_dead;
253 /* Bit N is set if register N is in an expression in reg_cond_dead. */
254 regset reg_cond_reg;
255 #endif
257 /* The length of mem_set_list. */
258 int mem_set_list_len;
260 /* Nonzero if the value of CC0 is live. */
261 int cc0_live;
263 /* Flags controlling the set of information propagate_block collects. */
264 int flags;
265 /* Index of instruction being processed. */
266 int insn_num;
/* Number of dead insns removed.  */
static int ndead;

/* When PROP_REG_INFO is set, this array contains the pbi->insn_num of the
   instruction where a given register died.  When the register is later
   marked alive, we use that information to compute how many instructions
   its live range crosses (remember, we are walking backward), as the
   current pbi->insn_num - reg_deaths[regno].
   At the end of processing each basic block, the remaining live registers
   are inspected and their live ranges are increased the same way, so the
   live ranges of global registers are computed correctly.

   The array is kept clear for dead registers, so it can be safely reused
   for the next basic block without an expensive memset of the whole array
   after resetting pbi->insn_num to 0.  */
static int *reg_deaths;
287 /* Forward declarations */
288 static int verify_wide_reg_1 (rtx *, void *);
289 static void verify_wide_reg (int, basic_block);
290 static void verify_local_live_at_start (regset, basic_block);
291 static void notice_stack_pointer_modification_1 (rtx, rtx, void *);
292 static void notice_stack_pointer_modification (void);
293 static void mark_reg (rtx, void *);
294 static void mark_regs_live_at_end (regset);
295 static void calculate_global_regs_live (sbitmap, sbitmap, int);
296 static void propagate_block_delete_insn (rtx);
297 static rtx propagate_block_delete_libcall (rtx, rtx);
298 static int insn_dead_p (struct propagate_block_info *, rtx, int, rtx);
299 static int libcall_dead_p (struct propagate_block_info *, rtx, rtx);
300 static void mark_set_regs (struct propagate_block_info *, rtx, rtx);
301 static void mark_set_1 (struct propagate_block_info *, enum rtx_code, rtx,
302 rtx, rtx, int);
303 static int find_regno_partial (rtx *, void *);
305 #ifdef HAVE_conditional_execution
306 static int mark_regno_cond_dead (struct propagate_block_info *, int, rtx);
307 static void free_reg_cond_life_info (splay_tree_value);
308 static int flush_reg_cond_reg_1 (splay_tree_node, void *);
309 static void flush_reg_cond_reg (struct propagate_block_info *, int);
310 static rtx elim_reg_cond (rtx, unsigned int);
311 static rtx ior_reg_cond (rtx, rtx, int);
312 static rtx not_reg_cond (rtx);
313 static rtx and_reg_cond (rtx, rtx, int);
314 #endif
315 #ifdef AUTO_INC_DEC
316 static void attempt_auto_inc (struct propagate_block_info *, rtx, rtx, rtx,
317 rtx, rtx);
318 static void find_auto_inc (struct propagate_block_info *, rtx, rtx);
319 static int try_pre_increment_1 (struct propagate_block_info *, rtx);
320 static int try_pre_increment (rtx, rtx, HOST_WIDE_INT);
321 #endif
322 static void mark_used_reg (struct propagate_block_info *, rtx, rtx, rtx);
323 static void mark_used_regs (struct propagate_block_info *, rtx, rtx, rtx);
324 void debug_flow_info (void);
325 static void add_to_mem_set_list (struct propagate_block_info *, rtx);
326 static int invalidate_mems_from_autoinc (rtx *, void *);
327 static void invalidate_mems_from_set (struct propagate_block_info *, rtx);
328 static void clear_log_links (sbitmap);
329 static int count_or_remove_death_notes_bb (basic_block, int);
330 static void allocate_bb_life_data (void);
332 /* Return the INSN immediately following the NOTE_INSN_BASIC_BLOCK
333 note associated with the BLOCK. */
336 first_insn_after_basic_block_note (basic_block block)
338 rtx insn;
340 /* Get the first instruction in the block. */
341 insn = BB_HEAD (block);
343 if (insn == NULL_RTX)
344 return NULL_RTX;
345 if (LABEL_P (insn))
346 insn = NEXT_INSN (insn);
347 gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));
349 return NEXT_INSN (insn);
352 /* Perform data flow analysis for the whole control flow graph.
353 FLAGS is a set of PROP_* flags to be used in accumulating flow info. */
355 void
356 life_analysis (FILE *file, int flags)
358 #ifdef ELIMINABLE_REGS
359 int i;
360 static const struct {const int from, to; } eliminables[] = ELIMINABLE_REGS;
361 #endif
363 /* Record which registers will be eliminated. We use this in
364 mark_used_regs. */
366 CLEAR_HARD_REG_SET (elim_reg_set);
368 #ifdef ELIMINABLE_REGS
369 for (i = 0; i < (int) ARRAY_SIZE (eliminables); i++)
370 SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
371 #else
372 SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
373 #endif
376 #ifdef CANNOT_CHANGE_MODE_CLASS
377 if (flags & PROP_REG_INFO)
378 init_subregs_of_mode ();
379 #endif
381 if (! optimize)
382 flags &= ~(PROP_LOG_LINKS | PROP_AUTOINC | PROP_ALLOW_CFG_CHANGES);
384 /* The post-reload life analysis have (on a global basis) the same
385 registers live as was computed by reload itself. elimination
386 Otherwise offsets and such may be incorrect.
388 Reload will make some registers as live even though they do not
389 appear in the rtl.
391 We don't want to create new auto-incs after reload, since they
392 are unlikely to be useful and can cause problems with shared
393 stack slots. */
394 if (reload_completed)
395 flags &= ~(PROP_REG_INFO | PROP_AUTOINC);
397 /* We want alias analysis information for local dead store elimination. */
398 if (optimize && (flags & PROP_SCAN_DEAD_STORES))
399 init_alias_analysis ();
401 /* Always remove no-op moves. Do this before other processing so
402 that we don't have to keep re-scanning them. */
403 delete_noop_moves ();
405 /* Some targets can emit simpler epilogues if they know that sp was
406 not ever modified during the function. After reload, of course,
407 we've already emitted the epilogue so there's no sense searching. */
408 if (! reload_completed)
409 notice_stack_pointer_modification ();
411 /* Allocate and zero out data structures that will record the
412 data from lifetime analysis. */
413 allocate_reg_life_data ();
414 allocate_bb_life_data ();
416 /* Find the set of registers live on function exit. */
417 mark_regs_live_at_end (EXIT_BLOCK_PTR->il.rtl->global_live_at_start);
419 /* "Update" life info from zero. It'd be nice to begin the
420 relaxation with just the exit and noreturn blocks, but that set
421 is not immediately handy. */
423 if (flags & PROP_REG_INFO)
425 memset (regs_ever_live, 0, sizeof (regs_ever_live));
426 memset (regs_asm_clobbered, 0, sizeof (regs_asm_clobbered));
428 update_life_info (NULL, UPDATE_LIFE_GLOBAL, flags);
429 if (reg_deaths)
431 free (reg_deaths);
432 reg_deaths = NULL;
435 /* Clean up. */
436 if (optimize && (flags & PROP_SCAN_DEAD_STORES))
437 end_alias_analysis ();
439 if (file)
440 dump_flow_info (file);
442 /* Removing dead insns should have made jumptables really dead. */
443 delete_dead_jumptables ();
446 /* A subroutine of verify_wide_reg, called through for_each_rtx.
447 Search for REGNO. If found, return 2 if it is not wider than
448 word_mode. */
450 static int
451 verify_wide_reg_1 (rtx *px, void *pregno)
453 rtx x = *px;
454 unsigned int regno = *(int *) pregno;
456 if (REG_P (x) && REGNO (x) == regno)
458 if (GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD)
459 return 2;
460 return 1;
462 return 0;
465 /* A subroutine of verify_local_live_at_start. Search through insns
466 of BB looking for register REGNO. */
468 static void
469 verify_wide_reg (int regno, basic_block bb)
471 rtx head = BB_HEAD (bb), end = BB_END (bb);
473 while (1)
475 if (INSN_P (head))
477 int r = for_each_rtx (&PATTERN (head), verify_wide_reg_1, &regno);
478 if (r == 1)
479 return;
480 if (r == 2)
481 break;
483 if (head == end)
484 break;
485 head = NEXT_INSN (head);
487 if (dump_file)
489 fprintf (dump_file, "Register %d died unexpectedly.\n", regno);
490 dump_bb (bb, dump_file, 0);
492 fatal_error ("internal consistency failure");
495 /* A subroutine of update_life_info. Verify that there are no untoward
496 changes in live_at_start during a local update. */
498 static void
499 verify_local_live_at_start (regset new_live_at_start, basic_block bb)
501 if (reload_completed)
503 /* After reload, there are no pseudos, nor subregs of multi-word
504 registers. The regsets should exactly match. */
505 if (! REG_SET_EQUAL_P (new_live_at_start,
506 bb->il.rtl->global_live_at_start))
508 if (dump_file)
510 fprintf (dump_file,
511 "live_at_start mismatch in bb %d, aborting\nNew:\n",
512 bb->index);
513 debug_bitmap_file (dump_file, new_live_at_start);
514 fputs ("Old:\n", dump_file);
515 dump_bb (bb, dump_file, 0);
517 fatal_error ("internal consistency failure");
520 else
522 unsigned i;
523 reg_set_iterator rsi;
525 /* Find the set of changed registers. */
526 XOR_REG_SET (new_live_at_start, bb->il.rtl->global_live_at_start);
528 EXECUTE_IF_SET_IN_REG_SET (new_live_at_start, 0, i, rsi)
530 /* No registers should die. */
531 if (REGNO_REG_SET_P (bb->il.rtl->global_live_at_start, i))
533 if (dump_file)
535 fprintf (dump_file,
536 "Register %d died unexpectedly.\n", i);
537 dump_bb (bb, dump_file, 0);
539 fatal_error ("internal consistency failure");
541 /* Verify that the now-live register is wider than word_mode. */
542 verify_wide_reg (i, bb);
547 /* Updates life information starting with the basic blocks set in BLOCKS.
548 If BLOCKS is null, consider it to be the universal set.
550 If EXTENT is UPDATE_LIFE_LOCAL, such as after splitting or peepholing,
551 we are only expecting local modifications to basic blocks. If we find
552 extra registers live at the beginning of a block, then we either killed
553 useful data, or we have a broken split that wants data not provided.
554 If we find registers removed from live_at_start, that means we have
555 a broken peephole that is killing a register it shouldn't.
557 ??? This is not true in one situation -- when a pre-reload splitter
558 generates subregs of a multi-word pseudo, current life analysis will
559 lose the kill. So we _can_ have a pseudo go live. How irritating.
561 It is also not true when a peephole decides that it doesn't need one
562 or more of the inputs.
564 Including PROP_REG_INFO does not properly refresh regs_ever_live
565 unless the caller resets it to zero. */
568 update_life_info (sbitmap blocks, enum update_life_extent extent,
569 int prop_flags)
571 regset tmp;
572 unsigned i = 0;
573 int stabilized_prop_flags = prop_flags;
574 basic_block bb;
576 tmp = ALLOC_REG_SET (&reg_obstack);
577 ndead = 0;
579 if ((prop_flags & PROP_REG_INFO) && !reg_deaths)
580 reg_deaths = xcalloc (sizeof (*reg_deaths), max_regno);
582 timevar_push ((extent == UPDATE_LIFE_LOCAL || blocks)
583 ? TV_LIFE_UPDATE : TV_LIFE);
585 /* Changes to the CFG are only allowed when
586 doing a global update for the entire CFG. */
587 gcc_assert (!(prop_flags & PROP_ALLOW_CFG_CHANGES)
588 || (extent != UPDATE_LIFE_LOCAL && !blocks));
590 /* For a global update, we go through the relaxation process again. */
591 if (extent != UPDATE_LIFE_LOCAL)
593 for ( ; ; )
595 int changed = 0;
597 calculate_global_regs_live (blocks, blocks,
598 prop_flags & (PROP_SCAN_DEAD_CODE
599 | PROP_SCAN_DEAD_STORES
600 | PROP_ALLOW_CFG_CHANGES));
602 if ((prop_flags & (PROP_KILL_DEAD_CODE | PROP_ALLOW_CFG_CHANGES))
603 != (PROP_KILL_DEAD_CODE | PROP_ALLOW_CFG_CHANGES))
604 break;
606 /* Removing dead code may allow the CFG to be simplified which
607 in turn may allow for further dead code detection / removal. */
608 FOR_EACH_BB_REVERSE (bb)
610 COPY_REG_SET (tmp, bb->il.rtl->global_live_at_end);
611 changed |= propagate_block (bb, tmp, NULL, NULL,
612 prop_flags & (PROP_SCAN_DEAD_CODE
613 | PROP_SCAN_DEAD_STORES
614 | PROP_KILL_DEAD_CODE));
617 /* Don't pass PROP_SCAN_DEAD_CODE or PROP_KILL_DEAD_CODE to
618 subsequent propagate_block calls, since removing or acting as
619 removing dead code can affect global register liveness, which
620 is supposed to be finalized for this call after this loop. */
621 stabilized_prop_flags
622 &= ~(PROP_SCAN_DEAD_CODE | PROP_SCAN_DEAD_STORES
623 | PROP_KILL_DEAD_CODE);
625 if (! changed)
626 break;
628 /* We repeat regardless of what cleanup_cfg says. If there were
629 instructions deleted above, that might have been only a
630 partial improvement (see PARAM_MAX_FLOW_MEMORY_LOCATIONS usage).
631 Further improvement may be possible. */
632 cleanup_cfg (CLEANUP_EXPENSIVE);
634 /* Zap the life information from the last round. If we don't
635 do this, we can wind up with registers that no longer appear
636 in the code being marked live at entry. */
637 FOR_EACH_BB (bb)
639 CLEAR_REG_SET (bb->il.rtl->global_live_at_start);
640 CLEAR_REG_SET (bb->il.rtl->global_live_at_end);
644 /* If asked, remove notes from the blocks we'll update. */
645 if (extent == UPDATE_LIFE_GLOBAL_RM_NOTES)
646 count_or_remove_death_notes (blocks,
647 prop_flags & PROP_POST_REGSTACK ? -1 : 1);
650 /* Clear log links in case we are asked to (re)compute them. */
651 if (prop_flags & PROP_LOG_LINKS)
652 clear_log_links (blocks);
654 if (blocks)
656 sbitmap_iterator sbi;
658 EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i, sbi)
660 bb = BASIC_BLOCK (i);
662 COPY_REG_SET (tmp, bb->il.rtl->global_live_at_end);
663 propagate_block (bb, tmp, NULL, NULL, stabilized_prop_flags);
665 if (extent == UPDATE_LIFE_LOCAL)
666 verify_local_live_at_start (tmp, bb);
669 else
671 FOR_EACH_BB_REVERSE (bb)
673 COPY_REG_SET (tmp, bb->il.rtl->global_live_at_end);
675 propagate_block (bb, tmp, NULL, NULL, stabilized_prop_flags);
677 if (extent == UPDATE_LIFE_LOCAL)
678 verify_local_live_at_start (tmp, bb);
682 FREE_REG_SET (tmp);
684 if (prop_flags & PROP_REG_INFO)
686 reg_set_iterator rsi;
688 /* The only pseudos that are live at the beginning of the function
689 are those that were not set anywhere in the function. local-alloc
690 doesn't know how to handle these correctly, so mark them as not
691 local to any one basic block. */
692 EXECUTE_IF_SET_IN_REG_SET (ENTRY_BLOCK_PTR->il.rtl->global_live_at_end,
693 FIRST_PSEUDO_REGISTER, i, rsi)
694 REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL;
696 /* We have a problem with any pseudoreg that lives across the setjmp.
697 ANSI says that if a user variable does not change in value between
698 the setjmp and the longjmp, then the longjmp preserves it. This
699 includes longjmp from a place where the pseudo appears dead.
700 (In principle, the value still exists if it is in scope.)
701 If the pseudo goes in a hard reg, some other value may occupy
702 that hard reg where this pseudo is dead, thus clobbering the pseudo.
703 Conclusion: such a pseudo must not go in a hard reg. */
704 EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
705 FIRST_PSEUDO_REGISTER, i, rsi)
707 if (regno_reg_rtx[i] != 0)
709 REG_LIVE_LENGTH (i) = -1;
710 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
714 if (reg_deaths)
716 free (reg_deaths);
717 reg_deaths = NULL;
719 timevar_pop ((extent == UPDATE_LIFE_LOCAL || blocks)
720 ? TV_LIFE_UPDATE : TV_LIFE);
721 if (ndead && dump_file)
722 fprintf (dump_file, "deleted %i dead insns\n", ndead);
723 return ndead;
726 /* Update life information in all blocks where BB_DIRTY is set. */
729 update_life_info_in_dirty_blocks (enum update_life_extent extent, int prop_flags)
731 sbitmap update_life_blocks = sbitmap_alloc (last_basic_block);
732 int n = 0;
733 basic_block bb;
734 int retval = 0;
736 sbitmap_zero (update_life_blocks);
737 FOR_EACH_BB (bb)
739 if (bb->flags & BB_DIRTY)
741 SET_BIT (update_life_blocks, bb->index);
742 n++;
746 if (n)
747 retval = update_life_info (update_life_blocks, extent, prop_flags);
749 sbitmap_free (update_life_blocks);
750 return retval;
753 /* Free the variables allocated by find_basic_blocks. */
755 void
756 free_basic_block_vars (void)
758 if (basic_block_info)
760 clear_edges ();
761 basic_block_info = NULL;
763 n_basic_blocks = 0;
764 last_basic_block = 0;
765 n_edges = 0;
767 label_to_block_map = NULL;
769 ENTRY_BLOCK_PTR->aux = NULL;
770 ENTRY_BLOCK_PTR->il.rtl->global_live_at_end = NULL;
771 EXIT_BLOCK_PTR->aux = NULL;
772 EXIT_BLOCK_PTR->il.rtl->global_live_at_start = NULL;
775 /* Delete any insns that copy a register to itself. */
778 delete_noop_moves (void)
780 rtx insn, next;
781 basic_block bb;
782 int nnoops = 0;
784 FOR_EACH_BB (bb)
786 for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb)); insn = next)
788 next = NEXT_INSN (insn);
789 if (INSN_P (insn) && noop_move_p (insn))
791 rtx note;
793 /* If we're about to remove the first insn of a libcall
794 then move the libcall note to the next real insn and
795 update the retval note. */
796 if ((note = find_reg_note (insn, REG_LIBCALL, NULL_RTX))
797 && XEXP (note, 0) != insn)
799 rtx new_libcall_insn = next_real_insn (insn);
800 rtx retval_note = find_reg_note (XEXP (note, 0),
801 REG_RETVAL, NULL_RTX);
802 REG_NOTES (new_libcall_insn)
803 = gen_rtx_INSN_LIST (REG_LIBCALL, XEXP (note, 0),
804 REG_NOTES (new_libcall_insn));
805 XEXP (retval_note, 0) = new_libcall_insn;
808 delete_insn_and_edges (insn);
809 nnoops++;
813 if (nnoops && dump_file)
814 fprintf (dump_file, "deleted %i noop moves", nnoops);
815 return nnoops;
818 /* Delete any jump tables never referenced. We can't delete them at the
819 time of removing tablejump insn as they are referenced by the preceding
820 insns computing the destination, so we delay deleting and garbagecollect
821 them once life information is computed. */
822 void
823 delete_dead_jumptables (void)
825 basic_block bb;
827 /* A dead jump table does not belong to any basic block. Scan insns
828 between two adjacent basic blocks. */
829 FOR_EACH_BB (bb)
831 rtx insn, next;
833 for (insn = NEXT_INSN (BB_END (bb));
834 insn && !NOTE_INSN_BASIC_BLOCK_P (insn);
835 insn = next)
837 next = NEXT_INSN (insn);
838 if (LABEL_P (insn)
839 && LABEL_NUSES (insn) == LABEL_PRESERVE_P (insn)
840 && JUMP_P (next)
841 && (GET_CODE (PATTERN (next)) == ADDR_VEC
842 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
844 rtx label = insn, jump = next;
846 if (dump_file)
847 fprintf (dump_file, "Dead jumptable %i removed\n",
848 INSN_UID (insn));
850 next = NEXT_INSN (next);
851 delete_insn (jump);
852 delete_insn (label);
858 /* Determine if the stack pointer is constant over the life of the function.
859 Only useful before prologues have been emitted. */
861 static void
862 notice_stack_pointer_modification_1 (rtx x, rtx pat ATTRIBUTE_UNUSED,
863 void *data ATTRIBUTE_UNUSED)
865 if (x == stack_pointer_rtx
866 /* The stack pointer is only modified indirectly as the result
867 of a push until later in flow. See the comments in rtl.texi
868 regarding Embedded Side-Effects on Addresses. */
869 || (MEM_P (x)
870 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == RTX_AUTOINC
871 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx))
872 current_function_sp_is_unchanging = 0;
875 static void
876 notice_stack_pointer_modification (void)
878 basic_block bb;
879 rtx insn;
881 /* Assume that the stack pointer is unchanging if alloca hasn't
882 been used. */
883 current_function_sp_is_unchanging = !current_function_calls_alloca;
884 if (! current_function_sp_is_unchanging)
885 return;
887 FOR_EACH_BB (bb)
888 FOR_BB_INSNS (bb, insn)
890 if (INSN_P (insn))
892 /* Check if insn modifies the stack pointer. */
893 note_stores (PATTERN (insn),
894 notice_stack_pointer_modification_1,
895 NULL);
896 if (! current_function_sp_is_unchanging)
897 return;
902 /* Mark a register in SET. Hard registers in large modes get all
903 of their component registers set as well. */
905 static void
906 mark_reg (rtx reg, void *xset)
908 regset set = (regset) xset;
909 int regno = REGNO (reg);
911 gcc_assert (GET_MODE (reg) != BLKmode);
913 SET_REGNO_REG_SET (set, regno);
914 if (regno < FIRST_PSEUDO_REGISTER)
916 int n = hard_regno_nregs[regno][GET_MODE (reg)];
917 while (--n > 0)
918 SET_REGNO_REG_SET (set, regno + n);
922 /* Mark those regs which are needed at the end of the function as live
923 at the end of the last basic block. */
925 static void
926 mark_regs_live_at_end (regset set)
928 unsigned int i;
930 /* If exiting needs the right stack value, consider the stack pointer
931 live at the end of the function. */
932 if ((HAVE_epilogue && epilogue_completed)
933 || ! EXIT_IGNORE_STACK
934 || (! FRAME_POINTER_REQUIRED
935 && ! current_function_calls_alloca
936 && flag_omit_frame_pointer)
937 || current_function_sp_is_unchanging)
939 SET_REGNO_REG_SET (set, STACK_POINTER_REGNUM);
942 /* Mark the frame pointer if needed at the end of the function. If
943 we end up eliminating it, it will be removed from the live list
944 of each basic block by reload. */
946 if (! reload_completed || frame_pointer_needed)
948 SET_REGNO_REG_SET (set, FRAME_POINTER_REGNUM);
949 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
950 /* If they are different, also mark the hard frame pointer as live. */
951 if (! LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
952 SET_REGNO_REG_SET (set, HARD_FRAME_POINTER_REGNUM);
953 #endif
956 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
957 /* Many architectures have a GP register even without flag_pic.
958 Assume the pic register is not in use, or will be handled by
959 other means, if it is not fixed. */
960 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
961 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
962 SET_REGNO_REG_SET (set, PIC_OFFSET_TABLE_REGNUM);
963 #endif
965 /* Mark all global registers, and all registers used by the epilogue
966 as being live at the end of the function since they may be
967 referenced by our caller. */
968 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
969 if (global_regs[i] || EPILOGUE_USES (i))
970 SET_REGNO_REG_SET (set, i);
972 if (HAVE_epilogue && epilogue_completed)
974 /* Mark all call-saved registers that we actually used. */
975 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
976 if (regs_ever_live[i] && ! LOCAL_REGNO (i)
977 && ! TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
978 SET_REGNO_REG_SET (set, i);
981 #ifdef EH_RETURN_DATA_REGNO
982 /* Mark the registers that will contain data for the handler. */
983 if (reload_completed && current_function_calls_eh_return)
984 for (i = 0; ; ++i)
986 unsigned regno = EH_RETURN_DATA_REGNO(i);
987 if (regno == INVALID_REGNUM)
988 break;
989 SET_REGNO_REG_SET (set, regno);
991 #endif
992 #ifdef EH_RETURN_STACKADJ_RTX
993 if ((! HAVE_epilogue || ! epilogue_completed)
994 && current_function_calls_eh_return)
996 rtx tmp = EH_RETURN_STACKADJ_RTX;
997 if (tmp && REG_P (tmp))
998 mark_reg (tmp, set);
1000 #endif
1001 #ifdef EH_RETURN_HANDLER_RTX
1002 if ((! HAVE_epilogue || ! epilogue_completed)
1003 && current_function_calls_eh_return)
1005 rtx tmp = EH_RETURN_HANDLER_RTX;
1006 if (tmp && REG_P (tmp))
1007 mark_reg (tmp, set);
1009 #endif
1011 /* Mark function return value. */
1012 diddle_return_value (mark_reg, set);
1015 /* Propagate global life info around the graph of basic blocks. Begin
1016 considering blocks with their corresponding bit set in BLOCKS_IN.
1017 If BLOCKS_IN is null, consider it the universal set.
1019 BLOCKS_OUT is set for every block that was changed. */
1021 static void
1022 calculate_global_regs_live (sbitmap blocks_in, sbitmap blocks_out, int flags)
1024 basic_block *queue, *qhead, *qtail, *qend, bb;
1025 regset tmp, new_live_at_end, invalidated_by_call;
1026 regset registers_made_dead;
1027 bool failure_strategy_required = false;
1028 int *block_accesses;
1030 /* The registers that are modified within this block. */
1031 regset *local_sets;
1033 /* The registers that are conditionally modified within this block.
1034 In other words, regs that are set only as part of a COND_EXEC. */
1035 regset *cond_local_sets;
1037 unsigned int i;
1039 /* Some passes used to forget to clear the aux field of basic blocks, causing
1040 sick behavior here. */
1041 #ifdef ENABLE_CHECKING
1042 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
1043 gcc_assert (!bb->aux);
1044 #endif
1046 tmp = ALLOC_REG_SET (&reg_obstack);
1047 new_live_at_end = ALLOC_REG_SET (&reg_obstack);
1048 invalidated_by_call = ALLOC_REG_SET (&reg_obstack);
1049 registers_made_dead = ALLOC_REG_SET (&reg_obstack);
1051 /* Inconveniently, this is only readily available in hard reg set form. */
1052 for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
1053 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
1054 SET_REGNO_REG_SET (invalidated_by_call, i);
1056 /* Allocate space for the sets of local properties.
   NOTE(review): these arrays are indexed by bb->index - (INVALID_BLOCK + 1)
   throughout, so the special (negative) block indexes map onto valid,
   non-negative array slots. */
1057 local_sets = xcalloc (last_basic_block - (INVALID_BLOCK + 1),
1058 sizeof (regset));
1059 cond_local_sets = xcalloc (last_basic_block - (INVALID_BLOCK + 1),
1060 sizeof (regset));
1062 /* Create a worklist. Allocate an extra slot for ENTRY_BLOCK, and one
1063 because the `head == tail' style test for an empty queue doesn't
1064 work with a full queue. */
1065 queue = xmalloc ((n_basic_blocks - (INVALID_BLOCK + 1)) * sizeof (*queue));
1066 qtail = queue;
1067 qhead = qend = queue + n_basic_blocks - (INVALID_BLOCK + 1);
1069 /* Queue the blocks set in the initial mask. Do this in reverse block
1070 number order so that we are more likely for the first round to do
1071 useful work. We use AUX non-null to flag that the block is queued. */
1072 if (blocks_in)
1074 FOR_EACH_BB (bb)
1075 if (TEST_BIT (blocks_in, bb->index))
1077 *--qhead = bb;
1078 bb->aux = bb;
1081 else
1083 FOR_EACH_BB (bb)
1085 *--qhead = bb;
1086 bb->aux = bb;
1090 block_accesses = xcalloc (last_basic_block, sizeof (int));
1092 /* We clean aux when we remove the initially-enqueued bbs, but we
1093 don't enqueue ENTRY and EXIT initially, so clean them upfront and
1094 unconditionally. */
1095 ENTRY_BLOCK_PTR->aux = EXIT_BLOCK_PTR->aux = NULL;
1097 if (blocks_out)
1098 sbitmap_zero (blocks_out);
1100 /* We work through the queue until there are no more blocks. What
1101 is live at the end of this block is precisely the union of what
1102 is live at the beginning of all its successors. So, we set its
1103 GLOBAL_LIVE_AT_END field based on the GLOBAL_LIVE_AT_START field
1104 for its successors. Then, we compute GLOBAL_LIVE_AT_START for
1105 this block by walking through the instructions in this block in
1106 reverse order and updating as we go. If that changed
1107 GLOBAL_LIVE_AT_START, we add the predecessors of the block to the
1108 queue; they will now need to recalculate GLOBAL_LIVE_AT_END.
1110 We are guaranteed to terminate, because GLOBAL_LIVE_AT_START
1111 never shrinks. If a register appears in GLOBAL_LIVE_AT_START, it
1112 must either be live at the end of the block, or used within the
1113 block. In the latter case, it will certainly never disappear
1114 from GLOBAL_LIVE_AT_START. In the former case, the register
1115 could go away only if it disappeared from GLOBAL_LIVE_AT_START
1116 for one of the successor blocks. By induction, that cannot
1117 occur.
1119 ??? This reasoning doesn't work if we start from non-empty initial
1120 GLOBAL_LIVE_AT_START sets. And there are actually two problems:
1121 1) Updating may not terminate (endless oscillation).
1122 2) Even if it does (and it usually does), the resulting information
1123 may be inaccurate. Consider for example the following case:
1125 a = ...;
1126 while (...) {...} -- 'a' not mentioned at all
1127 ... = a;
1129 If the use of 'a' is deleted between two calculations of liveness
1130 information and the initial sets are not cleared, the information
1131 about a's liveness will get stuck inside the loop and the set will
1132 appear not to be dead.
1134 We do not attempt to solve 2) -- the information is conservatively
1135 correct (i.e. we never claim that something live is dead) and the
1136 amount of optimization opportunities missed due to this problem is
1137 not significant.
1139 1) is more serious. In order to fix it, we monitor the number of times
1140 each block is processed. Once one of the blocks has been processed more
1141 times than the maximum number of rounds, we use the following strategy:
1142 When a register disappears from one of the sets, we add it to a MADE_DEAD
1143 set, remove all registers in this set from all GLOBAL_LIVE_AT_* sets and
1144 add the blocks with changed sets into the queue. Thus we are guaranteed
1145 to terminate (the worst case corresponds to all registers in MADE_DEAD,
1146 in which case the original reasoning above is valid), but in general we
1147 only fix up a few offending registers.
1149 The maximum number of rounds for computing liveness is the largest of
1150 MAX_LIVENESS_ROUNDS and the latest loop depth count for this function. */
1152 while (qhead != qtail)
1154 int rescan, changed;
1155 basic_block bb;
1156 edge e;
1157 edge_iterator ei;
1159 bb = *qhead++;
1160 if (qhead == qend)
1161 qhead = queue;
1162 bb->aux = NULL;
1164 /* Should we start using the failure strategy? */
1165 if (bb != ENTRY_BLOCK_PTR)
1167 int max_liveness_rounds =
1168 MAX (MAX_LIVENESS_ROUNDS, cfun->max_loop_depth);
1170 block_accesses[bb->index]++;
1171 if (block_accesses[bb->index] > max_liveness_rounds)
1172 failure_strategy_required = true;
1175 /* Begin by propagating live_at_start from the successor blocks. */
1176 CLEAR_REG_SET (new_live_at_end);
1178 if (EDGE_COUNT (bb->succs) > 0)
1179 FOR_EACH_EDGE (e, ei, bb->succs)
1181 basic_block sb = e->dest;
1183 /* Call-clobbered registers die across exception and
1184 call edges. */
1185 /* ??? Abnormal call edges ignored for the moment, as this gets
1186 confused by sibling call edges, which crashes reg-stack. */
1187 if (e->flags & EDGE_EH)
1188 bitmap_ior_and_compl_into (new_live_at_end,
1189 sb->il.rtl->global_live_at_start,
1190 invalidated_by_call);
1191 else
1192 IOR_REG_SET (new_live_at_end, sb->il.rtl->global_live_at_start);
1194 /* If a target saves one register in another (instead of on
1195 the stack) the save register will need to be live for EH. */
1196 if (e->flags & EDGE_EH)
1197 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1198 if (EH_USES (i))
1199 SET_REGNO_REG_SET (new_live_at_end, i);
1201 else
1203 /* This might be a noreturn function that throws. And
1204 even if it isn't, getting the unwind info right helps
1205 debugging. */
1206 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1207 if (EH_USES (i))
1208 SET_REGNO_REG_SET (new_live_at_end, i);
1211 /* The all-important stack pointer must always be live. */
1212 SET_REGNO_REG_SET (new_live_at_end, STACK_POINTER_REGNUM);
1214 /* Before reload, there are a few registers that must be forced
1215 live everywhere -- which might not already be the case for
1216 blocks within infinite loops. */
1217 if (! reload_completed)
1219 /* Any reference to any pseudo before reload is a potential
1220 reference of the frame pointer. */
1221 SET_REGNO_REG_SET (new_live_at_end, FRAME_POINTER_REGNUM);
1223 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1224 /* Pseudos with argument area equivalences may require
1225 reloading via the argument pointer. */
1226 if (fixed_regs[ARG_POINTER_REGNUM])
1227 SET_REGNO_REG_SET (new_live_at_end, ARG_POINTER_REGNUM);
1228 #endif
1230 /* Any constant, or pseudo with constant equivalences, may
1231 require reloading from memory using the pic register. */
1232 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
1233 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
1234 SET_REGNO_REG_SET (new_live_at_end, PIC_OFFSET_TABLE_REGNUM);
1237 if (bb == ENTRY_BLOCK_PTR)
1239 COPY_REG_SET (bb->il.rtl->global_live_at_end, new_live_at_end);
1240 continue;
1243 /* On our first pass through this block, we'll go ahead and continue.
1244 Recognize first pass by checking if local_set is NULL for this
1245 basic block. On subsequent passes, we get to skip out early if
1246 live_at_end wouldn't have changed. */
1248 if (local_sets[bb->index - (INVALID_BLOCK + 1)] == NULL)
1250 local_sets[bb->index - (INVALID_BLOCK + 1)]
1251 = ALLOC_REG_SET (&reg_obstack);
1252 cond_local_sets[bb->index - (INVALID_BLOCK + 1)]
1253 = ALLOC_REG_SET (&reg_obstack);
1254 rescan = 1;
1256 else
1258 /* If any bits were removed from live_at_end, we'll have to
1259 rescan the block. This wouldn't be necessary if we had
1260 precalculated local_live, however with PROP_SCAN_DEAD_CODE
1261 local_live is really dependent on live_at_end. */
1262 rescan = bitmap_intersect_compl_p (bb->il.rtl->global_live_at_end,
1263 new_live_at_end);
1265 if (!rescan)
1267 regset cond_local_set;
1269 /* If any of the registers in the new live_at_end set are
1270 conditionally set in this basic block, we must rescan.
1271 This is because conditional lifetimes at the end of the
1272 block do not just take the live_at_end set into
1273 account, but also the liveness at the start of each
1274 successor block. We can miss changes in those sets if
1275 we only compare the new live_at_end against the
1276 previous one. */
1277 cond_local_set = cond_local_sets[bb->index - (INVALID_BLOCK + 1)];
1278 rescan = bitmap_intersect_p (new_live_at_end, cond_local_set);
1281 if (!rescan)
1283 regset local_set;
1285 /* Find the set of changed bits. Take this opportunity
1286 to notice that this set is empty and early out. */
1287 bitmap_xor (tmp, bb->il.rtl->global_live_at_end, new_live_at_end);
1288 if (bitmap_empty_p (tmp))
1289 continue;
1291 /* If any of the changed bits overlap with local_sets[bb],
1292 we'll have to rescan the block. */
1293 local_set = local_sets[bb->index - (INVALID_BLOCK + 1)];
1294 rescan = bitmap_intersect_p (tmp, local_set);
1298 /* Let our caller know that BB changed enough to require its
1299 death notes updated. */
1300 if (blocks_out)
1301 SET_BIT (blocks_out, bb->index);
1303 if (! rescan)
1305 /* Add to live_at_start the set of all registers in
1306 new_live_at_end that aren't in the old live_at_end. */
1308 changed = bitmap_ior_and_compl_into (bb->il.rtl->global_live_at_start,
1309 new_live_at_end,
1310 bb->il.rtl->global_live_at_end);
1311 COPY_REG_SET (bb->il.rtl->global_live_at_end, new_live_at_end);
1312 if (! changed)
1313 continue;
1315 else
1317 COPY_REG_SET (bb->il.rtl->global_live_at_end, new_live_at_end);
1319 /* Rescan the block insn by insn to turn (a copy of) live_at_end
1320 into live_at_start. */
1321 propagate_block (bb, new_live_at_end,
1322 local_sets[bb->index - (INVALID_BLOCK + 1)],
1323 cond_local_sets[bb->index - (INVALID_BLOCK + 1)],
1324 flags);
1326 /* If live_at start didn't change, no need to go farther. */
1327 if (REG_SET_EQUAL_P (bb->il.rtl->global_live_at_start,
1328 new_live_at_end))
1329 continue;
1331 if (failure_strategy_required)
1333 /* Get the list of registers that were removed from the
1334 bb->global_live_at_start set. */
1335 bitmap_and_compl (tmp, bb->il.rtl->global_live_at_start,
1336 new_live_at_end);
1337 if (!bitmap_empty_p (tmp))
1339 bool pbb_changed;
1340 basic_block pbb;
1342 /* It should not happen that one of the registers we have
1343 removed last time disappears again before any other
1344 register does. */
1345 pbb_changed = bitmap_ior_into (registers_made_dead, tmp);
1346 gcc_assert (pbb_changed);
1348 /* Now remove the registers from all sets. */
1349 FOR_EACH_BB (pbb)
1351 pbb_changed = false;
1353 pbb_changed
1354 |= bitmap_and_compl_into
1355 (pbb->il.rtl->global_live_at_start,
1356 registers_made_dead);
1357 pbb_changed
1358 |= bitmap_and_compl_into
1359 (pbb->il.rtl->global_live_at_end,
1360 registers_made_dead);
1361 if (!pbb_changed)
1362 continue;
1364 /* Note the (possible) change. */
1365 if (blocks_out)
1366 SET_BIT (blocks_out, pbb->index);
1368 /* Make sure to really rescan the block. */
1369 if (local_sets[pbb->index - (INVALID_BLOCK + 1)])
1371 FREE_REG_SET (local_sets[pbb->index - (INVALID_BLOCK + 1)]);
1372 FREE_REG_SET (cond_local_sets[pbb->index - (INVALID_BLOCK + 1)]);
1373 local_sets[pbb->index - (INVALID_BLOCK + 1)] = 0;
1376 /* Add it to the queue. */
1377 if (pbb->aux == NULL)
1379 *qtail++ = pbb;
1380 if (qtail == qend)
1381 qtail = queue;
1382 pbb->aux = pbb;
1385 continue;
1387 } /* end of failure_strategy_required */
1389 COPY_REG_SET (bb->il.rtl->global_live_at_start, new_live_at_end);
1392 /* Queue all predecessors of BB so that we may re-examine
1393 their live_at_end. */
1394 FOR_EACH_EDGE (e, ei, bb->preds)
1396 basic_block pb = e->src;
1397 if (pb->aux == NULL)
1399 *qtail++ = pb;
1400 if (qtail == qend)
1401 qtail = queue;
1402 pb->aux = pb;
1407 FREE_REG_SET (tmp);
1408 FREE_REG_SET (new_live_at_end);
1409 FREE_REG_SET (invalidated_by_call);
1410 FREE_REG_SET (registers_made_dead);
1412 if (blocks_out)
1414 sbitmap_iterator sbi;
1416 EXECUTE_IF_SET_IN_SBITMAP (blocks_out, 0, i, sbi)
1418 basic_block bb = BASIC_BLOCK (i);
1419 FREE_REG_SET (local_sets[bb->index - (INVALID_BLOCK + 1)]);
1420 FREE_REG_SET (cond_local_sets[bb->index - (INVALID_BLOCK + 1)]);
1423 else
1425 FOR_EACH_BB (bb)
1427 FREE_REG_SET (local_sets[bb->index - (INVALID_BLOCK + 1)]);
1428 FREE_REG_SET (cond_local_sets[bb->index - (INVALID_BLOCK + 1)]);
1432 free (block_accesses);
1433 free (queue);
1434 free (cond_local_sets);
1435 free (local_sets);
1439 /* This structure is used to pass parameters to and from the
1440 function find_regno_partial().  It is used to pass in the
1441 register number we are looking for, as well as to return any rtx
1442 we find. */
1444 typedef struct {
/* Register number to search for. */
1445 unsigned regno_to_find;
/* On success, the inner REG rtx of the partial reference found;
   otherwise NULL_RTX. */
1446 rtx retval;
1447 } find_regno_partial_param;
1450 /* Find the rtx for the reg numbers specified in 'data' if it is
1451 part of an expression which only uses part of the register. Return
1452 it in the structure passed in. */
1453 static int
1454 find_regno_partial (rtx *ptr, void *data)
1456 find_regno_partial_param *param = (find_regno_partial_param *)data;
1457 unsigned reg = param->regno_to_find;
1458 param->retval = NULL_RTX;
1460 if (*ptr == NULL_RTX)
1461 return 0;
1463 switch (GET_CODE (*ptr))
1465 case ZERO_EXTRACT:
1466 case SIGN_EXTRACT:
1467 case STRICT_LOW_PART:
1468 if (REG_P (XEXP (*ptr, 0)) && REGNO (XEXP (*ptr, 0)) == reg)
1470 param->retval = XEXP (*ptr, 0);
1471 return 1;
1473 break;
1475 case SUBREG:
1476 if (REG_P (SUBREG_REG (*ptr))
1477 && REGNO (SUBREG_REG (*ptr)) == reg)
1479 param->retval = SUBREG_REG (*ptr);
1480 return 1;
1482 break;
1484 default:
1485 break;
1488 return 0;
1491 /* Process all immediate successors of the entry block looking for pseudo
1492 registers which are live on entry. Find all of those whose first
1493 instance is a partial register reference of some kind, and initialize
1494 them to 0 after the entry block. This will prevent bit sets within
1495 registers whose value is unknown, and may contain some kind of sticky
1496 bits we don't want. */
1499 initialize_uninitialized_subregs (void)
1501 rtx insn;
1502 edge e;
1503 unsigned reg, did_something = 0;
1504 find_regno_partial_param param;
1505 edge_iterator ei;
/* Walk each block directly reachable from ENTRY and examine the pseudos
   live on entry to it. */
1507 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
1509 basic_block bb = e->dest;
1510 regset map = bb->il.rtl->global_live_at_start;
1511 reg_set_iterator rsi;
1513 EXECUTE_IF_SET_IN_REG_SET (map, FIRST_PSEUDO_REGISTER, reg, rsi)
1515 int uid = REGNO_FIRST_UID (reg);
1516 rtx i;
1518 /* Find an insn which mentions the register we are looking for.
1519 It's preferable to have an instance of the register's rtl since
1520 there may be various flags set which we need to duplicate.
1521 If we can't find it, it's probably an automatic whose initial
1522 value doesn't matter, or hopefully something we don't care about. */
1523 for (i = get_insns (); i && INSN_UID (i) != uid; i = NEXT_INSN (i))
1525 if (i != NULL_RTX)
1527 /* Found the insn, now get the REG rtx, if we can. */
1528 param.regno_to_find = reg;
1529 for_each_rtx (&i, find_regno_partial, &param);
1530 if (param.retval != NULL_RTX)
/* The first reference is partial: zero-initialize the register on
   this entry edge so the untouched bits have a defined value. */
1532 start_sequence ();
1533 emit_move_insn (param.retval,
1534 CONST0_RTX (GET_MODE (param.retval)));
1535 insn = get_insns ();
1536 end_sequence ();
1537 insert_insn_on_edge (insn, e);
1538 did_something = 1;
/* Returns nonzero iff any initializations were inserted (and committed). */
1544 if (did_something)
1545 commit_edge_insertions ();
1546 return did_something;
1550 /* Subroutines of life analysis. */
1552 /* Allocate the permanent data structures that represent the results
1553 of life analysis. */
1555 static void
1556 allocate_bb_life_data (void)
1558 basic_block bb;
1560 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
1562 bb->il.rtl->global_live_at_start = ALLOC_REG_SET (&reg_obstack);
1563 bb->il.rtl->global_live_at_end = ALLOC_REG_SET (&reg_obstack);
1566 regs_live_at_setjmp = ALLOC_REG_SET (&reg_obstack);
1569 void
1570 allocate_reg_life_data (void)
1572 int i;
1574 max_regno = max_reg_num ();
1575 gcc_assert (!reg_deaths);
1576 reg_deaths = xcalloc (sizeof (*reg_deaths), max_regno);
1578 /* Recalculate the register space, in case it has grown. Old style
1579 vector oriented regsets would set regset_{size,bytes} here also. */
1580 allocate_reg_info (max_regno, FALSE, FALSE);
1582 /* Reset all the data we'll collect in propagate_block and its
1583 subroutines. */
1584 for (i = 0; i < max_regno; i++)
1586 REG_N_SETS (i) = 0;
1587 REG_N_REFS (i) = 0;
1588 REG_N_DEATHS (i) = 0;
1589 REG_N_CALLS_CROSSED (i) = 0;
1590 REG_N_THROWING_CALLS_CROSSED (i) = 0;
1591 REG_LIVE_LENGTH (i) = 0;
1592 REG_FREQ (i) = 0;
1593 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
1597 /* Delete dead instructions for propagate_block. */
1599 static void
1600 propagate_block_delete_insn (rtx insn)
1602 rtx inote = find_reg_note (insn, REG_LABEL, NULL_RTX);
1604 /* If the insn referred to a label, and that label was attached to
1605 an ADDR_VEC, it's safe to delete the ADDR_VEC. In fact, it's
1606 pretty much mandatory to delete it, because the ADDR_VEC may be
1607 referencing labels that no longer exist.
1609 INSN may reference a deleted label, particularly when a jump
1610 table has been optimized into a direct jump. There's no
1611 real good way to fix up the reference to the deleted label
1612 when the label is deleted, so we just allow it here. */
/* NOTE(review): LABEL_P is applied to INOTE (the note itself) rather
   than XEXP (inote, 0), which is what the next line dereferences as
   the label -- verify against upstream whether this test ever fires. */
1614 if (inote && LABEL_P (inote))
1616 rtx label = XEXP (inote, 0);
1617 rtx next;
1619 /* The label may be forced if it has been put in the constant
1620 pool. If that is the only use we must discard the table
1621 jump following it, but not the label itself. */
1622 if (LABEL_NUSES (label) == 1 + LABEL_PRESERVE_P (label)
1623 && (next = next_nonnote_insn (label)) != NULL
1624 && JUMP_P (next)
1625 && (GET_CODE (PATTERN (next)) == ADDR_VEC
1626 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
1628 rtx pat = PATTERN (next);
1629 int diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
1630 int len = XVECLEN (pat, diff_vec_p);
1631 int i;
/* Drop the use counts of every label the jump table references
   before deleting the table itself. */
1633 for (i = 0; i < len; i++)
1634 LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0))--;
1636 delete_insn_and_edges (next);
1637 ndead++;
1641 delete_insn_and_edges (insn);
1642 ndead++;
1645 /* Delete dead libcalls for propagate_block. Return the insn
1646 before the libcall. */
1648 static rtx
1649 propagate_block_delete_libcall (rtx insn, rtx note)
1651 rtx first = XEXP (note, 0);
1652 rtx before = PREV_INSN (first);
1654 delete_insn_chain_and_edges (first, insn);
1655 ndead++;
1656 return before;
1659 /* Update the life-status of regs for one insn. Return the previous insn. */
1662 propagate_one_insn (struct propagate_block_info *pbi, rtx insn)
1664 rtx prev = PREV_INSN (insn);
1665 int flags = pbi->flags;
1666 int insn_is_dead = 0;
1667 int libcall_is_dead = 0;
1668 rtx note;
1669 unsigned i;
/* Notes, barriers etc. carry no life information; skip them. */
1671 if (! INSN_P (insn))
1672 return prev;
1674 note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
1675 if (flags & PROP_SCAN_DEAD_CODE)
1677 insn_is_dead = insn_dead_p (pbi, PATTERN (insn), 0, REG_NOTES (insn));
1678 libcall_is_dead = (insn_is_dead && note != 0
1679 && libcall_dead_p (pbi, note, insn));
1682 /* If an instruction consists of just dead store(s) on final pass,
1683 delete it. */
1684 if ((flags & PROP_KILL_DEAD_CODE) && insn_is_dead)
1686 /* If we're trying to delete a prologue or epilogue instruction
1687 that isn't flagged as possibly being dead, something is wrong.
1688 But if we are keeping the stack pointer depressed, we might well
1689 be deleting insns that are used to compute the amount to update
1690 it by, so they are fine. */
1691 if (reload_completed
1692 && !(TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
1693 && (TYPE_RETURNS_STACK_DEPRESSED
1694 (TREE_TYPE (current_function_decl))))
1695 && (((HAVE_epilogue || HAVE_prologue)
1696 && prologue_epilogue_contains (insn))
1697 || (HAVE_sibcall_epilogue
1698 && sibcall_epilogue_contains (insn)))
1699 && find_reg_note (insn, REG_MAYBE_DEAD, NULL_RTX) == 0)
1700 fatal_insn ("Attempt to delete prologue/epilogue insn:", insn);
1702 /* Record sets. Do this even for dead instructions, since they
1703 would have killed the values if they hadn't been deleted. To
1704 be consistent, we also have to emit a clobber when we delete
1705 an insn that clobbers a live register. */
1706 pbi->flags |= PROP_DEAD_INSN;
1707 mark_set_regs (pbi, PATTERN (insn), insn);
1708 pbi->flags &= ~PROP_DEAD_INSN;
1710 /* CC0 is now known to be dead. Either this insn used it,
1711 in which case it doesn't anymore, or clobbered it,
1712 so the next insn can't use it. */
1713 pbi->cc0_live = 0;
1715 if (libcall_is_dead)
1716 prev = propagate_block_delete_libcall (insn, note);
1717 else
1720 /* If INSN contains a RETVAL note and is dead, but the libcall
1721 as a whole is not dead, then we want to remove INSN, but
1722 not the whole libcall sequence.
1724 However, we need to also remove the dangling REG_LIBCALL
1725 note so that we do not have mis-matched LIBCALL/RETVAL
1726 notes. In theory we could find a new location for the
1727 REG_RETVAL note, but it hardly seems worth the effort.
1729 NOTE at this point will be the RETVAL note if it exists. */
1730 if (note)
1732 rtx libcall_note;
1734 libcall_note
1735 = find_reg_note (XEXP (note, 0), REG_LIBCALL, NULL_RTX);
1736 remove_note (XEXP (note, 0), libcall_note);
1739 /* Similarly if INSN contains a LIBCALL note, remove the
1740 dangling REG_RETVAL note. */
1741 note = find_reg_note (insn, REG_LIBCALL, NULL_RTX);
1742 if (note)
1744 rtx retval_note;
1746 retval_note
1747 = find_reg_note (XEXP (note, 0), REG_RETVAL, NULL_RTX);
1748 remove_note (XEXP (note, 0), retval_note);
1751 /* Now delete INSN. */
1752 propagate_block_delete_insn (insn);
1755 return prev;
1758 /* See if this is an increment or decrement that can be merged into
1759 a following memory address. */
1760 #ifdef AUTO_INC_DEC
1762 rtx x = single_set (insn);
1764 /* Does this instruction increment or decrement a register? */
1765 if ((flags & PROP_AUTOINC)
1766 && x != 0
1767 && REG_P (SET_DEST (x))
1768 && (GET_CODE (SET_SRC (x)) == PLUS
1769 || GET_CODE (SET_SRC (x)) == MINUS)
1770 && XEXP (SET_SRC (x), 0) == SET_DEST (x)
1771 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
1772 /* Ok, look for a following memory ref we can combine with.
1773 If one is found, change the memory ref to a PRE_INC
1774 or PRE_DEC, cancel this insn, and return 1.
1775 Return 0 if nothing has been done. */
1776 && try_pre_increment_1 (pbi, insn))
1777 return prev;
1779 #endif /* AUTO_INC_DEC */
1781 CLEAR_REG_SET (pbi->new_set);
1783 /* If this is not the final pass, and this insn is copying the value of
1784 a library call and it's dead, don't scan the insns that perform the
1785 library call, so that the call's arguments are not marked live. */
1786 if (libcall_is_dead)
1788 /* Record the death of the dest reg. */
1789 mark_set_regs (pbi, PATTERN (insn), insn);
/* Skip the whole libcall sequence: resume before its first insn. */
1791 insn = XEXP (note, 0);
1792 return PREV_INSN (insn);
1794 else if (GET_CODE (PATTERN (insn)) == SET
1795 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
1796 && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
1797 && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx
1798 && GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == CONST_INT)
1800 /* We have an insn to pop a constant amount off the stack.
1801 (Such insns use PLUS regardless of the direction of the stack,
1802 and any insn to adjust the stack by a constant is always a pop
1803 or part of a push.)
1804 These insns, if not dead stores, have no effect on life, though
1805 they do have an effect on the memory stores we are tracking. */
1806 invalidate_mems_from_set (pbi, stack_pointer_rtx);
1807 /* Still, we need to update local_set, lest ifcvt.c:dead_or_predicable
1808 concludes that the stack pointer is not modified. */
1809 mark_set_regs (pbi, PATTERN (insn), insn);
1811 else
1813 /* Any regs live at the time of a call instruction must not go
1814 in a register clobbered by calls. Find all regs now live and
1815 record this for them. */
1817 if (CALL_P (insn) && (flags & PROP_REG_INFO))
1819 reg_set_iterator rsi;
1820 EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i, rsi)
1821 REG_N_CALLS_CROSSED (i)++;
1822 if (can_throw_internal (insn))
1823 EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i, rsi)
1824 REG_N_THROWING_CALLS_CROSSED (i)++;
1827 /* Record sets. Do this even for dead instructions, since they
1828 would have killed the values if they hadn't been deleted. */
1829 mark_set_regs (pbi, PATTERN (insn), insn);
1831 if (CALL_P (insn))
1833 regset live_at_end;
1834 bool sibcall_p;
1835 rtx note, cond;
1836 int i;
1838 cond = NULL_RTX;
1839 if (GET_CODE (PATTERN (insn)) == COND_EXEC)
1840 cond = COND_EXEC_TEST (PATTERN (insn));
1842 /* Non-constant calls clobber memory, constant calls do not
1843 clobber memory, though they may clobber outgoing arguments
1844 on the stack. */
1845 if (! CONST_OR_PURE_CALL_P (insn))
1847 free_EXPR_LIST_list (&pbi->mem_set_list);
1848 pbi->mem_set_list_len = 0;
1850 else
1851 invalidate_mems_from_set (pbi, stack_pointer_rtx);
1853 /* There may be extra registers to be clobbered. */
1854 for (note = CALL_INSN_FUNCTION_USAGE (insn);
1855 note;
1856 note = XEXP (note, 1))
1857 if (GET_CODE (XEXP (note, 0)) == CLOBBER)
1858 mark_set_1 (pbi, CLOBBER, XEXP (XEXP (note, 0), 0),
1859 cond, insn, pbi->flags);
1861 /* Calls change all call-used and global registers; sibcalls do not
1862 clobber anything that must be preserved at end-of-function,
1863 except for return values. */
1865 sibcall_p = SIBLING_CALL_P (insn);
1866 live_at_end = EXIT_BLOCK_PTR->il.rtl->global_live_at_start;
1867 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1868 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i)
1869 && ! (sibcall_p
1870 && REGNO_REG_SET_P (live_at_end, i)
1871 && ! refers_to_regno_p (i, i+1,
1872 current_function_return_rtx,
1873 (rtx *) 0)))
1875 enum rtx_code code = global_regs[i] ? SET : CLOBBER;
1876 /* We do not want REG_UNUSED notes for these registers. */
1877 mark_set_1 (pbi, code, regno_reg_rtx[i], cond, insn,
1878 pbi->flags & ~(PROP_DEATH_NOTES | PROP_REG_INFO));
1882 /* If an insn doesn't use CC0, it becomes dead since we assume
1883 that every insn clobbers it. So show it dead here;
1884 mark_used_regs will set it live if it is referenced. */
1885 pbi->cc0_live = 0;
1887 /* Record uses. */
1888 if (! insn_is_dead)
1889 mark_used_regs (pbi, PATTERN (insn), NULL_RTX, insn);
1891 /* Sometimes we may have inserted something before INSN (such as a move)
1892 when we make an auto-inc. So ensure we will scan those insns. */
1893 #ifdef AUTO_INC_DEC
1894 prev = PREV_INSN (insn);
1895 #endif
1897 if (! insn_is_dead && CALL_P (insn))
1899 int i;
1900 rtx note, cond;
1902 cond = NULL_RTX;
1903 if (GET_CODE (PATTERN (insn)) == COND_EXEC)
1904 cond = COND_EXEC_TEST (PATTERN (insn));
1906 /* Calls use their arguments, and may clobber memory which
1907 address involves some register. */
1908 for (note = CALL_INSN_FUNCTION_USAGE (insn);
1909 note;
1910 note = XEXP (note, 1))
1911 /* We find USE or CLOBBER entities in a FUNCTION_USAGE list: both
1912 of which mark_used_regs knows how to handle. */
1913 mark_used_regs (pbi, XEXP (XEXP (note, 0), 0), cond, insn);
1915 /* The stack ptr is used (honorarily) by a CALL insn. */
1916 if ((flags & PROP_REG_INFO)
1917 && !REGNO_REG_SET_P (pbi->reg_live, STACK_POINTER_REGNUM))
1918 reg_deaths[STACK_POINTER_REGNUM] = pbi->insn_num;
1919 SET_REGNO_REG_SET (pbi->reg_live, STACK_POINTER_REGNUM);
1921 /* Calls may also reference any of the global registers,
1922 so they are made live. */
1923 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1924 if (global_regs[i])
1925 mark_used_reg (pbi, regno_reg_rtx[i], cond, insn);
1929 pbi->insn_num++;
1931 return prev;
1934 /* Initialize a propagate_block_info struct for public consumption.
1935 Note that the structure itself is opaque to this file, but that
1936 the user can use the regsets provided here. */
1938 struct propagate_block_info *
1939 init_propagate_block_info (basic_block bb, regset live, regset local_set,
1940 regset cond_local_set, int flags)
1942 struct propagate_block_info *pbi = xmalloc (sizeof (*pbi));
1944 pbi->bb = bb;
1945 pbi->reg_live = live;
1946 pbi->mem_set_list = NULL_RTX;
1947 pbi->mem_set_list_len = 0;
1948 pbi->local_set = local_set;
1949 pbi->cond_local_set = cond_local_set;
1950 pbi->cc0_live = 0;
1951 pbi->flags = flags;
1952 pbi->insn_num = 0;
1954 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
1955 pbi->reg_next_use = xcalloc (max_reg_num (), sizeof (rtx));
1956 else
1957 pbi->reg_next_use = NULL;
1959 pbi->new_set = BITMAP_ALLOC (NULL);
1961 #ifdef HAVE_conditional_execution
1962 pbi->reg_cond_dead = splay_tree_new (splay_tree_compare_ints, NULL,
1963 free_reg_cond_life_info);
1964 pbi->reg_cond_reg = BITMAP_ALLOC (NULL);
1966 /* If this block ends in a conditional branch, for each register
1967 live from one side of the branch and not the other, record the
1968 register as conditionally dead. */
1969 if (JUMP_P (BB_END (bb))
1970 && any_condjump_p (BB_END (bb)))
1972 regset diff = ALLOC_REG_SET (&reg_obstack);
1973 basic_block bb_true, bb_false;
1974 unsigned i;
1976 /* Identify the successor blocks. */
1977 bb_true = EDGE_SUCC (bb, 0)->dest;
1978 if (!single_succ_p (bb))
1980 bb_false = EDGE_SUCC (bb, 1)->dest;
1982 if (EDGE_SUCC (bb, 0)->flags & EDGE_FALLTHRU)
1984 basic_block t = bb_false;
1985 bb_false = bb_true;
1986 bb_true = t;
1988 else
1989 gcc_assert (EDGE_SUCC (bb, 1)->flags & EDGE_FALLTHRU);
1991 else
1993 /* This can happen with a conditional jump to the next insn. */
1994 gcc_assert (JUMP_LABEL (BB_END (bb)) == BB_HEAD (bb_true));
1996 /* Simplest way to do nothing. */
1997 bb_false = bb_true;
2000 /* Compute which register lead different lives in the successors. */
2001 bitmap_xor (diff, bb_true->il.rtl->global_live_at_start,
2002 bb_false->il.rtl->global_live_at_start);
2004 if (!bitmap_empty_p (diff))
2006 /* Extract the condition from the branch. */
2007 rtx set_src = SET_SRC (pc_set (BB_END (bb)));
2008 rtx cond_true = XEXP (set_src, 0);
2009 rtx reg = XEXP (cond_true, 0);
2010 enum rtx_code inv_cond;
2012 if (GET_CODE (reg) == SUBREG)
2013 reg = SUBREG_REG (reg);
2015 /* We can only track conditional lifetimes if the condition is
2016 in the form of a reversible comparison of a register against
2017 zero. If the condition is more complex than that, then it is
2018 safe not to record any information. */
2019 inv_cond = reversed_comparison_code (cond_true, BB_END (bb));
2020 if (inv_cond != UNKNOWN
2021 && REG_P (reg)
2022 && XEXP (cond_true, 1) == const0_rtx)
2024 rtx cond_false
2025 = gen_rtx_fmt_ee (inv_cond,
2026 GET_MODE (cond_true), XEXP (cond_true, 0),
2027 XEXP (cond_true, 1));
2028 reg_set_iterator rsi;
2030 if (GET_CODE (XEXP (set_src, 1)) == PC)
2032 rtx t = cond_false;
2033 cond_false = cond_true;
2034 cond_true = t;
2037 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (reg));
2039 /* For each such register, mark it conditionally dead. */
2040 EXECUTE_IF_SET_IN_REG_SET (diff, 0, i, rsi)
2042 struct reg_cond_life_info *rcli;
2043 rtx cond;
2045 rcli = xmalloc (sizeof (*rcli));
2047 if (REGNO_REG_SET_P (bb_true->il.rtl->global_live_at_start,
2049 cond = cond_false;
2050 else
2051 cond = cond_true;
2052 rcli->condition = cond;
2053 rcli->stores = const0_rtx;
2054 rcli->orig_condition = cond;
2056 splay_tree_insert (pbi->reg_cond_dead, i,
2057 (splay_tree_value) rcli);
2062 FREE_REG_SET (diff);
2064 #endif
2066 /* If this block has no successors, any stores to the frame that aren't
2067 used later in the block are dead. So make a pass over the block
2068 recording any such that are made and show them dead at the end. We do
2069 a very conservative and simple job here. */
2070 if (optimize
2071 && ! (TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
2072 && (TYPE_RETURNS_STACK_DEPRESSED
2073 (TREE_TYPE (current_function_decl))))
2074 && (flags & PROP_SCAN_DEAD_STORES)
2075 && (EDGE_COUNT (bb->succs) == 0
2076 || (single_succ_p (bb)
2077 && single_succ (bb) == EXIT_BLOCK_PTR
2078 && ! current_function_calls_eh_return)))
2080 rtx insn, set;
2081 for (insn = BB_END (bb); insn != BB_HEAD (bb); insn = PREV_INSN (insn))
2082 if (NONJUMP_INSN_P (insn)
2083 && (set = single_set (insn))
2084 && MEM_P (SET_DEST (set)))
2086 rtx mem = SET_DEST (set);
2087 rtx canon_mem = canon_rtx (mem);
2089 if (XEXP (canon_mem, 0) == frame_pointer_rtx
2090 || (GET_CODE (XEXP (canon_mem, 0)) == PLUS
2091 && XEXP (XEXP (canon_mem, 0), 0) == frame_pointer_rtx
2092 && GET_CODE (XEXP (XEXP (canon_mem, 0), 1)) == CONST_INT))
2093 add_to_mem_set_list (pbi, canon_mem);
2097 return pbi;
2100 /* Release a propagate_block_info struct. */
2102 void
2103 free_propagate_block_info (struct propagate_block_info *pbi)
2105 free_EXPR_LIST_list (&pbi->mem_set_list);
2107 BITMAP_FREE (pbi->new_set);
2109 #ifdef HAVE_conditional_execution
2110 splay_tree_delete (pbi->reg_cond_dead);
2111 BITMAP_FREE (pbi->reg_cond_reg);
2112 #endif
2114 if (pbi->flags & PROP_REG_INFO)
2116 int num = pbi->insn_num;
2117 unsigned i;
2118 reg_set_iterator rsi;
2120 EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i, rsi)
2122 REG_LIVE_LENGTH (i) += num - reg_deaths[i];
2123 reg_deaths[i] = 0;
2126 if (pbi->reg_next_use)
2127 free (pbi->reg_next_use);
2129 free (pbi);
2132 /* Compute the registers live at the beginning of a basic block BB from
2133 those live at the end.
2135 When called, REG_LIVE contains those live at the end. On return, it
2136 contains those live at the beginning.
2138 LOCAL_SET, if non-null, will be set with all registers killed
2139 unconditionally by this basic block.
2140 Likewise, COND_LOCAL_SET, if non-null, will be set with all registers
2141 killed conditionally by this basic block. If there is any unconditional
2142 set of a register, then the corresponding bit will be set in LOCAL_SET
2143 and cleared in COND_LOCAL_SET.
2144 It is valid for LOCAL_SET and COND_LOCAL_SET to be the same set. In this
2145 case, the resulting set will be equal to the union of the two sets that
2146 would otherwise be computed.
2148 Return nonzero if an INSN is deleted (i.e. by dead code removal). */
2151 propagate_block (basic_block bb, regset live, regset local_set,
2152 regset cond_local_set, int flags)
2154 struct propagate_block_info *pbi;
2155 rtx insn, prev;
2156 int changed;
2158 pbi = init_propagate_block_info (bb, live, local_set, cond_local_set, flags);
2160 if (flags & PROP_REG_INFO)
2162 unsigned i;
2163 reg_set_iterator rsi;
2165 /* Process the regs live at the end of the block.
2166 Mark them as not local to any one basic block. */
2167 EXECUTE_IF_SET_IN_REG_SET (live, 0, i, rsi)
2168 REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL;
2171 /* Scan the block an insn at a time from end to beginning. */
2173 changed = 0;
2174 for (insn = BB_END (bb); ; insn = prev)
2176 /* If this is a call to `setjmp' et al, warn if any
2177 non-volatile datum is live. */
2178 if ((flags & PROP_REG_INFO)
2179 && CALL_P (insn)
2180 && find_reg_note (insn, REG_SETJMP, NULL))
2181 IOR_REG_SET (regs_live_at_setjmp, pbi->reg_live);
2183 prev = propagate_one_insn (pbi, insn);
2184 if (!prev)
2185 changed |= insn != get_insns ();
2186 else
2187 changed |= NEXT_INSN (prev) != insn;
2189 if (insn == BB_HEAD (bb))
2190 break;
2193 free_propagate_block_info (pbi);
2195 return changed;
2198 /* Return 1 if X (the body of an insn, or part of it) is just dead stores
2199 (SET expressions whose destinations are registers dead after the insn).
2200 NEEDED is the regset that says which regs are alive after the insn.
2202 Unless CALL_OK is nonzero, an insn is needed if it contains a CALL.
2204 If X is the entire body of an insn, NOTES contains the reg notes
2205 pertaining to the insn. */
2207 static int
2208 insn_dead_p (struct propagate_block_info *pbi, rtx x, int call_ok,
2209 rtx notes ATTRIBUTE_UNUSED)
2211 enum rtx_code code = GET_CODE (x);
2213 /* Don't eliminate insns that may trap. */
2214 if (flag_non_call_exceptions && may_trap_p (x))
2215 return 0;
2217 #ifdef AUTO_INC_DEC
2218 /* As flow is invoked after combine, we must take existing AUTO_INC
2219 expressions into account. */
2220 for (; notes; notes = XEXP (notes, 1))
2222 if (REG_NOTE_KIND (notes) == REG_INC)
2224 int regno = REGNO (XEXP (notes, 0));
2226 /* Don't delete insns to set global regs. */
2227 if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
2228 || REGNO_REG_SET_P (pbi->reg_live, regno))
2229 return 0;
2232 #endif
2234 /* If setting something that's a reg or part of one,
2235 see if that register's altered value will be live. */
2237 if (code == SET)
2239 rtx r = SET_DEST (x);
2241 #ifdef HAVE_cc0
2242 if (GET_CODE (r) == CC0)
2243 return ! pbi->cc0_live;
2244 #endif
2246 /* A SET that is a subroutine call cannot be dead. */
2247 if (GET_CODE (SET_SRC (x)) == CALL)
2249 if (! call_ok)
2250 return 0;
2253 /* Don't eliminate loads from volatile memory or volatile asms. */
2254 else if (volatile_refs_p (SET_SRC (x)))
2255 return 0;
2257 if (MEM_P (r))
2259 rtx temp, canon_r;
2261 if (MEM_VOLATILE_P (r) || GET_MODE (r) == BLKmode)
2262 return 0;
2264 canon_r = canon_rtx (r);
2266 /* Walk the set of memory locations we are currently tracking
2267 and see if one is an identical match to this memory location.
2268 If so, this memory write is dead (remember, we're walking
2269 backwards from the end of the block to the start). Since
2270 rtx_equal_p does not check the alias set or flags, we also
2271 must have the potential for them to conflict (anti_dependence). */
2272 for (temp = pbi->mem_set_list; temp != 0; temp = XEXP (temp, 1))
2273 if (anti_dependence (r, XEXP (temp, 0)))
2275 rtx mem = XEXP (temp, 0);
2277 if (rtx_equal_p (XEXP (canon_r, 0), XEXP (mem, 0))
2278 && (GET_MODE_SIZE (GET_MODE (canon_r))
2279 <= GET_MODE_SIZE (GET_MODE (mem))))
2280 return 1;
2282 #ifdef AUTO_INC_DEC
2283 /* Check if memory reference matches an auto increment. Only
2284 post increment/decrement or modify are valid. */
2285 if (GET_MODE (mem) == GET_MODE (r)
2286 && (GET_CODE (XEXP (mem, 0)) == POST_DEC
2287 || GET_CODE (XEXP (mem, 0)) == POST_INC
2288 || GET_CODE (XEXP (mem, 0)) == POST_MODIFY)
2289 && GET_MODE (XEXP (mem, 0)) == GET_MODE (r)
2290 && rtx_equal_p (XEXP (XEXP (mem, 0), 0), XEXP (r, 0)))
2291 return 1;
2292 #endif
2295 else
2297 while (GET_CODE (r) == SUBREG
2298 || GET_CODE (r) == STRICT_LOW_PART
2299 || GET_CODE (r) == ZERO_EXTRACT)
2300 r = XEXP (r, 0);
2302 if (REG_P (r))
2304 int regno = REGNO (r);
2306 /* Obvious. */
2307 if (REGNO_REG_SET_P (pbi->reg_live, regno))
2308 return 0;
2310 /* If this is a hard register, verify that subsequent
2311 words are not needed. */
2312 if (regno < FIRST_PSEUDO_REGISTER)
2314 int n = hard_regno_nregs[regno][GET_MODE (r)];
2316 while (--n > 0)
2317 if (REGNO_REG_SET_P (pbi->reg_live, regno+n))
2318 return 0;
2321 /* Don't delete insns to set global regs. */
2322 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
2323 return 0;
2325 /* Make sure insns to set the stack pointer aren't deleted. */
2326 if (regno == STACK_POINTER_REGNUM)
2327 return 0;
2329 /* ??? These bits might be redundant with the force live bits
2330 in calculate_global_regs_live. We would delete from
2331 sequential sets; whether this actually affects real code
2332 for anything but the stack pointer I don't know. */
2333 /* Make sure insns to set the frame pointer aren't deleted. */
2334 if (regno == FRAME_POINTER_REGNUM
2335 && (! reload_completed || frame_pointer_needed))
2336 return 0;
2337 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2338 if (regno == HARD_FRAME_POINTER_REGNUM
2339 && (! reload_completed || frame_pointer_needed))
2340 return 0;
2341 #endif
2343 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
2344 /* Make sure insns to set arg pointer are never deleted
2345 (if the arg pointer isn't fixed, there will be a USE
2346 for it, so we can treat it normally). */
2347 if (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
2348 return 0;
2349 #endif
2351 /* Otherwise, the set is dead. */
2352 return 1;
2357 /* If performing several activities, insn is dead if each activity
2358 is individually dead. Also, CLOBBERs and USEs can be ignored; a
2359 CLOBBER or USE that's inside a PARALLEL doesn't make the insn
2360 worth keeping. */
2361 else if (code == PARALLEL)
2363 int i = XVECLEN (x, 0);
2365 for (i--; i >= 0; i--)
2366 if (GET_CODE (XVECEXP (x, 0, i)) != CLOBBER
2367 && GET_CODE (XVECEXP (x, 0, i)) != USE
2368 && ! insn_dead_p (pbi, XVECEXP (x, 0, i), call_ok, NULL_RTX))
2369 return 0;
2371 return 1;
2374 /* A CLOBBER of a pseudo-register that is dead serves no purpose. That
2375 is not necessarily true for hard registers until after reload. */
2376 else if (code == CLOBBER)
2378 if (REG_P (XEXP (x, 0))
2379 && (REGNO (XEXP (x, 0)) >= FIRST_PSEUDO_REGISTER
2380 || reload_completed)
2381 && ! REGNO_REG_SET_P (pbi->reg_live, REGNO (XEXP (x, 0))))
2382 return 1;
2385 /* ??? A base USE is a historical relic. It ought not be needed anymore.
2386 Instances where it is still used are either (1) temporary and the USE
2387 escaped the pass, (2) cruft and the USE need not be emitted anymore,
2388 or (3) hiding bugs elsewhere that are not properly representing data
2389 flow. */
2391 return 0;
2394 /* If INSN is the last insn in a libcall, and assuming INSN is dead,
2395 return 1 if the entire library call is dead.
2396 This is true if INSN copies a register (hard or pseudo)
2397 and if the hard return reg of the call insn is dead.
2398 (The caller should have tested the destination of the SET inside
2399 INSN already for death.)
2401 If this insn doesn't just copy a register, then we don't
2402 have an ordinary libcall. In that case, cse could not have
2403 managed to substitute the source for the dest later on,
2404 so we can assume the libcall is dead.
2406 PBI is the block info giving pseudoregs live before this insn.
2407 NOTE is the REG_RETVAL note of the insn. */
2409 static int
2410 libcall_dead_p (struct propagate_block_info *pbi, rtx note, rtx insn)
2412 rtx x = single_set (insn);
2414 if (x)
2416 rtx r = SET_SRC (x);
2418 if (REG_P (r) || GET_CODE (r) == SUBREG)
2420 rtx call = XEXP (note, 0);
2421 rtx call_pat;
2422 int i;
2424 /* Find the call insn. */
2425 while (call != insn && !CALL_P (call))
2426 call = NEXT_INSN (call);
2428 /* If there is none, do nothing special,
2429 since ordinary death handling can understand these insns. */
2430 if (call == insn)
2431 return 0;
2433 /* See if the hard reg holding the value is dead.
2434 If this is a PARALLEL, find the call within it. */
2435 call_pat = PATTERN (call);
2436 if (GET_CODE (call_pat) == PARALLEL)
2438 for (i = XVECLEN (call_pat, 0) - 1; i >= 0; i--)
2439 if (GET_CODE (XVECEXP (call_pat, 0, i)) == SET
2440 && GET_CODE (SET_SRC (XVECEXP (call_pat, 0, i))) == CALL)
2441 break;
2443 /* This may be a library call that is returning a value
2444 via invisible pointer. Do nothing special, since
2445 ordinary death handling can understand these insns. */
2446 if (i < 0)
2447 return 0;
2449 call_pat = XVECEXP (call_pat, 0, i);
2452 if (! insn_dead_p (pbi, call_pat, 1, REG_NOTES (call)))
2453 return 0;
2455 while ((insn = PREV_INSN (insn)) != call)
2457 if (! INSN_P (insn))
2458 continue;
2459 if (! insn_dead_p (pbi, PATTERN (insn), 0, REG_NOTES (insn)))
2460 return 0;
2462 return 1;
2465 return 0;
2468 /* 1 if register REGNO was alive at a place where `setjmp' was called
2469 and was set more than once or is an argument.
2470 Such regs may be clobbered by `longjmp'. */
2473 regno_clobbered_at_setjmp (int regno)
2475 if (n_basic_blocks == 0)
2476 return 0;
2478 return ((REG_N_SETS (regno) > 1
2479 || REGNO_REG_SET_P (ENTRY_BLOCK_PTR->il.rtl->global_live_at_end,
2480 regno))
2481 && REGNO_REG_SET_P (regs_live_at_setjmp, regno));
2484 /* Add MEM to PBI->MEM_SET_LIST. MEM should be canonical. Respect the
2485 maximal list size; look for overlaps in mode and select the largest. */
2486 static void
2487 add_to_mem_set_list (struct propagate_block_info *pbi, rtx mem)
2489 rtx i;
2491 /* We don't know how large a BLKmode store is, so we must not
2492 take them into consideration. */
2493 if (GET_MODE (mem) == BLKmode)
2494 return;
2496 for (i = pbi->mem_set_list; i ; i = XEXP (i, 1))
2498 rtx e = XEXP (i, 0);
2499 if (rtx_equal_p (XEXP (mem, 0), XEXP (e, 0)))
2501 if (GET_MODE_SIZE (GET_MODE (mem)) > GET_MODE_SIZE (GET_MODE (e)))
2503 #ifdef AUTO_INC_DEC
2504 /* If we must store a copy of the mem, we can just modify
2505 the mode of the stored copy. */
2506 if (pbi->flags & PROP_AUTOINC)
2507 PUT_MODE (e, GET_MODE (mem));
2508 else
2509 #endif
2510 XEXP (i, 0) = mem;
2512 return;
2516 if (pbi->mem_set_list_len < PARAM_VALUE (PARAM_MAX_FLOW_MEMORY_LOCATIONS))
2518 #ifdef AUTO_INC_DEC
2519 /* Store a copy of mem, otherwise the address may be
2520 scrogged by find_auto_inc. */
2521 if (pbi->flags & PROP_AUTOINC)
2522 mem = shallow_copy_rtx (mem);
2523 #endif
2524 pbi->mem_set_list = alloc_EXPR_LIST (0, mem, pbi->mem_set_list);
2525 pbi->mem_set_list_len++;
2529 /* INSN references memory, possibly using autoincrement addressing modes.
2530 Find any entries on the mem_set_list that need to be invalidated due
2531 to an address change. */
2533 static int
2534 invalidate_mems_from_autoinc (rtx *px, void *data)
2536 rtx x = *px;
2537 struct propagate_block_info *pbi = data;
2539 if (GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC)
2541 invalidate_mems_from_set (pbi, XEXP (x, 0));
2542 return -1;
2545 return 0;
2548 /* EXP is a REG or MEM. Remove any dependent entries from
2549 pbi->mem_set_list. */
2551 static void
2552 invalidate_mems_from_set (struct propagate_block_info *pbi, rtx exp)
2554 rtx temp = pbi->mem_set_list;
2555 rtx prev = NULL_RTX;
2556 rtx next;
2558 while (temp)
2560 next = XEXP (temp, 1);
2561 if ((REG_P (exp) && reg_overlap_mentioned_p (exp, XEXP (temp, 0)))
2562 /* When we get an EXP that is a mem here, we want to check if EXP
2563 overlaps the *address* of any of the mems in the list (i.e. not
2564 whether the mems actually overlap; that's done elsewhere). */
2565 || (MEM_P (exp)
2566 && reg_overlap_mentioned_p (exp, XEXP (XEXP (temp, 0), 0))))
2568 /* Splice this entry out of the list. */
2569 if (prev)
2570 XEXP (prev, 1) = next;
2571 else
2572 pbi->mem_set_list = next;
2573 free_EXPR_LIST_node (temp);
2574 pbi->mem_set_list_len--;
2576 else
2577 prev = temp;
2578 temp = next;
2582 /* Process the registers that are set within X. Their bits are set to
2583 1 in the regset DEAD, because they are dead prior to this insn.
2585 If INSN is nonzero, it is the insn being processed.
2587 FLAGS is the set of operations to perform. */
2589 static void
2590 mark_set_regs (struct propagate_block_info *pbi, rtx x, rtx insn)
2592 rtx cond = NULL_RTX;
2593 rtx link;
2594 enum rtx_code code;
2595 int flags = pbi->flags;
2597 if (insn)
2598 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
2600 if (REG_NOTE_KIND (link) == REG_INC)
2601 mark_set_1 (pbi, SET, XEXP (link, 0),
2602 (GET_CODE (x) == COND_EXEC
2603 ? COND_EXEC_TEST (x) : NULL_RTX),
2604 insn, flags);
2606 retry:
2607 switch (code = GET_CODE (x))
2609 case SET:
2610 if (GET_CODE (XEXP (x, 1)) == ASM_OPERANDS)
2611 flags |= PROP_ASM_SCAN;
2612 /* Fall through */
2613 case CLOBBER:
2614 mark_set_1 (pbi, code, SET_DEST (x), cond, insn, flags);
2615 return;
2617 case COND_EXEC:
2618 cond = COND_EXEC_TEST (x);
2619 x = COND_EXEC_CODE (x);
2620 goto retry;
2622 case PARALLEL:
2624 int i;
2626 /* We must scan forwards. If we have an asm, we need to set
2627 the PROP_ASM_SCAN flag before scanning the clobbers. */
2628 for (i = 0; i < XVECLEN (x, 0); i++)
2630 rtx sub = XVECEXP (x, 0, i);
2631 switch (code = GET_CODE (sub))
2633 case COND_EXEC:
2634 gcc_assert (!cond);
2636 cond = COND_EXEC_TEST (sub);
2637 sub = COND_EXEC_CODE (sub);
2638 if (GET_CODE (sub) == SET)
2639 goto mark_set;
2640 if (GET_CODE (sub) == CLOBBER)
2641 goto mark_clob;
2642 break;
2644 case SET:
2645 mark_set:
2646 if (GET_CODE (XEXP (sub, 1)) == ASM_OPERANDS)
2647 flags |= PROP_ASM_SCAN;
2648 /* Fall through */
2649 case CLOBBER:
2650 mark_clob:
2651 mark_set_1 (pbi, code, SET_DEST (sub), cond, insn, flags);
2652 break;
2654 case ASM_OPERANDS:
2655 flags |= PROP_ASM_SCAN;
2656 break;
2658 default:
2659 break;
2662 break;
2665 default:
2666 break;
2670 /* Process a single set, which appears in INSN. REG (which may not
2671 actually be a REG, it may also be a SUBREG, PARALLEL, etc.) is
2672 being set using the CODE (which may be SET, CLOBBER, or COND_EXEC).
2673 If the set is conditional (because it appear in a COND_EXEC), COND
2674 will be the condition. */
2676 static void
2677 mark_set_1 (struct propagate_block_info *pbi, enum rtx_code code, rtx reg, rtx cond, rtx insn, int flags)
2679 int regno_first = -1, regno_last = -1;
2680 unsigned long not_dead = 0;
2681 int i;
2683 /* Modifying just one hardware register of a multi-reg value or just a
2684 byte field of a register does not mean the value from before this insn
2685 is now dead. Of course, if it was dead after it's unused now. */
2687 switch (GET_CODE (reg))
2689 case PARALLEL:
2690 /* Some targets place small structures in registers for return values of
2691 functions. We have to detect this case specially here to get correct
2692 flow information. */
2693 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
2694 if (XEXP (XVECEXP (reg, 0, i), 0) != 0)
2695 mark_set_1 (pbi, code, XEXP (XVECEXP (reg, 0, i), 0), cond, insn,
2696 flags);
2697 return;
2699 case SIGN_EXTRACT:
2700 /* SIGN_EXTRACT cannot be an lvalue. */
2701 gcc_unreachable ();
2703 case ZERO_EXTRACT:
2704 case STRICT_LOW_PART:
2705 /* ??? Assumes STRICT_LOW_PART not used on multi-word registers. */
2707 reg = XEXP (reg, 0);
2708 while (GET_CODE (reg) == SUBREG
2709 || GET_CODE (reg) == ZERO_EXTRACT
2710 || GET_CODE (reg) == STRICT_LOW_PART);
2711 if (MEM_P (reg))
2712 break;
2713 not_dead = (unsigned long) REGNO_REG_SET_P (pbi->reg_live, REGNO (reg));
2714 /* Fall through. */
2716 case REG:
2717 regno_last = regno_first = REGNO (reg);
2718 if (regno_first < FIRST_PSEUDO_REGISTER)
2719 regno_last += hard_regno_nregs[regno_first][GET_MODE (reg)] - 1;
2720 break;
2722 case SUBREG:
2723 if (REG_P (SUBREG_REG (reg)))
2725 enum machine_mode outer_mode = GET_MODE (reg);
2726 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (reg));
2728 /* Identify the range of registers affected. This is moderately
2729 tricky for hard registers. See alter_subreg. */
2731 regno_last = regno_first = REGNO (SUBREG_REG (reg));
2732 if (regno_first < FIRST_PSEUDO_REGISTER)
2734 regno_first += subreg_regno_offset (regno_first, inner_mode,
2735 SUBREG_BYTE (reg),
2736 outer_mode);
2737 regno_last = (regno_first
2738 + hard_regno_nregs[regno_first][outer_mode] - 1);
2740 /* Since we've just adjusted the register number ranges, make
2741 sure REG matches. Otherwise some_was_live will be clear
2742 when it shouldn't have been, and we'll create incorrect
2743 REG_UNUSED notes. */
2744 reg = gen_rtx_REG (outer_mode, regno_first);
2746 else
2748 /* If the number of words in the subreg is less than the number
2749 of words in the full register, we have a well-defined partial
2750 set. Otherwise the high bits are undefined.
2752 This is only really applicable to pseudos, since we just took
2753 care of multi-word hard registers. */
2754 if (((GET_MODE_SIZE (outer_mode)
2755 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
2756 < ((GET_MODE_SIZE (inner_mode)
2757 + UNITS_PER_WORD - 1) / UNITS_PER_WORD))
2758 not_dead = (unsigned long) REGNO_REG_SET_P (pbi->reg_live,
2759 regno_first);
2761 reg = SUBREG_REG (reg);
2764 else
2765 reg = SUBREG_REG (reg);
2766 break;
2768 default:
2769 break;
2772 /* If this set is a MEM, then it kills any aliased writes and any
2773 other MEMs which use it.
2774 If this set is a REG, then it kills any MEMs which use the reg. */
2775 if (optimize && (flags & PROP_SCAN_DEAD_STORES))
2777 if (REG_P (reg) || MEM_P (reg))
2778 invalidate_mems_from_set (pbi, reg);
2780 /* If the memory reference had embedded side effects (autoincrement
2781 address modes) then we may need to kill some entries on the
2782 memory set list. */
2783 if (insn && MEM_P (reg))
2784 for_each_rtx (&PATTERN (insn), invalidate_mems_from_autoinc, pbi);
2786 if (MEM_P (reg) && ! side_effects_p (reg)
2787 /* ??? With more effort we could track conditional memory life. */
2788 && ! cond)
2789 add_to_mem_set_list (pbi, canon_rtx (reg));
2792 if (REG_P (reg)
2793 && ! (regno_first == FRAME_POINTER_REGNUM
2794 && (! reload_completed || frame_pointer_needed))
2795 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2796 && ! (regno_first == HARD_FRAME_POINTER_REGNUM
2797 && (! reload_completed || frame_pointer_needed))
2798 #endif
2799 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
2800 && ! (regno_first == ARG_POINTER_REGNUM && fixed_regs[regno_first])
2801 #endif
2804 int some_was_live = 0, some_was_dead = 0;
2806 for (i = regno_first; i <= regno_last; ++i)
2808 int needed_regno = REGNO_REG_SET_P (pbi->reg_live, i);
2809 if (pbi->local_set)
2811 /* Order of the set operation matters here since both
2812 sets may be the same. */
2813 CLEAR_REGNO_REG_SET (pbi->cond_local_set, i);
2814 if (cond != NULL_RTX
2815 && ! REGNO_REG_SET_P (pbi->local_set, i))
2816 SET_REGNO_REG_SET (pbi->cond_local_set, i);
2817 else
2818 SET_REGNO_REG_SET (pbi->local_set, i);
2820 if (code != CLOBBER || needed_regno)
2821 SET_REGNO_REG_SET (pbi->new_set, i);
2823 some_was_live |= needed_regno;
2824 some_was_dead |= ! needed_regno;
2827 #ifdef HAVE_conditional_execution
2828 /* Consider conditional death in deciding that the register needs
2829 a death note. */
2830 if (some_was_live && ! not_dead
2831 /* The stack pointer is never dead. Well, not strictly true,
2832 but it's very difficult to tell from here. Hopefully
2833 combine_stack_adjustments will fix up the most egregious
2834 errors. */
2835 && regno_first != STACK_POINTER_REGNUM)
2837 for (i = regno_first; i <= regno_last; ++i)
2838 if (! mark_regno_cond_dead (pbi, i, cond))
2839 not_dead |= ((unsigned long) 1) << (i - regno_first);
2841 #endif
2843 /* Additional data to record if this is the final pass. */
2844 if (flags & (PROP_LOG_LINKS | PROP_REG_INFO
2845 | PROP_DEATH_NOTES | PROP_AUTOINC))
2847 rtx y;
2848 int blocknum = pbi->bb->index;
2850 y = NULL_RTX;
2851 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
2853 y = pbi->reg_next_use[regno_first];
2855 /* The next use is no longer next, since a store intervenes. */
2856 for (i = regno_first; i <= regno_last; ++i)
2857 pbi->reg_next_use[i] = 0;
2860 if (flags & PROP_REG_INFO)
2862 for (i = regno_first; i <= regno_last; ++i)
2864 /* Count (weighted) references, stores, etc. This counts a
2865 register twice if it is modified, but that is correct. */
2866 REG_N_SETS (i) += 1;
2867 REG_N_REFS (i) += 1;
2868 REG_FREQ (i) += REG_FREQ_FROM_BB (pbi->bb);
2870 /* The insns where a reg is live are normally counted
2871 elsewhere, but we want the count to include the insn
2872 where the reg is set, and the normal counting mechanism
2873 would not count it. */
2874 REG_LIVE_LENGTH (i) += 1;
2877 /* If this is a hard reg, record this function uses the reg. */
2878 if (regno_first < FIRST_PSEUDO_REGISTER)
2880 for (i = regno_first; i <= regno_last; i++)
2881 regs_ever_live[i] = 1;
2882 if (flags & PROP_ASM_SCAN)
2883 for (i = regno_first; i <= regno_last; i++)
2884 regs_asm_clobbered[i] = 1;
2886 else
2888 /* Keep track of which basic blocks each reg appears in. */
2889 if (REG_BASIC_BLOCK (regno_first) == REG_BLOCK_UNKNOWN)
2890 REG_BASIC_BLOCK (regno_first) = blocknum;
2891 else if (REG_BASIC_BLOCK (regno_first) != blocknum)
2892 REG_BASIC_BLOCK (regno_first) = REG_BLOCK_GLOBAL;
2896 if (! some_was_dead)
2898 if (flags & PROP_LOG_LINKS)
2900 /* Make a logical link from the next following insn
2901 that uses this register, back to this insn.
2902 The following insns have already been processed.
2904 We don't build a LOG_LINK for hard registers containing
2905 in ASM_OPERANDs. If these registers get replaced,
2906 we might wind up changing the semantics of the insn,
2907 even if reload can make what appear to be valid
2908 assignments later.
2910 We don't build a LOG_LINK for global registers to
2911 or from a function call. We don't want to let
2912 combine think that it knows what is going on with
2913 global registers. */
2914 if (y && (BLOCK_NUM (y) == blocknum)
2915 && (regno_first >= FIRST_PSEUDO_REGISTER
2916 || (asm_noperands (PATTERN (y)) < 0
2917 && ! ((CALL_P (insn)
2918 || CALL_P (y))
2919 && global_regs[regno_first]))))
2920 LOG_LINKS (y) = alloc_INSN_LIST (insn, LOG_LINKS (y));
2923 else if (not_dead)
2925 else if (! some_was_live)
2927 if (flags & PROP_REG_INFO)
2928 REG_N_DEATHS (regno_first) += 1;
2930 if (flags & PROP_DEATH_NOTES
2931 #ifdef STACK_REGS
2932 && (!(flags & PROP_POST_REGSTACK)
2933 || !IN_RANGE (REGNO (reg), FIRST_STACK_REG,
2934 LAST_STACK_REG))
2935 #endif
2938 /* Note that dead stores have already been deleted
2939 when possible. If we get here, we have found a
2940 dead store that cannot be eliminated (because the
2941 same insn does something useful). Indicate this
2942 by marking the reg being set as dying here. */
2943 REG_NOTES (insn)
2944 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
2947 else
2949 if (flags & PROP_DEATH_NOTES
2950 #ifdef STACK_REGS
2951 && (!(flags & PROP_POST_REGSTACK)
2952 || !IN_RANGE (REGNO (reg), FIRST_STACK_REG,
2953 LAST_STACK_REG))
2954 #endif
2957 /* This is a case where we have a multi-word hard register
2958 and some, but not all, of the words of the register are
2959 needed in subsequent insns. Write REG_UNUSED notes
2960 for those parts that were not needed. This case should
2961 be rare. */
2963 for (i = regno_first; i <= regno_last; ++i)
2964 if (! REGNO_REG_SET_P (pbi->reg_live, i))
2965 REG_NOTES (insn)
2966 = alloc_EXPR_LIST (REG_UNUSED,
2967 regno_reg_rtx[i],
2968 REG_NOTES (insn));
2973 /* Mark the register as being dead. */
2974 if (some_was_live
2975 /* The stack pointer is never dead. Well, not strictly true,
2976 but it's very difficult to tell from here. Hopefully
2977 combine_stack_adjustments will fix up the most egregious
2978 errors. */
2979 && regno_first != STACK_POINTER_REGNUM)
2981 for (i = regno_first; i <= regno_last; ++i)
2982 if (!(not_dead & (((unsigned long) 1) << (i - regno_first))))
2984 if ((pbi->flags & PROP_REG_INFO)
2985 && REGNO_REG_SET_P (pbi->reg_live, i))
2987 REG_LIVE_LENGTH (i) += pbi->insn_num - reg_deaths[i];
2988 reg_deaths[i] = 0;
2990 CLEAR_REGNO_REG_SET (pbi->reg_live, i);
2992 if (flags & PROP_DEAD_INSN)
2993 emit_insn_after (gen_rtx_CLOBBER (VOIDmode, reg), insn);
2996 else if (REG_P (reg))
2998 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
2999 pbi->reg_next_use[regno_first] = 0;
3001 if ((flags & PROP_REG_INFO) != 0
3002 && (flags & PROP_ASM_SCAN) != 0
3003 && regno_first < FIRST_PSEUDO_REGISTER)
3005 for (i = regno_first; i <= regno_last; i++)
3006 regs_asm_clobbered[i] = 1;
3010 /* If this is the last pass and this is a SCRATCH, show it will be dying
3011 here and count it. */
3012 else if (GET_CODE (reg) == SCRATCH)
3014 if (flags & PROP_DEATH_NOTES
3015 #ifdef STACK_REGS
3016 && (!(flags & PROP_POST_REGSTACK)
3017 || !IN_RANGE (REGNO (reg), FIRST_STACK_REG, LAST_STACK_REG))
3018 #endif
3020 REG_NOTES (insn)
3021 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
3025 #ifdef HAVE_conditional_execution
3026 /* Mark REGNO conditionally dead.
3027 Return true if the register is now unconditionally dead. */
3029 static int
3030 mark_regno_cond_dead (struct propagate_block_info *pbi, int regno, rtx cond)
3032 /* If this is a store to a predicate register, the value of the
3033 predicate is changing, we don't know that the predicate as seen
3034 before is the same as that seen after. Flush all dependent
3035 conditions from reg_cond_dead. This will make all such
3036 conditionally live registers unconditionally live. */
3037 if (REGNO_REG_SET_P (pbi->reg_cond_reg, regno))
3038 flush_reg_cond_reg (pbi, regno);
3040 /* If this is an unconditional store, remove any conditional
3041 life that may have existed. */
3042 if (cond == NULL_RTX)
3043 splay_tree_remove (pbi->reg_cond_dead, regno);
3044 else
3046 splay_tree_node node;
3047 struct reg_cond_life_info *rcli;
3048 rtx ncond;
3050 /* Otherwise this is a conditional set. Record that fact.
3051 It may have been conditionally used, or there may be a
3052 subsequent set with a complementary condition. */
3054 node = splay_tree_lookup (pbi->reg_cond_dead, regno);
3055 if (node == NULL)
3057 /* The register was unconditionally live previously.
3058 Record the current condition as the condition under
3059 which it is dead. */
3060 rcli = xmalloc (sizeof (*rcli));
3061 rcli->condition = cond;
3062 rcli->stores = cond;
3063 rcli->orig_condition = const0_rtx;
3064 splay_tree_insert (pbi->reg_cond_dead, regno,
3065 (splay_tree_value) rcli);
3067 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));
3069 /* Not unconditionally dead. */
3070 return 0;
3072 else
3074 /* The register was conditionally live previously.
3075 Add the new condition to the old. */
3076 rcli = (struct reg_cond_life_info *) node->value;
3077 ncond = rcli->condition;
3078 ncond = ior_reg_cond (ncond, cond, 1);
3079 if (rcli->stores == const0_rtx)
3080 rcli->stores = cond;
3081 else if (rcli->stores != const1_rtx)
3082 rcli->stores = ior_reg_cond (rcli->stores, cond, 1);
3084 /* If the register is now unconditionally dead, remove the entry
3085 in the splay_tree. A register is unconditionally dead if the
3086 dead condition ncond is true. A register is also unconditionally
3087 dead if the sum of all conditional stores is an unconditional
3088 store (stores is true), and the dead condition is identically the
3089 same as the original dead condition initialized at the end of
3090 the block. This is a pointer compare, not an rtx_equal_p
3091 compare. */
3092 if (ncond == const1_rtx
3093 || (ncond == rcli->orig_condition && rcli->stores == const1_rtx))
3094 splay_tree_remove (pbi->reg_cond_dead, regno);
3095 else
3097 rcli->condition = ncond;
3099 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));
3101 /* Not unconditionally dead. */
3102 return 0;
3107 return 1;
3110 /* Called from splay_tree_delete for pbi->reg_cond_dead. */
3112 static void
3113 free_reg_cond_life_info (splay_tree_value value)
3115 struct reg_cond_life_info *rcli = (struct reg_cond_life_info *) value;
3116 free (rcli);
/* Helper function for flush_reg_cond_reg, called via splay_tree_foreach.
   DATA points to an int pair: DATA[0] is the register number being
   flushed, DATA[1] is the key of the last node spliced out so far (or -1).
   Returns -1 to abort the traversal so the caller can remove NODE,
   0 to continue.  */

static int
flush_reg_cond_reg_1 (splay_tree_node node, void *data)
{
  struct reg_cond_life_info *rcli;
  int *xdata = (int *) data;
  unsigned int regno = xdata[0];

  /* Don't need to search if last flushed value was farther on in
     the in-order traversal.  */
  if (xdata[1] >= (int) node->key)
    return 0;

  /* Splice out portions of the expression that refer to regno.  */
  rcli = (struct reg_cond_life_info *) node->value;
  rcli->condition = elim_reg_cond (rcli->condition, regno);
  /* Skip elimination when STORES is already a constant truth value.  */
  if (rcli->stores != const0_rtx && rcli->stores != const1_rtx)
    rcli->stores = elim_reg_cond (rcli->stores, regno);

  /* If the entire condition is now false, signal the node to be removed.  */
  if (rcli->condition == const0_rtx)
    {
      /* Record the key so the caller knows which node to remove and so
	 the next traversal can skip everything up to this point.  */
      xdata[1] = node->key;
      return -1;
    }
  else
    /* A condition that simplified to "always dead" should have been
       handled elsewhere; only non-constant conditions may remain.  */
    gcc_assert (rcli->condition != const1_rtx);

  return 0;
}
3151 /* Flush all (sub) expressions referring to REGNO from REG_COND_LIVE. */
3153 static void
3154 flush_reg_cond_reg (struct propagate_block_info *pbi, int regno)
3156 int pair[2];
3158 pair[0] = regno;
3159 pair[1] = -1;
3160 while (splay_tree_foreach (pbi->reg_cond_dead,
3161 flush_reg_cond_reg_1, pair) == -1)
3162 splay_tree_remove (pbi->reg_cond_dead, pair[1]);
3164 CLEAR_REGNO_REG_SET (pbi->reg_cond_reg, regno);
/* Logical arithmetic on predicate conditions.  IOR, NOT and AND.
   For ior/and, the ADD flag determines whether we want to add the new
   condition X to the old one unconditionally.  If it is zero, we will
   only return a new expression if X allows us to simplify part of
   OLD, otherwise we return NULL to the caller.
   If ADD is nonzero, we will return a new condition in all cases.  The
   toplevel caller of one of these functions should always pass 1 for
   ADD.  */

static rtx
ior_reg_cond (rtx old, rtx x, int add)
{
  rtx op0, op1;

  if (COMPARISON_P (old))
    {
      /* A comparison OR'ed with its exact reverse on the same register
	 is always true.  */
      if (COMPARISON_P (x)
	  && REVERSE_CONDEXEC_PREDICATES_P (x, old)
	  && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
	return const1_rtx;
      /* X | X ~ X.  */
      if (GET_CODE (x) == GET_CODE (old)
	  && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
	return old;
      if (! add)
	return NULL;
      return gen_rtx_IOR (0, old, x);
    }

  switch (GET_CODE (old))
    {
    case IOR:
      /* Try to simplify X into either arm; NULL from the recursive call
	 means "no simplification" because ADD is 0 here.  */
      op0 = ior_reg_cond (XEXP (old, 0), x, 0);
      op1 = ior_reg_cond (XEXP (old, 1), x, 0);
      if (op0 != NULL || op1 != NULL)
	{
	  if (op0 == const0_rtx)
	    return op1 ? op1 : gen_rtx_IOR (0, XEXP (old, 1), x);
	  if (op1 == const0_rtx)
	    return op0 ? op0 : gen_rtx_IOR (0, XEXP (old, 0), x);
	  if (op0 == const1_rtx || op1 == const1_rtx)
	    return const1_rtx;
	  if (op0 == NULL)
	    op0 = gen_rtx_IOR (0, XEXP (old, 0), x);
	  else if (rtx_equal_p (x, op0))
	    /* (x | A) | x ~ (x | A).  */
	    return old;
	  if (op1 == NULL)
	    op1 = gen_rtx_IOR (0, XEXP (old, 1), x);
	  else if (rtx_equal_p (x, op1))
	    /* (A | x) | x ~ (A | x).  */
	    return old;
	  return gen_rtx_IOR (0, op0, op1);
	}
      if (! add)
	return NULL;
      return gen_rtx_IOR (0, old, x);

    case AND:
      op0 = ior_reg_cond (XEXP (old, 0), x, 0);
      op1 = ior_reg_cond (XEXP (old, 1), x, 0);
      if (op0 != NULL || op1 != NULL)
	{
	  if (op0 == const1_rtx)
	    return op1 ? op1 : gen_rtx_IOR (0, XEXP (old, 1), x);
	  if (op1 == const1_rtx)
	    return op0 ? op0 : gen_rtx_IOR (0, XEXP (old, 0), x);
	  if (op0 == const0_rtx || op1 == const0_rtx)
	    return const0_rtx;
	  if (op0 == NULL)
	    op0 = gen_rtx_IOR (0, XEXP (old, 0), x);
	  else if (rtx_equal_p (x, op0))
	    /* (x & A) | x ~ x.  */
	    return op0;
	  if (op1 == NULL)
	    op1 = gen_rtx_IOR (0, XEXP (old, 1), x);
	  else if (rtx_equal_p (x, op1))
	    /* (A & x) | x ~ x.  */
	    return op1;
	  return gen_rtx_AND (0, op0, op1);
	}
      if (! add)
	return NULL;
      return gen_rtx_IOR (0, old, x);

    case NOT:
      /* ~A | X ~ ~(A & ~X), by De Morgan; recurse on the AND form.  */
      op0 = and_reg_cond (XEXP (old, 0), not_reg_cond (x), 0);
      if (op0 != NULL)
	return not_reg_cond (op0);
      if (! add)
	return NULL;
      return gen_rtx_IOR (0, old, x);

    default:
      gcc_unreachable ();
    }
}
/* Return the logical negation of condition X, simplified where possible:
   constants are flipped, a top-level NOT is stripped, and a comparison of
   a register is replaced with the reversed comparison.  Otherwise wrap X
   in a NOT.  */

static rtx
not_reg_cond (rtx x)
{
  if (x == const0_rtx)
    return const1_rtx;
  else if (x == const1_rtx)
    return const0_rtx;
  /* ~(~A) ~ A.  */
  if (GET_CODE (x) == NOT)
    return XEXP (x, 0);
  if (COMPARISON_P (x)
      && REG_P (XEXP (x, 0)))
    {
      /* Conditions tracked here always compare a register against zero.  */
      gcc_assert (XEXP (x, 1) == const0_rtx);

      return gen_rtx_fmt_ee (reversed_comparison_code (x, NULL),
			     VOIDmode, XEXP (x, 0), const0_rtx);
    }
  return gen_rtx_NOT (0, x);
}
/* AND counterpart of ior_reg_cond: conjoin condition X with OLD.
   If ADD is zero, return a new expression only when X simplifies part
   of OLD, else NULL; if ADD is nonzero always return a condition.
   See the comment above ior_reg_cond for the full contract.  */

static rtx
and_reg_cond (rtx old, rtx x, int add)
{
  rtx op0, op1;

  if (COMPARISON_P (old))
    {
      /* A comparison AND'ed with its reverse on the same register is
	 always false.  */
      if (COMPARISON_P (x)
	  && GET_CODE (x) == reversed_comparison_code (old, NULL)
	  && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
	return const0_rtx;
      /* X & X ~ X.  */
      if (GET_CODE (x) == GET_CODE (old)
	  && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
	return old;
      if (! add)
	return NULL;
      return gen_rtx_AND (0, old, x);
    }

  switch (GET_CODE (old))
    {
    case IOR:
      /* Try to simplify X into either arm; NULL from the recursive call
	 means "no simplification" because ADD is 0 here.  */
      op0 = and_reg_cond (XEXP (old, 0), x, 0);
      op1 = and_reg_cond (XEXP (old, 1), x, 0);
      if (op0 != NULL || op1 != NULL)
	{
	  if (op0 == const0_rtx)
	    return op1 ? op1 : gen_rtx_AND (0, XEXP (old, 1), x);
	  if (op1 == const0_rtx)
	    return op0 ? op0 : gen_rtx_AND (0, XEXP (old, 0), x);
	  if (op0 == const1_rtx || op1 == const1_rtx)
	    return const1_rtx;
	  if (op0 == NULL)
	    op0 = gen_rtx_AND (0, XEXP (old, 0), x);
	  else if (rtx_equal_p (x, op0))
	    /* (x | A) & x ~ x.  */
	    return op0;
	  if (op1 == NULL)
	    op1 = gen_rtx_AND (0, XEXP (old, 1), x);
	  else if (rtx_equal_p (x, op1))
	    /* (A | x) & x ~ x.  */
	    return op1;
	  return gen_rtx_IOR (0, op0, op1);
	}
      if (! add)
	return NULL;
      return gen_rtx_AND (0, old, x);

    case AND:
      op0 = and_reg_cond (XEXP (old, 0), x, 0);
      op1 = and_reg_cond (XEXP (old, 1), x, 0);
      if (op0 != NULL || op1 != NULL)
	{
	  if (op0 == const1_rtx)
	    return op1 ? op1 : gen_rtx_AND (0, XEXP (old, 1), x);
	  if (op1 == const1_rtx)
	    return op0 ? op0 : gen_rtx_AND (0, XEXP (old, 0), x);
	  if (op0 == const0_rtx || op1 == const0_rtx)
	    return const0_rtx;
	  if (op0 == NULL)
	    op0 = gen_rtx_AND (0, XEXP (old, 0), x);
	  else if (rtx_equal_p (x, op0))
	    /* (x & A) & x ~ (x & A).  */
	    return old;
	  if (op1 == NULL)
	    op1 = gen_rtx_AND (0, XEXP (old, 1), x);
	  else if (rtx_equal_p (x, op1))
	    /* (A & x) & x ~ (A & x).  */
	    return old;
	  return gen_rtx_AND (0, op0, op1);
	}
      if (! add)
	return NULL;
      return gen_rtx_AND (0, old, x);

    case NOT:
      /* ~A & X ~ ~(A | ~X), by De Morgan; recurse on the IOR form.  */
      op0 = ior_reg_cond (XEXP (old, 0), not_reg_cond (x), 0);
      if (op0 != NULL)
	return not_reg_cond (op0);
      if (! add)
	return NULL;
      return gen_rtx_AND (0, old, x);

    default:
      gcc_unreachable ();
    }
}
/* Given a condition X, remove references to reg REGNO and return the
   new condition.  The removal will be done so that all conditions
   involving REGNO are considered to evaluate to false.  This function
   is used when the value of REGNO changes.  */

static rtx
elim_reg_cond (rtx x, unsigned int regno)
{
  rtx op0, op1;

  if (COMPARISON_P (x))
    {
      /* Any comparison of REGNO itself is forced to false.  */
      if (REGNO (XEXP (x, 0)) == regno)
	return const0_rtx;
      return x;
    }

  switch (GET_CODE (x))
    {
    case AND:
      op0 = elim_reg_cond (XEXP (x, 0), regno);
      op1 = elim_reg_cond (XEXP (x, 1), regno);
      /* Constant-fold the AND of the simplified arms.  */
      if (op0 == const0_rtx || op1 == const0_rtx)
	return const0_rtx;
      if (op0 == const1_rtx)
	return op1;
      if (op1 == const1_rtx)
	return op0;
      /* Nothing changed; share the original rtx.  */
      if (op0 == XEXP (x, 0) && op1 == XEXP (x, 1))
	return x;
      return gen_rtx_AND (0, op0, op1);

    case IOR:
      op0 = elim_reg_cond (XEXP (x, 0), regno);
      op1 = elim_reg_cond (XEXP (x, 1), regno);
      /* Constant-fold the IOR of the simplified arms.  */
      if (op0 == const1_rtx || op1 == const1_rtx)
	return const1_rtx;
      if (op0 == const0_rtx)
	return op1;
      if (op1 == const0_rtx)
	return op0;
      if (op0 == XEXP (x, 0) && op1 == XEXP (x, 1))
	return x;
      return gen_rtx_IOR (0, op0, op1);

    case NOT:
      op0 = elim_reg_cond (XEXP (x, 0), regno);
      if (op0 == const0_rtx)
	return const1_rtx;
      if (op0 == const1_rtx)
	return const0_rtx;
      if (op0 != XEXP (x, 0))
	return not_reg_cond (op0);
      return x;

    default:
      gcc_unreachable ();
    }
}
3431 #endif /* HAVE_conditional_execution */
3433 #ifdef AUTO_INC_DEC
/* Try to substitute the auto-inc expression INC as the address inside
   MEM which occurs in INSN.  Currently, the address of MEM is an expression
   involving INCR_REG, and INCR is the next use of INCR_REG; it is an insn
   that has a single set whose source is a PLUS of INCR_REG and something
   else.  On success, INSN gains a REG_INC note and INCR is rewritten
   into a plain copy (and deleted when it becomes a self-copy).  */

static void
attempt_auto_inc (struct propagate_block_info *pbi, rtx inc, rtx insn,
		  rtx mem, rtx incr, rtx incr_reg)
{
  int regno = REGNO (incr_reg);
  rtx set = single_set (incr);
  rtx q = SET_DEST (set);
  rtx y = SET_SRC (set);
  /* Index of INCR_REG inside the PLUS source of INCR.  */
  int opnum = XEXP (y, 0) == incr_reg ? 0 : 1;
  int changed;

  /* Make sure this reg appears only once in this insn.  */
  if (count_occurrences (PATTERN (insn), incr_reg, 1) != 1)
    return;

  if (dead_or_set_p (incr, incr_reg)
      /* Mustn't autoinc an eliminable register.  */
      && (regno >= FIRST_PSEUDO_REGISTER
	  || ! TEST_HARD_REG_BIT (elim_reg_set, regno)))
    {
      /* This is the simple case.  Try to make the auto-inc.  If
	 we can't, we are done.  Otherwise, we will do any
	 needed updates below.  */
      if (! validate_change (insn, &XEXP (mem, 0), inc, 0))
	return;
    }
  else if (REG_P (q)
	   /* PREV_INSN used here to check the semi-open interval
	      [insn,incr).  */
	   && ! reg_used_between_p (q, PREV_INSN (insn), incr)
	   /* We must also check for sets of q as q may be
	      a call clobbered hard register and there may
	      be a call between PREV_INSN (insn) and incr.  */
	   && ! reg_set_between_p (q, PREV_INSN (insn), incr))
    {
      /* We have *p followed sometime later by q = p+size.
	 Both p and q must be live afterward,
	 and q is not used between INSN and its assignment.
	 Change it to q = p, ...*q..., q = q+size.
	 Then fall into the usual case.  */
      rtx insns, temp;

      start_sequence ();
      emit_move_insn (q, incr_reg);
      insns = get_insns ();
      end_sequence ();

      /* If we can't make the auto-inc, or can't make the
	 replacement into Y, exit.  There's no point in making
	 the change below if we can't do the auto-inc and doing
	 so is not correct in the pre-inc case.  */

      XEXP (inc, 0) = q;
      validate_change (insn, &XEXP (mem, 0), inc, 1);
      validate_change (incr, &XEXP (y, opnum), q, 1);
      if (! apply_change_group ())
	return;

      /* We now know we'll be doing this change, so emit the
	 new insn(s) and do the updates.  */
      emit_insn_before (insns, insn);

      /* Keep the basic block head pointer valid if INSN was the head.  */
      if (BB_HEAD (pbi->bb) == insn)
	BB_HEAD (pbi->bb) = insns;

      /* INCR will become a NOTE and INSN won't contain a
	 use of INCR_REG.  If a use of INCR_REG was just placed in
	 the insn before INSN, make that the next use.
	 Otherwise, invalidate it.  */
      if (NONJUMP_INSN_P (PREV_INSN (insn))
	  && GET_CODE (PATTERN (PREV_INSN (insn))) == SET
	  && SET_SRC (PATTERN (PREV_INSN (insn))) == incr_reg)
	pbi->reg_next_use[regno] = PREV_INSN (insn);
      else
	pbi->reg_next_use[regno] = 0;

      /* From here on, Q plays the role of the incremented register.  */
      incr_reg = q;
      regno = REGNO (q);

      if ((pbi->flags & PROP_REG_INFO)
	  && !REGNO_REG_SET_P (pbi->reg_live, regno))
	reg_deaths[regno] = pbi->insn_num;

      /* REGNO is now used in INCR which is below INSN, but
	 it previously wasn't live here.  If we don't mark
	 it as live, we'll put a REG_DEAD note for it
	 on this insn, which is incorrect.  */
      SET_REGNO_REG_SET (pbi->reg_live, regno);

      /* If there are any calls between INSN and INCR, show
	 that REGNO now crosses them.  */
      for (temp = insn; temp != incr; temp = NEXT_INSN (temp))
	if (CALL_P (temp))
	  {
	    REG_N_CALLS_CROSSED (regno)++;
	    if (can_throw_internal (temp))
	      REG_N_THROWING_CALLS_CROSSED (regno)++;
	  }

      /* Invalidate alias info for Q since we just changed its value.  */
      clear_reg_alias_info (q);
    }
  else
    return;

  /* If we haven't returned, it means we were able to make the
     auto-inc, so update the status.  First, record that this insn
     has an implicit side effect.  */

  REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, incr_reg, REG_NOTES (insn));

  /* Modify the old increment-insn to simply copy
     the already-incremented value of our register.  */
  changed = validate_change (incr, &SET_SRC (set), incr_reg, 0);
  gcc_assert (changed);

  /* If that makes it a no-op (copying the register into itself) delete
     it so it won't appear to be a "use" and a "set" of this
     register.  */
  if (REGNO (SET_DEST (set)) == REGNO (incr_reg))
    {
      /* If the original source was dead, it's dead now.  */
      rtx note;

      while ((note = find_reg_note (incr, REG_DEAD, NULL_RTX)) != NULL_RTX)
	{
	  remove_note (incr, note);
	  if (XEXP (note, 0) != incr_reg)
	    {
	      unsigned int regno = REGNO (XEXP (note, 0));

	      /* Close out this register's live range for REG_LIVE_LENGTH
		 bookkeeping before clearing its live bit.  */
	      if ((pbi->flags & PROP_REG_INFO)
		  && REGNO_REG_SET_P (pbi->reg_live, regno))
		{
		  REG_LIVE_LENGTH (regno) += pbi->insn_num - reg_deaths[regno];
		  reg_deaths[regno] = 0;
		}
	      CLEAR_REGNO_REG_SET (pbi->reg_live, REGNO (XEXP (note, 0)));
	    }
	}

      SET_INSN_DELETED (incr);
    }

  if (regno >= FIRST_PSEUDO_REGISTER)
    {
      /* Count an extra reference to the reg.  When a reg is
	 incremented, spilling it is worse, so we want to make
	 that less likely.  */
      REG_FREQ (regno) += REG_FREQ_FROM_BB (pbi->bb);

      /* Count the increment as a setting of the register,
	 even though it isn't a SET in rtl.  */
      REG_N_SETS (regno)++;
    }
}
/* X is a MEM found in INSN.  See if we can convert it into an auto-increment
   reference, dispatching to attempt_auto_inc with the addressing form
   (POST_INC/DEC, PRE_INC/DEC, or POST/PRE_MODIFY) the target supports.  */

static void
find_auto_inc (struct propagate_block_info *pbi, rtx x, rtx insn)
{
  rtx addr = XEXP (x, 0);
  HOST_WIDE_INT offset = 0;
  rtx set, y, incr, inc_val;
  int regno;
  int size = GET_MODE_SIZE (GET_MODE (x));

  /* Adding a side effect to a jump is unsafe (see try_pre_increment).  */
  if (JUMP_P (insn))
    return;

  /* Here we detect use of an index register which might be good for
     postincrement, postdecrement, preincrement, or predecrement.  */

  if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
    offset = INTVAL (XEXP (addr, 1)), addr = XEXP (addr, 0);

  if (!REG_P (addr))
    return;

  regno = REGNO (addr);

  /* Is the next use an increment that might make auto-increment? */
  incr = pbi->reg_next_use[regno];
  if (incr == 0 || BLOCK_NUM (incr) != BLOCK_NUM (insn))
    return;
  set = single_set (incr);
  if (set == 0 || GET_CODE (set) != SET)
    return;
  y = SET_SRC (set);

  if (GET_CODE (y) != PLUS)
    return;

  /* The register being incremented may sit in either operand of the
     PLUS; the other operand is the increment amount.  */
  if (REG_P (XEXP (y, 0)) && REGNO (XEXP (y, 0)) == REGNO (addr))
    inc_val = XEXP (y, 1);
  else if (REG_P (XEXP (y, 1)) && REGNO (XEXP (y, 1)) == REGNO (addr))
    inc_val = XEXP (y, 0);
  else
    return;

  if (GET_CODE (inc_val) == CONST_INT)
    {
      /* Match the (increment, address offset) pair against each
	 addressing form the target provides.  */
      if (HAVE_POST_INCREMENT
	  && (INTVAL (inc_val) == size && offset == 0))
	attempt_auto_inc (pbi, gen_rtx_POST_INC (Pmode, addr), insn, x,
			  incr, addr);
      else if (HAVE_POST_DECREMENT
	       && (INTVAL (inc_val) == -size && offset == 0))
	attempt_auto_inc (pbi, gen_rtx_POST_DEC (Pmode, addr), insn, x,
			  incr, addr);
      else if (HAVE_PRE_INCREMENT
	       && (INTVAL (inc_val) == size && offset == size))
	attempt_auto_inc (pbi, gen_rtx_PRE_INC (Pmode, addr), insn, x,
			  incr, addr);
      else if (HAVE_PRE_DECREMENT
	       && (INTVAL (inc_val) == -size && offset == -size))
	attempt_auto_inc (pbi, gen_rtx_PRE_DEC (Pmode, addr), insn, x,
			  incr, addr);
      else if (HAVE_POST_MODIFY_DISP && offset == 0)
	attempt_auto_inc (pbi, gen_rtx_POST_MODIFY (Pmode, addr,
						    gen_rtx_PLUS (Pmode,
								  addr,
								  inc_val)),
			  insn, x, incr, addr);
      else if (HAVE_PRE_MODIFY_DISP && offset == INTVAL (inc_val))
	attempt_auto_inc (pbi, gen_rtx_PRE_MODIFY (Pmode, addr,
						   gen_rtx_PLUS (Pmode,
								 addr,
								 inc_val)),
			  insn, x, incr, addr);
    }
  else if (REG_P (inc_val)
	   && ! reg_set_between_p (inc_val, PREV_INSN (insn),
				   NEXT_INSN (incr)))

    {
      /* Register increment amount: only the POST_MODIFY form applies.  */
      if (HAVE_POST_MODIFY_REG && offset == 0)
	attempt_auto_inc (pbi, gen_rtx_POST_MODIFY (Pmode, addr,
						    gen_rtx_PLUS (Pmode,
								  addr,
								  inc_val)),
			  insn, x, incr, addr);
    }
}
3688 #endif /* AUTO_INC_DEC */
/* Record a use of register REG (possibly a multi-word hard register) in
   INSN: update the next-use table, register statistics, REG_DEAD notes,
   and the live set.  COND, when nonzero, is the condition under which
   the use occurs (conditional execution); it drives the reg_cond_dead
   splay-tree bookkeeping at the bottom.  */

static void
mark_used_reg (struct propagate_block_info *pbi, rtx reg,
	       rtx cond ATTRIBUTE_UNUSED, rtx insn)
{
  unsigned int regno_first, regno_last, i;
  int some_was_live, some_was_dead, some_not_set;

  /* A hard register may span several consecutive register numbers.  */
  regno_last = regno_first = REGNO (reg);
  if (regno_first < FIRST_PSEUDO_REGISTER)
    regno_last += hard_regno_nregs[regno_first][GET_MODE (reg)] - 1;

  /* Find out if any of this register is live after this instruction.  */
  some_was_live = some_was_dead = 0;
  for (i = regno_first; i <= regno_last; ++i)
    {
      int needed_regno = REGNO_REG_SET_P (pbi->reg_live, i);
      some_was_live |= needed_regno;
      some_was_dead |= ! needed_regno;
    }

  /* Find out if any of the register was set this insn.  */
  some_not_set = 0;
  for (i = regno_first; i <= regno_last; ++i)
    some_not_set |= ! REGNO_REG_SET_P (pbi->new_set, i);

  if (pbi->flags & (PROP_LOG_LINKS | PROP_AUTOINC))
    {
      /* Record where each reg is used, so when the reg is set we know
	 the next insn that uses it.  */
      pbi->reg_next_use[regno_first] = insn;
    }

  if (pbi->flags & PROP_REG_INFO)
    {
      if (regno_first < FIRST_PSEUDO_REGISTER)
	{
	  /* If this is a register we are going to try to eliminate,
	     don't mark it live here.  If we are successful in
	     eliminating it, it need not be live unless it is used for
	     pseudos, in which case it will have been set live when it
	     was allocated to the pseudos.  If the register will not
	     be eliminated, reload will set it live at that point.

	     Otherwise, record that this function uses this register.  */
	  /* ??? The PPC backend tries to "eliminate" on the pic
	     register to itself.  This should be fixed.  In the mean
	     time, hack around it.  */

	  if (! (TEST_HARD_REG_BIT (elim_reg_set, regno_first)
		 && (regno_first == FRAME_POINTER_REGNUM
		     || regno_first == ARG_POINTER_REGNUM)))
	    for (i = regno_first; i <= regno_last; ++i)
	      regs_ever_live[i] = 1;
	}
      else
	{
	  /* Keep track of which basic block each reg appears in.  */

	  int blocknum = pbi->bb->index;
	  if (REG_BASIC_BLOCK (regno_first) == REG_BLOCK_UNKNOWN)
	    REG_BASIC_BLOCK (regno_first) = blocknum;
	  else if (REG_BASIC_BLOCK (regno_first) != blocknum)
	    REG_BASIC_BLOCK (regno_first) = REG_BLOCK_GLOBAL;

	  /* Count (weighted) number of uses of each reg.  */
	  REG_FREQ (regno_first) += REG_FREQ_FROM_BB (pbi->bb);
	  REG_N_REFS (regno_first)++;
	}
      /* A use of a currently-dead register opens a new live range;
	 remember where it began for REG_LIVE_LENGTH accounting.  */
      for (i = regno_first; i <= regno_last; ++i)
	if (! REGNO_REG_SET_P (pbi->reg_live, i))
	  {
	    gcc_assert (!reg_deaths[i]);
	    reg_deaths[i] = pbi->insn_num;
	  }
    }

  /* Record and count the insns in which a reg dies.  If it is used in
     this insn and was dead below the insn then it dies in this insn.
     If it was set in this insn, we do not make a REG_DEAD note;
     likewise if we already made such a note.  */
  if ((pbi->flags & (PROP_DEATH_NOTES | PROP_REG_INFO))
      && some_was_dead
      && some_not_set)
    {
      /* Check for the case where the register dying partially
	 overlaps the register set by this insn.  */
      if (regno_first != regno_last)
	for (i = regno_first; i <= regno_last; ++i)
	  some_was_live |= REGNO_REG_SET_P (pbi->new_set, i);

      /* If none of the words in X is needed, make a REG_DEAD note.
	 Otherwise, we must make partial REG_DEAD notes.  */
      if (! some_was_live)
	{
	  if ((pbi->flags & PROP_DEATH_NOTES)
#ifdef STACK_REGS
	      && (!(pbi->flags & PROP_POST_REGSTACK)
		  || !IN_RANGE (REGNO (reg), FIRST_STACK_REG, LAST_STACK_REG))
#endif
	      && ! find_regno_note (insn, REG_DEAD, regno_first))
	    REG_NOTES (insn)
	      = alloc_EXPR_LIST (REG_DEAD, reg, REG_NOTES (insn));

	  if (pbi->flags & PROP_REG_INFO)
	    REG_N_DEATHS (regno_first)++;
	}
      else
	{
	  /* Don't make a REG_DEAD note for a part of a register
	     that is set in the insn.  */
	  for (i = regno_first; i <= regno_last; ++i)
	    if (! REGNO_REG_SET_P (pbi->reg_live, i)
		&& ! dead_or_set_regno_p (insn, i))
	      REG_NOTES (insn)
		= alloc_EXPR_LIST (REG_DEAD,
				   regno_reg_rtx[i],
				   REG_NOTES (insn));
	}
    }

  /* Mark the register as being live.  */
  for (i = regno_first; i <= regno_last; ++i)
    {
#ifdef HAVE_conditional_execution
      int this_was_live = REGNO_REG_SET_P (pbi->reg_live, i);
#endif

      SET_REGNO_REG_SET (pbi->reg_live, i);

#ifdef HAVE_conditional_execution
      /* If this is a conditional use, record that fact.  If it is later
	 conditionally set, we'll know to kill the register.  */
      if (cond != NULL_RTX)
	{
	  splay_tree_node node;
	  struct reg_cond_life_info *rcli;
	  rtx ncond;

	  if (this_was_live)
	    {
	      node = splay_tree_lookup (pbi->reg_cond_dead, i);
	      if (node == NULL)
		{
		  /* The register was unconditionally live previously.
		     No need to do anything.  */
		}
	      else
		{
		  /* The register was conditionally live previously.
		     Subtract the new life cond from the old death cond.  */
		  rcli = (struct reg_cond_life_info *) node->value;
		  ncond = rcli->condition;
		  ncond = and_reg_cond (ncond, not_reg_cond (cond), 1);

		  /* If the register is now unconditionally live,
		     remove the entry in the splay_tree.  */
		  if (ncond == const0_rtx)
		    splay_tree_remove (pbi->reg_cond_dead, i);
		  else
		    {
		      rcli->condition = ncond;
		      SET_REGNO_REG_SET (pbi->reg_cond_reg,
					 REGNO (XEXP (cond, 0)));
		    }
		}
	    }
	  else
	    {
	      /* The register was not previously live at all.  Record
		 the condition under which it is still dead.  */
	      rcli = xmalloc (sizeof (*rcli));
	      rcli->condition = not_reg_cond (cond);
	      rcli->stores = const0_rtx;
	      rcli->orig_condition = const0_rtx;
	      splay_tree_insert (pbi->reg_cond_dead, i,
				 (splay_tree_value) rcli);

	      SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));
	    }
	}
      else if (this_was_live)
	{
	  /* The register may have been conditionally live previously, but
	     is now unconditionally live.  Remove it from the conditionally
	     dead list, so that a conditional set won't cause us to think
	     it dead.  */
	  splay_tree_remove (pbi->reg_cond_dead, i);
	}
#endif
    }
}
/* Scan expression X and store a 1-bit in NEW_LIVE for each reg it uses.
   This is done assuming the registers needed from X are those that
   have 1-bits in PBI->REG_LIVE.

   INSN is the containing instruction.  If INSN is dead, this function
   is not called.  COND, when nonzero, is the condition from an
   enclosing COND_EXEC under which the uses occur.  */

static void
mark_used_regs (struct propagate_block_info *pbi, rtx x, rtx cond, rtx insn)
{
  RTX_CODE code;
  int regno;
  int flags = pbi->flags;

 retry:
  if (!x)
    return;
  code = GET_CODE (x);
  switch (code)
    {
    /* Constants and address vectors contain no register uses.  */
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_INT:
    case CONST:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case PC:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return;

#ifdef HAVE_cc0
    case CC0:
      pbi->cc0_live = 1;
      return;
#endif

    case CLOBBER:
      /* If we are clobbering a MEM, mark any registers inside the address
	 as being used.  */
      if (MEM_P (XEXP (x, 0)))
	mark_used_regs (pbi, XEXP (XEXP (x, 0), 0), cond, insn);
      return;

    case MEM:
      /* Don't bother watching stores to mems if this is not the
	 final pass.  We'll not be deleting dead stores this round.  */
      if (optimize && (flags & PROP_SCAN_DEAD_STORES))
	{
	  /* Invalidate the data for the last MEM stored, but only if MEM is
	     something that can be stored into.  */
	  if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
	      && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
	    /* Needn't clear the memory set list.  */
	    ;
	  else
	    {
	      /* Walk the remembered-stores list and splice out every
		 entry this load may depend on.  */
	      rtx temp = pbi->mem_set_list;
	      rtx prev = NULL_RTX;
	      rtx next;

	      while (temp)
		{
		  next = XEXP (temp, 1);
		  if (anti_dependence (XEXP (temp, 0), x))
		    {
		      /* Splice temp out of the list.  */
		      if (prev)
			XEXP (prev, 1) = next;
		      else
			pbi->mem_set_list = next;
		      free_EXPR_LIST_node (temp);
		      pbi->mem_set_list_len--;
		    }
		  else
		    prev = temp;
		  temp = next;
		}
	    }

	  /* If the memory reference had embedded side effects (autoincrement
	     address modes.  Then we may need to kill some entries on the
	     memory set list.  */
	  if (insn)
	    for_each_rtx (&PATTERN (insn), invalidate_mems_from_autoinc, pbi);
	}

#ifdef AUTO_INC_DEC
      if (flags & PROP_AUTOINC)
	find_auto_inc (pbi, x, insn);
#endif
      break;

    case SUBREG:
#ifdef CANNOT_CHANGE_MODE_CLASS
      if (flags & PROP_REG_INFO)
	record_subregs_of_mode (x);
#endif

      /* While we're here, optimize this case.  */
      x = SUBREG_REG (x);
      if (!REG_P (x))
	goto retry;
      /* Fall through.  */

    case REG:
      /* See a register other than being set => mark it as needed.  */
      mark_used_reg (pbi, x, cond, insn);
      return;

    case SET:
      {
	rtx testreg = SET_DEST (x);
	int mark_dest = 0;

	/* If storing into MEM, don't show it as being used.  But do
	   show the address as being used.  */
	if (MEM_P (testreg))
	  {
#ifdef AUTO_INC_DEC
	    if (flags & PROP_AUTOINC)
	      find_auto_inc (pbi, testreg, insn);
#endif
	    mark_used_regs (pbi, XEXP (testreg, 0), cond, insn);
	    mark_used_regs (pbi, SET_SRC (x), cond, insn);
	    return;
	  }

	/* Storing in STRICT_LOW_PART is like storing in a reg
	   in that this SET might be dead, so ignore it in TESTREG.
	   but in some other ways it is like using the reg.

	   Storing in a SUBREG or a bit field is like storing the entire
	   register in that if the register's value is not used
	   then this SET is not needed.  */
	while (GET_CODE (testreg) == STRICT_LOW_PART
	       || GET_CODE (testreg) == ZERO_EXTRACT
	       || GET_CODE (testreg) == SUBREG)
	  {
#ifdef CANNOT_CHANGE_MODE_CLASS
	    if ((flags & PROP_REG_INFO) && GET_CODE (testreg) == SUBREG)
	      record_subregs_of_mode (testreg);
#endif

	    /* Modifying a single register in an alternate mode
	       does not use any of the old value.  But these other
	       ways of storing in a register do use the old value.  */
	    if (GET_CODE (testreg) == SUBREG
		&& !((REG_BYTES (SUBREG_REG (testreg))
		      + UNITS_PER_WORD - 1) / UNITS_PER_WORD
		     > (REG_BYTES (testreg)
			+ UNITS_PER_WORD - 1) / UNITS_PER_WORD))
	      ;
	    else
	      mark_dest = 1;

	    testreg = XEXP (testreg, 0);
	  }

	/* If this is a store into a register or group of registers,
	   recursively scan the value being stored.  */

	if ((GET_CODE (testreg) == PARALLEL
	     && GET_MODE (testreg) == BLKmode)
	    || (REG_P (testreg)
		&& (regno = REGNO (testreg),
		    ! (regno == FRAME_POINTER_REGNUM
		       && (! reload_completed || frame_pointer_needed)))
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
		&& ! (regno == HARD_FRAME_POINTER_REGNUM
		      && (! reload_completed || frame_pointer_needed))
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
		&& ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
#endif
		))
	  {
	    if (mark_dest)
	      mark_used_regs (pbi, SET_DEST (x), cond, insn);
	    mark_used_regs (pbi, SET_SRC (x), cond, insn);
	    return;
	  }
      }
      break;

    case ASM_OPERANDS:
    case UNSPEC_VOLATILE:
    case TRAP_IF:
    case ASM_INPUT:
      {
	/* Traditional and volatile asm instructions must be considered to use
	   and clobber all hard registers, all pseudo-registers and all of
	   memory.  So must TRAP_IF and UNSPEC_VOLATILE operations.

	   Consider for instance a volatile asm that changes the fpu rounding
	   mode.  An insn should not be moved across this even if it only uses
	   pseudo-regs because it might give an incorrectly rounded result.

	   ?!? Unfortunately, marking all hard registers as live causes massive
	   problems for the register allocator and marking all pseudos as live
	   creates mountains of uninitialized variable warnings.

	   So for now, just clear the memory set list and mark any regs
	   we can find in ASM_OPERANDS as used.  */
	if (code != ASM_OPERANDS || MEM_VOLATILE_P (x))
	  {
	    free_EXPR_LIST_list (&pbi->mem_set_list);
	    pbi->mem_set_list_len = 0;
	  }

	/* For all ASM_OPERANDS, we must traverse the vector of input operands.
	   We can not just fall through here since then we would be confused
	   by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate
	   traditional asms unlike their normal usage.  */
	if (code == ASM_OPERANDS)
	  {
	    int j;

	    for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
	      mark_used_regs (pbi, ASM_OPERANDS_INPUT (x, j), cond, insn);
	  }
	break;
      }

    case COND_EXEC:
      /* Nested COND_EXECs are not expected here.  */
      gcc_assert (!cond);

      mark_used_regs (pbi, COND_EXEC_TEST (x), NULL_RTX, insn);

      cond = COND_EXEC_TEST (x);
      x = COND_EXEC_CODE (x);
      goto retry;

    default:
      break;
    }

  /* Recursively scan the operands of this expression.  */

  {
    const char * const fmt = GET_RTX_FORMAT (code);
    int i;

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
	if (fmt[i] == 'e')
	  {
	    /* Tail recursive case: save a function call level.  */
	    if (i == 0)
	      {
		x = XEXP (x, 0);
		goto retry;
	      }
	    mark_used_regs (pbi, XEXP (x, i), cond, insn);
	  }
	else if (fmt[i] == 'E')
	  {
	    int j;
	    for (j = 0; j < XVECLEN (x, i); j++)
	      mark_used_regs (pbi, XVECEXP (x, i, j), cond, insn);
	  }
      }
  }
}
4147 #ifdef AUTO_INC_DEC
/* INSN is an increment/decrement of a register (its single_set source
   is presumably a PLUS or MINUS of that register and a CONST_INT — TODO
   confirm against callers).  If the next use of the register is in the
   same basic block, try to fold the increment into that use as a
   pre-inc/pre-dec address and delete INSN.  Returns 1 on success.  */

static int
try_pre_increment_1 (struct propagate_block_info *pbi, rtx insn)
{
  /* Find the next use of this reg.  If in same basic block,
     make it do pre-increment or pre-decrement if appropriate.  */
  rtx x = single_set (insn);
  HOST_WIDE_INT amount = ((GET_CODE (SET_SRC (x)) == PLUS ? 1 : -1)
			  * INTVAL (XEXP (SET_SRC (x), 1)));
  int regno = REGNO (SET_DEST (x));
  rtx y = pbi->reg_next_use[regno];
  if (y != 0
      && SET_DEST (x) != stack_pointer_rtx
      && BLOCK_NUM (y) == BLOCK_NUM (insn)
      /* Don't do this if the reg dies, or gets set in y; a standard addressing
	 mode would be better.  */
      && ! dead_or_set_p (y, SET_DEST (x))
      && try_pre_increment (y, SET_DEST (x), amount))
    {
      /* We have found a suitable auto-increment and already changed
	 insn Y to do it.  So flush this increment instruction.  */
      propagate_block_delete_insn (insn);

      /* Count a reference to this reg for the increment insn we are
	 deleting.  When a reg is incremented, spilling it is worse,
	 so we want to make that less likely.  */
      if (regno >= FIRST_PSEUDO_REGISTER)
	{
	  REG_FREQ (regno) += REG_FREQ_FROM_BB (pbi->bb);
	  REG_N_SETS (regno)++;
	}

      /* Flush any remembered memories depending on the value of
	 the incremented register.  */
      invalidate_mems_from_set (pbi, SET_DEST (x));

      return 1;
    }
  return 0;
}
/* Try to change INSN so that it does pre-increment or pre-decrement
   addressing on register REG in order to add AMOUNT to REG.
   AMOUNT is negative for pre-decrement.
   Returns 1 if the change could be made.
   This checks all about the validity of the result of modifying INSN.  */

static int
try_pre_increment (rtx insn, rtx reg, HOST_WIDE_INT amount)
{
  rtx use;

  /* Nonzero if we can try to make a pre-increment or pre-decrement.
     For example, addl $4,r1; movl (r1),... can become movl +(r1),...  */
  int pre_ok = 0;
  /* Nonzero if we can try to make a post-increment or post-decrement.
     For example, addl $4,r1; movl -4(r1),... can become movl (r1)+,...
     It is possible for both PRE_OK and POST_OK to be nonzero if the machine
     supports both pre-inc and post-inc, or both pre-dec and post-dec.  */
  int post_ok = 0;

  /* Nonzero if the opportunity actually requires post-inc or post-dec.  */
  int do_post = 0;

  /* From the sign of increment, see which possibilities are conceivable
     on this target machine.  */
  if (HAVE_PRE_INCREMENT && amount > 0)
    pre_ok = 1;
  if (HAVE_POST_INCREMENT && amount > 0)
    post_ok = 1;

  if (HAVE_PRE_DECREMENT && amount < 0)
    pre_ok = 1;
  if (HAVE_POST_DECREMENT && amount < 0)
    post_ok = 1;

  if (! (pre_ok || post_ok))
    return 0;

  /* It is not safe to add a side effect to a jump insn
     because if the incremented register is spilled and must be reloaded
     there would be no way to store the incremented value back in memory.  */

  if (JUMP_P (insn))
    return 0;

  /* find_use_as_address returns (rtx) 1 as a sentinel for "used, but
     not usably as an address" — both 0 and 1 mean failure here.  */
  use = 0;
  if (pre_ok)
    use = find_use_as_address (PATTERN (insn), reg, 0);
  if (post_ok && (use == 0 || use == (rtx) (size_t) 1))
    {
      use = find_use_as_address (PATTERN (insn), reg, -amount);
      do_post = 1;
    }

  if (use == 0 || use == (rtx) (size_t) 1)
    return 0;

  /* The access size must match the increment amount exactly.  */
  if (GET_MODE_SIZE (GET_MODE (use)) != (amount > 0 ? amount : - amount))
    return 0;

  /* See if this combination of instruction and addressing mode exists.  */
  if (! validate_change (insn, &XEXP (use, 0),
			 gen_rtx_fmt_e (amount > 0
					? (do_post ? POST_INC : PRE_INC)
					: (do_post ? POST_DEC : PRE_DEC),
					Pmode, reg), 0))
    return 0;

  /* Record that this insn now has an implicit side effect on X.  */
  REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, reg, REG_NOTES (insn));
  return 1;
}
4262 #endif /* AUTO_INC_DEC */
4264 /* Find the place in the rtx X where REG is used as a memory address.
4265 Return the MEM rtx that so uses it.
4266 If PLUSCONST is nonzero, search instead for a memory address equivalent to
4267 (plus REG (const_int PLUSCONST)).
4269 If such an address does not appear, return 0.
4270 If REG appears more than once, or is used other than in such an address,
4271 return (rtx) 1. */
4274 find_use_as_address (rtx x, rtx reg, HOST_WIDE_INT plusconst)
4276 enum rtx_code code = GET_CODE (x);
4277 const char * const fmt = GET_RTX_FORMAT (code);
4278 int i;
4279 rtx value = 0;
4280 rtx tem;
4282 if (code == MEM && XEXP (x, 0) == reg && plusconst == 0)
4283 return x;
4285 if (code == MEM && GET_CODE (XEXP (x, 0)) == PLUS
4286 && XEXP (XEXP (x, 0), 0) == reg
4287 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4288 && INTVAL (XEXP (XEXP (x, 0), 1)) == plusconst)
4289 return x;
4291 if (code == SIGN_EXTRACT || code == ZERO_EXTRACT)
4293 /* If REG occurs inside a MEM used in a bit-field reference,
4294 that is unacceptable. */
4295 if (find_use_as_address (XEXP (x, 0), reg, 0) != 0)
4296 return (rtx) (size_t) 1;
4299 if (x == reg)
4300 return (rtx) (size_t) 1;
4302 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4304 if (fmt[i] == 'e')
4306 tem = find_use_as_address (XEXP (x, i), reg, plusconst);
4307 if (value == 0)
4308 value = tem;
4309 else if (tem != 0)
4310 return (rtx) (size_t) 1;
4312 else if (fmt[i] == 'E')
4314 int j;
4315 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4317 tem = find_use_as_address (XVECEXP (x, i, j), reg, plusconst);
4318 if (value == 0)
4319 value = tem;
4320 else if (tem != 0)
4321 return (rtx) (size_t) 1;
4326 return value;
4329 /* Write information about registers and basic blocks into FILE.
4330 This is part of making a debugging dump. */
4332 void
4333 dump_regset (regset r, FILE *outf)
4335 unsigned i;
4336 reg_set_iterator rsi;
4338 if (r == NULL)
4340 fputs (" (nil)", outf);
4341 return;
4344 EXECUTE_IF_SET_IN_REG_SET (r, 0, i, rsi)
4346 fprintf (outf, " %d", i);
4347 if (i < FIRST_PSEUDO_REGISTER)
4348 fprintf (outf, " [%s]",
4349 reg_names[i]);
4353 /* Print a human-readable representation of R on the standard error
4354 stream. This function is designed to be used from within the
4355 debugger. */
4357 void
4358 debug_regset (regset r)
4360 dump_regset (r, stderr);
4361 putc ('\n', stderr);
4364 /* Recompute register set/reference counts immediately prior to register
4365 allocation.
4367 This avoids problems with set/reference counts changing to/from values
4368 which have special meanings to the register allocators.
4370 Additionally, the reference counts are the primary component used by the
4371 register allocators to prioritize pseudos for allocation to hard regs.
4372 More accurate reference counts generally lead to better register allocation.
4374 It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and
4375 possibly other information which is used by the register allocators. */
4377 void
4378 recompute_reg_usage (void)
4380 allocate_reg_life_data ();
4381 /* distribute_notes in combiner fails to convert some of the
4382 REG_UNUSED notes to REG_DEAD notes. This causes CHECK_DEAD_NOTES
4383 in sched1 to die. To solve this update the DEATH_NOTES
4384 here. */
4385 update_life_info (NULL, UPDATE_LIFE_LOCAL, PROP_REG_INFO | PROP_DEATH_NOTES);
4387 if (dump_file)
4388 dump_flow_info (dump_file);
4391 struct tree_opt_pass pass_recompute_reg_usage =
4393 "life2", /* name */
4394 NULL, /* gate */
4395 recompute_reg_usage, /* execute */
4396 NULL, /* sub */
4397 NULL, /* next */
4398 0, /* static_pass_number */
4399 0, /* tv_id */
4400 0, /* properties_required */
4401 0, /* properties_provided */
4402 0, /* properties_destroyed */
4403 0, /* todo_flags_start */
4404 TODO_dump_func, /* todo_flags_finish */
4405 'f' /* letter */
4408 /* Optionally removes all the REG_DEAD and REG_UNUSED notes from a set of
4409 blocks. If BLOCKS is NULL, assume the universal set. Returns a count
4410 of the number of registers that died.
4411 If KILL is 1, remove old REG_DEAD / REG_UNUSED notes. If it is 0, don't.
4412 if it is -1, remove them unless they pertain to a stack reg. */
4415 count_or_remove_death_notes (sbitmap blocks, int kill)
4417 int count = 0;
4418 unsigned int i = 0;
4419 basic_block bb;
4421 /* This used to be a loop over all the blocks with a membership test
4422 inside the loop. That can be amazingly expensive on a large CFG
4423 when only a small number of bits are set in BLOCKs (for example,
4424 the calls from the scheduler typically have very few bits set).
4426 For extra credit, someone should convert BLOCKS to a bitmap rather
4427 than an sbitmap. */
4428 if (blocks)
4430 sbitmap_iterator sbi;
4432 EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i, sbi)
4434 count += count_or_remove_death_notes_bb (BASIC_BLOCK (i), kill);
4437 else
4439 FOR_EACH_BB (bb)
4441 count += count_or_remove_death_notes_bb (bb, kill);
4445 return count;
4448 /* Optionally removes all the REG_DEAD and REG_UNUSED notes from basic
4449 block BB. Returns a count of the number of registers that died. */
4451 static int
4452 count_or_remove_death_notes_bb (basic_block bb, int kill)
4454 int count = 0;
4455 rtx insn;
4457 for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
4459 if (INSN_P (insn))
4461 rtx *pprev = &REG_NOTES (insn);
4462 rtx link = *pprev;
4464 while (link)
4466 switch (REG_NOTE_KIND (link))
4468 case REG_DEAD:
4469 if (REG_P (XEXP (link, 0)))
4471 rtx reg = XEXP (link, 0);
4472 int n;
4474 if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
4475 n = 1;
4476 else
4477 n = hard_regno_nregs[REGNO (reg)][GET_MODE (reg)];
4478 count += n;
4481 /* Fall through. */
4483 case REG_UNUSED:
4484 if (kill > 0
4485 || (kill
4486 #ifdef STACK_REGS
4487 && (!REG_P (XEXP (link, 0))
4488 || !IN_RANGE (REGNO (XEXP (link, 0)),
4489 FIRST_STACK_REG, LAST_STACK_REG))
4490 #endif
4493 rtx next = XEXP (link, 1);
4494 free_EXPR_LIST_node (link);
4495 *pprev = link = next;
4496 break;
4498 /* Fall through. */
4500 default:
4501 pprev = &XEXP (link, 1);
4502 link = *pprev;
4503 break;
4508 if (insn == BB_END (bb))
4509 break;
4512 return count;
4515 /* Clear LOG_LINKS fields of insns in a selected blocks or whole chain
4516 if blocks is NULL. */
4518 static void
4519 clear_log_links (sbitmap blocks)
4521 rtx insn;
4523 if (!blocks)
4525 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4526 if (INSN_P (insn))
4527 free_INSN_LIST_list (&LOG_LINKS (insn));
4529 else
4531 unsigned int i = 0;
4532 sbitmap_iterator sbi;
4534 EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i, sbi)
4536 basic_block bb = BASIC_BLOCK (i);
4538 for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
4539 insn = NEXT_INSN (insn))
4540 if (INSN_P (insn))
4541 free_INSN_LIST_list (&LOG_LINKS (insn));
4546 /* Given a register bitmap, turn on the bits in a HARD_REG_SET that
4547 correspond to the hard registers, if any, set in that map. This
4548 could be done far more efficiently by having all sorts of special-cases
4549 with moving single words, but probably isn't worth the trouble. */
4551 void
4552 reg_set_to_hard_reg_set (HARD_REG_SET *to, bitmap from)
4554 unsigned i;
4555 bitmap_iterator bi;
4557 EXECUTE_IF_SET_IN_BITMAP (from, 0, i, bi)
4559 if (i >= FIRST_PSEUDO_REGISTER)
4560 return;
4561 SET_HARD_REG_BIT (*to, i);
4566 static bool
4567 gate_remove_death_notes (void)
4569 return flag_profile_values;
4572 static void
4573 rest_of_handle_remove_death_notes (void)
4575 count_or_remove_death_notes (NULL, 1);
4578 struct tree_opt_pass pass_remove_death_notes =
4580 "ednotes", /* name */
4581 gate_remove_death_notes, /* gate */
4582 rest_of_handle_remove_death_notes, /* execute */
4583 NULL, /* sub */
4584 NULL, /* next */
4585 0, /* static_pass_number */
4586 0, /* tv_id */
4587 0, /* properties_required */
4588 0, /* properties_provided */
4589 0, /* properties_destroyed */
4590 0, /* todo_flags_start */
4591 0, /* todo_flags_finish */
4592 0 /* letter */
4595 /* Perform life analysis. */
4596 static void
4597 rest_of_handle_life (void)
4599 regclass_init ();
4601 life_analysis (dump_file, PROP_FINAL);
4602 if (optimize)
4603 cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_UPDATE_LIFE | CLEANUP_LOG_LINKS
4604 | (flag_thread_jumps ? CLEANUP_THREADING : 0));
4606 if (extra_warnings)
4608 setjmp_vars_warning (DECL_INITIAL (current_function_decl));
4609 setjmp_args_warning ();
4612 if (optimize)
4614 if (initialize_uninitialized_subregs ())
4616 /* Insns were inserted, and possibly pseudos created, so
4617 things might look a bit different. */
4618 allocate_reg_life_data ();
4619 update_life_info (NULL, UPDATE_LIFE_GLOBAL_RM_NOTES,
4620 PROP_LOG_LINKS | PROP_REG_INFO | PROP_DEATH_NOTES);
4624 no_new_pseudos = 1;
4627 struct tree_opt_pass pass_life =
4629 "life1", /* name */
4630 NULL, /* gate */
4631 rest_of_handle_life, /* execute */
4632 NULL, /* sub */
4633 NULL, /* next */
4634 0, /* static_pass_number */
4635 TV_FLOW, /* tv_id */
4636 0, /* properties_required */
4637 0, /* properties_provided */
4638 0, /* properties_destroyed */
4639 TODO_verify_flow, /* todo_flags_start */
4640 TODO_dump_func |
4641 TODO_ggc_collect, /* todo_flags_finish */
4642 'f' /* letter */
4645 static void
4646 rest_of_handle_flow2 (void)
4648 /* If optimizing, then go ahead and split insns now. */
4649 #ifndef STACK_REGS
4650 if (optimize > 0)
4651 #endif
4652 split_all_insns (0);
4654 if (flag_branch_target_load_optimize)
4655 branch_target_load_optimize (epilogue_completed);
4657 if (optimize)
4658 cleanup_cfg (CLEANUP_EXPENSIVE);
4660 /* On some machines, the prologue and epilogue code, or parts thereof,
4661 can be represented as RTL. Doing so lets us schedule insns between
4662 it and the rest of the code and also allows delayed branch
4663 scheduling to operate in the epilogue. */
4664 thread_prologue_and_epilogue_insns (get_insns ());
4665 epilogue_completed = 1;
4666 flow2_completed = 1;
4669 struct tree_opt_pass pass_flow2 =
4671 "flow2", /* name */
4672 NULL, /* gate */
4673 rest_of_handle_flow2, /* execute */
4674 NULL, /* sub */
4675 NULL, /* next */
4676 0, /* static_pass_number */
4677 TV_FLOW2, /* tv_id */
4678 0, /* properties_required */
4679 0, /* properties_provided */
4680 0, /* properties_destroyed */
4681 TODO_verify_flow, /* todo_flags_start */
4682 TODO_dump_func |
4683 TODO_ggc_collect, /* todo_flags_finish */
4684 'w' /* letter */