/* Data flow analysis for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

/* This file contains the data flow analysis pass of the compiler.  It
   computes data flow information which tells combine_instructions
   which insns to consider combining and controls register allocation.

   Additional data flow information that is too bulky to record is
   generated during the analysis, and is used at that time to create
   autoincrement and autodecrement addressing.

   The first step is dividing the function into basic blocks.
   find_basic_blocks does this.  Then life_analysis determines
   where each register is live and where it is dead.

   ** find_basic_blocks **

   find_basic_blocks divides the current function's rtl into basic
   blocks and constructs the CFG.  The blocks are recorded in the
   basic_block_info array; the CFG exists in the edge structures
   referenced by the blocks.

   find_basic_blocks also finds any unreachable loops and deletes them.

   ** life_analysis **

   life_analysis is called immediately after find_basic_blocks.
   It uses the basic block information to determine where each
   hard or pseudo register is live.

   ** live-register info **

   The information about where each register is live is in two parts:
   the REG_NOTES of insns, and the vector basic_block->global_live_at_start.

   basic_block->global_live_at_start has an element for each basic
   block, and the element is a bit-vector with a bit for each hard or
   pseudo register.  The bit is 1 if the register is live at the
   beginning of the basic block.

   Two types of elements can be added to an insn's REG_NOTES.
   A REG_DEAD note is added to an insn's REG_NOTES for any register
   that meets both of two conditions:  The value in the register is not
   needed in subsequent insns and the insn does not replace the value in
   the register (in the case of multi-word hard registers, the value in
   each register must be replaced by the insn to avoid a REG_DEAD note).

   In the vast majority of cases, an object in a REG_DEAD note will be
   used somewhere in the insn.  The (rare) exception to this is if an
   insn uses a multi-word hard register and only some of the registers are
   needed in subsequent insns.  In that case, REG_DEAD notes will be
   provided for those hard registers that are not subsequently needed.
   Partial REG_DEAD notes of this type do not occur when an insn sets
   only some of the hard registers used in such a multi-word operand;
   omitting REG_DEAD notes for objects stored in an insn is optional and
   the desire to do so does not justify the complexity of the partial
   REG_DEAD notes.

   REG_UNUSED notes are added for each register that is set by the insn
   but is unused subsequently (if every register set by the insn is unused
   and the insn does not reference memory or have some other side-effect,
   the insn is deleted instead).  If only part of a multi-word hard
   register is used in a subsequent insn, REG_UNUSED notes are made for
   the parts that will not be used.

   To determine which registers are live after any insn, one can
   start from the beginning of the basic block and scan insns, noting
   which registers are set by each insn and which die there.

   ** Other actions of life_analysis **

   life_analysis sets up the LOG_LINKS fields of insns because the
   information needed to do so is readily available.

   life_analysis deletes insns whose only effect is to store a value
   that is never used.

   life_analysis notices cases where a reference to a register as
   a memory address can be combined with a preceding or following
   incrementation or decrementation of the register.  The separate
   instruction to increment or decrement is deleted and the address
   is changed to a POST_INC or similar rtx.

   Each time an incrementing or decrementing address is created,
   a REG_INC element is added to the insn's REG_NOTES list.

   life_analysis fills in certain vectors containing information about
   register usage: REG_N_REFS, REG_N_DEATHS, REG_N_SETS, REG_LIVE_LENGTH,
   REG_N_CALLS_CROSSED and REG_BASIC_BLOCK.

   life_analysis sets current_function_sp_is_unchanging if the function
   doesn't modify the stack pointer.  */
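/* For illustration only (not part of the original commentary): the two
   note kinds described above might appear in an RTL dump roughly as
   sketched below.  The insn uids and pseudo register numbers here are
   hypothetical.

	(insn 10 9 11 (set (reg:SI 60)
			   (plus:SI (reg:SI 58) (reg:SI 59)))
	     (expr_list:REG_DEAD (reg:SI 58)	-- 58 not needed later
	      (expr_list:REG_DEAD (reg:SI 59)	-- 59 not needed later
	       (nil))))

	(insn 12 11 13 (set (reg:SI 61) (const_int 0))
	     (expr_list:REG_UNUSED (reg:SI 61)	-- set here, never used
	      (nil)))  */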
/* TODO:

   Split out from life_analysis:
	- local property discovery (bb->local_live, bb->local_set)
	- global property computation
	- log links creation
	- pre/post modify transformation  */

#include "config.h"
#include "system.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-config.h"
#include "regs.h"
#include "flags.h"
#include "output.h"
#include "function.h"
#include "except.h"
#include "toplev.h"
#include "recog.h"
#include "expr.h"
#include "ssa.h"

#include "obstack.h"
#include "splay-tree.h"

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free

/* EXIT_IGNORE_STACK should be nonzero if, when returning from a function,
   the stack pointer does not matter.  The value is tested only in
   functions that have frame pointers.
   No definition is equivalent to always zero.  */
#ifndef EXIT_IGNORE_STACK
#define EXIT_IGNORE_STACK 0
#endif

#ifndef HAVE_epilogue
#define HAVE_epilogue 0
#endif
#ifndef HAVE_prologue
#define HAVE_prologue 0
#endif
#ifndef HAVE_sibcall_epilogue
#define HAVE_sibcall_epilogue 0
#endif

#ifndef LOCAL_REGNO
#define LOCAL_REGNO(REGNO)  0
#endif
#ifndef EPILOGUE_USES
#define EPILOGUE_USES(REGNO)  0
#endif
/* The obstack on which the flow graph components are allocated.  */

struct obstack flow_obstack;
static char *flow_firstobj;

/* Number of basic blocks in the current function.  */

int n_basic_blocks;

/* Number of edges in the current function.  */

int n_edges;

/* The basic block array.  */

varray_type basic_block_info;

/* The special entry and exit blocks.  */

struct basic_block_def entry_exit_blocks[2]
= {{NULL,                       /* head */
    NULL,                       /* end */
    NULL,                       /* pred */
    NULL,                       /* succ */
    NULL,                       /* local_set */
    NULL,                       /* cond_local_set */
    NULL,                       /* global_live_at_start */
    NULL,                       /* global_live_at_end */
    NULL,                       /* aux */
    ENTRY_BLOCK,                /* index */
    0,                          /* loop_depth */
    0                           /* count */
   },
   {
    NULL,                       /* head */
    NULL,                       /* end */
    NULL,                       /* pred */
    NULL,                       /* succ */
    NULL,                       /* local_set */
    NULL,                       /* cond_local_set */
    NULL,                       /* global_live_at_start */
    NULL,                       /* global_live_at_end */
    NULL,                       /* aux */
    EXIT_BLOCK,                 /* index */
    0,                          /* loop_depth */
    0                           /* count */
   }};

/* Nonzero if the second flow pass has completed.  */
int flow2_completed;

/* Maximum register number used in this function, plus one.  */

int max_regno;

/* Indexed by n, giving various register information.  */

varray_type reg_n_info;

/* Size of a regset for the current function,
   in (1) bytes and (2) elements.  */

int regset_bytes;
int regset_size;

/* Regset of regs live when calls to `setjmp'-like functions happen.  */
/* ??? Does this exist only for the setjmp-clobbered warning message?  */

regset regs_live_at_setjmp;

/* List made of EXPR_LIST rtx's which gives pairs of pseudo registers
   that have to go in the same hard reg.
   The first two regs in the list are a pair, and the next two
   are another pair, etc.  */
rtx regs_may_share;

/* Callback that determines if it's ok for a function to have no
   noreturn attribute.  */
int (*lang_missing_noreturn_ok_p) PARAMS ((tree));

/* Set of registers that may be eliminable.  These are handled specially
   in updating regs_ever_live.  */

static HARD_REG_SET elim_reg_set;

/* The basic block structure for every insn, indexed by uid.  */

varray_type basic_block_for_insn;

/* The labels mentioned in non-jump rtl.  Valid during find_basic_blocks.  */
/* ??? Should probably be using LABEL_NUSES instead.  It would take a
   bit of surgery to be able to use or co-opt the routines in jump.  */

static rtx label_value_list;
static rtx tail_recursion_label_list;

/* Holds information for tracking conditional register life information.  */
struct reg_cond_life_info
{
  /* A boolean expression of conditions under which a register is dead.  */
  rtx condition;
  /* Conditions under which a register is dead at the basic block end.  */
  rtx orig_condition;

  /* A boolean expression of conditions under which a register has been
     stored into.  */
  rtx stores;

  /* ??? Could store mask of bytes that are dead, so that we could finally
     track lifetimes of multi-word registers accessed via subregs.  */
};

/* For use in communicating between propagate_block and its subroutines.
   Holds all information needed to compute life and def-use information.  */

struct propagate_block_info
{
  /* The basic block we're considering.  */
  basic_block bb;

  /* Bit N is set if register N is conditionally or unconditionally live.  */
  regset reg_live;

  /* Bit N is set if register N is set this insn.  */
  regset new_set;

  /* Element N is the next insn that uses (hard or pseudo) register N
     within the current basic block; or zero, if there is no such insn.  */
  rtx *reg_next_use;

  /* Contains a list of all the MEMs we are tracking for dead store
     elimination.  */
  rtx mem_set_list;

  /* If non-null, record the set of registers set unconditionally in the
     basic block.  */
  regset local_set;

  /* If non-null, record the set of registers set conditionally in the
     basic block.  */
  regset cond_local_set;

#ifdef HAVE_conditional_execution
  /* Indexed by register number, holds a reg_cond_life_info for each
     register that is not unconditionally live or dead.  */
  splay_tree reg_cond_dead;

  /* Bit N is set if register N is in an expression in reg_cond_dead.  */
  regset reg_cond_reg;
#endif

  /* The length of mem_set_list.  */
  int mem_set_list_len;

  /* Non-zero if the value of CC0 is live.  */
  int cc0_live;

  /* Flags controlling the set of information propagate_block collects.  */
  int flags;
};

/* Maximum length of pbi->mem_set_list before we start dropping
   new elements on the floor.  */
#define MAX_MEM_SET_LIST_LEN	100
/* Store the data structures necessary for depth-first search.  */
struct depth_first_search_dsS {
  /* stack for backtracking during the algorithm */
  basic_block *stack;

  /* number of edges in the stack.  That is, positions 0, ..., sp-1
     have edges.  */
  unsigned int sp;

  /* record of basic blocks already seen by depth-first search */
  sbitmap visited_blocks;
};
typedef struct depth_first_search_dsS *depth_first_search_ds;
/* Forward declarations */
static int count_basic_blocks PARAMS ((rtx));
static void find_basic_blocks_1 PARAMS ((rtx));
static rtx find_label_refs PARAMS ((rtx, rtx));
static void clear_edges PARAMS ((void));
static void make_edges PARAMS ((rtx));
static void make_label_edge PARAMS ((sbitmap *, basic_block,
				     rtx, int));
static void make_eh_edge PARAMS ((sbitmap *, basic_block, rtx));
static void mark_critical_edges PARAMS ((void));

static void commit_one_edge_insertion PARAMS ((edge));

static void delete_unreachable_blocks PARAMS ((void));
static int can_delete_note_p PARAMS ((rtx));
static void expunge_block PARAMS ((basic_block));
static int can_delete_label_p PARAMS ((rtx));
static int tail_recursion_label_p PARAMS ((rtx));
static int merge_blocks_move_predecessor_nojumps PARAMS ((basic_block,
							  basic_block));
static int merge_blocks_move_successor_nojumps PARAMS ((basic_block,
							basic_block));
static int merge_blocks PARAMS ((edge, basic_block, basic_block));
static void try_merge_blocks PARAMS ((void));
static void tidy_fallthru_edges PARAMS ((void));
static int verify_wide_reg_1 PARAMS ((rtx *, void *));
static void verify_wide_reg PARAMS ((int, rtx, rtx));
static void verify_local_live_at_start PARAMS ((regset, basic_block));
static int set_noop_p PARAMS ((rtx));
static int noop_move_p PARAMS ((rtx));
static void delete_noop_moves PARAMS ((rtx));
static void notice_stack_pointer_modification_1 PARAMS ((rtx, rtx, void *));
static void notice_stack_pointer_modification PARAMS ((rtx));
static void mark_reg PARAMS ((rtx, void *));
static void mark_regs_live_at_end PARAMS ((regset));
static int set_phi_alternative_reg PARAMS ((rtx, int, int, void *));
static void calculate_global_regs_live PARAMS ((sbitmap, sbitmap, int));
static void propagate_block_delete_insn PARAMS ((basic_block, rtx));
static rtx propagate_block_delete_libcall PARAMS ((basic_block, rtx, rtx));
static int insn_dead_p PARAMS ((struct propagate_block_info *,
				rtx, int, rtx));
static int libcall_dead_p PARAMS ((struct propagate_block_info *,
				   rtx, rtx));
static void mark_set_regs PARAMS ((struct propagate_block_info *,
				   rtx, rtx));
static void mark_set_1 PARAMS ((struct propagate_block_info *,
				enum rtx_code, rtx, rtx,
				rtx, int));
#ifdef HAVE_conditional_execution
static int mark_regno_cond_dead PARAMS ((struct propagate_block_info *,
					 int, rtx));
static void free_reg_cond_life_info PARAMS ((splay_tree_value));
static int flush_reg_cond_reg_1 PARAMS ((splay_tree_node, void *));
static void flush_reg_cond_reg PARAMS ((struct propagate_block_info *,
					int));
static rtx elim_reg_cond PARAMS ((rtx, unsigned int));
static rtx ior_reg_cond PARAMS ((rtx, rtx, int));
static rtx not_reg_cond PARAMS ((rtx));
static rtx and_reg_cond PARAMS ((rtx, rtx, int));
#endif
#ifdef AUTO_INC_DEC
static void attempt_auto_inc PARAMS ((struct propagate_block_info *,
				      rtx, rtx, rtx, rtx, rtx));
static void find_auto_inc PARAMS ((struct propagate_block_info *,
				   rtx, rtx));
static int try_pre_increment_1 PARAMS ((struct propagate_block_info *,
					rtx));
static int try_pre_increment PARAMS ((rtx, rtx, HOST_WIDE_INT));
#endif
static void mark_used_reg PARAMS ((struct propagate_block_info *,
				   rtx, rtx, rtx));
static void mark_used_regs PARAMS ((struct propagate_block_info *,
				    rtx, rtx, rtx));
void dump_flow_info PARAMS ((FILE *));
void debug_flow_info PARAMS ((void));
static void dump_edge_info PARAMS ((FILE *, edge, int));
static void print_rtl_and_abort PARAMS ((void));

static void invalidate_mems_from_autoinc PARAMS ((struct propagate_block_info *,
						  rtx));
static void invalidate_mems_from_set PARAMS ((struct propagate_block_info *,
					      rtx));
static void remove_fake_successors PARAMS ((basic_block));
static void flow_nodes_print PARAMS ((const char *, const sbitmap,
				      FILE *));
static void flow_edge_list_print PARAMS ((const char *, const edge *,
					  int, FILE *));
static void flow_loops_cfg_dump PARAMS ((const struct loops *,
					 FILE *));
static int flow_loop_nested_p PARAMS ((struct loop *,
				       struct loop *));
static int flow_loop_entry_edges_find PARAMS ((basic_block, const sbitmap,
					       edge **));
static int flow_loop_exit_edges_find PARAMS ((const sbitmap, edge **));
static int flow_loop_nodes_find PARAMS ((basic_block, basic_block, sbitmap));
static int flow_depth_first_order_compute PARAMS ((int *, int *));
static void flow_dfs_compute_reverse_init
  PARAMS ((depth_first_search_ds));
static void flow_dfs_compute_reverse_add_bb
  PARAMS ((depth_first_search_ds, basic_block));
static basic_block flow_dfs_compute_reverse_execute
  PARAMS ((depth_first_search_ds));
static void flow_dfs_compute_reverse_finish
  PARAMS ((depth_first_search_ds));
static void flow_loop_pre_header_scan PARAMS ((struct loop *));
static basic_block flow_loop_pre_header_find PARAMS ((basic_block,
						      const sbitmap *));
static void flow_loop_tree_node_add PARAMS ((struct loop *, struct loop *));
static void flow_loops_tree_build PARAMS ((struct loops *));
static int flow_loop_level_compute PARAMS ((struct loop *, int));
static int flow_loops_level_compute PARAMS ((struct loops *));
static void allocate_bb_life_data PARAMS ((void));
/* Find basic blocks of the current function.
   F is the first insn of the function and NREGS the number of register
   numbers in use.  */

void
find_basic_blocks (f, nregs, file)
     rtx f;
     int nregs ATTRIBUTE_UNUSED;
     FILE *file ATTRIBUTE_UNUSED;
{
  int max_uid;

  /* Flush out existing data.  */
  if (basic_block_info != NULL)
    {
      int i;

      clear_edges ();

      /* Clear bb->aux on all extant basic blocks.  We'll use this as a
	 tag for reuse during create_basic_block, just in case some pass
	 copies around basic block notes improperly.  */
      for (i = 0; i < n_basic_blocks; ++i)
	BASIC_BLOCK (i)->aux = NULL;

      VARRAY_FREE (basic_block_info);
    }

  n_basic_blocks = count_basic_blocks (f);

  /* Size the basic block table.  The actual structures will be allocated
     by find_basic_blocks_1, since we want to keep the structure pointers
     stable across calls to find_basic_blocks.  */
  /* ??? This whole issue would be much simpler if we called find_basic_blocks
     exactly once, and thereafter we don't have a single long chain of
     instructions at all until close to the end of compilation when we
     actually lay them out.  */

  VARRAY_BB_INIT (basic_block_info, n_basic_blocks, "basic_block_info");

  find_basic_blocks_1 (f);

  /* Record the block to which an insn belongs.  */
  /* ??? This should be done another way, by which (perhaps) a label is
     tagged directly with the basic block that it starts.  It is used for
     more than that currently, but IMO that is the only valid use.  */

  max_uid = get_max_uid ();
#ifdef AUTO_INC_DEC
  /* Leave space for insns life_analysis makes in some cases for auto-inc.
     These cases are rare, so we don't need too much space.  */
  max_uid += max_uid / 10;
#endif

  compute_bb_for_insn (max_uid);

  /* Discover the edges of our cfg.  */
  make_edges (label_value_list);

  /* Do very simple cleanup now, for the benefit of code that runs between
     here and cleanup_cfg, e.g. thread_prologue_and_epilogue_insns.  */
  tidy_fallthru_edges ();

  mark_critical_edges ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
}
void
check_function_return_warnings ()
{
  if (warn_missing_noreturn
      && !TREE_THIS_VOLATILE (cfun->decl)
      && EXIT_BLOCK_PTR->pred == NULL
      && (lang_missing_noreturn_ok_p
	  && !lang_missing_noreturn_ok_p (cfun->decl)))
    warning ("function might be possible candidate for attribute `noreturn'");

  /* If we have a path to EXIT, then we do return.  */
  if (TREE_THIS_VOLATILE (cfun->decl)
      && EXIT_BLOCK_PTR->pred != NULL)
    warning ("`noreturn' function does return");

  /* If the clobber_return_insn appears in some basic block, then we
     do reach the end without returning a value.  */
  else if (warn_return_type
	   && cfun->x_clobber_return_insn != NULL
	   && EXIT_BLOCK_PTR->pred != NULL)
    {
      int max_uid = get_max_uid ();

      /* If clobber_return_insn was excised by jump1, then renumber_insns
	 can make max_uid smaller than the number still recorded in our rtx.
	 That's fine, since this is a quick way of verifying that the insn
	 is no longer in the chain.  */
      if (INSN_UID (cfun->x_clobber_return_insn) < max_uid)
	{
	  /* Recompute insn->block mapping, since the initial mapping is
	     set before we delete unreachable blocks.  */
	  compute_bb_for_insn (max_uid);

	  if (BLOCK_FOR_INSN (cfun->x_clobber_return_insn) != NULL)
	    warning ("control reaches end of non-void function");
	}
    }
}
/* Count the basic blocks of the function.  */

static int
count_basic_blocks (f)
     rtx f;
{
  register rtx insn;
  register RTX_CODE prev_code;
  register int count = 0;
  int saw_abnormal_edge = 0;

  prev_code = JUMP_INSN;
  for (insn = f; insn; insn = NEXT_INSN (insn))
    {
      enum rtx_code code = GET_CODE (insn);

      if (code == CODE_LABEL
	  || (GET_RTX_CLASS (code) == 'i'
	      && (prev_code == JUMP_INSN
		  || prev_code == BARRIER
		  || saw_abnormal_edge)))
	{
	  saw_abnormal_edge = 0;
	  count++;
	}

      /* Record whether this insn created an edge.  */
      if (code == CALL_INSN)
	{
	  rtx note;

	  /* If there is a nonlocal goto label and the specified
	     region number isn't -1, we have an edge.  */
	  if (nonlocal_goto_handler_labels
	      && ((note = find_reg_note (insn, REG_EH_REGION, NULL_RTX)) == 0
		  || INTVAL (XEXP (note, 0)) >= 0))
	    saw_abnormal_edge = 1;

	  else if (can_throw_internal (insn))
	    saw_abnormal_edge = 1;
	}
      else if (flag_non_call_exceptions
	       && code == INSN
	       && can_throw_internal (insn))
	saw_abnormal_edge = 1;

      if (code != NOTE)
	prev_code = code;
    }

  /* The rest of the compiler works a bit more smoothly when we don't have
     to check for the edge case of do-nothing functions with no basic
     blocks.  */
  if (count == 0)
    {
      emit_insn (gen_rtx_USE (VOIDmode, const0_rtx));
      count = 1;
    }

  return count;
}
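/* Purely illustrative (not in the original source): for an insn stream
   shaped like

	CODE_LABEL L1		starts block 0
	INSN   r1 <- r2 + r3
	JUMP_INSN  goto L2	ends block 0
	CODE_LABEL L2		starts block 1
	INSN   use r1

   count_basic_blocks returns 2.  The count bumps at each label, and at
   the first 'i'-class insn following a jump, a barrier, or an insn that
   created an abnormal edge.  */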
/* Scan a list of insns for labels referred to other than by jumps.
   This is used to scan the alternatives of a call placeholder.  */
static rtx
find_label_refs (f, lvl)
     rtx f;
     rtx lvl;
{
  rtx insn;

  for (insn = f; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && GET_CODE (insn) != JUMP_INSN)
      {
	rtx note;

	/* Make a list of all labels referred to other than by jumps
	   (which just don't have the REG_LABEL notes).

	   Make a special exception for labels followed by an ADDR*VEC,
	   as this would be a part of the tablejump setup code.

	   Make a special exception to registers loaded with label
	   values just before jump insns that use them.  */

	for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	  if (REG_NOTE_KIND (note) == REG_LABEL)
	    {
	      rtx lab = XEXP (note, 0), next;

	      if ((next = next_nonnote_insn (lab)) != NULL
		  && GET_CODE (next) == JUMP_INSN
		  && (GET_CODE (PATTERN (next)) == ADDR_VEC
		      || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
		;
	      else if (GET_CODE (lab) == NOTE)
		;
	      else if (GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
		       && find_reg_note (NEXT_INSN (insn), REG_LABEL, lab))
		;
	      else
		lvl = alloc_EXPR_LIST (0, XEXP (note, 0), lvl);
	    }
      }

  return lvl;
}
/* Find all basic blocks of the function whose first insn is F.

   Collect and return a list of labels whose addresses are taken.  This
   will be used in make_edges for use with computed gotos.  */

static void
find_basic_blocks_1 (f)
     rtx f;
{
  register rtx insn, next;
  int i = 0;
  rtx bb_note = NULL_RTX;
  rtx lvl = NULL_RTX;
  rtx trll = NULL_RTX;
  rtx head = NULL_RTX;
  rtx end = NULL_RTX;

  /* We process the instructions in a slightly different way than we did
     previously.  This is so that we see a NOTE_BASIC_BLOCK after we have
     closed out the previous block, so that it gets attached at the proper
     place.  Since this form should be equivalent to the previous,
     count_basic_blocks continues to use the old form as a check.  */

  for (insn = f; insn; insn = next)
    {
      enum rtx_code code = GET_CODE (insn);

      next = NEXT_INSN (insn);

      switch (code)
	{
	case NOTE:
	  {
	    int kind = NOTE_LINE_NUMBER (insn);

	    /* Look for basic block notes with which to keep the
	       basic_block_info pointers stable.  Unthread the note now;
	       we'll put it back at the right place in create_basic_block.
	       Or not at all if we've already found a note in this block.  */
	    if (kind == NOTE_INSN_BASIC_BLOCK)
	      {
		if (bb_note == NULL_RTX)
		  bb_note = insn;
		else
		  next = flow_delete_insn (insn);
	      }
	    break;
	  }

	case CODE_LABEL:
	  /* A basic block starts at a label.  If we've closed one off due
	     to a barrier or some such, no need to do it again.  */
	  if (head != NULL_RTX)
	    {
	      /* While we now have edge lists with which other portions of
		 the compiler might determine a call ending a basic block
		 does not imply an abnormal edge, it will be a bit before
		 everything can be updated.  So continue to emit a noop at
		 the end of such a block.  */
	      if (GET_CODE (end) == CALL_INSN && ! SIBLING_CALL_P (end))
		{
		  rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
		  end = emit_insn_after (nop, end);
		}

	      create_basic_block (i++, head, end, bb_note);
	      bb_note = NULL_RTX;
	    }

	  head = end = insn;
	  break;

	case JUMP_INSN:
	  /* A basic block ends at a jump.  */
	  if (head == NULL_RTX)
	    head = insn;
	  else
	    {
	      /* ??? Make a special check for table jumps.  The way this
		 happens is truly and amazingly gross.  We are about to
		 create a basic block that contains just a code label and
		 an addr*vec jump insn.  Worse, an addr_diff_vec creates
		 its own natural loop.

		 Prevent this bit of brain damage, pasting things together
		 correctly in make_edges.

		 The correct solution involves emitting the table directly
		 on the tablejump instruction as a note, or JUMP_LABEL.  */

	      if (GET_CODE (PATTERN (insn)) == ADDR_VEC
		  || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
		{
		  head = end = NULL;
		  n_basic_blocks--;
		  break;
		}
	    }
	  end = insn;
	  goto new_bb_inclusive;

	case BARRIER:
	  /* A basic block ends at a barrier.  It may be that an unconditional
	     jump already closed the basic block -- no need to do it again.  */
	  if (head == NULL_RTX)
	    break;

	  /* While we now have edge lists with which other portions of the
	     compiler might determine a call ending a basic block does not
	     imply an abnormal edge, it will be a bit before everything can
	     be updated.  So continue to emit a noop at the end of such a
	     block.  */
	  if (GET_CODE (end) == CALL_INSN && ! SIBLING_CALL_P (end))
	    {
	      rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
	      end = emit_insn_after (nop, end);
	    }
	  goto new_bb_exclusive;

	case CALL_INSN:
	  {
	    /* Record whether this call created an edge.  */
	    rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
	    int region = (note ? INTVAL (XEXP (note, 0)) : 0);

	    if (GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
	      {
		/* Scan each of the alternatives for label refs.  */
		lvl = find_label_refs (XEXP (PATTERN (insn), 0), lvl);
		lvl = find_label_refs (XEXP (PATTERN (insn), 1), lvl);
		lvl = find_label_refs (XEXP (PATTERN (insn), 2), lvl);
		/* Record its tail recursion label, if any.  */
		if (XEXP (PATTERN (insn), 3) != NULL_RTX)
		  trll = alloc_EXPR_LIST (0, XEXP (PATTERN (insn), 3), trll);
	      }

	    /* A basic block ends at a call that can either throw or
	       do a non-local goto.  */
	    if ((nonlocal_goto_handler_labels && region >= 0)
		|| can_throw_internal (insn))
	      {
	      new_bb_inclusive:
		if (head == NULL_RTX)
		  head = insn;
		end = insn;

	      new_bb_exclusive:
		create_basic_block (i++, head, end, bb_note);
		head = end = NULL_RTX;
		bb_note = NULL_RTX;
		break;
	      }
	  }
	  /* Fall through.  */

	case INSN:
	  /* Non-call exceptions generate new blocks just like calls.  */
	  if (flag_non_call_exceptions && can_throw_internal (insn))
	    goto new_bb_inclusive;

	  if (head == NULL_RTX)
	    head = insn;
	  end = insn;
	  break;

	default:
	  abort ();
	}

      if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
	{
	  rtx note;

	  /* Make a list of all labels referred to other than by jumps.

	     Make a special exception for labels followed by an ADDR*VEC,
	     as this would be a part of the tablejump setup code.

	     Make a special exception to registers loaded with label
	     values just before jump insns that use them.  */

	  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	    if (REG_NOTE_KIND (note) == REG_LABEL)
	      {
		rtx lab = XEXP (note, 0), next;

		if ((next = next_nonnote_insn (lab)) != NULL
		    && GET_CODE (next) == JUMP_INSN
		    && (GET_CODE (PATTERN (next)) == ADDR_VEC
			|| GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
		  ;
		else if (GET_CODE (lab) == NOTE)
		  ;
		else if (GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
			 && find_reg_note (NEXT_INSN (insn), REG_LABEL, lab))
		  ;
		else
		  lvl = alloc_EXPR_LIST (0, XEXP (note, 0), lvl);
	      }
	}
    }

  if (head != NULL_RTX)
    create_basic_block (i++, head, end, bb_note);
  else if (bb_note)
    flow_delete_insn (bb_note);

  if (i != n_basic_blocks)
    abort ();

  label_value_list = lvl;
  tail_recursion_label_list = trll;
}
/* Tidy the CFG by deleting unreachable code and whatnot.  */

void
cleanup_cfg ()
{
  delete_unreachable_blocks ();
  try_merge_blocks ();
  mark_critical_edges ();

  /* Kill the data we won't maintain.  */
  free_EXPR_LIST_list (&label_value_list);
  free_EXPR_LIST_list (&tail_recursion_label_list);
}
/* Create a new basic block consisting of the instructions between
   HEAD and END inclusive.  Reuses the note and basic block struct
   in BB_NOTE, if any.  */

void
create_basic_block (index, head, end, bb_note)
     int index;
     rtx head, end, bb_note;
{
  basic_block bb;

  if (bb_note
      && ! RTX_INTEGRATED_P (bb_note)
      && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
      && bb->aux == NULL)
    {
      /* If we found an existing note, thread it back onto the chain.  */

      rtx after;

      if (GET_CODE (head) == CODE_LABEL)
	after = head;
      else
	{
	  after = PREV_INSN (head);
	  head = bb_note;
	}

      if (after != bb_note && NEXT_INSN (after) != bb_note)
	reorder_insns (bb_note, bb_note, after);
    }
  else
    {
      /* Otherwise we must create a note and a basic block structure.
	 Since we allow basic block structs in rtl, give the struct
	 the same lifetime by allocating it off the function obstack
	 rather than using malloc.  */

      bb = (basic_block) obstack_alloc (&flow_obstack, sizeof (*bb));
      memset (bb, 0, sizeof (*bb));

      if (GET_CODE (head) == CODE_LABEL)
	bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
      else
	{
	  bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
	  head = bb_note;
	}
      NOTE_BASIC_BLOCK (bb_note) = bb;
    }

  /* Always include the bb note in the block.  */
  if (NEXT_INSN (end) == bb_note)
    end = bb_note;

  bb->head = head;
  bb->end = end;
  bb->index = index;
  BASIC_BLOCK (index) = bb;

  /* Tag the block so that we know it has been used when considering
     other basic block notes.  */
  bb->aux = bb;
}
/* Records the basic block struct in BB_FOR_INSN, for every instruction
   indexed by INSN_UID.  MAX is the size of the array.  */

void
compute_bb_for_insn (max)
     int max;
{
  int i;

  if (basic_block_for_insn)
    VARRAY_FREE (basic_block_for_insn);
  VARRAY_BB_INIT (basic_block_for_insn, max, "basic_block_for_insn");

  for (i = 0; i < n_basic_blocks; ++i)
    {
      basic_block bb = BASIC_BLOCK (i);
      rtx insn, end;

      end = bb->end;
      insn = bb->head;
      while (1)
	{
	  int uid = INSN_UID (insn);
	  if (uid < max)
	    VARRAY_BB (basic_block_for_insn, uid) = bb;
	  if (insn == end)
	    break;
	  insn = NEXT_INSN (insn);
	}
    }
}
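/* Illustrative only (not in the original source): once the map is built,
   the block containing an insn can be fetched through BLOCK_FOR_INSN,
   as check_function_return_warnings does above.  SOME_INSN here is a
   hypothetical rtx.  */
#if 0
basic_block bb = BLOCK_FOR_INSN (some_insn);	/* NULL if not in a block.  */
#endif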
/* Free the memory associated with the edge structures.  */

static void
clear_edges ()
{
  int i;
  edge n, e;

  for (i = 0; i < n_basic_blocks; ++i)
    {
      basic_block bb = BASIC_BLOCK (i);

      for (e = bb->succ; e; e = n)
	{
	  n = e->succ_next;
	  free (e);
	}

      bb->succ = 0;
      bb->pred = 0;
    }

  for (e = ENTRY_BLOCK_PTR->succ; e; e = n)
    {
      n = e->succ_next;
      free (e);
    }

  ENTRY_BLOCK_PTR->succ = 0;
  EXIT_BLOCK_PTR->pred = 0;

  n_edges = 0;
}
/* Identify the edges between basic blocks.

   NONLOCAL_LABEL_LIST is a list of non-local labels in the function.  Blocks
   that are otherwise unreachable may be reachable with a non-local goto.

   BB_EH_END is an array indexed by basic block number in which we record
   the list of exception regions active at the end of the basic block.  */

static void
make_edges (label_value_list)
     rtx label_value_list;
{
  int i;
  sbitmap *edge_cache = NULL;

  /* Assume no computed jump; revise as we create edges.  */
  current_function_has_computed_jump = 0;

  /* Heavy use of computed goto in machine-generated code can lead to
     nearly fully-connected CFGs.  In that case we spend a significant
     amount of time searching the edge lists for duplicates.  */
  if (forced_labels || label_value_list)
    {
      edge_cache = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
      sbitmap_vector_zero (edge_cache, n_basic_blocks);
    }

  /* By nature of the way these get numbered, block 0 is always the entry.  */
  make_edge (edge_cache, ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU);

  for (i = 0; i < n_basic_blocks; ++i)
    {
      basic_block bb = BASIC_BLOCK (i);
      rtx insn, x;
      enum rtx_code code;
      int force_fallthru = 0;

      /* Examine the last instruction of the block, and discover the
	 ways we can leave the block.  */

      insn = bb->end;
      code = GET_CODE (insn);

      /* A branch.  */
      if (code == JUMP_INSN)
	{
	  rtx tmp;

	  /* Recognize exception handling placeholders.  */
	  if (GET_CODE (PATTERN (insn)) == RESX)
	    make_eh_edge (edge_cache, bb, insn);

	  /* Recognize a non-local goto as a branch outside the
	     current function.  */
	  else if (find_reg_note (insn, REG_NON_LOCAL_GOTO, NULL_RTX))
	    ;

	  /* ??? Recognize a tablejump and do the right thing.  */
	  else if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
		   && (tmp = NEXT_INSN (tmp)) != NULL_RTX
		   && GET_CODE (tmp) == JUMP_INSN
		   && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
		       || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
	    {
	      rtvec vec;
	      int j;

	      if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
		vec = XVEC (PATTERN (tmp), 0);
	      else
		vec = XVEC (PATTERN (tmp), 1);

	      for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
		make_label_edge (edge_cache, bb,
				 XEXP (RTVEC_ELT (vec, j), 0), 0);

	      /* Some targets (e.g., ARM) emit a conditional jump that also
		 contains the out-of-range target.  Scan for these and
		 add an edge if necessary.  */
	      if ((tmp = single_set (insn)) != NULL
		  && SET_DEST (tmp) == pc_rtx
		  && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
		  && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
		make_label_edge (edge_cache, bb,
				 XEXP (XEXP (SET_SRC (tmp), 2), 0), 0);

#ifdef CASE_DROPS_THROUGH
	      /* Silly VAXen.  The ADDR_VEC is going to be in the way of
		 us naturally detecting fallthru into the next block.  */
	      force_fallthru = 1;
#endif
	    }

	  /* If this is a computed jump, then mark it as reaching
	     everything on the label_value_list and forced_labels list.  */
	  else if (computed_jump_p (insn))
	    {
	      current_function_has_computed_jump = 1;

	      for (x = label_value_list; x; x = XEXP (x, 1))
		make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);

	      for (x = forced_labels; x; x = XEXP (x, 1))
		make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
	    }

	  /* A return insn creates an edge out to the exit block.  */
	  else if (returnjump_p (insn))
	    make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);

	  /* Otherwise, we have a plain conditional or unconditional jump.  */
	  else
	    {
	      if (! JUMP_LABEL (insn))
		abort ();
	      make_label_edge (edge_cache, bb, JUMP_LABEL (insn), 0);
	    }
	}

      /* If this is a sibling call insn, then this is in effect a
	 combined call and return, and so we need an edge to the
	 exit block.  No need to worry about EH edges, since we
	 wouldn't have created the sibling call in the first place.  */

      if (code == CALL_INSN && SIBLING_CALL_P (insn))
	make_edge (edge_cache, bb, EXIT_BLOCK_PTR,
		   EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);

      /* If this is a CALL_INSN, then mark it as reaching the active EH
	 handler for this CALL_INSN.  If we're handling non-call
	 exceptions then any insn can reach any of the active handlers.

	 Also mark the CALL_INSN as reaching any nonlocal goto handler.  */

      else if (code == CALL_INSN || flag_non_call_exceptions)
	{
	  /* Add any appropriate EH edges.  */
	  make_eh_edge (edge_cache, bb, insn);

	  if (code == CALL_INSN && nonlocal_goto_handler_labels)
	    {
	      /* ??? This could be made smarter: in some cases it's possible
		 to tell that certain calls will not do a nonlocal goto.

		 For example, if the nested functions that do the nonlocal
		 gotos do not have their addresses taken, then only calls to
		 those functions or to other nested functions that use them
		 could possibly do nonlocal gotos.  */
	      /* We do know that a REG_EH_REGION note with a value less
		 than 0 is guaranteed not to perform a non-local goto.  */
	      rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
	      if (!note || INTVAL (XEXP (note, 0)) >= 0)
		for (x = nonlocal_goto_handler_labels; x; x = XEXP (x, 1))
		  make_label_edge (edge_cache, bb, XEXP (x, 0),
				   EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
	    }
	}

      /* Find out if we can drop through to the next block.  */
      insn = next_nonnote_insn (insn);
      if (!insn || (i + 1 == n_basic_blocks && force_fallthru))
	make_edge (edge_cache, bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
      else if (i + 1 < n_basic_blocks)
	{
	  rtx tmp = BLOCK_HEAD (i + 1);
	  if (GET_CODE (tmp) == NOTE)
	    tmp = next_nonnote_insn (tmp);
	  if (force_fallthru || insn == tmp)
	    make_edge (edge_cache, bb, BASIC_BLOCK (i + 1), EDGE_FALLTHRU);
	}
    }

  if (edge_cache)
    sbitmap_vector_free (edge_cache);
}
/* Create an edge between two basic blocks.  FLAGS are auxiliary information
   about the edge that is accumulated between calls.  */

void
make_edge (edge_cache, src, dst, flags)
     sbitmap *edge_cache;
     basic_block src, dst;
     int flags;
{
  int use_edge_cache;
  edge e;

  /* Don't bother with edge cache for ENTRY or EXIT; there aren't that
     many edges to them, and we didn't allocate memory for it.  */
  use_edge_cache = (edge_cache
		    && src != ENTRY_BLOCK_PTR
		    && dst != EXIT_BLOCK_PTR);

  /* Make sure we don't add duplicate edges.  */
  switch (use_edge_cache)
    {
    default:
      /* Quick test for non-existence of the edge.  */
      if (! TEST_BIT (edge_cache[src->index], dst->index))
	break;

      /* The edge exists; early exit if no work to do.  */
      if (flags == 0)
	return;

      /* FALLTHRU */
    case 0:
      for (e = src->succ; e; e = e->succ_next)
	if (e->dest == dst)
	  {
	    e->flags |= flags;
	    return;
	  }
      break;
    }

  e = (edge) xcalloc (1, sizeof (*e));
  n_edges++;

  e->succ_next = src->succ;
  e->pred_next = dst->pred;
  e->src = src;
  e->dest = dst;
  e->flags = flags;

  src->succ = e;
  dst->pred = e;

  if (use_edge_cache)
    SET_BIT (edge_cache[src->index], dst->index);
}
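/* Illustrative only (not in the original source): the common call shape,
   as used by make_edges above -- connect BB to the textually following
   block with a fallthru edge.  EDGE_CACHE may be null when no computed
   gotos are present.  */
#if 0
make_edge (edge_cache, bb, BASIC_BLOCK (bb->index + 1), EDGE_FALLTHRU);
#endif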
/* Create an edge from a basic block to a label.  */

static void
make_label_edge (edge_cache, src, label, flags)
     sbitmap *edge_cache;
     basic_block src;
     rtx label;
     int flags;
{
  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If the label was never emitted, this insn is junk, but avoid a
     crash trying to refer to BLOCK_FOR_INSN (label).  This can happen
     as a result of a syntax error and a diagnostic has already been
     printed.  */

  if (INSN_UID (label) == 0)
    return;

  make_edge (edge_cache, src, BLOCK_FOR_INSN (label), flags);
}
/* Create the edges generated by INSN in REGION.  */

static void
make_eh_edge (edge_cache, src, insn)
     sbitmap *edge_cache;
     basic_block src;
     rtx insn;
{
  int is_call = (GET_CODE (insn) == CALL_INSN ? EDGE_ABNORMAL_CALL : 0);
  rtx handlers, i;

  handlers = reachable_handlers (insn);

  for (i = handlers; i; i = XEXP (i, 1))
    make_label_edge (edge_cache, src, XEXP (i, 0),
		     EDGE_ABNORMAL | EDGE_EH | is_call);

  free_INSN_LIST_list (&handlers);
}
/* Identify critical edges and set the bits appropriately.  */

static void
mark_critical_edges ()
{
  int i, n = n_basic_blocks;
  basic_block bb;

  /* We begin with the entry block.  This is not terribly important now,
     but could be if a front end (Fortran) implemented alternate entry
     points.  */
  bb = ENTRY_BLOCK_PTR;
  i = -1;

  while (1)
    {
      edge e;

      /* (1) Critical edges must have a source with multiple successors.  */
      if (bb->succ && bb->succ->succ_next)
	{
	  for (e = bb->succ; e; e = e->succ_next)
	    {
	      /* (2) Critical edges must have a destination with multiple
		 predecessors.  Note that we know there is at least one
		 predecessor -- the edge we followed to get here.  */
	      if (e->dest->pred->pred_next)
		e->flags |= EDGE_CRITICAL;
	      else
		e->flags &= ~EDGE_CRITICAL;
	    }
	}
      else
	{
	  for (e = bb->succ; e; e = e->succ_next)
	    e->flags &= ~EDGE_CRITICAL;
	}

      if (++i >= n)
	break;
      bb = BASIC_BLOCK (i);
    }
}
/* Split a block BB after insn INSN creating a new fallthru edge.
   Return the new edge.  Note that to keep other parts of the compiler happy,
   this function renumbers all the basic blocks so that the new
   one has a number one greater than the block split.  */

edge
split_block (bb, insn)
     basic_block bb;
     rtx insn;
{
  basic_block new_bb;
  edge new_edge;
  edge e;
  rtx bb_note;
  int i, j;

  /* There is no point splitting the block after its end.  */
  if (bb->end == insn)
    return 0;

  /* Create the new structures.  */
  new_bb = (basic_block) obstack_alloc (&flow_obstack, sizeof (*new_bb));
  new_edge = (edge) xcalloc (1, sizeof (*new_edge));
  n_edges++;

  memset (new_bb, 0, sizeof (*new_bb));

  new_bb->head = NEXT_INSN (insn);
  new_bb->end = bb->end;
  bb->end = insn;

  new_bb->succ = bb->succ;
  bb->succ = new_edge;
  new_bb->pred = new_edge;
  new_bb->count = bb->count;
  new_bb->loop_depth = bb->loop_depth;

  new_edge->src = bb;
  new_edge->dest = new_bb;
  new_edge->flags = EDGE_FALLTHRU;
  new_edge->probability = REG_BR_PROB_BASE;
  new_edge->count = bb->count;

  /* Redirect the src of the successor edges of bb to point to new_bb.  */
  for (e = new_bb->succ; e; e = e->succ_next)
    e->src = new_bb;

  /* Place the new block just after the block being split.  */
  VARRAY_GROW (basic_block_info, ++n_basic_blocks);

  /* Some parts of the compiler expect blocks to be numbered in
     sequential order, so insert the new block immediately after the
     block being split.  */
  j = bb->index;
  for (i = n_basic_blocks - 1; i > j + 1; --i)
    {
      basic_block tmp = BASIC_BLOCK (i - 1);
      BASIC_BLOCK (i) = tmp;
      tmp->index = i;
    }

  BASIC_BLOCK (i) = new_bb;
  new_bb->index = i;

  /* Create the basic block note.  */
  bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK,
			      new_bb->head);
  NOTE_BASIC_BLOCK (bb_note) = new_bb;
  new_bb->head = bb_note;

  update_bb_for_insn (new_bb);

  if (bb->global_live_at_start)
    {
      new_bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
      new_bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
      COPY_REG_SET (new_bb->global_live_at_end, bb->global_live_at_end);

      /* We now have to calculate which registers are live at the end
	 of the split basic block and at the start of the new basic
	 block.  Start with those registers that are known to be live
	 at the end of the original basic block and get
	 propagate_block to determine which registers are live.  */
      COPY_REG_SET (new_bb->global_live_at_start, bb->global_live_at_end);
      propagate_block (new_bb, new_bb->global_live_at_start, NULL, NULL, 0);
      COPY_REG_SET (bb->global_live_at_end,
		    new_bb->global_live_at_start);
    }

  return new_edge;
}
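/* Illustrative only (not in the original source): a hypothetical caller
   splits BB after INSN and finds the new block through the returned
   fallthru edge, much as flow_call_edges_add does below.  */
#if 0
edge e = split_block (bb, insn);
if (e)
  bb = e->dest;		/* The block holding the insns after INSN.  */
#endif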
/* Split a (typically critical) edge.  Return the new block.
   Abort on abnormal edges.

   ??? The code generally expects to be called on critical edges.
   The case of a block ending in an unconditional jump to a
   block with multiple predecessors is not handled optimally.  */

basic_block
split_edge (edge_in)
     edge edge_in;
{
  basic_block old_pred, bb, old_succ;
  edge edge_out;
  rtx bb_note;
  int i, j;

  /* Abnormal edges cannot be split.  */
  if ((edge_in->flags & EDGE_ABNORMAL) != 0)
    abort ();

  old_pred = edge_in->src;
  old_succ = edge_in->dest;

  /* Remove the existing edge from the destination's pred list.  */
  {
    edge *pp;
    for (pp = &old_succ->pred; *pp != edge_in; pp = &(*pp)->pred_next)
      continue;
    *pp = edge_in->pred_next;
    edge_in->pred_next = NULL;
  }

  /* Create the new structures.  */
  bb = (basic_block) obstack_alloc (&flow_obstack, sizeof (*bb));
  edge_out = (edge) xcalloc (1, sizeof (*edge_out));
  n_edges++;

  memset (bb, 0, sizeof (*bb));

  /* ??? This info is likely going to be out of date very soon.  */
  if (old_succ->global_live_at_start)
    {
      bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
      bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
      COPY_REG_SET (bb->global_live_at_start, old_succ->global_live_at_start);
      COPY_REG_SET (bb->global_live_at_end, old_succ->global_live_at_start);
    }

  /* Wire them up.  */
  bb->pred = edge_in;
  bb->succ = edge_out;
  bb->count = edge_in->count;

  edge_in->dest = bb;
  edge_in->flags &= ~EDGE_CRITICAL;

  edge_out->pred_next = old_succ->pred;
  edge_out->succ_next = NULL;
  edge_out->src = bb;
  edge_out->dest = old_succ;
  edge_out->flags = EDGE_FALLTHRU;
  edge_out->probability = REG_BR_PROB_BASE;
  edge_out->count = edge_in->count;

  old_succ->pred = edge_out;

  /* Tricky case -- if there existed a fallthru into the successor
     (and we're not it) we must add a new unconditional jump around
     the new block we're actually interested in.

     Further, if that edge is critical, this means a second new basic
     block must be created to hold it.  In order to simplify correct
     insn placement, do this before we touch the existing basic block
     ordering for the block we were really wanting.  */
  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
    {
      edge e;
      for (e = edge_out->pred_next; e; e = e->pred_next)
	if (e->flags & EDGE_FALLTHRU)
	  break;

      if (e)
	{
	  basic_block jump_block;
	  rtx pos;

	  if ((e->flags & EDGE_CRITICAL) == 0
	      && e->src != ENTRY_BLOCK_PTR)
	    {
	      /* Non-critical -- we can simply add a jump to the end
		 of the existing predecessor.  */
	      jump_block = e->src;
	    }
	  else
	    {
	      /* We need a new block to hold the jump.  The simplest
		 way to do the bulk of the work here is to recursively
		 call ourselves.  */
	      jump_block = split_edge (e);
	      e = jump_block->succ;
	    }

	  /* Now add the jump insn ...  */
	  pos = emit_jump_insn_after (gen_jump (old_succ->head),
				      jump_block->end);
	  jump_block->end = pos;
	  if (basic_block_for_insn)
	    set_block_for_insn (pos, jump_block);
	  emit_barrier_after (pos);

	  /* ... let jump know that label is in use, ...  */
	  JUMP_LABEL (pos) = old_succ->head;
	  ++LABEL_NUSES (old_succ->head);

	  /* ... and clear fallthru on the outgoing edge.  */
	  e->flags &= ~EDGE_FALLTHRU;

	  /* Continue splitting the interesting edge.  */
	}
    }

  /* Place the new block just in front of the successor.  */
  VARRAY_GROW (basic_block_info, ++n_basic_blocks);
  if (old_succ == EXIT_BLOCK_PTR)
    j = n_basic_blocks - 1;
  else
    j = old_succ->index;
  for (i = n_basic_blocks - 1; i > j; --i)
    {
      basic_block tmp = BASIC_BLOCK (i - 1);
      BASIC_BLOCK (i) = tmp;
      tmp->index = i;
    }
  BASIC_BLOCK (i) = bb;
  bb->index = i;

  /* Create the basic block note.

     Where we place the note can have a noticeable impact on the generated
     code.  Consider this cfg:

			E
			|
			0
		       / \
		   +->1-->2--->E
		   |  |
		   +--+

     If we need to insert an insn on the edge from block 0 to block 1,
     we want to ensure the instructions we insert are outside of any
     loop notes that physically sit between block 0 and block 1.  Otherwise
     we confuse the loop optimizer into thinking the loop is a phony.  */
  if (old_succ != EXIT_BLOCK_PTR
      && PREV_INSN (old_succ->head)
      && GET_CODE (PREV_INSN (old_succ->head)) == NOTE
      && NOTE_LINE_NUMBER (PREV_INSN (old_succ->head)) == NOTE_INSN_LOOP_BEG)
    bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK,
				PREV_INSN (old_succ->head));
  else if (old_succ != EXIT_BLOCK_PTR)
    bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, old_succ->head);
  else
    bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
  NOTE_BASIC_BLOCK (bb_note) = bb;
  bb->head = bb->end = bb_note;

  /* Not quite simple -- for non-fallthru edges, we must adjust the
     predecessor's jump instruction to target our new block.  */
  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
    {
      rtx tmp, insn = old_pred->end;
      rtx old_label = old_succ->head;
      rtx new_label = gen_label_rtx ();

      if (GET_CODE (insn) != JUMP_INSN)
	abort ();

      /* ??? Recognize a tablejump and adjust all matching cases.  */
      if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
	  && (tmp = NEXT_INSN (tmp)) != NULL_RTX
	  && GET_CODE (tmp) == JUMP_INSN
	  && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
	      || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
	{
	  rtvec vec;
	  int j;

	  if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
	    vec = XVEC (PATTERN (tmp), 0);
	  else
	    vec = XVEC (PATTERN (tmp), 1);

	  for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
	    if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
	      {
		RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (VOIDmode, new_label);
		--LABEL_NUSES (old_label);
		++LABEL_NUSES (new_label);
	      }

	  /* Handle casesi dispatch insns.  */
	  if ((tmp = single_set (insn)) != NULL
	      && SET_DEST (tmp) == pc_rtx
	      && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
	      && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
	      && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
	    {
	      XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (VOIDmode,
							   new_label);
	      --LABEL_NUSES (old_label);
	      ++LABEL_NUSES (new_label);
	    }
	}
      else
	{
	  /* This would have indicated an abnormal edge.  */
	  if (computed_jump_p (insn))
	    abort ();

	  /* A return instruction can't be redirected.  */
	  if (returnjump_p (insn))
	    abort ();

	  /* If the insn doesn't go where we think, we're confused.  */
	  if (JUMP_LABEL (insn) != old_label)
	    abort ();

	  redirect_jump (insn, new_label, 0);
	}

      emit_label_before (new_label, bb_note);
      bb->head = new_label;
    }

  return bb;
}
/* Queue instructions for insertion on an edge between two basic blocks.
   The new instructions and basic blocks (if any) will not appear in the
   CFG until commit_edge_insertions is called.  */

void
insert_insn_on_edge (pattern, e)
     rtx pattern;
     edge e;
{
  /* We cannot insert instructions on an abnormal critical edge.
     It will be easier to find the culprit if we die now.  */
  if ((e->flags & (EDGE_ABNORMAL|EDGE_CRITICAL))
      == (EDGE_ABNORMAL|EDGE_CRITICAL))
    abort ();

  if (e->insns == NULL_RTX)
    start_sequence ();
  else
    push_to_sequence (e->insns);

  emit_insn (pattern);

  e->insns = get_insns ();
  end_sequence ();
}
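/* Illustrative usage sketch (not in the original source); the caller,
   edge E, and registers here are hypothetical.  Insns are queued on the
   edge and only materialized by commit_edge_insertions below.  */
#if 0
{
  rtx seq;

  start_sequence ();
  emit_move_insn (hypothetical_dest_reg, hypothetical_src_reg);
  seq = gen_sequence ();
  end_sequence ();

  insert_insn_on_edge (seq, e);	/* Queued; the CFG is unchanged so far.  */

  /* ... after all interesting edges have been annotated ...  */
  commit_edge_insertions ();	/* Now the insns land on the edges.  */
}
#endif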
/* Update the CFG for the instructions queued on edge E.  */

static void
commit_one_edge_insertion (e)
     edge e;
{
  rtx before = NULL_RTX, after = NULL_RTX, insns, tmp, last;
  basic_block bb;

  /* Pull the insns off the edge now since the edge might go away.  */
  insns = e->insns;
  e->insns = NULL_RTX;

  /* Figure out where to put these things.  If the destination has
     one predecessor, insert there.  Except for the exit block.  */
  if (e->dest->pred->pred_next == NULL
      && e->dest != EXIT_BLOCK_PTR)
    {
      bb = e->dest;

      /* Get the location correct wrt a code label, and "nice" wrt
	 a basic block note, and before everything else.  */
      tmp = bb->head;
      if (GET_CODE (tmp) == CODE_LABEL)
	tmp = NEXT_INSN (tmp);
      if (NOTE_INSN_BASIC_BLOCK_P (tmp))
	tmp = NEXT_INSN (tmp);
      if (tmp == bb->head)
	before = tmp;
      else
	after = PREV_INSN (tmp);
    }

  /* If the source has one successor and the edge is not abnormal,
     insert there.  Except for the entry block.  */
  else if ((e->flags & EDGE_ABNORMAL) == 0
	   && e->src->succ->succ_next == NULL
	   && e->src != ENTRY_BLOCK_PTR)
    {
      bb = e->src;
      /* It is possible to have a non-simple jump here.  Consider a target
	 where some forms of unconditional jumps clobber a register.  This
	 happens on the fr30 for example.

	 We know this block has a single successor, so we can just emit
	 the queued insns before the jump.  */
      if (GET_CODE (bb->end) == JUMP_INSN)
	{
	  before = bb->end;
	}
      else
	{
	  /* We'd better be fallthru, or we've lost track of what's what.  */
	  if ((e->flags & EDGE_FALLTHRU) == 0)
	    abort ();

	  after = bb->end;
	}
    }

  /* Otherwise we must split the edge.  */
  else
    {
      bb = split_edge (e);
      after = bb->end;
    }

  /* Now that we've found the spot, do the insertion.  */

  /* Set the new block number for these insns, if structure is allocated.  */
  if (basic_block_for_insn)
    {
      rtx i;
      for (i = insns; i != NULL_RTX; i = NEXT_INSN (i))
	set_block_for_insn (i, bb);
    }

  if (before)
    {
      emit_insns_before (insns, before);
      if (before == bb->head)
	bb->head = insns;

      last = prev_nonnote_insn (before);
    }
  else
    {
      last = emit_insns_after (insns, after);
      if (after == bb->end)
	bb->end = last;
    }

  if (returnjump_p (last))
    {
      /* ??? Remove all outgoing edges from BB and add one for EXIT.
	 This is not currently a problem because this only happens
	 for the (single) epilogue, which already has a fallthru edge
	 to EXIT.  */

      e = bb->succ;
      if (e->dest != EXIT_BLOCK_PTR
	  || e->succ_next != NULL
	  || (e->flags & EDGE_FALLTHRU) == 0)
	abort ();
      e->flags &= ~EDGE_FALLTHRU;

      emit_barrier_after (last);
      bb->end = last;

      if (before)
	flow_delete_insn (before);
    }
  else if (GET_CODE (last) == JUMP_INSN)
    abort ();
}
/* Update the CFG for all queued instructions.  */

void
commit_edge_insertions ()
{
  int i;
  basic_block bb;

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  i = -1;
  bb = ENTRY_BLOCK_PTR;
  while (1)
    {
      edge e, next;

      for (e = bb->succ; e; e = next)
	{
	  next = e->succ_next;
	  if (e->insns)
	    commit_one_edge_insertion (e);
	}

      if (++i >= n_basic_blocks)
	break;
      bb = BASIC_BLOCK (i);
    }
}
1861 /* Add fake edges to the function exit for any non constant calls in
1862 the bitmap of blocks specified by BLOCKS or to the whole CFG if
1863 BLOCKS is zero. Return the nuber of blocks that were split. */
1866 flow_call_edges_add (blocks)
1867 sbitmap blocks;
1869 int i;
1870 int blocks_split = 0;
1871 int bb_num = 0;
1872 basic_block *bbs;
1874 /* Map bb indicies into basic block pointers since split_block
1875 will renumber the basic blocks. */
1877 bbs = xmalloc (n_basic_blocks * sizeof (*bbs));
1879 if (! blocks)
1881 for (i = 0; i < n_basic_blocks; i++)
1882 bbs[bb_num++] = BASIC_BLOCK (i);
1884 else
1886 EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
1888 bbs[bb_num++] = BASIC_BLOCK (i);
1893 /* Now add fake edges to the function exit for any non constant
1894 calls since there is no way that we can determine if they will
1895 return or not... */
1897 for (i = 0; i < bb_num; i++)
1899 basic_block bb = bbs[i];
1900 rtx insn;
1901 rtx prev_insn;
1903 for (insn = bb->end; ; insn = prev_insn)
1905 prev_insn = PREV_INSN (insn);
1906 if (GET_CODE (insn) == CALL_INSN && ! CONST_CALL_P (insn))
1908 edge e;
1910 /* Note that the following may create a new basic block
1911 and renumber the existing basic blocks. */
1912 e = split_block (bb, insn);
1913 if (e)
1914 blocks_split++;
1916 make_edge (NULL, bb, EXIT_BLOCK_PTR, EDGE_FAKE);
1918 if (insn == bb->head)
1919 break;
1923 if (blocks_split)
1924 verify_flow_info ();
1926 free (bbs);
1927 return blocks_split;
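/* For illustration only (not compiled): a caller wanting fake call
   edges for every block could pass a fully set bitmap; passing a null
   pointer instead processes the whole CFG.  N_SPLIT is hypothetical.  */
#if 0
  sbitmap all = sbitmap_alloc (n_basic_blocks);
  sbitmap_ones (all);
  n_split = flow_call_edges_add (all);
  sbitmap_free (all);
#endif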
1930 /* Delete all unreachable basic blocks. */
1932 static void
1933 delete_unreachable_blocks ()
1935 basic_block *worklist, *tos;
1936 edge e;
1937 int i, n;
1939 n = n_basic_blocks;
1940 tos = worklist = (basic_block *) xmalloc (sizeof (basic_block) * n);
1942 /* Use basic_block->aux as a marker. Clear them all. */
1944 for (i = 0; i < n; ++i)
1945 BASIC_BLOCK (i)->aux = NULL;
1947 /* Add our starting points to the worklist. Almost always there will
1948 be only one. It isn't inconceivable that we might one day directly
1949 support Fortran alternate entry points. */
1951 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
1953 *tos++ = e->dest;
1955 /* Mark the block with a handy non-null value. */
1956 e->dest->aux = e;
1959 /* Iterate: find everything reachable from what we've already seen. */
1961 while (tos != worklist)
1963 basic_block b = *--tos;
1965 for (e = b->succ; e; e = e->succ_next)
1966 if (!e->dest->aux)
1968 *tos++ = e->dest;
1969 e->dest->aux = e;
1973 /* Delete all unreachable basic blocks. Count down so that we
1974 don't interfere with the block renumbering that happens in
1975 flow_delete_block. */
1977 for (i = n - 1; i >= 0; --i)
1979 basic_block b = BASIC_BLOCK (i);
1981 if (b->aux != NULL)
1982 /* This block was found. Tidy up the mark. */
1983 b->aux = NULL;
1984 else
1985 flow_delete_block (b);
1988 tidy_fallthru_edges ();
1990 free (worklist);
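/* A worked example of the pass above: in the CFG

	ENTRY -> A -> B -> EXIT
	C -> B			(no edge reaches C)

   the worklist walk marks A and B through their AUX fields, the
   countdown loop deletes only C, and tidy_fallthru_edges then turns
   any jump from A around the deleted block into a fallthru where
   possible.  */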
1993 /* Return true if NOTE is not one of the ones that must be kept paired,
1994 so that we may simply delete them. */
1996 static int
1997 can_delete_note_p (note)
1998 rtx note;
2000 return (NOTE_LINE_NUMBER (note) == NOTE_INSN_DELETED
2001 || NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK);
2004 /* Unlink a chain of insns between START and FINISH, leaving notes
2005 that must be paired. */
2007 void
2008 flow_delete_insn_chain (start, finish)
2009 rtx start, finish;
2011 /* Unchain the insns one by one. It would be quicker to delete all
2012 of these with a single unchaining, rather than one at a time, but
2013 we need to keep the NOTEs. */
2015 rtx next;
2017 while (1)
2019 next = NEXT_INSN (start);
2020 if (GET_CODE (start) == NOTE && !can_delete_note_p (start))
2021 ;
2022 else if (GET_CODE (start) == CODE_LABEL
2023 && ! can_delete_label_p (start))
2025 const char *name = LABEL_NAME (start);
2026 PUT_CODE (start, NOTE);
2027 NOTE_LINE_NUMBER (start) = NOTE_INSN_DELETED_LABEL;
2028 NOTE_SOURCE_FILE (start) = name;
2030 else
2031 next = flow_delete_insn (start);
2033 if (start == finish)
2034 break;
2035 start = next;
2039 /* Delete the insns in a (non-live) block. We physically delete every
2040 non-deleted-note insn, and update the flow graph appropriately.
2042 Return nonzero if we deleted an exception handler. */
2044 /* ??? Preserving all such notes strikes me as wrong. It would be nice
2045 to post-process the stream to remove empty blocks, loops, ranges, etc. */
2047 int
2048 flow_delete_block (b)
2049 basic_block b;
2051 int deleted_handler = 0;
2052 rtx insn, end, tmp;
2054 /* If the head of this block is a CODE_LABEL, then it might be the
2055 label for an exception handler which can't be reached.
2057 We need to remove the label from the exception_handler_label list
2058 and remove the associated NOTE_INSN_EH_REGION_BEG and
2059 NOTE_INSN_EH_REGION_END notes. */
2061 insn = b->head;
2063 never_reached_warning (insn);
2065 if (GET_CODE (insn) == CODE_LABEL)
2066 maybe_remove_eh_handler (insn);
2068 /* Include any jump table following the basic block. */
2069 end = b->end;
2070 if (GET_CODE (end) == JUMP_INSN
2071 && (tmp = JUMP_LABEL (end)) != NULL_RTX
2072 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
2073 && GET_CODE (tmp) == JUMP_INSN
2074 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
2075 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
2076 end = tmp;
2078 /* Include any barrier that may follow the basic block. */
2079 tmp = next_nonnote_insn (end);
2080 if (tmp && GET_CODE (tmp) == BARRIER)
2081 end = tmp;
2083 /* Selectively delete the entire chain. */
2084 flow_delete_insn_chain (insn, end);
2086 /* Remove the edges into and out of this block. Note that there may
2087 indeed be edges in, if we are removing an unreachable loop. */
2089 edge e, next, *q;
2091 for (e = b->pred; e; e = next)
2093 for (q = &e->src->succ; *q != e; q = &(*q)->succ_next)
2094 continue;
2095 *q = e->succ_next;
2096 next = e->pred_next;
2097 n_edges--;
2098 free (e);
2100 for (e = b->succ; e; e = next)
2102 for (q = &e->dest->pred; *q != e; q = &(*q)->pred_next)
2103 continue;
2104 *q = e->pred_next;
2105 next = e->succ_next;
2106 n_edges--;
2107 free (e);
2110 b->pred = NULL;
2111 b->succ = NULL;
2114 /* Remove the basic block from the array, and compact behind it. */
2115 expunge_block (b);
2117 return deleted_handler;
2120 /* Remove block B from the basic block array and compact behind it. */
2122 static void
2123 expunge_block (b)
2124 basic_block b;
2126 int i, n = n_basic_blocks;
2128 for (i = b->index; i + 1 < n; ++i)
2130 basic_block x = BASIC_BLOCK (i + 1);
2131 BASIC_BLOCK (i) = x;
2132 x->index = i;
2135 basic_block_info->num_elements--;
2136 n_basic_blocks--;
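/* Example: with five blocks numbered 0..4, expunging block 2 shifts
   blocks 3 and 4 down to indices 2 and 3, updates their INDEX fields
   to match, and leaves n_basic_blocks at 4.  The block's storage is
   not reclaimed here; callers have already unlinked its edges.  */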
2139 /* Delete INSN by patching it out. Return the next insn. */
2141 rtx
2142 flow_delete_insn (insn)
2143 rtx insn;
2145 rtx prev = PREV_INSN (insn);
2146 rtx next = NEXT_INSN (insn);
2147 rtx note;
2149 PREV_INSN (insn) = NULL_RTX;
2150 NEXT_INSN (insn) = NULL_RTX;
2151 INSN_DELETED_P (insn) = 1;
2153 if (prev)
2154 NEXT_INSN (prev) = next;
2155 if (next)
2156 PREV_INSN (next) = prev;
2157 else
2158 set_last_insn (prev);
2160 if (GET_CODE (insn) == CODE_LABEL)
2161 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
2163 /* If deleting a jump, decrement the use count of the label. Deleting
2164 the label itself should happen in the normal course of block merging. */
2165 if (GET_CODE (insn) == JUMP_INSN
2166 && JUMP_LABEL (insn)
2167 && GET_CODE (JUMP_LABEL (insn)) == CODE_LABEL)
2168 LABEL_NUSES (JUMP_LABEL (insn))--;
2170 /* Also if deleting an insn that references a label. */
2171 else if ((note = find_reg_note (insn, REG_LABEL, NULL_RTX)) != NULL_RTX
2172 && GET_CODE (XEXP (note, 0)) == CODE_LABEL)
2173 LABEL_NUSES (XEXP (note, 0))--;
2175 return next;
2178 /* True if a given label can be deleted. */
2180 static int
2181 can_delete_label_p (label)
2182 rtx label;
2184 rtx x;
2186 if (LABEL_PRESERVE_P (label))
2187 return 0;
2189 for (x = forced_labels; x; x = XEXP (x, 1))
2190 if (label == XEXP (x, 0))
2191 return 0;
2192 for (x = label_value_list; x; x = XEXP (x, 1))
2193 if (label == XEXP (x, 0))
2194 return 0;
2195 for (x = exception_handler_labels; x; x = XEXP (x, 1))
2196 if (label == XEXP (x, 0))
2197 return 0;
2199 /* User declared labels must be preserved. */
2200 if (LABEL_NAME (label) != 0)
2201 return 0;
2203 return 1;
2206 static int
2207 tail_recursion_label_p (label)
2208 rtx label;
2210 rtx x;
2212 for (x = tail_recursion_label_list; x; x = XEXP (x, 1))
2213 if (label == XEXP (x, 0))
2214 return 1;
2216 return 0;
2219 /* Blocks A and B are to be merged into a single block A. The insns
2220 are already contiguous, hence `nomove'. */
2222 void
2223 merge_blocks_nomove (a, b)
2224 basic_block a, b;
2226 edge e;
2227 rtx b_head, b_end, a_end;
2228 rtx del_first = NULL_RTX, del_last = NULL_RTX;
2229 int b_empty = 0;
2231 /* If there was a CODE_LABEL beginning B, delete it. */
2232 b_head = b->head;
2233 b_end = b->end;
2234 if (GET_CODE (b_head) == CODE_LABEL)
2236 /* Detect basic blocks with nothing but a label. This can happen
2237 in particular at the end of a function. */
2238 if (b_head == b_end)
2239 b_empty = 1;
2240 del_first = del_last = b_head;
2241 b_head = NEXT_INSN (b_head);
2244 /* Delete the basic block note. */
2245 if (NOTE_INSN_BASIC_BLOCK_P (b_head))
2247 if (b_head == b_end)
2248 b_empty = 1;
2249 if (! del_last)
2250 del_first = b_head;
2251 del_last = b_head;
2252 b_head = NEXT_INSN (b_head);
2255 /* If there was a jump out of A, delete it. */
2256 a_end = a->end;
2257 if (GET_CODE (a_end) == JUMP_INSN)
2259 rtx prev;
2261 for (prev = PREV_INSN (a_end); ; prev = PREV_INSN (prev))
2262 if (GET_CODE (prev) != NOTE
2263 || NOTE_LINE_NUMBER (prev) == NOTE_INSN_BASIC_BLOCK
2264 || prev == a->head)
2265 break;
2267 del_first = a_end;
2269 #ifdef HAVE_cc0
2270 /* If this was a conditional jump, we need to also delete
2271 the insn that set cc0. */
2272 if (prev && sets_cc0_p (prev))
2274 rtx tmp = prev;
2275 prev = prev_nonnote_insn (prev);
2276 if (!prev)
2277 prev = a->head;
2278 del_first = tmp;
2280 #endif
2282 a_end = prev;
2284 else if (GET_CODE (NEXT_INSN (a_end)) == BARRIER)
2285 del_first = NEXT_INSN (a_end);
2287 /* Delete everything marked above as well as crap that might be
2288 hanging out between the two blocks. */
2289 flow_delete_insn_chain (del_first, del_last);
2291 /* Normally there should only be one successor of A and that is B, but
2292 partway through the merge of blocks for conditional_execution we'll
2293 be merging a TEST block with THEN and ELSE successors. Free the
2294 whole lot of them and hope the caller knows what they're doing. */
2295 while (a->succ)
2296 remove_edge (a->succ);
2298 /* Adjust the edges out of B for the new owner. */
2299 for (e = b->succ; e; e = e->succ_next)
2300 e->src = a;
2301 a->succ = b->succ;
2303 /* B hasn't quite yet ceased to exist. Attempt to prevent mishap. */
2304 b->pred = b->succ = NULL;
2306 /* Reassociate the insns of B with A. */
2307 if (!b_empty)
2309 if (basic_block_for_insn)
2311 BLOCK_FOR_INSN (b_head) = a;
2312 while (b_head != b_end)
2314 b_head = NEXT_INSN (b_head);
2315 BLOCK_FOR_INSN (b_head) = a;
2318 a_end = b_end;
2320 a->end = a_end;
2322 expunge_block (b);
2325 /* Blocks A and B are to be merged into a single block. A has no incoming
2326 fallthru edge, so it can be moved before B without adding or modifying
2327 any jumps (aside from the jump from A to B). */
2329 static int
2330 merge_blocks_move_predecessor_nojumps (a, b)
2331 basic_block a, b;
2333 rtx start, end, barrier;
2334 int index;
2336 start = a->head;
2337 end = a->end;
2339 barrier = next_nonnote_insn (end);
2340 if (GET_CODE (barrier) != BARRIER)
2341 abort ();
2342 flow_delete_insn (barrier);
2344 /* Move block and loop notes out of the chain so that we do not
2345 disturb their order.
2347 ??? A better solution would be to squeeze out all the non-nested notes
2348 and adjust the block trees appropriately. Even better would be to have
2349 a tighter connection between block trees and rtl so that this is not
2350 necessary. */
2351 start = squeeze_notes (start, end);
2353 /* Scramble the insn chain. */
2354 if (end != PREV_INSN (b->head))
2355 reorder_insns (start, end, PREV_INSN (b->head));
2357 if (rtl_dump_file)
2359 fprintf (rtl_dump_file, "Moved block %d before %d and merged.\n",
2360 a->index, b->index);
2363 /* Swap the records for the two blocks around. Although we are deleting B,
2364 A is now where B was and we want to compact the BB array from where
2365 A used to be. */
2366 BASIC_BLOCK (a->index) = b;
2367 BASIC_BLOCK (b->index) = a;
2368 index = a->index;
2369 a->index = b->index;
2370 b->index = index;
2372 /* Now blocks A and B are contiguous. Merge them. */
2373 merge_blocks_nomove (a, b);
2375 return 1;
2378 /* Blocks A and B are to be merged into a single block. B has no outgoing
2379 fallthru edge, so it can be moved after A without adding or modifying
2380 any jumps (aside from the jump from A to B). */
2382 static int
2383 merge_blocks_move_successor_nojumps (a, b)
2384 basic_block a, b;
2386 rtx start, end, barrier;
2388 start = b->head;
2389 end = b->end;
2390 barrier = NEXT_INSN (end);
2392 /* Recognize a jump table following block B. */
2393 if (GET_CODE (barrier) == CODE_LABEL
2394 && NEXT_INSN (barrier)
2395 && GET_CODE (NEXT_INSN (barrier)) == JUMP_INSN
2396 && (GET_CODE (PATTERN (NEXT_INSN (barrier))) == ADDR_VEC
2397 || GET_CODE (PATTERN (NEXT_INSN (barrier))) == ADDR_DIFF_VEC))
2399 end = NEXT_INSN (barrier);
2400 barrier = NEXT_INSN (end);
2403 /* There had better have been a barrier there. Delete it. */
2404 if (GET_CODE (barrier) != BARRIER)
2405 abort ();
2406 flow_delete_insn (barrier);
2408 /* Move block and loop notes out of the chain so that we do not
2409 disturb their order.
2411 ??? A better solution would be to squeeze out all the non-nested notes
2412 and adjust the block trees appropriately. Even better would be to have
2413 a tighter connection between block trees and rtl so that this is not
2414 necessary. */
2415 start = squeeze_notes (start, end);
2417 /* Scramble the insn chain. */
2418 reorder_insns (start, end, a->end);
2420 /* Now blocks A and B are contiguous. Merge them. */
2421 merge_blocks_nomove (a, b);
2423 if (rtl_dump_file)
2425 fprintf (rtl_dump_file, "Moved block %d after %d and merged.\n",
2426 b->index, a->index);
2429 return 1;
2432 /* Attempt to merge basic blocks that are potentially non-adjacent.
2433 Return true iff the attempt succeeded. */
2435 static int
2436 merge_blocks (e, b, c)
2437 edge e;
2438 basic_block b, c;
2440 /* If C has a tail recursion label, do not merge. There is no
2441 edge recorded from the call_placeholder back to this label, as
2442 that would make optimize_sibling_and_tail_recursive_calls more
2443 complex for no gain. */
2444 if (GET_CODE (c->head) == CODE_LABEL
2445 && tail_recursion_label_p (c->head))
2446 return 0;
2448 /* If B has a fallthru edge to C, no need to move anything. */
2449 if (e->flags & EDGE_FALLTHRU)
2451 merge_blocks_nomove (b, c);
2453 if (rtl_dump_file)
2455 fprintf (rtl_dump_file, "Merged %d and %d without moving.\n",
2456 b->index, c->index);
2459 return 1;
2461 else
2463 edge tmp_edge;
2464 int c_has_outgoing_fallthru;
2465 int b_has_incoming_fallthru;
2467 /* We must make sure to not munge nesting of exception regions,
2468 lexical blocks, and loop notes.
2470 The first is taken care of by requiring that the active eh
2471 region at the end of one block always matches the active eh
2472 region at the beginning of the next block.
2474 The latter two are taken care of by squeezing out all the notes. */
2476 /* ??? A throw/catch edge (or any abnormal edge) should be rarely
2477 executed and we may want to treat blocks which have two out
2478 edges, one normal, one abnormal as only having one edge for
2479 block merging purposes. */
2481 for (tmp_edge = c->succ; tmp_edge; tmp_edge = tmp_edge->succ_next)
2482 if (tmp_edge->flags & EDGE_FALLTHRU)
2483 break;
2484 c_has_outgoing_fallthru = (tmp_edge != NULL);
2486 for (tmp_edge = b->pred; tmp_edge; tmp_edge = tmp_edge->pred_next)
2487 if (tmp_edge->flags & EDGE_FALLTHRU)
2488 break;
2489 b_has_incoming_fallthru = (tmp_edge != NULL);
2491 /* If B does not have an incoming fallthru, then it can be moved
2492 immediately before C without introducing or modifying jumps.
2493 C cannot be the first block, so we do not have to worry about
2494 accessing a non-existent block. */
2495 if (! b_has_incoming_fallthru)
2496 return merge_blocks_move_predecessor_nojumps (b, c);
2498 /* Otherwise, we're going to try to move C after B. If C does
2499 not have an outgoing fallthru, then it can be moved
2500 immediately after B without introducing or modifying jumps. */
2501 if (! c_has_outgoing_fallthru)
2502 return merge_blocks_move_successor_nojumps (b, c);
2504 /* Otherwise, we'll need to insert an extra jump, and possibly
2505 a new block to contain it. */
2506 /* ??? Not implemented yet. */
2508 return 0;
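/* Summary of the cases above, for illustration:

     fallthru edge from B to C	-> merge in place (merge_blocks_nomove)
     no fallthru into B		-> move B immediately before C
     no fallthru out of C	-> move C immediately after B
     both fallthrus present	-> give up; an extra jump would be needed  */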
2512 /* Top level driver for merge_blocks. */
2514 static void
2515 try_merge_blocks ()
2517 int i;
2519 /* Attempt to merge blocks as made possible by edge removal. If a block
2520 has only one successor, and the successor has only one predecessor,
2521 they may be combined. */
2523 for (i = 0; i < n_basic_blocks;)
2525 basic_block c, b = BASIC_BLOCK (i);
2526 edge s;
2528 /* A loop because chains of blocks might be combinable. */
2529 while ((s = b->succ) != NULL
2530 && s->succ_next == NULL
2531 && (s->flags & EDGE_EH) == 0
2532 && (c = s->dest) != EXIT_BLOCK_PTR
2533 && c->pred->pred_next == NULL
2534 /* If the jump insn has side effects, we can't kill the edge. */
2535 && (GET_CODE (b->end) != JUMP_INSN
2536 || onlyjump_p (b->end))
2537 && merge_blocks (s, b, c))
2538 continue;
2540 /* Don't get confused by the index shift caused by deleting blocks. */
2541 i = b->index + 1;
2545 /* The given edge should potentially be a fallthru edge. If that is in
2546 fact true, delete the jump and barriers that are in the way. */
2548 void
2549 tidy_fallthru_edge (e, b, c)
2550 edge e;
2551 basic_block b, c;
2553 rtx q;
2555 /* ??? In a late-running flow pass, other folks may have deleted basic
2556 blocks by nopping out blocks, leaving multiple BARRIERs between here
2557 and the target label. They ought to be chastised and fixed.
2559 We can also wind up with a sequence of undeletable labels between
2560 one block and the next.
2562 So search through a sequence of barriers, labels, and notes for
2563 the head of block C and assert that we really do fall through. */
2565 if (next_real_insn (b->end) != next_real_insn (PREV_INSN (c->head)))
2566 return;
2568 /* Remove what will soon cease being the jump insn from the source block.
2569 If block B consisted only of this single jump, turn it into a deleted
2570 note. */
2571 q = b->end;
2572 if (GET_CODE (q) == JUMP_INSN
2573 && onlyjump_p (q)
2574 && (any_uncondjump_p (q)
2575 || (b->succ == e && e->succ_next == NULL)))
2577 #ifdef HAVE_cc0
2578 /* If this was a conditional jump, we need to also delete
2579 the insn that set cc0. */
2580 if (any_condjump_p (q) && sets_cc0_p (PREV_INSN (q)))
2581 q = PREV_INSN (q);
2582 #endif
2584 if (b->head == q)
2586 PUT_CODE (q, NOTE);
2587 NOTE_LINE_NUMBER (q) = NOTE_INSN_DELETED;
2588 NOTE_SOURCE_FILE (q) = 0;
2590 else
2592 q = PREV_INSN (q);
2594 /* We don't want a block to end on a line-number note since that has
2595 the potential of changing the code between -g and not -g. */
2596 while (GET_CODE (q) == NOTE && NOTE_LINE_NUMBER (q) >= 0)
2597 q = PREV_INSN (q);
2600 b->end = q;
2603 /* Selectively unlink the sequence. */
2604 if (q != PREV_INSN (c->head))
2605 flow_delete_insn_chain (NEXT_INSN (q), PREV_INSN (c->head));
2607 e->flags |= EDGE_FALLTHRU;
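/* Example: if block B ends in an unconditional jump to label L and L
   heads block C, with only barriers, notes, and undeletable labels in
   between, the code above deletes the jump (or demotes it to
   NOTE_INSN_DELETED when the jump was all of B) and marks the edge
   EDGE_FALLTHRU.  */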
2610 /* Fix up edges that now fall through, or rather should now fall through
2611 but previously required a jump around now deleted blocks. Simplify
2612 the search by only examining blocks numerically adjacent, since this
2613 is how find_basic_blocks created them. */
2615 static void
2616 tidy_fallthru_edges ()
2618 int i;
2620 for (i = 1; i < n_basic_blocks; ++i)
2622 basic_block b = BASIC_BLOCK (i - 1);
2623 basic_block c = BASIC_BLOCK (i);
2624 edge s;
2626 /* We care about simple conditional or unconditional jumps with
2627 a single successor.
2629 If we had a conditional branch to the next instruction when
2630 find_basic_blocks was called, then there will only be one
2631 out edge for the block which ended with the conditional
2632 branch (since we do not create duplicate edges).
2634 Furthermore, the edge will be marked as a fallthru because we
2635 merge the flags for the duplicate edges. So we do not want to
2636 check that the edge is not a FALLTHRU edge. */
2637 if ((s = b->succ) != NULL
2638 && ! (s->flags & EDGE_COMPLEX)
2639 && s->succ_next == NULL
2640 && s->dest == c
2641 /* If the jump insn has side effects, we can't tidy the edge. */
2642 && (GET_CODE (b->end) != JUMP_INSN
2643 || onlyjump_p (b->end)))
2644 tidy_fallthru_edge (s, b, c);
2648 /* Perform data flow analysis.
2649 F is the first insn of the function; FLAGS is a set of PROP_* flags
2650 to be used in accumulating flow info. */
2652 void
2653 life_analysis (f, file, flags)
2654 rtx f;
2655 FILE *file;
2656 int flags;
2658 #ifdef ELIMINABLE_REGS
2659 register int i;
2660 static struct {int from, to; } eliminables[] = ELIMINABLE_REGS;
2661 #endif
2663 /* Record which registers will be eliminated. We use this in
2664 mark_used_regs. */
2666 CLEAR_HARD_REG_SET (elim_reg_set);
2668 #ifdef ELIMINABLE_REGS
2669 for (i = 0; i < (int) ARRAY_SIZE (eliminables); i++)
2670 SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
2671 #else
2672 SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
2673 #endif
2675 if (! optimize)
2676 flags &= ~(PROP_LOG_LINKS | PROP_AUTOINC);
2678 /* The post-reload life analysis has (on a global basis) the same
2679 registers live as were computed by reload itself; otherwise
2680 elimination offsets and such may be incorrect.
2682 Reload will mark some registers as live even though they do not
2683 appear in the rtl.
2685 We don't want to create new auto-incs after reload, since they
2686 are unlikely to be useful and can cause problems with shared
2687 stack slots. */
2688 if (reload_completed)
2689 flags &= ~(PROP_REG_INFO | PROP_AUTOINC);
2691 /* We want alias analysis information for local dead store elimination. */
2692 if (optimize && (flags & PROP_SCAN_DEAD_CODE))
2693 init_alias_analysis ();
2695 /* Always remove no-op moves. Do this before other processing so
2696 that we don't have to keep re-scanning them. */
2697 delete_noop_moves (f);
2699 /* Some targets can emit simpler epilogues if they know that sp was
2700 not ever modified during the function. After reload, of course,
2701 we've already emitted the epilogue so there's no sense searching. */
2702 if (! reload_completed)
2703 notice_stack_pointer_modification (f);
2705 /* Allocate and zero out data structures that will record the
2706 data from lifetime analysis. */
2707 allocate_reg_life_data ();
2708 allocate_bb_life_data ();
2710 /* Find the set of registers live on function exit. */
2711 mark_regs_live_at_end (EXIT_BLOCK_PTR->global_live_at_start);
2713 /* "Update" life info from zero. It'd be nice to begin the
2714 relaxation with just the exit and noreturn blocks, but that set
2715 is not immediately handy. */
2717 if (flags & PROP_REG_INFO)
2718 memset (regs_ever_live, 0, sizeof (regs_ever_live));
2719 update_life_info (NULL, UPDATE_LIFE_GLOBAL, flags);
2721 /* Clean up. */
2722 if (optimize && (flags & PROP_SCAN_DEAD_CODE))
2723 end_alias_analysis ();
2725 if (file)
2726 dump_flow_info (file);
2728 free_basic_block_vars (1);
2731 /* A subroutine of verify_wide_reg, called through for_each_rtx.
2732 Search for REGNO. If found, abort if it is not wider than word_mode. */
2734 static int
2735 verify_wide_reg_1 (px, pregno)
2736 rtx *px;
2737 void *pregno;
2739 rtx x = *px;
2740 unsigned int regno = *(int *) pregno;
2742 if (GET_CODE (x) == REG && REGNO (x) == regno)
2744 if (GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD)
2745 abort ();
2746 return 1;
2748 return 0;
2751 /* A subroutine of verify_local_live_at_start. Search through insns
2752 between HEAD and END looking for register REGNO. */
2754 static void
2755 verify_wide_reg (regno, head, end)
2756 int regno;
2757 rtx head, end;
2759 while (1)
2761 if (INSN_P (head)
2762 && for_each_rtx (&PATTERN (head), verify_wide_reg_1, &regno))
2763 return;
2764 if (head == end)
2765 break;
2766 head = NEXT_INSN (head);
2769 /* We didn't find the register at all. Something's way screwy. */
2770 if (rtl_dump_file)
2771 fprintf (rtl_dump_file, "Aborting in verify_wide_reg; reg %d\n", regno);
2772 print_rtl_and_abort ();
2775 /* A subroutine of update_life_info. Verify that there are no untoward
2776 changes in live_at_start during a local update. */
2778 static void
2779 verify_local_live_at_start (new_live_at_start, bb)
2780 regset new_live_at_start;
2781 basic_block bb;
2783 if (reload_completed)
2785 /* After reload, there are no pseudos, nor subregs of multi-word
2786 registers. The regsets should exactly match. */
2787 if (! REG_SET_EQUAL_P (new_live_at_start, bb->global_live_at_start))
2789 if (rtl_dump_file)
2791 fprintf (rtl_dump_file,
2792 "live_at_start mismatch in bb %d, aborting\n",
2793 bb->index);
2794 debug_bitmap_file (rtl_dump_file, bb->global_live_at_start);
2795 debug_bitmap_file (rtl_dump_file, new_live_at_start);
2797 print_rtl_and_abort ();
2800 else
2802 int i;
2804 /* Find the set of changed registers. */
2805 XOR_REG_SET (new_live_at_start, bb->global_live_at_start);
2807 EXECUTE_IF_SET_IN_REG_SET (new_live_at_start, 0, i,
2809 /* No registers should die. */
2810 if (REGNO_REG_SET_P (bb->global_live_at_start, i))
2812 if (rtl_dump_file)
2813 fprintf (rtl_dump_file,
2814 "Register %d died unexpectedly in block %d\n", i,
2815 bb->index);
2816 print_rtl_and_abort ();
2819 /* Verify that the now-live register is wider than word_mode. */
2820 verify_wide_reg (i, bb->head, bb->end);
2825 /* Updates life information starting with the basic blocks set in BLOCKS.
2826 If BLOCKS is null, consider it to be the universal set.
2828 If EXTENT is UPDATE_LIFE_LOCAL, such as after splitting or peepholing,
2829 we are only expecting local modifications to basic blocks. If we find
2830 extra registers live at the beginning of a block, then we either killed
2831 useful data, or we have a broken split that wants data not provided.
2832 If we find registers removed from live_at_start, that means we have
2833 a broken peephole that is killing a register it shouldn't.
2835 ??? This is not true in one situation -- when a pre-reload splitter
2836 generates subregs of a multi-word pseudo, current life analysis will
2837 lose the kill. So we _can_ have a pseudo go live. How irritating.
2839 Including PROP_REG_INFO does not properly refresh regs_ever_live
2840 unless the caller resets it to zero. */
2842 void
2843 update_life_info (blocks, extent, prop_flags)
2844 sbitmap blocks;
2845 enum update_life_extent extent;
2846 int prop_flags;
2848 regset tmp;
2849 regset_head tmp_head;
2850 int i;
2852 tmp = INITIALIZE_REG_SET (tmp_head);
2854 /* For a global update, we go through the relaxation process again. */
2855 if (extent != UPDATE_LIFE_LOCAL)
2857 calculate_global_regs_live (blocks, blocks,
2858 prop_flags & PROP_SCAN_DEAD_CODE);
2860 /* If asked, remove notes from the blocks we'll update. */
2861 if (extent == UPDATE_LIFE_GLOBAL_RM_NOTES)
2862 count_or_remove_death_notes (blocks, 1);
2865 if (blocks)
2867 EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
2869 basic_block bb = BASIC_BLOCK (i);
2871 COPY_REG_SET (tmp, bb->global_live_at_end);
2872 propagate_block (bb, tmp, NULL, NULL, prop_flags);
2874 if (extent == UPDATE_LIFE_LOCAL)
2875 verify_local_live_at_start (tmp, bb);
2878 else
2880 for (i = n_basic_blocks - 1; i >= 0; --i)
2882 basic_block bb = BASIC_BLOCK (i);
2884 COPY_REG_SET (tmp, bb->global_live_at_end);
2885 propagate_block (bb, tmp, NULL, NULL, prop_flags);
2887 if (extent == UPDATE_LIFE_LOCAL)
2888 verify_local_live_at_start (tmp, bb);
2892 FREE_REG_SET (tmp);
2894 if (prop_flags & PROP_REG_INFO)
2896 /* The only pseudos that are live at the beginning of the function
2897 are those that were not set anywhere in the function. local-alloc
2898 doesn't know how to handle these correctly, so mark them as not
2899 local to any one basic block. */
2900 EXECUTE_IF_SET_IN_REG_SET (ENTRY_BLOCK_PTR->global_live_at_end,
2901 FIRST_PSEUDO_REGISTER, i,
2902 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
2904 /* We have a problem with any pseudoreg that lives across the setjmp.
2905 ANSI says that if a user variable does not change in value between
2906 the setjmp and the longjmp, then the longjmp preserves it. This
2907 includes longjmp from a place where the pseudo appears dead.
2908 (In principle, the value still exists if it is in scope.)
2909 If the pseudo goes in a hard reg, some other value may occupy
2910 that hard reg where this pseudo is dead, thus clobbering the pseudo.
2911 Conclusion: such a pseudo must not go in a hard reg. */
2912 EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
2913 FIRST_PSEUDO_REGISTER, i,
2915 if (regno_reg_rtx[i] != 0)
2917 REG_LIVE_LENGTH (i) = -1;
2918 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
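/* For illustration only (not compiled): a pass that merely split or
   peepholed insns within a few blocks would record those block indices
   in an sbitmap and ask for a local update, while a null BLOCKS with
   UPDATE_LIFE_GLOBAL redoes the full relaxation as life_analysis does
   above.  */
#if 0
  update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
#endif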
2924 /* Free the variables allocated by find_basic_blocks.
2926 KEEP_HEAD_END_P is non-zero if basic_block_info is not to be freed. */
2928 void
2929 free_basic_block_vars (keep_head_end_p)
2930 int keep_head_end_p;
2932 if (basic_block_for_insn)
2934 VARRAY_FREE (basic_block_for_insn);
2935 basic_block_for_insn = NULL;
2938 if (! keep_head_end_p)
2940 clear_edges ();
2941 VARRAY_FREE (basic_block_info);
2942 n_basic_blocks = 0;
2944 ENTRY_BLOCK_PTR->aux = NULL;
2945 ENTRY_BLOCK_PTR->global_live_at_end = NULL;
2946 EXIT_BLOCK_PTR->aux = NULL;
2947 EXIT_BLOCK_PTR->global_live_at_start = NULL;
2951 /* Return nonzero if the destination of SET equals the source. */
2953 static int
2954 set_noop_p (set)
2955 rtx set;
2957 rtx src = SET_SRC (set);
2958 rtx dst = SET_DEST (set);
2960 if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
2962 if (SUBREG_WORD (src) != SUBREG_WORD (dst))
2963 return 0;
2964 src = SUBREG_REG (src);
2965 dst = SUBREG_REG (dst);
2968 return (GET_CODE (src) == REG && GET_CODE (dst) == REG
2969 && REGNO (src) == REGNO (dst));
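/* Example: (set (reg 6) (reg 6)) is a no-op move, as is a copy of a
   subreg onto the identical subreg; but if the two SUBREG_WORDs
   differ, the set moves data between words and must be kept.  */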
2972 /* Return nonzero if an insn consists only of SETs, each of which only sets a
2973 value to itself. */
2975 static int
2976 noop_move_p (insn)
2977 rtx insn;
2979 rtx pat = PATTERN (insn);
2981 /* Insns carrying these notes are useful later on. */
2982 if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
2983 return 0;
2985 if (GET_CODE (pat) == SET && set_noop_p (pat))
2986 return 1;
2988 if (GET_CODE (pat) == PARALLEL)
2990 int i;
2991 /* If nothing but SETs of registers to themselves,
2992 this insn can also be deleted. */
2993 for (i = 0; i < XVECLEN (pat, 0); i++)
2995 rtx tem = XVECEXP (pat, 0, i);
2997 if (GET_CODE (tem) == USE
2998 || GET_CODE (tem) == CLOBBER)
2999 continue;
3001 if (GET_CODE (tem) != SET || ! set_noop_p (tem))
3002 return 0;
3005 return 1;
3007 return 0;
3010 /* Delete any insns that copy a register to itself. */
3012 static void
3013 delete_noop_moves (f)
3014 rtx f;
3016 rtx insn;
3017 for (insn = f; insn; insn = NEXT_INSN (insn))
3019 if (GET_CODE (insn) == INSN && noop_move_p (insn))
3021 PUT_CODE (insn, NOTE);
3022 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
3023 NOTE_SOURCE_FILE (insn) = 0;
3028 /* Determine if the stack pointer is constant over the life of the function.
3029 Only useful before prologues have been emitted. */
3031 static void
3032 notice_stack_pointer_modification_1 (x, pat, data)
3033 rtx x;
3034 rtx pat ATTRIBUTE_UNUSED;
3035 void *data ATTRIBUTE_UNUSED;
3037 if (x == stack_pointer_rtx
3038 /* The stack pointer is only modified indirectly as the result
3039 of a push until later in flow. See the comments in rtl.texi
3040 regarding Embedded Side-Effects on Addresses. */
3041 || (GET_CODE (x) == MEM
3042 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == 'a'
3043 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx))
3044 current_function_sp_is_unchanging = 0;
3047 static void
3048 notice_stack_pointer_modification (f)
3049 rtx f;
3051 rtx insn;
3053 /* Assume that the stack pointer is unchanging if alloca hasn't
3054 been used. */
3055 current_function_sp_is_unchanging = !current_function_calls_alloca;
3056 if (! current_function_sp_is_unchanging)
3057 return;
3059 for (insn = f; insn; insn = NEXT_INSN (insn))
3061 if (INSN_P (insn))
3063 /* Check if insn modifies the stack pointer. */
3064 note_stores (PATTERN (insn), notice_stack_pointer_modification_1,
3065 NULL);
3066 if (! current_function_sp_is_unchanging)
3067 return;
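/* Example: an explicit adjustment such as
       (set (reg sp) (plus (reg sp) (const_int -16)))
   is caught directly, while a push is caught through its address,
   e.g. (set (mem (pre_dec (reg sp))) ...), since before flow runs the
   stack pointer is modified only inside such embedded side effects.  */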
3072 /* Mark a register in SET. Hard registers in large modes get all
3073 of their component registers set as well. */
3075 static void
3076 mark_reg (reg, xset)
3077 rtx reg;
3078 void *xset;
3080 regset set = (regset) xset;
3081 int regno = REGNO (reg);
3083 if (GET_MODE (reg) == BLKmode)
3084 abort ();
3086 SET_REGNO_REG_SET (set, regno);
3087 if (regno < FIRST_PSEUDO_REGISTER)
3089 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
3090 while (--n > 0)
3091 SET_REGNO_REG_SET (set, regno + n);
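/* Example: if HARD_REGNO_NREGS says a DImode value in hard reg 3
   occupies two registers, mark_reg sets bits 3 and 4, so liveness of
   the multi-word value covers each component register.  */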
3095 /* Mark those regs which are needed at the end of the function as live
3096 at the end of the last basic block. */
3098 static void
3099 mark_regs_live_at_end (set)
3100 regset set;
3102 int i;
3104 /* If exiting needs the right stack value, consider the stack pointer
3105 live at the end of the function. */
3106 if ((HAVE_epilogue && reload_completed)
3107 || ! EXIT_IGNORE_STACK
3108 || (! FRAME_POINTER_REQUIRED
3109 && ! current_function_calls_alloca
3110 && flag_omit_frame_pointer)
3111 || current_function_sp_is_unchanging)
3113 SET_REGNO_REG_SET (set, STACK_POINTER_REGNUM);
3116 /* Mark the frame pointer if needed at the end of the function. If
3117 we end up eliminating it, it will be removed from the live list
3118 of each basic block by reload. */
3120 if (! reload_completed || frame_pointer_needed)
3122 SET_REGNO_REG_SET (set, FRAME_POINTER_REGNUM);
3123 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3124 /* If they are different, also mark the hard frame pointer as live. */
3125 if (! LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
3126 SET_REGNO_REG_SET (set, HARD_FRAME_POINTER_REGNUM);
3127 #endif
3130 #ifdef PIC_OFFSET_TABLE_REGNUM
3131 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
3132 /* Many architectures have a GP register even without flag_pic.
3133 Assume the pic register is not in use, or will be handled by
3134 other means, if it is not fixed. */
3135 if (fixed_regs[PIC_OFFSET_TABLE_REGNUM])
3136 SET_REGNO_REG_SET (set, PIC_OFFSET_TABLE_REGNUM);
3137 #endif
3138 #endif
3140 /* Mark all global registers, and all registers used by the epilogue
3141 as being live at the end of the function since they may be
3142 referenced by our caller. */
3143 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3144 if (global_regs[i] || EPILOGUE_USES (i))
3145 SET_REGNO_REG_SET (set, i);
3147 if (HAVE_epilogue && reload_completed)
3149 /* Mark all call-saved registers that we actually used. */
3150 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3151 if (regs_ever_live[i] && ! call_used_regs[i] && ! LOCAL_REGNO (i))
3152 SET_REGNO_REG_SET (set, i);
3155 #ifdef EH_RETURN_DATA_REGNO
3156 /* Mark the registers that will contain data for the handler. */
3157 if (reload_completed && current_function_calls_eh_return)
3158 for (i = 0; ; ++i)
3160 unsigned regno = EH_RETURN_DATA_REGNO(i);
3161 if (regno == INVALID_REGNUM)
3162 break;
3163 SET_REGNO_REG_SET (set, regno);
3165 #endif
3166 #ifdef EH_RETURN_STACKADJ_RTX
3167 if ((! HAVE_epilogue || ! reload_completed)
3168 && current_function_calls_eh_return)
3170 rtx tmp = EH_RETURN_STACKADJ_RTX;
3171 if (tmp && REG_P (tmp))
3172 mark_reg (tmp, set);
3174 #endif
3175 #ifdef EH_RETURN_HANDLER_RTX
3176 if ((! HAVE_epilogue || ! reload_completed)
3177 && current_function_calls_eh_return)
3179 rtx tmp = EH_RETURN_HANDLER_RTX;
3180 if (tmp && REG_P (tmp))
3181 mark_reg (tmp, set);
3183 #endif
3185 /* Mark function return value. */
3186 diddle_return_value (mark_reg, set);
3189 /* Callback function for for_each_successor_phi. DATA is a regset.
3190 Sets the SRC_REGNO, the regno of the phi alternative for phi node
3191 INSN, in the regset. */
3193 static int
3194 set_phi_alternative_reg (insn, dest_regno, src_regno, data)
3195 rtx insn ATTRIBUTE_UNUSED;
3196 int dest_regno ATTRIBUTE_UNUSED;
3197 int src_regno;
3198 void *data;
3200 regset live = (regset) data;
3201 SET_REGNO_REG_SET (live, src_regno);
3202 return 0;
3205 /* Propagate global life info around the graph of basic blocks. Begin
3206 considering blocks with their corresponding bit set in BLOCKS_IN.
3207 If BLOCKS_IN is null, consider it the universal set.
3209 BLOCKS_OUT is set for every block that was changed. */
3211 static void
3212 calculate_global_regs_live (blocks_in, blocks_out, flags)
3213 sbitmap blocks_in, blocks_out;
3214 int flags;
3216 basic_block *queue, *qhead, *qtail, *qend;
3217 regset tmp, new_live_at_end, call_used;
3218 regset_head tmp_head, call_used_head;
3219 regset_head new_live_at_end_head;
3220 int i;
3222 tmp = INITIALIZE_REG_SET (tmp_head);
3223 new_live_at_end = INITIALIZE_REG_SET (new_live_at_end_head);
3224 call_used = INITIALIZE_REG_SET (call_used_head);
3226 /* Inconveniently, this is only readily available in hard reg set form. */
3227 for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
3228 if (call_used_regs[i])
3229 SET_REGNO_REG_SET (call_used, i);
3231 /* Create a worklist. Allocate an extra slot for ENTRY_BLOCK, and one
3232 because the `head == tail' style test for an empty queue doesn't
3233 work with a full queue. */
3234 queue = (basic_block *) xmalloc ((n_basic_blocks + 2) * sizeof (*queue));
3235 qtail = queue;
3236 qhead = qend = queue + n_basic_blocks + 2;
3238 /* Queue the blocks set in the initial mask. Do this in reverse block
3239 number order so that the first round is more likely to do
3240 useful work. We use AUX non-null to flag that the block is queued. */
3241 if (blocks_in)
3243 /* Clear out the garbage that might be hanging out in bb->aux. */
3244 for (i = n_basic_blocks - 1; i >= 0; --i)
3245 BASIC_BLOCK (i)->aux = NULL;
3247 EXECUTE_IF_SET_IN_SBITMAP (blocks_in, 0, i,
3249 basic_block bb = BASIC_BLOCK (i);
3250 *--qhead = bb;
3251 bb->aux = bb;
3254 else
3256 for (i = 0; i < n_basic_blocks; ++i)
3258 basic_block bb = BASIC_BLOCK (i);
3259 *--qhead = bb;
3260 bb->aux = bb;
3264 if (blocks_out)
3265 sbitmap_zero (blocks_out);
3267 while (qhead != qtail)
3269 int rescan, changed;
3270 basic_block bb;
3271 edge e;
3273 bb = *qhead++;
3274 if (qhead == qend)
3275 qhead = queue;
3276 bb->aux = NULL;
3278 /* Begin by propagating live_at_start from the successor blocks. */
3279 CLEAR_REG_SET (new_live_at_end);
3280 for (e = bb->succ; e; e = e->succ_next)
3282 basic_block sb = e->dest;
3284 /* Call-clobbered registers die across exception and call edges. */
3285 /* ??? Abnormal call edges ignored for the moment, as this gets
3286 confused by sibling call edges, which crashes reg-stack. */
3287 if (e->flags & EDGE_EH)
3289 bitmap_operation (tmp, sb->global_live_at_start,
3290 call_used, BITMAP_AND_COMPL);
3291 IOR_REG_SET (new_live_at_end, tmp);
3293 else
3294 IOR_REG_SET (new_live_at_end, sb->global_live_at_start);
3297 /* The all-important stack pointer must always be live. */
3298 SET_REGNO_REG_SET (new_live_at_end, STACK_POINTER_REGNUM);
3300 /* Before reload, there are a few registers that must be forced
3301 live everywhere -- which might not already be the case for
3302 blocks within infinite loops. */
3303 if (! reload_completed)
3305 /* Any reference to any pseudo before reload is a potential
3306 reference of the frame pointer. */
3307 SET_REGNO_REG_SET (new_live_at_end, FRAME_POINTER_REGNUM);
3309 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3310 /* Pseudos with argument area equivalences may require
3311 reloading via the argument pointer. */
3312 if (fixed_regs[ARG_POINTER_REGNUM])
3313 SET_REGNO_REG_SET (new_live_at_end, ARG_POINTER_REGNUM);
3314 #endif
3316 #ifdef PIC_OFFSET_TABLE_REGNUM
3317 /* Any constant, or pseudo with constant equivalences, may
3318 require reloading from memory using the pic register. */
3319 if (fixed_regs[PIC_OFFSET_TABLE_REGNUM])
3320 SET_REGNO_REG_SET (new_live_at_end, PIC_OFFSET_TABLE_REGNUM);
3321 #endif
3324 /* Regs used in phi nodes are not included in
3325 global_live_at_start, since they are live only along a
3326 particular edge. Set those regs that are live because of a
3327 phi node alternative corresponding to this particular block. */
3328 if (in_ssa_form)
3329 for_each_successor_phi (bb, &set_phi_alternative_reg,
3330 new_live_at_end);
3332 if (bb == ENTRY_BLOCK_PTR)
3334 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3335 continue;
3338 /* On our first pass through this block, we'll go ahead and continue.
3339 Recognize first pass by local_set NULL. On subsequent passes, we
3340 get to skip out early if live_at_end wouldn't have changed. */
3342 if (bb->local_set == NULL)
3344 bb->local_set = OBSTACK_ALLOC_REG_SET (&flow_obstack);
3345 bb->cond_local_set = OBSTACK_ALLOC_REG_SET (&flow_obstack);
3346 rescan = 1;
3348 else
3350 /* If any bits were removed from live_at_end, we'll have to
3351 rescan the block. This wouldn't be necessary if we had
3352 precalculated local_live; however, with PROP_SCAN_DEAD_CODE
3353 local_live is really dependent on live_at_end. */
3354 CLEAR_REG_SET (tmp);
3355 rescan = bitmap_operation (tmp, bb->global_live_at_end,
3356 new_live_at_end, BITMAP_AND_COMPL);
3358 if (! rescan)
3360 /* If any of the registers in the new live_at_end set are
3361 conditionally set in this basic block, we must rescan.
3362 This is because conditional lifetimes at the end of the
3363 block do not just take the live_at_end set into account,
3364 but also the liveness at the start of each successor
3365 block. We can miss changes in those sets if we only
3366 compare the new live_at_end against the previous one. */
3367 CLEAR_REG_SET (tmp);
3368 rescan = bitmap_operation (tmp, new_live_at_end,
3369 bb->cond_local_set, BITMAP_AND);
3372 if (! rescan)
3374 /* Find the set of changed bits. Take this opportunity
3375 to notice when this set is empty and exit early. */
3376 CLEAR_REG_SET (tmp);
3377 changed = bitmap_operation (tmp, bb->global_live_at_end,
3378 new_live_at_end, BITMAP_XOR);
3379 if (! changed)
3380 continue;
3382 /* If any of the changed bits overlap with local_set,
3383 we'll have to rescan the block. Detect overlap by
3384 the AND with ~local_set turning off bits. */
3385 rescan = bitmap_operation (tmp, tmp, bb->local_set,
3386 BITMAP_AND_COMPL);
3390 /* Let our caller know that BB changed enough to require its
3391 death notes updated. */
3392 if (blocks_out)
3393 SET_BIT (blocks_out, bb->index);
3395 if (! rescan)
3397 /* Add to live_at_start the set of all registers in
3398 new_live_at_end that aren't in the old live_at_end. */
3400 bitmap_operation (tmp, new_live_at_end, bb->global_live_at_end,
3401 BITMAP_AND_COMPL);
3402 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3404 changed = bitmap_operation (bb->global_live_at_start,
3405 bb->global_live_at_start,
3406 tmp, BITMAP_IOR);
3407 if (! changed)
3408 continue;
3410 else
3412 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3414 /* Rescan the block insn by insn to turn (a copy of) live_at_end
3415 into live_at_start. */
3416 propagate_block (bb, new_live_at_end, bb->local_set,
3417 bb->cond_local_set, flags);
3419 /* If live_at_start didn't change, no need to go further. */
3420 if (REG_SET_EQUAL_P (bb->global_live_at_start, new_live_at_end))
3421 continue;
3423 COPY_REG_SET (bb->global_live_at_start, new_live_at_end);
3426 /* Queue all predecessors of BB so that we may re-examine
3427 their live_at_end. */
3428 for (e = bb->pred; e; e = e->pred_next)
3430 basic_block pb = e->src;
3431 if (pb->aux == NULL)
3433 *qtail++ = pb;
3434 if (qtail == qend)
3435 qtail = queue;
3436 pb->aux = pb;
3441 FREE_REG_SET (tmp);
3442 FREE_REG_SET (new_live_at_end);
3443 FREE_REG_SET (call_used);
3445 if (blocks_out)
3447 EXECUTE_IF_SET_IN_SBITMAP (blocks_out, 0, i,
3449 basic_block bb = BASIC_BLOCK (i);
3450 FREE_REG_SET (bb->local_set);
3451 FREE_REG_SET (bb->cond_local_set);
3454 else
3456 for (i = n_basic_blocks - 1; i >= 0; --i)
3458 basic_block bb = BASIC_BLOCK (i);
3459 FREE_REG_SET (bb->local_set);
3460 FREE_REG_SET (bb->cond_local_set);
3464 free (queue);
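/* For reference, the fixed point computed above is classic backward
   liveness:

       live_at_end (B)   = union of live_at_start (S) over successors S
       live_at_start (B) = uses (B) | (live_at_end (B) & ~local_set (B))

   propagate_block supplies the per-block transfer function, and the
   worklist re-queues predecessors until nothing changes.  */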
3467 /* Subroutines of life analysis. */
3469 /* Allocate the permanent data structures that represent the results
3470 of life analysis. Not static since used also for stupid life analysis. */
3472 static void
3473 allocate_bb_life_data ()
3475 register int i;
3477 for (i = 0; i < n_basic_blocks; i++)
3479 basic_block bb = BASIC_BLOCK (i);
3481 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
3482 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
3485 ENTRY_BLOCK_PTR->global_live_at_end
3486 = OBSTACK_ALLOC_REG_SET (&flow_obstack);
3487 EXIT_BLOCK_PTR->global_live_at_start
3488 = OBSTACK_ALLOC_REG_SET (&flow_obstack);
3490 regs_live_at_setjmp = OBSTACK_ALLOC_REG_SET (&flow_obstack);
3493 void
3494 allocate_reg_life_data ()
3496 int i;
3498 max_regno = max_reg_num ();
3500 /* Recalculate the register space, in case it has grown. Old style
3501 vector oriented regsets would set regset_{size,bytes} here also. */
3502 allocate_reg_info (max_regno, FALSE, FALSE);
3504 /* Reset all the data we'll collect in propagate_block and its
3505 subroutines. */
3506 for (i = 0; i < max_regno; i++)
3508 REG_N_SETS (i) = 0;
3509 REG_N_REFS (i) = 0;
3510 REG_N_DEATHS (i) = 0;
3511 REG_N_CALLS_CROSSED (i) = 0;
3512 REG_LIVE_LENGTH (i) = 0;
3513 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
3517 /* Delete dead instructions for propagate_block. */
3519 static void
3520 propagate_block_delete_insn (bb, insn)
3521 basic_block bb;
3522 rtx insn;
3524 rtx inote = find_reg_note (insn, REG_LABEL, NULL_RTX);
3526 /* If the insn referred to a label, and that label was attached to
3527 an ADDR_VEC, it's safe to delete the ADDR_VEC. In fact, it's
3528 pretty much mandatory to delete it, because the ADDR_VEC may be
3529 referencing labels that no longer exist. */
3531 if (inote)
3533 rtx label = XEXP (inote, 0);
3534 rtx next;
3536 /* The label may be forced if it has been put in the constant
3537 pool. If that is the only use we must discard the table
3538 jump following it, but not the label itself. */
3539 if (LABEL_NUSES (label) == 1 + LABEL_PRESERVE_P (label)
3540 && (next = next_nonnote_insn (label)) != NULL
3541 && GET_CODE (next) == JUMP_INSN
3542 && (GET_CODE (PATTERN (next)) == ADDR_VEC
3543 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
3545 rtx pat = PATTERN (next);
3546 int diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
3547 int len = XVECLEN (pat, diff_vec_p);
3548 int i;
3550 for (i = 0; i < len; i++)
3551 LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0))--;
3553 flow_delete_insn (next);
3557 if (bb->end == insn)
3558 bb->end = PREV_INSN (insn);
3559 flow_delete_insn (insn);
3562 /* Delete dead libcalls for propagate_block. Return the insn
3563 before the libcall. */
3565 static rtx
3566 propagate_block_delete_libcall (bb, insn, note)
3567 basic_block bb;
3568 rtx insn, note;
3570 rtx first = XEXP (note, 0);
3571 rtx before = PREV_INSN (first);
3573 if (insn == bb->end)
3574 bb->end = before;
3576 flow_delete_insn_chain (first, insn);
3577 return before;
3580 /* Update the life-status of regs for one insn. Return the previous insn. */
3582 rtx
3583 propagate_one_insn (pbi, insn)
3584 struct propagate_block_info *pbi;
3585 rtx insn;
3587 rtx prev = PREV_INSN (insn);
3588 int flags = pbi->flags;
3589 int insn_is_dead = 0;
3590 int libcall_is_dead = 0;
3591 rtx note;
3592 int i;
3594 if (! INSN_P (insn))
3595 return prev;
3597 note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
3598 if (flags & PROP_SCAN_DEAD_CODE)
3600 insn_is_dead = insn_dead_p (pbi, PATTERN (insn), 0, REG_NOTES (insn));
3601 libcall_is_dead = (insn_is_dead && note != 0
3602 && libcall_dead_p (pbi, note, insn));
3605 /* If an instruction consists of just dead store(s) on final pass,
3606 delete it. */
3607 if ((flags & PROP_KILL_DEAD_CODE) && insn_is_dead)
3609 /* If we're trying to delete a prologue or epilogue instruction
3610 that isn't flagged as possibly being dead, something is wrong.
3611 But if we are keeping the stack pointer depressed, we might well
3612 be deleting insns that are used to compute the amount to update
3613 it by, so they are fine. */
3614 if (reload_completed
3615 && !(TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
3616 && (TYPE_RETURNS_STACK_DEPRESSED
3617 (TREE_TYPE (current_function_decl))))
3618 && (((HAVE_epilogue || HAVE_prologue)
3619 && prologue_epilogue_contains (insn))
3620 || (HAVE_sibcall_epilogue
3621 && sibcall_epilogue_contains (insn)))
3622 && find_reg_note (insn, REG_MAYBE_DEAD, NULL_RTX) == 0)
3623 abort ();
3625 /* Record sets. Do this even for dead instructions, since they
3626 would have killed the values if they hadn't been deleted. */
3627 mark_set_regs (pbi, PATTERN (insn), insn);
3629 /* CC0 is now known to be dead. Either this insn used it,
3630 in which case it doesn't anymore, or clobbered it,
3631 so the next insn can't use it. */
3632 pbi->cc0_live = 0;
3634 if (libcall_is_dead)
3635 prev = propagate_block_delete_libcall (pbi->bb, insn, note);
3636 else
3637 propagate_block_delete_insn (pbi->bb, insn);
3639 return prev;
3642 /* See if this is an increment or decrement that can be merged into
3643 a following memory address. */
3644 #ifdef AUTO_INC_DEC
3646 register rtx x = single_set (insn);
3648 /* Does this instruction increment or decrement a register? */
3649 if ((flags & PROP_AUTOINC)
3650 && x != 0
3651 && GET_CODE (SET_DEST (x)) == REG
3652 && (GET_CODE (SET_SRC (x)) == PLUS
3653 || GET_CODE (SET_SRC (x)) == MINUS)
3654 && XEXP (SET_SRC (x), 0) == SET_DEST (x)
3655 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
3656 /* Ok, look for a following memory ref we can combine with.
3657 If one is found, change the memory ref to a PRE_INC
3658 or PRE_DEC, cancel this insn, and return 1.
3659 Return 0 if nothing has been done. */
3660 && try_pre_increment_1 (pbi, insn))
3661 return prev;
3663 #endif /* AUTO_INC_DEC */
3665 CLEAR_REG_SET (pbi->new_set);
3667 /* If this is not the final pass, and this insn is copying the value of
3668 a library call and it's dead, don't scan the insns that perform the
3669 library call, so that the call's arguments are not marked live. */
3670 if (libcall_is_dead)
3672 /* Record the death of the dest reg. */
3673 mark_set_regs (pbi, PATTERN (insn), insn);
3675 insn = XEXP (note, 0);
3676 return PREV_INSN (insn);
3678 else if (GET_CODE (PATTERN (insn)) == SET
3679 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
3680 && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
3681 && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx
3682 && GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == CONST_INT)
3683 /* We have an insn to pop a constant amount off the stack.
3684 (Such insns use PLUS regardless of the direction of the stack,
3685 and any insn to adjust the stack by a constant is always a pop.)
3686 These insns, if not dead stores, have no effect on life. */
3688 else
3690 /* Any regs live at the time of a call instruction must not go
3691 in a register clobbered by calls. Find all regs now live and
3692 record this for them. */
3694 if (GET_CODE (insn) == CALL_INSN && (flags & PROP_REG_INFO))
3695 EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i,
3696 { REG_N_CALLS_CROSSED (i)++; });
3698 /* Record sets. Do this even for dead instructions, since they
3699 would have killed the values if they hadn't been deleted. */
3700 mark_set_regs (pbi, PATTERN (insn), insn);
3702 if (GET_CODE (insn) == CALL_INSN)
3704 register int i;
3705 rtx note, cond;
3707 cond = NULL_RTX;
3708 if (GET_CODE (PATTERN (insn)) == COND_EXEC)
3709 cond = COND_EXEC_TEST (PATTERN (insn));
3711 /* Non-constant calls clobber memory. */
3712 if (! CONST_CALL_P (insn))
3714 free_EXPR_LIST_list (&pbi->mem_set_list);
3715 pbi->mem_set_list_len = 0;
3718 /* There may be extra registers to be clobbered. */
3719 for (note = CALL_INSN_FUNCTION_USAGE (insn);
3720 note;
3721 note = XEXP (note, 1))
3722 if (GET_CODE (XEXP (note, 0)) == CLOBBER)
3723 mark_set_1 (pbi, CLOBBER, XEXP (XEXP (note, 0), 0),
3724 cond, insn, pbi->flags);
3726 /* Calls change all call-used and global registers. */
3727 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3728 if (call_used_regs[i] && ! global_regs[i]
3729 && ! fixed_regs[i])
3731 /* We do not want REG_UNUSED notes for these registers. */
3732 mark_set_1 (pbi, CLOBBER, gen_rtx_REG (reg_raw_mode[i], i),
3733 cond, insn,
3734 pbi->flags & ~(PROP_DEATH_NOTES | PROP_REG_INFO));
3738 /* If an insn doesn't use CC0, it becomes dead since we assume
3739 that every insn clobbers it. So show it dead here;
3740 mark_used_regs will set it live if it is referenced. */
3741 pbi->cc0_live = 0;
3743 /* Record uses. */
3744 if (! insn_is_dead)
3745 mark_used_regs (pbi, PATTERN (insn), NULL_RTX, insn);
3747 /* Sometimes we may have inserted something before INSN (such as a move)
3748 when we make an auto-inc. So ensure we will scan those insns. */
3749 #ifdef AUTO_INC_DEC
3750 prev = PREV_INSN (insn);
3751 #endif
3753 if (! insn_is_dead && GET_CODE (insn) == CALL_INSN)
3755 register int i;
3756 rtx note, cond;
3758 cond = NULL_RTX;
3759 if (GET_CODE (PATTERN (insn)) == COND_EXEC)
3760 cond = COND_EXEC_TEST (PATTERN (insn));
3762 /* Calls use their arguments. */
3763 for (note = CALL_INSN_FUNCTION_USAGE (insn);
3764 note;
3765 note = XEXP (note, 1))
3766 if (GET_CODE (XEXP (note, 0)) == USE)
3767 mark_used_regs (pbi, XEXP (XEXP (note, 0), 0),
3768 cond, insn);
3770 /* The stack ptr is used (honorarily) by a CALL insn. */
3771 SET_REGNO_REG_SET (pbi->reg_live, STACK_POINTER_REGNUM);
3773 /* Calls may also reference any of the global registers,
3774 so they are made live. */
3775 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3776 if (global_regs[i])
3777 mark_used_reg (pbi, gen_rtx_REG (reg_raw_mode[i], i),
3778 cond, insn);
3782 /* On final pass, update counts of how many insns in which each reg
3783 is live. */
3784 if (flags & PROP_REG_INFO)
3785 EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i,
3786 { REG_LIVE_LENGTH (i)++; });
3788 return prev;
3791 /* Initialize a propagate_block_info struct for public consumption.
3792 Note that the structure itself is opaque to this file, but that
3793 the user can use the regsets provided here. */
3795 struct propagate_block_info *
3796 init_propagate_block_info (bb, live, local_set, cond_local_set, flags)
3797 basic_block bb;
3798 regset live, local_set, cond_local_set;
3799 int flags;
3801 struct propagate_block_info *pbi = xmalloc (sizeof (*pbi));
3803 pbi->bb = bb;
3804 pbi->reg_live = live;
3805 pbi->mem_set_list = NULL_RTX;
3806 pbi->mem_set_list_len = 0;
3807 pbi->local_set = local_set;
3808 pbi->cond_local_set = cond_local_set;
3809 pbi->cc0_live = 0;
3810 pbi->flags = flags;
3812 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
3813 pbi->reg_next_use = (rtx *) xcalloc (max_reg_num (), sizeof (rtx));
3814 else
3815 pbi->reg_next_use = NULL;
3817 pbi->new_set = BITMAP_XMALLOC ();
3819 #ifdef HAVE_conditional_execution
3820 pbi->reg_cond_dead = splay_tree_new (splay_tree_compare_ints, NULL,
3821 free_reg_cond_life_info);
3822 pbi->reg_cond_reg = BITMAP_XMALLOC ();
3824 /* If this block ends in a conditional branch, for each register live
3825 from one side of the branch and not the other, record the register
3826 as conditionally dead. */
3827 if (GET_CODE (bb->end) == JUMP_INSN
3828 && any_condjump_p (bb->end))
3830 regset_head diff_head;
3831 regset diff = INITIALIZE_REG_SET (diff_head);
3832 basic_block bb_true, bb_false;
3833 rtx cond_true, cond_false, set_src;
3834 int i;
3836 /* Identify the successor blocks. */
3837 bb_true = bb->succ->dest;
3838 if (bb->succ->succ_next != NULL)
3840 bb_false = bb->succ->succ_next->dest;
3842 if (bb->succ->flags & EDGE_FALLTHRU)
3844 basic_block t = bb_false;
3845 bb_false = bb_true;
3846 bb_true = t;
3848 else if (! (bb->succ->succ_next->flags & EDGE_FALLTHRU))
3849 abort ();
3851 else
3853 /* This can happen with a conditional jump to the next insn. */
3854 if (JUMP_LABEL (bb->end) != bb_true->head)
3855 abort ();
3857 /* Simplest way to do nothing. */
3858 bb_false = bb_true;
3861 /* Extract the condition from the branch. */
3862 set_src = SET_SRC (pc_set (bb->end));
3863 cond_true = XEXP (set_src, 0);
3864 cond_false = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond_true)),
3865 GET_MODE (cond_true), XEXP (cond_true, 0),
3866 XEXP (cond_true, 1));
3867 if (GET_CODE (XEXP (set_src, 1)) == PC)
3869 rtx t = cond_false;
3870 cond_false = cond_true;
3871 cond_true = t;
3874 /* Compute which registers lead different lives in the successors. */
3875 if (bitmap_operation (diff, bb_true->global_live_at_start,
3876 bb_false->global_live_at_start, BITMAP_XOR))
3878 rtx reg = XEXP (cond_true, 0);
3880 if (GET_CODE (reg) == SUBREG)
3881 reg = SUBREG_REG (reg);
3883 if (GET_CODE (reg) != REG)
3884 abort ();
3886 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (reg));
3888 /* For each such register, mark it conditionally dead. */
3889 EXECUTE_IF_SET_IN_REG_SET
3890 (diff, 0, i,
3892 struct reg_cond_life_info *rcli;
3893 rtx cond;
3895 rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
3897 if (REGNO_REG_SET_P (bb_true->global_live_at_start, i))
3898 cond = cond_false;
3899 else
3900 cond = cond_true;
3901 rcli->condition = cond;
3902 rcli->stores = const0_rtx;
3903 rcli->orig_condition = cond;
3905 splay_tree_insert (pbi->reg_cond_dead, i,
3906 (splay_tree_value) rcli);
3910 FREE_REG_SET (diff);
3912 #endif
3914 /* If this block has no successors, any stores to the frame that aren't
3915 used later in the block are dead. So make a pass over the block
3916 recording any such stores that are made and show them dead at the end.
3917 We do a very conservative and simple job here. */
3918 if (optimize
3919 && ! (TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
3920 && (TYPE_RETURNS_STACK_DEPRESSED
3921 (TREE_TYPE (current_function_decl))))
3922 && (flags & PROP_SCAN_DEAD_CODE)
3923 && (bb->succ == NULL
3924 || (bb->succ->succ_next == NULL
3925 && bb->succ->dest == EXIT_BLOCK_PTR
3926 && ! current_function_calls_eh_return)))
3928 rtx insn;
3929 for (insn = bb->end; insn != bb->head; insn = PREV_INSN (insn))
3930 if (GET_CODE (insn) == INSN
3931 && GET_CODE (PATTERN (insn)) == SET
3932 && GET_CODE (SET_DEST (PATTERN (insn))) == MEM)
3934 rtx mem = SET_DEST (PATTERN (insn));
3936 /* This optimization is performed by faking a store to the
3937 memory at the end of the block. This doesn't work for
3938 unchanging memories because multiple stores to unchanging
3939 memory are illegal and alias analysis doesn't consider it. */
3940 if (RTX_UNCHANGING_P (mem))
3941 continue;
3943 if (XEXP (mem, 0) == frame_pointer_rtx
3944 || (GET_CODE (XEXP (mem, 0)) == PLUS
3945 && XEXP (XEXP (mem, 0), 0) == frame_pointer_rtx
3946 && GET_CODE (XEXP (XEXP (mem, 0), 1)) == CONST_INT))
3948 #ifdef AUTO_INC_DEC
3949 /* Store a copy of mem, otherwise the address may be scrogged
3950 by find_auto_inc. This matters because insn_dead_p uses
3951 an rtx_equal_p check to determine if two addresses are
3952 the same. This works before find_auto_inc, but fails
3953 after find_auto_inc, causing discrepancies between the
3954 set of live registers calculated during the
3955 calculate_global_regs_live phase and what actually exists
3956 after flow completes, leading to aborts. */
3957 if (flags & PROP_AUTOINC)
3958 mem = shallow_copy_rtx (mem);
3959 #endif
3960 pbi->mem_set_list = alloc_EXPR_LIST (0, mem, pbi->mem_set_list);
3961 if (++pbi->mem_set_list_len >= MAX_MEM_SET_LIST_LEN)
3962 break;
3967 return pbi;
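/* Illustrative sketch, not part of flow.c: the conditional-death setup in
   init_propagate_block_info above boils down to an XOR of the two successor
   blocks' live-at-start bit vectors -- a register whose bit differs between
   the true and false arms is live on exactly one side, hence conditionally
   dead.  The flat word arrays below are a toy stand-in for the real bitmap.h
   regsets; all names are hypothetical.  */

static void
example_one_sided_live_regs (live_true, live_false, diff, nwords)
     unsigned long *live_true, *live_false, *diff;
     int nwords;
{
  int i;

  /* Bit N of DIFF ends up set iff register N is live in exactly one of
     the two successors, mirroring the BITMAP_XOR operation above.  */
  for (i = 0; i < nwords; i++)
    diff[i] = live_true[i] ^ live_false[i];
}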
3970 /* Release a propagate_block_info struct. */
3972 void
3973 free_propagate_block_info (pbi)
3974 struct propagate_block_info *pbi;
3976 free_EXPR_LIST_list (&pbi->mem_set_list);
3978 BITMAP_XFREE (pbi->new_set);
3980 #ifdef HAVE_conditional_execution
3981 splay_tree_delete (pbi->reg_cond_dead);
3982 BITMAP_XFREE (pbi->reg_cond_reg);
3983 #endif
3985 if (pbi->reg_next_use)
3986 free (pbi->reg_next_use);
3988 free (pbi);
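/* Illustrative sketch, not part of flow.c: pbi->mem_set_list is an
   EXPR_LIST deliberately capped at MAX_MEM_SET_LIST_LEN entries so the
   dead-store scan stays cheap.  The toy push below shows the bounded-list
   discipline with a hypothetical node type; the real code allocates nodes
   with alloc_EXPR_LIST.  */

struct example_mem_entry
{
  void *mem;
  struct example_mem_entry *next;
};

static int
example_track_store (head, len, max_len, node)
     struct example_mem_entry **head;
     int *len, max_len;
     struct example_mem_entry *node;
{
  if (*len >= max_len)
    return 0;              /* List is full; give up tracking this store.  */

  node->next = *head;      /* Push on the front, newest store first.  */
  *head = node;
  (*len)++;
  return 1;
}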
3991 /* Compute the registers live at the beginning of a basic block BB from
3992 those live at the end.
3994 When called, REG_LIVE contains those live at the end. On return, it
3995 contains those live at the beginning.
3997 LOCAL_SET, if non-null, will be set with all registers killed
3998 unconditionally by this basic block.
3999 Likewise, COND_LOCAL_SET, if non-null, will be set with all registers
4000 killed conditionally by this basic block. If there is any unconditional
4001 set of a register, then the corresponding bit will be set in LOCAL_SET
4002 and cleared in COND_LOCAL_SET.
4003 It is valid for LOCAL_SET and COND_LOCAL_SET to be the same set. In this
4004 case, the resulting set will be equal to the union of the two sets that
4005 would otherwise be computed. */
4007 void
4008 propagate_block (bb, live, local_set, cond_local_set, flags)
4009 basic_block bb;
4010 regset live;
4011 regset local_set;
4012 regset cond_local_set;
4013 int flags;
4015 struct propagate_block_info *pbi;
4016 rtx insn, prev;
4018 pbi = init_propagate_block_info (bb, live, local_set, cond_local_set, flags);
4020 if (flags & PROP_REG_INFO)
4022 register int i;
4024 /* Process the regs live at the end of the block.
4025 Mark them as not local to any one basic block. */
4026 EXECUTE_IF_SET_IN_REG_SET (live, 0, i,
4027 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
4030 /* Scan the block an insn at a time from end to beginning. */
4032 for (insn = bb->end;; insn = prev)
4034 /* If this is a call to `setjmp' et al, warn if any
4035 non-volatile datum is live. */
4036 if ((flags & PROP_REG_INFO)
4037 && GET_CODE (insn) == NOTE
4038 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
4039 IOR_REG_SET (regs_live_at_setjmp, pbi->reg_live);
4041 prev = propagate_one_insn (pbi, insn);
4043 if (insn == bb->head)
4044 break;
4047 free_propagate_block_info (pbi);
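/* Illustrative sketch, not part of flow.c: the heart of propagate_block is
   the classic backward liveness recurrence -- start from the live-out set
   and, for each insn from last to first, clear the bits of the registers it
   sets and then set the bits of the registers it uses.  Toy encoding with at
   most one def and two uses per insn and a single word covering registers
   0..31; all names are hypothetical.  */

struct example_insn
{
  int def;           /* register set by the insn, or -1 */
  int use1, use2;    /* registers used by the insn, or -1 */
};

static void
example_backward_liveness (insns, n, live)
     struct example_insn *insns;
     int n;
     unsigned long *live;    /* live-out on entry, live-in on exit */
{
  int i;

  for (i = n - 1; i >= 0; i--)
    {
      if (insns[i].def >= 0)
	*live &= ~(1UL << insns[i].def);    /* a set kills liveness above */
      if (insns[i].use1 >= 0)
	*live |= 1UL << insns[i].use1;      /* a use makes the reg live */
      if (insns[i].use2 >= 0)
	*live |= 1UL << insns[i].use2;
    }
}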
4050 /* Return 1 if X (the body of an insn, or part of it) is just dead stores
4051 (SET expressions whose destinations are registers dead after the insn).
4052 NEEDED is the regset that says which regs are alive after the insn.
4054 Unless CALL_OK is non-zero, an insn is needed if it contains a CALL.
4056 If X is the entire body of an insn, NOTES contains the reg notes
4057 pertaining to the insn. */
4059 static int
4060 insn_dead_p (pbi, x, call_ok, notes)
4061 struct propagate_block_info *pbi;
4062 rtx x;
4063 int call_ok;
4064 rtx notes ATTRIBUTE_UNUSED;
4066 enum rtx_code code = GET_CODE (x);
4068 #ifdef AUTO_INC_DEC
4069 /* If flow is invoked after reload, we must take existing AUTO_INC
4070 expressions into account. */
4071 if (reload_completed)
4073 for (; notes; notes = XEXP (notes, 1))
4075 if (REG_NOTE_KIND (notes) == REG_INC)
4077 int regno = REGNO (XEXP (notes, 0));
4079 /* Don't delete insns to set global regs. */
4080 if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
4081 || REGNO_REG_SET_P (pbi->reg_live, regno))
4082 return 0;
4086 #endif
4088 /* If setting something that's a reg or part of one,
4089 see if that register's altered value will be live. */
4091 if (code == SET)
4093 rtx r = SET_DEST (x);
4095 #ifdef HAVE_cc0
4096 if (GET_CODE (r) == CC0)
4097 return ! pbi->cc0_live;
4098 #endif
4100 /* A SET that is a subroutine call cannot be dead. */
4101 if (GET_CODE (SET_SRC (x)) == CALL)
4103 if (! call_ok)
4104 return 0;
4107 /* Don't eliminate loads from volatile memory or volatile asms. */
4108 else if (volatile_refs_p (SET_SRC (x)))
4109 return 0;
4111 if (GET_CODE (r) == MEM)
4113 rtx temp;
4115 if (MEM_VOLATILE_P (r))
4116 return 0;
4118 /* Walk the set of memory locations we are currently tracking
4119 and see if one is an identical match to this memory location.
4120 If so, this memory write is dead (remember, we're walking
4121 backwards from the end of the block to the start). Since
4122 rtx_equal_p does not check the alias set or flags, we also
4123 must have the potential for them to conflict (anti_dependence). */
4124 for (temp = pbi->mem_set_list; temp != 0; temp = XEXP (temp, 1))
4125 if (anti_dependence (r, XEXP (temp, 0)))
4127 rtx mem = XEXP (temp, 0);
4129 if (rtx_equal_p (mem, r))
4130 return 1;
4131 #ifdef AUTO_INC_DEC
4132 /* Check if memory reference matches an auto increment. Only
4133 post increment/decrement or modify are valid. */
4134 if (GET_MODE (mem) == GET_MODE (r)
4135 && (GET_CODE (XEXP (mem, 0)) == POST_DEC
4136 || GET_CODE (XEXP (mem, 0)) == POST_INC
4137 || GET_CODE (XEXP (mem, 0)) == POST_MODIFY)
4138 && GET_MODE (XEXP (mem, 0)) == GET_MODE (r)
4139 && rtx_equal_p (XEXP (XEXP (mem, 0), 0), XEXP (r, 0)))
4140 return 1;
4141 #endif
4144 else
4146 while (GET_CODE (r) == SUBREG
4147 || GET_CODE (r) == STRICT_LOW_PART
4148 || GET_CODE (r) == ZERO_EXTRACT)
4149 r = XEXP (r, 0);
4151 if (GET_CODE (r) == REG)
4153 int regno = REGNO (r);
4155 /* Obvious. */
4156 if (REGNO_REG_SET_P (pbi->reg_live, regno))
4157 return 0;
4159 /* If this is a hard register, verify that subsequent
4160 words are not needed. */
4161 if (regno < FIRST_PSEUDO_REGISTER)
4163 int n = HARD_REGNO_NREGS (regno, GET_MODE (r));
4165 while (--n > 0)
4166 if (REGNO_REG_SET_P (pbi->reg_live, regno+n))
4167 return 0;
4170 /* Don't delete insns to set global regs. */
4171 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
4172 return 0;
4174 /* Make sure insns to set the stack pointer aren't deleted. */
4175 if (regno == STACK_POINTER_REGNUM)
4176 return 0;
4178 /* ??? These bits might be redundant with the force live bits
4179 in calculate_global_regs_live. We would delete from
4180 sequential sets; whether this actually affects real code
4181 for anything but the stack pointer I don't know. */
4182 /* Make sure insns to set the frame pointer aren't deleted. */
4183 if (regno == FRAME_POINTER_REGNUM
4184 && (! reload_completed || frame_pointer_needed))
4185 return 0;
4186 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4187 if (regno == HARD_FRAME_POINTER_REGNUM
4188 && (! reload_completed || frame_pointer_needed))
4189 return 0;
4190 #endif
4192 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4193 /* Make sure insns to set arg pointer are never deleted
4194 (if the arg pointer isn't fixed, there will be a USE
4195 for it, so we can treat it normally). */
4196 if (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
4197 return 0;
4198 #endif
4200 /* Otherwise, the set is dead. */
4201 return 1;
4206 /* If performing several activities, insn is dead if each activity
4207 is individually dead. Also, CLOBBERs and USEs can be ignored; a
4208 CLOBBER or USE that's inside a PARALLEL doesn't make the insn
4209 worth keeping. */
4210 else if (code == PARALLEL)
4212 int i = XVECLEN (x, 0);
4214 for (i--; i >= 0; i--)
4215 if (GET_CODE (XVECEXP (x, 0, i)) != CLOBBER
4216 && GET_CODE (XVECEXP (x, 0, i)) != USE
4217 && ! insn_dead_p (pbi, XVECEXP (x, 0, i), call_ok, NULL_RTX))
4218 return 0;
4220 return 1;
4223 /* A CLOBBER of a pseudo-register that is dead serves no purpose. That
4224 is not necessarily true for hard registers. */
4225 else if (code == CLOBBER && GET_CODE (XEXP (x, 0)) == REG
4226 && REGNO (XEXP (x, 0)) >= FIRST_PSEUDO_REGISTER
4227 && ! REGNO_REG_SET_P (pbi->reg_live, REGNO (XEXP (x, 0))))
4228 return 1;
4230 /* We do not check other CLOBBER or USE here. An insn consisting of just
4231 a CLOBBER or just a USE should not be deleted. */
4232 return 0;
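/* Illustrative sketch, not part of flow.c: once the special cases above are
   peeled away, insn_dead_p's REG test is just "no word of the destination is
   in the live set".  Toy version over a flat bit array; the names are
   hypothetical, and the real code uses REGNO_REG_SET_P and
   HARD_REGNO_NREGS.  */

static int
example_set_is_dead (live, regno, nregs)
     unsigned long *live;   /* one bit per register */
     int regno, nregs;      /* first register and word count */
{
  int i, bits = 8 * sizeof (unsigned long);

  for (i = regno; i < regno + nregs; i++)
    if (live[i / bits] & (1UL << (i % bits)))
      return 0;             /* some word is still live => not dead */

  return 1;                 /* no word is live => the store is dead */
}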
4235 /* If INSN is the last insn in a libcall, and assuming INSN is dead,
4236 return 1 if the entire library call is dead.
4237 This is true if INSN copies a register (hard or pseudo)
4238 and if the hard return reg of the call insn is dead.
4239 (The caller should have tested the destination of the SET inside
4240 INSN already for death.)
4242 If this insn doesn't just copy a register, then we don't
4243 have an ordinary libcall. In that case, cse could not have
4244 managed to substitute the source for the dest later on,
4245 so we can assume the libcall is dead.
4247 PBI is the block info giving pseudoregs live before this insn.
4248 NOTE is the REG_RETVAL note of the insn. */
4250 static int
4251 libcall_dead_p (pbi, note, insn)
4252 struct propagate_block_info *pbi;
4253 rtx note;
4254 rtx insn;
4256 rtx x = single_set (insn);
4258 if (x)
4260 register rtx r = SET_SRC (x);
4261 if (GET_CODE (r) == REG)
4263 rtx call = XEXP (note, 0);
4264 rtx call_pat;
4265 register int i;
4267 /* Find the call insn. */
4268 while (call != insn && GET_CODE (call) != CALL_INSN)
4269 call = NEXT_INSN (call);
4271 /* If there is none, do nothing special,
4272 since ordinary death handling can understand these insns. */
4273 if (call == insn)
4274 return 0;
4276 /* See if the hard reg holding the value is dead.
4277 If this is a PARALLEL, find the call within it. */
4278 call_pat = PATTERN (call);
4279 if (GET_CODE (call_pat) == PARALLEL)
4281 for (i = XVECLEN (call_pat, 0) - 1; i >= 0; i--)
4282 if (GET_CODE (XVECEXP (call_pat, 0, i)) == SET
4283 && GET_CODE (SET_SRC (XVECEXP (call_pat, 0, i))) == CALL)
4284 break;
4286 /* This may be a library call that is returning a value
4287 via an invisible pointer. Do nothing special, since
4288 ordinary death handling can understand these insns. */
4289 if (i < 0)
4290 return 0;
4292 call_pat = XVECEXP (call_pat, 0, i);
4295 return insn_dead_p (pbi, call_pat, 1, REG_NOTES (call));
4298 return 1;
4301 /* Return 1 if register REGNO was used before it was set, i.e. if it is
4302 live at function entry. Don't count global register variables, variables
4303 in registers that can be used for function arg passing, or variables in
4304 fixed hard registers. */
4306 int
4307 regno_uninitialized (regno)
4308 int regno;
4310 if (n_basic_blocks == 0
4311 || (regno < FIRST_PSEUDO_REGISTER
4312 && (global_regs[regno]
4313 || fixed_regs[regno]
4314 || FUNCTION_ARG_REGNO_P (regno))))
4315 return 0;
4317 return REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno);
4320 /* 1 if register REGNO was alive at a place where `setjmp' was called
4321 and was set more than once or is an argument.
4322 Such regs may be clobbered by `longjmp'. */
4324 int
4325 regno_clobbered_at_setjmp (regno)
4326 int regno;
4328 if (n_basic_blocks == 0)
4329 return 0;
4331 return ((REG_N_SETS (regno) > 1
4332 || REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno))
4333 && REGNO_REG_SET_P (regs_live_at_setjmp, regno));
4336 /* INSN references memory, possibly using autoincrement addressing modes.
4337 Find any entries on the mem_set_list that need to be invalidated due
4338 to an address change. */
4340 static void
4341 invalidate_mems_from_autoinc (pbi, insn)
4342 struct propagate_block_info *pbi;
4343 rtx insn;
4345 rtx note;
4346 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
4348 if (REG_NOTE_KIND (note) == REG_INC)
4350 rtx temp = pbi->mem_set_list;
4351 rtx prev = NULL_RTX;
4352 rtx next;
4354 while (temp)
4356 next = XEXP (temp, 1);
4357 if (reg_overlap_mentioned_p (XEXP (note, 0), XEXP (temp, 0)))
4359 /* Splice temp out of list. */
4360 if (prev)
4361 XEXP (prev, 1) = next;
4362 else
4363 pbi->mem_set_list = next;
4364 free_EXPR_LIST_node (temp);
4365 pbi->mem_set_list_len--;
4367 else
4368 prev = temp;
4369 temp = next;
4375 /* EXP is either a MEM or a REG. Remove any dependent entries
4376 from pbi->mem_set_list. */
4378 static void
4379 invalidate_mems_from_set (pbi, exp)
4380 struct propagate_block_info *pbi;
4381 rtx exp;
4383 rtx temp = pbi->mem_set_list;
4384 rtx prev = NULL_RTX;
4385 rtx next;
4387 while (temp)
4389 next = XEXP (temp, 1);
4390 if ((GET_CODE (exp) == MEM
4391 && output_dependence (XEXP (temp, 0), exp))
4392 || (GET_CODE (exp) == REG
4393 && reg_overlap_mentioned_p (exp, XEXP (temp, 0))))
4395 /* Splice this entry out of the list. */
4396 if (prev)
4397 XEXP (prev, 1) = next;
4398 else
4399 pbi->mem_set_list = next;
4400 free_EXPR_LIST_node (temp);
4401 pbi->mem_set_list_len--;
4403 else
4404 prev = temp;
4405 temp = next;
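/* Illustrative sketch, not part of flow.c: both invalidation routines above
   delete from a singly linked list mid-walk with the same idiom -- carry a
   PREV pointer and redirect either PREV's link or the list head.  The node
   type is hypothetical; flow.c's lists are EXPR_LISTs and the unlinked node
   would go back through free_EXPR_LIST_node.  */

struct example_node
{
  int key;
  struct example_node *next;
};

static void
example_splice_out (head, dead_key)
     struct example_node **head;
     int dead_key;
{
  struct example_node *temp = *head, *prev = 0, *next;

  while (temp)
    {
      next = temp->next;
      if (temp->key == dead_key)
	{
	  if (prev)
	    prev->next = next;   /* unlink from the middle of the list */
	  else
	    *head = next;        /* unlink from the head of the list */
	}
      else
	prev = temp;
      temp = next;
    }
}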
4409 /* Process the registers that are set within X, updating the life
4410 information in PBI for the registers that die here and those newly set.
4412 If INSN is nonzero, it is the insn being processed.
4414 The operations to perform are taken from PBI->FLAGS. */
4416 static void
4417 mark_set_regs (pbi, x, insn)
4418 struct propagate_block_info *pbi;
4419 rtx x, insn;
4421 rtx cond = NULL_RTX;
4422 rtx link;
4423 enum rtx_code code;
4425 if (insn)
4426 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
4428 if (REG_NOTE_KIND (link) == REG_INC)
4429 mark_set_1 (pbi, SET, XEXP (link, 0),
4430 (GET_CODE (x) == COND_EXEC
4431 ? COND_EXEC_TEST (x) : NULL_RTX),
4432 insn, pbi->flags);
4434 retry:
4435 switch (code = GET_CODE (x))
4437 case SET:
4438 case CLOBBER:
4439 mark_set_1 (pbi, code, SET_DEST (x), cond, insn, pbi->flags);
4440 return;
4442 case COND_EXEC:
4443 cond = COND_EXEC_TEST (x);
4444 x = COND_EXEC_CODE (x);
4445 goto retry;
4447 case PARALLEL:
4449 register int i;
4450 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
4452 rtx sub = XVECEXP (x, 0, i);
4453 switch (code = GET_CODE (sub))
4455 case COND_EXEC:
4456 if (cond != NULL_RTX)
4457 abort ();
4459 cond = COND_EXEC_TEST (sub);
4460 sub = COND_EXEC_CODE (sub);
4461 if (GET_CODE (sub) != SET && GET_CODE (sub) != CLOBBER)
4462 break;
4463 /* Fall through. */
4465 case SET:
4466 case CLOBBER:
4467 mark_set_1 (pbi, code, SET_DEST (sub), cond, insn, pbi->flags);
4468 break;
4470 default:
4471 break;
4474 break;
4477 default:
4478 break;
4482 /* Process a single SET rtx, X. */
4484 static void
4485 mark_set_1 (pbi, code, reg, cond, insn, flags)
4486 struct propagate_block_info *pbi;
4487 enum rtx_code code;
4488 rtx reg, cond, insn;
4489 int flags;
4491 int regno_first = -1, regno_last = -1;
4492 unsigned long not_dead = 0;
4493 int i;
4495 /* Modifying just one hardware register of a multi-reg value or just a
4496 byte field of a register does not mean the value from before this insn
4497 is now dead. Of course, if it was already dead, it is unused now. */
4499 switch (GET_CODE (reg))
4501 case PARALLEL:
4502 /* Some targets place small structures in registers for return values of
4503 functions. We have to detect this case specially here to get correct
4504 flow information. */
4505 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
4506 if (XEXP (XVECEXP (reg, 0, i), 0) != 0)
4507 mark_set_1 (pbi, code, XEXP (XVECEXP (reg, 0, i), 0), cond, insn,
4508 flags);
4509 return;
4511 case ZERO_EXTRACT:
4512 case SIGN_EXTRACT:
4513 case STRICT_LOW_PART:
4514 /* ??? Assumes STRICT_LOW_PART not used on multi-word registers. */
4515 do
4516 reg = XEXP (reg, 0);
4517 while (GET_CODE (reg) == SUBREG
4518 || GET_CODE (reg) == ZERO_EXTRACT
4519 || GET_CODE (reg) == SIGN_EXTRACT
4520 || GET_CODE (reg) == STRICT_LOW_PART);
4521 if (GET_CODE (reg) == MEM)
4522 break;
4523 not_dead = (unsigned long) REGNO_REG_SET_P (pbi->reg_live, REGNO (reg));
4524 /* Fall through. */
4526 case REG:
4527 regno_last = regno_first = REGNO (reg);
4528 if (regno_first < FIRST_PSEUDO_REGISTER)
4529 regno_last += HARD_REGNO_NREGS (regno_first, GET_MODE (reg)) - 1;
4530 break;
4532 case SUBREG:
4533 if (GET_CODE (SUBREG_REG (reg)) == REG)
4535 enum machine_mode outer_mode = GET_MODE (reg);
4536 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (reg));
4538 /* Identify the range of registers affected. This is moderately
4539 tricky for hard registers. See alter_subreg. */
4541 regno_last = regno_first = REGNO (SUBREG_REG (reg));
4542 if (regno_first < FIRST_PSEUDO_REGISTER)
4544 #ifdef ALTER_HARD_SUBREG
4545 regno_first = ALTER_HARD_SUBREG (outer_mode, SUBREG_WORD (reg),
4546 inner_mode, regno_first);
4547 #else
4548 regno_first += SUBREG_WORD (reg);
4549 #endif
4550 regno_last = (regno_first
4551 + HARD_REGNO_NREGS (regno_first, outer_mode) - 1);
4553 /* Since we've just adjusted the register number ranges, make
4554 sure REG matches. Otherwise some_was_live will be clear
4555 when it shouldn't have been, and we'll create incorrect
4556 REG_UNUSED notes. */
4557 reg = gen_rtx_REG (outer_mode, regno_first);
4559 else
4561 /* If the number of words in the subreg is less than the number
4562 of words in the full register, we have a well-defined partial
4563 set. Otherwise the high bits are undefined.
4565 This is only really applicable to pseudos, since we just took
4566 care of multi-word hard registers. */
4567 if (((GET_MODE_SIZE (outer_mode)
4568 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
4569 < ((GET_MODE_SIZE (inner_mode)
4570 + UNITS_PER_WORD - 1) / UNITS_PER_WORD))
4571 not_dead = (unsigned long) REGNO_REG_SET_P (pbi->reg_live,
4572 regno_first);
4574 reg = SUBREG_REG (reg);
4577 else
4578 reg = SUBREG_REG (reg);
4579 break;
4581 default:
4582 break;
4585 /* If this set is a MEM, then it kills any aliased writes.
4586 If this set is a REG, then it kills any MEMs which use the reg. */
4587 if (optimize && (flags & PROP_SCAN_DEAD_CODE))
4589 if (GET_CODE (reg) == MEM || GET_CODE (reg) == REG)
4590 invalidate_mems_from_set (pbi, reg);
4592 /* If the memory reference had embedded side effects (autoincrement
4593 address modes), then we may need to kill some entries on the
4594 memory set list. */
4595 if (insn && GET_CODE (reg) == MEM)
4596 invalidate_mems_from_autoinc (pbi, insn);
4598 if (pbi->mem_set_list_len < MAX_MEM_SET_LIST_LEN
4599 && GET_CODE (reg) == MEM && ! side_effects_p (reg)
4600 /* ??? With more effort we could track conditional memory life. */
4601 && ! cond
4602 /* We do not know the size of a BLKmode store, so we do not track
4603 them for redundant store elimination. */
4604 && GET_MODE (reg) != BLKmode
4605 /* There are no REG_INC notes for SP, so we can't assume we'll see
4606 everything that invalidates it. To be safe, don't eliminate any
4607 stores through SP; none of them should be redundant anyway. */
4608 && ! reg_mentioned_p (stack_pointer_rtx, reg))
4610 #ifdef AUTO_INC_DEC
4611 /* Store a copy of mem, otherwise the address may be
4612 scrogged by find_auto_inc. */
4613 if (flags & PROP_AUTOINC)
4614 reg = shallow_copy_rtx (reg);
4615 #endif
4616 pbi->mem_set_list = alloc_EXPR_LIST (0, reg, pbi->mem_set_list);
4617 pbi->mem_set_list_len++;
4621 if (GET_CODE (reg) == REG
4622 && ! (regno_first == FRAME_POINTER_REGNUM
4623 && (! reload_completed || frame_pointer_needed))
4624 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4625 && ! (regno_first == HARD_FRAME_POINTER_REGNUM
4626 && (! reload_completed || frame_pointer_needed))
4627 #endif
4628 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4629 && ! (regno_first == ARG_POINTER_REGNUM && fixed_regs[regno_first])
4630 #endif
4633 int some_was_live = 0, some_was_dead = 0;
4635 for (i = regno_first; i <= regno_last; ++i)
4637 int needed_regno = REGNO_REG_SET_P (pbi->reg_live, i);
4638 if (pbi->local_set)
4640 /* Order of the set operation matters here since both
4641 sets may be the same. */
4642 CLEAR_REGNO_REG_SET (pbi->cond_local_set, i);
4643 if (cond != NULL_RTX
4644 && ! REGNO_REG_SET_P (pbi->local_set, i))
4645 SET_REGNO_REG_SET (pbi->cond_local_set, i);
4646 else
4647 SET_REGNO_REG_SET (pbi->local_set, i);
4649 if (code != CLOBBER)
4650 SET_REGNO_REG_SET (pbi->new_set, i);
4652 some_was_live |= needed_regno;
4653 some_was_dead |= ! needed_regno;
4656 #ifdef HAVE_conditional_execution
4657 /* Consider conditional death in deciding that the register needs
4658 a death note. */
4659 if (some_was_live && ! not_dead
4660 /* The stack pointer is never dead. Well, not strictly true,
4661 but it's very difficult to tell from here. Hopefully
4662 combine_stack_adjustments will fix up the most egregious
4663 errors. */
4664 && regno_first != STACK_POINTER_REGNUM)
4666 for (i = regno_first; i <= regno_last; ++i)
4667 if (! mark_regno_cond_dead (pbi, i, cond))
4668 not_dead |= ((unsigned long) 1) << (i - regno_first);
4670 #endif
4672 /* Additional data to record if this is the final pass. */
4673 if (flags & (PROP_LOG_LINKS | PROP_REG_INFO
4674 | PROP_DEATH_NOTES | PROP_AUTOINC))
4676 register rtx y;
4677 register int blocknum = pbi->bb->index;
4679 y = NULL_RTX;
4680 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4682 y = pbi->reg_next_use[regno_first];
4684 /* The next use is no longer next, since a store intervenes. */
4685 for (i = regno_first; i <= regno_last; ++i)
4686 pbi->reg_next_use[i] = 0;
4689 if (flags & PROP_REG_INFO)
4691 for (i = regno_first; i <= regno_last; ++i)
4693 /* Count (weighted) references, stores, etc. This counts a
4694 register twice if it is modified, but that is correct. */
4695 REG_N_SETS (i) += 1;
4696 REG_N_REFS (i) += (optimize_size ? 1
4697 : pbi->bb->loop_depth + 1);
4699 /* The insns where a reg is live are normally counted
4700 elsewhere, but we want the count to include the insn
4701 where the reg is set, and the normal counting mechanism
4702 would not count it. */
4703 REG_LIVE_LENGTH (i) += 1;
4706 /* If this is a hard reg, record this function uses the reg. */
4707 if (regno_first < FIRST_PSEUDO_REGISTER)
4709 for (i = regno_first; i <= regno_last; i++)
4710 regs_ever_live[i] = 1;
4712 else
4714 /* Keep track of which basic blocks each reg appears in. */
4715 if (REG_BASIC_BLOCK (regno_first) == REG_BLOCK_UNKNOWN)
4716 REG_BASIC_BLOCK (regno_first) = blocknum;
4717 else if (REG_BASIC_BLOCK (regno_first) != blocknum)
4718 REG_BASIC_BLOCK (regno_first) = REG_BLOCK_GLOBAL;
4722 if (! some_was_dead)
4724 if (flags & PROP_LOG_LINKS)
4726 /* Make a logical link from the next following insn
4727 that uses this register, back to this insn.
4728 The following insns have already been processed.
4730 We don't build a LOG_LINK for hard registers contained
4731 in ASM_OPERANDs. If these registers get replaced,
4732 we might wind up changing the semantics of the insn,
4733 even if reload can make what appear to be valid
4734 assignments later. */
4735 if (y && (BLOCK_NUM (y) == blocknum)
4736 && (regno_first >= FIRST_PSEUDO_REGISTER
4737 || asm_noperands (PATTERN (y)) < 0))
4738 LOG_LINKS (y) = alloc_INSN_LIST (insn, LOG_LINKS (y));
4741 else if (not_dead)
4742 ;
4743 else if (! some_was_live)
4745 if (flags & PROP_REG_INFO)
4746 REG_N_DEATHS (regno_first) += 1;
4748 if (flags & PROP_DEATH_NOTES)
4750 /* Note that dead stores have already been deleted
4751 when possible. If we get here, we have found a
4752 dead store that cannot be eliminated (because the
4753 same insn does something useful). Indicate this
4754 by marking the reg being set as dying here. */
4755 REG_NOTES (insn)
4756 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
4759 else
4761 if (flags & PROP_DEATH_NOTES)
4763 /* This is a case where we have a multi-word hard register
4764 and some, but not all, of the words of the register are
4765 needed in subsequent insns. Write REG_UNUSED notes
4766 for those parts that were not needed. This case should
4767 be rare. */
4769 for (i = regno_first; i <= regno_last; ++i)
4770 if (! REGNO_REG_SET_P (pbi->reg_live, i))
4771 REG_NOTES (insn)
4772 = alloc_EXPR_LIST (REG_UNUSED,
4773 gen_rtx_REG (reg_raw_mode[i], i),
4774 REG_NOTES (insn));
4779 /* Mark the register as being dead. */
4780 if (some_was_live
4781 /* The stack pointer is never dead. Well, not strictly true,
4782 but it's very difficult to tell from here. Hopefully
4783 combine_stack_adjustments will fix up the most egregious
4784 errors. */
4785 && regno_first != STACK_POINTER_REGNUM)
4787 for (i = regno_first; i <= regno_last; ++i)
4788 if (!(not_dead & (((unsigned long) 1) << (i - regno_first))))
4789 CLEAR_REGNO_REG_SET (pbi->reg_live, i);
4792 else if (GET_CODE (reg) == REG)
4794 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4795 pbi->reg_next_use[regno_first] = 0;
4798 /* If this is the last pass and this is a SCRATCH, show it will be dying
4799 here and count it. */
4800 else if (GET_CODE (reg) == SCRATCH)
4802 if (flags & PROP_DEATH_NOTES)
4803 REG_NOTES (insn)
4804 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
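/* Illustrative sketch, not part of flow.c: the [regno_first, regno_last]
   range that mark_set_1 computes expands a multi-word hard register into
   the consecutive hard registers it occupies, which is the question
   HARD_REGNO_NREGS answers per target.  Toy version assuming 4-byte
   registers; all names are hypothetical.  */

static void
example_hard_reg_range (regno, mode_size, first, last)
     int regno, mode_size;   /* first hard reg, value size in bytes */
     int *first, *last;
{
  int nregs = (mode_size + 3) / 4;   /* registers needed for the value */

  *first = regno;
  *last = regno + nregs - 1;         /* e.g. an 8-byte value starting in
					reg 2 occupies hard regs 2 and 3 */
}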
4808 #ifdef HAVE_conditional_execution
4809 /* Mark REGNO conditionally dead.
4810 Return true if the register is now unconditionally dead. */
4812 static int
4813 mark_regno_cond_dead (pbi, regno, cond)
4814 struct propagate_block_info *pbi;
4815 int regno;
4816 rtx cond;
4818 /* If this is a store to a predicate register, the value of the
4819 predicate is changing; we don't know that the predicate as seen
4820 before is the same as that seen after. Flush all dependent
4821 conditions from reg_cond_dead. This will make all such
4822 conditionally live registers unconditionally live. */
4823 if (REGNO_REG_SET_P (pbi->reg_cond_reg, regno))
4824 flush_reg_cond_reg (pbi, regno);
4826 /* If this is an unconditional store, remove any conditional
4827 life that may have existed. */
4828 if (cond == NULL_RTX)
4829 splay_tree_remove (pbi->reg_cond_dead, regno);
4830 else
4832 splay_tree_node node;
4833 struct reg_cond_life_info *rcli;
4834 rtx ncond;
4836 /* Otherwise this is a conditional set. Record that fact.
4837 It may have been conditionally used, or there may be a
4838 subsequent set with a complementary condition.
4840 node = splay_tree_lookup (pbi->reg_cond_dead, regno);
4841 if (node == NULL)
4843 /* The register was unconditionally live previously.
4844 Record the current condition as the condition under
4845 which it is dead. */
4846 rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
4847 rcli->condition = cond;
4848 rcli->stores = cond;
4849 rcli->orig_condition = const0_rtx;
4850 splay_tree_insert (pbi->reg_cond_dead, regno,
4851 (splay_tree_value) rcli);
4853 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));
4855 /* Not unconditionally dead. */
4856 return 0;
4858 else
4860 /* The register was conditionally live previously.
4861 Add the new condition to the old. */
4862 rcli = (struct reg_cond_life_info *) node->value;
4863 ncond = rcli->condition;
4864 ncond = ior_reg_cond (ncond, cond, 1);
4865 if (rcli->stores == const0_rtx)
4866 rcli->stores = cond;
4867 else if (rcli->stores != const1_rtx)
4868 rcli->stores = ior_reg_cond (rcli->stores, cond, 1);
4870 /* If the register is now unconditionally dead, remove the entry
4871 in the splay_tree. A register is unconditionally dead if the
4872 dead condition ncond is true. A register is also unconditionally
4873 dead if the sum of all conditional stores is an unconditional
4874 store (stores is true), and the dead condition is identical to
4875 the original dead condition initialized at the end of
4876 the block. This is a pointer compare, not an rtx_equal_p
4877 compare. */
4878 if (ncond == const1_rtx
4879 || (ncond == rcli->orig_condition && rcli->stores == const1_rtx))
4880 splay_tree_remove (pbi->reg_cond_dead, regno);
4881 else
4883 rcli->condition = ncond;
4885 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));
4887 /* Not unconditionally dead. */
4888 return 0;
4893 return 1;
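/* Illustrative sketch, not part of flow.c: mark_regno_cond_dead accumulates
   a per-register "dead under condition C" predicate.  With conditions
   reduced to integer tokens (K and -K, |K| >= 2, standing for a condition
   and its complement; 0 for "live", 1 for "dead"), the decision structure
   looks like this.  The real code builds rtx conditions with ior_reg_cond;
   everything here is a hypothetical simplification.  */

static int
example_accumulate_death (dead_cond, new_cond)
     int dead_cond;   /* 0 = live, 1 = dead, else a condition token */
     int new_cond;    /* condition of the store, never 0 */
{
  if (new_cond == 1)
    return 1;                /* unconditional store: plainly dead */
  if (dead_cond == 0)
    return new_cond;         /* first conditional store seen */
  if (dead_cond == new_cond)
    return dead_cond;        /* same condition again: nothing new */
  if (dead_cond == -new_cond)
    return 1;                /* C | !C is always true: dead both ways */

  /* Unrelated conditions: this toy model keeps the old token; the real
     code would return an IOR of the two conditions.  */
  return dead_cond;
}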
4896 /* Called from splay_tree_delete for pbi->reg_cond_dead. */
4898 static void
4899 free_reg_cond_life_info (value)
4900 splay_tree_value value;
4902 struct reg_cond_life_info *rcli = (struct reg_cond_life_info *) value;
4903 free (rcli);
4906 /* Helper function for flush_reg_cond_reg. */
4908 static int
4909 flush_reg_cond_reg_1 (node, data)
4910 splay_tree_node node;
4911 void *data;
4913 struct reg_cond_life_info *rcli;
4914 int *xdata = (int *) data;
4915 unsigned int regno = xdata[0];
4917 /* Don't need to search if last flushed value was farther on in
4918 the in-order traversal. */
4919 if (xdata[1] >= (int) node->key)
4920 return 0;
4922 /* Splice out portions of the expression that refer to regno. */
4923 rcli = (struct reg_cond_life_info *) node->value;
4924 rcli->condition = elim_reg_cond (rcli->condition, regno);
4925 if (rcli->stores != const0_rtx && rcli->stores != const1_rtx)
4926 rcli->stores = elim_reg_cond (rcli->stores, regno);
4928 /* If the entire condition is now false, signal the node to be removed. */
4929 if (rcli->condition == const0_rtx)
4931 xdata[1] = node->key;
4932 return -1;
4934 else if (rcli->condition == const1_rtx)
4935 abort ();
4937 return 0;
4940 /* Flush all (sub) expressions referring to REGNO from PBI->REG_COND_DEAD. */
4942 static void
4943 flush_reg_cond_reg (pbi, regno)
4944 struct propagate_block_info *pbi;
4945 int regno;
4947 int pair[2];
4949 pair[0] = regno;
4950 pair[1] = -1;
4951 while (splay_tree_foreach (pbi->reg_cond_dead,
4952 flush_reg_cond_reg_1, pair) == -1)
4953 splay_tree_remove (pbi->reg_cond_dead, pair[1]);
4955 CLEAR_REGNO_REG_SET (pbi->reg_cond_reg, regno);
4958 /* Logical arithmetic on predicate conditions. IOR, NOT and AND.
4959 For ior/and, the ADD flag determines whether we want to add the new
4960 condition X to the old one unconditionally. If it is zero, we will
4961 only return a new expression if X allows us to simplify part of
4962 OLD, otherwise we return OLD unchanged to the caller.
4963 If ADD is nonzero, we will return a new condition in all cases. The
4964 toplevel caller of one of these functions should always pass 1 for
4965 ADD. */
4967 static rtx
4968 ior_reg_cond (old, x, add)
4969 rtx old, x;
4970 int add;
4972 rtx op0, op1;
4974 if (GET_RTX_CLASS (GET_CODE (old)) == '<')
4976 if (GET_RTX_CLASS (GET_CODE (x)) == '<'
4977 && GET_CODE (x) == reverse_condition (GET_CODE (old))
4978 && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
4979 return const1_rtx;
4980 if (GET_CODE (x) == GET_CODE (old)
4981 && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
4982 return old;
4983 if (! add)
4984 return old;
4985 return gen_rtx_IOR (0, old, x);
4988 switch (GET_CODE (old))
4990 case IOR:
4991 op0 = ior_reg_cond (XEXP (old, 0), x, 0);
4992 op1 = ior_reg_cond (XEXP (old, 1), x, 0);
4993 if (op0 != XEXP (old, 0) || op1 != XEXP (old, 1))
4995 if (op0 == const0_rtx)
4996 return op1;
4997 if (op1 == const0_rtx)
4998 return op0;
4999 if (op0 == const1_rtx || op1 == const1_rtx)
5000 return const1_rtx;
5001 if (op0 == XEXP (old, 0))
5002 op0 = gen_rtx_IOR (0, op0, x);
5003 else
5004 op1 = gen_rtx_IOR (0, op1, x);
5005 return gen_rtx_IOR (0, op0, op1);
5007 if (! add)
5008 return old;
5009 return gen_rtx_IOR (0, old, x);
5011 case AND:
5012 op0 = ior_reg_cond (XEXP (old, 0), x, 0);
5013 op1 = ior_reg_cond (XEXP (old, 1), x, 0);
5014 if (op0 != XEXP (old, 0) || op1 != XEXP (old, 1))
5016 if (op0 == const1_rtx)
5017 return op1;
5018 if (op1 == const1_rtx)
5019 return op0;
5020 if (op0 == const0_rtx || op1 == const0_rtx)
5021 return const0_rtx;
5022 if (op0 == XEXP (old, 0))
5023 op0 = gen_rtx_IOR (0, op0, x);
5024 else
5025 op1 = gen_rtx_IOR (0, op1, x);
5026 return gen_rtx_AND (0, op0, op1);
5028 if (! add)
5029 return old;
5030 return gen_rtx_IOR (0, old, x);
5032 case NOT:
5033 op0 = and_reg_cond (XEXP (old, 0), not_reg_cond (x), 0);
5034 if (op0 != XEXP (old, 0))
5035 return not_reg_cond (op0);
5036 if (! add)
5037 return old;
5038 return gen_rtx_IOR (0, old, x);
5040 default:
5041 abort ();
5045 static rtx
5046 not_reg_cond (x)
5047 rtx x;
5049 enum rtx_code x_code;
5051 if (x == const0_rtx)
5052 return const1_rtx;
5053 else if (x == const1_rtx)
5054 return const0_rtx;
5055 x_code = GET_CODE (x);
5056 if (x_code == NOT)
5057 return XEXP (x, 0);
5058 if (GET_RTX_CLASS (x_code) == '<'
5059 && GET_CODE (XEXP (x, 0)) == REG)
5061 if (XEXP (x, 1) != const0_rtx)
5062 abort ();
5064 return gen_rtx_fmt_ee (reverse_condition (x_code),
5065 VOIDmode, XEXP (x, 0), const0_rtx);
5067 return gen_rtx_NOT (0, x);
5070 static rtx
5071 and_reg_cond (old, x, add)
5072 rtx old, x;
5073 int add;
5075 rtx op0, op1;
5077 if (GET_RTX_CLASS (GET_CODE (old)) == '<')
5079 if (GET_RTX_CLASS (GET_CODE (x)) == '<'
5080 && GET_CODE (x) == reverse_condition (GET_CODE (old))
5081 && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
5082 return const0_rtx;
5083 if (GET_CODE (x) == GET_CODE (old)
5084 && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
5085 return old;
5086 if (! add)
5087 return old;
5088 return gen_rtx_AND (0, old, x);
5091 switch (GET_CODE (old))
5093 case IOR:
5094 op0 = and_reg_cond (XEXP (old, 0), x, 0);
5095 op1 = and_reg_cond (XEXP (old, 1), x, 0);
5096 if (op0 != XEXP (old, 0) || op1 != XEXP (old, 1))
5098 if (op0 == const0_rtx)
5099 return op1;
5100 if (op1 == const0_rtx)
5101 return op0;
5102 if (op0 == const1_rtx || op1 == const1_rtx)
5103 return const1_rtx;
5104 if (op0 == XEXP (old, 0))
5105 op0 = gen_rtx_AND (0, op0, x);
5106 else
5107 op1 = gen_rtx_AND (0, op1, x);
5108 return gen_rtx_IOR (0, op0, op1);
5110 if (! add)
5111 return old;
5112 return gen_rtx_AND (0, old, x);
5114 case AND:
5115 op0 = and_reg_cond (XEXP (old, 0), x, 0);
5116 op1 = and_reg_cond (XEXP (old, 1), x, 0);
5117 if (op0 != XEXP (old, 0) || op1 != XEXP (old, 1))
5119 if (op0 == const1_rtx)
5120 return op1;
5121 if (op1 == const1_rtx)
5122 return op0;
5123 if (op0 == const0_rtx || op1 == const0_rtx)
5124 return const0_rtx;
5125 if (op0 == XEXP (old, 0))
5126 op0 = gen_rtx_AND (0, op0, x);
5127 else
5128 op1 = gen_rtx_AND (0, op1, x);
5129 return gen_rtx_AND (0, op0, op1);
5131 if (! add)
5132 return old;
5134 /* If X is identical to one of the existing terms of the AND,
5135 then just return what we already have. */
5136 /* ??? There really should be some sort of recursive check here in
5137 case there are nested ANDs. */
5138 if ((GET_CODE (XEXP (old, 0)) == GET_CODE (x)
5139 && REGNO (XEXP (XEXP (old, 0), 0)) == REGNO (XEXP (x, 0)))
5140 || (GET_CODE (XEXP (old, 1)) == GET_CODE (x)
5141 && REGNO (XEXP (XEXP (old, 1), 0)) == REGNO (XEXP (x, 0))))
5142 return old;
5144 return gen_rtx_AND (0, old, x);
5146 case NOT:
5147 op0 = ior_reg_cond (XEXP (old, 0), not_reg_cond (x), 0);
5148 if (op0 != XEXP (old, 0))
5149 return not_reg_cond (op0);
5150 if (! add)
5151 return old;
5152 return gen_rtx_AND (0, old, x);
5154 default:
5155 abort ();
5159 /* Given a condition X, remove references to reg REGNO and return the
5160 new condition. The removal will be done so that all conditions
5161 involving REGNO are considered to evaluate to false. This function
5162 is used when the value of REGNO changes. */
5164 static rtx
5165 elim_reg_cond (x, regno)
5166 rtx x;
5167 unsigned int regno;
5169 rtx op0, op1;
5171 if (GET_RTX_CLASS (GET_CODE (x)) == '<')
5173 if (REGNO (XEXP (x, 0)) == regno)
5174 return const0_rtx;
5175 return x;
5178 switch (GET_CODE (x))
5180 case AND:
5181 op0 = elim_reg_cond (XEXP (x, 0), regno);
5182 op1 = elim_reg_cond (XEXP (x, 1), regno);
5183 if (op0 == const0_rtx || op1 == const0_rtx)
5184 return const0_rtx;
5185 if (op0 == const1_rtx)
5186 return op1;
5187 if (op1 == const1_rtx)
5188 return op0;
5189 if (op0 == XEXP (x, 0) && op1 == XEXP (x, 1))
5190 return x;
5191 return gen_rtx_AND (0, op0, op1);
5193 case IOR:
5194 op0 = elim_reg_cond (XEXP (x, 0), regno);
5195 op1 = elim_reg_cond (XEXP (x, 1), regno);
5196 if (op0 == const1_rtx || op1 == const1_rtx)
5197 return const1_rtx;
5198 if (op0 == const0_rtx)
5199 return op1;
5200 if (op1 == const0_rtx)
5201 return op0;
5202 if (op0 == XEXP (x, 0) && op1 == XEXP (x, 1))
5203 return x;
5204 return gen_rtx_IOR (0, op0, op1);
5206 case NOT:
5207 op0 = elim_reg_cond (XEXP (x, 0), regno);
5208 if (op0 == const0_rtx)
5209 return const1_rtx;
5210 if (op0 == const1_rtx)
5211 return const0_rtx;
5212 if (op0 != XEXP (x, 0))
5213 return not_reg_cond (op0);
5214 return x;
5216 default:
5217 abort ();
5220 #endif /* HAVE_conditional_execution */
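/* Illustrative sketch, not part of flow.c: ior_reg_cond, and_reg_cond and
   elim_reg_cond above all treat const0_rtx and const1_rtx as the false and
   true sentinels of a small boolean algebra and simplify against them before
   building any new rtx.  The same identities over plain ints, with 0/1 as
   the sentinels and other values standing for opaque conditions; all names
   are hypothetical.  */

static int
example_cond_ior (a, b)
     int a, b;
{
  if (a == 1 || b == 1)
    return 1;                /* x | true == true */
  if (a == 0)
    return b;                /* false | x == x */
  if (b == 0)
    return a;
  return a == b ? a : 2;     /* 2 stands in for a freshly built IOR */
}

static int
example_cond_and (a, b)
     int a, b;
{
  if (a == 0 || b == 0)
    return 0;                /* x & false == false */
  if (a == 1)
    return b;                /* true & x == x */
  if (b == 1)
    return a;
  return a == b ? a : 3;     /* 3 stands in for a freshly built AND */
}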
5222 #ifdef AUTO_INC_DEC
5224 /* Try to substitute the auto-inc expression INC as the address inside
5225 MEM which occurs in INSN. Currently, the address of MEM is an expression
5226 involving INCR_REG, and INCR is the next use of INCR_REG; it is an insn
5227 that has a single set whose source is a PLUS of INCR_REG and something
5228 else. */
5230 static void
5231 attempt_auto_inc (pbi, inc, insn, mem, incr, incr_reg)
5232 struct propagate_block_info *pbi;
5233 rtx inc, insn, mem, incr, incr_reg;
5235 int regno = REGNO (incr_reg);
5236 rtx set = single_set (incr);
5237 rtx q = SET_DEST (set);
5238 rtx y = SET_SRC (set);
5239 int opnum = XEXP (y, 0) == incr_reg ? 0 : 1;
5241 /* Make sure this reg appears only once in this insn. */
5242 if (count_occurrences (PATTERN (insn), incr_reg, 1) != 1)
5243 return;
5245 if (dead_or_set_p (incr, incr_reg)
5246 /* Mustn't autoinc an eliminable register. */
5247 && (regno >= FIRST_PSEUDO_REGISTER
5248 || ! TEST_HARD_REG_BIT (elim_reg_set, regno)))
5250 /* This is the simple case. Try to make the auto-inc. If
5251 we can't, we are done. Otherwise, we will do any
5252 needed updates below. */
5253 if (! validate_change (insn, &XEXP (mem, 0), inc, 0))
5254 return;
5256 else if (GET_CODE (q) == REG
5257 /* PREV_INSN used here to check the semi-open interval
5258 [insn,incr). */
5259 && ! reg_used_between_p (q, PREV_INSN (insn), incr)
5260 /* We must also check for sets of q as q may be
5261 a call clobbered hard register and there may
5262 be a call between PREV_INSN (insn) and incr. */
5263 && ! reg_set_between_p (q, PREV_INSN (insn), incr))
5265 /* We have *p followed sometime later by q = p+size.
5266 Both p and q must be live afterward,
5267 and q is not used between INSN and its assignment.
5268 Change it to q = p, ...*q..., q = q+size.
5269 Then fall into the usual case. */
5270 rtx insns, temp;
5272 start_sequence ();
5273 emit_move_insn (q, incr_reg);
5274 insns = get_insns ();
5275 end_sequence ();
5277 if (basic_block_for_insn)
5278 for (temp = insns; temp; temp = NEXT_INSN (temp))
5279 set_block_for_insn (temp, pbi->bb);
5281 /* If we can't make the auto-inc, or can't make the
5282 replacement into Y, exit. There's no point in making
5283 the change below if we can't do the auto-inc and doing
5284 so is not correct in the pre-inc case. */
5286 XEXP (inc, 0) = q;
5287 validate_change (insn, &XEXP (mem, 0), inc, 1);
5288 validate_change (incr, &XEXP (y, opnum), q, 1);
5289 if (! apply_change_group ())
5290 return;
5292 /* We now know we'll be doing this change, so emit the
5293 new insn(s) and do the updates. */
5294 emit_insns_before (insns, insn);
5296 if (pbi->bb->head == insn)
5297 pbi->bb->head = insns;
5299 /* INCR will become a NOTE and INSN won't contain a
5300 use of INCR_REG. If a use of INCR_REG was just placed in
5301 the insn before INSN, make that the next use.
5302 Otherwise, invalidate it. */
5303 if (GET_CODE (PREV_INSN (insn)) == INSN
5304 && GET_CODE (PATTERN (PREV_INSN (insn))) == SET
5305 && SET_SRC (PATTERN (PREV_INSN (insn))) == incr_reg)
5306 pbi->reg_next_use[regno] = PREV_INSN (insn);
5307 else
5308 pbi->reg_next_use[regno] = 0;
5310 incr_reg = q;
5311 regno = REGNO (q);
5313 /* REGNO is now used in INCR which is below INSN, but
5314 it previously wasn't live here. If we don't mark
5315 it as live, we'll put a REG_DEAD note for it
5316 on this insn, which is incorrect. */
5317 SET_REGNO_REG_SET (pbi->reg_live, regno);
5319 /* If there are any calls between INSN and INCR, show
5320 that REGNO now crosses them. */
5321 for (temp = insn; temp != incr; temp = NEXT_INSN (temp))
5322 if (GET_CODE (temp) == CALL_INSN)
5323 REG_N_CALLS_CROSSED (regno)++;
5325 else
5326 return;
5328 /* If we haven't returned, it means we were able to make the
5329 auto-inc, so update the status. First, record that this insn
5330 has an implicit side effect. */
5332 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, incr_reg, REG_NOTES (insn));
5334 /* Modify the old increment-insn to simply copy
5335 the already-incremented value of our register. */
5336 if (! validate_change (incr, &SET_SRC (set), incr_reg, 0))
5337 abort ();
5339 /* If that makes it a no-op (copying the register into itself) delete
5340 it so it won't appear to be a "use" and a "set" of this
5341 register. */
5342 if (REGNO (SET_DEST (set)) == REGNO (incr_reg))
5344 /* If the original source was dead, it's dead now. */
5345 rtx note;
5347 while ((note = find_reg_note (incr, REG_DEAD, NULL_RTX)) != NULL_RTX)
5349 remove_note (incr, note);
5350 if (XEXP (note, 0) != incr_reg)
5351 CLEAR_REGNO_REG_SET (pbi->reg_live, REGNO (XEXP (note, 0)));
5354 PUT_CODE (incr, NOTE);
5355 NOTE_LINE_NUMBER (incr) = NOTE_INSN_DELETED;
5356 NOTE_SOURCE_FILE (incr) = 0;
5359 if (regno >= FIRST_PSEUDO_REGISTER)
5361 /* Count an extra reference to the reg. When a reg is
5362 incremented, spilling it is worse, so we want to make
5363 that less likely. */
5364 REG_N_REFS (regno) += (optimize_size ? 1 : pbi->bb->loop_depth + 1);
5366 /* Count the increment as a setting of the register,
5367 even though it isn't a SET in rtl. */
5368 REG_N_SETS (regno)++;
5372 /* X is a MEM found in INSN. See if we can convert it into an auto-increment
5373 reference. */
5375 static void
5376 find_auto_inc (pbi, x, insn)
5377 struct propagate_block_info *pbi;
5378 rtx x;
5379 rtx insn;
5381 rtx addr = XEXP (x, 0);
5382 HOST_WIDE_INT offset = 0;
5383 rtx set, y, incr, inc_val;
5384 int regno;
5385 int size = GET_MODE_SIZE (GET_MODE (x));
5387 if (GET_CODE (insn) == JUMP_INSN)
5388 return;
5390 /* Here we detect use of an index register which might be good for
5391 postincrement, postdecrement, preincrement, or predecrement. */
5393 if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
5394 offset = INTVAL (XEXP (addr, 1)), addr = XEXP (addr, 0);
5396 if (GET_CODE (addr) != REG)
5397 return;
5399 regno = REGNO (addr);
5401 /* Is the next use an increment that might make auto-increment? */
5402 incr = pbi->reg_next_use[regno];
5403 if (incr == 0 || BLOCK_NUM (incr) != BLOCK_NUM (insn))
5404 return;
5405 set = single_set (incr);
5406 if (set == 0 || GET_CODE (set) != SET)
5407 return;
5408 y = SET_SRC (set);
5410 if (GET_CODE (y) != PLUS)
5411 return;
5413 if (REG_P (XEXP (y, 0)) && REGNO (XEXP (y, 0)) == REGNO (addr))
5414 inc_val = XEXP (y, 1);
5415 else if (REG_P (XEXP (y, 1)) && REGNO (XEXP (y, 1)) == REGNO (addr))
5416 inc_val = XEXP (y, 0);
5417 else
5418 return;
5420 if (GET_CODE (inc_val) == CONST_INT)
5422 if (HAVE_POST_INCREMENT
5423 && (INTVAL (inc_val) == size && offset == 0))
5424 attempt_auto_inc (pbi, gen_rtx_POST_INC (Pmode, addr), insn, x,
5425 incr, addr);
5426 else if (HAVE_POST_DECREMENT
5427 && (INTVAL (inc_val) == -size && offset == 0))
5428 attempt_auto_inc (pbi, gen_rtx_POST_DEC (Pmode, addr), insn, x,
5429 incr, addr);
5430 else if (HAVE_PRE_INCREMENT
5431 && (INTVAL (inc_val) == size && offset == size))
5432 attempt_auto_inc (pbi, gen_rtx_PRE_INC (Pmode, addr), insn, x,
5433 incr, addr);
5434 else if (HAVE_PRE_DECREMENT
5435 && (INTVAL (inc_val) == -size && offset == -size))
5436 attempt_auto_inc (pbi, gen_rtx_PRE_DEC (Pmode, addr), insn, x,
5437 incr, addr);
5438 else if (HAVE_POST_MODIFY_DISP && offset == 0)
5439 attempt_auto_inc (pbi, gen_rtx_POST_MODIFY (Pmode, addr,
5440 gen_rtx_PLUS (Pmode,
5441 addr,
5442 inc_val)),
5443 insn, x, incr, addr);
5445 else if (GET_CODE (inc_val) == REG
5446 && ! reg_set_between_p (inc_val, PREV_INSN (insn),
5447 NEXT_INSN (incr)))
5450 if (HAVE_POST_MODIFY_REG && offset == 0)
5451 attempt_auto_inc (pbi, gen_rtx_POST_MODIFY (Pmode, addr,
5452 gen_rtx_PLUS (Pmode,
5453 addr,
5454 inc_val)),
5455 insn, x, incr, addr);
5459 #endif /* AUTO_INC_DEC */
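/* Illustrative sketch, not part of flow.c: find_auto_inc's size and offset
   tests, in isolation.  Given a use of *(p + offset) followed in the same
   block by p = p + inc_val, at most one of the four classic addressing modes
   matches each (inc_val, offset, size) combination.  Hypothetical function;
   the real code additionally gates each case on HAVE_POST_INCREMENT and
   friends.  */

static int
example_autoinc_kind (inc_val, offset, size)
     long inc_val, offset, size;   /* increment, use offset, access size */
{
  if (inc_val == size && offset == 0)
    return 1;     /* post-increment:  use *p, then p += size */
  if (inc_val == -size && offset == 0)
    return 2;     /* post-decrement:  use *p, then p -= size */
  if (inc_val == size && offset == size)
    return 3;     /* pre-increment:   use *(p + size), p += size */
  if (inc_val == -size && offset == -size)
    return 4;     /* pre-decrement:   use *(p - size), p -= size */
  return 0;       /* no single-insn addressing mode applies */
}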
5461 static void
5462 mark_used_reg (pbi, reg, cond, insn)
5463 struct propagate_block_info *pbi;
5464 rtx reg;
5465 rtx cond ATTRIBUTE_UNUSED;
5466 rtx insn;
5468 int regno = REGNO (reg);
5469 int some_was_live = REGNO_REG_SET_P (pbi->reg_live, regno);
5470 int some_was_dead = ! some_was_live;
5471 int some_not_set;
5472 int n;
5474 /* A hard reg in a wide mode may really be multiple registers.
5475 If so, mark all of them just like the first. */
5476 if (regno < FIRST_PSEUDO_REGISTER)
5478 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
5479 while (--n > 0)
5481 int needed_regno = REGNO_REG_SET_P (pbi->reg_live, regno + n);
5482 some_was_live |= needed_regno;
5483 some_was_dead |= ! needed_regno;
5487 if (pbi->flags & (PROP_LOG_LINKS | PROP_AUTOINC))
5489 /* Record where each reg is used, so when the reg is set we know
5490 the next insn that uses it. */
5491 pbi->reg_next_use[regno] = insn;
5494 if (pbi->flags & PROP_REG_INFO)
5496 if (regno < FIRST_PSEUDO_REGISTER)
5498 /* If this is a register we are going to try to eliminate,
5499 don't mark it live here. If we are successful in
5500 eliminating it, it need not be live unless it is used for
5501 pseudos, in which case it will have been set live when it
5502 was allocated to the pseudos. If the register will not
5503 be eliminated, reload will set it live at that point.
5505 Otherwise, record that this function uses this register. */
5506 /* ??? The PPC backend tries to "eliminate" on the pic
5507 register to itself. This should be fixed. In the meantime,
5508 hack around it. */
5510 if (! (TEST_HARD_REG_BIT (elim_reg_set, regno)
5511 && (regno == FRAME_POINTER_REGNUM
5512 || regno == ARG_POINTER_REGNUM)))
5514 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
5515 do
5516 regs_ever_live[regno + --n] = 1;
5517 while (n > 0);
5520 else
5522 /* Keep track of which basic block each reg appears in. */
5524 register int blocknum = pbi->bb->index;
5525 if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
5526 REG_BASIC_BLOCK (regno) = blocknum;
5527 else if (REG_BASIC_BLOCK (regno) != blocknum)
5528 REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
5530 /* Count (weighted) number of uses of each reg. */
5531 REG_N_REFS (regno) += (optimize_size ? 1
5532 : pbi->bb->loop_depth + 1);
5536 /* Find out if any part of the register was set in this insn. */
5537 some_not_set = ! REGNO_REG_SET_P (pbi->new_set, regno);
5538 if (regno < FIRST_PSEUDO_REGISTER)
5540 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
5541 while (--n > 0)
5542 some_not_set |= ! REGNO_REG_SET_P (pbi->new_set, regno + n);
5545 /* Record and count the insns in which a reg dies. If it is used in
5546 this insn and was dead below the insn then it dies in this insn.
5547 If it was set in this insn, we do not make a REG_DEAD note;
5548 likewise if we already made such a note. */
5549 if ((pbi->flags & (PROP_DEATH_NOTES | PROP_REG_INFO))
5550 && some_was_dead
5551 && some_not_set)
5553 /* Check for the case where the register dying partially
5554 overlaps the register set by this insn. */
5555 if (regno < FIRST_PSEUDO_REGISTER
5556 && HARD_REGNO_NREGS (regno, GET_MODE (reg)) > 1)
5558 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
5559 while (--n >= 0)
5560 some_was_live |= REGNO_REG_SET_P (pbi->new_set, regno + n);
5563 /* If none of the words in X is needed, make a REG_DEAD note.
5564 Otherwise, we must make partial REG_DEAD notes. */
5565 if (! some_was_live)
5567 if ((pbi->flags & PROP_DEATH_NOTES)
5568 && ! find_regno_note (insn, REG_DEAD, regno))
5569 REG_NOTES (insn)
5570 = alloc_EXPR_LIST (REG_DEAD, reg, REG_NOTES (insn));
5572 if (pbi->flags & PROP_REG_INFO)
5573 REG_N_DEATHS (regno)++;
5575 else
5577 /* Don't make a REG_DEAD note for a part of a register
5578 that is set in the insn. */
5580 n = regno + HARD_REGNO_NREGS (regno, GET_MODE (reg)) - 1;
5581 for (; n >= regno; n--)
5582 if (! REGNO_REG_SET_P (pbi->reg_live, n)
5583 && ! dead_or_set_regno_p (insn, n))
5584 REG_NOTES (insn)
5585 = alloc_EXPR_LIST (REG_DEAD,
5586 gen_rtx_REG (reg_raw_mode[n], n),
5587 REG_NOTES (insn));
5591 SET_REGNO_REG_SET (pbi->reg_live, regno);
5592 if (regno < FIRST_PSEUDO_REGISTER)
5594 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
5595 while (--n > 0)
5596 SET_REGNO_REG_SET (pbi->reg_live, regno + n);
5599 #ifdef HAVE_conditional_execution
5600 /* If this is a conditional use, record that fact. If it is later
5601 conditionally set, we'll know to kill the register. */
5602 if (cond != NULL_RTX)
5604 splay_tree_node node;
5605 struct reg_cond_life_info *rcli;
5606 rtx ncond;
5608 if (some_was_live)
5610 node = splay_tree_lookup (pbi->reg_cond_dead, regno);
5611 if (node == NULL)
5613 /* The register was unconditionally live previously.
5614 No need to do anything. */
5616 else
5618 /* The register was conditionally live previously.
5619 Subtract the new life cond from the old death cond. */
5620 rcli = (struct reg_cond_life_info *) node->value;
5621 ncond = rcli->condition;
5622 ncond = and_reg_cond (ncond, not_reg_cond (cond), 1);
5624 /* If the register is now unconditionally live, remove the
5625 entry in the splay_tree. */
5626 if (ncond == const0_rtx)
5627 splay_tree_remove (pbi->reg_cond_dead, regno);
5628 else
5630 rcli->condition = ncond;
5631 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));
5635 else
5637 /* The register was not previously live at all. Record
5638 the condition under which it is still dead. */
5639 rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
5640 rcli->condition = not_reg_cond (cond);
5641 rcli->stores = const0_rtx;
5642 rcli->orig_condition = const0_rtx;
5643 splay_tree_insert (pbi->reg_cond_dead, regno,
5644 (splay_tree_value) rcli);
5646 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));
5649 else if (some_was_live)
5651 splay_tree_node node;
5653 node = splay_tree_lookup (pbi->reg_cond_dead, regno);
5654 if (node != NULL)
5656 /* The register was conditionally live previously, but is now
5657 unconditionally so. Remove it from the conditionally dead
5658 list, so that a conditional set won't cause us to think
5659 it dead. */
5660 splay_tree_remove (pbi->reg_cond_dead, regno);
5664 #endif
5667 /* Scan expression X and mark each register it uses as live by setting
5668 the corresponding 1-bit in PBI->REG_LIVE.
5671 INSN is the containing instruction. If INSN is dead, this function
5672 is not called. */
5674 static void
5675 mark_used_regs (pbi, x, cond, insn)
5676 struct propagate_block_info *pbi;
5677 rtx x, cond, insn;
5679 register RTX_CODE code;
5680 register int regno;
5681 int flags = pbi->flags;
5683 retry:
5684 code = GET_CODE (x);
5685 switch (code)
5687 case LABEL_REF:
5688 case SYMBOL_REF:
5689 case CONST_INT:
5690 case CONST:
5691 case CONST_DOUBLE:
5692 case PC:
5693 case ADDR_VEC:
5694 case ADDR_DIFF_VEC:
5695 return;
5697 #ifdef HAVE_cc0
5698 case CC0:
5699 pbi->cc0_live = 1;
5700 return;
5701 #endif
5703 case CLOBBER:
5704 /* If we are clobbering a MEM, mark any registers inside the address
5705 as being used. */
5706 if (GET_CODE (XEXP (x, 0)) == MEM)
5707 mark_used_regs (pbi, XEXP (XEXP (x, 0), 0), cond, insn);
5708 return;
5710 case MEM:
5711 /* Don't bother watching stores to mems if this is not the
5712 final pass. We'll not be deleting dead stores this round. */
5713 if (optimize && (flags & PROP_SCAN_DEAD_CODE))
5715 /* Invalidate the data for the last MEM stored, but only if MEM is
5716 something that can be stored into. */
5717 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
5718 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
5719 /* Needn't clear the memory set list. */
5720 ;
5721 else
5723 rtx temp = pbi->mem_set_list;
5724 rtx prev = NULL_RTX;
5725 rtx next;
5727 while (temp)
5729 next = XEXP (temp, 1);
5730 if (anti_dependence (XEXP (temp, 0), x))
5732 /* Splice temp out of the list. */
5733 if (prev)
5734 XEXP (prev, 1) = next;
5735 else
5736 pbi->mem_set_list = next;
5737 free_EXPR_LIST_node (temp);
5738 pbi->mem_set_list_len--;
5740 else
5741 prev = temp;
5742 temp = next;
5746 /* If the memory reference had embedded side effects (autoincrement
5747 address modes), then we may need to kill some entries on the
5748 memory set list. */
5749 if (insn)
5750 invalidate_mems_from_autoinc (pbi, insn);
5753 #ifdef AUTO_INC_DEC
5754 if (flags & PROP_AUTOINC)
5755 find_auto_inc (pbi, x, insn);
5756 #endif
5757 break;
5759 case SUBREG:
5760 #ifdef CLASS_CANNOT_CHANGE_MODE
5761 if (GET_CODE (SUBREG_REG (x)) == REG
5762 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER
5763 && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (x),
5764 GET_MODE (SUBREG_REG (x))))
5765 REG_CHANGES_MODE (REGNO (SUBREG_REG (x))) = 1;
5766 #endif
5768 /* While we're here, optimize this case. */
5769 x = SUBREG_REG (x);
5770 if (GET_CODE (x) != REG)
5771 goto retry;
5772 /* Fall through. */
5774 case REG:
5775 /* We see a register being used (not set) => mark it as needed. */
5776 mark_used_reg (pbi, x, cond, insn);
5777 return;
5779 case SET:
5781 register rtx testreg = SET_DEST (x);
5782 int mark_dest = 0;
5784 /* If storing into MEM, don't show it as being used. But do
5785 show the address as being used. */
5786 if (GET_CODE (testreg) == MEM)
5788 #ifdef AUTO_INC_DEC
5789 if (flags & PROP_AUTOINC)
5790 find_auto_inc (pbi, testreg, insn);
5791 #endif
5792 mark_used_regs (pbi, XEXP (testreg, 0), cond, insn);
5793 mark_used_regs (pbi, SET_SRC (x), cond, insn);
5794 return;
5797 /* Storing in STRICT_LOW_PART is like storing in a reg
5798 in that this SET might be dead, so ignore it in TESTREG,
5799 but in some other ways it is like using the reg.
5801 Storing in a SUBREG or a bit field is like storing the entire
5802 register in that if the register's value is not used
5803 then this SET is not needed. */
5804 while (GET_CODE (testreg) == STRICT_LOW_PART
5805 || GET_CODE (testreg) == ZERO_EXTRACT
5806 || GET_CODE (testreg) == SIGN_EXTRACT
5807 || GET_CODE (testreg) == SUBREG)
5809 #ifdef CLASS_CANNOT_CHANGE_MODE
5810 if (GET_CODE (testreg) == SUBREG
5811 && GET_CODE (SUBREG_REG (testreg)) == REG
5812 && REGNO (SUBREG_REG (testreg)) >= FIRST_PSEUDO_REGISTER
5813 && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (SUBREG_REG (testreg)),
5814 GET_MODE (testreg)))
5815 REG_CHANGES_MODE (REGNO (SUBREG_REG (testreg))) = 1;
5816 #endif
5818 /* Modifying a single register in an alternate mode
5819 does not use any of the old value. But these other
5820 ways of storing in a register do use the old value. */
5821 if (GET_CODE (testreg) == SUBREG
5822 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
5823 ;
5824 else
5825 mark_dest = 1;
5827 testreg = XEXP (testreg, 0);
5830 /* If this is a store into a register or group of registers,
5831 recursively scan the value being stored. */
5833 if ((GET_CODE (testreg) == PARALLEL
5834 && GET_MODE (testreg) == BLKmode)
5835 || (GET_CODE (testreg) == REG
5836 && (regno = REGNO (testreg),
5837 ! (regno == FRAME_POINTER_REGNUM
5838 && (! reload_completed || frame_pointer_needed)))
5839 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
5840 && ! (regno == HARD_FRAME_POINTER_REGNUM
5841 && (! reload_completed || frame_pointer_needed))
5842 #endif
5843 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
5844 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
5845 #endif
5848 if (mark_dest)
5849 mark_used_regs (pbi, SET_DEST (x), cond, insn);
5850 mark_used_regs (pbi, SET_SRC (x), cond, insn);
5851 return;
5854 break;
5856 case ASM_OPERANDS:
5857 case UNSPEC_VOLATILE:
5858 case TRAP_IF:
5859 case ASM_INPUT:
5861 /* Traditional and volatile asm instructions must be considered to use
5862 and clobber all hard registers, all pseudo-registers and all of
5863 memory. So must TRAP_IF and UNSPEC_VOLATILE operations.
5865 Consider for instance a volatile asm that changes the fpu rounding
5866 mode. An insn should not be moved across this even if it only uses
5867 pseudo-regs because it might give an incorrectly rounded result.
5869 ?!? Unfortunately, marking all hard registers as live causes massive
5870 problems for the register allocator and marking all pseudos as live
5871 creates mountains of uninitialized variable warnings.
5873 So for now, just clear the memory set list and mark any regs
5874 we can find in ASM_OPERANDS as used. */
5875 if (code != ASM_OPERANDS || MEM_VOLATILE_P (x))
5877 free_EXPR_LIST_list (&pbi->mem_set_list);
5878 pbi->mem_set_list_len = 0;
5881 /* For all ASM_OPERANDS, we must traverse the vector of input operands.
5882 We cannot just fall through here since then we would be confused
5883 by the ASM_INPUT rtx inside ASM_OPERANDS, which does not indicate
5884 a traditional asm, unlike its normal usage. */
5885 if (code == ASM_OPERANDS)
5887 int j;
5889 for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
5890 mark_used_regs (pbi, ASM_OPERANDS_INPUT (x, j), cond, insn);
5892 break;
5895 case COND_EXEC:
5896 if (cond != NULL_RTX)
5897 abort ();
5899 mark_used_regs (pbi, COND_EXEC_TEST (x), NULL_RTX, insn);
5901 cond = COND_EXEC_TEST (x);
5902 x = COND_EXEC_CODE (x);
5903 goto retry;
5905 case PHI:
5906 /* We _do_not_ want to scan operands of phi nodes. Operands of
5907 a phi function are evaluated only when control reaches this
5908 block along a particular edge. Therefore, regs that appear
5909 as arguments to phi should not be added to the global live at
5910 start. */
5911 return;
5913 default:
5914 break;
5917 /* Recursively scan the operands of this expression. */
5920 register const char *fmt = GET_RTX_FORMAT (code);
5921 register int i;
5923 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5925 if (fmt[i] == 'e')
5927 /* Tail recursive case: save a function call level. */
5928 if (i == 0)
5930 x = XEXP (x, 0);
5931 goto retry;
5933 mark_used_regs (pbi, XEXP (x, i), cond, insn);
5935 else if (fmt[i] == 'E')
5937 register int j;
5938 for (j = 0; j < XVECLEN (x, i); j++)
5939 mark_used_regs (pbi, XVECEXP (x, i, j), cond, insn);
5945 #ifdef AUTO_INC_DEC
5947 static int
5948 try_pre_increment_1 (pbi, insn)
5949 struct propagate_block_info *pbi;
5950 rtx insn;
5952 /* Find the next use of this reg. If it is in the same basic block,
5953 make it do pre-increment or pre-decrement if appropriate. */
5954 rtx x = single_set (insn);
5955 HOST_WIDE_INT amount = ((GET_CODE (SET_SRC (x)) == PLUS ? 1 : -1)
5956 * INTVAL (XEXP (SET_SRC (x), 1)));
5957 int regno = REGNO (SET_DEST (x));
5958 rtx y = pbi->reg_next_use[regno];
5959 if (y != 0
5960 && SET_DEST (x) != stack_pointer_rtx
5961 && BLOCK_NUM (y) == BLOCK_NUM (insn)
5962 /* Don't do this if the reg dies, or gets set in y; a standard addressing
5963 mode would be better. */
5964 && ! dead_or_set_p (y, SET_DEST (x))
5965 && try_pre_increment (y, SET_DEST (x), amount))
5967 /* We have found a suitable auto-increment and already changed
5968 insn Y to do it. So flush this increment instruction. */
5969 propagate_block_delete_insn (pbi->bb, insn);
5971 /* Count a reference to this reg for the increment insn we are
5972 deleting. When a reg is incremented, spilling it is worse,
5973 so we want to make that less likely. */
5974 if (regno >= FIRST_PSEUDO_REGISTER)
5976 REG_N_REFS (regno) += (optimize_size ? 1
5977 : pbi->bb->loop_depth + 1);
5978 REG_N_SETS (regno)++;
5981 /* Flush any remembered memories depending on the value of
5982 the incremented register. */
5983 invalidate_mems_from_set (pbi, SET_DEST (x));
5985 return 1;
5987 return 0;
5990 /* Try to change INSN so that it does pre-increment or pre-decrement
5991 addressing on register REG in order to add AMOUNT to REG.
5992 AMOUNT is negative for pre-decrement.
5993 Returns 1 if the change could be made.
5994 This checks all aspects of the validity of the result of modifying INSN. */
5996 static int
5997 try_pre_increment (insn, reg, amount)
5998 rtx insn, reg;
5999 HOST_WIDE_INT amount;
6001 register rtx use;
6003 /* Nonzero if we can try to make a pre-increment or pre-decrement.
6004 For example, addl $4,r1; movl (r1),... can become movl +(r1),... */
6005 int pre_ok = 0;
6006 /* Nonzero if we can try to make a post-increment or post-decrement.
6007 For example, addl $4,r1; movl -4(r1),... can become movl (r1)+,...
6008 It is possible for both PRE_OK and POST_OK to be nonzero if the machine
6009 supports both pre-inc and post-inc, or both pre-dec and post-dec. */
6010 int post_ok = 0;
6012 /* Nonzero if the opportunity actually requires post-inc or post-dec. */
6013 int do_post = 0;
6015 /* From the sign of increment, see which possibilities are conceivable
6016 on this target machine. */
6017 if (HAVE_PRE_INCREMENT && amount > 0)
6018 pre_ok = 1;
6019 if (HAVE_POST_INCREMENT && amount > 0)
6020 post_ok = 1;
6022 if (HAVE_PRE_DECREMENT && amount < 0)
6023 pre_ok = 1;
6024 if (HAVE_POST_DECREMENT && amount < 0)
6025 post_ok = 1;
6027 if (! (pre_ok || post_ok))
6028 return 0;
6030 /* It is not safe to add a side effect to a jump insn
6031 because if the incremented register is spilled and must be reloaded
6032 there would be no way to store the incremented value back in memory. */
6034 if (GET_CODE (insn) == JUMP_INSN)
6035 return 0;
6037 use = 0;
6038 if (pre_ok)
6039 use = find_use_as_address (PATTERN (insn), reg, 0);
6040 if (post_ok && (use == 0 || use == (rtx) 1))
6042 use = find_use_as_address (PATTERN (insn), reg, -amount);
6043 do_post = 1;
6046 if (use == 0 || use == (rtx) 1)
6047 return 0;
6049 if (GET_MODE_SIZE (GET_MODE (use)) != (amount > 0 ? amount : - amount))
6050 return 0;
6052 /* See if this combination of instruction and addressing mode exists. */
6053 if (! validate_change (insn, &XEXP (use, 0),
6054 gen_rtx_fmt_e (amount > 0
6055 ? (do_post ? POST_INC : PRE_INC)
6056 : (do_post ? POST_DEC : PRE_DEC),
6057 Pmode, reg), 0))
6058 return 0;
6060 /* Record that this insn now has an implicit side effect on X. */
6061 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, reg, REG_NOTES (insn));
6062 return 1;
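/* An illustrative sketch of the rewrite performed here (not part of
   the original source). Given a suitable target, an increment insn
   followed by a memory use of the same register, e.g.

       (set (reg r1) (plus (reg r1) (const_int 4)))
       (set (reg r2) (mem (reg r1)))

   is transformed so the memory reference performs the increment:

       (set (reg r2) (mem (pre_inc (reg r1))))

   validate_change installs the new address only if the target
   recognizes the resulting insn, a REG_INC note for r1 is attached,
   and the caller (try_pre_increment_1) then deletes the now-redundant
   add insn.  */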
6065 #endif /* AUTO_INC_DEC */
6067 /* Find the place in the rtx X where REG is used as a memory address.
6068 Return the MEM rtx that so uses it.
6069 If PLUSCONST is nonzero, search instead for a memory address equivalent to
6070 (plus REG (const_int PLUSCONST)).
6072 If such an address does not appear, return 0.
6073 If REG appears more than once, or is used other than in such an address,
6074 return (rtx)1. */
6076 rtx
6077 find_use_as_address (x, reg, plusconst)
6078 register rtx x;
6079 rtx reg;
6080 HOST_WIDE_INT plusconst;
6082 enum rtx_code code = GET_CODE (x);
6083 const char *fmt = GET_RTX_FORMAT (code);
6084 register int i;
6085 register rtx value = 0;
6086 register rtx tem;
6088 if (code == MEM && XEXP (x, 0) == reg && plusconst == 0)
6089 return x;
6091 if (code == MEM && GET_CODE (XEXP (x, 0)) == PLUS
6092 && XEXP (XEXP (x, 0), 0) == reg
6093 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
6094 && INTVAL (XEXP (XEXP (x, 0), 1)) == plusconst)
6095 return x;
6097 if (code == SIGN_EXTRACT || code == ZERO_EXTRACT)
6099 /* If REG occurs inside a MEM used in a bit-field reference,
6100 that is unacceptable. */
6101 if (find_use_as_address (XEXP (x, 0), reg, 0) != 0)
6102 return (rtx) (HOST_WIDE_INT) 1;
6105 if (x == reg)
6106 return (rtx) (HOST_WIDE_INT) 1;
6108 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6110 if (fmt[i] == 'e')
6112 tem = find_use_as_address (XEXP (x, i), reg, plusconst);
6113 if (value == 0)
6114 value = tem;
6115 else if (tem != 0)
6116 return (rtx) (HOST_WIDE_INT) 1;
6118 else if (fmt[i] == 'E')
6120 register int j;
6121 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6123 tem = find_use_as_address (XVECEXP (x, i, j), reg, plusconst);
6124 if (value == 0)
6125 value = tem;
6126 else if (tem != 0)
6127 return (rtx) (HOST_WIDE_INT) 1;
6132 return value;
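/* A minimal usage sketch (illustrative only, not in the original
   source): callers must distinguish the three possible results of
   find_use_as_address, as try_pre_increment does above.  */
#if 0
static int
reg_used_once_as_address_p (insn, reg)
     rtx insn, reg;
{
  rtx use = find_use_as_address (PATTERN (insn), reg, 0);

  /* 0 means REG is not used as a memory address at all; (rtx) 1
     means it appears more than once or outside an address; any
     other value is the unique MEM whose address uses REG.  */
  return use != 0 && use != (rtx) 1;
}
#endif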
6135 /* Write information about registers and basic blocks into FILE.
6136 This is part of making a debugging dump. */
6138 void
6139 dump_regset (r, outf)
6140 regset r;
6141 FILE *outf;
6143 int i;
6144 if (r == NULL)
6146 fputs (" (nil)", outf);
6147 return;
6150 EXECUTE_IF_SET_IN_REG_SET (r, 0, i,
6152 fprintf (outf, " %d", i);
6153 if (i < FIRST_PSEUDO_REGISTER)
6154 fprintf (outf, " [%s]",
6155 reg_names[i]);
6159 void
6160 debug_regset (r)
6161 regset r;
6163 dump_regset (r, stderr);
6164 putc ('\n', stderr);
6167 void
6168 dump_flow_info (file)
6169 FILE *file;
6171 register int i;
6172 static const char * const reg_class_names[] = REG_CLASS_NAMES;
6174 fprintf (file, "%d registers.\n", max_regno);
6175 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
6176 if (REG_N_REFS (i))
6178 enum reg_class class, altclass;
6179 fprintf (file, "\nRegister %d used %d times across %d insns",
6180 i, REG_N_REFS (i), REG_LIVE_LENGTH (i));
6181 if (REG_BASIC_BLOCK (i) >= 0)
6182 fprintf (file, " in block %d", REG_BASIC_BLOCK (i));
6183 if (REG_N_SETS (i))
6184 fprintf (file, "; set %d time%s", REG_N_SETS (i),
6185 (REG_N_SETS (i) == 1) ? "" : "s");
6186 if (REG_USERVAR_P (regno_reg_rtx[i]))
6187 fprintf (file, "; user var");
6188 if (REG_N_DEATHS (i) != 1)
6189 fprintf (file, "; dies in %d places", REG_N_DEATHS (i));
6190 if (REG_N_CALLS_CROSSED (i) == 1)
6191 fprintf (file, "; crosses 1 call");
6192 else if (REG_N_CALLS_CROSSED (i))
6193 fprintf (file, "; crosses %d calls", REG_N_CALLS_CROSSED (i));
6194 if (PSEUDO_REGNO_BYTES (i) != UNITS_PER_WORD)
6195 fprintf (file, "; %d bytes", PSEUDO_REGNO_BYTES (i));
6196 class = reg_preferred_class (i);
6197 altclass = reg_alternate_class (i);
6198 if (class != GENERAL_REGS || altclass != ALL_REGS)
6200 if (altclass == ALL_REGS || class == ALL_REGS)
6201 fprintf (file, "; pref %s", reg_class_names[(int) class]);
6202 else if (altclass == NO_REGS)
6203 fprintf (file, "; %s or none", reg_class_names[(int) class]);
6204 else
6205 fprintf (file, "; pref %s, else %s",
6206 reg_class_names[(int) class],
6207 reg_class_names[(int) altclass]);
6209 if (REG_POINTER (regno_reg_rtx[i]))
6210 fprintf (file, "; pointer");
6211 fprintf (file, ".\n");
6214 fprintf (file, "\n%d basic blocks, %d edges.\n", n_basic_blocks, n_edges);
6215 for (i = 0; i < n_basic_blocks; i++)
6217 register basic_block bb = BASIC_BLOCK (i);
6218 register edge e;
6220 fprintf (file, "\nBasic block %d: first insn %d, last %d, loop_depth %d, count %d.\n",
6221 i, INSN_UID (bb->head), INSN_UID (bb->end), bb->loop_depth, bb->count);
6223 fprintf (file, "Predecessors: ");
6224 for (e = bb->pred; e; e = e->pred_next)
6225 dump_edge_info (file, e, 0);
6227 fprintf (file, "\nSuccessors: ");
6228 for (e = bb->succ; e; e = e->succ_next)
6229 dump_edge_info (file, e, 1);
6231 fprintf (file, "\nRegisters live at start:");
6232 dump_regset (bb->global_live_at_start, file);
6234 fprintf (file, "\nRegisters live at end:");
6235 dump_regset (bb->global_live_at_end, file);
6237 putc ('\n', file);
6240 putc ('\n', file);
6243 void
6244 debug_flow_info ()
6246 dump_flow_info (stderr);
6249 static void
6250 dump_edge_info (file, e, do_succ)
6251 FILE *file;
6252 edge e;
6253 int do_succ;
6255 basic_block side = (do_succ ? e->dest : e->src);
6257 if (side == ENTRY_BLOCK_PTR)
6258 fputs (" ENTRY", file);
6259 else if (side == EXIT_BLOCK_PTR)
6260 fputs (" EXIT", file);
6261 else
6262 fprintf (file, " %d", side->index);
6264 if (e->count)
6265 fprintf (file, " count:%d", e->count);
6267 if (e->flags)
6269 static const char * const bitnames[] = {
6270 "fallthru", "crit", "ab", "abcall", "eh", "fake"
6272 int comma = 0;
6273 int i, flags = e->flags;
6275 fputc (' ', file);
6276 fputc ('(', file);
6277 for (i = 0; flags; i++)
6278 if (flags & (1 << i))
6280 flags &= ~(1 << i);
6282 if (comma)
6283 fputc (',', file);
6284 if (i < (int) ARRAY_SIZE (bitnames))
6285 fputs (bitnames[i], file);
6286 else
6287 fprintf (file, "%d", i);
6288 comma = 1;
6290 fputc (')', file);
6294 /* Print out one basic block with live information at start and end. */
6296 void
6297 dump_bb (bb, outf)
6298 basic_block bb;
6299 FILE *outf;
6301 rtx insn;
6302 rtx last;
6303 edge e;
6305 fprintf (outf, ";; Basic block %d, loop depth %d, count %d",
6306 bb->index, bb->loop_depth, bb->count);
6307 putc ('\n', outf);
6309 fputs (";; Predecessors: ", outf);
6310 for (e = bb->pred; e; e = e->pred_next)
6311 dump_edge_info (outf, e, 0);
6312 putc ('\n', outf);
6314 fputs (";; Registers live at start:", outf);
6315 dump_regset (bb->global_live_at_start, outf);
6316 putc ('\n', outf);
6318 for (insn = bb->head, last = NEXT_INSN (bb->end);
6319 insn != last;
6320 insn = NEXT_INSN (insn))
6321 print_rtl_single (outf, insn);
6323 fputs (";; Registers live at end:", outf);
6324 dump_regset (bb->global_live_at_end, outf);
6325 putc ('\n', outf);
6327 fputs (";; Successors: ", outf);
6328 for (e = bb->succ; e; e = e->succ_next)
6329 dump_edge_info (outf, e, 1);
6330 putc ('\n', outf);
6333 void
6334 debug_bb (bb)
6335 basic_block bb;
6337 dump_bb (bb, stderr);
6340 void
6341 debug_bb_n (n)
6342 int n;
6344 dump_bb (BASIC_BLOCK (n), stderr);
6347 /* Like print_rtl, but also print out live information for the start of each
6348 basic block. */
6350 void
6351 print_rtl_with_bb (outf, rtx_first)
6352 FILE *outf;
6353 rtx rtx_first;
6355 register rtx tmp_rtx;
6357 if (rtx_first == 0)
6358 fprintf (outf, "(nil)\n");
6359 else
6361 int i;
6362 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
6363 int max_uid = get_max_uid ();
6364 basic_block *start = (basic_block *)
6365 xcalloc (max_uid, sizeof (basic_block));
6366 basic_block *end = (basic_block *)
6367 xcalloc (max_uid, sizeof (basic_block));
6368 enum bb_state *in_bb_p = (enum bb_state *)
6369 xcalloc (max_uid, sizeof (enum bb_state));
6371 for (i = n_basic_blocks - 1; i >= 0; i--)
6373 basic_block bb = BASIC_BLOCK (i);
6374 rtx x;
6376 start[INSN_UID (bb->head)] = bb;
6377 end[INSN_UID (bb->end)] = bb;
6378 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
6380 enum bb_state state = IN_MULTIPLE_BB;
6381 if (in_bb_p[INSN_UID (x)] == NOT_IN_BB)
6382 state = IN_ONE_BB;
6383 in_bb_p[INSN_UID (x)] = state;
6385 if (x == bb->end)
6386 break;
6390 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
6392 int did_output;
6393 basic_block bb;
6395 if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
6397 fprintf (outf, ";; Start of basic block %d, registers live:",
6398 bb->index);
6399 dump_regset (bb->global_live_at_start, outf);
6400 putc ('\n', outf);
6403 if (in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB
6404 && GET_CODE (tmp_rtx) != NOTE
6405 && GET_CODE (tmp_rtx) != BARRIER)
6406 fprintf (outf, ";; Insn is not within a basic block\n");
6407 else if (in_bb_p[INSN_UID (tmp_rtx)] == IN_MULTIPLE_BB)
6408 fprintf (outf, ";; Insn is in multiple basic blocks\n");
6410 did_output = print_rtl_single (outf, tmp_rtx);
6412 if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
6414 fprintf (outf, ";; End of basic block %d, registers live:\n",
6415 bb->index);
6416 dump_regset (bb->global_live_at_end, outf);
6417 putc ('\n', outf);
6420 if (did_output)
6421 putc ('\n', outf);
6424 free (start);
6425 free (end);
6426 free (in_bb_p);
6429 if (current_function_epilogue_delay_list != 0)
6431 fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
6432 for (tmp_rtx = current_function_epilogue_delay_list; tmp_rtx != 0;
6433 tmp_rtx = XEXP (tmp_rtx, 1))
6434 print_rtl_single (outf, XEXP (tmp_rtx, 0));
6438 /* Dump the rtl into the current debugging dump file, then abort. */
6439 static void
6440 print_rtl_and_abort ()
6442 if (rtl_dump_file)
6444 print_rtl_with_bb (rtl_dump_file, get_insns ());
6445 fclose (rtl_dump_file);
6447 abort ();
6450 /* Recompute register set/reference counts immediately prior to register
6451 allocation.
6453 This avoids problems with set/reference counts changing to/from values
6454 which have special meanings to the register allocators.
6456 Additionally, the reference counts are the primary component used by the
6457 register allocators to prioritize pseudos for allocation to hard regs.
6458 More accurate reference counts generally lead to better register allocation.
6460 F is the first insn to be scanned.
6462 LOOP_STEP denotes how much loop_depth should be incremented per
6463 loop nesting level in order to increase the ref count more for
6464 references in a loop.
6466 It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and
6467 possibly other information which is used by the register allocators. */
6469 void
6470 recompute_reg_usage (f, loop_step)
6471 rtx f ATTRIBUTE_UNUSED;
6472 int loop_step ATTRIBUTE_UNUSED;
6474 allocate_reg_life_data ();
6475 update_life_info (NULL, UPDATE_LIFE_LOCAL, PROP_REG_INFO);
6478 /* Optionally removes all the REG_DEAD and REG_UNUSED notes from a set of
6479 blocks. If BLOCKS is NULL, assume the universal set. Returns a count
6480 of the number of registers that died. */
6482 int
6483 count_or_remove_death_notes (blocks, kill)
6484 sbitmap blocks;
6485 int kill;
6487 int i, count = 0;
6489 for (i = n_basic_blocks - 1; i >= 0; --i)
6491 basic_block bb;
6492 rtx insn;
6494 if (blocks && ! TEST_BIT (blocks, i))
6495 continue;
6497 bb = BASIC_BLOCK (i);
6499 for (insn = bb->head;; insn = NEXT_INSN (insn))
6501 if (INSN_P (insn))
6503 rtx *pprev = &REG_NOTES (insn);
6504 rtx link = *pprev;
6506 while (link)
6508 switch (REG_NOTE_KIND (link))
6510 case REG_DEAD:
6511 if (GET_CODE (XEXP (link, 0)) == REG)
6513 rtx reg = XEXP (link, 0);
6514 int n;
6516 if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
6517 n = 1;
6518 else
6519 n = HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg));
6520 count += n;
6522 /* Fall through. */
6524 case REG_UNUSED:
6525 if (kill)
6527 rtx next = XEXP (link, 1);
6528 free_EXPR_LIST_node (link);
6529 *pprev = link = next;
6530 break;
6532 /* Fall through. */
6534 default:
6535 pprev = &XEXP (link, 1);
6536 link = *pprev;
6537 break;
6542 if (insn == bb->end)
6543 break;
6547 return count;
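/* The note-deleting loop above uses the classic pointer-to-pointer
   idiom for removing elements from a singly linked list without
   special-casing the head. A self-contained sketch of the same idiom
   (illustrative, not part of the original source):  */
#if 0
struct node { int value; struct node *next; };

static void
delete_matching (head, value)
     struct node **head;
     int value;
{
  struct node **pprev = head;

  while (*pprev)
    {
      struct node *link = *pprev;

      if (link->value == value)
	/* Splice LINK out. *PPREV may be the head pointer itself or
	   some node's next field, so no head special case is needed.
	   (A real user would also free LINK here.)  */
	*pprev = link->next;
      else
	pprev = &link->next;
    }
}
#endif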
6551 /* Update the insn-to-block mapping for all insns within BB. */
6553 void
6554 update_bb_for_insn (bb)
6555 basic_block bb;
6557 rtx insn;
6559 if (! basic_block_for_insn)
6560 return;
6562 for (insn = bb->head; ; insn = NEXT_INSN (insn))
6564 set_block_for_insn (insn, bb);
6566 if (insn == bb->end)
6567 break;
6572 /* Record INSN's block as BB. */
6574 void
6575 set_block_for_insn (insn, bb)
6576 rtx insn;
6577 basic_block bb;
6579 size_t uid = INSN_UID (insn);
6580 if (uid >= basic_block_for_insn->num_elements)
6582 int new_size;
6584 /* Add one-eighth the size so we don't keep calling xrealloc. */
6585 new_size = uid + (uid + 7) / 8;
6587 VARRAY_GROW (basic_block_for_insn, new_size);
6589 VARRAY_BB (basic_block_for_insn, uid) = bb;
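/* Illustrative arithmetic (not part of the original source): for
   uid == 800 the array grows to 800 + (800 + 7) / 8 == 900 entries,
   so growing to N entries one uid at a time costs only O(log N)
   calls to xrealloc rather than one call per insn.  */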
6592 /* Record INSN's block number as BB. */
6593 /* ??? This has got to go. */
6595 void
6596 set_block_num (insn, bb)
6597 rtx insn;
6598 int bb;
6600 set_block_for_insn (insn, BASIC_BLOCK (bb));
6603 /* Verify CFG consistency. This function checks some CFG invariants and
6604 aborts when something is wrong. The hope is that it will help to
6605 convert many optimization passes to preserve a consistent CFG.
6607 Currently it performs the following checks:
6609 - the head/end pointers are valid
6610 - basic blocks do not overlap
6611 - the edge lists are correct
6612 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
6613 - tails of basic blocks (ensure that each block boundary is necessary)
6614 - the body of each basic block is scanned for JUMP_INSN, CODE_LABEL
6615 and NOTE_INSN_BASIC_BLOCK
6616 - all insns are within basic blocks
6617 (except the switch handling code, barriers and notes)
6618 - all returns are followed by barriers
6620 In the future it can be extended to check a lot of other things as well
6621 (reachability of basic blocks, life information, etc.). */
6623 void
6624 verify_flow_info ()
6626 const int max_uid = get_max_uid ();
6627 const rtx rtx_first = get_insns ();
6628 rtx last_head = get_last_insn ();
6629 basic_block *bb_info;
6630 rtx x;
6631 int i, last_bb_num_seen, num_bb_notes, err = 0;
6633 bb_info = (basic_block *) xcalloc (max_uid, sizeof (basic_block));
6635 for (i = n_basic_blocks - 1; i >= 0; i--)
6637 basic_block bb = BASIC_BLOCK (i);
6638 rtx head = bb->head;
6639 rtx end = bb->end;
6641 /* Verify the end of the basic block is in the INSN chain. */
6642 for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
6643 if (x == end)
6644 break;
6645 if (!x)
6647 error ("End insn %d for block %d not found in the insn stream.",
6648 INSN_UID (end), bb->index);
6649 err = 1;
6652 /* Work backwards from the end to the head of the basic block
6653 to verify the head is in the RTL chain. */
6654 for (; x != NULL_RTX; x = PREV_INSN (x))
6656 /* While walking over the insn chain, verify insns appear
6657 in only one basic block and initialize the BB_INFO array
6658 used by other passes. */
6659 if (bb_info[INSN_UID (x)] != NULL)
6661 error ("Insn %d is in multiple basic blocks (%d and %d)",
6662 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
6663 err = 1;
6665 bb_info[INSN_UID (x)] = bb;
6667 if (x == head)
6668 break;
6670 if (!x)
6672 error ("Head insn %d for block %d not found in the insn stream.",
6673 INSN_UID (head), bb->index);
6674 err = 1;
6677 last_head = x;
6680 /* Now check the basic blocks (boundaries etc.) */
6681 for (i = n_basic_blocks - 1; i >= 0; i--)
6683 basic_block bb = BASIC_BLOCK (i);
6684 /* Check correctness of the edge lists. */
6685 edge e;
6687 e = bb->succ;
6688 while (e)
6690 if (e->src != bb)
6692 fprintf (stderr,
6693 "verify_flow_info: Basic block %d succ edge is corrupted\n",
6694 bb->index);
6695 fprintf (stderr, "Predecessor: ");
6696 dump_edge_info (stderr, e, 0);
6697 fprintf (stderr, "\nSuccessor: ");
6698 dump_edge_info (stderr, e, 1);
6699 fflush (stderr);
6700 err = 1;
6702 if (e->dest != EXIT_BLOCK_PTR)
6704 edge e2 = e->dest->pred;
6705 while (e2 && e2 != e)
6706 e2 = e2->pred_next;
6707 if (!e2)
6709 error ("Basic block %i edge lists are corrupted", bb->index);
6710 err = 1;
6713 e = e->succ_next;
6716 e = bb->pred;
6717 while (e)
6719 if (e->dest != bb)
6721 error ("Basic block %d pred edge is corrupted", bb->index);
6722 fputs ("Predecessor: ", stderr);
6723 dump_edge_info (stderr, e, 0);
6724 fputs ("\nSuccessor: ", stderr);
6725 dump_edge_info (stderr, e, 1);
6726 fputc ('\n', stderr);
6727 err = 1;
6729 if (e->src != ENTRY_BLOCK_PTR)
6731 edge e2 = e->src->succ;
6732 while (e2 && e2 != e)
6733 e2 = e2->succ_next;
6734 if (!e2)
6736 error ("Basic block %i edge lists are corrupted", bb->index);
6737 err = 1;
6740 e = e->pred_next;
6743 /* OK, the pointers are correct. Now check the header of the basic
6744 block. It ought to contain an optional CODE_LABEL followed
6745 by NOTE_BASIC_BLOCK. */
6746 x = bb->head;
6747 if (GET_CODE (x) == CODE_LABEL)
6749 if (bb->end == x)
6751 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
6752 bb->index);
6753 err = 1;
6755 x = NEXT_INSN (x);
6757 if (!NOTE_INSN_BASIC_BLOCK_P (x) || NOTE_BASIC_BLOCK (x) != bb)
6759 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d\n",
6760 bb->index);
6761 err = 1;
6764 if (bb->end == x)
6766 /* Do checks for empty blocks here */
6768 else
6770 x = NEXT_INSN (x);
6771 while (x)
6773 if (NOTE_INSN_BASIC_BLOCK_P (x))
6775 error ("NOTE_INSN_BASIC_BLOCK %d in the middle of basic block %d",
6776 INSN_UID (x), bb->index);
6777 err = 1;
6780 if (x == bb->end)
6781 break;
6783 if (GET_CODE (x) == JUMP_INSN
6784 || GET_CODE (x) == CODE_LABEL
6785 || GET_CODE (x) == BARRIER)
6787 error ("In basic block %d:", bb->index);
6788 fatal_insn ("Flow control insn inside a basic block", x);
6791 x = NEXT_INSN (x);
6796 last_bb_num_seen = -1;
6797 num_bb_notes = 0;
6798 x = rtx_first;
6799 while (x)
6801 if (NOTE_INSN_BASIC_BLOCK_P (x))
6803 basic_block bb = NOTE_BASIC_BLOCK (x);
6804 num_bb_notes++;
6805 if (bb->index != last_bb_num_seen + 1)
6806 /* Basic blocks not numbered consecutively. */
6807 abort ();
6809 last_bb_num_seen = bb->index;
6812 if (!bb_info[INSN_UID (x)])
6814 switch (GET_CODE (x))
6816 case BARRIER:
6817 case NOTE:
6818 break;
6820 case CODE_LABEL:
6821 /* An addr_vec is placed outside any basic block. */
6822 if (NEXT_INSN (x)
6823 && GET_CODE (NEXT_INSN (x)) == JUMP_INSN
6824 && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
6825 || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC))
6827 x = NEXT_INSN (x);
6830 /* But in any case, non-deletable labels can appear anywhere. */
6831 break;
6833 default:
6834 fatal_insn ("Insn outside basic block", x);
6838 if (INSN_P (x)
6839 && GET_CODE (x) == JUMP_INSN
6840 && returnjump_p (x) && ! condjump_p (x)
6841 && ! (NEXT_INSN (x) && GET_CODE (NEXT_INSN (x)) == BARRIER))
6842 fatal_insn ("Return not followed by barrier", x);
6844 x = NEXT_INSN (x);
6847 if (num_bb_notes != n_basic_blocks)
6848 internal_error
6849 ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
6850 num_bb_notes, n_basic_blocks);
6852 if (err)
6853 abort ();
6855 /* Clean up. */
6856 free (bb_info);
6859 /* Functions to access an edge list with a vector representation.
6860 Enough data is kept such that given an index number, the
6861 pred and succ that edge represents can be determined, or
6862 given a pred and a succ, its index number can be returned.
6863 This allows algorithms which consume a lot of memory to
6864 represent the normally full matrix of edge (pred,succ) with a
6865 single indexed vector, edge (EDGE_INDEX (pred, succ)), with no
6866 wasted space in the client code due to sparse flow graphs. */
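/* A minimal usage sketch for these accessors (illustrative, not in
   the original source). The entry and exit blocks participate too
   and appear with negative block indices.  */
#if 0
static void
show_edge_indices (f)
     FILE *f;
{
  struct edge_list *elist = create_edge_list ();
  int i;

  /* Map each index back to its endpoints ...  */
  for (i = 0; i < NUM_EDGES (elist); i++)
    fprintf (f, "edge %d: %d -> %d\n", i,
	     INDEX_EDGE_PRED_BB (elist, i)->index,
	     INDEX_EDGE_SUCC_BB (elist, i)->index);

  /* ... and map a (pred, succ) pair back to its index, if any.  */
  if (n_basic_blocks >= 2
      && EDGE_INDEX (elist, BASIC_BLOCK (0), BASIC_BLOCK (1))
	 != EDGE_INDEX_NO_EDGE)
    fprintf (f, "block 0 has an edge to block 1\n");

  free_edge_list (elist);
}
#endif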
6868 /* This function initializes the edge list. Basically the entire
6869 flowgraph is processed, and all edges are assigned a number,
6870 and the data structure is filled in. */
6872 struct edge_list *
6873 create_edge_list ()
6875 struct edge_list *elist;
6876 edge e;
6877 int num_edges;
6878 int x;
6879 int block_count;
6881 block_count = n_basic_blocks + 2; /* Include the entry and exit blocks. */
6883 num_edges = 0;
6885 /* Determine the number of edges in the flow graph by counting successor
6886 edges on each basic block. */
6887 for (x = 0; x < n_basic_blocks; x++)
6889 basic_block bb = BASIC_BLOCK (x);
6891 for (e = bb->succ; e; e = e->succ_next)
6892 num_edges++;
6894 /* Don't forget successors of the entry block. */
6895 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6896 num_edges++;
6898 elist = (struct edge_list *) xmalloc (sizeof (struct edge_list));
6899 elist->num_blocks = block_count;
6900 elist->num_edges = num_edges;
6901 elist->index_to_edge = (edge *) xmalloc (sizeof (edge) * num_edges);
6903 num_edges = 0;
6905 /* Follow successors of the entry block, and register these edges. */
6906 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6908 elist->index_to_edge[num_edges] = e;
6909 num_edges++;
6912 for (x = 0; x < n_basic_blocks; x++)
6914 basic_block bb = BASIC_BLOCK (x);
6916 /* Follow all successors of blocks, and register these edges. */
6917 for (e = bb->succ; e; e = e->succ_next)
6919 elist->index_to_edge[num_edges] = e;
6920 num_edges++;
6923 return elist;
6926 /* This function frees the memory associated with an edge list. */
6928 void
6929 free_edge_list (elist)
6930 struct edge_list *elist;
6932 if (elist)
6934 free (elist->index_to_edge);
6935 free (elist);
6939 /* This function provides debug output showing an edge list. */
6941 void
6942 print_edge_list (f, elist)
6943 FILE *f;
6944 struct edge_list *elist;
6946 int x;
6947 fprintf (f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
6948 elist->num_blocks - 2, elist->num_edges);
6950 for (x = 0; x < elist->num_edges; x++)
6952 fprintf (f, " %-4d - edge(", x);
6953 if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR)
6954 fprintf (f, "entry,");
6955 else
6956 fprintf (f, "%d,", INDEX_EDGE_PRED_BB (elist, x)->index);
6958 if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR)
6959 fprintf (f, "exit)\n");
6960 else
6961 fprintf (f, "%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
6965 /* This function provides an internal consistency check of an edge list,
6966 verifying that all edges are present, and that there are no
6967 extra edges. */
6969 void
6970 verify_edge_list (f, elist)
6971 FILE *f;
6972 struct edge_list *elist;
6974 int x, pred, succ, index;
6975 edge e;
6977 for (x = 0; x < n_basic_blocks; x++)
6979 basic_block bb = BASIC_BLOCK (x);
6981 for (e = bb->succ; e; e = e->succ_next)
6983 pred = e->src->index;
6984 succ = e->dest->index;
6985 index = EDGE_INDEX (elist, e->src, e->dest);
6986 if (index == EDGE_INDEX_NO_EDGE)
6988 fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
6989 continue;
6991 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
6992 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
6993 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
6994 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
6995 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
6996 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
6999 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
7001 pred = e->src->index;
7002 succ = e->dest->index;
7003 index = EDGE_INDEX (elist, e->src, e->dest);
7004 if (index == EDGE_INDEX_NO_EDGE)
7006 fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
7007 continue;
7009 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
7010 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
7011 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
7012 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
7013 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
7014 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
7016 /* We've verified that all the edges are in the list; now let's make sure
7017 there are no spurious edges in the list. */
7019 for (pred = 0; pred < n_basic_blocks; pred++)
7020 for (succ = 0; succ < n_basic_blocks; succ++)
7022 basic_block p = BASIC_BLOCK (pred);
7023 basic_block s = BASIC_BLOCK (succ);
7025 int found_edge = 0;
7027 for (e = p->succ; e; e = e->succ_next)
7028 if (e->dest == s)
7030 found_edge = 1;
7031 break;
7033 for (e = s->pred; e; e = e->pred_next)
7034 if (e->src == p)
7036 found_edge = 1;
7037 break;
7039 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
7040 == EDGE_INDEX_NO_EDGE && found_edge != 0)
7041 fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
7042 pred, succ);
7043 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
7044 != EDGE_INDEX_NO_EDGE && found_edge == 0)
7045 fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
7046 pred, succ, EDGE_INDEX (elist, BASIC_BLOCK (pred),
7047 BASIC_BLOCK (succ)));
7049 for (succ = 0; succ < n_basic_blocks; succ++)
7051 basic_block p = ENTRY_BLOCK_PTR;
7052 basic_block s = BASIC_BLOCK (succ);
7054 int found_edge = 0;
7056 for (e = p->succ; e; e = e->succ_next)
7057 if (e->dest == s)
7059 found_edge = 1;
7060 break;
7062 for (e = s->pred; e; e = e->pred_next)
7063 if (e->src == p)
7065 found_edge = 1;
7066 break;
7068 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
7069 == EDGE_INDEX_NO_EDGE && found_edge != 0)
7070 fprintf (f, "*** Edge (entry, %d) appears to not have an index\n",
7071 succ);
7072 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
7073 != EDGE_INDEX_NO_EDGE && found_edge == 0)
7074 fprintf (f, "*** Edge (entry, %d) has index %d, but no edge exists\n",
7075 succ, EDGE_INDEX (elist, ENTRY_BLOCK_PTR,
7076 BASIC_BLOCK (succ)));
7078 for (pred = 0; pred < n_basic_blocks; pred++)
7080 basic_block p = BASIC_BLOCK (pred);
7081 basic_block s = EXIT_BLOCK_PTR;
7083 int found_edge = 0;
7085 for (e = p->succ; e; e = e->succ_next)
7086 if (e->dest == s)
7088 found_edge = 1;
7089 break;
7091 for (e = s->pred; e; e = e->pred_next)
7092 if (e->src == p)
7094 found_edge = 1;
7095 break;
7097 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
7098 == EDGE_INDEX_NO_EDGE && found_edge != 0)
7099 fprintf (f, "*** Edge (%d, exit) appears to not have an index\n",
7100 pred);
7101 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
7102 != EDGE_INDEX_NO_EDGE && found_edge == 0)
7103 fprintf (f, "*** Edge (%d, exit) has index %d, but no edge exists\n",
7104 pred, EDGE_INDEX (elist, BASIC_BLOCK (pred),
7105 EXIT_BLOCK_PTR));
7109 /* This routine will determine what, if any, edge there is between
7110 a specified predecessor and successor. */
7112 int
7113 find_edge_index (edge_list, pred, succ)
7114 struct edge_list *edge_list;
7115 basic_block pred, succ;
7117 int x;
7118 for (x = 0; x < NUM_EDGES (edge_list); x++)
7120 if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
7121 && INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
7122 return x;
7124 return (EDGE_INDEX_NO_EDGE);
7127 /* This function will remove an edge from the flow graph. */
7129 void
7130 remove_edge (e)
7131 edge e;
7133 edge last_pred = NULL;
7134 edge last_succ = NULL;
7135 edge tmp;
7136 basic_block src, dest;
7137 src = e->src;
7138 dest = e->dest;
7139 for (tmp = src->succ; tmp && tmp != e; tmp = tmp->succ_next)
7140 last_succ = tmp;
7142 if (!tmp)
7143 abort ();
7144 if (last_succ)
7145 last_succ->succ_next = e->succ_next;
7146 else
7147 src->succ = e->succ_next;
7149 for (tmp = dest->pred; tmp && tmp != e; tmp = tmp->pred_next)
7150 last_pred = tmp;
7152 if (!tmp)
7153 abort ();
7154 if (last_pred)
7155 last_pred->pred_next = e->pred_next;
7156 else
7157 dest->pred = e->pred_next;
7159 n_edges--;
7160 free (e);
7163 /* This routine will remove any fake successor edges for a basic block.
7164 When the edge is removed, it is also removed from whatever predecessor
7165 list it is in. */
7167 static void
7168 remove_fake_successors (bb)
7169 basic_block bb;
7171 edge e;
7172 for (e = bb->succ; e;)
7174 edge tmp = e;
7175 e = e->succ_next;
7176 if ((tmp->flags & EDGE_FAKE) == EDGE_FAKE)
7177 remove_edge (tmp);
7181 /* This routine will remove all fake edges from the flow graph. If
7182 we remove all fake successors, it will automatically remove all
7183 fake predecessors. */
7185 void
7186 remove_fake_edges ()
7188 int x;
7190 for (x = 0; x < n_basic_blocks; x++)
7191 remove_fake_successors (BASIC_BLOCK (x));
7193 /* We've handled all successors except the entry block's. */
7194 remove_fake_successors (ENTRY_BLOCK_PTR);
7197 /* This function will add a fake edge between any block which has no
7198 successors, and the exit block. Some data flow equations require these
7199 edges to exist. */
7201 void
7202 add_noreturn_fake_exit_edges ()
7204 int x;
7206 for (x = 0; x < n_basic_blocks; x++)
7207 if (BASIC_BLOCK (x)->succ == NULL)
7208 make_edge (NULL, BASIC_BLOCK (x), EXIT_BLOCK_PTR, EDGE_FAKE);
7211 /* This function adds a fake edge from any infinite loop to the
7212 exit block. Some optimizations require a path from each node to
7213 the exit node.
7215 See also Morgan, Figure 3.10, pp. 82-83.
7217 The current implementation is ugly, not attempting to minimize the
7218 number of inserted fake edges. To reduce the number of fake edges
7219 to insert, add fake edges from _innermost_ loops containing only
7220 nodes not reachable from the exit block. */
7222 void
7223 connect_infinite_loops_to_exit ()
7225 basic_block unvisited_block;
7227 /* Perform depth-first search in the reverse graph to find nodes
7228 reachable from the exit block. */
7229 struct depth_first_search_dsS dfs_ds;
7231 flow_dfs_compute_reverse_init (&dfs_ds);
7232 flow_dfs_compute_reverse_add_bb (&dfs_ds, EXIT_BLOCK_PTR);
7234 /* Repeatedly add fake edges, updating the unreachable nodes. */
7235 while (1)
7237 unvisited_block = flow_dfs_compute_reverse_execute (&dfs_ds);
7238 if (!unvisited_block)
7239 break;
7240 make_edge (NULL, unvisited_block, EXIT_BLOCK_PTR, EDGE_FAKE);
7241 flow_dfs_compute_reverse_add_bb (&dfs_ds, unvisited_block);
7244 flow_dfs_compute_reverse_finish (&dfs_ds);
7246 return;
7249 /* Redirect an edge's successor from one block to another. */
7251 void
7252 redirect_edge_succ (e, new_succ)
7253 edge e;
7254 basic_block new_succ;
7256 edge *pe;
7258 /* Disconnect the edge from the old successor block. */
7259 for (pe = &e->dest->pred; *pe != e; pe = &(*pe)->pred_next)
7260 continue;
7261 *pe = (*pe)->pred_next;
7263 /* Reconnect the edge to the new successor block. */
7264 e->pred_next = new_succ->pred;
7265 new_succ->pred = e;
7266 e->dest = new_succ;
7269 /* Redirect an edge's predecessor from one block to another. */
7271 void
7272 redirect_edge_pred (e, new_pred)
7273 edge e;
7274 basic_block new_pred;
7276 edge *pe;
7278 /* Disconnect the edge from the old predecessor block. */
7279 for (pe = &e->src->succ; *pe != e; pe = &(*pe)->succ_next)
7280 continue;
7281 *pe = (*pe)->succ_next;
7283 /* Reconnect the edge to the new predecessor block. */
7284 e->succ_next = new_pred->succ;
7285 new_pred->succ = e;
7286 e->src = new_pred;
7289 /* Dump the list of basic blocks in the bitmap NODES. */
7291 static void
7292 flow_nodes_print (str, nodes, file)
7293 const char *str;
7294 const sbitmap nodes;
7295 FILE *file;
7297 int node;
7299 if (! nodes)
7300 return;
7302 fprintf (file, "%s { ", str);
7303 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {fprintf (file, "%d ", node);});
7304 fputs ("}\n", file);
7308 /* Dump the list of edges in the array EDGE_LIST. */
7310 static void
7311 flow_edge_list_print (str, edge_list, num_edges, file)
7312 const char *str;
7313 const edge *edge_list;
7314 int num_edges;
7315 FILE *file;
7317 int i;
7319 if (! edge_list)
7320 return;
7322 fprintf (file, "%s { ", str);
7323 for (i = 0; i < num_edges; i++)
7324 fprintf (file, "%d->%d ", edge_list[i]->src->index,
7325 edge_list[i]->dest->index);
7326 fputs ("}\n", file);
7330 /* Dump loop related CFG information. */
7332 static void
7333 flow_loops_cfg_dump (loops, file)
7334 const struct loops *loops;
7335 FILE *file;
7337 int i;
7339 if (! loops->num || ! file || ! loops->cfg.dom)
7340 return;
7342 for (i = 0; i < n_basic_blocks; i++)
7344 edge succ;
7346 fprintf (file, ";; %d succs { ", i);
7347 for (succ = BASIC_BLOCK (i)->succ; succ; succ = succ->succ_next)
7348 fprintf (file, "%d ", succ->dest->index);
7349 flow_nodes_print ("} dom", loops->cfg.dom[i], file);
7352 /* Dump the DFS node order. */
7353 if (loops->cfg.dfs_order)
7355 fputs (";; DFS order: ", file);
7356 for (i = 0; i < n_basic_blocks; i++)
7357 fprintf (file, "%d ", loops->cfg.dfs_order[i]);
7358 fputs ("\n", file);
7360 /* Dump the reverse completion node order. */
7361 if (loops->cfg.rc_order)
7363 fputs (";; RC order: ", file);
7364 for (i = 0; i < n_basic_blocks; i++)
7365 fprintf (file, "%d ", loops->cfg.rc_order[i]);
7366 fputs ("\n", file);
7370 /* Return non-zero if the nodes of LOOP are a subset of OUTER. */
7372 static int
7373 flow_loop_nested_p (outer, loop)
7374 struct loop *outer;
7375 struct loop *loop;
7377 return sbitmap_a_subset_b_p (loop->nodes, outer->nodes);
7381 /* Dump the loop information specified by LOOP to the stream FILE
7382 using the auxiliary dump callback function LOOP_DUMP_AUX if non-null. */
7383 void
7384 flow_loop_dump (loop, file, loop_dump_aux, verbose)
7385 const struct loop *loop;
7386 FILE *file;
7387 void (*loop_dump_aux) PARAMS((const struct loop *, FILE *, int));
7388 int verbose;
7390 if (! loop || ! loop->header)
7391 return;
7393 fprintf (file, ";;\n;; Loop %d (%d to %d):%s%s\n",
7394 loop->num, INSN_UID (loop->first->head),
7395 INSN_UID (loop->last->end),
7396 loop->shared ? " shared" : "",
7397 loop->invalid ? " invalid" : "");
7398 fprintf (file, ";; header %d, latch %d, pre-header %d, first %d, last %d\n",
7399 loop->header->index, loop->latch->index,
7400 loop->pre_header ? loop->pre_header->index : -1,
7401 loop->first->index, loop->last->index);
7402 fprintf (file, ";; depth %d, level %d, outer %ld\n",
7403 loop->depth, loop->level,
7404 (long) (loop->outer ? loop->outer->num : -1));
7406 if (loop->pre_header_edges)
7407 flow_edge_list_print (";; pre-header edges", loop->pre_header_edges,
7408 loop->num_pre_header_edges, file);
7409 flow_edge_list_print (";; entry edges", loop->entry_edges,
7410 loop->num_entries, file);
7411 fprintf (file, ";; %d", loop->num_nodes);
7412 flow_nodes_print (" nodes", loop->nodes, file);
7413 flow_edge_list_print (";; exit edges", loop->exit_edges,
7414 loop->num_exits, file);
7415 if (loop->exits_doms)
7416 flow_nodes_print (";; exit doms", loop->exits_doms, file);
7417 if (loop_dump_aux)
7418 loop_dump_aux (loop, file, verbose);
7422 /* Dump the loop information specified by LOOPS to the stream FILE,
7423 using the auxiliary dump callback function LOOP_DUMP_AUX if non-null. */
7424 void
7425 flow_loops_dump (loops, file, loop_dump_aux, verbose)
7426 const struct loops *loops;
7427 FILE *file;
7428 void (*loop_dump_aux) PARAMS((const struct loop *, FILE *, int));
7429 int verbose;
7431 int i;
7432 int num_loops;
7434 num_loops = loops->num;
7435 if (! num_loops || ! file)
7436 return;
7438 fprintf (file, ";; %d loops found, %d levels\n",
7439 num_loops, loops->levels);
7441 for (i = 0; i < num_loops; i++)
7443 struct loop *loop = &loops->array[i];
7445 flow_loop_dump (loop, file, loop_dump_aux, verbose);
7447 if (loop->shared)
7449 int j;
7451 for (j = 0; j < i; j++)
7453 struct loop *oloop = &loops->array[j];
7455 if (loop->header == oloop->header)
7457 int disjoint;
7458 int smaller;
7460 smaller = loop->num_nodes < oloop->num_nodes;
7462 /* If the union of LOOP and OLOOP is different than
7463 the larger of LOOP and OLOOP then LOOP and OLOOP
7464 must be disjoint. */
7465 disjoint = ! flow_loop_nested_p (smaller ? loop : oloop,
7466 smaller ? oloop : loop);
7467 fprintf (file,
7468 ";; loop header %d shared by loops %d, %d %s\n",
7469 loop->header->index, i, j,
7470 disjoint ? "disjoint" : "nested");
7476 if (verbose)
7477 flow_loops_cfg_dump (loops, file);
7481 /* Free all the memory allocated for LOOPS. */
7483 void
7484 flow_loops_free (loops)
7485 struct loops *loops;
7487 if (loops->array)
7489 int i;
7491 if (! loops->num)
7492 abort ();
7494 /* Free the loop descriptors. */
7495 for (i = 0; i < loops->num; i++)
7497 struct loop *loop = &loops->array[i];
7499 if (loop->pre_header_edges)
7500 free (loop->pre_header_edges);
7501 if (loop->nodes)
7502 sbitmap_free (loop->nodes);
7503 if (loop->entry_edges)
7504 free (loop->entry_edges);
7505 if (loop->exit_edges)
7506 free (loop->exit_edges);
7507 if (loop->exits_doms)
7508 sbitmap_free (loop->exits_doms);
7510 free (loops->array);
7511 loops->array = NULL;
7513 if (loops->cfg.dom)
7514 sbitmap_vector_free (loops->cfg.dom);
7515 if (loops->cfg.dfs_order)
7516 free (loops->cfg.dfs_order);
7518 if (loops->shared_headers)
7519 sbitmap_free (loops->shared_headers);
7524 /* Find the entry edges into the loop with header HEADER and nodes
7525 NODES and store them in the ENTRY_EDGES array. Return the number of
7526 entry edges into the loop. */
7528 static int
7529 flow_loop_entry_edges_find (header, nodes, entry_edges)
7530 basic_block header;
7531 const sbitmap nodes;
7532 edge **entry_edges;
7534 edge e;
7535 int num_entries;
7537 *entry_edges = NULL;
7539 num_entries = 0;
7540 for (e = header->pred; e; e = e->pred_next)
7542 basic_block src = e->src;
7544 if (src == ENTRY_BLOCK_PTR || ! TEST_BIT (nodes, src->index))
7545 num_entries++;
7548 if (! num_entries)
7549 abort ();
7551 *entry_edges = (edge *) xmalloc (num_entries * sizeof (edge *));
7553 num_entries = 0;
7554 for (e = header->pred; e; e = e->pred_next)
7556 basic_block src = e->src;
7558 if (src == ENTRY_BLOCK_PTR || ! TEST_BIT (nodes, src->index))
7559 (*entry_edges)[num_entries++] = e;
7562 return num_entries;
7566 /* Find the exit edges from the loop using the bitmap of loop nodes
7567 NODES and store them in the EXIT_EDGES array. Return the number of
7568 exit edges from the loop. */
7570 static int
7571 flow_loop_exit_edges_find (nodes, exit_edges)
7572 const sbitmap nodes;
7573 edge **exit_edges;
7575 edge e;
7576 int node;
7577 int num_exits;
7579 *exit_edges = NULL;
7581 /* Check all nodes within the loop to see if there are any
7582 successors not in the loop. Note that a node may have multiple
7583 exiting edges. A node can have one jumping edge and one fallthru
7584 edge, so only one of these can exit the loop. */
7585 num_exits = 0;
7586 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
7587 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
7589 basic_block dest = e->dest;
7591 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
7592 num_exits++;
7596 if (! num_exits)
7597 return 0;
7599 *exit_edges = (edge *) xmalloc (num_exits * sizeof (edge *));
7601 /* Store all exiting edges into an array. */
7602 num_exits = 0;
7603 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
7604 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
7606 basic_block dest = e->dest;
7608 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
7609 (*exit_edges)[num_exits++] = e;
7613 return num_exits;
7617 /* Find the nodes contained within the loop with header HEADER and
7618 latch LATCH and store in NODES. Return the number of nodes within
7619 the loop. */
7621 static int
7622 flow_loop_nodes_find (header, latch, nodes)
7623 basic_block header;
7624 basic_block latch;
7625 sbitmap nodes;
7627 basic_block *stack;
7628 int sp;
7629 int num_nodes = 0;
7631 stack = (basic_block *) xmalloc (n_basic_blocks * sizeof (basic_block));
7632 sp = 0;
7634 /* Start with only the loop header in the set of loop nodes. */
7635 sbitmap_zero (nodes);
7636 SET_BIT (nodes, header->index);
7637 num_nodes++;
7638 header->loop_depth++;
7640 /* Push the loop latch onto the stack. */
7641 if (! TEST_BIT (nodes, latch->index))
7643 SET_BIT (nodes, latch->index);
7644 latch->loop_depth++;
7645 num_nodes++;
7646 stack[sp++] = latch;
7649 while (sp)
7651 basic_block node;
7652 edge e;
7654 node = stack[--sp];
7655 for (e = node->pred; e; e = e->pred_next)
7657 basic_block ancestor = e->src;
7659 /* If an ancestor is not yet marked as part of the loop, add it to
7660 the set of loop nodes and push it onto the stack. */
7661 if (ancestor != ENTRY_BLOCK_PTR
7662 && ! TEST_BIT (nodes, ancestor->index))
7664 SET_BIT (nodes, ancestor->index);
7665 ancestor->loop_depth++;
7666 num_nodes++;
7667 stack[sp++] = ancestor;
7671 free (stack);
7672 return num_nodes;
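/* In other words (a note not in the original source): the walk above
   is the standard natural loop body computation -- a backward DFS
   from the latch that stops at the already-marked header and at
   ENTRY, so the loop consists of every block that can reach the
   latch without passing through the header.  */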
7675 /* Compute the depth first search order and store in the array
7676 DFS_ORDER if non-zero, marking the nodes visited in VISITED. If
7677 RC_ORDER is non-zero, return the reverse completion number for each
7678 node. Returns the number of nodes visited. A depth first search
7679 tries to get as far away from the starting point as quickly as
7680 possible. */
7682 static int
7683 flow_depth_first_order_compute (dfs_order, rc_order)
7684 int *dfs_order;
7685 int *rc_order;
7687 edge *stack;
7688 int sp;
7689 int dfsnum = 0;
7690 int rcnum = n_basic_blocks - 1;
7691 sbitmap visited;
7693 /* Allocate stack for back-tracking up CFG. */
7694 stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
7695 sp = 0;
7697 /* Allocate bitmap to track nodes that have been visited. */
7698 visited = sbitmap_alloc (n_basic_blocks);
7700 /* None of the nodes in the CFG have been visited yet. */
7701 sbitmap_zero (visited);
7703 /* Push the first edge on to the stack. */
7704 stack[sp++] = ENTRY_BLOCK_PTR->succ;
7706 while (sp)
7708 edge e;
7709 basic_block src;
7710 basic_block dest;
7712 /* Look at the edge on the top of the stack. */
7713 e = stack[sp - 1];
7714 src = e->src;
7715 dest = e->dest;
7717 /* Check if the edge destination has been visited yet. */
7718 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
7720 /* Mark that we have visited the destination. */
7721 SET_BIT (visited, dest->index);
7723 if (dfs_order)
7724 dfs_order[dfsnum++] = dest->index;
7726 if (dest->succ)
7728 /* Since the DEST node has been visited for the first
7729 time, check its successors. */
7730 stack[sp++] = dest->succ;
7732 else
7734 /* There are no successors for the DEST node so assign
7735 its reverse completion number. */
7736 if (rc_order)
7737 rc_order[rcnum--] = dest->index;
7740 else
7742 if (! e->succ_next && src != ENTRY_BLOCK_PTR)
7744 /* There are no more successors for the SRC node
7745 so assign its reverse completion number. */
7746 if (rc_order)
7747 rc_order[rcnum--] = src->index;
7750 if (e->succ_next)
7751 stack[sp - 1] = e->succ_next;
7752 else
7753 sp--;
7757 free (stack);
7758 sbitmap_free (visited);
7760 /* The number of nodes visited should not be greater than
7761 n_basic_blocks. */
7762 if (dfsnum > n_basic_blocks)
7763 abort ();
7765 /* There are some nodes left in the CFG that are unreachable. */
7766 if (dfsnum < n_basic_blocks)
7767 abort ();
7768 return dfsnum;
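/* A worked example (illustrative, not part of the original source):
   for the diamond CFG 0 -> {1, 2}, 1 -> 3, 2 -> 3, with successor
   lists visited in that order, the walk produces

       dfs_order = { 0, 1, 3, 2 }   (preorder: order of first visit)
       rc_order  = { 0, 2, 1, 3 }   (reverse completion order, i.e.
				      reverse postorder -- a topological
				      order when the CFG is acyclic)  */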
7771 /* Compute the depth first search order on the _reverse_ graph and
7772 store in the array DFS_ORDER, marking the nodes visited in VISITED.
7773 Returns the number of nodes visited.
7775 The computation is split into four pieces:
7777 flow_dfs_compute_reverse_init () creates the necessary data
7778 structures.
7780 flow_dfs_compute_reverse_add_bb () adds a basic block to the data
7781 structures. The block will start the search.
7783 flow_dfs_compute_reverse_execute () continues (or starts) the
7784 search using the block on the top of the stack, stopping when the
7785 stack is empty.
7787 flow_dfs_compute_reverse_finish () destroys the necessary data
7788 structures.
7790 Thus, the user will probably call ..._init(), call ..._add_bb() to
7791 add a beginning basic block to the stack, call ..._execute(),
7792 possibly add another bb to the stack and again call ..._execute(),
7793 ..., and finally call _finish(). */
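/* A minimal sketch of that calling pattern (illustrative, not in the
   original source); connect_infinite_loops_to_exit above follows it,
   re-seeding the search after each fake edge it adds.  */
#if 0
static void
visit_blocks_reaching_exit ()
{
  struct depth_first_search_dsS dfs_ds;

  flow_dfs_compute_reverse_init (&dfs_ds);
  flow_dfs_compute_reverse_add_bb (&dfs_ds, EXIT_BLOCK_PTR);

  /* Walk backwards from the exit block; any block returned here was
     never visited and therefore cannot reach the exit.  */
  flow_dfs_compute_reverse_execute (&dfs_ds);

  flow_dfs_compute_reverse_finish (&dfs_ds);
}
#endif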
7795 /* Initialize the data structures used for depth-first search on the
7796 reverse graph. DATA is the depth-first search context to be
7797 initialized. The stack starts out empty; the caller seeds the
7798 search by adding one or more basic blocks with
7799 flow_dfs_compute_reverse_add_bb. */
7801 static void
7802 flow_dfs_compute_reverse_init (data)
7803 depth_first_search_ds data;
7805 /* Allocate stack for back-tracking up CFG. */
7806 data->stack =
7807 (basic_block *) xmalloc ((n_basic_blocks - (INVALID_BLOCK + 1))
7808 * sizeof (basic_block));
7809 data->sp = 0;
7811 /* Allocate bitmap to track nodes that have been visited. */
7812 data->visited_blocks = sbitmap_alloc (n_basic_blocks - (INVALID_BLOCK + 1));
7814 /* None of the nodes in the CFG have been visited yet. */
7815 sbitmap_zero (data->visited_blocks);
7817 return;
7820 /* Add the specified basic block to the top of the dfs data
7821 structures. When the search continues, it will start at the
7822 block. */
7824 static void
7825 flow_dfs_compute_reverse_add_bb (data, bb)
7826 depth_first_search_ds data;
7827 basic_block bb;
7829 data->stack[data->sp++] = bb;
7830 return;
7833 /* Continue the depth-first search through the reverse graph starting
7834 with the block at the stack's top and ending when the stack is
7835 empty. Visited nodes are marked. Returns an unvisited basic
7836 block, or NULL if there is none available. */
7838 static basic_block
7839 flow_dfs_compute_reverse_execute (data)
7840 depth_first_search_ds data;
7842 basic_block bb;
7843 edge e;
7844 int i;
7846 while (data->sp > 0)
7848 bb = data->stack[--data->sp];
7850 /* Mark that we have visited this node. */
7851 if (!TEST_BIT (data->visited_blocks, bb->index - (INVALID_BLOCK + 1)))
7853 SET_BIT (data->visited_blocks, bb->index - (INVALID_BLOCK + 1));
7855 /* Perform depth-first search on adjacent vertices. */
7856 for (e = bb->pred; e; e = e->pred_next)
7857 flow_dfs_compute_reverse_add_bb (data, e->src);
7861 /* Determine if there are unvisited basic blocks. */
7862 for (i = n_basic_blocks - (INVALID_BLOCK + 1); --i >= 0;)
7863 if (!TEST_BIT (data->visited_blocks, i))
7864 return BASIC_BLOCK (i + (INVALID_BLOCK + 1));
7865 return NULL;
7868 /* Destroy the data structures needed for depth-first search on the
7869 reverse graph. */
7871 static void
7872 flow_dfs_compute_reverse_finish (data)
7873 depth_first_search_ds data;
7875 free (data->stack);
7876 sbitmap_free (data->visited_blocks);
7877 return;
7881 /* Find the root node of the loop pre-header extended basic block and
7882 the edges along the trace from the root node to the loop header. */
7884 static void
7885 flow_loop_pre_header_scan (loop)
7886 struct loop *loop;
7888 int num = 0;
7889 basic_block ebb;
7891 loop->num_pre_header_edges = 0;
7893 if (loop->num_entries != 1)
7894 return;
7896 ebb = loop->entry_edges[0]->src;
7898 if (ebb != ENTRY_BLOCK_PTR)
7900 edge e;
7902 /* Count number of edges along trace from loop header to
7903 root of pre-header extended basic block. Usually this is
7904 only one or two edges. */
7905 num++;
7906 while (ebb->pred->src != ENTRY_BLOCK_PTR && ! ebb->pred->pred_next)
7908 ebb = ebb->pred->src;
7909 num++;
7912 loop->pre_header_edges = (edge *) xmalloc (num * sizeof (edge *));
7913 loop->num_pre_header_edges = num;
7915 /* Store the edges in the order that they are followed. The source
7916 of the first edge is the root node of the pre-header extended
7917 basic block and the destination of the last edge is
7918 the loop header. */
7919 for (e = loop->entry_edges[0]; num; e = e->src->pred)
7921 loop->pre_header_edges[--num] = e;
7927 /* Return the block for the pre-header of the loop with header
7928 HEADER where DOM specifies the dominator information. Return NULL if
7929 there is no pre-header. */
7931 static basic_block
7932 flow_loop_pre_header_find (header, dom)
7933 basic_block header;
7934 const sbitmap *dom;
7936 basic_block pre_header;
7937 edge e;
7939 /* If block p is a predecessor of the header and is the only block
7940 that the header does not dominate, then it is the pre-header. */
7941 pre_header = NULL;
7942 for (e = header->pred; e; e = e->pred_next)
7944 basic_block node = e->src;
7946 if (node != ENTRY_BLOCK_PTR
7947 && ! TEST_BIT (dom[node->index], header->index))
7949 if (pre_header == NULL)
7950 pre_header = node;
7951 else
7953 /* There are multiple edges into the header from outside
7954 the loop so there is no pre-header block. */
7955 pre_header = NULL;
7956 break;
7960 return pre_header;
7963 /* Add LOOP to the loop hierarchy tree where PREVLOOP was the loop
7964 previously added. The insertion algorithm assumes that the loops
7965 are added in the order found by a depth first search of the CFG. */
7967 static void
7968 flow_loop_tree_node_add (prevloop, loop)
7969 struct loop *prevloop;
7970 struct loop *loop;
7973 if (flow_loop_nested_p (prevloop, loop))
7975 prevloop->inner = loop;
7976 loop->outer = prevloop;
7977 return;
7980 while (prevloop->outer)
7982 if (flow_loop_nested_p (prevloop->outer, loop))
7984 prevloop->next = loop;
7985 loop->outer = prevloop->outer;
7986 return;
7988 prevloop = prevloop->outer;
7991 prevloop->next = loop;
7992 loop->outer = NULL;
7995 /* Build the loop hierarchy tree for LOOPS. */
7997 static void
7998 flow_loops_tree_build (loops)
7999 struct loops *loops;
8001 int i;
8002 int num_loops;
8004 num_loops = loops->num;
8005 if (! num_loops)
8006 return;
8008 /* Root the loop hierarchy tree with the first loop found.
8009 Since we used a depth first search this should be the
8010 outermost loop. */
8011 loops->tree = &loops->array[0];
8012 loops->tree->outer = loops->tree->inner = loops->tree->next = NULL;
8014 /* Add the remaining loops to the tree. */
8015 for (i = 1; i < num_loops; i++)
8016 flow_loop_tree_node_add (&loops->array[i - 1], &loops->array[i]);
8019 /* Helper function to compute loop nesting depth and enclosed loop level
8020 for the natural loop specified by LOOP at the loop depth DEPTH.
8021 Returns the loop level. */
8023 static int
8024 flow_loop_level_compute (loop, depth)
8025 struct loop *loop;
8026 int depth;
8028 struct loop *inner;
8029 int level = 1;
8031 if (! loop)
8032 return 0;
8034 /* Traverse loop tree assigning depth and computing level as the
8035 maximum level of all the inner loops of this loop. The loop
8036 level is equivalent to the height of the loop in the loop tree
8037 and corresponds to the number of enclosed loop levels (including
8038 itself). */
8039 for (inner = loop->inner; inner; inner = inner->next)
8041 int ilevel;
8043 ilevel = flow_loop_level_compute (inner, depth + 1) + 1;
8045 if (ilevel > level)
8046 level = ilevel;
8048 loop->level = level;
8049 loop->depth = depth;
8050 return level;
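/* A worked example (illustrative, not part of the original source):
   for a nest A { B { C }  D }, where B and D are loops directly inside
   A and C is directly inside B, the traversal assigns

       loop   depth   level
       A      1       3
       B      2       2
       C      3       1
       D      2       1

   DEPTH counts enclosing loops from the outermost in, while LEVEL is
   the height of the loop in the loop tree.  */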
8053 /* Compute the loop nesting depth and enclosed loop level for the loop
8054 hierarchy tree specified by LOOPS. Return the maximum enclosed loop
8055 level. */
8057 static int
8058 flow_loops_level_compute (loops)
8059 struct loops *loops;
8061 struct loop *loop;
8062 int level;
8063 int levels = 0;
8065 /* Traverse all the outer level loops. */
8066 for (loop = loops->tree; loop; loop = loop->next)
8068 level = flow_loop_level_compute (loop, 1);
8069 if (level > levels)
8070 levels = level;
8072 return levels;
/* Scan a single natural loop specified by LOOP, collecting the
   information about it specified by FLAGS. */

int
flow_loop_scan (loops, loop, flags)
     struct loops *loops;
     struct loop *loop;
     int flags;
{
  /* Determine prerequisites. */
  if ((flags & LOOP_EXITS_DOMS) && ! loop->exit_edges)
    flags |= LOOP_EXIT_EDGES;

  if (flags & LOOP_ENTRY_EDGES)
    {
      /* Find edges which enter the loop header.
         Note that the entry edges should only
         enter the header of a natural loop. */
      loop->num_entries
        = flow_loop_entry_edges_find (loop->header,
                                      loop->nodes,
                                      &loop->entry_edges);
    }

  if (flags & LOOP_EXIT_EDGES)
    {
      /* Find edges which exit the loop. */
      loop->num_exits
        = flow_loop_exit_edges_find (loop->nodes,
                                     &loop->exit_edges);
    }

  if (flags & LOOP_EXITS_DOMS)
    {
      int j;

      /* Determine which loop nodes dominate all the exits
         of the loop. */
      loop->exits_doms = sbitmap_alloc (n_basic_blocks);
      sbitmap_copy (loop->exits_doms, loop->nodes);
      for (j = 0; j < loop->num_exits; j++)
        sbitmap_a_and_b (loop->exits_doms, loop->exits_doms,
                         loops->cfg.dom[loop->exit_edges[j]->src->index]);

      /* The header of a natural loop must dominate
         all exits. */
      if (! TEST_BIT (loop->exits_doms, loop->header->index))
        abort ();
    }

  if (flags & LOOP_PRE_HEADER)
    {
      /* Look to see if the loop has a pre-header node. */
      loop->pre_header
        = flow_loop_pre_header_find (loop->header, loops->cfg.dom);

      /* Find the blocks within the extended basic block of
         the loop pre-header. */
      flow_loop_pre_header_scan (loop);
    }

  return 1;
}
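
/* A minimal usage sketch (not part of this file's code paths): a
   caller interested only in the exit edges of a loop might do the
   following, where dump_exit_edge is a hypothetical helper. */
#if 0
  {
    int j;

    flow_loop_scan (loops, loop, LOOP_EXIT_EDGES);
    for (j = 0; j < loop->num_exits; j++)
      dump_exit_edge (loop->exit_edges[j]);
  }
#endif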
/* Find all the natural loops in the function, save them in the LOOPS
   structure, and recalculate the loop_depth information in the basic
   block structures. FLAGS controls which loop information is collected.
   Return the number of natural loops found. */

int
flow_loops_find (loops, flags)
     struct loops *loops;
     int flags;
{
  int i;
  int b;
  int num_loops;
  edge e;
  sbitmap headers;
  sbitmap *dom;
  int *dfs_order;
  int *rc_order;

  /* This function cannot be repeatedly called with different
     flags to build up the loop information. The loop tree
     must always be built if this function is called. */
  if (! (flags & LOOP_TREE))
    abort ();

  memset (loops, 0, sizeof (*loops));

  /* Taking care of this degenerate case makes the rest of
     this code simpler. */
  if (n_basic_blocks == 0)
    return 0;
  dfs_order = NULL;
  rc_order = NULL;

  /* Compute the dominators. */
  dom = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
  calculate_dominance_info (NULL, dom, CDI_DOMINATORS);

  /* Count the number of loop edges (back edges). This should be the
     same as the number of natural loops. */
  num_loops = 0;
  for (b = 0; b < n_basic_blocks; b++)
    {
      basic_block header;

      header = BASIC_BLOCK (b);
      header->loop_depth = 0;

      for (e = header->pred; e; e = e->pred_next)
        {
          basic_block latch = e->src;

          /* Look for back edges where a predecessor is dominated
             by this block. A natural loop has a single entry
             node (header) that dominates all the nodes in the
             loop. It also has a single back edge to the header
             from a latch node. Note that multiple natural loops
             may share the same header. */
          if (b != header->index)
            abort ();

          if (latch != ENTRY_BLOCK_PTR && TEST_BIT (dom[latch->index], b))
            num_loops++;
        }
    }
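
  /* To make the back edge test concrete: in a hypothetical CFG
     B0 -> B1 -> B2 with an extra edge B2 -> B1, the edge B2 -> B1 is
     a back edge because B1 dominates its predecessor B2, so it is
     counted as one natural loop with header B1 and latch B2. */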
  if (num_loops)
    {
      /* Compute a depth first search order of the CFG so that outer
         natural loops will be found before inner natural loops. */
      dfs_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
      rc_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
      flow_depth_first_order_compute (dfs_order, rc_order);

      /* Save CFG derived information to avoid recomputing it. */
      loops->cfg.dom = dom;
      loops->cfg.dfs_order = dfs_order;
      loops->cfg.rc_order = rc_order;

      /* Allocate loop structures. */
      loops->array
        = (struct loop *) xcalloc (num_loops, sizeof (struct loop));

      headers = sbitmap_alloc (n_basic_blocks);
      sbitmap_zero (headers);

      loops->shared_headers = sbitmap_alloc (n_basic_blocks);
      sbitmap_zero (loops->shared_headers);

      /* Find and record information about all the natural loops
         in the CFG. */
      num_loops = 0;
      for (b = 0; b < n_basic_blocks; b++)
        {
          basic_block header;

          /* Search the nodes of the CFG in reverse completion order
             so that we can find outer loops first. */
          header = BASIC_BLOCK (rc_order[b]);

          /* Look for all the possible latch blocks for this header. */
          for (e = header->pred; e; e = e->pred_next)
            {
              basic_block latch = e->src;

              /* Look for back edges where a predecessor is dominated
                 by this block, as in the counting pass above. */
              if (latch != ENTRY_BLOCK_PTR
                  && TEST_BIT (dom[latch->index], header->index))
                {
                  struct loop *loop;

                  loop = loops->array + num_loops;

                  loop->header = header;
                  loop->latch = latch;
                  loop->num = num_loops;

                  num_loops++;
                }
            }
        }
      for (i = 0; i < num_loops; i++)
        {
          struct loop *loop = &loops->array[i];

          /* Keep track of blocks that are loop headers so
             that we can tell which loops should be merged. */
          if (TEST_BIT (headers, loop->header->index))
            SET_BIT (loops->shared_headers, loop->header->index);
          SET_BIT (headers, loop->header->index);

          /* Find nodes contained within the loop. */
          loop->nodes = sbitmap_alloc (n_basic_blocks);
          loop->num_nodes
            = flow_loop_nodes_find (loop->header, loop->latch, loop->nodes);

          /* Compute first and last blocks within the loop.
             These are often the same as the loop header and
             loop latch respectively, but this is not always
             the case. */
          loop->first
            = BASIC_BLOCK (sbitmap_first_set_bit (loop->nodes));
          loop->last
            = BASIC_BLOCK (sbitmap_last_set_bit (loop->nodes));

          flow_loop_scan (loops, loop, flags);
        }
      /* Natural loops with shared headers may either be disjoint or
         nested. Disjoint loops with shared headers cannot be inner
         loops and should be merged. For now just mark loops that share
         headers. */
      for (i = 0; i < num_loops; i++)
        if (TEST_BIT (loops->shared_headers, loops->array[i].header->index))
          loops->array[i].shared = 1;

      sbitmap_free (headers);
    }

  loops->num = num_loops;

  /* Build the loop hierarchy tree. */
  flow_loops_tree_build (loops);

  /* Assign the loop nesting depth and enclosed loop level for each
     loop. */
  loops->levels = flow_loops_level_compute (loops);

  return num_loops;
}
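
/* A hedged sketch of the expected calling convention (the real
   callers live in the loop optimizer, not in this file): */
#if 0
  {
    struct loops loops;

    if (flow_loops_find (&loops, LOOP_TREE | LOOP_PRE_HEADER) > 0)
      {
        /* ... consult loops.array[0 .. loops.num - 1] ... */
        flow_loops_free (&loops);
      }
  }
#endif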
/* Update the information regarding the loops in the CFG
   specified by LOOPS. */

int
flow_loops_update (loops, flags)
     struct loops *loops;
     int flags;
{
  /* One day we may want to update the current loop data. For now
     throw away the old stuff and rebuild what we need. */
  if (loops->array)
    flow_loops_free (loops);

  return flow_loops_find (loops, flags);
}
/* Return non-zero if edge E enters the header of LOOP from outside of LOOP. */

int
flow_loop_outside_edge_p (loop, e)
     const struct loop *loop;
     edge e;
{
  if (e->dest != loop->header)
    abort ();
  return (e->src == ENTRY_BLOCK_PTR)
    || ! TEST_BIT (loop->nodes, e->src->index);
}
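
/* In the pre-header illustration earlier, the edge P -> H satisfies
   this predicate while the back edge L -> H does not, since L lies
   within loop->nodes. */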
/* Clear LOG_LINKS fields of insns in a chain.
   Also clear the global_live_at_{start,end} fields of the basic block
   structures. */

void
clear_log_links (insns)
     rtx insns;
{
  rtx i;
  int b;

  for (i = insns; i; i = NEXT_INSN (i))
    if (INSN_P (i))
      LOG_LINKS (i) = 0;

  for (b = 0; b < n_basic_blocks; b++)
    {
      basic_block bb = BASIC_BLOCK (b);

      bb->global_live_at_start = NULL;
      bb->global_live_at_end = NULL;
    }

  ENTRY_BLOCK_PTR->global_live_at_end = NULL;
  EXIT_BLOCK_PTR->global_live_at_start = NULL;
}
/* Given a register bitmap, turn on the bits in a HARD_REG_SET that
   correspond to the hard registers, if any, set in that map. This
   could be done far more efficiently by having all sorts of special-cases
   with moving single words, but probably isn't worth the trouble. */

void
reg_set_to_hard_reg_set (to, from)
     HARD_REG_SET *to;
     bitmap from;
{
  int i;

  EXECUTE_IF_SET_IN_BITMAP
    (from, 0, i,
     {
       if (i >= FIRST_PSEUDO_REGISTER)
         return;
       SET_HARD_REG_BIT (*to, i);
     });
}
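
/* Note that the early RETURN above relies on EXECUTE_IF_SET_IN_BITMAP
   enumerating the set bits in increasing order: every hard register
   number is below FIRST_PSEUDO_REGISTER, so by the time the first
   pseudo register is seen, all hard registers have been processed. */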
/* Called once at initialization time. */

void
init_flow ()
{
  static int initialized;

  if (!initialized)
    {
      gcc_obstack_init (&flow_obstack);
      flow_firstobj = (char *) obstack_alloc (&flow_obstack, 0);
      initialized = 1;
    }
  else
    {
      obstack_free (&flow_obstack, flow_firstobj);
      flow_firstobj = (char *) obstack_alloc (&flow_obstack, 0);