gcc/flow.c
1 /* Data flow analysis for GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 /* This file contains the data flow analysis pass of the compiler. It
23 computes data flow information which tells combine_instructions
24 which insns to consider combining and controls register allocation.
26 Additional data flow information that is too bulky to record is
27 generated during the analysis, and is used at that time to create
28 autoincrement and autodecrement addressing.
30 The first step is dividing the function into basic blocks.
31 find_basic_blocks does this. Then life_analysis determines
32 where each register is live and where it is dead.
34 ** find_basic_blocks **
36 find_basic_blocks divides the current function's rtl into basic
37 blocks and constructs the CFG. The blocks are recorded in the
38 basic_block_info array; the CFG exists in the edge structures
39 referenced by the blocks.
41 find_basic_blocks also finds any unreachable loops and deletes them.
43 ** life_analysis **
45 life_analysis is called immediately after find_basic_blocks.
46 It uses the basic block information to determine where each
47 hard or pseudo register is live.
49 ** live-register info **
51 The information about where each register is live is in two parts:
52 the REG_NOTES of insns, and the vector basic_block->global_live_at_start.
54 basic_block->global_live_at_start has an element for each basic
55 block, and the element is a bit-vector with a bit for each hard or
56 pseudo register. The bit is 1 if the register is live at the
57 beginning of the basic block.
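   As a minimal illustration (assuming the usual regset accessors from
   basic-block.h): a pass can test whether register REGNO is live on entry
   to basic block I with
   REGNO_REG_SET_P (BASIC_BLOCK (i)->global_live_at_start, regno).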
59 Two types of elements can be added to an insn's REG_NOTES.
60 A REG_DEAD note is added to an insn's REG_NOTES for any register
61 that meets both of two conditions: The value in the register is not
62 needed in subsequent insns and the insn does not replace the value in
63 the register (in the case of multi-word hard registers, the value in
64 each register must be replaced by the insn to avoid a REG_DEAD note).
66 In the vast majority of cases, an object in a REG_DEAD note will be
67 used somewhere in the insn. The (rare) exception to this is if an
68 insn uses a multi-word hard register and only some of the registers are
69 needed in subsequent insns. In that case, REG_DEAD notes will be
70 provided for those hard registers that are not subsequently needed.
71 Partial REG_DEAD notes of this type do not occur when an insn sets
72 only some of the hard registers used in such a multi-word operand;
73 omitting REG_DEAD notes for objects stored in an insn is optional and
74 the desire to do so does not justify the complexity of the partial
75 REG_DEAD notes.
77 REG_UNUSED notes are added for each register that is set by the insn
78 but is unused subsequently (if every register set by the insn is unused
79 and the insn does not reference memory or have some other side-effect,
80 the insn is deleted instead). If only part of a multi-word hard
81 register is used in a subsequent insn, REG_UNUSED notes are made for
82 the parts that will not be used.
84 To determine which registers are live after any insn, one can
85 start from the beginning of the basic block and scan insns, noting
86 which registers are set by each insn and which die there.
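   A rough sketch of that scan (illustrative only, using the REG_NOTES
   accessors seen elsewhere in this file):

	live = copy of bb->global_live_at_start;
	for each insn in the block
	  {
	    for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	      if (REG_NOTE_KIND (note) == REG_DEAD)
	        clear the bit for REGNO (XEXP (note, 0)) in live;
	    set the bit for every register written by the insn;
	    for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	      if (REG_NOTE_KIND (note) == REG_UNUSED)
	        clear the bit for REGNO (XEXP (note, 0)) in live;
	  }

   after which LIVE describes the registers live just after the insn.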
88 ** Other actions of life_analysis **
90 life_analysis sets up the LOG_LINKS fields of insns because the
91 information needed to do so is readily available.
93 life_analysis deletes insns whose only effect is to store a value
94 that is never used.
96 life_analysis notices cases where a reference to a register as
97 a memory address can be combined with a preceding or following
98 incrementation or decrementation of the register. The separate
99 instruction to increment or decrement is deleted and the address
100 is changed to a POST_INC or similar rtx.
102 Each time an incrementing or decrementing address is created,
103 a REG_INC element is added to the insn's REG_NOTES list.
105 life_analysis fills in certain vectors containing information about
106 register usage: REG_N_REFS, REG_N_DEATHS, REG_N_SETS, REG_LIVE_LENGTH,
107 REG_N_CALLS_CROSSED and REG_BASIC_BLOCK.
109 life_analysis sets current_function_sp_is_unchanging if the function
110 doesn't modify the stack pointer. */
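/* A hedged illustration of the vectors mentioned above (the macro names come
   from regs.h): once life_analysis has filled them in, a later pass can ask,
   for example,

	if (REG_N_CALLS_CROSSED (regno) == 0)
	  ... the pseudo never lives across a call, so a call-clobbered
	      hard register is an acceptable home for it ...

   The real consumers of this information are the register allocators.  */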
112 /* TODO:
114 Split out from life_analysis:
115 - local property discovery (bb->local_live, bb->local_set)
116 - global property computation
117 - log links creation
118 - pre/post modify transformation
119 */
121 #include "config.h"
122 #include "system.h"
123 #include "tree.h"
124 #include "rtl.h"
125 #include "tm_p.h"
126 #include "hard-reg-set.h"
127 #include "basic-block.h"
128 #include "insn-config.h"
129 #include "regs.h"
130 #include "flags.h"
131 #include "output.h"
132 #include "function.h"
133 #include "except.h"
134 #include "toplev.h"
135 #include "recog.h"
136 #include "expr.h"
137 #include "ssa.h"
138 #include "timevar.h"
140 #include "obstack.h"
141 #include "splay-tree.h"
143 #define obstack_chunk_alloc xmalloc
144 #define obstack_chunk_free free
146 /* EXIT_IGNORE_STACK should be nonzero if, when returning from a function,
147 the stack pointer does not matter. The value is tested only in
148 functions that have frame pointers.
149 No definition is equivalent to always zero. */
150 #ifndef EXIT_IGNORE_STACK
151 #define EXIT_IGNORE_STACK 0
152 #endif
154 #ifndef HAVE_epilogue
155 #define HAVE_epilogue 0
156 #endif
157 #ifndef HAVE_prologue
158 #define HAVE_prologue 0
159 #endif
160 #ifndef HAVE_sibcall_epilogue
161 #define HAVE_sibcall_epilogue 0
162 #endif
164 #ifndef LOCAL_REGNO
165 #define LOCAL_REGNO(REGNO) 0
166 #endif
167 #ifndef EPILOGUE_USES
168 #define EPILOGUE_USES(REGNO) 0
169 #endif
171 #ifdef HAVE_conditional_execution
172 #ifndef REVERSE_CONDEXEC_PREDICATES_P
173 #define REVERSE_CONDEXEC_PREDICATES_P(x, y) ((x) == reverse_condition (y))
174 #endif
175 #endif
177 /* The obstack on which the flow graph components are allocated. */
179 struct obstack flow_obstack;
180 static char *flow_firstobj;
182 /* Number of basic blocks in the current function. */
184 int n_basic_blocks;
186 /* Number of edges in the current function. */
188 int n_edges;
190 /* The basic block array. */
192 varray_type basic_block_info;
194 /* The special entry and exit blocks. */
196 struct basic_block_def entry_exit_blocks[2]
197 = {{NULL, /* head */
198 NULL, /* end */
199 NULL, /* head_tree */
200 NULL, /* end_tree */
201 NULL, /* pred */
202 NULL, /* succ */
203 NULL, /* local_set */
204 NULL, /* cond_local_set */
205 NULL, /* global_live_at_start */
206 NULL, /* global_live_at_end */
207 NULL, /* aux */
208 ENTRY_BLOCK, /* index */
209 0, /* loop_depth */
210 0, /* count */
211 0, /* frequency */
212 0 /* flags */
213 },
214 {
215 NULL, /* head */
216 NULL, /* end */
217 NULL, /* head_tree */
218 NULL, /* end_tree */
219 NULL, /* pred */
220 NULL, /* succ */
221 NULL, /* local_set */
222 NULL, /* cond_local_set */
223 NULL, /* global_live_at_start */
224 NULL, /* global_live_at_end */
225 NULL, /* aux */
226 EXIT_BLOCK, /* index */
227 0, /* loop_depth */
228 0, /* count */
229 0, /* frequency */
230 0 /* flags */
231 }
232 };
234 /* Nonzero if the second flow pass has completed. */
235 int flow2_completed;
237 /* Maximum register number used in this function, plus one. */
239 int max_regno;
241 /* Indexed by n, giving various register information */
243 varray_type reg_n_info;
245 /* Size of a regset for the current function,
246 in (1) bytes and (2) elements. */
248 int regset_bytes;
249 int regset_size;
251 /* Regset of regs live when calls to `setjmp'-like functions happen. */
252 /* ??? Does this exist only for the setjmp-clobbered warning message? */
254 regset regs_live_at_setjmp;
256 /* List made of EXPR_LIST rtx's which gives pairs of pseudo registers
257 that have to go in the same hard reg.
258 The first two regs in the list are a pair, and the next two
259 are another pair, etc. */
260 rtx regs_may_share;
262 /* Callback that determines if it's ok for a function to have no
263 noreturn attribute. */
264 int (*lang_missing_noreturn_ok_p) PARAMS ((tree));
266 /* Set of registers that may be eliminable. These are handled specially
267 in updating regs_ever_live. */
269 static HARD_REG_SET elim_reg_set;
271 /* The basic block structure for every insn, indexed by uid. */
273 varray_type basic_block_for_insn;
275 /* The labels mentioned in non-jump rtl. Valid during find_basic_blocks. */
276 /* ??? Should probably be using LABEL_NUSES instead. It would take a
277 bit of surgery to be able to use or co-opt the routines in jump. */
279 static rtx label_value_list;
280 static rtx tail_recursion_label_list;
282 /* Holds information for tracking conditional register life information. */
283 struct reg_cond_life_info
284 {
285 /* A boolean expression of conditions under which a register is dead. */
286 rtx condition;
287 /* Conditions under which a register is dead at the basic block end. */
288 rtx orig_condition;
290 /* A boolean expression of conditions under which a register has been
291 stored into. */
292 rtx stores;
294 /* ??? Could store mask of bytes that are dead, so that we could finally
295 track lifetimes of multi-word registers accessed via subregs. */
296 };
298 /* For use in communicating between propagate_block and its subroutines.
299 Holds all information needed to compute life and def-use information. */
301 struct propagate_block_info
302 {
303 /* The basic block we're considering. */
304 basic_block bb;
306 /* Bit N is set if register N is conditionally or unconditionally live. */
307 regset reg_live;
309 /* Bit N is set if register N is set this insn. */
310 regset new_set;
312 /* Element N is the next insn that uses (hard or pseudo) register N
313 within the current basic block; or zero, if there is no such insn. */
314 rtx *reg_next_use;
316 /* Contains a list of all the MEMs we are tracking for dead store
317 elimination. */
318 rtx mem_set_list;
320 /* If non-null, record the set of registers set unconditionally in the
321 basic block. */
322 regset local_set;
324 /* If non-null, record the set of registers set conditionally in the
325 basic block. */
326 regset cond_local_set;
328 #ifdef HAVE_conditional_execution
329 /* Indexed by register number, holds a reg_cond_life_info for each
330 register that is not unconditionally live or dead. */
331 splay_tree reg_cond_dead;
333 /* Bit N is set if register N is in an expression in reg_cond_dead. */
334 regset reg_cond_reg;
335 #endif
337 /* The length of mem_set_list. */
338 int mem_set_list_len;
340 /* Non-zero if the value of CC0 is live. */
341 int cc0_live;
343 /* Flags controlling the set of information propagate_block collects. */
344 int flags;
345 };
347 /* Maximum length of pbi->mem_set_list before we start dropping
348 new elements on the floor. */
349 #define MAX_MEM_SET_LIST_LEN 100
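/* A usage sketch for the structure above: callers do not normally build one
   by hand; they seed a regset with the registers live at the end of a block
   and let propagate_block do the work, as split_block does further down:

	COPY_REG_SET (new_bb->global_live_at_start, bb->global_live_at_end);
	propagate_block (new_bb, new_bb->global_live_at_start, NULL, NULL, 0);

   propagate_block then scans the block backwards, turning live-at-end
   information into live-at-start information.  */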
351 /* Store the data structures necessary for depth-first search. */
352 struct depth_first_search_dsS {
353 /* stack for backtracking during the algorithm */
354 basic_block *stack;
356 /* number of edges in the stack. That is, positions 0, ..., sp-1
357 have edges. */
358 unsigned int sp;
360 /* record of basic blocks already seen by depth-first search */
361 sbitmap visited_blocks;
362 };
363 typedef struct depth_first_search_dsS *depth_first_search_ds;
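/* A minimal sketch of how this structure is meant to be driven; the real code
   is in flow_dfs_compute_reverse_init/add_bb/execute below, and the helpers
   (xmalloc, sbitmap_alloc, sbitmap_zero) are the ones already used in this
   file:

	data->stack = (basic_block *) xmalloc (n_basic_blocks * sizeof (basic_block));
	data->sp = 0;
	data->visited_blocks = sbitmap_alloc (n_basic_blocks);
	sbitmap_zero (data->visited_blocks);
	...
	data->stack[data->sp++] = bb;		(push)
	bb = data->stack[--data->sp];		(pop)

   so that positions 0 .. sp-1 always hold the blocks still to be backtracked
   through.  */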
365 /* Have print_rtl_and_abort give the same information that fancy_abort
366 does. */
367 #define print_rtl_and_abort() \
368 print_rtl_and_abort_fcn (__FILE__, __LINE__, __FUNCTION__)
370 /* Forward declarations */
371 static bool try_crossjump_to_edge PARAMS ((int, edge, edge));
372 static bool try_crossjump_bb PARAMS ((int, basic_block));
373 static bool outgoing_edges_match PARAMS ((basic_block, basic_block));
374 static int flow_find_cross_jump PARAMS ((int, basic_block, basic_block,
375 rtx *, rtx *));
376 static int count_basic_blocks PARAMS ((rtx));
377 static void find_basic_blocks_1 PARAMS ((rtx));
378 static rtx find_label_refs PARAMS ((rtx, rtx));
379 static void make_edges PARAMS ((rtx, int, int, int));
380 static void make_label_edge PARAMS ((sbitmap *, basic_block,
381 rtx, int));
382 static void make_eh_edge PARAMS ((sbitmap *, basic_block, rtx));
384 static void commit_one_edge_insertion PARAMS ((edge));
386 static void delete_unreachable_blocks PARAMS ((void));
387 static int can_delete_note_p PARAMS ((rtx));
388 static int can_delete_label_p PARAMS ((rtx));
389 static int tail_recursion_label_p PARAMS ((rtx));
390 static int merge_blocks_move_predecessor_nojumps PARAMS ((basic_block,
391 basic_block));
392 static int merge_blocks_move_successor_nojumps PARAMS ((basic_block,
393 basic_block));
394 static int merge_blocks PARAMS ((edge,basic_block,basic_block,
395 int));
396 static bool try_optimize_cfg PARAMS ((int));
397 static bool can_fallthru PARAMS ((basic_block, basic_block));
398 static bool try_redirect_by_replacing_jump PARAMS ((edge, basic_block));
399 static bool try_simplify_condjump PARAMS ((basic_block));
400 static bool try_forward_edges PARAMS ((int, basic_block));
401 static void tidy_fallthru_edges PARAMS ((void));
402 static int verify_wide_reg_1 PARAMS ((rtx *, void *));
403 static void verify_wide_reg PARAMS ((int, rtx, rtx));
404 static void verify_local_live_at_start PARAMS ((regset, basic_block));
405 static void notice_stack_pointer_modification_1 PARAMS ((rtx, rtx, void *));
406 static void notice_stack_pointer_modification PARAMS ((rtx));
407 static void mark_reg PARAMS ((rtx, void *));
408 static void mark_regs_live_at_end PARAMS ((regset));
409 static int set_phi_alternative_reg PARAMS ((rtx, int, int, void *));
410 static void calculate_global_regs_live PARAMS ((sbitmap, sbitmap, int));
411 static void propagate_block_delete_insn PARAMS ((basic_block, rtx));
412 static rtx propagate_block_delete_libcall PARAMS ((basic_block, rtx, rtx));
413 static int insn_dead_p PARAMS ((struct propagate_block_info *,
414 rtx, int, rtx));
415 static int libcall_dead_p PARAMS ((struct propagate_block_info *,
416 rtx, rtx));
417 static void mark_set_regs PARAMS ((struct propagate_block_info *,
418 rtx, rtx));
419 static void mark_set_1 PARAMS ((struct propagate_block_info *,
420 enum rtx_code, rtx, rtx,
421 rtx, int));
422 #ifdef HAVE_conditional_execution
423 static int mark_regno_cond_dead PARAMS ((struct propagate_block_info *,
424 int, rtx));
425 static void free_reg_cond_life_info PARAMS ((splay_tree_value));
426 static int flush_reg_cond_reg_1 PARAMS ((splay_tree_node, void *));
427 static void flush_reg_cond_reg PARAMS ((struct propagate_block_info *,
428 int));
429 static rtx elim_reg_cond PARAMS ((rtx, unsigned int));
430 static rtx ior_reg_cond PARAMS ((rtx, rtx, int));
431 static rtx not_reg_cond PARAMS ((rtx));
432 static rtx and_reg_cond PARAMS ((rtx, rtx, int));
433 #endif
434 #ifdef AUTO_INC_DEC
435 static void attempt_auto_inc PARAMS ((struct propagate_block_info *,
436 rtx, rtx, rtx, rtx, rtx));
437 static void find_auto_inc PARAMS ((struct propagate_block_info *,
438 rtx, rtx));
439 static int try_pre_increment_1 PARAMS ((struct propagate_block_info *,
440 rtx));
441 static int try_pre_increment PARAMS ((rtx, rtx, HOST_WIDE_INT));
442 #endif
443 static void mark_used_reg PARAMS ((struct propagate_block_info *,
444 rtx, rtx, rtx));
445 static void mark_used_regs PARAMS ((struct propagate_block_info *,
446 rtx, rtx, rtx));
447 void dump_flow_info PARAMS ((FILE *));
448 void debug_flow_info PARAMS ((void));
449 static void print_rtl_and_abort_fcn PARAMS ((const char *, int,
450 const char *))
451 ATTRIBUTE_NORETURN;
453 static void add_to_mem_set_list PARAMS ((struct propagate_block_info *,
454 rtx));
455 static void invalidate_mems_from_autoinc PARAMS ((struct propagate_block_info *,
456 rtx));
457 static void invalidate_mems_from_set PARAMS ((struct propagate_block_info *,
458 rtx));
459 static void remove_fake_successors PARAMS ((basic_block));
460 static void flow_nodes_print PARAMS ((const char *, const sbitmap,
461 FILE *));
462 static void flow_edge_list_print PARAMS ((const char *, const edge *,
463 int, FILE *));
464 static void flow_loops_cfg_dump PARAMS ((const struct loops *,
465 FILE *));
466 static int flow_loop_nested_p PARAMS ((struct loop *,
467 struct loop *));
468 static int flow_loop_entry_edges_find PARAMS ((basic_block, const sbitmap,
469 edge **));
470 static int flow_loop_exit_edges_find PARAMS ((const sbitmap, edge **));
471 static int flow_loop_nodes_find PARAMS ((basic_block, basic_block, sbitmap));
472 static void flow_dfs_compute_reverse_init
473 PARAMS ((depth_first_search_ds));
474 static void flow_dfs_compute_reverse_add_bb
475 PARAMS ((depth_first_search_ds, basic_block));
476 static basic_block flow_dfs_compute_reverse_execute
477 PARAMS ((depth_first_search_ds));
478 static void flow_dfs_compute_reverse_finish
479 PARAMS ((depth_first_search_ds));
480 static void flow_loop_pre_header_scan PARAMS ((struct loop *));
481 static basic_block flow_loop_pre_header_find PARAMS ((basic_block,
482 const sbitmap *));
483 static void flow_loop_tree_node_add PARAMS ((struct loop *, struct loop *));
484 static void flow_loops_tree_build PARAMS ((struct loops *));
485 static int flow_loop_level_compute PARAMS ((struct loop *, int));
486 static int flow_loops_level_compute PARAMS ((struct loops *));
487 static void delete_dead_jumptables PARAMS ((void));
488 static bool back_edge_of_syntactic_loop_p PARAMS ((basic_block, basic_block));
489 static bool need_fake_edge_p PARAMS ((rtx));
491 /* Find basic blocks of the current function.
492 F is the first insn of the function and NREGS the number of register
493 numbers in use. */
495 void
496 find_basic_blocks (f, nregs, file)
497 rtx f;
498 int nregs ATTRIBUTE_UNUSED;
499 FILE *file ATTRIBUTE_UNUSED;
501 int max_uid;
502 timevar_push (TV_CFG);
504 /* Flush out existing data. */
505 if (basic_block_info != NULL)
507 int i;
509 clear_edges ();
511 /* Clear bb->aux on all extant basic blocks. We'll use this as a
512 tag for reuse during create_basic_block, just in case some pass
513 copies around basic block notes improperly. */
514 for (i = 0; i < n_basic_blocks; ++i)
515 BASIC_BLOCK (i)->aux = NULL;
517 VARRAY_FREE (basic_block_info);
520 n_basic_blocks = count_basic_blocks (f);
522 /* Size the basic block table. The actual structures will be allocated
523 by find_basic_blocks_1, since we want to keep the structure pointers
524 stable across calls to find_basic_blocks. */
525 /* ??? This whole issue would be much simpler if we called find_basic_blocks
526 exactly once, and thereafter we don't have a single long chain of
527 instructions at all until close to the end of compilation when we
528 actually lay them out. */
530 VARRAY_BB_INIT (basic_block_info, n_basic_blocks, "basic_block_info");
532 find_basic_blocks_1 (f);
534 /* Record the block to which an insn belongs. */
535 /* ??? This should be done another way, by which (perhaps) a label is
536 tagged directly with the basic block that it starts. It is used for
537 more than that currently, but IMO that is the only valid use. */
539 max_uid = get_max_uid ();
540 #ifdef AUTO_INC_DEC
541 /* Leave space for insns life_analysis makes in some cases for auto-inc.
542 These cases are rare, so we don't need too much space. */
543 max_uid += max_uid / 10;
544 #endif
546 compute_bb_for_insn (max_uid);
548 /* Discover the edges of our cfg. */
549 make_edges (label_value_list, 0, n_basic_blocks - 1, 0);
551 /* Do very simple cleanup now, for the benefit of code that runs between
552 here and cleanup_cfg, e.g. thread_prologue_and_epilogue_insns. */
553 tidy_fallthru_edges ();
555 mark_critical_edges ();
557 #ifdef ENABLE_CHECKING
558 verify_flow_info ();
559 #endif
560 timevar_pop (TV_CFG);
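/* A hedged usage note: a pass that has rearranged the insn chain typically
   rebuilds the CFG with something like

	find_basic_blocks (get_insns (), max_reg_num (), rtl_dump_file);
	cleanup_cfg (0);

   The second and third arguments are ATTRIBUTE_UNUSED above and appear to be
   retained only for the sake of existing callers.  */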
563 void
564 check_function_return_warnings ()
566 if (warn_missing_noreturn
567 && !TREE_THIS_VOLATILE (cfun->decl)
568 && EXIT_BLOCK_PTR->pred == NULL
569 && (lang_missing_noreturn_ok_p
570 && !lang_missing_noreturn_ok_p (cfun->decl)))
571 warning ("function might be possible candidate for attribute `noreturn'");
573 /* If we have a path to EXIT, then we do return. */
574 if (TREE_THIS_VOLATILE (cfun->decl)
575 && EXIT_BLOCK_PTR->pred != NULL)
576 warning ("`noreturn' function does return");
578 /* If the clobber_return_insn appears in some basic block, then we
579 do reach the end without returning a value. */
580 else if (warn_return_type
581 && cfun->x_clobber_return_insn != NULL
582 && EXIT_BLOCK_PTR->pred != NULL)
584 int max_uid = get_max_uid ();
586 /* If clobber_return_insn was excised by jump1, then renumber_insns
587 can make max_uid smaller than the number still recorded in our rtx.
588 That's fine, since this is a quick way of verifying that the insn
589 is no longer in the chain. */
590 if (INSN_UID (cfun->x_clobber_return_insn) < max_uid)
592 /* Recompute insn->block mapping, since the initial mapping is
593 set before we delete unreachable blocks. */
594 compute_bb_for_insn (max_uid);
596 if (BLOCK_FOR_INSN (cfun->x_clobber_return_insn) != NULL)
597 warning ("control reaches end of non-void function");
602 /* Count the basic blocks of the function. */
604 static int
605 count_basic_blocks (f)
606 rtx f;
608 register rtx insn;
609 register RTX_CODE prev_code;
610 register int count = 0;
611 int saw_abnormal_edge = 0;
613 prev_code = JUMP_INSN;
614 for (insn = f; insn; insn = NEXT_INSN (insn))
616 enum rtx_code code = GET_CODE (insn);
618 if (code == CODE_LABEL
619 || (GET_RTX_CLASS (code) == 'i'
620 && (prev_code == JUMP_INSN
621 || prev_code == BARRIER
622 || saw_abnormal_edge)))
624 saw_abnormal_edge = 0;
625 count++;
628 /* Record whether this insn created an edge. */
629 if (code == CALL_INSN)
631 rtx note;
633 /* If there is a nonlocal goto label and the specified
634 region number isn't -1, we have an edge. */
635 if (nonlocal_goto_handler_labels
636 && ((note = find_reg_note (insn, REG_EH_REGION, NULL_RTX)) == 0
637 || INTVAL (XEXP (note, 0)) >= 0))
638 saw_abnormal_edge = 1;
640 else if (can_throw_internal (insn))
641 saw_abnormal_edge = 1;
643 else if (flag_non_call_exceptions
644 && code == INSN
645 && can_throw_internal (insn))
646 saw_abnormal_edge = 1;
648 if (code != NOTE)
649 prev_code = code;
652 /* The rest of the compiler works a bit smoother when we don't have to
653 check for the edge case of do-nothing functions with no basic blocks. */
654 if (count == 0)
656 emit_insn (gen_rtx_USE (VOIDmode, const0_rtx));
657 count = 1;
660 return count;
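/* A worked example of the counting rule above (illustrative only): the
   stream

	insn, jump_insn, code_label, insn, barrier, code_label, insn

   yields a count of three -- one block starting at the first insn (since
   PREV_CODE is initialized to JUMP_INSN) and one at each CODE_LABEL; the
   remaining insns simply continue the block they are in.  */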
663 /* Scan a list of insns for labels referred to other than by jumps.
664 This is used to scan the alternatives of a call placeholder. */
665 static rtx
666 find_label_refs (f, lvl)
667 rtx f;
668 rtx lvl;
670 rtx insn;
672 for (insn = f; insn; insn = NEXT_INSN (insn))
673 if (INSN_P (insn) && GET_CODE (insn) != JUMP_INSN)
675 rtx note;
677 /* Make a list of all labels referred to other than by jumps
678 (which just don't have the REG_LABEL notes).
680 Make a special exception for labels followed by an ADDR*VEC,
681 as this would be a part of the tablejump setup code.
683 Make a special exception to registers loaded with label
684 values just before jump insns that use them. */
686 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
687 if (REG_NOTE_KIND (note) == REG_LABEL)
689 rtx lab = XEXP (note, 0), next;
691 if ((next = next_nonnote_insn (lab)) != NULL
692 && GET_CODE (next) == JUMP_INSN
693 && (GET_CODE (PATTERN (next)) == ADDR_VEC
694 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
696 else if (GET_CODE (lab) == NOTE)
698 else if (GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
699 && find_reg_note (NEXT_INSN (insn), REG_LABEL, lab))
701 else
702 lvl = alloc_EXPR_LIST (0, XEXP (note, 0), lvl);
706 return lvl;
709 /* Assume that someone emitted code with control flow instructions to the
710 basic block. Update the data structure. */
711 void
712 find_sub_basic_blocks (bb)
713 basic_block bb;
715 rtx insn = bb->head;
716 rtx end = bb->end;
717 rtx jump_insn = NULL_RTX;
718 edge falltru = 0;
719 basic_block first_bb = bb;
720 int i;
722 if (insn == bb->end)
723 return;
725 if (GET_CODE (insn) == CODE_LABEL)
726 insn = NEXT_INSN (insn);
728 /* Scan insn chain and try to find new basic block boundaries. */
729 while (1)
731 enum rtx_code code = GET_CODE (insn);
732 switch (code)
734 case BARRIER:
735 if (!jump_insn)
736 abort ();
737 break;
738 /* On code label, split current basic block. */
739 case CODE_LABEL:
740 falltru = split_block (bb, PREV_INSN (insn));
741 if (jump_insn)
742 bb->end = jump_insn;
743 bb = falltru->dest;
744 remove_edge (falltru);
745 jump_insn = 0;
746 if (LABEL_ALTERNATE_NAME (insn))
747 make_edge (NULL, ENTRY_BLOCK_PTR, bb, 0);
748 break;
749 case INSN:
750 case JUMP_INSN:
751 /* In case we've previously split insn on the JUMP_INSN, move the
752 block header to proper place. */
753 if (jump_insn)
755 falltru = split_block (bb, PREV_INSN (insn));
756 bb->end = jump_insn;
757 bb = falltru->dest;
758 remove_edge (falltru);
759 jump_insn = 0;
761 /* We need some special care for those expressions. */
762 if (GET_CODE (insn) == JUMP_INSN)
764 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
765 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
766 abort();
767 jump_insn = insn;
769 break;
770 default:
771 break;
773 if (insn == end)
774 break;
775 insn = NEXT_INSN (insn);
778 /* In case the expander replaced a normal insn by a sequence terminating in
779 a return and a barrier, or possibly by another sequence not behaving like
780 an ordinary jump, we need to take care to move the basic block boundary. */
781 if (jump_insn && GET_CODE (bb->end) != JUMP_INSN)
782 bb->end = jump_insn;
784 /* We've possibly replaced the conditional jump by a conditional jump
785 followed by cleanup at the fallthru edge, so the outgoing edges may
786 be dead. */
787 purge_dead_edges (bb);
789 /* Now re-scan and wire in all edges. This expects simple (conditional)
790 jumps at the end of each new basic block. */
791 make_edges (NULL, first_bb->index, bb->index, 1);
793 /* Update branch probabilities. Expect only (un)conditional jumps
794 to have been created, with only the forward edges present. */
795 for (i = first_bb->index; i <= bb->index; i++)
797 edge e,f;
798 basic_block b = BASIC_BLOCK (i);
799 if (b != first_bb)
801 b->count = 0;
802 b->frequency = 0;
803 for (e = b->pred; e; e=e->pred_next)
805 b->count += e->count;
806 b->frequency += EDGE_FREQUENCY (e);
809 if (b->succ && b->succ->succ_next && !b->succ->succ_next->succ_next)
811 rtx note = find_reg_note (b->end, REG_BR_PROB, NULL);
812 int probability;
814 if (!note)
815 continue;
816 probability = INTVAL (XEXP (find_reg_note (b->end,
817 REG_BR_PROB,
818 NULL), 0));
819 e = BRANCH_EDGE (b);
820 e->probability = probability;
821 e->count = ((b->count * probability + REG_BR_PROB_BASE / 2)
822 / REG_BR_PROB_BASE);
823 f = FALLTHRU_EDGE (b);
824 f->probability = REG_BR_PROB_BASE - probability;
825 f->count = b->count - e->count;
827 if (b->succ && !b->succ->succ_next)
829 e = b->succ;
830 e->probability = REG_BR_PROB_BASE;
831 e->count = b->count;
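/* A small worked example of the arithmetic above, with REG_BR_PROB_BASE
   being 10000: if the REG_BR_PROB note holds 9000 and b->count is 200, the
   branch edge gets probability 9000 and count (200*9000 + 5000) / 10000
   = 180, while the fallthru edge gets probability 1000 and count 20.  */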
836 /* Find all basic blocks of the function whose first insn is F.
838 Collect the labels whose addresses are taken into label_value_list; this
839 will be used in make_edges for computed gotos. */
841 static void
842 find_basic_blocks_1 (f)
843 rtx f;
845 register rtx insn, next;
846 int i = 0;
847 rtx bb_note = NULL_RTX;
848 rtx lvl = NULL_RTX;
849 rtx trll = NULL_RTX;
850 rtx head = NULL_RTX;
851 rtx end = NULL_RTX;
853 /* We process the instructions in a slightly different way than we did
854 previously. This is so that we see a NOTE_BASIC_BLOCK after we have
855 closed out the previous block, so that it gets attached at the proper
856 place. Since this form should be equivalent to the previous,
857 count_basic_blocks continues to use the old form as a check. */
859 for (insn = f; insn; insn = next)
861 enum rtx_code code = GET_CODE (insn);
863 next = NEXT_INSN (insn);
865 switch (code)
867 case NOTE:
869 int kind = NOTE_LINE_NUMBER (insn);
871 /* Look for basic block notes with which to keep the
872 basic_block_info pointers stable. Unthread the note now;
873 we'll put it back at the right place in create_basic_block.
874 Or not at all if we've already found a note in this block. */
875 if (kind == NOTE_INSN_BASIC_BLOCK)
877 if (bb_note == NULL_RTX)
878 bb_note = insn;
879 else
880 next = flow_delete_insn (insn);
882 break;
885 case CODE_LABEL:
886 /* A basic block starts at a label. If we've closed one off due
887 to a barrier or some such, no need to do it again. */
888 if (head != NULL_RTX)
890 create_basic_block (i++, head, end, bb_note);
891 bb_note = NULL_RTX;
894 head = end = insn;
895 break;
897 case JUMP_INSN:
898 /* A basic block ends at a jump. */
899 if (head == NULL_RTX)
900 head = insn;
901 else
903 /* ??? Make a special check for table jumps. The way this
904 happens is truly and amazingly gross. We are about to
905 create a basic block that contains just a code label and
906 an addr*vec jump insn. Worse, an addr_diff_vec creates
907 its own natural loop.
909 Prevent this bit of brain damage, pasting things together
910 correctly in make_edges.
912 The correct solution involves emitting the table directly
913 on the tablejump instruction as a note, or JUMP_LABEL. */
915 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
916 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
918 head = end = NULL;
919 n_basic_blocks--;
920 break;
923 end = insn;
924 goto new_bb_inclusive;
926 case BARRIER:
927 /* A basic block ends at a barrier. It may be that an unconditional
928 jump already closed the basic block -- no need to do it again. */
929 if (head == NULL_RTX)
930 break;
931 goto new_bb_exclusive;
933 case CALL_INSN:
935 /* Record whether this call created an edge. */
936 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
937 int region = (note ? INTVAL (XEXP (note, 0)) : 0);
939 if (GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
941 /* Scan each of the alternatives for label refs. */
942 lvl = find_label_refs (XEXP (PATTERN (insn), 0), lvl);
943 lvl = find_label_refs (XEXP (PATTERN (insn), 1), lvl);
944 lvl = find_label_refs (XEXP (PATTERN (insn), 2), lvl);
945 /* Record its tail recursion label, if any. */
946 if (XEXP (PATTERN (insn), 3) != NULL_RTX)
947 trll = alloc_EXPR_LIST (0, XEXP (PATTERN (insn), 3), trll);
950 /* A basic block ends at a call that can either throw or
951 do a non-local goto. */
952 if ((nonlocal_goto_handler_labels && region >= 0)
953 || can_throw_internal (insn))
955 new_bb_inclusive:
956 if (head == NULL_RTX)
957 head = insn;
958 end = insn;
960 new_bb_exclusive:
961 create_basic_block (i++, head, end, bb_note);
962 head = end = NULL_RTX;
963 bb_note = NULL_RTX;
964 break;
967 /* Fall through. */
969 case INSN:
970 /* Non-call exceptions generate new blocks just like calls. */
971 if (flag_non_call_exceptions && can_throw_internal (insn))
972 goto new_bb_inclusive;
974 if (head == NULL_RTX)
975 head = insn;
976 end = insn;
977 break;
979 default:
980 abort ();
983 if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
985 rtx note;
987 /* Make a list of all labels referred to other than by jumps.
989 Make a special exception for labels followed by an ADDR*VEC,
990 as this would be a part of the tablejump setup code.
992 Make a special exception to registers loaded with label
993 values just before jump insns that use them. */
995 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
996 if (REG_NOTE_KIND (note) == REG_LABEL)
998 rtx lab = XEXP (note, 0), next;
1000 if ((next = next_nonnote_insn (lab)) != NULL
1001 && GET_CODE (next) == JUMP_INSN
1002 && (GET_CODE (PATTERN (next)) == ADDR_VEC
1003 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
1005 else if (GET_CODE (lab) == NOTE)
1007 else if (GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
1008 && find_reg_note (NEXT_INSN (insn), REG_LABEL, lab))
1010 else
1011 lvl = alloc_EXPR_LIST (0, XEXP (note, 0), lvl);
1016 if (head != NULL_RTX)
1017 create_basic_block (i++, head, end, bb_note);
1018 else if (bb_note)
1019 flow_delete_insn (bb_note);
1021 if (i != n_basic_blocks)
1022 abort ();
1024 label_value_list = lvl;
1025 tail_recursion_label_list = trll;
1028 /* Tidy the CFG by deleting unreachable code and whatnot. */
1030 void
1031 cleanup_cfg (mode)
1032 int mode;
1034 int i;
1036 timevar_push (TV_CLEANUP_CFG);
1037 delete_unreachable_blocks ();
1038 if (try_optimize_cfg (mode))
1039 delete_unreachable_blocks ();
1040 mark_critical_edges ();
1042 /* Kill the data we won't maintain. */
1043 free_EXPR_LIST_list (&label_value_list);
1044 free_EXPR_LIST_list (&tail_recursion_label_list);
1045 timevar_pop (TV_CLEANUP_CFG);
1047 /* Clear bb->aux on all basic blocks. */
1048 for (i = 0; i < n_basic_blocks; ++i)
1049 BASIC_BLOCK (i)->aux = NULL;
1052 /* Create a new basic block consisting of the instructions between
1053 HEAD and END inclusive. Reuses the note and basic block struct
1054 in BB_NOTE, if any. */
1056 void
1057 create_basic_block (index, head, end, bb_note)
1058 int index;
1059 rtx head, end, bb_note;
1061 basic_block bb;
1063 if (bb_note
1064 && ! RTX_INTEGRATED_P (bb_note)
1065 && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
1066 && bb->aux == NULL)
1068 /* If we found an existing note, thread it back onto the chain. */
1070 rtx after;
1072 if (GET_CODE (head) == CODE_LABEL)
1073 after = head;
1074 else
1076 after = PREV_INSN (head);
1077 head = bb_note;
1080 if (after != bb_note && NEXT_INSN (after) != bb_note)
1081 reorder_insns (bb_note, bb_note, after);
1083 else
1085 /* Otherwise we must create a note and a basic block structure.
1086 Since we allow basic block structs in rtl, give the struct
1087 the same lifetime by allocating it off the function obstack
1088 rather than using malloc. */
1090 bb = (basic_block) obstack_alloc (&flow_obstack, sizeof (*bb));
1091 memset (bb, 0, sizeof (*bb));
1093 if (GET_CODE (head) == CODE_LABEL)
1094 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
1095 else
1097 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
1098 head = bb_note;
1100 NOTE_BASIC_BLOCK (bb_note) = bb;
1103 /* Always include the bb note in the block. */
1104 if (NEXT_INSN (end) == bb_note)
1105 end = bb_note;
1107 bb->head = head;
1108 bb->end = end;
1109 bb->index = index;
1110 BASIC_BLOCK (index) = bb;
1112 /* Tag the block so that we know it has been used when considering
1113 other basic block notes. */
1114 bb->aux = bb;
1117 /* Return the INSN immediately following the NOTE_INSN_BASIC_BLOCK
1118 note associated with the BLOCK. */
1120 rtx
1121 first_insn_after_basic_block_note (block)
1122 basic_block block;
1124 rtx insn;
1126 /* Get the first instruction in the block. */
1127 insn = block->head;
1129 if (insn == NULL_RTX)
1130 return NULL_RTX;
1131 if (GET_CODE (insn) == CODE_LABEL)
1132 insn = NEXT_INSN (insn);
1133 if (!NOTE_INSN_BASIC_BLOCK_P (insn))
1134 abort ();
1136 return NEXT_INSN (insn);
1139 /* Records the basic block struct in BB_FOR_INSN, for every instruction
1140 indexed by INSN_UID. MAX is the size of the array. */
1142 void
1143 compute_bb_for_insn (max)
1144 int max;
1146 int i;
1148 if (basic_block_for_insn)
1149 VARRAY_FREE (basic_block_for_insn);
1150 VARRAY_BB_INIT (basic_block_for_insn, max, "basic_block_for_insn");
1152 for (i = 0; i < n_basic_blocks; ++i)
1154 basic_block bb = BASIC_BLOCK (i);
1155 rtx insn, end;
1157 end = bb->end;
1158 insn = bb->head;
1159 while (1)
1161 int uid = INSN_UID (insn);
1162 if (uid < max)
1163 VARRAY_BB (basic_block_for_insn, uid) = bb;
1164 if (insn == end)
1165 break;
1166 insn = NEXT_INSN (insn);
1171 /* Free the memory associated with the edge structures. */
1173 void
1174 clear_edges ()
1176 int i;
1177 edge n, e;
1179 for (i = 0; i < n_basic_blocks; ++i)
1181 basic_block bb = BASIC_BLOCK (i);
1183 for (e = bb->succ; e; e = n)
1185 n = e->succ_next;
1186 free (e);
1189 bb->succ = 0;
1190 bb->pred = 0;
1193 for (e = ENTRY_BLOCK_PTR->succ; e; e = n)
1195 n = e->succ_next;
1196 free (e);
1199 ENTRY_BLOCK_PTR->succ = 0;
1200 EXIT_BLOCK_PTR->pred = 0;
1202 n_edges = 0;
1205 /* Identify the edges between basic blocks MIN to MAX.
1207 LABEL_VALUE_LIST is a list of labels mentioned in non-jump rtl; blocks
1208 that are otherwise unreachable may be reachable through them (via a
1210 computed jump) or with a non-local goto. UPDATE_P is nonzero when the
1211 edges of already-existing blocks are being recomputed. */
1213 static void
1214 make_edges (label_value_list, min, max, update_p)
1215 rtx label_value_list;
1216 int min, max, update_p;
1218 int i;
1219 sbitmap *edge_cache = NULL;
1221 /* Assume no computed jump; revise as we create edges. */
1222 current_function_has_computed_jump = 0;
1224 /* Heavy use of computed goto in machine-generated code can lead to
1225 nearly fully-connected CFGs. In that case we spend a significant
1226 amount of time searching the edge lists for duplicates. */
1227 if (forced_labels || label_value_list)
1229 edge_cache = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
1230 sbitmap_vector_zero (edge_cache, n_basic_blocks);
1232 if (update_p)
1233 for (i = min; i <= max; ++i)
1235 edge e;
1236 for (e = BASIC_BLOCK (i)->succ; e ; e = e->succ_next)
1237 if (e->dest != EXIT_BLOCK_PTR)
1238 SET_BIT (edge_cache[i], e->dest->index);
1242 /* By nature of the way these get numbered, block 0 is always the entry. */
1243 make_edge (edge_cache, ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU);
1245 for (i = min; i <= max; ++i)
1247 basic_block bb = BASIC_BLOCK (i);
1248 rtx insn, x;
1249 enum rtx_code code;
1250 int force_fallthru = 0;
1252 if (GET_CODE (bb->head) == CODE_LABEL
1253 && LABEL_ALTERNATE_NAME (bb->head))
1254 make_edge (NULL, ENTRY_BLOCK_PTR, bb, 0);
1256 /* Examine the last instruction of the block, and discover the
1257 ways we can leave the block. */
1259 insn = bb->end;
1260 code = GET_CODE (insn);
1262 /* A branch. */
1263 if (code == JUMP_INSN)
1265 rtx tmp;
1267 /* Recognize exception handling placeholders. */
1268 if (GET_CODE (PATTERN (insn)) == RESX)
1269 make_eh_edge (edge_cache, bb, insn);
1271 /* Recognize a non-local goto as a branch outside the
1272 current function. */
1273 else if (find_reg_note (insn, REG_NON_LOCAL_GOTO, NULL_RTX))
1276 /* ??? Recognize a tablejump and do the right thing. */
1277 else if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
1278 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
1279 && GET_CODE (tmp) == JUMP_INSN
1280 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
1281 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
1283 rtvec vec;
1284 int j;
1286 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
1287 vec = XVEC (PATTERN (tmp), 0);
1288 else
1289 vec = XVEC (PATTERN (tmp), 1);
1291 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
1292 make_label_edge (edge_cache, bb,
1293 XEXP (RTVEC_ELT (vec, j), 0), 0);
1295 /* Some targets (eg, ARM) emit a conditional jump that also
1296 contains the out-of-range target. Scan for these and
1297 add an edge if necessary. */
1298 if ((tmp = single_set (insn)) != NULL
1299 && SET_DEST (tmp) == pc_rtx
1300 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
1301 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
1302 make_label_edge (edge_cache, bb,
1303 XEXP (XEXP (SET_SRC (tmp), 2), 0), 0);
1305 #ifdef CASE_DROPS_THROUGH
1306 /* Silly VAXen. The ADDR_VEC is going to be in the way of
1307 us naturally detecting fallthru into the next block. */
1308 force_fallthru = 1;
1309 #endif
1312 /* If this is a computed jump, then mark it as reaching
1313 everything on the label_value_list and forced_labels list. */
1314 else if (computed_jump_p (insn))
1316 current_function_has_computed_jump = 1;
1318 for (x = label_value_list; x; x = XEXP (x, 1))
1319 make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
1321 for (x = forced_labels; x; x = XEXP (x, 1))
1322 make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
1325 /* A return insn creates an edge to the exit block. */
1326 else if (returnjump_p (insn))
1327 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);
1329 /* Otherwise, we have a plain conditional or unconditional jump. */
1330 else
1332 if (! JUMP_LABEL (insn))
1333 abort ();
1334 make_label_edge (edge_cache, bb, JUMP_LABEL (insn), 0);
1338 /* If this is a sibling call insn, then this is in effect a
1339 combined call and return, and so we need an edge to the
1340 exit block. No need to worry about EH edges, since we
1341 wouldn't have created the sibling call in the first place. */
1343 if (code == CALL_INSN && SIBLING_CALL_P (insn))
1344 make_edge (edge_cache, bb, EXIT_BLOCK_PTR,
1345 EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
1347 /* If this is a CALL_INSN, then mark it as reaching the active EH
1348 handler for this CALL_INSN. If we're handling non-call
1349 exceptions then any insn can reach any of the active handlers.
1351 Also mark the CALL_INSN as reaching any nonlocal goto handler. */
1353 else if (code == CALL_INSN || flag_non_call_exceptions)
1355 /* Add any appropriate EH edges. */
1356 make_eh_edge (edge_cache, bb, insn);
1358 if (code == CALL_INSN && nonlocal_goto_handler_labels)
1360 /* ??? This could be made smarter: in some cases it's possible
1361 to tell that certain calls will not do a nonlocal goto.
1363 For example, if the nested functions that do the nonlocal
1364 gotos do not have their addresses taken, then only calls to
1365 those functions or to other nested functions that use them
1366 could possibly do nonlocal gotos. */
1367 /* We do know that a REG_EH_REGION note with a value less
1368 than 0 is guaranteed not to perform a non-local goto. */
1369 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1370 if (!note || INTVAL (XEXP (note, 0)) >= 0)
1371 for (x = nonlocal_goto_handler_labels; x; x = XEXP (x, 1))
1372 make_label_edge (edge_cache, bb, XEXP (x, 0),
1373 EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
1377 /* Find out if we can drop through to the next block. */
1378 insn = next_nonnote_insn (insn);
1379 if (!insn || (i + 1 == n_basic_blocks && force_fallthru))
1380 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
1381 else if (i + 1 < n_basic_blocks)
1383 rtx tmp = BLOCK_HEAD (i + 1);
1384 if (GET_CODE (tmp) == NOTE)
1385 tmp = next_nonnote_insn (tmp);
1386 if (force_fallthru || insn == tmp)
1387 make_edge (edge_cache, bb, BASIC_BLOCK (i + 1), EDGE_FALLTHRU);
1391 if (edge_cache)
1392 sbitmap_vector_free (edge_cache);
1395 /* Create an edge between two basic blocks. FLAGS are auxiliary information
1396 about the edge that is accumulated between calls. */
1398 void
1399 make_edge (edge_cache, src, dst, flags)
1400 sbitmap *edge_cache;
1401 basic_block src, dst;
1402 int flags;
1404 int use_edge_cache;
1405 edge e;
1407 /* Don't bother with edge cache for ENTRY or EXIT; there aren't that
1408 many edges to them, and we didn't allocate memory for it. */
1409 use_edge_cache = (edge_cache
1410 && src != ENTRY_BLOCK_PTR
1411 && dst != EXIT_BLOCK_PTR);
1413 /* Make sure we don't add duplicate edges. */
1414 switch (use_edge_cache)
1416 default:
1417 /* Quick test for non-existence of the edge. */
1418 if (! TEST_BIT (edge_cache[src->index], dst->index))
1419 break;
1421 /* The edge exists; early exit if no work to do. */
1422 if (flags == 0)
1423 return;
1425 /* FALLTHRU */
1426 case 0:
1427 for (e = src->succ; e; e = e->succ_next)
1428 if (e->dest == dst)
1430 e->flags |= flags;
1431 return;
1433 break;
1436 e = (edge) xcalloc (1, sizeof (*e));
1437 n_edges++;
1439 e->succ_next = src->succ;
1440 e->pred_next = dst->pred;
1441 e->src = src;
1442 e->dest = dst;
1443 e->flags = flags;
1445 src->succ = e;
1446 dst->pred = e;
1448 if (use_edge_cache)
1449 SET_BIT (edge_cache[src->index], dst->index);
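/* Typical calls appear throughout this file, e.g. from make_edges above:

	make_edge (edge_cache, bb, BASIC_BLOCK (i + 1), EDGE_FALLTHRU);

   Passing a NULL cache is always safe; the function then falls back to the
   linear walk of SRC->succ to suppress duplicate edges.  */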
1452 /* Create an edge from a basic block to a label. */
1454 static void
1455 make_label_edge (edge_cache, src, label, flags)
1456 sbitmap *edge_cache;
1457 basic_block src;
1458 rtx label;
1459 int flags;
1461 if (GET_CODE (label) != CODE_LABEL)
1462 abort ();
1464 /* If the label was never emitted, this insn is junk, but avoid a
1465 crash trying to refer to BLOCK_FOR_INSN (label). This can happen
1466 as a result of a syntax error and a diagnostic has already been
1467 printed. */
1469 if (INSN_UID (label) == 0)
1470 return;
1472 make_edge (edge_cache, src, BLOCK_FOR_INSN (label), flags);
1475 /* Create the edges generated by INSN in REGION. */
1477 static void
1478 make_eh_edge (edge_cache, src, insn)
1479 sbitmap *edge_cache;
1480 basic_block src;
1481 rtx insn;
1483 int is_call = (GET_CODE (insn) == CALL_INSN ? EDGE_ABNORMAL_CALL : 0);
1484 rtx handlers, i;
1486 handlers = reachable_handlers (insn);
1488 for (i = handlers; i; i = XEXP (i, 1))
1489 make_label_edge (edge_cache, src, XEXP (i, 0),
1490 EDGE_ABNORMAL | EDGE_EH | is_call);
1492 free_INSN_LIST_list (&handlers);
1495 /* Identify critical edges and set the bits appropriately. */
1497 void
1498 mark_critical_edges ()
1500 int i, n = n_basic_blocks;
1501 basic_block bb;
1503 /* We begin with the entry block. This is not terribly important now,
1504 but could be if a front end (Fortran) implemented alternate entry
1505 points. */
1506 bb = ENTRY_BLOCK_PTR;
1507 i = -1;
1509 while (1)
1511 edge e;
1513 /* (1) Critical edges must have a source with multiple successors. */
1514 if (bb->succ && bb->succ->succ_next)
1516 for (e = bb->succ; e; e = e->succ_next)
1518 /* (2) Critical edges must have a destination with multiple
1519 predecessors. Note that we know there is at least one
1520 predecessor -- the edge we followed to get here. */
1521 if (e->dest->pred->pred_next)
1522 e->flags |= EDGE_CRITICAL;
1523 else
1524 e->flags &= ~EDGE_CRITICAL;
1527 else
1529 for (e = bb->succ; e; e = e->succ_next)
1530 e->flags &= ~EDGE_CRITICAL;
1533 if (++i >= n)
1534 break;
1535 bb = BASIC_BLOCK (i);
1539 /* Mark the back edges in DFS traversal.
1540 Return non-zero if a loop (natural or otherwise) is present.
1541 Inspired by Depth_First_Search_PP described in:
1543 Advanced Compiler Design and Implementation
1544 Steven Muchnick
1545 Morgan Kaufmann, 1997
1547 and heavily borrowed from flow_depth_first_order_compute. */
1549 bool
1550 mark_dfs_back_edges ()
1552 edge *stack;
1553 int *pre;
1554 int *post;
1555 int sp;
1556 int prenum = 1;
1557 int postnum = 1;
1558 sbitmap visited;
1559 bool found = false;
1561 /* Allocate the preorder and postorder number arrays. */
1562 pre = (int *) xcalloc (n_basic_blocks, sizeof (int));
1563 post = (int *) xcalloc (n_basic_blocks, sizeof (int));
1565 /* Allocate stack for back-tracking up CFG. */
1566 stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
1567 sp = 0;
1569 /* Allocate bitmap to track nodes that have been visited. */
1570 visited = sbitmap_alloc (n_basic_blocks);
1572 /* None of the nodes in the CFG have been visited yet. */
1573 sbitmap_zero (visited);
1575 /* Push the first edge on to the stack. */
1576 stack[sp++] = ENTRY_BLOCK_PTR->succ;
1578 while (sp)
1580 edge e;
1581 basic_block src;
1582 basic_block dest;
1584 /* Look at the edge on the top of the stack. */
1585 e = stack[sp - 1];
1586 src = e->src;
1587 dest = e->dest;
1588 e->flags &= ~EDGE_DFS_BACK;
1590 /* Check if the edge destination has been visited yet. */
1591 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
1593 /* Mark that we have visited the destination. */
1594 SET_BIT (visited, dest->index);
1596 pre[dest->index] = prenum++;
1598 if (dest->succ)
1600 /* Since the DEST node has been visited for the first
1601 time, check its successors. */
1602 stack[sp++] = dest->succ;
1604 else
1605 post[dest->index] = postnum++;
1607 else
1609 if (dest != EXIT_BLOCK_PTR && src != ENTRY_BLOCK_PTR
1610 && pre[src->index] >= pre[dest->index]
1611 && post[dest->index] == 0)
1612 e->flags |= EDGE_DFS_BACK, found = true;
1614 if (! e->succ_next && src != ENTRY_BLOCK_PTR)
1615 post[src->index] = postnum++;
1617 if (e->succ_next)
1618 stack[sp - 1] = e->succ_next;
1619 else
1620 sp--;
1624 free (pre);
1625 free (post);
1626 free (stack);
1627 sbitmap_free (visited);
1629 return found;
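/* An illustrative case for the back-edge test above: with blocks 0 -> 1 -> 2
   and an extra edge 2 -> 1, the search assigns pre[1] < pre[2], and post[1]
   is still zero when the edge 2 -> 1 is examined (block 1 is still on the
   current search path), so that edge gets EDGE_DFS_BACK and the function
   returns true.  */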
1632 /* Split a block BB after insn INSN creating a new fallthru edge.
1633 Return the new edge. Note that to keep other parts of the compiler happy,
1634 this function renumbers all the basic blocks so that the new
1635 one has a number one greater than the block split. */
1637 edge
1638 split_block (bb, insn)
1639 basic_block bb;
1640 rtx insn;
1642 basic_block new_bb;
1643 edge new_edge;
1644 edge e;
1645 rtx bb_note;
1646 int i, j;
1648 /* There is no point splitting the block after its end. */
1649 if (bb->end == insn)
1650 return 0;
1652 /* Create the new structures. */
1653 new_bb = (basic_block) obstack_alloc (&flow_obstack, sizeof (*new_bb));
1654 new_edge = (edge) xcalloc (1, sizeof (*new_edge));
1655 n_edges++;
1657 memset (new_bb, 0, sizeof (*new_bb));
1659 new_bb->head = NEXT_INSN (insn);
1660 new_bb->end = bb->end;
1661 bb->end = insn;
1663 new_bb->succ = bb->succ;
1664 bb->succ = new_edge;
1665 new_bb->pred = new_edge;
1666 new_bb->count = bb->count;
1667 new_bb->frequency = bb->frequency;
1668 new_bb->loop_depth = bb->loop_depth;
1670 new_edge->src = bb;
1671 new_edge->dest = new_bb;
1672 new_edge->flags = EDGE_FALLTHRU;
1673 new_edge->probability = REG_BR_PROB_BASE;
1674 new_edge->count = bb->count;
1676 /* Redirect the src of the successor edges of bb to point to new_bb. */
1677 for (e = new_bb->succ; e; e = e->succ_next)
1678 e->src = new_bb;
1680 /* Place the new block just after the block being split. */
1681 VARRAY_GROW (basic_block_info, ++n_basic_blocks);
1683 /* Some parts of the compiler expect blocks to be numbered in
1684 sequential order, so insert the new block immediately after the
1685 block being split. */
1686 j = bb->index;
1687 for (i = n_basic_blocks - 1; i > j + 1; --i)
1689 basic_block tmp = BASIC_BLOCK (i - 1);
1690 BASIC_BLOCK (i) = tmp;
1691 tmp->index = i;
1694 BASIC_BLOCK (i) = new_bb;
1695 new_bb->index = i;
1697 if (GET_CODE (new_bb->head) == CODE_LABEL)
1699 /* Create the basic block note. */
1700 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK,
1701 new_bb->head);
1702 NOTE_BASIC_BLOCK (bb_note) = new_bb;
1704 /* If the only thing in this new block was the label, make sure
1705 the block note gets included. */
1706 if (new_bb->head == new_bb->end)
1707 new_bb->end = bb_note;
1709 else
1711 /* Create the basic block note. */
1712 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK,
1713 new_bb->head);
1714 NOTE_BASIC_BLOCK (bb_note) = new_bb;
1715 new_bb->head = bb_note;
1718 update_bb_for_insn (new_bb);
1720 if (bb->global_live_at_start)
1722 new_bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1723 new_bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1724 COPY_REG_SET (new_bb->global_live_at_end, bb->global_live_at_end);
1726 /* We now have to calculate which registers are live at the end
1727 of the split basic block and at the start of the new basic
1728 block. Start with those registers that are known to be live
1729 at the end of the original basic block and get
1730 propagate_block to determine which registers are live. */
1731 COPY_REG_SET (new_bb->global_live_at_start, bb->global_live_at_end);
1732 propagate_block (new_bb, new_bb->global_live_at_start, NULL, NULL, 0);
1733 COPY_REG_SET (bb->global_live_at_end,
1734 new_bb->global_live_at_start);
1737 return new_edge;
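/* Usage sketch: callers keep the returned fallthru edge and pick the new
   block up from its destination, as find_sub_basic_blocks does above:

	falltru = split_block (bb, PREV_INSN (insn));
	bb = falltru->dest;

   bearing in mind that every block index above the split point has just been
   shifted up by one.  */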
1740 /* Return label in the head of basic block. Create one if it doesn't exist. */
1741 rtx
1742 block_label (block)
1743 basic_block block;
1745 if (block == EXIT_BLOCK_PTR)
1746 return NULL_RTX;
1747 if (GET_CODE (block->head) != CODE_LABEL)
1749 block->head = emit_label_before (gen_label_rtx (), block->head);
1750 if (basic_block_for_insn)
1751 set_block_for_insn (block->head, block);
1753 return block->head;
1756 /* Return true if the block has no effect and only forwards control flow to
1757 its single destination. */
1758 bool
1759 forwarder_block_p (bb)
1760 basic_block bb;
1762 rtx insn = bb->head;
1763 if (bb == EXIT_BLOCK_PTR || bb == ENTRY_BLOCK_PTR
1764 || !bb->succ || bb->succ->succ_next)
1765 return false;
1767 while (insn != bb->end)
1769 if (active_insn_p (insn))
1770 return false;
1771 insn = NEXT_INSN (insn);
1773 return (!active_insn_p (insn)
1774 || (GET_CODE (insn) == JUMP_INSN && onlyjump_p (insn)));
1777 /* Return nonzero if we can reach TARGET from SRC by falling through. */
1778 static bool
1779 can_fallthru (src, target)
1780 basic_block src, target;
1782 rtx insn = src->end;
1783 rtx insn2 = target->head;
1785 if (src->index + 1 == target->index && !active_insn_p (insn2))
1786 insn2 = next_active_insn (insn2);
1787 /* ??? Later we may add code to move jump tables offline. */
1788 return next_active_insn (insn) == insn2;
1791 /* Attempt to perform edge redirection by replacing a possibly complex jump
1792 instruction with an unconditional jump, or by removing the jump completely.
1793 This can apply only if all edges now point to the same block.
1795 The parameters and return values are equivalent to those of
1796 redirect_edge_and_branch. */
1797 static bool
1798 try_redirect_by_replacing_jump (e, target)
1799 edge e;
1800 basic_block target;
1802 basic_block src = e->src;
1803 rtx insn = src->end, kill_from;
1804 edge tmp;
1805 rtx set;
1806 int fallthru = 0;
1808 /* Verify that all targets will be TARGET. */
1809 for (tmp = src->succ; tmp; tmp = tmp->succ_next)
1810 if (tmp->dest != target && tmp != e)
1811 break;
1812 if (tmp || !onlyjump_p (insn))
1813 return false;
1815 /* Avoid removing branch with side effects. */
1816 set = single_set (insn);
1817 if (!set || side_effects_p (set))
1818 return false;
1820 /* In case we zap a conditional jump, we'll need to kill
1821 the cc0 setter too. */
1822 kill_from = insn;
1823 #ifdef HAVE_cc0
1824 if (reg_mentioned_p (cc0_rtx, PATTERN (insn)))
1825 kill_from = PREV_INSN (insn);
1826 #endif
1828 /* See if we can create the fallthru edge. */
1829 if (can_fallthru (src, target))
1831 src->end = PREV_INSN (kill_from);
1832 if (rtl_dump_file)
1833 fprintf (rtl_dump_file, "Removing jump %i.\n", INSN_UID (insn));
1834 fallthru = 1;
1836 /* Selectively unlink the whole insn chain. */
1837 flow_delete_insn_chain (kill_from, PREV_INSN (target->head));
1839 /* If this already is simplejump, redirect it. */
1840 else if (simplejump_p (insn))
1842 if (e->dest == target)
1843 return false;
1844 if (rtl_dump_file)
1845 fprintf (rtl_dump_file, "Redirecting jump %i from %i to %i.\n",
1846 INSN_UID (insn), e->dest->index, target->index);
1847 redirect_jump (insn, block_label (target), 0);
1849 /* Or replace possibly complicated jump insn by simple jump insn. */
1850 else
1852 rtx target_label = block_label (target);
1853 rtx barrier;
1855 src->end = emit_jump_insn_before (gen_jump (target_label), kill_from);
1856 JUMP_LABEL (src->end) = target_label;
1857 LABEL_NUSES (target_label)++;
1858 if (basic_block_for_insn)
1859 set_block_for_new_insns (src->end, src);
1860 if (rtl_dump_file)
1861 fprintf (rtl_dump_file, "Replacing insn %i by jump %i\n",
1862 INSN_UID (insn), INSN_UID (src->end));
1864 flow_delete_insn_chain (kill_from, insn);
1866 barrier = next_nonnote_insn (src->end);
1867 if (!barrier || GET_CODE (barrier) != BARRIER)
1868 emit_barrier_after (src->end);
1871 /* Keep only one edge out and set proper flags. */
1872 while (src->succ->succ_next)
1873 remove_edge (src->succ);
1874 e = src->succ;
1875 if (fallthru)
1876 e->flags = EDGE_FALLTHRU;
1877 else
1878 e->flags = 0;
1879 e->probability = REG_BR_PROB_BASE;
1880 e->count = src->count;
1882 /* We don't want a block to end on a line-number note since that has
1883 the potential of changing the code between -g and not -g. */
1884 while (GET_CODE (e->src->end) == NOTE
1885 && NOTE_LINE_NUMBER (e->src->end) >= 0)
1887 rtx prev = PREV_INSN (e->src->end);
1888 flow_delete_insn (e->src->end);
1889 e->src->end = prev;
1892 if (e->dest != target)
1893 redirect_edge_succ (e, target);
1894 return true;
1897 /* Return last loop_beg note appearing after INSN, before start of next
1898 basic block. Return INSN if there are no such notes.
1900 When emitting a jump to redirect a fallthru edge, it should always
1901 appear after the LOOP_BEG notes, as the loop optimizer expects a loop
1902 to either start with a fallthru edge or with a jump (following the
1903 LOOP_BEG note) to the loop exit test. */
1905 last_loop_beg_note (insn)
1906 rtx insn;
1908 rtx last = insn;
1909 insn = NEXT_INSN (insn);
1910 while (GET_CODE (insn) == NOTE
1911 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK)
1913 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
1914 last = insn;
1915 insn = NEXT_INSN (insn);
1917 return last;
1920 /* Attempt to change code to redirect edge E to TARGET.
1921 Don't do so at the expense of adding new instructions or reordering
1922 basic blocks.
1924 The function can also be called with the edge destination equal to
1925 TARGET. Then it should try the simplifications and do nothing if
1926 none is possible.
1928 Return true if the transformation succeeded. We still return false in case
1929 E already pointed to TARGET and we did not manage to simplify the
1930 instruction stream. */
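/* Illustrative sketch (not code from this file; E and NEW_TARGET are
   placeholder names): a cleanup pass that only wants cheap redirections
   simply tests the return value,

     if (redirect_edge_and_branch (e, new_target))
       changed = true;

   exactly as try_forward_edges does later in this file.  */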
1931 bool
1932 redirect_edge_and_branch (e, target)
1933 edge e;
1934 basic_block target;
1936 rtx tmp;
1937 rtx old_label = e->dest->head;
1938 basic_block src = e->src;
1939 rtx insn = src->end;
1941 if (e->flags & EDGE_COMPLEX)
1942 return false;
1944 if (try_redirect_by_replacing_jump (e, target))
1945 return true;
1946 /* Do this fast path late, as we want the above code to simplify cases
1947 where it is called on the single edge leaving a basic block containing a
1948 nontrivial jump insn. */
1949 else if (e->dest == target)
1950 return false;
1952 /* We can only redirect non-fallthru edges of jump insn. */
1953 if (e->flags & EDGE_FALLTHRU)
1954 return false;
1955 if (GET_CODE (insn) != JUMP_INSN)
1956 return false;
1958 /* Recognize a tablejump and adjust all matching cases. */
1959 if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
1960 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
1961 && GET_CODE (tmp) == JUMP_INSN
1962 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
1963 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
1965 rtvec vec;
1966 int j;
1967 rtx new_label = block_label (target);
1969 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
1970 vec = XVEC (PATTERN (tmp), 0);
1971 else
1972 vec = XVEC (PATTERN (tmp), 1);
1974 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
1975 if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
1977 RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (Pmode, new_label);
1978 --LABEL_NUSES (old_label);
1979 ++LABEL_NUSES (new_label);
1982 /* Handle casesi dispatch insns. */
1983 if ((tmp = single_set (insn)) != NULL
1984 && SET_DEST (tmp) == pc_rtx
1985 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
1986 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
1987 && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
1989 XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (VOIDmode,
1990 new_label);
1991 --LABEL_NUSES (old_label);
1992 ++LABEL_NUSES (new_label);
1995 else
1997 /* ??? We may play games with moving the named labels from
1998 one basic block to the other in case only one computed_jump is
1999 available. */
2000 if (computed_jump_p (insn))
2001 return false;
2003 /* A return instruction can't be redirected. */
2004 if (returnjump_p (insn))
2005 return false;
2007 /* If the insn doesn't go where we think, we're confused. */
2008 if (JUMP_LABEL (insn) != old_label)
2009 abort ();
2010 redirect_jump (insn, block_label (target), 0);
2013 if (rtl_dump_file)
2014 fprintf (rtl_dump_file, "Edge %i->%i redirected to %i\n",
2015 e->src->index, e->dest->index, target->index);
2016 if (e->dest != target)
2017 redirect_edge_succ_nodup (e, target);
2018 return true;
2021 /* Redirect edge even at the expense of creating new jump insn or
2022 basic block. Return new basic block if created, NULL otherwise.
2023 Abort if conversion is impossible. */
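/* Illustrative sketch (placeholder names, not code from this file): since
   this variant may create a new block, the caller must check the result,

     basic_block jump_bb = redirect_edge_and_branch_force (e, target);
     if (jump_bb)
       ... account for the newly created block ...

   as merge_blocks does further down when rewiring a fallthru edge.  */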
2024 basic_block
2025 redirect_edge_and_branch_force (e, target)
2026 edge e;
2027 basic_block target;
2029 basic_block new_bb;
2030 edge new_edge;
2031 rtx label;
2032 rtx bb_note;
2033 int i, j;
2035 if (redirect_edge_and_branch (e, target))
2036 return NULL;
2037 if (e->dest == target)
2038 return NULL;
2039 if (e->flags & EDGE_ABNORMAL)
2040 abort ();
2041 if (!(e->flags & EDGE_FALLTHRU))
2042 abort ();
2044 e->flags &= ~EDGE_FALLTHRU;
2045 label = block_label (target);
2046 /* Case of the fallthru block. */
2047 if (!e->src->succ->succ_next)
2049 e->src->end = emit_jump_insn_after (gen_jump (label),
2050 last_loop_beg_note (e->src->end));
2051 JUMP_LABEL (e->src->end) = label;
2052 LABEL_NUSES (label)++;
2053 if (basic_block_for_insn)
2054 set_block_for_new_insns (e->src->end, e->src);
2055 emit_barrier_after (e->src->end);
2056 if (rtl_dump_file)
2057 fprintf (rtl_dump_file,
2058 "Emitting jump insn %i to redirect edge %i->%i to %i\n",
2059 INSN_UID (e->src->end), e->src->index, e->dest->index,
2060 target->index);
2061 redirect_edge_succ (e, target);
2062 return NULL;
2064 /* Redirecting fallthru edge of the conditional needs extra work. */
2066 if (rtl_dump_file)
2067 fprintf (rtl_dump_file,
2068 "Emitting jump insn %i in new BB to redirect edge %i->%i to %i\n",
2069 INSN_UID (e->src->end), e->src->index, e->dest->index,
2070 target->index);
2072 /* Create the new structures. */
2073 new_bb = (basic_block) obstack_alloc (&flow_obstack, sizeof (*new_bb));
2074 new_edge = (edge) xcalloc (1, sizeof (*new_edge));
2075 n_edges++;
2077 memset (new_bb, 0, sizeof (*new_bb));
2079 new_bb->end = new_bb->head = last_loop_beg_note (e->src->end);
2080 new_bb->succ = NULL;
2081 new_bb->pred = new_edge;
2082 new_bb->count = e->count;
2083 new_bb->frequency = EDGE_FREQUENCY (e);
2084 new_bb->loop_depth = e->dest->loop_depth;
2086 new_edge->flags = EDGE_FALLTHRU;
2087 new_edge->probability = e->probability;
2088 new_edge->count = e->count;
2090 if (target->global_live_at_start)
2092 new_bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
2093 new_bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
2094 COPY_REG_SET (new_bb->global_live_at_start,
2095 target->global_live_at_start);
2096 COPY_REG_SET (new_bb->global_live_at_end, new_bb->global_live_at_start);
2099 /* Wire edge in. */
2100 new_edge->src = e->src;
2101 new_edge->dest = new_bb;
2102 new_edge->succ_next = e->src->succ;
2103 e->src->succ = new_edge;
2104 new_edge->pred_next = NULL;
2106 /* Redirect old edge. */
2107 redirect_edge_succ (e, target);
2108 redirect_edge_pred (e, new_bb);
2109 e->probability = REG_BR_PROB_BASE;
2111 /* Place the new block just after the block being split. */
2112 VARRAY_GROW (basic_block_info, ++n_basic_blocks);
2114 /* Some parts of the compiler expect blocks to be numbered in
2115 sequential order, so insert the new block immediately after the
2116 block being split. */
2117 j = new_edge->src->index;
2118 for (i = n_basic_blocks - 1; i > j + 1; --i)
2120 basic_block tmp = BASIC_BLOCK (i - 1);
2121 BASIC_BLOCK (i) = tmp;
2122 tmp->index = i;
2125 BASIC_BLOCK (i) = new_bb;
2126 new_bb->index = i;
2128 /* Create the basic block note. */
2129 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, new_bb->head);
2130 NOTE_BASIC_BLOCK (bb_note) = new_bb;
2131 new_bb->head = bb_note;
2133 new_bb->end = emit_jump_insn_after (gen_jump (label), new_bb->head);
2134 JUMP_LABEL (new_bb->end) = label;
2135 LABEL_NUSES (label)++;
2136 if (basic_block_for_insn)
2137 set_block_for_new_insns (new_bb->end, new_bb);
2138 emit_barrier_after (new_bb->end);
2139 return new_bb;
2142 /* Helper function for split_edge. Return true in case the edge from BB2
2143 to BB1 is a back edge of a syntactic loop. */
2144 static bool
2145 back_edge_of_syntactic_loop_p (bb1, bb2)
2146 basic_block bb1, bb2;
2148 rtx insn;
2149 int count = 0;
2151 if (bb1->index > bb2->index)
2152 return false;
2154 if (bb1->index == bb2->index)
2155 return true;
2157 for (insn = bb1->end; insn != bb2->head && count >= 0;
2158 insn = NEXT_INSN (insn))
2159 if (GET_CODE (insn) == NOTE)
2161 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
2162 count++;
2163 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END)
2164 count--;
2167 return count >= 0;
2170 /* Split a (typically critical) edge. Return the new block.
2171 Abort on abnormal edges.
2173 ??? The code generally expects to be called on critical edges.
2174 The case of a block ending in an unconditional jump to a
2175 block with multiple predecessors is not handled optimally. */
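/* Illustrative sketch (placeholder names): splitting a critical edge E
   yields a new block lying on the former edge,

     basic_block bb = split_edge (e);

   after which E's source reaches BB and BB falls through to E's old
   destination; commit_one_edge_insertion below relies on this when it
   has no simpler place to put queued insns.  */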
2177 basic_block
2178 split_edge (edge_in)
2179 edge edge_in;
2181 basic_block old_pred, bb, old_succ;
2182 edge edge_out;
2183 rtx bb_note;
2184 int i, j;
2186 /* Abnormal edges cannot be split. */
2187 if ((edge_in->flags & EDGE_ABNORMAL) != 0)
2188 abort ();
2190 old_pred = edge_in->src;
2191 old_succ = edge_in->dest;
2193 /* Create the new structures. */
2194 bb = (basic_block) obstack_alloc (&flow_obstack, sizeof (*bb));
2195 edge_out = (edge) xcalloc (1, sizeof (*edge_out));
2196 n_edges++;
2198 memset (bb, 0, sizeof (*bb));
2200 /* ??? This info is likely going to be out of date very soon. */
2201 if (old_succ->global_live_at_start)
2203 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
2204 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
2205 COPY_REG_SET (bb->global_live_at_start, old_succ->global_live_at_start);
2206 COPY_REG_SET (bb->global_live_at_end, old_succ->global_live_at_start);
2209 /* Wire them up. */
2210 bb->succ = edge_out;
2211 bb->count = edge_in->count;
2212 bb->frequency = EDGE_FREQUENCY (edge_in);
2214 edge_in->flags &= ~EDGE_CRITICAL;
2216 edge_out->pred_next = old_succ->pred;
2217 edge_out->succ_next = NULL;
2218 edge_out->src = bb;
2219 edge_out->dest = old_succ;
2220 edge_out->flags = EDGE_FALLTHRU;
2221 edge_out->probability = REG_BR_PROB_BASE;
2222 edge_out->count = edge_in->count;
2224 old_succ->pred = edge_out;
2226 /* Tricky case -- if there existed a fallthru into the successor
2227 (and we're not it) we must add a new unconditional jump around
2228 the new block we're actually interested in.
2230 Further, if that edge is critical, this means a second new basic
2231 block must be created to hold it. In order to simplify correct
2232 insn placement, do this before we touch the existing basic block
2233 ordering for the block we were really wanting. */
2234 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
2236 edge e;
2237 for (e = edge_out->pred_next; e; e = e->pred_next)
2238 if (e->flags & EDGE_FALLTHRU)
2239 break;
2241 if (e)
2243 basic_block jump_block;
2244 rtx pos;
2246 if ((e->flags & EDGE_CRITICAL) == 0
2247 && e->src != ENTRY_BLOCK_PTR)
2249 /* Non critical -- we can simply add a jump to the end
2250 of the existing predecessor. */
2251 jump_block = e->src;
2253 else
2255 /* We need a new block to hold the jump. The simplest
2256 way to do the bulk of the work here is to recursively
2257 call ourselves. */
2258 jump_block = split_edge (e);
2259 e = jump_block->succ;
2262 /* Now add the jump insn ... */
2263 pos = emit_jump_insn_after (gen_jump (old_succ->head),
2264 last_loop_beg_note (jump_block->end));
2265 jump_block->end = pos;
2266 if (basic_block_for_insn)
2267 set_block_for_new_insns (pos, jump_block);
2268 emit_barrier_after (pos);
2270 /* ... let jump know that label is in use, ... */
2271 JUMP_LABEL (pos) = old_succ->head;
2272 ++LABEL_NUSES (old_succ->head);
2274 /* ... and clear fallthru on the outgoing edge. */
2275 e->flags &= ~EDGE_FALLTHRU;
2277 /* Continue splitting the interesting edge. */
2281 /* Place the new block just in front of the successor. */
2282 VARRAY_GROW (basic_block_info, ++n_basic_blocks);
2283 if (old_succ == EXIT_BLOCK_PTR)
2284 j = n_basic_blocks - 1;
2285 else
2286 j = old_succ->index;
2287 for (i = n_basic_blocks - 1; i > j; --i)
2289 basic_block tmp = BASIC_BLOCK (i - 1);
2290 BASIC_BLOCK (i) = tmp;
2291 tmp->index = i;
2293 BASIC_BLOCK (i) = bb;
2294 bb->index = i;
2296 /* Create the basic block note.
2298 Where we place the note can have a noticeable impact on the generated
2299 code. Consider a cfg in which block 0 falls through into block 1,
2305 block 1 loops back to itself (+->1-->2--->E, back edge +--+), and
2307 control then continues through block 2 to the exit E.
2309 If we need to insert an insn on the edge from block 0 to block 1,
2310 we want to ensure the instructions we insert are outside of any
2311 loop notes that physically sit between block 0 and block 1. Otherwise
2312 we confuse the loop optimizer into thinking the loop is a phony. */
2313 if (old_succ != EXIT_BLOCK_PTR
2314 && PREV_INSN (old_succ->head)
2315 && GET_CODE (PREV_INSN (old_succ->head)) == NOTE
2316 && NOTE_LINE_NUMBER (PREV_INSN (old_succ->head)) == NOTE_INSN_LOOP_BEG
2317 && !back_edge_of_syntactic_loop_p (old_succ, old_pred))
2318 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK,
2319 PREV_INSN (old_succ->head));
2320 else if (old_succ != EXIT_BLOCK_PTR)
2321 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, old_succ->head);
2322 else
2323 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
2324 NOTE_BASIC_BLOCK (bb_note) = bb;
2325 bb->head = bb->end = bb_note;
2327 /* For non-fallthru edges, we must adjust the predecessor's
2328 jump instruction to target our new block. */
2329 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
2331 if (!redirect_edge_and_branch (edge_in, bb))
2332 abort ();
2334 else
2335 redirect_edge_succ (edge_in, bb);
2337 return bb;
2340 /* Queue instructions for insertion on an edge between two basic blocks.
2341 The new instructions and basic blocks (if any) will not appear in the
2342 CFG until commit_edge_insertions is called. */
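/* Illustrative sketch (not code from this file): queue an insn on edge E
   and flush all queued insertions in one pass over the CFG,

     insert_insn_on_edge (gen_rtx_USE (VOIDmode, const0_rtx), e);
     ...
     commit_edge_insertions ();

   the USE of const0_rtx is the same dummy insn flow_call_edges_add emits
   below when it needs to split the last basic block.  */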
2344 void
2345 insert_insn_on_edge (pattern, e)
2346 rtx pattern;
2347 edge e;
2349 /* We cannot insert instructions on an abnormal critical edge.
2350 It will be easier to find the culprit if we die now. */
2351 if ((e->flags & (EDGE_ABNORMAL|EDGE_CRITICAL))
2352 == (EDGE_ABNORMAL|EDGE_CRITICAL))
2353 abort ();
2355 if (e->insns == NULL_RTX)
2356 start_sequence ();
2357 else
2358 push_to_sequence (e->insns);
2360 emit_insn (pattern);
2362 e->insns = get_insns ();
2363 end_sequence ();
2366 /* Update the CFG for the instructions queued on edge E. */
2368 static void
2369 commit_one_edge_insertion (e)
2370 edge e;
2372 rtx before = NULL_RTX, after = NULL_RTX, insns, tmp, last;
2373 basic_block bb;
2375 /* Pull the insns off the edge now since the edge might go away. */
2376 insns = e->insns;
2377 e->insns = NULL_RTX;
2379 /* Figure out where to put these things. If the destination has
2380 one predecessor, insert there. Except for the exit block. */
2381 if (e->dest->pred->pred_next == NULL
2382 && e->dest != EXIT_BLOCK_PTR)
2384 bb = e->dest;
2386 /* Get the location correct wrt a code label, and "nice" wrt
2387 a basic block note, and before everything else. */
2388 tmp = bb->head;
2389 if (GET_CODE (tmp) == CODE_LABEL)
2390 tmp = NEXT_INSN (tmp);
2391 if (NOTE_INSN_BASIC_BLOCK_P (tmp))
2392 tmp = NEXT_INSN (tmp);
2393 if (tmp == bb->head)
2394 before = tmp;
2395 else
2396 after = PREV_INSN (tmp);
2399 /* If the source has one successor and the edge is not abnormal,
2400 insert there. Except for the entry block. */
2401 else if ((e->flags & EDGE_ABNORMAL) == 0
2402 && e->src->succ->succ_next == NULL
2403 && e->src != ENTRY_BLOCK_PTR)
2405 bb = e->src;
2406 /* It is possible to have a non-simple jump here. Consider a target
2407 where some forms of unconditional jumps clobber a register. This
2408 happens on the fr30 for example.
2410 We know this block has a single successor, so we can just emit
2411 the queued insns before the jump. */
2412 if (GET_CODE (bb->end) == JUMP_INSN)
2414 before = bb->end;
2415 while (GET_CODE (PREV_INSN (before)) == NOTE
2416 && NOTE_LINE_NUMBER (PREV_INSN (before)) == NOTE_INSN_LOOP_BEG)
2417 before = PREV_INSN (before);
2419 else
2421 /* We'd better be fallthru, or we've lost track of what's what. */
2422 if ((e->flags & EDGE_FALLTHRU) == 0)
2423 abort ();
2425 after = bb->end;
2429 /* Otherwise we must split the edge. */
2430 else
2432 bb = split_edge (e);
2433 after = bb->end;
2436 /* Now that we've found the spot, do the insertion. */
2438 /* Set the new block number for these insns, if structure is allocated. */
2439 if (basic_block_for_insn)
2441 rtx i;
2442 for (i = insns; i != NULL_RTX; i = NEXT_INSN (i))
2443 set_block_for_insn (i, bb);
2446 if (before)
2448 emit_insns_before (insns, before);
2449 if (before == bb->head)
2450 bb->head = insns;
2452 last = prev_nonnote_insn (before);
2454 else
2456 last = emit_insns_after (insns, after);
2457 if (after == bb->end)
2458 bb->end = last;
2461 if (returnjump_p (last))
2463 /* ??? Remove all outgoing edges from BB and add one for EXIT.
2464 This is not currently a problem because this only happens
2465 for the (single) epilogue, which already has a fallthru edge
2466 to EXIT. */
2468 e = bb->succ;
2469 if (e->dest != EXIT_BLOCK_PTR
2470 || e->succ_next != NULL
2471 || (e->flags & EDGE_FALLTHRU) == 0)
2472 abort ();
2473 e->flags &= ~EDGE_FALLTHRU;
2475 emit_barrier_after (last);
2476 bb->end = last;
2478 if (before)
2479 flow_delete_insn (before);
2481 else if (GET_CODE (last) == JUMP_INSN)
2482 abort ();
2483 find_sub_basic_blocks (bb);
2486 /* Update the CFG for all queued instructions. */
2488 void
2489 commit_edge_insertions ()
2491 int i;
2492 basic_block bb;
2493 compute_bb_for_insn (get_max_uid ());
2495 #ifdef ENABLE_CHECKING
2496 verify_flow_info ();
2497 #endif
2499 i = -1;
2500 bb = ENTRY_BLOCK_PTR;
2501 while (1)
2503 edge e, next;
2505 for (e = bb->succ; e; e = next)
2507 next = e->succ_next;
2508 if (e->insns)
2509 commit_one_edge_insertion (e);
2512 if (++i >= n_basic_blocks)
2513 break;
2514 bb = BASIC_BLOCK (i);
2518 /* Return true if we need to add a fake edge to the exit block.
2519 Helper function for flow_call_edges_add. */
2520 static bool
2521 need_fake_edge_p (insn)
2522 rtx insn;
2524 if (!INSN_P (insn))
2525 return false;
2527 if ((GET_CODE (insn) == CALL_INSN
2528 && !SIBLING_CALL_P (insn)
2529 && !find_reg_note (insn, REG_NORETURN, NULL)
2530 && !find_reg_note (insn, REG_ALWAYS_RETURN, NULL)
2531 && !CONST_OR_PURE_CALL_P (insn)))
2532 return true;
2534 return ((GET_CODE (PATTERN (insn)) == ASM_OPERANDS
2535 && MEM_VOLATILE_P (PATTERN (insn)))
2536 || (GET_CODE (PATTERN (insn)) == PARALLEL
2537 && asm_noperands (insn) != -1
2538 && MEM_VOLATILE_P (XVECEXP (PATTERN (insn), 0, 0)))
2539 || GET_CODE (PATTERN (insn)) == ASM_INPUT);
2542 /* Add fake edges to the function exit for any non-constant and non-noreturn
2543 calls, or volatile inline assembly, in the bitmap of blocks specified by
2544 BLOCKS, or to the whole CFG if BLOCKS is zero. Return the number of blocks
2545 that were split.
2547 The goal is to expose cases in which entering a basic block does not imply
2548 that all subsequent instructions must be executed. */
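/* Illustrative sketch: a pass that wants fake call edges everywhere would
   pass a null bitmap,

     int n_split = flow_call_edges_add (NULL);

   whereas a pass interested in a subset would pass an sbitmap with just
   those block indices set.  N_SPLIT here is a placeholder name.  */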
2551 flow_call_edges_add (blocks)
2552 sbitmap blocks;
2554 int i;
2555 int blocks_split = 0;
2556 int bb_num = 0;
2557 basic_block *bbs;
2558 bool check_last_block = false;
2560 /* Map bb indices into basic block pointers since split_block
2561 will renumber the basic blocks. */
2563 bbs = xmalloc (n_basic_blocks * sizeof (*bbs));
2565 if (! blocks)
2567 for (i = 0; i < n_basic_blocks; i++)
2568 bbs[bb_num++] = BASIC_BLOCK (i);
2569 check_last_block = true;
2571 else
2573 EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
2575 bbs[bb_num++] = BASIC_BLOCK (i);
2576 if (i == n_basic_blocks - 1)
2577 check_last_block = true;
2581 /* In the last basic block, before epilogue generation, there will be
2582 a fallthru edge to EXIT. Special care is required if the last insn
2583 of the last basic block is a call because make_edge folds duplicate
2584 edges, which would result in the fallthru edge also being marked
2585 fake, which would result in the fallthru edge being removed by
2586 remove_fake_edges, which would result in an invalid CFG.
2588 Moreover, we can't elide the outgoing fake edge, since the block
2589 profiler needs to take this into account in order to solve the minimal
2590 spanning tree in the case that the call doesn't return.
2592 Handle this by adding a dummy instruction in a new last basic block. */
2593 if (check_last_block
2594 && need_fake_edge_p (BASIC_BLOCK (n_basic_blocks - 1)->end))
2596 edge e;
2597 for (e = BASIC_BLOCK (n_basic_blocks - 1)->succ; e; e = e->succ_next)
2598 if (e->dest == EXIT_BLOCK_PTR)
2599 break;
2600 insert_insn_on_edge (gen_rtx_USE (VOIDmode, const0_rtx), e);
2601 commit_edge_insertions ();
2605 /* Now add fake edges to the function exit for any non-constant
2606 calls since there is no way that we can determine if they will
2607 return or not... */
2609 for (i = 0; i < bb_num; i++)
2611 basic_block bb = bbs[i];
2612 rtx insn;
2613 rtx prev_insn;
2615 for (insn = bb->end; ; insn = prev_insn)
2617 prev_insn = PREV_INSN (insn);
2618 if (need_fake_edge_p (insn))
2620 edge e;
2622 /* The above condition should be enough to verify that there is
2623 no edge to the exit block in the CFG already. Calling make_edge in
2624 such a case would make us mark that edge as fake and remove it
2625 later. */
2626 #ifdef ENABLE_CHECKING
2627 if (insn == bb->end)
2628 for (e = bb->succ; e; e = e->succ_next)
2629 if (e->dest == EXIT_BLOCK_PTR)
2630 abort ();
2631 #endif
2633 /* Note that the following may create a new basic block
2634 and renumber the existing basic blocks. */
2635 e = split_block (bb, insn);
2636 if (e)
2637 blocks_split++;
2639 make_edge (NULL, bb, EXIT_BLOCK_PTR, EDGE_FAKE);
2641 if (insn == bb->head)
2642 break;
2646 if (blocks_split)
2647 verify_flow_info ();
2649 free (bbs);
2650 return blocks_split;
2653 /* Find unreachable blocks. An unreachable block will have 0 in
2654 the reachable bit in block->flags. A non-zero value indicates the
2655 block is reachable. */
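/* Minimal usage sketch: after the call, reachability is a simple flag test,

     find_unreachable_blocks ();
     if (!(bb->flags & BB_REACHABLE))
       ... BB cannot be reached from the entry block ...

   which is exactly how delete_unreachable_blocks below consumes it.  */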
2657 void
2658 find_unreachable_blocks ()
2660 edge e;
2661 int i, n;
2662 basic_block *tos, *worklist;
2664 n = n_basic_blocks;
2665 tos = worklist = (basic_block *) xmalloc (sizeof (basic_block) * n);
2667 /* Clear all the reachability flags. */
2669 for (i = 0; i < n; ++i)
2670 BASIC_BLOCK (i)->flags &= ~BB_REACHABLE;
2672 /* Add our starting points to the worklist. Almost always there will
2673 be only one. It isn't inconceivable that we might one day directly
2674 support Fortran alternate entry points. */
2676 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
2678 *tos++ = e->dest;
2680 /* Mark the block reachable. */
2681 e->dest->flags |= BB_REACHABLE;
2684 /* Iterate: find everything reachable from what we've already seen. */
2686 while (tos != worklist)
2688 basic_block b = *--tos;
2690 for (e = b->succ; e; e = e->succ_next)
2691 if (!(e->dest->flags & BB_REACHABLE))
2693 *tos++ = e->dest;
2694 e->dest->flags |= BB_REACHABLE;
2698 free (worklist);
2701 /* Delete all unreachable basic blocks. */
2702 static void
2703 delete_unreachable_blocks ()
2705 int i;
2707 find_unreachable_blocks ();
2709 /* Delete all unreachable basic blocks. Count down so that we
2710 don't interfere with the block renumbering that happens in
2711 flow_delete_block. */
2713 for (i = n_basic_blocks - 1; i >= 0; --i)
2715 basic_block b = BASIC_BLOCK (i);
2717 if (!(b->flags & BB_REACHABLE))
2718 flow_delete_block (b);
2721 tidy_fallthru_edges ();
2724 /* Return true if NOTE is not one of the ones that must be kept paired,
2725 so that we may simply delete them. */
2727 static int
2728 can_delete_note_p (note)
2729 rtx note;
2731 return (NOTE_LINE_NUMBER (note) == NOTE_INSN_DELETED
2732 || NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK);
2735 /* Unlink a chain of insns between START and FINISH, leaving notes
2736 that must be paired. */
2738 void
2739 flow_delete_insn_chain (start, finish)
2740 rtx start, finish;
2742 /* Unchain the insns one by one. It would be quicker to delete all
2743 of these with a single unchaining, rather than one at a time, but
2744 we need to keep the NOTEs. */
2746 rtx next;
2748 while (1)
2750 next = NEXT_INSN (start);
2751 if (GET_CODE (start) == NOTE && !can_delete_note_p (start))
2753 else if (GET_CODE (start) == CODE_LABEL
2754 && ! can_delete_label_p (start))
2756 const char *name = LABEL_NAME (start);
2757 PUT_CODE (start, NOTE);
2758 NOTE_LINE_NUMBER (start) = NOTE_INSN_DELETED_LABEL;
2759 NOTE_SOURCE_FILE (start) = name;
2761 else
2762 next = flow_delete_insn (start);
2764 if (start == finish)
2765 break;
2766 start = next;
2770 /* Delete the insns in a (non-live) block. We physically delete every
2771 non-deleted-note insn, and update the flow graph appropriately.
2773 Return nonzero if we deleted an exception handler. */
2775 /* ??? Preserving all such notes strikes me as wrong. It would be nice
2776 to post-process the stream to remove empty blocks, loops, ranges, etc. */
2779 flow_delete_block (b)
2780 basic_block b;
2782 int deleted_handler = 0;
2783 rtx insn, end, tmp;
2785 /* If the head of this block is a CODE_LABEL, then it might be the
2786 label for an exception handler which can't be reached.
2788 We need to remove the label from the exception_handler_label list
2789 and remove the associated NOTE_INSN_EH_REGION_BEG and
2790 NOTE_INSN_EH_REGION_END notes. */
2792 insn = b->head;
2794 never_reached_warning (insn);
2796 if (GET_CODE (insn) == CODE_LABEL)
2797 maybe_remove_eh_handler (insn);
2799 /* Include any jump table following the basic block. */
2800 end = b->end;
2801 if (GET_CODE (end) == JUMP_INSN
2802 && (tmp = JUMP_LABEL (end)) != NULL_RTX
2803 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
2804 && GET_CODE (tmp) == JUMP_INSN
2805 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
2806 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
2807 end = tmp;
2809 /* Include any barrier that may follow the basic block. */
2810 tmp = next_nonnote_insn (end);
2811 if (tmp && GET_CODE (tmp) == BARRIER)
2812 end = tmp;
2814 /* Selectively delete the entire chain. */
2815 flow_delete_insn_chain (insn, end);
2817 /* Remove the edges into and out of this block. Note that there may
2818 indeed be edges in, if we are removing an unreachable loop. */
2820 edge e, next, *q;
2822 for (e = b->pred; e; e = next)
2824 for (q = &e->src->succ; *q != e; q = &(*q)->succ_next)
2825 continue;
2826 *q = e->succ_next;
2827 next = e->pred_next;
2828 n_edges--;
2829 free (e);
2831 for (e = b->succ; e; e = next)
2833 for (q = &e->dest->pred; *q != e; q = &(*q)->pred_next)
2834 continue;
2835 *q = e->pred_next;
2836 next = e->succ_next;
2837 n_edges--;
2838 free (e);
2841 b->pred = NULL;
2842 b->succ = NULL;
2845 /* Remove the basic block from the array, and compact behind it. */
2846 expunge_block (b);
2848 return deleted_handler;
2851 /* Remove block B from the basic block array and compact behind it. */
2853 void
2854 expunge_block (b)
2855 basic_block b;
2857 int i, n = n_basic_blocks;
2859 for (i = b->index; i + 1 < n; ++i)
2861 basic_block x = BASIC_BLOCK (i + 1);
2862 BASIC_BLOCK (i) = x;
2863 x->index = i;
2866 basic_block_info->num_elements--;
2867 n_basic_blocks--;
2870 /* Delete INSN by patching it out. Return the next insn. */
2873 flow_delete_insn (insn)
2874 rtx insn;
2876 rtx prev = PREV_INSN (insn);
2877 rtx next = NEXT_INSN (insn);
2878 rtx note;
2880 PREV_INSN (insn) = NULL_RTX;
2881 NEXT_INSN (insn) = NULL_RTX;
2882 INSN_DELETED_P (insn) = 1;
2884 if (prev)
2885 NEXT_INSN (prev) = next;
2886 if (next)
2887 PREV_INSN (next) = prev;
2888 else
2889 set_last_insn (prev);
2891 if (GET_CODE (insn) == CODE_LABEL)
2892 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
2894 /* If deleting a jump, decrement the use count of the label. Deleting
2895 the label itself should happen in the normal course of block merging. */
2896 if (GET_CODE (insn) == JUMP_INSN
2897 && JUMP_LABEL (insn)
2898 && GET_CODE (JUMP_LABEL (insn)) == CODE_LABEL)
2899 LABEL_NUSES (JUMP_LABEL (insn))--;
2901 /* Also if deleting an insn that references a label. */
2902 else if ((note = find_reg_note (insn, REG_LABEL, NULL_RTX)) != NULL_RTX
2903 && GET_CODE (XEXP (note, 0)) == CODE_LABEL)
2904 LABEL_NUSES (XEXP (note, 0))--;
2906 if (GET_CODE (insn) == JUMP_INSN
2907 && (GET_CODE (PATTERN (insn)) == ADDR_VEC
2908 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
2910 rtx pat = PATTERN (insn);
2911 int diff_vec_p = GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC;
2912 int len = XVECLEN (pat, diff_vec_p);
2913 int i;
2915 for (i = 0; i < len; i++)
2916 LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0))--;
2919 return next;
2922 /* True if a given label can be deleted. */
2924 static int
2925 can_delete_label_p (label)
2926 rtx label;
2928 rtx x;
2930 if (LABEL_PRESERVE_P (label))
2931 return 0;
2933 for (x = forced_labels; x; x = XEXP (x, 1))
2934 if (label == XEXP (x, 0))
2935 return 0;
2936 for (x = label_value_list; x; x = XEXP (x, 1))
2937 if (label == XEXP (x, 0))
2938 return 0;
2939 for (x = exception_handler_labels; x; x = XEXP (x, 1))
2940 if (label == XEXP (x, 0))
2941 return 0;
2943 /* User declared labels must be preserved. */
2944 if (LABEL_NAME (label) != 0)
2945 return 0;
2947 return 1;
2950 static int
2951 tail_recursion_label_p (label)
2952 rtx label;
2954 rtx x;
2956 for (x = tail_recursion_label_list; x; x = XEXP (x, 1))
2957 if (label == XEXP (x, 0))
2958 return 1;
2960 return 0;
2963 /* Blocks A and B are to be merged into a single block A. The insns
2964 are already contiguous, hence `nomove'. */
2966 void
2967 merge_blocks_nomove (a, b)
2968 basic_block a, b;
2970 edge e;
2971 rtx b_head, b_end, a_end;
2972 rtx del_first = NULL_RTX, del_last = NULL_RTX;
2973 int b_empty = 0;
2975 /* If there was a CODE_LABEL beginning B, delete it. */
2976 b_head = b->head;
2977 b_end = b->end;
2978 if (GET_CODE (b_head) == CODE_LABEL)
2980 /* Detect basic blocks with nothing but a label. This can happen
2981 in particular at the end of a function. */
2982 if (b_head == b_end)
2983 b_empty = 1;
2984 del_first = del_last = b_head;
2985 b_head = NEXT_INSN (b_head);
2988 /* Delete the basic block note. */
2989 if (NOTE_INSN_BASIC_BLOCK_P (b_head))
2991 if (b_head == b_end)
2992 b_empty = 1;
2993 if (! del_last)
2994 del_first = b_head;
2995 del_last = b_head;
2996 b_head = NEXT_INSN (b_head);
2999 /* If there was a jump out of A, delete it. */
3000 a_end = a->end;
3001 if (GET_CODE (a_end) == JUMP_INSN)
3003 rtx prev;
3005 for (prev = PREV_INSN (a_end); ; prev = PREV_INSN (prev))
3006 if (GET_CODE (prev) != NOTE
3007 || NOTE_LINE_NUMBER (prev) == NOTE_INSN_BASIC_BLOCK
3008 || prev == a->head)
3009 break;
3011 del_first = a_end;
3013 #ifdef HAVE_cc0
3014 /* If this was a conditional jump, we need to also delete
3015 the insn that set cc0. */
3016 if (only_sets_cc0_p (prev))
3018 rtx tmp = prev;
3019 prev = prev_nonnote_insn (prev);
3020 if (!prev)
3021 prev = a->head;
3022 del_first = tmp;
3024 #endif
3026 a_end = prev;
3028 else if (GET_CODE (NEXT_INSN (a_end)) == BARRIER)
3029 del_first = NEXT_INSN (a_end);
3031 /* Delete everything marked above as well as crap that might be
3032 hanging out between the two blocks. */
3033 flow_delete_insn_chain (del_first, del_last);
3035 /* Normally there should only be one successor of A and that is B, but
3036 partway through the merge of blocks for conditional_execution we'll
3037 be merging a TEST block with THEN and ELSE successors. Free the
3038 whole lot of them and hope the caller knows what they're doing. */
3039 while (a->succ)
3040 remove_edge (a->succ);
3042 /* Adjust the edges out of B for the new owner. */
3043 for (e = b->succ; e; e = e->succ_next)
3044 e->src = a;
3045 a->succ = b->succ;
3047 /* B hasn't quite yet ceased to exist. Attempt to prevent mishap. */
3048 b->pred = b->succ = NULL;
3050 /* Reassociate the insns of B with A. */
3051 if (!b_empty)
3053 if (basic_block_for_insn)
3055 BLOCK_FOR_INSN (b_head) = a;
3056 while (b_head != b_end)
3058 b_head = NEXT_INSN (b_head);
3059 BLOCK_FOR_INSN (b_head) = a;
3062 a_end = b_end;
3064 a->end = a_end;
3066 expunge_block (b);
3069 /* Blocks A and B are to be merged into a single block. A has no incoming
3070 fallthru edge, so it can be moved before B without adding or modifying
3071 any jumps (aside from the jump from A to B). */
3073 static int
3074 merge_blocks_move_predecessor_nojumps (a, b)
3075 basic_block a, b;
3077 rtx barrier;
3078 int index;
3080 barrier = next_nonnote_insn (a->end);
3081 if (GET_CODE (barrier) != BARRIER)
3082 abort ();
3083 flow_delete_insn (barrier);
3085 /* Move block and loop notes out of the chain so that we do not
3086 disturb their order.
3088 ??? A better solution would be to squeeze out all the non-nested notes
3089 and adjust the block trees appropriately. Even better would be to have
3090 a tighter connection between block trees and rtl so that this is not
3091 necessary. */
3092 squeeze_notes (&a->head, &a->end);
3094 /* Scramble the insn chain. */
3095 if (a->end != PREV_INSN (b->head))
3096 reorder_insns (a->head, a->end, PREV_INSN (b->head));
3098 if (rtl_dump_file)
3100 fprintf (rtl_dump_file, "Moved block %d before %d and merged.\n",
3101 a->index, b->index);
3104 /* Swap the records for the two blocks around. Although we are deleting B,
3105 A is now where B was and we want to compact the BB array from where
3106 A used to be. */
3107 BASIC_BLOCK (a->index) = b;
3108 BASIC_BLOCK (b->index) = a;
3109 index = a->index;
3110 a->index = b->index;
3111 b->index = index;
3113 /* Now blocks A and B are contiguous. Merge them. */
3114 merge_blocks_nomove (a, b);
3116 return 1;
3119 /* Blocks A and B are to be merged into a single block. B has no outgoing
3120 fallthru edge, so it can be moved after A without adding or modifying
3121 any jumps (aside from the jump from A to B). */
3123 static int
3124 merge_blocks_move_successor_nojumps (a, b)
3125 basic_block a, b;
3127 rtx barrier;
3129 barrier = NEXT_INSN (b->end);
3131 /* Recognize a jump table following block B. */
3132 if (barrier
3133 && GET_CODE (barrier) == CODE_LABEL
3134 && NEXT_INSN (barrier)
3135 && GET_CODE (NEXT_INSN (barrier)) == JUMP_INSN
3136 && (GET_CODE (PATTERN (NEXT_INSN (barrier))) == ADDR_VEC
3137 || GET_CODE (PATTERN (NEXT_INSN (barrier))) == ADDR_DIFF_VEC))
3139 b->end = NEXT_INSN (barrier);
3140 barrier = NEXT_INSN (b->end);
3143 /* There had better have been a barrier there. Delete it. */
3144 if (barrier && GET_CODE (barrier) == BARRIER)
3145 flow_delete_insn (barrier);
3147 /* Move block and loop notes out of the chain so that we do not
3148 disturb their order.
3150 ??? A better solution would be to squeeze out all the non-nested notes
3151 and adjust the block trees appropriately. Even better would be to have
3152 a tighter connection between block trees and rtl so that this is not
3153 necessary. */
3154 squeeze_notes (&b->head, &b->end);
3156 /* Scramble the insn chain. */
3157 reorder_insns (b->head, b->end, a->end);
3159 /* Now blocks A and B are contiguous. Merge them. */
3160 merge_blocks_nomove (a, b);
3162 if (rtl_dump_file)
3164 fprintf (rtl_dump_file, "Moved block %d after %d and merged.\n",
3165 b->index, a->index);
3168 return 1;
3171 /* Attempt to merge basic blocks that are potentially non-adjacent.
3172 Return true iff the attempt succeeded. */
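/* Illustrative sketch (placeholder names): a caller walking the CFG would
   typically hand in an edge E from B to C together with the cleanup mode,

     if (merge_blocks (e, b, c, mode))
       changed = true;

   where bits such as CLEANUP_EXPENSIVE decide whether code may be moved.  */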
3174 static int
3175 merge_blocks (e, b, c, mode)
3176 edge e;
3177 basic_block b, c;
3178 int mode;
3180 /* If C has a tail recursion label, do not merge. There is no
3181 edge recorded from the call_placeholder back to this label, as
3182 that would make optimize_sibling_and_tail_recursive_calls more
3183 complex for no gain. */
3184 if (GET_CODE (c->head) == CODE_LABEL
3185 && tail_recursion_label_p (c->head))
3186 return 0;
3188 /* If B has a fallthru edge to C, no need to move anything. */
3189 if (e->flags & EDGE_FALLTHRU)
3191 merge_blocks_nomove (b, c);
3193 if (rtl_dump_file)
3195 fprintf (rtl_dump_file, "Merged %d and %d without moving.\n",
3196 b->index, c->index);
3199 return 1;
3201 /* Otherwise we will need to move code around. Do that only if expensive
3202 transformations are allowed. */
3203 else if (mode & CLEANUP_EXPENSIVE)
3205 edge tmp_edge, c_fallthru_edge;
3206 int c_has_outgoing_fallthru;
3207 int b_has_incoming_fallthru;
3209 /* Avoid overactive code motion, as the forwarder blocks should be
3210 eliminated by edge redirection instead. One exception might have
3211 been if B is a forwarder block and C has no fallthru edge, but
3212 that should be cleaned up by bb-reorder instead. */
3213 if (forwarder_block_p (b) || forwarder_block_p (c))
3214 return 0;
3216 /* We must make sure to not munge nesting of lexical blocks,
3217 and loop notes. This is done by squeezing out all the notes
3218 and leaving them there to lie. Not ideal, but functional. */
3220 for (tmp_edge = c->succ; tmp_edge; tmp_edge = tmp_edge->succ_next)
3221 if (tmp_edge->flags & EDGE_FALLTHRU)
3222 break;
3223 c_has_outgoing_fallthru = (tmp_edge != NULL);
3224 c_fallthru_edge = tmp_edge;
3226 for (tmp_edge = b->pred; tmp_edge; tmp_edge = tmp_edge->pred_next)
3227 if (tmp_edge->flags & EDGE_FALLTHRU)
3228 break;
3229 b_has_incoming_fallthru = (tmp_edge != NULL);
3231 /* If B does not have an incoming fallthru, then it can be moved
3232 immediately before C without introducing or modifying jumps.
3233 C cannot be the first block, so we do not have to worry about
3234 accessing a non-existent block. */
3235 if (! b_has_incoming_fallthru)
3236 return merge_blocks_move_predecessor_nojumps (b, c);
3238 /* Otherwise, we're going to try to move C after B. If C does
3239 not have an outgoing fallthru, then it can be moved
3240 immediately after B without introducing or modifying jumps. */
3241 if (! c_has_outgoing_fallthru)
3242 return merge_blocks_move_successor_nojumps (b, c);
3244 /* Otherwise, we'll need to insert an extra jump, and possibly
3245 a new block to contain it. We can't redirect to EXIT_BLOCK_PTR,
3246 as we don't have explicit return instructions before epilogues
3247 are generated, so give up on that case. */
3249 if (c_fallthru_edge->dest != EXIT_BLOCK_PTR
3250 && merge_blocks_move_successor_nojumps (b, c))
3252 basic_block target = c_fallthru_edge->dest;
3253 rtx barrier;
3254 basic_block new;
3256 /* This is a dirty hack to avoid code duplication.
3258 Set the edge to point to the wrong basic block, so that
3259 redirect_edge_and_branch_force will do the trick
3260 and rewire the edge back to the original location. */
3261 redirect_edge_succ (c_fallthru_edge, ENTRY_BLOCK_PTR);
3262 new = redirect_edge_and_branch_force (c_fallthru_edge, target);
3264 /* We've just created a barrier, but another barrier is
3265 already present in the stream. Avoid the duplicate. */
3266 barrier = next_nonnote_insn (new ? new->end : b->end);
3267 if (GET_CODE (barrier) != BARRIER)
3268 abort ();
3269 flow_delete_insn (barrier);
3271 return 1;
3274 return 0;
3276 return 0;
3279 /* Simplify a conditional jump around an unconditional jump.
3280 Return true if something changed. */
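/* Schematically, the transformation this function performs is

     before:  if (cond) goto L2;      after:  if (! cond) goto L3;
              goto L3;                      L2: ...
            L2: ...

   i.e. the conditional branch is inverted to target the destination of the
   unconditional jump, and the block holding that jump is deleted (an
   illustrative sketch; L2 and L3 are placeholder labels).  */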
3282 static bool
3283 try_simplify_condjump (cbranch_block)
3284 basic_block cbranch_block;
3286 basic_block jump_block, jump_dest_block, cbranch_dest_block;
3287 edge cbranch_jump_edge, cbranch_fallthru_edge;
3288 rtx cbranch_insn;
3290 /* Verify that there are exactly two successors. */
3291 if (!cbranch_block->succ
3292 || !cbranch_block->succ->succ_next
3293 || cbranch_block->succ->succ_next->succ_next)
3294 return false;
3296 /* Verify that we've got a normal conditional branch at the end
3297 of the block. */
3298 cbranch_insn = cbranch_block->end;
3299 if (!any_condjump_p (cbranch_insn))
3300 return false;
3302 cbranch_fallthru_edge = FALLTHRU_EDGE (cbranch_block);
3303 cbranch_jump_edge = BRANCH_EDGE (cbranch_block);
3305 /* The next block must not have multiple predecessors, must not
3306 be the last block in the function, and must contain just the
3307 unconditional jump. */
3308 jump_block = cbranch_fallthru_edge->dest;
3309 if (jump_block->pred->pred_next
3310 || jump_block->index == n_basic_blocks - 1
3311 || !forwarder_block_p (jump_block))
3312 return false;
3313 jump_dest_block = jump_block->succ->dest;
3315 /* The conditional branch must target the block after the
3316 unconditional branch. */
3317 cbranch_dest_block = cbranch_jump_edge->dest;
3319 if (!can_fallthru (jump_block, cbranch_dest_block))
3320 return false;
3322 /* Invert the conditional branch. Prevent jump.c from deleting
3323 "unreachable" instructions. */
3324 LABEL_NUSES (JUMP_LABEL (cbranch_insn))++;
3325 if (!invert_jump (cbranch_insn, block_label (jump_dest_block), 1))
3327 LABEL_NUSES (JUMP_LABEL (cbranch_insn))--;
3328 return false;
3331 if (rtl_dump_file)
3332 fprintf (rtl_dump_file, "Simplifying condjump %i around jump %i\n",
3333 INSN_UID (cbranch_insn), INSN_UID (jump_block->end));
3335 /* Success. Update the CFG to match. Note that after this point
3336 the edge variable names appear backwards; the redirection is done
3337 this way to preserve edge profile data. */
3338 cbranch_jump_edge = redirect_edge_succ_nodup (cbranch_jump_edge,
3339 cbranch_dest_block);
3340 cbranch_fallthru_edge = redirect_edge_succ_nodup (cbranch_fallthru_edge,
3341 jump_dest_block);
3342 cbranch_jump_edge->flags |= EDGE_FALLTHRU;
3343 cbranch_fallthru_edge->flags &= ~EDGE_FALLTHRU;
3345 /* Delete the block with the unconditional jump, and clean up the mess. */
3346 flow_delete_block (jump_block);
3347 tidy_fallthru_edge (cbranch_jump_edge, cbranch_block, cbranch_dest_block);
3349 return true;
3352 /* Attempt to forward edges leaving basic block B.
3353 Return true if successful. */
3355 static bool
3356 try_forward_edges (mode, b)
3357 basic_block b;
3358 int mode;
3360 bool changed = false;
3361 edge e, next;
3363 for (e = b->succ; e ; e = next)
3365 basic_block target, first;
3366 int counter;
3368 next = e->succ_next;
3370 /* Skip complex edges because we don't know how to update them.
3372 Still handle fallthru edges, as we can succeed in forwarding a fallthru
3373 edge to the same place as the branch edge of a conditional branch
3374 and turn the conditional branch into an unconditional branch. */
3375 if (e->flags & EDGE_COMPLEX)
3376 continue;
3378 target = first = e->dest;
3379 counter = 0;
3381 /* Look for the real destination of the jump.
3382 Avoid an infinite loop in the case of an infinite empty loop by counting
3383 up to n_basic_blocks. */
3384 while (forwarder_block_p (target)
3385 && target->succ->dest != EXIT_BLOCK_PTR
3386 && counter < n_basic_blocks)
3388 /* Bypass trivial infinite loops. */
3389 if (target == target->succ->dest)
3390 counter = n_basic_blocks;
3392 /* Avoid killing loop pre-headers, as they are the place the loop
3393 optimizer wants to hoist code to.
3395 For fallthru forwarders, the LOOP_BEG note must appear between
3396 the header of the block and the CODE_LABEL of the loop; for non-fallthru
3397 forwarders it must appear before the JUMP_INSN. */
3398 if (mode & CLEANUP_PRE_LOOP)
3400 rtx insn = (target->succ->flags & EDGE_FALLTHRU
3401 ? target->head : prev_nonnote_insn (target->end));
3403 if (GET_CODE (insn) != NOTE)
3404 insn = NEXT_INSN (insn);
3406 for (;insn && GET_CODE (insn) != CODE_LABEL && !INSN_P (insn);
3407 insn = NEXT_INSN (insn))
3408 if (GET_CODE (insn) == NOTE
3409 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
3410 break;
3412 if (GET_CODE (insn) == NOTE)
3413 break;
3415 target = target->succ->dest, counter++;
3418 if (counter >= n_basic_blocks)
3420 if (rtl_dump_file)
3421 fprintf (rtl_dump_file, "Infinite loop in BB %i.\n",
3422 target->index);
3424 else if (target == first)
3425 ; /* We didn't do anything. */
3426 else
3428 /* Save the values now, as the edge may get removed. */
3429 gcov_type edge_count = e->count;
3430 int edge_probability = e->probability;
3432 if (redirect_edge_and_branch (e, target))
3434 /* We successfully forwarded the edge. Now update profile
3435 data: for each edge we traversed in the chain, remove
3436 the original edge's execution count. */
3437 int edge_frequency = ((edge_probability * b->frequency
3438 + REG_BR_PROB_BASE / 2)
3439 / REG_BR_PROB_BASE);
3443 first->count -= edge_count;
3444 first->succ->count -= edge_count;
3445 first->frequency -= edge_frequency;
3446 first = first->succ->dest;
3448 while (first != target);
3450 changed = true;
3452 else
3454 if (rtl_dump_file)
3455 fprintf (rtl_dump_file, "Forwarding edge %i->%i to %i failed.\n",
3456 b->index, e->dest->index, target->index);
3461 return changed;
3464 /* Look through the insns at the end of BB1 and BB2 and find the longest
3465 sequence of insns that are equivalent. Store the first insns of that sequence
3466 in *F1 and *F2 and return the sequence length.
3468 To simplify callers of this function, if the blocks match exactly,
3469 store the head of the blocks in *F1 and *F2. */
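/* Illustrative sketch mirroring try_crossjump_to_edge below (F1 and F2 are
   placeholder names):

     rtx f1, f2;
     int nmatch = flow_find_cross_jump (mode, bb1, bb2, &f1, &f2);
     if (nmatch)
       ... the last NMATCH insns of BB1 and BB2 are equivalent, starting
           at F1 and F2 respectively ...  */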
3471 static int
3472 flow_find_cross_jump (mode, bb1, bb2, f1, f2)
3473 int mode ATTRIBUTE_UNUSED;
3474 basic_block bb1, bb2;
3475 rtx *f1, *f2;
3477 rtx i1, i2, p1, p2, last1, last2, afterlast1, afterlast2;
3478 int ninsns = 0;
3480 /* Skip simple jumps at the end of the blocks. Complex jumps still
3481 need to be compared for equivalence, which we'll do below. */
3483 i1 = bb1->end;
3484 if (onlyjump_p (i1)
3485 || (returnjump_p (i1) && !side_effects_p (PATTERN (i1))))
3486 i1 = PREV_INSN (i1);
3487 i2 = bb2->end;
3488 if (onlyjump_p (i2)
3489 || (returnjump_p (i2) && !side_effects_p (PATTERN (i2))))
3490 i2 = PREV_INSN (i2);
3492 last1 = afterlast1 = last2 = afterlast2 = NULL_RTX;
3493 while (true)
3495 /* Ignore notes. */
3496 while ((GET_CODE (i1) == NOTE && i1 != bb1->head))
3497 i1 = PREV_INSN (i1);
3498 while ((GET_CODE (i2) == NOTE && i2 != bb2->head))
3499 i2 = PREV_INSN (i2);
3501 if (i1 == bb1->head || i2 == bb2->head)
3502 break;
3504 /* Verify that I1 and I2 are equivalent. */
3506 if (GET_CODE (i1) != GET_CODE (i2))
3507 break;
3509 p1 = PATTERN (i1);
3510 p2 = PATTERN (i2);
3512 /* If this is a CALL_INSN, compare register usage information.
3513 If we don't check this on stack register machines, the two
3514 CALL_INSNs might be merged leaving reg-stack.c with mismatching
3515 numbers of stack registers in the same basic block.
3516 If we don't check this on machines with delay slots, a delay slot may
3517 be filled that clobbers a parameter expected by the subroutine.
3519 ??? We take the simple route for now and assume that if they're
3520 equal, they were constructed identically. */
3522 if (GET_CODE (i1) == CALL_INSN
3523 && ! rtx_equal_p (CALL_INSN_FUNCTION_USAGE (i1),
3524 CALL_INSN_FUNCTION_USAGE (i2)))
3525 break;
3527 #ifdef STACK_REGS
3528 /* If cross_jump_death_matters is not 0, the insn's mode
3529 indicates whether or not the insn contains any stack-like
3530 regs. */
3532 if ((mode & CLEANUP_POST_REGSTACK) && stack_regs_mentioned (i1))
3534 /* If register stack conversion has already been done, then
3535 death notes must also be compared before it is certain that
3536 the two instruction streams match. */
3538 rtx note;
3539 HARD_REG_SET i1_regset, i2_regset;
3541 CLEAR_HARD_REG_SET (i1_regset);
3542 CLEAR_HARD_REG_SET (i2_regset);
3544 for (note = REG_NOTES (i1); note; note = XEXP (note, 1))
3545 if (REG_NOTE_KIND (note) == REG_DEAD
3546 && STACK_REG_P (XEXP (note, 0)))
3547 SET_HARD_REG_BIT (i1_regset, REGNO (XEXP (note, 0)));
3549 for (note = REG_NOTES (i2); note; note = XEXP (note, 1))
3550 if (REG_NOTE_KIND (note) == REG_DEAD
3551 && STACK_REG_P (XEXP (note, 0)))
3552 SET_HARD_REG_BIT (i2_regset, REGNO (XEXP (note, 0)));
3554 GO_IF_HARD_REG_EQUAL (i1_regset, i2_regset, done);
3556 break;
3558 done:
3561 #endif
3563 if (GET_CODE (p1) != GET_CODE (p2))
3564 break;
3566 if (! rtx_renumbered_equal_p (p1, p2))
3568 /* The following code helps take care of G++ cleanups. */
3569 rtx equiv1 = find_reg_equal_equiv_note (i1);
3570 rtx equiv2 = find_reg_equal_equiv_note (i2);
3572 if (equiv1 && equiv2
3573 /* If the equivalences are not to a constant, they may
3574 reference pseudos that no longer exist, so we can't
3575 use them. */
3576 && CONSTANT_P (XEXP (equiv1, 0))
3577 && rtx_equal_p (XEXP (equiv1, 0), XEXP (equiv2, 0)))
3579 rtx s1 = single_set (i1);
3580 rtx s2 = single_set (i2);
3581 if (s1 != 0 && s2 != 0
3582 && rtx_renumbered_equal_p (SET_DEST (s1), SET_DEST (s2)))
3584 validate_change (i1, &SET_SRC (s1), XEXP (equiv1, 0), 1);
3585 validate_change (i2, &SET_SRC (s2), XEXP (equiv2, 0), 1);
3586 if (! rtx_renumbered_equal_p (p1, p2))
3587 cancel_changes (0);
3588 else if (apply_change_group ())
3589 goto win;
3592 break;
3595 win:
3596 /* Don't begin a cross-jump with a USE or CLOBBER insn. */
3597 if (GET_CODE (p1) != USE && GET_CODE (p1) != CLOBBER)
3599 afterlast1 = last1, afterlast2 = last2;
3600 last1 = i1, last2 = i2;
3601 ninsns++;
3603 i1 = PREV_INSN (i1);
3604 i2 = PREV_INSN (i2);
3607 #ifdef HAVE_cc0
3608 if (ninsns)
3610 /* Don't allow the insn after a compare to be shared by
3611 cross-jumping unless the compare is also shared. */
3612 if (reg_mentioned_p (cc0_rtx, last1) && ! sets_cc0_p (last1))
3613 last1 = afterlast1, last2 = afterlast2, ninsns--;
3615 #endif
3617 /* Include preceding notes and labels in the cross-jump. One,
3618 this may bring us to the head of the blocks as requested above.
3619 Two, it keeps line number notes as matched as may be. */
3620 if (ninsns)
3622 while (last1 != bb1->head && GET_CODE (PREV_INSN (last1)) == NOTE)
3623 last1 = PREV_INSN (last1);
3624 if (last1 != bb1->head && GET_CODE (PREV_INSN (last1)) == CODE_LABEL)
3625 last1 = PREV_INSN (last1);
3626 while (last2 != bb2->head && GET_CODE (PREV_INSN (last2)) == NOTE)
3627 last2 = PREV_INSN (last2);
3628 if (last2 != bb2->head && GET_CODE (PREV_INSN (last2)) == CODE_LABEL)
3629 last2 = PREV_INSN (last2);
3631 *f1 = last1;
3632 *f2 = last2;
3635 return ninsns;
3638 /* Return true iff outgoing edges of BB1 and BB2 match, together with
3639 the branch instruction. This means that if we commonize the control
3640 flow before the end of the basic block, the semantics remain unchanged.
3642 We may assume that there exists one edge with a common destination. */
3644 static bool
3645 outgoing_edges_match (bb1, bb2)
3646 basic_block bb1;
3647 basic_block bb2;
3649 /* If BB1 has only one successor, we must be looking at an unconditional
3650 jump. Which, by the assumption above, means that we only need to check
3651 that BB2 has one successor. */
3652 if (bb1->succ && !bb1->succ->succ_next)
3653 return (bb2->succ && !bb2->succ->succ_next);
3655 /* Match conditional jumps - this may get tricky when fallthru and branch
3656 edges are crossed. */
3657 if (bb1->succ
3658 && bb1->succ->succ_next
3659 && !bb1->succ->succ_next->succ_next
3660 && any_condjump_p (bb1->end))
3662 edge b1, f1, b2, f2;
3663 bool reverse, match;
3664 rtx set1, set2, cond1, cond2;
3665 enum rtx_code code1, code2;
3667 if (!bb2->succ
3668 || !bb2->succ->succ_next
3669 || bb1->succ->succ_next->succ_next
3670 || !any_condjump_p (bb2->end))
3671 return false;
3673 b1 = BRANCH_EDGE (bb1);
3674 b2 = BRANCH_EDGE (bb2);
3675 f1 = FALLTHRU_EDGE (bb1);
3676 f2 = FALLTHRU_EDGE (bb2);
3678 /* Get around possible forwarders on fallthru edges. Other cases
3679 should be optimized out already. */
3680 if (forwarder_block_p (f1->dest))
3681 f1 = f1->dest->succ;
3682 if (forwarder_block_p (f2->dest))
3683 f2 = f2->dest->succ;
3685 /* To simplify use of this function, return false if there are
3686 unneeded forwarder blocks. These will get eliminated later
3687 during cleanup_cfg. */
3688 if (forwarder_block_p (f1->dest)
3689 || forwarder_block_p (f2->dest)
3690 || forwarder_block_p (b1->dest)
3691 || forwarder_block_p (b2->dest))
3692 return false;
3694 if (f1->dest == f2->dest && b1->dest == b2->dest)
3695 reverse = false;
3696 else if (f1->dest == b2->dest && b1->dest == f2->dest)
3697 reverse = true;
3698 else
3699 return false;
3701 set1 = pc_set (bb1->end);
3702 set2 = pc_set (bb2->end);
3703 if ((XEXP (SET_SRC (set1), 1) == pc_rtx)
3704 != (XEXP (SET_SRC (set2), 1) == pc_rtx))
3705 reverse = !reverse;
3707 cond1 = XEXP (SET_SRC (set1), 0);
3708 cond2 = XEXP (SET_SRC (set2), 0);
3709 code1 = GET_CODE (cond1);
3710 if (reverse)
3711 code2 = reversed_comparison_code (cond2, bb2->end);
3712 else
3713 code2 = GET_CODE (cond2);
3714 if (code2 == UNKNOWN)
3715 return false;
3717 /* Verify codes and operands match. */
3718 match = ((code1 == code2
3719 && rtx_renumbered_equal_p (XEXP (cond1, 0), XEXP (cond2, 0))
3720 && rtx_renumbered_equal_p (XEXP (cond1, 1), XEXP (cond2, 1)))
3721 || (code1 == swap_condition (code2)
3722 && rtx_renumbered_equal_p (XEXP (cond1, 1),
3723 XEXP (cond2, 0))
3724 && rtx_renumbered_equal_p (XEXP (cond1, 0),
3725 XEXP (cond2, 1))));
3727 /* If we return true, we will join the blocks. Which means that
3728 we will only have one branch prediction bit to work with. Thus
3729 we require the existing branches to have probabilities that are
3730 roughly similar. */
3731 /* ??? We should use bb->frequency to allow merging in infrequently
3732 executed blocks, but at the moment it is not available when
3733 cleanup_cfg is run. */
3734 if (match && !optimize_size)
3736 rtx note1, note2;
3737 int prob1, prob2;
3738 note1 = find_reg_note (bb1->end, REG_BR_PROB, 0);
3739 note2 = find_reg_note (bb2->end, REG_BR_PROB, 0);
3741 if (note1 && note2)
3743 prob1 = INTVAL (XEXP (note1, 0));
3744 prob2 = INTVAL (XEXP (note2, 0));
3745 if (reverse)
3746 prob2 = REG_BR_PROB_BASE - prob2;
3748 /* Fail if the difference in probabilities is
3749 greater than 5%. */
3750 if (abs (prob1 - prob2) > REG_BR_PROB_BASE / 20)
3751 return false;
3753 else if (note1 || note2)
3754 return false;
3757 if (rtl_dump_file && match)
3758 fprintf (rtl_dump_file, "Conditionals in bb %i and %i match.\n",
3759 bb1->index, bb2->index);
3761 return match;
3764 /* ??? We can handle computed jumps too. This may be important for
3765 inlined functions containing switch statements. Also jumps w/o
3766 fallthru edges can be handled by simply matching whole insn. */
3767 return false;
3770 /* E1 and E2 are edges with the same destination block. Search their
3771 predecessors for common code. If found, redirect control flow from
3772 (maybe the middle of) E1->SRC to (maybe the middle of) E2->SRC. */
3774 static bool
3775 try_crossjump_to_edge (mode, e1, e2)
3776 int mode;
3777 edge e1, e2;
3779 int nmatch;
3780 basic_block src1 = e1->src, src2 = e2->src;
3781 basic_block redirect_to;
3782 rtx newpos1, newpos2;
3783 edge s;
3784 rtx last;
3785 rtx label;
3786 rtx note;
3788 /* Search backward through forwarder blocks. We don't need to worry
3789 about multiple entry or chained forwarders, as they will be optimized
3790 away. We do this to look past the unconditional jump following a
3791 conditional jump that is required due to the current CFG shape. */
3792 if (src1->pred
3793 && !src1->pred->pred_next
3794 && forwarder_block_p (src1))
3796 e1 = src1->pred;
3797 src1 = e1->src;
3799 if (src2->pred
3800 && !src2->pred->pred_next
3801 && forwarder_block_p (src2))
3803 e2 = src2->pred;
3804 src2 = e2->src;
3807 /* Nothing to do if we reach ENTRY, or a common source block. */
3808 if (src1 == ENTRY_BLOCK_PTR || src2 == ENTRY_BLOCK_PTR)
3809 return false;
3810 if (src1 == src2)
3811 return false;
3813 /* Seeing more than one forwarder block would confuse us later... */
3814 if (forwarder_block_p (e1->dest)
3815 && forwarder_block_p (e1->dest->succ->dest))
3816 return false;
3817 if (forwarder_block_p (e2->dest)
3818 && forwarder_block_p (e2->dest->succ->dest))
3819 return false;
3821 /* Likewise with dead code (possibly newly created by the other optimizations
3822 of cfg_cleanup). */
3823 if (!src1->pred || !src2->pred)
3824 return false;
3826 /* Likewise with complex edges.
3827 ??? We should be able to handle most complex edges later with some
3828 care. */
3829 if (e1->flags & EDGE_COMPLEX)
3830 return false;
3832 /* Look for the common insn sequence, part the first ... */
3833 if (!outgoing_edges_match (src1, src2))
3834 return false;
3836 /* ... and part the second. */
3837 nmatch = flow_find_cross_jump (mode, src1, src2, &newpos1, &newpos2);
3838 if (!nmatch)
3839 return false;
3841 /* Avoid splitting if possible. */
3842 if (newpos2 == src2->head)
3843 redirect_to = src2;
3844 else
3846 if (rtl_dump_file)
3847 fprintf (rtl_dump_file, "Splitting bb %i before %i insns\n",
3848 src2->index, nmatch);
3849 redirect_to = split_block (src2, PREV_INSN (newpos2))->dest;
3852 if (rtl_dump_file)
3853 fprintf (rtl_dump_file,
3854 "Cross jumping from bb %i to bb %i; %i common insns\n",
3855 src1->index, src2->index, nmatch);
3857 redirect_to->count += src1->count;
3858 redirect_to->frequency += src1->frequency;
3860 /* Recompute the frequencies and counts of outgoing edges. */
3861 for (s = redirect_to->succ; s; s = s->succ_next)
3863 edge s2;
3864 basic_block d = s->dest;
3866 if (forwarder_block_p (d))
3867 d = d->succ->dest;
3868 for (s2 = src1->succ; ; s2 = s2->succ_next)
3870 basic_block d2 = s2->dest;
3871 if (forwarder_block_p (d2))
3872 d2 = d2->succ->dest;
3873 if (d == d2)
3874 break;
3876 s->count += s2->count;
3878 /* Take care to update possible forwarder blocks. We verified
3879 that there is no more than one in the chain, so we can't run
3880 into an infinite loop. */
3881 if (forwarder_block_p (s->dest))
3883 s->dest->succ->count += s2->count;
3884 s->dest->count += s2->count;
3885 s->dest->frequency += EDGE_FREQUENCY (s);
3887 if (forwarder_block_p (s2->dest))
3889 s2->dest->succ->count -= s2->count;
3890 s2->dest->count -= s2->count;
3891 s2->dest->frequency -= EDGE_FREQUENCY (s);
3893 if (!redirect_to->frequency && !src1->frequency)
3894 s->probability = (s->probability + s2->probability) / 2;
3895 else
3896 s->probability =
3897 ((s->probability * redirect_to->frequency +
3898 s2->probability * src1->frequency)
3899 / (redirect_to->frequency + src1->frequency));
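/* Keep the REG_BR_PROB note on REDIRECT_TO's final jump consistent
with the edge probabilities recomputed above.  */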
3902 note = find_reg_note (redirect_to->end, REG_BR_PROB, 0);
3903 if (note)
3904 XEXP (note, 0) = GEN_INT (BRANCH_EDGE (redirect_to)->probability);
3906 /* Edit SRC1 to go to REDIRECT_TO at NEWPOS1. */
3908 /* Skip possible basic block header. */
3909 if (GET_CODE (newpos1) == CODE_LABEL)
3910 newpos1 = NEXT_INSN (newpos1);
3911 if (GET_CODE (newpos1) == NOTE)
3912 newpos1 = NEXT_INSN (newpos1);
3913 last = src1->end;
3915 /* Emit the jump insn. */
3916 label = block_label (redirect_to);
3917 src1->end = emit_jump_insn_before (gen_jump (label), newpos1);
3918 JUMP_LABEL (src1->end) = label;
3919 LABEL_NUSES (label)++;
3920 if (basic_block_for_insn)
3921 set_block_for_new_insns (src1->end, src1);
3923 /* Delete the now unreachable instructions. */
3924 flow_delete_insn_chain (newpos1, last);
3926 /* Make sure there is a barrier after the new jump. */
3927 last = next_nonnote_insn (src1->end);
3928 if (!last || GET_CODE (last) != BARRIER)
3929 emit_barrier_after (src1->end);
3931 /* Update CFG. */
3932 while (src1->succ)
3933 remove_edge (src1->succ);
3934 make_edge (NULL, src1, redirect_to, 0);
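/* The new jump is now SRC1's only outgoing edge, so it is taken
unconditionally and inherits all of SRC1's execution count.  */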
3935 src1->succ->probability = REG_BR_PROB_BASE;
3936 src1->succ->count = src1->count;
3938 return true;
3941 /* Search the predecessors of BB for common insn sequences. When found,
3942 share code between them by redirecting control flow. Return true if
3943 any changes made. */
3945 static bool
3946 try_crossjump_bb (mode, bb)
3947 int mode;
3948 basic_block bb;
3950 edge e, e2, nexte2, nexte, fallthru;
3951 bool changed;
3953 /* Nothing to do if there are not at least two incoming edges. */
3954 if (!bb->pred || !bb->pred->pred_next)
3955 return false;
3957 /* It is always cheapest to redirect a block that ends in a branch to
3958 a block that falls through into BB, as that adds no branches to the
3959 program. We'll try that combination first. */
3960 for (fallthru = bb->pred; fallthru; fallthru = fallthru->pred_next)
3961 if (fallthru->flags & EDGE_FALLTHRU)
3962 break;
3964 changed = false;
3965 for (e = bb->pred; e; e = nexte)
3967 nexte = e->pred_next;
3969 /* Elide complex edges now, as neither try_crossjump_to_edge
3970 nor outgoing_edges_match can handle them. */
3971 if (e->flags & EDGE_COMPLEX)
3972 continue;
3974 /* As noted above, first try with the fallthru predecessor. */
3975 if (fallthru)
3977 /* Don't combine the fallthru edge into anything else.
3978 If there is a match, we'll do it the other way around. */
3979 if (e == fallthru)
3980 continue;
3982 if (try_crossjump_to_edge (mode, e, fallthru))
3984 changed = true;
3985 nexte = bb->pred;
3986 continue;
3990 /* Non-obvious work limiting check: Recognize that we're going
3991 to call try_crossjump_bb on every basic block. So if we have
3992 two blocks with lots of outgoing edges (a switch) and they
3993 share lots of common destinations, then we would do the
3994 cross-jump check once for each common destination.
3996 Now, if the blocks actually are cross-jump candidates, then
3997 all of their destinations will be shared, which means that
3998 we only need to check them for cross-jump candidacy once. We
3999 can eliminate redundant checks of crossjump(A,B) by arbitrarily
4000 choosing to do the check from the block for which the edge
4001 in question is the first successor of A. */
4002 if (e->src->succ != e)
4003 continue;
4005 for (e2 = bb->pred; e2; e2 = nexte2)
4007 nexte2 = e2->pred_next;
4009 if (e2 == e)
4010 continue;
4012 /* We've already checked the fallthru edge above. */
4013 if (e2 == fallthru)
4014 continue;
4016 /* Again, neither try_crossjump_to_edge nor outgoing_edges_match
4017 can handle complex edges. */
4018 if (e2->flags & EDGE_COMPLEX)
4019 continue;
4021 /* The "first successor" check above only prevents multiple
4022 checks of crossjump(A,B). In order to prevent redundant
4023 checks of crossjump(B,A), require that A be the block
4024 with the lowest index. */
4025 if (e->src->index > e2->src->index)
4026 continue;
4028 if (try_crossjump_to_edge (mode, e, e2))
4030 changed = true;
4031 nexte = bb->pred;
4032 break;
4037 return changed;
4040 /* Do simple CFG optimizations - basic block merging, simplifying of jump
4041 instructions etc. Return nonzero if changes were made. */
4043 static bool
4044 try_optimize_cfg (mode)
4045 int mode;
4047 int i;
4048 bool changed_overall = false;
4049 bool changed;
4050 int iterations = 0;
4052 /* Attempt to merge blocks as made possible by edge removal. If a block
4053 has only one successor, and the successor has only one predecessor,
4054 they may be combined. */
4058 changed = false;
4059 iterations++;
4061 if (rtl_dump_file)
4062 fprintf (rtl_dump_file, "\n\ntry_optimize_cfg iteration %i\n\n",
4063 iterations);
4065 for (i = 0; i < n_basic_blocks;)
4067 basic_block c, b = BASIC_BLOCK (i);
4068 edge s;
4069 bool changed_here = false;
4071 /* Delete trivially dead basic blocks. */
4072 while (b->pred == NULL)
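/* Remember the block just before B; once B has been deleted, the
scan resumes from there.  */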
4074 c = BASIC_BLOCK (b->index - 1);
4075 if (rtl_dump_file)
4076 fprintf (rtl_dump_file, "Deleting block %i.\n", b->index);
4077 flow_delete_block (b);
4078 changed = true;
4079 b = c;
4082 /* Remove code labels no longer used. Don't do this before
4083 CALL_PLACEHOLDER is removed, as some branches may be hidden
4084 within. */
4085 if (b->pred->pred_next == NULL
4086 && (b->pred->flags & EDGE_FALLTHRU)
4087 && !(b->pred->flags & EDGE_COMPLEX)
4088 && GET_CODE (b->head) == CODE_LABEL
4089 && (!(mode & CLEANUP_PRE_SIBCALL)
4090 || !tail_recursion_label_p (b->head))
4091 /* If the previous block ends with a condjump jumping to the next BB,
4092 we can't delete the label. */
4093 && (b->pred->src == ENTRY_BLOCK_PTR
4094 || !reg_mentioned_p (b->head, b->pred->src->end)))
4096 rtx label = b->head;
4097 b->head = NEXT_INSN (b->head);
4098 flow_delete_insn_chain (label, label);
4099 if (rtl_dump_file)
4100 fprintf (rtl_dump_file, "Deleted label in block %i.\n",
4101 b->index);
4104 /* If we fall through an empty block, we can remove it. */
4105 if (b->pred->pred_next == NULL
4106 && (b->pred->flags & EDGE_FALLTHRU)
4107 && GET_CODE (b->head) != CODE_LABEL
4108 && forwarder_block_p (b)
4109 /* Note that forwarder_block_p true ensures that there
4110 is a successor for this block. */
4111 && (b->succ->flags & EDGE_FALLTHRU)
4112 && n_basic_blocks > 1)
4114 if (rtl_dump_file)
4115 fprintf (rtl_dump_file, "Deleting fallthru block %i.\n",
4116 b->index);
4117 c = BASIC_BLOCK (b->index ? b->index - 1 : 1);
4118 redirect_edge_succ_nodup (b->pred, b->succ->dest);
4119 flow_delete_block (b);
4120 changed = true;
4121 b = c;
4124 /* Merge blocks. Loop because chains of blocks might be
4125 combinable. */
4126 while ((s = b->succ) != NULL
4127 && s->succ_next == NULL
4128 && !(s->flags & EDGE_COMPLEX)
4129 && (c = s->dest) != EXIT_BLOCK_PTR
4130 && c->pred->pred_next == NULL
4131 /* If the jump insn has side effects,
4132 we can't kill the edge. */
4133 && (GET_CODE (b->end) != JUMP_INSN
4134 || onlyjump_p (b->end))
4135 && merge_blocks (s, b, c, mode))
4136 changed_here = true;
4138 /* Simplify branch over branch. */
4139 if ((mode & CLEANUP_EXPENSIVE) && try_simplify_condjump (b))
4140 changed_here = true;
4142 /* If B has a single outgoing edge, but uses a non-trivial jump
4143 instruction without side-effects, we can either delete the
4144 jump entirely, or replace it with a simple unconditional jump.
4145 Use redirect_edge_and_branch to do the dirty work. */
4146 if (b->succ
4147 && ! b->succ->succ_next
4148 && b->succ->dest != EXIT_BLOCK_PTR
4149 && onlyjump_p (b->end)
4150 && redirect_edge_and_branch (b->succ, b->succ->dest))
4151 changed_here = true;
4153 /* Simplify branch to branch. */
4154 if (try_forward_edges (mode, b))
4155 changed_here = true;
4157 /* Look for shared code between blocks. */
4158 if ((mode & CLEANUP_CROSSJUMP)
4159 && try_crossjump_bb (mode, b))
4160 changed_here = true;
4162 /* Don't get confused by the index shift caused by deleting
4163 blocks. */
4164 if (!changed_here)
4165 i = b->index + 1;
4166 else
4167 changed = true;
4170 if ((mode & CLEANUP_CROSSJUMP)
4171 && try_crossjump_bb (mode, EXIT_BLOCK_PTR))
4172 changed = true;
4174 #ifdef ENABLE_CHECKING
4175 if (changed)
4176 verify_flow_info ();
4177 #endif
4179 changed_overall |= changed;
4181 while (changed);
4182 return changed_overall;
4185 /* The given edge should potentially be a fallthru edge. If that is in
4186 fact true, delete the jump and barriers that are in the way. */
4188 void
4189 tidy_fallthru_edge (e, b, c)
4190 edge e;
4191 basic_block b, c;
4193 rtx q;
4195 /* ??? In a late-running flow pass, other folks may have deleted basic
4196 blocks by nopping out blocks, leaving multiple BARRIERs between here
4197 and the target label. They ought to be chastised and fixed.
4199 We can also wind up with a sequence of undeletable labels between
4200 one block and the next.
4202 So search through a sequence of barriers, labels, and notes for
4203 the head of block C and assert that we really do fall through. */
4205 if (next_real_insn (b->end) != next_real_insn (PREV_INSN (c->head)))
4206 return;
4208 /* Remove what will soon cease being the jump insn from the source block.
4209 If block B consisted only of this single jump, turn it into a deleted
4210 note. */
4211 q = b->end;
4212 if (GET_CODE (q) == JUMP_INSN
4213 && onlyjump_p (q)
4214 && (any_uncondjump_p (q)
4215 || (b->succ == e && e->succ_next == NULL)))
4217 #ifdef HAVE_cc0
4218 /* If this was a conditional jump, we need to also delete
4219 the insn that set cc0. */
4220 if (any_condjump_p (q) && only_sets_cc0_p (PREV_INSN (q)))
4221 q = PREV_INSN (q);
4222 #endif
4224 if (b->head == q)
4226 PUT_CODE (q, NOTE);
4227 NOTE_LINE_NUMBER (q) = NOTE_INSN_DELETED;
4228 NOTE_SOURCE_FILE (q) = 0;
4230 else
4232 q = PREV_INSN (q);
4234 /* We don't want a block to end on a line-number note since that has
4235 the potential of changing the code between -g and not -g. */
4236 while (GET_CODE (q) == NOTE && NOTE_LINE_NUMBER (q) >= 0)
4237 q = PREV_INSN (q);
4240 b->end = q;
4243 /* Selectively unlink the sequence. */
4244 if (q != PREV_INSN (c->head))
4245 flow_delete_insn_chain (NEXT_INSN (q), PREV_INSN (c->head));
4247 e->flags |= EDGE_FALLTHRU;
4250 /* Fix up edges that now fall through, or rather should now fall through
4251 but previously required a jump around now deleted blocks. Simplify
4252 the search by only examining blocks numerically adjacent, since this
4253 is how find_basic_blocks created them. */
4255 static void
4256 tidy_fallthru_edges ()
4258 int i;
4260 for (i = 1; i < n_basic_blocks; ++i)
4262 basic_block b = BASIC_BLOCK (i - 1);
4263 basic_block c = BASIC_BLOCK (i);
4264 edge s;
4266 /* We care about simple conditional or unconditional jumps with
4267 a single successor.
4269 If we had a conditional branch to the next instruction when
4270 find_basic_blocks was called, then there will only be one
4271 out edge for the block which ended with the conditional
4272 branch (since we do not create duplicate edges).
4274 Furthermore, the edge will be marked as a fallthru because we
4275 merge the flags for the duplicate edges. So we do not want to
4276 check that the edge is not a FALLTHRU edge. */
4277 if ((s = b->succ) != NULL
4278 && ! (s->flags & EDGE_COMPLEX)
4279 && s->succ_next == NULL
4280 && s->dest == c
4281 /* If the jump insn has side effects, we can't tidy the edge. */
4282 && (GET_CODE (b->end) != JUMP_INSN
4283 || onlyjump_p (b->end)))
4284 tidy_fallthru_edge (s, b, c);
4288 /* Perform data flow analysis.
4289 F is the first insn of the function; FLAGS is a set of PROP_* flags
4290 to be used in accumulating flow info. */
4292 void
4293 life_analysis (f, file, flags)
4294 rtx f;
4295 FILE *file;
4296 int flags;
4298 #ifdef ELIMINABLE_REGS
4299 register int i;
4300 static struct {int from, to; } eliminables[] = ELIMINABLE_REGS;
4301 #endif
4303 /* Record which registers will be eliminated. We use this in
4304 mark_used_regs. */
4306 CLEAR_HARD_REG_SET (elim_reg_set);
4308 #ifdef ELIMINABLE_REGS
4309 for (i = 0; i < (int) ARRAY_SIZE (eliminables); i++)
4310 SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
4311 #else
4312 SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
4313 #endif
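/* Without optimization there is no point in building LOG_LINKS or
looking for autoinc opportunities, and life analysis must not
change the CFG.  */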
4315 if (! optimize)
4316 flags &= ~(PROP_LOG_LINKS | PROP_AUTOINC | PROP_ALLOW_CFG_CHANGES);
4318 /* The post-reload life analysis has (on a global basis) the same
4319 registers live as were computed by reload itself; otherwise
4320 elimination offsets and such may be incorrect.
4322 Reload will mark some registers as live even though they do not
4323 appear in the rtl.
4325 We don't want to create new auto-incs after reload, since they
4326 are unlikely to be useful and can cause problems with shared
4327 stack slots. */
4328 if (reload_completed)
4329 flags &= ~(PROP_REG_INFO | PROP_AUTOINC);
4331 /* We want alias analysis information for local dead store elimination. */
4332 if (optimize && (flags & PROP_SCAN_DEAD_CODE))
4333 init_alias_analysis ();
4335 /* Always remove no-op moves. Do this before other processing so
4336 that we don't have to keep re-scanning them. */
4337 delete_noop_moves (f);
4339 /* Some targets can emit simpler epilogues if they know that sp was
4340 not ever modified during the function. After reload, of course,
4341 we've already emitted the epilogue so there's no sense searching. */
4342 if (! reload_completed)
4343 notice_stack_pointer_modification (f);
4345 /* Allocate and zero out data structures that will record the
4346 data from lifetime analysis. */
4347 allocate_reg_life_data ();
4348 allocate_bb_life_data ();
4350 /* Find the set of registers live on function exit. */
4351 mark_regs_live_at_end (EXIT_BLOCK_PTR->global_live_at_start);
4353 /* "Update" life info from zero. It'd be nice to begin the
4354 relaxation with just the exit and noreturn blocks, but that set
4355 is not immediately handy. */
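/* PROP_REG_INFO does not properly refresh regs_ever_live unless it
starts from zero (see the comment above update_life_info), so clear
it before the global update.  */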
4357 if (flags & PROP_REG_INFO)
4358 memset (regs_ever_live, 0, sizeof (regs_ever_live));
4359 update_life_info (NULL, UPDATE_LIFE_GLOBAL, flags);
4361 /* Clean up. */
4362 if (optimize && (flags & PROP_SCAN_DEAD_CODE))
4363 end_alias_analysis ();
4365 if (file)
4366 dump_flow_info (file);
4368 free_basic_block_vars (1);
4370 #ifdef ENABLE_CHECKING
4372 rtx insn;
4374 /* Search for any REG_LABEL notes which reference deleted labels. */
4375 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4377 rtx inote = find_reg_note (insn, REG_LABEL, NULL_RTX);
4379 if (inote && GET_CODE (inote) == NOTE_INSN_DELETED_LABEL)
4380 abort ();
4383 #endif
4384 /* Removing dead insns should've made jumptables really dead. */
4385 delete_dead_jumptables ();
4388 /* A subroutine of verify_wide_reg, called through for_each_rtx.
4389 Search for REGNO. If found, abort if it is not wider than word_mode. */
4391 static int
4392 verify_wide_reg_1 (px, pregno)
4393 rtx *px;
4394 void *pregno;
4396 rtx x = *px;
4397 unsigned int regno = *(int *) pregno;
4399 if (GET_CODE (x) == REG && REGNO (x) == regno)
4401 if (GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD)
4402 abort ();
4403 return 1;
4405 return 0;
4408 /* A subroutine of verify_local_live_at_start. Search through insns
4409 between HEAD and END looking for register REGNO. */
4411 static void
4412 verify_wide_reg (regno, head, end)
4413 int regno;
4414 rtx head, end;
4416 while (1)
4418 if (INSN_P (head)
4419 && for_each_rtx (&PATTERN (head), verify_wide_reg_1, &regno))
4420 return;
4421 if (head == end)
4422 break;
4423 head = NEXT_INSN (head);
4426 /* We didn't find the register at all. Something's way screwy. */
4427 if (rtl_dump_file)
4428 fprintf (rtl_dump_file, "Aborting in verify_wide_reg; reg %d\n", regno);
4429 print_rtl_and_abort ();
4432 /* A subroutine of update_life_info. Verify that there are no untoward
4433 changes in live_at_start during a local update. */
4435 static void
4436 verify_local_live_at_start (new_live_at_start, bb)
4437 regset new_live_at_start;
4438 basic_block bb;
4440 if (reload_completed)
4442 /* After reload, there are no pseudos, nor subregs of multi-word
4443 registers. The regsets should exactly match. */
4444 if (! REG_SET_EQUAL_P (new_live_at_start, bb->global_live_at_start))
4446 if (rtl_dump_file)
4448 fprintf (rtl_dump_file,
4449 "live_at_start mismatch in bb %d, aborting\n",
4450 bb->index);
4451 debug_bitmap_file (rtl_dump_file, bb->global_live_at_start);
4452 debug_bitmap_file (rtl_dump_file, new_live_at_start);
4454 print_rtl_and_abort ();
4457 else
4459 int i;
4461 /* Find the set of changed registers. */
4462 XOR_REG_SET (new_live_at_start, bb->global_live_at_start);
4464 EXECUTE_IF_SET_IN_REG_SET (new_live_at_start, 0, i,
4466 /* No registers should die. */
4467 if (REGNO_REG_SET_P (bb->global_live_at_start, i))
4469 if (rtl_dump_file)
4470 fprintf (rtl_dump_file,
4471 "Register %d died unexpectedly in block %d\n", i,
4472 bb->index);
4473 print_rtl_and_abort ();
4476 /* Verify that the now-live register is wider than word_mode. */
4477 verify_wide_reg (i, bb->head, bb->end);
4482 /* Updates life information starting with the basic blocks set in BLOCKS.
4483 If BLOCKS is null, consider it to be the universal set.
4485 If EXTENT is UPDATE_LIFE_LOCAL, such as after splitting or peepholeing,
4486 we are only expecting local modifications to basic blocks. If we find
4487 extra registers live at the beginning of a block, then we either killed
4488 useful data, or we have a broken split that wants data not provided.
4489 If we find registers removed from live_at_start, that means we have
4490 a broken peephole that is killing a register it shouldn't.
4492 ??? This is not true in one situation -- when a pre-reload splitter
4493 generates subregs of a multi-word pseudo, current life analysis will
4494 lose the kill. So we _can_ have a pseudo go live. How irritating.
4496 Including PROP_REG_INFO does not properly refresh regs_ever_live
4497 unless the caller resets it to zero. */
4499 void
4500 update_life_info (blocks, extent, prop_flags)
4501 sbitmap blocks;
4502 enum update_life_extent extent;
4503 int prop_flags;
4505 regset tmp;
4506 regset_head tmp_head;
4507 int i;
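/* TMP holds a scratch copy of a block's live-at-end set while
propagate_block turns it into the live-at-start set.  */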
4509 tmp = INITIALIZE_REG_SET (tmp_head);
4511 /* Changes to the CFG are only allowed when
4512 doing a global update for the entire CFG. */
4513 if ((prop_flags & PROP_ALLOW_CFG_CHANGES)
4514 && (extent == UPDATE_LIFE_LOCAL || blocks))
4515 abort ();
4517 /* For a global update, we go through the relaxation process again. */
4518 if (extent != UPDATE_LIFE_LOCAL)
4520 for ( ; ; )
4522 int changed = 0;
4524 calculate_global_regs_live (blocks, blocks,
4525 prop_flags & (PROP_SCAN_DEAD_CODE
4526 | PROP_ALLOW_CFG_CHANGES));
4528 if ((prop_flags & (PROP_KILL_DEAD_CODE | PROP_ALLOW_CFG_CHANGES))
4529 != (PROP_KILL_DEAD_CODE | PROP_ALLOW_CFG_CHANGES))
4530 break;
4532 /* Removing dead code may allow the CFG to be simplified which
4533 in turn may allow for further dead code detection / removal. */
4534 for (i = n_basic_blocks - 1; i >= 0; --i)
4536 basic_block bb = BASIC_BLOCK (i);
4538 COPY_REG_SET (tmp, bb->global_live_at_end);
4539 changed |= propagate_block (bb, tmp, NULL, NULL,
4540 prop_flags & (PROP_SCAN_DEAD_CODE
4541 | PROP_KILL_DEAD_CODE));
4544 if (! changed || ! try_optimize_cfg (CLEANUP_EXPENSIVE))
4545 break;
4547 delete_unreachable_blocks ();
4548 mark_critical_edges ();
4551 /* If asked, remove notes from the blocks we'll update. */
4552 if (extent == UPDATE_LIFE_GLOBAL_RM_NOTES)
4553 count_or_remove_death_notes (blocks, 1);
4556 if (blocks)
4558 EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
4560 basic_block bb = BASIC_BLOCK (i);
4562 COPY_REG_SET (tmp, bb->global_live_at_end);
4563 propagate_block (bb, tmp, NULL, NULL, prop_flags);
4565 if (extent == UPDATE_LIFE_LOCAL)
4566 verify_local_live_at_start (tmp, bb);
4569 else
4571 for (i = n_basic_blocks - 1; i >= 0; --i)
4573 basic_block bb = BASIC_BLOCK (i);
4575 COPY_REG_SET (tmp, bb->global_live_at_end);
4576 propagate_block (bb, tmp, NULL, NULL, prop_flags);
4578 if (extent == UPDATE_LIFE_LOCAL)
4579 verify_local_live_at_start (tmp, bb);
4583 FREE_REG_SET (tmp);
4585 if (prop_flags & PROP_REG_INFO)
4587 /* The only pseudos that are live at the beginning of the function
4588 are those that were not set anywhere in the function. local-alloc
4589 doesn't know how to handle these correctly, so mark them as not
4590 local to any one basic block. */
4591 EXECUTE_IF_SET_IN_REG_SET (ENTRY_BLOCK_PTR->global_live_at_end,
4592 FIRST_PSEUDO_REGISTER, i,
4593 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
4595 /* We have a problem with any pseudoreg that lives across the setjmp.
4596 ANSI says that if a user variable does not change in value between
4597 the setjmp and the longjmp, then the longjmp preserves it. This
4598 includes longjmp from a place where the pseudo appears dead.
4599 (In principle, the value still exists if it is in scope.)
4600 If the pseudo goes in a hard reg, some other value may occupy
4601 that hard reg where this pseudo is dead, thus clobbering the pseudo.
4602 Conclusion: such a pseudo must not go in a hard reg. */
4603 EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
4604 FIRST_PSEUDO_REGISTER, i,
4606 if (regno_reg_rtx[i] != 0)
4608 REG_LIVE_LENGTH (i) = -1;
4609 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
4615 /* Free the variables allocated by find_basic_blocks.
4617 KEEP_HEAD_END_P is non-zero if basic_block_info is not to be freed. */
4619 void
4620 free_basic_block_vars (keep_head_end_p)
4621 int keep_head_end_p;
4623 if (basic_block_for_insn)
4625 VARRAY_FREE (basic_block_for_insn);
4626 basic_block_for_insn = NULL;
4629 if (! keep_head_end_p)
4631 if (basic_block_info)
4633 clear_edges ();
4634 VARRAY_FREE (basic_block_info);
4636 n_basic_blocks = 0;
4638 ENTRY_BLOCK_PTR->aux = NULL;
4639 ENTRY_BLOCK_PTR->global_live_at_end = NULL;
4640 EXIT_BLOCK_PTR->aux = NULL;
4641 EXIT_BLOCK_PTR->global_live_at_start = NULL;
4645 /* Delete any insns that copy a register to itself. */
4647 void
4648 delete_noop_moves (f)
4649 rtx f ATTRIBUTE_UNUSED;
4651 int i;
4652 rtx insn, next;
4653 basic_block bb;
4655 for (i = 0; i < n_basic_blocks; i++)
4657 bb = BASIC_BLOCK (i);
4658 for (insn = bb->head; insn != NEXT_INSN (bb->end); insn = next)
4660 next = NEXT_INSN (insn);
4661 if (INSN_P (insn) && noop_move_p (insn))
4663 /* Do not call flow_delete_insn here, so as not to confuse the
4664 backward pointers of a LIBCALL block. */
4665 PUT_CODE (insn, NOTE);
4666 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
4667 NOTE_SOURCE_FILE (insn) = 0;
4668 if (insn == bb->end)
4669 purge_dead_edges (bb);
4675 /* Delete any jump tables never referenced. We can't delete them at the
4676 time of removing the tablejump insn, as they are referenced by the
4677 preceding insns computing the destination, so we delay deleting and
4678 garbage-collect them once life information is computed. */
4679 static void
4680 delete_dead_jumptables ()
4682 rtx insn, next;
4683 for (insn = get_insns (); insn; insn = next)
4685 next = NEXT_INSN (insn);
4686 if (GET_CODE (insn) == CODE_LABEL
4687 && LABEL_NUSES (insn) == 0
4688 && GET_CODE (next) == JUMP_INSN
4689 && (GET_CODE (PATTERN (next)) == ADDR_VEC
4690 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
4692 if (rtl_dump_file)
4693 fprintf (rtl_dump_file, "Dead jumptable %i removed\n", INSN_UID (insn));
4694 flow_delete_insn (NEXT_INSN (insn));
4695 flow_delete_insn (insn);
4696 next = NEXT_INSN (next);
4701 /* Determine if the stack pointer is constant over the life of the function.
4702 Only useful before prologues have been emitted. */
4704 static void
4705 notice_stack_pointer_modification_1 (x, pat, data)
4706 rtx x;
4707 rtx pat ATTRIBUTE_UNUSED;
4708 void *data ATTRIBUTE_UNUSED;
4710 if (x == stack_pointer_rtx
4711 /* The stack pointer is only modified indirectly as the result
4712 of a push until later in flow. See the comments in rtl.texi
4713 regarding Embedded Side-Effects on Addresses. */
4714 || (GET_CODE (x) == MEM
4715 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == 'a'
4716 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx))
4717 current_function_sp_is_unchanging = 0;
4720 static void
4721 notice_stack_pointer_modification (f)
4722 rtx f;
4724 rtx insn;
4726 /* Assume that the stack pointer is unchanging if alloca hasn't
4727 been used. */
4728 current_function_sp_is_unchanging = !current_function_calls_alloca;
4729 if (! current_function_sp_is_unchanging)
4730 return;
4732 for (insn = f; insn; insn = NEXT_INSN (insn))
4734 if (INSN_P (insn))
4736 /* Check if insn modifies the stack pointer. */
4737 note_stores (PATTERN (insn), notice_stack_pointer_modification_1,
4738 NULL);
4739 if (! current_function_sp_is_unchanging)
4740 return;
4745 /* Mark a register in SET. Hard registers in large modes get all
4746 of their component registers set as well. */
4748 static void
4749 mark_reg (reg, xset)
4750 rtx reg;
4751 void *xset;
4753 regset set = (regset) xset;
4754 int regno = REGNO (reg);
4756 if (GET_MODE (reg) == BLKmode)
4757 abort ();
4759 SET_REGNO_REG_SET (set, regno);
4760 if (regno < FIRST_PSEUDO_REGISTER)
4762 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
4763 while (--n > 0)
4764 SET_REGNO_REG_SET (set, regno + n);
4768 /* Mark those regs which are needed at the end of the function as live
4769 at the end of the last basic block. */
4771 static void
4772 mark_regs_live_at_end (set)
4773 regset set;
4775 unsigned int i;
4777 /* If exiting needs the right stack value, consider the stack pointer
4778 live at the end of the function. */
4779 if ((HAVE_epilogue && reload_completed)
4780 || ! EXIT_IGNORE_STACK
4781 || (! FRAME_POINTER_REQUIRED
4782 && ! current_function_calls_alloca
4783 && flag_omit_frame_pointer)
4784 || current_function_sp_is_unchanging)
4786 SET_REGNO_REG_SET (set, STACK_POINTER_REGNUM);
4789 /* Mark the frame pointer if needed at the end of the function. If
4790 we end up eliminating it, it will be removed from the live list
4791 of each basic block by reload. */
4793 if (! reload_completed || frame_pointer_needed)
4795 SET_REGNO_REG_SET (set, FRAME_POINTER_REGNUM);
4796 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4797 /* If they are different, also mark the hard frame pointer as live. */
4798 if (! LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
4799 SET_REGNO_REG_SET (set, HARD_FRAME_POINTER_REGNUM);
4800 #endif
4803 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
4804 /* Many architectures have a GP register even without flag_pic.
4805 Assume the pic register is not in use, or will be handled by
4806 other means, if it is not fixed. */
4807 if (PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
4808 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
4809 SET_REGNO_REG_SET (set, PIC_OFFSET_TABLE_REGNUM);
4810 #endif
4812 /* Mark all global registers, and all registers used by the epilogue
4813 as being live at the end of the function since they may be
4814 referenced by our caller. */
4815 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4816 if (global_regs[i] || EPILOGUE_USES (i))
4817 SET_REGNO_REG_SET (set, i);
4819 if (HAVE_epilogue && reload_completed)
4821 /* Mark all call-saved registers that we actually used. */
4822 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4823 if (regs_ever_live[i] && ! LOCAL_REGNO (i)
4824 && ! TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
4825 SET_REGNO_REG_SET (set, i);
4828 #ifdef EH_RETURN_DATA_REGNO
4829 /* Mark the registers that will contain data for the handler. */
4830 if (reload_completed && current_function_calls_eh_return)
4831 for (i = 0; ; ++i)
4833 unsigned regno = EH_RETURN_DATA_REGNO(i);
4834 if (regno == INVALID_REGNUM)
4835 break;
4836 SET_REGNO_REG_SET (set, regno);
4838 #endif
4839 #ifdef EH_RETURN_STACKADJ_RTX
4840 if ((! HAVE_epilogue || ! reload_completed)
4841 && current_function_calls_eh_return)
4843 rtx tmp = EH_RETURN_STACKADJ_RTX;
4844 if (tmp && REG_P (tmp))
4845 mark_reg (tmp, set);
4847 #endif
4848 #ifdef EH_RETURN_HANDLER_RTX
4849 if ((! HAVE_epilogue || ! reload_completed)
4850 && current_function_calls_eh_return)
4852 rtx tmp = EH_RETURN_HANDLER_RTX;
4853 if (tmp && REG_P (tmp))
4854 mark_reg (tmp, set);
4856 #endif
4858 /* Mark function return value. */
4859 diddle_return_value (mark_reg, set);
4862 /* Callback function for for_each_successor_phi. DATA is a regset.
4863 Sets the SRC_REGNO, the regno of the phi alternative for phi node
4864 INSN, in the regset. */
4866 static int
4867 set_phi_alternative_reg (insn, dest_regno, src_regno, data)
4868 rtx insn ATTRIBUTE_UNUSED;
4869 int dest_regno ATTRIBUTE_UNUSED;
4870 int src_regno;
4871 void *data;
4873 regset live = (regset) data;
4874 SET_REGNO_REG_SET (live, src_regno);
4875 return 0;
4878 /* Propagate global life info around the graph of basic blocks. Begin
4879 considering blocks with their corresponding bit set in BLOCKS_IN.
4880 If BLOCKS_IN is null, consider it the universal set.
4882 BLOCKS_OUT is set for every block that was changed. */
4884 static void
4885 calculate_global_regs_live (blocks_in, blocks_out, flags)
4886 sbitmap blocks_in, blocks_out;
4887 int flags;
4889 basic_block *queue, *qhead, *qtail, *qend;
4890 regset tmp, new_live_at_end, call_used;
4891 regset_head tmp_head, call_used_head;
4892 regset_head new_live_at_end_head;
4893 int i;
4895 tmp = INITIALIZE_REG_SET (tmp_head);
4896 new_live_at_end = INITIALIZE_REG_SET (new_live_at_end_head);
4897 call_used = INITIALIZE_REG_SET (call_used_head);
4899 /* Inconveniently, this is only readily available in hard reg set form. */
4900 for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
4901 if (call_used_regs[i])
4902 SET_REGNO_REG_SET (call_used, i);
4904 /* Create a worklist. Allocate an extra slot for ENTRY_BLOCK, and one
4905 because the `head == tail' style test for an empty queue doesn't
4906 work with a full queue. */
4907 queue = (basic_block *) xmalloc ((n_basic_blocks + 2) * sizeof (*queue));
4908 qtail = queue;
4909 qhead = qend = queue + n_basic_blocks + 2;
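/* The queue is used circularly: blocks are consumed at QHEAD and
requeued predecessors are appended at QTAIL, both wrapping back to
the start of the buffer when they reach QEND.  */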
4911 /* Queue the blocks set in the initial mask. Do this in reverse block
4912 number order so that the first round is more likely to do
4913 useful work. We use AUX non-null to flag that the block is queued. */
4914 if (blocks_in)
4916 /* Clear out the garbage that might be hanging out in bb->aux. */
4917 for (i = n_basic_blocks - 1; i >= 0; --i)
4918 BASIC_BLOCK (i)->aux = NULL;
4920 EXECUTE_IF_SET_IN_SBITMAP (blocks_in, 0, i,
4922 basic_block bb = BASIC_BLOCK (i);
4923 *--qhead = bb;
4924 bb->aux = bb;
4927 else
4929 for (i = 0; i < n_basic_blocks; ++i)
4931 basic_block bb = BASIC_BLOCK (i);
4932 *--qhead = bb;
4933 bb->aux = bb;
4937 if (blocks_out)
4938 sbitmap_zero (blocks_out);
4940 /* We work through the queue until there are no more blocks. What
4941 is live at the end of this block is precisely the union of what
4942 is live at the beginning of all its successors. So, we set its
4943 GLOBAL_LIVE_AT_END field based on the GLOBAL_LIVE_AT_START field
4944 for its successors. Then, we compute GLOBAL_LIVE_AT_START for
4945 this block by walking through the instructions in this block in
4946 reverse order and updating as we go. If that changed
4947 GLOBAL_LIVE_AT_START, we add the predecessors of the block to the
4948 queue; they will now need to recalculate GLOBAL_LIVE_AT_END.
4950 We are guaranteed to terminate, because GLOBAL_LIVE_AT_START
4951 never shrinks. If a register appears in GLOBAL_LIVE_AT_START, it
4952 must either be live at the end of the block, or used within the
4953 block. In the latter case, it will certainly never disappear
4954 from GLOBAL_LIVE_AT_START. In the former case, the register
4955 could go away only if it disappeared from GLOBAL_LIVE_AT_START
4956 for one of the successor blocks. By induction, that cannot
4957 occur. */
4958 while (qhead != qtail)
4960 int rescan, changed;
4961 basic_block bb;
4962 edge e;
4964 bb = *qhead++;
4965 if (qhead == qend)
4966 qhead = queue;
4967 bb->aux = NULL;
4969 /* Begin by propagating live_at_start from the successor blocks. */
4970 CLEAR_REG_SET (new_live_at_end);
4971 for (e = bb->succ; e; e = e->succ_next)
4973 basic_block sb = e->dest;
4975 /* Call-clobbered registers die across exception and call edges. */
4976 /* ??? Abnormal call edges ignored for the moment, as this gets
4977 confused by sibling call edges, which crashes reg-stack. */
4978 if (e->flags & EDGE_EH)
4980 bitmap_operation (tmp, sb->global_live_at_start,
4981 call_used, BITMAP_AND_COMPL);
4982 IOR_REG_SET (new_live_at_end, tmp);
4984 else
4985 IOR_REG_SET (new_live_at_end, sb->global_live_at_start);
4988 /* The all-important stack pointer must always be live. */
4989 SET_REGNO_REG_SET (new_live_at_end, STACK_POINTER_REGNUM);
4991 /* Before reload, there are a few registers that must be forced
4992 live everywhere -- which might not already be the case for
4993 blocks within infinite loops. */
4994 if (! reload_completed)
4996 /* Any reference to any pseudo before reload is a potential
4997 reference of the frame pointer. */
4998 SET_REGNO_REG_SET (new_live_at_end, FRAME_POINTER_REGNUM);
5000 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
5001 /* Pseudos with argument area equivalences may require
5002 reloading via the argument pointer. */
5003 if (fixed_regs[ARG_POINTER_REGNUM])
5004 SET_REGNO_REG_SET (new_live_at_end, ARG_POINTER_REGNUM);
5005 #endif
5007 /* Any constant, or pseudo with constant equivalences, may
5008 require reloading from memory using the pic register. */
5009 if (PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
5010 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
5011 SET_REGNO_REG_SET (new_live_at_end, PIC_OFFSET_TABLE_REGNUM);
5014 /* Regs used in phi nodes are not included in
5015 global_live_at_start, since they are live only along a
5016 particular edge. Set those regs that are live because of a
5017 phi node alternative corresponding to this particular block. */
5018 if (in_ssa_form)
5019 for_each_successor_phi (bb, &set_phi_alternative_reg,
5020 new_live_at_end);
5022 if (bb == ENTRY_BLOCK_PTR)
5024 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
5025 continue;
5028 /* On our first pass through this block, we'll go ahead and continue.
5029 Recognize first pass by local_set NULL. On subsequent passes, we
5030 get to skip out early if live_at_end wouldn't have changed. */
5032 if (bb->local_set == NULL)
5034 bb->local_set = OBSTACK_ALLOC_REG_SET (&flow_obstack);
5035 bb->cond_local_set = OBSTACK_ALLOC_REG_SET (&flow_obstack);
5036 rescan = 1;
5038 else
5040 /* If any bits were removed from live_at_end, we'll have to
5041 rescan the block. This wouldn't be necessary if we had
5042 precalculated local_live, however with PROP_SCAN_DEAD_CODE
5043 local_live is really dependent on live_at_end. */
5044 CLEAR_REG_SET (tmp);
5045 rescan = bitmap_operation (tmp, bb->global_live_at_end,
5046 new_live_at_end, BITMAP_AND_COMPL);
5048 if (! rescan)
5050 /* If any of the registers in the new live_at_end set are
5051 conditionally set in this basic block, we must rescan.
5052 This is because conditional lifetimes at the end of the
5053 block do not just take the live_at_end set into account,
5054 but also the liveness at the start of each successor
5055 block. We can miss changes in those sets if we only
5056 compare the new live_at_end against the previous one. */
5057 CLEAR_REG_SET (tmp);
5058 rescan = bitmap_operation (tmp, new_live_at_end,
5059 bb->cond_local_set, BITMAP_AND);
5062 if (! rescan)
5064 /* Find the set of changed bits. Take this opportunity
5065 to notice that this set is empty and early out. */
5066 CLEAR_REG_SET (tmp);
5067 changed = bitmap_operation (tmp, bb->global_live_at_end,
5068 new_live_at_end, BITMAP_XOR);
5069 if (! changed)
5070 continue;
5072 /* If any of the changed bits overlap with local_set,
5073 we'll have to rescan the block. Detect overlap by
5074 the AND with ~local_set turning off bits. */
5075 rescan = bitmap_operation (tmp, tmp, bb->local_set,
5076 BITMAP_AND_COMPL);
5080 /* Let our caller know that BB changed enough to require its
5081 death notes updated. */
5082 if (blocks_out)
5083 SET_BIT (blocks_out, bb->index);
5085 if (! rescan)
5087 /* Add to live_at_start the set of all registers in
5088 new_live_at_end that aren't in the old live_at_end. */
5090 bitmap_operation (tmp, new_live_at_end, bb->global_live_at_end,
5091 BITMAP_AND_COMPL);
5092 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
5094 changed = bitmap_operation (bb->global_live_at_start,
5095 bb->global_live_at_start,
5096 tmp, BITMAP_IOR);
5097 if (! changed)
5098 continue;
5100 else
5102 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
5104 /* Rescan the block insn by insn to turn (a copy of) live_at_end
5105 into live_at_start. */
5106 propagate_block (bb, new_live_at_end, bb->local_set,
5107 bb->cond_local_set, flags);
5109 /* If live_at_start didn't change, there is no need to go further. */
5110 if (REG_SET_EQUAL_P (bb->global_live_at_start, new_live_at_end))
5111 continue;
5113 COPY_REG_SET (bb->global_live_at_start, new_live_at_end);
5116 /* Queue all predecessors of BB so that we may re-examine
5117 their live_at_end. */
5118 for (e = bb->pred; e; e = e->pred_next)
5120 basic_block pb = e->src;
5121 if (pb->aux == NULL)
5123 *qtail++ = pb;
5124 if (qtail == qend)
5125 qtail = queue;
5126 pb->aux = pb;
5131 FREE_REG_SET (tmp);
5132 FREE_REG_SET (new_live_at_end);
5133 FREE_REG_SET (call_used);
5135 if (blocks_out)
5137 EXECUTE_IF_SET_IN_SBITMAP (blocks_out, 0, i,
5139 basic_block bb = BASIC_BLOCK (i);
5140 FREE_REG_SET (bb->local_set);
5141 FREE_REG_SET (bb->cond_local_set);
5144 else
5146 for (i = n_basic_blocks - 1; i >= 0; --i)
5148 basic_block bb = BASIC_BLOCK (i);
5149 FREE_REG_SET (bb->local_set);
5150 FREE_REG_SET (bb->cond_local_set);
5154 free (queue);
5157 /* Subroutines of life analysis. */
5159 /* Allocate the permanent data structures that represent the results
5160 of life analysis. Not static since used also for stupid life analysis. */
5162 void
5163 allocate_bb_life_data ()
5165 register int i;
5167 for (i = 0; i < n_basic_blocks; i++)
5169 basic_block bb = BASIC_BLOCK (i);
5171 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
5172 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
5175 ENTRY_BLOCK_PTR->global_live_at_end
5176 = OBSTACK_ALLOC_REG_SET (&flow_obstack);
5177 EXIT_BLOCK_PTR->global_live_at_start
5178 = OBSTACK_ALLOC_REG_SET (&flow_obstack);
5180 regs_live_at_setjmp = OBSTACK_ALLOC_REG_SET (&flow_obstack);
5183 void
5184 allocate_reg_life_data ()
5186 int i;
5188 max_regno = max_reg_num ();
5190 /* Recalculate the register space, in case it has grown. Old style
5191 vector oriented regsets would set regset_{size,bytes} here also. */
5192 allocate_reg_info (max_regno, FALSE, FALSE);
5194 /* Reset all the data we'll collect in propagate_block and its
5195 subroutines. */
5196 for (i = 0; i < max_regno; i++)
5198 REG_N_SETS (i) = 0;
5199 REG_N_REFS (i) = 0;
5200 REG_N_DEATHS (i) = 0;
5201 REG_N_CALLS_CROSSED (i) = 0;
5202 REG_LIVE_LENGTH (i) = 0;
5203 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
5207 /* Delete dead instructions for propagate_block. */
5209 static void
5210 propagate_block_delete_insn (bb, insn)
5211 basic_block bb;
5212 rtx insn;
5214 rtx inote = find_reg_note (insn, REG_LABEL, NULL_RTX);
5216 /* If the insn referred to a label, and that label was attached to
5217 an ADDR_VEC, it's safe to delete the ADDR_VEC. In fact, it's
5218 pretty much mandatory to delete it, because the ADDR_VEC may be
5219 referencing labels that no longer exist.
5221 INSN may reference a deleted label, particularly when a jump
5222 table has been optimized into a direct jump. There's no
5223 real good way to fix up the reference to the deleted label
5224 when the label is deleted, so we just allow it here.
5226 After dead code elimination is complete, we do search for
5227 any REG_LABEL notes which reference deleted labels as a
5228 sanity check. */
5230 if (inote && GET_CODE (inote) == CODE_LABEL)
5232 rtx label = XEXP (inote, 0);
5233 rtx next;
5235 /* The label may be forced if it has been put in the constant
5236 pool. If that is the only use we must discard the table
5237 jump following it, but not the label itself. */
5238 if (LABEL_NUSES (label) == 1 + LABEL_PRESERVE_P (label)
5239 && (next = next_nonnote_insn (label)) != NULL
5240 && GET_CODE (next) == JUMP_INSN
5241 && (GET_CODE (PATTERN (next)) == ADDR_VEC
5242 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
5244 rtx pat = PATTERN (next);
5245 int diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
5246 int len = XVECLEN (pat, diff_vec_p);
5247 int i;
5249 for (i = 0; i < len; i++)
5250 LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0))--;
5252 flow_delete_insn (next);
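/* If INSN ended its block, move the block end back to the previous
insn and remove any outgoing edges the deleted jump justified.  */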
5256 if (bb->end == insn)
5258 bb->end = PREV_INSN (insn);
5259 purge_dead_edges (bb);
5261 flow_delete_insn (insn);
5264 /* Delete dead libcalls for propagate_block. Return the insn
5265 before the libcall. */
5267 static rtx
5268 propagate_block_delete_libcall (bb, insn, note)
5269 basic_block bb;
5270 rtx insn, note;
5272 rtx first = XEXP (note, 0);
5273 rtx before = PREV_INSN (first);
5275 if (insn == bb->end)
5276 bb->end = before;
5278 flow_delete_insn_chain (first, insn);
5279 return before;
5282 /* Update the life-status of regs for one insn. Return the previous insn. */
5285 propagate_one_insn (pbi, insn)
5286 struct propagate_block_info *pbi;
5287 rtx insn;
5289 rtx prev = PREV_INSN (insn);
5290 int flags = pbi->flags;
5291 int insn_is_dead = 0;
5292 int libcall_is_dead = 0;
5293 rtx note;
5294 int i;
5296 if (! INSN_P (insn))
5297 return prev;
5299 note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
5300 if (flags & PROP_SCAN_DEAD_CODE)
5302 insn_is_dead = insn_dead_p (pbi, PATTERN (insn), 0, REG_NOTES (insn));
5303 libcall_is_dead = (insn_is_dead && note != 0
5304 && libcall_dead_p (pbi, note, insn));
5307 /* If an instruction consists of just dead store(s) on final pass,
5308 delete it. */
5309 if ((flags & PROP_KILL_DEAD_CODE) && insn_is_dead)
5311 /* If we're trying to delete a prologue or epilogue instruction
5312 that isn't flagged as possibly being dead, something is wrong.
5313 But if we are keeping the stack pointer depressed, we might well
5314 be deleting insns that are used to compute the amount to update
5315 it by, so they are fine. */
5316 if (reload_completed
5317 && !(TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
5318 && (TYPE_RETURNS_STACK_DEPRESSED
5319 (TREE_TYPE (current_function_decl))))
5320 && (((HAVE_epilogue || HAVE_prologue)
5321 && prologue_epilogue_contains (insn))
5322 || (HAVE_sibcall_epilogue
5323 && sibcall_epilogue_contains (insn)))
5324 && find_reg_note (insn, REG_MAYBE_DEAD, NULL_RTX) == 0)
5325 abort ();
5327 /* Record sets. Do this even for dead instructions, since they
5328 would have killed the values if they hadn't been deleted. */
5329 mark_set_regs (pbi, PATTERN (insn), insn);
5331 /* CC0 is now known to be dead. Either this insn used it,
5332 in which case it doesn't anymore, or clobbered it,
5333 so the next insn can't use it. */
5334 pbi->cc0_live = 0;
5336 if (libcall_is_dead)
5337 prev = propagate_block_delete_libcall (pbi->bb, insn, note);
5338 else
5339 propagate_block_delete_insn (pbi->bb, insn);
5341 return prev;
5344 /* See if this is an increment or decrement that can be merged into
5345 a following memory address. */
5346 #ifdef AUTO_INC_DEC
5348 register rtx x = single_set (insn);
5350 /* Does this instruction increment or decrement a register? */
5351 if ((flags & PROP_AUTOINC)
5352 && x != 0
5353 && GET_CODE (SET_DEST (x)) == REG
5354 && (GET_CODE (SET_SRC (x)) == PLUS
5355 || GET_CODE (SET_SRC (x)) == MINUS)
5356 && XEXP (SET_SRC (x), 0) == SET_DEST (x)
5357 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
5358 /* Ok, look for a following memory ref we can combine with.
5359 If one is found, change the memory ref to a PRE_INC
5360 or PRE_DEC, cancel this insn, and return 1.
5361 Return 0 if nothing has been done. */
5362 && try_pre_increment_1 (pbi, insn))
5363 return prev;
5365 #endif /* AUTO_INC_DEC */
5367 CLEAR_REG_SET (pbi->new_set);
5369 /* If this is not the final pass, and this insn is copying the value of
5370 a library call and it's dead, don't scan the insns that perform the
5371 library call, so that the call's arguments are not marked live. */
5372 if (libcall_is_dead)
5374 /* Record the death of the dest reg. */
5375 mark_set_regs (pbi, PATTERN (insn), insn);
5377 insn = XEXP (note, 0);
5378 return PREV_INSN (insn);
5380 else if (GET_CODE (PATTERN (insn)) == SET
5381 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
5382 && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
5383 && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx
5384 && GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == CONST_INT)
5385 /* We have an insn to pop a constant amount off the stack.
5386 (Such insns use PLUS regardless of the direction of the stack,
5387 and any insn to adjust the stack by a constant is always a pop.)
5388 These insns, if not dead stores, have no effect on life. */
5390 else
5392 /* Any regs live at the time of a call instruction must not go
5393 in a register clobbered by calls. Find all regs now live and
5394 record this for them. */
5396 if (GET_CODE (insn) == CALL_INSN && (flags & PROP_REG_INFO))
5397 EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i,
5398 { REG_N_CALLS_CROSSED (i)++; });
5400 /* Record sets. Do this even for dead instructions, since they
5401 would have killed the values if they hadn't been deleted. */
5402 mark_set_regs (pbi, PATTERN (insn), insn);
5404 if (GET_CODE (insn) == CALL_INSN)
5406 register int i;
5407 rtx note, cond;
5409 cond = NULL_RTX;
5410 if (GET_CODE (PATTERN (insn)) == COND_EXEC)
5411 cond = COND_EXEC_TEST (PATTERN (insn));
5413 /* Non-constant calls clobber memory. */
5414 if (! CONST_OR_PURE_CALL_P (insn))
5416 free_EXPR_LIST_list (&pbi->mem_set_list);
5417 pbi->mem_set_list_len = 0;
5420 /* There may be extra registers to be clobbered. */
5421 for (note = CALL_INSN_FUNCTION_USAGE (insn);
5422 note;
5423 note = XEXP (note, 1))
5424 if (GET_CODE (XEXP (note, 0)) == CLOBBER)
5425 mark_set_1 (pbi, CLOBBER, XEXP (XEXP (note, 0), 0),
5426 cond, insn, pbi->flags);
5428 /* Calls change all call-used and global registers. */
5429 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5430 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
5432 /* We do not want REG_UNUSED notes for these registers. */
5433 mark_set_1 (pbi, CLOBBER, gen_rtx_REG (reg_raw_mode[i], i),
5434 cond, insn,
5435 pbi->flags & ~(PROP_DEATH_NOTES | PROP_REG_INFO));
5439 /* If an insn doesn't use CC0, it becomes dead since we assume
5440 that every insn clobbers it. So show it dead here;
5441 mark_used_regs will set it live if it is referenced. */
5442 pbi->cc0_live = 0;
5444 /* Record uses. */
5445 if (! insn_is_dead)
5446 mark_used_regs (pbi, PATTERN (insn), NULL_RTX, insn);
5448 /* Sometimes we may have inserted something before INSN (such as a move)
5449 when we make an auto-inc. So ensure we will scan those insns. */
5450 #ifdef AUTO_INC_DEC
5451 prev = PREV_INSN (insn);
5452 #endif
5454 if (! insn_is_dead && GET_CODE (insn) == CALL_INSN)
5456 register int i;
5457 rtx note, cond;
5459 cond = NULL_RTX;
5460 if (GET_CODE (PATTERN (insn)) == COND_EXEC)
5461 cond = COND_EXEC_TEST (PATTERN (insn));
5463 /* Calls use their arguments. */
5464 for (note = CALL_INSN_FUNCTION_USAGE (insn);
5465 note;
5466 note = XEXP (note, 1))
5467 if (GET_CODE (XEXP (note, 0)) == USE)
5468 mark_used_regs (pbi, XEXP (XEXP (note, 0), 0),
5469 cond, insn);
5471 /* The stack ptr is used (honorarily) by a CALL insn. */
5472 SET_REGNO_REG_SET (pbi->reg_live, STACK_POINTER_REGNUM);
5474 /* Calls may also reference any of the global registers,
5475 so they are made live. */
5476 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5477 if (global_regs[i])
5478 mark_used_reg (pbi, gen_rtx_REG (reg_raw_mode[i], i),
5479 cond, insn);
5483 /* On final pass, update counts of how many insns in which each reg
5484 is live. */
5485 if (flags & PROP_REG_INFO)
5486 EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i,
5487 { REG_LIVE_LENGTH (i)++; });
5489 return prev;
5492 /* Initialize a propagate_block_info struct for public consumption.
5493 Note that the structure itself is opaque to this file, but that
5494 the user can use the regsets provided here. */
5496 struct propagate_block_info *
5497 init_propagate_block_info (bb, live, local_set, cond_local_set, flags)
5498 basic_block bb;
5499 regset live, local_set, cond_local_set;
5500 int flags;
5502 struct propagate_block_info *pbi = xmalloc (sizeof (*pbi));
5504 pbi->bb = bb;
5505 pbi->reg_live = live;
5506 pbi->mem_set_list = NULL_RTX;
5507 pbi->mem_set_list_len = 0;
5508 pbi->local_set = local_set;
5509 pbi->cond_local_set = cond_local_set;
5510 pbi->cc0_live = 0;
5511 pbi->flags = flags;
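/* The next-use chain is needed only when building LOG_LINKS or
searching for autoinc addressing opportunities.  */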
5513 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
5514 pbi->reg_next_use = (rtx *) xcalloc (max_reg_num (), sizeof (rtx));
5515 else
5516 pbi->reg_next_use = NULL;
5518 pbi->new_set = BITMAP_XMALLOC ();
5520 #ifdef HAVE_conditional_execution
5521 pbi->reg_cond_dead = splay_tree_new (splay_tree_compare_ints, NULL,
5522 free_reg_cond_life_info);
5523 pbi->reg_cond_reg = BITMAP_XMALLOC ();
5525 /* If this block ends in a conditional branch, for each register live
5526 from one side of the branch and not the other, record the register
5527 as conditionally dead. */
5528 if (GET_CODE (bb->end) == JUMP_INSN
5529 && any_condjump_p (bb->end))
5531 regset_head diff_head;
5532 regset diff = INITIALIZE_REG_SET (diff_head);
5533 basic_block bb_true, bb_false;
5534 rtx cond_true, cond_false, set_src;
5535 int i;
5537 /* Identify the successor blocks. */
5538 bb_true = bb->succ->dest;
5539 if (bb->succ->succ_next != NULL)
5541 bb_false = bb->succ->succ_next->dest;
5543 if (bb->succ->flags & EDGE_FALLTHRU)
5545 basic_block t = bb_false;
5546 bb_false = bb_true;
5547 bb_true = t;
5549 else if (! (bb->succ->succ_next->flags & EDGE_FALLTHRU))
5550 abort ();
5552 else
5554 /* This can happen with a conditional jump to the next insn. */
5555 if (JUMP_LABEL (bb->end) != bb_true->head)
5556 abort ();
5558 /* Simplest way to do nothing. */
5559 bb_false = bb_true;
5562 /* Extract the condition from the branch. */
5563 set_src = SET_SRC (pc_set (bb->end));
5564 cond_true = XEXP (set_src, 0);
5565 cond_false = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond_true)),
5566 GET_MODE (cond_true), XEXP (cond_true, 0),
5567 XEXP (cond_true, 1));
5568 if (GET_CODE (XEXP (set_src, 1)) == PC)
5570 rtx t = cond_false;
5571 cond_false = cond_true;
5572 cond_true = t;
5575 /* Compute which registers lead different lives in the successors. */
5576 if (bitmap_operation (diff, bb_true->global_live_at_start,
5577 bb_false->global_live_at_start, BITMAP_XOR))
5579 rtx reg = XEXP (cond_true, 0);
5581 if (GET_CODE (reg) == SUBREG)
5582 reg = SUBREG_REG (reg);
5584 if (GET_CODE (reg) != REG)
5585 abort ();
5587 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (reg));
5589 /* For each such register, mark it conditionally dead. */
5590 EXECUTE_IF_SET_IN_REG_SET
5591 (diff, 0, i,
5593 struct reg_cond_life_info *rcli;
5594 rtx cond;
5596 rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
5598 if (REGNO_REG_SET_P (bb_true->global_live_at_start, i))
5599 cond = cond_false;
5600 else
5601 cond = cond_true;
5602 rcli->condition = cond;
5603 rcli->stores = const0_rtx;
5604 rcli->orig_condition = cond;
5606 splay_tree_insert (pbi->reg_cond_dead, i,
5607 (splay_tree_value) rcli);
5611 FREE_REG_SET (diff);
5613 #endif
5615 /* If this block has no successors, any stores to the frame that aren't
5616 used later in the block are dead. So make a pass over the block
5617 recording any such that are made and show them dead at the end. We do
5618 a very conservative and simple job here. */
5619 if (optimize
5620 && ! (TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
5621 && (TYPE_RETURNS_STACK_DEPRESSED
5622 (TREE_TYPE (current_function_decl))))
5623 && (flags & PROP_SCAN_DEAD_CODE)
5624 && (bb->succ == NULL
5625 || (bb->succ->succ_next == NULL
5626 && bb->succ->dest == EXIT_BLOCK_PTR
5627 && ! current_function_calls_eh_return)))
5629 rtx insn, set;
5630 for (insn = bb->end; insn != bb->head; insn = PREV_INSN (insn))
5631 if (GET_CODE (insn) == INSN
5632 && (set = single_set (insn))
5633 && GET_CODE (SET_DEST (set)) == MEM)
5635 rtx mem = SET_DEST (set);
5636 rtx canon_mem = canon_rtx (mem);
5638 /* This optimization is performed by faking a store to the
5639 memory at the end of the block. This doesn't work for
5640 unchanging memories because multiple stores to unchanging
5641 memory are illegal and alias analysis doesn't consider it. */
5642 if (RTX_UNCHANGING_P (canon_mem))
5643 continue;
5645 if (XEXP (canon_mem, 0) == frame_pointer_rtx
5646 || (GET_CODE (XEXP (canon_mem, 0)) == PLUS
5647 && XEXP (XEXP (canon_mem, 0), 0) == frame_pointer_rtx
5648 && GET_CODE (XEXP (XEXP (canon_mem, 0), 1)) == CONST_INT))
5649 add_to_mem_set_list (pbi, canon_mem);
5653 return pbi;
5656 /* Release a propagate_block_info struct. */
5658 void
5659 free_propagate_block_info (pbi)
5660 struct propagate_block_info *pbi;
5662 free_EXPR_LIST_list (&pbi->mem_set_list);
5664 BITMAP_XFREE (pbi->new_set);
5666 #ifdef HAVE_conditional_execution
5667 splay_tree_delete (pbi->reg_cond_dead);
5668 BITMAP_XFREE (pbi->reg_cond_reg);
5669 #endif
5671 if (pbi->reg_next_use)
5672 free (pbi->reg_next_use);
5674 free (pbi);
5677 /* Compute the registers live at the beginning of a basic block BB from
5678 those live at the end.
5680 When called, REG_LIVE contains those live at the end. On return, it
5681 contains those live at the beginning.
5683 LOCAL_SET, if non-null, will be set with all registers killed
5684 unconditionally by this basic block.
5685 Likewise, COND_LOCAL_SET, if non-null, will be set with all registers
5686 killed conditionally by this basic block. If there is any unconditional
5687 set of a register, then the corresponding bit will be set in LOCAL_SET
5688 and cleared in COND_LOCAL_SET.
5689 It is valid for LOCAL_SET and COND_LOCAL_SET to be the same set. In this
5690 case, the resulting set will be equal to the union of the two sets that
5691 would otherwise be computed.
5693 Return non-zero if an INSN is deleted (i.e. by dead code removal). */
5696 propagate_block (bb, live, local_set, cond_local_set, flags)
5697 basic_block bb;
5698 regset live;
5699 regset local_set;
5700 regset cond_local_set;
5701 int flags;
5703 struct propagate_block_info *pbi;
5704 rtx insn, prev;
5705 int changed;
5707 pbi = init_propagate_block_info (bb, live, local_set, cond_local_set, flags);
5709 if (flags & PROP_REG_INFO)
5711 register int i;
5713 /* Process the regs live at the end of the block.
5714 Mark them as not local to any one basic block. */
5715 EXECUTE_IF_SET_IN_REG_SET (live, 0, i,
5716 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
5719 /* Scan the block an insn at a time from end to beginning. */
5721 changed = 0;
5722 for (insn = bb->end;; insn = prev)
5724 /* If this is a call to `setjmp' et al, warn if any
5725 non-volatile datum is live. */
5726 if ((flags & PROP_REG_INFO)
5727 && GET_CODE (insn) == CALL_INSN
5728 && find_reg_note (insn, REG_SETJMP, NULL))
5729 IOR_REG_SET (regs_live_at_setjmp, pbi->reg_live);
5731 prev = propagate_one_insn (pbi, insn);
5732 changed |= NEXT_INSN (prev) != insn;
5734 if (insn == bb->head)
5735 break;
5738 free_propagate_block_info (pbi);
5740 return changed;
5743 /* Return 1 if X (the body of an insn, or part of it) is just dead stores
5744 (SET expressions whose destinations are registers dead after the insn).
5745 NEEDED is the regset that says which regs are alive after the insn.
5747 Unless CALL_OK is non-zero, an insn is needed if it contains a CALL.
5749 If X is the entire body of an insn, NOTES contains the reg notes
5750 pertaining to the insn. */
5752 static int
5753 insn_dead_p (pbi, x, call_ok, notes)
5754 struct propagate_block_info *pbi;
5755 rtx x;
5756 int call_ok;
5757 rtx notes ATTRIBUTE_UNUSED;
5759 enum rtx_code code = GET_CODE (x);
5761 #ifdef AUTO_INC_DEC
5762 /* If flow is invoked after reload, we must take existing AUTO_INC
5763 expressions into account. */
5764 if (reload_completed)
5766 for (; notes; notes = XEXP (notes, 1))
5768 if (REG_NOTE_KIND (notes) == REG_INC)
5770 int regno = REGNO (XEXP (notes, 0));
5772 /* Don't delete insns to set global regs. */
5773 if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
5774 || REGNO_REG_SET_P (pbi->reg_live, regno))
5775 return 0;
5779 #endif
5781 /* If setting something that's a reg or part of one,
5782 see if that register's altered value will be live. */
5784 if (code == SET)
5786 rtx r = SET_DEST (x);
5788 #ifdef HAVE_cc0
5789 if (GET_CODE (r) == CC0)
5790 return ! pbi->cc0_live;
5791 #endif
5793 /* A SET that is a subroutine call cannot be dead. */
5794 if (GET_CODE (SET_SRC (x)) == CALL)
5796 if (! call_ok)
5797 return 0;
5800 /* Don't eliminate loads from volatile memory or volatile asms. */
5801 else if (volatile_refs_p (SET_SRC (x)))
5802 return 0;
5804 if (GET_CODE (r) == MEM)
5806 rtx temp, canon_r;
5808 if (MEM_VOLATILE_P (r) || GET_MODE (r) == BLKmode)
5809 return 0;
5811 canon_r = canon_rtx (r);
5813 /* Walk the set of memory locations we are currently tracking
5814 and see if one is an identical match to this memory location.
5815 If so, this memory write is dead (remember, we're walking
5816 backwards from the end of the block to the start). Since
5817 rtx_equal_p does not check the alias set or flags, we also
5818 must have the potential for them to conflict (anti_dependence). */
5819 for (temp = pbi->mem_set_list; temp != 0; temp = XEXP (temp, 1))
5820 if (anti_dependence (r, XEXP (temp, 0)))
5822 rtx mem = XEXP (temp, 0);
5824 if (rtx_equal_p (XEXP (canon_r, 0), XEXP (mem, 0))
5825 && (GET_MODE_SIZE (GET_MODE (canon_r))
5826 <= GET_MODE_SIZE (GET_MODE (mem))))
5827 return 1;
5829 #ifdef AUTO_INC_DEC
5830 /* Check if the memory reference matches an auto-increment. Only
5831 post-increment/decrement or post-modify are valid. */
5832 if (GET_MODE (mem) == GET_MODE (r)
5833 && (GET_CODE (XEXP (mem, 0)) == POST_DEC
5834 || GET_CODE (XEXP (mem, 0)) == POST_INC
5835 || GET_CODE (XEXP (mem, 0)) == POST_MODIFY)
5836 && GET_MODE (XEXP (mem, 0)) == GET_MODE (r)
5837 && rtx_equal_p (XEXP (XEXP (mem, 0), 0), XEXP (r, 0)))
5838 return 1;
5839 #endif
5842 else
5844 while (GET_CODE (r) == SUBREG
5845 || GET_CODE (r) == STRICT_LOW_PART
5846 || GET_CODE (r) == ZERO_EXTRACT)
5847 r = XEXP (r, 0);
5849 if (GET_CODE (r) == REG)
5851 int regno = REGNO (r);
5853 /* Obvious. */
5854 if (REGNO_REG_SET_P (pbi->reg_live, regno))
5855 return 0;
5857 /* If this is a hard register, verify that subsequent
5858 words are not needed. */
5859 if (regno < FIRST_PSEUDO_REGISTER)
5861 int n = HARD_REGNO_NREGS (regno, GET_MODE (r));
5863 while (--n > 0)
5864 if (REGNO_REG_SET_P (pbi->reg_live, regno+n))
5865 return 0;
5868 /* Don't delete insns to set global regs. */
5869 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
5870 return 0;
5872 /* Make sure insns to set the stack pointer aren't deleted. */
5873 if (regno == STACK_POINTER_REGNUM)
5874 return 0;
5876 /* ??? These bits might be redundant with the force live bits
5877 in calculate_global_regs_live. We would delete from
5878 sequential sets; whether this actually affects real code
5879 for anything but the stack pointer I don't know. */
5880 /* Make sure insns to set the frame pointer aren't deleted. */
5881 if (regno == FRAME_POINTER_REGNUM
5882 && (! reload_completed || frame_pointer_needed))
5883 return 0;
5884 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
5885 if (regno == HARD_FRAME_POINTER_REGNUM
5886 && (! reload_completed || frame_pointer_needed))
5887 return 0;
5888 #endif
5890 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
5891 /* Make sure insns to set arg pointer are never deleted
5892 (if the arg pointer isn't fixed, there will be a USE
5893 for it, so we can treat it normally). */
5894 if (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
5895 return 0;
5896 #endif
5898 /* Otherwise, the set is dead. */
5899 return 1;
5904 /* If performing several activities, insn is dead if each activity
5905 is individually dead. Also, CLOBBERs and USEs can be ignored; a
5906 CLOBBER or USE that's inside a PARALLEL doesn't make the insn
5907 worth keeping. */
5908 else if (code == PARALLEL)
5910 int i = XVECLEN (x, 0);
5912 for (i--; i >= 0; i--)
5913 if (GET_CODE (XVECEXP (x, 0, i)) != CLOBBER
5914 && GET_CODE (XVECEXP (x, 0, i)) != USE
5915 && ! insn_dead_p (pbi, XVECEXP (x, 0, i), call_ok, NULL_RTX))
5916 return 0;
5918 return 1;
5921 /* A CLOBBER of a pseudo-register that is dead serves no purpose. That
5922 is not necessarily true for hard registers. */
5923 else if (code == CLOBBER && GET_CODE (XEXP (x, 0)) == REG
5924 && REGNO (XEXP (x, 0)) >= FIRST_PSEUDO_REGISTER
5925 && ! REGNO_REG_SET_P (pbi->reg_live, REGNO (XEXP (x, 0))))
5926 return 1;
5928 /* We do not check other CLOBBER or USE here. An insn consisting of just
5929 a CLOBBER or just a USE should not be deleted. */
5930 return 0;
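/* Illustrative example (the pseudo numbers are made up): given the body

     (set (reg:SI 104) (plus:SI (reg:SI 105) (const_int 4)))

   if pseudo 104's bit is clear in PBI->reg_live at this point of the
   backwards scan, and none of the exceptions above applies (global
   register, stack or frame pointer, volatile SET_SRC, subroutine call),
   insn_dead_p returns 1 and the caller may remove the insn as a dead
   store.  */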
5933 /* If INSN is the last insn in a libcall, and assuming INSN is dead,
5934 return 1 if the entire library call is dead.
5935 This is true if INSN copies a register (hard or pseudo)
5936 and if the hard return reg of the call insn is dead.
5937 (The caller should have tested the destination of the SET inside
5938 INSN already for death.)
5940 If this insn doesn't just copy a register, then we don't
5941 have an ordinary libcall. In that case, cse could not have
5942 managed to substitute the source for the dest later on,
5943 so we can assume the libcall is dead.
5945 PBI is the block info giving pseudoregs live before this insn.
5946 NOTE is the REG_RETVAL note of the insn. */
5948 static int
5949 libcall_dead_p (pbi, note, insn)
5950 struct propagate_block_info *pbi;
5951 rtx note;
5952 rtx insn;
5954 rtx x = single_set (insn);
5956 if (x)
5958 register rtx r = SET_SRC (x);
5960 if (GET_CODE (r) == REG)
5962 rtx call = XEXP (note, 0);
5963 rtx call_pat;
5964 register int i;
5966 /* Find the call insn. */
5967 while (call != insn && GET_CODE (call) != CALL_INSN)
5968 call = NEXT_INSN (call);
5970 /* If there is none, do nothing special,
5971 since ordinary death handling can understand these insns. */
5972 if (call == insn)
5973 return 0;
5975 /* See if the hard reg holding the value is dead.
5976 If this is a PARALLEL, find the call within it. */
5977 call_pat = PATTERN (call);
5978 if (GET_CODE (call_pat) == PARALLEL)
5980 for (i = XVECLEN (call_pat, 0) - 1; i >= 0; i--)
5981 if (GET_CODE (XVECEXP (call_pat, 0, i)) == SET
5982 && GET_CODE (SET_SRC (XVECEXP (call_pat, 0, i))) == CALL)
5983 break;
5985 /* This may be a library call that is returning a value
5986 via invisible pointer. Do nothing special, since
5987 ordinary death handling can understand these insns. */
5988 if (i < 0)
5989 return 0;
5991 call_pat = XVECEXP (call_pat, 0, i);
5994 return insn_dead_p (pbi, call_pat, 1, REG_NOTES (call));
5997 return 1;
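/* Sketch of the libcall shape this function expects (insn letters and
   register numbers are illustrative; which hard register carries the
   return value is target-dependent):

     insn A: first insn of the sequence        REG_LIBCALL note -> insn C
     insn B: (call_insn ...)
     insn C: (set (reg:SI 111) (reg:SI 0))     REG_RETVAL note -> insn A

   Called on insn C (already known to be dead) with its REG_RETVAL note,
   we walk forward from insn A to find the CALL_INSN, then ask insn_dead_p
   whether the call's SET of the hard return register (reg 0 here) is dead
   too; if so, the whole sequence A..C can be removed.  */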
6000 /* Return 1 if register REGNO was used before it was set, i.e. if it is
6001 live at function entry. Don't count global register variables, variables
6002 in registers that can be used for function arg passing, or variables in
6003 fixed hard registers. */
6006 regno_uninitialized (regno)
6007 int regno;
6009 if (n_basic_blocks == 0
6010 || (regno < FIRST_PSEUDO_REGISTER
6011 && (global_regs[regno]
6012 || fixed_regs[regno]
6013 || FUNCTION_ARG_REGNO_P (regno))))
6014 return 0;
6016 return REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno);
6019 /* 1 if register REGNO was alive at a place where `setjmp' was called
6020 and was set more than once or is an argument.
6021 Such regs may be clobbered by `longjmp'. */
6024 regno_clobbered_at_setjmp (regno)
6025 int regno;
6027 if (n_basic_blocks == 0)
6028 return 0;
6030 return ((REG_N_SETS (regno) > 1
6031 || REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno))
6032 && REGNO_REG_SET_P (regs_live_at_setjmp, regno));
6035 /* Add MEM to PBI->MEM_SET_LIST. MEM should be canonical. Respect the
6036 maximal list size; look for overlaps in mode and select the largest. */
6037 static void
6038 add_to_mem_set_list (pbi, mem)
6039 struct propagate_block_info *pbi;
6040 rtx mem;
6042 rtx i;
6044 /* We don't know how large a BLKmode store is, so we must not
6045 take it into consideration. */
6046 if (GET_MODE (mem) == BLKmode)
6047 return;
6049 for (i = pbi->mem_set_list; i ; i = XEXP (i, 1))
6051 rtx e = XEXP (i, 0);
6052 if (rtx_equal_p (XEXP (mem, 0), XEXP (e, 0)))
6054 if (GET_MODE_SIZE (GET_MODE (mem)) > GET_MODE_SIZE (GET_MODE (e)))
6056 #ifdef AUTO_INC_DEC
6057 /* If we must store a copy of the mem, we can just modify
6058 the mode of the stored copy. */
6059 if (pbi->flags & PROP_AUTOINC)
6060 PUT_MODE (e, GET_MODE (mem));
6061 else
6062 #endif
6063 XEXP (i, 0) = mem;
6065 return;
6069 if (pbi->mem_set_list_len < MAX_MEM_SET_LIST_LEN)
6071 #ifdef AUTO_INC_DEC
6072 /* Store a copy of mem; otherwise the address may be
6073 scrogged by find_auto_inc. */
6074 if (pbi->flags & PROP_AUTOINC)
6075 mem = shallow_copy_rtx (mem);
6076 #endif
6077 pbi->mem_set_list = alloc_EXPR_LIST (0, mem, pbi->mem_set_list);
6078 pbi->mem_set_list_len++;
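/* Example of the overlap rule (the address is illustrative): if the list
   already holds (mem:QI (reg 60)) and we are asked to add
   (mem:SI (reg 60)), the two addresses compare equal, so rather than
   growing the list we widen the existing entry to SImode, keeping track
   of the larger of the two stores.  */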
6082 /* INSN references memory, possibly using autoincrement addressing modes.
6083 Find any entries on the mem_set_list that need to be invalidated due
6084 to an address change. */
6086 static void
6087 invalidate_mems_from_autoinc (pbi, insn)
6088 struct propagate_block_info *pbi;
6089 rtx insn;
6091 rtx note;
6092 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
6093 if (REG_NOTE_KIND (note) == REG_INC)
6094 invalidate_mems_from_set (pbi, XEXP (note, 0));
6097 /* EXP is a REG. Remove any dependent entries from pbi->mem_set_list. */
6099 static void
6100 invalidate_mems_from_set (pbi, exp)
6101 struct propagate_block_info *pbi;
6102 rtx exp;
6104 rtx temp = pbi->mem_set_list;
6105 rtx prev = NULL_RTX;
6106 rtx next;
6108 while (temp)
6110 next = XEXP (temp, 1);
6111 if (reg_overlap_mentioned_p (exp, XEXP (temp, 0)))
6113 /* Splice this entry out of the list. */
6114 if (prev)
6115 XEXP (prev, 1) = next;
6116 else
6117 pbi->mem_set_list = next;
6118 free_EXPR_LIST_node (temp);
6119 pbi->mem_set_list_len--;
6121 else
6122 prev = temp;
6123 temp = next;
6127 /* Process the registers that are set within X. They are dead prior to
6128 this insn, so update the liveness and death information in PBI
6130 accordingly. If INSN is nonzero, it is the insn being processed.
6132 The operations to perform are taken from PBI->flags. */
6134 static void
6135 mark_set_regs (pbi, x, insn)
6136 struct propagate_block_info *pbi;
6137 rtx x, insn;
6139 rtx cond = NULL_RTX;
6140 rtx link;
6141 enum rtx_code code;
6143 if (insn)
6144 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
6146 if (REG_NOTE_KIND (link) == REG_INC)
6147 mark_set_1 (pbi, SET, XEXP (link, 0),
6148 (GET_CODE (x) == COND_EXEC
6149 ? COND_EXEC_TEST (x) : NULL_RTX),
6150 insn, pbi->flags);
6152 retry:
6153 switch (code = GET_CODE (x))
6155 case SET:
6156 case CLOBBER:
6157 mark_set_1 (pbi, code, SET_DEST (x), cond, insn, pbi->flags);
6158 return;
6160 case COND_EXEC:
6161 cond = COND_EXEC_TEST (x);
6162 x = COND_EXEC_CODE (x);
6163 goto retry;
6165 case PARALLEL:
6167 register int i;
6168 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
6170 rtx sub = XVECEXP (x, 0, i);
6171 switch (code = GET_CODE (sub))
6173 case COND_EXEC:
6174 if (cond != NULL_RTX)
6175 abort ();
6177 cond = COND_EXEC_TEST (sub);
6178 sub = COND_EXEC_CODE (sub);
6179 if (GET_CODE (sub) != SET && GET_CODE (sub) != CLOBBER)
6180 break;
6181 /* Fall through. */
6183 case SET:
6184 case CLOBBER:
6185 mark_set_1 (pbi, code, SET_DEST (sub), cond, insn, pbi->flags);
6186 break;
6188 default:
6189 break;
6192 break;
6195 default:
6196 break;
6200 /* Process a single set, which appears in INSN. REG (which may not
6201 actually be a REG, it may also be a SUBREG, PARALLEL, etc.) is
6202 being set using the CODE (which may be SET, CLOBBER, or COND_EXEC).
6203 If the set is conditional (because it appears in a COND_EXEC), COND
6204 will be the condition. */
6206 static void
6207 mark_set_1 (pbi, code, reg, cond, insn, flags)
6208 struct propagate_block_info *pbi;
6209 enum rtx_code code;
6210 rtx reg, cond, insn;
6211 int flags;
6213 int regno_first = -1, regno_last = -1;
6214 unsigned long not_dead = 0;
6215 int i;
6217 /* Modifying just one hardware register of a multi-reg value or just a
6218 byte field of a register does not mean the value from before this insn
6219 is now dead. Of course, if it was dead afterward, it is unused now. */
6221 switch (GET_CODE (reg))
6223 case PARALLEL:
6224 /* Some targets place small structures in registers for return values of
6225 functions. We have to detect this case specially here to get correct
6226 flow information. */
6227 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
6228 if (XEXP (XVECEXP (reg, 0, i), 0) != 0)
6229 mark_set_1 (pbi, code, XEXP (XVECEXP (reg, 0, i), 0), cond, insn,
6230 flags);
6231 return;
6233 case ZERO_EXTRACT:
6234 case SIGN_EXTRACT:
6235 case STRICT_LOW_PART:
6236 /* ??? Assumes STRICT_LOW_PART not used on multi-word registers. */
6237 do
6238 reg = XEXP (reg, 0);
6239 while (GET_CODE (reg) == SUBREG
6240 || GET_CODE (reg) == ZERO_EXTRACT
6241 || GET_CODE (reg) == SIGN_EXTRACT
6242 || GET_CODE (reg) == STRICT_LOW_PART);
6243 if (GET_CODE (reg) == MEM)
6244 break;
6245 not_dead = (unsigned long) REGNO_REG_SET_P (pbi->reg_live, REGNO (reg));
6246 /* Fall through. */
6248 case REG:
6249 regno_last = regno_first = REGNO (reg);
6250 if (regno_first < FIRST_PSEUDO_REGISTER)
6251 regno_last += HARD_REGNO_NREGS (regno_first, GET_MODE (reg)) - 1;
6252 break;
6254 case SUBREG:
6255 if (GET_CODE (SUBREG_REG (reg)) == REG)
6257 enum machine_mode outer_mode = GET_MODE (reg);
6258 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (reg));
6260 /* Identify the range of registers affected. This is moderately
6261 tricky for hard registers. See alter_subreg. */
6263 regno_last = regno_first = REGNO (SUBREG_REG (reg));
6264 if (regno_first < FIRST_PSEUDO_REGISTER)
6266 regno_first += subreg_regno_offset (regno_first, inner_mode,
6267 SUBREG_BYTE (reg),
6268 outer_mode);
6269 regno_last = (regno_first
6270 + HARD_REGNO_NREGS (regno_first, outer_mode) - 1);
6272 /* Since we've just adjusted the register number ranges, make
6273 sure REG matches. Otherwise some_was_live will be clear
6274 when it shouldn't have been, and we'll create incorrect
6275 REG_UNUSED notes. */
6276 reg = gen_rtx_REG (outer_mode, regno_first);
6278 else
6280 /* If the number of words in the subreg is less than the number
6281 of words in the full register, we have a well-defined partial
6282 set. Otherwise the high bits are undefined.
6284 This is only really applicable to pseudos, since we just took
6285 care of multi-word hard registers. */
6286 if (((GET_MODE_SIZE (outer_mode)
6287 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
6288 < ((GET_MODE_SIZE (inner_mode)
6289 + UNITS_PER_WORD - 1) / UNITS_PER_WORD))
6290 not_dead = (unsigned long) REGNO_REG_SET_P (pbi->reg_live,
6291 regno_first);
6293 reg = SUBREG_REG (reg);
6296 else
6297 reg = SUBREG_REG (reg);
6298 break;
6300 default:
6301 break;
6304 /* If this set is a MEM, then it kills any aliased writes.
6305 If this set is a REG, then it kills any MEMs which use the reg. */
6306 if (optimize && (flags & PROP_SCAN_DEAD_CODE))
6308 if (GET_CODE (reg) == REG)
6309 invalidate_mems_from_set (pbi, reg);
6311 /* If the memory reference had embedded side effects (autoincrement
6312 address modes), then we may need to kill some entries on the
6313 memory set list. */
6314 if (insn && GET_CODE (reg) == MEM)
6315 invalidate_mems_from_autoinc (pbi, insn);
6317 if (GET_CODE (reg) == MEM && ! side_effects_p (reg)
6318 /* ??? With more effort we could track conditional memory life. */
6319 && ! cond
6320 /* There are no REG_INC notes for SP, so we can't assume we'll see
6321 everything that invalidates it. To be safe, don't eliminate any
6322 stores through SP; none of them should be redundant anyway. */
6323 && ! reg_mentioned_p (stack_pointer_rtx, reg))
6324 add_to_mem_set_list (pbi, canon_rtx (reg));
6327 if (GET_CODE (reg) == REG
6328 && ! (regno_first == FRAME_POINTER_REGNUM
6329 && (! reload_completed || frame_pointer_needed))
6330 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
6331 && ! (regno_first == HARD_FRAME_POINTER_REGNUM
6332 && (! reload_completed || frame_pointer_needed))
6333 #endif
6334 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
6335 && ! (regno_first == ARG_POINTER_REGNUM && fixed_regs[regno_first])
6336 #endif
6339 int some_was_live = 0, some_was_dead = 0;
6341 for (i = regno_first; i <= regno_last; ++i)
6343 int needed_regno = REGNO_REG_SET_P (pbi->reg_live, i);
6344 if (pbi->local_set)
6346 /* Order of the set operation matters here since both
6347 sets may be the same. */
6348 CLEAR_REGNO_REG_SET (pbi->cond_local_set, i);
6349 if (cond != NULL_RTX
6350 && ! REGNO_REG_SET_P (pbi->local_set, i))
6351 SET_REGNO_REG_SET (pbi->cond_local_set, i);
6352 else
6353 SET_REGNO_REG_SET (pbi->local_set, i);
6355 if (code != CLOBBER)
6356 SET_REGNO_REG_SET (pbi->new_set, i);
6358 some_was_live |= needed_regno;
6359 some_was_dead |= ! needed_regno;
6362 #ifdef HAVE_conditional_execution
6363 /* Consider conditional death in deciding that the register needs
6364 a death note. */
6365 if (some_was_live && ! not_dead
6366 /* The stack pointer is never dead. Well, not strictly true,
6367 but it's very difficult to tell from here. Hopefully
6368 combine_stack_adjustments will fix up the most egregious
6369 errors. */
6370 && regno_first != STACK_POINTER_REGNUM)
6372 for (i = regno_first; i <= regno_last; ++i)
6373 if (! mark_regno_cond_dead (pbi, i, cond))
6374 not_dead |= ((unsigned long) 1) << (i - regno_first);
6376 #endif
6378 /* Additional data to record if this is the final pass. */
6379 if (flags & (PROP_LOG_LINKS | PROP_REG_INFO
6380 | PROP_DEATH_NOTES | PROP_AUTOINC))
6382 register rtx y;
6383 register int blocknum = pbi->bb->index;
6385 y = NULL_RTX;
6386 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
6388 y = pbi->reg_next_use[regno_first];
6390 /* The next use is no longer next, since a store intervenes. */
6391 for (i = regno_first; i <= regno_last; ++i)
6392 pbi->reg_next_use[i] = 0;
6395 if (flags & PROP_REG_INFO)
6397 for (i = regno_first; i <= regno_last; ++i)
6399 /* Count (weighted) references, stores, etc. This counts a
6400 register twice if it is modified, but that is correct. */
6401 REG_N_SETS (i) += 1;
6402 REG_N_REFS (i) += 1;
6403 REG_FREQ (i) += REG_FREQ_FROM_BB (pbi->bb);
6405 /* The insns where a reg is live are normally counted
6406 elsewhere, but we want the count to include the insn
6407 where the reg is set, and the normal counting mechanism
6408 would not count it. */
6409 REG_LIVE_LENGTH (i) += 1;
6412 /* If this is a hard reg, record this function uses the reg. */
6413 if (regno_first < FIRST_PSEUDO_REGISTER)
6415 for (i = regno_first; i <= regno_last; i++)
6416 regs_ever_live[i] = 1;
6418 else
6420 /* Keep track of which basic blocks each reg appears in. */
6421 if (REG_BASIC_BLOCK (regno_first) == REG_BLOCK_UNKNOWN)
6422 REG_BASIC_BLOCK (regno_first) = blocknum;
6423 else if (REG_BASIC_BLOCK (regno_first) != blocknum)
6424 REG_BASIC_BLOCK (regno_first) = REG_BLOCK_GLOBAL;
6428 if (! some_was_dead)
6430 if (flags & PROP_LOG_LINKS)
6432 /* Make a logical link from the next following insn
6433 that uses this register, back to this insn.
6434 The following insns have already been processed.
6436 We don't build a LOG_LINK for hard registers contained
6437 in ASM_OPERANDs. If these registers get replaced,
6438 we might wind up changing the semantics of the insn,
6439 even if reload can make what appear to be valid
6440 assignments later. */
6441 if (y && (BLOCK_NUM (y) == blocknum)
6442 && (regno_first >= FIRST_PSEUDO_REGISTER
6443 || asm_noperands (PATTERN (y)) < 0))
6444 LOG_LINKS (y) = alloc_INSN_LIST (insn, LOG_LINKS (y));
6447 else if (not_dead)
6449 else if (! some_was_live)
6451 if (flags & PROP_REG_INFO)
6452 REG_N_DEATHS (regno_first) += 1;
6454 if (flags & PROP_DEATH_NOTES)
6456 /* Note that dead stores have already been deleted
6457 when possible. If we get here, we have found a
6458 dead store that cannot be eliminated (because the
6459 same insn does something useful). Indicate this
6460 by marking the reg being set as dying here. */
6461 REG_NOTES (insn)
6462 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
6465 else
6467 if (flags & PROP_DEATH_NOTES)
6469 /* This is a case where we have a multi-word hard register
6470 and some, but not all, of the words of the register are
6471 needed in subsequent insns. Write REG_UNUSED notes
6472 for those parts that were not needed. This case should
6473 be rare. */
6475 for (i = regno_first; i <= regno_last; ++i)
6476 if (! REGNO_REG_SET_P (pbi->reg_live, i))
6477 REG_NOTES (insn)
6478 = alloc_EXPR_LIST (REG_UNUSED,
6479 gen_rtx_REG (reg_raw_mode[i], i),
6480 REG_NOTES (insn));
6485 /* Mark the register as being dead. */
6486 if (some_was_live
6487 /* The stack pointer is never dead. Well, not strictly true,
6488 but it's very difficult to tell from here. Hopefully
6489 combine_stack_adjustments will fix up the most egregious
6490 errors. */
6491 && regno_first != STACK_POINTER_REGNUM)
6493 for (i = regno_first; i <= regno_last; ++i)
6494 if (!(not_dead & (((unsigned long) 1) << (i - regno_first))))
6495 CLEAR_REGNO_REG_SET (pbi->reg_live, i);
6498 else if (GET_CODE (reg) == REG)
6500 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
6501 pbi->reg_next_use[regno_first] = 0;
6504 /* If this is the last pass and this is a SCRATCH, show it will be dying
6505 here and count it. */
6506 else if (GET_CODE (reg) == SCRATCH)
6508 if (flags & PROP_DEATH_NOTES)
6509 REG_NOTES (insn)
6510 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
6514 #ifdef HAVE_conditional_execution
6515 /* Mark REGNO conditionally dead.
6516 Return true if the register is now unconditionally dead. */
6518 static int
6519 mark_regno_cond_dead (pbi, regno, cond)
6520 struct propagate_block_info *pbi;
6521 int regno;
6522 rtx cond;
6524 /* If this is a store to a predicate register, the value of the
6525 predicate is changing; we don't know that the predicate as seen
6526 before is the same as that seen after. Flush all dependent
6527 conditions from reg_cond_dead. This will make all such
6528 conditionally live registers unconditionally live. */
6529 if (REGNO_REG_SET_P (pbi->reg_cond_reg, regno))
6530 flush_reg_cond_reg (pbi, regno);
6532 /* If this is an unconditional store, remove any conditional
6533 life that may have existed. */
6534 if (cond == NULL_RTX)
6535 splay_tree_remove (pbi->reg_cond_dead, regno);
6536 else
6538 splay_tree_node node;
6539 struct reg_cond_life_info *rcli;
6540 rtx ncond;
6542 /* Otherwise this is a conditional set. Record that fact.
6543 It may have been conditionally used, or there may be a
6544 subsequent set with a complementary condition. */
6546 node = splay_tree_lookup (pbi->reg_cond_dead, regno);
6547 if (node == NULL)
6549 /* The register was unconditionally live previously.
6550 Record the current condition as the condition under
6551 which it is dead. */
6552 rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
6553 rcli->condition = cond;
6554 rcli->stores = cond;
6555 rcli->orig_condition = const0_rtx;
6556 splay_tree_insert (pbi->reg_cond_dead, regno,
6557 (splay_tree_value) rcli);
6559 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));
6561 /* Not unconditionally dead. */
6562 return 0;
6564 else
6566 /* The register was conditionally live previously.
6567 Add the new condition to the old. */
6568 rcli = (struct reg_cond_life_info *) node->value;
6569 ncond = rcli->condition;
6570 ncond = ior_reg_cond (ncond, cond, 1);
6571 if (rcli->stores == const0_rtx)
6572 rcli->stores = cond;
6573 else if (rcli->stores != const1_rtx)
6574 rcli->stores = ior_reg_cond (rcli->stores, cond, 1);
6576 /* If the register is now unconditionally dead, remove the entry
6577 in the splay_tree. A register is unconditionally dead if the
6578 dead condition ncond is true. A register is also unconditionally
6579 dead if the sum of all conditional stores is an unconditional
6580 store (stores is true), and the dead condition is identically the
6581 same as the original dead condition initialized at the end of
6582 the block. This is a pointer compare, not an rtx_equal_p
6583 compare. */
6584 if (ncond == const1_rtx
6585 || (ncond == rcli->orig_condition && rcli->stores == const1_rtx))
6586 splay_tree_remove (pbi->reg_cond_dead, regno);
6587 else
6589 rcli->condition = ncond;
6591 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));
6593 /* Not unconditionally dead. */
6594 return 0;
6599 return 1;
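/* Life-cycle example (predicate register 90 and pseudo 100 are
   illustrative).  Scanning backwards, the first store to pseudo 100 seen
   under (ne (reg 90) (const_int 0)) creates a reg_cond_dead entry with
   that condition and returns 0: the register is only conditionally dead.
   If an earlier insn then stores to it under the complementary condition
   (eq (reg 90) (const_int 0)), ior_reg_cond folds the two conditions to
   const1_rtx, the entry is removed, and we return 1: unconditionally
   dead.  */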
6602 /* Called from splay_tree_delete for pbi->reg_cond_dead. */
6604 static void
6605 free_reg_cond_life_info (value)
6606 splay_tree_value value;
6608 struct reg_cond_life_info *rcli = (struct reg_cond_life_info *) value;
6609 free (rcli);
6612 /* Helper function for flush_reg_cond_reg. */
6614 static int
6615 flush_reg_cond_reg_1 (node, data)
6616 splay_tree_node node;
6617 void *data;
6619 struct reg_cond_life_info *rcli;
6620 int *xdata = (int *) data;
6621 unsigned int regno = xdata[0];
6623 /* Don't need to search if last flushed value was farther on in
6624 the in-order traversal. */
6625 if (xdata[1] >= (int) node->key)
6626 return 0;
6628 /* Splice out portions of the expression that refer to regno. */
6629 rcli = (struct reg_cond_life_info *) node->value;
6630 rcli->condition = elim_reg_cond (rcli->condition, regno);
6631 if (rcli->stores != const0_rtx && rcli->stores != const1_rtx)
6632 rcli->stores = elim_reg_cond (rcli->stores, regno);
6634 /* If the entire condition is now false, signal the node to be removed. */
6635 if (rcli->condition == const0_rtx)
6637 xdata[1] = node->key;
6638 return -1;
6640 else if (rcli->condition == const1_rtx)
6641 abort ();
6643 return 0;
6646 /* Flush all (sub) expressions referring to REGNO from REG_COND_LIVE. */
6648 static void
6649 flush_reg_cond_reg (pbi, regno)
6650 struct propagate_block_info *pbi;
6651 int regno;
6653 int pair[2];
6655 pair[0] = regno;
6656 pair[1] = -1;
6657 while (splay_tree_foreach (pbi->reg_cond_dead,
6658 flush_reg_cond_reg_1, pair) == -1)
6659 splay_tree_remove (pbi->reg_cond_dead, pair[1]);
6661 CLEAR_REGNO_REG_SET (pbi->reg_cond_reg, regno);
6664 /* Logical arithmetic on predicate conditions. IOR, NOT and AND.
6665 For ior/and, the ADD flag determines whether we want to add the new
6666 condition X to the old one unconditionally. If it is zero, we will
6667 only return a new expression if X allows us to simplify part of
6668 OLD, otherwise we return OLD unchanged to the caller.
6669 If ADD is nonzero, we will return a new condition in all cases. The
6670 toplevel caller of one of these functions should always pass 1 for
6671 ADD. */
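/* Worked examples (register numbers are illustrative; in this file a
   predicate condition is always a comparison of a register with
   const0_rtx):

     ior_reg_cond ((ne (reg 90) (const_int 0)),
                   (eq (reg 90) (const_int 0)), 1)  => const1_rtx
     and_reg_cond ((ne (reg 90) (const_int 0)),
                   (eq (reg 90) (const_int 0)), 1)  => const0_rtx
     ior_reg_cond ((ne (reg 90) (const_int 0)),
                   (ne (reg 91) (const_int 0)), 0)  => the old condition,
                                                       unchanged, since
                                                       ADD is zero.  */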
6673 static rtx
6674 ior_reg_cond (old, x, add)
6675 rtx old, x;
6676 int add;
6678 rtx op0, op1;
6680 if (GET_RTX_CLASS (GET_CODE (old)) == '<')
6682 if (GET_RTX_CLASS (GET_CODE (x)) == '<'
6683 && REVERSE_CONDEXEC_PREDICATES_P (GET_CODE (x), GET_CODE (old))
6684 && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
6685 return const1_rtx;
6686 if (GET_CODE (x) == GET_CODE (old)
6687 && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
6688 return old;
6689 if (! add)
6690 return old;
6691 return gen_rtx_IOR (0, old, x);
6694 switch (GET_CODE (old))
6696 case IOR:
6697 op0 = ior_reg_cond (XEXP (old, 0), x, 0);
6698 op1 = ior_reg_cond (XEXP (old, 1), x, 0);
6699 if (op0 != XEXP (old, 0) || op1 != XEXP (old, 1))
6701 if (op0 == const0_rtx)
6702 return op1;
6703 if (op1 == const0_rtx)
6704 return op0;
6705 if (op0 == const1_rtx || op1 == const1_rtx)
6706 return const1_rtx;
6707 if (op0 == XEXP (old, 0))
6708 op0 = gen_rtx_IOR (0, op0, x);
6709 else
6710 op1 = gen_rtx_IOR (0, op1, x);
6711 return gen_rtx_IOR (0, op0, op1);
6713 if (! add)
6714 return old;
6715 return gen_rtx_IOR (0, old, x);
6717 case AND:
6718 op0 = ior_reg_cond (XEXP (old, 0), x, 0);
6719 op1 = ior_reg_cond (XEXP (old, 1), x, 0);
6720 if (op0 != XEXP (old, 0) || op1 != XEXP (old, 1))
6722 if (op0 == const1_rtx)
6723 return op1;
6724 if (op1 == const1_rtx)
6725 return op0;
6726 if (op0 == const0_rtx || op1 == const0_rtx)
6727 return const0_rtx;
6728 if (op0 == XEXP (old, 0))
6729 op0 = gen_rtx_IOR (0, op0, x);
6730 else
6731 op1 = gen_rtx_IOR (0, op1, x);
6732 return gen_rtx_AND (0, op0, op1);
6734 if (! add)
6735 return old;
6736 return gen_rtx_IOR (0, old, x);
6738 case NOT:
6739 op0 = and_reg_cond (XEXP (old, 0), not_reg_cond (x), 0);
6740 if (op0 != XEXP (old, 0))
6741 return not_reg_cond (op0);
6742 if (! add)
6743 return old;
6744 return gen_rtx_IOR (0, old, x);
6746 default:
6747 abort ();
6751 static rtx
6752 not_reg_cond (x)
6753 rtx x;
6755 enum rtx_code x_code;
6757 if (x == const0_rtx)
6758 return const1_rtx;
6759 else if (x == const1_rtx)
6760 return const0_rtx;
6761 x_code = GET_CODE (x);
6762 if (x_code == NOT)
6763 return XEXP (x, 0);
6764 if (GET_RTX_CLASS (x_code) == '<'
6765 && GET_CODE (XEXP (x, 0)) == REG)
6767 if (XEXP (x, 1) != const0_rtx)
6768 abort ();
6770 return gen_rtx_fmt_ee (reverse_condition (x_code),
6771 VOIDmode, XEXP (x, 0), const0_rtx);
6773 return gen_rtx_NOT (0, x);
6776 static rtx
6777 and_reg_cond (old, x, add)
6778 rtx old, x;
6779 int add;
6781 rtx op0, op1;
6783 if (GET_RTX_CLASS (GET_CODE (old)) == '<')
6785 if (GET_RTX_CLASS (GET_CODE (x)) == '<'
6786 && GET_CODE (x) == reverse_condition (GET_CODE (old))
6787 && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
6788 return const0_rtx;
6789 if (GET_CODE (x) == GET_CODE (old)
6790 && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
6791 return old;
6792 if (! add)
6793 return old;
6794 return gen_rtx_AND (0, old, x);
6797 switch (GET_CODE (old))
6799 case IOR:
6800 op0 = and_reg_cond (XEXP (old, 0), x, 0);
6801 op1 = and_reg_cond (XEXP (old, 1), x, 0);
6802 if (op0 != XEXP (old, 0) || op1 != XEXP (old, 1))
6804 if (op0 == const0_rtx)
6805 return op1;
6806 if (op1 == const0_rtx)
6807 return op0;
6808 if (op0 == const1_rtx || op1 == const1_rtx)
6809 return const1_rtx;
6810 if (op0 == XEXP (old, 0))
6811 op0 = gen_rtx_AND (0, op0, x);
6812 else
6813 op1 = gen_rtx_AND (0, op1, x);
6814 return gen_rtx_IOR (0, op0, op1);
6816 if (! add)
6817 return old;
6818 return gen_rtx_AND (0, old, x);
6820 case AND:
6821 op0 = and_reg_cond (XEXP (old, 0), x, 0);
6822 op1 = and_reg_cond (XEXP (old, 1), x, 0);
6823 if (op0 != XEXP (old, 0) || op1 != XEXP (old, 1))
6825 if (op0 == const1_rtx)
6826 return op1;
6827 if (op1 == const1_rtx)
6828 return op0;
6829 if (op0 == const0_rtx || op1 == const0_rtx)
6830 return const0_rtx;
6831 if (op0 == XEXP (old, 0))
6832 op0 = gen_rtx_AND (0, op0, x);
6833 else
6834 op1 = gen_rtx_AND (0, op1, x);
6835 return gen_rtx_AND (0, op0, op1);
6837 if (! add)
6838 return old;
6840 /* If X is identical to one of the existing terms of the AND,
6841 then just return what we already have. */
6842 /* ??? There really should be some sort of recursive check here in
6843 case there are nested ANDs. */
6844 if ((GET_CODE (XEXP (old, 0)) == GET_CODE (x)
6845 && REGNO (XEXP (XEXP (old, 0), 0)) == REGNO (XEXP (x, 0)))
6846 || (GET_CODE (XEXP (old, 1)) == GET_CODE (x)
6847 && REGNO (XEXP (XEXP (old, 1), 0)) == REGNO (XEXP (x, 0))))
6848 return old;
6850 return gen_rtx_AND (0, old, x);
6852 case NOT:
6853 op0 = ior_reg_cond (XEXP (old, 0), not_reg_cond (x), 0);
6854 if (op0 != XEXP (old, 0))
6855 return not_reg_cond (op0);
6856 if (! add)
6857 return old;
6858 return gen_rtx_AND (0, old, x);
6860 default:
6861 abort ();
6865 /* Given a condition X, remove references to reg REGNO and return the
6866 new condition. The removal will be done so that all conditions
6867 involving REGNO are considered to evaluate to false. This function
6868 is used when the value of REGNO changes. */
6870 static rtx
6871 elim_reg_cond (x, regno)
6872 rtx x;
6873 unsigned int regno;
6875 rtx op0, op1;
6877 if (GET_RTX_CLASS (GET_CODE (x)) == '<')
6879 if (REGNO (XEXP (x, 0)) == regno)
6880 return const0_rtx;
6881 return x;
6884 switch (GET_CODE (x))
6886 case AND:
6887 op0 = elim_reg_cond (XEXP (x, 0), regno);
6888 op1 = elim_reg_cond (XEXP (x, 1), regno);
6889 if (op0 == const0_rtx || op1 == const0_rtx)
6890 return const0_rtx;
6891 if (op0 == const1_rtx)
6892 return op1;
6893 if (op1 == const1_rtx)
6894 return op0;
6895 if (op0 == XEXP (x, 0) && op1 == XEXP (x, 1))
6896 return x;
6897 return gen_rtx_AND (0, op0, op1);
6899 case IOR:
6900 op0 = elim_reg_cond (XEXP (x, 0), regno);
6901 op1 = elim_reg_cond (XEXP (x, 1), regno);
6902 if (op0 == const1_rtx || op1 == const1_rtx)
6903 return const1_rtx;
6904 if (op0 == const0_rtx)
6905 return op1;
6906 if (op1 == const0_rtx)
6907 return op0;
6908 if (op0 == XEXP (x, 0) && op1 == XEXP (x, 1))
6909 return x;
6910 return gen_rtx_IOR (0, op0, op1);
6912 case NOT:
6913 op0 = elim_reg_cond (XEXP (x, 0), regno);
6914 if (op0 == const0_rtx)
6915 return const1_rtx;
6916 if (op0 == const1_rtx)
6917 return const0_rtx;
6918 if (op0 != XEXP (x, 0))
6919 return not_reg_cond (op0);
6920 return x;
6922 default:
6923 abort ();
6926 #endif /* HAVE_conditional_execution */
6928 #ifdef AUTO_INC_DEC
6930 /* Try to substitute the auto-inc expression INC as the address inside
6931 MEM which occurs in INSN. Currently, the address of MEM is an expression
6932 involving INCR_REG, and INCR is the next use of INCR_REG; it is an insn
6933 that has a single set whose source is a PLUS of INCR_REG and something
6934 else. */
6936 static void
6937 attempt_auto_inc (pbi, inc, insn, mem, incr, incr_reg)
6938 struct propagate_block_info *pbi;
6939 rtx inc, insn, mem, incr, incr_reg;
6941 int regno = REGNO (incr_reg);
6942 rtx set = single_set (incr);
6943 rtx q = SET_DEST (set);
6944 rtx y = SET_SRC (set);
6945 int opnum = XEXP (y, 0) == incr_reg ? 0 : 1;
6947 /* Make sure this reg appears only once in this insn. */
6948 if (count_occurrences (PATTERN (insn), incr_reg, 1) != 1)
6949 return;
6951 if (dead_or_set_p (incr, incr_reg)
6952 /* Mustn't autoinc an eliminable register. */
6953 && (regno >= FIRST_PSEUDO_REGISTER
6954 || ! TEST_HARD_REG_BIT (elim_reg_set, regno)))
6956 /* This is the simple case. Try to make the auto-inc. If
6957 we can't, we are done. Otherwise, we will do any
6958 needed updates below. */
6959 if (! validate_change (insn, &XEXP (mem, 0), inc, 0))
6960 return;
6962 else if (GET_CODE (q) == REG
6963 /* PREV_INSN used here to check the semi-open interval
6964 [insn,incr). */
6965 && ! reg_used_between_p (q, PREV_INSN (insn), incr)
6966 /* We must also check for sets of q as q may be
6967 a call clobbered hard register and there may
6968 be a call between PREV_INSN (insn) and incr. */
6969 && ! reg_set_between_p (q, PREV_INSN (insn), incr))
6971 /* We have *p followed sometime later by q = p+size.
6972 Both p and q must be live afterward,
6973 and q is not used between INSN and its assignment.
6974 Change it to q = p, ...*q..., q = q+size.
6975 Then fall into the usual case. */
6976 rtx insns, temp;
6978 start_sequence ();
6979 emit_move_insn (q, incr_reg);
6980 insns = get_insns ();
6981 end_sequence ();
6983 if (basic_block_for_insn)
6984 for (temp = insns; temp; temp = NEXT_INSN (temp))
6985 set_block_for_insn (temp, pbi->bb);
6987 /* If we can't make the auto-inc, or can't make the
6988 replacement into Y, exit. There's no point in making
6989 the change below if we can't do the auto-inc and doing
6990 so is not correct in the pre-inc case. */
6992 XEXP (inc, 0) = q;
6993 validate_change (insn, &XEXP (mem, 0), inc, 1);
6994 validate_change (incr, &XEXP (y, opnum), q, 1);
6995 if (! apply_change_group ())
6996 return;
6998 /* We now know we'll be doing this change, so emit the
6999 new insn(s) and do the updates. */
7000 emit_insns_before (insns, insn);
7002 if (pbi->bb->head == insn)
7003 pbi->bb->head = insns;
7005 /* INCR will become a NOTE and INSN won't contain a
7006 use of INCR_REG. If a use of INCR_REG was just placed in
7007 the insn before INSN, make that the next use.
7008 Otherwise, invalidate it. */
7009 if (GET_CODE (PREV_INSN (insn)) == INSN
7010 && GET_CODE (PATTERN (PREV_INSN (insn))) == SET
7011 && SET_SRC (PATTERN (PREV_INSN (insn))) == incr_reg)
7012 pbi->reg_next_use[regno] = PREV_INSN (insn);
7013 else
7014 pbi->reg_next_use[regno] = 0;
7016 incr_reg = q;
7017 regno = REGNO (q);
7019 /* REGNO is now used in INCR which is below INSN, but
7020 it previously wasn't live here. If we don't mark
7021 it as live, we'll put a REG_DEAD note for it
7022 on this insn, which is incorrect. */
7023 SET_REGNO_REG_SET (pbi->reg_live, regno);
7025 /* If there are any calls between INSN and INCR, show
7026 that REGNO now crosses them. */
7027 for (temp = insn; temp != incr; temp = NEXT_INSN (temp))
7028 if (GET_CODE (temp) == CALL_INSN)
7029 REG_N_CALLS_CROSSED (regno)++;
7031 else
7032 return;
7034 /* If we haven't returned, it means we were able to make the
7035 auto-inc, so update the status. First, record that this insn
7036 has an implicit side effect. */
7038 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, incr_reg, REG_NOTES (insn));
7040 /* Modify the old increment-insn to simply copy
7041 the already-incremented value of our register. */
7042 if (! validate_change (incr, &SET_SRC (set), incr_reg, 0))
7043 abort ();
7045 /* If that makes it a no-op (copying the register into itself) delete
7046 it so it won't appear to be a "use" and a "set" of this
7047 register. */
7048 if (REGNO (SET_DEST (set)) == REGNO (incr_reg))
7050 /* If the original source was dead, it's dead now. */
7051 rtx note;
7053 while ((note = find_reg_note (incr, REG_DEAD, NULL_RTX)) != NULL_RTX)
7055 remove_note (incr, note);
7056 if (XEXP (note, 0) != incr_reg)
7057 CLEAR_REGNO_REG_SET (pbi->reg_live, REGNO (XEXP (note, 0)));
7060 PUT_CODE (incr, NOTE);
7061 NOTE_LINE_NUMBER (incr) = NOTE_INSN_DELETED;
7062 NOTE_SOURCE_FILE (incr) = 0;
7065 if (regno >= FIRST_PSEUDO_REGISTER)
7067 /* Count an extra reference to the reg. When a reg is
7068 incremented, spilling it is worse, so we want to make
7069 that less likely. */
7070 REG_FREQ (regno) += REG_FREQ_FROM_BB (pbi->bb);
7072 /* Count the increment as a setting of the register,
7073 even though it isn't a SET in rtl. */
7074 REG_N_SETS (regno)++;
7078 /* X is a MEM found in INSN. See if we can convert it into an auto-increment
7079 reference. */
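/* Transformation sketch (pseudo 100 and SImode are illustrative).  On a
   target with HAVE_POST_INCREMENT, if INSN contains (mem:SI (reg 100))
   and the next use of pseudo 100 in this block is

     (set (reg 100) (plus (reg 100) (const_int 4)))

   then attempt_auto_inc rewrites the MEM as
   (mem:SI (post_inc (reg 100))), adds a REG_INC note to INSN, and either
   deletes the increment insn or, if its destination was a different
   register, leaves it as a plain copy of the already-incremented value.  */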
7081 static void
7082 find_auto_inc (pbi, x, insn)
7083 struct propagate_block_info *pbi;
7084 rtx x;
7085 rtx insn;
7087 rtx addr = XEXP (x, 0);
7088 HOST_WIDE_INT offset = 0;
7089 rtx set, y, incr, inc_val;
7090 int regno;
7091 int size = GET_MODE_SIZE (GET_MODE (x));
7093 if (GET_CODE (insn) == JUMP_INSN)
7094 return;
7096 /* Here we detect use of an index register which might be good for
7097 postincrement, postdecrement, preincrement, or predecrement. */
7099 if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
7100 offset = INTVAL (XEXP (addr, 1)), addr = XEXP (addr, 0);
7102 if (GET_CODE (addr) != REG)
7103 return;
7105 regno = REGNO (addr);
7107 /* Is the next use an increment that might make auto-increment? */
7108 incr = pbi->reg_next_use[regno];
7109 if (incr == 0 || BLOCK_NUM (incr) != BLOCK_NUM (insn))
7110 return;
7111 set = single_set (incr);
7112 if (set == 0 || GET_CODE (set) != SET)
7113 return;
7114 y = SET_SRC (set);
7116 if (GET_CODE (y) != PLUS)
7117 return;
7119 if (REG_P (XEXP (y, 0)) && REGNO (XEXP (y, 0)) == REGNO (addr))
7120 inc_val = XEXP (y, 1);
7121 else if (REG_P (XEXP (y, 1)) && REGNO (XEXP (y, 1)) == REGNO (addr))
7122 inc_val = XEXP (y, 0);
7123 else
7124 return;
7126 if (GET_CODE (inc_val) == CONST_INT)
7128 if (HAVE_POST_INCREMENT
7129 && (INTVAL (inc_val) == size && offset == 0))
7130 attempt_auto_inc (pbi, gen_rtx_POST_INC (Pmode, addr), insn, x,
7131 incr, addr);
7132 else if (HAVE_POST_DECREMENT
7133 && (INTVAL (inc_val) == -size && offset == 0))
7134 attempt_auto_inc (pbi, gen_rtx_POST_DEC (Pmode, addr), insn, x,
7135 incr, addr);
7136 else if (HAVE_PRE_INCREMENT
7137 && (INTVAL (inc_val) == size && offset == size))
7138 attempt_auto_inc (pbi, gen_rtx_PRE_INC (Pmode, addr), insn, x,
7139 incr, addr);
7140 else if (HAVE_PRE_DECREMENT
7141 && (INTVAL (inc_val) == -size && offset == -size))
7142 attempt_auto_inc (pbi, gen_rtx_PRE_DEC (Pmode, addr), insn, x,
7143 incr, addr);
7144 else if (HAVE_POST_MODIFY_DISP && offset == 0)
7145 attempt_auto_inc (pbi, gen_rtx_POST_MODIFY (Pmode, addr,
7146 gen_rtx_PLUS (Pmode,
7147 addr,
7148 inc_val)),
7149 insn, x, incr, addr);
7151 else if (GET_CODE (inc_val) == REG
7152 && ! reg_set_between_p (inc_val, PREV_INSN (insn),
7153 NEXT_INSN (incr)))
7156 if (HAVE_POST_MODIFY_REG && offset == 0)
7157 attempt_auto_inc (pbi, gen_rtx_POST_MODIFY (Pmode, addr,
7158 gen_rtx_PLUS (Pmode,
7159 addr,
7160 inc_val)),
7161 insn, x, incr, addr);
7165 #endif /* AUTO_INC_DEC */
7167 static void
7168 mark_used_reg (pbi, reg, cond, insn)
7169 struct propagate_block_info *pbi;
7170 rtx reg;
7171 rtx cond ATTRIBUTE_UNUSED;
7172 rtx insn;
7174 unsigned int regno_first, regno_last, i;
7175 int some_was_live, some_was_dead, some_not_set;
7177 regno_last = regno_first = REGNO (reg);
7178 if (regno_first < FIRST_PSEUDO_REGISTER)
7179 regno_last += HARD_REGNO_NREGS (regno_first, GET_MODE (reg)) - 1;
7181 /* Find out if any of this register is live after this instruction. */
7182 some_was_live = some_was_dead = 0;
7183 for (i = regno_first; i <= regno_last; ++i)
7185 int needed_regno = REGNO_REG_SET_P (pbi->reg_live, i);
7186 some_was_live |= needed_regno;
7187 some_was_dead |= ! needed_regno;
7190 /* Find out if any of the register was set this insn. */
7191 some_not_set = 0;
7192 for (i = regno_first; i <= regno_last; ++i)
7193 some_not_set |= ! REGNO_REG_SET_P (pbi->new_set, i);
7195 if (pbi->flags & (PROP_LOG_LINKS | PROP_AUTOINC))
7197 /* Record where each reg is used, so when the reg is set we know
7198 the next insn that uses it. */
7199 pbi->reg_next_use[regno_first] = insn;
7202 if (pbi->flags & PROP_REG_INFO)
7204 if (regno_first < FIRST_PSEUDO_REGISTER)
7206 /* If this is a register we are going to try to eliminate,
7207 don't mark it live here. If we are successful in
7208 eliminating it, it need not be live unless it is used for
7209 pseudos, in which case it will have been set live when it
7210 was allocated to the pseudos. If the register will not
7211 be eliminated, reload will set it live at that point.
7213 Otherwise, record that this function uses this register. */
7214 /* ??? The PPC backend tries to "eliminate" the pic register
7215 to itself. This should be fixed. In the meantime, hack
7216 around it. */
7218 if (! (TEST_HARD_REG_BIT (elim_reg_set, regno_first)
7219 && (regno_first == FRAME_POINTER_REGNUM
7220 || regno_first == ARG_POINTER_REGNUM)))
7221 for (i = regno_first; i <= regno_last; ++i)
7222 regs_ever_live[i] = 1;
7224 else
7226 /* Keep track of which basic block each reg appears in. */
7228 register int blocknum = pbi->bb->index;
7229 if (REG_BASIC_BLOCK (regno_first) == REG_BLOCK_UNKNOWN)
7230 REG_BASIC_BLOCK (regno_first) = blocknum;
7231 else if (REG_BASIC_BLOCK (regno_first) != blocknum)
7232 REG_BASIC_BLOCK (regno_first) = REG_BLOCK_GLOBAL;
7234 /* Count (weighted) number of uses of each reg. */
7235 REG_FREQ (regno_first) += REG_FREQ_FROM_BB (pbi->bb);
7236 REG_N_REFS (regno_first)++;
7240 /* Record and count the insns in which a reg dies. If it is used in
7241 this insn and was dead below the insn then it dies in this insn.
7242 If it was set in this insn, we do not make a REG_DEAD note;
7243 likewise if we already made such a note. */
7244 if ((pbi->flags & (PROP_DEATH_NOTES | PROP_REG_INFO))
7245 && some_was_dead
7246 && some_not_set)
7248 /* Check for the case where the register dying partially
7249 overlaps the register set by this insn. */
7250 if (regno_first != regno_last)
7251 for (i = regno_first; i <= regno_last; ++i)
7252 some_was_live |= REGNO_REG_SET_P (pbi->new_set, i);
7254 /* If none of the words in X is needed, make a REG_DEAD note.
7255 Otherwise, we must make partial REG_DEAD notes. */
7256 if (! some_was_live)
7258 if ((pbi->flags & PROP_DEATH_NOTES)
7259 && ! find_regno_note (insn, REG_DEAD, regno_first))
7260 REG_NOTES (insn)
7261 = alloc_EXPR_LIST (REG_DEAD, reg, REG_NOTES (insn));
7263 if (pbi->flags & PROP_REG_INFO)
7264 REG_N_DEATHS (regno_first)++;
7266 else
7268 /* Don't make a REG_DEAD note for a part of a register
7269 that is set in the insn. */
7270 for (i = regno_first; i <= regno_last; ++i)
7271 if (! REGNO_REG_SET_P (pbi->reg_live, i)
7272 && ! dead_or_set_regno_p (insn, i))
7273 REG_NOTES (insn)
7274 = alloc_EXPR_LIST (REG_DEAD,
7275 gen_rtx_REG (reg_raw_mode[i], i),
7276 REG_NOTES (insn));
7280 /* Mark the register as being live. */
7281 for (i = regno_first; i <= regno_last; ++i)
7283 SET_REGNO_REG_SET (pbi->reg_live, i);
7285 #ifdef HAVE_conditional_execution
7286 /* If this is a conditional use, record that fact. If it is later
7287 conditionally set, we'll know to kill the register. */
7288 if (cond != NULL_RTX)
7290 splay_tree_node node;
7291 struct reg_cond_life_info *rcli;
7292 rtx ncond;
7294 if (some_was_live)
7296 node = splay_tree_lookup (pbi->reg_cond_dead, i);
7297 if (node == NULL)
7299 /* The register was unconditionally live previously.
7300 No need to do anything. */
7302 else
7304 /* The register was conditionally live previously.
7305 Subtract the new life cond from the old death cond. */
7306 rcli = (struct reg_cond_life_info *) node->value;
7307 ncond = rcli->condition;
7308 ncond = and_reg_cond (ncond, not_reg_cond (cond), 1);
7310 /* If the register is now unconditionally live,
7311 remove the entry in the splay_tree. */
7312 if (ncond == const0_rtx)
7313 splay_tree_remove (pbi->reg_cond_dead, i);
7314 else
7316 rcli->condition = ncond;
7317 SET_REGNO_REG_SET (pbi->reg_cond_reg,
7318 REGNO (XEXP (cond, 0)));
7322 else
7324 /* The register was not previously live at all. Record
7325 the condition under which it is still dead. */
7326 rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
7327 rcli->condition = not_reg_cond (cond);
7328 rcli->stores = const0_rtx;
7329 rcli->orig_condition = const0_rtx;
7330 splay_tree_insert (pbi->reg_cond_dead, i,
7331 (splay_tree_value) rcli);
7333 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));
7336 else if (some_was_live)
7338 /* The register may have been conditionally live previously, but
7339 is now unconditionally live. Remove it from the conditionally
7340 dead list, so that a conditional set won't cause us to think
7341 it dead. */
7342 splay_tree_remove (pbi->reg_cond_dead, i);
7344 #endif
7348 /* Scan expression X and mark each register it uses as live, setting
7349 the corresponding bit in PBI->REG_LIVE and recording use and death
7350 information along the way.
7352 INSN is the containing instruction. If INSN is dead, this function
7353 is not called. */
7355 static void
7356 mark_used_regs (pbi, x, cond, insn)
7357 struct propagate_block_info *pbi;
7358 rtx x, cond, insn;
7360 register RTX_CODE code;
7361 register int regno;
7362 int flags = pbi->flags;
7364 retry:
7365 code = GET_CODE (x);
7366 switch (code)
7368 case LABEL_REF:
7369 case SYMBOL_REF:
7370 case CONST_INT:
7371 case CONST:
7372 case CONST_DOUBLE:
7373 case PC:
7374 case ADDR_VEC:
7375 case ADDR_DIFF_VEC:
7376 return;
7378 #ifdef HAVE_cc0
7379 case CC0:
7380 pbi->cc0_live = 1;
7381 return;
7382 #endif
7384 case CLOBBER:
7385 /* If we are clobbering a MEM, mark any registers inside the address
7386 as being used. */
7387 if (GET_CODE (XEXP (x, 0)) == MEM)
7388 mark_used_regs (pbi, XEXP (XEXP (x, 0), 0), cond, insn);
7389 return;
7391 case MEM:
7392 /* Don't bother watching stores to mems if this is not the
7393 final pass. We'll not be deleting dead stores this round. */
7394 if (optimize && (flags & PROP_SCAN_DEAD_CODE))
7396 /* Invalidate the data for the last MEM stored, but only if MEM is
7397 something that can be stored into. */
7398 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
7399 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
7400 /* Needn't clear the memory set list. */
7402 else
7404 rtx temp = pbi->mem_set_list;
7405 rtx prev = NULL_RTX;
7406 rtx next;
7408 while (temp)
7410 next = XEXP (temp, 1);
7411 if (anti_dependence (XEXP (temp, 0), x))
7413 /* Splice temp out of the list. */
7414 if (prev)
7415 XEXP (prev, 1) = next;
7416 else
7417 pbi->mem_set_list = next;
7418 free_EXPR_LIST_node (temp);
7419 pbi->mem_set_list_len--;
7421 else
7422 prev = temp;
7423 temp = next;
7427 /* If the memory reference had embedded side effects (autoincrement
7428 address modes), then we may need to kill some entries on the
7429 memory set list. */
7430 if (insn)
7431 invalidate_mems_from_autoinc (pbi, insn);
7434 #ifdef AUTO_INC_DEC
7435 if (flags & PROP_AUTOINC)
7436 find_auto_inc (pbi, x, insn);
7437 #endif
7438 break;
7440 case SUBREG:
7441 #ifdef CLASS_CANNOT_CHANGE_MODE
7442 if (GET_CODE (SUBREG_REG (x)) == REG
7443 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER
7444 && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (x),
7445 GET_MODE (SUBREG_REG (x))))
7446 REG_CHANGES_MODE (REGNO (SUBREG_REG (x))) = 1;
7447 #endif
7449 /* While we're here, optimize this case. */
7450 x = SUBREG_REG (x);
7451 if (GET_CODE (x) != REG)
7452 goto retry;
7453 /* Fall through. */
7455 case REG:
7456 /* We see a register used (other than being set) => mark it as needed. */
7457 mark_used_reg (pbi, x, cond, insn);
7458 return;
7460 case SET:
7462 register rtx testreg = SET_DEST (x);
7463 int mark_dest = 0;
7465 /* If storing into MEM, don't show it as being used. But do
7466 show the address as being used. */
7467 if (GET_CODE (testreg) == MEM)
7469 #ifdef AUTO_INC_DEC
7470 if (flags & PROP_AUTOINC)
7471 find_auto_inc (pbi, testreg, insn);
7472 #endif
7473 mark_used_regs (pbi, XEXP (testreg, 0), cond, insn);
7474 mark_used_regs (pbi, SET_SRC (x), cond, insn);
7475 return;
7478 /* Storing in STRICT_LOW_PART is like storing in a reg
7479 in that this SET might be dead, so ignore it in TESTREG,
7480 but in some other ways it is like using the reg.
7482 Storing in a SUBREG or a bit field is like storing the entire
7483 register in that if the register's value is not used
7484 then this SET is not needed. */
7485 while (GET_CODE (testreg) == STRICT_LOW_PART
7486 || GET_CODE (testreg) == ZERO_EXTRACT
7487 || GET_CODE (testreg) == SIGN_EXTRACT
7488 || GET_CODE (testreg) == SUBREG)
7490 #ifdef CLASS_CANNOT_CHANGE_MODE
7491 if (GET_CODE (testreg) == SUBREG
7492 && GET_CODE (SUBREG_REG (testreg)) == REG
7493 && REGNO (SUBREG_REG (testreg)) >= FIRST_PSEUDO_REGISTER
7494 && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (SUBREG_REG (testreg)),
7495 GET_MODE (testreg)))
7496 REG_CHANGES_MODE (REGNO (SUBREG_REG (testreg))) = 1;
7497 #endif
7499 /* Modifying a single register in an alternate mode
7500 does not use any of the old value. But these other
7501 ways of storing in a register do use the old value. */
7502 if (GET_CODE (testreg) == SUBREG
7503 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
7505 else
7506 mark_dest = 1;
7508 testreg = XEXP (testreg, 0);
7511 /* If this is a store into a register or group of registers,
7512 recursively scan the value being stored. */
7514 if ((GET_CODE (testreg) == PARALLEL
7515 && GET_MODE (testreg) == BLKmode)
7516 || (GET_CODE (testreg) == REG
7517 && (regno = REGNO (testreg),
7518 ! (regno == FRAME_POINTER_REGNUM
7519 && (! reload_completed || frame_pointer_needed)))
7520 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
7521 && ! (regno == HARD_FRAME_POINTER_REGNUM
7522 && (! reload_completed || frame_pointer_needed))
7523 #endif
7524 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
7525 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
7526 #endif
7529 if (mark_dest)
7530 mark_used_regs (pbi, SET_DEST (x), cond, insn);
7531 mark_used_regs (pbi, SET_SRC (x), cond, insn);
7532 return;
7535 break;
7537 case ASM_OPERANDS:
7538 case UNSPEC_VOLATILE:
7539 case TRAP_IF:
7540 case ASM_INPUT:
7542 /* Traditional and volatile asm instructions must be considered to use
7543 and clobber all hard registers, all pseudo-registers and all of
7544 memory. So must TRAP_IF and UNSPEC_VOLATILE operations.
7546 Consider for instance a volatile asm that changes the fpu rounding
7547 mode. An insn should not be moved across this even if it only uses
7548 pseudo-regs because it might give an incorrectly rounded result.
7550 ?!? Unfortunately, marking all hard registers as live causes massive
7551 problems for the register allocator and marking all pseudos as live
7552 creates mountains of uninitialized variable warnings.
7554 So for now, just clear the memory set list and mark any regs
7555 we can find in ASM_OPERANDS as used. */
7556 if (code != ASM_OPERANDS || MEM_VOLATILE_P (x))
7558 free_EXPR_LIST_list (&pbi->mem_set_list);
7559 pbi->mem_set_list_len = 0;
7562 /* For all ASM_OPERANDS, we must traverse the vector of input operands.
7563 We cannot just fall through here since then we would be confused
7564 by the ASM_INPUT rtxs inside ASM_OPERANDS, which do not indicate
7565 traditional asms, unlike their normal usage. */
7566 if (code == ASM_OPERANDS)
7568 int j;
7570 for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
7571 mark_used_regs (pbi, ASM_OPERANDS_INPUT (x, j), cond, insn);
7573 break;
7576 case COND_EXEC:
7577 if (cond != NULL_RTX)
7578 abort ();
7580 mark_used_regs (pbi, COND_EXEC_TEST (x), NULL_RTX, insn);
7582 cond = COND_EXEC_TEST (x);
7583 x = COND_EXEC_CODE (x);
7584 goto retry;
7586 case PHI:
7587 /* We _do_not_ want to scan operands of phi nodes. Operands of
7588 a phi function are evaluated only when control reaches this
7589 block along a particular edge. Therefore, regs that appear
7590 as arguments to phi should not be added to the global live at
7591 start. */
7592 return;
7594 default:
7595 break;
7598 /* Recursively scan the operands of this expression. */
7601 register const char * const fmt = GET_RTX_FORMAT (code);
7602 register int i;
7604 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7606 if (fmt[i] == 'e')
7608 /* Tail recursive case: save a function call level. */
7609 if (i == 0)
7611 x = XEXP (x, 0);
7612 goto retry;
7614 mark_used_regs (pbi, XEXP (x, i), cond, insn);
7616 else if (fmt[i] == 'E')
7618 register int j;
7619 for (j = 0; j < XVECLEN (x, i); j++)
7620 mark_used_regs (pbi, XVECEXP (x, i, j), cond, insn);
7626 #ifdef AUTO_INC_DEC
7628 static int
7629 try_pre_increment_1 (pbi, insn)
7630 struct propagate_block_info *pbi;
7631 rtx insn;
7633 /* Find the next use of this reg. If in same basic block,
7634 make it do pre-increment or pre-decrement if appropriate. */
7635 rtx x = single_set (insn);
7636 HOST_WIDE_INT amount = ((GET_CODE (SET_SRC (x)) == PLUS ? 1 : -1)
7637 * INTVAL (XEXP (SET_SRC (x), 1)));
7638 int regno = REGNO (SET_DEST (x));
7639 rtx y = pbi->reg_next_use[regno];
7640 if (y != 0
7641 && SET_DEST (x) != stack_pointer_rtx
7642 && BLOCK_NUM (y) == BLOCK_NUM (insn)
7643 /* Don't do this if the reg dies, or gets set in y; a standard addressing
7644 mode would be better. */
7645 && ! dead_or_set_p (y, SET_DEST (x))
7646 && try_pre_increment (y, SET_DEST (x), amount))
7648 /* We have found a suitable auto-increment and already changed
7649 insn Y to do it. So flush this increment instruction. */
7650 propagate_block_delete_insn (pbi->bb, insn);
7652 /* Count a reference to this reg for the increment insn we are
7653 deleting. When a reg is incremented, spilling it is worse,
7654 so we want to make that less likely. */
7655 if (regno >= FIRST_PSEUDO_REGISTER)
7657 REG_FREQ (regno) += REG_FREQ_FROM_BB (pbi->bb);
7658 REG_N_SETS (regno)++;
7661 /* Flush any remembered memories depending on the value of
7662 the incremented register. */
7663 invalidate_mems_from_set (pbi, SET_DEST (x));
7665 return 1;
7667 return 0;
7670 /* Try to change INSN so that it does pre-increment or pre-decrement
7671 addressing on register REG in order to add AMOUNT to REG.
7672 AMOUNT is negative for pre-decrement.
7673 Returns 1 if the change could be made.
7674 This checks all about the validity of the result of modifying INSN. */
7676 static int
7677 try_pre_increment (insn, reg, amount)
7678 rtx insn, reg;
7679 HOST_WIDE_INT amount;
7681 register rtx use;
7683 /* Nonzero if we can try to make a pre-increment or pre-decrement.
7684 For example, addl $4,r1; movl (r1),... can become movl +(r1),... */
7685 int pre_ok = 0;
7686 /* Nonzero if we can try to make a post-increment or post-decrement.
7687 For example, addl $4,r1; movl -4(r1),... can become movl (r1)+,...
7688 It is possible for both PRE_OK and POST_OK to be nonzero if the machine
7689 supports both pre-inc and post-inc, or both pre-dec and post-dec. */
7690 int post_ok = 0;
7692 /* Nonzero if the opportunity actually requires post-inc or post-dec. */
7693 int do_post = 0;
7695 /* From the sign of increment, see which possibilities are conceivable
7696 on this target machine. */
7697 if (HAVE_PRE_INCREMENT && amount > 0)
7698 pre_ok = 1;
7699 if (HAVE_POST_INCREMENT && amount > 0)
7700 post_ok = 1;
7702 if (HAVE_PRE_DECREMENT && amount < 0)
7703 pre_ok = 1;
7704 if (HAVE_POST_DECREMENT && amount < 0)
7705 post_ok = 1;
7707 if (! (pre_ok || post_ok))
7708 return 0;
7710 /* It is not safe to add a side effect to a jump insn
7711 because if the incremented register is spilled and must be reloaded
7712 there would be no way to store the incremented value back in memory. */
7714 if (GET_CODE (insn) == JUMP_INSN)
7715 return 0;
7717 use = 0;
7718 if (pre_ok)
7719 use = find_use_as_address (PATTERN (insn), reg, 0);
7720 if (post_ok && (use == 0 || use == (rtx) 1))
7722 use = find_use_as_address (PATTERN (insn), reg, -amount);
7723 do_post = 1;
7726 if (use == 0 || use == (rtx) 1)
7727 return 0;
7729 if (GET_MODE_SIZE (GET_MODE (use)) != (amount > 0 ? amount : - amount))
7730 return 0;
7732 /* See if this combination of instruction and addressing mode exists. */
7733 if (! validate_change (insn, &XEXP (use, 0),
7734 gen_rtx_fmt_e (amount > 0
7735 ? (do_post ? POST_INC : PRE_INC)
7736 : (do_post ? POST_DEC : PRE_DEC),
7737 Pmode, reg), 0))
7738 return 0;
7740 /* Record that this insn now has an implicit side effect on REG. */
7741 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, reg, REG_NOTES (insn));
7742 return 1;
7745 #endif /* AUTO_INC_DEC */
7747 /* Find the place in the rtx X where REG is used as a memory address.
7748 Return the MEM rtx that so uses it.
7749 If PLUSCONST is nonzero, search instead for a memory address equivalent to
7750 (plus REG (const_int PLUSCONST)).
7752 If such an address does not appear, return 0.
7753 If REG appears more than once, or is used other than in such an address,
7754 return (rtx)1. */
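/* An illustrative example (not from the original sources): if X is

       (set (reg 3) (mem:SI (plus (reg 2) (const_int 4))))

   then find_use_as_address (X, (reg 2), 4) returns the inner MEM, while
   find_use_as_address (X, (reg 2), 0) returns (rtx) 1, because (reg 2)
   is used inside the address but not as the whole address.  */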
7757 find_use_as_address (x, reg, plusconst)
7758 register rtx x;
7759 rtx reg;
7760 HOST_WIDE_INT plusconst;
7762 enum rtx_code code = GET_CODE (x);
7763 const char * const fmt = GET_RTX_FORMAT (code);
7764 register int i;
7765 register rtx value = 0;
7766 register rtx tem;
7768 if (code == MEM && XEXP (x, 0) == reg && plusconst == 0)
7769 return x;
7771 if (code == MEM && GET_CODE (XEXP (x, 0)) == PLUS
7772 && XEXP (XEXP (x, 0), 0) == reg
7773 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
7774 && INTVAL (XEXP (XEXP (x, 0), 1)) == plusconst)
7775 return x;
7777 if (code == SIGN_EXTRACT || code == ZERO_EXTRACT)
7779 /* If REG occurs inside a MEM used in a bit-field reference,
7780 that is unacceptable. */
7781 if (find_use_as_address (XEXP (x, 0), reg, 0) != 0)
7782 return (rtx) (HOST_WIDE_INT) 1;
7785 if (x == reg)
7786 return (rtx) (HOST_WIDE_INT) 1;
7788 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7790 if (fmt[i] == 'e')
7792 tem = find_use_as_address (XEXP (x, i), reg, plusconst);
7793 if (value == 0)
7794 value = tem;
7795 else if (tem != 0)
7796 return (rtx) (HOST_WIDE_INT) 1;
7798 else if (fmt[i] == 'E')
7800 register int j;
7801 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7803 tem = find_use_as_address (XVECEXP (x, i, j), reg, plusconst);
7804 if (value == 0)
7805 value = tem;
7806 else if (tem != 0)
7807 return (rtx) (HOST_WIDE_INT) 1;
7812 return value;
7815 /* Write information about registers and basic blocks into FILE.
7816 This is part of making a debugging dump. */
7818 void
7819 dump_regset (r, outf)
7820 regset r;
7821 FILE *outf;
7823 int i;
7824 if (r == NULL)
7826 fputs (" (nil)", outf);
7827 return;
7830 EXECUTE_IF_SET_IN_REG_SET (r, 0, i,
7832 fprintf (outf, " %d", i);
7833 if (i < FIRST_PSEUDO_REGISTER)
7834 fprintf (outf, " [%s]",
7835 reg_names[i]);
7839 /* Print a human-readable representation of R on the standard error
7840 stream. This function is designed to be used from within the
7841 debugger. */
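/* For example, from within gdb one might type (illustrative only):

       (gdb) call debug_regset (bb->global_live_at_start)

   to inspect the live-register set at the start of the basic block BB
   currently being examined.  */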
7843 void
7844 debug_regset (r)
7845 regset r;
7847 dump_regset (r, stderr);
7848 putc ('\n', stderr);
7851 void
7852 dump_flow_info (file)
7853 FILE *file;
7855 register int i;
7856 static const char * const reg_class_names[] = REG_CLASS_NAMES;
7858 fprintf (file, "%d registers.\n", max_regno);
7859 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
7860 if (REG_N_REFS (i))
7862 enum reg_class class, altclass;
7863 fprintf (file, "\nRegister %d used %d times across %d insns",
7864 i, REG_N_REFS (i), REG_LIVE_LENGTH (i));
7865 if (REG_BASIC_BLOCK (i) >= 0)
7866 fprintf (file, " in block %d", REG_BASIC_BLOCK (i));
7867 if (REG_N_SETS (i))
7868 fprintf (file, "; set %d time%s", REG_N_SETS (i),
7869 (REG_N_SETS (i) == 1) ? "" : "s");
7870 if (REG_USERVAR_P (regno_reg_rtx[i]))
7871 fprintf (file, "; user var");
7872 if (REG_N_DEATHS (i) != 1)
7873 fprintf (file, "; dies in %d places", REG_N_DEATHS (i));
7874 if (REG_N_CALLS_CROSSED (i) == 1)
7875 fprintf (file, "; crosses 1 call");
7876 else if (REG_N_CALLS_CROSSED (i))
7877 fprintf (file, "; crosses %d calls", REG_N_CALLS_CROSSED (i));
7878 if (PSEUDO_REGNO_BYTES (i) != UNITS_PER_WORD)
7879 fprintf (file, "; %d bytes", PSEUDO_REGNO_BYTES (i));
7880 class = reg_preferred_class (i);
7881 altclass = reg_alternate_class (i);
7882 if (class != GENERAL_REGS || altclass != ALL_REGS)
7884 if (altclass == ALL_REGS || class == ALL_REGS)
7885 fprintf (file, "; pref %s", reg_class_names[(int) class]);
7886 else if (altclass == NO_REGS)
7887 fprintf (file, "; %s or none", reg_class_names[(int) class]);
7888 else
7889 fprintf (file, "; pref %s, else %s",
7890 reg_class_names[(int) class],
7891 reg_class_names[(int) altclass]);
7893 if (REG_POINTER (regno_reg_rtx[i]))
7894 fprintf (file, "; pointer");
7895 fprintf (file, ".\n");
7898 fprintf (file, "\n%d basic blocks, %d edges.\n", n_basic_blocks, n_edges);
7899 for (i = 0; i < n_basic_blocks; i++)
7901 register basic_block bb = BASIC_BLOCK (i);
7902 register edge e;
7904 fprintf (file, "\nBasic block %d: first insn %d, last %d, loop_depth %d, count ",
7905 i, INSN_UID (bb->head), INSN_UID (bb->end), bb->loop_depth);
7906 fprintf (file, HOST_WIDEST_INT_PRINT_DEC, (HOST_WIDEST_INT) bb->count);
7907 fprintf (file, ", freq %i.\n", bb->frequency);
7909 fprintf (file, "Predecessors: ");
7910 for (e = bb->pred; e; e = e->pred_next)
7911 dump_edge_info (file, e, 0);
7913 fprintf (file, "\nSuccessors: ");
7914 for (e = bb->succ; e; e = e->succ_next)
7915 dump_edge_info (file, e, 1);
7917 fprintf (file, "\nRegisters live at start:");
7918 dump_regset (bb->global_live_at_start, file);
7920 fprintf (file, "\nRegisters live at end:");
7921 dump_regset (bb->global_live_at_end, file);
7923 putc ('\n', file);
7926 putc ('\n', file);
7929 void
7930 debug_flow_info ()
7932 dump_flow_info (stderr);
7935 void
7936 dump_edge_info (file, e, do_succ)
7937 FILE *file;
7938 edge e;
7939 int do_succ;
7941 basic_block side = (do_succ ? e->dest : e->src);
7943 if (side == ENTRY_BLOCK_PTR)
7944 fputs (" ENTRY", file);
7945 else if (side == EXIT_BLOCK_PTR)
7946 fputs (" EXIT", file);
7947 else
7948 fprintf (file, " %d", side->index);
7950 if (e->probability)
7951 fprintf (file, " [%.1f%%] ", e->probability * 100.0 / REG_BR_PROB_BASE);
7953 if (e->count)
7955 fprintf (file, " count:");
7956 fprintf (file, HOST_WIDEST_INT_PRINT_DEC, (HOST_WIDEST_INT) e->count);
7959 if (e->flags)
7961 static const char * const bitnames[] = {
7962 "fallthru", "crit", "ab", "abcall", "eh", "fake", "dfs_back"
7964 int comma = 0;
7965 int i, flags = e->flags;
7967 fputc (' ', file);
7968 fputc ('(', file);
7969 for (i = 0; flags; i++)
7970 if (flags & (1 << i))
7972 flags &= ~(1 << i);
7974 if (comma)
7975 fputc (',', file);
7976 if (i < (int) ARRAY_SIZE (bitnames))
7977 fputs (bitnames[i], file);
7978 else
7979 fprintf (file, "%d", i);
7980 comma = 1;
7982 fputc (')', file);
7986 /* Print out one basic block with live information at start and end. */
7988 void
7989 dump_bb (bb, outf)
7990 basic_block bb;
7991 FILE *outf;
7993 rtx insn;
7994 rtx last;
7995 edge e;
7997 fprintf (outf, ";; Basic block %d, loop depth %d, count ",
7998 bb->index, bb->loop_depth);
7999 fprintf (outf, HOST_WIDEST_INT_PRINT_DEC, (HOST_WIDEST_INT) bb->count);
8000 putc ('\n', outf);
8002 fputs (";; Predecessors: ", outf);
8003 for (e = bb->pred; e; e = e->pred_next)
8004 dump_edge_info (outf, e, 0);
8005 putc ('\n', outf);
8007 fputs (";; Registers live at start:", outf);
8008 dump_regset (bb->global_live_at_start, outf);
8009 putc ('\n', outf);
8011 for (insn = bb->head, last = NEXT_INSN (bb->end);
8012 insn != last;
8013 insn = NEXT_INSN (insn))
8014 print_rtl_single (outf, insn);
8016 fputs (";; Registers live at end:", outf);
8017 dump_regset (bb->global_live_at_end, outf);
8018 putc ('\n', outf);
8020 fputs (";; Successors: ", outf);
8021 for (e = bb->succ; e; e = e->succ_next)
8022 dump_edge_info (outf, e, 1);
8023 putc ('\n', outf);
8026 void
8027 debug_bb (bb)
8028 basic_block bb;
8030 dump_bb (bb, stderr);
8033 void
8034 debug_bb_n (n)
8035 int n;
8037 dump_bb (BASIC_BLOCK (n), stderr);
8040 /* Like print_rtl, but also print out live information for the start of each
8041 basic block. */
8043 void
8044 print_rtl_with_bb (outf, rtx_first)
8045 FILE *outf;
8046 rtx rtx_first;
8048 register rtx tmp_rtx;
8050 if (rtx_first == 0)
8051 fprintf (outf, "(nil)\n");
8052 else
8054 int i;
8055 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
8056 int max_uid = get_max_uid ();
8057 basic_block *start = (basic_block *)
8058 xcalloc (max_uid, sizeof (basic_block));
8059 basic_block *end = (basic_block *)
8060 xcalloc (max_uid, sizeof (basic_block));
8061 enum bb_state *in_bb_p = (enum bb_state *)
8062 xcalloc (max_uid, sizeof (enum bb_state));
8064 for (i = n_basic_blocks - 1; i >= 0; i--)
8066 basic_block bb = BASIC_BLOCK (i);
8067 rtx x;
8069 start[INSN_UID (bb->head)] = bb;
8070 end[INSN_UID (bb->end)] = bb;
8071 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
8073 enum bb_state state = IN_MULTIPLE_BB;
8074 if (in_bb_p[INSN_UID (x)] == NOT_IN_BB)
8075 state = IN_ONE_BB;
8076 in_bb_p[INSN_UID (x)] = state;
8078 if (x == bb->end)
8079 break;
8083 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
8085 int did_output;
8086 basic_block bb;
8088 if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
8090 fprintf (outf, ";; Start of basic block %d, registers live:",
8091 bb->index);
8092 dump_regset (bb->global_live_at_start, outf);
8093 putc ('\n', outf);
8096 if (in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB
8097 && GET_CODE (tmp_rtx) != NOTE
8098 && GET_CODE (tmp_rtx) != BARRIER)
8099 fprintf (outf, ";; Insn is not within a basic block\n");
8100 else if (in_bb_p[INSN_UID (tmp_rtx)] == IN_MULTIPLE_BB)
8101 fprintf (outf, ";; Insn is in multiple basic blocks\n");
8103 did_output = print_rtl_single (outf, tmp_rtx);
8105 if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
8107 fprintf (outf, ";; End of basic block %d, registers live:\n",
8108 bb->index);
8109 dump_regset (bb->global_live_at_end, outf);
8110 putc ('\n', outf);
8113 if (did_output)
8114 putc ('\n', outf);
8117 free (start);
8118 free (end);
8119 free (in_bb_p);
8122 if (current_function_epilogue_delay_list != 0)
8124 fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
8125 for (tmp_rtx = current_function_epilogue_delay_list; tmp_rtx != 0;
8126 tmp_rtx = XEXP (tmp_rtx, 1))
8127 print_rtl_single (outf, XEXP (tmp_rtx, 0));
8131 /* Dump the rtl into the current debugging dump file, then abort. */
8133 static void
8134 print_rtl_and_abort_fcn (file, line, function)
8135 const char *file;
8136 int line;
8137 const char *function;
8139 if (rtl_dump_file)
8141 print_rtl_with_bb (rtl_dump_file, get_insns ());
8142 fclose (rtl_dump_file);
8145 fancy_abort (file, line, function);
8148 /* Recompute register set/reference counts immediately prior to register
8149 allocation.
8151 This avoids problems with set/reference counts changing to/from values
8152 which have special meanings to the register allocators.
8154 Additionally, the reference counts are the primary component used by the
8155 register allocators to prioritize pseudos for allocation to hard regs.
8156 More accurate reference counts generally lead to better register allocation.
8158 F is the first insn to be scanned.
8160 LOOP_STEP denotes how much loop_depth should be incremented per
8161 loop nesting level in order to increase the ref count more for
8162 references in a loop.
8164 It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and
8165 possibly other information which is used by the register allocators. */
8167 void
8168 recompute_reg_usage (f, loop_step)
8169 rtx f ATTRIBUTE_UNUSED;
8170 int loop_step ATTRIBUTE_UNUSED;
8172 allocate_reg_life_data ();
8173 update_life_info (NULL, UPDATE_LIFE_LOCAL, PROP_REG_INFO);
8176 /* Optionally removes all the REG_DEAD and REG_UNUSED notes from a set of
8177 blocks. If BLOCKS is NULL, assume the universal set. Returns a count
8178 of the number of registers that died. */
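/* For example (an illustrative sketch, not from the original sources), a
   pass that is about to recompute liveness for every block might call

       count_or_remove_death_notes (NULL, 1);

   to strip all existing REG_DEAD and REG_UNUSED notes, while passing 0
   for KILL merely counts the REG_DEAD notes without deleting anything.  */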
8181 count_or_remove_death_notes (blocks, kill)
8182 sbitmap blocks;
8183 int kill;
8185 int i, count = 0;
8187 for (i = n_basic_blocks - 1; i >= 0; --i)
8189 basic_block bb;
8190 rtx insn;
8192 if (blocks && ! TEST_BIT (blocks, i))
8193 continue;
8195 bb = BASIC_BLOCK (i);
8197 for (insn = bb->head;; insn = NEXT_INSN (insn))
8199 if (INSN_P (insn))
8201 rtx *pprev = &REG_NOTES (insn);
8202 rtx link = *pprev;
8204 while (link)
8206 switch (REG_NOTE_KIND (link))
8208 case REG_DEAD:
8209 if (GET_CODE (XEXP (link, 0)) == REG)
8211 rtx reg = XEXP (link, 0);
8212 int n;
8214 if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
8215 n = 1;
8216 else
8217 n = HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg));
8218 count += n;
8220 /* Fall through. */
8222 case REG_UNUSED:
8223 if (kill)
8225 rtx next = XEXP (link, 1);
8226 free_EXPR_LIST_node (link);
8227 *pprev = link = next;
8228 break;
8230 /* Fall through. */
8232 default:
8233 pprev = &XEXP (link, 1);
8234 link = *pprev;
8235 break;
8240 if (insn == bb->end)
8241 break;
8245 return count;
8249 /* Update the BLOCK_FOR_INSN information for every insn within BB. */
8251 void
8252 update_bb_for_insn (bb)
8253 basic_block bb;
8255 rtx insn;
8257 if (! basic_block_for_insn)
8258 return;
8260 for (insn = bb->head; ; insn = NEXT_INSN (insn))
8262 set_block_for_insn (insn, bb);
8264 if (insn == bb->end)
8265 break;
8270 /* Record INSN's block as BB. */
8272 void
8273 set_block_for_insn (insn, bb)
8274 rtx insn;
8275 basic_block bb;
8277 size_t uid = INSN_UID (insn);
8278 if (uid >= basic_block_for_insn->num_elements)
8280 int new_size;
8282 /* Add one-eighth the size so we don't keep calling xrealloc. */
8283 new_size = uid + (uid + 7) / 8;
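      /* For instance (illustrative only): a uid of 100 grows the varray to
	 100 + (100 + 7) / 8 == 113 elements, leaving roughly one-eighth
	 headroom beyond the index just requested.  */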
8285 VARRAY_GROW (basic_block_for_insn, new_size);
8287 VARRAY_BB (basic_block_for_insn, uid) = bb;
8290 /* When a new insn has been inserted into an existing block, it will
8291 sometimes emit more than a single insn. This routine will set the
8292 block number for the specified insn, and look backwards in the insn
8293 chain to see if there are any other uninitialized insns immediately
8294 previous to this one, and set the block number for them too. */
8296 void
8297 set_block_for_new_insns (insn, bb)
8298 rtx insn;
8299 basic_block bb;
8301 set_block_for_insn (insn, bb);
8303 /* Scan the previous instructions setting the block number until we find
8304 an instruction that has the block number set, or we find a note
8305 of any kind. */
8306 for (insn = PREV_INSN (insn); insn != NULL_RTX; insn = PREV_INSN (insn))
8308 if (GET_CODE (insn) == NOTE)
8309 break;
8310 if ((unsigned) INSN_UID (insn) >= basic_block_for_insn->num_elements
8311 || BLOCK_FOR_INSN (insn) == 0)
8312 set_block_for_insn (insn, bb);
8313 else
8314 break;
8318 /* Verify the CFG consistency. This function checks some CFG invariants and
8319 aborts when something is wrong. Hopefully this function will help to
8320 convert many optimization passes to keep the CFG consistent.
8322 Currently it does the following checks:
8324 - test head/end pointers
8325 - overlapping of basic blocks
8326 - edge list correctness
8327 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
8328 - tails of basic blocks (ensure that the boundary is necessary)
8329 - scans body of the basic block for JUMP_INSN, CODE_LABEL
8330 and NOTE_INSN_BASIC_BLOCK
8331 - check that all insns are in the basic blocks
8332 (except the switch handling code, barriers and notes)
8333 - check that all returns are followed by barriers
8335 In the future it can be extended to check a lot of other stuff as well
8336 (reachability of basic blocks, life information, etc.). */
8338 void
8339 verify_flow_info ()
8341 const int max_uid = get_max_uid ();
8342 const rtx rtx_first = get_insns ();
8343 rtx last_head = get_last_insn ();
8344 basic_block *bb_info, *last_visited;
8345 size_t *edge_checksum;
8346 rtx x;
8347 int i, last_bb_num_seen, num_bb_notes, err = 0;
8349 bb_info = (basic_block *) xcalloc (max_uid, sizeof (basic_block));
8350 last_visited = (basic_block *) xcalloc (n_basic_blocks + 2,
8351 sizeof (basic_block));
8352 edge_checksum = (size_t *) xcalloc (n_basic_blocks + 2, sizeof (size_t));
8354 for (i = n_basic_blocks - 1; i >= 0; i--)
8356 basic_block bb = BASIC_BLOCK (i);
8357 rtx head = bb->head;
8358 rtx end = bb->end;
8360 /* Verify the end of the basic block is in the INSN chain. */
8361 for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
8362 if (x == end)
8363 break;
8364 if (!x)
8366 error ("End insn %d for block %d not found in the insn stream.",
8367 INSN_UID (end), bb->index);
8368 err = 1;
8371 /* Work backwards from the end to the head of the basic block
8372 to verify the head is in the RTL chain. */
8373 for (; x != NULL_RTX; x = PREV_INSN (x))
8375 /* While walking over the insn chain, verify insns appear
8376 in only one basic block and initialize the BB_INFO array
8377 used by other passes. */
8378 if (bb_info[INSN_UID (x)] != NULL)
8380 error ("Insn %d is in multiple basic blocks (%d and %d)",
8381 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
8382 err = 1;
8384 bb_info[INSN_UID (x)] = bb;
8386 if (x == head)
8387 break;
8389 if (!x)
8391 error ("Head insn %d for block %d not found in the insn stream.",
8392 INSN_UID (head), bb->index);
8393 err = 1;
8396 last_head = x;
8399 /* Now check the basic blocks (boundaries etc.) */
8400 for (i = n_basic_blocks - 1; i >= 0; i--)
8402 basic_block bb = BASIC_BLOCK (i);
8403 int has_fallthru = 0;
8404 edge e;
8406 e = bb->succ;
8407 while (e)
8409 if (last_visited [e->dest->index + 2] == bb)
8411 error ("verify_flow_info: Duplicate edge %i->%i",
8412 e->src->index, e->dest->index);
8413 err = 1;
8415 last_visited [e->dest->index + 2] = bb;
8417 if (e->flags & EDGE_FALLTHRU)
8418 has_fallthru = 1;
8420 if ((e->flags & EDGE_FALLTHRU)
8421 && e->src != ENTRY_BLOCK_PTR
8422 && e->dest != EXIT_BLOCK_PTR)
8424 rtx insn;
8425 if (e->src->index + 1 != e->dest->index)
8427 error ("verify_flow_info: Incorrect blocks for fallthru %i->%i",
8428 e->src->index, e->dest->index);
8429 err = 1;
8431 else
8432 for (insn = NEXT_INSN (e->src->end); insn != e->dest->head;
8433 insn = NEXT_INSN (insn))
8434 if (GET_CODE (insn) == BARRIER || INSN_P (insn))
8436 error ("verify_flow_info: Incorrect fallthru %i->%i",
8437 e->src->index, e->dest->index);
8438 fatal_insn ("Wrong insn in the fallthru edge", insn);
8439 err = 1;
8442 if (e->src != bb)
8444 error ("verify_flow_info: Basic block %d succ edge is corrupted",
8445 bb->index);
8446 fprintf (stderr, "Predecessor: ");
8447 dump_edge_info (stderr, e, 0);
8448 fprintf (stderr, "\nSuccessor: ");
8449 dump_edge_info (stderr, e, 1);
8450 fprintf (stderr, "\n");
8451 err = 1;
8453 edge_checksum[e->dest->index + 2] += (size_t) e;
8454 e = e->succ_next;
8456 if (!has_fallthru)
8458 rtx insn = bb->end;
8460 /* Ensure existence of barrier in BB with no fallthru edges. */
8461 for (insn = bb->end; GET_CODE (insn) != BARRIER;
8462 insn = NEXT_INSN (insn))
8463 if (!insn
8464 || (GET_CODE (insn) == NOTE
8465 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_BASIC_BLOCK))
8467 error ("Missing barrier after block %i", bb->index);
8468 err = 1;
8472 e = bb->pred;
8473 while (e)
8475 if (e->dest != bb)
8477 error ("Basic block %d pred edge is corrupted", bb->index);
8478 fputs ("Predecessor: ", stderr);
8479 dump_edge_info (stderr, e, 0);
8480 fputs ("\nSuccessor: ", stderr);
8481 dump_edge_info (stderr, e, 1);
8482 fputc ('\n', stderr);
8483 err = 1;
8485 edge_checksum[e->dest->index + 2] -= (size_t) e;
8486 e = e->pred_next;
8489 /* OK, pointers are correct. Now check the header of the basic
8490 block. It ought to contain an optional CODE_LABEL followed
8491 by a NOTE_INSN_BASIC_BLOCK note. */
8492 x = bb->head;
8493 if (GET_CODE (x) == CODE_LABEL)
8495 if (bb->end == x)
8497 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
8498 bb->index);
8499 err = 1;
8501 x = NEXT_INSN (x);
8503 if (!NOTE_INSN_BASIC_BLOCK_P (x) || NOTE_BASIC_BLOCK (x) != bb)
8505 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
8506 bb->index);
8507 err = 1;
8510 if (bb->end == x)
8512 /* Do checks for empty blocks here */
8514 else
8516 x = NEXT_INSN (x);
8517 while (x)
8519 if (NOTE_INSN_BASIC_BLOCK_P (x))
8521 error ("NOTE_INSN_BASIC_BLOCK %d in the middle of basic block %d",
8522 INSN_UID (x), bb->index);
8523 err = 1;
8526 if (x == bb->end)
8527 break;
8529 if (GET_CODE (x) == JUMP_INSN
8530 || GET_CODE (x) == CODE_LABEL
8531 || GET_CODE (x) == BARRIER)
8533 error ("In basic block %d:", bb->index);
8534 fatal_insn ("Flow control insn inside a basic block", x);
8537 x = NEXT_INSN (x);
8542 /* Complete edge checksumming for ENTRY and EXIT. */
8544 edge e;
8545 for (e = ENTRY_BLOCK_PTR->succ; e ; e = e->succ_next)
8546 edge_checksum[e->dest->index + 2] += (size_t) e;
8547 for (e = EXIT_BLOCK_PTR->pred; e ; e = e->pred_next)
8548 edge_checksum[e->dest->index + 2] -= (size_t) e;
8551 for (i = -2; i < n_basic_blocks; ++i)
8552 if (edge_checksum[i + 2])
8554 error ("Basic block %i edge lists are corrupted", i);
8555 err = 1;
8558 last_bb_num_seen = -1;
8559 num_bb_notes = 0;
8560 x = rtx_first;
8561 while (x)
8563 if (NOTE_INSN_BASIC_BLOCK_P (x))
8565 basic_block bb = NOTE_BASIC_BLOCK (x);
8566 num_bb_notes++;
8567 if (bb->index != last_bb_num_seen + 1)
8568 internal_error ("Basic blocks not numbered consecutively.");
8570 last_bb_num_seen = bb->index;
8573 if (!bb_info[INSN_UID (x)])
8575 switch (GET_CODE (x))
8577 case BARRIER:
8578 case NOTE:
8579 break;
8581 case CODE_LABEL:
8582 /* An addr_vec is placed outside any basic block. */
8583 if (NEXT_INSN (x)
8584 && GET_CODE (NEXT_INSN (x)) == JUMP_INSN
8585 && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
8586 || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC))
8588 x = NEXT_INSN (x);
8591 /* But in any case, non-deletable labels can appear anywhere. */
8592 break;
8594 default:
8595 fatal_insn ("Insn outside basic block", x);
8599 if (INSN_P (x)
8600 && GET_CODE (x) == JUMP_INSN
8601 && returnjump_p (x) && ! condjump_p (x)
8602 && ! (NEXT_INSN (x) && GET_CODE (NEXT_INSN (x)) == BARRIER))
8603 fatal_insn ("Return not followed by barrier", x);
8605 x = NEXT_INSN (x);
8608 if (num_bb_notes != n_basic_blocks)
8609 internal_error
8610 ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
8611 num_bb_notes, n_basic_blocks);
8613 if (err)
8614 internal_error ("verify_flow_info failed.");
8616 /* Clean up. */
8617 free (bb_info);
8618 free (last_visited);
8619 free (edge_checksum);
8622 /* Functions to access an edge list with a vector representation.
8623 Enough data is kept such that given an index number, the
8624 pred and succ that edge represents can be determined, or
8625 given a pred and a succ, its index number can be returned.
8626 This allows algorithms which consume a lot of memory to
8627 represent the normally full matrix of edge (pred,succ) with a
8628 single indexed vector, edge (EDGE_INDEX (pred, succ)), with no
8629 wasted space in the client code due to sparse flow graphs. */
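#if 0
/* An illustrative sketch (not part of the original file) of how a client
   might walk the compressed edge list; it relies only on functions and
   macros used elsewhere in this file (create_edge_list, free_edge_list,
   NUM_EDGES, INDEX_EDGE_PRED_BB, INDEX_EDGE_SUCC_BB).  */
static void
edge_list_walk_example ()
{
  struct edge_list *elist = create_edge_list ();
  int x;

  for (x = 0; x < NUM_EDGES (elist); x++)
    {
      basic_block pred = INDEX_EDGE_PRED_BB (elist, x);
      basic_block succ = INDEX_EDGE_SUCC_BB (elist, x);

      /* PRED may be ENTRY_BLOCK_PTR and SUCC may be EXIT_BLOCK_PTR;
	 every other block has a valid ->index.  */
      if (pred != ENTRY_BLOCK_PTR && succ != EXIT_BLOCK_PTR)
	fprintf (stderr, "edge %d: %d -> %d\n", x, pred->index, succ->index);
    }

  free_edge_list (elist);
}
#endif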
8631 /* This function initializes the edge list. Basically the entire
8632 flowgraph is processed, all edges are assigned a number,
8633 and the data structure is filled in. */
8635 struct edge_list *
8636 create_edge_list ()
8638 struct edge_list *elist;
8639 edge e;
8640 int num_edges;
8641 int x;
8642 int block_count;
8644 block_count = n_basic_blocks + 2; /* Include the entry and exit blocks. */
8646 num_edges = 0;
8648 /* Determine the number of edges in the flow graph by counting successor
8649 edges on each basic block. */
8650 for (x = 0; x < n_basic_blocks; x++)
8652 basic_block bb = BASIC_BLOCK (x);
8654 for (e = bb->succ; e; e = e->succ_next)
8655 num_edges++;
8657 /* Don't forget successors of the entry block. */
8658 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
8659 num_edges++;
8661 elist = (struct edge_list *) xmalloc (sizeof (struct edge_list));
8662 elist->num_blocks = block_count;
8663 elist->num_edges = num_edges;
8664 elist->index_to_edge = (edge *) xmalloc (sizeof (edge) * num_edges);
8666 num_edges = 0;
8668 /* Follow successors of the entry block, and register these edges. */
8669 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
8671 elist->index_to_edge[num_edges] = e;
8672 num_edges++;
8675 for (x = 0; x < n_basic_blocks; x++)
8677 basic_block bb = BASIC_BLOCK (x);
8679 /* Follow all successors of blocks, and register these edges. */
8680 for (e = bb->succ; e; e = e->succ_next)
8682 elist->index_to_edge[num_edges] = e;
8683 num_edges++;
8686 return elist;
8689 /* This function frees the memory associated with an edge list. */
8691 void
8692 free_edge_list (elist)
8693 struct edge_list *elist;
8695 if (elist)
8697 free (elist->index_to_edge);
8698 free (elist);
8702 /* This function provides debug output showing an edge list. */
8704 void
8705 print_edge_list (f, elist)
8706 FILE *f;
8707 struct edge_list *elist;
8709 int x;
8710 fprintf (f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
8711 elist->num_blocks - 2, elist->num_edges);
8713 for (x = 0; x < elist->num_edges; x++)
8715 fprintf (f, " %-4d - edge(", x);
8716 if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR)
8717 fprintf (f, "entry,");
8718 else
8719 fprintf (f, "%d,", INDEX_EDGE_PRED_BB (elist, x)->index);
8721 if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR)
8722 fprintf (f, "exit)\n");
8723 else
8724 fprintf (f, "%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
8728 /* This function provides an internal consistency check of an edge list,
8729 verifying that all edges are present, and that there are no
8730 extra edges. */
8732 void
8733 verify_edge_list (f, elist)
8734 FILE *f;
8735 struct edge_list *elist;
8737 int x, pred, succ, index;
8738 edge e;
8740 for (x = 0; x < n_basic_blocks; x++)
8742 basic_block bb = BASIC_BLOCK (x);
8744 for (e = bb->succ; e; e = e->succ_next)
8746 pred = e->src->index;
8747 succ = e->dest->index;
8748 index = EDGE_INDEX (elist, e->src, e->dest);
8749 if (index == EDGE_INDEX_NO_EDGE)
8751 fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
8752 continue;
8754 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
8755 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
8756 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
8757 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
8758 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
8759 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
8762 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
8764 pred = e->src->index;
8765 succ = e->dest->index;
8766 index = EDGE_INDEX (elist, e->src, e->dest);
8767 if (index == EDGE_INDEX_NO_EDGE)
8769 fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
8770 continue;
8772 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
8773 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
8774 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
8775 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
8776 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
8777 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
8779 /* We've verified that all the edges are in the list; now let's make sure
8780 there are no spurious edges in the list. */
8782 for (pred = 0; pred < n_basic_blocks; pred++)
8783 for (succ = 0; succ < n_basic_blocks; succ++)
8785 basic_block p = BASIC_BLOCK (pred);
8786 basic_block s = BASIC_BLOCK (succ);
8788 int found_edge = 0;
8790 for (e = p->succ; e; e = e->succ_next)
8791 if (e->dest == s)
8793 found_edge = 1;
8794 break;
8796 for (e = s->pred; e; e = e->pred_next)
8797 if (e->src == p)
8799 found_edge = 1;
8800 break;
8802 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
8803 == EDGE_INDEX_NO_EDGE && found_edge != 0)
8804 fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
8805 pred, succ);
8806 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
8807 != EDGE_INDEX_NO_EDGE && found_edge == 0)
8808 fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
8809 pred, succ, EDGE_INDEX (elist, BASIC_BLOCK (pred),
8810 BASIC_BLOCK (succ)));
8812 for (succ = 0; succ < n_basic_blocks; succ++)
8814 basic_block p = ENTRY_BLOCK_PTR;
8815 basic_block s = BASIC_BLOCK (succ);
8817 int found_edge = 0;
8819 for (e = p->succ; e; e = e->succ_next)
8820 if (e->dest == s)
8822 found_edge = 1;
8823 break;
8825 for (e = s->pred; e; e = e->pred_next)
8826 if (e->src == p)
8828 found_edge = 1;
8829 break;
8831 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
8832 == EDGE_INDEX_NO_EDGE && found_edge != 0)
8833 fprintf (f, "*** Edge (entry, %d) appears to not have an index\n",
8834 succ);
8835 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
8836 != EDGE_INDEX_NO_EDGE && found_edge == 0)
8837 fprintf (f, "*** Edge (entry, %d) has index %d, but no edge exists\n",
8838 succ, EDGE_INDEX (elist, ENTRY_BLOCK_PTR,
8839 BASIC_BLOCK (succ)));
8841 for (pred = 0; pred < n_basic_blocks; pred++)
8843 basic_block p = BASIC_BLOCK (pred);
8844 basic_block s = EXIT_BLOCK_PTR;
8846 int found_edge = 0;
8848 for (e = p->succ; e; e = e->succ_next)
8849 if (e->dest == s)
8851 found_edge = 1;
8852 break;
8854 for (e = s->pred; e; e = e->pred_next)
8855 if (e->src == p)
8857 found_edge = 1;
8858 break;
8860 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
8861 == EDGE_INDEX_NO_EDGE && found_edge != 0)
8862 fprintf (f, "*** Edge (%d, exit) appears to not have an index\n",
8863 pred);
8864 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
8865 != EDGE_INDEX_NO_EDGE && found_edge == 0)
8866 fprintf (f, "*** Edge (%d, exit) has index %d, but no edge exists\n",
8867 pred, EDGE_INDEX (elist, BASIC_BLOCK (pred),
8868 EXIT_BLOCK_PTR));
8872 /* This routine will determine what, if any, edge there is between
8873 a specified predecessor and successor. */
8876 find_edge_index (edge_list, pred, succ)
8877 struct edge_list *edge_list;
8878 basic_block pred, succ;
8880 int x;
8881 for (x = 0; x < NUM_EDGES (edge_list); x++)
8883 if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
8884 && INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
8885 return x;
8887 return (EDGE_INDEX_NO_EDGE);
8890 /* This function will remove an edge from the flow graph. */
8892 void
8893 remove_edge (e)
8894 edge e;
8896 edge last_pred = NULL;
8897 edge last_succ = NULL;
8898 edge tmp;
8899 basic_block src, dest;
8900 src = e->src;
8901 dest = e->dest;
8902 for (tmp = src->succ; tmp && tmp != e; tmp = tmp->succ_next)
8903 last_succ = tmp;
8905 if (!tmp)
8906 abort ();
8907 if (last_succ)
8908 last_succ->succ_next = e->succ_next;
8909 else
8910 src->succ = e->succ_next;
8912 for (tmp = dest->pred; tmp && tmp != e; tmp = tmp->pred_next)
8913 last_pred = tmp;
8915 if (!tmp)
8916 abort ();
8917 if (last_pred)
8918 last_pred->pred_next = e->pred_next;
8919 else
8920 dest->pred = e->pred_next;
8922 n_edges--;
8923 free (e);
8926 /* This routine will remove any fake successor edges for a basic block.
8927 When the edge is removed, it is also removed from whatever predecessor
8928 list it is in. */
8930 static void
8931 remove_fake_successors (bb)
8932 basic_block bb;
8934 edge e;
8935 for (e = bb->succ; e;)
8937 edge tmp = e;
8938 e = e->succ_next;
8939 if ((tmp->flags & EDGE_FAKE) == EDGE_FAKE)
8940 remove_edge (tmp);
8944 /* This routine will remove all fake edges from the flow graph. If
8945 we remove all fake successors, it will automatically remove all
8946 fake predecessors. */
8948 void
8949 remove_fake_edges ()
8951 int x;
8953 for (x = 0; x < n_basic_blocks; x++)
8954 remove_fake_successors (BASIC_BLOCK (x));
8956 /* We've handled all successors except the entry block's. */
8957 remove_fake_successors (ENTRY_BLOCK_PTR);
8960 /* This function will add a fake edge between any block which has no
8961 successors, and the exit block. Some data flow equations require these
8962 edges to exist. */
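/* A typical usage pattern looks roughly like this (illustrative sketch,
   not from this file): bracket a dataflow computation that requires every
   block to reach EXIT, then undo the temporary edges.

       add_noreturn_fake_exit_edges ();
       ... solve the dataflow equations over the CFG ...
       remove_fake_edges ();
*/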
8964 void
8965 add_noreturn_fake_exit_edges ()
8967 int x;
8969 for (x = 0; x < n_basic_blocks; x++)
8970 if (BASIC_BLOCK (x)->succ == NULL)
8971 make_edge (NULL, BASIC_BLOCK (x), EXIT_BLOCK_PTR, EDGE_FAKE);
8974 /* This function adds a fake edge from each infinite loop to the
8975 exit block. Some optimizations require a path from each node to
8976 the exit node.
8978 See also Morgan, Figure 3.10, pp. 82-83.
8980 The current implementation is ugly, not attempting to minimize the
8981 number of inserted fake edges. To reduce the number of fake edges
8982 to insert, add fake edges from _innermost_ loops containing only
8983 nodes not reachable from the exit block. */
8985 void
8986 connect_infinite_loops_to_exit ()
8988 basic_block unvisited_block;
8990 /* Perform depth-first search in the reverse graph to find nodes
8991 reachable from the exit block. */
8992 struct depth_first_search_dsS dfs_ds;
8994 flow_dfs_compute_reverse_init (&dfs_ds);
8995 flow_dfs_compute_reverse_add_bb (&dfs_ds, EXIT_BLOCK_PTR);
8997 /* Repeatedly add fake edges, updating the unreachable nodes. */
8998 while (1)
9000 unvisited_block = flow_dfs_compute_reverse_execute (&dfs_ds);
9001 if (!unvisited_block)
9002 break;
9003 make_edge (NULL, unvisited_block, EXIT_BLOCK_PTR, EDGE_FAKE);
9004 flow_dfs_compute_reverse_add_bb (&dfs_ds, unvisited_block);
9007 flow_dfs_compute_reverse_finish (&dfs_ds);
9009 return;
9012 /* Redirect an edge's successor from one block to another. */
9014 void
9015 redirect_edge_succ (e, new_succ)
9016 edge e;
9017 basic_block new_succ;
9019 edge *pe;
9021 /* Disconnect the edge from the old successor block. */
9022 for (pe = &e->dest->pred; *pe != e; pe = &(*pe)->pred_next)
9023 continue;
9024 *pe = (*pe)->pred_next;
9026 /* Reconnect the edge to the new successor block. */
9027 e->pred_next = new_succ->pred;
9028 new_succ->pred = e;
9029 e->dest = new_succ;
9032 /* Like the previous function, but avoid creating a possible duplicate edge. */
9034 edge
9035 redirect_edge_succ_nodup (e, new_succ)
9036 edge e;
9037 basic_block new_succ;
9039 edge s;
9040 /* Check whether the edge is already present. */
9041 for (s = e->src->succ; s; s = s->succ_next)
9042 if (s->dest == new_succ && s != e)
9043 break;
9044 if (s)
9046 s->flags |= e->flags;
9047 s->probability += e->probability;
9048 s->count += e->count;
9049 remove_edge (e);
9050 e = s;
9052 else
9053 redirect_edge_succ (e, new_succ);
9054 return e;
9057 /* Redirect an edge's predecessor from one block to another. */
9059 void
9060 redirect_edge_pred (e, new_pred)
9061 edge e;
9062 basic_block new_pred;
9064 edge *pe;
9066 /* Disconnect the edge from the old predecessor block. */
9067 for (pe = &e->src->succ; *pe != e; pe = &(*pe)->succ_next)
9068 continue;
9069 *pe = (*pe)->succ_next;
9071 /* Reconnect the edge to the new predecessor block. */
9072 e->succ_next = new_pred->succ;
9073 new_pred->succ = e;
9074 e->src = new_pred;
9077 /* Dump the list of basic blocks in the bitmap NODES. */
9079 static void
9080 flow_nodes_print (str, nodes, file)
9081 const char *str;
9082 const sbitmap nodes;
9083 FILE *file;
9085 int node;
9087 if (! nodes)
9088 return;
9090 fprintf (file, "%s { ", str);
9091 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {fprintf (file, "%d ", node);});
9092 fputs ("}\n", file);
9096 /* Dump the list of edges in the array EDGE_LIST. */
9098 static void
9099 flow_edge_list_print (str, edge_list, num_edges, file)
9100 const char *str;
9101 const edge *edge_list;
9102 int num_edges;
9103 FILE *file;
9105 int i;
9107 if (! edge_list)
9108 return;
9110 fprintf (file, "%s { ", str);
9111 for (i = 0; i < num_edges; i++)
9112 fprintf (file, "%d->%d ", edge_list[i]->src->index,
9113 edge_list[i]->dest->index);
9114 fputs ("}\n", file);
9118 /* Dump loop related CFG information. */
9120 static void
9121 flow_loops_cfg_dump (loops, file)
9122 const struct loops *loops;
9123 FILE *file;
9125 int i;
9127 if (! loops->num || ! file || ! loops->cfg.dom)
9128 return;
9130 for (i = 0; i < n_basic_blocks; i++)
9132 edge succ;
9134 fprintf (file, ";; %d succs { ", i);
9135 for (succ = BASIC_BLOCK (i)->succ; succ; succ = succ->succ_next)
9136 fprintf (file, "%d ", succ->dest->index);
9137 flow_nodes_print ("} dom", loops->cfg.dom[i], file);
9140 /* Dump the DFS node order. */
9141 if (loops->cfg.dfs_order)
9143 fputs (";; DFS order: ", file);
9144 for (i = 0; i < n_basic_blocks; i++)
9145 fprintf (file, "%d ", loops->cfg.dfs_order[i]);
9146 fputs ("\n", file);
9148 /* Dump the reverse completion node order. */
9149 if (loops->cfg.rc_order)
9151 fputs (";; RC order: ", file);
9152 for (i = 0; i < n_basic_blocks; i++)
9153 fprintf (file, "%d ", loops->cfg.rc_order[i]);
9154 fputs ("\n", file);
9158 /* Return non-zero if the nodes of LOOP are a subset of OUTER. */
9160 static int
9161 flow_loop_nested_p (outer, loop)
9162 struct loop *outer;
9163 struct loop *loop;
9165 return sbitmap_a_subset_b_p (loop->nodes, outer->nodes);
9169 /* Dump the loop information specified by LOOP to the stream FILE
9170 using auxiliary dump callback function LOOP_DUMP_AUX if non null. */
9171 void
9172 flow_loop_dump (loop, file, loop_dump_aux, verbose)
9173 const struct loop *loop;
9174 FILE *file;
9175 void (*loop_dump_aux) PARAMS((const struct loop *, FILE *, int));
9176 int verbose;
9178 if (! loop || ! loop->header)
9179 return;
9181 if (loop->first->head && loop->last->end)
9182 fprintf (file, ";;\n;; Loop %d (%d to %d):%s%s\n",
9183 loop->num, INSN_UID (loop->first->head),
9184 INSN_UID (loop->last->end),
9185 loop->shared ? " shared" : "",
9186 loop->invalid ? " invalid" : "");
9187 else
9188 fprintf (file, ";;\n;; Loop %d:%s%s\n", loop->num,
9189 loop->shared ? " shared" : "",
9190 loop->invalid ? " invalid" : "");
9192 fprintf (file, ";; header %d, latch %d, pre-header %d, first %d, last %d\n",
9193 loop->header->index, loop->latch->index,
9194 loop->pre_header ? loop->pre_header->index : -1,
9195 loop->first->index, loop->last->index);
9196 fprintf (file, ";; depth %d, level %d, outer %ld\n",
9197 loop->depth, loop->level,
9198 (long) (loop->outer ? loop->outer->num : -1));
9200 if (loop->pre_header_edges)
9201 flow_edge_list_print (";; pre-header edges", loop->pre_header_edges,
9202 loop->num_pre_header_edges, file);
9203 flow_edge_list_print (";; entry edges", loop->entry_edges,
9204 loop->num_entries, file);
9205 fprintf (file, ";; %d", loop->num_nodes);
9206 flow_nodes_print (" nodes", loop->nodes, file);
9207 flow_edge_list_print (";; exit edges", loop->exit_edges,
9208 loop->num_exits, file);
9209 if (loop->exits_doms)
9210 flow_nodes_print (";; exit doms", loop->exits_doms, file);
9211 if (loop_dump_aux)
9212 loop_dump_aux (loop, file, verbose);
9216 /* Dump the loop information specified by LOOPS to the stream FILE,
9217 using auxiliary dump callback function LOOP_DUMP_AUX if non null. */
9218 void
9219 flow_loops_dump (loops, file, loop_dump_aux, verbose)
9220 const struct loops *loops;
9221 FILE *file;
9222 void (*loop_dump_aux) PARAMS((const struct loop *, FILE *, int));
9223 int verbose;
9225 int i;
9226 int num_loops;
9228 num_loops = loops->num;
9229 if (! num_loops || ! file)
9230 return;
9232 fprintf (file, ";; %d loops found, %d levels\n",
9233 num_loops, loops->levels);
9235 for (i = 0; i < num_loops; i++)
9237 struct loop *loop = &loops->array[i];
9239 flow_loop_dump (loop, file, loop_dump_aux, verbose);
9241 if (loop->shared)
9243 int j;
9245 for (j = 0; j < i; j++)
9247 struct loop *oloop = &loops->array[j];
9249 if (loop->header == oloop->header)
9251 int disjoint;
9252 int smaller;
9254 smaller = loop->num_nodes < oloop->num_nodes;
9256 /* If the union of LOOP and OLOOP is different than
9257 the larger of LOOP and OLOOP then LOOP and OLOOP
9258 must be disjoint. */
9259 disjoint = ! flow_loop_nested_p (smaller ? loop : oloop,
9260 smaller ? oloop : loop);
9261 fprintf (file,
9262 ";; loop header %d shared by loops %d, %d %s\n",
9263 loop->header->index, i, j,
9264 disjoint ? "disjoint" : "nested");
9270 if (verbose)
9271 flow_loops_cfg_dump (loops, file);
9275 /* Free all the memory allocated for LOOPS. */
9277 void
9278 flow_loops_free (loops)
9279 struct loops *loops;
9281 if (loops->array)
9283 int i;
9285 if (! loops->num)
9286 abort ();
9288 /* Free the loop descriptors. */
9289 for (i = 0; i < loops->num; i++)
9291 struct loop *loop = &loops->array[i];
9293 if (loop->pre_header_edges)
9294 free (loop->pre_header_edges);
9295 if (loop->nodes)
9296 sbitmap_free (loop->nodes);
9297 if (loop->entry_edges)
9298 free (loop->entry_edges);
9299 if (loop->exit_edges)
9300 free (loop->exit_edges);
9301 if (loop->exits_doms)
9302 sbitmap_free (loop->exits_doms);
9304 free (loops->array);
9305 loops->array = NULL;
9307 if (loops->cfg.dom)
9308 sbitmap_vector_free (loops->cfg.dom);
9309 if (loops->cfg.dfs_order)
9310 free (loops->cfg.dfs_order);
9312 if (loops->shared_headers)
9313 sbitmap_free (loops->shared_headers);
9318 /* Find the entry edges into the loop with header HEADER and nodes
9319 NODES and store in ENTRY_EDGES array. Return the number of entry
9320 edges into the loop. */
9322 static int
9323 flow_loop_entry_edges_find (header, nodes, entry_edges)
9324 basic_block header;
9325 const sbitmap nodes;
9326 edge **entry_edges;
9328 edge e;
9329 int num_entries;
9331 *entry_edges = NULL;
9333 num_entries = 0;
9334 for (e = header->pred; e; e = e->pred_next)
9336 basic_block src = e->src;
9338 if (src == ENTRY_BLOCK_PTR || ! TEST_BIT (nodes, src->index))
9339 num_entries++;
9342 if (! num_entries)
9343 abort ();
9345 *entry_edges = (edge *) xmalloc (num_entries * sizeof (edge *));
9347 num_entries = 0;
9348 for (e = header->pred; e; e = e->pred_next)
9350 basic_block src = e->src;
9352 if (src == ENTRY_BLOCK_PTR || ! TEST_BIT (nodes, src->index))
9353 (*entry_edges)[num_entries++] = e;
9356 return num_entries;
9360 /* Find the exit edges from the loop using the bitmap of loop nodes
9361 NODES and store in EXIT_EDGES array. Return the number of
9362 exit edges from the loop. */
9364 static int
9365 flow_loop_exit_edges_find (nodes, exit_edges)
9366 const sbitmap nodes;
9367 edge **exit_edges;
9369 edge e;
9370 int node;
9371 int num_exits;
9373 *exit_edges = NULL;
9375 /* Check all nodes within the loop to see if there are any
9376 successors not in the loop. Note that a node may have multiple
9377 exiting edges. For instance, a node can have one jumping edge and one
9378 fallthru edge, of which only one can exit the loop. */
9379 num_exits = 0;
9380 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
9381 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
9383 basic_block dest = e->dest;
9385 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
9386 num_exits++;
9390 if (! num_exits)
9391 return 0;
9393 *exit_edges = (edge *) xmalloc (num_exits * sizeof (edge *));
9395 /* Store all exiting edges into an array. */
9396 num_exits = 0;
9397 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
9398 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
9400 basic_block dest = e->dest;
9402 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
9403 (*exit_edges)[num_exits++] = e;
9407 return num_exits;
9411 /* Find the nodes contained within the loop with header HEADER and
9412 latch LATCH and store in NODES. Return the number of nodes within
9413 the loop. */
9415 static int
9416 flow_loop_nodes_find (header, latch, nodes)
9417 basic_block header;
9418 basic_block latch;
9419 sbitmap nodes;
9421 basic_block *stack;
9422 int sp;
9423 int num_nodes = 0;
9425 stack = (basic_block *) xmalloc (n_basic_blocks * sizeof (basic_block));
9426 sp = 0;
9428 /* Start with only the loop header in the set of loop nodes. */
9429 sbitmap_zero (nodes);
9430 SET_BIT (nodes, header->index);
9431 num_nodes++;
9432 header->loop_depth++;
9434 /* Push the loop latch on to the stack. */
9435 if (! TEST_BIT (nodes, latch->index))
9437 SET_BIT (nodes, latch->index);
9438 latch->loop_depth++;
9439 num_nodes++;
9440 stack[sp++] = latch;
9443 while (sp)
9445 basic_block node;
9446 edge e;
9448 node = stack[--sp];
9449 for (e = node->pred; e; e = e->pred_next)
9451 basic_block ancestor = e->src;
9453 /* If the ancestor is not already marked as part of the loop, add it
9454 to the set of loop nodes and push it onto the stack. */
9455 if (ancestor != ENTRY_BLOCK_PTR
9456 && ! TEST_BIT (nodes, ancestor->index))
9458 SET_BIT (nodes, ancestor->index);
9459 ancestor->loop_depth++;
9460 num_nodes++;
9461 stack[sp++] = ancestor;
9465 free (stack);
9466 return num_nodes;
9469 /* Compute the reverse topological sort order of the basic blocks. */
9470 void
9471 flow_reverse_top_sort_order_compute (rts_order)
9472 int *rts_order;
9474 edge *stack;
9475 int sp;
9476 int postnum = 0;
9477 sbitmap visited;
9479 /* Allocate stack for back-tracking up CFG. */
9480 stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
9481 sp = 0;
9483 /* Allocate bitmap to track nodes that have been visited. */
9484 visited = sbitmap_alloc (n_basic_blocks);
9486 /* None of the nodes in the CFG have been visited yet. */
9487 sbitmap_zero (visited);
9489 /* Push the first edge on to the stack. */
9490 stack[sp++] = ENTRY_BLOCK_PTR->succ;
9492 while (sp)
9494 edge e;
9495 basic_block src;
9496 basic_block dest;
9498 /* Look at the edge on the top of the stack. */
9499 e = stack[sp - 1];
9500 src = e->src;
9501 dest = e->dest;
9503 /* Check if the edge destination has been visited yet. */
9504 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
9506 /* Mark that we have visited the destination. */
9507 SET_BIT (visited, dest->index);
9509 if (dest->succ)
9511 /* Since the DEST node has been visited for the first
9512 time, check its successors. */
9513 stack[sp++] = dest->succ;
9515 else
9516 rts_order[postnum++] = dest->index;
9518 else
9520 if (! e->succ_next && src != ENTRY_BLOCK_PTR)
9521 rts_order[postnum++] = src->index;
9523 if (e->succ_next)
9524 stack[sp - 1] = e->succ_next;
9525 else
9526 sp--;
9530 free (stack);
9531 sbitmap_free (visited);
9534 /* Compute the depth first search order and store it in the array
9535 DFS_ORDER if non-zero. If RC_ORDER is non-zero, return the
9536 reverse completion number for each node. Returns the number of
9537 nodes visited. A depth first search tries to get as far away
9538 from the starting point as quickly as
9539 possible. */
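/* An illustrative sketch of a typical call (not from the original
   sources):

       int *dfs_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
       int *rc_order = (int *) xmalloc (n_basic_blocks * sizeof (int));

       flow_depth_first_order_compute (dfs_order, rc_order);
       ... both arrays are indexed 0 .. n_basic_blocks - 1 ...
       free (rc_order);
       free (dfs_order);

   RC_ORDER may be passed as NULL when only the DFS order is needed.  */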
9542 flow_depth_first_order_compute (dfs_order, rc_order)
9543 int *dfs_order;
9544 int *rc_order;
9546 edge *stack;
9547 int sp;
9548 int dfsnum = 0;
9549 int rcnum = n_basic_blocks - 1;
9550 sbitmap visited;
9552 /* Allocate stack for back-tracking up CFG. */
9553 stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
9554 sp = 0;
9556 /* Allocate bitmap to track nodes that have been visited. */
9557 visited = sbitmap_alloc (n_basic_blocks);
9559 /* None of the nodes in the CFG have been visited yet. */
9560 sbitmap_zero (visited);
9562 /* Push the first edge on to the stack. */
9563 stack[sp++] = ENTRY_BLOCK_PTR->succ;
9565 while (sp)
9567 edge e;
9568 basic_block src;
9569 basic_block dest;
9571 /* Look at the edge on the top of the stack. */
9572 e = stack[sp - 1];
9573 src = e->src;
9574 dest = e->dest;
9576 /* Check if the edge destination has been visited yet. */
9577 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
9579 /* Mark that we have visited the destination. */
9580 SET_BIT (visited, dest->index);
9582 if (dfs_order)
9583 dfs_order[dfsnum++] = dest->index;
9585 if (dest->succ)
9587 /* Since the DEST node has been visited for the first
9588 time, check its successors. */
9589 stack[sp++] = dest->succ;
9591 else
9593 /* There are no successors for the DEST node so assign
9594 its reverse completion number. */
9595 if (rc_order)
9596 rc_order[rcnum--] = dest->index;
9599 else
9601 if (! e->succ_next && src != ENTRY_BLOCK_PTR)
9603 /* There are no more successors for the SRC node
9604 so assign its reverse completion number. */
9605 if (rc_order)
9606 rc_order[rcnum--] = src->index;
9609 if (e->succ_next)
9610 stack[sp - 1] = e->succ_next;
9611 else
9612 sp--;
9616 free (stack);
9617 sbitmap_free (visited);
9619 /* The number of nodes visited should not be greater than
9620 n_basic_blocks. */
9621 if (dfsnum > n_basic_blocks)
9622 abort ();
9624 /* If fewer nodes were visited, some blocks in the CFG are unreachable;
     that should not happen here. */
9625 if (dfsnum < n_basic_blocks)
9626 abort ();
9627 return dfsnum;
9630 /* Perform a depth first search on the _reverse_ graph, marking
9631 the nodes visited along the way.
9634 The computation is split into three pieces:
9636 flow_dfs_compute_reverse_init () creates the necessary data
9637 structures.
9639 flow_dfs_compute_reverse_add_bb () adds a basic block to the data
9640 structures. The block will start the search.
9642 flow_dfs_compute_reverse_execute () continues (or starts) the
9643 search using the block on the top of the stack, stopping when the
9644 stack is empty.
9646 flow_dfs_compute_reverse_finish () destroys the necessary data
9647 structures.
9649 Thus, the user will probably call ..._init(), call ..._add_bb() to
9650 add a beginning basic block to the stack, call ..._execute(),
9651 possibly add another bb to the stack and again call ..._execute(),
9652 ..., and finally call _finish(). */
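#if 0
/* An illustrative sketch (not from the original sources) of the calling
   sequence described above: visit every block reachable in the reverse
   graph, restarting from any block the search could not reach.  */
static void
reverse_dfs_example ()
{
  struct depth_first_search_dsS dfs_ds;
  basic_block bb;

  flow_dfs_compute_reverse_init (&dfs_ds);
  flow_dfs_compute_reverse_add_bb (&dfs_ds, EXIT_BLOCK_PTR);

  /* Each _execute call walks predecessors from the blocks on the stack
     and returns a block that is still unvisited, or NULL when done.  */
  while ((bb = flow_dfs_compute_reverse_execute (&dfs_ds)) != NULL)
    flow_dfs_compute_reverse_add_bb (&dfs_ds, bb);

  flow_dfs_compute_reverse_finish (&dfs_ds);
}
#endif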
9654 /* Initialize the data structures used for depth-first search on the
9655 reverse graph. DATA is the current depth-first search context.
9656 The caller adds the starting basic block(s) to the stack afterwards
9657 with flow_dfs_compute_reverse_add_bb. */
9660 static void
9661 flow_dfs_compute_reverse_init (data)
9662 depth_first_search_ds data;
9664 /* Allocate stack for back-tracking up CFG. */
9665 data->stack =
9666 (basic_block *) xmalloc ((n_basic_blocks - (INVALID_BLOCK + 1))
9667 * sizeof (basic_block));
9668 data->sp = 0;
9670 /* Allocate bitmap to track nodes that have been visited. */
9671 data->visited_blocks = sbitmap_alloc (n_basic_blocks - (INVALID_BLOCK + 1));
9673 /* None of the nodes in the CFG have been visited yet. */
9674 sbitmap_zero (data->visited_blocks);
9676 return;
9679 /* Add the specified basic block to the top of the dfs data
9680 structures. When the search continues, it will start at the
9681 block. */
9683 static void
9684 flow_dfs_compute_reverse_add_bb (data, bb)
9685 depth_first_search_ds data;
9686 basic_block bb;
9688 data->stack[data->sp++] = bb;
9689 return;
9692 /* Continue the depth-first search through the reverse graph starting
9693 with the block at the stack's top and ending when the stack is
9694 empty. Visited nodes are marked. Returns an unvisited basic
9695 block, or NULL if there is none available. */
9697 static basic_block
9698 flow_dfs_compute_reverse_execute (data)
9699 depth_first_search_ds data;
9701 basic_block bb;
9702 edge e;
9703 int i;
9705 while (data->sp > 0)
9707 bb = data->stack[--data->sp];
9709 /* Mark that we have visited this node. */
9710 if (!TEST_BIT (data->visited_blocks, bb->index - (INVALID_BLOCK + 1)))
9712 SET_BIT (data->visited_blocks, bb->index - (INVALID_BLOCK + 1));
9714 /* Perform depth-first search on adjacent vertices. */
9715 for (e = bb->pred; e; e = e->pred_next)
9716 flow_dfs_compute_reverse_add_bb (data, e->src);
9720 /* Determine if there are unvisited basic blocks. */
9721 for (i = n_basic_blocks - (INVALID_BLOCK + 1); --i >= 0;)
9722 if (!TEST_BIT (data->visited_blocks, i))
9723 return BASIC_BLOCK (i + (INVALID_BLOCK + 1));
9724 return NULL;
9727 /* Destroy the data structures needed for depth-first search on the
9728 reverse graph. */
9730 static void
9731 flow_dfs_compute_reverse_finish (data)
9732 depth_first_search_ds data;
9734 free (data->stack);
9735 sbitmap_free (data->visited_blocks);
9736 return;
9740 /* Find the root node of the loop pre-header extended basic block and
9741 the edges along the trace from the root node to the loop header. */
9743 static void
9744 flow_loop_pre_header_scan (loop)
9745 struct loop *loop;
9747 int num = 0;
9748 basic_block ebb;
9750 loop->num_pre_header_edges = 0;
9752 if (loop->num_entries != 1)
9753 return;
9755 ebb = loop->entry_edges[0]->src;
9757 if (ebb != ENTRY_BLOCK_PTR)
9759 edge e;
9761 /* Count number of edges along trace from loop header to
9762 root of pre-header extended basic block. Usually this is
9763 only one or two edges. */
9764 num++;
9765 while (ebb->pred->src != ENTRY_BLOCK_PTR && ! ebb->pred->pred_next)
9767 ebb = ebb->pred->src;
9768 num++;
9771 loop->pre_header_edges = (edge *) xmalloc (num * sizeof (edge *));
9772 loop->num_pre_header_edges = num;
9774 /* Store the edges in the order that they are followed. The source
9775 of the first edge is the root node of the pre-header extended
9776 basic block and the destination of the last edge is
9777 the loop header. */
9778 for (e = loop->entry_edges[0]; num; e = e->src->pred)
9780 loop->pre_header_edges[--num] = e;
9786 /* Return the block for the pre-header of the loop with header
9787 HEADER where DOM specifies the dominator information. Return NULL if
9788 there is no pre-header. */
9790 static basic_block
9791 flow_loop_pre_header_find (header, dom)
9792 basic_block header;
9793 const sbitmap *dom;
9795 basic_block pre_header;
9796 edge e;
9798 /* If block P is a predecessor of the header and is the only
9799 predecessor that the header does not dominate, then it is the pre-header. */
9800 pre_header = NULL;
9801 for (e = header->pred; e; e = e->pred_next)
9803 basic_block node = e->src;
9805 if (node != ENTRY_BLOCK_PTR
9806 && ! TEST_BIT (dom[node->index], header->index))
9808 if (pre_header == NULL)
9809 pre_header = node;
9810 else
9812 /* There are multiple edges into the header from outside
9813 the loop so there is no pre-header block. */
9814 pre_header = NULL;
9815 break;
9819 return pre_header;
9822 /* Add LOOP to the loop hierarchy tree where PREVLOOP was the loop
9823 previously added. The insertion algorithm assumes that the loops
9824 are added in the order found by a depth first search of the CFG. */
9826 static void
9827 flow_loop_tree_node_add (prevloop, loop)
9828 struct loop *prevloop;
9829 struct loop *loop;
9832 if (flow_loop_nested_p (prevloop, loop))
9834 prevloop->inner = loop;
9835 loop->outer = prevloop;
9836 return;
9839 while (prevloop->outer)
9841 if (flow_loop_nested_p (prevloop->outer, loop))
9843 prevloop->next = loop;
9844 loop->outer = prevloop->outer;
9845 return;
9847 prevloop = prevloop->outer;
9850 prevloop->next = loop;
9851 loop->outer = NULL;
9854 /* Build the loop hierarchy tree for LOOPS. */
9856 static void
9857 flow_loops_tree_build (loops)
9858 struct loops *loops;
9860 int i;
9861 int num_loops;
9863 num_loops = loops->num;
9864 if (! num_loops)
9865 return;
9867 /* Root the loop hierarchy tree with the first loop found.
9868 Since we used a depth first search this should be the
9869 outermost loop. */
9870 loops->tree_root = &loops->array[0];
9871 loops->tree_root->outer = loops->tree_root->inner = loops->tree_root->next = NULL;
9873 /* Add the remaining loops to the tree. */
9874 for (i = 1; i < num_loops; i++)
9875 flow_loop_tree_node_add (&loops->array[i - 1], &loops->array[i]);
9878 /* Helper function to compute loop nesting depth and enclosed loop level
9879 for the natural loop specified by LOOP at the loop depth DEPTH.
9880 Returns the loop level. */
9882 static int
9883 flow_loop_level_compute (loop, depth)
9884 struct loop *loop;
9885 int depth;
9887 struct loop *inner;
9888 int level = 1;
9890 if (! loop)
9891 return 0;
9893 /* Traverse loop tree assigning depth and computing level as the
9894 maximum level of all the inner loops of this loop. The loop
9895 level is equivalent to the height of the loop in the loop tree
9896 and corresponds to the number of enclosed loop levels (including
9897 itself). */
9898 for (inner = loop->inner; inner; inner = inner->next)
9900 int ilevel;
9902 ilevel = flow_loop_level_compute (inner, depth + 1) + 1;
9904 if (ilevel > level)
9905 level = ilevel;
9907 loop->level = level;
9908 loop->depth = depth;
9909 return level;
9912 /* Compute the loop nesting depth and enclosed loop level for the loop
9913 hierarchy tree specified by LOOPS. Return the maximum enclosed loop
9914 level. */
9916 static int
9917 flow_loops_level_compute (loops)
9918 struct loops *loops;
9920 struct loop *loop;
9921 int level;
9922 int levels = 0;
9924 /* Traverse all the outer level loops. */
9925 for (loop = loops->tree_root; loop; loop = loop->next)
9927 level = flow_loop_level_compute (loop, 1);
9928 if (level > levels)
9929 levels = level;
9931 return levels;
9935 /* Scan a single natural loop specified by LOOP, collecting the
9936 information about it requested by FLAGS. */
9938 static int
9939 flow_loop_scan (loops, loop, flags)
9940 struct loops *loops;
9941 struct loop *loop;
9942 int flags;
9944 /* Determine prerequisites. */
9945 if ((flags & LOOP_EXITS_DOMS) && ! loop->exit_edges)
9946 flags |= LOOP_EXIT_EDGES;
9948 if (flags & LOOP_ENTRY_EDGES)
9950 /* Find edges which enter the loop header.
9951 Note that the entry edges should only
9952 enter the header of a natural loop. */
9953 loop->num_entries
9954 = flow_loop_entry_edges_find (loop->header,
9955 loop->nodes,
9956 &loop->entry_edges);
9959 if (flags & LOOP_EXIT_EDGES)
9961 /* Find edges which exit the loop. */
9962 loop->num_exits
9963 = flow_loop_exit_edges_find (loop->nodes,
9964 &loop->exit_edges);
9967 if (flags & LOOP_EXITS_DOMS)
9969 int j;
9971 /* Determine which loop nodes dominate all the exits
9972 of the loop. */
9973 loop->exits_doms = sbitmap_alloc (n_basic_blocks);
9974 sbitmap_copy (loop->exits_doms, loop->nodes);
9975 for (j = 0; j < loop->num_exits; j++)
9976 sbitmap_a_and_b (loop->exits_doms, loop->exits_doms,
9977 loops->cfg.dom[loop->exit_edges[j]->src->index]);
9979 /* The header of a natural loop must dominate
9980 all exits. */
9981 if (! TEST_BIT (loop->exits_doms, loop->header->index))
9982 abort ();
9985 if (flags & LOOP_PRE_HEADER)
9987 /* Look to see if the loop has a pre-header node. */
9988 loop->pre_header
9989 = flow_loop_pre_header_find (loop->header, loops->cfg.dom);
9991 /* Find the blocks within the extended basic block of
9992 the loop pre-header. */
9993 flow_loop_pre_header_scan (loop);
9995 return 1;
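/* Hedged usage note: flow_loop_scan is driven by the same FLAGS bits as
   flow_loops_find below, for example

       flow_loop_scan (loops, loop, LOOP_ENTRY_EDGES | LOOP_EXIT_EDGES);

   and, as handled at the top of the function, LOOP_EXITS_DOMS implies
   LOOP_EXIT_EDGES when the exit edges have not yet been computed.  */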
9999 /* Find all the natural loops in the function and save in LOOPS structure
10000 and recalculate loop_depth information in basic block structures.
10001 FLAGS controls which loop information is collected.
10002 Return the number of natural loops found. */
10004 int
10005 flow_loops_find (loops, flags)
10006 struct loops *loops;
10007 int flags;
10009 int i;
10010 int b;
10011 int num_loops;
10012 edge e;
10013 sbitmap headers;
10014 sbitmap *dom;
10015 int *dfs_order;
10016 int *rc_order;
10018 /* This function cannot be repeatedly called with different
10019 flags to build up the loop information. The loop tree
10020 must always be built if this function is called. */
10021 if (! (flags & LOOP_TREE))
10022 abort ();
10024 memset (loops, 0, sizeof (*loops));
10026 /* Taking care of this degenerate case makes the rest of
10027 this code simpler. */
10028 if (n_basic_blocks == 0)
10029 return 0;
10031 dfs_order = NULL;
10032 rc_order = NULL;
10034 /* Compute the dominators. */
10035 dom = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
10036 calculate_dominance_info (NULL, dom, CDI_DOMINATORS);
10038 /* Count the number of loop edges (back edges). This should be the
10039 same as the number of natural loops. */
10041 num_loops = 0;
10042 for (b = 0; b < n_basic_blocks; b++)
10044 basic_block header;
10046 header = BASIC_BLOCK (b);
10047 header->loop_depth = 0;
10049 for (e = header->pred; e; e = e->pred_next)
10051 basic_block latch = e->src;
10053 /* Look for back edges where a predecessor is dominated
10054 by this block. A natural loop has a single entry
10055 node (header) that dominates all the nodes in the
10056 loop. It also has a single back edge to the header
10057 from a latch node. Note that multiple natural loops
10058 may share the same header. */
10059 if (b != header->index)
10060 abort ();
10062 if (latch != ENTRY_BLOCK_PTR && TEST_BIT (dom[latch->index], b))
10063 num_loops++;
10067 if (num_loops)
10069 /* Compute depth first search order of the CFG so that outer
10070 natural loops will be found before inner natural loops. */
10071 dfs_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
10072 rc_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
10073 flow_depth_first_order_compute (dfs_order, rc_order);
10075 /* Save CFG derived information to avoid recomputing it. */
10076 loops->cfg.dom = dom;
10077 loops->cfg.dfs_order = dfs_order;
10078 loops->cfg.rc_order = rc_order;
10080 /* Allocate loop structures. */
10081 loops->array
10082 = (struct loop *) xcalloc (num_loops, sizeof (struct loop));
10084 headers = sbitmap_alloc (n_basic_blocks);
10085 sbitmap_zero (headers);
10087 loops->shared_headers = sbitmap_alloc (n_basic_blocks);
10088 sbitmap_zero (loops->shared_headers);
10090 /* Find and record information about all the natural loops
10091 in the CFG. */
10092 num_loops = 0;
10093 for (b = 0; b < n_basic_blocks; b++)
10095 basic_block header;
10097 /* Search the nodes of the CFG in reverse completion order
10098 so that we can find outer loops first. */
10099 header = BASIC_BLOCK (rc_order[b]);
10101 /* Look for all the possible latch blocks for this header. */
10102 for (e = header->pred; e; e = e->pred_next)
10104 basic_block latch = e->src;
10106 /* Look for back edges where a predecessor is dominated
10107 by this block. A natural loop has a single entry
10108 node (header) that dominates all the nodes in the
10109 loop. It also has a single back edge to the header
10110 from a latch node. Note that multiple natural loops
10111 may share the same header. */
10112 if (latch != ENTRY_BLOCK_PTR
10113 && TEST_BIT (dom[latch->index], header->index))
10115 struct loop *loop;
10117 loop = loops->array + num_loops;
10119 loop->header = header;
10120 loop->latch = latch;
10121 loop->num = num_loops;
10123 num_loops++;
10128 for (i = 0; i < num_loops; i++)
10130 struct loop *loop = &loops->array[i];
10132 /* Keep track of blocks that are loop headers so
10133 that we can tell which loops should be merged. */
10134 if (TEST_BIT (headers, loop->header->index))
10135 SET_BIT (loops->shared_headers, loop->header->index);
10136 SET_BIT (headers, loop->header->index);
10138 /* Find nodes contained within the loop. */
10139 loop->nodes = sbitmap_alloc (n_basic_blocks);
10140 loop->num_nodes
10141 = flow_loop_nodes_find (loop->header, loop->latch, loop->nodes);
10143 /* Compute first and last blocks within the loop.
10144 These are often the same as the loop header and
10145 loop latch respectively, but this is not always
10146 the case. */
10147 loop->first
10148 = BASIC_BLOCK (sbitmap_first_set_bit (loop->nodes));
10149 loop->last
10150 = BASIC_BLOCK (sbitmap_last_set_bit (loop->nodes));
10152 flow_loop_scan (loops, loop, flags);
10155 /* Natural loops with shared headers may either be disjoint or
10156 nested. Disjoint loops with shared headers cannot be inner
10157 loops and should be merged. For now just mark loops that share
10158 headers. */
10159 for (i = 0; i < num_loops; i++)
10160 if (TEST_BIT (loops->shared_headers, loops->array[i].header->index))
10161 loops->array[i].shared = 1;
10163 sbitmap_free (headers);
10165 else
10167 sbitmap_vector_free (dom);
10170 loops->num = num_loops;
10172 /* Build the loop hierarchy tree. */
10173 flow_loops_tree_build (loops);
10175 /* Assign the loop nesting depth and enclosed loop level for each
10176 loop. */
10177 loops->levels = flow_loops_level_compute (loops);
10179 return num_loops;
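/* Hedged usage sketch, for illustration only (not code from this file):
   a pass that needs the natural loop structure might do roughly

       struct loops loops;
       int n = flow_loops_find (&loops, LOOP_TREE | LOOP_EXIT_EDGES);
       if (n > 0)
         {
           ... walk loops.tree_root or loops.array[0 .. n-1] ...
         }
       flow_loops_free (&loops);

   LOOP_TREE must always be included in FLAGS, as enforced by the abort
   above; flow_loops_free releases the data allocated here.  */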
10183 /* Update the information regarding the loops in the CFG
10184 specified by LOOPS. */
10185 int
10186 flow_loops_update (loops, flags)
10187 struct loops *loops;
10188 int flags;
10190 /* One day we may want to update the current loop data. For now
10191 throw away the old stuff and rebuild what we need. */
10192 if (loops->array)
10193 flow_loops_free (loops);
10195 return flow_loops_find (loops, flags);
10199 /* Return non-zero if edge E enters the header of LOOP from outside of LOOP. */
10201 int
10202 flow_loop_outside_edge_p (loop, e)
10203 const struct loop *loop;
10204 edge e;
10206 if (e->dest != loop->header)
10207 abort ();
10208 return (e->src == ENTRY_BLOCK_PTR)
10209 || ! TEST_BIT (loop->nodes, e->src->index);
10212 /* Clear LOG_LINKS fields of insns in a chain.
10213 Also clear the global_live_at_{start,end} fields of the basic block
10214 structures. */
10216 void
10217 clear_log_links (insns)
10218 rtx insns;
10220 rtx i;
10221 int b;
10223 for (i = insns; i; i = NEXT_INSN (i))
10224 if (INSN_P (i))
10225 LOG_LINKS (i) = 0;
10227 for (b = 0; b < n_basic_blocks; b++)
10229 basic_block bb = BASIC_BLOCK (b);
10231 bb->global_live_at_start = NULL;
10232 bb->global_live_at_end = NULL;
10235 ENTRY_BLOCK_PTR->global_live_at_end = NULL;
10236 EXIT_BLOCK_PTR->global_live_at_start = NULL;
10239 /* Given a register bitmap, turn on the bits in a HARD_REG_SET that
10240 correspond to the hard registers, if any, set in that map. This
10241 could be done far more efficiently by having all sorts of special-cases
10242 with moving single words, but probably isn't worth the trouble. */
10244 void
10245 reg_set_to_hard_reg_set (to, from)
10246 HARD_REG_SET *to;
10247 bitmap from;
10249 int i;
10251 EXECUTE_IF_SET_IN_BITMAP
10252 (from, 0, i,
10254 if (i >= FIRST_PSEUDO_REGISTER)
10255 return;
10256 SET_HARD_REG_BIT (*to, i);
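/* Hedged note on the loop above: EXECUTE_IF_SET_IN_BITMAP visits the set
   bits in increasing index order, so once a bit at or above
   FIRST_PSEUDO_REGISTER is seen no further hard register bits can
   follow, which is why the early return is safe.  */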
10260 /* Called once at initialization time. */
10262 void
10263 init_flow ()
10265 static int initialized;
10267 if (!initialized)
10269 gcc_obstack_init (&flow_obstack);
10270 flow_firstobj = (char *) obstack_alloc (&flow_obstack, 0);
10271 initialized = 1;
10273 else
10275 obstack_free (&flow_obstack, flow_firstobj);
10276 flow_firstobj = (char *) obstack_alloc (&flow_obstack, 0);
10280 /* Assume that the preceding pass has possibly eliminated jump instructions
10281 or converted the unconditional jumps. Eliminate the corresponding dead
10282 edges from the CFG. Return true if any edges are eliminated. */
10284 bool
10285 purge_dead_edges (bb)
10286 basic_block bb;
10288 edge e, next;
10289 rtx insn = bb->end;
10290 bool purged = false;
10292 if (GET_CODE (insn) == JUMP_INSN && !simplejump_p (insn))
10293 return false;
10294 if (GET_CODE (insn) == JUMP_INSN)
10296 rtx note;
10297 edge b, f;
10298 /* We only care about conditional jumps, return jumps and simplejumps. */
10299 if (!any_condjump_p (insn)
10300 && !returnjump_p (insn)
10301 && !simplejump_p (insn))
10302 return false;
10303 for (e = bb->succ; e; e = next)
10305 next = e->succ_next;
10307 /* Check the purposes for which we may keep this edge. */
10308 if ((e->flags & EDGE_FALLTHRU)
10309 && any_condjump_p (insn))
10310 continue;
10311 if (e->dest != EXIT_BLOCK_PTR
10312 && e->dest->head == JUMP_LABEL (insn))
10313 continue;
10314 if (e->dest == EXIT_BLOCK_PTR
10315 && returnjump_p (insn))
10316 continue;
10317 purged = true;
10318 remove_edge (e);
10320 if (!bb->succ || !purged)
10321 return false;
10322 if (rtl_dump_file)
10323 fprintf (rtl_dump_file, "Purged edges from bb %i\n", bb->index);
10324 if (!optimize)
10325 return purged;
10327 /* Redistribute probabilities. */
10328 if (!bb->succ->succ_next)
10330 bb->succ->probability = REG_BR_PROB_BASE;
10331 bb->succ->count = bb->count;
10333 else
10335 note = find_reg_note (insn, REG_BR_PROB, NULL);
10336 if (!note)
10337 return purged;
10338 b = BRANCH_EDGE (bb);
10339 f = FALLTHRU_EDGE (bb);
10340 b->probability = INTVAL (XEXP (note, 0));
10341 f->probability = REG_BR_PROB_BASE - b->probability;
10342 b->count = bb->count * b->probability / REG_BR_PROB_BASE;
10343 f->count = bb->count * f->probability / REG_BR_PROB_BASE;
10345 return purged;
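/* Hedged worked example (numbers are illustrative): with REG_BR_PROB_BASE
   of 10000, a REG_BR_PROB note value of 3000 and a block count of 200,
   the code above gives the branch edge probability 3000 and count 60,
   and the fallthru edge probability 7000 and count 140.  */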
10348 /* Cleanup abnormal edges caused by throwing insns that have been
10349 eliminated. */
10350 if (! can_throw_internal (bb->end))
10351 for (e = bb->succ; e; e = next)
10353 next = e->succ_next;
10354 if (e->flags & EDGE_EH)
10356 remove_edge (e);
10357 purged = true;
10361 /* If we don't see a jump insn, we don't know exactly why the block would
10362 have been broken at this point. Look for a simple, non-fallthru edge,
10363 as these are only created by conditional branches. If we find such an
10364 edge we know that there used to be a jump here and can then safely
10365 remove all non-fallthru edges. */
10366 for (e = bb->succ; e && (e->flags & (EDGE_COMPLEX | EDGE_FALLTHRU));
10367 e = e->succ_next);
10368 if (!e)
10369 return purged;
10370 for (e = bb->succ; e; e = next)
10372 next = e->succ_next;
10373 if (!(e->flags & EDGE_FALLTHRU))
10374 remove_edge (e), purged = true;
10376 if (!bb->succ || bb->succ->succ_next)
10377 abort ();
10378 bb->succ->probability = REG_BR_PROB_BASE;
10379 bb->succ->count = bb->count;
10381 if (rtl_dump_file)
10382 fprintf (rtl_dump_file, "Purged non-fallthru edges from bb %i\n",
10383 bb->index);
10384 return purged;
10387 /* Search all basic blocks for potentially dead edges and purge them.
10389 Return true if some edge has been eliminated. */
10392 bool
10393 purge_all_dead_edges ()
10395 int i, purged = false;
10396 for (i = 0; i < n_basic_blocks; i++)
10397 purged |= purge_dead_edges (BASIC_BLOCK (i));
10398 return purged;
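/* Hedged usage note: a pass that may have removed or redirected jumps
   would typically call purge_all_dead_edges () before relying on the
   CFG edge lists again; the return value only reports whether any edge
   was removed.  */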