* flow.c (try_simplify_condjump): Use tidy_fallthru_edge.
/* Data flow analysis for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
/* This file contains the data flow analysis pass of the compiler.  It
   computes data flow information which tells combine_instructions
   which insns to consider combining and controls register allocation.

   Additional data flow information that is too bulky to record is
   generated during the analysis, and is used at that time to create
   autoincrement and autodecrement addressing.

   The first step is dividing the function into basic blocks.
   find_basic_blocks does this.  Then life_analysis determines
   where each register is live and where it is dead.

   ** find_basic_blocks **

   find_basic_blocks divides the current function's rtl into basic
   blocks and constructs the CFG.  The blocks are recorded in the
   basic_block_info array; the CFG exists in the edge structures
   referenced by the blocks.

   find_basic_blocks also finds any unreachable loops and deletes them.

   ** life_analysis **

   life_analysis is called immediately after find_basic_blocks.
   It uses the basic block information to determine where each
   hard or pseudo register is live.

   ** live-register info **

   The information about where each register is live is in two parts:
   the REG_NOTES of insns, and the vector basic_block->global_live_at_start.

   basic_block->global_live_at_start has an element for each basic
   block, and the element is a bit-vector with a bit for each hard or
   pseudo register.  The bit is 1 if the register is live at the
   beginning of the basic block.
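   As an illustrative aside (not part of the original description), a
   consumer would test this bit-vector with the regset macros from
   basic-block.h, e.g. REGNO_REG_SET_P:

	if (REGNO_REG_SET_P (bb->global_live_at_start, regno))
	  ... register REGNO is live on entry to BB ...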
   Two types of elements can be added to an insn's REG_NOTES.
   A REG_DEAD note is added to an insn's REG_NOTES for any register
   that meets both of two conditions:  The value in the register is not
   needed in subsequent insns and the insn does not replace the value in
   the register (in the case of multi-word hard registers, the value in
   each register must be replaced by the insn to avoid a REG_DEAD note).

   In the vast majority of cases, an object in a REG_DEAD note will be
   used somewhere in the insn.  The (rare) exception to this is if an
   insn uses a multi-word hard register and only some of the registers are
   needed in subsequent insns.  In that case, REG_DEAD notes will be
   provided for those hard registers that are not subsequently needed.
   Partial REG_DEAD notes of this type do not occur when an insn sets
   only some of the hard registers used in such a multi-word operand;
   omitting REG_DEAD notes for objects stored in an insn is optional and
   the desire to do so does not justify the complexity of the partial
   REG_DEAD notes.

   REG_UNUSED notes are added for each register that is set by the insn
   but is unused subsequently (if every register set by the insn is unused
   and the insn does not reference memory or have some other side-effect,
   the insn is deleted instead).  If only part of a multi-word hard
   register is used in a subsequent insn, REG_UNUSED notes are made for
   the parts that will not be used.

   To determine which registers are live after any insn, one can
   start from the beginning of the basic block and scan insns, noting
   which registers are set by each insn and which die there.
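   A minimal sketch of that forward scan, for illustration only (LIVE
   and the elided steps are hypothetical, not part of this file):

	COPY_REG_SET (live, bb->global_live_at_start);
	for (insn = bb->head; ; insn = NEXT_INSN (insn))
	  {
	    ... clear in LIVE each register that dies here (REG_DEAD) ...
	    ... set in LIVE each register set by INSN ...
	    if (insn == bb->end)
	      break;
	  }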
   ** Other actions of life_analysis **

   life_analysis sets up the LOG_LINKS fields of insns because the
   information needed to do so is readily available.

   life_analysis deletes insns whose only effect is to store a value
   that is never used.

   life_analysis notices cases where a reference to a register as
   a memory address can be combined with a preceding or following
   incrementation or decrementation of the register.  The separate
   instruction to increment or decrement is deleted and the address
   is changed to a POST_INC or similar rtx.

   Each time an incrementing or decrementing address is created,
   a REG_INC element is added to the insn's REG_NOTES list.

   life_analysis fills in certain vectors containing information about
   register usage: REG_N_REFS, REG_N_DEATHS, REG_N_SETS, REG_LIVE_LENGTH,
   REG_N_CALLS_CROSSED and REG_BASIC_BLOCK.

   life_analysis sets current_function_sp_is_unchanging if the function
   doesn't modify the stack pointer.  */
/* TODO:

   Split out from life_analysis:
	- local property discovery (bb->local_live, bb->local_set)
	- global property computation
	- log links creation
	- pre/post modify transformation  */
#include "config.h"
#include "system.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-config.h"
#include "regs.h"
#include "flags.h"
#include "output.h"
#include "function.h"
#include "except.h"
#include "toplev.h"
#include "recog.h"
#include "expr.h"
#include "ssa.h"
#include "timevar.h"

#include "obstack.h"
#include "splay-tree.h"

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free
/* EXIT_IGNORE_STACK should be nonzero if, when returning from a function,
   the stack pointer does not matter.  The value is tested only in
   functions that have frame pointers.
   No definition is equivalent to always zero.  */
#ifndef EXIT_IGNORE_STACK
#define EXIT_IGNORE_STACK 0
#endif

#ifndef HAVE_epilogue
#define HAVE_epilogue 0
#endif
#ifndef HAVE_prologue
#define HAVE_prologue 0
#endif
#ifndef HAVE_sibcall_epilogue
#define HAVE_sibcall_epilogue 0
#endif

#ifndef LOCAL_REGNO
#define LOCAL_REGNO(REGNO)  0
#endif
#ifndef EPILOGUE_USES
#define EPILOGUE_USES(REGNO)  0
#endif

#ifdef HAVE_conditional_execution
#ifndef REVERSE_CONDEXEC_PREDICATES_P
#define REVERSE_CONDEXEC_PREDICATES_P(x, y) ((x) == reverse_condition (y))
#endif
#endif
/* The obstack on which the flow graph components are allocated.  */

struct obstack flow_obstack;
static char *flow_firstobj;

/* Number of basic blocks in the current function.  */

int n_basic_blocks;

/* Number of edges in the current function.  */

int n_edges;

/* The basic block array.  */

varray_type basic_block_info;
/* The special entry and exit blocks.  */

struct basic_block_def entry_exit_blocks[2]
= {{NULL,			/* head */
    NULL,			/* end */
    NULL,			/* head_tree */
    NULL,			/* end_tree */
    NULL,			/* pred */
    NULL,			/* succ */
    NULL,			/* local_set */
    NULL,			/* cond_local_set */
    NULL,			/* global_live_at_start */
    NULL,			/* global_live_at_end */
    NULL,			/* aux */
    ENTRY_BLOCK,		/* index */
    0,				/* loop_depth */
    0,				/* count */
    0				/* frequency */
   },
   {
    NULL,			/* head */
    NULL,			/* end */
    NULL,			/* head_tree */
    NULL,			/* end_tree */
    NULL,			/* pred */
    NULL,			/* succ */
    NULL,			/* local_set */
    NULL,			/* cond_local_set */
    NULL,			/* global_live_at_start */
    NULL,			/* global_live_at_end */
    NULL,			/* aux */
    EXIT_BLOCK,			/* index */
    0,				/* loop_depth */
    0,				/* count */
    0				/* frequency */
   }};
/* Nonzero if the second flow pass has completed.  */
int flow2_completed;

/* Maximum register number used in this function, plus one.  */

int max_regno;

/* Indexed by n, giving various register information.  */

varray_type reg_n_info;

/* Size of a regset for the current function,
   in (1) bytes and (2) elements.  */

int regset_bytes;
int regset_size;

/* Regset of regs live when calls to `setjmp'-like functions happen.  */
/* ??? Does this exist only for the setjmp-clobbered warning message?  */

regset regs_live_at_setjmp;

/* List made of EXPR_LIST rtx's which gives pairs of pseudo registers
   that have to go in the same hard reg.
   The first two regs in the list are a pair, and the next two
   are another pair, etc.  */
rtx regs_may_share;

/* Callback that determines if it's ok for a function to have no
   noreturn attribute.  */
int (*lang_missing_noreturn_ok_p) PARAMS ((tree));

/* Set of registers that may be eliminable.  These are handled specially
   in updating regs_ever_live.  */

static HARD_REG_SET elim_reg_set;

/* The basic block structure for every insn, indexed by uid.  */

varray_type basic_block_for_insn;

/* The labels mentioned in non-jump rtl.  Valid during find_basic_blocks.  */
/* ??? Should probably be using LABEL_NUSES instead.  It would take a
   bit of surgery to be able to use or co-opt the routines in jump.  */

static rtx label_value_list;
static rtx tail_recursion_label_list;
/* Holds information for tracking conditional register life information.  */
struct reg_cond_life_info
{
  /* A boolean expression of conditions under which a register is dead.  */
  rtx condition;
  /* Conditions under which a register is dead at the basic block end.  */
  rtx orig_condition;

  /* A boolean expression of conditions under which a register has been
     stored into.  */
  rtx stores;

  /* ??? Could store mask of bytes that are dead, so that we could finally
     track lifetimes of multi-word registers accessed via subregs.  */
};
/* For use in communicating between propagate_block and its subroutines.
   Holds all information needed to compute life and def-use information.  */

struct propagate_block_info
{
  /* The basic block we're considering.  */
  basic_block bb;

  /* Bit N is set if register N is conditionally or unconditionally live.  */
  regset reg_live;

  /* Bit N is set if register N is set this insn.  */
  regset new_set;

  /* Element N is the next insn that uses (hard or pseudo) register N
     within the current basic block; or zero, if there is no such insn.  */
  rtx *reg_next_use;

  /* Contains a list of all the MEMs we are tracking for dead store
     elimination.  */
  rtx mem_set_list;

  /* If non-null, record the set of registers set unconditionally in the
     basic block.  */
  regset local_set;

  /* If non-null, record the set of registers set conditionally in the
     basic block.  */
  regset cond_local_set;

#ifdef HAVE_conditional_execution
  /* Indexed by register number, holds a reg_cond_life_info for each
     register that is not unconditionally live or dead.  */
  splay_tree reg_cond_dead;

  /* Bit N is set if register N is in an expression in reg_cond_dead.  */
  regset reg_cond_reg;
#endif

  /* The length of mem_set_list.  */
  int mem_set_list_len;

  /* Non-zero if the value of CC0 is live.  */
  int cc0_live;

  /* Flags controlling the set of information propagate_block collects.  */
  int flags;
};

/* Maximum length of pbi->mem_set_list before we start dropping
   new elements on the floor.  */
#define MAX_MEM_SET_LIST_LEN	100
/* Store the data structures necessary for depth-first search.  */
struct depth_first_search_dsS {
  /* stack for backtracking during the algorithm */
  basic_block *stack;

  /* number of edges in the stack.  That is, positions 0, ..., sp-1
     have edges.  */
  unsigned int sp;

  /* record of basic blocks already seen by depth-first search */
  sbitmap visited_blocks;
};
typedef struct depth_first_search_dsS *depth_first_search_ds;
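
/* A hedged usage sketch of this structure, for illustration only; the
   driver below is an assumption about a typical caller of the reverse
   DFS helpers declared later in this file, not code taken from it:

	struct depth_first_search_dsS dfs_ds;
	basic_block bb;

	flow_dfs_compute_reverse_init (&dfs_ds);
	flow_dfs_compute_reverse_add_bb (&dfs_ds, EXIT_BLOCK_PTR);
	while ((bb = flow_dfs_compute_reverse_execute (&dfs_ds)) != NULL)
	  ... BB was reached by the reverse walk; process it ...
	flow_dfs_compute_reverse_finish (&dfs_ds);  */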
/* Have print_rtl_and_abort give the same information that fancy_abort
   does.  */
#define print_rtl_and_abort() \
  print_rtl_and_abort_fcn (__FILE__, __LINE__, __FUNCTION__)
/* Forward declarations */
static bool try_crossjump_to_edge	PARAMS ((int, edge, edge));
static bool try_crossjump_bb		PARAMS ((int, basic_block));
static bool outgoing_edges_match	PARAMS ((basic_block, basic_block));
static int flow_find_cross_jump		PARAMS ((int, basic_block, basic_block,
						 rtx *, rtx *));

static int count_basic_blocks		PARAMS ((rtx));
static void find_basic_blocks_1		PARAMS ((rtx));
static rtx find_label_refs		PARAMS ((rtx, rtx));
static void make_edges			PARAMS ((rtx, int, int));
static void make_label_edge		PARAMS ((sbitmap *, basic_block,
						 rtx, int));
static void make_eh_edge		PARAMS ((sbitmap *, basic_block, rtx));

static void commit_one_edge_insertion	PARAMS ((edge));

static void delete_unreachable_blocks	PARAMS ((void));
static int can_delete_note_p		PARAMS ((rtx));
static void expunge_block		PARAMS ((basic_block));
static int can_delete_label_p		PARAMS ((rtx));
static int tail_recursion_label_p	PARAMS ((rtx));
static int merge_blocks_move_predecessor_nojumps PARAMS ((basic_block,
							  basic_block));
static int merge_blocks_move_successor_nojumps PARAMS ((basic_block,
							basic_block));
static int merge_blocks			PARAMS ((edge, basic_block,
						 basic_block, int));
static bool try_optimize_cfg		PARAMS ((int));
static bool can_fallthru		PARAMS ((basic_block, basic_block));
static bool try_redirect_by_replacing_jump PARAMS ((edge, basic_block));
static bool try_simplify_condjump	PARAMS ((basic_block));
static bool try_forward_edges		PARAMS ((basic_block));
static void tidy_fallthru_edges		PARAMS ((void));
static int verify_wide_reg_1		PARAMS ((rtx *, void *));
static void verify_wide_reg		PARAMS ((int, rtx, rtx));
static void verify_local_live_at_start	PARAMS ((regset, basic_block));
static void notice_stack_pointer_modification_1 PARAMS ((rtx, rtx, void *));
static void notice_stack_pointer_modification PARAMS ((rtx));
static void mark_reg			PARAMS ((rtx, void *));
static void mark_regs_live_at_end	PARAMS ((regset));
static int set_phi_alternative_reg	PARAMS ((rtx, int, int, void *));
static void calculate_global_regs_live	PARAMS ((sbitmap, sbitmap, int));
static void propagate_block_delete_insn PARAMS ((basic_block, rtx));
static rtx propagate_block_delete_libcall PARAMS ((basic_block, rtx, rtx));
static int insn_dead_p			PARAMS ((struct propagate_block_info *,
						 rtx, int, rtx));
static int libcall_dead_p		PARAMS ((struct propagate_block_info *,
						 rtx, rtx));
static void mark_set_regs		PARAMS ((struct propagate_block_info *,
						 rtx, rtx));
static void mark_set_1			PARAMS ((struct propagate_block_info *,
						 enum rtx_code, rtx, rtx,
						 rtx, int));
#ifdef HAVE_conditional_execution
static int mark_regno_cond_dead		PARAMS ((struct propagate_block_info *,
						 int, rtx));
static void free_reg_cond_life_info	PARAMS ((splay_tree_value));
static int flush_reg_cond_reg_1		PARAMS ((splay_tree_node, void *));
static void flush_reg_cond_reg		PARAMS ((struct propagate_block_info *,
						 int));
static rtx elim_reg_cond		PARAMS ((rtx, unsigned int));
static rtx ior_reg_cond			PARAMS ((rtx, rtx, int));
static rtx not_reg_cond			PARAMS ((rtx));
static rtx and_reg_cond			PARAMS ((rtx, rtx, int));
#endif
#ifdef AUTO_INC_DEC
static void attempt_auto_inc		PARAMS ((struct propagate_block_info *,
						 rtx, rtx, rtx, rtx, rtx));
static void find_auto_inc		PARAMS ((struct propagate_block_info *,
						 rtx, rtx));
static int try_pre_increment_1		PARAMS ((struct propagate_block_info *,
						 rtx));
static int try_pre_increment		PARAMS ((rtx, rtx, HOST_WIDE_INT));
#endif
static void mark_used_reg		PARAMS ((struct propagate_block_info *,
						 rtx, rtx, rtx));
static void mark_used_regs		PARAMS ((struct propagate_block_info *,
						 rtx, rtx, rtx));
void dump_flow_info			PARAMS ((FILE *));
void debug_flow_info			PARAMS ((void));
static void print_rtl_and_abort_fcn	PARAMS ((const char *, int,
						 const char *))
     ATTRIBUTE_NORETURN;

static void invalidate_mems_from_autoinc PARAMS ((struct propagate_block_info *,
						  rtx));
static void invalidate_mems_from_set	PARAMS ((struct propagate_block_info *,
						 rtx));
static void remove_fake_successors	PARAMS ((basic_block));
static void flow_nodes_print		PARAMS ((const char *, const sbitmap,
						 FILE *));
static void flow_edge_list_print	PARAMS ((const char *, const edge *,
						 int, FILE *));
static void flow_loops_cfg_dump		PARAMS ((const struct loops *,
						 FILE *));
static int flow_loop_nested_p		PARAMS ((struct loop *,
						 struct loop *));
static int flow_loop_entry_edges_find	PARAMS ((basic_block, const sbitmap,
						 edge **));
static int flow_loop_exit_edges_find	PARAMS ((const sbitmap, edge **));
static int flow_loop_nodes_find		PARAMS ((basic_block, basic_block, sbitmap));
static void flow_dfs_compute_reverse_init
  PARAMS ((depth_first_search_ds));
static void flow_dfs_compute_reverse_add_bb
  PARAMS ((depth_first_search_ds, basic_block));
static basic_block flow_dfs_compute_reverse_execute
  PARAMS ((depth_first_search_ds));
static void flow_dfs_compute_reverse_finish
  PARAMS ((depth_first_search_ds));
static void flow_loop_pre_header_scan	PARAMS ((struct loop *));
static basic_block flow_loop_pre_header_find PARAMS ((basic_block,
						      const sbitmap *));
static void flow_loop_tree_node_add	PARAMS ((struct loop *, struct loop *));
static void flow_loops_tree_build	PARAMS ((struct loops *));
static int flow_loop_level_compute	PARAMS ((struct loop *, int));
static int flow_loops_level_compute	PARAMS ((struct loops *));
/* Find basic blocks of the current function.
   F is the first insn of the function and NREGS the number of register
   numbers in use.  */

void
find_basic_blocks (f, nregs, file)
     rtx f;
     int nregs ATTRIBUTE_UNUSED;
     FILE *file ATTRIBUTE_UNUSED;
{
  int max_uid;
  timevar_push (TV_CFG);

  /* Flush out existing data.  */
  if (basic_block_info != NULL)
    {
      int i;

      clear_edges ();

      /* Clear bb->aux on all extant basic blocks.  We'll use this as a
	 tag for reuse during create_basic_block, just in case some pass
	 copies around basic block notes improperly.  */
      for (i = 0; i < n_basic_blocks; ++i)
	BASIC_BLOCK (i)->aux = NULL;

      VARRAY_FREE (basic_block_info);
    }

  n_basic_blocks = count_basic_blocks (f);

  /* Size the basic block table.  The actual structures will be allocated
     by find_basic_blocks_1, since we want to keep the structure pointers
     stable across calls to find_basic_blocks.  */
  /* ??? This whole issue would be much simpler if we called find_basic_blocks
     exactly once, and thereafter we don't have a single long chain of
     instructions at all until close to the end of compilation when we
     actually lay them out.  */

  VARRAY_BB_INIT (basic_block_info, n_basic_blocks, "basic_block_info");

  find_basic_blocks_1 (f);

  /* Record the block to which an insn belongs.  */
  /* ??? This should be done another way, by which (perhaps) a label is
     tagged directly with the basic block that it starts.  It is used for
     more than that currently, but IMO that is the only valid use.  */

  max_uid = get_max_uid ();
#ifdef AUTO_INC_DEC
  /* Leave space for insns life_analysis makes in some cases for auto-inc.
     These cases are rare, so we don't need too much space.  */
  max_uid += max_uid / 10;
#endif

  compute_bb_for_insn (max_uid);

  /* Discover the edges of our cfg.  */
  make_edges (label_value_list, 0, n_basic_blocks - 1);

  /* Do very simple cleanup now, for the benefit of code that runs between
     here and cleanup_cfg, e.g. thread_prologue_and_epilogue_insns.  */
  tidy_fallthru_edges ();

  mark_critical_edges ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
  timevar_pop (TV_CFG);
}
void
check_function_return_warnings ()
{
  if (warn_missing_noreturn
      && !TREE_THIS_VOLATILE (cfun->decl)
      && EXIT_BLOCK_PTR->pred == NULL
      && (lang_missing_noreturn_ok_p
	  && !lang_missing_noreturn_ok_p (cfun->decl)))
    warning ("function might be possible candidate for attribute `noreturn'");

  /* If we have a path to EXIT, then we do return.  */
  if (TREE_THIS_VOLATILE (cfun->decl)
      && EXIT_BLOCK_PTR->pred != NULL)
    warning ("`noreturn' function does return");

  /* If the clobber_return_insn appears in some basic block, then we
     do reach the end without returning a value.  */
  else if (warn_return_type
	   && cfun->x_clobber_return_insn != NULL
	   && EXIT_BLOCK_PTR->pred != NULL)
    {
      int max_uid = get_max_uid ();

      /* If clobber_return_insn was excised by jump1, then renumber_insns
	 can make max_uid smaller than the number still recorded in our rtx.
	 That's fine, since this is a quick way of verifying that the insn
	 is no longer in the chain.  */
      if (INSN_UID (cfun->x_clobber_return_insn) < max_uid)
	{
	  /* Recompute insn->block mapping, since the initial mapping is
	     set before we delete unreachable blocks.  */
	  compute_bb_for_insn (max_uid);

	  if (BLOCK_FOR_INSN (cfun->x_clobber_return_insn) != NULL)
	    warning ("control reaches end of non-void function");
	}
    }
}
/* Count the basic blocks of the function.  */

static int
count_basic_blocks (f)
     rtx f;
{
  register rtx insn;
  register RTX_CODE prev_code;
  register int count = 0;
  int saw_abnormal_edge = 0;

  prev_code = JUMP_INSN;
  for (insn = f; insn; insn = NEXT_INSN (insn))
    {
      enum rtx_code code = GET_CODE (insn);

      if (code == CODE_LABEL
	  || (GET_RTX_CLASS (code) == 'i'
	      && (prev_code == JUMP_INSN
		  || prev_code == BARRIER
		  || saw_abnormal_edge)))
	{
	  saw_abnormal_edge = 0;
	  count++;
	}

      /* Record whether this insn created an edge.  */
      if (code == CALL_INSN)
	{
	  rtx note;

	  /* If there is a nonlocal goto label and the specified
	     region number isn't -1, we have an edge.  */
	  if (nonlocal_goto_handler_labels
	      && ((note = find_reg_note (insn, REG_EH_REGION, NULL_RTX)) == 0
		  || INTVAL (XEXP (note, 0)) >= 0))
	    saw_abnormal_edge = 1;

	  else if (can_throw_internal (insn))
	    saw_abnormal_edge = 1;
	}
      else if (flag_non_call_exceptions
	       && code == INSN
	       && can_throw_internal (insn))
	saw_abnormal_edge = 1;

      if (code != NOTE)
	prev_code = code;
    }

  /* The rest of the compiler works a bit smoother when we don't have to
     check for the edge case of do-nothing functions with no basic blocks.  */
  if (count == 0)
    {
      emit_insn (gen_rtx_USE (VOIDmode, const0_rtx));
      count = 1;
    }

  return count;
}
/* Scan a list of insns for labels referred to other than by jumps.
   This is used to scan the alternatives of a call placeholder.  */
static rtx
find_label_refs (f, lvl)
     rtx f;
     rtx lvl;
{
  rtx insn;

  for (insn = f; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && GET_CODE (insn) != JUMP_INSN)
      {
	rtx note;

	/* Make a list of all labels referred to other than by jumps
	   (which just don't have the REG_LABEL notes).

	   Make a special exception for labels followed by an ADDR*VEC,
	   as this would be a part of the tablejump setup code.

	   Make a special exception to registers loaded with label
	   values just before jump insns that use them.  */

	for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	  if (REG_NOTE_KIND (note) == REG_LABEL)
	    {
	      rtx lab = XEXP (note, 0), next;

	      if ((next = next_nonnote_insn (lab)) != NULL
		  && GET_CODE (next) == JUMP_INSN
		  && (GET_CODE (PATTERN (next)) == ADDR_VEC
		      || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
		;
	      else if (GET_CODE (lab) == NOTE)
		;
	      else if (GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
		       && find_reg_note (NEXT_INSN (insn), REG_LABEL, lab))
		;
	      else
		lvl = alloc_EXPR_LIST (0, XEXP (note, 0), lvl);
	    }
      }

  return lvl;
}
/* Assume that someone emitted code with control flow instructions to the
   basic block.  Update the data structure.  */
void
find_sub_basic_blocks (bb)
     basic_block bb;
{
  rtx insn = bb->head;
  rtx end = bb->end;
  rtx jump_insn = NULL_RTX;
  int created = 0;
  int barrier = 0;
  edge falltru = 0;
  basic_block first_bb = bb;

  if (insn == bb->end)
    return;

  if (GET_CODE (insn) == CODE_LABEL)
    insn = NEXT_INSN (insn);

  /* Scan the insn chain and try to find new basic block boundaries.  */
  while (1)
    {
      enum rtx_code code = GET_CODE (insn);
      switch (code)
	{
	case BARRIER:
	  if (!jump_insn)
	    abort ();
	  barrier = 1;
	  break;
	/* On a code label, split the current basic block.  */
	case CODE_LABEL:
	  falltru = split_block (bb, PREV_INSN (insn));
	  if (jump_insn)
	    bb->end = jump_insn;
	  bb = falltru->dest;
	  remove_edge (falltru);
	  barrier = 0;
	  jump_insn = 0;
	  created = 1;
	  if (LABEL_ALTERNATE_NAME (insn))
	    make_edge (NULL, ENTRY_BLOCK_PTR, bb, 0);
	  break;
	case INSN:
	case JUMP_INSN:
	  /* In case we've previously split an insn on the JUMP_INSN, move
	     the block header to the proper place.  */
	  if (jump_insn)
	    {
	      falltru = split_block (bb, PREV_INSN (insn));
	      bb->end = jump_insn;
	      bb = falltru->dest;
	      remove_edge (falltru);
	      jump_insn = 0;
	    }
	  /* We need some special care for those expressions.  */
	  if (GET_CODE (insn) == JUMP_INSN)
	    {
	      if (GET_CODE (PATTERN (insn)) == ADDR_VEC
		  || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
		abort ();
	      jump_insn = insn;
	    }
	  break;
	default:
	  break;
	}
      if (insn == end)
	break;
      insn = NEXT_INSN (insn);
    }

  /* In case we've got a barrier at the end of the new insn stream, put it
     outside the basic block.  */
  if (GET_CODE (bb->end) == BARRIER)
    bb->end = PREV_INSN (bb->end);

  /* We've possibly replaced the conditional jump by a conditional jump
     followed by a cleanup at the fallthru edge, so the outgoing edges may
     be dead.  */
  purge_dead_edges (bb);

  /* Now re-scan and wire in all edges.  This expects simple (conditional)
     jumps at the end of each new basic block.  */
  make_edges (NULL, first_bb->index, bb->index - 1);
}
/* Find all basic blocks of the function whose first insn is F.

   Collect a list of labels whose addresses are taken.  This
   will be used in make_edges for use with computed gotos.  */

static void
find_basic_blocks_1 (f)
     rtx f;
{
  register rtx insn, next;
  int i = 0;
  rtx bb_note = NULL_RTX;
  rtx lvl = NULL_RTX;
  rtx trll = NULL_RTX;
  rtx head = NULL_RTX;
  rtx end = NULL_RTX;

  /* We process the instructions in a slightly different way than we did
     previously.  This is so that we see a NOTE_BASIC_BLOCK after we have
     closed out the previous block, so that it gets attached at the proper
     place.  Since this form should be equivalent to the previous,
     count_basic_blocks continues to use the old form as a check.  */

  for (insn = f; insn; insn = next)
    {
      enum rtx_code code = GET_CODE (insn);

      next = NEXT_INSN (insn);

      switch (code)
	{
	case NOTE:
	  {
	    int kind = NOTE_LINE_NUMBER (insn);

	    /* Look for basic block notes with which to keep the
	       basic_block_info pointers stable.  Unthread the note now;
	       we'll put it back at the right place in create_basic_block.
	       Or not at all if we've already found a note in this block.  */
	    if (kind == NOTE_INSN_BASIC_BLOCK)
	      {
		if (bb_note == NULL_RTX)
		  bb_note = insn;
		else
		  next = flow_delete_insn (insn);
	      }
	    break;
	  }

	case CODE_LABEL:
	  /* A basic block starts at a label.  If we've closed one off due
	     to a barrier or some such, no need to do it again.  */
	  if (head != NULL_RTX)
	    {
	      create_basic_block (i++, head, end, bb_note);
	      bb_note = NULL_RTX;
	    }

	  head = end = insn;
	  break;

	case JUMP_INSN:
	  /* A basic block ends at a jump.  */
	  if (head == NULL_RTX)
	    head = insn;
	  else
	    {
	      /* ??? Make a special check for table jumps.  The way this
		 happens is truly and amazingly gross.  We are about to
		 create a basic block that contains just a code label and
		 an addr*vec jump insn.  Worse, an addr_diff_vec creates
		 its own natural loop.

		 Prevent this bit of brain damage, pasting things together
		 correctly in make_edges.

		 The correct solution involves emitting the table directly
		 on the tablejump instruction as a note, or JUMP_LABEL.  */

	      if (GET_CODE (PATTERN (insn)) == ADDR_VEC
		  || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
		{
		  head = end = NULL;
		  n_basic_blocks--;
		  break;
		}
	    }
	  end = insn;
	  goto new_bb_inclusive;

	case BARRIER:
	  /* A basic block ends at a barrier.  It may be that an unconditional
	     jump already closed the basic block -- no need to do it again.  */
	  if (head == NULL_RTX)
	    break;
	  goto new_bb_exclusive;

	case CALL_INSN:
	  {
	    /* Record whether this call created an edge.  */
	    rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
	    int region = (note ? INTVAL (XEXP (note, 0)) : 0);

	    if (GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
	      {
		/* Scan each of the alternatives for label refs.  */
		lvl = find_label_refs (XEXP (PATTERN (insn), 0), lvl);
		lvl = find_label_refs (XEXP (PATTERN (insn), 1), lvl);
		lvl = find_label_refs (XEXP (PATTERN (insn), 2), lvl);
		/* Record its tail recursion label, if any.  */
		if (XEXP (PATTERN (insn), 3) != NULL_RTX)
		  trll = alloc_EXPR_LIST (0, XEXP (PATTERN (insn), 3), trll);
	      }

	    /* A basic block ends at a call that can either throw or
	       do a non-local goto.  */
	    if ((nonlocal_goto_handler_labels && region >= 0)
		|| can_throw_internal (insn))
	      {
	      new_bb_inclusive:
		if (head == NULL_RTX)
		  head = insn;
		end = insn;

	      new_bb_exclusive:
		create_basic_block (i++, head, end, bb_note);
		head = end = NULL_RTX;
		bb_note = NULL_RTX;
		break;
	      }
	  }
	  /* Fall through.  */

	case INSN:
	  /* Non-call exceptions generate new blocks just like calls.  */
	  if (flag_non_call_exceptions && can_throw_internal (insn))
	    goto new_bb_inclusive;

	  if (head == NULL_RTX)
	    head = insn;
	  end = insn;
	  break;

	default:
	  abort ();
	}

      if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
	{
	  rtx note;

	  /* Make a list of all labels referred to other than by jumps.

	     Make a special exception for labels followed by an ADDR*VEC,
	     as this would be a part of the tablejump setup code.

	     Make a special exception to registers loaded with label
	     values just before jump insns that use them.  */

	  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	    if (REG_NOTE_KIND (note) == REG_LABEL)
	      {
		rtx lab = XEXP (note, 0), next;

		if ((next = next_nonnote_insn (lab)) != NULL
		    && GET_CODE (next) == JUMP_INSN
		    && (GET_CODE (PATTERN (next)) == ADDR_VEC
			|| GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
		  ;
		else if (GET_CODE (lab) == NOTE)
		  ;
		else if (GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
			 && find_reg_note (NEXT_INSN (insn), REG_LABEL, lab))
		  ;
		else
		  lvl = alloc_EXPR_LIST (0, XEXP (note, 0), lvl);
	      }
	}
    }

  if (head != NULL_RTX)
    create_basic_block (i++, head, end, bb_note);
  else if (bb_note)
    flow_delete_insn (bb_note);

  if (i != n_basic_blocks)
    abort ();

  label_value_list = lvl;
  tail_recursion_label_list = trll;
}
/* Tidy the CFG by deleting unreachable code and whatnot.  */

void
cleanup_cfg (mode)
     int mode;
{
  timevar_push (TV_CLEANUP_CFG);
  delete_unreachable_blocks ();
  if (try_optimize_cfg (mode))
    delete_unreachable_blocks ();
  mark_critical_edges ();

  /* Kill the data we won't maintain.  */
  free_EXPR_LIST_list (&label_value_list);
  free_EXPR_LIST_list (&tail_recursion_label_list);
  timevar_pop (TV_CLEANUP_CFG);
}
/* Create a new basic block consisting of the instructions between
   HEAD and END inclusive.  Reuses the note and basic block struct
   in BB_NOTE, if any.  */

void
create_basic_block (index, head, end, bb_note)
     int index;
     rtx head, end, bb_note;
{
  basic_block bb;

  if (bb_note
      && ! RTX_INTEGRATED_P (bb_note)
      && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
      && bb->aux == NULL)
    {
      /* If we found an existing note, thread it back onto the chain.  */

      rtx after;

      if (GET_CODE (head) == CODE_LABEL)
	after = head;
      else
	{
	  after = PREV_INSN (head);
	  head = bb_note;
	}

      if (after != bb_note && NEXT_INSN (after) != bb_note)
	reorder_insns (bb_note, bb_note, after);
    }
  else
    {
      /* Otherwise we must create a note and a basic block structure.
	 Since we allow basic block structs in rtl, give the struct
	 the same lifetime by allocating it off the function obstack
	 rather than using malloc.  */

      bb = (basic_block) obstack_alloc (&flow_obstack, sizeof (*bb));
      memset (bb, 0, sizeof (*bb));

      if (GET_CODE (head) == CODE_LABEL)
	bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
      else
	{
	  bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
	  head = bb_note;
	}
      NOTE_BASIC_BLOCK (bb_note) = bb;
    }

  /* Always include the bb note in the block.  */
  if (NEXT_INSN (end) == bb_note)
    end = bb_note;

  bb->head = head;
  bb->end = end;
  bb->index = index;
  BASIC_BLOCK (index) = bb;

  /* Tag the block so that we know it has been used when considering
     other basic block notes.  */
  bb->aux = bb;
}
/* Return the INSN immediately following the NOTE_INSN_BASIC_BLOCK
   note associated with the BLOCK.  */

rtx
first_insn_after_basic_block_note (block)
     basic_block block;
{
  rtx insn;

  /* Get the first instruction in the block.  */
  insn = block->head;

  if (insn == NULL_RTX)
    return NULL_RTX;
  if (GET_CODE (insn) == CODE_LABEL)
    insn = NEXT_INSN (insn);
  if (!NOTE_INSN_BASIC_BLOCK_P (insn))
    abort ();

  return NEXT_INSN (insn);
}
/* Records the basic block struct in BB_FOR_INSN, for every instruction
   indexed by INSN_UID.  MAX is the size of the array.  */

void
compute_bb_for_insn (max)
     int max;
{
  int i;

  if (basic_block_for_insn)
    VARRAY_FREE (basic_block_for_insn);
  VARRAY_BB_INIT (basic_block_for_insn, max, "basic_block_for_insn");

  for (i = 0; i < n_basic_blocks; ++i)
    {
      basic_block bb = BASIC_BLOCK (i);
      rtx insn, end;

      end = bb->end;
      insn = bb->head;
      while (1)
	{
	  int uid = INSN_UID (insn);
	  if (uid < max)
	    VARRAY_BB (basic_block_for_insn, uid) = bb;
	  if (insn == end)
	    break;
	  insn = NEXT_INSN (insn);
	}
    }
}
/* Free the memory associated with the edge structures.  */

void
clear_edges ()
{
  int i;
  edge n, e;

  for (i = 0; i < n_basic_blocks; ++i)
    {
      basic_block bb = BASIC_BLOCK (i);

      for (e = bb->succ; e; e = n)
	{
	  n = e->succ_next;
	  free (e);
	}

      bb->succ = 0;
      bb->pred = 0;
    }

  for (e = ENTRY_BLOCK_PTR->succ; e; e = n)
    {
      n = e->succ_next;
      free (e);
    }

  ENTRY_BLOCK_PTR->succ = 0;
  EXIT_BLOCK_PTR->pred = 0;

  n_edges = 0;
}
/* Identify the edges between basic blocks MIN to MAX.

   LABEL_VALUE_LIST is a list of labels whose addresses are taken other
   than by jumps; blocks that are otherwise unreachable may be reachable
   through them, e.g. via a computed jump or a non-local goto.  */

static void
make_edges (label_value_list, min, max)
     rtx label_value_list;
     int min, max;
{
  int i;
  sbitmap *edge_cache = NULL;

  /* Assume no computed jump; revise as we create edges.  */
  current_function_has_computed_jump = 0;

  /* Heavy use of computed goto in machine-generated code can lead to
     nearly fully-connected CFGs.  In that case we spend a significant
     amount of time searching the edge lists for duplicates.  */
  if (forced_labels || label_value_list)
    {
      edge_cache = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
      sbitmap_vector_zero (edge_cache, n_basic_blocks);
    }

  /* By nature of the way these get numbered, block 0 is always the entry.  */
  make_edge (edge_cache, ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU);

  for (i = min; i <= max; ++i)
    {
      basic_block bb = BASIC_BLOCK (i);
      rtx insn, x;
      enum rtx_code code;
      int force_fallthru = 0;

      if (GET_CODE (bb->head) == CODE_LABEL
	  && LABEL_ALTERNATE_NAME (bb->head))
	make_edge (NULL, ENTRY_BLOCK_PTR, bb, 0);

      /* Examine the last instruction of the block, and discover the
	 ways we can leave the block.  */

      insn = bb->end;
      code = GET_CODE (insn);

      /* A branch.  */
      if (code == JUMP_INSN)
	{
	  rtx tmp;

	  /* Recognize exception handling placeholders.  */
	  if (GET_CODE (PATTERN (insn)) == RESX)
	    make_eh_edge (edge_cache, bb, insn);

	  /* Recognize a non-local goto as a branch outside the
	     current function.  */
	  else if (find_reg_note (insn, REG_NON_LOCAL_GOTO, NULL_RTX))
	    ;

	  /* ??? Recognize a tablejump and do the right thing.  */
	  else if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
		   && (tmp = NEXT_INSN (tmp)) != NULL_RTX
		   && GET_CODE (tmp) == JUMP_INSN
		   && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
		       || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
	    {
	      rtvec vec;
	      int j;

	      if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
		vec = XVEC (PATTERN (tmp), 0);
	      else
		vec = XVEC (PATTERN (tmp), 1);

	      for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
		make_label_edge (edge_cache, bb,
				 XEXP (RTVEC_ELT (vec, j), 0), 0);

	      /* Some targets (eg, ARM) emit a conditional jump that also
		 contains the out-of-range target.  Scan for these and
		 add an edge if necessary.  */
	      if ((tmp = single_set (insn)) != NULL
		  && SET_DEST (tmp) == pc_rtx
		  && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
		  && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
		make_label_edge (edge_cache, bb,
				 XEXP (XEXP (SET_SRC (tmp), 2), 0), 0);

#ifdef CASE_DROPS_THROUGH
	      /* Silly VAXen.  The ADDR_VEC is going to be in the way of
		 us naturally detecting fallthru into the next block.  */
	      force_fallthru = 1;
#endif
	    }

	  /* If this is a computed jump, then mark it as reaching
	     everything on the label_value_list and forced_labels list.  */
	  else if (computed_jump_p (insn))
	    {
	      current_function_has_computed_jump = 1;

	      for (x = label_value_list; x; x = XEXP (x, 1))
		make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);

	      for (x = forced_labels; x; x = XEXP (x, 1))
		make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
	    }

	  /* A return insn creates an edge to the exit block.  */
	  else if (returnjump_p (insn))
	    make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);

	  /* Otherwise, we have a plain conditional or unconditional jump.  */
	  else
	    {
	      if (! JUMP_LABEL (insn))
		abort ();
	      make_label_edge (edge_cache, bb, JUMP_LABEL (insn), 0);
	    }
	}

      /* If this is a sibling call insn, then this is in effect a
	 combined call and return, and so we need an edge to the
	 exit block.  No need to worry about EH edges, since we
	 wouldn't have created the sibling call in the first place.  */

      if (code == CALL_INSN && SIBLING_CALL_P (insn))
	make_edge (edge_cache, bb, EXIT_BLOCK_PTR,
		   EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);

      /* If this is a CALL_INSN, then mark it as reaching the active EH
	 handler for this CALL_INSN.  If we're handling non-call
	 exceptions then any insn can reach any of the active handlers.

	 Also mark the CALL_INSN as reaching any nonlocal goto handler.  */

      else if (code == CALL_INSN || flag_non_call_exceptions)
	{
	  /* Add any appropriate EH edges.  */
	  make_eh_edge (edge_cache, bb, insn);

	  if (code == CALL_INSN && nonlocal_goto_handler_labels)
	    {
	      /* ??? This could be made smarter: in some cases it's possible
		 to tell that certain calls will not do a nonlocal goto.

		 For example, if the nested functions that do the nonlocal
		 gotos do not have their addresses taken, then only calls to
		 those functions or to other nested functions that use them
		 could possibly do nonlocal gotos.  */
	      /* We do know that a REG_EH_REGION note with a value less
		 than 0 is guaranteed not to perform a non-local goto.  */
	      rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
	      if (!note || INTVAL (XEXP (note, 0)) >= 0)
		for (x = nonlocal_goto_handler_labels; x; x = XEXP (x, 1))
		  make_label_edge (edge_cache, bb, XEXP (x, 0),
				   EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
	    }
	}

      /* Find out if we can drop through to the next block.  */
      insn = next_nonnote_insn (insn);
      if (!insn || (i + 1 == n_basic_blocks && force_fallthru))
	make_edge (edge_cache, bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
      else if (i + 1 < n_basic_blocks)
	{
	  rtx tmp = BLOCK_HEAD (i + 1);
	  if (GET_CODE (tmp) == NOTE)
	    tmp = next_nonnote_insn (tmp);
	  if (force_fallthru || insn == tmp)
	    make_edge (edge_cache, bb, BASIC_BLOCK (i + 1), EDGE_FALLTHRU);
	}
    }

  if (edge_cache)
    sbitmap_vector_free (edge_cache);
}
/* Create an edge between two basic blocks.  FLAGS are auxiliary information
   about the edge that is accumulated between calls.  */

void
make_edge (edge_cache, src, dst, flags)
     sbitmap *edge_cache;
     basic_block src, dst;
     int flags;
{
  int use_edge_cache;
  edge e;

  /* Don't bother with edge cache for ENTRY or EXIT; there aren't that
     many edges to them, and we didn't allocate memory for it.  */
  use_edge_cache = (edge_cache
		    && src != ENTRY_BLOCK_PTR
		    && dst != EXIT_BLOCK_PTR);

  /* Make sure we don't add duplicate edges.  */
  switch (use_edge_cache)
    {
    default:
      /* Quick test for non-existence of the edge.  */
      if (! TEST_BIT (edge_cache[src->index], dst->index))
	break;

      /* The edge exists; early exit if no work to do.  */
      if (flags == 0)
	return;

      /* FALLTHRU */
    case 0:
      for (e = src->succ; e; e = e->succ_next)
	if (e->dest == dst)
	  {
	    e->flags |= flags;
	    return;
	  }
      break;
    }

  e = (edge) xcalloc (1, sizeof (*e));
  n_edges++;

  e->succ_next = src->succ;
  e->pred_next = dst->pred;
  e->src = src;
  e->dest = dst;
  e->flags = flags;

  src->succ = e;
  dst->pred = e;

  if (use_edge_cache)
    SET_BIT (edge_cache[src->index], dst->index);
}
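
/* Illustrative only, mirroring the call sites above rather than adding
   new API: a fallthru edge from block I to block I + 1 is recorded with

	make_edge (edge_cache, BASIC_BLOCK (i), BASIC_BLOCK (i + 1),
		   EDGE_FALLTHRU);

   and calling it again for the same SRC/DST pair only ORs the new FLAGS
   into the existing edge instead of creating a duplicate.  */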
/* Create an edge from a basic block to a label.  */

static void
make_label_edge (edge_cache, src, label, flags)
     sbitmap *edge_cache;
     basic_block src;
     rtx label;
     int flags;
{
  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If the label was never emitted, this insn is junk, but avoid a
     crash trying to refer to BLOCK_FOR_INSN (label).  This can happen
     as a result of a syntax error and a diagnostic has already been
     printed.  */

  if (INSN_UID (label) == 0)
    return;

  make_edge (edge_cache, src, BLOCK_FOR_INSN (label), flags);
}
/* Create the edges generated by INSN in REGION.  */

static void
make_eh_edge (edge_cache, src, insn)
     sbitmap *edge_cache;
     basic_block src;
     rtx insn;
{
  int is_call = (GET_CODE (insn) == CALL_INSN ? EDGE_ABNORMAL_CALL : 0);
  rtx handlers, i;

  handlers = reachable_handlers (insn);

  for (i = handlers; i; i = XEXP (i, 1))
    make_label_edge (edge_cache, src, XEXP (i, 0),
		     EDGE_ABNORMAL | EDGE_EH | is_call);

  free_INSN_LIST_list (&handlers);
}
/* Identify critical edges and set the bits appropriately.  */

void
mark_critical_edges ()
{
  int i, n = n_basic_blocks;
  basic_block bb;

  /* We begin with the entry block.  This is not terribly important now,
     but could be if a front end (Fortran) implemented alternate entry
     points.  */
  bb = ENTRY_BLOCK_PTR;
  i = -1;

  while (1)
    {
      edge e;

      /* (1) Critical edges must have a source with multiple successors.  */
      if (bb->succ && bb->succ->succ_next)
	{
	  for (e = bb->succ; e; e = e->succ_next)
	    {
	      /* (2) Critical edges must have a destination with multiple
		 predecessors.  Note that we know there is at least one
		 predecessor -- the edge we followed to get here.  */
	      if (e->dest->pred->pred_next)
		e->flags |= EDGE_CRITICAL;
	      else
		e->flags &= ~EDGE_CRITICAL;
	    }
	}
      else
	{
	  for (e = bb->succ; e; e = e->succ_next)
	    e->flags &= ~EDGE_CRITICAL;
	}

      if (++i >= n)
	break;
      bb = BASIC_BLOCK (i);
    }
}
/* Split a block BB after insn INSN creating a new fallthru edge.
   Return the new edge.  Note that to keep other parts of the compiler happy,
   this function renumbers all the basic blocks so that the new
   one has a number one greater than the block split.  */

edge
split_block (bb, insn)
     basic_block bb;
     rtx insn;
{
  basic_block new_bb;
  edge new_edge;
  edge e;
  rtx bb_note;
  int i, j;

  /* There is no point splitting the block after its end.  */
  if (bb->end == insn)
    return 0;

  /* Create the new structures.  */
  new_bb = (basic_block) obstack_alloc (&flow_obstack, sizeof (*new_bb));
  new_edge = (edge) xcalloc (1, sizeof (*new_edge));
  n_edges++;

  memset (new_bb, 0, sizeof (*new_bb));

  new_bb->head = NEXT_INSN (insn);
  new_bb->end = bb->end;
  bb->end = insn;

  new_bb->succ = bb->succ;
  bb->succ = new_edge;
  new_bb->pred = new_edge;
  new_bb->count = bb->count;
  new_bb->frequency = bb->frequency;
  new_bb->loop_depth = bb->loop_depth;

  new_edge->src = bb;
  new_edge->dest = new_bb;
  new_edge->flags = EDGE_FALLTHRU;
  new_edge->probability = REG_BR_PROB_BASE;
  new_edge->count = bb->count;

  /* Redirect the src of the successor edges of bb to point to new_bb.  */
  for (e = new_bb->succ; e; e = e->succ_next)
    e->src = new_bb;

  /* Place the new block just after the block being split.  */
  VARRAY_GROW (basic_block_info, ++n_basic_blocks);

  /* Some parts of the compiler expect blocks to be numbered in
     sequential order so insert the new block immediately after the
     block being split.  */
  j = bb->index;
  for (i = n_basic_blocks - 1; i > j + 1; --i)
    {
      basic_block tmp = BASIC_BLOCK (i - 1);
      BASIC_BLOCK (i) = tmp;
      tmp->index = i;
    }

  BASIC_BLOCK (i) = new_bb;
  new_bb->index = i;

  if (GET_CODE (new_bb->head) == CODE_LABEL)
    {
      /* Create the basic block note.  */
      bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK,
				 new_bb->head);
      NOTE_BASIC_BLOCK (bb_note) = new_bb;

      /* If the only thing in this new block was the label, make sure
	 the block note gets included.  */
      if (new_bb->head == new_bb->end)
	new_bb->end = bb_note;
    }
  else
    {
      /* Create the basic block note.  */
      bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK,
				  new_bb->head);
      NOTE_BASIC_BLOCK (bb_note) = new_bb;
      new_bb->head = bb_note;
    }

  update_bb_for_insn (new_bb);

  if (bb->global_live_at_start)
    {
      new_bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
      new_bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
      COPY_REG_SET (new_bb->global_live_at_end, bb->global_live_at_end);

      /* We now have to calculate which registers are live at the end
	 of the split basic block and at the start of the new basic
	 block.  Start with those registers that are known to be live
	 at the end of the original basic block and get
	 propagate_block to determine which registers are live.  */
      COPY_REG_SET (new_bb->global_live_at_start, bb->global_live_at_end);
      propagate_block (new_bb, new_bb->global_live_at_start, NULL, NULL, 0);
      COPY_REG_SET (bb->global_live_at_end,
		    new_bb->global_live_at_start);
    }

  return new_edge;
}
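
/* An illustrative sketch of a hypothetical caller (not a call site in
   this file): splitting BB after INSN returns the new fallthru edge,
   whose destination is the newly created second half of the block:

	edge e = split_block (bb, insn);
	basic_block new_bb = e->dest;	... holds the insns after INSN ...  */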
/* Return the label in the head of basic block BLOCK.  Create one if it
   doesn't exist.  */

rtx
block_label (block)
     basic_block block;
{
  if (block == EXIT_BLOCK_PTR)
    return NULL_RTX;
  if (GET_CODE (block->head) != CODE_LABEL)
    block->head = emit_label_before (gen_label_rtx (), block->head);
  return block->head;
}
/* Return true if the block has no effect and only forwards control flow to
   its single destination.  */
bool
forwarder_block_p (bb)
     basic_block bb;
{
  rtx insn = bb->head;
  if (bb == EXIT_BLOCK_PTR || bb == ENTRY_BLOCK_PTR
      || !bb->succ || bb->succ->succ_next)
    return false;

  while (insn != bb->end)
    {
      if (active_insn_p (insn))
	return false;
      insn = NEXT_INSN (insn);
    }
  return (!active_insn_p (insn)
	  || (GET_CODE (insn) == JUMP_INSN && onlyjump_p (insn)));
}
/* Return nonzero if we can reach TARGET from SRC by falling through.  */
static bool
can_fallthru (src, target)
     basic_block src, target;
{
  rtx insn = src->end;
  rtx insn2 = target->head;

  if (src->index + 1 == target->index && !active_insn_p (insn2))
    insn2 = next_active_insn (insn2);
  /* ??? Later we may add code to move jump tables offline.  */
  return next_active_insn (insn) == insn2;
}
/* Attempt to perform edge redirection by replacing a possibly complex
   jump instruction by an unconditional jump, or by removing the jump
   completely.  This can apply only if all edges now point to the same
   block.

   The parameters and return values are equivalent to
   redirect_edge_and_branch.  */

static bool
try_redirect_by_replacing_jump (e, target)
     edge e;
     basic_block target;
{
  basic_block src = e->src;
  rtx insn = src->end, kill_from;
  edge tmp;
  rtx set;
  int fallthru = 0;

  /* Verify that all targets will be TARGET.  */
  for (tmp = src->succ; tmp; tmp = tmp->succ_next)
    if (tmp->dest != target && tmp != e)
      break;
  if (tmp || !onlyjump_p (insn))
    return false;

  /* Avoid removing a branch with side effects.  */
  set = single_set (insn);
  if (!set || side_effects_p (set))
    return false;

  /* In case we zap a conditional jump, we'll need to kill
     the cc0 setter too.  */
  kill_from = insn;
#ifdef HAVE_cc0
  if (reg_mentioned_p (cc0_rtx, PATTERN (insn)))
    kill_from = PREV_INSN (insn);
#endif

  /* See if we can create the fallthru edge.  */
  if (can_fallthru (src, target))
    {
      src->end = PREV_INSN (kill_from);
      if (rtl_dump_file)
	fprintf (rtl_dump_file, "Removing jump %i.\n", INSN_UID (insn));
      fallthru = 1;

      /* Selectively unlink the whole insn chain.  */
      flow_delete_insn_chain (kill_from, PREV_INSN (target->head));
    }
  /* If this is already a simple jump, redirect it.  */
  else if (simplejump_p (insn))
    {
      if (e->dest == target)
	return false;
      if (rtl_dump_file)
	fprintf (rtl_dump_file, "Redirecting jump %i from %i to %i.\n",
		 INSN_UID (insn), e->dest->index, target->index);
      redirect_jump (insn, block_label (target), 0);
    }
  /* Or replace a possibly complicated jump insn by a simple jump insn.  */
  else
    {
      rtx target_label = block_label (target);
      rtx barrier;

      src->end = emit_jump_insn_before (gen_jump (target_label), kill_from);
      JUMP_LABEL (src->end) = target_label;
      LABEL_NUSES (target_label)++;
      if (basic_block_for_insn)
	set_block_for_new_insns (src->end, src);
      if (rtl_dump_file)
	fprintf (rtl_dump_file, "Replacing insn %i by jump %i\n",
		 INSN_UID (insn), INSN_UID (src->end));

      flow_delete_insn_chain (kill_from, insn);

      barrier = next_nonnote_insn (src->end);
      if (!barrier || GET_CODE (barrier) != BARRIER)
	emit_barrier_after (src->end);
    }

  /* Keep only one edge out and set proper flags.  */
  while (src->succ->succ_next)
    remove_edge (src->succ);
  e = src->succ;
  if (fallthru)
    e->flags = EDGE_FALLTHRU;
  else
    e->flags = 0;
  e->probability = REG_BR_PROB_BASE;
  e->count = src->count;

  /* We don't want a block to end on a line-number note since that has
     the potential of changing the code between -g and not -g.  */
  while (GET_CODE (e->src->end) == NOTE
	 && NOTE_LINE_NUMBER (e->src->end) >= 0)
    {
      rtx prev = PREV_INSN (e->src->end);
      flow_delete_insn (e->src->end);
      e->src->end = prev;
    }

  if (e->dest != target)
    redirect_edge_succ (e, target);
  return true;
}
/* Attempt to change code to redirect edge E to TARGET.  Don't do so at
   the expense of adding new instructions or reordering basic blocks.

   The function may also be called with the edge's current destination
   equal to TARGET.  In that case it should try the simplifications and
   do nothing if none is possible.

   Return true if the transformation succeeded.  We still return false
   when E already pointed to TARGET and we did not manage to simplify
   the instruction stream.  */
bool
redirect_edge_and_branch (e, target)
     edge e;
     basic_block target;
{
  rtx tmp;
  rtx old_label = e->dest->head;
  basic_block src = e->src;
  rtx insn = src->end;

  if (e->flags & EDGE_COMPLEX)
    return false;

  if (try_redirect_by_replacing_jump (e, target))
    return true;
  /* Do this fast path late, as we want the above code to simplify for
     cases where we are called on a single edge leaving a basic block
     containing a nontrivial jump insn.  */
  else if (e->dest == target)
    return false;

  /* We can only redirect non-fallthru edges of a jump insn.  */
  if (e->flags & EDGE_FALLTHRU)
    return false;
  if (GET_CODE (insn) != JUMP_INSN)
    return false;

  /* Recognize a tablejump and adjust all matching cases.  */
  if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
      && (tmp = NEXT_INSN (tmp)) != NULL_RTX
      && GET_CODE (tmp) == JUMP_INSN
      && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
	  || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
    {
      rtvec vec;
      int j;
      rtx new_label = block_label (target);

      if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
	vec = XVEC (PATTERN (tmp), 0);
      else
	vec = XVEC (PATTERN (tmp), 1);

      for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
	if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
	  {
	    RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (Pmode, new_label);
	    --LABEL_NUSES (old_label);
	    ++LABEL_NUSES (new_label);
	  }

      /* Handle casesi dispatch insns.  */
      if ((tmp = single_set (insn)) != NULL
	  && SET_DEST (tmp) == pc_rtx
	  && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
	  && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
	  && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
	{
	  XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (VOIDmode,
						       new_label);
	  --LABEL_NUSES (old_label);
	  ++LABEL_NUSES (new_label);
	}
    }
  else
    {
      /* ??? We may play games with moving the named labels from
	 one basic block to the other in case only one computed_jump is
	 available.  */
      if (computed_jump_p (insn))
	return false;

      /* A return instruction can't be redirected.  */
      if (returnjump_p (insn))
	return false;

      /* If the insn doesn't go where we think, we're confused.  */
      if (JUMP_LABEL (insn) != old_label)
	abort ();
      redirect_jump (insn, block_label (target), 0);
    }

  if (rtl_dump_file)
    fprintf (rtl_dump_file, "Edge %i->%i redirected to %i\n",
	     e->src->index, e->dest->index, target->index);
  if (e->dest != target)
    {
      edge s;
      /* Check whether the edge is already present.  */
      for (s = src->succ; s; s = s->succ_next)
	if (s->dest == target)
	  break;
      if (s)
	{
	  s->flags |= e->flags;
	  s->probability += e->probability;
	  s->count += e->count;
	  remove_edge (e);
	}
      else
	redirect_edge_succ (e, target);
    }
  return true;
}
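
/* An illustrative sketch of a hypothetical caller (an assumption, not a
   call site in this file): try the cheap rewrite first and fall back to
   the forcible variant, which may create a new jump insn or block:

	if (!redirect_edge_and_branch (e, target))
	  redirect_edge_and_branch_force (e, target);  */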
1856 /* Redirect edge even at the expense of creating new jump insn or
1857 basic block. Return new basic block if created, NULL otherwise.
1858 Abort if converison is impossible. */
1859 basic_block
1860 redirect_edge_and_branch_force (e, target)
1861 edge e;
1862 basic_block target;
1864 basic_block new_bb;
1865 edge new_edge;
1866 rtx label;
1867 rtx bb_note;
1868 int i, j;
1870 if (redirect_edge_and_branch (e, target))
1871 return NULL;
1872 if (e->dest == target)
1873 return NULL;
1874 if (e->flags & EDGE_ABNORMAL)
1875 abort ();
1876 if (!(e->flags & EDGE_FALLTHRU))
1877 abort ();
1879 e->flags &= ~EDGE_FALLTHRU;
1880 label = block_label (target);
1881 /* Case of the fallthru block. */
1882 if (!e->src->succ->succ_next)
1884 e->src->end = emit_jump_insn_after (gen_jump (label), e->src->end);
1885 JUMP_LABEL (e->src->end) = label;
1886 LABEL_NUSES (label)++;
1887 if (basic_block_for_insn)
1888 set_block_for_new_insns (e->src->end, e->src);
1889 emit_barrier_after (e->src->end);
1890 if (rtl_dump_file)
1891 fprintf (rtl_dump_file,
1892 "Emitting jump insn %i to redirect edge %i->%i to %i\n",
1893 INSN_UID (e->src->end), e->src->index, e->dest->index,
1894 target->index);
1895 redirect_edge_succ (e, target);
1896 return NULL;
1898 /* Redirecting fallthru edge of the conditional needs extra work. */
1900 if (rtl_dump_file)
1901 fprintf (rtl_dump_file,
1902 "Emitting jump insn %i in new BB to redirect edge %i->%i to %i\n",
1903 INSN_UID (e->src->end), e->src->index, e->dest->index,
1904 target->index);
1906 /* Create the new structures. */
1907 new_bb = (basic_block) obstack_alloc (&flow_obstack, sizeof (*new_bb));
1908 new_edge = (edge) xcalloc (1, sizeof (*new_edge));
1909 n_edges++;
1911 memset (new_bb, 0, sizeof (*new_bb));
1913 new_bb->end = new_bb->head = e->src->end;
1914 new_bb->succ = NULL;
1915 new_bb->pred = new_edge;
1916 new_bb->count = e->count;
1917 new_bb->frequency = e->probability * e->src->frequency / REG_BR_PROB_BASE;
1918 new_bb->loop_depth = e->dest->loop_depth;
1920 new_edge->flags = EDGE_FALLTHRU;
1921 new_edge->probability = e->probability;
1922 new_edge->count = e->count;
1924 if (e->dest->global_live_at_start)
1926 new_bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1927 new_bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1928 COPY_REG_SET (new_bb->global_live_at_start,
1929 target->global_live_at_start);
1930 COPY_REG_SET (new_bb->global_live_at_end, new_bb->global_live_at_start);
1933 /* Wire edge in. */
1934 new_edge->src = e->src;
1935 new_edge->dest = new_bb;
1936 new_edge->succ_next = e->src->succ;
1937 e->src->succ = new_edge;
1938 new_edge->pred_next = NULL;
1940 /* Redirect old edge. */
1941 redirect_edge_succ (e, target);
1942 redirect_edge_pred (e, new_bb);
1943 e->probability = REG_BR_PROB_BASE;
1945 /* Place the new block just after the block being split. */
1946 VARRAY_GROW (basic_block_info, ++n_basic_blocks);
1948 /* Some parts of the compiler expect blocks to be numbered in
1949 sequential order, so insert the new block immediately after the
1950 block being split. */
1951 j = new_edge->src->index;
1952 for (i = n_basic_blocks - 1; i > j + 1; --i)
1954 basic_block tmp = BASIC_BLOCK (i - 1);
1955 BASIC_BLOCK (i) = tmp;
1956 tmp->index = i;
1959 BASIC_BLOCK (i) = new_bb;
1960 new_bb->index = i;
1962 /* Create the basic block note. */
1963 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, new_bb->head);
1964 NOTE_BASIC_BLOCK (bb_note) = new_bb;
1965 new_bb->head = bb_note;
1967 new_bb->end = emit_jump_insn_after (gen_jump (label), new_bb->head);
1968 JUMP_LABEL (new_bb->end) = label;
1969 LABEL_NUSES (label)++;
1970 if (basic_block_for_insn)
1971 set_block_for_new_insns (new_bb->end, new_bb);
1972 emit_barrier_after (new_bb->end);
1973 return new_bb;
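/* An illustrative caller (hypothetical, kept under #if 0 so it is never
   compiled): passes usually try the cheap rewrite first and resort to
   the forced variant, which may create a new jump insn or basic block,
   only when that fails.  */
#if 0
static void
example_retarget_edge (e, target)
     edge e;
     basic_block target;
{
  if (!redirect_edge_and_branch (e, target))
    {
      basic_block new_bb = redirect_edge_and_branch_force (e, target);
      if (new_bb && rtl_dump_file)
	fprintf (rtl_dump_file, "Forced redirection created block %i\n",
		 new_bb->index);
    }
}
#endif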
1976 /* Split a (typically critical) edge. Return the new block.
1977 Abort on abnormal edges.
1979 ??? The code generally expects to be called on critical edges.
1980 The case of a block ending in an unconditional jump to a
1981 block with multiple predecessors is not handled optimally. */
1983 basic_block
1984 split_edge (edge_in)
1985 edge edge_in;
1987 basic_block old_pred, bb, old_succ;
1988 edge edge_out;
1989 rtx bb_note;
1990 int i, j;
1992 /* Abnormal edges cannot be split. */
1993 if ((edge_in->flags & EDGE_ABNORMAL) != 0)
1994 abort ();
1996 old_pred = edge_in->src;
1997 old_succ = edge_in->dest;
1999 /* Create the new structures. */
2000 bb = (basic_block) obstack_alloc (&flow_obstack, sizeof (*bb));
2001 edge_out = (edge) xcalloc (1, sizeof (*edge_out));
2002 n_edges++;
2004 memset (bb, 0, sizeof (*bb));
2006 /* ??? This info is likely going to be out of date very soon. */
2007 if (old_succ->global_live_at_start)
2009 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
2010 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
2011 COPY_REG_SET (bb->global_live_at_start, old_succ->global_live_at_start);
2012 COPY_REG_SET (bb->global_live_at_end, old_succ->global_live_at_start);
2015 /* Wire them up. */
2016 bb->succ = edge_out;
2017 bb->count = edge_in->count;
2018 bb->frequency = (edge_in->probability * edge_in->src->frequency
2019 / REG_BR_PROB_BASE);
2021 edge_in->flags &= ~EDGE_CRITICAL;
2023 edge_out->pred_next = old_succ->pred;
2024 edge_out->succ_next = NULL;
2025 edge_out->src = bb;
2026 edge_out->dest = old_succ;
2027 edge_out->flags = EDGE_FALLTHRU;
2028 edge_out->probability = REG_BR_PROB_BASE;
2029 edge_out->count = edge_in->count;
2031 old_succ->pred = edge_out;
2033 /* Tricky case -- if there existed a fallthru into the successor
2034 (and we're not it) we must add a new unconditional jump around
2035 the new block we're actually interested in.
2037 Further, if that edge is critical, this means a second new basic
2038 block must be created to hold it. In order to simplify correct
2039 insn placement, do this before we touch the existing basic block
2040 ordering for the block we were really wanting. */
2041 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
2043 edge e;
2044 for (e = edge_out->pred_next; e; e = e->pred_next)
2045 if (e->flags & EDGE_FALLTHRU)
2046 break;
2048 if (e)
2050 basic_block jump_block;
2051 rtx pos;
2053 if ((e->flags & EDGE_CRITICAL) == 0
2054 && e->src != ENTRY_BLOCK_PTR)
2056 /* Non critical -- we can simply add a jump to the end
2057 of the existing predecessor. */
2058 jump_block = e->src;
2060 else
2062 /* We need a new block to hold the jump. The simplest
2063 way to do the bulk of the work here is to recursively
2064 call ourselves. */
2065 jump_block = split_edge (e);
2066 e = jump_block->succ;
2069 /* Now add the jump insn ... */
2070 pos = emit_jump_insn_after (gen_jump (old_succ->head),
2071 jump_block->end);
2072 jump_block->end = pos;
2073 if (basic_block_for_insn)
2074 set_block_for_new_insns (pos, jump_block);
2075 emit_barrier_after (pos);
2077 /* ... let jump know that label is in use, ... */
2078 JUMP_LABEL (pos) = old_succ->head;
2079 ++LABEL_NUSES (old_succ->head);
2081 /* ... and clear fallthru on the outgoing edge. */
2082 e->flags &= ~EDGE_FALLTHRU;
2084 /* Continue splitting the interesting edge. */
2088 /* Place the new block just in front of the successor. */
2089 VARRAY_GROW (basic_block_info, ++n_basic_blocks);
2090 if (old_succ == EXIT_BLOCK_PTR)
2091 j = n_basic_blocks - 1;
2092 else
2093 j = old_succ->index;
2094 for (i = n_basic_blocks - 1; i > j; --i)
2096 basic_block tmp = BASIC_BLOCK (i - 1);
2097 BASIC_BLOCK (i) = tmp;
2098 tmp->index = i;
2100 BASIC_BLOCK (i) = bb;
2101 bb->index = i;
2103 /* Create the basic block note.
2105 Where we place the note can have a noticeable impact on the generated
2106 code. Consider this cfg:
2108                E
2109                |
2110                0
2111               / \
2112           +->1-->2--->E
2113           |  |
2114           +--+
2116 If we need to insert an insn on the edge from block 0 to block 1,
2117 we want to ensure the instructions we insert are outside of any
2118 loop notes that physically sit between block 0 and block 1. Otherwise
2119 we confuse the loop optimizer into thinking the loop is a phony. */
2120 if (old_succ != EXIT_BLOCK_PTR
2121 && PREV_INSN (old_succ->head)
2122 && GET_CODE (PREV_INSN (old_succ->head)) == NOTE
2123 && NOTE_LINE_NUMBER (PREV_INSN (old_succ->head)) == NOTE_INSN_LOOP_BEG)
2124 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK,
2125 PREV_INSN (old_succ->head));
2126 else if (old_succ != EXIT_BLOCK_PTR)
2127 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, old_succ->head);
2128 else
2129 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
2130 NOTE_BASIC_BLOCK (bb_note) = bb;
2131 bb->head = bb->end = bb_note;
2133 /* For non-fallthru edges, we must adjust the predecessor's
2134 jump instruction to target our new block. */
2135 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
2137 if (!redirect_edge_and_branch (edge_in, bb))
2138 abort ();
2140 else
2141 redirect_edge_succ (edge_in, bb);
2143 return bb;
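/* An illustrative sketch (hypothetical, never compiled): give a block a
   dedicated landing pad by splitting one incoming edge, e.g. to build a
   loop pre-header.  */
#if 0
static basic_block
example_isolate_edge (e)
     edge e;
{
  /* split_edge aborts on abnormal edges, so refuse those here.  */
  if (e->flags & EDGE_ABNORMAL)
    return NULL;

  /* The new block inherits E's count and falls through to E's old
     destination.  */
  return split_edge (e);
}
#endif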
2146 /* Queue instructions for insertion on an edge between two basic blocks.
2147 The new instructions and basic blocks (if any) will not appear in the
2148 CFG until commit_edge_insertions is called. */
2150 void
2151 insert_insn_on_edge (pattern, e)
2152 rtx pattern;
2153 edge e;
2155 /* We cannot insert instructions on an abnormal critical edge.
2156 It will be easier to find the culprit if we die now. */
2157 if ((e->flags & (EDGE_ABNORMAL|EDGE_CRITICAL))
2158 == (EDGE_ABNORMAL|EDGE_CRITICAL))
2159 abort ();
2161 if (e->insns == NULL_RTX)
2162 start_sequence ();
2163 else
2164 push_to_sequence (e->insns);
2166 emit_insn (pattern);
2168 e->insns = get_insns ();
2169 end_sequence ();
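/* A minimal usage sketch (hypothetical, never compiled): queue a
   register-to-register copy on edge E, then materialize everything.
   gen_move_insn is assumed available, as elsewhere in the compiler.  */
#if 0
static void
example_copy_on_edge (e, dest, src)
     edge e;
     rtx dest, src;
{
  insert_insn_on_edge (gen_move_insn (dest, src), e);

  /* More insns may be queued on this and other edges; nothing appears
     in the insn stream until the queues are committed.  */
  commit_edge_insertions ();
}
#endif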
2172 /* Update the CFG for the instructions queued on edge E. */
2174 static void
2175 commit_one_edge_insertion (e)
2176 edge e;
2178 rtx before = NULL_RTX, after = NULL_RTX, insns, tmp, last;
2179 basic_block bb;
2181 /* Pull the insns off the edge now since the edge might go away. */
2182 insns = e->insns;
2183 e->insns = NULL_RTX;
2185 /* Figure out where to put these things. If the destination has
2186 one predecessor, insert there. Except for the exit block. */
2187 if (e->dest->pred->pred_next == NULL
2188 && e->dest != EXIT_BLOCK_PTR)
2190 bb = e->dest;
2192 /* Get the location correct wrt a code label, and "nice" wrt
2193 a basic block note, and before everything else. */
2194 tmp = bb->head;
2195 if (GET_CODE (tmp) == CODE_LABEL)
2196 tmp = NEXT_INSN (tmp);
2197 if (NOTE_INSN_BASIC_BLOCK_P (tmp))
2198 tmp = NEXT_INSN (tmp);
2199 if (tmp == bb->head)
2200 before = tmp;
2201 else
2202 after = PREV_INSN (tmp);
2205 /* If the source has one successor and the edge is not abnormal,
2206 insert there. Except for the entry block. */
2207 else if ((e->flags & EDGE_ABNORMAL) == 0
2208 && e->src->succ->succ_next == NULL
2209 && e->src != ENTRY_BLOCK_PTR)
2211 bb = e->src;
2212 /* It is possible to have a non-simple jump here. Consider a target
2213 where some forms of unconditional jumps clobber a register. This
2214 happens on the fr30 for example.
2216 We know this block has a single successor, so we can just emit
2217 the queued insns before the jump. */
2218 if (GET_CODE (bb->end) == JUMP_INSN)
2220 before = bb->end;
2222 else
2224 /* We'd better be fallthru, or we've lost track of what's what. */
2225 if ((e->flags & EDGE_FALLTHRU) == 0)
2226 abort ();
2228 after = bb->end;
2232 /* Otherwise we must split the edge. */
2233 else
2235 bb = split_edge (e);
2236 after = bb->end;
2239 /* Now that we've found the spot, do the insertion. */
2241 /* Set the new block number for these insns, if structure is allocated. */
2242 if (basic_block_for_insn)
2244 rtx i;
2245 for (i = insns; i != NULL_RTX; i = NEXT_INSN (i))
2246 set_block_for_insn (i, bb);
2249 if (before)
2251 emit_insns_before (insns, before);
2252 if (before == bb->head)
2253 bb->head = insns;
2255 last = prev_nonnote_insn (before);
2257 else
2259 last = emit_insns_after (insns, after);
2260 if (after == bb->end)
2261 bb->end = last;
2264 if (returnjump_p (last))
2266 /* ??? Remove all outgoing edges from BB and add one for EXIT.
2267 This is not currently a problem because this only happens
2268 for the (single) epilogue, which already has a fallthru edge
2269 to EXIT. */
2271 e = bb->succ;
2272 if (e->dest != EXIT_BLOCK_PTR
2273 || e->succ_next != NULL
2274 || (e->flags & EDGE_FALLTHRU) == 0)
2275 abort ();
2276 e->flags &= ~EDGE_FALLTHRU;
2278 emit_barrier_after (last);
2279 bb->end = last;
2281 if (before)
2282 flow_delete_insn (before);
2284 else if (GET_CODE (last) == JUMP_INSN)
2285 abort ();
2286 find_sub_basic_blocks (bb);
2289 /* Update the CFG for all queued instructions. */
2291 void
2292 commit_edge_insertions ()
2294 int i;
2295 basic_block bb;
2297 #ifdef ENABLE_CHECKING
2298 verify_flow_info ();
2299 #endif
2301 i = -1;
2302 bb = ENTRY_BLOCK_PTR;
2303 while (1)
2305 edge e, next;
2307 for (e = bb->succ; e; e = next)
2309 next = e->succ_next;
2310 if (e->insns)
2311 commit_one_edge_insertion (e);
2314 if (++i >= n_basic_blocks)
2315 break;
2316 bb = BASIC_BLOCK (i);
2320 /* Add fake edges to the function exit for any non-constant calls in
2321 the bitmap of blocks specified by BLOCKS or to the whole CFG if
2322 BLOCKS is zero. Return the number of blocks that were split. */
2324 int
2325 flow_call_edges_add (blocks)
2326 sbitmap blocks;
2328 int i;
2329 int blocks_split = 0;
2330 int bb_num = 0;
2331 basic_block *bbs;
2333 /* Map bb indices into basic block pointers since split_block
2334 will renumber the basic blocks. */
2336 bbs = xmalloc (n_basic_blocks * sizeof (*bbs));
2338 if (! blocks)
2340 for (i = 0; i < n_basic_blocks; i++)
2341 bbs[bb_num++] = BASIC_BLOCK (i);
2343 else
2345 EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
2347 bbs[bb_num++] = BASIC_BLOCK (i);
2352 /* Now add fake edges to the function exit for any non-constant
2353 calls since there is no way that we can determine if they will
2354 return or not... */
2356 for (i = 0; i < bb_num; i++)
2358 basic_block bb = bbs[i];
2359 rtx insn;
2360 rtx prev_insn;
2362 for (insn = bb->end; ; insn = prev_insn)
2364 prev_insn = PREV_INSN (insn);
2365 if (GET_CODE (insn) == CALL_INSN && ! CONST_CALL_P (insn))
2367 edge e;
2369 /* Note that the following may create a new basic block
2370 and renumber the existing basic blocks. */
2371 e = split_block (bb, insn);
2372 if (e)
2373 blocks_split++;
2375 make_edge (NULL, bb, EXIT_BLOCK_PTR, EDGE_FAKE);
2377 if (insn == bb->head)
2378 break;
2382 if (blocks_split)
2383 verify_flow_info ();
2385 free (bbs);
2386 return blocks_split;
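/* An illustrative caller (hypothetical, never compiled): restrict the
   fake-edge pass to a single block.  The sbitmap routines are assumed
   from sbitmap.h; passing a null bitmap instead processes the whole
   CFG, as described above.  */
#if 0
static void
example_fake_edges_for_block (bbnum)
     int bbnum;
{
  sbitmap blocks = sbitmap_alloc (n_basic_blocks);

  sbitmap_zero (blocks);
  SET_BIT (blocks, bbnum);

  /* A nonzero return means some blocks were split and renumbered.  */
  if (flow_call_edges_add (blocks) && rtl_dump_file)
    fprintf (rtl_dump_file, "Fake call edges split some blocks.\n");

  sbitmap_free (blocks);
}
#endif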
2389 /* Find unreachable blocks. An unreachable block will have NULL in
2390 block->aux; a non-NULL value indicates the block is reachable. */
2392 void
2393 find_unreachable_blocks ()
2395 edge e;
2396 int i, n;
2397 basic_block *tos, *worklist;
2399 n = n_basic_blocks;
2400 tos = worklist = (basic_block *) xmalloc (sizeof (basic_block) * n);
2402 /* Use basic_block->aux as a marker. Clear them all. */
2404 for (i = 0; i < n; ++i)
2405 BASIC_BLOCK (i)->aux = NULL;
2407 /* Add our starting points to the worklist. Almost always there will
2408 be only one. It isn't inconceivable that we might one day directly
2409 support Fortran alternate entry points. */
2411 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
2413 *tos++ = e->dest;
2415 /* Mark the block with a handy non-null value. */
2416 e->dest->aux = e;
2419 /* Iterate: find everything reachable from what we've already seen. */
2421 while (tos != worklist)
2423 basic_block b = *--tos;
2425 for (e = b->succ; e; e = e->succ_next)
2426 if (!e->dest->aux)
2428 *tos++ = e->dest;
2429 e->dest->aux = e;
2433 free (worklist);
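/* An illustrative caller (hypothetical, never compiled): report
   unreachable blocks without deleting them, then clear the aux marks
   the walk leaves behind, as delete_unreachable_blocks does below.  */
#if 0
static void
example_report_unreachable ()
{
  int i;

  find_unreachable_blocks ();
  for (i = 0; i < n_basic_blocks; i++)
    {
      basic_block b = BASIC_BLOCK (i);

      if (b->aux == NULL && rtl_dump_file)
	fprintf (rtl_dump_file, "Block %i is unreachable.\n", i);
      b->aux = NULL;
    }
}
#endif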
2436 /* Delete all unreachable basic blocks. */
2437 static void
2438 delete_unreachable_blocks ()
2440 int i;
2442 find_unreachable_blocks ();
2444 /* Delete all unreachable basic blocks. Count down so that we
2445 don't interfere with the block renumbering that happens in
2446 flow_delete_block. */
2448 for (i = n_basic_blocks - 1; i >= 0; --i)
2450 basic_block b = BASIC_BLOCK (i);
2452 if (b->aux != NULL)
2453 /* This block was found. Tidy up the mark. */
2454 b->aux = NULL;
2455 else
2456 flow_delete_block (b);
2459 tidy_fallthru_edges ();
2462 /* Return true if NOTE is not one of the ones that must be kept paired,
2463 so that we may simply delete them. */
2465 static int
2466 can_delete_note_p (note)
2467 rtx note;
2469 return (NOTE_LINE_NUMBER (note) == NOTE_INSN_DELETED
2470 || NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK);
2473 /* Unlink a chain of insns between START and FINISH, leaving notes
2474 that must be paired. */
2476 void
2477 flow_delete_insn_chain (start, finish)
2478 rtx start, finish;
2480 /* Unchain the insns one by one. It would be quicker to delete all
2481 of these with a single unchaining, rather than one at a time, but
2482 we need to keep the NOTEs. */
2484 rtx next;
2486 while (1)
2488 next = NEXT_INSN (start);
2489 if (GET_CODE (start) == NOTE && !can_delete_note_p (start))
2490 ;
2491 else if (GET_CODE (start) == CODE_LABEL
2492 && ! can_delete_label_p (start))
2494 const char *name = LABEL_NAME (start);
2495 PUT_CODE (start, NOTE);
2496 NOTE_LINE_NUMBER (start) = NOTE_INSN_DELETED_LABEL;
2497 NOTE_SOURCE_FILE (start) = name;
2499 else
2500 next = flow_delete_insn (start);
2502 if (start == finish)
2503 break;
2504 start = next;
2508 /* Delete the insns in a (non-live) block. We physically delete every
2509 non-deleted-note insn, and update the flow graph appropriately.
2511 Return nonzero if we deleted an exception handler. */
2513 /* ??? Preserving all such notes strikes me as wrong. It would be nice
2514 to post-process the stream to remove empty blocks, loops, ranges, etc. */
2516 int
2517 flow_delete_block (b)
2518 basic_block b;
2520 int deleted_handler = 0;
2521 rtx insn, end, tmp;
2523 /* If the head of this block is a CODE_LABEL, then it might be the
2524 label for an exception handler which can't be reached.
2526 We need to remove the label from the exception_handler_label list
2527 and remove the associated NOTE_INSN_EH_REGION_BEG and
2528 NOTE_INSN_EH_REGION_END notes. */
2530 insn = b->head;
2532 never_reached_warning (insn);
2534 if (GET_CODE (insn) == CODE_LABEL)
2535 maybe_remove_eh_handler (insn);
2537 /* Include any jump table following the basic block. */
2538 end = b->end;
2539 if (GET_CODE (end) == JUMP_INSN
2540 && (tmp = JUMP_LABEL (end)) != NULL_RTX
2541 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
2542 && GET_CODE (tmp) == JUMP_INSN
2543 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
2544 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
2545 end = tmp;
2547 /* Include any barrier that may follow the basic block. */
2548 tmp = next_nonnote_insn (end);
2549 if (tmp && GET_CODE (tmp) == BARRIER)
2550 end = tmp;
2552 /* Selectively delete the entire chain. */
2553 flow_delete_insn_chain (insn, end);
2555 /* Remove the edges into and out of this block. Note that there may
2556 indeed be edges in, if we are removing an unreachable loop. */
2558 edge e, next, *q;
2560 for (e = b->pred; e; e = next)
2562 for (q = &e->src->succ; *q != e; q = &(*q)->succ_next)
2563 continue;
2564 *q = e->succ_next;
2565 next = e->pred_next;
2566 n_edges--;
2567 free (e);
2569 for (e = b->succ; e; e = next)
2571 for (q = &e->dest->pred; *q != e; q = &(*q)->pred_next)
2572 continue;
2573 *q = e->pred_next;
2574 next = e->succ_next;
2575 n_edges--;
2576 free (e);
2579 b->pred = NULL;
2580 b->succ = NULL;
2583 /* Remove the basic block from the array, and compact behind it. */
2584 expunge_block (b);
2586 return deleted_handler;
2589 /* Remove block B from the basic block array and compact behind it. */
2591 static void
2592 expunge_block (b)
2593 basic_block b;
2595 int i, n = n_basic_blocks;
2597 for (i = b->index; i + 1 < n; ++i)
2599 basic_block x = BASIC_BLOCK (i + 1);
2600 BASIC_BLOCK (i) = x;
2601 x->index = i;
2604 basic_block_info->num_elements--;
2605 n_basic_blocks--;
2608 /* Delete INSN by patching it out. Return the next insn. */
2610 rtx
2611 flow_delete_insn (insn)
2612 rtx insn;
2614 rtx prev = PREV_INSN (insn);
2615 rtx next = NEXT_INSN (insn);
2616 rtx note;
2618 PREV_INSN (insn) = NULL_RTX;
2619 NEXT_INSN (insn) = NULL_RTX;
2620 INSN_DELETED_P (insn) = 1;
2622 if (prev)
2623 NEXT_INSN (prev) = next;
2624 if (next)
2625 PREV_INSN (next) = prev;
2626 else
2627 set_last_insn (prev);
2629 if (GET_CODE (insn) == CODE_LABEL)
2630 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
2632 /* If deleting a jump, decrement the use count of the label. Deleting
2633 the label itself should happen in the normal course of block merging. */
2634 if (GET_CODE (insn) == JUMP_INSN
2635 && JUMP_LABEL (insn)
2636 && GET_CODE (JUMP_LABEL (insn)) == CODE_LABEL)
2637 LABEL_NUSES (JUMP_LABEL (insn))--;
2639 /* Also if deleting an insn that references a label. */
2640 else if ((note = find_reg_note (insn, REG_LABEL, NULL_RTX)) != NULL_RTX
2641 && GET_CODE (XEXP (note, 0)) == CODE_LABEL)
2642 LABEL_NUSES (XEXP (note, 0))--;
2644 if (GET_CODE (insn) == JUMP_INSN
2645 && (GET_CODE (PATTERN (insn)) == ADDR_VEC
2646 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
2648 rtx pat = PATTERN (insn);
2649 int diff_vec_p = GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC;
2650 int len = XVECLEN (pat, diff_vec_p);
2651 int i;
2653 for (i = 0; i < len; i++)
2654 LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0))--;
2657 return next;
2660 /* True if a given label can be deleted. */
2662 static int
2663 can_delete_label_p (label)
2664 rtx label;
2666 rtx x;
2668 if (LABEL_PRESERVE_P (label))
2669 return 0;
2671 for (x = forced_labels; x; x = XEXP (x, 1))
2672 if (label == XEXP (x, 0))
2673 return 0;
2674 for (x = label_value_list; x; x = XEXP (x, 1))
2675 if (label == XEXP (x, 0))
2676 return 0;
2677 for (x = exception_handler_labels; x; x = XEXP (x, 1))
2678 if (label == XEXP (x, 0))
2679 return 0;
2681 /* User declared labels must be preserved. */
2682 if (LABEL_NAME (label) != 0)
2683 return 0;
2685 return 1;
2688 static int
2689 tail_recursion_label_p (label)
2690 rtx label;
2692 rtx x;
2694 for (x = tail_recursion_label_list; x; x = XEXP (x, 1))
2695 if (label == XEXP (x, 0))
2696 return 1;
2698 return 0;
2701 /* Blocks A and B are to be merged into a single block A. The insns
2702 are already contiguous, hence `nomove'. */
2704 void
2705 merge_blocks_nomove (a, b)
2706 basic_block a, b;
2708 edge e;
2709 rtx b_head, b_end, a_end;
2710 rtx del_first = NULL_RTX, del_last = NULL_RTX;
2711 int b_empty = 0;
2713 /* If there was a CODE_LABEL beginning B, delete it. */
2714 b_head = b->head;
2715 b_end = b->end;
2716 if (GET_CODE (b_head) == CODE_LABEL)
2718 /* Detect basic blocks with nothing but a label. This can happen
2719 in particular at the end of a function. */
2720 if (b_head == b_end)
2721 b_empty = 1;
2722 del_first = del_last = b_head;
2723 b_head = NEXT_INSN (b_head);
2726 /* Delete the basic block note. */
2727 if (NOTE_INSN_BASIC_BLOCK_P (b_head))
2729 if (b_head == b_end)
2730 b_empty = 1;
2731 if (! del_last)
2732 del_first = b_head;
2733 del_last = b_head;
2734 b_head = NEXT_INSN (b_head);
2737 /* If there was a jump out of A, delete it. */
2738 a_end = a->end;
2739 if (GET_CODE (a_end) == JUMP_INSN)
2741 rtx prev;
2743 for (prev = PREV_INSN (a_end); ; prev = PREV_INSN (prev))
2744 if (GET_CODE (prev) != NOTE
2745 || NOTE_LINE_NUMBER (prev) == NOTE_INSN_BASIC_BLOCK
2746 || prev == a->head)
2747 break;
2749 del_first = a_end;
2751 #ifdef HAVE_cc0
2752 /* If this was a conditional jump, we need to also delete
2753 the insn that set cc0. */
2754 if (prev && sets_cc0_p (prev))
2756 rtx tmp = prev;
2757 prev = prev_nonnote_insn (prev);
2758 if (!prev)
2759 prev = a->head;
2760 del_first = tmp;
2762 #endif
2764 a_end = prev;
2766 else if (GET_CODE (NEXT_INSN (a_end)) == BARRIER)
2767 del_first = NEXT_INSN (a_end);
2769 /* Delete everything marked above as well as crap that might be
2770 hanging out between the two blocks. */
2771 flow_delete_insn_chain (del_first, del_last);
2773 /* Normally there should only be one successor of A and that is B, but
2774 partway through the merge of blocks for conditional_execution we'll
2775 be merging a TEST block with THEN and ELSE successors. Free the
2776 whole lot of them and hope the caller knows what they're doing. */
2777 while (a->succ)
2778 remove_edge (a->succ);
2780 /* Adjust the edges out of B for the new owner. */
2781 for (e = b->succ; e; e = e->succ_next)
2782 e->src = a;
2783 a->succ = b->succ;
2785 /* B hasn't quite yet ceased to exist. Attempt to prevent mishap. */
2786 b->pred = b->succ = NULL;
2788 /* Reassociate the insns of B with A. */
2789 if (!b_empty)
2791 if (basic_block_for_insn)
2793 BLOCK_FOR_INSN (b_head) = a;
2794 while (b_head != b_end)
2796 b_head = NEXT_INSN (b_head);
2797 BLOCK_FOR_INSN (b_head) = a;
2800 a_end = b_end;
2802 a->end = a_end;
2804 expunge_block (b);
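/* For instance, given the contiguous pair

	A:  insn-1 ... jump L
	B:  L: basic-block-note insn-2 ...

   merge_blocks_nomove (A, B) deletes the jump out of A, B's leading
   label L and B's block note, leaving a single block A that now ends
   with insn-2.  */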
2807 /* Blocks A and B are to be merged into a single block. A has no incoming
2808 fallthru edge, so it can be moved before B without adding or modifying
2809 any jumps (aside from the jump from A to B). */
2811 static int
2812 merge_blocks_move_predecessor_nojumps (a, b)
2813 basic_block a, b;
2815 rtx start, end, barrier;
2816 int index;
2818 start = a->head;
2819 end = a->end;
2821 barrier = next_nonnote_insn (end);
2822 if (GET_CODE (barrier) != BARRIER)
2823 abort ();
2824 flow_delete_insn (barrier);
2826 /* Move block and loop notes out of the chain so that we do not
2827 disturb their order.
2829 ??? A better solution would be to squeeze out all the non-nested notes
2830 and adjust the block trees appropriately. Even better would be to have
2831 a tighter connection between block trees and rtl so that this is not
2832 necessary. */
2833 start = squeeze_notes (start, end);
2835 /* Scramble the insn chain. */
2836 if (end != PREV_INSN (b->head))
2837 reorder_insns (start, end, PREV_INSN (b->head));
2839 if (rtl_dump_file)
2841 fprintf (rtl_dump_file, "Moved block %d before %d and merged.\n",
2842 a->index, b->index);
2845 /* Swap the records for the two blocks around. Although we are deleting B,
2846 A is now where B was and we want to compact the BB array from where
2847 A used to be. */
2848 BASIC_BLOCK (a->index) = b;
2849 BASIC_BLOCK (b->index) = a;
2850 index = a->index;
2851 a->index = b->index;
2852 b->index = index;
2854 /* Now blocks A and B are contiguous. Merge them. */
2855 merge_blocks_nomove (a, b);
2857 return 1;
2860 /* Blocks A and B are to be merged into a single block. B has no outgoing
2861 fallthru edge, so it can be moved after A without adding or modifying
2862 any jumps (aside from the jump from A to B). */
2864 static int
2865 merge_blocks_move_successor_nojumps (a, b)
2866 basic_block a, b;
2868 rtx start, end, barrier;
2870 start = b->head;
2871 end = b->end;
2872 barrier = NEXT_INSN (end);
2874 /* Recognize a jump table following block B. */
2875 if (barrier
2876 && GET_CODE (barrier) == CODE_LABEL
2877 && NEXT_INSN (barrier)
2878 && GET_CODE (NEXT_INSN (barrier)) == JUMP_INSN
2879 && (GET_CODE (PATTERN (NEXT_INSN (barrier))) == ADDR_VEC
2880 || GET_CODE (PATTERN (NEXT_INSN (barrier))) == ADDR_DIFF_VEC))
2882 end = NEXT_INSN (barrier);
2883 barrier = NEXT_INSN (end);
2886 /* There had better have been a barrier there. Delete it. */
2887 if (barrier && GET_CODE (barrier) == BARRIER)
2888 flow_delete_insn (barrier);
2890 /* Move block and loop notes out of the chain so that we do not
2891 disturb their order.
2893 ??? A better solution would be to squeeze out all the non-nested notes
2894 and adjust the block trees appropriately. Even better would be to have
2895 a tighter connection between block trees and rtl so that this is not
2896 necessary. */
2897 start = squeeze_notes (start, end);
2899 /* Scramble the insn chain. */
2900 reorder_insns (start, end, a->end);
2902 /* Now blocks A and B are contiguous. Merge them. */
2903 merge_blocks_nomove (a, b);
2905 if (rtl_dump_file)
2907 fprintf (rtl_dump_file, "Moved block %d after %d and merged.\n",
2908 b->index, a->index);
2911 return 1;
2914 /* Attempt to merge basic blocks that are potentially non-adjacent.
2915 Return true iff the attempt succeeded. */
2917 static int
2918 merge_blocks (e, b, c, mode)
2919 edge e;
2920 basic_block b, c;
2921 int mode;
2923 /* If C has a tail recursion label, do not merge. There is no
2924 edge recorded from the call_placeholder back to this label, as
2925 that would make optimize_sibling_and_tail_recursive_calls more
2926 complex for no gain. */
2927 if (GET_CODE (c->head) == CODE_LABEL
2928 && tail_recursion_label_p (c->head))
2929 return 0;
2931 /* If B has a fallthru edge to C, no need to move anything. */
2932 if (e->flags & EDGE_FALLTHRU)
2934 merge_blocks_nomove (b, c);
2936 if (rtl_dump_file)
2938 fprintf (rtl_dump_file, "Merged %d and %d without moving.\n",
2939 b->index, c->index);
2942 return 1;
2944 /* Otherwise we will need to move code around. Do that only if expensive
2945 transformations are allowed. */
2946 else if (mode & CLEANUP_EXPENSIVE)
2948 edge tmp_edge, c_fallthru_edge;
2949 int c_has_outgoing_fallthru;
2950 int b_has_incoming_fallthru;
2952 /* Avoid overactive code motion, as the forwarder blocks should be
2953 eliminated by edge redirection instead. One exception might have
2954 been if B is a forwarder block and C has no fallthru edge, but
2955 that should be cleaned up by bb-reorder instead. */
2956 if (forwarder_block_p (b) || forwarder_block_p (c))
2957 return 0;
2959 /* We must make sure to not munge nesting of lexical blocks,
2960 and loop notes. This is done by squeezing out all the notes
2961 and leaving them there to lie. Not ideal, but functional. */
2963 for (tmp_edge = c->succ; tmp_edge; tmp_edge = tmp_edge->succ_next)
2964 if (tmp_edge->flags & EDGE_FALLTHRU)
2965 break;
2966 c_has_outgoing_fallthru = (tmp_edge != NULL);
2967 c_fallthru_edge = tmp_edge;
2969 for (tmp_edge = b->pred; tmp_edge; tmp_edge = tmp_edge->pred_next)
2970 if (tmp_edge->flags & EDGE_FALLTHRU)
2971 break;
2972 b_has_incoming_fallthru = (tmp_edge != NULL);
2974 /* If B does not have an incoming fallthru, then it can be moved
2975 immediately before C without introducing or modifying jumps.
2976 C cannot be the first block, so we do not have to worry about
2977 accessing a non-existent block. */
2978 if (! b_has_incoming_fallthru)
2979 return merge_blocks_move_predecessor_nojumps (b, c);
2981 /* Otherwise, we're going to try to move C after B. If C does
2982 not have an outgoing fallthru, then it can be moved
2983 immediately after B without introducing or modifying jumps. */
2984 if (! c_has_outgoing_fallthru)
2985 return merge_blocks_move_successor_nojumps (b, c);
2987 /* Otherwise, we'll need to insert an extra jump, and possibly
2988 a new block to contain it. We can't redirect to EXIT_BLOCK_PTR,
2989 as we don't have explicit return instructions before epilogues
2990 are generated, so give up on that case. */
2992 if (c_fallthru_edge->dest != EXIT_BLOCK_PTR
2993 && merge_blocks_move_successor_nojumps (b, c))
2995 basic_block target = c_fallthru_edge->dest;
2996 rtx barrier;
2997 basic_block new;
2999 /* This is a dirty hack to avoid code duplication.
3001 Set the edge to point to the wrong basic block, so
3002 redirect_edge_and_branch_force will do the trick
3003 and rewire the edge back to the original location. */
3004 redirect_edge_succ (c_fallthru_edge, ENTRY_BLOCK_PTR);
3005 new = redirect_edge_and_branch_force (c_fallthru_edge, target);
3007 /* We've just created a barrier, but another barrier is
3008 already present in the stream. Avoid the duplicate. */
3009 barrier = next_nonnote_insn (new ? new->end : b->end);
3010 if (GET_CODE (barrier) != BARRIER)
3011 abort ();
3012 flow_delete_insn (barrier);
3015 return 0;
3017 return 0;
3020 /* Simplify a conditional jump around an unconditional jump.
3021 Return true if something changed. */
3023 static bool
3024 try_simplify_condjump (cbranch_block)
3025 basic_block cbranch_block;
3027 basic_block jump_block, jump_dest_block, cbranch_dest_block;
3028 edge cbranch_jump_edge, cbranch_fallthru_edge;
3029 rtx cbranch_insn;
3031 /* Verify that there are exactly two successors. */
3032 if (!cbranch_block->succ
3033 || !cbranch_block->succ->succ_next
3034 || cbranch_block->succ->succ_next->succ_next)
3035 return false;
3037 /* Verify that we've got a normal conditional branch at the end
3038 of the block. */
3039 cbranch_insn = cbranch_block->end;
3040 if (!any_condjump_p (cbranch_insn))
3041 return false;
3043 cbranch_fallthru_edge = FALLTHRU_EDGE (cbranch_block);
3044 cbranch_jump_edge = BRANCH_EDGE (cbranch_block);
3046 /* The next block must not have multiple predecessors, must not
3047 be the last block in the function, and must contain just the
3048 unconditional jump. */
3049 jump_block = cbranch_fallthru_edge->dest;
3050 if (jump_block->pred->pred_next
3051 || jump_block->index == n_basic_blocks - 1
3052 || !forwarder_block_p (jump_block))
3053 return false;
3054 jump_dest_block = jump_block->succ->dest;
3056 /* The conditional branch must target the block after the
3057 unconditional branch. */
3058 cbranch_dest_block = cbranch_jump_edge->dest;
3060 if (!can_fallthru (jump_block, cbranch_dest_block))
3061 return false;
3063 /* Invert the conditional branch. Prevent jump.c from deleting
3064 "unreachable" instructions. */
3065 LABEL_NUSES (JUMP_LABEL (cbranch_insn))++;
3066 if (!invert_jump (cbranch_insn, block_label (jump_dest_block), 1))
3068 LABEL_NUSES (JUMP_LABEL (cbranch_insn))--;
3069 return false;
3072 if (rtl_dump_file)
3073 fprintf (rtl_dump_file, "Simplifying condjump %i around jump %i\n",
3074 INSN_UID (cbranch_insn), INSN_UID (jump_block->end));
3076 /* Success. Update the CFG to match. Note that after this point
3077 the edge variable names appear backwards; the redirection is done
3078 this way to preserve edge profile data. */
3079 redirect_edge_succ (cbranch_jump_edge, cbranch_dest_block);
3080 redirect_edge_succ (cbranch_fallthru_edge, jump_dest_block);
3081 cbranch_jump_edge->flags |= EDGE_FALLTHRU;
3082 cbranch_fallthru_edge->flags &= ~EDGE_FALLTHRU;
3084 /* Delete the block with the unconditional jump, and clean up the mess. */
3085 flow_delete_block (jump_block);
3086 tidy_fallthru_edge (cbranch_jump_edge, cbranch_block, cbranch_dest_block);
3088 return true;
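/* Concretely, the transformation replaces

	if (cond) goto L2;
	goto L3;
     L2:

   with

	if (! cond) goto L3;
     L2:

   inverting the branch and deleting the block that held the
   unconditional jump.  */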
3091 /* Attempt to forward edges leaving basic block B.
3092 Return true if successful. */
3094 static bool
3095 try_forward_edges (b)
3096 basic_block b;
3098 bool changed = false;
3099 edge e, next;
3101 for (e = b->succ; e ; e = next)
3103 basic_block target, first;
3104 int counter;
3106 next = e->succ_next;
3108 /* Skip complex edges because we don't know how to update them.
3109 Skip fallthru edges because there's no jump to update. */
3110 if (e->flags & (EDGE_COMPLEX | EDGE_FALLTHRU))
3111 continue;
3113 target = first = e->dest;
3114 counter = 0;
3116 /* Look for the real destination of the jump.
3117 Avoid an infinite loop chasing an infinite empty loop by counting
3118 up to n_basic_blocks. */
3119 while (forwarder_block_p (target)
3120 && target->succ->dest != EXIT_BLOCK_PTR
3121 && counter < n_basic_blocks)
3123 /* Bypass trivial infinite loops. */
3124 if (target == target->succ->dest)
3125 counter = n_basic_blocks;
3126 target = target->succ->dest, counter++;
3129 if (counter >= n_basic_blocks)
3131 if (rtl_dump_file)
3132 fprintf (rtl_dump_file, "Infinite loop in BB %i.\n",
3133 target->index);
3135 else if (target == first)
3136 ; /* We didn't do anything. */
3137 else if (redirect_edge_and_branch (e, target))
3139 /* We successfully forwarded the edge. Now update profile
3140 data: for each edge we traversed in the chain, remove
3141 the original edge's execution count. */
3142 do
3144 first->count -= e->count;
3145 first->succ->count -= e->count;
3146 first->frequency -= ((e->probability * b->frequency
3147 + REG_BR_PROB_BASE / 2)
3148 / REG_BR_PROB_BASE);
3149 first = first->succ->dest;
3151 while (first != target);
3153 changed = true;
3155 else
3157 if (rtl_dump_file)
3158 fprintf (rtl_dump_file, "Forwarding edge %i->%i to %i failed.\n",
3159 b->index, e->dest->index, target->index);
3163 return changed;
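/* For example, if B ends in "goto L1", block L1 contains only
   "goto L2", and L2 in turn only "goto L3", the loop above walks that
   forwarder chain and retargets B's jump directly at L3.  The now
   unreachable forwarders are deleted by later cleanups.  */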
3166 /* Look through the insns at the end of BB1 and BB2 and find the longest
3167 sequence of equivalent insns. Store the first insns of that sequence
3168 in *F1 and *F2 and return the sequence length.
3170 To simplify callers of this function, if the blocks match exactly,
3171 store the head of the blocks in *F1 and *F2. */
3173 static int
3174 flow_find_cross_jump (mode, bb1, bb2, f1, f2)
3175 int mode ATTRIBUTE_UNUSED;
3176 basic_block bb1, bb2;
3177 rtx *f1, *f2;
3179 rtx i1, i2, p1, p2, last1, last2, afterlast1, afterlast2;
3180 int ninsns = 0;
3182 /* Skip simple jumps at the end of the blocks. Complex jumps still
3183 need to be compared for equivalence, which we'll do below. */
3185 i1 = bb1->end;
3186 if (onlyjump_p (i1))
3187 i1 = PREV_INSN (i1);
3188 i2 = bb2->end;
3189 if (onlyjump_p (i2))
3190 i2 = PREV_INSN (i2);
3192 last1 = afterlast1 = last2 = afterlast2 = NULL_RTX;
3193 while (true)
3195 /* Ignore notes. */
3196 while ((GET_CODE (i1) == NOTE && i1 != bb1->head))
3197 i1 = PREV_INSN (i1);
3198 while ((GET_CODE (i2) == NOTE && i2 != bb2->head))
3199 i2 = PREV_INSN (i2);
3201 if (i1 == bb1->head || i2 == bb2->head)
3202 break;
3204 /* Verify that I1 and I2 are equivalent. */
3206 if (GET_CODE (i1) != GET_CODE (i2))
3207 break;
3209 p1 = PATTERN (i1);
3210 p2 = PATTERN (i2);
3212 /* If this is a CALL_INSN, compare register usage information.
3213 If we don't check this on stack register machines, the two
3214 CALL_INSNs might be merged leaving reg-stack.c with mismatching
3215 numbers of stack registers in the same basic block.
3216 If we don't check this on machines with delay slots, a delay slot may
3217 be filled that clobbers a parameter expected by the subroutine.
3219 ??? We take the simple route for now and assume that if they're
3220 equal, they were constructed identically. */
3222 if (GET_CODE (i1) == CALL_INSN
3223 && ! rtx_equal_p (CALL_INSN_FUNCTION_USAGE (i1),
3224 CALL_INSN_FUNCTION_USAGE (i2)))
3225 break;
3227 #ifdef STACK_REGS
3228 /* If cross_jump_death_matters is not 0, the insn's mode
3229 indicates whether or not the insn contains any stack-like
3230 regs. */
3232 if ((mode & CLEANUP_POST_REGSTACK) && stack_regs_mentioned (i1))
3234 /* If register stack conversion has already been done, then
3235 death notes must also be compared before it is certain that
3236 the two instruction streams match. */
3238 rtx note;
3239 HARD_REG_SET i1_regset, i2_regset;
3241 CLEAR_HARD_REG_SET (i1_regset);
3242 CLEAR_HARD_REG_SET (i2_regset);
3244 for (note = REG_NOTES (i1); note; note = XEXP (note, 1))
3245 if (REG_NOTE_KIND (note) == REG_DEAD
3246 && STACK_REG_P (XEXP (note, 0)))
3247 SET_HARD_REG_BIT (i1_regset, REGNO (XEXP (note, 0)));
3249 for (note = REG_NOTES (i2); note; note = XEXP (note, 1))
3250 if (REG_NOTE_KIND (note) == REG_DEAD
3251 && STACK_REG_P (XEXP (note, 0)))
3252 SET_HARD_REG_BIT (i2_regset, REGNO (XEXP (note, 0)));
3254 GO_IF_HARD_REG_EQUAL (i1_regset, i2_regset, done);
3256 break;
3258 done:
3261 #endif
3263 if (GET_CODE (p1) != GET_CODE (p2))
3264 break;
3266 if (! rtx_renumbered_equal_p (p1, p2))
3268 /* The following code helps take care of G++ cleanups. */
3269 rtx equiv1 = find_reg_equal_equiv_note (i1);
3270 rtx equiv2 = find_reg_equal_equiv_note (i2);
3272 if (equiv1 && equiv2
3273 /* If the equivalences are not to a constant, they may
3274 reference pseudos that no longer exist, so we can't
3275 use them. */
3276 && CONSTANT_P (XEXP (equiv1, 0))
3277 && rtx_equal_p (XEXP (equiv1, 0), XEXP (equiv2, 0)))
3279 rtx s1 = single_set (i1);
3280 rtx s2 = single_set (i2);
3281 if (s1 != 0 && s2 != 0
3282 && rtx_renumbered_equal_p (SET_DEST (s1), SET_DEST (s2)))
3284 validate_change (i1, &SET_SRC (s1), XEXP (equiv1, 0), 1);
3285 validate_change (i2, &SET_SRC (s2), XEXP (equiv2, 0), 1);
3286 if (! rtx_renumbered_equal_p (p1, p2))
3287 cancel_changes (0);
3288 else if (apply_change_group ())
3289 goto win;
3292 break;
3295 win:
3296 /* Don't begin a cross-jump with a USE or CLOBBER insn. */
3297 if (GET_CODE (p1) != USE && GET_CODE (p1) != CLOBBER)
3299 afterlast1 = last1, afterlast2 = last2;
3300 last1 = i1, last2 = i2;
3301 ninsns++;
3303 i1 = PREV_INSN (i1);
3304 i2 = PREV_INSN (i2);
3307 #ifdef HAVE_cc0
3308 if (ninsns)
3310 /* Don't allow the insn after a compare to be shared by
3311 cross-jumping unless the compare is also shared. */
3312 if (reg_mentioned_p (cc0_rtx, last1) && ! sets_cc0_p (last1))
3313 last1 = afterlast1, last2 = afterlast2, ninsns--;
3315 #endif
3317 /* Include preceding notes and labels in the cross-jump. One,
3318 this may bring us to the head of the blocks as requested above.
3319 Two, it keeps line number notes as matched as may be. */
3320 if (ninsns)
3322 while (last1 != bb1->head && GET_CODE (PREV_INSN (last1)) == NOTE)
3323 last1 = PREV_INSN (last1);
3324 if (last1 != bb1->head && GET_CODE (PREV_INSN (last1)) == CODE_LABEL)
3325 last1 = PREV_INSN (last1);
3326 while (last2 != bb2->head && GET_CODE (PREV_INSN (last2)) == NOTE)
3327 last2 = PREV_INSN (last2);
3328 if (last2 != bb2->head && GET_CODE (PREV_INSN (last2)) == CODE_LABEL)
3329 last2 = PREV_INSN (last2);
3331 *f1 = last1;
3332 *f2 = last2;
3335 return ninsns;
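/* An illustrative caller (hypothetical, never compiled), mirroring
   what try_crossjump_to_edge does below: measure the common tail of
   two blocks; F1 and F2 receive the first matched insn in each.  */
#if 0
static int
example_common_tail (bb1, bb2)
     basic_block bb1, bb2;
{
  rtx f1, f2;
  int nmatch = flow_find_cross_jump (0, bb1, bb2, &f1, &f2);

  if (nmatch && rtl_dump_file)
    fprintf (rtl_dump_file, "%i common trailing insns\n", nmatch);
  return nmatch;
}
#endif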
3338 /* Return true iff outgoing edges of BB1 and BB2 match, together with
3339 the branch instruction. This means that if we commonize the control
3340 flow before the end of the basic block, the semantics remain unchanged.
3342 We may assume that there exists one edge with a common destination. */
3344 static bool
3345 outgoing_edges_match (bb1, bb2)
3346 basic_block bb1;
3347 basic_block bb2;
3349 /* If BB1 has only one successor, we must be looking at an unconditional
3350 jump. Which, by the assumption above, means that we only need to check
3351 that BB2 has one successor. */
3352 if (bb1->succ && !bb1->succ->succ_next)
3353 return (bb2->succ && !bb2->succ->succ_next);
3355 /* Match conditional jumps - this may get tricky when fallthru and branch
3356 edges are crossed. */
3357 if (bb1->succ
3358 && bb1->succ->succ_next
3359 && !bb1->succ->succ_next->succ_next
3360 && any_condjump_p (bb1->end))
3362 edge b1, f1, b2, f2;
3363 bool reverse, match;
3364 rtx set1, set2, cond1, cond2;
3365 enum rtx_code code1, code2;
3367 if (!bb2->succ
3368 || !bb2->succ->succ_next
3369 || bb1->succ->succ_next->succ_next
3370 || !any_condjump_p (bb2->end))
3371 return false;
3373 b1 = BRANCH_EDGE (bb1);
3374 b2 = BRANCH_EDGE (bb2);
3375 f1 = FALLTHRU_EDGE (bb1);
3376 f2 = FALLTHRU_EDGE (bb2);
3378 /* Get around possible forwarders on fallthru edges. Other cases
3379 should be optimized out already. */
3380 if (forwarder_block_p (f1->dest))
3381 f1 = f1->dest->succ;
3382 if (forwarder_block_p (f2->dest))
3383 f2 = f2->dest->succ;
3385 /* To simplify use of this function, return false if there are
3386 unneeded forwarder blocks. These will get eliminated later
3387 during cleanup_cfg. */
3388 if (forwarder_block_p (f1->dest)
3389 || forwarder_block_p (f2->dest)
3390 || forwarder_block_p (b1->dest)
3391 || forwarder_block_p (b2->dest))
3392 return false;
3394 if (f1->dest == f2->dest && b1->dest == b2->dest)
3395 reverse = false;
3396 else if (f1->dest == b2->dest && b1->dest == f2->dest)
3397 reverse = true;
3398 else
3399 return false;
3401 set1 = pc_set (bb1->end);
3402 set2 = pc_set (bb2->end);
3403 if ((XEXP (SET_SRC (set1), 1) == pc_rtx)
3404 != (XEXP (SET_SRC (set2), 1) == pc_rtx))
3405 reverse = !reverse;
3407 cond1 = XEXP (SET_SRC (set1), 0);
3408 cond2 = XEXP (SET_SRC (set2), 0);
3409 code1 = GET_CODE (cond1);
3410 if (reverse)
3411 code2 = reversed_comparison_code (cond2, bb2->end);
3412 else
3413 code2 = GET_CODE (cond2);
3414 if (code2 == UNKNOWN)
3415 return false;
3417 /* Verify codes and operands match. */
3418 match = ((code1 == code2
3419 && rtx_renumbered_equal_p (XEXP (cond1, 0), XEXP (cond2, 0))
3420 && rtx_renumbered_equal_p (XEXP (cond1, 1), XEXP (cond2, 1)))
3421 || (code1 == swap_condition (code2)
3422 && rtx_renumbered_equal_p (XEXP (cond1, 1),
3423 XEXP (cond2, 0))
3424 && rtx_renumbered_equal_p (XEXP (cond1, 0),
3425 XEXP (cond2, 1))));
3427 /* If we return true, we will join the blocks. Which means that
3428 we will only have one branch prediction bit to work with. Thus
3429 we require the existing branches to have probabilities that are
3430 roughly similar. */
3431 /* ??? We should use bb->frequency to allow merging in infrequently
3432 executed blocks, but at the moment it is not available when
3433 cleanup_cfg is run. */
3434 if (match && !optimize_size)
3436 rtx note1, note2;
3437 int prob1, prob2;
3438 note1 = find_reg_note (bb1->end, REG_BR_PROB, 0);
3439 note2 = find_reg_note (bb2->end, REG_BR_PROB, 0);
3441 if (note1 && note2)
3443 prob1 = INTVAL (XEXP (note1, 0));
3444 prob2 = INTVAL (XEXP (note2, 0));
3445 if (reverse)
3446 prob2 = REG_BR_PROB_BASE - prob2;
3448 /* Fail if the difference in probabilities is
3449 greater than 5%. */
3450 if (abs (prob1 - prob2) > REG_BR_PROB_BASE / 20)
3451 return false;
3453 else if (note1 || note2)
3454 return false;
3457 if (rtl_dump_file && match)
3458 fprintf (rtl_dump_file, "Conditionals in bb %i and %i match.\n",
3459 bb1->index, bb2->index);
3461 return match;
3464 /* ??? We can handle computed jumps too. This may be important for
3465 inlined functions containing switch statements. Also jumps w/o
3466 fallthru edges can be handled by simply matching the whole insn. */
3467 return false;
3470 /* E1 and E2 are edges with the same destination block. Search their
3471 predecessors for common code. If found, redirect control flow from
3472 (maybe the middle of) E1->SRC to (maybe the middle of) E2->SRC. */
3474 static bool
3475 try_crossjump_to_edge (mode, e1, e2)
3476 int mode;
3477 edge e1, e2;
3479 int nmatch;
3480 basic_block src1 = e1->src, src2 = e2->src;
3481 basic_block redirect_to;
3482 rtx newpos1, newpos2;
3483 edge s;
3484 rtx last;
3485 rtx label;
3487 /* Search backward through forwarder blocks. We don't need to worry
3488 about multiple entry or chained forwarders, as they will be optimized
3489 away. We do this to look past the unconditional jump following a
3490 conditional jump that is required due to the current CFG shape. */
3491 if (src1->pred
3492 && !src1->pred->pred_next
3493 && forwarder_block_p (src1))
3495 e1 = src1->pred;
3496 src1 = e1->src;
3498 if (src2->pred
3499 && !src2->pred->pred_next
3500 && forwarder_block_p (src2))
3502 e2 = src2->pred;
3503 src2 = e2->src;
3506 /* Nothing to do if we reach ENTRY, or a common source block. */
3507 if (src1 == ENTRY_BLOCK_PTR || src2 == ENTRY_BLOCK_PTR)
3508 return false;
3509 if (src1 == src2)
3510 return false;
3512 /* Seeing more than one forwarder block would confuse us later... */
3513 if (forwarder_block_p (e1->dest)
3514 && forwarder_block_p (e1->dest->succ->dest))
3515 return false;
3516 if (forwarder_block_p (e2->dest)
3517 && forwarder_block_p (e2->dest->succ->dest))
3518 return false;
3520 /* Likewise with dead code. */
3521 /* ??? Won't we have eliminated these by now? */
3522 if (!src1->pred || !src2->pred)
3523 return false;
3525 /* Likewise with non-jump edges. */
3526 /* ??? Non-jump? You mean GET_CODE (e1->src->end) != JUMP_INSN?
3527 This fails for computed-goto as well, which may in fact be joinable. */
3528 if (e1->flags & EDGE_COMPLEX)
3529 return false;
3531 /* Look for the common insn sequence, part the first ... */
3532 if (!outgoing_edges_match (src1, src2))
3533 return false;
3535 /* ... and part the second. */
3536 nmatch = flow_find_cross_jump (mode, src1, src2, &newpos1, &newpos2);
3537 if (!nmatch)
3538 return false;
3540 /* Avoid splitting if possible. */
3541 if (newpos2 == src2->head)
3542 redirect_to = src2;
3543 else
3545 if (rtl_dump_file)
3546 fprintf (rtl_dump_file, "Splitting bb %i before %i insns\n",
3547 src2->index, nmatch);
3548 redirect_to = split_block (src2, PREV_INSN (newpos2))->dest;
3551 if (rtl_dump_file)
3552 fprintf (rtl_dump_file,
3553 "Cross jumping from bb %i to bb %i; %i common insns\n",
3554 src1->index, src2->index, nmatch);
3556 redirect_to->count += src1->count;
3557 redirect_to->frequency += src1->frequency;
3559 /* Recompute the frequencies and counts of outgoing edges. */
3560 for (s = redirect_to->succ; s; s = s->succ_next)
3562 edge s2;
3563 basic_block d = s->dest;
3565 if (forwarder_block_p (d))
3566 d = d->succ->dest;
3567 for (s2 = src1->succ; ; s2 = s2->succ_next)
3569 basic_block d2 = s2->dest;
3570 if (forwarder_block_p (d2))
3571 d2 = d2->succ->dest;
3572 if (d == d2)
3573 break;
3575 s->count += s2->count;
3577 /* Take care to update possible forwarder blocks. We verified
3578 that there is no more than one in the chain, so we can't run
3579 into an infinite loop. */
3580 if (forwarder_block_p (s->dest))
3582 s->dest->succ->count += s2->count;
3583 s->dest->count += s2->count;
3584 s->dest->frequency += ((s->probability * s->src->frequency)
3585 / REG_BR_PROB_BASE);
3587 if (forwarder_block_p (s2->dest))
3589 s2->dest->succ->count -= s2->count;
3590 s2->dest->count -= s2->count;
3591 s2->dest->frequency -= ((s->probability * s->src->frequency)
3592 / REG_BR_PROB_BASE);
3594 if (!redirect_to->frequency && !src1->frequency)
3595 s->probability = (s->probability + s2->probability) / 2;
3596 else
3597 s->probability =
3598 ((s->probability * redirect_to->frequency +
3599 s2->probability * src1->frequency)
3600 / (redirect_to->frequency + src1->frequency));
3603 /* FIXME: enable once probabilities are fetched properly at CFG build. */
3604 #if 0
3605 note = find_reg_note (redirect_to->end, REG_BR_PROB, 0);
3606 if (note)
3607 XEXP (note, 0) = GEN_INT (BRANCH_EDGE (redirect_to)->probability);
3608 #endif
3610 /* Edit SRC1 to go to REDIRECT_TO at NEWPOS1. */
3612 /* Skip possible basic block header. */
3613 if (GET_CODE (newpos1) == CODE_LABEL)
3614 newpos1 = NEXT_INSN (newpos1);
3615 if (GET_CODE (newpos1) == NOTE)
3616 newpos1 = NEXT_INSN (newpos1);
3617 last = src1->end;
3619 /* Emit the jump insn. */
3620 label = block_label (redirect_to);
3621 src1->end = emit_jump_insn_before (gen_jump (label), newpos1);
3622 JUMP_LABEL (src1->end) = label;
3623 LABEL_NUSES (label)++;
3624 if (basic_block_for_insn)
3625 set_block_for_new_insns (src1->end, src1);
3627 /* Delete the now unreachable instructions. */
3628 flow_delete_insn_chain (newpos1, last);
3630 /* Make sure there is a barrier after the new jump. */
3631 last = next_nonnote_insn (src1->end);
3632 if (!last || GET_CODE (last) != BARRIER)
3633 emit_barrier_after (src1->end);
3635 /* Update CFG. */
3636 while (src1->succ)
3637 remove_edge (src1->succ);
3638 make_edge (NULL, src1, redirect_to, 0);
3640 return true;
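/* For example, if two predecessors of a join point both end with

	x = a + b;
	y = x * 2;
	goto L;

   flow_find_cross_jump reports the common tail, SRC2 is split just
   before its copy when necessary, and SRC1's copy is replaced by a
   jump into REDIRECT_TO, so the shared insns are emitted only once.  */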
3643 /* Search the predecessors of BB for common insn sequences. When found,
3644 share code between them by redirecting control flow. Return true if
3645 any changes made. */
3647 static bool
3648 try_crossjump_bb (mode, bb)
3649 int mode;
3650 basic_block bb;
3652 edge e, e2, nexte2, nexte, fallthru;
3653 bool changed;
3655 /* Nothing to do if there are not at least two incoming edges. */
3656 if (!bb->pred || !bb->pred->pred_next)
3657 return false;
3659 /* It is always cheapest to redirect a block that ends in a branch to
3660 a block that falls through into BB, as that adds no branches to the
3661 program. We'll try that combination first. */
3662 for (fallthru = bb->pred; fallthru; fallthru = fallthru->pred_next)
3663 if (fallthru->flags & EDGE_FALLTHRU)
3664 break;
3666 changed = false;
3667 for (e = bb->pred; e; e = nexte)
3669 nexte = e->pred_next;
3671 /* Elide complex edges now, as neither try_crossjump_to_edge
3672 nor outgoing_edges_match can handle them. */
3673 if (e->flags & EDGE_COMPLEX)
3674 continue;
3676 /* As noted above, first try with the fallthru predecessor. */
3677 if (fallthru)
3679 /* Don't combine the fallthru edge into anything else.
3680 If there is a match, we'll do it the other way around. */
3681 if (e == fallthru)
3682 continue;
3684 if (try_crossjump_to_edge (mode, e, fallthru))
3686 changed = true;
3687 nexte = bb->pred;
3688 continue;
3692 /* Non-obvious work limiting check: Recognize that we're going
3693 to call try_crossjump_bb on every basic block. So if we have
3694 two blocks with lots of outgoing edges (a switch) and they
3695 share lots of common destinations, then we would do the
3696 cross-jump check once for each common destination.
3698 Now, if the blocks actually are cross-jump candidates, then
3699 all of their destinations will be shared. Which means that
3700 we only need check them for cross-jump candidacy once. We
3701 can eliminate redundant checks of crossjump(A,B) by arbitrarily
3702 choosing to do the check from the block for which the edge
3703 in question is the first successor of A. */
3704 if (e->src->succ != e)
3705 continue;
3707 for (e2 = bb->pred; e2; e2 = nexte2)
3709 nexte2 = e2->pred_next;
3711 if (e2 == e)
3712 continue;
3714 /* We've already checked the fallthru edge above. */
3715 if (e2 == fallthru)
3716 continue;
3718 /* Again, neither try_crossjump_to_edge nor outgoing_edges_match
3719 can handle complex edges. */
3720 if (e2->flags & EDGE_COMPLEX)
3721 continue;
3723 /* The "first successor" check above only prevents multiple
3724 checks of crossjump(A,B). In order to prevent redundant
3725 checks of crossjump(B,A), require that A be the block
3726 with the lowest index. */
3727 /* ??? Perhaps better is lowest execution frequency. */
3728 if (e->src->index > e2->src->index)
3729 continue;
3731 if (try_crossjump_to_edge (mode, e, e2))
3733 changed = true;
3734 nexte = bb->pred;
3735 break;
3740 return changed;
3743 /* Do simple CFG optimizations - basic block merging, simplifying of jump
3744 instructions etc. Return nonzero if changes were made. */
3746 static bool
3747 try_optimize_cfg (mode)
3748 int mode;
3750 int i;
3751 bool changed_overall = false;
3752 bool changed;
3753 int iterations = 0;
3755 /* Attempt to merge blocks as made possible by edge removal. If a block
3756 has only one successor, and the successor has only one predecessor,
3757 they may be combined. */
3759 do
3761 changed = false;
3762 iterations++;
3764 if (rtl_dump_file)
3765 fprintf (rtl_dump_file, "\n\ntry_optimize_cfg iteration %i\n\n",
3766 iterations);
3768 for (i = 0; i < n_basic_blocks;)
3770 basic_block c, b = BASIC_BLOCK (i);
3771 edge s;
3772 bool changed_here = false;
3774 /* Delete trivially dead basic blocks. */
3775 while (b->pred == NULL)
3777 c = BASIC_BLOCK (b->index - 1);
3778 if (rtl_dump_file)
3779 fprintf (rtl_dump_file, "Deleting block %i.\n", b->index);
3780 flow_delete_block (b);
3781 changed = true;
3782 b = c;
3785 /* Remove code labels no longer used. Don't do this before
3786 CALL_PLACEHOLDER is removed, as some branches may be hidden
3787 within. */
3788 if (b->pred->pred_next == NULL
3789 && (b->pred->flags & EDGE_FALLTHRU)
3790 && !(b->pred->flags & EDGE_COMPLEX)
3791 && GET_CODE (b->head) == CODE_LABEL
3792 && (!(mode & CLEANUP_PRE_SIBCALL)
3793 || !tail_recursion_label_p (b->head))
3794 /* If previous block ends with condjump jumping to next BB,
3795 we can't delete the label. */
3796 && (b->pred->src == ENTRY_BLOCK_PTR
3797 || !reg_mentioned_p (b->head, b->pred->src->end)))
3799 rtx label = b->head;
3800 b->head = NEXT_INSN (b->head);
3801 flow_delete_insn_chain (label, label);
3802 if (rtl_dump_file)
3803 fprintf (rtl_dump_file, "Deleted label in block %i.\n",
3804 b->index);
3807 /* If we fall through an empty block, we can remove it. */
3808 if (b->pred->pred_next == NULL
3809 && (b->pred->flags & EDGE_FALLTHRU)
3810 && GET_CODE (b->head) != CODE_LABEL
3811 && forwarder_block_p (b)
3812 /* Note that forwarder_block_p true ensures that there
3813 is a successor for this block. */
3814 && (b->succ->flags & EDGE_FALLTHRU)
3815 && n_basic_blocks > 1)
3817 if (rtl_dump_file)
3818 fprintf (rtl_dump_file, "Deleting fallthru block %i.\n",
3819 b->index);
3820 c = BASIC_BLOCK (b->index ? b->index - 1 : 1);
3821 redirect_edge_succ (b->pred, b->succ->dest);
3822 flow_delete_block (b);
3823 changed = true;
3824 b = c;
3827 /* Merge blocks. Loop because chains of blocks might be
3828 combinable. */
3829 while ((s = b->succ) != NULL
3830 && s->succ_next == NULL
3831 && !(s->flags & EDGE_COMPLEX)
3832 && (c = s->dest) != EXIT_BLOCK_PTR
3833 && c->pred->pred_next == NULL
3834 /* If the jump insn has side effects,
3835 we can't kill the edge. */
3836 && (GET_CODE (b->end) != JUMP_INSN
3837 || onlyjump_p (b->end))
3838 && merge_blocks (s, b, c, mode))
3839 changed_here = true;
3841 /* Simplify branch over branch. */
3842 if ((mode & CLEANUP_EXPENSIVE) && try_simplify_condjump (b))
3843 changed_here = true;
3845 /* If B has a single outgoing edge, but uses a non-trivial jump
3846 instruction without side-effects, we can either delete the
3847 jump entirely, or replace it with a simple unconditional jump.
3848 Use redirect_edge_and_branch to do the dirty work. */
3849 if (b->succ
3850 && ! b->succ->succ_next
3851 && b->succ->dest != EXIT_BLOCK_PTR
3852 && onlyjump_p (b->end)
3853 && redirect_edge_and_branch (b->succ, b->succ->dest))
3854 changed_here = true;
3856 /* Simplify branch to branch. */
3857 if (try_forward_edges (b))
3858 changed_here = true;
3860 /* Look for shared code between blocks. */
3861 if ((mode & CLEANUP_CROSSJUMP)
3862 && try_crossjump_bb (mode, b))
3863 changed_here = true;
3865 /* Don't get confused by the index shift caused by deleting
3866 blocks. */
3867 if (!changed_here)
3868 i = b->index + 1;
3869 else
3870 changed = true;
3873 if ((mode & CLEANUP_CROSSJUMP)
3874 && try_crossjump_bb (mode, EXIT_BLOCK_PTR))
3875 changed = true;
3877 #ifdef ENABLE_CHECKING
3878 if (changed)
3879 verify_flow_info ();
3880 #endif
3882 changed_overall |= changed;
3884 while (changed);
3885 return changed_overall;
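/* An illustrative driver (hypothetical, never compiled): the mode bits
   compose, so a caller can run the cheap cleanups alone and allow block
   motion and cross-jumping only in a second, more expensive pass.  */
#if 0
static void
example_cleanup_twice ()
{
  try_optimize_cfg (0);
  try_optimize_cfg (CLEANUP_EXPENSIVE | CLEANUP_CROSSJUMP);
}
#endif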
3888 /* The given edge should potentially be a fallthru edge. If that is in
3889 fact true, delete the jump and barriers that are in the way. */
3891 void
3892 tidy_fallthru_edge (e, b, c)
3893 edge e;
3894 basic_block b, c;
3896 rtx q;
3898 /* ??? In a late-running flow pass, other folks may have deleted basic
3899 blocks by nopping out blocks, leaving multiple BARRIERs between here
3900 and the target label. They ought to be chastised and fixed.
3902 We can also wind up with a sequence of undeletable labels between
3903 one block and the next.
3905 So search through a sequence of barriers, labels, and notes for
3906 the head of block C and assert that we really do fall through. */
3908 if (next_real_insn (b->end) != next_real_insn (PREV_INSN (c->head)))
3909 return;
3911 /* Remove what will soon cease being the jump insn from the source block.
3912 If block B consisted only of this single jump, turn it into a deleted
3913 note. */
3914 q = b->end;
3915 if (GET_CODE (q) == JUMP_INSN
3916 && onlyjump_p (q)
3917 && (any_uncondjump_p (q)
3918 || (b->succ == e && e->succ_next == NULL)))
3920 #ifdef HAVE_cc0
3921 /* If this was a conditional jump, we need to also delete
3922 the insn that set cc0. */
3923 if (any_condjump_p (q) && sets_cc0_p (PREV_INSN (q)))
3924 q = PREV_INSN (q);
3925 #endif
3927 if (b->head == q)
3929 PUT_CODE (q, NOTE);
3930 NOTE_LINE_NUMBER (q) = NOTE_INSN_DELETED;
3931 NOTE_SOURCE_FILE (q) = 0;
3933 else
3935 q = PREV_INSN (q);
3937 /* We don't want a block to end on a line-number note since that has
3938 the potential of changing the code between -g and not -g. */
3939 while (GET_CODE (q) == NOTE && NOTE_LINE_NUMBER (q) >= 0)
3940 q = PREV_INSN (q);
3943 b->end = q;
3946 /* Selectively unlink the sequence. */
3947 if (q != PREV_INSN (c->head))
3948 flow_delete_insn_chain (NEXT_INSN (q), PREV_INSN (c->head));
3950 e->flags |= EDGE_FALLTHRU;
3953 /* Fix up edges that now fall through, or rather should now fall through
3954 but previously required a jump around now deleted blocks. Simplify
3955 the search by only examining blocks numerically adjacent, since this
3956 is how find_basic_blocks created them. */
3958 static void
3959 tidy_fallthru_edges ()
3961 int i;
3963 for (i = 1; i < n_basic_blocks; ++i)
3965 basic_block b = BASIC_BLOCK (i - 1);
3966 basic_block c = BASIC_BLOCK (i);
3967 edge s;
3969 /* We care about simple conditional or unconditional jumps with
3970 a single successor.
3972 If we had a conditional branch to the next instruction when
3973 find_basic_blocks was called, then there will only be one
3974 out edge for the block which ended with the conditional
3975 branch (since we do not create duplicate edges).
3977 Furthermore, the edge will be marked as a fallthru because we
3978 merge the flags for the duplicate edges. So we do not want to
3979 check that the edge is not a FALLTHRU edge. */
3980 if ((s = b->succ) != NULL
3981 && ! (s->flags & EDGE_COMPLEX)
3982 && s->succ_next == NULL
3983 && s->dest == c
3984 /* If the jump insn has side effects, we can't tidy the edge. */
3985 && (GET_CODE (b->end) != JUMP_INSN
3986 || onlyjump_p (b->end)))
3987 tidy_fallthru_edge (s, b, c);
3991 /* Perform data flow analysis.
3992 F is the first insn of the function; FLAGS is a set of PROP_* flags
3993 to be used in accumulating flow info. */
3995 void
3996 life_analysis (f, file, flags)
3997 rtx f;
3998 FILE *file;
3999 int flags;
4001 #ifdef ELIMINABLE_REGS
4002 register int i;
4003 static struct {int from, to; } eliminables[] = ELIMINABLE_REGS;
4004 #endif
4006 /* Record which registers will be eliminated. We use this in
4007 mark_used_regs. */
4009 CLEAR_HARD_REG_SET (elim_reg_set);
4011 #ifdef ELIMINABLE_REGS
4012 for (i = 0; i < (int) ARRAY_SIZE (eliminables); i++)
4013 SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
4014 #else
4015 SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
4016 #endif
4018 if (! optimize)
4019 flags &= ~(PROP_LOG_LINKS | PROP_AUTOINC);
4021 /* The post-reload life analysis has to have (on a global basis) the
4022 same registers live as were computed by reload itself; otherwise
4023 elimination offsets and such may be incorrect.
4025 Reload will mark some registers live even though they do not
4026 appear in the rtl.
4028 We don't want to create new auto-incs after reload, since they
4029 are unlikely to be useful and can cause problems with shared
4030 stack slots. */
4031 if (reload_completed)
4032 flags &= ~(PROP_REG_INFO | PROP_AUTOINC);
4034 /* We want alias analysis information for local dead store elimination. */
4035 if (optimize && (flags & PROP_SCAN_DEAD_CODE))
4036 init_alias_analysis ();
4038 /* Always remove no-op moves. Do this before other processing so
4039 that we don't have to keep re-scanning them. */
4040 delete_noop_moves (f);
4042 /* Some targets can emit simpler epilogues if they know that sp was
4043 not ever modified during the function. After reload, of course,
4044 we've already emitted the epilogue so there's no sense searching. */
4045 if (! reload_completed)
4046 notice_stack_pointer_modification (f);
4048 /* Allocate and zero out data structures that will record the
4049 data from lifetime analysis. */
4050 allocate_reg_life_data ();
4051 allocate_bb_life_data ();
4053 /* Find the set of registers live on function exit. */
4054 mark_regs_live_at_end (EXIT_BLOCK_PTR->global_live_at_start);
4056 /* "Update" life info from zero. It'd be nice to begin the
4057 relaxation with just the exit and noreturn blocks, but that set
4058 is not immediately handy. */
4060 if (flags & PROP_REG_INFO)
4061 memset (regs_ever_live, 0, sizeof (regs_ever_live));
4062 update_life_info (NULL, UPDATE_LIFE_GLOBAL, flags);
4064 /* Clean up. */
4065 if (optimize && (flags & PROP_SCAN_DEAD_CODE))
4066 end_alias_analysis ();
4068 if (file)
4069 dump_flow_info (file);
4071 free_basic_block_vars (1);
4073 #ifdef ENABLE_CHECKING
4075 rtx insn;
4077 /* Search for any REG_LABEL notes which reference deleted labels. */
4078 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4080 rtx inote = find_reg_note (insn, REG_LABEL, NULL_RTX);
4082 if (inote && GET_CODE (XEXP (inote, 0)) == NOTE)
4083 abort ();
4086 #endif
4089 /* A subroutine of verify_wide_reg, called through for_each_rtx.
4090 Search for REGNO. If found, abort if it is not wider than word_mode. */
4092 static int
4093 verify_wide_reg_1 (px, pregno)
4094 rtx *px;
4095 void *pregno;
4097 rtx x = *px;
4098 unsigned int regno = *(int *) pregno;
4100 if (GET_CODE (x) == REG && REGNO (x) == regno)
4102 if (GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD)
4103 abort ();
4104 return 1;
4106 return 0;
4109 /* A subroutine of verify_local_live_at_start. Search through insns
4110 between HEAD and END looking for register REGNO. */
4112 static void
4113 verify_wide_reg (regno, head, end)
4114 int regno;
4115 rtx head, end;
4117 while (1)
4119 if (INSN_P (head)
4120 && for_each_rtx (&PATTERN (head), verify_wide_reg_1, &regno))
4121 return;
4122 if (head == end)
4123 break;
4124 head = NEXT_INSN (head);
4127 /* We didn't find the register at all. Something's way screwy. */
4128 if (rtl_dump_file)
4129 fprintf (rtl_dump_file, "Aborting in verify_wide_reg; reg %d\n", regno);
4130 print_rtl_and_abort ();
4133 /* A subroutine of update_life_info. Verify that there are no untoward
4134 changes in live_at_start during a local update. */
4136 static void
4137 verify_local_live_at_start (new_live_at_start, bb)
4138 regset new_live_at_start;
4139 basic_block bb;
4141 if (reload_completed)
4143 /* After reload, there are no pseudos, nor subregs of multi-word
4144 registers. The regsets should exactly match. */
4145 if (! REG_SET_EQUAL_P (new_live_at_start, bb->global_live_at_start))
4147 if (rtl_dump_file)
4149 fprintf (rtl_dump_file,
4150 "live_at_start mismatch in bb %d, aborting\n",
4151 bb->index);
4152 debug_bitmap_file (rtl_dump_file, bb->global_live_at_start);
4153 debug_bitmap_file (rtl_dump_file, new_live_at_start);
4155 print_rtl_and_abort ();
4158 else
4160 int i;
4162 /* Find the set of changed registers. */
4163 XOR_REG_SET (new_live_at_start, bb->global_live_at_start);
4165 EXECUTE_IF_SET_IN_REG_SET (new_live_at_start, 0, i,
4167 /* No registers should die. */
4168 if (REGNO_REG_SET_P (bb->global_live_at_start, i))
4170 if (rtl_dump_file)
4171 fprintf (rtl_dump_file,
4172 "Register %d died unexpectedly in block %d\n", i,
4173 bb->index);
4174 print_rtl_and_abort ();
4177 /* Verify that the now-live register is wider than word_mode. */
4178 verify_wide_reg (i, bb->head, bb->end);
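/* [Illustrative aside, not part of the pass.]  The XOR above isolates
   exactly the bits that changed between the old and new live sets, and
   testing the old set classifies each change as a death or a birth.  A
   minimal sketch with plain unsigned words standing in for regsets --
   hypothetical names, kept under #if 0 so it stays out of the build:  */
#if 0
#include <stdio.h>

int
main (void)
{
  unsigned int old_live = 0x0f;	/* regs 0-3 live before     */
  unsigned int new_live = 0x17;	/* regs 0, 1, 2, 4 live now */
  unsigned int changed = old_live ^ new_live;
  int i;

  for (i = 0; i < 32; i++)
    if (changed & (1u << i))
      {
	if (old_live & (1u << i))
	  printf ("reg %d died\n", i);	/* the case rejected above */
	else
	  printf ("reg %d went live\n", i);
      }
  return 0;
}
#endif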
4183 /* Updates life information starting with the basic blocks set in BLOCKS.
4184 If BLOCKS is null, consider it to be the universal set.
4186 If EXTENT is UPDATE_LIFE_LOCAL, such as after splitting or peephole optimization,
4187 we are only expecting local modifications to basic blocks. If we find
4188 extra registers live at the beginning of a block, then we either killed
4189 useful data, or we have a broken split that wants data not provided.
4190 If we find registers removed from live_at_start, that means we have
4191 a broken peephole that is killing a register it shouldn't.
4193 ??? This is not true in one situation -- when a pre-reload splitter
4194 generates subregs of a multi-word pseudo, current life analysis will
4195 lose the kill. So we _can_ have a pseudo go live. How irritating.
4197 Including PROP_REG_INFO does not properly refresh regs_ever_live
4198 unless the caller resets it to zero. */
4200 void
4201 update_life_info (blocks, extent, prop_flags)
4202 sbitmap blocks;
4203 enum update_life_extent extent;
4204 int prop_flags;
4206 regset tmp;
4207 regset_head tmp_head;
4208 int i;
4210 tmp = INITIALIZE_REG_SET (tmp_head);
4212 /* For a global update, we go through the relaxation process again. */
4213 if (extent != UPDATE_LIFE_LOCAL)
4215 calculate_global_regs_live (blocks, blocks,
4216 prop_flags & PROP_SCAN_DEAD_CODE);
4218 /* If asked, remove notes from the blocks we'll update. */
4219 if (extent == UPDATE_LIFE_GLOBAL_RM_NOTES)
4220 count_or_remove_death_notes (blocks, 1);
4223 if (blocks)
4225 EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
4227 basic_block bb = BASIC_BLOCK (i);
4229 COPY_REG_SET (tmp, bb->global_live_at_end);
4230 propagate_block (bb, tmp, NULL, NULL, prop_flags);
4232 if (extent == UPDATE_LIFE_LOCAL)
4233 verify_local_live_at_start (tmp, bb);
4236 else
4238 for (i = n_basic_blocks - 1; i >= 0; --i)
4240 basic_block bb = BASIC_BLOCK (i);
4242 COPY_REG_SET (tmp, bb->global_live_at_end);
4243 propagate_block (bb, tmp, NULL, NULL, prop_flags);
4245 if (extent == UPDATE_LIFE_LOCAL)
4246 verify_local_live_at_start (tmp, bb);
4250 FREE_REG_SET (tmp);
4252 if (prop_flags & PROP_REG_INFO)
4254 /* The only pseudos that are live at the beginning of the function
4255 are those that were not set anywhere in the function. local-alloc
4256 doesn't know how to handle these correctly, so mark them as not
4257 local to any one basic block. */
4258 EXECUTE_IF_SET_IN_REG_SET (ENTRY_BLOCK_PTR->global_live_at_end,
4259 FIRST_PSEUDO_REGISTER, i,
4260 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
4262 /* We have a problem with any pseudoreg that lives across the setjmp.
4263 ANSI says that if a user variable does not change in value between
4264 the setjmp and the longjmp, then the longjmp preserves it. This
4265 includes longjmp from a place where the pseudo appears dead.
4266 (In principle, the value still exists if it is in scope.)
4267 If the pseudo goes in a hard reg, some other value may occupy
4268 that hard reg where this pseudo is dead, thus clobbering the pseudo.
4269 Conclusion: such a pseudo must not go in a hard reg. */
4270 EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
4271 FIRST_PSEUDO_REGISTER, i,
4273 if (regno_reg_rtx[i] != 0)
4275 REG_LIVE_LENGTH (i) = -1;
4276 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
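/* [Illustrative aside, not part of the pass.]  EXECUTE_IF_SET_IN_REG_SET
   visits only the set bits of a bitmap.  For a single word the same
   effect comes from peeling the lowest set bit, as in this sketch
   (hypothetical standalone code, kept under #if 0):  */
#if 0
#include <stdio.h>

int
main (void)
{
  unsigned int live = 0x2c;	/* bits 2, 3 and 5 set */

  while (live != 0)
    {
      unsigned int bit = live & -live;	/* isolate lowest set bit */
      int regno = 0;

      while ((bit >> regno) != 1)
	regno++;
      printf ("visiting reg %d\n", regno);
      live &= live - 1;			/* clear lowest set bit */
    }
  return 0;
}
#endif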
4282 /* Free the variables allocated by find_basic_blocks.
4284 KEEP_HEAD_END_P is non-zero if basic_block_info is not to be freed. */
4286 void
4287 free_basic_block_vars (keep_head_end_p)
4288 int keep_head_end_p;
4290 if (basic_block_for_insn)
4292 VARRAY_FREE (basic_block_for_insn);
4293 basic_block_for_insn = NULL;
4296 if (! keep_head_end_p)
4298 if (basic_block_info)
4300 clear_edges ();
4301 VARRAY_FREE (basic_block_info);
4303 n_basic_blocks = 0;
4305 ENTRY_BLOCK_PTR->aux = NULL;
4306 ENTRY_BLOCK_PTR->global_live_at_end = NULL;
4307 EXIT_BLOCK_PTR->aux = NULL;
4308 EXIT_BLOCK_PTR->global_live_at_start = NULL;
4312 /* Delete any insns that copy a register to itself. */
4314 void
4315 delete_noop_moves (f)
4316 rtx f ATTRIBUTE_UNUSED;
4318 int i;
4319 rtx insn, next;
4320 basic_block bb;
4322 for (i = 0; i < n_basic_blocks; i++)
4324 bb = BASIC_BLOCK (i);
4325 for (insn = bb->head; insn != NEXT_INSN (bb->end); insn = next)
4327 next = NEXT_INSN (insn);
4328 if (INSN_P (insn) && noop_move_p (insn))
4330 if (insn == bb->end)
4331 bb->end = PREV_INSN (insn);
4332 flow_delete_insn (insn);
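/* [Illustrative aside, not part of the pass.]  Stripped of rtl, deleting
   no-op moves is just filtering out copies whose source and destination
   coincide.  A sketch on a flat array of hypothetical moves, kept under
   #if 0:  */
#if 0
#include <stdio.h>

struct move { int dest; int src; };

/* Compact MOVES in place, dropping self-copies; return the new count.  */
static int
drop_noop_moves (struct move *moves, int n)
{
  int i, j = 0;

  for (i = 0; i < n; i++)
    if (moves[i].dest != moves[i].src)
      moves[j++] = moves[i];
  return j;
}

int
main (void)
{
  struct move m[] = { {1, 2}, {3, 3}, {4, 5} };

  printf ("%d moves remain\n", drop_noop_moves (m, 3));	/* prints 2 */
  return 0;
}
#endif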
4338 /* Determine if the stack pointer is constant over the life of the function.
4339 Only useful before prologues have been emitted. */
4341 static void
4342 notice_stack_pointer_modification_1 (x, pat, data)
4343 rtx x;
4344 rtx pat ATTRIBUTE_UNUSED;
4345 void *data ATTRIBUTE_UNUSED;
4347 if (x == stack_pointer_rtx
4348 /* The stack pointer is only modified indirectly as the result
4349 of a push until later in flow. See the comments in rtl.texi
4350 regarding Embedded Side-Effects on Addresses. */
4351 || (GET_CODE (x) == MEM
4352 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == 'a'
4353 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx))
4354 current_function_sp_is_unchanging = 0;
4357 static void
4358 notice_stack_pointer_modification (f)
4359 rtx f;
4361 rtx insn;
4363 /* Assume that the stack pointer is unchanging if alloca hasn't
4364 been used. */
4365 current_function_sp_is_unchanging = !current_function_calls_alloca;
4366 if (! current_function_sp_is_unchanging)
4367 return;
4369 for (insn = f; insn; insn = NEXT_INSN (insn))
4371 if (INSN_P (insn))
4373 /* Check if insn modifies the stack pointer. */
4374 note_stores (PATTERN (insn), notice_stack_pointer_modification_1,
4375 NULL);
4376 if (! current_function_sp_is_unchanging)
4377 return;
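/* [Illustrative aside, not part of the pass.]  notice_stack_pointer_modification
   leans on note_stores calling a callback for every store with an opaque
   data pointer.  The callback/void* protocol itself looks like this
   (all names hypothetical, kept under #if 0):  */
#if 0
#include <stdio.h>

typedef void (*store_fn) (int dest_reg, void *data);

static void
for_each_store (const int *dests, int n, store_fn fn, void *data)
{
  int i;

  for (i = 0; i < n; i++)
    fn (dests[i], data);
}

/* Clear *(int *) DATA if the store hits register 7, our fake sp.  */
static void
watch_sp (int dest_reg, void *data)
{
  if (dest_reg == 7)
    *(int *) data = 0;
}

int
main (void)
{
  int dests[] = { 3, 7, 12 };
  int sp_is_unchanging = 1;

  for_each_store (dests, 3, watch_sp, &sp_is_unchanging);
  printf ("sp unchanging: %d\n", sp_is_unchanging);	/* prints 0 */
  return 0;
}
#endif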
4382 /* Mark a register in SET. Hard registers in large modes get all
4383 of their component registers set as well. */
4385 static void
4386 mark_reg (reg, xset)
4387 rtx reg;
4388 void *xset;
4390 regset set = (regset) xset;
4391 int regno = REGNO (reg);
4393 if (GET_MODE (reg) == BLKmode)
4394 abort ();
4396 SET_REGNO_REG_SET (set, regno);
4397 if (regno < FIRST_PSEUDO_REGISTER)
4399 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
4400 while (--n > 0)
4401 SET_REGNO_REG_SET (set, regno + n);
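/* [Illustrative aside, not part of the pass.]  Marking a multi-word hard
   register means setting NREGS consecutive bits, mirroring the loop in
   mark_reg.  A sketch with a single word as the register set
   (hypothetical code, kept under #if 0):  */
#if 0
#include <stdio.h>

static void
mark_hard_reg (unsigned int *set, int regno, int nregs)
{
  while (nregs-- > 0)
    *set |= 1u << (regno + nregs);
}

int
main (void)
{
  unsigned int set = 0;

  mark_hard_reg (&set, 4, 2);	/* a two-word value in regs 4 and 5 */
  printf ("set = %#x\n", set);	/* prints 0x30 */
  return 0;
}
#endif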
4405 /* Mark those regs which are needed at the end of the function as live
4406 at the end of the last basic block. */
4408 static void
4409 mark_regs_live_at_end (set)
4410 regset set;
4412 unsigned int i;
4414 /* If exiting needs the right stack value, consider the stack pointer
4415 live at the end of the function. */
4416 if ((HAVE_epilogue && reload_completed)
4417 || ! EXIT_IGNORE_STACK
4418 || (! FRAME_POINTER_REQUIRED
4419 && ! current_function_calls_alloca
4420 && flag_omit_frame_pointer)
4421 || current_function_sp_is_unchanging)
4423 SET_REGNO_REG_SET (set, STACK_POINTER_REGNUM);
4426 /* Mark the frame pointer if needed at the end of the function. If
4427 we end up eliminating it, it will be removed from the live list
4428 of each basic block by reload. */
4430 if (! reload_completed || frame_pointer_needed)
4432 SET_REGNO_REG_SET (set, FRAME_POINTER_REGNUM);
4433 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4434 /* If they are different, also mark the hard frame pointer as live. */
4435 if (! LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
4436 SET_REGNO_REG_SET (set, HARD_FRAME_POINTER_REGNUM);
4437 #endif
4440 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
4441 /* Many architectures have a GP register even without flag_pic.
4442 Assume the pic register is not in use, or will be handled by
4443 other means, if it is not fixed. */
4444 if (PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
4445 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
4446 SET_REGNO_REG_SET (set, PIC_OFFSET_TABLE_REGNUM);
4447 #endif
4449 /* Mark all global registers, and all registers used by the epilogue
4450 as being live at the end of the function since they may be
4451 referenced by our caller. */
4452 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4453 if (global_regs[i] || EPILOGUE_USES (i))
4454 SET_REGNO_REG_SET (set, i);
4456 if (HAVE_epilogue && reload_completed)
4458 /* Mark all call-saved registers that we actually used. */
4459 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4460 if (regs_ever_live[i] && ! call_used_regs[i] && ! LOCAL_REGNO (i))
4461 SET_REGNO_REG_SET (set, i);
4464 #ifdef EH_RETURN_DATA_REGNO
4465 /* Mark the registers that will contain data for the handler. */
4466 if (reload_completed && current_function_calls_eh_return)
4467 for (i = 0; ; ++i)
4469 unsigned regno = EH_RETURN_DATA_REGNO(i);
4470 if (regno == INVALID_REGNUM)
4471 break;
4472 SET_REGNO_REG_SET (set, regno);
4474 #endif
4475 #ifdef EH_RETURN_STACKADJ_RTX
4476 if ((! HAVE_epilogue || ! reload_completed)
4477 && current_function_calls_eh_return)
4479 rtx tmp = EH_RETURN_STACKADJ_RTX;
4480 if (tmp && REG_P (tmp))
4481 mark_reg (tmp, set);
4483 #endif
4484 #ifdef EH_RETURN_HANDLER_RTX
4485 if ((! HAVE_epilogue || ! reload_completed)
4486 && current_function_calls_eh_return)
4488 rtx tmp = EH_RETURN_HANDLER_RTX;
4489 if (tmp && REG_P (tmp))
4490 mark_reg (tmp, set);
4492 #endif
4494 /* Mark function return value. */
4495 diddle_return_value (mark_reg, set);
4498 /* Callback function for for_each_successor_phi. DATA is a regset.
4499 Sets SRC_REGNO, the regno of the phi alternative for phi node
4500 INSN, in the regset. */
4502 static int
4503 set_phi_alternative_reg (insn, dest_regno, src_regno, data)
4504 rtx insn ATTRIBUTE_UNUSED;
4505 int dest_regno ATTRIBUTE_UNUSED;
4506 int src_regno;
4507 void *data;
4509 regset live = (regset) data;
4510 SET_REGNO_REG_SET (live, src_regno);
4511 return 0;
4514 /* Propagate global life info around the graph of basic blocks. Begin
4515 considering blocks with their corresponding bit set in BLOCKS_IN.
4516 If BLOCKS_IN is null, consider it the universal set.
4518 BLOCKS_OUT is set for every block that was changed. */
4520 static void
4521 calculate_global_regs_live (blocks_in, blocks_out, flags)
4522 sbitmap blocks_in, blocks_out;
4523 int flags;
4525 basic_block *queue, *qhead, *qtail, *qend;
4526 regset tmp, new_live_at_end, call_used;
4527 regset_head tmp_head, call_used_head;
4528 regset_head new_live_at_end_head;
4529 int i;
4531 tmp = INITIALIZE_REG_SET (tmp_head);
4532 new_live_at_end = INITIALIZE_REG_SET (new_live_at_end_head);
4533 call_used = INITIALIZE_REG_SET (call_used_head);
4535 /* Inconveniently, this is only readily available in hard reg set form. */
4536 for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
4537 if (call_used_regs[i])
4538 SET_REGNO_REG_SET (call_used, i);
4540 /* Create a worklist. Allocate an extra slot for ENTRY_BLOCK, and one
4541 because the `head == tail' style test for an empty queue doesn't
4542 work with a full queue. */
4543 queue = (basic_block *) xmalloc ((n_basic_blocks + 2) * sizeof (*queue));
4544 qtail = queue;
4545 qhead = qend = queue + n_basic_blocks + 2;
4547 /* Queue the blocks set in the initial mask. Do this in reverse block
4548 number order so that the first round is more likely to do useful
4549 work. We use AUX non-null to flag that the block is queued. */
4550 if (blocks_in)
4552 /* Clear out the garbage that might be hanging out in bb->aux. */
4553 for (i = n_basic_blocks - 1; i >= 0; --i)
4554 BASIC_BLOCK (i)->aux = NULL;
4556 EXECUTE_IF_SET_IN_SBITMAP (blocks_in, 0, i,
4558 basic_block bb = BASIC_BLOCK (i);
4559 *--qhead = bb;
4560 bb->aux = bb;
4563 else
4565 for (i = 0; i < n_basic_blocks; ++i)
4567 basic_block bb = BASIC_BLOCK (i);
4568 *--qhead = bb;
4569 bb->aux = bb;
4573 if (blocks_out)
4574 sbitmap_zero (blocks_out);
4576 /* We work through the queue until there are no more blocks. What
4577 is live at the end of this block is precisely the union of what
4578 is live at the beginning of all its successors. So, we set its
4579 GLOBAL_LIVE_AT_END field based on the GLOBAL_LIVE_AT_START field
4580 for its successors. Then, we compute GLOBAL_LIVE_AT_START for
4581 this block by walking through the instructions in this block in
4582 reverse order and updating as we go. If that changed
4583 GLOBAL_LIVE_AT_START, we add the predecessors of the block to the
4584 queue; they will now need to recalculate GLOBAL_LIVE_AT_END.
4586 We are guaranteed to terminate, because GLOBAL_LIVE_AT_START
4587 never shrinks. If a register appears in GLOBAL_LIVE_AT_START, it
4588 must either be live at the end of the block, or used within the
4589 block. In the latter case, it will certainly never disappear
4590 from GLOBAL_LIVE_AT_START. In the former case, the register
4591 could go away only if it disappeared from GLOBAL_LIVE_AT_START
4592 for one of the successor blocks. By induction, that cannot
4593 occur. */
4594 while (qhead != qtail)
4596 int rescan, changed;
4597 basic_block bb;
4598 edge e;
4600 bb = *qhead++;
4601 if (qhead == qend)
4602 qhead = queue;
4603 bb->aux = NULL;
4605 /* Begin by propagating live_at_start from the successor blocks. */
4606 CLEAR_REG_SET (new_live_at_end);
4607 for (e = bb->succ; e; e = e->succ_next)
4609 basic_block sb = e->dest;
4611 /* Call-clobbered registers die across exception and call edges. */
4612 /* ??? Abnormal call edges ignored for the moment, as this gets
4613 confused by sibling call edges, which crashes reg-stack. */
4614 if (e->flags & EDGE_EH)
4616 bitmap_operation (tmp, sb->global_live_at_start,
4617 call_used, BITMAP_AND_COMPL);
4618 IOR_REG_SET (new_live_at_end, tmp);
4620 else
4621 IOR_REG_SET (new_live_at_end, sb->global_live_at_start);
4624 /* The all-important stack pointer must always be live. */
4625 SET_REGNO_REG_SET (new_live_at_end, STACK_POINTER_REGNUM);
4627 /* Before reload, there are a few registers that must be forced
4628 live everywhere -- which might not already be the case for
4629 blocks within infinite loops. */
4630 if (! reload_completed)
4632 /* Any reference to any pseudo before reload is a potential
4633 reference of the frame pointer. */
4634 SET_REGNO_REG_SET (new_live_at_end, FRAME_POINTER_REGNUM);
4636 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4637 /* Pseudos with argument area equivalences may require
4638 reloading via the argument pointer. */
4639 if (fixed_regs[ARG_POINTER_REGNUM])
4640 SET_REGNO_REG_SET (new_live_at_end, ARG_POINTER_REGNUM);
4641 #endif
4643 /* Any constant, or pseudo with constant equivalences, may
4644 require reloading from memory using the pic register. */
4645 if (PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
4646 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
4647 SET_REGNO_REG_SET (new_live_at_end, PIC_OFFSET_TABLE_REGNUM);
4650 /* Regs used in phi nodes are not included in
4651 global_live_at_start, since they are live only along a
4652 particular edge. Set those regs that are live because of a
4653 phi node alternative corresponding to this particular block. */
4654 if (in_ssa_form)
4655 for_each_successor_phi (bb, &set_phi_alternative_reg,
4656 new_live_at_end);
4658 if (bb == ENTRY_BLOCK_PTR)
4660 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
4661 continue;
4664 /* On our first pass through this block, we'll go ahead and continue.
4665 Recognize the first pass by local_set being NULL. On subsequent passes, we
4666 get to skip out early if live_at_end wouldn't have changed. */
4668 if (bb->local_set == NULL)
4670 bb->local_set = OBSTACK_ALLOC_REG_SET (&flow_obstack);
4671 bb->cond_local_set = OBSTACK_ALLOC_REG_SET (&flow_obstack);
4672 rescan = 1;
4674 else
4676 /* If any bits were removed from live_at_end, we'll have to
4677 rescan the block. This wouldn't be necessary if we had
4678 precalculated local_live; however, with PROP_SCAN_DEAD_CODE
4679 local_live is really dependent on live_at_end. */
4680 CLEAR_REG_SET (tmp);
4681 rescan = bitmap_operation (tmp, bb->global_live_at_end,
4682 new_live_at_end, BITMAP_AND_COMPL);
4684 if (! rescan)
4686 /* If any of the registers in the new live_at_end set are
4687 conditionally set in this basic block, we must rescan.
4688 This is because conditional lifetimes at the end of the
4689 block do not just take the live_at_end set into account,
4690 but also the liveness at the start of each successor
4691 block. We can miss changes in those sets if we only
4692 compare the new live_at_end against the previous one. */
4693 CLEAR_REG_SET (tmp);
4694 rescan = bitmap_operation (tmp, new_live_at_end,
4695 bb->cond_local_set, BITMAP_AND);
4698 if (! rescan)
4700 /* Find the set of changed bits. Take this opportunity
4701 to notice that this set is empty and early out. */
4702 CLEAR_REG_SET (tmp);
4703 changed = bitmap_operation (tmp, bb->global_live_at_end,
4704 new_live_at_end, BITMAP_XOR);
4705 if (! changed)
4706 continue;
4708 /* If any of the changed bits overlap with local_set,
4709 we'll have to rescan the block. Detect overlap by
4710 the AND with ~local_set turning off bits. */
4711 rescan = bitmap_operation (tmp, tmp, bb->local_set,
4712 BITMAP_AND_COMPL);
4716 /* Let our caller know that BB changed enough to require its
4717 death notes updated. */
4718 if (blocks_out)
4719 SET_BIT (blocks_out, bb->index);
4721 if (! rescan)
4723 /* Add to live_at_start the set of all registers in
4724 new_live_at_end that aren't in the old live_at_end. */
4726 bitmap_operation (tmp, new_live_at_end, bb->global_live_at_end,
4727 BITMAP_AND_COMPL);
4728 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
4730 changed = bitmap_operation (bb->global_live_at_start,
4731 bb->global_live_at_start,
4732 tmp, BITMAP_IOR);
4733 if (! changed)
4734 continue;
4736 else
4738 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
4740 /* Rescan the block insn by insn to turn (a copy of) live_at_end
4741 into live_at_start. */
4742 propagate_block (bb, new_live_at_end, bb->local_set,
4743 bb->cond_local_set, flags);
4745 /* If live_at_start didn't change, no need to go further. */
4746 if (REG_SET_EQUAL_P (bb->global_live_at_start, new_live_at_end))
4747 continue;
4749 COPY_REG_SET (bb->global_live_at_start, new_live_at_end);
4752 /* Queue all predecessors of BB so that we may re-examine
4753 their live_at_end. */
4754 for (e = bb->pred; e; e = e->pred_next)
4756 basic_block pb = e->src;
4757 if (pb->aux == NULL)
4759 *qtail++ = pb;
4760 if (qtail == qend)
4761 qtail = queue;
4762 pb->aux = pb;
4767 FREE_REG_SET (tmp);
4768 FREE_REG_SET (new_live_at_end);
4769 FREE_REG_SET (call_used);
4771 if (blocks_out)
4773 EXECUTE_IF_SET_IN_SBITMAP (blocks_out, 0, i,
4775 basic_block bb = BASIC_BLOCK (i);
4776 FREE_REG_SET (bb->local_set);
4777 FREE_REG_SET (bb->cond_local_set);
4780 else
4782 for (i = n_basic_blocks - 1; i >= 0; --i)
4784 basic_block bb = BASIC_BLOCK (i);
4785 FREE_REG_SET (bb->local_set);
4786 FREE_REG_SET (bb->cond_local_set);
4790 free (queue);
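/* [Illustrative aside, not part of the pass.]  The function above drives
   a circular worklist with a spare slot so that `qhead == qtail' can
   unambiguously mean empty.  The same queue discipline, used here to
   flood reachability through a tiny made-up graph (hypothetical
   standalone code, kept under #if 0):  */
#if 0
#include <stdio.h>

#define NBLOCKS 4

int
main (void)
{
  /* succ[b] lists the successors of block b, terminated by -1.  */
  static const int succ[NBLOCKS][3]
    = { {1, 2, -1}, {3, -1, -1}, {3, -1, -1}, {-1, -1, -1} };
  int queue[NBLOCKS + 1];	/* one spare slot for the empty test */
  int qhead = 0, qtail = 0;
  int reachable[NBLOCKS] = { 1, 0, 0, 0 };
  int queued[NBLOCKS] = { 0 };
  int i;

  queue[qtail++] = 0;		/* seed the worklist with block 0 */
  queued[0] = 1;

  while (qhead != qtail)
    {
      int b = queue[qhead++];

      if (qhead == NBLOCKS + 1)
	qhead = 0;
      queued[b] = 0;

      for (i = 0; succ[b][i] >= 0; i++)
	if (! reachable[succ[b][i]])
	  {
	    reachable[succ[b][i]] = 1;
	    if (! queued[succ[b][i]])
	      {
		queue[qtail++] = succ[b][i];
		if (qtail == NBLOCKS + 1)
		  qtail = 0;
		queued[succ[b][i]] = 1;
	      }
	  }
    }

  for (i = 0; i < NBLOCKS; i++)
    printf ("block %d reachable: %d\n", i, reachable[i]);
  return 0;
}
#endif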
4793 /* Subroutines of life analysis. */
4795 /* Allocate the permanent data structures that represent the results
4796 of life analysis. Not static since used also for stupid life analysis. */
4798 void
4799 allocate_bb_life_data ()
4801 register int i;
4803 for (i = 0; i < n_basic_blocks; i++)
4805 basic_block bb = BASIC_BLOCK (i);
4807 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
4808 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
4811 ENTRY_BLOCK_PTR->global_live_at_end
4812 = OBSTACK_ALLOC_REG_SET (&flow_obstack);
4813 EXIT_BLOCK_PTR->global_live_at_start
4814 = OBSTACK_ALLOC_REG_SET (&flow_obstack);
4816 regs_live_at_setjmp = OBSTACK_ALLOC_REG_SET (&flow_obstack);
4819 void
4820 allocate_reg_life_data ()
4822 int i;
4824 max_regno = max_reg_num ();
4826 /* Recalculate the register space, in case it has grown. Old style
4827 vector oriented regsets would set regset_{size,bytes} here also. */
4828 allocate_reg_info (max_regno, FALSE, FALSE);
4830 /* Reset all the data we'll collect in propagate_block and its
4831 subroutines. */
4832 for (i = 0; i < max_regno; i++)
4834 REG_N_SETS (i) = 0;
4835 REG_N_REFS (i) = 0;
4836 REG_N_DEATHS (i) = 0;
4837 REG_N_CALLS_CROSSED (i) = 0;
4838 REG_LIVE_LENGTH (i) = 0;
4839 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
4843 /* Delete dead instructions for propagate_block. */
4845 static void
4846 propagate_block_delete_insn (bb, insn)
4847 basic_block bb;
4848 rtx insn;
4850 rtx inote = find_reg_note (insn, REG_LABEL, NULL_RTX);
4852 /* If the insn referred to a label, and that label was attached to
4853 an ADDR_VEC, it's safe to delete the ADDR_VEC. In fact, it's
4854 pretty much mandatory to delete it, because the ADDR_VEC may be
4855 referencing labels that no longer exist.
4857 INSN may reference a deleted label, particularly when a jump
4858 table has been optimized into a direct jump. There's no
4859 real good way to fix up the reference to the deleted label
4860 when the label is deleted, so we just allow it here.
4862 After dead code elimination is complete, we do search for
4863 any REG_LABEL notes which reference deleted labels as a
4864 sanity check. */
4866 if (inote && GET_CODE (XEXP (inote, 0)) == CODE_LABEL)
4868 rtx label = XEXP (inote, 0);
4869 rtx next;
4871 /* The label may be forced if it has been put in the constant
4872 pool. If that is the only use we must discard the table
4873 jump following it, but not the label itself. */
4874 if (LABEL_NUSES (label) == 1 + LABEL_PRESERVE_P (label)
4875 && (next = next_nonnote_insn (label)) != NULL
4876 && GET_CODE (next) == JUMP_INSN
4877 && (GET_CODE (PATTERN (next)) == ADDR_VEC
4878 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
4880 rtx pat = PATTERN (next);
4881 int diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
4882 int len = XVECLEN (pat, diff_vec_p);
4883 int i;
4885 for (i = 0; i < len; i++)
4886 LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0))--;
4888 flow_delete_insn (next);
4892 if (bb->end == insn)
4893 bb->end = PREV_INSN (insn);
4894 flow_delete_insn (insn);
4897 /* Delete dead libcalls for propagate_block. Return the insn
4898 before the libcall. */
4900 static rtx
4901 propagate_block_delete_libcall (bb, insn, note)
4902 basic_block bb;
4903 rtx insn, note;
4905 rtx first = XEXP (note, 0);
4906 rtx before = PREV_INSN (first);
4908 if (insn == bb->end)
4909 bb->end = before;
4911 flow_delete_insn_chain (first, insn);
4912 return before;
4915 /* Update the life-status of regs for one insn. Return the previous insn. */
4917 rtx
4918 propagate_one_insn (pbi, insn)
4919 struct propagate_block_info *pbi;
4920 rtx insn;
4922 rtx prev = PREV_INSN (insn);
4923 int flags = pbi->flags;
4924 int insn_is_dead = 0;
4925 int libcall_is_dead = 0;
4926 rtx note;
4927 int i;
4929 if (! INSN_P (insn))
4930 return prev;
4932 note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
4933 if (flags & PROP_SCAN_DEAD_CODE)
4935 insn_is_dead = insn_dead_p (pbi, PATTERN (insn), 0, REG_NOTES (insn));
4936 libcall_is_dead = (insn_is_dead && note != 0
4937 && libcall_dead_p (pbi, note, insn));
4940 /* If an instruction consists of just dead store(s) on the final pass,
4941 delete it. */
4942 if ((flags & PROP_KILL_DEAD_CODE) && insn_is_dead)
4944 /* If we're trying to delete a prologue or epilogue instruction
4945 that isn't flagged as possibly being dead, something is wrong.
4946 But if we are keeping the stack pointer depressed, we might well
4947 be deleting insns that are used to compute the amount to update
4948 it by, so they are fine. */
4949 if (reload_completed
4950 && !(TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
4951 && (TYPE_RETURNS_STACK_DEPRESSED
4952 (TREE_TYPE (current_function_decl))))
4953 && (((HAVE_epilogue || HAVE_prologue)
4954 && prologue_epilogue_contains (insn))
4955 || (HAVE_sibcall_epilogue
4956 && sibcall_epilogue_contains (insn)))
4957 && find_reg_note (insn, REG_MAYBE_DEAD, NULL_RTX) == 0)
4958 abort ();
4960 /* Record sets. Do this even for dead instructions, since they
4961 would have killed the values if they hadn't been deleted. */
4962 mark_set_regs (pbi, PATTERN (insn), insn);
4964 /* CC0 is now known to be dead. Either this insn used it,
4965 in which case it doesn't anymore, or clobbered it,
4966 so the next insn can't use it. */
4967 pbi->cc0_live = 0;
4969 if (libcall_is_dead)
4970 prev = propagate_block_delete_libcall (pbi->bb, insn, note);
4971 else
4972 propagate_block_delete_insn (pbi->bb, insn);
4974 return prev;
4977 /* See if this is an increment or decrement that can be merged into
4978 a following memory address. */
4979 #ifdef AUTO_INC_DEC
4981 register rtx x = single_set (insn);
4983 /* Does this instruction increment or decrement a register? */
4984 if ((flags & PROP_AUTOINC)
4985 && x != 0
4986 && GET_CODE (SET_DEST (x)) == REG
4987 && (GET_CODE (SET_SRC (x)) == PLUS
4988 || GET_CODE (SET_SRC (x)) == MINUS)
4989 && XEXP (SET_SRC (x), 0) == SET_DEST (x)
4990 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
4991 /* Ok, look for a following memory ref we can combine with.
4992 If one is found, change the memory ref to a PRE_INC
4993 or PRE_DEC, cancel this insn, and return 1.
4994 Return 0 if nothing has been done. */
4995 && try_pre_increment_1 (pbi, insn))
4996 return prev;
4998 #endif /* AUTO_INC_DEC */
5000 CLEAR_REG_SET (pbi->new_set);
5002 /* If this is not the final pass, and this insn is copying the value of
5003 a library call and it's dead, don't scan the insns that perform the
5004 library call, so that the call's arguments are not marked live. */
5005 if (libcall_is_dead)
5007 /* Record the death of the dest reg. */
5008 mark_set_regs (pbi, PATTERN (insn), insn);
5010 insn = XEXP (note, 0);
5011 return PREV_INSN (insn);
5013 else if (GET_CODE (PATTERN (insn)) == SET
5014 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
5015 && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
5016 && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx
5017 && GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == CONST_INT)
5018 /* We have an insn to pop a constant amount off the stack.
5019 (Such insns use PLUS regardless of the direction of the stack,
5020 and any insn to adjust the stack by a constant is always a pop.)
5021 These insns, if not dead stores, have no effect on life. */
5023 else
5025 /* Any regs live at the time of a call instruction must not go
5026 in a register clobbered by calls. Find all regs now live and
5027 record this for them. */
5029 if (GET_CODE (insn) == CALL_INSN && (flags & PROP_REG_INFO))
5030 EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i,
5031 { REG_N_CALLS_CROSSED (i)++; });
5033 /* Record sets. Do this even for dead instructions, since they
5034 would have killed the values if they hadn't been deleted. */
5035 mark_set_regs (pbi, PATTERN (insn), insn);
5037 if (GET_CODE (insn) == CALL_INSN)
5039 register int i;
5040 rtx note, cond;
5042 cond = NULL_RTX;
5043 if (GET_CODE (PATTERN (insn)) == COND_EXEC)
5044 cond = COND_EXEC_TEST (PATTERN (insn));
5046 /* Non-constant calls clobber memory. */
5047 if (! CONST_CALL_P (insn))
5049 free_EXPR_LIST_list (&pbi->mem_set_list);
5050 pbi->mem_set_list_len = 0;
5053 /* There may be extra registers to be clobbered. */
5054 for (note = CALL_INSN_FUNCTION_USAGE (insn);
5055 note;
5056 note = XEXP (note, 1))
5057 if (GET_CODE (XEXP (note, 0)) == CLOBBER)
5058 mark_set_1 (pbi, CLOBBER, XEXP (XEXP (note, 0), 0),
5059 cond, insn, pbi->flags);
5061 /* Calls change all call-used and global registers. */
5062 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5063 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
5065 /* We do not want REG_UNUSED notes for these registers. */
5066 mark_set_1 (pbi, CLOBBER, gen_rtx_REG (reg_raw_mode[i], i),
5067 cond, insn,
5068 pbi->flags & ~(PROP_DEATH_NOTES | PROP_REG_INFO));
5072 /* If an insn doesn't use CC0, it becomes dead since we assume
5073 that every insn clobbers it. So show it dead here;
5074 mark_used_regs will set it live if it is referenced. */
5075 pbi->cc0_live = 0;
5077 /* Record uses. */
5078 if (! insn_is_dead)
5079 mark_used_regs (pbi, PATTERN (insn), NULL_RTX, insn);
5081 /* Sometimes we may have inserted something before INSN (such as a move)
5082 when we make an auto-inc. So ensure we will scan those insns. */
5083 #ifdef AUTO_INC_DEC
5084 prev = PREV_INSN (insn);
5085 #endif
5087 if (! insn_is_dead && GET_CODE (insn) == CALL_INSN)
5089 register int i;
5090 rtx note, cond;
5092 cond = NULL_RTX;
5093 if (GET_CODE (PATTERN (insn)) == COND_EXEC)
5094 cond = COND_EXEC_TEST (PATTERN (insn));
5096 /* Calls use their arguments. */
5097 for (note = CALL_INSN_FUNCTION_USAGE (insn);
5098 note;
5099 note = XEXP (note, 1))
5100 if (GET_CODE (XEXP (note, 0)) == USE)
5101 mark_used_regs (pbi, XEXP (XEXP (note, 0), 0),
5102 cond, insn);
5104 /* The stack ptr is used (honorarily) by a CALL insn. */
5105 SET_REGNO_REG_SET (pbi->reg_live, STACK_POINTER_REGNUM);
5107 /* Calls may also reference any of the global registers,
5108 so they are made live. */
5109 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5110 if (global_regs[i])
5111 mark_used_reg (pbi, gen_rtx_REG (reg_raw_mode[i], i),
5112 cond, insn);
5116 /* On final pass, update counts of how many insns in which each reg
5117 is live. */
5118 if (flags & PROP_REG_INFO)
5119 EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i,
5120 { REG_LIVE_LENGTH (i)++; });
5122 return prev;
5125 /* Initialize a propagate_block_info struct for public consumption.
5126 Note that the structure itself is opaque to this file, but that
5127 the user can use the regsets provided here. */
5129 struct propagate_block_info *
5130 init_propagate_block_info (bb, live, local_set, cond_local_set, flags)
5131 basic_block bb;
5132 regset live, local_set, cond_local_set;
5133 int flags;
5135 struct propagate_block_info *pbi = xmalloc (sizeof (*pbi));
5137 pbi->bb = bb;
5138 pbi->reg_live = live;
5139 pbi->mem_set_list = NULL_RTX;
5140 pbi->mem_set_list_len = 0;
5141 pbi->local_set = local_set;
5142 pbi->cond_local_set = cond_local_set;
5143 pbi->cc0_live = 0;
5144 pbi->flags = flags;
5146 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
5147 pbi->reg_next_use = (rtx *) xcalloc (max_reg_num (), sizeof (rtx));
5148 else
5149 pbi->reg_next_use = NULL;
5151 pbi->new_set = BITMAP_XMALLOC ();
5153 #ifdef HAVE_conditional_execution
5154 pbi->reg_cond_dead = splay_tree_new (splay_tree_compare_ints, NULL,
5155 free_reg_cond_life_info);
5156 pbi->reg_cond_reg = BITMAP_XMALLOC ();
5158 /* If this block ends in a conditional branch, for each register live
5159 from one side of the branch and not the other, record the register
5160 as conditionally dead. */
5161 if (GET_CODE (bb->end) == JUMP_INSN
5162 && any_condjump_p (bb->end))
5164 regset_head diff_head;
5165 regset diff = INITIALIZE_REG_SET (diff_head);
5166 basic_block bb_true, bb_false;
5167 rtx cond_true, cond_false, set_src;
5168 int i;
5170 /* Identify the successor blocks. */
5171 bb_true = bb->succ->dest;
5172 if (bb->succ->succ_next != NULL)
5174 bb_false = bb->succ->succ_next->dest;
5176 if (bb->succ->flags & EDGE_FALLTHRU)
5178 basic_block t = bb_false;
5179 bb_false = bb_true;
5180 bb_true = t;
5182 else if (! (bb->succ->succ_next->flags & EDGE_FALLTHRU))
5183 abort ();
5185 else
5187 /* This can happen with a conditional jump to the next insn. */
5188 if (JUMP_LABEL (bb->end) != bb_true->head)
5189 abort ();
5191 /* Simplest way to do nothing. */
5192 bb_false = bb_true;
5195 /* Extract the condition from the branch. */
5196 set_src = SET_SRC (pc_set (bb->end));
5197 cond_true = XEXP (set_src, 0);
5198 cond_false = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond_true)),
5199 GET_MODE (cond_true), XEXP (cond_true, 0),
5200 XEXP (cond_true, 1));
5201 if (GET_CODE (XEXP (set_src, 1)) == PC)
5203 rtx t = cond_false;
5204 cond_false = cond_true;
5205 cond_true = t;
5208 /* Compute which registers lead different lives in the successors. */
5209 if (bitmap_operation (diff, bb_true->global_live_at_start,
5210 bb_false->global_live_at_start, BITMAP_XOR))
5212 rtx reg = XEXP (cond_true, 0);
5214 if (GET_CODE (reg) == SUBREG)
5215 reg = SUBREG_REG (reg);
5217 if (GET_CODE (reg) != REG)
5218 abort ();
5220 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (reg));
5222 /* For each such register, mark it conditionally dead. */
5223 EXECUTE_IF_SET_IN_REG_SET
5224 (diff, 0, i,
5226 struct reg_cond_life_info *rcli;
5227 rtx cond;
5229 rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
5231 if (REGNO_REG_SET_P (bb_true->global_live_at_start, i))
5232 cond = cond_false;
5233 else
5234 cond = cond_true;
5235 rcli->condition = cond;
5236 rcli->stores = const0_rtx;
5237 rcli->orig_condition = cond;
5239 splay_tree_insert (pbi->reg_cond_dead, i,
5240 (splay_tree_value) rcli);
5244 FREE_REG_SET (diff);
5246 #endif
5248 /* If this block has no successors, any stores to the frame that aren't
5249 used later in the block are dead. So make a pass over the block
5250 recording any such that are made and show them dead at the end. We do
5251 a very conservative and simple job here. */
5252 if (optimize
5253 && ! (TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
5254 && (TYPE_RETURNS_STACK_DEPRESSED
5255 (TREE_TYPE (current_function_decl))))
5256 && (flags & PROP_SCAN_DEAD_CODE)
5257 && (bb->succ == NULL
5258 || (bb->succ->succ_next == NULL
5259 && bb->succ->dest == EXIT_BLOCK_PTR
5260 && ! current_function_calls_eh_return)))
5262 rtx insn, set;
5263 for (insn = bb->end; insn != bb->head; insn = PREV_INSN (insn))
5264 if (GET_CODE (insn) == INSN
5265 && (set = single_set (insn))
5266 && GET_CODE (SET_DEST (set)) == MEM)
5268 rtx mem = SET_DEST (set);
5269 rtx canon_mem = canon_rtx (mem);
5271 /* This optimization is performed by faking a store to the
5272 memory at the end of the block. This doesn't work for
5273 unchanging memories because multiple stores to unchanging
5274 memory are illegal and alias analysis doesn't consider that case. */
5275 if (RTX_UNCHANGING_P (canon_mem))
5276 continue;
5278 if (XEXP (canon_mem, 0) == frame_pointer_rtx
5279 || (GET_CODE (XEXP (canon_mem, 0)) == PLUS
5280 && XEXP (XEXP (canon_mem, 0), 0) == frame_pointer_rtx
5281 && GET_CODE (XEXP (XEXP (canon_mem, 0), 1)) == CONST_INT))
5283 #ifdef AUTO_INC_DEC
5284 /* Store a copy of mem, otherwise the address may be scrogged
5285 by find_auto_inc. This matters because insn_dead_p uses
5286 an rtx_equal_p check to determine if two addresses are
5287 the same. This works before find_auto_inc, but fails
5288 after find_auto_inc, causing discrepancies between the
5289 set of live registers calculated during the
5290 calculate_global_regs_live phase and what actually exists
5291 after flow completes, leading to aborts. */
5292 if (flags & PROP_AUTOINC)
5293 mem = shallow_copy_rtx (mem);
5294 #endif
5295 pbi->mem_set_list = alloc_EXPR_LIST (0, mem, pbi->mem_set_list);
5296 if (++pbi->mem_set_list_len >= MAX_MEM_SET_LIST_LEN)
5297 break;
5302 return pbi;
5305 /* Release a propagate_block_info struct. */
5307 void
5308 free_propagate_block_info (pbi)
5309 struct propagate_block_info *pbi;
5311 free_EXPR_LIST_list (&pbi->mem_set_list);
5313 BITMAP_XFREE (pbi->new_set);
5315 #ifdef HAVE_conditional_execution
5316 splay_tree_delete (pbi->reg_cond_dead);
5317 BITMAP_XFREE (pbi->reg_cond_reg);
5318 #endif
5320 if (pbi->reg_next_use)
5321 free (pbi->reg_next_use);
5323 free (pbi);
5326 /* Compute the registers live at the beginning of a basic block BB from
5327 those live at the end.
5329 When called, REG_LIVE contains those live at the end. On return, it
5330 contains those live at the beginning.
5332 LOCAL_SET, if non-null, will be set with all registers killed
5333 unconditionally by this basic block.
5334 Likewise, COND_LOCAL_SET, if non-null, will be set with all registers
5335 killed conditionally by this basic block. If there is any unconditional
5336 set of a register, then the corresponding bit will be set in LOCAL_SET
5337 and cleared in COND_LOCAL_SET.
5338 It is valid for LOCAL_SET and COND_LOCAL_SET to be the same set. In this
5339 case, the resulting set will be equal to the union of the two sets that
5340 would otherwise be computed. */
5342 void
5343 propagate_block (bb, live, local_set, cond_local_set, flags)
5344 basic_block bb;
5345 regset live;
5346 regset local_set;
5347 regset cond_local_set;
5348 int flags;
5350 struct propagate_block_info *pbi;
5351 rtx insn, prev;
5353 pbi = init_propagate_block_info (bb, live, local_set, cond_local_set, flags);
5355 if (flags & PROP_REG_INFO)
5357 register int i;
5359 /* Process the regs live at the end of the block.
5360 Mark them as not local to any one basic block. */
5361 EXECUTE_IF_SET_IN_REG_SET (live, 0, i,
5362 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
5365 /* Scan the block an insn at a time from end to beginning. */
5367 for (insn = bb->end;; insn = prev)
5369 /* If this is a call to `setjmp' et al, warn if any
5370 non-volatile datum is live. */
5371 if ((flags & PROP_REG_INFO)
5372 && GET_CODE (insn) == NOTE
5373 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
5374 IOR_REG_SET (regs_live_at_setjmp, pbi->reg_live);
5376 prev = propagate_one_insn (pbi, insn);
5378 if (insn == bb->head)
5379 break;
5382 free_propagate_block_info (pbi);
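/* [Illustrative aside, not part of the pass.]  Per insn, the backward
   step propagate_block performs is the classic dataflow equation
   live = (live & ~def) | use.  A sketch with single words for regsets
   and a made-up insn record (kept under #if 0):  */
#if 0
#include <stdio.h>

struct fake_insn { unsigned int def; unsigned int use; };

/* Walk INSNS backward, turning LIVE at the end into live at the start.  */
static unsigned int
back_propagate (const struct fake_insn *insns, int n, unsigned int live)
{
  int i;

  for (i = n - 1; i >= 0; i--)
    {
      live &= ~insns[i].def;	/* the set kills the old value...  */
      live |= insns[i].use;	/* ...but its operands must live.  */
    }
  return live;
}

int
main (void)
{
  /* r2 = r0 + r1;  r3 = r2 + r2  */
  static const struct fake_insn block[]
    = { { 1u << 2, (1u << 0) | (1u << 1) },
	{ 1u << 3, 1u << 2 } };
  unsigned int live_at_end = 1u << 3;	/* only r3 is needed afterward */

  /* Prints 0x3: exactly r0 and r1 are live on entry.  */
  printf ("live at start: %#x\n", back_propagate (block, 2, live_at_end));
  return 0;
}
#endif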
5385 /* Return 1 if X (the body of an insn, or part of it) is just dead stores
5386 (SET expressions whose destinations are registers dead after the insn).
5387 NEEDED is the regset that says which regs are alive after the insn.
5389 Unless CALL_OK is non-zero, an insn is needed if it contains a CALL.
5391 If X is the entire body of an insn, NOTES contains the reg notes
5392 pertaining to the insn. */
5394 static int
5395 insn_dead_p (pbi, x, call_ok, notes)
5396 struct propagate_block_info *pbi;
5397 rtx x;
5398 int call_ok;
5399 rtx notes ATTRIBUTE_UNUSED;
5401 enum rtx_code code = GET_CODE (x);
5403 #ifdef AUTO_INC_DEC
5404 /* If flow is invoked after reload, we must take existing AUTO_INC
5405 expressions into account. */
5406 if (reload_completed)
5408 for (; notes; notes = XEXP (notes, 1))
5410 if (REG_NOTE_KIND (notes) == REG_INC)
5412 int regno = REGNO (XEXP (notes, 0));
5414 /* Don't delete insns to set global regs. */
5415 if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
5416 || REGNO_REG_SET_P (pbi->reg_live, regno))
5417 return 0;
5421 #endif
5423 /* If setting something that's a reg or part of one,
5424 see if that register's altered value will be live. */
5426 if (code == SET)
5428 rtx r = SET_DEST (x);
5430 #ifdef HAVE_cc0
5431 if (GET_CODE (r) == CC0)
5432 return ! pbi->cc0_live;
5433 #endif
5435 /* A SET that is a subroutine call cannot be dead. */
5436 if (GET_CODE (SET_SRC (x)) == CALL)
5438 if (! call_ok)
5439 return 0;
5442 /* Don't eliminate loads from volatile memory or volatile asms. */
5443 else if (volatile_refs_p (SET_SRC (x)))
5444 return 0;
5446 if (GET_CODE (r) == MEM)
5448 rtx temp;
5450 if (MEM_VOLATILE_P (r))
5451 return 0;
5453 /* Walk the set of memory locations we are currently tracking
5454 and see if one is an identical match to this memory location.
5455 If so, this memory write is dead (remember, we're walking
5456 backwards from the end of the block to the start). Since
5457 rtx_equal_p does not check the alias set or flags, we also
5458 must have the potential for them to conflict (anti_dependence). */
5459 for (temp = pbi->mem_set_list; temp != 0; temp = XEXP (temp, 1))
5460 if (anti_dependence (r, XEXP (temp, 0)))
5462 rtx mem = XEXP (temp, 0);
5464 if (rtx_equal_p (mem, r))
5465 return 1;
5466 #ifdef AUTO_INC_DEC
5467 /* Check if memory reference matches an auto increment. Only
5468 post increment/decrement or modify are valid. */
5469 if (GET_MODE (mem) == GET_MODE (r)
5470 && (GET_CODE (XEXP (mem, 0)) == POST_DEC
5471 || GET_CODE (XEXP (mem, 0)) == POST_INC
5472 || GET_CODE (XEXP (mem, 0)) == POST_MODIFY)
5473 && GET_MODE (XEXP (mem, 0)) == GET_MODE (r)
5474 && rtx_equal_p (XEXP (XEXP (mem, 0), 0), XEXP (r, 0)))
5475 return 1;
5476 #endif
5479 else
5481 while (GET_CODE (r) == SUBREG
5482 || GET_CODE (r) == STRICT_LOW_PART
5483 || GET_CODE (r) == ZERO_EXTRACT)
5484 r = XEXP (r, 0);
5486 if (GET_CODE (r) == REG)
5488 int regno = REGNO (r);
5490 /* Obvious. */
5491 if (REGNO_REG_SET_P (pbi->reg_live, regno))
5492 return 0;
5494 /* If this is a hard register, verify that subsequent
5495 words are not needed. */
5496 if (regno < FIRST_PSEUDO_REGISTER)
5498 int n = HARD_REGNO_NREGS (regno, GET_MODE (r));
5500 while (--n > 0)
5501 if (REGNO_REG_SET_P (pbi->reg_live, regno+n))
5502 return 0;
5505 /* Don't delete insns to set global regs. */
5506 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
5507 return 0;
5509 /* Make sure insns to set the stack pointer aren't deleted. */
5510 if (regno == STACK_POINTER_REGNUM)
5511 return 0;
5513 /* ??? These bits might be redundant with the force live bits
5514 in calculate_global_regs_live. We would delete from
5515 sequential sets; whether this actually affects real code
5516 for anything but the stack pointer I don't know. */
5517 /* Make sure insns to set the frame pointer aren't deleted. */
5518 if (regno == FRAME_POINTER_REGNUM
5519 && (! reload_completed || frame_pointer_needed))
5520 return 0;
5521 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
5522 if (regno == HARD_FRAME_POINTER_REGNUM
5523 && (! reload_completed || frame_pointer_needed))
5524 return 0;
5525 #endif
5527 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
5528 /* Make sure insns to set arg pointer are never deleted
5529 (if the arg pointer isn't fixed, there will be a USE
5530 for it, so we can treat it normally). */
5531 if (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
5532 return 0;
5533 #endif
5535 /* Otherwise, the set is dead. */
5536 return 1;
5541 /* If performing several activities, insn is dead if each activity
5542 is individually dead. Also, CLOBBERs and USEs can be ignored; a
5543 CLOBBER or USE that's inside a PARALLEL doesn't make the insn
5544 worth keeping. */
5545 else if (code == PARALLEL)
5547 int i = XVECLEN (x, 0);
5549 for (i--; i >= 0; i--)
5550 if (GET_CODE (XVECEXP (x, 0, i)) != CLOBBER
5551 && GET_CODE (XVECEXP (x, 0, i)) != USE
5552 && ! insn_dead_p (pbi, XVECEXP (x, 0, i), call_ok, NULL_RTX))
5553 return 0;
5555 return 1;
5558 /* A CLOBBER of a pseudo-register that is dead serves no purpose. That
5559 is not necessarily true for hard registers. */
5560 else if (code == CLOBBER && GET_CODE (XEXP (x, 0)) == REG
5561 && REGNO (XEXP (x, 0)) >= FIRST_PSEUDO_REGISTER
5562 && ! REGNO_REG_SET_P (pbi->reg_live, REGNO (XEXP (x, 0))))
5563 return 1;
5565 /* We do not check other CLOBBER or USE here. An insn consisting of just
5566 a CLOBBER or just a USE should not be deleted. */
5567 return 0;
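/* [Illustrative aside, not part of the pass.]  The mem_set_list logic
   above implements backward dead-store detection: scanning from the end,
   a store is dead if a store to the very same location has already been
   seen.  With plain integer addresses the aliasing question vanishes and
   the scheme shrinks to this sketch (kept under #if 0):  */
#if 0
#include <stdio.h>

int
main (void)
{
  static const int store_addr[] = { 8, 16, 8 };	/* three stores */
  int seen[3];
  int nseen = 0;
  int i, j, dead;

  for (i = 2; i >= 0; i--)
    {
      dead = 0;
      for (j = 0; j < nseen; j++)
	if (seen[j] == store_addr[i])
	  dead = 1;
      if (dead)
	printf ("store %d to addr %d is dead\n", i, store_addr[i]);
      else
	seen[nseen++] = store_addr[i];
    }
  return 0;	/* reports store 0 to addr 8 as dead */
}
#endif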
5570 /* If INSN is the last insn in a libcall, and assuming INSN is dead,
5571 return 1 if the entire library call is dead.
5572 This is true if INSN copies a register (hard or pseudo)
5573 and if the hard return reg of the call insn is dead.
5574 (The caller should have tested the destination of the SET inside
5575 INSN already for death.)
5577 If this insn doesn't just copy a register, then we don't
5578 have an ordinary libcall. In that case, cse could not have
5579 managed to substitute the source for the dest later on,
5580 so we can assume the libcall is dead.
5582 PBI is the block info giving pseudoregs live before this insn.
5583 NOTE is the REG_RETVAL note of the insn. */
5585 static int
5586 libcall_dead_p (pbi, note, insn)
5587 struct propagate_block_info *pbi;
5588 rtx note;
5589 rtx insn;
5591 rtx x = single_set (insn);
5593 if (x)
5595 register rtx r = SET_SRC (x);
5596 if (GET_CODE (r) == REG)
5598 rtx call = XEXP (note, 0);
5599 rtx call_pat;
5600 register int i;
5602 /* Find the call insn. */
5603 while (call != insn && GET_CODE (call) != CALL_INSN)
5604 call = NEXT_INSN (call);
5606 /* If there is none, do nothing special,
5607 since ordinary death handling can understand these insns. */
5608 if (call == insn)
5609 return 0;
5611 /* See if the hard reg holding the value is dead.
5612 If this is a PARALLEL, find the call within it. */
5613 call_pat = PATTERN (call);
5614 if (GET_CODE (call_pat) == PARALLEL)
5616 for (i = XVECLEN (call_pat, 0) - 1; i >= 0; i--)
5617 if (GET_CODE (XVECEXP (call_pat, 0, i)) == SET
5618 && GET_CODE (SET_SRC (XVECEXP (call_pat, 0, i))) == CALL)
5619 break;
5621 /* This may be a library call that is returning a value
5622 via an invisible pointer. Do nothing special, since
5623 ordinary death handling can understand these insns. */
5624 if (i < 0)
5625 return 0;
5627 call_pat = XVECEXP (call_pat, 0, i);
5630 return insn_dead_p (pbi, call_pat, 1, REG_NOTES (call));
5633 return 1;
5636 /* Return 1 if register REGNO was used before it was set, i.e. if it is
5637 live at function entry. Don't count global register variables, variables
5638 in registers that can be used for function arg passing, or variables in
5639 fixed hard registers. */
5641 int
5642 regno_uninitialized (regno)
5643 int regno;
5645 if (n_basic_blocks == 0
5646 || (regno < FIRST_PSEUDO_REGISTER
5647 && (global_regs[regno]
5648 || fixed_regs[regno]
5649 || FUNCTION_ARG_REGNO_P (regno))))
5650 return 0;
5652 return REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno);
5655 /* 1 if register REGNO was alive at a place where `setjmp' was called
5656 and was set more than once or is an argument.
5657 Such regs may be clobbered by `longjmp'. */
5659 int
5660 regno_clobbered_at_setjmp (regno)
5661 int regno;
5663 if (n_basic_blocks == 0)
5664 return 0;
5666 return ((REG_N_SETS (regno) > 1
5667 || REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno))
5668 && REGNO_REG_SET_P (regs_live_at_setjmp, regno));
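/* [Illustrative aside, not part of the pass.]  The hazard the predicate
   above guards against can be shown at the source level: an auto
   variable modified between setjmp and longjmp has an indeterminate
   value after the longjmp if the compiler kept it in a call-clobbered
   register.  Declaring it volatile (or keeping it on the stack, as flow
   arranges for such pseudos) makes it safe.  Standalone demo, kept
   under #if 0:  */
#if 0
#include <setjmp.h>
#include <stdio.h>

static jmp_buf env;

int
main (void)
{
  volatile int x = 1;	/* without `volatile', 1 or 2 may print */

  if (setjmp (env) == 0)
    {
      x = 2;
      longjmp (env, 1);
    }
  printf ("x = %d\n", x);	/* with volatile, reliably 2 */
  return 0;
}
#endif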
5671 /* INSN references memory, possibly using autoincrement addressing modes.
5672 Find any entries on the mem_set_list that need to be invalidated due
5673 to an address change. */
5675 static void
5676 invalidate_mems_from_autoinc (pbi, insn)
5677 struct propagate_block_info *pbi;
5678 rtx insn;
5680 rtx note;
5681 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
5683 if (REG_NOTE_KIND (note) == REG_INC)
5685 rtx temp = pbi->mem_set_list;
5686 rtx prev = NULL_RTX;
5687 rtx next;
5689 while (temp)
5691 next = XEXP (temp, 1);
5692 if (reg_overlap_mentioned_p (XEXP (note, 0), XEXP (temp, 0)))
5694 /* Splice temp out of list. */
5695 if (prev)
5696 XEXP (prev, 1) = next;
5697 else
5698 pbi->mem_set_list = next;
5699 free_EXPR_LIST_node (temp);
5700 pbi->mem_set_list_len--;
5702 else
5703 prev = temp;
5704 temp = next;
5710 /* EXP is either a MEM or a REG. Remove any dependent entries
5711 from pbi->mem_set_list. */
5713 static void
5714 invalidate_mems_from_set (pbi, exp)
5715 struct propagate_block_info *pbi;
5716 rtx exp;
5718 rtx temp = pbi->mem_set_list;
5719 rtx prev = NULL_RTX;
5720 rtx next;
5722 while (temp)
5724 next = XEXP (temp, 1);
5725 if ((GET_CODE (exp) == MEM
5726 && output_dependence (XEXP (temp, 0), exp))
5727 || (GET_CODE (exp) == REG
5728 && reg_overlap_mentioned_p (exp, XEXP (temp, 0))))
5730 /* Splice this entry out of the list. */
5731 if (prev)
5732 XEXP (prev, 1) = next;
5733 else
5734 pbi->mem_set_list = next;
5735 free_EXPR_LIST_node (temp);
5736 pbi->mem_set_list_len--;
5738 else
5739 prev = temp;
5740 temp = next;
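/* [Illustrative aside, not part of the pass.]  Both invalidate_mems_from_*
   routines use the same idiom: walk a singly linked list with a trailing
   PREV pointer and splice out matching nodes.  The bare idiom on a toy
   list (hypothetical code, kept under #if 0):  */
#if 0
#include <stdio.h>
#include <stdlib.h>

struct node { int val; struct node *next; };

/* Remove every node whose value is VAL; return the new head.  */
static struct node *
splice_out (struct node *head, int val)
{
  struct node *prev = NULL, *temp = head, *next;

  while (temp)
    {
      next = temp->next;
      if (temp->val == val)
	{
	  if (prev)
	    prev->next = next;	/* unlink from the middle */
	  else
	    head = next;	/* unlink the head        */
	  free (temp);
	}
      else
	prev = temp;
      temp = next;
    }
  return head;
}

int
main (void)
{
  static const int vals[] = { 3, 7, 3 };
  struct node *head = NULL, *n;
  int i;

  for (i = 2; i >= 0; i--)
    {
      n = (struct node *) malloc (sizeof (*n));
      n->val = vals[i];
      n->next = head;
      head = n;
    }
  for (head = splice_out (head, 3); head; head = head->next)
    printf ("%d\n", head->val);	/* prints just 7 */
  return 0;
}
#endif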
5744 /* Process the registers that are set within X; registers set by the
5745 insn are collected in pbi->new_set and pbi->reg_live is updated.
5747 If INSN is nonzero, it is the insn being processed.
5749 pbi->flags is the set of operations to perform. */
5751 static void
5752 mark_set_regs (pbi, x, insn)
5753 struct propagate_block_info *pbi;
5754 rtx x, insn;
5756 rtx cond = NULL_RTX;
5757 rtx link;
5758 enum rtx_code code;
5760 if (insn)
5761 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
5763 if (REG_NOTE_KIND (link) == REG_INC)
5764 mark_set_1 (pbi, SET, XEXP (link, 0),
5765 (GET_CODE (x) == COND_EXEC
5766 ? COND_EXEC_TEST (x) : NULL_RTX),
5767 insn, pbi->flags);
5769 retry:
5770 switch (code = GET_CODE (x))
5772 case SET:
5773 case CLOBBER:
5774 mark_set_1 (pbi, code, SET_DEST (x), cond, insn, pbi->flags);
5775 return;
5777 case COND_EXEC:
5778 cond = COND_EXEC_TEST (x);
5779 x = COND_EXEC_CODE (x);
5780 goto retry;
5782 case PARALLEL:
5784 register int i;
5785 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
5787 rtx sub = XVECEXP (x, 0, i);
5788 switch (code = GET_CODE (sub))
5790 case COND_EXEC:
5791 if (cond != NULL_RTX)
5792 abort ();
5794 cond = COND_EXEC_TEST (sub);
5795 sub = COND_EXEC_CODE (sub);
5796 if (GET_CODE (sub) != SET && GET_CODE (sub) != CLOBBER)
5797 break;
5798 /* Fall through. */
5800 case SET:
5801 case CLOBBER:
5802 mark_set_1 (pbi, code, SET_DEST (sub), cond, insn, pbi->flags);
5803 break;
5805 default:
5806 break;
5809 break;
5812 default:
5813 break;
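/* [Illustrative aside, not part of the pass.]  The retry label above is
   the usual unwrap-and-redispatch shape: strip one wrapper (COND_EXEC)
   and jump back to the switch.  The same shape on a toy tagged
   expression (hypothetical types, kept under #if 0):  */
#if 0
#include <stdio.h>

enum tcode { T_SET, T_COND_EXEC };
struct texpr { enum tcode code; struct texpr *inner; int dest; };

static int
find_dest (struct texpr *x)
{
 retry:
  switch (x->code)
    {
    case T_SET:
      return x->dest;
    case T_COND_EXEC:
      x = x->inner;	/* strip the wrapper...       */
      goto retry;	/* ...and dispatch once more  */
    }
  return -1;
}

int
main (void)
{
  struct texpr set = { T_SET, NULL, 5 };
  struct texpr wrapped = { T_COND_EXEC, &set, 0 };

  printf ("dest = %d\n", find_dest (&wrapped));	/* prints 5 */
  return 0;
}
#endif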
5817 /* Process a single set, which appears in INSN. REG (which may not
5818 actually be a REG, it may also be a SUBREG, PARALLEL, etc.) is
5819 being set using the CODE (which may be SET, CLOBBER, or COND_EXEC).
5820 If the set is conditional (because it appears in a COND_EXEC), COND
5821 will be the condition. */
5823 static void
5824 mark_set_1 (pbi, code, reg, cond, insn, flags)
5825 struct propagate_block_info *pbi;
5826 enum rtx_code code;
5827 rtx reg, cond, insn;
5828 int flags;
5830 int regno_first = -1, regno_last = -1;
5831 unsigned long not_dead = 0;
5832 int i;
5834 /* Modifying just one hardware register of a multi-reg value or just a
5835 byte field of a register does not mean the value from before this insn
5836 is now dead. Of course, if it was dead after, it's unused now. */
5838 switch (GET_CODE (reg))
5840 case PARALLEL:
5841 /* Some targets place small structures in registers for return values of
5842 functions. We have to detect this case specially here to get correct
5843 flow information. */
5844 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
5845 if (XEXP (XVECEXP (reg, 0, i), 0) != 0)
5846 mark_set_1 (pbi, code, XEXP (XVECEXP (reg, 0, i), 0), cond, insn,
5847 flags);
5848 return;
5850 case ZERO_EXTRACT:
5851 case SIGN_EXTRACT:
5852 case STRICT_LOW_PART:
5853 /* ??? Assumes STRICT_LOW_PART not used on multi-word registers. */
5854 do
5855 reg = XEXP (reg, 0);
5856 while (GET_CODE (reg) == SUBREG
5857 || GET_CODE (reg) == ZERO_EXTRACT
5858 || GET_CODE (reg) == SIGN_EXTRACT
5859 || GET_CODE (reg) == STRICT_LOW_PART);
5860 if (GET_CODE (reg) == MEM)
5861 break;
5862 not_dead = (unsigned long) REGNO_REG_SET_P (pbi->reg_live, REGNO (reg));
5863 /* Fall through. */
5865 case REG:
5866 regno_last = regno_first = REGNO (reg);
5867 if (regno_first < FIRST_PSEUDO_REGISTER)
5868 regno_last += HARD_REGNO_NREGS (regno_first, GET_MODE (reg)) - 1;
5869 break;
5871 case SUBREG:
5872 if (GET_CODE (SUBREG_REG (reg)) == REG)
5874 enum machine_mode outer_mode = GET_MODE (reg);
5875 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (reg));
5877 /* Identify the range of registers affected. This is moderately
5878 tricky for hard registers. See alter_subreg. */
5880 regno_last = regno_first = REGNO (SUBREG_REG (reg));
5881 if (regno_first < FIRST_PSEUDO_REGISTER)
5883 regno_first += subreg_regno_offset (regno_first, inner_mode,
5884 SUBREG_BYTE (reg),
5885 outer_mode);
5886 regno_last = (regno_first
5887 + HARD_REGNO_NREGS (regno_first, outer_mode) - 1);
5889 /* Since we've just adjusted the register number ranges, make
5890 sure REG matches. Otherwise some_was_live will be clear
5891 when it shouldn't have been, and we'll create incorrect
5892 REG_UNUSED notes. */
5893 reg = gen_rtx_REG (outer_mode, regno_first);
5895 else
5897 /* If the number of words in the subreg is less than the number
5898 of words in the full register, we have a well-defined partial
5899 set. Otherwise the high bits are undefined.
5901 This is only really applicable to pseudos, since we just took
5902 care of multi-word hard registers. */
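/* E.g. on a 32-bit target, (subreg:SI (reg:DI 100) 0) covers only one
   of the pseudo's two words, so the set is partial and (reg:DI 100)
   is not fully dead here if it was live (hence NOT_DEAD is set).  */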
5903 if (((GET_MODE_SIZE (outer_mode)
5904 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
5905 < ((GET_MODE_SIZE (inner_mode)
5906 + UNITS_PER_WORD - 1) / UNITS_PER_WORD))
5907 not_dead = (unsigned long) REGNO_REG_SET_P (pbi->reg_live,
5908 regno_first);
5910 reg = SUBREG_REG (reg);
5913 else
5914 reg = SUBREG_REG (reg);
5915 break;
5917 default:
5918 break;
5921 /* If this set is a MEM, then it kills any aliased writes.
5922 If this set is a REG, then it kills any MEMs which use the reg. */
5923 if (optimize && (flags & PROP_SCAN_DEAD_CODE))
5925 if (GET_CODE (reg) == MEM || GET_CODE (reg) == REG)
5926 invalidate_mems_from_set (pbi, reg);
5928 /* If the memory reference had embedded side effects (autoincrement
5929 address modes), then we may need to kill some entries on the
5930 memory set list. */
5931 if (insn && GET_CODE (reg) == MEM)
5932 invalidate_mems_from_autoinc (pbi, insn);
5934 if (pbi->mem_set_list_len < MAX_MEM_SET_LIST_LEN
5935 && GET_CODE (reg) == MEM && ! side_effects_p (reg)
5936 /* ??? With more effort we could track conditional memory life. */
5937 && ! cond
5938 /* We do not know the size of a BLKmode store, so we do not track
5939 them for redundant store elimination. */
5940 && GET_MODE (reg) != BLKmode
5941 /* There are no REG_INC notes for SP, so we can't assume we'll see
5942 everything that invalidates it. To be safe, don't eliminate any
5943 stores through SP; none of them should be redundant anyway. */
5944 && ! reg_mentioned_p (stack_pointer_rtx, reg))
5946 #ifdef AUTO_INC_DEC
5947 /* Store a copy of mem, otherwise the address may be
5948 scrogged by find_auto_inc. */
5949 if (flags & PROP_AUTOINC)
5950 reg = shallow_copy_rtx (reg);
5951 #endif
5952 pbi->mem_set_list = alloc_EXPR_LIST (0, reg, pbi->mem_set_list);
5953 pbi->mem_set_list_len++;
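/* Illustrative consequence: once (set (mem:SI (reg 65)) ...) is
   recorded here, an earlier store to the same location, reached later
   in this backward scan, can be recognized as redundant.  */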
5957 if (GET_CODE (reg) == REG
5958 && ! (regno_first == FRAME_POINTER_REGNUM
5959 && (! reload_completed || frame_pointer_needed))
5960 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
5961 && ! (regno_first == HARD_FRAME_POINTER_REGNUM
5962 && (! reload_completed || frame_pointer_needed))
5963 #endif
5964 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
5965 && ! (regno_first == ARG_POINTER_REGNUM && fixed_regs[regno_first])
5966 #endif
5969 int some_was_live = 0, some_was_dead = 0;
5971 for (i = regno_first; i <= regno_last; ++i)
5973 int needed_regno = REGNO_REG_SET_P (pbi->reg_live, i);
5974 if (pbi->local_set)
5976 /* Order of the set operation matters here since both
5977 sets may be the same. */
5978 CLEAR_REGNO_REG_SET (pbi->cond_local_set, i);
5979 if (cond != NULL_RTX
5980 && ! REGNO_REG_SET_P (pbi->local_set, i))
5981 SET_REGNO_REG_SET (pbi->cond_local_set, i);
5982 else
5983 SET_REGNO_REG_SET (pbi->local_set, i);
5985 if (code != CLOBBER)
5986 SET_REGNO_REG_SET (pbi->new_set, i);
5988 some_was_live |= needed_regno;
5989 some_was_dead |= ! needed_regno;
5992 #ifdef HAVE_conditional_execution
5993 /* Consider conditional death in deciding that the register needs
5994 a death note. */
5995 if (some_was_live && ! not_dead
5996 /* The stack pointer is never dead. Well, not strictly true,
5997 but it's very difficult to tell from here. Hopefully
5998 combine_stack_adjustments will fix up the most egregious
5999 errors. */
6000 && regno_first != STACK_POINTER_REGNUM)
6002 for (i = regno_first; i <= regno_last; ++i)
6003 if (! mark_regno_cond_dead (pbi, i, cond))
6004 not_dead |= ((unsigned long) 1) << (i - regno_first);
6006 #endif
6008 /* Additional data to record if this is the final pass. */
6009 if (flags & (PROP_LOG_LINKS | PROP_REG_INFO
6010 | PROP_DEATH_NOTES | PROP_AUTOINC))
6012 register rtx y;
6013 register int blocknum = pbi->bb->index;
6015 y = NULL_RTX;
6016 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
6018 y = pbi->reg_next_use[regno_first];
6020 /* The next use is no longer next, since a store intervenes. */
6021 for (i = regno_first; i <= regno_last; ++i)
6022 pbi->reg_next_use[i] = 0;
6025 if (flags & PROP_REG_INFO)
6027 for (i = regno_first; i <= regno_last; ++i)
6029 /* Count (weighted) references, stores, etc. This counts a
6030 register twice if it is modified, but that is correct. */
6031 REG_N_SETS (i) += 1;
6032 REG_N_REFS (i) += 1;
6033 REG_FREQ (i) += (optimize_size || !pbi->bb->frequency
6034 ? 1 : pbi->bb->frequency);
6036 /* The insns where a reg is live are normally counted
6037 elsewhere, but we want the count to include the insn
6038 where the reg is set, and the normal counting mechanism
6039 would not count it. */
6040 REG_LIVE_LENGTH (i) += 1;
6043 /* If this is a hard reg, record this function uses the reg. */
6044 if (regno_first < FIRST_PSEUDO_REGISTER)
6046 for (i = regno_first; i <= regno_last; i++)
6047 regs_ever_live[i] = 1;
6049 else
6051 /* Keep track of which basic blocks each reg appears in. */
6052 if (REG_BASIC_BLOCK (regno_first) == REG_BLOCK_UNKNOWN)
6053 REG_BASIC_BLOCK (regno_first) = blocknum;
6054 else if (REG_BASIC_BLOCK (regno_first) != blocknum)
6055 REG_BASIC_BLOCK (regno_first) = REG_BLOCK_GLOBAL;
6059 if (! some_was_dead)
6061 if (flags & PROP_LOG_LINKS)
6063 /* Make a logical link from the next following insn
6064 that uses this register, back to this insn.
6065 The following insns have already been processed.
6067 We don't build a LOG_LINK for hard registers contained
6068 in ASM_OPERANDs. If these registers get replaced,
6069 we might wind up changing the semantics of the insn,
6070 even if reload can make what appear to be valid
6071 assignments later. */
6072 if (y && (BLOCK_NUM (y) == blocknum)
6073 && (regno_first >= FIRST_PSEUDO_REGISTER
6074 || asm_noperands (PATTERN (y)) < 0))
6075 LOG_LINKS (y) = alloc_INSN_LIST (insn, LOG_LINKS (y));
6078 else if (not_dead)
6080 else if (! some_was_live)
6082 if (flags & PROP_REG_INFO)
6083 REG_N_DEATHS (regno_first) += 1;
6085 if (flags & PROP_DEATH_NOTES)
6087 /* Note that dead stores have already been deleted
6088 when possible. If we get here, we have found a
6089 dead store that cannot be eliminated (because the
6090 same insn does something useful). Indicate this
6091 by marking the reg being set as dying here. */
6092 REG_NOTES (insn)
6093 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
6096 else
6098 if (flags & PROP_DEATH_NOTES)
6100 /* This is a case where we have a multi-word hard register
6101 and some, but not all, of the words of the register are
6102 needed in subsequent insns. Write REG_UNUSED notes
6103 for those parts that were not needed. This case should
6104 be rare. */
6106 for (i = regno_first; i <= regno_last; ++i)
6107 if (! REGNO_REG_SET_P (pbi->reg_live, i))
6108 REG_NOTES (insn)
6109 = alloc_EXPR_LIST (REG_UNUSED,
6110 gen_rtx_REG (reg_raw_mode[i], i),
6111 REG_NOTES (insn));
6116 /* Mark the register as being dead. */
6117 if (some_was_live
6118 /* The stack pointer is never dead. Well, not strictly true,
6119 but it's very difficult to tell from here. Hopefully
6120 combine_stack_adjustments will fix up the most egregious
6121 errors. */
6122 && regno_first != STACK_POINTER_REGNUM)
6124 for (i = regno_first; i <= regno_last; ++i)
6125 if (!(not_dead & (((unsigned long) 1) << (i - regno_first))))
6126 CLEAR_REGNO_REG_SET (pbi->reg_live, i);
6129 else if (GET_CODE (reg) == REG)
6131 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
6132 pbi->reg_next_use[regno_first] = 0;
6135 /* If this is the last pass and this is a SCRATCH, show it will be dying
6136 here and count it. */
6137 else if (GET_CODE (reg) == SCRATCH)
6139 if (flags & PROP_DEATH_NOTES)
6140 REG_NOTES (insn)
6141 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
6145 #ifdef HAVE_conditional_execution
6146 /* Mark REGNO conditionally dead.
6147 Return true if the register is now unconditionally dead. */
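/* Example (illustrative): scanning backward, a store to r4 under
   (ne p1 0) records that condition; a second store under (eq p1 0)
   ORs in the complementary condition, yielding const1_rtx, at which
   point r4 is unconditionally dead.  */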
6149 static int
6150 mark_regno_cond_dead (pbi, regno, cond)
6151 struct propagate_block_info *pbi;
6152 int regno;
6153 rtx cond;
6155 /* If this is a store to a predicate register, the value of the
6156 predicate is changing; we don't know that the predicate as seen
6157 before is the same as that seen after. Flush all dependent
6158 conditions from reg_cond_dead. This will make all such
6159 conditionally live registers unconditionally live. */
6160 if (REGNO_REG_SET_P (pbi->reg_cond_reg, regno))
6161 flush_reg_cond_reg (pbi, regno);
6163 /* If this is an unconditional store, remove any conditional
6164 life that may have existed. */
6165 if (cond == NULL_RTX)
6166 splay_tree_remove (pbi->reg_cond_dead, regno);
6167 else
6169 splay_tree_node node;
6170 struct reg_cond_life_info *rcli;
6171 rtx ncond;
6173 /* Otherwise this is a conditional set. Record that fact.
6174 It may have been conditionally used, or there may be a
6175 subsequent set with a complementary condition. */
6177 node = splay_tree_lookup (pbi->reg_cond_dead, regno);
6178 if (node == NULL)
6180 /* The register was unconditionally live previously.
6181 Record the current condition as the condition under
6182 which it is dead. */
6183 rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
6184 rcli->condition = cond;
6185 rcli->stores = cond;
6186 rcli->orig_condition = const0_rtx;
6187 splay_tree_insert (pbi->reg_cond_dead, regno,
6188 (splay_tree_value) rcli);
6190 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));
6192 /* Not unconditionally dead. */
6193 return 0;
6195 else
6197 /* The register was conditionally live previously.
6198 Add the new condition to the old. */
6199 rcli = (struct reg_cond_life_info *) node->value;
6200 ncond = rcli->condition;
6201 ncond = ior_reg_cond (ncond, cond, 1);
6202 if (rcli->stores == const0_rtx)
6203 rcli->stores = cond;
6204 else if (rcli->stores != const1_rtx)
6205 rcli->stores = ior_reg_cond (rcli->stores, cond, 1);
6207 /* If the register is now unconditionally dead, remove the entry
6208 in the splay_tree. A register is unconditionally dead if the
6209 dead condition ncond is true. A register is also unconditionally
6210 dead if the sum of all conditional stores is an unconditional
6211 store (stores is true), and the dead condition is identically the
6212 same as the original dead condition initialized at the end of
6213 the block. This is a pointer compare, not an rtx_equal_p
6214 compare. */
6215 if (ncond == const1_rtx
6216 || (ncond == rcli->orig_condition && rcli->stores == const1_rtx))
6217 splay_tree_remove (pbi->reg_cond_dead, regno);
6218 else
6220 rcli->condition = ncond;
6222 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));
6224 /* Not unconditionally dead. */
6225 return 0;
6230 return 1;
6233 /* Called from splay_tree_delete for pbi->reg_cond_dead. */
6235 static void
6236 free_reg_cond_life_info (value)
6237 splay_tree_value value;
6239 struct reg_cond_life_info *rcli = (struct reg_cond_life_info *) value;
6240 free (rcli);
6243 /* Helper function for flush_reg_cond_reg. */
6245 static int
6246 flush_reg_cond_reg_1 (node, data)
6247 splay_tree_node node;
6248 void *data;
6250 struct reg_cond_life_info *rcli;
6251 int *xdata = (int *) data;
6252 unsigned int regno = xdata[0];
6254 /* Don't need to search if last flushed value was farther on in
6255 the in-order traversal. */
6256 if (xdata[1] >= (int) node->key)
6257 return 0;
6259 /* Splice out portions of the expression that refer to regno. */
6260 rcli = (struct reg_cond_life_info *) node->value;
6261 rcli->condition = elim_reg_cond (rcli->condition, regno);
6262 if (rcli->stores != const0_rtx && rcli->stores != const1_rtx)
6263 rcli->stores = elim_reg_cond (rcli->stores, regno);
6265 /* If the entire condition is now false, signal the node to be removed. */
6266 if (rcli->condition == const0_rtx)
6268 xdata[1] = node->key;
6269 return -1;
6271 else if (rcli->condition == const1_rtx)
6272 abort ();
6274 return 0;
6277 /* Flush all (sub) expressions referring to REGNO from pbi->reg_cond_dead. */
6279 static void
6280 flush_reg_cond_reg (pbi, regno)
6281 struct propagate_block_info *pbi;
6282 int regno;
6284 int pair[2];
6286 pair[0] = regno;
6287 pair[1] = -1;
6288 while (splay_tree_foreach (pbi->reg_cond_dead,
6289 flush_reg_cond_reg_1, pair) == -1)
6290 splay_tree_remove (pbi->reg_cond_dead, pair[1]);
6292 CLEAR_REGNO_REG_SET (pbi->reg_cond_reg, regno);
6295 /* Logical arithmetic on predicate conditions. IOR, NOT and AND.
6296 For ior/and, the ADD flag determines whether we want to add the new
6297 condition X to the old one unconditionally. If it is zero, we will
6298 only return a new expression if X allows us to simplify part of
6299 OLD, otherwise we return OLD unchanged to the caller.
6300 If ADD is nonzero, we will return a new condition in all cases. The
6301 toplevel caller of one of these functions should always pass 1 for
6302 ADD. */
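/* A few illustrative cases, assuming p1 is a predicate register:
     ior_reg_cond ((ne p1 0), (eq p1 0), 1)  => const1_rtx
     and_reg_cond ((ne p1 0), (eq p1 0), 1)  => const0_rtx
     ior_reg_cond ((ne p1 0), (ne p1 0), 0)  => (ne p1 0), unchanged  */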
6304 static rtx
6305 ior_reg_cond (old, x, add)
6306 rtx old, x;
6307 int add;
6309 rtx op0, op1;
6311 if (GET_RTX_CLASS (GET_CODE (old)) == '<')
6313 if (GET_RTX_CLASS (GET_CODE (x)) == '<'
6314 && REVERSE_CONDEXEC_PREDICATES_P (GET_CODE (x), GET_CODE (old))
6315 && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
6316 return const1_rtx;
6317 if (GET_CODE (x) == GET_CODE (old)
6318 && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
6319 return old;
6320 if (! add)
6321 return old;
6322 return gen_rtx_IOR (0, old, x);
6325 switch (GET_CODE (old))
6327 case IOR:
6328 op0 = ior_reg_cond (XEXP (old, 0), x, 0);
6329 op1 = ior_reg_cond (XEXP (old, 1), x, 0);
6330 if (op0 != XEXP (old, 0) || op1 != XEXP (old, 1))
6332 if (op0 == const0_rtx)
6333 return op1;
6334 if (op1 == const0_rtx)
6335 return op0;
6336 if (op0 == const1_rtx || op1 == const1_rtx)
6337 return const1_rtx;
6338 if (op0 == XEXP (old, 0))
6339 op0 = gen_rtx_IOR (0, op0, x);
6340 else
6341 op1 = gen_rtx_IOR (0, op1, x);
6342 return gen_rtx_IOR (0, op0, op1);
6344 if (! add)
6345 return old;
6346 return gen_rtx_IOR (0, old, x);
6348 case AND:
6349 op0 = ior_reg_cond (XEXP (old, 0), x, 0);
6350 op1 = ior_reg_cond (XEXP (old, 1), x, 0);
6351 if (op0 != XEXP (old, 0) || op1 != XEXP (old, 1))
6353 if (op0 == const1_rtx)
6354 return op1;
6355 if (op1 == const1_rtx)
6356 return op0;
6357 if (op0 == const0_rtx || op1 == const0_rtx)
6358 return const0_rtx;
6359 if (op0 == XEXP (old, 0))
6360 op0 = gen_rtx_IOR (0, op0, x);
6361 else
6362 op1 = gen_rtx_IOR (0, op1, x);
6363 return gen_rtx_AND (0, op0, op1);
6365 if (! add)
6366 return old;
6367 return gen_rtx_IOR (0, old, x);
6369 case NOT:
6370 op0 = and_reg_cond (XEXP (old, 0), not_reg_cond (x), 0);
6371 if (op0 != XEXP (old, 0))
6372 return not_reg_cond (op0);
6373 if (! add)
6374 return old;
6375 return gen_rtx_IOR (0, old, x);
6377 default:
6378 abort ();
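/* Return the logical complement of the condition X.  A comparison
   against a predicate register is complemented by reversing the
   comparison code; X is expected to compare against const0_rtx.  */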
6382 static rtx
6383 not_reg_cond (x)
6384 rtx x;
6386 enum rtx_code x_code;
6388 if (x == const0_rtx)
6389 return const1_rtx;
6390 else if (x == const1_rtx)
6391 return const0_rtx;
6392 x_code = GET_CODE (x);
6393 if (x_code == NOT)
6394 return XEXP (x, 0);
6395 if (GET_RTX_CLASS (x_code) == '<'
6396 && GET_CODE (XEXP (x, 0)) == REG)
6398 if (XEXP (x, 1) != const0_rtx)
6399 abort ();
6401 return gen_rtx_fmt_ee (reverse_condition (x_code),
6402 VOIDmode, XEXP (x, 0), const0_rtx);
6404 return gen_rtx_NOT (0, x);
6407 static rtx
6408 and_reg_cond (old, x, add)
6409 rtx old, x;
6410 int add;
6412 rtx op0, op1;
6414 if (GET_RTX_CLASS (GET_CODE (old)) == '<')
6416 if (GET_RTX_CLASS (GET_CODE (x)) == '<'
6417 && GET_CODE (x) == reverse_condition (GET_CODE (old))
6418 && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
6419 return const0_rtx;
6420 if (GET_CODE (x) == GET_CODE (old)
6421 && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
6422 return old;
6423 if (! add)
6424 return old;
6425 return gen_rtx_AND (0, old, x);
6428 switch (GET_CODE (old))
6430 case IOR:
6431 op0 = and_reg_cond (XEXP (old, 0), x, 0);
6432 op1 = and_reg_cond (XEXP (old, 1), x, 0);
6433 if (op0 != XEXP (old, 0) || op1 != XEXP (old, 1))
6435 if (op0 == const0_rtx)
6436 return op1;
6437 if (op1 == const0_rtx)
6438 return op0;
6439 if (op0 == const1_rtx || op1 == const1_rtx)
6440 return const1_rtx;
6441 if (op0 == XEXP (old, 0))
6442 op0 = gen_rtx_AND (0, op0, x);
6443 else
6444 op1 = gen_rtx_AND (0, op1, x);
6445 return gen_rtx_IOR (0, op0, op1);
6447 if (! add)
6448 return old;
6449 return gen_rtx_AND (0, old, x);
6451 case AND:
6452 op0 = and_reg_cond (XEXP (old, 0), x, 0);
6453 op1 = and_reg_cond (XEXP (old, 1), x, 0);
6454 if (op0 != XEXP (old, 0) || op1 != XEXP (old, 1))
6456 if (op0 == const1_rtx)
6457 return op1;
6458 if (op1 == const1_rtx)
6459 return op0;
6460 if (op0 == const0_rtx || op1 == const0_rtx)
6461 return const0_rtx;
6462 if (op0 == XEXP (old, 0))
6463 op0 = gen_rtx_AND (0, op0, x);
6464 else
6465 op1 = gen_rtx_AND (0, op1, x);
6466 return gen_rtx_AND (0, op0, op1);
6468 if (! add)
6469 return old;
6471 /* If X is identical to one of the existing terms of the AND,
6472 then just return what we already have. */
6473 /* ??? There really should be some sort of recursive check here in
6474 case there are nested ANDs. */
6475 if ((GET_CODE (XEXP (old, 0)) == GET_CODE (x)
6476 && REGNO (XEXP (XEXP (old, 0), 0)) == REGNO (XEXP (x, 0)))
6477 || (GET_CODE (XEXP (old, 1)) == GET_CODE (x)
6478 && REGNO (XEXP (XEXP (old, 1), 0)) == REGNO (XEXP (x, 0))))
6479 return old;
6481 return gen_rtx_AND (0, old, x);
6483 case NOT:
6484 op0 = ior_reg_cond (XEXP (old, 0), not_reg_cond (x), 0);
6485 if (op0 != XEXP (old, 0))
6486 return not_reg_cond (op0);
6487 if (! add)
6488 return old;
6489 return gen_rtx_AND (0, old, x);
6491 default:
6492 abort ();
6496 /* Given a condition X, remove references to reg REGNO and return the
6497 new condition. The removal will be done so that all conditions
6498 involving REGNO are considered to evaluate to false. This function
6499 is used when the value of REGNO changes. */
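/* For example (illustrative), with REGNO being p1:
     elim_reg_cond ((ior (eq p1 0) (ne p2 0)), p1)  => (ne p2 0)
   since every test of p1 is replaced by const0_rtx and the IOR then
   simplifies.  */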
6501 static rtx
6502 elim_reg_cond (x, regno)
6503 rtx x;
6504 unsigned int regno;
6506 rtx op0, op1;
6508 if (GET_RTX_CLASS (GET_CODE (x)) == '<')
6510 if (REGNO (XEXP (x, 0)) == regno)
6511 return const0_rtx;
6512 return x;
6515 switch (GET_CODE (x))
6517 case AND:
6518 op0 = elim_reg_cond (XEXP (x, 0), regno);
6519 op1 = elim_reg_cond (XEXP (x, 1), regno);
6520 if (op0 == const0_rtx || op1 == const0_rtx)
6521 return const0_rtx;
6522 if (op0 == const1_rtx)
6523 return op1;
6524 if (op1 == const1_rtx)
6525 return op0;
6526 if (op0 == XEXP (x, 0) && op1 == XEXP (x, 1))
6527 return x;
6528 return gen_rtx_AND (0, op0, op1);
6530 case IOR:
6531 op0 = elim_reg_cond (XEXP (x, 0), regno);
6532 op1 = elim_reg_cond (XEXP (x, 1), regno);
6533 if (op0 == const1_rtx || op1 == const1_rtx)
6534 return const1_rtx;
6535 if (op0 == const0_rtx)
6536 return op1;
6537 if (op1 == const0_rtx)
6538 return op0;
6539 if (op0 == XEXP (x, 0) && op1 == XEXP (x, 1))
6540 return x;
6541 return gen_rtx_IOR (0, op0, op1);
6543 case NOT:
6544 op0 = elim_reg_cond (XEXP (x, 0), regno);
6545 if (op0 == const0_rtx)
6546 return const1_rtx;
6547 if (op0 == const1_rtx)
6548 return const0_rtx;
6549 if (op0 != XEXP (x, 0))
6550 return not_reg_cond (op0);
6551 return x;
6553 default:
6554 abort ();
6557 #endif /* HAVE_conditional_execution */
6559 #ifdef AUTO_INC_DEC
6561 /* Try to substitute the auto-inc expression INC as the address inside
6562 MEM which occurs in INSN. Currently, the address of MEM is an expression
6563 involving INCR_REG, and INCR is the next use of INCR_REG; it is an insn
6564 that has a single set whose source is a PLUS of INCR_REG and something
6565 else. */
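/* A sketch of the simple case (hypothetical RTL, on a POST_INC target):
     INSN:  (set (reg 3) (mem (reg 1)))
     INCR:  (set (reg 1) (plus (reg 1) (const_int 4)))
   becomes
     INSN:  (set (reg 3) (mem (post_inc (reg 1))))
   and INCR is reduced to a copy of (reg 1) into itself, which is then
   deleted below.  */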
6567 static void
6568 attempt_auto_inc (pbi, inc, insn, mem, incr, incr_reg)
6569 struct propagate_block_info *pbi;
6570 rtx inc, insn, mem, incr, incr_reg;
6572 int regno = REGNO (incr_reg);
6573 rtx set = single_set (incr);
6574 rtx q = SET_DEST (set);
6575 rtx y = SET_SRC (set);
6576 int opnum = XEXP (y, 0) == incr_reg ? 0 : 1;
6578 /* Make sure this reg appears only once in this insn. */
6579 if (count_occurrences (PATTERN (insn), incr_reg, 1) != 1)
6580 return;
6582 if (dead_or_set_p (incr, incr_reg)
6583 /* Mustn't autoinc an eliminable register. */
6584 && (regno >= FIRST_PSEUDO_REGISTER
6585 || ! TEST_HARD_REG_BIT (elim_reg_set, regno)))
6587 /* This is the simple case. Try to make the auto-inc. If
6588 we can't, we are done. Otherwise, we will do any
6589 needed updates below. */
6590 if (! validate_change (insn, &XEXP (mem, 0), inc, 0))
6591 return;
6593 else if (GET_CODE (q) == REG
6594 /* PREV_INSN used here to check the semi-open interval
6595 [insn,incr). */
6596 && ! reg_used_between_p (q, PREV_INSN (insn), incr)
6597 /* We must also check for sets of q as q may be
6598 a call clobbered hard register and there may
6599 be a call between PREV_INSN (insn) and incr. */
6600 && ! reg_set_between_p (q, PREV_INSN (insn), incr))
6602 /* We have *p followed sometime later by q = p+size.
6603 Both p and q must be live afterward,
6604 and q is not used between INSN and its assignment.
6605 Change it to q = p, ...*q..., q = q+size.
6606 Then fall into the usual case. */
6607 rtx insns, temp;
6609 start_sequence ();
6610 emit_move_insn (q, incr_reg);
6611 insns = get_insns ();
6612 end_sequence ();
6614 if (basic_block_for_insn)
6615 for (temp = insns; temp; temp = NEXT_INSN (temp))
6616 set_block_for_insn (temp, pbi->bb);
6618 /* If we can't make the auto-inc, or can't make the
6619 replacement into Y, exit. There's no point in making
6620 the change below if we can't do the auto-inc and doing
6621 so is not correct in the pre-inc case. */
6623 XEXP (inc, 0) = q;
6624 validate_change (insn, &XEXP (mem, 0), inc, 1);
6625 validate_change (incr, &XEXP (y, opnum), q, 1);
6626 if (! apply_change_group ())
6627 return;
6629 /* We now know we'll be doing this change, so emit the
6630 new insn(s) and do the updates. */
6631 emit_insns_before (insns, insn);
6633 if (pbi->bb->head == insn)
6634 pbi->bb->head = insns;
6636 /* INCR will become a NOTE and INSN won't contain a
6637 use of INCR_REG. If a use of INCR_REG was just placed in
6638 the insn before INSN, make that the next use.
6639 Otherwise, invalidate it. */
6640 if (GET_CODE (PREV_INSN (insn)) == INSN
6641 && GET_CODE (PATTERN (PREV_INSN (insn))) == SET
6642 && SET_SRC (PATTERN (PREV_INSN (insn))) == incr_reg)
6643 pbi->reg_next_use[regno] = PREV_INSN (insn);
6644 else
6645 pbi->reg_next_use[regno] = 0;
6647 incr_reg = q;
6648 regno = REGNO (q);
6650 /* REGNO is now used in INCR which is below INSN, but
6651 it previously wasn't live here. If we don't mark
6652 it as live, we'll put a REG_DEAD note for it
6653 on this insn, which is incorrect. */
6654 SET_REGNO_REG_SET (pbi->reg_live, regno);
6656 /* If there are any calls between INSN and INCR, show
6657 that REGNO now crosses them. */
6658 for (temp = insn; temp != incr; temp = NEXT_INSN (temp))
6659 if (GET_CODE (temp) == CALL_INSN)
6660 REG_N_CALLS_CROSSED (regno)++;
6662 else
6663 return;
6665 /* If we haven't returned, it means we were able to make the
6666 auto-inc, so update the status. First, record that this insn
6667 has an implicit side effect. */
6669 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, incr_reg, REG_NOTES (insn));
6671 /* Modify the old increment-insn to simply copy
6672 the already-incremented value of our register. */
6673 if (! validate_change (incr, &SET_SRC (set), incr_reg, 0))
6674 abort ();
6676 /* If that makes it a no-op (copying the register into itself) delete
6677 it so it won't appear to be a "use" and a "set" of this
6678 register. */
6679 if (REGNO (SET_DEST (set)) == REGNO (incr_reg))
6681 /* If the original source was dead, it's dead now. */
6682 rtx note;
6684 while ((note = find_reg_note (incr, REG_DEAD, NULL_RTX)) != NULL_RTX)
6686 remove_note (incr, note);
6687 if (XEXP (note, 0) != incr_reg)
6688 CLEAR_REGNO_REG_SET (pbi->reg_live, REGNO (XEXP (note, 0)));
6691 PUT_CODE (incr, NOTE);
6692 NOTE_LINE_NUMBER (incr) = NOTE_INSN_DELETED;
6693 NOTE_SOURCE_FILE (incr) = 0;
6696 if (regno >= FIRST_PSEUDO_REGISTER)
6698 /* Count an extra reference to the reg. When a reg is
6699 incremented, spilling it is worse, so we want to make
6700 that less likely. */
6701 REG_FREQ (regno) += (optimize_size || !pbi->bb->frequency
6702 ? 1 : pbi->bb->frequency);
6704 /* Count the increment as a setting of the register,
6705 even though it isn't a SET in rtl. */
6706 REG_N_SETS (regno)++;
6710 /* X is a MEM found in INSN. See if we can convert it into an auto-increment
6711 reference. */
6713 static void
6714 find_auto_inc (pbi, x, insn)
6715 struct propagate_block_info *pbi;
6716 rtx x;
6717 rtx insn;
6719 rtx addr = XEXP (x, 0);
6720 HOST_WIDE_INT offset = 0;
6721 rtx set, y, incr, inc_val;
6722 int regno;
6723 int size = GET_MODE_SIZE (GET_MODE (x));
6725 if (GET_CODE (insn) == JUMP_INSN)
6726 return;
6728 /* Here we detect use of an index register which might be good for
6729 postincrement, postdecrement, preincrement, or predecrement. */
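/* With the increment a CONST_INT N and the original address
   (plus ADDR (const_int OFFSET)), the candidates checked below are:
     POST_INC:  N == size,  OFFSET == 0
     POST_DEC:  N == -size, OFFSET == 0
     PRE_INC:   N == size,  OFFSET == size
     PRE_DEC:   N == -size, OFFSET == -size
   where size is the width in bytes of the MEM being accessed.  */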
6731 if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
6732 offset = INTVAL (XEXP (addr, 1)), addr = XEXP (addr, 0);
6734 if (GET_CODE (addr) != REG)
6735 return;
6737 regno = REGNO (addr);
6739 /* Is the next use an increment that might make auto-increment? */
6740 incr = pbi->reg_next_use[regno];
6741 if (incr == 0 || BLOCK_NUM (incr) != BLOCK_NUM (insn))
6742 return;
6743 set = single_set (incr);
6744 if (set == 0 || GET_CODE (set) != SET)
6745 return;
6746 y = SET_SRC (set);
6748 if (GET_CODE (y) != PLUS)
6749 return;
6751 if (REG_P (XEXP (y, 0)) && REGNO (XEXP (y, 0)) == REGNO (addr))
6752 inc_val = XEXP (y, 1);
6753 else if (REG_P (XEXP (y, 1)) && REGNO (XEXP (y, 1)) == REGNO (addr))
6754 inc_val = XEXP (y, 0);
6755 else
6756 return;
6758 if (GET_CODE (inc_val) == CONST_INT)
6760 if (HAVE_POST_INCREMENT
6761 && (INTVAL (inc_val) == size && offset == 0))
6762 attempt_auto_inc (pbi, gen_rtx_POST_INC (Pmode, addr), insn, x,
6763 incr, addr);
6764 else if (HAVE_POST_DECREMENT
6765 && (INTVAL (inc_val) == -size && offset == 0))
6766 attempt_auto_inc (pbi, gen_rtx_POST_DEC (Pmode, addr), insn, x,
6767 incr, addr);
6768 else if (HAVE_PRE_INCREMENT
6769 && (INTVAL (inc_val) == size && offset == size))
6770 attempt_auto_inc (pbi, gen_rtx_PRE_INC (Pmode, addr), insn, x,
6771 incr, addr);
6772 else if (HAVE_PRE_DECREMENT
6773 && (INTVAL (inc_val) == -size && offset == -size))
6774 attempt_auto_inc (pbi, gen_rtx_PRE_DEC (Pmode, addr), insn, x,
6775 incr, addr);
6776 else if (HAVE_POST_MODIFY_DISP && offset == 0)
6777 attempt_auto_inc (pbi, gen_rtx_POST_MODIFY (Pmode, addr,
6778 gen_rtx_PLUS (Pmode,
6779 addr,
6780 inc_val)),
6781 insn, x, incr, addr);
6783 else if (GET_CODE (inc_val) == REG
6784 && ! reg_set_between_p (inc_val, PREV_INSN (insn),
6785 NEXT_INSN (incr)))
6788 if (HAVE_POST_MODIFY_REG && offset == 0)
6789 attempt_auto_inc (pbi, gen_rtx_POST_MODIFY (Pmode, addr,
6790 gen_rtx_PLUS (Pmode,
6791 addr,
6792 inc_val)),
6793 insn, x, incr, addr);
6797 #endif /* AUTO_INC_DEC */
6799 static void
6800 mark_used_reg (pbi, reg, cond, insn)
6801 struct propagate_block_info *pbi;
6802 rtx reg;
6803 rtx cond ATTRIBUTE_UNUSED;
6804 rtx insn;
6806 unsigned int regno_first, regno_last, i;
6807 int some_was_live, some_was_dead, some_not_set;
6809 regno_last = regno_first = REGNO (reg);
6810 if (regno_first < FIRST_PSEUDO_REGISTER)
6811 regno_last += HARD_REGNO_NREGS (regno_first, GET_MODE (reg)) - 1;
6813 /* Find out if any of this register is live after this instruction. */
6814 some_was_live = some_was_dead = 0;
6815 for (i = regno_first; i <= regno_last; ++i)
6817 int needed_regno = REGNO_REG_SET_P (pbi->reg_live, i);
6818 some_was_live |= needed_regno;
6819 some_was_dead |= ! needed_regno;
6822 /* Find out if any of the register was set this insn. */
6823 some_not_set = 0;
6824 for (i = regno_first; i <= regno_last; ++i)
6825 some_not_set |= ! REGNO_REG_SET_P (pbi->new_set, i);
6827 if (pbi->flags & (PROP_LOG_LINKS | PROP_AUTOINC))
6829 /* Record where each reg is used, so when the reg is set we know
6830 the next insn that uses it. */
6831 pbi->reg_next_use[regno_first] = insn;
6834 if (pbi->flags & PROP_REG_INFO)
6836 if (regno_first < FIRST_PSEUDO_REGISTER)
6838 /* If this is a register we are going to try to eliminate,
6839 don't mark it live here. If we are successful in
6840 eliminating it, it need not be live unless it is used for
6841 pseudos, in which case it will have been set live when it
6842 was allocated to the pseudos. If the register will not
6843 be eliminated, reload will set it live at that point.
6845 Otherwise, record that this function uses this register. */
6846 /* ??? The PPC backend tries to "eliminate" the pic
6847 register to itself. This should be fixed. In the
6848 meantime, hack around it. */
6850 if (! (TEST_HARD_REG_BIT (elim_reg_set, regno_first)
6851 && (regno_first == FRAME_POINTER_REGNUM
6852 || regno_first == ARG_POINTER_REGNUM)))
6853 for (i = regno_first; i <= regno_last; ++i)
6854 regs_ever_live[i] = 1;
6856 else
6858 /* Keep track of which basic block each reg appears in. */
6860 register int blocknum = pbi->bb->index;
6861 if (REG_BASIC_BLOCK (regno_first) == REG_BLOCK_UNKNOWN)
6862 REG_BASIC_BLOCK (regno_first) = blocknum;
6863 else if (REG_BASIC_BLOCK (regno_first) != blocknum)
6864 REG_BASIC_BLOCK (regno_first) = REG_BLOCK_GLOBAL;
6866 /* Count (weighted) number of uses of each reg. */
6867 REG_FREQ (regno_first)
6868 += (optimize_size || !pbi->bb->frequency ? 1 : pbi->bb->frequency);
6869 REG_N_REFS (regno_first)++;
6873 /* Record and count the insns in which a reg dies. If it is used in
6874 this insn and was dead below the insn then it dies in this insn.
6875 If it was set in this insn, we do not make a REG_DEAD note;
6876 likewise if we already made such a note. */
6877 if ((pbi->flags & (PROP_DEATH_NOTES | PROP_REG_INFO))
6878 && some_was_dead
6879 && some_not_set)
6881 /* Check for the case where the register dying partially
6882 overlaps the register set by this insn. */
6883 if (regno_first != regno_last)
6884 for (i = regno_first; i <= regno_last; ++i)
6885 some_was_live |= REGNO_REG_SET_P (pbi->new_set, i);
6887 /* If none of the words in X is needed, make a REG_DEAD note.
6888 Otherwise, we must make partial REG_DEAD notes. */
6889 if (! some_was_live)
6891 if ((pbi->flags & PROP_DEATH_NOTES)
6892 && ! find_regno_note (insn, REG_DEAD, regno_first))
6893 REG_NOTES (insn)
6894 = alloc_EXPR_LIST (REG_DEAD, reg, REG_NOTES (insn));
6896 if (pbi->flags & PROP_REG_INFO)
6897 REG_N_DEATHS (regno_first)++;
6899 else
6901 /* Don't make a REG_DEAD note for a part of a register
6902 that is set in the insn. */
6903 for (i = regno_first; i <= regno_last; ++i)
6904 if (! REGNO_REG_SET_P (pbi->reg_live, i)
6905 && ! dead_or_set_regno_p (insn, i))
6906 REG_NOTES (insn)
6907 = alloc_EXPR_LIST (REG_DEAD,
6908 gen_rtx_REG (reg_raw_mode[i], i),
6909 REG_NOTES (insn));
6913 /* Mark the register as being live. */
6914 for (i = regno_first; i <= regno_last; ++i)
6916 SET_REGNO_REG_SET (pbi->reg_live, i);
6918 #ifdef HAVE_conditional_execution
6919 /* If this is a conditional use, record that fact. If it is later
6920 conditionally set, we'll know to kill the register. */
6921 if (cond != NULL_RTX)
6923 splay_tree_node node;
6924 struct reg_cond_life_info *rcli;
6925 rtx ncond;
6927 if (some_was_live)
6929 node = splay_tree_lookup (pbi->reg_cond_dead, i);
6930 if (node == NULL)
6932 /* The register was unconditionally live previously.
6933 No need to do anything. */
6935 else
6937 /* The register was conditionally live previously.
6938 Subtract the new life cond from the old death cond. */
6939 rcli = (struct reg_cond_life_info *) node->value;
6940 ncond = rcli->condition;
6941 ncond = and_reg_cond (ncond, not_reg_cond (cond), 1);
6943 /* If the register is now unconditionally live,
6944 remove the entry in the splay_tree. */
6945 if (ncond == const0_rtx)
6946 splay_tree_remove (pbi->reg_cond_dead, i);
6947 else
6949 rcli->condition = ncond;
6950 SET_REGNO_REG_SET (pbi->reg_cond_reg,
6951 REGNO (XEXP (cond, 0)));
6955 else
6957 /* The register was not previously live at all. Record
6958 the condition under which it is still dead. */
6959 rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
6960 rcli->condition = not_reg_cond (cond);
6961 rcli->stores = const0_rtx;
6962 rcli->orig_condition = const0_rtx;
6963 splay_tree_insert (pbi->reg_cond_dead, i,
6964 (splay_tree_value) rcli);
6966 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));
6969 else if (some_was_live)
6971 /* The register may have been conditionally live previously, but
6972 is now unconditionally live. Remove it from the conditionally
6973 dead list, so that a conditional set won't cause us to think
6974 it dead. */
6975 splay_tree_remove (pbi->reg_cond_dead, i);
6977 #endif
6981 /* Scan expression X and store a 1-bit in NEW_LIVE for each reg it uses.
6982 This is done assuming the registers needed from X are those that
6983 have 1-bits in PBI->REG_LIVE.
6985 INSN is the containing instruction. If INSN is dead, this function
6986 is not called. */
6988 static void
6989 mark_used_regs (pbi, x, cond, insn)
6990 struct propagate_block_info *pbi;
6991 rtx x, cond, insn;
6993 register RTX_CODE code;
6994 register int regno;
6995 int flags = pbi->flags;
6997 retry:
6998 code = GET_CODE (x);
6999 switch (code)
7001 case LABEL_REF:
7002 case SYMBOL_REF:
7003 case CONST_INT:
7004 case CONST:
7005 case CONST_DOUBLE:
7006 case PC:
7007 case ADDR_VEC:
7008 case ADDR_DIFF_VEC:
7009 return;
7011 #ifdef HAVE_cc0
7012 case CC0:
7013 pbi->cc0_live = 1;
7014 return;
7015 #endif
7017 case CLOBBER:
7018 /* If we are clobbering a MEM, mark any registers inside the address
7019 as being used. */
7020 if (GET_CODE (XEXP (x, 0)) == MEM)
7021 mark_used_regs (pbi, XEXP (XEXP (x, 0), 0), cond, insn);
7022 return;
7024 case MEM:
7025 /* Don't bother watching stores to mems if this is not the
7026 final pass. We'll not be deleting dead stores this round. */
7027 if (optimize && (flags & PROP_SCAN_DEAD_CODE))
7029 /* Invalidate the data for the last MEM stored, but only if MEM is
7030 something that can be stored into. */
7031 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
7032 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
7033 /* Needn't clear the memory set list. */
7035 else
7037 rtx temp = pbi->mem_set_list;
7038 rtx prev = NULL_RTX;
7039 rtx next;
7041 while (temp)
7043 next = XEXP (temp, 1);
7044 if (anti_dependence (XEXP (temp, 0), x))
7046 /* Splice temp out of the list. */
7047 if (prev)
7048 XEXP (prev, 1) = next;
7049 else
7050 pbi->mem_set_list = next;
7051 free_EXPR_LIST_node (temp);
7052 pbi->mem_set_list_len--;
7054 else
7055 prev = temp;
7056 temp = next;
7060 /* If the memory reference had embedded side effects (autoincrement
7061 address modes), then we may need to kill some entries on the
7062 memory set list. */
7063 if (insn)
7064 invalidate_mems_from_autoinc (pbi, insn);
7067 #ifdef AUTO_INC_DEC
7068 if (flags & PROP_AUTOINC)
7069 find_auto_inc (pbi, x, insn);
7070 #endif
7071 break;
7073 case SUBREG:
7074 #ifdef CLASS_CANNOT_CHANGE_MODE
7075 if (GET_CODE (SUBREG_REG (x)) == REG
7076 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER
7077 && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (x),
7078 GET_MODE (SUBREG_REG (x))))
7079 REG_CHANGES_MODE (REGNO (SUBREG_REG (x))) = 1;
7080 #endif
7082 /* While we're here, optimize this case. */
7083 x = SUBREG_REG (x);
7084 if (GET_CODE (x) != REG)
7085 goto retry;
7086 /* Fall through. */
7088 case REG:
7089 /* Seeing a register used (other than being set) => mark it as needed. */
7090 mark_used_reg (pbi, x, cond, insn);
7091 return;
7093 case SET:
7095 register rtx testreg = SET_DEST (x);
7096 int mark_dest = 0;
7098 /* If storing into MEM, don't show it as being used. But do
7099 show the address as being used. */
7100 if (GET_CODE (testreg) == MEM)
7102 #ifdef AUTO_INC_DEC
7103 if (flags & PROP_AUTOINC)
7104 find_auto_inc (pbi, testreg, insn);
7105 #endif
7106 mark_used_regs (pbi, XEXP (testreg, 0), cond, insn);
7107 mark_used_regs (pbi, SET_SRC (x), cond, insn);
7108 return;
7111 /* Storing in STRICT_LOW_PART is like storing in a reg
7112 in that this SET might be dead, so ignore it in TESTREG,
7113 but in some other ways it is like using the reg.
7115 Storing in a SUBREG or a bit field is like storing the entire
7116 register in that if the register's value is not used
7117 then this SET is not needed. */
7118 while (GET_CODE (testreg) == STRICT_LOW_PART
7119 || GET_CODE (testreg) == ZERO_EXTRACT
7120 || GET_CODE (testreg) == SIGN_EXTRACT
7121 || GET_CODE (testreg) == SUBREG)
7123 #ifdef CLASS_CANNOT_CHANGE_MODE
7124 if (GET_CODE (testreg) == SUBREG
7125 && GET_CODE (SUBREG_REG (testreg)) == REG
7126 && REGNO (SUBREG_REG (testreg)) >= FIRST_PSEUDO_REGISTER
7127 && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (SUBREG_REG (testreg)),
7128 GET_MODE (testreg)))
7129 REG_CHANGES_MODE (REGNO (SUBREG_REG (testreg))) = 1;
7130 #endif
7132 /* Modifying a single register in an alternate mode
7133 does not use any of the old value. But these other
7134 ways of storing in a register do use the old value. */
7135 if (GET_CODE (testreg) == SUBREG
7136 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
7138 else
7139 mark_dest = 1;
7141 testreg = XEXP (testreg, 0);
7144 /* If this is a store into a register or group of registers,
7145 recursively scan the value being stored. */
7147 if ((GET_CODE (testreg) == PARALLEL
7148 && GET_MODE (testreg) == BLKmode)
7149 || (GET_CODE (testreg) == REG
7150 && (regno = REGNO (testreg),
7151 ! (regno == FRAME_POINTER_REGNUM
7152 && (! reload_completed || frame_pointer_needed)))
7153 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
7154 && ! (regno == HARD_FRAME_POINTER_REGNUM
7155 && (! reload_completed || frame_pointer_needed))
7156 #endif
7157 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
7158 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
7159 #endif
7162 if (mark_dest)
7163 mark_used_regs (pbi, SET_DEST (x), cond, insn);
7164 mark_used_regs (pbi, SET_SRC (x), cond, insn);
7165 return;
7168 break;
7170 case ASM_OPERANDS:
7171 case UNSPEC_VOLATILE:
7172 case TRAP_IF:
7173 case ASM_INPUT:
7175 /* Traditional and volatile asm instructions must be considered to use
7176 and clobber all hard registers, all pseudo-registers and all of
7177 memory. So must TRAP_IF and UNSPEC_VOLATILE operations.
7179 Consider for instance a volatile asm that changes the fpu rounding
7180 mode. An insn should not be moved across this even if it only uses
7181 pseudo-regs because it might give an incorrectly rounded result.
7183 ?!? Unfortunately, marking all hard registers as live causes massive
7184 problems for the register allocator and marking all pseudos as live
7185 creates mountains of uninitialized variable warnings.
7187 So for now, just clear the memory set list and mark any regs
7188 we can find in ASM_OPERANDS as used. */
7189 if (code != ASM_OPERANDS || MEM_VOLATILE_P (x))
7191 free_EXPR_LIST_list (&pbi->mem_set_list);
7192 pbi->mem_set_list_len = 0;
7195 /* For all ASM_OPERANDS, we must traverse the vector of input operands.
7196 We cannot just fall through here, since then we would be confused
7197 by the ASM_INPUT rtx inside ASM_OPERANDS, which does not indicate
7198 a traditional asm as it would in its normal usage. */
7199 if (code == ASM_OPERANDS)
7201 int j;
7203 for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
7204 mark_used_regs (pbi, ASM_OPERANDS_INPUT (x, j), cond, insn);
7206 break;
7209 case COND_EXEC:
7210 if (cond != NULL_RTX)
7211 abort ();
7213 mark_used_regs (pbi, COND_EXEC_TEST (x), NULL_RTX, insn);
7215 cond = COND_EXEC_TEST (x);
7216 x = COND_EXEC_CODE (x);
7217 goto retry;
7219 case PHI:
7220 /* We _do_not_ want to scan operands of phi nodes. Operands of
7221 a phi function are evaluated only when control reaches this
7222 block along a particular edge. Therefore, regs that appear
7223 as arguments to phi should not be added to the global live at
7224 start. */
7225 return;
7227 default:
7228 break;
7231 /* Recursively scan the operands of this expression. */
7234 register const char *fmt = GET_RTX_FORMAT (code);
7235 register int i;
7237 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7239 if (fmt[i] == 'e')
7241 /* Tail recursive case: save a function call level. */
7242 if (i == 0)
7244 x = XEXP (x, 0);
7245 goto retry;
7247 mark_used_regs (pbi, XEXP (x, i), cond, insn);
7249 else if (fmt[i] == 'E')
7251 register int j;
7252 for (j = 0; j < XVECLEN (x, i); j++)
7253 mark_used_regs (pbi, XVECEXP (x, i, j), cond, insn);
7259 #ifdef AUTO_INC_DEC
7261 static int
7262 try_pre_increment_1 (pbi, insn)
7263 struct propagate_block_info *pbi;
7264 rtx insn;
7266 /* Find the next use of this reg. If in same basic block,
7267 make it do pre-increment or pre-decrement if appropriate. */
7268 rtx x = single_set (insn);
7269 HOST_WIDE_INT amount = ((GET_CODE (SET_SRC (x)) == PLUS ? 1 : -1)
7270 * INTVAL (XEXP (SET_SRC (x), 1)));
7271 int regno = REGNO (SET_DEST (x));
7272 rtx y = pbi->reg_next_use[regno];
7273 if (y != 0
7274 && SET_DEST (x) != stack_pointer_rtx
7275 && BLOCK_NUM (y) == BLOCK_NUM (insn)
7276 /* Don't do this if the reg dies, or gets set in y; a standard addressing
7277 mode would be better. */
7278 && ! dead_or_set_p (y, SET_DEST (x))
7279 && try_pre_increment (y, SET_DEST (x), amount))
7281 /* We have found a suitable auto-increment and already changed
7282 insn Y to do it. So flush this increment instruction. */
7283 propagate_block_delete_insn (pbi->bb, insn);
7285 /* Count a reference to this reg for the increment insn we are
7286 deleting. When a reg is incremented, spilling it is worse,
7287 so we want to make that less likely. */
7288 if (regno >= FIRST_PSEUDO_REGISTER)
7290 REG_FREQ (regno) += (optimize_size || !pbi->bb->frequency
7291 ? 1 : pbi->bb->frequency);
7292 REG_N_SETS (regno)++;
7295 /* Flush any remembered memories depending on the value of
7296 the incremented register. */
7297 invalidate_mems_from_set (pbi, SET_DEST (x));
7299 return 1;
7301 return 0;
7304 /* Try to change INSN so that it does pre-increment or pre-decrement
7305 addressing on register REG in order to add AMOUNT to REG.
7306 AMOUNT is negative for pre-decrement.
7307 Returns 1 if the change could be made.
7308 This checks all about the validity of the result of modifying INSN. */
7310 static int
7311 try_pre_increment (insn, reg, amount)
7312 rtx insn, reg;
7313 HOST_WIDE_INT amount;
7315 register rtx use;
7317 /* Nonzero if we can try to make a pre-increment or pre-decrement.
7318 For example, addl $4,r1; movl (r1),... can become movl +(r1),... */
7319 int pre_ok = 0;
7320 /* Nonzero if we can try to make a post-increment or post-decrement.
7321 For example, addl $4,r1; movl -4(r1),... can become movl (r1)+,...
7322 It is possible for both PRE_OK and POST_OK to be nonzero if the machine
7323 supports both pre-inc and post-inc, or both pre-dec and post-dec. */
7324 int post_ok = 0;
7326 /* Nonzero if the opportunity actually requires post-inc or post-dec. */
7327 int do_post = 0;
7329 /* From the sign of increment, see which possibilities are conceivable
7330 on this target machine. */
7331 if (HAVE_PRE_INCREMENT && amount > 0)
7332 pre_ok = 1;
7333 if (HAVE_POST_INCREMENT && amount > 0)
7334 post_ok = 1;
7336 if (HAVE_PRE_DECREMENT && amount < 0)
7337 pre_ok = 1;
7338 if (HAVE_POST_DECREMENT && amount < 0)
7339 post_ok = 1;
7341 if (! (pre_ok || post_ok))
7342 return 0;
7344 /* It is not safe to add a side effect to a jump insn
7345 because if the incremented register is spilled and must be reloaded
7346 there would be no way to store the incremented value back in memory. */
7348 if (GET_CODE (insn) == JUMP_INSN)
7349 return 0;
7351 use = 0;
7352 if (pre_ok)
7353 use = find_use_as_address (PATTERN (insn), reg, 0);
7354 if (post_ok && (use == 0 || use == (rtx) 1))
7356 use = find_use_as_address (PATTERN (insn), reg, -amount);
7357 do_post = 1;
7360 if (use == 0 || use == (rtx) 1)
7361 return 0;
7363 if (GET_MODE_SIZE (GET_MODE (use)) != (amount > 0 ? amount : - amount))
7364 return 0;
7366 /* See if this combination of instruction and addressing mode exists. */
7367 if (! validate_change (insn, &XEXP (use, 0),
7368 gen_rtx_fmt_e (amount > 0
7369 ? (do_post ? POST_INC : PRE_INC)
7370 : (do_post ? POST_DEC : PRE_DEC),
7371 Pmode, reg), 0))
7372 return 0;
7374 /* Record that this insn now has an implicit side effect on X. */
7375 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, reg, REG_NOTES (insn));
7376 return 1;
7379 #endif /* AUTO_INC_DEC */
7381 /* Find the place in the rtx X where REG is used as a memory address.
7382 Return the MEM rtx that so uses it.
7383 If PLUSCONST is nonzero, search instead for a memory address equivalent to
7384 (plus REG (const_int PLUSCONST)).
7386 If such an address does not appear, return 0.
7387 If REG appears more than once, or is used other than in such an address,
7388 return (rtx)1. */
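/* For example (illustrative): searching
     (set (reg 3) (mem:SI (plus (reg 1) (const_int 4))))
   for (reg 1) with PLUSCONST 4 returns the MEM; with PLUSCONST 0 it
   returns (rtx) 1, because the register is used, but not as the bare
   address of a MEM.  */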
7390 rtx
7391 find_use_as_address (x, reg, plusconst)
7392 register rtx x;
7393 rtx reg;
7394 HOST_WIDE_INT plusconst;
7396 enum rtx_code code = GET_CODE (x);
7397 const char *fmt = GET_RTX_FORMAT (code);
7398 register int i;
7399 register rtx value = 0;
7400 register rtx tem;
7402 if (code == MEM && XEXP (x, 0) == reg && plusconst == 0)
7403 return x;
7405 if (code == MEM && GET_CODE (XEXP (x, 0)) == PLUS
7406 && XEXP (XEXP (x, 0), 0) == reg
7407 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
7408 && INTVAL (XEXP (XEXP (x, 0), 1)) == plusconst)
7409 return x;
7411 if (code == SIGN_EXTRACT || code == ZERO_EXTRACT)
7413 /* If REG occurs inside a MEM used in a bit-field reference,
7414 that is unacceptable. */
7415 if (find_use_as_address (XEXP (x, 0), reg, 0) != 0)
7416 return (rtx) (HOST_WIDE_INT) 1;
7419 if (x == reg)
7420 return (rtx) (HOST_WIDE_INT) 1;
7422 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7424 if (fmt[i] == 'e')
7426 tem = find_use_as_address (XEXP (x, i), reg, plusconst);
7427 if (value == 0)
7428 value = tem;
7429 else if (tem != 0)
7430 return (rtx) (HOST_WIDE_INT) 1;
7432 else if (fmt[i] == 'E')
7434 register int j;
7435 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7437 tem = find_use_as_address (XVECEXP (x, i, j), reg, plusconst);
7438 if (value == 0)
7439 value = tem;
7440 else if (tem != 0)
7441 return (rtx) (HOST_WIDE_INT) 1;
7446 return value;
7449 /* Write information about registers and basic blocks into FILE.
7450 This is part of making a debugging dump. */
7452 void
7453 dump_regset (r, outf)
7454 regset r;
7455 FILE *outf;
7457 int i;
7458 if (r == NULL)
7460 fputs (" (nil)", outf);
7461 return;
7464 EXECUTE_IF_SET_IN_REG_SET (r, 0, i,
7466 fprintf (outf, " %d", i);
7467 if (i < FIRST_PSEUDO_REGISTER)
7468 fprintf (outf, " [%s]",
7469 reg_names[i]);
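/* Sample output (illustrative; register names are target-specific):
     " 0 [ax] 1 [dx] 67"
   Hard registers are followed by their names, pseudos by number only.  */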
7473 /* Print a human-readable representation of R on the standard error
7474 stream. This function is designed to be used from within the
7475 debugger. */
7477 void
7478 debug_regset (r)
7479 regset r;
7481 dump_regset (r, stderr);
7482 putc ('\n', stderr);
7485 void
7486 dump_flow_info (file)
7487 FILE *file;
7489 register int i;
7490 static const char * const reg_class_names[] = REG_CLASS_NAMES;
7492 fprintf (file, "%d registers.\n", max_regno);
7493 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
7494 if (REG_N_REFS (i))
7496 enum reg_class class, altclass;
7497 fprintf (file, "\nRegister %d used %d times across %d insns",
7498 i, REG_N_REFS (i), REG_LIVE_LENGTH (i));
7499 if (REG_BASIC_BLOCK (i) >= 0)
7500 fprintf (file, " in block %d", REG_BASIC_BLOCK (i));
7501 if (REG_N_SETS (i))
7502 fprintf (file, "; set %d time%s", REG_N_SETS (i),
7503 (REG_N_SETS (i) == 1) ? "" : "s");
7504 if (REG_USERVAR_P (regno_reg_rtx[i]))
7505 fprintf (file, "; user var");
7506 if (REG_N_DEATHS (i) != 1)
7507 fprintf (file, "; dies in %d places", REG_N_DEATHS (i));
7508 if (REG_N_CALLS_CROSSED (i) == 1)
7509 fprintf (file, "; crosses 1 call");
7510 else if (REG_N_CALLS_CROSSED (i))
7511 fprintf (file, "; crosses %d calls", REG_N_CALLS_CROSSED (i));
7512 if (PSEUDO_REGNO_BYTES (i) != UNITS_PER_WORD)
7513 fprintf (file, "; %d bytes", PSEUDO_REGNO_BYTES (i));
7514 class = reg_preferred_class (i);
7515 altclass = reg_alternate_class (i);
7516 if (class != GENERAL_REGS || altclass != ALL_REGS)
7518 if (altclass == ALL_REGS || class == ALL_REGS)
7519 fprintf (file, "; pref %s", reg_class_names[(int) class]);
7520 else if (altclass == NO_REGS)
7521 fprintf (file, "; %s or none", reg_class_names[(int) class]);
7522 else
7523 fprintf (file, "; pref %s, else %s",
7524 reg_class_names[(int) class],
7525 reg_class_names[(int) altclass]);
7527 if (REG_POINTER (regno_reg_rtx[i]))
7528 fprintf (file, "; pointer");
7529 fprintf (file, ".\n");
7532 fprintf (file, "\n%d basic blocks, %d edges.\n", n_basic_blocks, n_edges);
7533 for (i = 0; i < n_basic_blocks; i++)
7535 register basic_block bb = BASIC_BLOCK (i);
7536 register edge e;
7538 fprintf (file, "\nBasic block %d: first insn %d, last %d, loop_depth %d, count ",
7539 i, INSN_UID (bb->head), INSN_UID (bb->end), bb->loop_depth);
7540 fprintf (file, HOST_WIDEST_INT_PRINT_DEC, (HOST_WIDEST_INT) bb->count);
7541 fprintf (file, ", freq %i.\n", bb->frequency);
7543 fprintf (file, "Predecessors: ");
7544 for (e = bb->pred; e; e = e->pred_next)
7545 dump_edge_info (file, e, 0);
7547 fprintf (file, "\nSuccessors: ");
7548 for (e = bb->succ; e; e = e->succ_next)
7549 dump_edge_info (file, e, 1);
7551 fprintf (file, "\nRegisters live at start:");
7552 dump_regset (bb->global_live_at_start, file);
7554 fprintf (file, "\nRegisters live at end:");
7555 dump_regset (bb->global_live_at_end, file);
7557 putc ('\n', file);
7560 putc ('\n', file);
7563 void
7564 debug_flow_info ()
7566 dump_flow_info (stderr);
7569 void
7570 dump_edge_info (file, e, do_succ)
7571 FILE *file;
7572 edge e;
7573 int do_succ;
7575 basic_block side = (do_succ ? e->dest : e->src);
7577 if (side == ENTRY_BLOCK_PTR)
7578 fputs (" ENTRY", file);
7579 else if (side == EXIT_BLOCK_PTR)
7580 fputs (" EXIT", file);
7581 else
7582 fprintf (file, " %d", side->index);
7584 if (e->probability)
7585 fprintf (file, " [%.1f%%] ", e->probability * 100.0 / REG_BR_PROB_BASE);
7587 if (e->count)
7589 fprintf (file, " count:");
7590 fprintf (file, HOST_WIDEST_INT_PRINT_DEC, (HOST_WIDEST_INT) e->count);
7593 if (e->flags)
7595 static const char * const bitnames[] = {
7596 "fallthru", "crit", "ab", "abcall", "eh", "fake"
7598 int comma = 0;
7599 int i, flags = e->flags;
7601 fputc (' ', file);
7602 fputc ('(', file);
7603 for (i = 0; flags; i++)
7604 if (flags & (1 << i))
7606 flags &= ~(1 << i);
7608 if (comma)
7609 fputc (',', file);
7610 if (i < (int) ARRAY_SIZE (bitnames))
7611 fputs (bitnames[i], file);
7612 else
7613 fprintf (file, "%d", i);
7614 comma = 1;
7616 fputc (')', file);
7620 /* Print out one basic block with live information at start and end. */
7622 void
7623 dump_bb (bb, outf)
7624 basic_block bb;
7625 FILE *outf;
7627 rtx insn;
7628 rtx last;
7629 edge e;
7631 fprintf (outf, ";; Basic block %d, loop depth %d, count ",
7632 bb->index, bb->loop_depth);
7633 fprintf (outf, HOST_WIDEST_INT_PRINT_DEC, (HOST_WIDEST_INT) bb->count);
7634 putc ('\n', outf);
7636 fputs (";; Predecessors: ", outf);
7637 for (e = bb->pred; e; e = e->pred_next)
7638 dump_edge_info (outf, e, 0);
7639 putc ('\n', outf);
7641 fputs (";; Registers live at start:", outf);
7642 dump_regset (bb->global_live_at_start, outf);
7643 putc ('\n', outf);
7645 for (insn = bb->head, last = NEXT_INSN (bb->end);
7646 insn != last;
7647 insn = NEXT_INSN (insn))
7648 print_rtl_single (outf, insn);
7650 fputs (";; Registers live at end:", outf);
7651 dump_regset (bb->global_live_at_end, outf);
7652 putc ('\n', outf);
7654 fputs (";; Successors: ", outf);
7655 for (e = bb->succ; e; e = e->succ_next)
7656 dump_edge_info (outf, e, 1);
7657 putc ('\n', outf);
7660 void
7661 debug_bb (bb)
7662 basic_block bb;
7664 dump_bb (bb, stderr);
7667 void
7668 debug_bb_n (n)
7669 int n;
7671 dump_bb (BASIC_BLOCK (n), stderr);
/* Like print_rtl, but also print out live information for the start of each
   basic block.  */

void
print_rtl_with_bb (outf, rtx_first)
     FILE *outf;
     rtx rtx_first;
{
  register rtx tmp_rtx;

  if (rtx_first == 0)
    fprintf (outf, "(nil)\n");
  else
    {
      int i;
      enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
      int max_uid = get_max_uid ();
      basic_block *start = (basic_block *)
	xcalloc (max_uid, sizeof (basic_block));
      basic_block *end = (basic_block *)
	xcalloc (max_uid, sizeof (basic_block));
      enum bb_state *in_bb_p = (enum bb_state *)
	xcalloc (max_uid, sizeof (enum bb_state));

      for (i = n_basic_blocks - 1; i >= 0; i--)
	{
	  basic_block bb = BASIC_BLOCK (i);
	  rtx x;

	  start[INSN_UID (bb->head)] = bb;
	  end[INSN_UID (bb->end)] = bb;
	  for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
	    {
	      enum bb_state state = IN_MULTIPLE_BB;

	      if (in_bb_p[INSN_UID (x)] == NOT_IN_BB)
		state = IN_ONE_BB;
	      in_bb_p[INSN_UID (x)] = state;

	      if (x == bb->end)
		break;
	    }
	}

      for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
	{
	  int did_output;
	  basic_block bb;

	  if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
	    {
	      fprintf (outf, ";; Start of basic block %d, registers live:",
		       bb->index);
	      dump_regset (bb->global_live_at_start, outf);
	      putc ('\n', outf);
	    }

	  if (in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB
	      && GET_CODE (tmp_rtx) != NOTE
	      && GET_CODE (tmp_rtx) != BARRIER)
	    fprintf (outf, ";; Insn is not within a basic block\n");
	  else if (in_bb_p[INSN_UID (tmp_rtx)] == IN_MULTIPLE_BB)
	    fprintf (outf, ";; Insn is in multiple basic blocks\n");

	  did_output = print_rtl_single (outf, tmp_rtx);

	  if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
	    {
	      fprintf (outf, ";; End of basic block %d, registers live:\n",
		       bb->index);
	      dump_regset (bb->global_live_at_end, outf);
	      putc ('\n', outf);
	    }

	  if (did_output)
	    putc ('\n', outf);
	}

      free (start);
      free (end);
      free (in_bb_p);
    }

  if (current_function_epilogue_delay_list != 0)
    {
      fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
      for (tmp_rtx = current_function_epilogue_delay_list; tmp_rtx != 0;
	   tmp_rtx = XEXP (tmp_rtx, 1))
	print_rtl_single (outf, XEXP (tmp_rtx, 0));
    }
}
/* Dump the rtl into the current debugging dump file, then abort.  */

static void
print_rtl_and_abort_fcn (file, line, function)
     const char *file;
     int line;
     const char *function;
{
  if (rtl_dump_file)
    {
      print_rtl_with_bb (rtl_dump_file, get_insns ());
      fclose (rtl_dump_file);
    }

  fancy_abort (file, line, function);
}
/* Recompute register set/reference counts immediately prior to register
   allocation.

   This avoids problems with set/reference counts changing to/from values
   which have special meanings to the register allocators.

   Additionally, the reference counts are the primary component used by the
   register allocators to prioritize pseudos for allocation to hard regs.
   More accurate reference counts generally lead to better register
   allocation.

   F is the first insn to be scanned.

   LOOP_STEP denotes how much loop_depth should be incremented per
   loop nesting level in order to increase the ref count more for
   references in a loop.

   It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and
   possibly other information which is used by the register allocators.  */

void
recompute_reg_usage (f, loop_step)
     rtx f ATTRIBUTE_UNUSED;
     int loop_step ATTRIBUTE_UNUSED;
{
  allocate_reg_life_data ();
  update_life_info (NULL, UPDATE_LIFE_LOCAL, PROP_REG_INFO);
}
/* Optionally removes all the REG_DEAD and REG_UNUSED notes from a set of
   blocks.  If BLOCKS is NULL, assume the universal set.  Returns a count
   of the number of registers that died.  */

int
count_or_remove_death_notes (blocks, kill)
     sbitmap blocks;
     int kill;
{
  int i, count = 0;

  for (i = n_basic_blocks - 1; i >= 0; --i)
    {
      basic_block bb;
      rtx insn;

      if (blocks && ! TEST_BIT (blocks, i))
	continue;

      bb = BASIC_BLOCK (i);

      for (insn = bb->head;; insn = NEXT_INSN (insn))
	{
	  if (INSN_P (insn))
	    {
	      rtx *pprev = &REG_NOTES (insn);
	      rtx link = *pprev;

	      while (link)
		{
		  switch (REG_NOTE_KIND (link))
		    {
		    case REG_DEAD:
		      if (GET_CODE (XEXP (link, 0)) == REG)
			{
			  rtx reg = XEXP (link, 0);
			  int n;

			  if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
			    n = 1;
			  else
			    n = HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg));
			  count += n;
			}
		      /* Fall through.  */

		    case REG_UNUSED:
		      if (kill)
			{
			  rtx next = XEXP (link, 1);

			  free_EXPR_LIST_node (link);
			  *pprev = link = next;
			  break;
			}
		      /* Fall through.  */

		    default:
		      pprev = &XEXP (link, 1);
		      link = *pprev;
		      break;
		    }
		}
	    }

	  if (insn == bb->end)
	    break;
	}
    }

  return count;
}
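
/* An illustrative sketch, not code from the compiler itself: a pass
   that has invalidated the death notes for the blocks in an sbitmap
   BLOCKS can count them with

       int ndead = count_or_remove_death_notes (blocks, 0);

   or strip them outright, prior to recomputing life information, with

       count_or_remove_death_notes (blocks, 1);

   Passing NULL for BLOCKS applies this to every basic block.  */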
/* Update insns block within BB.  */

void
update_bb_for_insn (bb)
     basic_block bb;
{
  rtx insn;

  if (! basic_block_for_insn)
    return;

  for (insn = bb->head; ; insn = NEXT_INSN (insn))
    {
      set_block_for_insn (insn, bb);

      if (insn == bb->end)
	break;
    }
}

/* Record INSN's block as BB.  */

void
set_block_for_insn (insn, bb)
     rtx insn;
     basic_block bb;
{
  size_t uid = INSN_UID (insn);

  if (uid >= basic_block_for_insn->num_elements)
    {
      int new_size;

      /* Add one-eighth the size so we don't keep calling xrealloc.  */
      new_size = uid + (uid + 7) / 8;

      VARRAY_GROW (basic_block_for_insn, new_size);
    }

  VARRAY_BB (basic_block_for_insn, uid) = bb;
}
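
/* For example (illustrative numbers only): if UID is 1000 and the
   varray is currently smaller, the growth rule above resizes it to
   1000 + (1000 + 7) / 8 = 1125 elements, so a burst of newly emitted
   insns does not trigger an xrealloc per insn.  */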
/* When a new insn has been inserted into an existing block, it will
   sometimes emit more than a single insn.  This routine will set the
   block number for the specified insn, and look backwards in the insn
   chain to see if there are any other uninitialized insns immediately
   previous to this one, and set the block number for them too.  */

void
set_block_for_new_insns (insn, bb)
     rtx insn;
     basic_block bb;
{
  set_block_for_insn (insn, bb);

  /* Scan the previous instructions, setting the block number until we find
     an instruction that has the block number set, or we find a note
     of any kind.  */
  for (insn = PREV_INSN (insn); insn != NULL_RTX; insn = PREV_INSN (insn))
    {
      if (GET_CODE (insn) == NOTE)
	break;
      if (INSN_UID (insn) >= basic_block_for_insn->num_elements
	  || BLOCK_FOR_INSN (insn) == 0)
	set_block_for_insn (insn, bb);
      else
	break;
    }
}
/* Verify CFG consistency.  This function checks some CFG invariants and
   aborts when something is wrong.  The hope is that it makes it easier
   to convert more optimization passes to keep the CFG consistent.

   Currently it does the following checks:

   - test head/end pointers
   - overlapping of basic blocks
   - edge list correctness
   - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
   - tails of basic blocks (ensure that the boundary is necessary)
   - scan the body of each basic block for JUMP_INSN, CODE_LABEL
     and NOTE_INSN_BASIC_BLOCK
   - check that all insns are in basic blocks
     (except the switch handling code, barriers and notes)
   - check that all returns are followed by barriers

   In the future it can be extended to check a lot of other stuff as well
   (reachability of basic blocks, life information, etc. etc.).  */
void
verify_flow_info ()
{
  const int max_uid = get_max_uid ();
  const rtx rtx_first = get_insns ();
  rtx last_head = get_last_insn ();
  basic_block *bb_info;
  rtx x;
  int i, last_bb_num_seen, num_bb_notes, err = 0;

  bb_info = (basic_block *) xcalloc (max_uid, sizeof (basic_block));

  for (i = n_basic_blocks - 1; i >= 0; i--)
    {
      basic_block bb = BASIC_BLOCK (i);
      rtx head = bb->head;
      rtx end = bb->end;

      /* Verify the end of the basic block is in the INSN chain.  */
      for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
	if (x == end)
	  break;
      if (!x)
	{
	  error ("End insn %d for block %d not found in the insn stream.",
		 INSN_UID (end), bb->index);
	  err = 1;
	}

      /* Work backwards from the end to the head of the basic block
	 to verify the head is in the RTL chain.  */
      for (; x != NULL_RTX; x = PREV_INSN (x))
	{
	  /* While walking over the insn chain, verify insns appear
	     in only one basic block and initialize the BB_INFO array
	     used by other passes.  */
	  if (bb_info[INSN_UID (x)] != NULL)
	    {
	      error ("Insn %d is in multiple basic blocks (%d and %d)",
		     INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
	      err = 1;
	    }
	  bb_info[INSN_UID (x)] = bb;

	  if (x == head)
	    break;
	}
      if (!x)
	{
	  error ("Head insn %d for block %d not found in the insn stream.",
		 INSN_UID (head), bb->index);
	  err = 1;
	}

      last_head = x;
    }

  /* Now check the basic blocks (boundaries etc.)  */
  for (i = n_basic_blocks - 1; i >= 0; i--)
    {
      basic_block bb = BASIC_BLOCK (i);
      /* Check correctness of edge lists.  */
      edge e;

      e = bb->succ;
      while (e)
	{
	  if ((e->flags & EDGE_FALLTHRU)
	      && e->src != ENTRY_BLOCK_PTR
	      && e->dest != EXIT_BLOCK_PTR
	      && (e->src->index + 1 != e->dest->index
		  || !can_fallthru (e->src, e->dest)))
	    {
	      error ("verify_flow_info: Incorrect fallthru edge %i->%i",
		     e->src->index, e->dest->index);
	      err = 1;
	    }

	  if (e->src != bb)
	    {
	      error ("verify_flow_info: Basic block %d succ edge is corrupted",
		     bb->index);
	      fprintf (stderr, "Predecessor: ");
	      dump_edge_info (stderr, e, 0);
	      fprintf (stderr, "\nSuccessor: ");
	      dump_edge_info (stderr, e, 1);
	      fprintf (stderr, "\n");
	      err = 1;
	    }
	  if (e->dest != EXIT_BLOCK_PTR)
	    {
	      edge e2 = e->dest->pred;

	      while (e2 && e2 != e)
		e2 = e2->pred_next;
	      if (!e2)
		{
		  error ("Basic block %i edge lists are corrupted", bb->index);
		  err = 1;
		}
	    }
	  e = e->succ_next;
	}

      e = bb->pred;
      while (e)
	{
	  if (e->dest != bb)
	    {
	      error ("Basic block %d pred edge is corrupted", bb->index);
	      fputs ("Predecessor: ", stderr);
	      dump_edge_info (stderr, e, 0);
	      fputs ("\nSuccessor: ", stderr);
	      dump_edge_info (stderr, e, 1);
	      fputc ('\n', stderr);
	      err = 1;
	    }
	  if (e->src != ENTRY_BLOCK_PTR)
	    {
	      edge e2 = e->src->succ;

	      while (e2 && e2 != e)
		e2 = e2->succ_next;
	      if (!e2)
		{
		  error ("Basic block %i edge lists are corrupted", bb->index);
		  err = 1;
		}
	    }
	  e = e->pred_next;
	}

      /* OK, pointers are correct.  Now check the header of the basic
	 block.  It ought to contain an optional CODE_LABEL followed
	 by NOTE_BASIC_BLOCK.  */
      x = bb->head;
      if (GET_CODE (x) == CODE_LABEL)
	{
	  if (bb->end == x)
	    {
	      error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
		     bb->index);
	      err = 1;
	    }
	  x = NEXT_INSN (x);
	}
      if (!NOTE_INSN_BASIC_BLOCK_P (x) || NOTE_BASIC_BLOCK (x) != bb)
	{
	  error ("NOTE_INSN_BASIC_BLOCK is missing for block %d\n",
		 bb->index);
	  err = 1;
	}

      if (bb->end == x)
	{
	  /* Do checks for empty blocks here.  */
	}
      else
	{
	  x = NEXT_INSN (x);
	  while (x)
	    {
	      if (NOTE_INSN_BASIC_BLOCK_P (x))
		{
		  error ("NOTE_INSN_BASIC_BLOCK %d in the middle of basic block %d",
			 INSN_UID (x), bb->index);
		  err = 1;
		}

	      if (x == bb->end)
		break;

	      if (GET_CODE (x) == JUMP_INSN
		  || GET_CODE (x) == CODE_LABEL
		  || GET_CODE (x) == BARRIER)
		{
		  error ("In basic block %d:", bb->index);
		  fatal_insn ("Flow control insn inside a basic block", x);
		}

	      x = NEXT_INSN (x);
	    }
	}
    }

  last_bb_num_seen = -1;
  num_bb_notes = 0;
  x = rtx_first;
  while (x)
    {
      if (NOTE_INSN_BASIC_BLOCK_P (x))
	{
	  basic_block bb = NOTE_BASIC_BLOCK (x);

	  num_bb_notes++;
	  if (bb->index != last_bb_num_seen + 1)
	    /* Basic blocks not numbered consecutively.  */
	    abort ();

	  last_bb_num_seen = bb->index;
	}

      if (!bb_info[INSN_UID (x)])
	{
	  switch (GET_CODE (x))
	    {
	    case BARRIER:
	    case NOTE:
	      break;

	    case CODE_LABEL:
	      /* An addr_vec is placed outside any block.  */
	      if (NEXT_INSN (x)
		  && GET_CODE (NEXT_INSN (x)) == JUMP_INSN
		  && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
		      || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC))
		x = NEXT_INSN (x);

	      /* But in any case, non-deletable labels can appear anywhere.  */
	      break;

	    default:
	      fatal_insn ("Insn outside basic block", x);
	    }
	}

      if (INSN_P (x)
	  && GET_CODE (x) == JUMP_INSN
	  && returnjump_p (x) && ! condjump_p (x)
	  && ! (NEXT_INSN (x) && GET_CODE (NEXT_INSN (x)) == BARRIER))
	fatal_insn ("Return not followed by barrier", x);

      x = NEXT_INSN (x);
    }

  if (num_bb_notes != n_basic_blocks)
    internal_error
      ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
       num_bb_notes, n_basic_blocks);

  if (err)
    abort ();

  /* Clean up.  */
  free (bb_info);
}
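
/* A minimal sketch of the intended use (restructure_blocks is an
   illustrative placeholder, not a real function): a pass that has just
   edited the CFG can validate its work immediately, e.g.

       restructure_blocks ();
       #ifdef ENABLE_CHECKING
       verify_flow_info ();
       #endif

   so corruption is caught in the pass that introduced it rather than
   many passes later.  */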
/* Functions to access an edge list with a vector representation.
   Enough data is kept such that given an index number, the
   pred and succ that edge represents can be determined, or
   given a pred and a succ, its index number can be returned.
   This allows algorithms which consume a lot of memory to
   represent the normally full matrix of edge (pred,succ) with a
   single indexed vector, edge (EDGE_INDEX (pred, succ)), with no
   wasted space in the client code due to sparse flow graphs.  */

/* This function initializes the edge list.  Basically the entire
   flowgraph is processed, and all edges are assigned a number,
   and the data structure is filled in.  */
struct edge_list *
create_edge_list ()
{
  struct edge_list *elist;
  edge e;
  int num_edges;
  int x;
  int block_count;

  block_count = n_basic_blocks + 2;	/* Include the entry and exit blocks.  */

  num_edges = 0;

  /* Determine the number of edges in the flow graph by counting successor
     edges on each basic block.  */
  for (x = 0; x < n_basic_blocks; x++)
    {
      basic_block bb = BASIC_BLOCK (x);

      for (e = bb->succ; e; e = e->succ_next)
	num_edges++;
    }
  /* Don't forget successors of the entry block.  */
  for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
    num_edges++;

  elist = (struct edge_list *) xmalloc (sizeof (struct edge_list));
  elist->num_blocks = block_count;
  elist->num_edges = num_edges;
  elist->index_to_edge = (edge *) xmalloc (sizeof (edge) * num_edges);

  num_edges = 0;

  /* Follow successors of the entry block, and register these edges.  */
  for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
    {
      elist->index_to_edge[num_edges] = e;
      num_edges++;
    }

  for (x = 0; x < n_basic_blocks; x++)
    {
      basic_block bb = BASIC_BLOCK (x);

      /* Follow all successors of blocks, and register these edges.  */
      for (e = bb->succ; e; e = e->succ_next)
	{
	  elist->index_to_edge[num_edges] = e;
	  num_edges++;
	}
    }

  return elist;
}
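
/* A minimal usage sketch: build the list once, look edges up by
   endpoints or by index, and free it when done.

       struct edge_list *elist = create_edge_list ();
       int k = EDGE_INDEX (elist, bb_a, bb_b);
       edge e;

       if (k != EDGE_INDEX_NO_EDGE)
	 e = INDEX_EDGE (elist, k);
       free_edge_list (elist);

   Here bb_a and bb_b stand for any two basic_block pointers; the
   EDGE_INDEX, INDEX_EDGE and EDGE_INDEX_NO_EDGE macros accompany
   struct edge_list in basic-block.h.  */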
/* This function frees the memory associated with an edge list.  */

void
free_edge_list (elist)
     struct edge_list *elist;
{
  if (elist)
    {
      free (elist->index_to_edge);
      free (elist);
    }
}
/* This function provides debug output showing an edge list.  */

void
print_edge_list (f, elist)
     FILE *f;
     struct edge_list *elist;
{
  int x;

  fprintf (f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
	   elist->num_blocks - 2, elist->num_edges);

  for (x = 0; x < elist->num_edges; x++)
    {
      fprintf (f, " %-4d - edge(", x);
      if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR)
	fprintf (f, "entry,");
      else
	fprintf (f, "%d,", INDEX_EDGE_PRED_BB (elist, x)->index);

      if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR)
	fprintf (f, "exit)\n");
      else
	fprintf (f, "%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
    }
}
/* This function provides an internal consistency check of an edge list,
   verifying that all edges are present, and that there are no
   extra edges.  */

void
verify_edge_list (f, elist)
     FILE *f;
     struct edge_list *elist;
{
  int x, pred, succ, index;
  edge e;

  for (x = 0; x < n_basic_blocks; x++)
    {
      basic_block bb = BASIC_BLOCK (x);

      for (e = bb->succ; e; e = e->succ_next)
	{
	  pred = e->src->index;
	  succ = e->dest->index;
	  index = EDGE_INDEX (elist, e->src, e->dest);
	  if (index == EDGE_INDEX_NO_EDGE)
	    {
	      fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
	      continue;
	    }

	  if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
	    fprintf (f, "*p* Pred for index %d should be %d not %d\n",
		     index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
	  if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
	    fprintf (f, "*p* Succ for index %d should be %d not %d\n",
		     index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
	}
    }

  for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
    {
      pred = e->src->index;
      succ = e->dest->index;
      index = EDGE_INDEX (elist, e->src, e->dest);
      if (index == EDGE_INDEX_NO_EDGE)
	{
	  fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
	  continue;
	}

      if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
	fprintf (f, "*p* Pred for index %d should be %d not %d\n",
		 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
      if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
	fprintf (f, "*p* Succ for index %d should be %d not %d\n",
		 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
    }

  /* We've verified that all the edges are in the list, now let's make sure
     there are no spurious edges in the list.  */

  for (pred = 0; pred < n_basic_blocks; pred++)
    for (succ = 0; succ < n_basic_blocks; succ++)
      {
	basic_block p = BASIC_BLOCK (pred);
	basic_block s = BASIC_BLOCK (succ);
	int found_edge = 0;

	for (e = p->succ; e; e = e->succ_next)
	  if (e->dest == s)
	    {
	      found_edge = 1;
	      break;
	    }

	for (e = s->pred; e; e = e->pred_next)
	  if (e->src == p)
	    {
	      found_edge = 1;
	      break;
	    }

	if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
	    == EDGE_INDEX_NO_EDGE && found_edge != 0)
	  fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
		   pred, succ);
	if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
	    != EDGE_INDEX_NO_EDGE && found_edge == 0)
	  fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
		   pred, succ, EDGE_INDEX (elist, BASIC_BLOCK (pred),
					   BASIC_BLOCK (succ)));
      }

  for (succ = 0; succ < n_basic_blocks; succ++)
    {
      basic_block p = ENTRY_BLOCK_PTR;
      basic_block s = BASIC_BLOCK (succ);
      int found_edge = 0;

      for (e = p->succ; e; e = e->succ_next)
	if (e->dest == s)
	  {
	    found_edge = 1;
	    break;
	  }

      for (e = s->pred; e; e = e->pred_next)
	if (e->src == p)
	  {
	    found_edge = 1;
	    break;
	  }

      if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
	  == EDGE_INDEX_NO_EDGE && found_edge != 0)
	fprintf (f, "*** Edge (entry, %d) appears to not have an index\n",
		 succ);
      if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
	  != EDGE_INDEX_NO_EDGE && found_edge == 0)
	fprintf (f, "*** Edge (entry, %d) has index %d, but no edge exists\n",
		 succ, EDGE_INDEX (elist, ENTRY_BLOCK_PTR,
				   BASIC_BLOCK (succ)));
    }

  for (pred = 0; pred < n_basic_blocks; pred++)
    {
      basic_block p = BASIC_BLOCK (pred);
      basic_block s = EXIT_BLOCK_PTR;
      int found_edge = 0;

      for (e = p->succ; e; e = e->succ_next)
	if (e->dest == s)
	  {
	    found_edge = 1;
	    break;
	  }

      for (e = s->pred; e; e = e->pred_next)
	if (e->src == p)
	  {
	    found_edge = 1;
	    break;
	  }

      if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
	  == EDGE_INDEX_NO_EDGE && found_edge != 0)
	fprintf (f, "*** Edge (%d, exit) appears to not have an index\n",
		 pred);
      if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
	  != EDGE_INDEX_NO_EDGE && found_edge == 0)
	fprintf (f, "*** Edge (%d, exit) has index %d, but no edge exists\n",
		 pred, EDGE_INDEX (elist, BASIC_BLOCK (pred),
				   EXIT_BLOCK_PTR));
    }
}
/* This routine will determine what, if any, edge there is between
   a specified predecessor and successor.  */

int
find_edge_index (edge_list, pred, succ)
     struct edge_list *edge_list;
     basic_block pred, succ;
{
  int x;

  for (x = 0; x < NUM_EDGES (edge_list); x++)
    {
      if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
	  && INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
	return x;
    }

  return (EDGE_INDEX_NO_EDGE);
}
/* This function will remove an edge from the flow graph.  */

void
remove_edge (e)
     edge e;
{
  edge last_pred = NULL;
  edge last_succ = NULL;
  edge tmp;
  basic_block src, dest;

  src = e->src;
  dest = e->dest;
  for (tmp = src->succ; tmp && tmp != e; tmp = tmp->succ_next)
    last_succ = tmp;

  if (!tmp)
    abort ();
  if (last_succ)
    last_succ->succ_next = e->succ_next;
  else
    src->succ = e->succ_next;

  for (tmp = dest->pred; tmp && tmp != e; tmp = tmp->pred_next)
    last_pred = tmp;

  if (!tmp)
    abort ();
  if (last_pred)
    last_pred->pred_next = e->pred_next;
  else
    dest->pred = e->pred_next;

  n_edges--;
  free (e);
}

/* This routine will remove any fake successor edges for a basic block.
   When the edge is removed, it is also removed from whatever predecessor
   list it is in.  */

static void
remove_fake_successors (bb)
     basic_block bb;
{
  edge e;

  for (e = bb->succ; e;)
    {
      edge tmp = e;

      e = e->succ_next;
      if ((tmp->flags & EDGE_FAKE) == EDGE_FAKE)
	remove_edge (tmp);
    }
}

/* This routine will remove all fake edges from the flow graph.  If
   we remove all fake successors, it will automatically remove all
   fake predecessors.  */

void
remove_fake_edges ()
{
  int x;

  for (x = 0; x < n_basic_blocks; x++)
    remove_fake_successors (BASIC_BLOCK (x));

  /* We've handled all successors except the entry block's.  */
  remove_fake_successors (ENTRY_BLOCK_PTR);
}
/* This function will add a fake edge between any block which has no
   successors, and the exit block.  Some data flow equations require these
   edges to exist.  */

void
add_noreturn_fake_exit_edges ()
{
  int x;

  for (x = 0; x < n_basic_blocks; x++)
    if (BASIC_BLOCK (x)->succ == NULL)
      make_edge (NULL, BASIC_BLOCK (x), EXIT_BLOCK_PTR, EDGE_FAKE);
}
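
/* Sketch of the usual bracketing (the middle call is an illustrative
   placeholder, not a real function):

       add_noreturn_fake_exit_edges ();
       solve_dataflow_problem ();
       remove_fake_edges ();

   The fake edges satisfy equations that need every block to reach the
   exit, and removing them afterwards lets later passes see the real
   CFG again.  */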
/* This function adds a fake edge between any infinite loops to the
   exit block.  Some optimizations require a path from each node to
   the exit node.

   See also Morgan, Figure 3.10, pp. 82-83.

   The current implementation is ugly, not attempting to minimize the
   number of inserted fake edges.  To reduce the number of fake edges
   to insert, add fake edges from _innermost_ loops containing only
   nodes not reachable from the exit block.  */

void
connect_infinite_loops_to_exit ()
{
  basic_block unvisited_block;

  /* Perform depth-first search in the reverse graph to find nodes
     reachable from the exit block.  */
  struct depth_first_search_dsS dfs_ds;

  flow_dfs_compute_reverse_init (&dfs_ds);
  flow_dfs_compute_reverse_add_bb (&dfs_ds, EXIT_BLOCK_PTR);

  /* Repeatedly add fake edges, updating the unreachable nodes.  */
  while (1)
    {
      unvisited_block = flow_dfs_compute_reverse_execute (&dfs_ds);
      if (!unvisited_block)
	break;
      make_edge (NULL, unvisited_block, EXIT_BLOCK_PTR, EDGE_FAKE);
      flow_dfs_compute_reverse_add_bb (&dfs_ds, unvisited_block);
    }

  flow_dfs_compute_reverse_finish (&dfs_ds);

  return;
}
/* Redirect an edge's successor from one block to another.  */

void
redirect_edge_succ (e, new_succ)
     edge e;
     basic_block new_succ;
{
  edge *pe;

  /* Disconnect the edge from the old successor block.  */
  for (pe = &e->dest->pred; *pe != e; pe = &(*pe)->pred_next)
    continue;
  *pe = (*pe)->pred_next;

  /* Reconnect the edge to the new successor block.  */
  e->pred_next = new_succ->pred;
  new_succ->pred = e;
  e->dest = new_succ;
}

/* Redirect an edge's predecessor from one block to another.  */

void
redirect_edge_pred (e, new_pred)
     edge e;
     basic_block new_pred;
{
  edge *pe;

  /* Disconnect the edge from the old predecessor block.  */
  for (pe = &e->src->succ; *pe != e; pe = &(*pe)->succ_next)
    continue;
  *pe = (*pe)->succ_next;

  /* Reconnect the edge to the new predecessor block.  */
  e->succ_next = new_pred->succ;
  new_pred->succ = e;
  e->src = new_pred;
}
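
/* For instance, a jump-threading transformation that decides edge E
   should reach NEW_BB instead of its current destination would call

       redirect_edge_succ (e, new_bb);

   and then rewrite the jump instruction itself to match: these two
   routines update only the edge lists, never the insn stream.  */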
/* Dump the list of basic blocks in the bitmap NODES.  */

static void
flow_nodes_print (str, nodes, file)
     const char *str;
     const sbitmap nodes;
     FILE *file;
{
  int node;

  if (! nodes)
    return;

  fprintf (file, "%s { ", str);
  EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {fprintf (file, "%d ", node);});
  fputs ("}\n", file);
}

/* Dump the list of edges in the array EDGE_LIST.  */

static void
flow_edge_list_print (str, edge_list, num_edges, file)
     const char *str;
     const edge *edge_list;
     int num_edges;
     FILE *file;
{
  int i;

  if (! edge_list)
    return;

  fprintf (file, "%s { ", str);
  for (i = 0; i < num_edges; i++)
    fprintf (file, "%d->%d ", edge_list[i]->src->index,
	     edge_list[i]->dest->index);
  fputs ("}\n", file);
}

/* Dump loop related CFG information.  */

static void
flow_loops_cfg_dump (loops, file)
     const struct loops *loops;
     FILE *file;
{
  int i;

  if (! loops->num || ! file || ! loops->cfg.dom)
    return;

  for (i = 0; i < n_basic_blocks; i++)
    {
      edge succ;

      fprintf (file, ";; %d succs { ", i);
      for (succ = BASIC_BLOCK (i)->succ; succ; succ = succ->succ_next)
	fprintf (file, "%d ", succ->dest->index);
      flow_nodes_print ("} dom", loops->cfg.dom[i], file);
    }

  /* Dump the DFS node order.  */
  if (loops->cfg.dfs_order)
    {
      fputs (";; DFS order: ", file);
      for (i = 0; i < n_basic_blocks; i++)
	fprintf (file, "%d ", loops->cfg.dfs_order[i]);

      fputs ("\n", file);
    }

  /* Dump the reverse completion node order.  */
  if (loops->cfg.rc_order)
    {
      fputs (";; RC order: ", file);
      for (i = 0; i < n_basic_blocks; i++)
	fprintf (file, "%d ", loops->cfg.rc_order[i]);

      fputs ("\n", file);
    }
}

/* Return non-zero if the nodes of LOOP are a subset of OUTER.  */

static int
flow_loop_nested_p (outer, loop)
     struct loop *outer;
     struct loop *loop;
{
  return sbitmap_a_subset_b_p (loop->nodes, outer->nodes);
}
/* Dump the loop information specified by LOOP to the stream FILE
   using auxiliary dump callback function LOOP_DUMP_AUX if non-null.  */

void
flow_loop_dump (loop, file, loop_dump_aux, verbose)
     const struct loop *loop;
     FILE *file;
     void (*loop_dump_aux) PARAMS((const struct loop *, FILE *, int));
     int verbose;
{
  if (! loop || ! loop->header)
    return;

  fprintf (file, ";;\n;; Loop %d (%d to %d):%s%s\n",
	   loop->num, INSN_UID (loop->first->head),
	   INSN_UID (loop->last->end),
	   loop->shared ? " shared" : "",
	   loop->invalid ? " invalid" : "");
  fprintf (file, ";; header %d, latch %d, pre-header %d, first %d, last %d\n",
	   loop->header->index, loop->latch->index,
	   loop->pre_header ? loop->pre_header->index : -1,
	   loop->first->index, loop->last->index);
  fprintf (file, ";; depth %d, level %d, outer %ld\n",
	   loop->depth, loop->level,
	   (long) (loop->outer ? loop->outer->num : -1));

  if (loop->pre_header_edges)
    flow_edge_list_print (";; pre-header edges", loop->pre_header_edges,
			  loop->num_pre_header_edges, file);
  flow_edge_list_print (";; entry edges", loop->entry_edges,
			loop->num_entries, file);
  fprintf (file, ";; %d", loop->num_nodes);
  flow_nodes_print (" nodes", loop->nodes, file);
  flow_edge_list_print (";; exit edges", loop->exit_edges,
			loop->num_exits, file);
  if (loop->exits_doms)
    flow_nodes_print (";; exit doms", loop->exits_doms, file);
  if (loop_dump_aux)
    loop_dump_aux (loop, file, verbose);
}
/* Dump the loop information specified by LOOPS to the stream FILE,
   using auxiliary dump callback function LOOP_DUMP_AUX if non-null.  */

void
flow_loops_dump (loops, file, loop_dump_aux, verbose)
     const struct loops *loops;
     FILE *file;
     void (*loop_dump_aux) PARAMS((const struct loop *, FILE *, int));
     int verbose;
{
  int i;
  int num_loops;

  num_loops = loops->num;
  if (! num_loops || ! file)
    return;

  fprintf (file, ";; %d loops found, %d levels\n",
	   num_loops, loops->levels);

  for (i = 0; i < num_loops; i++)
    {
      struct loop *loop = &loops->array[i];

      flow_loop_dump (loop, file, loop_dump_aux, verbose);

      if (loop->shared)
	{
	  int j;

	  for (j = 0; j < i; j++)
	    {
	      struct loop *oloop = &loops->array[j];

	      if (loop->header == oloop->header)
		{
		  int disjoint;
		  int smaller;

		  smaller = loop->num_nodes < oloop->num_nodes;

		  /* If the union of LOOP and OLOOP is different from
		     the larger of LOOP and OLOOP then LOOP and OLOOP
		     must be disjoint.  */
		  disjoint = ! flow_loop_nested_p (smaller ? loop : oloop,
						   smaller ? oloop : loop);
		  fprintf (file,
			   ";; loop header %d shared by loops %d, %d %s\n",
			   loop->header->index, i, j,
			   disjoint ? "disjoint" : "nested");
		}
	    }
	}
    }

  if (verbose)
    flow_loops_cfg_dump (loops, file);
}
/* Free all the memory allocated for LOOPS.  */

void
flow_loops_free (loops)
     struct loops *loops;
{
  if (loops->array)
    {
      int i;

      if (! loops->num)
	abort ();

      /* Free the loop descriptors.  */
      for (i = 0; i < loops->num; i++)
	{
	  struct loop *loop = &loops->array[i];

	  if (loop->pre_header_edges)
	    free (loop->pre_header_edges);
	  if (loop->nodes)
	    sbitmap_free (loop->nodes);
	  if (loop->entry_edges)
	    free (loop->entry_edges);
	  if (loop->exit_edges)
	    free (loop->exit_edges);
	  if (loop->exits_doms)
	    sbitmap_free (loop->exits_doms);
	}

      free (loops->array);
      loops->array = NULL;

      if (loops->cfg.dom)
	sbitmap_vector_free (loops->cfg.dom);
      if (loops->cfg.dfs_order)
	free (loops->cfg.dfs_order);
      /* The RC order is saved by flow_loops_find alongside the DFS
	 order; free it as well so it does not leak.  */
      if (loops->cfg.rc_order)
	free (loops->cfg.rc_order);

      if (loops->shared_headers)
	sbitmap_free (loops->shared_headers);
    }
}
/* Find the entry edges into the loop with header HEADER and nodes
   NODES and store in ENTRY_EDGES array.  Return the number of entry
   edges into the loop.  */

static int
flow_loop_entry_edges_find (header, nodes, entry_edges)
     basic_block header;
     const sbitmap nodes;
     edge **entry_edges;
{
  edge e;
  int num_entries;

  *entry_edges = NULL;

  num_entries = 0;
  for (e = header->pred; e; e = e->pred_next)
    {
      basic_block src = e->src;

      if (src == ENTRY_BLOCK_PTR || ! TEST_BIT (nodes, src->index))
	num_entries++;
    }

  if (! num_entries)
    abort ();

  *entry_edges = (edge *) xmalloc (num_entries * sizeof (edge *));

  num_entries = 0;
  for (e = header->pred; e; e = e->pred_next)
    {
      basic_block src = e->src;

      if (src == ENTRY_BLOCK_PTR || ! TEST_BIT (nodes, src->index))
	(*entry_edges)[num_entries++] = e;
    }

  return num_entries;
}
/* Find the exit edges from the loop using the bitmap of loop nodes
   NODES and store in EXIT_EDGES array.  Return the number of
   exit edges from the loop.  */

static int
flow_loop_exit_edges_find (nodes, exit_edges)
     const sbitmap nodes;
     edge **exit_edges;
{
  edge e;
  int node;
  int num_exits;

  *exit_edges = NULL;

  /* Check all nodes within the loop to see if there are any
     successors not in the loop.  Note that a node may have multiple
     exiting edges.  ??? A node can have one jumping edge and one fallthru
     edge, so only one of these can exit the loop.  */
  num_exits = 0;
  EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
    for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
      {
	basic_block dest = e->dest;

	if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
	  num_exits++;
      }
  });

  if (! num_exits)
    return 0;

  *exit_edges = (edge *) xmalloc (num_exits * sizeof (edge *));

  /* Store all exiting edges into an array.  */
  num_exits = 0;
  EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
    for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
      {
	basic_block dest = e->dest;

	if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
	  (*exit_edges)[num_exits++] = e;
      }
  });

  return num_exits;
}
/* Find the nodes contained within the loop with header HEADER and
   latch LATCH and store in NODES.  Return the number of nodes within
   the loop.  */

static int
flow_loop_nodes_find (header, latch, nodes)
     basic_block header;
     basic_block latch;
     sbitmap nodes;
{
  basic_block *stack;
  int sp;
  int num_nodes = 0;

  stack = (basic_block *) xmalloc (n_basic_blocks * sizeof (basic_block));
  sp = 0;

  /* Start with only the loop header in the set of loop nodes.  */
  sbitmap_zero (nodes);
  SET_BIT (nodes, header->index);
  num_nodes++;
  header->loop_depth++;

  /* Push the loop latch on to the stack.  */
  if (! TEST_BIT (nodes, latch->index))
    {
      SET_BIT (nodes, latch->index);
      latch->loop_depth++;
      num_nodes++;
      stack[sp++] = latch;
    }

  while (sp)
    {
      basic_block node;
      edge e;

      node = stack[--sp];
      for (e = node->pred; e; e = e->pred_next)
	{
	  basic_block ancestor = e->src;

	  /* If an ancestor is not marked as part of the loop, add it
	     to the set of loop nodes and push it on to the stack.  */
	  if (ancestor != ENTRY_BLOCK_PTR
	      && ! TEST_BIT (nodes, ancestor->index))
	    {
	      SET_BIT (nodes, ancestor->index);
	      ancestor->loop_depth++;
	      num_nodes++;
	      stack[sp++] = ancestor;
	    }
	}
    }

  free (stack);
  return num_nodes;
}
/* Compute the depth first search order and store in the array
   DFS_ORDER if non-zero, marking the nodes visited in VISITED.  If
   RC_ORDER is non-zero, return the reverse completion number for each
   node.  Returns the number of nodes visited.  A depth first search
   tries to get as far away from the starting point as quickly as
   possible.  */

int
flow_depth_first_order_compute (dfs_order, rc_order)
     int *dfs_order;
     int *rc_order;
{
  edge *stack;
  int sp;
  int dfsnum = 0;
  int rcnum = n_basic_blocks - 1;
  sbitmap visited;

  /* Allocate stack for back-tracking up CFG.  */
  stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
  sp = 0;

  /* Allocate bitmap to track nodes that have been visited.  */
  visited = sbitmap_alloc (n_basic_blocks);

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (visited);

  /* Push the first edge on to the stack.  */
  stack[sp++] = ENTRY_BLOCK_PTR->succ;

  while (sp)
    {
      edge e;
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      e = stack[sp - 1];
      src = e->src;
      dest = e->dest;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
	{
	  /* Mark that we have visited the destination.  */
	  SET_BIT (visited, dest->index);

	  if (dfs_order)
	    dfs_order[dfsnum++] = dest->index;

	  if (dest->succ)
	    {
	      /* Since the DEST node has been visited for the first
		 time, check its successors.  */
	      stack[sp++] = dest->succ;
	    }
	  else
	    {
	      /* There are no successors for the DEST node so assign
		 its reverse completion number.  */
	      if (rc_order)
		rc_order[rcnum--] = dest->index;
	    }
	}
      else
	{
	  if (! e->succ_next && src != ENTRY_BLOCK_PTR)
	    {
	      /* There are no more successors for the SRC node
		 so assign its reverse completion number.  */
	      if (rc_order)
		rc_order[rcnum--] = src->index;
	    }

	  if (e->succ_next)
	    stack[sp - 1] = e->succ_next;
	  else
	    sp--;
	}
    }

  free (stack);
  sbitmap_free (visited);

  /* The number of nodes visited should not be greater than
     n_basic_blocks.  */
  if (dfsnum > n_basic_blocks)
    abort ();

  /* There are some nodes left in the CFG that are unreachable.  */
  if (dfsnum < n_basic_blocks)
    abort ();

  return dfsnum;
}
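
/* Callers usually allocate both order arrays up front, as
   flow_loops_find does below; a minimal sketch:

       int *dfs_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
       int *rc_order = (int *) xmalloc (n_basic_blocks * sizeof (int));

       flow_depth_first_order_compute (dfs_order, rc_order);

   Either argument may be NULL when only one of the two orderings is
   needed; freeing the arrays afterwards is the caller's
   responsibility.  */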
/* Compute the depth first search order on the _reverse_ graph and
   store in the array DFS_ORDER, marking the nodes visited in VISITED.
   Returns the number of nodes visited.

   The computation is split into four pieces:

   flow_dfs_compute_reverse_init () creates the necessary data
   structures.

   flow_dfs_compute_reverse_add_bb () adds a basic block to the data
   structures.  The block will start the search.

   flow_dfs_compute_reverse_execute () continues (or starts) the
   search using the block on the top of the stack, stopping when the
   stack is empty.

   flow_dfs_compute_reverse_finish () destroys the necessary data
   structures.

   Thus, the user will probably call ..._init(), call ..._add_bb() to
   add a beginning basic block to the stack, call ..._execute(),
   possibly add another bb to the stack and again call ..._execute(),
   ..., and finally call _finish().  */
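
/* Expressed as code, the protocol above looks like the following
   sketch (connect_infinite_loops_to_exit above is the one client in
   this file):

       struct depth_first_search_dsS dfs_ds;
       basic_block bb;

       flow_dfs_compute_reverse_init (&dfs_ds);
       flow_dfs_compute_reverse_add_bb (&dfs_ds, EXIT_BLOCK_PTR);
       while ((bb = flow_dfs_compute_reverse_execute (&dfs_ds)) != NULL)
	 flow_dfs_compute_reverse_add_bb (&dfs_ds, bb);
       flow_dfs_compute_reverse_finish (&dfs_ds);

   Each block returned by ..._execute() is unreachable from the blocks
   added so far, and adding it back restarts the search from there.  */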
/* Initialize the data structures used for depth-first search on the
   reverse graph.  DATA is the current depth-first search context;
   the caller supplies the starting block(s) afterwards with
   flow_dfs_compute_reverse_add_bb.  */
static void
flow_dfs_compute_reverse_init (data)
     depth_first_search_ds data;
{
  /* Allocate stack for back-tracking up CFG.  */
  data->stack =
    (basic_block *) xmalloc ((n_basic_blocks - (INVALID_BLOCK + 1))
			     * sizeof (basic_block));
  data->sp = 0;

  /* Allocate bitmap to track nodes that have been visited.  */
  data->visited_blocks = sbitmap_alloc (n_basic_blocks - (INVALID_BLOCK + 1));

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (data->visited_blocks);

  return;
}

/* Add the specified basic block to the top of the dfs data
   structures.  When the search continues, it will start at the
   block.  */

static void
flow_dfs_compute_reverse_add_bb (data, bb)
     depth_first_search_ds data;
     basic_block bb;
{
  data->stack[data->sp++] = bb;
  return;
}

/* Continue the depth-first search through the reverse graph starting
   with the block at the stack's top and ending when the stack is
   empty.  Visited nodes are marked.  Returns an unvisited basic
   block, or NULL if there is none available.  */

static basic_block
flow_dfs_compute_reverse_execute (data)
     depth_first_search_ds data;
{
  basic_block bb;
  edge e;
  int i;

  while (data->sp > 0)
    {
      bb = data->stack[--data->sp];

      /* Mark that we have visited this node.  */
      if (!TEST_BIT (data->visited_blocks, bb->index - (INVALID_BLOCK + 1)))
	{
	  SET_BIT (data->visited_blocks, bb->index - (INVALID_BLOCK + 1));

	  /* Perform depth-first search on adjacent vertices.  */
	  for (e = bb->pred; e; e = e->pred_next)
	    flow_dfs_compute_reverse_add_bb (data, e->src);
	}
    }

  /* Determine if there are unvisited basic blocks.  */
  for (i = n_basic_blocks - (INVALID_BLOCK + 1); --i >= 0;)
    if (!TEST_BIT (data->visited_blocks, i))
      return BASIC_BLOCK (i + (INVALID_BLOCK + 1));
  return NULL;
}

/* Destroy the data structures needed for depth-first search on the
   reverse graph.  */

static void
flow_dfs_compute_reverse_finish (data)
     depth_first_search_ds data;
{
  free (data->stack);
  sbitmap_free (data->visited_blocks);
  return;
}
/* Find the root node of the loop pre-header extended basic block and
   the edges along the trace from the root node to the loop header.  */

static void
flow_loop_pre_header_scan (loop)
     struct loop *loop;
{
  int num = 0;
  basic_block ebb;

  loop->num_pre_header_edges = 0;

  if (loop->num_entries != 1)
    return;

  ebb = loop->entry_edges[0]->src;

  if (ebb != ENTRY_BLOCK_PTR)
    {
      edge e;

      /* Count number of edges along trace from loop header to
	 root of pre-header extended basic block.  Usually this is
	 only one or two edges.  */
      num++;
      while (ebb->pred->src != ENTRY_BLOCK_PTR && ! ebb->pred->pred_next)
	{
	  ebb = ebb->pred->src;
	  num++;
	}

      loop->pre_header_edges = (edge *) xmalloc (num * sizeof (edge *));
      loop->num_pre_header_edges = num;

      /* Store edges in order that they are followed.  The source
	 of the first edge is the root node of the pre-header extended
	 basic block and the destination of the last edge is
	 the loop header.  */
      for (e = loop->entry_edges[0]; num; e = e->src->pred)
	loop->pre_header_edges[--num] = e;
    }
}
/* Return the block for the pre-header of the loop with header
   HEADER where DOM specifies the dominator information.  Return NULL if
   there is no pre-header.  */

static basic_block
flow_loop_pre_header_find (header, dom)
     basic_block header;
     const sbitmap *dom;
{
  basic_block pre_header;
  edge e;

  /* If block p is a predecessor of the header and is the only block
     that the header does not dominate, then it is the pre-header.  */
  pre_header = NULL;
  for (e = header->pred; e; e = e->pred_next)
    {
      basic_block node = e->src;

      if (node != ENTRY_BLOCK_PTR
	  && ! TEST_BIT (dom[node->index], header->index))
	{
	  if (pre_header == NULL)
	    pre_header = node;
	  else
	    {
	      /* There are multiple edges into the header from outside
		 the loop so there is no pre-header block.  */
	      pre_header = NULL;
	      break;
	    }
	}
    }

  return pre_header;
}
/* Add LOOP to the loop hierarchy tree where PREVLOOP was the loop
   previously added.  The insertion algorithm assumes that the loops
   are added in the order found by a depth first search of the CFG.  */

static void
flow_loop_tree_node_add (prevloop, loop)
     struct loop *prevloop;
     struct loop *loop;
{
  if (flow_loop_nested_p (prevloop, loop))
    {
      prevloop->inner = loop;
      loop->outer = prevloop;
      return;
    }

  while (prevloop->outer)
    {
      if (flow_loop_nested_p (prevloop->outer, loop))
	{
	  prevloop->next = loop;
	  loop->outer = prevloop->outer;
	  return;
	}
      prevloop = prevloop->outer;
    }

  prevloop->next = loop;
  loop->outer = NULL;
}
/* Build the loop hierarchy tree for LOOPS.  */

static void
flow_loops_tree_build (loops)
     struct loops *loops;
{
  int i;
  int num_loops;

  num_loops = loops->num;
  if (! num_loops)
    return;

  /* Root the loop hierarchy tree with the first loop found.
     Since we used a depth first search this should be the
     outermost loop.  */
  loops->tree_root = &loops->array[0];
  loops->tree_root->outer = loops->tree_root->inner
    = loops->tree_root->next = NULL;

  /* Add the remaining loops to the tree.  */
  for (i = 1; i < num_loops; i++)
    flow_loop_tree_node_add (&loops->array[i - 1], &loops->array[i]);
}

/* Helper function to compute loop nesting depth and enclosed loop level
   for the natural loop specified by LOOP at the loop depth DEPTH.
   Returns the loop level.  */

static int
flow_loop_level_compute (loop, depth)
     struct loop *loop;
     int depth;
{
  struct loop *inner;
  int level = 1;

  if (! loop)
    return 0;

  /* Traverse loop tree assigning depth and computing level as the
     maximum level of all the inner loops of this loop.  The loop
     level is equivalent to the height of the loop in the loop tree
     and corresponds to the number of enclosed loop levels (including
     itself).  */
  for (inner = loop->inner; inner; inner = inner->next)
    {
      int ilevel;

      ilevel = flow_loop_level_compute (inner, depth + 1) + 1;

      if (ilevel > level)
	level = ilevel;
    }
  loop->level = level;
  loop->depth = depth;
  return level;
}
/* Compute the loop nesting depth and enclosed loop level for the loop
   hierarchy tree specified by LOOPS.  Return the maximum enclosed loop
   level.  */

static int
flow_loops_level_compute (loops)
     struct loops *loops;
{
  struct loop *loop;
  int level;
  int levels = 0;

  /* Traverse all the outer level loops.  */
  for (loop = loops->tree_root; loop; loop = loop->next)
    {
      level = flow_loop_level_compute (loop, 1);
      if (level > levels)
	levels = level;
    }

  return levels;
}
/* Scan a single natural loop specified by LOOP collecting information
   about it specified by FLAGS.  */

int
flow_loop_scan (loops, loop, flags)
     struct loops *loops;
     struct loop *loop;
     int flags;
{
  /* Determine prerequisites.  */
  if ((flags & LOOP_EXITS_DOMS) && ! loop->exit_edges)
    flags |= LOOP_EXIT_EDGES;

  if (flags & LOOP_ENTRY_EDGES)
    {
      /* Find edges which enter the loop header.
	 Note that the entry edges should only
	 enter the header of a natural loop.  */
      loop->num_entries
	= flow_loop_entry_edges_find (loop->header,
				      loop->nodes,
				      &loop->entry_edges);
    }

  if (flags & LOOP_EXIT_EDGES)
    {
      /* Find edges which exit the loop.  */
      loop->num_exits
	= flow_loop_exit_edges_find (loop->nodes,
				     &loop->exit_edges);
    }

  if (flags & LOOP_EXITS_DOMS)
    {
      int j;

      /* Determine which loop nodes dominate all the exits
	 of the loop.  */
      loop->exits_doms = sbitmap_alloc (n_basic_blocks);
      sbitmap_copy (loop->exits_doms, loop->nodes);
      for (j = 0; j < loop->num_exits; j++)
	sbitmap_a_and_b (loop->exits_doms, loop->exits_doms,
			 loops->cfg.dom[loop->exit_edges[j]->src->index]);

      /* The header of a natural loop must dominate
	 all exits.  */
      if (! TEST_BIT (loop->exits_doms, loop->header->index))
	abort ();
    }

  if (flags & LOOP_PRE_HEADER)
    {
      /* Look to see if the loop has a pre-header node.  */
      loop->pre_header
	= flow_loop_pre_header_find (loop->header, loops->cfg.dom);

      /* Find the blocks within the extended basic block of
	 the loop pre-header.  */
      flow_loop_pre_header_scan (loop);
    }

  return 1;
}
/* Find all the natural loops in the function and save in LOOPS structure
   and recalculate loop_depth information in basic block structures.
   FLAGS controls which loop information is collected.
   Return the number of natural loops found.  */

int
flow_loops_find (loops, flags)
     struct loops *loops;
     int flags;
{
  int i;
  int b;
  int num_loops;
  edge e;
  sbitmap headers;
  sbitmap *dom;
  int *dfs_order;
  int *rc_order;

  /* This function cannot be repeatedly called with different
     flags to build up the loop information.  The loop tree
     must always be built if this function is called.  */
  if (! (flags & LOOP_TREE))
    abort ();

  memset (loops, 0, sizeof (*loops));

  /* Taking care of this degenerate case makes the rest of
     this code simpler.  */
  if (n_basic_blocks == 0)
    return 0;

  dfs_order = NULL;
  rc_order = NULL;

  /* Compute the dominators.  */
  dom = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
  calculate_dominance_info (NULL, dom, CDI_DOMINATORS);

  /* Count the number of loop edges (back edges).  This should be the
     same as the number of natural loops.  */

  num_loops = 0;
  for (b = 0; b < n_basic_blocks; b++)
    {
      basic_block header;

      header = BASIC_BLOCK (b);
      header->loop_depth = 0;

      for (e = header->pred; e; e = e->pred_next)
	{
	  basic_block latch = e->src;

	  /* Look for back edges where a predecessor is dominated
	     by this block.  A natural loop has a single entry
	     node (header) that dominates all the nodes in the
	     loop.  It also has a single back edge to the header
	     from a latch node.  Note that multiple natural loops
	     may share the same header.  */
	  if (b != header->index)
	    abort ();

	  if (latch != ENTRY_BLOCK_PTR && TEST_BIT (dom[latch->index], b))
	    num_loops++;
	}
    }

  if (num_loops)
    {
      /* Compute depth first search order of the CFG so that outer
	 natural loops will be found before inner natural loops.  */
      dfs_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
      rc_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
      flow_depth_first_order_compute (dfs_order, rc_order);

      /* Save CFG derived information to avoid recomputing it.  */
      loops->cfg.dom = dom;
      loops->cfg.dfs_order = dfs_order;
      loops->cfg.rc_order = rc_order;

      /* Allocate loop structures.  */
      loops->array
	= (struct loop *) xcalloc (num_loops, sizeof (struct loop));

      headers = sbitmap_alloc (n_basic_blocks);
      sbitmap_zero (headers);

      loops->shared_headers = sbitmap_alloc (n_basic_blocks);
      sbitmap_zero (loops->shared_headers);

      /* Find and record information about all the natural loops
	 in the CFG.  */
      num_loops = 0;
      for (b = 0; b < n_basic_blocks; b++)
	{
	  basic_block header;

	  /* Search the nodes of the CFG in reverse completion order
	     so that we can find outer loops first.  */
	  header = BASIC_BLOCK (rc_order[b]);

	  /* Look for all the possible latch blocks for this header.  */
	  for (e = header->pred; e; e = e->pred_next)
	    {
	      basic_block latch = e->src;

	      /* Look for back edges where a predecessor is dominated
		 by this block.  A natural loop has a single entry
		 node (header) that dominates all the nodes in the
		 loop.  It also has a single back edge to the header
		 from a latch node.  Note that multiple natural loops
		 may share the same header.  */
	      if (latch != ENTRY_BLOCK_PTR
		  && TEST_BIT (dom[latch->index], header->index))
		{
		  struct loop *loop;

		  loop = loops->array + num_loops;

		  loop->header = header;
		  loop->latch = latch;
		  loop->num = num_loops;

		  num_loops++;
		}
	    }
	}

      for (i = 0; i < num_loops; i++)
	{
	  struct loop *loop = &loops->array[i];

	  /* Keep track of blocks that are loop headers so
	     that we can tell which loops should be merged.  */
	  if (TEST_BIT (headers, loop->header->index))
	    SET_BIT (loops->shared_headers, loop->header->index);
	  SET_BIT (headers, loop->header->index);

	  /* Find nodes contained within the loop.  */
	  loop->nodes = sbitmap_alloc (n_basic_blocks);
	  loop->num_nodes
	    = flow_loop_nodes_find (loop->header, loop->latch, loop->nodes);

	  /* Compute first and last blocks within the loop.
	     These are often the same as the loop header and
	     loop latch respectively, but this is not always
	     the case.  */
	  loop->first
	    = BASIC_BLOCK (sbitmap_first_set_bit (loop->nodes));
	  loop->last
	    = BASIC_BLOCK (sbitmap_last_set_bit (loop->nodes));

	  flow_loop_scan (loops, loop, flags);
	}

      /* Natural loops with shared headers may either be disjoint or
	 nested.  Disjoint loops with shared headers cannot be inner
	 loops and should be merged.  For now just mark loops that share
	 headers.  */
      for (i = 0; i < num_loops; i++)
	if (TEST_BIT (loops->shared_headers, loops->array[i].header->index))
	  loops->array[i].shared = 1;

      sbitmap_free (headers);
    }
  else
    {
      sbitmap_vector_free (dom);
    }

  loops->num = num_loops;

  /* Build the loop hierarchy tree.  */
  flow_loops_tree_build (loops);

  /* Assign the loop nesting depth and enclosed loop level for each
     loop.  */
  loops->levels = flow_loops_level_compute (loops);

  return num_loops;
}
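
/* A minimal sketch of a client: discover the loops, inspect or dump
   them, and release the memory.

       struct loops loops;

       if (flow_loops_find (&loops, LOOP_TREE) > 0)
	 flow_loops_dump (&loops, rtl_dump_file, NULL, 1);
       flow_loops_free (&loops);

   LOOP_TREE must always be passed; the other LOOP_* flags ask
   flow_loop_scan for additional per-loop information such as entry
   and exit edges.  */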
/* Update the information regarding the loops in the CFG
   specified by LOOPS.  */

int
flow_loops_update (loops, flags)
     struct loops *loops;
     int flags;
{
  /* One day we may want to update the current loop data.  For now
     throw away the old stuff and rebuild what we need.  */
  if (loops->array)
    flow_loops_free (loops);

  return flow_loops_find (loops, flags);
}

/* Return non-zero if edge E enters header of LOOP from outside of LOOP.  */

int
flow_loop_outside_edge_p (loop, e)
     const struct loop *loop;
     edge e;
{
  if (e->dest != loop->header)
    abort ();
  return (e->src == ENTRY_BLOCK_PTR)
    || ! TEST_BIT (loop->nodes, e->src->index);
}

/* Clear LOG_LINKS fields of insns in a chain.
   Also clear the global_live_at_{start,end} fields of the basic block
   structures.  */

void
clear_log_links (insns)
     rtx insns;
{
  rtx i;
  int b;

  for (i = insns; i; i = NEXT_INSN (i))
    if (INSN_P (i))
      LOG_LINKS (i) = 0;

  for (b = 0; b < n_basic_blocks; b++)
    {
      basic_block bb = BASIC_BLOCK (b);

      bb->global_live_at_start = NULL;
      bb->global_live_at_end = NULL;
    }

  ENTRY_BLOCK_PTR->global_live_at_end = NULL;
  EXIT_BLOCK_PTR->global_live_at_start = NULL;
}

/* Given a register bitmap, turn on the bits in a HARD_REG_SET that
   correspond to the hard registers, if any, set in that map.  This
   could be done far more efficiently by having all sorts of special-cases
   with moving single words, but probably isn't worth the trouble.  */

void
reg_set_to_hard_reg_set (to, from)
     HARD_REG_SET *to;
     bitmap from;
{
  int i;

  EXECUTE_IF_SET_IN_BITMAP
    (from, 0, i,
     {
       if (i >= FIRST_PSEUDO_REGISTER)
	 return;
       SET_HARD_REG_BIT (*to, i);
     });
}
/* Called once at initialization time.  */

void
init_flow ()
{
  static int initialized;

  if (!initialized)
    {
      gcc_obstack_init (&flow_obstack);
      flow_firstobj = (char *) obstack_alloc (&flow_obstack, 0);
      initialized = 1;
    }
  else
    {
      obstack_free (&flow_obstack, flow_firstobj);
      flow_firstobj = (char *) obstack_alloc (&flow_obstack, 0);
    }
}
/* Assume that a preceding pass has possibly eliminated jump instructions
   or converted the unconditional jumps.  Eliminate the corresponding
   edges from the CFG.  */

void
purge_dead_edges (bb)
     basic_block bb;
{
  edge e, next;
  rtx insn = bb->end;

  if (GET_CODE (insn) == JUMP_INSN && !simplejump_p (insn))
    return;
  if (GET_CODE (insn) == JUMP_INSN)
    {
      for (e = bb->succ; e; e = next)
	{
	  next = e->succ_next;
	  if (e->dest == EXIT_BLOCK_PTR || e->dest->head != JUMP_LABEL (insn))
	    remove_edge (e);
	}
      if (bb->succ && bb->succ->succ_next)
	abort ();
      if (!bb->succ)
	return;
      bb->succ->probability = REG_BR_PROB_BASE;
      bb->succ->count = bb->count;

      if (rtl_dump_file)
	fprintf (rtl_dump_file, "Purged edges from bb %i\n", bb->index);
      return;
    }

  /* If we don't see a jump insn, we don't know exactly why the block would
     have been broken at this point.  Look for a simple, non-fallthru edge,
     as these are only created by conditional branches.  If we find such an
     edge we know that there used to be a jump here and can then safely
     remove all non-fallthru edges.  */
  for (e = bb->succ; e && (e->flags & (EDGE_COMPLEX | EDGE_FALLTHRU));
       e = e->succ_next);
  if (!e)
    return;
  for (e = bb->succ; e; e = next)
    {
      next = e->succ_next;
      if (!(e->flags & EDGE_FALLTHRU))
	remove_edge (e);
    }
  if (!bb->succ || bb->succ->succ_next)
    abort ();
  bb->succ->probability = REG_BR_PROB_BASE;
  bb->succ->count = bb->count;

  if (rtl_dump_file)
    fprintf (rtl_dump_file, "Purged non-fallthru edges from bb %i\n",
	     bb->index);
  return;
}
/* Search all basic blocks for potentially dead edges and purge them.  */

void
purge_all_dead_edges ()
{
  int i;

  for (i = 0; i < n_basic_blocks; i++)
    purge_dead_edges (BASIC_BLOCK (i));
}
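
/* For example, a pass that deletes or simplifies jumps across the
   whole function can simply call purge_all_dead_edges () when it
   finishes, instead of tracking which individual blocks it touched
   and purging each one with purge_dead_edges.  */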