* flow.c (mark_set_1): Use REG_FREQ_FROM_BB.
1 /* Data flow analysis for GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
22 /* This file contains the data flow analysis pass of the compiler. It
23 computes data flow information which tells combine_instructions
24 which insns to consider combining and controls register allocation.
26 Additional data flow information that is too bulky to record is
27 generated during the analysis, and is used at that time to create
28 autoincrement and autodecrement addressing.
30 The first step is dividing the function into basic blocks.
31 find_basic_blocks does this. Then life_analysis determines
32 where each register is live and where it is dead.
34 ** find_basic_blocks **
36 find_basic_blocks divides the current function's rtl into basic
37 blocks and constructs the CFG. The blocks are recorded in the
38 basic_block_info array; the CFG exists in the edge structures
39 referenced by the blocks.
41 find_basic_blocks also finds any unreachable loops and deletes them.
43 ** life_analysis **
45 life_analysis is called immediately after find_basic_blocks.
46 It uses the basic block information to determine where each
47 hard or pseudo register is live.
49 ** live-register info **
51 The information about where each register is live is in two parts:
52 the REG_NOTES of insns, and the vector basic_block->global_live_at_start.
54 basic_block->global_live_at_start has an element for each basic
55 block, and the element is a bit-vector with a bit for each hard or
56 pseudo register. The bit is 1 if the register is live at the
57 beginning of the basic block.
59 Two types of elements can be added to an insn's REG_NOTES.
60 A REG_DEAD note is added to an insn's REG_NOTES for any register
61 that meets both of two conditions: The value in the register is not
62 needed in subsequent insns and the insn does not replace the value in
63 the register (in the case of multi-word hard registers, the value in
64 each register must be replaced by the insn to avoid a REG_DEAD note).
66 In the vast majority of cases, an object in a REG_DEAD note will be
67 used somewhere in the insn. The (rare) exception to this is if an
68 insn uses a multi-word hard register and only some of the registers are
69 needed in subsequent insns. In that case, REG_DEAD notes will be
70 provided for those hard registers that are not subsequently needed.
71 Partial REG_DEAD notes of this type do not occur when an insn sets
72 only some of the hard registers used in such a multi-word operand;
73 omitting REG_DEAD notes for objects stored in an insn is optional and
74 the desire to do so does not justify the complexity of the partial
75 REG_DEAD notes.
77 REG_UNUSED notes are added for each register that is set by the insn
78 but is unused subsequently (if every register set by the insn is unused
79 and the insn does not reference memory or have some other side-effect,
80 the insn is deleted instead). If only part of a multi-word hard
81 register is used in a subsequent insn, REG_UNUSED notes are made for
82 the parts that will not be used.
84 To determine which registers are live after any insn, one can
85 start from the beginning of the basic block and scan insns, noting
86 which registers are set by each insn and which die there.
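   As a purely illustrative sketch (insn and register numbers are arbitrary
   and the dump format is abbreviated), an insn such as
	(insn 42 (set (reg 60) (plus:SI (reg 60) (reg 61)))
	   (expr_list:REG_DEAD (reg 61) (nil)))
   carries a REG_DEAD note recording that pseudo 61 dies here: its value is
   used by this insn but not needed afterwards.  Pseudo 60 remains live,
   since the insn stores a new value into it; had that value also been
   unused, the insn would instead carry a REG_UNUSED note for pseudo 60
   (or be deleted outright, as described above).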
88 ** Other actions of life_analysis **
90 life_analysis sets up the LOG_LINKS fields of insns because the
91 information needed to do so is readily available.
93 life_analysis deletes insns whose only effect is to store a value
94 that is never used.
96 life_analysis notices cases where a reference to a register as
97 a memory address can be combined with a preceding or following
98 incrementation or decrementation of the register. The separate
99 instruction to increment or decrement is deleted and the address
100 is changed to a POST_INC or similar rtx.
102 Each time an incrementing or decrementing address is created,
103 a REG_INC element is added to the insn's REG_NOTES list.
105 life_analysis fills in certain vectors containing information about
106 register usage: REG_N_REFS, REG_N_DEATHS, REG_N_SETS, REG_LIVE_LENGTH,
107 REG_N_CALLS_CROSSED and REG_BASIC_BLOCK.
109 life_analysis sets current_function_sp_is_unchanging if the function
110 doesn't modify the stack pointer. */
112 /* TODO:
114 Split out from life_analysis:
115 - local property discovery (bb->local_live, bb->local_set)
116 - global property computation
117 - log links creation
118 - pre/post modify transformation
121 #include "config.h"
122 #include "system.h"
123 #include "tree.h"
124 #include "rtl.h"
125 #include "tm_p.h"
126 #include "hard-reg-set.h"
127 #include "basic-block.h"
128 #include "insn-config.h"
129 #include "regs.h"
130 #include "flags.h"
131 #include "output.h"
132 #include "function.h"
133 #include "except.h"
134 #include "toplev.h"
135 #include "recog.h"
136 #include "expr.h"
137 #include "ssa.h"
138 #include "timevar.h"
140 #include "obstack.h"
141 #include "splay-tree.h"
143 #define obstack_chunk_alloc xmalloc
144 #define obstack_chunk_free free
146 /* EXIT_IGNORE_STACK should be nonzero if, when returning from a function,
147 the stack pointer does not matter. The value is tested only in
148 functions that have frame pointers.
149 No definition is equivalent to always zero. */
150 #ifndef EXIT_IGNORE_STACK
151 #define EXIT_IGNORE_STACK 0
152 #endif
154 #ifndef HAVE_epilogue
155 #define HAVE_epilogue 0
156 #endif
157 #ifndef HAVE_prologue
158 #define HAVE_prologue 0
159 #endif
160 #ifndef HAVE_sibcall_epilogue
161 #define HAVE_sibcall_epilogue 0
162 #endif
164 #ifndef LOCAL_REGNO
165 #define LOCAL_REGNO(REGNO) 0
166 #endif
167 #ifndef EPILOGUE_USES
168 #define EPILOGUE_USES(REGNO) 0
169 #endif
171 #ifdef HAVE_conditional_execution
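/* Provide a default test for whether two conditional-execution predicates
   are reverses of one another: compare one condition code against
   reverse_condition of the other.  Targets may override this.  */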
172 #ifndef REVERSE_CONDEXEC_PREDICATES_P
173 #define REVERSE_CONDEXEC_PREDICATES_P(x, y) ((x) == reverse_condition (y))
174 #endif
175 #endif
177 /* The obstack on which the flow graph components are allocated. */
179 struct obstack flow_obstack;
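/* The first object allocated on flow_obstack, so that the obstack can
   later be freed back to this point.  */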
180 static char *flow_firstobj;
182 /* Number of basic blocks in the current function. */
184 int n_basic_blocks;
186 /* Number of edges in the current function. */
188 int n_edges;
190 /* The basic block array. */
192 varray_type basic_block_info;
194 /* The special entry and exit blocks. */
196 struct basic_block_def entry_exit_blocks[2]
197 = {{NULL, /* head */
198 NULL, /* end */
199 NULL, /* head_tree */
200 NULL, /* end_tree */
201 NULL, /* pred */
202 NULL, /* succ */
203 NULL, /* local_set */
204 NULL, /* cond_local_set */
205 NULL, /* global_live_at_start */
206 NULL, /* global_live_at_end */
207 NULL, /* aux */
208 ENTRY_BLOCK, /* index */
209 0, /* loop_depth */
210 0, /* count */
211 0 /* frequency */
214 NULL, /* head */
215 NULL, /* end */
216 NULL, /* head_tree */
217 NULL, /* end_tree */
218 NULL, /* pred */
219 NULL, /* succ */
220 NULL, /* local_set */
221 NULL, /* cond_local_set */
222 NULL, /* global_live_at_start */
223 NULL, /* global_live_at_end */
224 NULL, /* aux */
225 EXIT_BLOCK, /* index */
226 0, /* loop_depth */
227 0, /* count */
228 0 /* frequency */
232 /* Nonzero if the second flow pass has completed. */
233 int flow2_completed;
235 /* Maximum register number used in this function, plus one. */
237 int max_regno;
239 /* Indexed by n, giving various register information */
241 varray_type reg_n_info;
243 /* Size of a regset for the current function,
244 in (1) bytes and (2) elements. */
246 int regset_bytes;
247 int regset_size;
249 /* Regset of regs live when calls to `setjmp'-like functions happen. */
250 /* ??? Does this exist only for the setjmp-clobbered warning message? */
252 regset regs_live_at_setjmp;
254 /* List made of EXPR_LIST rtx's which gives pairs of pseudo registers
255 that have to go in the same hard reg.
256 The first two regs in the list are a pair, and the next two
257 are another pair, etc. */
258 rtx regs_may_share;
260 /* Callback that determines if it's ok for a function to have no
261 noreturn attribute. */
262 int (*lang_missing_noreturn_ok_p) PARAMS ((tree));
264 /* Set of registers that may be eliminable. These are handled specially
265 in updating regs_ever_live. */
267 static HARD_REG_SET elim_reg_set;
269 /* The basic block structure for every insn, indexed by uid. */
271 varray_type basic_block_for_insn;
273 /* The labels mentioned in non-jump rtl. Valid during find_basic_blocks. */
274 /* ??? Should probably be using LABEL_NUSES instead. It would take a
275 bit of surgery to be able to use or co-opt the routines in jump. */
277 static rtx label_value_list;
278 static rtx tail_recursion_label_list;
280 /* Holds information for tracking conditional register life information. */
281 struct reg_cond_life_info
283 /* A boolean expression of conditions under which a register is dead. */
284 rtx condition;
285 /* Conditions under which a register is dead at the basic block end. */
286 rtx orig_condition;
288 /* A boolean expression of conditions under which a register has been
289 stored into. */
290 rtx stores;
292 /* ??? Could store mask of bytes that are dead, so that we could finally
293 track lifetimes of multi-word registers accessed via subregs. */
296 /* For use in communicating between propagate_block and its subroutines.
297 Holds all information needed to compute life and def-use information. */
299 struct propagate_block_info
301 /* The basic block we're considering. */
302 basic_block bb;
304 /* Bit N is set if register N is conditionally or unconditionally live. */
305 regset reg_live;
307 /* Bit N is set if register N is set this insn. */
308 regset new_set;
310 /* Element N is the next insn that uses (hard or pseudo) register N
311 within the current basic block; or zero, if there is no such insn. */
312 rtx *reg_next_use;
314 /* Contains a list of all the MEMs we are tracking for dead store
315 elimination. */
316 rtx mem_set_list;
318 /* If non-null, record the set of registers set unconditionally in the
319 basic block. */
320 regset local_set;
322 /* If non-null, record the set of registers set conditionally in the
323 basic block. */
324 regset cond_local_set;
326 #ifdef HAVE_conditional_execution
327 /* Indexed by register number, holds a reg_cond_life_info for each
328 register that is not unconditionally live or dead. */
329 splay_tree reg_cond_dead;
331 /* Bit N is set if register N is in an expression in reg_cond_dead. */
332 regset reg_cond_reg;
333 #endif
335 /* The length of mem_set_list. */
336 int mem_set_list_len;
338 /* Non-zero if the value of CC0 is live. */
339 int cc0_live;
341 /* Flags controlling the set of information propagate_block collects. */
342 int flags;
345 /* Maximum length of pbi->mem_set_list before we start dropping
346 new elements on the floor. */
347 #define MAX_MEM_SET_LIST_LEN 100
349 /* Store the data structures necessary for depth-first search. */
350 struct depth_first_search_dsS {
351 /* stack for backtracking during the algorithm */
352 basic_block *stack;
354 /* number of edges in the stack. That is, positions 0, ..., sp-1
355 have edges. */
356 unsigned int sp;
358 /* record of basic blocks already seen by depth-first search */
359 sbitmap visited_blocks;
361 typedef struct depth_first_search_dsS *depth_first_search_ds;
363 /* Have print_rtl_and_abort give the same information that fancy_abort
364 does. */
365 #define print_rtl_and_abort() \
366 print_rtl_and_abort_fcn (__FILE__, __LINE__, __FUNCTION__)
368 /* Forward declarations */
369 static bool try_crossjump_to_edge PARAMS ((int, edge, edge));
370 static bool try_crossjump_bb PARAMS ((int, basic_block));
371 static bool outgoing_edges_match PARAMS ((basic_block, basic_block));
372 static int flow_find_cross_jump PARAMS ((int, basic_block, basic_block,
373 rtx *, rtx *));
374 static int count_basic_blocks PARAMS ((rtx));
375 static void find_basic_blocks_1 PARAMS ((rtx));
376 static rtx find_label_refs PARAMS ((rtx, rtx));
377 static void make_edges PARAMS ((rtx, int, int, int));
378 static void make_label_edge PARAMS ((sbitmap *, basic_block,
379 rtx, int));
380 static void make_eh_edge PARAMS ((sbitmap *, basic_block, rtx));
382 static void commit_one_edge_insertion PARAMS ((edge));
384 static void delete_unreachable_blocks PARAMS ((void));
385 static int can_delete_note_p PARAMS ((rtx));
386 static void expunge_block PARAMS ((basic_block));
387 static int can_delete_label_p PARAMS ((rtx));
388 static int tail_recursion_label_p PARAMS ((rtx));
389 static int merge_blocks_move_predecessor_nojumps PARAMS ((basic_block,
390 basic_block));
391 static int merge_blocks_move_successor_nojumps PARAMS ((basic_block,
392 basic_block));
393 static int merge_blocks PARAMS ((edge,basic_block,basic_block,
394 int));
395 static bool try_optimize_cfg PARAMS ((int));
396 static bool can_fallthru PARAMS ((basic_block, basic_block));
397 static bool try_redirect_by_replacing_jump PARAMS ((edge, basic_block));
398 static bool try_simplify_condjump PARAMS ((basic_block));
399 static bool try_forward_edges PARAMS ((int, basic_block));
400 static void tidy_fallthru_edges PARAMS ((void));
401 static int verify_wide_reg_1 PARAMS ((rtx *, void *));
402 static void verify_wide_reg PARAMS ((int, rtx, rtx));
403 static void verify_local_live_at_start PARAMS ((regset, basic_block));
404 static void notice_stack_pointer_modification_1 PARAMS ((rtx, rtx, void *));
405 static void notice_stack_pointer_modification PARAMS ((rtx));
406 static void mark_reg PARAMS ((rtx, void *));
407 static void mark_regs_live_at_end PARAMS ((regset));
408 static int set_phi_alternative_reg PARAMS ((rtx, int, int, void *));
409 static void calculate_global_regs_live PARAMS ((sbitmap, sbitmap, int));
410 static void propagate_block_delete_insn PARAMS ((basic_block, rtx));
411 static rtx propagate_block_delete_libcall PARAMS ((basic_block, rtx, rtx));
412 static int insn_dead_p PARAMS ((struct propagate_block_info *,
413 rtx, int, rtx));
414 static int libcall_dead_p PARAMS ((struct propagate_block_info *,
415 rtx, rtx));
416 static void mark_set_regs PARAMS ((struct propagate_block_info *,
417 rtx, rtx));
418 static void mark_set_1 PARAMS ((struct propagate_block_info *,
419 enum rtx_code, rtx, rtx,
420 rtx, int));
421 #ifdef HAVE_conditional_execution
422 static int mark_regno_cond_dead PARAMS ((struct propagate_block_info *,
423 int, rtx));
424 static void free_reg_cond_life_info PARAMS ((splay_tree_value));
425 static int flush_reg_cond_reg_1 PARAMS ((splay_tree_node, void *));
426 static void flush_reg_cond_reg PARAMS ((struct propagate_block_info *,
427 int));
428 static rtx elim_reg_cond PARAMS ((rtx, unsigned int));
429 static rtx ior_reg_cond PARAMS ((rtx, rtx, int));
430 static rtx not_reg_cond PARAMS ((rtx));
431 static rtx and_reg_cond PARAMS ((rtx, rtx, int));
432 #endif
433 #ifdef AUTO_INC_DEC
434 static void attempt_auto_inc PARAMS ((struct propagate_block_info *,
435 rtx, rtx, rtx, rtx, rtx));
436 static void find_auto_inc PARAMS ((struct propagate_block_info *,
437 rtx, rtx));
438 static int try_pre_increment_1 PARAMS ((struct propagate_block_info *,
439 rtx));
440 static int try_pre_increment PARAMS ((rtx, rtx, HOST_WIDE_INT));
441 #endif
442 static void mark_used_reg PARAMS ((struct propagate_block_info *,
443 rtx, rtx, rtx));
444 static void mark_used_regs PARAMS ((struct propagate_block_info *,
445 rtx, rtx, rtx));
446 void dump_flow_info PARAMS ((FILE *));
447 void debug_flow_info PARAMS ((void));
448 static void print_rtl_and_abort_fcn PARAMS ((const char *, int,
449 const char *))
450 ATTRIBUTE_NORETURN;
452 static void add_to_mem_set_list PARAMS ((struct propagate_block_info *,
453 rtx));
454 static void invalidate_mems_from_autoinc PARAMS ((struct propagate_block_info *,
455 rtx));
456 static void invalidate_mems_from_set PARAMS ((struct propagate_block_info *,
457 rtx));
458 static void remove_fake_successors PARAMS ((basic_block));
459 static void flow_nodes_print PARAMS ((const char *, const sbitmap,
460 FILE *));
461 static void flow_edge_list_print PARAMS ((const char *, const edge *,
462 int, FILE *));
463 static void flow_loops_cfg_dump PARAMS ((const struct loops *,
464 FILE *));
465 static int flow_loop_nested_p PARAMS ((struct loop *,
466 struct loop *));
467 static int flow_loop_entry_edges_find PARAMS ((basic_block, const sbitmap,
468 edge **));
469 static int flow_loop_exit_edges_find PARAMS ((const sbitmap, edge **));
470 static int flow_loop_nodes_find PARAMS ((basic_block, basic_block, sbitmap));
471 static void flow_dfs_compute_reverse_init
472 PARAMS ((depth_first_search_ds));
473 static void flow_dfs_compute_reverse_add_bb
474 PARAMS ((depth_first_search_ds, basic_block));
475 static basic_block flow_dfs_compute_reverse_execute
476 PARAMS ((depth_first_search_ds));
477 static void flow_dfs_compute_reverse_finish
478 PARAMS ((depth_first_search_ds));
479 static void flow_loop_pre_header_scan PARAMS ((struct loop *));
480 static basic_block flow_loop_pre_header_find PARAMS ((basic_block,
481 const sbitmap *));
482 static void flow_loop_tree_node_add PARAMS ((struct loop *, struct loop *));
483 static void flow_loops_tree_build PARAMS ((struct loops *));
484 static int flow_loop_level_compute PARAMS ((struct loop *, int));
485 static int flow_loops_level_compute PARAMS ((struct loops *));
486 static void delete_dead_jumptables PARAMS ((void));
487 static bool back_edge_of_syntactic_loop_p PARAMS ((basic_block, basic_block));
489 /* Find basic blocks of the current function.
490 F is the first insn of the function and NREGS the number of register
491 numbers in use. */
493 void
494 find_basic_blocks (f, nregs, file)
495 rtx f;
496 int nregs ATTRIBUTE_UNUSED;
497 FILE *file ATTRIBUTE_UNUSED;
499 int max_uid;
500 timevar_push (TV_CFG);
502 /* Flush out existing data. */
503 if (basic_block_info != NULL)
505 int i;
507 clear_edges ();
509 /* Clear bb->aux on all extant basic blocks. We'll use this as a
510 tag for reuse during create_basic_block, just in case some pass
511 copies around basic block notes improperly. */
512 for (i = 0; i < n_basic_blocks; ++i)
513 BASIC_BLOCK (i)->aux = NULL;
515 VARRAY_FREE (basic_block_info);
518 n_basic_blocks = count_basic_blocks (f);
520 /* Size the basic block table. The actual structures will be allocated
521 by find_basic_blocks_1, since we want to keep the structure pointers
522 stable across calls to find_basic_blocks. */
523 /* ??? This whole issue would be much simpler if we called find_basic_blocks
524 exactly once, and thereafter we don't have a single long chain of
525 instructions at all until close to the end of compilation when we
526 actually lay them out. */
528 VARRAY_BB_INIT (basic_block_info, n_basic_blocks, "basic_block_info");
530 find_basic_blocks_1 (f);
532 /* Record the block to which an insn belongs. */
533 /* ??? This should be done another way, by which (perhaps) a label is
534 tagged directly with the basic block that it starts. It is used for
535 more than that currently, but IMO that is the only valid use. */
537 max_uid = get_max_uid ();
538 #ifdef AUTO_INC_DEC
539 /* Leave space for insns life_analysis makes in some cases for auto-inc.
540 These cases are rare, so we don't need too much space. */
541 max_uid += max_uid / 10;
542 #endif
544 compute_bb_for_insn (max_uid);
546 /* Discover the edges of our cfg. */
547 make_edges (label_value_list, 0, n_basic_blocks - 1, 0);
549 /* Do very simple cleanup now, for the benefit of code that runs between
550 here and cleanup_cfg, e.g. thread_prologue_and_epilogue_insns. */
551 tidy_fallthru_edges ();
553 mark_critical_edges ();
555 #ifdef ENABLE_CHECKING
556 verify_flow_info ();
557 #endif
558 timevar_pop (TV_CFG);
561 void
562 check_function_return_warnings ()
564 if (warn_missing_noreturn
565 && !TREE_THIS_VOLATILE (cfun->decl)
566 && EXIT_BLOCK_PTR->pred == NULL
567 && (lang_missing_noreturn_ok_p
568 && !lang_missing_noreturn_ok_p (cfun->decl)))
569 warning ("function might be possible candidate for attribute `noreturn'");
571 /* If we have a path to EXIT, then we do return. */
572 if (TREE_THIS_VOLATILE (cfun->decl)
573 && EXIT_BLOCK_PTR->pred != NULL)
574 warning ("`noreturn' function does return");
576 /* If the clobber_return_insn appears in some basic block, then we
577 do reach the end without returning a value. */
578 else if (warn_return_type
579 && cfun->x_clobber_return_insn != NULL
580 && EXIT_BLOCK_PTR->pred != NULL)
582 int max_uid = get_max_uid ();
584 /* If clobber_return_insn was excised by jump1, then renumber_insns
585 can make max_uid smaller than the number still recorded in our rtx.
586 That's fine, since this is a quick way of verifying that the insn
587 is no longer in the chain. */
588 if (INSN_UID (cfun->x_clobber_return_insn) < max_uid)
590 /* Recompute insn->block mapping, since the initial mapping is
591 set before we delete unreachable blocks. */
592 compute_bb_for_insn (max_uid);
594 if (BLOCK_FOR_INSN (cfun->x_clobber_return_insn) != NULL)
595 warning ("control reaches end of non-void function");
600 /* Count the basic blocks of the function. */
602 static int
603 count_basic_blocks (f)
604 rtx f;
606 register rtx insn;
607 register RTX_CODE prev_code;
608 register int count = 0;
609 int saw_abnormal_edge = 0;
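  /* Pretend the previous insn was a jump, so that the first real insn of
     the function is treated as starting a new basic block.  */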
611 prev_code = JUMP_INSN;
612 for (insn = f; insn; insn = NEXT_INSN (insn))
614 enum rtx_code code = GET_CODE (insn);
616 if (code == CODE_LABEL
617 || (GET_RTX_CLASS (code) == 'i'
618 && (prev_code == JUMP_INSN
619 || prev_code == BARRIER
620 || saw_abnormal_edge)))
622 saw_abnormal_edge = 0;
623 count++;
626 /* Record whether this insn created an edge. */
627 if (code == CALL_INSN)
629 rtx note;
631 /* If there is a nonlocal goto label and the specified
632 region number isn't -1, we have an edge. */
633 if (nonlocal_goto_handler_labels
634 && ((note = find_reg_note (insn, REG_EH_REGION, NULL_RTX)) == 0
635 || INTVAL (XEXP (note, 0)) >= 0))
636 saw_abnormal_edge = 1;
638 else if (can_throw_internal (insn))
639 saw_abnormal_edge = 1;
641 else if (flag_non_call_exceptions
642 && code == INSN
643 && can_throw_internal (insn))
644 saw_abnormal_edge = 1;
646 if (code != NOTE)
647 prev_code = code;
650 /* The rest of the compiler works a bit more smoothly when we don't have to
651 check for the edge case of do-nothing functions with no basic blocks. */
652 if (count == 0)
654 emit_insn (gen_rtx_USE (VOIDmode, const0_rtx));
655 count = 1;
658 return count;
661 /* Scan a list of insns for labels referred to other than by jumps.
662 This is used to scan the alternatives of a call placeholder. */
663 static rtx
664 find_label_refs (f, lvl)
665 rtx f;
666 rtx lvl;
668 rtx insn;
670 for (insn = f; insn; insn = NEXT_INSN (insn))
671 if (INSN_P (insn) && GET_CODE (insn) != JUMP_INSN)
673 rtx note;
675 /* Make a list of all labels referred to other than by jumps
676 (which just don't have the REG_LABEL notes).
678 Make a special exception for labels followed by an ADDR*VEC,
679 as this would be a part of the tablejump setup code.
681 Make a special exception to registers loaded with label
682 values just before jump insns that use them. */
684 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
685 if (REG_NOTE_KIND (note) == REG_LABEL)
687 rtx lab = XEXP (note, 0), next;
689 if ((next = next_nonnote_insn (lab)) != NULL
690 && GET_CODE (next) == JUMP_INSN
691 && (GET_CODE (PATTERN (next)) == ADDR_VEC
692 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
694 else if (GET_CODE (lab) == NOTE)
696 else if (GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
697 && find_reg_note (NEXT_INSN (insn), REG_LABEL, lab))
699 else
700 lvl = alloc_EXPR_LIST (0, XEXP (note, 0), lvl);
704 return lvl;
707 /* Assume that someone emitted code with control flow instructions to the
708 basic block. Update the data structure. */
709 void
710 find_sub_basic_blocks (bb)
711 basic_block bb;
713 rtx insn = bb->head;
714 rtx end = bb->end;
715 rtx jump_insn = NULL_RTX;
716 edge falltru = 0;
717 basic_block first_bb = bb;
718 int i;
720 if (insn == bb->end)
721 return;
723 if (GET_CODE (insn) == CODE_LABEL)
724 insn = NEXT_INSN (insn);
726 /* Scan insn chain and try to find new basic block boundaries. */
727 while (1)
729 enum rtx_code code = GET_CODE (insn);
730 switch (code)
732 case BARRIER:
733 if (!jump_insn)
734 abort ();
735 break;
736 /* On code label, split current basic block. */
737 case CODE_LABEL:
738 falltru = split_block (bb, PREV_INSN (insn));
739 if (jump_insn)
740 bb->end = jump_insn;
741 bb = falltru->dest;
742 remove_edge (falltru);
743 jump_insn = 0;
744 if (LABEL_ALTERNATE_NAME (insn))
745 make_edge (NULL, ENTRY_BLOCK_PTR, bb, 0);
746 break;
747 case INSN:
748 case JUMP_INSN:
749 /* In case we've previously seen a JUMP_INSN, split the block here and
750 move the block boundary back to the proper place. */
751 if (jump_insn)
753 falltru = split_block (bb, PREV_INSN (insn));
754 bb->end = jump_insn;
755 bb = falltru->dest;
756 remove_edge (falltru);
757 jump_insn = 0;
759 /* We need some special care for those expressions. */
760 if (GET_CODE (insn) == JUMP_INSN)
762 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
763 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
764 abort();
765 jump_insn = insn;
767 break;
768 default:
769 break;
771 if (insn == end)
772 break;
773 insn = NEXT_INSN (insn);
776 /* In case the expander replaced a normal insn by a sequence terminated by
777 a return and a barrier, or possibly some other sequence not behaving like
778 an ordinary jump, we need to take care to move the basic block boundary. */
779 if (jump_insn && GET_CODE (bb->end) != JUMP_INSN)
780 bb->end = jump_insn;
782 /* We've possibly replaced the conditional jump by conditional jump
783 followed by cleanup at fallthru edge, so the outgoing edges may
784 be dead. */
785 purge_dead_edges (bb);
787 /* Now re-scan and wire in all edges. This expects simple (conditional)
788 jumps at the end of each new basic block. */
789 make_edges (NULL, first_bb->index, bb->index, 1);
791 /* Update branch probabilities. Expect only (un)conditional jumps
792 to be created with only the forward edges. */
793 for (i = first_bb->index; i <= bb->index; i++)
795 edge e,f;
796 basic_block b = BASIC_BLOCK (i);
797 if (b != first_bb)
799 b->count = 0;
800 b->frequency = 0;
801 for (e = b->pred; e; e=e->pred_next)
803 b->count += e->count;
804 b->frequency += EDGE_FREQUENCY (e);
807 if (b->succ && b->succ->succ_next && !b->succ->succ_next->succ_next)
809 rtx note = find_reg_note (b->end, REG_BR_PROB, NULL);
810 int probability;
812 if (!note)
813 continue;
814 probability = INTVAL (XEXP (find_reg_note (b->end,
815 REG_BR_PROB,
816 NULL), 0));
817 e = BRANCH_EDGE (b);
818 e->probability = probability;
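	  /* Probabilities are fixed-point fractions of REG_BR_PROB_BASE;
	     scale the block count by the branch probability, rounding to
	     the nearest integer.  */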
819 e->count = ((b->count * probability + REG_BR_PROB_BASE / 2)
820 / REG_BR_PROB_BASE);
821 f = FALLTHRU_EDGE (b);
822 f->probability = REG_BR_PROB_BASE - probability;
823 f->count = b->count - e->count;
825 if (b->succ && !b->succ->succ_next)
827 e = b->succ;
828 e->probability = REG_BR_PROB_BASE;
829 e->count = b->count;
834 /* Find all basic blocks of the function whose first insn is F.
836 Collect and return a list of labels whose addresses are taken. This
837 will be used in make_edges for use with computed gotos. */
839 static void
840 find_basic_blocks_1 (f)
841 rtx f;
843 register rtx insn, next;
844 int i = 0;
845 rtx bb_note = NULL_RTX;
846 rtx lvl = NULL_RTX;
847 rtx trll = NULL_RTX;
848 rtx head = NULL_RTX;
849 rtx end = NULL_RTX;
851 /* We process the instructions in a slightly different way than we did
852 previously. This is so that we see a NOTE_BASIC_BLOCK after we have
853 closed out the previous block, so that it gets attached at the proper
854 place. Since this form should be equivalent to the previous,
855 count_basic_blocks continues to use the old form as a check. */
857 for (insn = f; insn; insn = next)
859 enum rtx_code code = GET_CODE (insn);
861 next = NEXT_INSN (insn);
863 switch (code)
865 case NOTE:
867 int kind = NOTE_LINE_NUMBER (insn);
869 /* Look for basic block notes with which to keep the
870 basic_block_info pointers stable. Unthread the note now;
871 we'll put it back at the right place in create_basic_block.
872 Or not at all if we've already found a note in this block. */
873 if (kind == NOTE_INSN_BASIC_BLOCK)
875 if (bb_note == NULL_RTX)
876 bb_note = insn;
877 else
878 next = flow_delete_insn (insn);
880 break;
883 case CODE_LABEL:
884 /* A basic block starts at a label. If we've closed one off due
885 to a barrier or some such, no need to do it again. */
886 if (head != NULL_RTX)
888 create_basic_block (i++, head, end, bb_note);
889 bb_note = NULL_RTX;
892 head = end = insn;
893 break;
895 case JUMP_INSN:
896 /* A basic block ends at a jump. */
897 if (head == NULL_RTX)
898 head = insn;
899 else
901 /* ??? Make a special check for table jumps. The way this
902 happens is truly and amazingly gross. We are about to
903 create a basic block that contains just a code label and
904 an addr*vec jump insn. Worse, an addr_diff_vec creates
905 its own natural loop.
907 Prevent this bit of brain damage, pasting things together
908 correctly in make_edges.
910 The correct solution involves emitting the table directly
911 on the tablejump instruction as a note, or JUMP_LABEL. */
913 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
914 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
916 head = end = NULL;
917 n_basic_blocks--;
918 break;
921 end = insn;
922 goto new_bb_inclusive;
924 case BARRIER:
925 /* A basic block ends at a barrier. It may be that an unconditional
926 jump already closed the basic block -- no need to do it again. */
927 if (head == NULL_RTX)
928 break;
929 goto new_bb_exclusive;
931 case CALL_INSN:
933 /* Record whether this call created an edge. */
934 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
935 int region = (note ? INTVAL (XEXP (note, 0)) : 0);
937 if (GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
939 /* Scan each of the alternatives for label refs. */
940 lvl = find_label_refs (XEXP (PATTERN (insn), 0), lvl);
941 lvl = find_label_refs (XEXP (PATTERN (insn), 1), lvl);
942 lvl = find_label_refs (XEXP (PATTERN (insn), 2), lvl);
943 /* Record its tail recursion label, if any. */
944 if (XEXP (PATTERN (insn), 3) != NULL_RTX)
945 trll = alloc_EXPR_LIST (0, XEXP (PATTERN (insn), 3), trll);
948 /* A basic block ends at a call that can either throw or
949 do a non-local goto. */
950 if ((nonlocal_goto_handler_labels && region >= 0)
951 || can_throw_internal (insn))
953 new_bb_inclusive:
954 if (head == NULL_RTX)
955 head = insn;
956 end = insn;
958 new_bb_exclusive:
959 create_basic_block (i++, head, end, bb_note);
960 head = end = NULL_RTX;
961 bb_note = NULL_RTX;
962 break;
965 /* Fall through. */
967 case INSN:
968 /* Non-call exceptions generate new blocks just like calls. */
969 if (flag_non_call_exceptions && can_throw_internal (insn))
970 goto new_bb_inclusive;
972 if (head == NULL_RTX)
973 head = insn;
974 end = insn;
975 break;
977 default:
978 abort ();
981 if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
983 rtx note;
985 /* Make a list of all labels referred to other than by jumps.
987 Make a special exception for labels followed by an ADDR*VEC,
988 as this would be a part of the tablejump setup code.
990 Make a special exception to registers loaded with label
991 values just before jump insns that use them. */
993 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
994 if (REG_NOTE_KIND (note) == REG_LABEL)
996 rtx lab = XEXP (note, 0), next;
998 if ((next = next_nonnote_insn (lab)) != NULL
999 && GET_CODE (next) == JUMP_INSN
1000 && (GET_CODE (PATTERN (next)) == ADDR_VEC
1001 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
1003 else if (GET_CODE (lab) == NOTE)
1005 else if (GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
1006 && find_reg_note (NEXT_INSN (insn), REG_LABEL, lab))
1008 else
1009 lvl = alloc_EXPR_LIST (0, XEXP (note, 0), lvl);
1014 if (head != NULL_RTX)
1015 create_basic_block (i++, head, end, bb_note);
1016 else if (bb_note)
1017 flow_delete_insn (bb_note);
1019 if (i != n_basic_blocks)
1020 abort ();
1022 label_value_list = lvl;
1023 tail_recursion_label_list = trll;
1026 /* Tidy the CFG by deleting unreachable code and whatnot. */
1028 void
1029 cleanup_cfg (mode)
1030 int mode;
1032 timevar_push (TV_CLEANUP_CFG);
1033 delete_unreachable_blocks ();
1034 if (try_optimize_cfg (mode))
1035 delete_unreachable_blocks ();
1036 mark_critical_edges ();
1038 /* Kill the data we won't maintain. */
1039 free_EXPR_LIST_list (&label_value_list);
1040 free_EXPR_LIST_list (&tail_recursion_label_list);
1041 timevar_pop (TV_CLEANUP_CFG);
1044 /* Create a new basic block consisting of the instructions between
1045 HEAD and END inclusive. Reuses the note and basic block struct
1046 in BB_NOTE, if any. */
1048 void
1049 create_basic_block (index, head, end, bb_note)
1050 int index;
1051 rtx head, end, bb_note;
1053 basic_block bb;
1055 if (bb_note
1056 && ! RTX_INTEGRATED_P (bb_note)
1057 && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
1058 && bb->aux == NULL)
1060 /* If we found an existing note, thread it back onto the chain. */
1062 rtx after;
1064 if (GET_CODE (head) == CODE_LABEL)
1065 after = head;
1066 else
1068 after = PREV_INSN (head);
1069 head = bb_note;
1072 if (after != bb_note && NEXT_INSN (after) != bb_note)
1073 reorder_insns (bb_note, bb_note, after);
1075 else
1077 /* Otherwise we must create a note and a basic block structure.
1078 Since we allow basic block structs in rtl, give the struct
1079 the same lifetime by allocating it off the function obstack
1080 rather than using malloc. */
1082 bb = (basic_block) obstack_alloc (&flow_obstack, sizeof (*bb));
1083 memset (bb, 0, sizeof (*bb));
1085 if (GET_CODE (head) == CODE_LABEL)
1086 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
1087 else
1089 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
1090 head = bb_note;
1092 NOTE_BASIC_BLOCK (bb_note) = bb;
1095 /* Always include the bb note in the block. */
1096 if (NEXT_INSN (end) == bb_note)
1097 end = bb_note;
1099 bb->head = head;
1100 bb->end = end;
1101 bb->index = index;
1102 BASIC_BLOCK (index) = bb;
1104 /* Tag the block so that we know it has been used when considering
1105 other basic block notes. */
1106 bb->aux = bb;
1109 /* Return the INSN immediately following the NOTE_INSN_BASIC_BLOCK
1110 note associated with the BLOCK. */
1113 first_insn_after_basic_block_note (block)
1114 basic_block block;
1116 rtx insn;
1118 /* Get the first instruction in the block. */
1119 insn = block->head;
1121 if (insn == NULL_RTX)
1122 return NULL_RTX;
1123 if (GET_CODE (insn) == CODE_LABEL)
1124 insn = NEXT_INSN (insn);
1125 if (!NOTE_INSN_BASIC_BLOCK_P (insn))
1126 abort ();
1128 return NEXT_INSN (insn);
1131 /* Records the basic block struct in BB_FOR_INSN, for every instruction
1132 indexed by INSN_UID. MAX is the size of the array. */
1134 void
1135 compute_bb_for_insn (max)
1136 int max;
1138 int i;
1140 if (basic_block_for_insn)
1141 VARRAY_FREE (basic_block_for_insn);
1142 VARRAY_BB_INIT (basic_block_for_insn, max, "basic_block_for_insn");
1144 for (i = 0; i < n_basic_blocks; ++i)
1146 basic_block bb = BASIC_BLOCK (i);
1147 rtx insn, end;
1149 end = bb->end;
1150 insn = bb->head;
1151 while (1)
1153 int uid = INSN_UID (insn);
1154 if (uid < max)
1155 VARRAY_BB (basic_block_for_insn, uid) = bb;
1156 if (insn == end)
1157 break;
1158 insn = NEXT_INSN (insn);
1163 /* Free the memory associated with the edge structures. */
1165 void
1166 clear_edges ()
1168 int i;
1169 edge n, e;
1171 for (i = 0; i < n_basic_blocks; ++i)
1173 basic_block bb = BASIC_BLOCK (i);
1175 for (e = bb->succ; e; e = n)
1177 n = e->succ_next;
1178 free (e);
1181 bb->succ = 0;
1182 bb->pred = 0;
1185 for (e = ENTRY_BLOCK_PTR->succ; e; e = n)
1187 n = e->succ_next;
1188 free (e);
1191 ENTRY_BLOCK_PTR->succ = 0;
1192 EXIT_BLOCK_PTR->pred = 0;
1194 n_edges = 0;
1197 /* Identify the edges between basic blocks MIN to MAX.
1199 NONLOCAL_LABEL_LIST is a list of non-local labels in the function. Blocks
1200 that are otherwise unreachable may be reachable with a non-local goto.
1202 BB_EH_END is an array indexed by basic block number in which we record
1203 the list of exception regions active at the end of the basic block. */
1205 static void
1206 make_edges (label_value_list, min, max, update_p)
1207 rtx label_value_list;
1208 int min, max, update_p;
1210 int i;
1211 sbitmap *edge_cache = NULL;
1213 /* Assume no computed jump; revise as we create edges. */
1214 current_function_has_computed_jump = 0;
1216 /* Heavy use of computed goto in machine-generated code can lead to
1217 nearly fully-connected CFGs. In that case we spend a significant
1218 amount of time searching the edge lists for duplicates. */
1219 if (forced_labels || label_value_list)
1221 edge_cache = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
1222 sbitmap_vector_zero (edge_cache, n_basic_blocks);
1224 if (update_p)
1225 for (i = min; i <= max; ++i)
1227 edge e;
1228 for (e = BASIC_BLOCK (i)->succ; e ; e = e->succ_next)
1229 if (e->dest != EXIT_BLOCK_PTR)
1230 SET_BIT (edge_cache[i], e->dest->index);
1234 /* By nature of the way these get numbered, block 0 is always the entry. */
1235 make_edge (edge_cache, ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU);
1237 for (i = min; i <= max; ++i)
1239 basic_block bb = BASIC_BLOCK (i);
1240 rtx insn, x;
1241 enum rtx_code code;
1242 int force_fallthru = 0;
1244 if (GET_CODE (bb->head) == CODE_LABEL
1245 && LABEL_ALTERNATE_NAME (bb->head))
1246 make_edge (NULL, ENTRY_BLOCK_PTR, bb, 0);
1248 /* Examine the last instruction of the block, and discover the
1249 ways we can leave the block. */
1251 insn = bb->end;
1252 code = GET_CODE (insn);
1254 /* A branch. */
1255 if (code == JUMP_INSN)
1257 rtx tmp;
1259 /* Recognize exception handling placeholders. */
1260 if (GET_CODE (PATTERN (insn)) == RESX)
1261 make_eh_edge (edge_cache, bb, insn);
1263 /* Recognize a non-local goto as a branch outside the
1264 current function. */
1265 else if (find_reg_note (insn, REG_NON_LOCAL_GOTO, NULL_RTX))
1268 /* ??? Recognize a tablejump and do the right thing. */
1269 else if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
1270 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
1271 && GET_CODE (tmp) == JUMP_INSN
1272 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
1273 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
1275 rtvec vec;
1276 int j;
1278 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
1279 vec = XVEC (PATTERN (tmp), 0);
1280 else
1281 vec = XVEC (PATTERN (tmp), 1);
1283 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
1284 make_label_edge (edge_cache, bb,
1285 XEXP (RTVEC_ELT (vec, j), 0), 0);
1287 /* Some targets (eg, ARM) emit a conditional jump that also
1288 contains the out-of-range target. Scan for these and
1289 add an edge if necessary. */
1290 if ((tmp = single_set (insn)) != NULL
1291 && SET_DEST (tmp) == pc_rtx
1292 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
1293 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
1294 make_label_edge (edge_cache, bb,
1295 XEXP (XEXP (SET_SRC (tmp), 2), 0), 0);
1297 #ifdef CASE_DROPS_THROUGH
1298 /* Silly VAXen. The ADDR_VEC is going to be in the way of
1299 us naturally detecting fallthru into the next block. */
1300 force_fallthru = 1;
1301 #endif
1304 /* If this is a computed jump, then mark it as reaching
1305 everything on the label_value_list and forced_labels list. */
1306 else if (computed_jump_p (insn))
1308 current_function_has_computed_jump = 1;
1310 for (x = label_value_list; x; x = XEXP (x, 1))
1311 make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
1313 for (x = forced_labels; x; x = XEXP (x, 1))
1314 make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
1317 /* Return insns create an exit edge out of the function. */
1318 else if (returnjump_p (insn))
1319 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);
1321 /* Otherwise, we have a plain conditional or unconditional jump. */
1322 else
1324 if (! JUMP_LABEL (insn))
1325 abort ();
1326 make_label_edge (edge_cache, bb, JUMP_LABEL (insn), 0);
1330 /* If this is a sibling call insn, then this is in effect a
1331 combined call and return, and so we need an edge to the
1332 exit block. No need to worry about EH edges, since we
1333 wouldn't have created the sibling call in the first place. */
1335 if (code == CALL_INSN && SIBLING_CALL_P (insn))
1336 make_edge (edge_cache, bb, EXIT_BLOCK_PTR,
1337 EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
1339 /* If this is a CALL_INSN, then mark it as reaching the active EH
1340 handler for this CALL_INSN. If we're handling non-call
1341 exceptions then any insn can reach any of the active handlers.
1343 Also mark the CALL_INSN as reaching any nonlocal goto handler. */
1345 else if (code == CALL_INSN || flag_non_call_exceptions)
1347 /* Add any appropriate EH edges. */
1348 make_eh_edge (edge_cache, bb, insn);
1350 if (code == CALL_INSN && nonlocal_goto_handler_labels)
1352 /* ??? This could be made smarter: in some cases it's possible
1353 to tell that certain calls will not do a nonlocal goto.
1355 For example, if the nested functions that do the nonlocal
1356 gotos do not have their addresses taken, then only calls to
1357 those functions or to other nested functions that use them
1358 could possibly do nonlocal gotos. */
1359 /* We do know that a REG_EH_REGION note with a value less
1360 than 0 is guaranteed not to perform a non-local goto. */
1361 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1362 if (!note || INTVAL (XEXP (note, 0)) >= 0)
1363 for (x = nonlocal_goto_handler_labels; x; x = XEXP (x, 1))
1364 make_label_edge (edge_cache, bb, XEXP (x, 0),
1365 EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
1369 /* Find out if we can drop through to the next block. */
1370 insn = next_nonnote_insn (insn);
1371 if (!insn || (i + 1 == n_basic_blocks && force_fallthru))
1372 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
1373 else if (i + 1 < n_basic_blocks)
1375 rtx tmp = BLOCK_HEAD (i + 1);
1376 if (GET_CODE (tmp) == NOTE)
1377 tmp = next_nonnote_insn (tmp);
1378 if (force_fallthru || insn == tmp)
1379 make_edge (edge_cache, bb, BASIC_BLOCK (i + 1), EDGE_FALLTHRU);
1383 if (edge_cache)
1384 sbitmap_vector_free (edge_cache);
1387 /* Create an edge between two basic blocks. FLAGS are auxiliary information
1388 about the edge that is accumulated between calls. */
1390 void
1391 make_edge (edge_cache, src, dst, flags)
1392 sbitmap *edge_cache;
1393 basic_block src, dst;
1394 int flags;
1396 int use_edge_cache;
1397 edge e;
1399 /* Don't bother with edge cache for ENTRY or EXIT; there aren't that
1400 many edges to them, and we didn't allocate memory for it. */
1401 use_edge_cache = (edge_cache
1402 && src != ENTRY_BLOCK_PTR
1403 && dst != EXIT_BLOCK_PTR);
1405 /* Make sure we don't add duplicate edges. */
1406 switch (use_edge_cache)
1408 default:
1409 /* Quick test for non-existence of the edge. */
1410 if (! TEST_BIT (edge_cache[src->index], dst->index))
1411 break;
1413 /* The edge exists; early exit if no work to do. */
1414 if (flags == 0)
1415 return;
1417 /* FALLTHRU */
1418 case 0:
1419 for (e = src->succ; e; e = e->succ_next)
1420 if (e->dest == dst)
1422 e->flags |= flags;
1423 return;
1425 break;
1428 e = (edge) xcalloc (1, sizeof (*e));
1429 n_edges++;
1431 e->succ_next = src->succ;
1432 e->pred_next = dst->pred;
1433 e->src = src;
1434 e->dest = dst;
1435 e->flags = flags;
1437 src->succ = e;
1438 dst->pred = e;
1440 if (use_edge_cache)
1441 SET_BIT (edge_cache[src->index], dst->index);
1444 /* Create an edge from a basic block to a label. */
1446 static void
1447 make_label_edge (edge_cache, src, label, flags)
1448 sbitmap *edge_cache;
1449 basic_block src;
1450 rtx label;
1451 int flags;
1453 if (GET_CODE (label) != CODE_LABEL)
1454 abort ();
1456 /* If the label was never emitted, this insn is junk, but avoid a
1457 crash trying to refer to BLOCK_FOR_INSN (label). This can happen
1458 as a result of a syntax error and a diagnostic has already been
1459 printed. */
1461 if (INSN_UID (label) == 0)
1462 return;
1464 make_edge (edge_cache, src, BLOCK_FOR_INSN (label), flags);
1467 /* Create the edges generated by INSN in REGION. */
1469 static void
1470 make_eh_edge (edge_cache, src, insn)
1471 sbitmap *edge_cache;
1472 basic_block src;
1473 rtx insn;
1475 int is_call = (GET_CODE (insn) == CALL_INSN ? EDGE_ABNORMAL_CALL : 0);
1476 rtx handlers, i;
1478 handlers = reachable_handlers (insn);
1480 for (i = handlers; i; i = XEXP (i, 1))
1481 make_label_edge (edge_cache, src, XEXP (i, 0),
1482 EDGE_ABNORMAL | EDGE_EH | is_call);
1484 free_INSN_LIST_list (&handlers);
1487 /* Identify critical edges and set the bits appropriately. */
1489 void
1490 mark_critical_edges ()
1492 int i, n = n_basic_blocks;
1493 basic_block bb;
1495 /* We begin with the entry block. This is not terribly important now,
1496 but could be if a front end (Fortran) implemented alternate entry
1497 points. */
1498 bb = ENTRY_BLOCK_PTR;
1499 i = -1;
1501 while (1)
1503 edge e;
1505 /* (1) Critical edges must have a source with multiple successors. */
1506 if (bb->succ && bb->succ->succ_next)
1508 for (e = bb->succ; e; e = e->succ_next)
1510 /* (2) Critical edges must have a destination with multiple
1511 predecessors. Note that we know there is at least one
1512 predecessor -- the edge we followed to get here. */
1513 if (e->dest->pred->pred_next)
1514 e->flags |= EDGE_CRITICAL;
1515 else
1516 e->flags &= ~EDGE_CRITICAL;
1519 else
1521 for (e = bb->succ; e; e = e->succ_next)
1522 e->flags &= ~EDGE_CRITICAL;
1525 if (++i >= n)
1526 break;
1527 bb = BASIC_BLOCK (i);
1531 /* Split a block BB after insn INSN creating a new fallthru edge.
1532 Return the new edge. Note that to keep other parts of the compiler happy,
1533 this function renumbers all the basic blocks so that the new
1534 one has a number one greater than the block split. */
1536 edge
1537 split_block (bb, insn)
1538 basic_block bb;
1539 rtx insn;
1541 basic_block new_bb;
1542 edge new_edge;
1543 edge e;
1544 rtx bb_note;
1545 int i, j;
1547 /* There is no point splitting the block after its end. */
1548 if (bb->end == insn)
1549 return 0;
1551 /* Create the new structures. */
1552 new_bb = (basic_block) obstack_alloc (&flow_obstack, sizeof (*new_bb));
1553 new_edge = (edge) xcalloc (1, sizeof (*new_edge));
1554 n_edges++;
1556 memset (new_bb, 0, sizeof (*new_bb));
1558 new_bb->head = NEXT_INSN (insn);
1559 new_bb->end = bb->end;
1560 bb->end = insn;
1562 new_bb->succ = bb->succ;
1563 bb->succ = new_edge;
1564 new_bb->pred = new_edge;
1565 new_bb->count = bb->count;
1566 new_bb->frequency = bb->frequency;
1567 new_bb->loop_depth = bb->loop_depth;
1569 new_edge->src = bb;
1570 new_edge->dest = new_bb;
1571 new_edge->flags = EDGE_FALLTHRU;
1572 new_edge->probability = REG_BR_PROB_BASE;
1573 new_edge->count = bb->count;
1575 /* Redirect the src of the successor edges of bb to point to new_bb. */
1576 for (e = new_bb->succ; e; e = e->succ_next)
1577 e->src = new_bb;
1579 /* Place the new block just after the block being split. */
1580 VARRAY_GROW (basic_block_info, ++n_basic_blocks);
1582 /* Some parts of the compiler expect blocks to be numbered in
1583 sequential order, so insert the new block immediately after the
1584 block being split. */
1585 j = bb->index;
1586 for (i = n_basic_blocks - 1; i > j + 1; --i)
1588 basic_block tmp = BASIC_BLOCK (i - 1);
1589 BASIC_BLOCK (i) = tmp;
1590 tmp->index = i;
1593 BASIC_BLOCK (i) = new_bb;
1594 new_bb->index = i;
1596 if (GET_CODE (new_bb->head) == CODE_LABEL)
1598 /* Create the basic block note. */
1599 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK,
1600 new_bb->head);
1601 NOTE_BASIC_BLOCK (bb_note) = new_bb;
1603 /* If the only thing in this new block was the label, make sure
1604 the block note gets included. */
1605 if (new_bb->head == new_bb->end)
1606 new_bb->end = bb_note;
1608 else
1610 /* Create the basic block note. */
1611 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK,
1612 new_bb->head);
1613 NOTE_BASIC_BLOCK (bb_note) = new_bb;
1614 new_bb->head = bb_note;
1617 update_bb_for_insn (new_bb);
1619 if (bb->global_live_at_start)
1621 new_bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1622 new_bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1623 COPY_REG_SET (new_bb->global_live_at_end, bb->global_live_at_end);
1625 /* We now have to calculate which registers are live at the end
1626 of the split basic block and at the start of the new basic
1627 block. Start with those registers that are known to be live
1628 at the end of the original basic block and get
1629 propagate_block to determine which registers are live. */
1630 COPY_REG_SET (new_bb->global_live_at_start, bb->global_live_at_end);
1631 propagate_block (new_bb, new_bb->global_live_at_start, NULL, NULL, 0);
1632 COPY_REG_SET (bb->global_live_at_end,
1633 new_bb->global_live_at_start);
1636 return new_edge;
1639 /* Return the label at the head of BLOCK. Create one if it doesn't exist. */
1641 block_label (block)
1642 basic_block block;
1644 if (block == EXIT_BLOCK_PTR)
1645 return NULL_RTX;
1646 if (GET_CODE (block->head) != CODE_LABEL)
1648 block->head = emit_label_before (gen_label_rtx (), block->head);
1649 if (basic_block_for_insn)
1650 set_block_for_insn (block->head, block);
1652 return block->head;
1655 /* Return true if the block has no effect and only forwards control flow to
1656 its single destination. */
1657 bool
1658 forwarder_block_p (bb)
1659 basic_block bb;
1661 rtx insn = bb->head;
1662 if (bb == EXIT_BLOCK_PTR || bb == ENTRY_BLOCK_PTR
1663 || !bb->succ || bb->succ->succ_next)
1664 return false;
1666 while (insn != bb->end)
1668 if (active_insn_p (insn))
1669 return false;
1670 insn = NEXT_INSN (insn);
1672 return (!active_insn_p (insn)
1673 || (GET_CODE (insn) == JUMP_INSN && onlyjump_p (insn)));
1676 /* Return nonzero if we can reach TARGET from SRC by falling through. */
1677 static bool
1678 can_fallthru (src, target)
1679 basic_block src, target;
1681 rtx insn = src->end;
1682 rtx insn2 = target->head;
1684 if (src->index + 1 == target->index && !active_insn_p (insn2))
1685 insn2 = next_active_insn (insn2);
1686 /* ??? Later we may add code to move jump tables offline. */
1687 return next_active_insn (insn) == insn2;
1690 /* Attempt to perform edge redirection by replacing a possibly complex jump
1691 instruction by an unconditional jump, or by removing the jump completely.
1692 This can apply only if all edges now point to the same block.
1694 The parameters and return values are equivalent to redirect_edge_and_branch. */
1696 static bool
1697 try_redirect_by_replacing_jump (e, target)
1698 edge e;
1699 basic_block target;
1701 basic_block src = e->src;
1702 rtx insn = src->end, kill_from;
1703 edge tmp;
1704 rtx set;
1705 int fallthru = 0;
1707 /* Verify that all targets will be TARGET. */
1708 for (tmp = src->succ; tmp; tmp = tmp->succ_next)
1709 if (tmp->dest != target && tmp != e)
1710 break;
1711 if (tmp || !onlyjump_p (insn))
1712 return false;
1714 /* Avoid removing branch with side effects. */
1715 set = single_set (insn);
1716 if (!set || side_effects_p (set))
1717 return false;
1719 /* In case we zap a conditional jump, we'll need to kill
1720 the cc0 setter too. */
1721 kill_from = insn;
1722 #ifdef HAVE_cc0
1723 if (reg_mentioned_p (cc0_rtx, PATTERN (insn)))
1724 kill_from = PREV_INSN (insn);
1725 #endif
1727 /* See if we can create the fallthru edge. */
1728 if (can_fallthru (src, target))
1730 src->end = PREV_INSN (kill_from);
1731 if (rtl_dump_file)
1732 fprintf (rtl_dump_file, "Removing jump %i.\n", INSN_UID (insn));
1733 fallthru = 1;
1735 /* Selectively unlink the whole insn chain. */
1736 flow_delete_insn_chain (kill_from, PREV_INSN (target->head));
1738 /* If this already is simplejump, redirect it. */
1739 else if (simplejump_p (insn))
1741 if (e->dest == target)
1742 return false;
1743 if (rtl_dump_file)
1744 fprintf (rtl_dump_file, "Redirecting jump %i from %i to %i.\n",
1745 INSN_UID (insn), e->dest->index, target->index);
1746 redirect_jump (insn, block_label (target), 0);
1748 /* Or replace possibly complicated jump insn by simple jump insn. */
1749 else
1751 rtx target_label = block_label (target);
1752 rtx barrier;
1754 src->end = emit_jump_insn_before (gen_jump (target_label), kill_from);
1755 JUMP_LABEL (src->end) = target_label;
1756 LABEL_NUSES (target_label)++;
1757 if (basic_block_for_insn)
1758 set_block_for_new_insns (src->end, src);
1759 if (rtl_dump_file)
1760 fprintf (rtl_dump_file, "Replacing insn %i by jump %i\n",
1761 INSN_UID (insn), INSN_UID (src->end));
1763 flow_delete_insn_chain (kill_from, insn);
1765 barrier = next_nonnote_insn (src->end);
1766 if (!barrier || GET_CODE (barrier) != BARRIER)
1767 emit_barrier_after (src->end);
1770 /* Keep only one edge out and set proper flags. */
1771 while (src->succ->succ_next)
1772 remove_edge (src->succ);
1773 e = src->succ;
1774 if (fallthru)
1775 e->flags = EDGE_FALLTHRU;
1776 else
1777 e->flags = 0;
1778 e->probability = REG_BR_PROB_BASE;
1779 e->count = src->count;
1781 /* We don't want a block to end on a line-number note since that has
1782 the potential of changing the code between -g and not -g. */
1783 while (GET_CODE (e->src->end) == NOTE
1784 && NOTE_LINE_NUMBER (e->src->end) >= 0)
1786 rtx prev = PREV_INSN (e->src->end);
1787 flow_delete_insn (e->src->end);
1788 e->src->end = prev;
1791 if (e->dest != target)
1792 redirect_edge_succ (e, target);
1793 return true;
1796 /* Return the last LOOP_BEG note appearing after INSN, before the start of
1797 the next basic block. Return INSN if there are no such notes.
1799 When emitting a jump to redirect a fallthru edge, it should always appear
1800 after the LOOP_BEG notes, as the loop optimizer expects a loop either to
1801 start with a fallthru edge or with a jump, following the LOOP_BEG note,
1802 that jumps to the loop exit test. */
1804 last_loop_beg_note (insn)
1805 rtx insn;
1807 rtx last = insn;
1808 insn = NEXT_INSN (insn);
1809 while (GET_CODE (insn) == NOTE
1810 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK)
1812 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
1813 last = insn;
1814 insn = NEXT_INSN (insn);
1816 return last;
1819 /* Attempt to change code to redirect edge E to TARGET.
1820 Don't do that at the expense of adding new instructions or reordering
1821 basic blocks.
1823 The function can also be called when edge E's destination is already
1824 TARGET. Then it should try the simplifications and do nothing if
1825 none is possible.
1827 Return true if the transformation succeeded. We still return false in case
1828 E already pointed to TARGET and we didn't manage to simplify the instruction
1829 stream. */
1830 bool
1831 redirect_edge_and_branch (e, target)
1832 edge e;
1833 basic_block target;
1835 rtx tmp;
1836 rtx old_label = e->dest->head;
1837 basic_block src = e->src;
1838 rtx insn = src->end;
1840 if (e->flags & EDGE_COMPLEX)
1841 return false;
1843 if (try_redirect_by_replacing_jump (e, target))
1844 return true;
1845 /* Do this fast path late, as we want the code above to simplify the cases
1846 where we are called on the single edge leaving a basic block that contains
1847 a nontrivial jump insn. */
1848 else if (e->dest == target)
1849 return false;
1851 /* We can only redirect non-fallthru edges of jump insn. */
1852 if (e->flags & EDGE_FALLTHRU)
1853 return false;
1854 if (GET_CODE (insn) != JUMP_INSN)
1855 return false;
1857 /* Recognize a tablejump and adjust all matching cases. */
1858 if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
1859 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
1860 && GET_CODE (tmp) == JUMP_INSN
1861 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
1862 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
1864 rtvec vec;
1865 int j;
1866 rtx new_label = block_label (target);
1868 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
1869 vec = XVEC (PATTERN (tmp), 0);
1870 else
1871 vec = XVEC (PATTERN (tmp), 1);
1873 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
1874 if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
1876 RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (Pmode, new_label);
1877 --LABEL_NUSES (old_label);
1878 ++LABEL_NUSES (new_label);
1881 /* Handle casesi dispatch insns */
1882 if ((tmp = single_set (insn)) != NULL
1883 && SET_DEST (tmp) == pc_rtx
1884 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
1885 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
1886 && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
1888 XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (VOIDmode,
1889 new_label);
1890 --LABEL_NUSES (old_label);
1891 ++LABEL_NUSES (new_label);
1894 else
1896 /* ?? We could play games with moving the named labels from
1897 one basic block to the other when only one computed_jump is
1898 available. */
1899 if (computed_jump_p (insn))
1900 return false;
1902 /* A return instruction can't be redirected. */
1903 if (returnjump_p (insn))
1904 return false;
1906 /* If the insn doesn't go where we think, we're confused. */
1907 if (JUMP_LABEL (insn) != old_label)
1908 abort ();
1909 redirect_jump (insn, block_label (target), 0);
1912 if (rtl_dump_file)
1913 fprintf (rtl_dump_file, "Edge %i->%i redirected to %i\n",
1914 e->src->index, e->dest->index, target->index);
1915 if (e->dest != target)
1916 redirect_edge_succ_nodup (e, target);
1917 return true;
1920 /* Redirect edge E to TARGET even at the expense of creating a new jump insn
1921 or basic block. Return the new basic block if one was created, NULL otherwise.
1922 Abort if the conversion is impossible. */
1923 basic_block
1924 redirect_edge_and_branch_force (e, target)
1925 edge e;
1926 basic_block target;
1928 basic_block new_bb;
1929 edge new_edge;
1930 rtx label;
1931 rtx bb_note;
1932 int i, j;
1934 if (redirect_edge_and_branch (e, target))
1935 return NULL;
1936 if (e->dest == target)
1937 return NULL;
1938 if (e->flags & EDGE_ABNORMAL)
1939 abort ();
1940 if (!(e->flags & EDGE_FALLTHRU))
1941 abort ();
1943 e->flags &= ~EDGE_FALLTHRU;
1944 label = block_label (target);
1945 /* Case of the fallthru block. */
1946 if (!e->src->succ->succ_next)
1948 e->src->end = emit_jump_insn_after (gen_jump (label),
1949 last_loop_beg_note (e->src->end));
1950 JUMP_LABEL (e->src->end) = label;
1951 LABEL_NUSES (label)++;
1952 if (basic_block_for_insn)
1953 set_block_for_new_insns (e->src->end, e->src);
1954 emit_barrier_after (e->src->end);
1955 if (rtl_dump_file)
1956 fprintf (rtl_dump_file,
1957 "Emitting jump insn %i to redirect edge %i->%i to %i\n",
1958 INSN_UID (e->src->end), e->src->index, e->dest->index,
1959 target->index);
1960 redirect_edge_succ (e, target);
1961 return NULL;
1963 /* Redirecting the fallthru edge of the conditional needs extra work. */
1965 if (rtl_dump_file)
1966 fprintf (rtl_dump_file,
1967 "Emitting jump insn %i in new BB to redirect edge %i->%i to %i\n",
1968 INSN_UID (e->src->end), e->src->index, e->dest->index,
1969 target->index);
1971 /* Create the new structures. */
1972 new_bb = (basic_block) obstack_alloc (&flow_obstack, sizeof (*new_bb));
1973 new_edge = (edge) xcalloc (1, sizeof (*new_edge));
1974 n_edges++;
1976 memset (new_bb, 0, sizeof (*new_bb));
1978 new_bb->end = new_bb->head = last_loop_beg_note (e->src->end);
1979 new_bb->succ = NULL;
1980 new_bb->pred = new_edge;
1981 new_bb->count = e->count;
1982 new_bb->frequency = EDGE_FREQUENCY (e);
1983 new_bb->loop_depth = e->dest->loop_depth;
1985 new_edge->flags = EDGE_FALLTHRU;
1986 new_edge->probability = e->probability;
1987 new_edge->count = e->count;
1989 if (target->global_live_at_start)
1991 new_bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1992 new_bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1993 COPY_REG_SET (new_bb->global_live_at_start,
1994 target->global_live_at_start);
1995 COPY_REG_SET (new_bb->global_live_at_end, new_bb->global_live_at_start);
1998 /* Wire edge in. */
1999 new_edge->src = e->src;
2000 new_edge->dest = new_bb;
2001 new_edge->succ_next = e->src->succ;
2002 e->src->succ = new_edge;
2003 new_edge->pred_next = NULL;
2005 /* Redirect old edge. */
2006 redirect_edge_succ (e, target);
2007 redirect_edge_pred (e, new_bb);
2008 e->probability = REG_BR_PROB_BASE;
2010 /* Place the new block just after the block being split. */
2011 VARRAY_GROW (basic_block_info, ++n_basic_blocks);
2013 /* Some parts of the compiler expect blocks to be numbered in
2014 sequential order, so insert the new block immediately after the
2015 block being split. */
2016 j = new_edge->src->index;
2017 for (i = n_basic_blocks - 1; i > j + 1; --i)
2019 basic_block tmp = BASIC_BLOCK (i - 1);
2020 BASIC_BLOCK (i) = tmp;
2021 tmp->index = i;
2024 BASIC_BLOCK (i) = new_bb;
2025 new_bb->index = i;
2027 /* Create the basic block note. */
2028 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, new_bb->head);
2029 NOTE_BASIC_BLOCK (bb_note) = new_bb;
2030 new_bb->head = bb_note;
2032 new_bb->end = emit_jump_insn_after (gen_jump (label), new_bb->head);
2033 JUMP_LABEL (new_bb->end) = label;
2034 LABEL_NUSES (label)++;
2035 if (basic_block_for_insn)
2036 set_block_for_new_insns (new_bb->end, new_bb);
2037 emit_barrier_after (new_bb->end);
2038 return new_bb;
2041 /* Helper function for split_edge. Return true if the edge from BB2 to BB1
2042 is a back edge of a syntactic loop. */
2043 static bool
2044 back_edge_of_syntactic_loop_p (bb1, bb2)
2045 basic_block bb1, bb2;
2047 rtx insn;
2048 int count = 0;
2049 if (bb1->index > bb2->index)
2050 return false;
2051 if (bb1->index == bb2->index)
2052 return true;
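/* Count LOOP_BEG notes opened and LOOP_END notes closed between the end
   of BB1 and the head of BB2, stopping early if a loop closes that was
   not opened within this range. */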
2053 for (insn = bb1->end; insn != bb2->head && count >= 0;
2054 insn = NEXT_INSN (insn))
2055 if (GET_CODE (insn) == NOTE)
2057 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
2058 count++;
2059 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END)
2060 count--;
2062 return count >= 0;
2065 /* Split a (typically critical) edge. Return the new block.
2066 Abort on abnormal edges.
2068 ??? The code generally expects to be called on critical edges.
2069 The case of a block ending in an unconditional jump to a
2070 block with multiple predecessors is not handled optimally. */
2072 basic_block
2073 split_edge (edge_in)
2074 edge edge_in;
2076 basic_block old_pred, bb, old_succ;
2077 edge edge_out;
2078 rtx bb_note;
2079 int i, j;
2081 /* Abnormal edges cannot be split. */
2082 if ((edge_in->flags & EDGE_ABNORMAL) != 0)
2083 abort ();
2085 old_pred = edge_in->src;
2086 old_succ = edge_in->dest;
2088 /* Create the new structures. */
2089 bb = (basic_block) obstack_alloc (&flow_obstack, sizeof (*bb));
2090 edge_out = (edge) xcalloc (1, sizeof (*edge_out));
2091 n_edges++;
2093 memset (bb, 0, sizeof (*bb));
2095 /* ??? This info is likely going to be out of date very soon. */
2096 if (old_succ->global_live_at_start)
2098 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
2099 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
2100 COPY_REG_SET (bb->global_live_at_start, old_succ->global_live_at_start);
2101 COPY_REG_SET (bb->global_live_at_end, old_succ->global_live_at_start);
2104 /* Wire them up. */
2105 bb->succ = edge_out;
2106 bb->count = edge_in->count;
2107 bb->frequency = EDGE_FREQUENCY (edge_in);
2109 edge_in->flags &= ~EDGE_CRITICAL;
2111 edge_out->pred_next = old_succ->pred;
2112 edge_out->succ_next = NULL;
2113 edge_out->src = bb;
2114 edge_out->dest = old_succ;
2115 edge_out->flags = EDGE_FALLTHRU;
2116 edge_out->probability = REG_BR_PROB_BASE;
2117 edge_out->count = edge_in->count;
2119 old_succ->pred = edge_out;
2121 /* Tricky case -- if there existed a fallthru into the successor
2122 (and we're not it) we must add a new unconditional jump around
2123 the new block we're actually interested in.
2125 Further, if that edge is critical, this means a second new basic
2126 block must be created to hold it. In order to simplify correct
2127 insn placement, do this before we touch the existing basic block
2128 ordering for the block we were really wanting. */
2129 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
2131 edge e;
2132 for (e = edge_out->pred_next; e; e = e->pred_next)
2133 if (e->flags & EDGE_FALLTHRU)
2134 break;
2136 if (e)
2138 basic_block jump_block;
2139 rtx pos;
2141 if ((e->flags & EDGE_CRITICAL) == 0
2142 && e->src != ENTRY_BLOCK_PTR)
2144 /* Non critical -- we can simply add a jump to the end
2145 of the existing predecessor. */
2146 jump_block = e->src;
2148 else
2150 /* We need a new block to hold the jump. The simplest
2151 way to do the bulk of the work here is to recursively
2152 call ourselves. */
2153 jump_block = split_edge (e);
2154 e = jump_block->succ;
2157 /* Now add the jump insn ... */
2158 pos = emit_jump_insn_after (gen_jump (old_succ->head),
2159 last_loop_beg_note (jump_block->end));
2160 jump_block->end = pos;
2161 if (basic_block_for_insn)
2162 set_block_for_new_insns (pos, jump_block);
2163 emit_barrier_after (pos);
2165 /* ... let jump know that label is in use, ... */
2166 JUMP_LABEL (pos) = old_succ->head;
2167 ++LABEL_NUSES (old_succ->head);
2169 /* ... and clear fallthru on the outgoing edge. */
2170 e->flags &= ~EDGE_FALLTHRU;
2172 /* Continue splitting the interesting edge. */
2176 /* Place the new block just in front of the successor. */
2177 VARRAY_GROW (basic_block_info, ++n_basic_blocks);
2178 if (old_succ == EXIT_BLOCK_PTR)
2179 j = n_basic_blocks - 1;
2180 else
2181 j = old_succ->index;
2182 for (i = n_basic_blocks - 1; i > j; --i)
2184 basic_block tmp = BASIC_BLOCK (i - 1);
2185 BASIC_BLOCK (i) = tmp;
2186 tmp->index = i;
2188 BASIC_BLOCK (i) = bb;
2189 bb->index = i;
2191 /* Create the basic block note.
2193 Where we place the note can have a noticeable impact on the generated
2194 code. Consider this cfg:
	        E
	        |
	        0
	       / \
	   +->1-->2--->E
	   |  |
	   +--+
2204 If we need to insert an insn on the edge from block 0 to block 1,
2205 we want to ensure the instructions we insert are outside of any
2206 loop notes that physically sit between block 0 and block 1. Otherwise
2207 we confuse the loop optimizer into thinking the loop is a phony. */
2208 if (old_succ != EXIT_BLOCK_PTR
2209 && PREV_INSN (old_succ->head)
2210 && GET_CODE (PREV_INSN (old_succ->head)) == NOTE
2211 && NOTE_LINE_NUMBER (PREV_INSN (old_succ->head)) == NOTE_INSN_LOOP_BEG
2212 && !back_edge_of_syntactic_loop_p (old_succ, old_pred))
2213 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK,
2214 PREV_INSN (old_succ->head));
2215 else if (old_succ != EXIT_BLOCK_PTR)
2216 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, old_succ->head);
2217 else
2218 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
2219 NOTE_BASIC_BLOCK (bb_note) = bb;
2220 bb->head = bb->end = bb_note;
2222 /* For non-fallthru edges, we must adjust the predecessor's
2223 jump instruction to target our new block. */
2224 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
2226 if (!redirect_edge_and_branch (edge_in, bb))
2227 abort ();
2229 else
2230 redirect_edge_succ (edge_in, bb);
2232 return bb;
2235 /* Queue instructions for insertion on an edge between two basic blocks.
2236 The new instructions and basic blocks (if any) will not appear in the
2237 CFG until commit_edge_insertions is called. */
2239 void
2240 insert_insn_on_edge (pattern, e)
2241 rtx pattern;
2242 edge e;
2244 /* We cannot insert instructions on an abnormal critical edge.
2245 It will be easier to find the culprit if we die now. */
2246 if ((e->flags & (EDGE_ABNORMAL|EDGE_CRITICAL))
2247 == (EDGE_ABNORMAL|EDGE_CRITICAL))
2248 abort ();
2250 if (e->insns == NULL_RTX)
2251 start_sequence ();
2252 else
2253 push_to_sequence (e->insns);
2255 emit_insn (pattern);
2257 e->insns = get_insns ();
2258 end_sequence ();
2261 /* Update the CFG for the instructions queued on edge E. */
2263 static void
2264 commit_one_edge_insertion (e)
2265 edge e;
2267 rtx before = NULL_RTX, after = NULL_RTX, insns, tmp, last;
2268 basic_block bb;
2270 /* Pull the insns off the edge now since the edge might go away. */
2271 insns = e->insns;
2272 e->insns = NULL_RTX;
2274 /* Figure out where to put these things. If the destination has
2275 one predecessor, insert there. Except for the exit block. */
2276 if (e->dest->pred->pred_next == NULL
2277 && e->dest != EXIT_BLOCK_PTR)
2279 bb = e->dest;
2281 /* Get the location correct wrt a code label, and "nice" wrt
2282 a basic block note, and before everything else. */
2283 tmp = bb->head;
2284 if (GET_CODE (tmp) == CODE_LABEL)
2285 tmp = NEXT_INSN (tmp);
2286 if (NOTE_INSN_BASIC_BLOCK_P (tmp))
2287 tmp = NEXT_INSN (tmp);
2288 if (tmp == bb->head)
2289 before = tmp;
2290 else
2291 after = PREV_INSN (tmp);
2294 /* If the source has one successor and the edge is not abnormal,
2295 insert there. Except for the entry block. */
2296 else if ((e->flags & EDGE_ABNORMAL) == 0
2297 && e->src->succ->succ_next == NULL
2298 && e->src != ENTRY_BLOCK_PTR)
2300 bb = e->src;
2301 /* It is possible to have a non-simple jump here. Consider a target
2302 where some forms of unconditional jumps clobber a register. This
2303 happens on the fr30 for example.
2305 We know this block has a single successor, so we can just emit
2306 the queued insns before the jump. */
2307 if (GET_CODE (bb->end) == JUMP_INSN)
2309 before = bb->end;
2311 else
2313 /* We'd better be fallthru, or we've lost track of what's what. */
2314 if ((e->flags & EDGE_FALLTHRU) == 0)
2315 abort ();
2317 after = bb->end;
2321 /* Otherwise we must split the edge. */
2322 else
2324 bb = split_edge (e);
2325 after = bb->end;
2328 /* Now that we've found the spot, do the insertion. */
2330 /* Set the new block number for these insns, if structure is allocated. */
2331 if (basic_block_for_insn)
2333 rtx i;
2334 for (i = insns; i != NULL_RTX; i = NEXT_INSN (i))
2335 set_block_for_insn (i, bb);
2338 if (before)
2340 emit_insns_before (insns, before);
2341 if (before == bb->head)
2342 bb->head = insns;
2344 last = prev_nonnote_insn (before);
2346 else
2348 last = emit_insns_after (insns, after);
2349 if (after == bb->end)
2350 bb->end = last;
2353 if (returnjump_p (last))
2355 /* ??? Remove all outgoing edges from BB and add one for EXIT.
2356 This is not currently a problem because this only happens
2357 for the (single) epilogue, which already has a fallthru edge
2358 to EXIT. */
2360 e = bb->succ;
2361 if (e->dest != EXIT_BLOCK_PTR
2362 || e->succ_next != NULL
2363 || (e->flags & EDGE_FALLTHRU) == 0)
2364 abort ();
2365 e->flags &= ~EDGE_FALLTHRU;
2367 emit_barrier_after (last);
2368 bb->end = last;
2370 if (before)
2371 flow_delete_insn (before);
2373 else if (GET_CODE (last) == JUMP_INSN)
2374 abort ();
2375 find_sub_basic_blocks (bb);
2378 /* Update the CFG for all queued instructions. */
2380 void
2381 commit_edge_insertions ()
2383 int i;
2384 basic_block bb;
2385 compute_bb_for_insn (get_max_uid ());
2387 #ifdef ENABLE_CHECKING
2388 verify_flow_info ();
2389 #endif
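/* Walk the entry block first (index -1 below stands for ENTRY_BLOCK_PTR),
   then each ordinary basic block in index order. */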
2391 i = -1;
2392 bb = ENTRY_BLOCK_PTR;
2393 while (1)
2395 edge e, next;
2397 for (e = bb->succ; e; e = next)
2399 next = e->succ_next;
2400 if (e->insns)
2401 commit_one_edge_insertion (e);
2404 if (++i >= n_basic_blocks)
2405 break;
2406 bb = BASIC_BLOCK (i);
2410 /* Add fake edges to the function exit for any non-constant calls in
2411 the bitmap of blocks specified by BLOCKS, or in the whole CFG if
2412 BLOCKS is zero. Return the number of blocks that were split. */
int
2415 flow_call_edges_add (blocks)
2416 sbitmap blocks;
2418 int i;
2419 int blocks_split = 0;
2420 int bb_num = 0;
2421 basic_block *bbs;
2423 /* Map bb indices into basic block pointers since split_block
2424 will renumber the basic blocks. */
2426 bbs = xmalloc (n_basic_blocks * sizeof (*bbs));
2428 if (! blocks)
2430 for (i = 0; i < n_basic_blocks; i++)
2431 bbs[bb_num++] = BASIC_BLOCK (i);
2433 else
2435 EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
2437 bbs[bb_num++] = BASIC_BLOCK (i);
2442 /* Now add fake edges to the function exit for any non-constant
2443 calls, since there is no way that we can determine if they will
2444 return or not... */
2446 for (i = 0; i < bb_num; i++)
2448 basic_block bb = bbs[i];
2449 rtx insn;
2450 rtx prev_insn;
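/* Scan the block backwards; splitting at a call leaves the already
   scanned tail in the newly created block, while BB keeps the insns
   up to and including the call. */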
2452 for (insn = bb->end; ; insn = prev_insn)
2454 prev_insn = PREV_INSN (insn);
2455 if (GET_CODE (insn) == CALL_INSN && ! CONST_CALL_P (insn))
2457 edge e;
2459 /* Note that the following may create a new basic block
2460 and renumber the existing basic blocks. */
2461 e = split_block (bb, insn);
2462 if (e)
2463 blocks_split++;
2465 make_edge (NULL, bb, EXIT_BLOCK_PTR, EDGE_FAKE);
2467 if (insn == bb->head)
2468 break;
2472 if (blocks_split)
2473 verify_flow_info ();
2475 free (bbs);
2476 return blocks_split;
2479 /* Find unreachable blocks. An unreachable block will have NULL in
2480 block->aux, a non-NULL value indicates the block is reachable. */
2482 void
2483 find_unreachable_blocks ()
2485 edge e;
2486 int i, n;
2487 basic_block *tos, *worklist;
2489 n = n_basic_blocks;
2490 tos = worklist = (basic_block *) xmalloc (sizeof (basic_block) * n);
2492 /* Use basic_block->aux as a marker. Clear them all. */
2494 for (i = 0; i < n; ++i)
2495 BASIC_BLOCK (i)->aux = NULL;
2497 /* Add our starting points to the worklist. Almost always there will
2498 be only one. It isn't inconceivable that we might one day directly
2499 support Fortran alternate entry points. */
2501 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
2503 *tos++ = e->dest;
2505 /* Mark the block with a handy non-null value. */
2506 e->dest->aux = e;
2509 /* Iterate: find everything reachable from what we've already seen. */
2511 while (tos != worklist)
2513 basic_block b = *--tos;
2515 for (e = b->succ; e; e = e->succ_next)
2516 if (!e->dest->aux)
2518 *tos++ = e->dest;
2519 e->dest->aux = e;
2523 free (worklist);
2526 /* Delete all unreachable basic blocks. */
2527 static void
2528 delete_unreachable_blocks ()
2530 int i;
2532 find_unreachable_blocks ();
2534 /* Delete all unreachable basic blocks. Count down so that we
2535 don't interfere with the block renumbering that happens in
2536 flow_delete_block. */
2538 for (i = n_basic_blocks - 1; i >= 0; --i)
2540 basic_block b = BASIC_BLOCK (i);
2542 if (b->aux != NULL)
2543 /* This block was found. Tidy up the mark. */
2544 b->aux = NULL;
2545 else
2546 flow_delete_block (b);
2549 tidy_fallthru_edges ();
2552 /* Return true if NOTE is not one of the ones that must be kept paired,
2553 so that we may simply delete them. */
2555 static int
2556 can_delete_note_p (note)
2557 rtx note;
2559 return (NOTE_LINE_NUMBER (note) == NOTE_INSN_DELETED
2560 || NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK);
2563 /* Unlink a chain of insns between START and FINISH, leaving notes
2564 that must be paired. */
2566 void
2567 flow_delete_insn_chain (start, finish)
2568 rtx start, finish;
2570 /* Unchain the insns one by one. It would be quicker to delete all
2571 of these with a single unchaining, rather than one at a time, but
2572 we need to keep the NOTEs. */
2574 rtx next;
2576 while (1)
2578 next = NEXT_INSN (start);
2579 if (GET_CODE (start) == NOTE && !can_delete_note_p (start))
2581 else if (GET_CODE (start) == CODE_LABEL
2582 && ! can_delete_label_p (start))
2584 const char *name = LABEL_NAME (start);
2585 PUT_CODE (start, NOTE);
2586 NOTE_LINE_NUMBER (start) = NOTE_INSN_DELETED_LABEL;
2587 NOTE_SOURCE_FILE (start) = name;
2589 else
2590 next = flow_delete_insn (start);
2592 if (start == finish)
2593 break;
2594 start = next;
2598 /* Delete the insns in a (non-live) block. We physically delete every
2599 non-deleted-note insn, and update the flow graph appropriately.
2601 Return nonzero if we deleted an exception handler. */
2603 /* ??? Preserving all such notes strikes me as wrong. It would be nice
2604 to post-process the stream to remove empty blocks, loops, ranges, etc. */
int
2607 flow_delete_block (b)
2608 basic_block b;
2610 int deleted_handler = 0;
2611 rtx insn, end, tmp;
2613 /* If the head of this block is a CODE_LABEL, then it might be the
2614 label for an exception handler which can't be reached.
2616 We need to remove the label from the exception_handler_label list
2617 and remove the associated NOTE_INSN_EH_REGION_BEG and
2618 NOTE_INSN_EH_REGION_END notes. */
2620 insn = b->head;
2622 never_reached_warning (insn);
2624 if (GET_CODE (insn) == CODE_LABEL)
2625 maybe_remove_eh_handler (insn);
2627 /* Include any jump table following the basic block. */
2628 end = b->end;
2629 if (GET_CODE (end) == JUMP_INSN
2630 && (tmp = JUMP_LABEL (end)) != NULL_RTX
2631 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
2632 && GET_CODE (tmp) == JUMP_INSN
2633 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
2634 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
2635 end = tmp;
2637 /* Include any barrier that may follow the basic block. */
2638 tmp = next_nonnote_insn (end);
2639 if (tmp && GET_CODE (tmp) == BARRIER)
2640 end = tmp;
2642 /* Selectively delete the entire chain. */
2643 flow_delete_insn_chain (insn, end);
2645 /* Remove the edges into and out of this block. Note that there may
2646 indeed be edges in, if we are removing an unreachable loop. */
2648 edge e, next, *q;
2650 for (e = b->pred; e; e = next)
2652 for (q = &e->src->succ; *q != e; q = &(*q)->succ_next)
2653 continue;
2654 *q = e->succ_next;
2655 next = e->pred_next;
2656 n_edges--;
2657 free (e);
2659 for (e = b->succ; e; e = next)
2661 for (q = &e->dest->pred; *q != e; q = &(*q)->pred_next)
2662 continue;
2663 *q = e->pred_next;
2664 next = e->succ_next;
2665 n_edges--;
2666 free (e);
2669 b->pred = NULL;
2670 b->succ = NULL;
2673 /* Remove the basic block from the array, and compact behind it. */
2674 expunge_block (b);
2676 return deleted_handler;
2679 /* Remove block B from the basic block array and compact behind it. */
2681 static void
2682 expunge_block (b)
2683 basic_block b;
2685 int i, n = n_basic_blocks;
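/* Shift every following block down one slot in basic_block_info and
   renumber it. */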
2687 for (i = b->index; i + 1 < n; ++i)
2689 basic_block x = BASIC_BLOCK (i + 1);
2690 BASIC_BLOCK (i) = x;
2691 x->index = i;
2694 basic_block_info->num_elements--;
2695 n_basic_blocks--;
2698 /* Delete INSN by patching it out. Return the next insn. */
rtx
2701 flow_delete_insn (insn)
2702 rtx insn;
2704 rtx prev = PREV_INSN (insn);
2705 rtx next = NEXT_INSN (insn);
2706 rtx note;
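/* Splice INSN out of the doubly linked insn chain and mark it deleted. */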
2708 PREV_INSN (insn) = NULL_RTX;
2709 NEXT_INSN (insn) = NULL_RTX;
2710 INSN_DELETED_P (insn) = 1;
2712 if (prev)
2713 NEXT_INSN (prev) = next;
2714 if (next)
2715 PREV_INSN (next) = prev;
2716 else
2717 set_last_insn (prev);
2719 if (GET_CODE (insn) == CODE_LABEL)
2720 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
2722 /* If deleting a jump, decrement the use count of the label. Deleting
2723 the label itself should happen in the normal course of block merging. */
2724 if (GET_CODE (insn) == JUMP_INSN
2725 && JUMP_LABEL (insn)
2726 && GET_CODE (JUMP_LABEL (insn)) == CODE_LABEL)
2727 LABEL_NUSES (JUMP_LABEL (insn))--;
2729 /* Also if deleting an insn that references a label. */
2730 else if ((note = find_reg_note (insn, REG_LABEL, NULL_RTX)) != NULL_RTX
2731 && GET_CODE (XEXP (note, 0)) == CODE_LABEL)
2732 LABEL_NUSES (XEXP (note, 0))--;
2734 if (GET_CODE (insn) == JUMP_INSN
2735 && (GET_CODE (PATTERN (insn)) == ADDR_VEC
2736 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
2738 rtx pat = PATTERN (insn);
2739 int diff_vec_p = GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC;
2740 int len = XVECLEN (pat, diff_vec_p);
2741 int i;
2743 for (i = 0; i < len; i++)
2744 LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0))--;
2747 return next;
2750 /* True if a given label can be deleted. */
2752 static int
2753 can_delete_label_p (label)
2754 rtx label;
2756 rtx x;
2758 if (LABEL_PRESERVE_P (label))
2759 return 0;
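/* A label still referenced from forced_labels, label_value_list, or the
   exception handler labels must be kept. */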
2761 for (x = forced_labels; x; x = XEXP (x, 1))
2762 if (label == XEXP (x, 0))
2763 return 0;
2764 for (x = label_value_list; x; x = XEXP (x, 1))
2765 if (label == XEXP (x, 0))
2766 return 0;
2767 for (x = exception_handler_labels; x; x = XEXP (x, 1))
2768 if (label == XEXP (x, 0))
2769 return 0;
2771 /* User declared labels must be preserved. */
2772 if (LABEL_NAME (label) != 0)
2773 return 0;
2775 return 1;
2778 static int
2779 tail_recursion_label_p (label)
2780 rtx label;
2782 rtx x;
2784 for (x = tail_recursion_label_list; x; x = XEXP (x, 1))
2785 if (label == XEXP (x, 0))
2786 return 1;
2788 return 0;
2791 /* Blocks A and B are to be merged into a single block A. The insns
2792 are already contiguous, hence `nomove'. */
2794 void
2795 merge_blocks_nomove (a, b)
2796 basic_block a, b;
2798 edge e;
2799 rtx b_head, b_end, a_end;
2800 rtx del_first = NULL_RTX, del_last = NULL_RTX;
2801 int b_empty = 0;
2803 /* If there was a CODE_LABEL beginning B, delete it. */
2804 b_head = b->head;
2805 b_end = b->end;
2806 if (GET_CODE (b_head) == CODE_LABEL)
2808 /* Detect basic blocks with nothing but a label. This can happen
2809 in particular at the end of a function. */
2810 if (b_head == b_end)
2811 b_empty = 1;
2812 del_first = del_last = b_head;
2813 b_head = NEXT_INSN (b_head);
2816 /* Delete the basic block note. */
2817 if (NOTE_INSN_BASIC_BLOCK_P (b_head))
2819 if (b_head == b_end)
2820 b_empty = 1;
2821 if (! del_last)
2822 del_first = b_head;
2823 del_last = b_head;
2824 b_head = NEXT_INSN (b_head);
2827 /* If there was a jump out of A, delete it. */
2828 a_end = a->end;
2829 if (GET_CODE (a_end) == JUMP_INSN)
2831 rtx prev;
2833 for (prev = PREV_INSN (a_end); ; prev = PREV_INSN (prev))
2834 if (GET_CODE (prev) != NOTE
2835 || NOTE_LINE_NUMBER (prev) == NOTE_INSN_BASIC_BLOCK
2836 || prev == a->head)
2837 break;
2839 del_first = a_end;
2841 #ifdef HAVE_cc0
2842 /* If this was a conditional jump, we need to also delete
2843 the insn that set cc0. */
2844 if (prev && sets_cc0_p (prev))
2846 rtx tmp = prev;
2847 prev = prev_nonnote_insn (prev);
2848 if (!prev)
2849 prev = a->head;
2850 del_first = tmp;
2852 #endif
2854 a_end = prev;
2856 else if (GET_CODE (NEXT_INSN (a_end)) == BARRIER)
2857 del_first = NEXT_INSN (a_end);
2859 /* Delete everything marked above as well as crap that might be
2860 hanging out between the two blocks. */
2861 flow_delete_insn_chain (del_first, del_last);
2863 /* Normally there should only be one successor of A and that is B, but
2864 partway through the merge of blocks for conditional_execution we'll
2865 be merging a TEST block with THEN and ELSE successors. Free the
2866 whole lot of them and hope the caller knows what they're doing. */
2867 while (a->succ)
2868 remove_edge (a->succ);
2870 /* Adjust the edges out of B for the new owner. */
2871 for (e = b->succ; e; e = e->succ_next)
2872 e->src = a;
2873 a->succ = b->succ;
2875 /* B hasn't quite yet ceased to exist. Attempt to prevent mishap. */
2876 b->pred = b->succ = NULL;
2878 /* Reassociate the insns of B with A. */
2879 if (!b_empty)
2881 if (basic_block_for_insn)
2883 BLOCK_FOR_INSN (b_head) = a;
2884 while (b_head != b_end)
2886 b_head = NEXT_INSN (b_head);
2887 BLOCK_FOR_INSN (b_head) = a;
2890 a_end = b_end;
2892 a->end = a_end;
2894 expunge_block (b);
2897 /* Blocks A and B are to be merged into a single block. A has no incoming
2898 fallthru edge, so it can be moved before B without adding or modifying
2899 any jumps (aside from the jump from A to B). */
2901 static int
2902 merge_blocks_move_predecessor_nojumps (a, b)
2903 basic_block a, b;
2905 rtx start, end, barrier;
2906 int index;
2908 start = a->head;
2909 end = a->end;
2911 barrier = next_nonnote_insn (end);
2912 if (GET_CODE (barrier) != BARRIER)
2913 abort ();
2914 flow_delete_insn (barrier);
2916 /* Move block and loop notes out of the chain so that we do not
2917 disturb their order.
2919 ??? A better solution would be to squeeze out all the non-nested notes
2920 and adjust the block trees appropriately. Even better would be to have
2921 a tighter connection between block trees and rtl so that this is not
2922 necessary. */
2923 start = squeeze_notes (start, end);
2925 /* Scramble the insn chain. */
2926 if (end != PREV_INSN (b->head))
2927 reorder_insns (start, end, PREV_INSN (b->head));
2929 if (rtl_dump_file)
2931 fprintf (rtl_dump_file, "Moved block %d before %d and merged.\n",
2932 a->index, b->index);
2935 /* Swap the records for the two blocks around. Although we are deleting B,
2936 A is now where B was and we want to compact the BB array from where
2937 A used to be. */
2938 BASIC_BLOCK (a->index) = b;
2939 BASIC_BLOCK (b->index) = a;
2940 index = a->index;
2941 a->index = b->index;
2942 b->index = index;
2944 /* Now blocks A and B are contiguous. Merge them. */
2945 merge_blocks_nomove (a, b);
2947 return 1;
2950 /* Blocks A and B are to be merged into a single block. B has no outgoing
2951 fallthru edge, so it can be moved after A without adding or modifying
2952 any jumps (aside from the jump from A to B). */
2954 static int
2955 merge_blocks_move_successor_nojumps (a, b)
2956 basic_block a, b;
2958 rtx start, end, barrier;
2960 start = b->head;
2961 end = b->end;
2962 barrier = NEXT_INSN (end);
2964 /* Recognize a jump table following block B. */
2965 if (barrier
2966 && GET_CODE (barrier) == CODE_LABEL
2967 && NEXT_INSN (barrier)
2968 && GET_CODE (NEXT_INSN (barrier)) == JUMP_INSN
2969 && (GET_CODE (PATTERN (NEXT_INSN (barrier))) == ADDR_VEC
2970 || GET_CODE (PATTERN (NEXT_INSN (barrier))) == ADDR_DIFF_VEC))
2972 end = NEXT_INSN (barrier);
2973 barrier = NEXT_INSN (end);
2976 /* There had better have been a barrier there. Delete it. */
2977 if (barrier && GET_CODE (barrier) == BARRIER)
2978 flow_delete_insn (barrier);
2980 /* Move block and loop notes out of the chain so that we do not
2981 disturb their order.
2983 ??? A better solution would be to squeeze out all the non-nested notes
2984 and adjust the block trees appropriately. Even better would be to have
2985 a tighter connection between block trees and rtl so that this is not
2986 necessary. */
2987 start = squeeze_notes (start, end);
2989 /* Scramble the insn chain. */
2990 reorder_insns (start, end, a->end);
2992 /* Now blocks A and B are contiguous. Merge them. */
2993 merge_blocks_nomove (a, b);
2995 if (rtl_dump_file)
2997 fprintf (rtl_dump_file, "Moved block %d after %d and merged.\n",
2998 b->index, a->index);
3001 return 1;
3004 /* Attempt to merge basic blocks that are potentially non-adjacent.
3005 Return true iff the attempt succeeded. */
3007 static int
3008 merge_blocks (e, b, c, mode)
3009 edge e;
3010 basic_block b, c;
3011 int mode;
3013 /* If C has a tail recursion label, do not merge. There is no
3014 edge recorded from the call_placeholder back to this label, as
3015 that would make optimize_sibling_and_tail_recursive_calls more
3016 complex for no gain. */
3017 if (GET_CODE (c->head) == CODE_LABEL
3018 && tail_recursion_label_p (c->head))
3019 return 0;
3021 /* If B has a fallthru edge to C, no need to move anything. */
3022 if (e->flags & EDGE_FALLTHRU)
3024 merge_blocks_nomove (b, c);
3026 if (rtl_dump_file)
3028 fprintf (rtl_dump_file, "Merged %d and %d without moving.\n",
3029 b->index, c->index);
3032 return 1;
3034 /* Otherwise we will need to move code around. Do that only if expensive
3035 transformations are allowed. */
3036 else if (mode & CLEANUP_EXPENSIVE)
3038 edge tmp_edge, c_fallthru_edge;
3039 int c_has_outgoing_fallthru;
3040 int b_has_incoming_fallthru;
3042 /* Avoid overactive code motion, as the forwarder blocks should be
3043 eliminated by edge redirection instead. One exception might have
3044 been if B is a forwarder block and C has no fallthru edge, but
3045 that should be cleaned up by bb-reorder instead. */
3046 if (forwarder_block_p (b) || forwarder_block_p (c))
3047 return 0;
3049 /* We must make sure to not munge nesting of lexical blocks,
3050 and loop notes. This is done by squeezing out all the notes
3051 and leaving them there to lie. Not ideal, but functional. */
3053 for (tmp_edge = c->succ; tmp_edge; tmp_edge = tmp_edge->succ_next)
3054 if (tmp_edge->flags & EDGE_FALLTHRU)
3055 break;
3056 c_has_outgoing_fallthru = (tmp_edge != NULL);
3057 c_fallthru_edge = tmp_edge;
3059 for (tmp_edge = b->pred; tmp_edge; tmp_edge = tmp_edge->pred_next)
3060 if (tmp_edge->flags & EDGE_FALLTHRU)
3061 break;
3062 b_has_incoming_fallthru = (tmp_edge != NULL);
3064 /* If B does not have an incoming fallthru, then it can be moved
3065 immediately before C without introducing or modifying jumps.
3066 C cannot be the first block, so we do not have to worry about
3067 accessing a non-existent block. */
3068 if (! b_has_incoming_fallthru)
3069 return merge_blocks_move_predecessor_nojumps (b, c);
3071 /* Otherwise, we're going to try to move C after B. If C does
3072 not have an outgoing fallthru, then it can be moved
3073 immediately after B without introducing or modifying jumps. */
3074 if (! c_has_outgoing_fallthru)
3075 return merge_blocks_move_successor_nojumps (b, c);
3077 /* Otherwise, we'll need to insert an extra jump, and possibly
3078 a new block to contain it. We can't redirect to EXIT_BLOCK_PTR,
3079 as we don't have explicit return instructions before epilogues
3080 are generated, so give up on that case. */
3082 if (c_fallthru_edge->dest != EXIT_BLOCK_PTR
3083 && merge_blocks_move_successor_nojumps (b, c))
3085 basic_block target = c_fallthru_edge->dest;
3086 rtx barrier;
3087 basic_block new;
3089 /* This is a dirty hack to avoid code duplication.
3091 Set the edge to point to the wrong basic block, so that
3092 redirect_edge_and_branch_force will do the trick
3093 and rewire the edge back to the original location. */
3094 redirect_edge_succ (c_fallthru_edge, ENTRY_BLOCK_PTR);
3095 new = redirect_edge_and_branch_force (c_fallthru_edge, target);
3097 /* We've just created a barrier, but another barrier is
3098 already present in the stream. Avoid the duplicate. */
3099 barrier = next_nonnote_insn (new ? new->end : b->end);
3100 if (GET_CODE (barrier) != BARRIER)
3101 abort ();
3102 flow_delete_insn (barrier);
3105 return 0;
3107 return 0;
3110 /* Simplify a conditional jump around an unconditional jump.
3111 Return true if something changed. */
3113 static bool
3114 try_simplify_condjump (cbranch_block)
3115 basic_block cbranch_block;
3117 basic_block jump_block, jump_dest_block, cbranch_dest_block;
3118 edge cbranch_jump_edge, cbranch_fallthru_edge;
3119 rtx cbranch_insn;
3121 /* Verify that there are exactly two successors. */
3122 if (!cbranch_block->succ
3123 || !cbranch_block->succ->succ_next
3124 || cbranch_block->succ->succ_next->succ_next)
3125 return false;
3127 /* Verify that we've got a normal conditional branch at the end
3128 of the block. */
3129 cbranch_insn = cbranch_block->end;
3130 if (!any_condjump_p (cbranch_insn))
3131 return false;
3133 cbranch_fallthru_edge = FALLTHRU_EDGE (cbranch_block);
3134 cbranch_jump_edge = BRANCH_EDGE (cbranch_block);
3136 /* The next block must not have multiple predecessors, must not
3137 be the last block in the function, and must contain just the
3138 unconditional jump. */
3139 jump_block = cbranch_fallthru_edge->dest;
3140 if (jump_block->pred->pred_next
3141 || jump_block->index == n_basic_blocks - 1
3142 || !forwarder_block_p (jump_block))
3143 return false;
3144 jump_dest_block = jump_block->succ->dest;
3146 /* The conditional branch must target the block after the
3147 unconditional branch. */
3148 cbranch_dest_block = cbranch_jump_edge->dest;
3150 if (!can_fallthru (jump_block, cbranch_dest_block))
3151 return false;
3153 /* Invert the conditional branch. Prevent jump.c from deleting
3154 "unreachable" instructions. */
3155 LABEL_NUSES (JUMP_LABEL (cbranch_insn))++;
3156 if (!invert_jump (cbranch_insn, block_label (jump_dest_block), 1))
3158 LABEL_NUSES (JUMP_LABEL (cbranch_insn))--;
3159 return false;
3162 if (rtl_dump_file)
3163 fprintf (rtl_dump_file, "Simplifying condjump %i around jump %i\n",
3164 INSN_UID (cbranch_insn), INSN_UID (jump_block->end));
3166 /* Success. Update the CFG to match. Note that after this point
3167 the edge variable names appear backwards; the redirection is done
3168 this way to preserve edge profile data. */
3169 redirect_edge_succ_nodup (cbranch_jump_edge, cbranch_dest_block);
3170 redirect_edge_succ_nodup (cbranch_fallthru_edge, jump_dest_block);
3171 cbranch_jump_edge->flags |= EDGE_FALLTHRU;
3172 cbranch_fallthru_edge->flags &= ~EDGE_FALLTHRU;
3174 /* Delete the block with the unconditional jump, and clean up the mess. */
3175 flow_delete_block (jump_block);
3176 tidy_fallthru_edge (cbranch_jump_edge, cbranch_block, cbranch_dest_block);
3178 return true;
3181 /* Attempt to forward edges leaving basic block B.
3182 Return true if successful. */
3184 static bool
3185 try_forward_edges (mode, b)
3186 basic_block b;
3187 int mode;
3189 bool changed = false;
3190 edge e, next;
3192 for (e = b->succ; e ; e = next)
3194 basic_block target, first;
3195 int counter;
3197 next = e->succ_next;
3199 /* Skip complex edges because we don't know how to update them.
3201 Still handle fallthru edges, as we can succeed in forwarding a fallthru
3202 edge to the same place as the branch edge of a conditional branch
3203 and turn the conditional branch into an unconditional branch. */
3204 if (e->flags & EDGE_COMPLEX)
3205 continue;
3207 target = first = e->dest;
3208 counter = 0;
3210 /* Look for the real destination of the jump.
3211 Avoid an infinite loop when walking an infinite empty loop by counting
3212 up to n_basic_blocks. */
3213 while (forwarder_block_p (target)
3214 && target->succ->dest != EXIT_BLOCK_PTR
3215 && counter < n_basic_blocks)
3217 /* Bypass trivial infinite loops. */
3218 if (target == target->succ->dest)
3219 counter = n_basic_blocks;
3221 /* Avoid killing loop pre-headers, as they are the place the loop
3222 optimizer wants to hoist code to.
3224 For fallthru forwarders, the LOOP_BEG note must appear between
3225 the header of the block and the CODE_LABEL of the loop; for non-forwarders
3226 it must appear before the JUMP_INSN. */
3227 if (mode & CLEANUP_PRE_LOOP)
3229 rtx insn = (target->succ->flags & EDGE_FALLTHRU
3230 ? target->head : prev_nonnote_insn (target->end));
3232 if (GET_CODE (insn) != NOTE)
3233 insn = NEXT_INSN (insn);
3235 for (;insn && GET_CODE (insn) != CODE_LABEL && !INSN_P (insn);
3236 insn = NEXT_INSN (insn))
3237 if (GET_CODE (insn) == NOTE
3238 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
3239 break;
3241 if (GET_CODE (insn) == NOTE)
3242 break;
3244 target = target->succ->dest, counter++;
3247 if (counter >= n_basic_blocks)
3249 if (rtl_dump_file)
3250 fprintf (rtl_dump_file, "Infinite loop in BB %i.\n",
3251 target->index);
3253 else if (target == first)
3254 ; /* We didn't do anything. */
3255 else
3257 /* Save the values now, as the edge may get removed. */
3258 gcov_type edge_count = e->count;
3259 int edge_probability = e->probability;
3261 if (redirect_edge_and_branch (e, target))
3263 /* We successfully forwarded the edge. Now update profile
3264 data: for each edge we traversed in the chain, remove
3265 the original edge's execution count. */
3266 int edge_frequency = ((edge_probability * b->frequency
3267 + REG_BR_PROB_BASE / 2)
3268 / REG_BR_PROB_BASE);
3272 first->count -= edge_count;
3273 first->succ->count -= edge_count;
3274 first->frequency -= edge_frequency;
3275 first = first->succ->dest;
3277 while (first != target);
3279 changed = true;
3281 else
3283 if (rtl_dump_file)
3284 fprintf (rtl_dump_file, "Forwarding edge %i->%i to %i failed.\n",
3285 b->index, e->dest->index, target->index);
3290 return changed;
3293 /* Look through the insns at the end of BB1 and BB2 and find the longest
3294 sequence that is equivalent. Store the first insns of that sequence
3295 in *F1 and *F2 and return the sequence length.
3297 To simplify callers of this function, if the blocks match exactly,
3298 store the heads of the blocks in *F1 and *F2. */
3300 static int
3301 flow_find_cross_jump (mode, bb1, bb2, f1, f2)
3302 int mode ATTRIBUTE_UNUSED;
3303 basic_block bb1, bb2;
3304 rtx *f1, *f2;
3306 rtx i1, i2, p1, p2, last1, last2, afterlast1, afterlast2;
3307 int ninsns = 0;
3309 /* Skip simple jumps at the end of the blocks. Complex jumps still
3310 need to be compared for equivalence, which we'll do below. */
3312 i1 = bb1->end;
3313 if (onlyjump_p (i1))
3314 i1 = PREV_INSN (i1);
3315 i2 = bb2->end;
3316 if (onlyjump_p (i2))
3317 i2 = PREV_INSN (i2);
3319 last1 = afterlast1 = last2 = afterlast2 = NULL_RTX;
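/* Walk both blocks backwards in lock step, remembering in LAST1/LAST2 the
   earliest pair of insns that still matched. */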
3320 while (true)
3322 /* Ignore notes. */
3323 while ((GET_CODE (i1) == NOTE && i1 != bb1->head))
3324 i1 = PREV_INSN (i1);
3325 while ((GET_CODE (i2) == NOTE && i2 != bb2->head))
3326 i2 = PREV_INSN (i2);
3328 if (i1 == bb1->head || i2 == bb2->head)
3329 break;
3331 /* Verify that I1 and I2 are equivalent. */
3333 if (GET_CODE (i1) != GET_CODE (i2))
3334 break;
3336 p1 = PATTERN (i1);
3337 p2 = PATTERN (i2);
3339 /* If this is a CALL_INSN, compare register usage information.
3340 If we don't check this on stack register machines, the two
3341 CALL_INSNs might be merged leaving reg-stack.c with mismatching
3342 numbers of stack registers in the same basic block.
3343 If we don't check this on machines with delay slots, a delay slot may
3344 be filled that clobbers a parameter expected by the subroutine.
3346 ??? We take the simple route for now and assume that if they're
3347 equal, they were constructed identically. */
3349 if (GET_CODE (i1) == CALL_INSN
3350 && ! rtx_equal_p (CALL_INSN_FUNCTION_USAGE (i1),
3351 CALL_INSN_FUNCTION_USAGE (i2)))
3352 break;
3354 #ifdef STACK_REGS
3355 /* If cross_jump_death_matters is not 0, the insn's mode
3356 indicates whether or not the insn contains any stack-like
3357 regs. */
3359 if ((mode & CLEANUP_POST_REGSTACK) && stack_regs_mentioned (i1))
3361 /* If register stack conversion has already been done, then
3362 death notes must also be compared before it is certain that
3363 the two instruction streams match. */
3365 rtx note;
3366 HARD_REG_SET i1_regset, i2_regset;
3368 CLEAR_HARD_REG_SET (i1_regset);
3369 CLEAR_HARD_REG_SET (i2_regset);
3371 for (note = REG_NOTES (i1); note; note = XEXP (note, 1))
3372 if (REG_NOTE_KIND (note) == REG_DEAD
3373 && STACK_REG_P (XEXP (note, 0)))
3374 SET_HARD_REG_BIT (i1_regset, REGNO (XEXP (note, 0)));
3376 for (note = REG_NOTES (i2); note; note = XEXP (note, 1))
3377 if (REG_NOTE_KIND (note) == REG_DEAD
3378 && STACK_REG_P (XEXP (note, 0)))
3379 SET_HARD_REG_BIT (i2_regset, REGNO (XEXP (note, 0)));
3381 GO_IF_HARD_REG_EQUAL (i1_regset, i2_regset, done);
3383 break;
3385 done:
3388 #endif
3390 if (GET_CODE (p1) != GET_CODE (p2))
3391 break;
3393 if (! rtx_renumbered_equal_p (p1, p2))
3395 /* The following code helps take care of G++ cleanups. */
3396 rtx equiv1 = find_reg_equal_equiv_note (i1);
3397 rtx equiv2 = find_reg_equal_equiv_note (i2);
3399 if (equiv1 && equiv2
3400 /* If the equivalences are not to a constant, they may
3401 reference pseudos that no longer exist, so we can't
3402 use them. */
3403 && CONSTANT_P (XEXP (equiv1, 0))
3404 && rtx_equal_p (XEXP (equiv1, 0), XEXP (equiv2, 0)))
3406 rtx s1 = single_set (i1);
3407 rtx s2 = single_set (i2);
3408 if (s1 != 0 && s2 != 0
3409 && rtx_renumbered_equal_p (SET_DEST (s1), SET_DEST (s2)))
3411 validate_change (i1, &SET_SRC (s1), XEXP (equiv1, 0), 1);
3412 validate_change (i2, &SET_SRC (s2), XEXP (equiv2, 0), 1);
3413 if (! rtx_renumbered_equal_p (p1, p2))
3414 cancel_changes (0);
3415 else if (apply_change_group ())
3416 goto win;
3419 break;
3422 win:
3423 /* Don't begin a cross-jump with a USE or CLOBBER insn. */
3424 if (GET_CODE (p1) != USE && GET_CODE (p1) != CLOBBER)
3426 afterlast1 = last1, afterlast2 = last2;
3427 last1 = i1, last2 = i2;
3428 ninsns++;
3430 i1 = PREV_INSN (i1);
3431 i2 = PREV_INSN (i2);
3434 #ifdef HAVE_cc0
3435 if (ninsns)
3437 /* Don't allow the insn after a compare to be shared by
3438 cross-jumping unless the compare is also shared. */
3439 if (reg_mentioned_p (cc0_rtx, last1) && ! sets_cc0_p (last1))
3440 last1 = afterlast1, last2 = afterlast2, ninsns--;
3442 #endif
3444 /* Include preceding notes and labels in the cross-jump. One,
3445 this may bring us to the head of the blocks as requested above.
3446 Two, it keeps line number notes as matched as may be. */
3447 if (ninsns)
3449 while (last1 != bb1->head && GET_CODE (PREV_INSN (last1)) == NOTE)
3450 last1 = PREV_INSN (last1);
3451 if (last1 != bb1->head && GET_CODE (PREV_INSN (last1)) == CODE_LABEL)
3452 last1 = PREV_INSN (last1);
3453 while (last2 != bb2->head && GET_CODE (PREV_INSN (last2)) == NOTE)
3454 last2 = PREV_INSN (last2);
3455 if (last2 != bb2->head && GET_CODE (PREV_INSN (last2)) == CODE_LABEL)
3456 last2 = PREV_INSN (last2);
3458 *f1 = last1;
3459 *f2 = last2;
3462 return ninsns;
3465 /* Return true iff the outgoing edges of BB1 and BB2 match, together with
3466 the branch instruction. This means that if we commonize the control
3467 flow before the end of the basic block, the semantics remain unchanged.
3469 We may assume that there exists one edge with a common destination. */
3471 static bool
3472 outgoing_edges_match (bb1, bb2)
3473 basic_block bb1;
3474 basic_block bb2;
3476 /* If BB1 has only one successor, we must be looking at an unconditional
3477 jump. Which, by the assumption above, means that we only need to check
3478 that BB2 has one successor. */
3479 if (bb1->succ && !bb1->succ->succ_next)
3480 return (bb2->succ && !bb2->succ->succ_next);
3482 /* Match conditional jumps - this may get tricky when fallthru and branch
3483 edges are crossed. */
3484 if (bb1->succ
3485 && bb1->succ->succ_next
3486 && !bb1->succ->succ_next->succ_next
3487 && any_condjump_p (bb1->end))
3489 edge b1, f1, b2, f2;
3490 bool reverse, match;
3491 rtx set1, set2, cond1, cond2;
3492 enum rtx_code code1, code2;
3494 if (!bb2->succ
3495 || !bb2->succ->succ_next
3496 || bb1->succ->succ_next->succ_next
3497 || !any_condjump_p (bb2->end))
3498 return false;
3500 b1 = BRANCH_EDGE (bb1);
3501 b2 = BRANCH_EDGE (bb2);
3502 f1 = FALLTHRU_EDGE (bb1);
3503 f2 = FALLTHRU_EDGE (bb2);
3505 /* Get around possible forwarders on fallthru edges. Other cases
3506 should be optimized out already. */
3507 if (forwarder_block_p (f1->dest))
3508 f1 = f1->dest->succ;
3509 if (forwarder_block_p (f2->dest))
3510 f2 = f2->dest->succ;
3512 /* To simplify use of this function, return false if there are
3513 unneeded forwarder blocks. These will get eliminated later
3514 during cleanup_cfg. */
3515 if (forwarder_block_p (f1->dest)
3516 || forwarder_block_p (f2->dest)
3517 || forwarder_block_p (b1->dest)
3518 || forwarder_block_p (b2->dest))
3519 return false;
3521 if (f1->dest == f2->dest && b1->dest == b2->dest)
3522 reverse = false;
3523 else if (f1->dest == b2->dest && b1->dest == f2->dest)
3524 reverse = true;
3525 else
3526 return false;
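/* If only one of the two jumps falls through when its condition is true
   (i.e. its IF_THEN_ELSE has pc_rtx in the 'then' arm), the conditions
   must be compared with opposite senses. */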
3528 set1 = pc_set (bb1->end);
3529 set2 = pc_set (bb2->end);
3530 if ((XEXP (SET_SRC (set1), 1) == pc_rtx)
3531 != (XEXP (SET_SRC (set2), 1) == pc_rtx))
3532 reverse = !reverse;
3534 cond1 = XEXP (SET_SRC (set1), 0);
3535 cond2 = XEXP (SET_SRC (set2), 0);
3536 code1 = GET_CODE (cond1);
3537 if (reverse)
3538 code2 = reversed_comparison_code (cond2, bb2->end);
3539 else
3540 code2 = GET_CODE (cond2);
3541 if (code2 == UNKNOWN)
3542 return false;
3544 /* Verify codes and operands match. */
3545 match = ((code1 == code2
3546 && rtx_renumbered_equal_p (XEXP (cond1, 0), XEXP (cond2, 0))
3547 && rtx_renumbered_equal_p (XEXP (cond1, 1), XEXP (cond2, 1)))
3548 || (code1 == swap_condition (code2)
3549 && rtx_renumbered_equal_p (XEXP (cond1, 1),
3550 XEXP (cond2, 0))
3551 && rtx_renumbered_equal_p (XEXP (cond1, 0),
3552 XEXP (cond2, 1))));
3554 /* If we return true, we will join the blocks. Which means that
3555 we will only have one branch prediction bit to work with. Thus
3556 we require the existing branches to have probabilities that are
3557 roughly similar. */
3558 /* ??? We should use bb->frequency to allow merging in infrequently
3559 executed blocks, but at the moment it is not available when
3560 cleanup_cfg is run. */
3561 if (match && !optimize_size)
3563 rtx note1, note2;
3564 int prob1, prob2;
3565 note1 = find_reg_note (bb1->end, REG_BR_PROB, 0);
3566 note2 = find_reg_note (bb2->end, REG_BR_PROB, 0);
3568 if (note1 && note2)
3570 prob1 = INTVAL (XEXP (note1, 0));
3571 prob2 = INTVAL (XEXP (note2, 0));
3572 if (reverse)
3573 prob2 = REG_BR_PROB_BASE - prob2;
3575 /* Fail if the difference in probabilities is
3576 greater than 5%. */
3577 if (abs (prob1 - prob2) > REG_BR_PROB_BASE / 20)
3578 return false;
3580 else if (note1 || note2)
3581 return false;
3584 if (rtl_dump_file && match)
3585 fprintf (rtl_dump_file, "Conditionals in bb %i and %i match.\n",
3586 bb1->index, bb2->index);
3588 return match;
3591 /* ??? We can handle computed jumps too. This may be important for
3592 inlined functions containing switch statements. Also jumps w/o
3593 fallthru edges can be handled by simply matching whole insn. */
3594 return false;
3597 /* E1 and E2 are edges with the same destination block. Search their
3598 predecessors for common code. If found, redirect control flow from
3599 (maybe the middle of) E1->SRC to (maybe the middle of) E2->SRC. */
3601 static bool
3602 try_crossjump_to_edge (mode, e1, e2)
3603 int mode;
3604 edge e1, e2;
3606 int nmatch;
3607 basic_block src1 = e1->src, src2 = e2->src;
3608 basic_block redirect_to;
3609 rtx newpos1, newpos2;
3610 edge s;
3611 rtx last;
3612 rtx label;
3613 rtx note;
3615 /* Search backward through forwarder blocks. We don't need to worry
3616 about multiple entry or chained forwarders, as they will be optimized
3617 away. We do this to look past the unconditional jump following a
3618 conditional jump that is required due to the current CFG shape. */
3619 if (src1->pred
3620 && !src1->pred->pred_next
3621 && forwarder_block_p (src1))
3623 e1 = src1->pred;
3624 src1 = e1->src;
3626 if (src2->pred
3627 && !src2->pred->pred_next
3628 && forwarder_block_p (src2))
3630 e2 = src2->pred;
3631 src2 = e2->src;
3634 /* Nothing to do if we reach ENTRY, or a common source block. */
3635 if (src1 == ENTRY_BLOCK_PTR || src2 == ENTRY_BLOCK_PTR)
3636 return false;
3637 if (src1 == src2)
3638 return false;
3640 /* Seeing more than one forwarder block would confuse us later... */
3641 if (forwarder_block_p (e1->dest)
3642 && forwarder_block_p (e1->dest->succ->dest))
3643 return false;
3644 if (forwarder_block_p (e2->dest)
3645 && forwarder_block_p (e2->dest->succ->dest))
3646 return false;
3648 /* Likewise with dead code (possibly newly created by the other optimizations
3649 of cfg_cleanup). */
3650 if (!src1->pred || !src2->pred)
3651 return false;
3653 /* Likewise with complex edges.
3654 ??? We should be able to handle most complex edges later with some
3655 care. */
3656 if (e1->flags & EDGE_COMPLEX)
3657 return false;
3659 /* Look for the common insn sequence, part the first ... */
3660 if (!outgoing_edges_match (src1, src2))
3661 return false;
3663 /* ... and part the second. */
3664 nmatch = flow_find_cross_jump (mode, src1, src2, &newpos1, &newpos2);
3665 if (!nmatch)
3666 return false;
3668 /* Avoid splitting if possible. */
3669 if (newpos2 == src2->head)
3670 redirect_to = src2;
3671 else
3673 if (rtl_dump_file)
3674 fprintf (rtl_dump_file, "Splitting bb %i before %i insns\n",
3675 src2->index, nmatch);
3676 redirect_to = split_block (src2, PREV_INSN (newpos2))->dest;
3679 if (rtl_dump_file)
3680 fprintf (rtl_dump_file,
3681 "Cross jumping from bb %i to bb %i; %i common insns\n",
3682 src1->index, src2->index, nmatch);
3684 redirect_to->count += src1->count;
3685 redirect_to->frequency += src1->frequency;
3687 /* Recompute the frequencies and counts of outgoing edges. */
3688 for (s = redirect_to->succ; s; s = s->succ_next)
3690 edge s2;
3691 basic_block d = s->dest;
3693 if (forwarder_block_p (d))
3694 d = d->succ->dest;
3695 for (s2 = src1->succ; ; s2 = s2->succ_next)
3697 basic_block d2 = s2->dest;
3698 if (forwarder_block_p (d2))
3699 d2 = d2->succ->dest;
3700 if (d == d2)
3701 break;
3703 s->count += s2->count;
3705 /* Take care to update possible forwarder blocks. We verified
3706 that there is no more than one in the chain, so we can't run
3707 into an infinite loop. */
3708 if (forwarder_block_p (s->dest))
3710 s->dest->succ->count += s2->count;
3711 s->dest->count += s2->count;
3712 s->dest->frequency += EDGE_FREQUENCY (s);
3714 if (forwarder_block_p (s2->dest))
3716 s2->dest->succ->count -= s2->count;
3717 s2->dest->count -= s2->count;
3718 s2->dest->frequency -= EDGE_FREQUENCY (s);
3720 if (!redirect_to->frequency && !src1->frequency)
3721 s->probability = (s->probability + s2->probability) / 2;
3722 else
3723 s->probability =
3724 ((s->probability * redirect_to->frequency +
3725 s2->probability * src1->frequency)
3726 / (redirect_to->frequency + src1->frequency));
3729 note = find_reg_note (redirect_to->end, REG_BR_PROB, 0);
3730 if (note)
3731 XEXP (note, 0) = GEN_INT (BRANCH_EDGE (redirect_to)->probability);
3733 /* Edit SRC1 to go to REDIRECT_TO at NEWPOS1. */
3735 /* Skip possible basic block header. */
3736 if (GET_CODE (newpos1) == CODE_LABEL)
3737 newpos1 = NEXT_INSN (newpos1);
3738 if (GET_CODE (newpos1) == NOTE)
3739 newpos1 = NEXT_INSN (newpos1);
3740 last = src1->end;
3742 /* Emit the jump insn. */
3743 label = block_label (redirect_to);
3744 src1->end = emit_jump_insn_before (gen_jump (label), newpos1);
3745 JUMP_LABEL (src1->end) = label;
3746 LABEL_NUSES (label)++;
3747 if (basic_block_for_insn)
3748 set_block_for_new_insns (src1->end, src1);
3750 /* Delete the now unreachable instructions. */
3751 flow_delete_insn_chain (newpos1, last);
3753 /* Make sure there is a barrier after the new jump. */
3754 last = next_nonnote_insn (src1->end);
3755 if (!last || GET_CODE (last) != BARRIER)
3756 emit_barrier_after (src1->end);
3758 /* Update CFG. */
3759 while (src1->succ)
3760 remove_edge (src1->succ);
3761 make_edge (NULL, src1, redirect_to, 0);
3762 src1->succ->probability = REG_BR_PROB_BASE;
3763 src1->succ->count = src1->count;
3765 return true;
3768 /* Search the predecessors of BB for common insn sequences. When found,
3769 share code between them by redirecting control flow. Return true if
3770 any changes made. */
3772 static bool
3773 try_crossjump_bb (mode, bb)
3774 int mode;
3775 basic_block bb;
3777 edge e, e2, nexte2, nexte, fallthru;
3778 bool changed;
3780 /* Nothing to do if there are not at least two incoming edges. */
3781 if (!bb->pred || !bb->pred->pred_next)
3782 return false;
3784 /* It is always cheapest to redirect a block that ends in a branch to
3785 a block that falls through into BB, as that adds no branches to the
3786 program. We'll try that combination first. */
3787 for (fallthru = bb->pred; fallthru; fallthru = fallthru->pred_next)
3788 if (fallthru->flags & EDGE_FALLTHRU)
3789 break;
3791 changed = false;
3792 for (e = bb->pred; e; e = nexte)
3794 nexte = e->pred_next;
3796 /* Elide complex edges now, as neither try_crossjump_to_edge
3797 nor outgoing_edges_match can handle them. */
3798 if (e->flags & EDGE_COMPLEX)
3799 continue;
3801 /* As noted above, first try with the fallthru predecessor. */
3802 if (fallthru)
3804 /* Don't combine the fallthru edge into anything else.
3805 If there is a match, we'll do it the other way around. */
3806 if (e == fallthru)
3807 continue;
3809 if (try_crossjump_to_edge (mode, e, fallthru))
3811 changed = true;
3812 nexte = bb->pred;
3813 continue;
3817 /* Non-obvious work limiting check: Recognize that we're going
3818 to call try_crossjump_bb on every basic block. So if we have
3819 two blocks with lots of outgoing edges (a switch) and they
3820 share lots of common destinations, then we would do the
3821 cross-jump check once for each common destination.
3823 Now, if the blocks actually are cross-jump candidates, then
3824 all of their destinations will be shared. Which means that
3825 we only need to check them for cross-jump candidacy once. We
3826 can eliminate redundant checks of crossjump(A,B) by arbitrarily
3827 choosing to do the check from the block for which the edge
3828 in question is the first successor of A. */
3829 if (e->src->succ != e)
3830 continue;
3832 for (e2 = bb->pred; e2; e2 = nexte2)
3834 nexte2 = e2->pred_next;
3836 if (e2 == e)
3837 continue;
3839 /* We've already checked the fallthru edge above. */
3840 if (e2 == fallthru)
3841 continue;
3843 /* Again, neither try_crossjump_to_edge nor outgoing_edges_match
3844 can handle complex edges. */
3845 if (e2->flags & EDGE_COMPLEX)
3846 continue;
3848 /* The "first successor" check above only prevents multiple
3849 checks of crossjump(A,B). In order to prevent redundant
3850 checks of crossjump(B,A), require that A be the block
3851 with the lowest index. */
3852 if (e->src->index > e2->src->index)
3853 continue;
3855 if (try_crossjump_to_edge (mode, e, e2))
3857 changed = true;
3858 nexte = bb->pred;
3859 break;
3864 return changed;
3867 /* Do simple CFG optimizations - basic block merging, simplification of
3868 jump instructions, etc. Return nonzero if changes were made. */
3870 static bool
3871 try_optimize_cfg (mode)
3872 int mode;
3874 int i;
3875 bool changed_overall = false;
3876 bool changed;
3877 int iterations = 0;
3879 /* Attempt to merge blocks as made possible by edge removal. If a block
3880 has only one successor, and the successor has only one predecessor,
3881 they may be combined. */
3885 changed = false;
3886 iterations++;
3888 if (rtl_dump_file)
3889 fprintf (rtl_dump_file, "\n\ntry_optimize_cfg iteration %i\n\n",
3890 iterations);
3892 for (i = 0; i < n_basic_blocks;)
3894 basic_block c, b = BASIC_BLOCK (i);
3895 edge s;
3896 bool changed_here = false;
3898 /* Delete trivially dead basic blocks. */
3899 while (b->pred == NULL)
3901 c = BASIC_BLOCK (b->index - 1);
3902 if (rtl_dump_file)
3903 fprintf (rtl_dump_file, "Deleting block %i.\n", b->index);
3904 flow_delete_block (b);
3905 changed = true;
3906 b = c;
3909 /* Remove code labels no longer used. Don't do this before
3910 CALL_PLACEHOLDER is removed, as some branches may be hidden
3911 within. */
3912 if (b->pred->pred_next == NULL
3913 && (b->pred->flags & EDGE_FALLTHRU)
3914 && !(b->pred->flags & EDGE_COMPLEX)
3915 && GET_CODE (b->head) == CODE_LABEL
3916 && (!(mode & CLEANUP_PRE_SIBCALL)
3917 || !tail_recursion_label_p (b->head))
3918 /* If previous block ends with condjump jumping to next BB,
3919 we can't delete the label. */
3920 && (b->pred->src == ENTRY_BLOCK_PTR
3921 || !reg_mentioned_p (b->head, b->pred->src->end)))
3923 rtx label = b->head;
3924 b->head = NEXT_INSN (b->head);
3925 flow_delete_insn_chain (label, label);
3926 if (rtl_dump_file)
3927 fprintf (rtl_dump_file, "Deleted label in block %i.\n",
3928 b->index);
3931 /* If we fall through an empty block, we can remove it. */
3932 if (b->pred->pred_next == NULL
3933 && (b->pred->flags & EDGE_FALLTHRU)
3934 && GET_CODE (b->head) != CODE_LABEL
3935 && forwarder_block_p (b)
3936 /* Note that forwarder_block_p true ensures that there
3937 is a successor for this block. */
3938 && (b->succ->flags & EDGE_FALLTHRU)
3939 && n_basic_blocks > 1)
3941 if (rtl_dump_file)
3942 fprintf (rtl_dump_file, "Deleting fallthru block %i.\n",
3943 b->index);
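/* Pick an adjacent block to continue scanning from; if B is block 0
there is no previous block, so use block 1 instead. */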
3944 c = BASIC_BLOCK (b->index ? b->index - 1 : 1);
3945 redirect_edge_succ_nodup (b->pred, b->succ->dest);
3946 flow_delete_block (b);
3947 changed = true;
3948 b = c;
3951 /* Merge blocks. Loop because chains of blocks might be
3952 combinable. */
3953 while ((s = b->succ) != NULL
3954 && s->succ_next == NULL
3955 && !(s->flags & EDGE_COMPLEX)
3956 && (c = s->dest) != EXIT_BLOCK_PTR
3957 && c->pred->pred_next == NULL
3958 /* If the jump insn has side effects,
3959 we can't kill the edge. */
3960 && (GET_CODE (b->end) != JUMP_INSN
3961 || onlyjump_p (b->end))
3962 && merge_blocks (s, b, c, mode))
3963 changed_here = true;
3965 /* Simplify branch over branch. */
3966 if ((mode & CLEANUP_EXPENSIVE) && try_simplify_condjump (b))
3967 changed_here = true;
3969 /* If B has a single outgoing edge, but uses a non-trivial jump
3970 instruction without side-effects, we can either delete the
3971 jump entirely, or replace it with a simple unconditional jump.
3972 Use redirect_edge_and_branch to do the dirty work. */
3973 if (b->succ
3974 && ! b->succ->succ_next
3975 && b->succ->dest != EXIT_BLOCK_PTR
3976 && onlyjump_p (b->end)
3977 && redirect_edge_and_branch (b->succ, b->succ->dest))
3978 changed_here = true;
3980 /* Simplify branch to branch. */
3981 if (try_forward_edges (mode, b))
3982 changed_here = true;
3984 /* Look for shared code between blocks. */
3985 if ((mode & CLEANUP_CROSSJUMP)
3986 && try_crossjump_bb (mode, b))
3987 changed_here = true;
3989 /* Don't get confused by the index shift caused by deleting
3990 blocks. */
3991 if (!changed_here)
3992 i = b->index + 1;
3993 else
3994 changed = true;
3997 if ((mode & CLEANUP_CROSSJUMP)
3998 && try_crossjump_bb (mode, EXIT_BLOCK_PTR))
3999 changed = true;
4001 #ifdef ENABLE_CHECKING
4002 if (changed)
4003 verify_flow_info ();
4004 #endif
4006 changed_overall |= changed;
4008 while (changed);
4009 return changed_overall;
4012 /* The given edge should potentially be a fallthru edge. If that is in
4013 fact true, delete the jump and barriers that are in the way. */
4015 void
4016 tidy_fallthru_edge (e, b, c)
4017 edge e;
4018 basic_block b, c;
4020 rtx q;
4022 /* ??? In a late-running flow pass, other folks may have deleted basic
4023 blocks by nopping out blocks, leaving multiple BARRIERs between here
4024 and the target label. They ought to be chastised and fixed.
4026 We can also wind up with a sequence of undeletable labels between
4027 one block and the next.
4029 So search through a sequence of barriers, labels, and notes for
4030 the head of block C and assert that we really do fall through. */
4032 if (next_real_insn (b->end) != next_real_insn (PREV_INSN (c->head)))
4033 return;
4035 /* Remove what will soon cease being the jump insn from the source block.
4036 If block B consisted only of this single jump, turn it into a deleted
4037 note. */
4038 q = b->end;
4039 if (GET_CODE (q) == JUMP_INSN
4040 && onlyjump_p (q)
4041 && (any_uncondjump_p (q)
4042 || (b->succ == e && e->succ_next == NULL)))
4044 #ifdef HAVE_cc0
4045 /* If this was a conditional jump, we need to also delete
4046 the insn that set cc0. */
4047 if (any_condjump_p (q) && sets_cc0_p (PREV_INSN (q)))
4048 q = PREV_INSN (q);
4049 #endif
4051 if (b->head == q)
4053 PUT_CODE (q, NOTE);
4054 NOTE_LINE_NUMBER (q) = NOTE_INSN_DELETED;
4055 NOTE_SOURCE_FILE (q) = 0;
4057 else
4059 q = PREV_INSN (q);
4061 /* We don't want a block to end on a line-number note since that has
4062 the potential of changing the code between -g and not -g. */
4063 while (GET_CODE (q) == NOTE && NOTE_LINE_NUMBER (q) >= 0)
4064 q = PREV_INSN (q);
4067 b->end = q;
4070 /* Selectively unlink the sequence. */
4071 if (q != PREV_INSN (c->head))
4072 flow_delete_insn_chain (NEXT_INSN (q), PREV_INSN (c->head));
4074 e->flags |= EDGE_FALLTHRU;
4077 /* Fix up edges that now fall through, or rather should now fall through
4078 but previously required a jump around now deleted blocks. Simplify
4079 the search by only examining blocks numerically adjacent, since this
4080 is how find_basic_blocks created them. */
4082 static void
4083 tidy_fallthru_edges ()
4085 int i;
4087 for (i = 1; i < n_basic_blocks; ++i)
4089 basic_block b = BASIC_BLOCK (i - 1);
4090 basic_block c = BASIC_BLOCK (i);
4091 edge s;
4093 /* We care about simple conditional or unconditional jumps with
4094 a single successor.
4096 If we had a conditional branch to the next instruction when
4097 find_basic_blocks was called, then there will only be one
4098 out edge for the block which ended with the conditional
4099 branch (since we do not create duplicate edges).
4101 Furthermore, the edge will be marked as a fallthru because we
4102 merge the flags for the duplicate edges. So we do not want to
4103 check that the edge is not a FALLTHRU edge. */
4104 if ((s = b->succ) != NULL
4105 && ! (s->flags & EDGE_COMPLEX)
4106 && s->succ_next == NULL
4107 && s->dest == c
4108 /* If the jump insn has side effects, we can't tidy the edge. */
4109 && (GET_CODE (b->end) != JUMP_INSN
4110 || onlyjump_p (b->end)))
4111 tidy_fallthru_edge (s, b, c);
4115 /* Perform data flow analysis.
4116 F is the first insn of the function; FLAGS is a set of PROP_* flags
4117 to be used in accumulating flow info. */
4119 void
4120 life_analysis (f, file, flags)
4121 rtx f;
4122 FILE *file;
4123 int flags;
4125 #ifdef ELIMINABLE_REGS
4126 register int i;
4127 static struct {int from, to; } eliminables[] = ELIMINABLE_REGS;
4128 #endif
4130 /* Record which registers will be eliminated. We use this in
4131 mark_used_regs. */
4133 CLEAR_HARD_REG_SET (elim_reg_set);
4135 #ifdef ELIMINABLE_REGS
4136 for (i = 0; i < (int) ARRAY_SIZE (eliminables); i++)
4137 SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
4138 #else
4139 SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
4140 #endif
4142 if (! optimize)
4143 flags &= ~(PROP_LOG_LINKS | PROP_AUTOINC | PROP_ALLOW_CFG_CHANGES);
4145 /* The post-reload life analysis must have (on a global basis) the same
4146 registers live as was computed by reload itself; otherwise elimination
4147 offsets and such may be incorrect.
4149 Reload will mark some registers as live even though they do not
4150 appear in the rtl.
4152 We don't want to create new auto-incs after reload, since they
4153 are unlikely to be useful and can cause problems with shared
4154 stack slots. */
4155 if (reload_completed)
4156 flags &= ~(PROP_REG_INFO | PROP_AUTOINC);
4158 /* We want alias analysis information for local dead store elimination. */
4159 if (optimize && (flags & PROP_SCAN_DEAD_CODE))
4160 init_alias_analysis ();
4162 /* Always remove no-op moves. Do this before other processing so
4163 that we don't have to keep re-scanning them. */
4164 delete_noop_moves (f);
4166 /* Some targets can emit simpler epilogues if they know that sp was
4167 not ever modified during the function. After reload, of course,
4168 we've already emitted the epilogue so there's no sense searching. */
4169 if (! reload_completed)
4170 notice_stack_pointer_modification (f);
4172 /* Allocate and zero out data structures that will record the
4173 data from lifetime analysis. */
4174 allocate_reg_life_data ();
4175 allocate_bb_life_data ();
4177 /* Find the set of registers live on function exit. */
4178 mark_regs_live_at_end (EXIT_BLOCK_PTR->global_live_at_start);
4180 /* "Update" life info from zero. It'd be nice to begin the
4181 relaxation with just the exit and noreturn blocks, but that set
4182 is not immediately handy. */
4184 if (flags & PROP_REG_INFO)
4185 memset (regs_ever_live, 0, sizeof (regs_ever_live));
4186 update_life_info (NULL, UPDATE_LIFE_GLOBAL, flags);
4188 /* Clean up. */
4189 if (optimize && (flags & PROP_SCAN_DEAD_CODE))
4190 end_alias_analysis ();
4192 if (file)
4193 dump_flow_info (file);
4195 free_basic_block_vars (1);
4197 #ifdef ENABLE_CHECKING
4199 rtx insn;
4201 /* Search for any REG_LABEL notes which reference deleted labels. */
4202 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4204 rtx inote = find_reg_note (insn, REG_LABEL, NULL_RTX);
4206 if (inote && GET_CODE (inote) == NOTE_INSN_DELETED_LABEL)
4207 abort ();
4210 #endif
4211 /* Removing dead insns should've made jumptables really dead. */
4212 delete_dead_jumptables ();
4215 /* A subroutine of verify_wide_reg, called through for_each_rtx.
4216 Search for REGNO. If found, abort if it is not wider than word_mode. */
4218 static int
4219 verify_wide_reg_1 (px, pregno)
4220 rtx *px;
4221 void *pregno;
4223 rtx x = *px;
4224 unsigned int regno = *(int *) pregno;
4226 if (GET_CODE (x) == REG && REGNO (x) == regno)
4228 if (GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD)
4229 abort ();
4230 return 1;
4232 return 0;
4235 /* A subroutine of verify_local_live_at_start. Search through insns
4236 between HEAD and END looking for register REGNO. */
4238 static void
4239 verify_wide_reg (regno, head, end)
4240 int regno;
4241 rtx head, end;
4243 while (1)
4245 if (INSN_P (head)
4246 && for_each_rtx (&PATTERN (head), verify_wide_reg_1, &regno))
4247 return;
4248 if (head == end)
4249 break;
4250 head = NEXT_INSN (head);
4253 /* We didn't find the register at all. Something's way screwy. */
4254 if (rtl_dump_file)
4255 fprintf (rtl_dump_file, "Aborting in verify_wide_reg; reg %d\n", regno);
4256 print_rtl_and_abort ();
4259 /* A subroutine of update_life_info. Verify that there are no untoward
4260 changes in live_at_start during a local update. */
4262 static void
4263 verify_local_live_at_start (new_live_at_start, bb)
4264 regset new_live_at_start;
4265 basic_block bb;
4267 if (reload_completed)
4269 /* After reload, there are no pseudos, nor subregs of multi-word
4270 registers. The regsets should exactly match. */
4271 if (! REG_SET_EQUAL_P (new_live_at_start, bb->global_live_at_start))
4273 if (rtl_dump_file)
4275 fprintf (rtl_dump_file,
4276 "live_at_start mismatch in bb %d, aborting\n",
4277 bb->index);
4278 debug_bitmap_file (rtl_dump_file, bb->global_live_at_start);
4279 debug_bitmap_file (rtl_dump_file, new_live_at_start);
4281 print_rtl_and_abort ();
4284 else
4286 int i;
4288 /* Find the set of changed registers. */
4289 XOR_REG_SET (new_live_at_start, bb->global_live_at_start);
4291 EXECUTE_IF_SET_IN_REG_SET (new_live_at_start, 0, i,
4293 /* No registers should die. */
4294 if (REGNO_REG_SET_P (bb->global_live_at_start, i))
4296 if (rtl_dump_file)
4297 fprintf (rtl_dump_file,
4298 "Register %d died unexpectedly in block %d\n", i,
4299 bb->index);
4300 print_rtl_and_abort ();
4303 /* Verify that the now-live register is wider than word_mode. */
4304 verify_wide_reg (i, bb->head, bb->end);
4309 /* Updates life information starting with the basic blocks set in BLOCKS.
4310 If BLOCKS is null, consider it to be the universal set.
4312 If EXTENT is UPDATE_LIFE_LOCAL, such as after splitting or peepholeing,
4313 we are only expecting local modifications to basic blocks. If we find
4314 extra registers live at the beginning of a block, then we either killed
4315 useful data, or we have a broken split that wants data not provided.
4316 If we find registers removed from live_at_start, that means we have
4317 a broken peephole that is killing a register it shouldn't.
4319 ??? This is not true in one situation -- when a pre-reload splitter
4320 generates subregs of a multi-word pseudo, current life analysis will
4321 lose the kill. So we _can_ have a pseudo go live. How irritating.
4323 Including PROP_REG_INFO does not properly refresh regs_ever_live
4324 unless the caller resets it to zero. */
4326 void
4327 update_life_info (blocks, extent, prop_flags)
4328 sbitmap blocks;
4329 enum update_life_extent extent;
4330 int prop_flags;
4332 regset tmp;
4333 regset_head tmp_head;
4334 int i;
4336 tmp = INITIALIZE_REG_SET (tmp_head);
4338 /* Changes to the CFG are only allowed when
4339 doing a global update for the entire CFG. */
4340 if ((prop_flags & PROP_ALLOW_CFG_CHANGES)
4341 && (extent == UPDATE_LIFE_LOCAL || blocks))
4342 abort ();
4344 /* For a global update, we go through the relaxation process again. */
4345 if (extent != UPDATE_LIFE_LOCAL)
4347 for ( ; ; )
4349 int changed = 0;
4351 calculate_global_regs_live (blocks, blocks,
4352 prop_flags & (PROP_SCAN_DEAD_CODE
4353 | PROP_ALLOW_CFG_CHANGES));
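/* Unless we are both killing dead code and allowed to alter the CFG,
a single global relaxation is all that is needed. */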
4355 if ((prop_flags & (PROP_KILL_DEAD_CODE | PROP_ALLOW_CFG_CHANGES))
4356 != (PROP_KILL_DEAD_CODE | PROP_ALLOW_CFG_CHANGES))
4357 break;
4359 /* Removing dead code may allow the CFG to be simplified which
4360 in turn may allow for further dead code detection / removal. */
4361 for (i = n_basic_blocks - 1; i >= 0; --i)
4363 basic_block bb = BASIC_BLOCK (i);
4365 COPY_REG_SET (tmp, bb->global_live_at_end);
4366 changed |= propagate_block (bb, tmp, NULL, NULL,
4367 prop_flags & (PROP_SCAN_DEAD_CODE
4368 | PROP_KILL_DEAD_CODE));
4371 if (! changed || ! try_optimize_cfg (CLEANUP_EXPENSIVE))
4372 break;
4374 delete_unreachable_blocks ();
4375 mark_critical_edges ();
4378 /* If asked, remove notes from the blocks we'll update. */
4379 if (extent == UPDATE_LIFE_GLOBAL_RM_NOTES)
4380 count_or_remove_death_notes (blocks, 1);
4383 if (blocks)
4385 EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
4387 basic_block bb = BASIC_BLOCK (i);
4389 COPY_REG_SET (tmp, bb->global_live_at_end);
4390 propagate_block (bb, tmp, NULL, NULL, prop_flags);
4392 if (extent == UPDATE_LIFE_LOCAL)
4393 verify_local_live_at_start (tmp, bb);
4396 else
4398 for (i = n_basic_blocks - 1; i >= 0; --i)
4400 basic_block bb = BASIC_BLOCK (i);
4402 COPY_REG_SET (tmp, bb->global_live_at_end);
4403 propagate_block (bb, tmp, NULL, NULL, prop_flags);
4405 if (extent == UPDATE_LIFE_LOCAL)
4406 verify_local_live_at_start (tmp, bb);
4410 FREE_REG_SET (tmp);
4412 if (prop_flags & PROP_REG_INFO)
4414 /* The only pseudos that are live at the beginning of the function
4415 are those that were not set anywhere in the function. local-alloc
4416 doesn't know how to handle these correctly, so mark them as not
4417 local to any one basic block. */
4418 EXECUTE_IF_SET_IN_REG_SET (ENTRY_BLOCK_PTR->global_live_at_end,
4419 FIRST_PSEUDO_REGISTER, i,
4420 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
4422 /* We have a problem with any pseudoreg that lives across the setjmp.
4423 ANSI says that if a user variable does not change in value between
4424 the setjmp and the longjmp, then the longjmp preserves it. This
4425 includes longjmp from a place where the pseudo appears dead.
4426 (In principle, the value still exists if it is in scope.)
4427 If the pseudo goes in a hard reg, some other value may occupy
4428 that hard reg where this pseudo is dead, thus clobbering the pseudo.
4429 Conclusion: such a pseudo must not go in a hard reg. */
4430 EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
4431 FIRST_PSEUDO_REGISTER, i,
4433 if (regno_reg_rtx[i] != 0)
4435 REG_LIVE_LENGTH (i) = -1;
4436 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
4442 /* Free the variables allocated by find_basic_blocks.
4444 KEEP_HEAD_END_P is non-zero if basic_block_info is not to be freed. */
4446 void
4447 free_basic_block_vars (keep_head_end_p)
4448 int keep_head_end_p;
4450 if (basic_block_for_insn)
4452 VARRAY_FREE (basic_block_for_insn);
4453 basic_block_for_insn = NULL;
4456 if (! keep_head_end_p)
4458 if (basic_block_info)
4460 clear_edges ();
4461 VARRAY_FREE (basic_block_info);
4463 n_basic_blocks = 0;
4465 ENTRY_BLOCK_PTR->aux = NULL;
4466 ENTRY_BLOCK_PTR->global_live_at_end = NULL;
4467 EXIT_BLOCK_PTR->aux = NULL;
4468 EXIT_BLOCK_PTR->global_live_at_start = NULL;
4472 /* Delete any insns that copy a register to itself. */
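/* A typical no-op move has a pattern such as (set (reg:SI 42) (reg:SI 42));
noop_move_p recognizes these below. */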
4474 void
4475 delete_noop_moves (f)
4476 rtx f ATTRIBUTE_UNUSED;
4478 int i;
4479 rtx insn, next;
4480 basic_block bb;
4482 for (i = 0; i < n_basic_blocks; i++)
4484 bb = BASIC_BLOCK (i);
4485 for (insn = bb->head; insn != NEXT_INSN (bb->end); insn = next)
4487 next = NEXT_INSN (insn);
4488 if (INSN_P (insn) && noop_move_p (insn))
4490 /* Do not call flow_delete_insn here to not confuse backward
4491 pointers of LIBCALL block. */
4492 PUT_CODE (insn, NOTE);
4493 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
4494 NOTE_SOURCE_FILE (insn) = 0;
4500 /* Delete any jump tables never referenced. We can't delete them at the
4501 time of removing the tablejump insn, as they are referenced by the preceding
4502 insns computing the destination, so we delay deleting them and garbage-collect
4503 them once life information is computed. */
4504 static void
4505 delete_dead_jumptables ()
4507 rtx insn, next;
4508 for (insn = get_insns (); insn; insn = next)
4510 next = NEXT_INSN (insn);
4511 if (GET_CODE (insn) == CODE_LABEL
4512 && LABEL_NUSES (insn) == 0
4513 && GET_CODE (next) == JUMP_INSN
4514 && (GET_CODE (PATTERN (next)) == ADDR_VEC
4515 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
4517 if (rtl_dump_file)
4518 fprintf (rtl_dump_file, "Dead jumptable %i removed\n", INSN_UID (insn));
4519 flow_delete_insn (NEXT_INSN (insn));
4520 flow_delete_insn (insn);
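/* Resume the scan after the jump table we just deleted. */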
4521 next = NEXT_INSN (next);
4526 /* Determine if the stack pointer is constant over the life of the function.
4527 Only useful before prologues have been emitted. */
4529 static void
4530 notice_stack_pointer_modification_1 (x, pat, data)
4531 rtx x;
4532 rtx pat ATTRIBUTE_UNUSED;
4533 void *data ATTRIBUTE_UNUSED;
4535 if (x == stack_pointer_rtx
4536 /* The stack pointer is only modified indirectly as the result
4537 of a push until later in flow. See the comments in rtl.texi
4538 regarding Embedded Side-Effects on Addresses. */
4539 || (GET_CODE (x) == MEM
4540 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == 'a'
4541 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx))
4542 current_function_sp_is_unchanging = 0;
4545 static void
4546 notice_stack_pointer_modification (f)
4547 rtx f;
4549 rtx insn;
4551 /* Assume that the stack pointer is unchanging if alloca hasn't
4552 been used. */
4553 current_function_sp_is_unchanging = !current_function_calls_alloca;
4554 if (! current_function_sp_is_unchanging)
4555 return;
4557 for (insn = f; insn; insn = NEXT_INSN (insn))
4559 if (INSN_P (insn))
4561 /* Check if insn modifies the stack pointer. */
4562 note_stores (PATTERN (insn), notice_stack_pointer_modification_1,
4563 NULL);
4564 if (! current_function_sp_is_unchanging)
4565 return;
4570 /* Mark a register in SET. Hard registers in large modes get all
4571 of their component registers set as well. */
4573 static void
4574 mark_reg (reg, xset)
4575 rtx reg;
4576 void *xset;
4578 regset set = (regset) xset;
4579 int regno = REGNO (reg);
4581 if (GET_MODE (reg) == BLKmode)
4582 abort ();
4584 SET_REGNO_REG_SET (set, regno);
4585 if (regno < FIRST_PSEUDO_REGISTER)
4587 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
4588 while (--n > 0)
4589 SET_REGNO_REG_SET (set, regno + n);
4593 /* Mark those regs which are needed at the end of the function as live
4594 at the end of the last basic block. */
4596 static void
4597 mark_regs_live_at_end (set)
4598 regset set;
4600 unsigned int i;
4602 /* If exiting needs the right stack value, consider the stack pointer
4603 live at the end of the function. */
4604 if ((HAVE_epilogue && reload_completed)
4605 || ! EXIT_IGNORE_STACK
4606 || (! FRAME_POINTER_REQUIRED
4607 && ! current_function_calls_alloca
4608 && flag_omit_frame_pointer)
4609 || current_function_sp_is_unchanging)
4611 SET_REGNO_REG_SET (set, STACK_POINTER_REGNUM);
4614 /* Mark the frame pointer if needed at the end of the function. If
4615 we end up eliminating it, it will be removed from the live list
4616 of each basic block by reload. */
4618 if (! reload_completed || frame_pointer_needed)
4620 SET_REGNO_REG_SET (set, FRAME_POINTER_REGNUM);
4621 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4622 /* If they are different, also mark the hard frame pointer as live. */
4623 if (! LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
4624 SET_REGNO_REG_SET (set, HARD_FRAME_POINTER_REGNUM);
4625 #endif
4628 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
4629 /* Many architectures have a GP register even without flag_pic.
4630 Assume the pic register is not in use, or will be handled by
4631 other means, if it is not fixed. */
4632 if (PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
4633 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
4634 SET_REGNO_REG_SET (set, PIC_OFFSET_TABLE_REGNUM);
4635 #endif
4637 /* Mark all global registers, and all registers used by the epilogue
4638 as being live at the end of the function since they may be
4639 referenced by our caller. */
4640 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4641 if (global_regs[i] || EPILOGUE_USES (i))
4642 SET_REGNO_REG_SET (set, i);
4644 if (HAVE_epilogue && reload_completed)
4646 /* Mark all call-saved registers that we actually used. */
4647 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4648 if (regs_ever_live[i] && ! call_used_regs[i] && ! LOCAL_REGNO (i))
4649 SET_REGNO_REG_SET (set, i);
4652 #ifdef EH_RETURN_DATA_REGNO
4653 /* Mark the registers that will contain data for the handler. */
4654 if (reload_completed && current_function_calls_eh_return)
4655 for (i = 0; ; ++i)
4657 unsigned regno = EH_RETURN_DATA_REGNO(i);
4658 if (regno == INVALID_REGNUM)
4659 break;
4660 SET_REGNO_REG_SET (set, regno);
4662 #endif
4663 #ifdef EH_RETURN_STACKADJ_RTX
4664 if ((! HAVE_epilogue || ! reload_completed)
4665 && current_function_calls_eh_return)
4667 rtx tmp = EH_RETURN_STACKADJ_RTX;
4668 if (tmp && REG_P (tmp))
4669 mark_reg (tmp, set);
4671 #endif
4672 #ifdef EH_RETURN_HANDLER_RTX
4673 if ((! HAVE_epilogue || ! reload_completed)
4674 && current_function_calls_eh_return)
4676 rtx tmp = EH_RETURN_HANDLER_RTX;
4677 if (tmp && REG_P (tmp))
4678 mark_reg (tmp, set);
4680 #endif
4682 /* Mark function return value. */
4683 diddle_return_value (mark_reg, set);
4686 /* Callback function for for_each_successor_phi. DATA is a regset.
4687 Sets the SRC_REGNO, the regno of the phi alternative for phi node
4688 INSN, in the regset. */
4690 static int
4691 set_phi_alternative_reg (insn, dest_regno, src_regno, data)
4692 rtx insn ATTRIBUTE_UNUSED;
4693 int dest_regno ATTRIBUTE_UNUSED;
4694 int src_regno;
4695 void *data;
4697 regset live = (regset) data;
4698 SET_REGNO_REG_SET (live, src_regno);
4699 return 0;
4702 /* Propagate global life info around the graph of basic blocks. Begin
4703 considering blocks with their corresponding bit set in BLOCKS_IN.
4704 If BLOCKS_IN is null, consider it the universal set.
4706 BLOCKS_OUT is set for every block that was changed. */
4708 static void
4709 calculate_global_regs_live (blocks_in, blocks_out, flags)
4710 sbitmap blocks_in, blocks_out;
4711 int flags;
4713 basic_block *queue, *qhead, *qtail, *qend;
4714 regset tmp, new_live_at_end, call_used;
4715 regset_head tmp_head, call_used_head;
4716 regset_head new_live_at_end_head;
4717 int i;
4719 tmp = INITIALIZE_REG_SET (tmp_head);
4720 new_live_at_end = INITIALIZE_REG_SET (new_live_at_end_head);
4721 call_used = INITIALIZE_REG_SET (call_used_head);
4723 /* Inconveniently, this is only readily available in hard reg set form. */
4724 for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
4725 if (call_used_regs[i])
4726 SET_REGNO_REG_SET (call_used, i);
4728 /* Create a worklist. Allocate an extra slot for ENTRY_BLOCK, and one
4729 because the `head == tail' style test for an empty queue doesn't
4730 work with a full queue. */
4731 queue = (basic_block *) xmalloc ((n_basic_blocks + 2) * sizeof (*queue));
4732 qtail = queue;
4733 qhead = qend = queue + n_basic_blocks + 2;
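/* QHEAD and QTAIL wrap around at QEND, so the array is used as a
circular buffer; the extra slots keep a full queue from satisfying
the head == tail emptiness test. */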
4735 /* Queue the blocks set in the initial mask. Do this in reverse block
4736 number order so that the first round is more likely to do
4737 useful work. We use AUX non-null to flag that the block is queued. */
4738 if (blocks_in)
4740 /* Clear out the garbage that might be hanging out in bb->aux. */
4741 for (i = n_basic_blocks - 1; i >= 0; --i)
4742 BASIC_BLOCK (i)->aux = NULL;
4744 EXECUTE_IF_SET_IN_SBITMAP (blocks_in, 0, i,
4746 basic_block bb = BASIC_BLOCK (i);
4747 *--qhead = bb;
4748 bb->aux = bb;
4751 else
4753 for (i = 0; i < n_basic_blocks; ++i)
4755 basic_block bb = BASIC_BLOCK (i);
4756 *--qhead = bb;
4757 bb->aux = bb;
4761 if (blocks_out)
4762 sbitmap_zero (blocks_out);
4764 /* We work through the queue until there are no more blocks. What
4765 is live at the end of this block is precisely the union of what
4766 is live at the beginning of all its successors. So, we set its
4767 GLOBAL_LIVE_AT_END field based on the GLOBAL_LIVE_AT_START field
4768 for its successors. Then, we compute GLOBAL_LIVE_AT_START for
4769 this block by walking through the instructions in this block in
4770 reverse order and updating as we go. If that changed
4771 GLOBAL_LIVE_AT_START, we add the predecessors of the block to the
4772 queue; they will now need to recalculate GLOBAL_LIVE_AT_END.
4774 We are guaranteed to terminate, because GLOBAL_LIVE_AT_START
4775 never shrinks. If a register appears in GLOBAL_LIVE_AT_START, it
4776 must either be live at the end of the block, or used within the
4777 block. In the latter case, it will certainly never disappear
4778 from GLOBAL_LIVE_AT_START. In the former case, the register
4779 could go away only if it disappeared from GLOBAL_LIVE_AT_START
4780 for one of the successor blocks. By induction, that cannot
4781 occur. */
4782 while (qhead != qtail)
4784 int rescan, changed;
4785 basic_block bb;
4786 edge e;
4788 bb = *qhead++;
4789 if (qhead == qend)
4790 qhead = queue;
4791 bb->aux = NULL;
4793 /* Begin by propagating live_at_start from the successor blocks. */
4794 CLEAR_REG_SET (new_live_at_end);
4795 for (e = bb->succ; e; e = e->succ_next)
4797 basic_block sb = e->dest;
4799 /* Call-clobbered registers die across exception and call edges. */
4800 /* ??? Abnormal call edges ignored for the moment, as this gets
4801 confused by sibling call edges, which crashes reg-stack. */
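/* That is, propagate from SB only those registers in its live_at_start
that are not call-used. */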
4802 if (e->flags & EDGE_EH)
4804 bitmap_operation (tmp, sb->global_live_at_start,
4805 call_used, BITMAP_AND_COMPL);
4806 IOR_REG_SET (new_live_at_end, tmp);
4808 else
4809 IOR_REG_SET (new_live_at_end, sb->global_live_at_start);
4812 /* The all-important stack pointer must always be live. */
4813 SET_REGNO_REG_SET (new_live_at_end, STACK_POINTER_REGNUM);
4815 /* Before reload, there are a few registers that must be forced
4816 live everywhere -- which might not already be the case for
4817 blocks within infinite loops. */
4818 if (! reload_completed)
4820 /* Any reference to any pseudo before reload is a potential
4821 reference of the frame pointer. */
4822 SET_REGNO_REG_SET (new_live_at_end, FRAME_POINTER_REGNUM);
4824 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4825 /* Pseudos with argument area equivalences may require
4826 reloading via the argument pointer. */
4827 if (fixed_regs[ARG_POINTER_REGNUM])
4828 SET_REGNO_REG_SET (new_live_at_end, ARG_POINTER_REGNUM);
4829 #endif
4831 /* Any constant, or pseudo with constant equivalences, may
4832 require reloading from memory using the pic register. */
4833 if (PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
4834 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
4835 SET_REGNO_REG_SET (new_live_at_end, PIC_OFFSET_TABLE_REGNUM);
4838 /* Regs used in phi nodes are not included in
4839 global_live_at_start, since they are live only along a
4840 particular edge. Set those regs that are live because of a
4841 phi node alternative corresponding to this particular block. */
4842 if (in_ssa_form)
4843 for_each_successor_phi (bb, &set_phi_alternative_reg,
4844 new_live_at_end);
4846 if (bb == ENTRY_BLOCK_PTR)
4848 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
4849 continue;
4852 /* On our first pass through this block, we'll go ahead and continue.
4853 Recognize the first pass by local_set being NULL. On subsequent passes, we
4854 get to skip out early if live_at_end wouldn't have changed. */
4856 if (bb->local_set == NULL)
4858 bb->local_set = OBSTACK_ALLOC_REG_SET (&flow_obstack);
4859 bb->cond_local_set = OBSTACK_ALLOC_REG_SET (&flow_obstack);
4860 rescan = 1;
4862 else
4864 /* If any bits were removed from live_at_end, we'll have to
4865 rescan the block. This wouldn't be necessary if we had
4866 precalculated local_live, however with PROP_SCAN_DEAD_CODE
4867 local_live is really dependent on live_at_end. */
4868 CLEAR_REG_SET (tmp);
4869 rescan = bitmap_operation (tmp, bb->global_live_at_end,
4870 new_live_at_end, BITMAP_AND_COMPL);
4872 if (! rescan)
4874 /* If any of the registers in the new live_at_end set are
4875 conditionally set in this basic block, we must rescan.
4876 This is because conditional lifetimes at the end of the
4877 block do not just take the live_at_end set into account,
4878 but also the liveness at the start of each successor
4879 block. We can miss changes in those sets if we only
4880 compare the new live_at_end against the previous one. */
4881 CLEAR_REG_SET (tmp);
4882 rescan = bitmap_operation (tmp, new_live_at_end,
4883 bb->cond_local_set, BITMAP_AND);
4886 if (! rescan)
4888 /* Find the set of changed bits. Take this opportunity
4889 to notice when this set is empty and exit early. */
4890 CLEAR_REG_SET (tmp);
4891 changed = bitmap_operation (tmp, bb->global_live_at_end,
4892 new_live_at_end, BITMAP_XOR);
4893 if (! changed)
4894 continue;
4896 /* If any of the changed bits overlap with local_set,
4897 we'll have to rescan the block. Detect overlap by
4898 the AND with ~local_set turning off bits. */
4899 rescan = bitmap_operation (tmp, tmp, bb->local_set,
4900 BITMAP_AND_COMPL);
4904 /* Let our caller know that BB changed enough to require its
4905 death notes updated. */
4906 if (blocks_out)
4907 SET_BIT (blocks_out, bb->index);
4909 if (! rescan)
4911 /* Add to live_at_start the set of all registers in
4912 new_live_at_end that aren't in the old live_at_end. */
4914 bitmap_operation (tmp, new_live_at_end, bb->global_live_at_end,
4915 BITMAP_AND_COMPL);
4916 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
4918 changed = bitmap_operation (bb->global_live_at_start,
4919 bb->global_live_at_start,
4920 tmp, BITMAP_IOR);
4921 if (! changed)
4922 continue;
4924 else
4926 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
4928 /* Rescan the block insn by insn to turn (a copy of) live_at_end
4929 into live_at_start. */
4930 propagate_block (bb, new_live_at_end, bb->local_set,
4931 bb->cond_local_set, flags);
4933 /* If live_at_start didn't change, no need to go farther. */
4934 if (REG_SET_EQUAL_P (bb->global_live_at_start, new_live_at_end))
4935 continue;
4937 COPY_REG_SET (bb->global_live_at_start, new_live_at_end);
4940 /* Queue all predecessors of BB so that we may re-examine
4941 their live_at_end. */
4942 for (e = bb->pred; e; e = e->pred_next)
4944 basic_block pb = e->src;
4945 if (pb->aux == NULL)
4947 *qtail++ = pb;
4948 if (qtail == qend)
4949 qtail = queue;
4950 pb->aux = pb;
4955 FREE_REG_SET (tmp);
4956 FREE_REG_SET (new_live_at_end);
4957 FREE_REG_SET (call_used);
4959 if (blocks_out)
4961 EXECUTE_IF_SET_IN_SBITMAP (blocks_out, 0, i,
4963 basic_block bb = BASIC_BLOCK (i);
4964 FREE_REG_SET (bb->local_set);
4965 FREE_REG_SET (bb->cond_local_set);
4968 else
4970 for (i = n_basic_blocks - 1; i >= 0; --i)
4972 basic_block bb = BASIC_BLOCK (i);
4973 FREE_REG_SET (bb->local_set);
4974 FREE_REG_SET (bb->cond_local_set);
4978 free (queue);
4981 /* Subroutines of life analysis. */
4983 /* Allocate the permanent data structures that represent the results
4984 of life analysis. Not static since used also for stupid life analysis. */
4986 void
4987 allocate_bb_life_data ()
4989 register int i;
4991 for (i = 0; i < n_basic_blocks; i++)
4993 basic_block bb = BASIC_BLOCK (i);
4995 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
4996 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
4999 ENTRY_BLOCK_PTR->global_live_at_end
5000 = OBSTACK_ALLOC_REG_SET (&flow_obstack);
5001 EXIT_BLOCK_PTR->global_live_at_start
5002 = OBSTACK_ALLOC_REG_SET (&flow_obstack);
5004 regs_live_at_setjmp = OBSTACK_ALLOC_REG_SET (&flow_obstack);
5007 void
5008 allocate_reg_life_data ()
5010 int i;
5012 max_regno = max_reg_num ();
5014 /* Recalculate the register space, in case it has grown. Old style
5015 vector oriented regsets would set regset_{size,bytes} here also. */
5016 allocate_reg_info (max_regno, FALSE, FALSE);
5018 /* Reset all the data we'll collect in propagate_block and its
5019 subroutines. */
5020 for (i = 0; i < max_regno; i++)
5022 REG_N_SETS (i) = 0;
5023 REG_N_REFS (i) = 0;
5024 REG_N_DEATHS (i) = 0;
5025 REG_N_CALLS_CROSSED (i) = 0;
5026 REG_LIVE_LENGTH (i) = 0;
5027 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
5031 /* Delete dead instructions for propagate_block. */
5033 static void
5034 propagate_block_delete_insn (bb, insn)
5035 basic_block bb;
5036 rtx insn;
5038 rtx inote = find_reg_note (insn, REG_LABEL, NULL_RTX);
5040 /* If the insn referred to a label, and that label was attached to
5041 an ADDR_VEC, it's safe to delete the ADDR_VEC. In fact, it's
5042 pretty much mandatory to delete it, because the ADDR_VEC may be
5043 referencing labels that no longer exist.
5045 INSN may reference a deleted label, particularly when a jump
5046 table has been optimized into a direct jump. There's no
5047 real good way to fix up the reference to the deleted label
5048 when the label is deleted, so we just allow it here.
5050 After dead code elimination is complete, we do search for
5051 any REG_LABEL notes which reference deleted labels as a
5052 sanity check. */
5054 if (inote && GET_CODE (inote) == CODE_LABEL)
5056 rtx label = XEXP (inote, 0);
5057 rtx next;
5059 /* The label may be forced if it has been put in the constant
5060 pool. If that is the only use we must discard the table
5061 jump following it, but not the label itself. */
5062 if (LABEL_NUSES (label) == 1 + LABEL_PRESERVE_P (label)
5063 && (next = next_nonnote_insn (label)) != NULL
5064 && GET_CODE (next) == JUMP_INSN
5065 && (GET_CODE (PATTERN (next)) == ADDR_VEC
5066 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
5068 rtx pat = PATTERN (next);
5069 int diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
5070 int len = XVECLEN (pat, diff_vec_p);
5071 int i;
5073 for (i = 0; i < len; i++)
5074 LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0))--;
5076 flow_delete_insn (next);
5080 if (bb->end == insn)
5081 bb->end = PREV_INSN (insn);
5082 flow_delete_insn (insn);
5085 /* Delete dead libcalls for propagate_block. Return the insn
5086 before the libcall. */
5088 static rtx
5089 propagate_block_delete_libcall (bb, insn, note)
5090 basic_block bb;
5091 rtx insn, note;
5093 rtx first = XEXP (note, 0);
5094 rtx before = PREV_INSN (first);
5096 if (insn == bb->end)
5097 bb->end = before;
5099 flow_delete_insn_chain (first, insn);
5100 return before;
5103 /* Update the life-status of regs for one insn. Return the previous insn. */
rtx
5106 propagate_one_insn (pbi, insn)
5107 struct propagate_block_info *pbi;
5108 rtx insn;
5110 rtx prev = PREV_INSN (insn);
5111 int flags = pbi->flags;
5112 int insn_is_dead = 0;
5113 int libcall_is_dead = 0;
5114 rtx note;
5115 int i;
5117 if (! INSN_P (insn))
5118 return prev;
5120 note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
5121 if (flags & PROP_SCAN_DEAD_CODE)
5123 insn_is_dead = insn_dead_p (pbi, PATTERN (insn), 0, REG_NOTES (insn));
5124 libcall_is_dead = (insn_is_dead && note != 0
5125 && libcall_dead_p (pbi, note, insn));
5128 /* If an instruction consists of just dead store(s) on final pass,
5129 delete it. */
5130 if ((flags & PROP_KILL_DEAD_CODE) && insn_is_dead)
5132 /* If we're trying to delete a prologue or epilogue instruction
5133 that isn't flagged as possibly being dead, something is wrong.
5134 But if we are keeping the stack pointer depressed, we might well
5135 be deleting insns that are used to compute the amount to update
5136 it by, so they are fine. */
5137 if (reload_completed
5138 && !(TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
5139 && (TYPE_RETURNS_STACK_DEPRESSED
5140 (TREE_TYPE (current_function_decl))))
5141 && (((HAVE_epilogue || HAVE_prologue)
5142 && prologue_epilogue_contains (insn))
5143 || (HAVE_sibcall_epilogue
5144 && sibcall_epilogue_contains (insn)))
5145 && find_reg_note (insn, REG_MAYBE_DEAD, NULL_RTX) == 0)
5146 abort ();
5148 /* Record sets. Do this even for dead instructions, since they
5149 would have killed the values if they hadn't been deleted. */
5150 mark_set_regs (pbi, PATTERN (insn), insn);
5152 /* CC0 is now known to be dead. Either this insn used it,
5153 in which case it doesn't anymore, or clobbered it,
5154 so the next insn can't use it. */
5155 pbi->cc0_live = 0;
5157 if (libcall_is_dead)
5158 prev = propagate_block_delete_libcall (pbi->bb, insn, note);
5159 else
5160 propagate_block_delete_insn (pbi->bb, insn);
5162 return prev;
5165 /* See if this is an increment or decrement that can be merged into
5166 a following memory address. */
5167 #ifdef AUTO_INC_DEC
5169 register rtx x = single_set (insn);
5171 /* Does this instruction increment or decrement a register? */
5172 if ((flags & PROP_AUTOINC)
5173 && x != 0
5174 && GET_CODE (SET_DEST (x)) == REG
5175 && (GET_CODE (SET_SRC (x)) == PLUS
5176 || GET_CODE (SET_SRC (x)) == MINUS)
5177 && XEXP (SET_SRC (x), 0) == SET_DEST (x)
5178 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
5179 /* Ok, look for a following memory ref we can combine with.
5180 If one is found, change the memory ref to a PRE_INC
5181 or PRE_DEC, cancel this insn, and return 1.
5182 Return 0 if nothing has been done. */
5183 && try_pre_increment_1 (pbi, insn))
5184 return prev;
5186 #endif /* AUTO_INC_DEC */
5188 CLEAR_REG_SET (pbi->new_set);
5190 /* If this is not the final pass, and this insn is copying the value of
5191 a library call and it's dead, don't scan the insns that perform the
5192 library call, so that the call's arguments are not marked live. */
5193 if (libcall_is_dead)
5195 /* Record the death of the dest reg. */
5196 mark_set_regs (pbi, PATTERN (insn), insn);
5198 insn = XEXP (note, 0);
5199 return PREV_INSN (insn);
5201 else if (GET_CODE (PATTERN (insn)) == SET
5202 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
5203 && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
5204 && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx
5205 && GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == CONST_INT)
5206 /* We have an insn to pop a constant amount off the stack.
5207 (Such insns use PLUS regardless of the direction of the stack,
5208 and any insn to adjust the stack by a constant is always a pop.)
5209 These insns, if not dead stores, have no effect on life. */
5211 else
5213 /* Any regs live at the time of a call instruction must not go
5214 in a register clobbered by calls. Find all regs now live and
5215 record this for them. */
5217 if (GET_CODE (insn) == CALL_INSN && (flags & PROP_REG_INFO))
5218 EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i,
5219 { REG_N_CALLS_CROSSED (i)++; });
5221 /* Record sets. Do this even for dead instructions, since they
5222 would have killed the values if they hadn't been deleted. */
5223 mark_set_regs (pbi, PATTERN (insn), insn);
5225 if (GET_CODE (insn) == CALL_INSN)
5227 register int i;
5228 rtx note, cond;
5230 cond = NULL_RTX;
5231 if (GET_CODE (PATTERN (insn)) == COND_EXEC)
5232 cond = COND_EXEC_TEST (PATTERN (insn));
5234 /* Non-constant calls clobber memory. */
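/* Forget every memory location we were tracking as a potential dead
store; the callee may read any of them. */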
5235 if (! CONST_CALL_P (insn))
5237 free_EXPR_LIST_list (&pbi->mem_set_list);
5238 pbi->mem_set_list_len = 0;
5241 /* There may be extra registers to be clobbered. */
5242 for (note = CALL_INSN_FUNCTION_USAGE (insn);
5243 note;
5244 note = XEXP (note, 1))
5245 if (GET_CODE (XEXP (note, 0)) == CLOBBER)
5246 mark_set_1 (pbi, CLOBBER, XEXP (XEXP (note, 0), 0),
5247 cond, insn, pbi->flags);
5249 /* Calls change all call-used and global registers. */
5250 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5251 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
5253 /* We do not want REG_UNUSED notes for these registers. */
5254 mark_set_1 (pbi, CLOBBER, gen_rtx_REG (reg_raw_mode[i], i),
5255 cond, insn,
5256 pbi->flags & ~(PROP_DEATH_NOTES | PROP_REG_INFO));
5260 /* If an insn doesn't use CC0, it becomes dead since we assume
5261 that every insn clobbers it. So show it dead here;
5262 mark_used_regs will set it live if it is referenced. */
5263 pbi->cc0_live = 0;
5265 /* Record uses. */
5266 if (! insn_is_dead)
5267 mark_used_regs (pbi, PATTERN (insn), NULL_RTX, insn);
5269 /* Sometimes we may have inserted something before INSN (such as a move)
5270 when we make an auto-inc. So ensure we will scan those insns. */
5271 #ifdef AUTO_INC_DEC
5272 prev = PREV_INSN (insn);
5273 #endif
5275 if (! insn_is_dead && GET_CODE (insn) == CALL_INSN)
5277 register int i;
5278 rtx note, cond;
5280 cond = NULL_RTX;
5281 if (GET_CODE (PATTERN (insn)) == COND_EXEC)
5282 cond = COND_EXEC_TEST (PATTERN (insn));
5284 /* Calls use their arguments. */
5285 for (note = CALL_INSN_FUNCTION_USAGE (insn);
5286 note;
5287 note = XEXP (note, 1))
5288 if (GET_CODE (XEXP (note, 0)) == USE)
5289 mark_used_regs (pbi, XEXP (XEXP (note, 0), 0),
5290 cond, insn);
5292 /* The stack ptr is used (honorarily) by a CALL insn. */
5293 SET_REGNO_REG_SET (pbi->reg_live, STACK_POINTER_REGNUM);
5295 /* Calls may also reference any of the global registers,
5296 so they are made live. */
5297 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5298 if (global_regs[i])
5299 mark_used_reg (pbi, gen_rtx_REG (reg_raw_mode[i], i),
5300 cond, insn);
5304 /* On the final pass, update counts of how many insns each reg
5305 is live in. */
5306 if (flags & PROP_REG_INFO)
5307 EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i,
5308 { REG_LIVE_LENGTH (i)++; });
5310 return prev;
5313 /* Initialize a propagate_block_info struct for public consumption.
5314 Note that the structure itself is opaque to this file, but that
5315 the user can use the regsets provided here. */
5317 struct propagate_block_info *
5318 init_propagate_block_info (bb, live, local_set, cond_local_set, flags)
5319 basic_block bb;
5320 regset live, local_set, cond_local_set;
5321 int flags;
5323 struct propagate_block_info *pbi = xmalloc (sizeof (*pbi));
5325 pbi->bb = bb;
5326 pbi->reg_live = live;
5327 pbi->mem_set_list = NULL_RTX;
5328 pbi->mem_set_list_len = 0;
5329 pbi->local_set = local_set;
5330 pbi->cond_local_set = cond_local_set;
5331 pbi->cc0_live = 0;
5332 pbi->flags = flags;
5334 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
5335 pbi->reg_next_use = (rtx *) xcalloc (max_reg_num (), sizeof (rtx));
5336 else
5337 pbi->reg_next_use = NULL;
5339 pbi->new_set = BITMAP_XMALLOC ();
5341 #ifdef HAVE_conditional_execution
5342 pbi->reg_cond_dead = splay_tree_new (splay_tree_compare_ints, NULL,
5343 free_reg_cond_life_info);
5344 pbi->reg_cond_reg = BITMAP_XMALLOC ();
5346 /* If this block ends in a conditional branch, for each register live
5347 from one side of the branch and not the other, record the register
5348 as conditionally dead. */
5349 if (GET_CODE (bb->end) == JUMP_INSN
5350 && any_condjump_p (bb->end))
5352 regset_head diff_head;
5353 regset diff = INITIALIZE_REG_SET (diff_head);
5354 basic_block bb_true, bb_false;
5355 rtx cond_true, cond_false, set_src;
5356 int i;
5358 /* Identify the successor blocks. */
5359 bb_true = bb->succ->dest;
5360 if (bb->succ->succ_next != NULL)
5362 bb_false = bb->succ->succ_next->dest;
5364 if (bb->succ->flags & EDGE_FALLTHRU)
5366 basic_block t = bb_false;
5367 bb_false = bb_true;
5368 bb_true = t;
5370 else if (! (bb->succ->succ_next->flags & EDGE_FALLTHRU))
5371 abort ();
5373 else
5375 /* This can happen with a conditional jump to the next insn. */
5376 if (JUMP_LABEL (bb->end) != bb_true->head)
5377 abort ();
5379 /* Simplest way to do nothing. */
5380 bb_false = bb_true;
5383 /* Extract the condition from the branch. */
5384 set_src = SET_SRC (pc_set (bb->end));
5385 cond_true = XEXP (set_src, 0);
5386 cond_false = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond_true)),
5387 GET_MODE (cond_true), XEXP (cond_true, 0),
5388 XEXP (cond_true, 1));
5389 if (GET_CODE (XEXP (set_src, 1)) == PC)
5391 rtx t = cond_false;
5392 cond_false = cond_true;
5393 cond_true = t;
5396 /* Compute which registers lead different lives in the successors. */
5397 if (bitmap_operation (diff, bb_true->global_live_at_start,
5398 bb_false->global_live_at_start, BITMAP_XOR))
5400 rtx reg = XEXP (cond_true, 0);
5402 if (GET_CODE (reg) == SUBREG)
5403 reg = SUBREG_REG (reg);
5405 if (GET_CODE (reg) != REG)
5406 abort ();
5408 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (reg));
5410 /* For each such register, mark it conditionally dead. */
5411 EXECUTE_IF_SET_IN_REG_SET
5412 (diff, 0, i,
5414 struct reg_cond_life_info *rcli;
5415 rtx cond;
5417 rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
5419 if (REGNO_REG_SET_P (bb_true->global_live_at_start, i))
5420 cond = cond_false;
5421 else
5422 cond = cond_true;
5423 rcli->condition = cond;
5424 rcli->stores = const0_rtx;
5425 rcli->orig_condition = cond;
5427 splay_tree_insert (pbi->reg_cond_dead, i,
5428 (splay_tree_value) rcli);
5432 FREE_REG_SET (diff);
5434 #endif
5436 /* If this block has no successors, any stores to the frame that aren't
5437 used later in the block are dead. So make a pass over the block
5438 recording any such that are made and show them dead at the end. We do
5439 a very conservative and simple job here. */
5440 if (optimize
5441 && ! (TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
5442 && (TYPE_RETURNS_STACK_DEPRESSED
5443 (TREE_TYPE (current_function_decl))))
5444 && (flags & PROP_SCAN_DEAD_CODE)
5445 && (bb->succ == NULL
5446 || (bb->succ->succ_next == NULL
5447 && bb->succ->dest == EXIT_BLOCK_PTR
5448 && ! current_function_calls_eh_return)))
5450 rtx insn, set;
5451 for (insn = bb->end; insn != bb->head; insn = PREV_INSN (insn))
5452 if (GET_CODE (insn) == INSN
5453 && (set = single_set (insn))
5454 && GET_CODE (SET_DEST (set)) == MEM)
5456 rtx mem = SET_DEST (set);
5457 rtx canon_mem = canon_rtx (mem);
5459 /* This optimization is performed by faking a store to the
5460 memory at the end of the block. This doesn't work for
5461 unchanging memories because multiple stores to unchanging
5462 memory are illegal and alias analysis doesn't consider it. */
5463 if (RTX_UNCHANGING_P (canon_mem))
5464 continue;
5466 if (XEXP (canon_mem, 0) == frame_pointer_rtx
5467 || (GET_CODE (XEXP (canon_mem, 0)) == PLUS
5468 && XEXP (XEXP (canon_mem, 0), 0) == frame_pointer_rtx
5469 && GET_CODE (XEXP (XEXP (canon_mem, 0), 1)) == CONST_INT))
5470 add_to_mem_set_list (pbi, canon_mem);
5474 return pbi;
5477 /* Release a propagate_block_info struct. */
5479 void
5480 free_propagate_block_info (pbi)
5481 struct propagate_block_info *pbi;
5483 free_EXPR_LIST_list (&pbi->mem_set_list);
5485 BITMAP_XFREE (pbi->new_set);
5487 #ifdef HAVE_conditional_execution
5488 splay_tree_delete (pbi->reg_cond_dead);
5489 BITMAP_XFREE (pbi->reg_cond_reg);
5490 #endif
5492 if (pbi->reg_next_use)
5493 free (pbi->reg_next_use);
5495 free (pbi);
5498 /* Compute the registers live at the beginning of a basic block BB from
5499 those live at the end.
5501 When called, REG_LIVE contains those live at the end. On return, it
5502 contains those live at the beginning.
5504 LOCAL_SET, if non-null, will be set with all registers killed
5505 unconditionally by this basic block.
5506 Likewise, COND_LOCAL_SET, if non-null, will be set with all registers
5507 killed conditionally by this basic block. If there is any unconditional
5508 set of a register, then the corresponding bit will be set in LOCAL_SET
5509 and cleared in COND_LOCAL_SET.
5510 It is valid for LOCAL_SET and COND_LOCAL_SET to be the same set. In this
5511 case, the resulting set will be equal to the union of the two sets that
5512 would otherwise be computed.
5514 Return non-zero if an INSN is deleted (i.e. by dead code removal). */
int
5517 propagate_block (bb, live, local_set, cond_local_set, flags)
5518 basic_block bb;
5519 regset live;
5520 regset local_set;
5521 regset cond_local_set;
5522 int flags;
5524 struct propagate_block_info *pbi;
5525 rtx insn, prev;
5526 int changed;
5528 pbi = init_propagate_block_info (bb, live, local_set, cond_local_set, flags);
5530 if (flags & PROP_REG_INFO)
5532 register int i;
5534 /* Process the regs live at the end of the block.
5535 Mark them as not local to any one basic block. */
5536 EXECUTE_IF_SET_IN_REG_SET (live, 0, i,
5537 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
5540 /* Scan the block an insn at a time from end to beginning. */
5542 changed = 0;
5543 for (insn = bb->end;; insn = prev)
5545 /* If this is a call to `setjmp' et al, warn if any
5546 non-volatile datum is live. */
5547 if ((flags & PROP_REG_INFO)
5548 && GET_CODE (insn) == NOTE
5549 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
5550 IOR_REG_SET (regs_live_at_setjmp, pbi->reg_live);
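/* propagate_one_insn returns the insn to process next; if it deleted
INSN (or a whole dead libcall sequence), NEXT_INSN of the returned
insn no longer points at INSN, which is how we detect a change. */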
5552 prev = propagate_one_insn (pbi, insn);
5553 changed |= NEXT_INSN (prev) != insn;
5555 if (insn == bb->head)
5556 break;
5559 free_propagate_block_info (pbi);
5561 return changed;
5564 /* Return 1 if X (the body of an insn, or part of it) is just dead stores
5565 (SET expressions whose destinations are registers dead after the insn).
5566 NEEDED is the regset that says which regs are alive after the insn.
5568 Unless CALL_OK is non-zero, an insn is needed if it contains a CALL.
5570 If X is the entire body of an insn, NOTES contains the reg notes
5571 pertaining to the insn. */
5573 static int
5574 insn_dead_p (pbi, x, call_ok, notes)
5575 struct propagate_block_info *pbi;
5576 rtx x;
5577 int call_ok;
5578 rtx notes ATTRIBUTE_UNUSED;
5580 enum rtx_code code = GET_CODE (x);
5582 #ifdef AUTO_INC_DEC
5583 /* If flow is invoked after reload, we must take existing AUTO_INC
5584 expressions into account. */
5585 if (reload_completed)
5587 for (; notes; notes = XEXP (notes, 1))
5589 if (REG_NOTE_KIND (notes) == REG_INC)
5591 int regno = REGNO (XEXP (notes, 0));
5593 /* Don't delete insns to set global regs. */
5594 if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
5595 || REGNO_REG_SET_P (pbi->reg_live, regno))
5596 return 0;
5600 #endif
5602 /* If setting something that's a reg or part of one,
5603 see if that register's altered value will be live. */
5605 if (code == SET)
5607 rtx r = SET_DEST (x);
5609 #ifdef HAVE_cc0
5610 if (GET_CODE (r) == CC0)
5611 return ! pbi->cc0_live;
5612 #endif
5614 /* A SET that is a subroutine call cannot be dead. */
5615 if (GET_CODE (SET_SRC (x)) == CALL)
5617 if (! call_ok)
5618 return 0;
5621 /* Don't eliminate loads from volatile memory or volatile asms. */
5622 else if (volatile_refs_p (SET_SRC (x)))
5623 return 0;
5625 if (GET_CODE (r) == MEM)
5627 rtx temp, canon_r;
5629 if (MEM_VOLATILE_P (r) || GET_MODE (r) == BLKmode)
5630 return 0;
5632 canon_r = canon_rtx (r);
5634 /* Walk the set of memory locations we are currently tracking
5635 and see if one is an identical match to this memory location.
5636 If so, this memory write is dead (remember, we're walking
5637 backwards from the end of the block to the start). Since
5638 rtx_equal_p does not check the alias set or flags, we also
5639 must have the potential for them to conflict (anti_dependence). */
5640 for (temp = pbi->mem_set_list; temp != 0; temp = XEXP (temp, 1))
5641 if (anti_dependence (r, XEXP (temp, 0)))
5643 rtx mem = XEXP (temp, 0);
5645 if (rtx_equal_p (XEXP (canon_r, 0), XEXP (mem, 0))
5646 && (GET_MODE_SIZE (GET_MODE (canon_r))
5647 <= GET_MODE_SIZE (GET_MODE (mem))))
5648 return 1;
5650 #ifdef AUTO_INC_DEC
5651 /* Check if memory reference matches an auto increment. Only
5652 post increment/decrement or modify are valid. */
5653 if (GET_MODE (mem) == GET_MODE (r)
5654 && (GET_CODE (XEXP (mem, 0)) == POST_DEC
5655 || GET_CODE (XEXP (mem, 0)) == POST_INC
5656 || GET_CODE (XEXP (mem, 0)) == POST_MODIFY)
5657 && GET_MODE (XEXP (mem, 0)) == GET_MODE (r)
5658 && rtx_equal_p (XEXP (XEXP (mem, 0), 0), XEXP (r, 0)))
5659 return 1;
5660 #endif
5663 else
5665 while (GET_CODE (r) == SUBREG
5666 || GET_CODE (r) == STRICT_LOW_PART
5667 || GET_CODE (r) == ZERO_EXTRACT)
5668 r = XEXP (r, 0);
5670 if (GET_CODE (r) == REG)
5672 int regno = REGNO (r);
5674 /* Obvious. */
5675 if (REGNO_REG_SET_P (pbi->reg_live, regno))
5676 return 0;
5678 /* If this is a hard register, verify that subsequent
5679 words are not needed. */
5680 if (regno < FIRST_PSEUDO_REGISTER)
5682 int n = HARD_REGNO_NREGS (regno, GET_MODE (r));
5684 while (--n > 0)
5685 if (REGNO_REG_SET_P (pbi->reg_live, regno+n))
5686 return 0;
5689 /* Don't delete insns to set global regs. */
5690 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
5691 return 0;
5693 /* Make sure insns to set the stack pointer aren't deleted. */
5694 if (regno == STACK_POINTER_REGNUM)
5695 return 0;
5697 /* ??? These bits might be redundant with the force live bits
5698 in calculate_global_regs_live. We would delete from
5699 sequential sets; whether this actually affects real code
5700 for anything but the stack pointer I don't know. */
5701 /* Make sure insns to set the frame pointer aren't deleted. */
5702 if (regno == FRAME_POINTER_REGNUM
5703 && (! reload_completed || frame_pointer_needed))
5704 return 0;
5705 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
5706 if (regno == HARD_FRAME_POINTER_REGNUM
5707 && (! reload_completed || frame_pointer_needed))
5708 return 0;
5709 #endif
5711 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
5712 /* Make sure insns to set arg pointer are never deleted
5713 (if the arg pointer isn't fixed, there will be a USE
5714 for it, so we can treat it normally). */
5715 if (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
5716 return 0;
5717 #endif
5719 /* Otherwise, the set is dead. */
5720 return 1;
5725 /* If performing several activities, insn is dead if each activity
5726 is individually dead. Also, CLOBBERs and USEs can be ignored; a
5727 CLOBBER or USE that's inside a PARALLEL doesn't make the insn
5728 worth keeping. */
5729 else if (code == PARALLEL)
5731 int i = XVECLEN (x, 0);
5733 for (i--; i >= 0; i--)
5734 if (GET_CODE (XVECEXP (x, 0, i)) != CLOBBER
5735 && GET_CODE (XVECEXP (x, 0, i)) != USE
5736 && ! insn_dead_p (pbi, XVECEXP (x, 0, i), call_ok, NULL_RTX))
5737 return 0;
5739 return 1;
5742 /* A CLOBBER of a pseudo-register that is dead serves no purpose. That
5743 is not necessarily true for hard registers. */
5744 else if (code == CLOBBER && GET_CODE (XEXP (x, 0)) == REG
5745 && REGNO (XEXP (x, 0)) >= FIRST_PSEUDO_REGISTER
5746 && ! REGNO_REG_SET_P (pbi->reg_live, REGNO (XEXP (x, 0))))
5747 return 1;
5749 /* We do not check other CLOBBER or USE here. An insn consisting of just
5750 a CLOBBER or just a USE should not be deleted. */
5751 return 0;
5754 /* If INSN is the last insn in a libcall, and assuming INSN is dead,
5755 return 1 if the entire library call is dead.
5756 This is true if INSN copies a register (hard or pseudo)
5757 and if the hard return reg of the call insn is dead.
5758 (The caller should have tested the destination of the SET inside
5759 INSN already for death.)
5761 If this insn doesn't just copy a register, then we don't
5762 have an ordinary libcall. In that case, cse could not have
5763 managed to substitute the source for the dest later on,
5764 so we can assume the libcall is dead.
5766 PBI is the block info giving pseudoregs live before this insn.
5767 NOTE is the REG_RETVAL note of the insn. */
5769 static int
5770 libcall_dead_p (pbi, note, insn)
5771 struct propagate_block_info *pbi;
5772 rtx note;
5773 rtx insn;
5775 rtx x = single_set (insn);
5777 if (x)
5779 register rtx r = SET_SRC (x);
5780 if (GET_CODE (r) == REG)
5782 rtx call = XEXP (note, 0);
5783 rtx call_pat;
5784 register int i;
5786 /* Find the call insn. */
5787 while (call != insn && GET_CODE (call) != CALL_INSN)
5788 call = NEXT_INSN (call);
5790 /* If there is none, do nothing special,
5791 since ordinary death handling can understand these insns. */
5792 if (call == insn)
5793 return 0;
5795 /* See if the hard reg holding the value is dead.
5796 If this is a PARALLEL, find the call within it. */
5797 call_pat = PATTERN (call);
5798 if (GET_CODE (call_pat) == PARALLEL)
5800 for (i = XVECLEN (call_pat, 0) - 1; i >= 0; i--)
5801 if (GET_CODE (XVECEXP (call_pat, 0, i)) == SET
5802 && GET_CODE (SET_SRC (XVECEXP (call_pat, 0, i))) == CALL)
5803 break;
5805 /* This may be a library call that is returning a value
5806 via invisible pointer. Do nothing special, since
5807 ordinary death handling can understand these insns. */
5808 if (i < 0)
5809 return 0;
5811 call_pat = XVECEXP (call_pat, 0, i);
5814 return insn_dead_p (pbi, call_pat, 1, REG_NOTES (call));
5817 return 1;
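/* Illustrative sketch (hypothetical insns): a candidate libcall tail has
   the shape

       (call_insn ... (set (reg:SI 0) (call ...)))        ; hard return reg
       ...
       (insn (set (reg:SI 130) (reg:SI 0))                ; INSN
             (expr_list:REG_RETVAL <first libcall insn> ...))

   libcall_dead_p walks forward from the insn named in the REG_RETVAL note
   to the CALL_INSN and then asks insn_dead_p whether the call pattern
   itself (i.e. the hard return register) is dead.  */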
5820 /* Return 1 if register REGNO was used before it was set, i.e. if it is
5821 live at function entry. Don't count global register variables, variables
5822 in registers that can be used for function arg passing, or variables in
5823 fixed hard registers. */
5826 regno_uninitialized (regno)
5827 int regno;
5829 if (n_basic_blocks == 0
5830 || (regno < FIRST_PSEUDO_REGISTER
5831 && (global_regs[regno]
5832 || fixed_regs[regno]
5833 || FUNCTION_ARG_REGNO_P (regno))))
5834 return 0;
5836 return REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno);
5839 /* 1 if register REGNO was alive at a place where `setjmp' was called
5840 and was set more than once or is an argument.
5841 Such regs may be clobbered by `longjmp'. */
5844 regno_clobbered_at_setjmp (regno)
5845 int regno;
5847 if (n_basic_blocks == 0)
5848 return 0;
5850 return ((REG_N_SETS (regno) > 1
5851 || REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno))
5852 && REGNO_REG_SET_P (regs_live_at_setjmp, regno));
5855 /* Add MEM to PBI->MEM_SET_LIST. MEM should be canonical. Respect the
5856 maximal list size; look for overlaps in mode and select the largest. */
5857 static void
5858 add_to_mem_set_list (pbi, mem)
5859 struct propagate_block_info *pbi;
5860 rtx mem;
5862 rtx i;
5864 /* We don't know how large a BLKmode store is, so we must not
5865 take them into consideration. */
5866 if (GET_MODE (mem) == BLKmode)
5867 return;
5869 for (i = pbi->mem_set_list; i ; i = XEXP (i, 1))
5871 rtx e = XEXP (i, 0);
5872 if (rtx_equal_p (XEXP (mem, 0), XEXP (e, 0)))
5874 if (GET_MODE_SIZE (GET_MODE (mem)) > GET_MODE_SIZE (GET_MODE (e)))
5876 #ifdef AUTO_INC_DEC
5877 /* If we must store a copy of the mem, we can just modify
5878 the mode of the stored copy. */
5879 if (pbi->flags & PROP_AUTOINC)
5880 PUT_MODE (e, GET_MODE (mem));
5881 else
5882 #endif
5883 XEXP (i, 0) = mem;
5885 return;
5889 if (pbi->mem_set_list_len < MAX_MEM_SET_LIST_LEN)
5891 #ifdef AUTO_INC_DEC
5892 /* Store a copy of mem, otherwise the address may be
5893 clobbered by find_auto_inc. */
5894 if (pbi->flags & PROP_AUTOINC)
5895 mem = shallow_copy_rtx (mem);
5896 #endif
5897 pbi->mem_set_list = alloc_EXPR_LIST (0, mem, pbi->mem_set_list);
5898 pbi->mem_set_list_len++;
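/* Illustrative sketch (hypothetical addresses): if mem_set_list already
   holds (mem:HI (reg 120)) and a store to (mem:SI (reg 120)) arrives,
   the existing entry is widened to SImode rather than duplicated; a
   narrower store to the same address leaves the wider entry alone.  */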
5902 /* INSN references memory, possibly using autoincrement addressing modes.
5903 Find any entries on the mem_set_list that need to be invalidated due
5904 to an address change. */
5906 static void
5907 invalidate_mems_from_autoinc (pbi, insn)
5908 struct propagate_block_info *pbi;
5909 rtx insn;
5911 rtx note = REG_NOTES (insn);
5912 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
5913 if (REG_NOTE_KIND (note) == REG_INC)
5914 invalidate_mems_from_set (pbi, XEXP (note, 0));
5917 /* EXP is a REG. Remove any dependent entries from pbi->mem_set_list. */
5919 static void
5920 invalidate_mems_from_set (pbi, exp)
5921 struct propagate_block_info *pbi;
5922 rtx exp;
5924 rtx temp = pbi->mem_set_list;
5925 rtx prev = NULL_RTX;
5926 rtx next;
5928 while (temp)
5930 next = XEXP (temp, 1);
5931 if (reg_overlap_mentioned_p (exp, XEXP (temp, 0)))
5933 /* Splice this entry out of the list. */
5934 if (prev)
5935 XEXP (prev, 1) = next;
5936 else
5937 pbi->mem_set_list = next;
5938 free_EXPR_LIST_node (temp);
5939 pbi->mem_set_list_len--;
5941 else
5942 prev = temp;
5943 temp = next;
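/* Usage sketch (hypothetical register numbers): after a store to
   (mem:SI (plus (reg 121) (const_int 8))) has been recorded, a later
   (set (reg 121) ...) causes mark_set_1 to call invalidate_mems_from_set,
   which splices the stale entry out -- the remembered address no longer
   names the same location.  */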
5947 /* Process the registers that are set within X. Their bits are set to
5948 1 in the regset DEAD, because they are dead prior to this insn.
5950 If INSN is nonzero, it is the insn being processed.
5952 FLAGS is the set of operations to perform. */
5954 static void
5955 mark_set_regs (pbi, x, insn)
5956 struct propagate_block_info *pbi;
5957 rtx x, insn;
5959 rtx cond = NULL_RTX;
5960 rtx link;
5961 enum rtx_code code;
5963 if (insn)
5964 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
5966 if (REG_NOTE_KIND (link) == REG_INC)
5967 mark_set_1 (pbi, SET, XEXP (link, 0),
5968 (GET_CODE (x) == COND_EXEC
5969 ? COND_EXEC_TEST (x) : NULL_RTX),
5970 insn, pbi->flags);
5972 retry:
5973 switch (code = GET_CODE (x))
5975 case SET:
5976 case CLOBBER:
5977 mark_set_1 (pbi, code, SET_DEST (x), cond, insn, pbi->flags);
5978 return;
5980 case COND_EXEC:
5981 cond = COND_EXEC_TEST (x);
5982 x = COND_EXEC_CODE (x);
5983 goto retry;
5985 case PARALLEL:
5987 register int i;
5988 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
5990 rtx sub = XVECEXP (x, 0, i);
5991 switch (code = GET_CODE (sub))
5993 case COND_EXEC:
5994 if (cond != NULL_RTX)
5995 abort ();
5997 cond = COND_EXEC_TEST (sub);
5998 sub = COND_EXEC_CODE (sub);
5999 if (GET_CODE (sub) != SET && GET_CODE (sub) != CLOBBER)
6000 break;
6001 /* Fall through. */
6003 case SET:
6004 case CLOBBER:
6005 mark_set_1 (pbi, code, SET_DEST (sub), cond, insn, pbi->flags);
6006 break;
6008 default:
6009 break;
6012 break;
6015 default:
6016 break;
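/* Illustrative sketch (hypothetical pattern): for a body such as

       (parallel [(set (reg 122) ...)
                  (clobber (reg:CC 17))
                  (cond_exec (ne (reg 96) (const_int 0))
                             (set (reg 123) ...))])

   mark_set_regs invokes mark_set_1 once per SET or CLOBBER, passing the
   COND_EXEC test as COND only for the conditional member.  */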
6020 /* Process a single set, which appears in INSN. REG (which may not
6021 actually be a REG, it may also be a SUBREG, PARALLEL, etc.) is
6022 being set using the CODE (which may be SET, CLOBBER, or COND_EXEC).
6023 If the set is conditional (because it appears in a COND_EXEC), COND
6024 will be the condition. */
6026 static void
6027 mark_set_1 (pbi, code, reg, cond, insn, flags)
6028 struct propagate_block_info *pbi;
6029 enum rtx_code code;
6030 rtx reg, cond, insn;
6031 int flags;
6033 int regno_first = -1, regno_last = -1;
6034 unsigned long not_dead = 0;
6035 int i;
6037 /* Modifying just one hardware register of a multi-reg value or just a
6038 byte field of a register does not mean the value from before this insn
6039 is now dead. Of course, if it was dead after this insn, it is unused now. */
6041 switch (GET_CODE (reg))
6043 case PARALLEL:
6044 /* Some targets place small structures in registers for return values of
6045 functions. We have to detect this case specially here to get correct
6046 flow information. */
6047 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
6048 if (XEXP (XVECEXP (reg, 0, i), 0) != 0)
6049 mark_set_1 (pbi, code, XEXP (XVECEXP (reg, 0, i), 0), cond, insn,
6050 flags);
6051 return;
6053 case ZERO_EXTRACT:
6054 case SIGN_EXTRACT:
6055 case STRICT_LOW_PART:
6056 /* ??? Assumes STRICT_LOW_PART not used on multi-word registers. */
6057 do
6058 reg = XEXP (reg, 0);
6059 while (GET_CODE (reg) == SUBREG
6060 || GET_CODE (reg) == ZERO_EXTRACT
6061 || GET_CODE (reg) == SIGN_EXTRACT
6062 || GET_CODE (reg) == STRICT_LOW_PART);
6063 if (GET_CODE (reg) == MEM)
6064 break;
6065 not_dead = (unsigned long) REGNO_REG_SET_P (pbi->reg_live, REGNO (reg));
6066 /* Fall through. */
6068 case REG:
6069 regno_last = regno_first = REGNO (reg);
6070 if (regno_first < FIRST_PSEUDO_REGISTER)
6071 regno_last += HARD_REGNO_NREGS (regno_first, GET_MODE (reg)) - 1;
6072 break;
6074 case SUBREG:
6075 if (GET_CODE (SUBREG_REG (reg)) == REG)
6077 enum machine_mode outer_mode = GET_MODE (reg);
6078 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (reg));
6080 /* Identify the range of registers affected. This is moderately
6081 tricky for hard registers. See alter_subreg. */
6083 regno_last = regno_first = REGNO (SUBREG_REG (reg));
6084 if (regno_first < FIRST_PSEUDO_REGISTER)
6086 regno_first += subreg_regno_offset (regno_first, inner_mode,
6087 SUBREG_BYTE (reg),
6088 outer_mode);
6089 regno_last = (regno_first
6090 + HARD_REGNO_NREGS (regno_first, outer_mode) - 1);
6092 /* Since we've just adjusted the register number ranges, make
6093 sure REG matches. Otherwise some_was_live will be clear
6094 when it shouldn't have been, and we'll create incorrect
6095 REG_UNUSED notes. */
6096 reg = gen_rtx_REG (outer_mode, regno_first);
6098 else
6100 /* If the number of words in the subreg is less than the number
6101 of words in the full register, we have a well-defined partial
6102 set. Otherwise the high bits are undefined.
6104 This is only really applicable to pseudos, since we just took
6105 care of multi-word hard registers. */
6106 if (((GET_MODE_SIZE (outer_mode)
6107 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
6108 < ((GET_MODE_SIZE (inner_mode)
6109 + UNITS_PER_WORD - 1) / UNITS_PER_WORD))
6110 not_dead = (unsigned long) REGNO_REG_SET_P (pbi->reg_live,
6111 regno_first);
6113 reg = SUBREG_REG (reg);
6116 else
6117 reg = SUBREG_REG (reg);
6118 break;
6120 default:
6121 break;
6124 /* If this set is a MEM, then it kills any aliased writes.
6125 If this set is a REG, then it kills any MEMs which use the reg. */
6126 if (optimize && (flags & PROP_SCAN_DEAD_CODE))
6128 if (GET_CODE (reg) == REG)
6129 invalidate_mems_from_set (pbi, reg);
6131 /* If the memory reference had embedded side effects (autoincrement
6132 address modes), then we may need to kill some entries on the
6133 memory set list. */
6134 if (insn && GET_CODE (reg) == MEM)
6135 invalidate_mems_from_autoinc (pbi, insn);
6137 if (GET_CODE (reg) == MEM && ! side_effects_p (reg)
6138 /* ??? With more effort we could track conditional memory life. */
6139 && ! cond
6140 /* There are no REG_INC notes for SP, so we can't assume we'll see
6141 everything that invalidates it. To be safe, don't eliminate any
6142 stores through SP; none of them should be redundant anyway. */
6143 && ! reg_mentioned_p (stack_pointer_rtx, reg))
6144 add_to_mem_set_list (pbi, canon_rtx (reg));
6147 if (GET_CODE (reg) == REG
6148 && ! (regno_first == FRAME_POINTER_REGNUM
6149 && (! reload_completed || frame_pointer_needed))
6150 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
6151 && ! (regno_first == HARD_FRAME_POINTER_REGNUM
6152 && (! reload_completed || frame_pointer_needed))
6153 #endif
6154 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
6155 && ! (regno_first == ARG_POINTER_REGNUM && fixed_regs[regno_first])
6156 #endif
6159 int some_was_live = 0, some_was_dead = 0;
6161 for (i = regno_first; i <= regno_last; ++i)
6163 int needed_regno = REGNO_REG_SET_P (pbi->reg_live, i);
6164 if (pbi->local_set)
6166 /* Order of the set operation matters here since both
6167 sets may be the same. */
6168 CLEAR_REGNO_REG_SET (pbi->cond_local_set, i);
6169 if (cond != NULL_RTX
6170 && ! REGNO_REG_SET_P (pbi->local_set, i))
6171 SET_REGNO_REG_SET (pbi->cond_local_set, i);
6172 else
6173 SET_REGNO_REG_SET (pbi->local_set, i);
6175 if (code != CLOBBER)
6176 SET_REGNO_REG_SET (pbi->new_set, i);
6178 some_was_live |= needed_regno;
6179 some_was_dead |= ! needed_regno;
6182 #ifdef HAVE_conditional_execution
6183 /* Consider conditional death in deciding that the register needs
6184 a death note. */
6185 if (some_was_live && ! not_dead
6186 /* The stack pointer is never dead. Well, not strictly true,
6187 but it's very difficult to tell from here. Hopefully
6188 combine_stack_adjustments will fix up the most egregious
6189 errors. */
6190 && regno_first != STACK_POINTER_REGNUM)
6192 for (i = regno_first; i <= regno_last; ++i)
6193 if (! mark_regno_cond_dead (pbi, i, cond))
6194 not_dead |= ((unsigned long) 1) << (i - regno_first);
6196 #endif
6198 /* Additional data to record if this is the final pass. */
6199 if (flags & (PROP_LOG_LINKS | PROP_REG_INFO
6200 | PROP_DEATH_NOTES | PROP_AUTOINC))
6202 register rtx y;
6203 register int blocknum = pbi->bb->index;
6205 y = NULL_RTX;
6206 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
6208 y = pbi->reg_next_use[regno_first];
6210 /* The next use is no longer next, since a store intervenes. */
6211 for (i = regno_first; i <= regno_last; ++i)
6212 pbi->reg_next_use[i] = 0;
6215 if (flags & PROP_REG_INFO)
6217 for (i = regno_first; i <= regno_last; ++i)
6219 /* Count (weighted) references, stores, etc. This counts a
6220 register twice if it is modified, but that is correct. */
6221 REG_N_SETS (i) += 1;
6222 REG_N_REFS (i) += 1;
6223 REG_FREQ (i) += REG_FREQ_FROM_BB (pbi->bb);
6225 /* The insns where a reg is live are normally counted
6226 elsewhere, but we want the count to include the insn
6227 where the reg is set, and the normal counting mechanism
6228 would not count it. */
6229 REG_LIVE_LENGTH (i) += 1;
6232 /* If this is a hard reg, record this function uses the reg. */
6233 if (regno_first < FIRST_PSEUDO_REGISTER)
6235 for (i = regno_first; i <= regno_last; i++)
6236 regs_ever_live[i] = 1;
6238 else
6240 /* Keep track of which basic blocks each reg appears in. */
6241 if (REG_BASIC_BLOCK (regno_first) == REG_BLOCK_UNKNOWN)
6242 REG_BASIC_BLOCK (regno_first) = blocknum;
6243 else if (REG_BASIC_BLOCK (regno_first) != blocknum)
6244 REG_BASIC_BLOCK (regno_first) = REG_BLOCK_GLOBAL;
6248 if (! some_was_dead)
6250 if (flags & PROP_LOG_LINKS)
6252 /* Make a logical link from the next following insn
6253 that uses this register, back to this insn.
6254 The following insns have already been processed.
6256 We don't build a LOG_LINK for hard registers contained
6257 in ASM_OPERANDs. If these registers get replaced,
6258 we might wind up changing the semantics of the insn,
6259 even if reload can make what appear to be valid
6260 assignments later. */
6261 if (y && (BLOCK_NUM (y) == blocknum)
6262 && (regno_first >= FIRST_PSEUDO_REGISTER
6263 || asm_noperands (PATTERN (y)) < 0))
6264 LOG_LINKS (y) = alloc_INSN_LIST (insn, LOG_LINKS (y));
6267 else if (not_dead)
6269 else if (! some_was_live)
6271 if (flags & PROP_REG_INFO)
6272 REG_N_DEATHS (regno_first) += 1;
6274 if (flags & PROP_DEATH_NOTES)
6276 /* Note that dead stores have already been deleted
6277 when possible. If we get here, we have found a
6278 dead store that cannot be eliminated (because the
6279 same insn does something useful). Indicate this
6280 by marking the reg being set as dying here. */
6281 REG_NOTES (insn)
6282 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
6285 else
6287 if (flags & PROP_DEATH_NOTES)
6289 /* This is a case where we have a multi-word hard register
6290 and some, but not all, of the words of the register are
6291 needed in subsequent insns. Write REG_UNUSED notes
6292 for those parts that were not needed. This case should
6293 be rare. */
6295 for (i = regno_first; i <= regno_last; ++i)
6296 if (! REGNO_REG_SET_P (pbi->reg_live, i))
6297 REG_NOTES (insn)
6298 = alloc_EXPR_LIST (REG_UNUSED,
6299 gen_rtx_REG (reg_raw_mode[i], i),
6300 REG_NOTES (insn));
6305 /* Mark the register as being dead. */
6306 if (some_was_live
6307 /* The stack pointer is never dead. Well, not strictly true,
6308 but it's very difficult to tell from here. Hopefully
6309 combine_stack_adjustments will fix up the most egregious
6310 errors. */
6311 && regno_first != STACK_POINTER_REGNUM)
6313 for (i = regno_first; i <= regno_last; ++i)
6314 if (!(not_dead & (((unsigned long) 1) << (i - regno_first))))
6315 CLEAR_REGNO_REG_SET (pbi->reg_live, i);
6318 else if (GET_CODE (reg) == REG)
6320 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
6321 pbi->reg_next_use[regno_first] = 0;
6324 /* If this is the last pass and this is a SCRATCH, show it will be dying
6325 here and count it. */
6326 else if (GET_CODE (reg) == SCRATCH)
6328 if (flags & PROP_DEATH_NOTES)
6329 REG_NOTES (insn)
6330 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
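/* Illustrative summary of the note-making above (hypothetical registers):
   for a two-word hard register pair r4/r5 written by one insn where only
   r4 is live afterwards, r5 alone gets a REG_UNUSED note; a write to a
   pseudo that is live nowhere afterwards gets a single REG_UNUSED note
   for the whole register, and a write whose old value was still live is
   what clears the corresponding bits in pbi->reg_live.  */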
6334 #ifdef HAVE_conditional_execution
6335 /* Mark REGNO conditionally dead.
6336 Return true if the register is now unconditionally dead. */
6338 static int
6339 mark_regno_cond_dead (pbi, regno, cond)
6340 struct propagate_block_info *pbi;
6341 int regno;
6342 rtx cond;
6344 /* If this is a store to a predicate register, the value of the
6345 predicate is changing; we do not know that the predicate as seen
6346 before is the same as that seen after. Flush all dependent
6347 conditions from reg_cond_dead. This will make all such
6348 conditionally live registers unconditionally live. */
6349 if (REGNO_REG_SET_P (pbi->reg_cond_reg, regno))
6350 flush_reg_cond_reg (pbi, regno);
6352 /* If this is an unconditional store, remove any conditional
6353 life that may have existed. */
6354 if (cond == NULL_RTX)
6355 splay_tree_remove (pbi->reg_cond_dead, regno);
6356 else
6358 splay_tree_node node;
6359 struct reg_cond_life_info *rcli;
6360 rtx ncond;
6362 /* Otherwise this is a conditional set. Record that fact.
6363 It may have been conditionally used, or there may be a
6364 subsequent set with a complementary condition. */
6366 node = splay_tree_lookup (pbi->reg_cond_dead, regno);
6367 if (node == NULL)
6369 /* The register was unconditionally live previously.
6370 Record the current condition as the condition under
6371 which it is dead. */
6372 rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
6373 rcli->condition = cond;
6374 rcli->stores = cond;
6375 rcli->orig_condition = const0_rtx;
6376 splay_tree_insert (pbi->reg_cond_dead, regno,
6377 (splay_tree_value) rcli);
6379 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));
6381 /* Not unconditionally dead. */
6382 return 0;
6384 else
6386 /* The register was conditionally live previously.
6387 Add the new condition to the old. */
6388 rcli = (struct reg_cond_life_info *) node->value;
6389 ncond = rcli->condition;
6390 ncond = ior_reg_cond (ncond, cond, 1);
6391 if (rcli->stores == const0_rtx)
6392 rcli->stores = cond;
6393 else if (rcli->stores != const1_rtx)
6394 rcli->stores = ior_reg_cond (rcli->stores, cond, 1);
6396 /* If the register is now unconditionally dead, remove the entry
6397 in the splay_tree. A register is unconditionally dead if the
6398 dead condition ncond is true. A register is also unconditionally
6399 dead if the sum of all conditional stores is an unconditional
6400 store (stores is true), and the dead condition is identically the
6401 same as the original dead condition initialized at the end of
6402 the block. This is a pointer compare, not an rtx_equal_p
6403 compare. */
6404 if (ncond == const1_rtx
6405 || (ncond == rcli->orig_condition && rcli->stores == const1_rtx))
6406 splay_tree_remove (pbi->reg_cond_dead, regno);
6407 else
6409 rcli->condition = ncond;
6411 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));
6413 /* Not unconditionally dead. */
6414 return 0;
6419 return 1;
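/* Illustrative sketch (hypothetical predicate reg p0 and pseudo 124):
   after (cond_exec (ne p0 0) (set (reg 124) ...)) the reg_cond_dead
   entry for 124 records the death condition (ne p0 0); a later
   complementary (cond_exec (eq p0 0) (set (reg 124) ...)) ORs the two
   conditions to const1_rtx, the entry is removed, and the register is
   then treated as unconditionally dead.  */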
6422 /* Called from splay_tree_delete for pbi->reg_cond_life. */
6424 static void
6425 free_reg_cond_life_info (value)
6426 splay_tree_value value;
6428 struct reg_cond_life_info *rcli = (struct reg_cond_life_info *) value;
6429 free (rcli);
6432 /* Helper function for flush_reg_cond_reg. */
6434 static int
6435 flush_reg_cond_reg_1 (node, data)
6436 splay_tree_node node;
6437 void *data;
6439 struct reg_cond_life_info *rcli;
6440 int *xdata = (int *) data;
6441 unsigned int regno = xdata[0];
6443 /* Don't need to search if last flushed value was farther on in
6444 the in-order traversal. */
6445 if (xdata[1] >= (int) node->key)
6446 return 0;
6448 /* Splice out portions of the expression that refer to regno. */
6449 rcli = (struct reg_cond_life_info *) node->value;
6450 rcli->condition = elim_reg_cond (rcli->condition, regno);
6451 if (rcli->stores != const0_rtx && rcli->stores != const1_rtx)
6452 rcli->stores = elim_reg_cond (rcli->stores, regno);
6454 /* If the entire condition is now false, signal the node to be removed. */
6455 if (rcli->condition == const0_rtx)
6457 xdata[1] = node->key;
6458 return -1;
6460 else if (rcli->condition == const1_rtx)
6461 abort ();
6463 return 0;
6466 /* Flush all (sub) expressions referring to REGNO from REG_COND_LIVE. */
6468 static void
6469 flush_reg_cond_reg (pbi, regno)
6470 struct propagate_block_info *pbi;
6471 int regno;
6473 int pair[2];
6475 pair[0] = regno;
6476 pair[1] = -1;
6477 while (splay_tree_foreach (pbi->reg_cond_dead,
6478 flush_reg_cond_reg_1, pair) == -1)
6479 splay_tree_remove (pbi->reg_cond_dead, pair[1]);
6481 CLEAR_REGNO_REG_SET (pbi->reg_cond_reg, regno);
6484 /* Logical arithmetic on predicate conditions. IOR, NOT and AND.
6485 For ior/and, the ADD flag determines whether we want to add the new
6486 condition X to the old one unconditionally. If it is zero, we will
6487 only return a new expression if X allows us to simplify part of
6488 OLD, otherwise we return OLD unchanged to the caller.
6489 If ADD is nonzero, we will return a new condition in all cases. The
6490 toplevel caller of one of these functions should always pass 1 for
6491 ADD. */
6493 static rtx
6494 ior_reg_cond (old, x, add)
6495 rtx old, x;
6496 int add;
6498 rtx op0, op1;
6500 if (GET_RTX_CLASS (GET_CODE (old)) == '<')
6502 if (GET_RTX_CLASS (GET_CODE (x)) == '<'
6503 && REVERSE_CONDEXEC_PREDICATES_P (GET_CODE (x), GET_CODE (old))
6504 && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
6505 return const1_rtx;
6506 if (GET_CODE (x) == GET_CODE (old)
6507 && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
6508 return old;
6509 if (! add)
6510 return old;
6511 return gen_rtx_IOR (0, old, x);
6514 switch (GET_CODE (old))
6516 case IOR:
6517 op0 = ior_reg_cond (XEXP (old, 0), x, 0);
6518 op1 = ior_reg_cond (XEXP (old, 1), x, 0);
6519 if (op0 != XEXP (old, 0) || op1 != XEXP (old, 1))
6521 if (op0 == const0_rtx)
6522 return op1;
6523 if (op1 == const0_rtx)
6524 return op0;
6525 if (op0 == const1_rtx || op1 == const1_rtx)
6526 return const1_rtx;
6527 if (op0 == XEXP (old, 0))
6528 op0 = gen_rtx_IOR (0, op0, x);
6529 else
6530 op1 = gen_rtx_IOR (0, op1, x);
6531 return gen_rtx_IOR (0, op0, op1);
6533 if (! add)
6534 return old;
6535 return gen_rtx_IOR (0, old, x);
6537 case AND:
6538 op0 = ior_reg_cond (XEXP (old, 0), x, 0);
6539 op1 = ior_reg_cond (XEXP (old, 1), x, 0);
6540 if (op0 != XEXP (old, 0) || op1 != XEXP (old, 1))
6542 if (op0 == const1_rtx)
6543 return op1;
6544 if (op1 == const1_rtx)
6545 return op0;
6546 if (op0 == const0_rtx || op1 == const0_rtx)
6547 return const0_rtx;
6548 if (op0 == XEXP (old, 0))
6549 op0 = gen_rtx_IOR (0, op0, x);
6550 else
6551 op1 = gen_rtx_IOR (0, op1, x);
6552 return gen_rtx_AND (0, op0, op1);
6554 if (! add)
6555 return old;
6556 return gen_rtx_IOR (0, old, x);
6558 case NOT:
6559 op0 = and_reg_cond (XEXP (old, 0), not_reg_cond (x), 0);
6560 if (op0 != XEXP (old, 0))
6561 return not_reg_cond (op0);
6562 if (! add)
6563 return old;
6564 return gen_rtx_IOR (0, old, x);
6566 default:
6567 abort ();
6571 static rtx
6572 not_reg_cond (x)
6573 rtx x;
6575 enum rtx_code x_code;
6577 if (x == const0_rtx)
6578 return const1_rtx;
6579 else if (x == const1_rtx)
6580 return const0_rtx;
6581 x_code = GET_CODE (x);
6582 if (x_code == NOT)
6583 return XEXP (x, 0);
6584 if (GET_RTX_CLASS (x_code) == '<'
6585 && GET_CODE (XEXP (x, 0)) == REG)
6587 if (XEXP (x, 1) != const0_rtx)
6588 abort ();
6590 return gen_rtx_fmt_ee (reverse_condition (x_code),
6591 VOIDmode, XEXP (x, 0), const0_rtx);
6593 return gen_rtx_NOT (0, x);
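/* Illustrative examples of the predicate algebra (hypothetical reg 96):
   not_reg_cond maps (eq (reg 96) (const_int 0)) to
   (ne (reg 96) (const_int 0)) via reverse_condition, swaps const0_rtx
   and const1_rtx, strips an outer NOT, and only wraps anything else in
   a NOT as a last resort.  */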
6596 static rtx
6597 and_reg_cond (old, x, add)
6598 rtx old, x;
6599 int add;
6601 rtx op0, op1;
6603 if (GET_RTX_CLASS (GET_CODE (old)) == '<')
6605 if (GET_RTX_CLASS (GET_CODE (x)) == '<'
6606 && GET_CODE (x) == reverse_condition (GET_CODE (old))
6607 && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
6608 return const0_rtx;
6609 if (GET_CODE (x) == GET_CODE (old)
6610 && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
6611 return old;
6612 if (! add)
6613 return old;
6614 return gen_rtx_AND (0, old, x);
6617 switch (GET_CODE (old))
6619 case IOR:
6620 op0 = and_reg_cond (XEXP (old, 0), x, 0);
6621 op1 = and_reg_cond (XEXP (old, 1), x, 0);
6622 if (op0 != XEXP (old, 0) || op1 != XEXP (old, 1))
6624 if (op0 == const0_rtx)
6625 return op1;
6626 if (op1 == const0_rtx)
6627 return op0;
6628 if (op0 == const1_rtx || op1 == const1_rtx)
6629 return const1_rtx;
6630 if (op0 == XEXP (old, 0))
6631 op0 = gen_rtx_AND (0, op0, x);
6632 else
6633 op1 = gen_rtx_AND (0, op1, x);
6634 return gen_rtx_IOR (0, op0, op1);
6636 if (! add)
6637 return old;
6638 return gen_rtx_AND (0, old, x);
6640 case AND:
6641 op0 = and_reg_cond (XEXP (old, 0), x, 0);
6642 op1 = and_reg_cond (XEXP (old, 1), x, 0);
6643 if (op0 != XEXP (old, 0) || op1 != XEXP (old, 1))
6645 if (op0 == const1_rtx)
6646 return op1;
6647 if (op1 == const1_rtx)
6648 return op0;
6649 if (op0 == const0_rtx || op1 == const0_rtx)
6650 return const0_rtx;
6651 if (op0 == XEXP (old, 0))
6652 op0 = gen_rtx_AND (0, op0, x);
6653 else
6654 op1 = gen_rtx_AND (0, op1, x);
6655 return gen_rtx_AND (0, op0, op1);
6657 if (! add)
6658 return old;
6660 /* If X is identical to one of the existing terms of the AND,
6661 then just return what we already have. */
6662 /* ??? There really should be some sort of recursive check here in
6663 case there are nested ANDs. */
6664 if ((GET_CODE (XEXP (old, 0)) == GET_CODE (x)
6665 && REGNO (XEXP (XEXP (old, 0), 0)) == REGNO (XEXP (x, 0)))
6666 || (GET_CODE (XEXP (old, 1)) == GET_CODE (x)
6667 && REGNO (XEXP (XEXP (old, 1), 0)) == REGNO (XEXP (x, 0))))
6668 return old;
6670 return gen_rtx_AND (0, old, x);
6672 case NOT:
6673 op0 = ior_reg_cond (XEXP (old, 0), not_reg_cond (x), 0);
6674 if (op0 != XEXP (old, 0))
6675 return not_reg_cond (op0);
6676 if (! add)
6677 return old;
6678 return gen_rtx_AND (0, old, x);
6680 default:
6681 abort ();
6685 /* Given a condition X, remove references to reg REGNO and return the
6686 new condition. The removal will be done so that all conditions
6687 involving REGNO are considered to evaluate to false. This function
6688 is used when the value of REGNO changes. */
6690 static rtx
6691 elim_reg_cond (x, regno)
6692 rtx x;
6693 unsigned int regno;
6695 rtx op0, op1;
6697 if (GET_RTX_CLASS (GET_CODE (x)) == '<')
6699 if (REGNO (XEXP (x, 0)) == regno)
6700 return const0_rtx;
6701 return x;
6704 switch (GET_CODE (x))
6706 case AND:
6707 op0 = elim_reg_cond (XEXP (x, 0), regno);
6708 op1 = elim_reg_cond (XEXP (x, 1), regno);
6709 if (op0 == const0_rtx || op1 == const0_rtx)
6710 return const0_rtx;
6711 if (op0 == const1_rtx)
6712 return op1;
6713 if (op1 == const1_rtx)
6714 return op0;
6715 if (op0 == XEXP (x, 0) && op1 == XEXP (x, 1))
6716 return x;
6717 return gen_rtx_AND (0, op0, op1);
6719 case IOR:
6720 op0 = elim_reg_cond (XEXP (x, 0), regno);
6721 op1 = elim_reg_cond (XEXP (x, 1), regno);
6722 if (op0 == const1_rtx || op1 == const1_rtx)
6723 return const1_rtx;
6724 if (op0 == const0_rtx)
6725 return op1;
6726 if (op1 == const0_rtx)
6727 return op0;
6728 if (op0 == XEXP (x, 0) && op1 == XEXP (x, 1))
6729 return x;
6730 return gen_rtx_IOR (0, op0, op1);
6732 case NOT:
6733 op0 = elim_reg_cond (XEXP (x, 0), regno);
6734 if (op0 == const0_rtx)
6735 return const1_rtx;
6736 if (op0 == const1_rtx)
6737 return const0_rtx;
6738 if (op0 != XEXP (x, 0))
6739 return not_reg_cond (op0);
6740 return x;
6742 default:
6743 abort ();
6746 #endif /* HAVE_conditional_execution */
6748 #ifdef AUTO_INC_DEC
6750 /* Try to substitute the auto-inc expression INC as the address inside
6751 MEM which occurs in INSN. Currently, the address of MEM is an expression
6752 involving INCR_REG, and INCR is the next use of INCR_REG; it is an insn
6753 that has a single set whose source is a PLUS of INCR_REG and something
6754 else. */
6756 static void
6757 attempt_auto_inc (pbi, inc, insn, mem, incr, incr_reg)
6758 struct propagate_block_info *pbi;
6759 rtx inc, insn, mem, incr, incr_reg;
6761 int regno = REGNO (incr_reg);
6762 rtx set = single_set (incr);
6763 rtx q = SET_DEST (set);
6764 rtx y = SET_SRC (set);
6765 int opnum = XEXP (y, 0) == incr_reg ? 0 : 1;
6767 /* Make sure this reg appears only once in this insn. */
6768 if (count_occurrences (PATTERN (insn), incr_reg, 1) != 1)
6769 return;
6771 if (dead_or_set_p (incr, incr_reg)
6772 /* Mustn't autoinc an eliminable register. */
6773 && (regno >= FIRST_PSEUDO_REGISTER
6774 || ! TEST_HARD_REG_BIT (elim_reg_set, regno)))
6776 /* This is the simple case. Try to make the auto-inc. If
6777 we can't, we are done. Otherwise, we will do any
6778 needed updates below. */
6779 if (! validate_change (insn, &XEXP (mem, 0), inc, 0))
6780 return;
6782 else if (GET_CODE (q) == REG
6783 /* PREV_INSN used here to check the semi-open interval
6784 [insn,incr). */
6785 && ! reg_used_between_p (q, PREV_INSN (insn), incr)
6786 /* We must also check for sets of q as q may be
6787 a call clobbered hard register and there may
6788 be a call between PREV_INSN (insn) and incr. */
6789 && ! reg_set_between_p (q, PREV_INSN (insn), incr))
6791 /* We have *p followed sometime later by q = p+size.
6792 Both p and q must be live afterward,
6793 and q is not used between INSN and its assignment.
6794 Change it to q = p, ...*q..., q = q+size.
6795 Then fall into the usual case. */
6796 rtx insns, temp;
6798 start_sequence ();
6799 emit_move_insn (q, incr_reg);
6800 insns = get_insns ();
6801 end_sequence ();
6803 if (basic_block_for_insn)
6804 for (temp = insns; temp; temp = NEXT_INSN (temp))
6805 set_block_for_insn (temp, pbi->bb);
6807 /* If we can't make the auto-inc, or can't make the
6808 replacement into Y, exit. There's no point in making
6809 the change below if we can't do the auto-inc and doing
6810 so is not correct in the pre-inc case. */
6812 XEXP (inc, 0) = q;
6813 validate_change (insn, &XEXP (mem, 0), inc, 1);
6814 validate_change (incr, &XEXP (y, opnum), q, 1);
6815 if (! apply_change_group ())
6816 return;
6818 /* We now know we'll be doing this change, so emit the
6819 new insn(s) and do the updates. */
6820 emit_insns_before (insns, insn);
6822 if (pbi->bb->head == insn)
6823 pbi->bb->head = insns;
6825 /* INCR will become a NOTE and INSN won't contain a
6826 use of INCR_REG. If a use of INCR_REG was just placed in
6827 the insn before INSN, make that the next use.
6828 Otherwise, invalidate it. */
6829 if (GET_CODE (PREV_INSN (insn)) == INSN
6830 && GET_CODE (PATTERN (PREV_INSN (insn))) == SET
6831 && SET_SRC (PATTERN (PREV_INSN (insn))) == incr_reg)
6832 pbi->reg_next_use[regno] = PREV_INSN (insn);
6833 else
6834 pbi->reg_next_use[regno] = 0;
6836 incr_reg = q;
6837 regno = REGNO (q);
6839 /* REGNO is now used in INCR which is below INSN, but
6840 it previously wasn't live here. If we don't mark
6841 it as live, we'll put a REG_DEAD note for it
6842 on this insn, which is incorrect. */
6843 SET_REGNO_REG_SET (pbi->reg_live, regno);
6845 /* If there are any calls between INSN and INCR, show
6846 that REGNO now crosses them. */
6847 for (temp = insn; temp != incr; temp = NEXT_INSN (temp))
6848 if (GET_CODE (temp) == CALL_INSN)
6849 REG_N_CALLS_CROSSED (regno)++;
6851 else
6852 return;
6854 /* If we haven't returned, it means we were able to make the
6855 auto-inc, so update the status. First, record that this insn
6856 has an implicit side effect. */
6858 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, incr_reg, REG_NOTES (insn));
6860 /* Modify the old increment-insn to simply copy
6861 the already-incremented value of our register. */
6862 if (! validate_change (incr, &SET_SRC (set), incr_reg, 0))
6863 abort ();
6865 /* If that makes it a no-op (copying the register into itself) delete
6866 it so it won't appear to be a "use" and a "set" of this
6867 register. */
6868 if (REGNO (SET_DEST (set)) == REGNO (incr_reg))
6870 /* If the original source was dead, it's dead now. */
6871 rtx note;
6873 while ((note = find_reg_note (incr, REG_DEAD, NULL_RTX)) != NULL_RTX)
6875 remove_note (incr, note);
6876 if (XEXP (note, 0) != incr_reg)
6877 CLEAR_REGNO_REG_SET (pbi->reg_live, REGNO (XEXP (note, 0)));
6880 PUT_CODE (incr, NOTE);
6881 NOTE_LINE_NUMBER (incr) = NOTE_INSN_DELETED;
6882 NOTE_SOURCE_FILE (incr) = 0;
6885 if (regno >= FIRST_PSEUDO_REGISTER)
6887 /* Count an extra reference to the reg. When a reg is
6888 incremented, spilling it is worse, so we want to make
6889 that less likely. */
6890 REG_FREQ (regno) += REG_FREQ_FROM_BB (pbi->bb);
6892 /* Count the increment as a setting of the register,
6893 even though it isn't a SET in rtl. */
6894 REG_N_SETS (regno)++;
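/* Illustrative sketch of the q = p + size case above (hypothetical regs):

       ... (mem (reg p)) ...                          ; INSN
       (set (reg q) (plus (reg p) (const_int 4)))     ; INCR

   becomes

       (set (reg q) (reg p))                          ; emitted before INSN
       ... (mem (post_inc (reg q))) ...               ; INSN, plus REG_INC note
       (set (reg q) (reg q))                          ; INCR, deleted as a no-op
*/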
6898 /* X is a MEM found in INSN. See if we can convert it into an auto-increment
6899 reference. */
6901 static void
6902 find_auto_inc (pbi, x, insn)
6903 struct propagate_block_info *pbi;
6904 rtx x;
6905 rtx insn;
6907 rtx addr = XEXP (x, 0);
6908 HOST_WIDE_INT offset = 0;
6909 rtx set, y, incr, inc_val;
6910 int regno;
6911 int size = GET_MODE_SIZE (GET_MODE (x));
6913 if (GET_CODE (insn) == JUMP_INSN)
6914 return;
6916 /* Here we detect use of an index register which might be good for
6917 postincrement, postdecrement, preincrement, or predecrement. */
6919 if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
6920 offset = INTVAL (XEXP (addr, 1)), addr = XEXP (addr, 0);
6922 if (GET_CODE (addr) != REG)
6923 return;
6925 regno = REGNO (addr);
6927 /* Is the next use an increment that might make auto-increment? */
6928 incr = pbi->reg_next_use[regno];
6929 if (incr == 0 || BLOCK_NUM (incr) != BLOCK_NUM (insn))
6930 return;
6931 set = single_set (incr);
6932 if (set == 0 || GET_CODE (set) != SET)
6933 return;
6934 y = SET_SRC (set);
6936 if (GET_CODE (y) != PLUS)
6937 return;
6939 if (REG_P (XEXP (y, 0)) && REGNO (XEXP (y, 0)) == REGNO (addr))
6940 inc_val = XEXP (y, 1);
6941 else if (REG_P (XEXP (y, 1)) && REGNO (XEXP (y, 1)) == REGNO (addr))
6942 inc_val = XEXP (y, 0);
6943 else
6944 return;
6946 if (GET_CODE (inc_val) == CONST_INT)
6948 if (HAVE_POST_INCREMENT
6949 && (INTVAL (inc_val) == size && offset == 0))
6950 attempt_auto_inc (pbi, gen_rtx_POST_INC (Pmode, addr), insn, x,
6951 incr, addr);
6952 else if (HAVE_POST_DECREMENT
6953 && (INTVAL (inc_val) == -size && offset == 0))
6954 attempt_auto_inc (pbi, gen_rtx_POST_DEC (Pmode, addr), insn, x,
6955 incr, addr);
6956 else if (HAVE_PRE_INCREMENT
6957 && (INTVAL (inc_val) == size && offset == size))
6958 attempt_auto_inc (pbi, gen_rtx_PRE_INC (Pmode, addr), insn, x,
6959 incr, addr);
6960 else if (HAVE_PRE_DECREMENT
6961 && (INTVAL (inc_val) == -size && offset == -size))
6962 attempt_auto_inc (pbi, gen_rtx_PRE_DEC (Pmode, addr), insn, x,
6963 incr, addr);
6964 else if (HAVE_POST_MODIFY_DISP && offset == 0)
6965 attempt_auto_inc (pbi, gen_rtx_POST_MODIFY (Pmode, addr,
6966 gen_rtx_PLUS (Pmode,
6967 addr,
6968 inc_val)),
6969 insn, x, incr, addr);
6971 else if (GET_CODE (inc_val) == REG
6972 && ! reg_set_between_p (inc_val, PREV_INSN (insn),
6973 NEXT_INSN (incr)))
6976 if (HAVE_POST_MODIFY_REG && offset == 0)
6977 attempt_auto_inc (pbi, gen_rtx_POST_MODIFY (Pmode, addr,
6978 gen_rtx_PLUS (Pmode,
6979 addr,
6980 inc_val)),
6981 insn, x, incr, addr);
6985 #endif /* AUTO_INC_DEC */
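/* Illustrative summary (SImode, size 4, hypothetical target): for a use
   of (mem:SI (reg 125)) whose next use of reg 125 adds a constant,
   find_auto_inc tries, subject to target support:

       increment +4, offset  0  ->  POST_INC
       increment -4, offset  0  ->  POST_DEC
       increment +4, offset +4  ->  PRE_INC
       increment -4, offset -4  ->  PRE_DEC
       increment by a register  ->  POST_MODIFY  */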
6987 static void
6988 mark_used_reg (pbi, reg, cond, insn)
6989 struct propagate_block_info *pbi;
6990 rtx reg;
6991 rtx cond ATTRIBUTE_UNUSED;
6992 rtx insn;
6994 unsigned int regno_first, regno_last, i;
6995 int some_was_live, some_was_dead, some_not_set;
6997 regno_last = regno_first = REGNO (reg);
6998 if (regno_first < FIRST_PSEUDO_REGISTER)
6999 regno_last += HARD_REGNO_NREGS (regno_first, GET_MODE (reg)) - 1;
7001 /* Find out if any of this register is live after this instruction. */
7002 some_was_live = some_was_dead = 0;
7003 for (i = regno_first; i <= regno_last; ++i)
7005 int needed_regno = REGNO_REG_SET_P (pbi->reg_live, i);
7006 some_was_live |= needed_regno;
7007 some_was_dead |= ! needed_regno;
7010 /* Find out if any of the register was set this insn. */
7011 some_not_set = 0;
7012 for (i = regno_first; i <= regno_last; ++i)
7013 some_not_set |= ! REGNO_REG_SET_P (pbi->new_set, i);
7015 if (pbi->flags & (PROP_LOG_LINKS | PROP_AUTOINC))
7017 /* Record where each reg is used, so when the reg is set we know
7018 the next insn that uses it. */
7019 pbi->reg_next_use[regno_first] = insn;
7022 if (pbi->flags & PROP_REG_INFO)
7024 if (regno_first < FIRST_PSEUDO_REGISTER)
7026 /* If this is a register we are going to try to eliminate,
7027 don't mark it live here. If we are successful in
7028 eliminating it, it need not be live unless it is used for
7029 pseudos, in which case it will have been set live when it
7030 was allocated to the pseudos. If the register will not
7031 be eliminated, reload will set it live at that point.
7033 Otherwise, record that this function uses this register. */
7034 /* ??? The PPC backend tries to "eliminate" the pic
7035 register to itself. This should be fixed. In the
7036 meantime, hack around it. */
7038 if (! (TEST_HARD_REG_BIT (elim_reg_set, regno_first)
7039 && (regno_first == FRAME_POINTER_REGNUM
7040 || regno_first == ARG_POINTER_REGNUM)))
7041 for (i = regno_first; i <= regno_last; ++i)
7042 regs_ever_live[i] = 1;
7044 else
7046 /* Keep track of which basic block each reg appears in. */
7048 register int blocknum = pbi->bb->index;
7049 if (REG_BASIC_BLOCK (regno_first) == REG_BLOCK_UNKNOWN)
7050 REG_BASIC_BLOCK (regno_first) = blocknum;
7051 else if (REG_BASIC_BLOCK (regno_first) != blocknum)
7052 REG_BASIC_BLOCK (regno_first) = REG_BLOCK_GLOBAL;
7054 /* Count (weighted) number of uses of each reg. */
7055 REG_FREQ (regno_first) += REG_FREQ_FROM_BB (pbi->bb);
7056 REG_N_REFS (regno_first)++;
7060 /* Record and count the insns in which a reg dies. If it is used in
7061 this insn and was dead below the insn then it dies in this insn.
7062 If it was set in this insn, we do not make a REG_DEAD note;
7063 likewise if we already made such a note. */
7064 if ((pbi->flags & (PROP_DEATH_NOTES | PROP_REG_INFO))
7065 && some_was_dead
7066 && some_not_set)
7068 /* Check for the case where the register dying partially
7069 overlaps the register set by this insn. */
7070 if (regno_first != regno_last)
7071 for (i = regno_first; i <= regno_last; ++i)
7072 some_was_live |= REGNO_REG_SET_P (pbi->new_set, i);
7074 /* If none of the words in X is needed, make a REG_DEAD note.
7075 Otherwise, we must make partial REG_DEAD notes. */
7076 if (! some_was_live)
7078 if ((pbi->flags & PROP_DEATH_NOTES)
7079 && ! find_regno_note (insn, REG_DEAD, regno_first))
7080 REG_NOTES (insn)
7081 = alloc_EXPR_LIST (REG_DEAD, reg, REG_NOTES (insn));
7083 if (pbi->flags & PROP_REG_INFO)
7084 REG_N_DEATHS (regno_first)++;
7086 else
7088 /* Don't make a REG_DEAD note for a part of a register
7089 that is set in the insn. */
7090 for (i = regno_first; i <= regno_last; ++i)
7091 if (! REGNO_REG_SET_P (pbi->reg_live, i)
7092 && ! dead_or_set_regno_p (insn, i))
7093 REG_NOTES (insn)
7094 = alloc_EXPR_LIST (REG_DEAD,
7095 gen_rtx_REG (reg_raw_mode[i], i),
7096 REG_NOTES (insn));
7100 /* Mark the register as being live. */
7101 for (i = regno_first; i <= regno_last; ++i)
7103 SET_REGNO_REG_SET (pbi->reg_live, i);
7105 #ifdef HAVE_conditional_execution
7106 /* If this is a conditional use, record that fact. If it is later
7107 conditionally set, we'll know to kill the register. */
7108 if (cond != NULL_RTX)
7110 splay_tree_node node;
7111 struct reg_cond_life_info *rcli;
7112 rtx ncond;
7114 if (some_was_live)
7116 node = splay_tree_lookup (pbi->reg_cond_dead, i);
7117 if (node == NULL)
7119 /* The register was unconditionally live previously.
7120 No need to do anything. */
7122 else
7124 /* The register was conditionally live previously.
7125 Subtract the new life cond from the old death cond. */
7126 rcli = (struct reg_cond_life_info *) node->value;
7127 ncond = rcli->condition;
7128 ncond = and_reg_cond (ncond, not_reg_cond (cond), 1);
7130 /* If the register is now unconditionally live,
7131 remove the entry in the splay_tree. */
7132 if (ncond == const0_rtx)
7133 splay_tree_remove (pbi->reg_cond_dead, i);
7134 else
7136 rcli->condition = ncond;
7137 SET_REGNO_REG_SET (pbi->reg_cond_reg,
7138 REGNO (XEXP (cond, 0)));
7142 else
7144 /* The register was not previously live at all. Record
7145 the condition under which it is still dead. */
7146 rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
7147 rcli->condition = not_reg_cond (cond);
7148 rcli->stores = const0_rtx;
7149 rcli->orig_condition = const0_rtx;
7150 splay_tree_insert (pbi->reg_cond_dead, i,
7151 (splay_tree_value) rcli);
7153 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));
7156 else if (some_was_live)
7158 /* The register may have been conditionally live previously, but
7159 is now unconditionally live. Remove it from the conditionally
7160 dead list, so that a conditional set won't cause us to think
7161 it dead. */
7162 splay_tree_remove (pbi->reg_cond_dead, i);
7164 #endif
7168 /* Scan expression X and store a 1-bit in NEW_LIVE for each reg it uses.
7169 This is done assuming the registers needed from X are those that
7170 have 1-bits in PBI->REG_LIVE.
7172 INSN is the containing instruction. If INSN is dead, this function
7173 is not called. */
7175 static void
7176 mark_used_regs (pbi, x, cond, insn)
7177 struct propagate_block_info *pbi;
7178 rtx x, cond, insn;
7180 register RTX_CODE code;
7181 register int regno;
7182 int flags = pbi->flags;
7184 retry:
7185 code = GET_CODE (x);
7186 switch (code)
7188 case LABEL_REF:
7189 case SYMBOL_REF:
7190 case CONST_INT:
7191 case CONST:
7192 case CONST_DOUBLE:
7193 case PC:
7194 case ADDR_VEC:
7195 case ADDR_DIFF_VEC:
7196 return;
7198 #ifdef HAVE_cc0
7199 case CC0:
7200 pbi->cc0_live = 1;
7201 return;
7202 #endif
7204 case CLOBBER:
7205 /* If we are clobbering a MEM, mark any registers inside the address
7206 as being used. */
7207 if (GET_CODE (XEXP (x, 0)) == MEM)
7208 mark_used_regs (pbi, XEXP (XEXP (x, 0), 0), cond, insn);
7209 return;
7211 case MEM:
7212 /* Don't bother watching stores to mems if this is not the
7213 final pass. We'll not be deleting dead stores this round. */
7214 if (optimize && (flags & PROP_SCAN_DEAD_CODE))
7216 /* Invalidate the data for the last MEM stored, but only if MEM is
7217 something that can be stored into. */
7218 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
7219 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
7220 /* Needn't clear the memory set list. */
7222 else
7224 rtx temp = pbi->mem_set_list;
7225 rtx prev = NULL_RTX;
7226 rtx next;
7228 while (temp)
7230 next = XEXP (temp, 1);
7231 if (anti_dependence (XEXP (temp, 0), x))
7233 /* Splice temp out of the list. */
7234 if (prev)
7235 XEXP (prev, 1) = next;
7236 else
7237 pbi->mem_set_list = next;
7238 free_EXPR_LIST_node (temp);
7239 pbi->mem_set_list_len--;
7241 else
7242 prev = temp;
7243 temp = next;
7247 /* If the memory reference had embedded side effects (autoincrement
7248 address modes), then we may need to kill some entries on the
7249 memory set list. */
7250 if (insn)
7251 invalidate_mems_from_autoinc (pbi, insn);
7254 #ifdef AUTO_INC_DEC
7255 if (flags & PROP_AUTOINC)
7256 find_auto_inc (pbi, x, insn);
7257 #endif
7258 break;
7260 case SUBREG:
7261 #ifdef CLASS_CANNOT_CHANGE_MODE
7262 if (GET_CODE (SUBREG_REG (x)) == REG
7263 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER
7264 && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (x),
7265 GET_MODE (SUBREG_REG (x))))
7266 REG_CHANGES_MODE (REGNO (SUBREG_REG (x))) = 1;
7267 #endif
7269 /* While we're here, optimize this case. */
7270 x = SUBREG_REG (x);
7271 if (GET_CODE (x) != REG)
7272 goto retry;
7273 /* Fall through. */
7275 case REG:
7276 /* See a register other than being set => mark it as needed. */
7277 mark_used_reg (pbi, x, cond, insn);
7278 return;
7280 case SET:
7282 register rtx testreg = SET_DEST (x);
7283 int mark_dest = 0;
7285 /* If storing into MEM, don't show it as being used. But do
7286 show the address as being used. */
7287 if (GET_CODE (testreg) == MEM)
7289 #ifdef AUTO_INC_DEC
7290 if (flags & PROP_AUTOINC)
7291 find_auto_inc (pbi, testreg, insn);
7292 #endif
7293 mark_used_regs (pbi, XEXP (testreg, 0), cond, insn);
7294 mark_used_regs (pbi, SET_SRC (x), cond, insn);
7295 return;
7298 /* Storing in STRICT_LOW_PART is like storing in a reg
7299 in that this SET might be dead, so ignore it in TESTREG,
7300 but in some other ways it is like using the reg.
7302 Storing in a SUBREG or a bit field is like storing the entire
7303 register in that if the register's value is not used
7304 then this SET is not needed. */
7305 while (GET_CODE (testreg) == STRICT_LOW_PART
7306 || GET_CODE (testreg) == ZERO_EXTRACT
7307 || GET_CODE (testreg) == SIGN_EXTRACT
7308 || GET_CODE (testreg) == SUBREG)
7310 #ifdef CLASS_CANNOT_CHANGE_MODE
7311 if (GET_CODE (testreg) == SUBREG
7312 && GET_CODE (SUBREG_REG (testreg)) == REG
7313 && REGNO (SUBREG_REG (testreg)) >= FIRST_PSEUDO_REGISTER
7314 && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (SUBREG_REG (testreg)),
7315 GET_MODE (testreg)))
7316 REG_CHANGES_MODE (REGNO (SUBREG_REG (testreg))) = 1;
7317 #endif
7319 /* Modifying a single register in an alternate mode
7320 does not use any of the old value. But these other
7321 ways of storing in a register do use the old value. */
7322 if (GET_CODE (testreg) == SUBREG
7323 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
7325 else
7326 mark_dest = 1;
7328 testreg = XEXP (testreg, 0);
7331 /* If this is a store into a register or group of registers,
7332 recursively scan the value being stored. */
7334 if ((GET_CODE (testreg) == PARALLEL
7335 && GET_MODE (testreg) == BLKmode)
7336 || (GET_CODE (testreg) == REG
7337 && (regno = REGNO (testreg),
7338 ! (regno == FRAME_POINTER_REGNUM
7339 && (! reload_completed || frame_pointer_needed)))
7340 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
7341 && ! (regno == HARD_FRAME_POINTER_REGNUM
7342 && (! reload_completed || frame_pointer_needed))
7343 #endif
7344 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
7345 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
7346 #endif
7349 if (mark_dest)
7350 mark_used_regs (pbi, SET_DEST (x), cond, insn);
7351 mark_used_regs (pbi, SET_SRC (x), cond, insn);
7352 return;
7355 break;
7357 case ASM_OPERANDS:
7358 case UNSPEC_VOLATILE:
7359 case TRAP_IF:
7360 case ASM_INPUT:
7362 /* Traditional and volatile asm instructions must be considered to use
7363 and clobber all hard registers, all pseudo-registers and all of
7364 memory. So must TRAP_IF and UNSPEC_VOLATILE operations.
7366 Consider for instance a volatile asm that changes the fpu rounding
7367 mode. An insn should not be moved across this even if it only uses
7368 pseudo-regs because it might give an incorrectly rounded result.
7370 ?!? Unfortunately, marking all hard registers as live causes massive
7371 problems for the register allocator and marking all pseudos as live
7372 creates mountains of uninitialized variable warnings.
7374 So for now, just clear the memory set list and mark any regs
7375 we can find in ASM_OPERANDS as used. */
7376 if (code != ASM_OPERANDS || MEM_VOLATILE_P (x))
7378 free_EXPR_LIST_list (&pbi->mem_set_list);
7379 pbi->mem_set_list_len = 0;
7382 /* For all ASM_OPERANDS, we must traverse the vector of input operands.
7383 We cannot just fall through here since then we would be confused
7384 by the ASM_INPUT rtx inside ASM_OPERANDS, which does not indicate
7385 a traditional asm, unlike its normal usage. */
7386 if (code == ASM_OPERANDS)
7388 int j;
7390 for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
7391 mark_used_regs (pbi, ASM_OPERANDS_INPUT (x, j), cond, insn);
7393 break;
7396 case COND_EXEC:
7397 if (cond != NULL_RTX)
7398 abort ();
7400 mark_used_regs (pbi, COND_EXEC_TEST (x), NULL_RTX, insn);
7402 cond = COND_EXEC_TEST (x);
7403 x = COND_EXEC_CODE (x);
7404 goto retry;
7406 case PHI:
7407 /* We _do_not_ want to scan operands of phi nodes. Operands of
7408 a phi function are evaluated only when control reaches this
7409 block along a particular edge. Therefore, regs that appear
7410 as arguments to phi should not be added to the global live at
7411 start. */
7412 return;
7414 default:
7415 break;
7418 /* Recursively scan the operands of this expression. */
7421 register const char *fmt = GET_RTX_FORMAT (code);
7422 register int i;
7424 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7426 if (fmt[i] == 'e')
7428 /* Tail recursive case: save a function call level. */
7429 if (i == 0)
7431 x = XEXP (x, 0);
7432 goto retry;
7434 mark_used_regs (pbi, XEXP (x, i), cond, insn);
7436 else if (fmt[i] == 'E')
7438 register int j;
7439 for (j = 0; j < XVECLEN (x, i); j++)
7440 mark_used_regs (pbi, XVECEXP (x, i, j), cond, insn);
7446 #ifdef AUTO_INC_DEC
7448 static int
7449 try_pre_increment_1 (pbi, insn)
7450 struct propagate_block_info *pbi;
7451 rtx insn;
7453 /* Find the next use of this reg. If in same basic block,
7454 make it do pre-increment or pre-decrement if appropriate. */
7455 rtx x = single_set (insn);
7456 HOST_WIDE_INT amount = ((GET_CODE (SET_SRC (x)) == PLUS ? 1 : -1)
7457 * INTVAL (XEXP (SET_SRC (x), 1)));
7458 int regno = REGNO (SET_DEST (x));
7459 rtx y = pbi->reg_next_use[regno];
7460 if (y != 0
7461 && SET_DEST (x) != stack_pointer_rtx
7462 && BLOCK_NUM (y) == BLOCK_NUM (insn)
7463 /* Don't do this if the reg dies, or gets set in y; a standard addressing
7464 mode would be better. */
7465 && ! dead_or_set_p (y, SET_DEST (x))
7466 && try_pre_increment (y, SET_DEST (x), amount))
7468 /* We have found a suitable auto-increment and already changed
7469 insn Y to do it. So flush this increment instruction. */
7470 propagate_block_delete_insn (pbi->bb, insn);
7472 /* Count a reference to this reg for the increment insn we are
7473 deleting. When a reg is incremented, spilling it is worse,
7474 so we want to make that less likely. */
7475 if (regno >= FIRST_PSEUDO_REGISTER)
7477 REG_FREQ (regno) += REG_FREQ_FROM_BB (pbi->bb);
7478 REG_N_SETS (regno)++;
7481 /* Flush any remembered memories depending on the value of
7482 the incremented register. */
7483 invalidate_mems_from_set (pbi, SET_DEST (x));
7485 return 1;
7487 return 0;
7490 /* Try to change INSN so that it does pre-increment or pre-decrement
7491 addressing on register REG in order to add AMOUNT to REG.
7492 AMOUNT is negative for pre-decrement.
7493 Returns 1 if the change could be made.
7494 This checks all about the validity of the result of modifying INSN. */
7496 static int
7497 try_pre_increment (insn, reg, amount)
7498 rtx insn, reg;
7499 HOST_WIDE_INT amount;
7501 register rtx use;
7503 /* Nonzero if we can try to make a pre-increment or pre-decrement.
7504 For example, addl $4,r1; movl (r1),... can become movl +(r1),... */
7505 int pre_ok = 0;
7506 /* Nonzero if we can try to make a post-increment or post-decrement.
7507 For example, addl $4,r1; movl -4(r1),... can become movl (r1)+,...
7508 It is possible for both PRE_OK and POST_OK to be nonzero if the machine
7509 supports both pre-inc and post-inc, or both pre-dec and post-dec. */
7510 int post_ok = 0;
7512 /* Nonzero if the opportunity actually requires post-inc or post-dec. */
7513 int do_post = 0;
7515 /* From the sign of increment, see which possibilities are conceivable
7516 on this target machine. */
7517 if (HAVE_PRE_INCREMENT && amount > 0)
7518 pre_ok = 1;
7519 if (HAVE_POST_INCREMENT && amount > 0)
7520 post_ok = 1;
7522 if (HAVE_PRE_DECREMENT && amount < 0)
7523 pre_ok = 1;
7524 if (HAVE_POST_DECREMENT && amount < 0)
7525 post_ok = 1;
7527 if (! (pre_ok || post_ok))
7528 return 0;
7530 /* It is not safe to add a side effect to a jump insn
7531 because if the incremented register is spilled and must be reloaded
7532 there would be no way to store the incremented value back in memory. */
7534 if (GET_CODE (insn) == JUMP_INSN)
7535 return 0;
7537 use = 0;
7538 if (pre_ok)
7539 use = find_use_as_address (PATTERN (insn), reg, 0);
7540 if (post_ok && (use == 0 || use == (rtx) 1))
7542 use = find_use_as_address (PATTERN (insn), reg, -amount);
7543 do_post = 1;
7546 if (use == 0 || use == (rtx) 1)
7547 return 0;
7549 if (GET_MODE_SIZE (GET_MODE (use)) != (amount > 0 ? amount : - amount))
7550 return 0;
7552 /* See if this combination of instruction and addressing mode exists. */
7553 if (! validate_change (insn, &XEXP (use, 0),
7554 gen_rtx_fmt_e (amount > 0
7555 ? (do_post ? POST_INC : PRE_INC)
7556 : (do_post ? POST_DEC : PRE_DEC),
7557 Pmode, reg), 0))
7558 return 0;
7560 /* Record that this insn now has an implicit side effect on X. */
7561 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, reg, REG_NOTES (insn));
7562 return 1;
7565 #endif /* AUTO_INC_DEC */
7567 /* Find the place in the rtx X where REG is used as a memory address.
7568 Return the MEM rtx that so uses it.
7569 If PLUSCONST is nonzero, search instead for a memory address equivalent to
7570 (plus REG (const_int PLUSCONST)).
7572 If such an address does not appear, return 0.
7573 If REG appears more than once, or is used other than in such an address,
7574 return (rtx)1. */
7577 find_use_as_address (x, reg, plusconst)
7578 register rtx x;
7579 rtx reg;
7580 HOST_WIDE_INT plusconst;
7582 enum rtx_code code = GET_CODE (x);
7583 const char *fmt = GET_RTX_FORMAT (code);
7584 register int i;
7585 register rtx value = 0;
7586 register rtx tem;
7588 if (code == MEM && XEXP (x, 0) == reg && plusconst == 0)
7589 return x;
7591 if (code == MEM && GET_CODE (XEXP (x, 0)) == PLUS
7592 && XEXP (XEXP (x, 0), 0) == reg
7593 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
7594 && INTVAL (XEXP (XEXP (x, 0), 1)) == plusconst)
7595 return x;
7597 if (code == SIGN_EXTRACT || code == ZERO_EXTRACT)
7599 /* If REG occurs inside a MEM used in a bit-field reference,
7600 that is unacceptable. */
7601 if (find_use_as_address (XEXP (x, 0), reg, 0) != 0)
7602 return (rtx) (HOST_WIDE_INT) 1;
7605 if (x == reg)
7606 return (rtx) (HOST_WIDE_INT) 1;
7608 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7610 if (fmt[i] == 'e')
7612 tem = find_use_as_address (XEXP (x, i), reg, plusconst);
7613 if (value == 0)
7614 value = tem;
7615 else if (tem != 0)
7616 return (rtx) (HOST_WIDE_INT) 1;
7618 else if (fmt[i] == 'E')
7620 register int j;
7621 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7623 tem = find_use_as_address (XVECEXP (x, i, j), reg, plusconst);
7624 if (value == 0)
7625 value = tem;
7626 else if (tem != 0)
7627 return (rtx) (HOST_WIDE_INT) 1;
7632 return value;
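/* Illustrative sketch (hypothetical registers): with PLUSCONST == 0,
   find_use_as_address applied to
       (set (reg 126) (mem:SI (reg 127)))
   with REG == (reg 127) returns the MEM, whereas applied to
       (set (reg 127) (plus (reg 127) (reg 126)))
   it returns (rtx) 1, since the register is used other than as an
   address.  */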
7635 /* Write information about registers and basic blocks into FILE.
7636 This is part of making a debugging dump. */
7638 void
7639 dump_regset (r, outf)
7640 regset r;
7641 FILE *outf;
7643 int i;
7644 if (r == NULL)
7646 fputs (" (nil)", outf);
7647 return;
7650 EXECUTE_IF_SET_IN_REG_SET (r, 0, i,
7652 fprintf (outf, " %d", i);
7653 if (i < FIRST_PSEUDO_REGISTER)
7654 fprintf (outf, " [%s]",
7655 reg_names[i]);
7659 /* Print a human-readable representation of R on the standard error
7660 stream. This function is designed to be used from within the
7661 debugger. */
7663 void
7664 debug_regset (r)
7665 regset r;
7667 dump_regset (r, stderr);
7668 putc ('\n', stderr);
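/* Usage sketch: from a debugger one can evaluate something like
   "call debug_regset (bb->global_live_at_start)" to print a block's
   live-in set on stderr; dump_regset writes the same representation to
   any stdio stream supplied by the caller.  */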
7671 void
7672 dump_flow_info (file)
7673 FILE *file;
7675 register int i;
7676 static const char * const reg_class_names[] = REG_CLASS_NAMES;
7678 fprintf (file, "%d registers.\n", max_regno);
7679 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
7680 if (REG_N_REFS (i))
7682 enum reg_class class, altclass;
7683 fprintf (file, "\nRegister %d used %d times across %d insns",
7684 i, REG_N_REFS (i), REG_LIVE_LENGTH (i));
7685 if (REG_BASIC_BLOCK (i) >= 0)
7686 fprintf (file, " in block %d", REG_BASIC_BLOCK (i));
7687 if (REG_N_SETS (i))
7688 fprintf (file, "; set %d time%s", REG_N_SETS (i),
7689 (REG_N_SETS (i) == 1) ? "" : "s");
7690 if (REG_USERVAR_P (regno_reg_rtx[i]))
7691 fprintf (file, "; user var");
7692 if (REG_N_DEATHS (i) != 1)
7693 fprintf (file, "; dies in %d places", REG_N_DEATHS (i));
7694 if (REG_N_CALLS_CROSSED (i) == 1)
7695 fprintf (file, "; crosses 1 call");
7696 else if (REG_N_CALLS_CROSSED (i))
7697 fprintf (file, "; crosses %d calls", REG_N_CALLS_CROSSED (i));
7698 if (PSEUDO_REGNO_BYTES (i) != UNITS_PER_WORD)
7699 fprintf (file, "; %d bytes", PSEUDO_REGNO_BYTES (i));
7700 class = reg_preferred_class (i);
7701 altclass = reg_alternate_class (i);
7702 if (class != GENERAL_REGS || altclass != ALL_REGS)
7704 if (altclass == ALL_REGS || class == ALL_REGS)
7705 fprintf (file, "; pref %s", reg_class_names[(int) class]);
7706 else if (altclass == NO_REGS)
7707 fprintf (file, "; %s or none", reg_class_names[(int) class]);
7708 else
7709 fprintf (file, "; pref %s, else %s",
7710 reg_class_names[(int) class],
7711 reg_class_names[(int) altclass]);
7713 if (REG_POINTER (regno_reg_rtx[i]))
7714 fprintf (file, "; pointer");
7715 fprintf (file, ".\n");
7718 fprintf (file, "\n%d basic blocks, %d edges.\n", n_basic_blocks, n_edges);
7719 for (i = 0; i < n_basic_blocks; i++)
7721 register basic_block bb = BASIC_BLOCK (i);
7722 register edge e;
7724 fprintf (file, "\nBasic block %d: first insn %d, last %d, loop_depth %d, count ",
7725 i, INSN_UID (bb->head), INSN_UID (bb->end), bb->loop_depth);
7726 fprintf (file, HOST_WIDEST_INT_PRINT_DEC, (HOST_WIDEST_INT) bb->count);
7727 fprintf (file, ", freq %i.\n", bb->frequency);
7729 fprintf (file, "Predecessors: ");
7730 for (e = bb->pred; e; e = e->pred_next)
7731 dump_edge_info (file, e, 0);
7733 fprintf (file, "\nSuccessors: ");
7734 for (e = bb->succ; e; e = e->succ_next)
7735 dump_edge_info (file, e, 1);
7737 fprintf (file, "\nRegisters live at start:");
7738 dump_regset (bb->global_live_at_start, file);
7740 fprintf (file, "\nRegisters live at end:");
7741 dump_regset (bb->global_live_at_end, file);
7743 putc ('\n', file);
7746 putc ('\n', file);
7749 void
7750 debug_flow_info ()
7752 dump_flow_info (stderr);
7755 void
7756 dump_edge_info (file, e, do_succ)
7757 FILE *file;
7758 edge e;
7759 int do_succ;
7761 basic_block side = (do_succ ? e->dest : e->src);
7763 if (side == ENTRY_BLOCK_PTR)
7764 fputs (" ENTRY", file);
7765 else if (side == EXIT_BLOCK_PTR)
7766 fputs (" EXIT", file);
7767 else
7768 fprintf (file, " %d", side->index);
7770 if (e->probability)
7771 fprintf (file, " [%.1f%%] ", e->probability * 100.0 / REG_BR_PROB_BASE);
7773 if (e->count)
7775 fprintf (file, " count:");
7776 fprintf (file, HOST_WIDEST_INT_PRINT_DEC, (HOST_WIDEST_INT) e->count);
7779 if (e->flags)
7781 static const char * const bitnames[] = {
7782 "fallthru", "crit", "ab", "abcall", "eh", "fake"
7784 int comma = 0;
7785 int i, flags = e->flags;
7787 fputc (' ', file);
7788 fputc ('(', file);
7789 for (i = 0; flags; i++)
7790 if (flags & (1 << i))
7792 flags &= ~(1 << i);
7794 if (comma)
7795 fputc (',', file);
7796 if (i < (int) ARRAY_SIZE (bitnames))
7797 fputs (bitnames[i], file);
7798 else
7799 fprintf (file, "%d", i);
7800 comma = 1;
7802 fputc (')', file);
7806 /* Print out one basic block with live information at start and end. */
7808 void
7809 dump_bb (bb, outf)
7810 basic_block bb;
7811 FILE *outf;
7813 rtx insn;
7814 rtx last;
7815 edge e;
7817 fprintf (outf, ";; Basic block %d, loop depth %d, count ",
7818 bb->index, bb->loop_depth);
7819 fprintf (outf, HOST_WIDEST_INT_PRINT_DEC, (HOST_WIDEST_INT) bb->count);
7820 putc ('\n', outf);
7822 fputs (";; Predecessors: ", outf);
7823 for (e = bb->pred; e; e = e->pred_next)
7824 dump_edge_info (outf, e, 0);
7825 putc ('\n', outf);
7827 fputs (";; Registers live at start:", outf);
7828 dump_regset (bb->global_live_at_start, outf);
7829 putc ('\n', outf);
7831 for (insn = bb->head, last = NEXT_INSN (bb->end);
7832 insn != last;
7833 insn = NEXT_INSN (insn))
7834 print_rtl_single (outf, insn);
7836 fputs (";; Registers live at end:", outf);
7837 dump_regset (bb->global_live_at_end, outf);
7838 putc ('\n', outf);
7840 fputs (";; Successors: ", outf);
7841 for (e = bb->succ; e; e = e->succ_next)
7842 dump_edge_info (outf, e, 1);
7843 putc ('\n', outf);
7846 void
7847 debug_bb (bb)
7848 basic_block bb;
7850 dump_bb (bb, stderr);
7853 void
7854 debug_bb_n (n)
7855 int n;
7857 dump_bb (BASIC_BLOCK (n), stderr);
7860 /* Like print_rtl, but also print out live information for the start of each
7861 basic block. */
7863 void
7864 print_rtl_with_bb (outf, rtx_first)
7865 FILE *outf;
7866 rtx rtx_first;
7868 register rtx tmp_rtx;
7870 if (rtx_first == 0)
7871 fprintf (outf, "(nil)\n");
7872 else
7874 int i;
7875 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
7876 int max_uid = get_max_uid ();
7877 basic_block *start = (basic_block *)
7878 xcalloc (max_uid, sizeof (basic_block));
7879 basic_block *end = (basic_block *)
7880 xcalloc (max_uid, sizeof (basic_block));
7881 enum bb_state *in_bb_p = (enum bb_state *)
7882 xcalloc (max_uid, sizeof (enum bb_state));
7884 for (i = n_basic_blocks - 1; i >= 0; i--)
7886 basic_block bb = BASIC_BLOCK (i);
7887 rtx x;
7889 start[INSN_UID (bb->head)] = bb;
7890 end[INSN_UID (bb->end)] = bb;
7891 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
7893 enum bb_state state = IN_MULTIPLE_BB;
7894 if (in_bb_p[INSN_UID (x)] == NOT_IN_BB)
7895 state = IN_ONE_BB;
7896 in_bb_p[INSN_UID (x)] = state;
7898 if (x == bb->end)
7899 break;
7903 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
7905 int did_output;
7906 basic_block bb;
7908 if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
7910 fprintf (outf, ";; Start of basic block %d, registers live:",
7911 bb->index);
7912 dump_regset (bb->global_live_at_start, outf);
7913 putc ('\n', outf);
7916 if (in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB
7917 && GET_CODE (tmp_rtx) != NOTE
7918 && GET_CODE (tmp_rtx) != BARRIER)
7919 fprintf (outf, ";; Insn is not within a basic block\n");
7920 else if (in_bb_p[INSN_UID (tmp_rtx)] == IN_MULTIPLE_BB)
7921 fprintf (outf, ";; Insn is in multiple basic blocks\n");
7923 did_output = print_rtl_single (outf, tmp_rtx);
7925 if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
7927 fprintf (outf, ";; End of basic block %d, registers live:\n",
7928 bb->index);
7929 dump_regset (bb->global_live_at_end, outf);
7930 putc ('\n', outf);
7933 if (did_output)
7934 putc ('\n', outf);
7937 free (start);
7938 free (end);
7939 free (in_bb_p);
7942 if (current_function_epilogue_delay_list != 0)
7944 fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
7945 for (tmp_rtx = current_function_epilogue_delay_list; tmp_rtx != 0;
7946 tmp_rtx = XEXP (tmp_rtx, 1))
7947 print_rtl_single (outf, XEXP (tmp_rtx, 0));
7951 /* Dump the rtl into the current debugging dump file, then abort. */
7953 static void
7954 print_rtl_and_abort_fcn (file, line, function)
7955 const char *file;
7956 int line;
7957 const char *function;
7959 if (rtl_dump_file)
7961 print_rtl_with_bb (rtl_dump_file, get_insns ());
7962 fclose (rtl_dump_file);
7965 fancy_abort (file, line, function);
7968 /* Recompute register set/reference counts immediately prior to register
7969 allocation.
7971 This avoids problems with set/reference counts changing to/from values
7972 which have special meanings to the register allocators.
7974 Additionally, the reference counts are the primary component used by the
7975 register allocators to prioritize pseudos for allocation to hard regs.
7976 More accurate reference counts generally lead to better register allocation.
7978 F is the first insn to be scanned.
7980 LOOP_STEP denotes how much loop_depth should be incremented per
7981 loop nesting level in order to increase the ref count more for
7982 references in a loop.
7984 It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and
7985 possibly other information which is used by the register allocators. */
7987 void
7988 recompute_reg_usage (f, loop_step)
7989 rtx f ATTRIBUTE_UNUSED;
7990 int loop_step ATTRIBUTE_UNUSED;
7992 allocate_reg_life_data ();
7993 update_life_info (NULL, UPDATE_LIFE_LOCAL, PROP_REG_INFO);
7996 /* Optionally removes all the REG_DEAD and REG_UNUSED notes from a set of
7997 blocks. If BLOCKS is NULL, assume the universal set. Returns a count
7998 of the number of registers that died. */
8001 count_or_remove_death_notes (blocks, kill)
8002 sbitmap blocks;
8003 int kill;
8005 int i, count = 0;
8007 for (i = n_basic_blocks - 1; i >= 0; --i)
8009 basic_block bb;
8010 rtx insn;
8012 if (blocks && ! TEST_BIT (blocks, i))
8013 continue;
8015 bb = BASIC_BLOCK (i);
8017 for (insn = bb->head;; insn = NEXT_INSN (insn))
8019 if (INSN_P (insn))
8021 rtx *pprev = &REG_NOTES (insn);
8022 rtx link = *pprev;
8024 while (link)
8026 switch (REG_NOTE_KIND (link))
8028 case REG_DEAD:
8029 if (GET_CODE (XEXP (link, 0)) == REG)
8031 rtx reg = XEXP (link, 0);
8032 int n;
8034 if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
8035 n = 1;
8036 else
8037 n = HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg));
8038 count += n;
8040 /* Fall through. */
8042 case REG_UNUSED:
8043 if (kill)
8045 rtx next = XEXP (link, 1);
8046 free_EXPR_LIST_node (link);
8047 *pprev = link = next;
8048 break;
8050 /* Fall through. */
8052 default:
8053 pprev = &XEXP (link, 1);
8054 link = *pprev;
8055 break;
8060 if (insn == bb->end)
8061 break;
8065 return count;
8069 /* Record BB as the containing block for every insn within BB. */
8071 void
8072 update_bb_for_insn (bb)
8073 basic_block bb;
8075 rtx insn;
8077 if (! basic_block_for_insn)
8078 return;
8080 for (insn = bb->head; ; insn = NEXT_INSN (insn))
8082 set_block_for_insn (insn, bb);
8084 if (insn == bb->end)
8085 break;
8090 /* Record INSN's block as BB. */
8092 void
8093 set_block_for_insn (insn, bb)
8094 rtx insn;
8095 basic_block bb;
8097 size_t uid = INSN_UID (insn);
8098 if (uid >= basic_block_for_insn->num_elements)
8100 int new_size;
8102 /* Add one-eighth the size so we don't keep calling xrealloc. */
8103 new_size = uid + (uid + 7) / 8;
8105 VARRAY_GROW (basic_block_for_insn, new_size);
8107 VARRAY_BB (basic_block_for_insn, uid) = bb;
8110 /* When a new insn has been inserted into an existing block, it will
8111 sometimes emit more than a single insn. This routine will set the
8112 block number for the specified insn, and look backwards in the insn
8113 chain to see if there are any other uninitialized insns immediately
8114 previous to this one, and set the block number for them too. */
8116 void
8117 set_block_for_new_insns (insn, bb)
8118 rtx insn;
8119 basic_block bb;
8121 set_block_for_insn (insn, bb);
8123 /* Scan the previous instructions setting the block number until we find
8124 an instruction that has the block number set, or we find a note
8125 of any kind. */
8126 for (insn = PREV_INSN (insn); insn != NULL_RTX; insn = PREV_INSN (insn))
8128 if (GET_CODE (insn) == NOTE)
8129 break;
8130 if (INSN_UID (insn) >= basic_block_for_insn->num_elements
8131 || BLOCK_FOR_INSN (insn) == 0)
8132 set_block_for_insn (insn, bb);
8133 else
8134 break;
8138 /* Verify CFG consistency.  This function checks some CFG invariants and
8139    aborts when something is wrong.  Hopefully it will help to convert many
8140    optimization passes to preserve CFG consistency.
8142    Currently it performs the following checks:
8144 - test head/end pointers
8145 - overlapping of basic blocks
8146 - edge list correctness
8147 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
8148    - tails of basic blocks (ensure that the boundary is necessary)
8149    - scans the body of the basic block for JUMP_INSN, CODE_LABEL
8150 and NOTE_INSN_BASIC_BLOCK
8151 - check that all insns are in the basic blocks
8152 (except the switch handling code, barriers and notes)
8153 - check that all returns are followed by barriers
8155    In the future it can be extended to check a lot of other stuff as well
8156    (reachability of basic blocks, life information, etc.). */
8158 void
8159 verify_flow_info ()
8161 const int max_uid = get_max_uid ();
8162 const rtx rtx_first = get_insns ();
8163 rtx last_head = get_last_insn ();
8164 basic_block *bb_info, *last_visited;
8165 rtx x;
8166 int i, last_bb_num_seen, num_bb_notes, err = 0;
8168 bb_info = (basic_block *) xcalloc (max_uid, sizeof (basic_block));
8169 last_visited = (basic_block *) xcalloc (n_basic_blocks + 2,
8170 sizeof (basic_block));
8172 for (i = n_basic_blocks - 1; i >= 0; i--)
8174 basic_block bb = BASIC_BLOCK (i);
8175 rtx head = bb->head;
8176 rtx end = bb->end;
8178 /* Verify the end of the basic block is in the INSN chain. */
8179 for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
8180 if (x == end)
8181 break;
8182 if (!x)
8184 error ("End insn %d for block %d not found in the insn stream.",
8185 INSN_UID (end), bb->index);
8186 err = 1;
8189 /* Work backwards from the end to the head of the basic block
8190 to verify the head is in the RTL chain. */
8191 for (; x != NULL_RTX; x = PREV_INSN (x))
8193 /* While walking over the insn chain, verify insns appear
8194 in only one basic block and initialize the BB_INFO array
8195 used by other passes. */
8196 if (bb_info[INSN_UID (x)] != NULL)
8198 error ("Insn %d is in multiple basic blocks (%d and %d)",
8199 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
8200 err = 1;
8202 bb_info[INSN_UID (x)] = bb;
8204 if (x == head)
8205 break;
8207 if (!x)
8209 error ("Head insn %d for block %d not found in the insn stream.",
8210 INSN_UID (head), bb->index);
8211 err = 1;
8214 last_head = x;
8217 /* Now check the basic blocks (boundaries etc.) */
8218 for (i = n_basic_blocks - 1; i >= 0; i--)
8220 basic_block bb = BASIC_BLOCK (i);
8221 /* Check correctness of edge lists. */
8222 edge e;
8223 int has_fallthru = 0;
8225 e = bb->succ;
8226 while (e)
8228 if (last_visited [e->dest->index + 2] == bb)
8230 error ("verify_flow_info: Duplicate edge %i->%i",
8231 e->src->index, e->dest->index);
8232 err = 1;
8234 last_visited [e->dest->index + 2] = bb;
8236 if (e->flags & EDGE_FALLTHRU)
8237 has_fallthru = 1;
8239 if ((e->flags & EDGE_FALLTHRU)
8240 && e->src != ENTRY_BLOCK_PTR
8241 && e->dest != EXIT_BLOCK_PTR)
8243 rtx insn;
8244 if (e->src->index + 1 != e->dest->index)
8246 error ("verify_flow_info: Incorrect blocks for fallthru %i->%i",
8247 e->src->index, e->dest->index);
8248 err = 1;
8250 else
8251 for (insn = NEXT_INSN (e->src->end); insn != e->dest->head;
8252 insn = NEXT_INSN (insn))
8253 if (GET_CODE (insn) == BARRIER || INSN_P (insn))
8255 error ("verify_flow_info: Incorrect fallthru %i->%i",
8256 e->src->index, e->dest->index);
8257 fatal_insn ("Wrong insn in the fallthru edge", insn);
8258 err = 1;
8261 if (e->src != bb)
8263 error ("verify_flow_info: Basic block %d succ edge is corrupted",
8264 bb->index);
8265 fprintf (stderr, "Predecessor: ");
8266 dump_edge_info (stderr, e, 0);
8267 fprintf (stderr, "\nSuccessor: ");
8268 dump_edge_info (stderr, e, 1);
8269 fprintf (stderr, "\n");
8270 err = 1;
8272 if (e->dest != EXIT_BLOCK_PTR)
8274 edge e2 = e->dest->pred;
8275 while (e2 && e2 != e)
8276 e2 = e2->pred_next;
8277 if (!e2)
8279 error ("Basic block %i edge lists are corrupted", bb->index);
8280 err = 1;
8283 e = e->succ_next;
8285 if (!has_fallthru)
8287 rtx insn = bb->end;
8289 /* Ensure existence of barrier in BB with no fallthru edges. */
8290 for (insn = bb->end; GET_CODE (insn) != BARRIER;
8291 insn = NEXT_INSN (insn))
8292 if (!insn
8293 || (GET_CODE (insn) == NOTE
8294 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_BASIC_BLOCK))
8296 error ("Missing barrier after block %i", bb->index);
8297 err = 1;
8301 e = bb->pred;
8302 while (e)
8304 if (e->dest != bb)
8306 error ("Basic block %d pred edge is corrupted", bb->index);
8307 fputs ("Predecessor: ", stderr);
8308 dump_edge_info (stderr, e, 0);
8309 fputs ("\nSuccessor: ", stderr);
8310 dump_edge_info (stderr, e, 1);
8311 fputc ('\n', stderr);
8312 err = 1;
8314 if (e->src != ENTRY_BLOCK_PTR)
8316 edge e2 = e->src->succ;
8317 while (e2 && e2 != e)
8318 e2 = e2->succ_next;
8319 if (!e2)
8321 error ("Basic block %i edge lists are corrupted", bb->index);
8322 err = 1;
8325 e = e->pred_next;
8328 /* OK, pointers are correct.  Now check the header of the basic
8329    block.  It ought to contain an optional CODE_LABEL followed
8330    by a NOTE_INSN_BASIC_BLOCK note. */
8331 x = bb->head;
8332 if (GET_CODE (x) == CODE_LABEL)
8334 if (bb->end == x)
8336 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
8337 bb->index);
8338 err = 1;
8340 x = NEXT_INSN (x);
8342 if (!NOTE_INSN_BASIC_BLOCK_P (x) || NOTE_BASIC_BLOCK (x) != bb)
8344 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d\n",
8345 bb->index);
8346 err = 1;
8349 if (bb->end == x)
8351 /* Do checks for empty blocks here */
8353 else
8355 x = NEXT_INSN (x);
8356 while (x)
8358 if (NOTE_INSN_BASIC_BLOCK_P (x))
8360 error ("NOTE_INSN_BASIC_BLOCK %d in the middle of basic block %d",
8361 INSN_UID (x), bb->index);
8362 err = 1;
8365 if (x == bb->end)
8366 break;
8368 if (GET_CODE (x) == JUMP_INSN
8369 || GET_CODE (x) == CODE_LABEL
8370 || GET_CODE (x) == BARRIER)
8372 error ("In basic block %d:", bb->index);
8373 fatal_insn ("Flow control insn inside a basic block", x);
8376 x = NEXT_INSN (x);
8381 last_bb_num_seen = -1;
8382 num_bb_notes = 0;
8383 x = rtx_first;
8384 while (x)
8386 if (NOTE_INSN_BASIC_BLOCK_P (x))
8388 basic_block bb = NOTE_BASIC_BLOCK (x);
8389 num_bb_notes++;
8390 if (bb->index != last_bb_num_seen + 1)
8391 internal_error ("Basic blocks not numbered consecutively.");
8393 last_bb_num_seen = bb->index;
8396 if (!bb_info[INSN_UID (x)])
8398 switch (GET_CODE (x))
8400 case BARRIER:
8401 case NOTE:
8402 break;
8404 case CODE_LABEL:
8405 /* An addr_vec is placed outside any basic block. */
8406 if (NEXT_INSN (x)
8407 && GET_CODE (NEXT_INSN (x)) == JUMP_INSN
8408 && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
8409 || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC))
8411 x = NEXT_INSN (x);
8414 /* But in any case, non-deletable labels can appear anywhere. */
8415 break;
8417 default:
8418 fatal_insn ("Insn outside basic block", x);
8422 if (INSN_P (x)
8423 && GET_CODE (x) == JUMP_INSN
8424 && returnjump_p (x) && ! condjump_p (x)
8425 && ! (NEXT_INSN (x) && GET_CODE (NEXT_INSN (x)) == BARRIER))
8426 fatal_insn ("Return not followed by barrier", x);
8428 x = NEXT_INSN (x);
8431 if (num_bb_notes != n_basic_blocks)
8432 internal_error
8433 ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
8434 num_bb_notes, n_basic_blocks);
8436 if (err)
8437 internal_error ("verify_flow_info failed.");
8439 /* Clean up. */
8440 free (bb_info);
8441 free (last_visited);
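/* The following is not part of the original pass; it is an illustrative
   sketch of how a CFG-transforming pass might invoke the checker above once
   it has finished rewriting the flow graph.  Guarding the call with
   ENABLE_CHECKING is an assumption about how a caller might limit the cost.  */
#if 0
#ifdef ENABLE_CHECKING
  /* Re-validate head/end pointers, edge lists and basic block notes after
     the transformation; aborts via internal_error on failure.  */
  verify_flow_info ();
#endif
#endif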
8444 /* Functions to access an edge list with a vector representation.
8445 Enough data is kept such that given an index number, the
8446 pred and succ that edge represents can be determined, or
8447 given a pred and a succ, its index number can be returned.
8448 This allows algorithms which consume a lot of memory to
8449 represent the normally full matrix of edge (pred,succ) with a
8450 single indexed vector, edge (EDGE_INDEX (pred, succ)), with no
8451 wasted space in the client code due to sparse flow graphs. */
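/* Illustrative sketch only (not part of the original file): one way a
   client might consume the edge list described above.  It relies only on
   create_edge_list, NUM_EDGES, INDEX_EDGE_PRED_BB, INDEX_EDGE_SUCC_BB,
   EDGE_INDEX and free_edge_list, all provided by this interface.  */
#if 0
static void
edge_list_usage_sketch (file)
     FILE *file;
{
  struct edge_list *elist = create_edge_list ();
  int i;

  for (i = 0; i < NUM_EDGES (elist); i++)
    {
      basic_block pred = INDEX_EDGE_PRED_BB (elist, i);
      basic_block succ = INDEX_EDGE_SUCC_BB (elist, i);

      /* Mapping the (pred, succ) pair back yields the same index, or
	 EDGE_INDEX_NO_EDGE when no such edge exists.  */
      fprintf (file, "edge %d: %d -> %d (index %d)\n", i,
	       pred->index, succ->index, EDGE_INDEX (elist, pred, succ));
    }

  free_edge_list (elist);
}
#endif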
8453 /* This function initializes the edge list.  Basically the entire
8454 flowgraph is processed, and all edges are assigned a number,
8455 and the data structure is filled in. */
8457 struct edge_list *
8458 create_edge_list ()
8460 struct edge_list *elist;
8461 edge e;
8462 int num_edges;
8463 int x;
8464 int block_count;
8466 block_count = n_basic_blocks + 2; /* Include the entry and exit blocks. */
8468 num_edges = 0;
8470 /* Determine the number of edges in the flow graph by counting successor
8471 edges on each basic block. */
8472 for (x = 0; x < n_basic_blocks; x++)
8474 basic_block bb = BASIC_BLOCK (x);
8476 for (e = bb->succ; e; e = e->succ_next)
8477 num_edges++;
8479 /* Don't forget successors of the entry block. */
8480 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
8481 num_edges++;
8483 elist = (struct edge_list *) xmalloc (sizeof (struct edge_list));
8484 elist->num_blocks = block_count;
8485 elist->num_edges = num_edges;
8486 elist->index_to_edge = (edge *) xmalloc (sizeof (edge) * num_edges);
8488 num_edges = 0;
8490 /* Follow successors of the entry block, and register these edges. */
8491 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
8493 elist->index_to_edge[num_edges] = e;
8494 num_edges++;
8497 for (x = 0; x < n_basic_blocks; x++)
8499 basic_block bb = BASIC_BLOCK (x);
8501 /* Follow all successors of blocks, and register these edges. */
8502 for (e = bb->succ; e; e = e->succ_next)
8504 elist->index_to_edge[num_edges] = e;
8505 num_edges++;
8508 return elist;
8511 /* This function frees the memory associated with an edge list. */
8513 void
8514 free_edge_list (elist)
8515 struct edge_list *elist;
8517 if (elist)
8519 free (elist->index_to_edge);
8520 free (elist);
8524 /* This function provides debug output showing an edge list. */
8526 void
8527 print_edge_list (f, elist)
8528 FILE *f;
8529 struct edge_list *elist;
8531 int x;
8532 fprintf (f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
8533 elist->num_blocks - 2, elist->num_edges);
8535 for (x = 0; x < elist->num_edges; x++)
8537 fprintf (f, " %-4d - edge(", x);
8538 if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR)
8539 fprintf (f, "entry,");
8540 else
8541 fprintf (f, "%d,", INDEX_EDGE_PRED_BB (elist, x)->index);
8543 if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR)
8544 fprintf (f, "exit)\n");
8545 else
8546 fprintf (f, "%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
8550 /* This function provides an internal consistency check of an edge list,
8551 verifying that all edges are present, and that there are no
8552 extra edges. */
8554 void
8555 verify_edge_list (f, elist)
8556 FILE *f;
8557 struct edge_list *elist;
8559 int x, pred, succ, index;
8560 edge e;
8562 for (x = 0; x < n_basic_blocks; x++)
8564 basic_block bb = BASIC_BLOCK (x);
8566 for (e = bb->succ; e; e = e->succ_next)
8568 pred = e->src->index;
8569 succ = e->dest->index;
8570 index = EDGE_INDEX (elist, e->src, e->dest);
8571 if (index == EDGE_INDEX_NO_EDGE)
8573 fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
8574 continue;
8576 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
8577 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
8578 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
8579 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
8580 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
8581 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
8584 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
8586 pred = e->src->index;
8587 succ = e->dest->index;
8588 index = EDGE_INDEX (elist, e->src, e->dest);
8589 if (index == EDGE_INDEX_NO_EDGE)
8591 fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
8592 continue;
8594 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
8595 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
8596 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
8597 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
8598 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
8599 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
8601 /* We've verified that all the edges are in the list; now let's make sure
8602    there are no spurious edges in the list. */
8604 for (pred = 0; pred < n_basic_blocks; pred++)
8605 for (succ = 0; succ < n_basic_blocks; succ++)
8607 basic_block p = BASIC_BLOCK (pred);
8608 basic_block s = BASIC_BLOCK (succ);
8610 int found_edge = 0;
8612 for (e = p->succ; e; e = e->succ_next)
8613 if (e->dest == s)
8615 found_edge = 1;
8616 break;
8618 for (e = s->pred; e; e = e->pred_next)
8619 if (e->src == p)
8621 found_edge = 1;
8622 break;
8624 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
8625 == EDGE_INDEX_NO_EDGE && found_edge != 0)
8626 fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
8627 pred, succ);
8628 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
8629 != EDGE_INDEX_NO_EDGE && found_edge == 0)
8630 fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
8631 pred, succ, EDGE_INDEX (elist, BASIC_BLOCK (pred),
8632 BASIC_BLOCK (succ)));
8634 for (succ = 0; succ < n_basic_blocks; succ++)
8636 basic_block p = ENTRY_BLOCK_PTR;
8637 basic_block s = BASIC_BLOCK (succ);
8639 int found_edge = 0;
8641 for (e = p->succ; e; e = e->succ_next)
8642 if (e->dest == s)
8644 found_edge = 1;
8645 break;
8647 for (e = s->pred; e; e = e->pred_next)
8648 if (e->src == p)
8650 found_edge = 1;
8651 break;
8653 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
8654 == EDGE_INDEX_NO_EDGE && found_edge != 0)
8655 fprintf (f, "*** Edge (entry, %d) appears to not have an index\n",
8656 succ);
8657 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
8658 != EDGE_INDEX_NO_EDGE && found_edge == 0)
8659 fprintf (f, "*** Edge (entry, %d) has index %d, but no edge exists\n",
8660 succ, EDGE_INDEX (elist, ENTRY_BLOCK_PTR,
8661 BASIC_BLOCK (succ)));
8663 for (pred = 0; pred < n_basic_blocks; pred++)
8665 basic_block p = BASIC_BLOCK (pred);
8666 basic_block s = EXIT_BLOCK_PTR;
8668 int found_edge = 0;
8670 for (e = p->succ; e; e = e->succ_next)
8671 if (e->dest == s)
8673 found_edge = 1;
8674 break;
8676 for (e = s->pred; e; e = e->pred_next)
8677 if (e->src == p)
8679 found_edge = 1;
8680 break;
8682 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
8683 == EDGE_INDEX_NO_EDGE && found_edge != 0)
8684 fprintf (f, "*** Edge (%d, exit) appears to not have an index\n",
8685 pred);
8686 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
8687 != EDGE_INDEX_NO_EDGE && found_edge == 0)
8688 fprintf (f, "*** Edge (%d, exit) has index %d, but no edge exists\n",
8689 pred, EDGE_INDEX (elist, BASIC_BLOCK (pred),
8690 EXIT_BLOCK_PTR));
8694 /* This routine will determine what, if any, edge there is between
8695 a specified predecessor and successor. */
8698 find_edge_index (edge_list, pred, succ)
8699 struct edge_list *edge_list;
8700 basic_block pred, succ;
8702 int x;
8703 for (x = 0; x < NUM_EDGES (edge_list); x++)
8705 if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
8706 && INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
8707 return x;
8709 return (EDGE_INDEX_NO_EDGE);
8712 /* This function will remove an edge from the flow graph. */
8714 void
8715 remove_edge (e)
8716 edge e;
8718 edge last_pred = NULL;
8719 edge last_succ = NULL;
8720 edge tmp;
8721 basic_block src, dest;
8722 src = e->src;
8723 dest = e->dest;
8724 for (tmp = src->succ; tmp && tmp != e; tmp = tmp->succ_next)
8725 last_succ = tmp;
8727 if (!tmp)
8728 abort ();
8729 if (last_succ)
8730 last_succ->succ_next = e->succ_next;
8731 else
8732 src->succ = e->succ_next;
8734 for (tmp = dest->pred; tmp && tmp != e; tmp = tmp->pred_next)
8735 last_pred = tmp;
8737 if (!tmp)
8738 abort ();
8739 if (last_pred)
8740 last_pred->pred_next = e->pred_next;
8741 else
8742 dest->pred = e->pred_next;
8744 n_edges--;
8745 free (e);
8748 /* This routine will remove any fake successor edges for a basic block.
8749 When the edge is removed, it is also removed from whatever predecessor
8750 list it is in. */
8752 static void
8753 remove_fake_successors (bb)
8754 basic_block bb;
8756 edge e;
8757 for (e = bb->succ; e;)
8759 edge tmp = e;
8760 e = e->succ_next;
8761 if ((tmp->flags & EDGE_FAKE) == EDGE_FAKE)
8762 remove_edge (tmp);
8766 /* This routine will remove all fake edges from the flow graph. If
8767 we remove all fake successors, it will automatically remove all
8768 fake predecessors. */
8770 void
8771 remove_fake_edges ()
8773 int x;
8775 for (x = 0; x < n_basic_blocks; x++)
8776 remove_fake_successors (BASIC_BLOCK (x));
8778 /* We've handled all successors except the entry block's. */
8779 remove_fake_successors (ENTRY_BLOCK_PTR);
8782 /* This function will add a fake edge between any block which has no
8783 successors, and the exit block. Some data flow equations require these
8784 edges to exist. */
8786 void
8787 add_noreturn_fake_exit_edges ()
8789 int x;
8791 for (x = 0; x < n_basic_blocks; x++)
8792 if (BASIC_BLOCK (x)->succ == NULL)
8793 make_edge (NULL, BASIC_BLOCK (x), EXIT_BLOCK_PTR, EDGE_FAKE);
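/* Illustrative sketch only (not part of the original file): a pass whose
   dataflow equations require that every block reach the exit might bracket
   its computation with the fake-edge helpers above, roughly like this.  */
#if 0
  add_noreturn_fake_exit_edges ();
  /* ... solve the dataflow problem over the now fully-connected CFG ... */
  remove_fake_edges ();
#endif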
8796 /* This function adds a fake edge between any infinite loops to the
8797 exit block. Some optimizations require a path from each node to
8798 the exit node.
8800 See also Morgan, Figure 3.10, pp. 82-83.
8802 The current implementation is ugly, not attempting to minimize the
8803 number of inserted fake edges. To reduce the number of fake edges
8804 to insert, add fake edges from _innermost_ loops containing only
8805 nodes not reachable from the exit block. */
8807 void
8808 connect_infinite_loops_to_exit ()
8810 basic_block unvisited_block;
8812 /* Perform depth-first search in the reverse graph to find nodes
8813 reachable from the exit block. */
8814 struct depth_first_search_dsS dfs_ds;
8816 flow_dfs_compute_reverse_init (&dfs_ds);
8817 flow_dfs_compute_reverse_add_bb (&dfs_ds, EXIT_BLOCK_PTR);
8819 /* Repeatedly add fake edges, updating the unreachable nodes. */
8820 while (1)
8822 unvisited_block = flow_dfs_compute_reverse_execute (&dfs_ds);
8823 if (!unvisited_block)
8824 break;
8825 make_edge (NULL, unvisited_block, EXIT_BLOCK_PTR, EDGE_FAKE);
8826 flow_dfs_compute_reverse_add_bb (&dfs_ds, unvisited_block);
8829 flow_dfs_compute_reverse_finish (&dfs_ds);
8831 return;
8834 /* Redirect an edge's successor from one block to another. */
8836 void
8837 redirect_edge_succ (e, new_succ)
8838 edge e;
8839 basic_block new_succ;
8841 edge *pe;
8843 /* Disconnect the edge from the old successor block. */
8844 for (pe = &e->dest->pred; *pe != e; pe = &(*pe)->pred_next)
8845 continue;
8846 *pe = (*pe)->pred_next;
8848 /* Reconnect the edge to the new successor block. */
8849 e->pred_next = new_succ->pred;
8850 new_succ->pred = e;
8851 e->dest = new_succ;
8854 /* Like the previous function, but avoid creating a possible duplicate edge. */
8856 void
8857 redirect_edge_succ_nodup (e, new_succ)
8858 edge e;
8859 basic_block new_succ;
8861 edge s;
8862 /* Check whether the edge is already present. */
8863 for (s = e->src->succ; s; s = s->succ_next)
8864 if (s->dest == new_succ && s != e)
8865 break;
8866 if (s)
8868 s->flags |= e->flags;
8869 s->probability += e->probability;
8870 s->count += e->count;
8871 remove_edge (e);
8873 else
8874 redirect_edge_succ (e, new_succ);
8877 /* Redirect an edge's predecessor from one block to another. */
8879 void
8880 redirect_edge_pred (e, new_pred)
8881 edge e;
8882 basic_block new_pred;
8884 edge *pe;
8886 /* Disconnect the edge from the old predecessor block. */
8887 for (pe = &e->src->succ; *pe != e; pe = &(*pe)->succ_next)
8888 continue;
8889 *pe = (*pe)->succ_next;
8891 /* Reconnect the edge to the new predecessor block. */
8892 e->succ_next = new_pred->succ;
8893 new_pred->succ = e;
8894 e->src = new_pred;
8897 /* Dump the list of basic blocks in the bitmap NODES. */
8899 static void
8900 flow_nodes_print (str, nodes, file)
8901 const char *str;
8902 const sbitmap nodes;
8903 FILE *file;
8905 int node;
8907 if (! nodes)
8908 return;
8910 fprintf (file, "%s { ", str);
8911 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {fprintf (file, "%d ", node);});
8912 fputs ("}\n", file);
8916 /* Dump the list of edges in the array EDGE_LIST. */
8918 static void
8919 flow_edge_list_print (str, edge_list, num_edges, file)
8920 const char *str;
8921 const edge *edge_list;
8922 int num_edges;
8923 FILE *file;
8925 int i;
8927 if (! edge_list)
8928 return;
8930 fprintf (file, "%s { ", str);
8931 for (i = 0; i < num_edges; i++)
8932 fprintf (file, "%d->%d ", edge_list[i]->src->index,
8933 edge_list[i]->dest->index);
8934 fputs ("}\n", file);
8938 /* Dump loop related CFG information. */
8940 static void
8941 flow_loops_cfg_dump (loops, file)
8942 const struct loops *loops;
8943 FILE *file;
8945 int i;
8947 if (! loops->num || ! file || ! loops->cfg.dom)
8948 return;
8950 for (i = 0; i < n_basic_blocks; i++)
8952 edge succ;
8954 fprintf (file, ";; %d succs { ", i);
8955 for (succ = BASIC_BLOCK (i)->succ; succ; succ = succ->succ_next)
8956 fprintf (file, "%d ", succ->dest->index);
8957 flow_nodes_print ("} dom", loops->cfg.dom[i], file);
8960 /* Dump the DFS node order. */
8961 if (loops->cfg.dfs_order)
8963 fputs (";; DFS order: ", file);
8964 for (i = 0; i < n_basic_blocks; i++)
8965 fprintf (file, "%d ", loops->cfg.dfs_order[i]);
8966 fputs ("\n", file);
8968 /* Dump the reverse completion node order. */
8969 if (loops->cfg.rc_order)
8971 fputs (";; RC order: ", file);
8972 for (i = 0; i < n_basic_blocks; i++)
8973 fprintf (file, "%d ", loops->cfg.rc_order[i]);
8974 fputs ("\n", file);
8978 /* Return non-zero if the nodes of LOOP are a subset of OUTER. */
8980 static int
8981 flow_loop_nested_p (outer, loop)
8982 struct loop *outer;
8983 struct loop *loop;
8985 return sbitmap_a_subset_b_p (loop->nodes, outer->nodes);
8989 /* Dump the loop information specified by LOOP to the stream FILE
8990 using auxiliary dump callback function LOOP_DUMP_AUX if non null. */
8991 void
8992 flow_loop_dump (loop, file, loop_dump_aux, verbose)
8993 const struct loop *loop;
8994 FILE *file;
8995 void (*loop_dump_aux) PARAMS((const struct loop *, FILE *, int));
8996 int verbose;
8998 if (! loop || ! loop->header)
8999 return;
9001 fprintf (file, ";;\n;; Loop %d (%d to %d):%s%s\n",
9002 loop->num, INSN_UID (loop->first->head),
9003 INSN_UID (loop->last->end),
9004 loop->shared ? " shared" : "",
9005 loop->invalid ? " invalid" : "");
9006 fprintf (file, ";; header %d, latch %d, pre-header %d, first %d, last %d\n",
9007 loop->header->index, loop->latch->index,
9008 loop->pre_header ? loop->pre_header->index : -1,
9009 loop->first->index, loop->last->index);
9010 fprintf (file, ";; depth %d, level %d, outer %ld\n",
9011 loop->depth, loop->level,
9012 (long) (loop->outer ? loop->outer->num : -1));
9014 if (loop->pre_header_edges)
9015 flow_edge_list_print (";; pre-header edges", loop->pre_header_edges,
9016 loop->num_pre_header_edges, file);
9017 flow_edge_list_print (";; entry edges", loop->entry_edges,
9018 loop->num_entries, file);
9019 fprintf (file, ";; %d", loop->num_nodes);
9020 flow_nodes_print (" nodes", loop->nodes, file);
9021 flow_edge_list_print (";; exit edges", loop->exit_edges,
9022 loop->num_exits, file);
9023 if (loop->exits_doms)
9024 flow_nodes_print (";; exit doms", loop->exits_doms, file);
9025 if (loop_dump_aux)
9026 loop_dump_aux (loop, file, verbose);
9030 /* Dump the loop information specified by LOOPS to the stream FILE,
9031 using auxiliary dump callback function LOOP_DUMP_AUX if non null. */
9032 void
9033 flow_loops_dump (loops, file, loop_dump_aux, verbose)
9034 const struct loops *loops;
9035 FILE *file;
9036 void (*loop_dump_aux) PARAMS((const struct loop *, FILE *, int));
9037 int verbose;
9039 int i;
9040 int num_loops;
9042 num_loops = loops->num;
9043 if (! num_loops || ! file)
9044 return;
9046 fprintf (file, ";; %d loops found, %d levels\n",
9047 num_loops, loops->levels);
9049 for (i = 0; i < num_loops; i++)
9051 struct loop *loop = &loops->array[i];
9053 flow_loop_dump (loop, file, loop_dump_aux, verbose);
9055 if (loop->shared)
9057 int j;
9059 for (j = 0; j < i; j++)
9061 struct loop *oloop = &loops->array[j];
9063 if (loop->header == oloop->header)
9065 int disjoint;
9066 int smaller;
9068 smaller = loop->num_nodes < oloop->num_nodes;
9070 /* If the union of LOOP and OLOOP is different than
9071 the larger of LOOP and OLOOP then LOOP and OLOOP
9072 must be disjoint. */
9073 disjoint = ! flow_loop_nested_p (smaller ? loop : oloop,
9074 smaller ? oloop : loop);
9075 fprintf (file,
9076 ";; loop header %d shared by loops %d, %d %s\n",
9077 loop->header->index, i, j,
9078 disjoint ? "disjoint" : "nested");
9084 if (verbose)
9085 flow_loops_cfg_dump (loops, file);
9089 /* Free all the memory allocated for LOOPS. */
9091 void
9092 flow_loops_free (loops)
9093 struct loops *loops;
9095 if (loops->array)
9097 int i;
9099 if (! loops->num)
9100 abort ();
9102 /* Free the loop descriptors. */
9103 for (i = 0; i < loops->num; i++)
9105 struct loop *loop = &loops->array[i];
9107 if (loop->pre_header_edges)
9108 free (loop->pre_header_edges);
9109 if (loop->nodes)
9110 sbitmap_free (loop->nodes);
9111 if (loop->entry_edges)
9112 free (loop->entry_edges);
9113 if (loop->exit_edges)
9114 free (loop->exit_edges);
9115 if (loop->exits_doms)
9116 sbitmap_free (loop->exits_doms);
9118 free (loops->array);
9119 loops->array = NULL;
9121 if (loops->cfg.dom)
9122 sbitmap_vector_free (loops->cfg.dom);
9123 if (loops->cfg.dfs_order)
9124 free (loops->cfg.dfs_order);
9126 if (loops->shared_headers)
9127 sbitmap_free (loops->shared_headers);
9132 /* Find the entry edges into the loop with header HEADER and nodes
9133 NODES and store in ENTRY_EDGES array. Return the number of entry
9134    edges into the loop. */
9136 static int
9137 flow_loop_entry_edges_find (header, nodes, entry_edges)
9138 basic_block header;
9139 const sbitmap nodes;
9140 edge **entry_edges;
9142 edge e;
9143 int num_entries;
9145 *entry_edges = NULL;
9147 num_entries = 0;
9148 for (e = header->pred; e; e = e->pred_next)
9150 basic_block src = e->src;
9152 if (src == ENTRY_BLOCK_PTR || ! TEST_BIT (nodes, src->index))
9153 num_entries++;
9156 if (! num_entries)
9157 abort ();
9159 *entry_edges = (edge *) xmalloc (num_entries * sizeof (edge *));
9161 num_entries = 0;
9162 for (e = header->pred; e; e = e->pred_next)
9164 basic_block src = e->src;
9166 if (src == ENTRY_BLOCK_PTR || ! TEST_BIT (nodes, src->index))
9167 (*entry_edges)[num_entries++] = e;
9170 return num_entries;
9174 /* Find the exit edges from the loop using the bitmap of loop nodes
9175 NODES and store in EXIT_EDGES array. Return the number of
9176 exit edges from the loop. */
9178 static int
9179 flow_loop_exit_edges_find (nodes, exit_edges)
9180 const sbitmap nodes;
9181 edge **exit_edges;
9183 edge e;
9184 int node;
9185 int num_exits;
9187 *exit_edges = NULL;
9189 /* Check all nodes within the loop to see if there are any
9190    successors not in the loop.  Note that a node may have multiple
9191    exiting edges; for example, a node can have one jumping edge and one
9192    fallthru edge, so only one of these can exit the loop. */
9193 num_exits = 0;
9194 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
9195 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
9197 basic_block dest = e->dest;
9199 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
9200 num_exits++;
9204 if (! num_exits)
9205 return 0;
9207 *exit_edges = (edge *) xmalloc (num_exits * sizeof (edge *));
9209 /* Store all exiting edges into an array. */
9210 num_exits = 0;
9211 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
9212 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
9214 basic_block dest = e->dest;
9216 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
9217 (*exit_edges)[num_exits++] = e;
9221 return num_exits;
9225 /* Find the nodes contained within the loop with header HEADER and
9226 latch LATCH and store in NODES. Return the number of nodes within
9227 the loop. */
9229 static int
9230 flow_loop_nodes_find (header, latch, nodes)
9231 basic_block header;
9232 basic_block latch;
9233 sbitmap nodes;
9235 basic_block *stack;
9236 int sp;
9237 int num_nodes = 0;
9239 stack = (basic_block *) xmalloc (n_basic_blocks * sizeof (basic_block));
9240 sp = 0;
9242 /* Start with only the loop header in the set of loop nodes. */
9243 sbitmap_zero (nodes);
9244 SET_BIT (nodes, header->index);
9245 num_nodes++;
9246 header->loop_depth++;
9248 /* Push the loop latch on to the stack. */
9249 if (! TEST_BIT (nodes, latch->index))
9251 SET_BIT (nodes, latch->index);
9252 latch->loop_depth++;
9253 num_nodes++;
9254 stack[sp++] = latch;
9257 while (sp)
9259 basic_block node;
9260 edge e;
9262 node = stack[--sp];
9263 for (e = node->pred; e; e = e->pred_next)
9265 basic_block ancestor = e->src;
9267 /* If this ancestor is not yet marked as part of the loop, add it to
9268    the set of loop nodes and push it onto the stack. */
9269 if (ancestor != ENTRY_BLOCK_PTR
9270 && ! TEST_BIT (nodes, ancestor->index))
9272 SET_BIT (nodes, ancestor->index);
9273 ancestor->loop_depth++;
9274 num_nodes++;
9275 stack[sp++] = ancestor;
9279 free (stack);
9280 return num_nodes;
9283 /* Compute the depth first search order and store it in the array
9284    DFS_ORDER if non-zero.  If RC_ORDER is non-zero, return the reverse
9285    completion number for each
9286 node. Returns the number of nodes visited. A depth first search
9287 tries to get as far away from the starting point as quickly as
9288 possible. */
9291 flow_depth_first_order_compute (dfs_order, rc_order)
9292 int *dfs_order;
9293 int *rc_order;
9295 edge *stack;
9296 int sp;
9297 int dfsnum = 0;
9298 int rcnum = n_basic_blocks - 1;
9299 sbitmap visited;
9301 /* Allocate stack for back-tracking up CFG. */
9302 stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
9303 sp = 0;
9305 /* Allocate bitmap to track nodes that have been visited. */
9306 visited = sbitmap_alloc (n_basic_blocks);
9308 /* None of the nodes in the CFG have been visited yet. */
9309 sbitmap_zero (visited);
9311 /* Push the first edge on to the stack. */
9312 stack[sp++] = ENTRY_BLOCK_PTR->succ;
9314 while (sp)
9316 edge e;
9317 basic_block src;
9318 basic_block dest;
9320 /* Look at the edge on the top of the stack. */
9321 e = stack[sp - 1];
9322 src = e->src;
9323 dest = e->dest;
9325 /* Check if the edge destination has been visited yet. */
9326 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
9328 /* Mark that we have visited the destination. */
9329 SET_BIT (visited, dest->index);
9331 if (dfs_order)
9332 dfs_order[dfsnum++] = dest->index;
9334 if (dest->succ)
9336 /* Since the DEST node has been visited for the first
9337 time, check its successors. */
9338 stack[sp++] = dest->succ;
9340 else
9342 /* There are no successors for the DEST node so assign
9343 its reverse completion number. */
9344 if (rc_order)
9345 rc_order[rcnum--] = dest->index;
9348 else
9350 if (! e->succ_next && src != ENTRY_BLOCK_PTR)
9352 /* There are no more successors for the SRC node
9353 so assign its reverse completion number. */
9354 if (rc_order)
9355 rc_order[rcnum--] = src->index;
9358 if (e->succ_next)
9359 stack[sp - 1] = e->succ_next;
9360 else
9361 sp--;
9365 free (stack);
9366 sbitmap_free (visited);
9368 /* The number of nodes visited should not be greater than
9369 n_basic_blocks. */
9370 if (dfsnum > n_basic_blocks)
9371 abort ();
9373 /* There are some nodes left in the CFG that are unreachable. */
9374 if (dfsnum < n_basic_blocks)
9375 abort ();
9376 return dfsnum;
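/* Illustrative sketch only (not part of the original file): callers such as
   flow_loops_find below allocate both order arrays, each with
   n_basic_blocks elements, before calling the function above.  */
#if 0
  int *dfs_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
  int *rc_order = (int *) xmalloc (n_basic_blocks * sizeof (int));

  flow_depth_first_order_compute (dfs_order, rc_order);

  /* Visiting BASIC_BLOCK (rc_order[i]) for increasing I walks the CFG in
     reverse completion order, so outer loop headers are seen first.  */

  free (dfs_order);
  free (rc_order);
#endif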
9379 /* Compute the depth first search order on the _reverse_ graph and
9380 store in the array DFS_ORDER, marking the nodes visited in VISITED.
9381 Returns the number of nodes visited.
9383 The computation is split into three pieces:
9385 flow_dfs_compute_reverse_init () creates the necessary data
9386 structures.
9388 flow_dfs_compute_reverse_add_bb () adds a basic block to the data
9389 structures. The block will start the search.
9391 flow_dfs_compute_reverse_execute () continues (or starts) the
9392 search using the block on the top of the stack, stopping when the
9393 stack is empty.
9395 flow_dfs_compute_reverse_finish () destroys the necessary data
9396 structures.
9398 Thus, the user will probably call ..._init(), call ..._add_bb() to
9399 add a beginning basic block to the stack, call ..._execute(),
9400 possibly add another bb to the stack and again call ..._execute(),
9401 ..., and finally call _finish(). */
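/* Illustrative sketch only (not part of the original file): the calling
   pattern described above, used here to locate blocks that cannot reach the
   exit block.  The helpers are static, so this only applies within this
   file; connect_infinite_loops_to_exit above follows the same pattern.  */
#if 0
  struct depth_first_search_dsS dfs_ds;
  basic_block bb;

  flow_dfs_compute_reverse_init (&dfs_ds);
  flow_dfs_compute_reverse_add_bb (&dfs_ds, EXIT_BLOCK_PTR);

  /* Each call walks backwards over predecessor edges; a non-NULL return is
     a block that was not reached from any block pushed so far.  */
  while ((bb = flow_dfs_compute_reverse_execute (&dfs_ds)) != NULL)
    flow_dfs_compute_reverse_add_bb (&dfs_ds, bb);

  flow_dfs_compute_reverse_finish (&dfs_ds);
#endif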
9403 /* Initialize the data structures used for depth-first search on the
9404    reverse graph.  DATA is the current depth-first search context.
9405    The stack and the visited-block bitmap start out empty; starting
9406    blocks are added later with flow_dfs_compute_reverse_add_bb. */
9409 static void
9410 flow_dfs_compute_reverse_init (data)
9411 depth_first_search_ds data;
9413 /* Allocate stack for back-tracking up CFG. */
9414 data->stack =
9415 (basic_block *) xmalloc ((n_basic_blocks - (INVALID_BLOCK + 1))
9416 * sizeof (basic_block));
9417 data->sp = 0;
9419 /* Allocate bitmap to track nodes that have been visited. */
9420 data->visited_blocks = sbitmap_alloc (n_basic_blocks - (INVALID_BLOCK + 1));
9422 /* None of the nodes in the CFG have been visited yet. */
9423 sbitmap_zero (data->visited_blocks);
9425 return;
9428 /* Add the specified basic block to the top of the dfs data
9429 structures. When the search continues, it will start at the
9430 block. */
9432 static void
9433 flow_dfs_compute_reverse_add_bb (data, bb)
9434 depth_first_search_ds data;
9435 basic_block bb;
9437 data->stack[data->sp++] = bb;
9438 return;
9441 /* Continue the depth-first search through the reverse graph starting
9442 with the block at the stack's top and ending when the stack is
9443 empty. Visited nodes are marked. Returns an unvisited basic
9444 block, or NULL if there is none available. */
9446 static basic_block
9447 flow_dfs_compute_reverse_execute (data)
9448 depth_first_search_ds data;
9450 basic_block bb;
9451 edge e;
9452 int i;
9454 while (data->sp > 0)
9456 bb = data->stack[--data->sp];
9458 /* Mark that we have visited this node. */
9459 if (!TEST_BIT (data->visited_blocks, bb->index - (INVALID_BLOCK + 1)))
9461 SET_BIT (data->visited_blocks, bb->index - (INVALID_BLOCK + 1));
9463 /* Perform depth-first search on adjacent vertices. */
9464 for (e = bb->pred; e; e = e->pred_next)
9465 flow_dfs_compute_reverse_add_bb (data, e->src);
9469 /* Determine if there are unvisited basic blocks. */
9470 for (i = n_basic_blocks - (INVALID_BLOCK + 1); --i >= 0;)
9471 if (!TEST_BIT (data->visited_blocks, i))
9472 return BASIC_BLOCK (i + (INVALID_BLOCK + 1));
9473 return NULL;
9476 /* Destroy the data structures needed for depth-first search on the
9477 reverse graph. */
9479 static void
9480 flow_dfs_compute_reverse_finish (data)
9481 depth_first_search_ds data;
9483 free (data->stack);
9484 sbitmap_free (data->visited_blocks);
9485 return;
9489 /* Find the root node of the loop pre-header extended basic block and
9490 the edges along the trace from the root node to the loop header. */
9492 static void
9493 flow_loop_pre_header_scan (loop)
9494 struct loop *loop;
9496 int num = 0;
9497 basic_block ebb;
9499 loop->num_pre_header_edges = 0;
9501 if (loop->num_entries != 1)
9502 return;
9504 ebb = loop->entry_edges[0]->src;
9506 if (ebb != ENTRY_BLOCK_PTR)
9508 edge e;
9510 /* Count number of edges along trace from loop header to
9511 root of pre-header extended basic block. Usually this is
9512 only one or two edges. */
9513 num++;
9514 while (ebb->pred->src != ENTRY_BLOCK_PTR && ! ebb->pred->pred_next)
9516 ebb = ebb->pred->src;
9517 num++;
9520 loop->pre_header_edges = (edge *) xmalloc (num * sizeof (edge *));
9521 loop->num_pre_header_edges = num;
9523 /* Store the edges in the order that they are followed.  The source
9524    of the first edge is the root node of the pre-header extended
9525    basic block and the destination of the last edge is
9526 the loop header. */
9527 for (e = loop->entry_edges[0]; num; e = e->src->pred)
9529 loop->pre_header_edges[--num] = e;
9535 /* Return the block for the pre-header of the loop with header
9536 HEADER where DOM specifies the dominator information. Return NULL if
9537 there is no pre-header. */
9539 static basic_block
9540 flow_loop_pre_header_find (header, dom)
9541 basic_block header;
9542 const sbitmap *dom;
9544 basic_block pre_header;
9545 edge e;
9547 /* If block p is a predecessor of the header and is the only block
9548 that the header does not dominate, then it is the pre-header. */
9549 pre_header = NULL;
9550 for (e = header->pred; e; e = e->pred_next)
9552 basic_block node = e->src;
9554 if (node != ENTRY_BLOCK_PTR
9555 && ! TEST_BIT (dom[node->index], header->index))
9557 if (pre_header == NULL)
9558 pre_header = node;
9559 else
9561 /* There are multiple edges into the header from outside
9562 the loop so there is no pre-header block. */
9563 pre_header = NULL;
9564 break;
9568 return pre_header;
9571 /* Add LOOP to the loop hierarchy tree where PREVLOOP was the loop
9572 previously added. The insertion algorithm assumes that the loops
9573 are added in the order found by a depth first search of the CFG. */
9575 static void
9576 flow_loop_tree_node_add (prevloop, loop)
9577 struct loop *prevloop;
9578 struct loop *loop;
9581 if (flow_loop_nested_p (prevloop, loop))
9583 prevloop->inner = loop;
9584 loop->outer = prevloop;
9585 return;
9588 while (prevloop->outer)
9590 if (flow_loop_nested_p (prevloop->outer, loop))
9592 prevloop->next = loop;
9593 loop->outer = prevloop->outer;
9594 return;
9596 prevloop = prevloop->outer;
9599 prevloop->next = loop;
9600 loop->outer = NULL;
9603 /* Build the loop hierarchy tree for LOOPS. */
9605 static void
9606 flow_loops_tree_build (loops)
9607 struct loops *loops;
9609 int i;
9610 int num_loops;
9612 num_loops = loops->num;
9613 if (! num_loops)
9614 return;
9616 /* Root the loop hierarchy tree with the first loop found.
9617 Since we used a depth first search this should be the
9618 outermost loop. */
9619 loops->tree_root = &loops->array[0];
9620 loops->tree_root->outer = loops->tree_root->inner = loops->tree_root->next = NULL;
9622 /* Add the remaining loops to the tree. */
9623 for (i = 1; i < num_loops; i++)
9624 flow_loop_tree_node_add (&loops->array[i - 1], &loops->array[i]);
9627 /* Helper function to compute loop nesting depth and enclosed loop level
9628 for the natural loop specified by LOOP at the loop depth DEPTH.
9629 Returns the loop level. */
9631 static int
9632 flow_loop_level_compute (loop, depth)
9633 struct loop *loop;
9634 int depth;
9636 struct loop *inner;
9637 int level = 1;
9639 if (! loop)
9640 return 0;
9642 /* Traverse loop tree assigning depth and computing level as the
9643 maximum level of all the inner loops of this loop. The loop
9644 level is equivalent to the height of the loop in the loop tree
9645 and corresponds to the number of enclosed loop levels (including
9646 itself). */
9647 for (inner = loop->inner; inner; inner = inner->next)
9649 int ilevel;
9651 ilevel = flow_loop_level_compute (inner, depth + 1) + 1;
9653 if (ilevel > level)
9654 level = ilevel;
9656 loop->level = level;
9657 loop->depth = depth;
9658 return level;
9661 /* Compute the loop nesting depth and enclosed loop level for the loop
9662    hierarchy tree specified by LOOPS.  Return the maximum enclosed loop
9663 level. */
9665 static int
9666 flow_loops_level_compute (loops)
9667 struct loops *loops;
9669 struct loop *loop;
9670 int level;
9671 int levels = 0;
9673 /* Traverse all the outer level loops. */
9674 for (loop = loops->tree_root; loop; loop = loop->next)
9676 level = flow_loop_level_compute (loop, 1);
9677 if (level > levels)
9678 levels = level;
9680 return levels;
9684 /* Scan a single natural loop specified by LOOP collecting information
9685 about it specified by FLAGS. */
9688 flow_loop_scan (loops, loop, flags)
9689 struct loops *loops;
9690 struct loop *loop;
9691 int flags;
9693 /* Determine prerequisites. */
9694 if ((flags & LOOP_EXITS_DOMS) && ! loop->exit_edges)
9695 flags |= LOOP_EXIT_EDGES;
9697 if (flags & LOOP_ENTRY_EDGES)
9699 /* Find edges which enter the loop header.
9700 Note that the entry edges should only
9701 enter the header of a natural loop. */
9702 loop->num_entries
9703 = flow_loop_entry_edges_find (loop->header,
9704 loop->nodes,
9705 &loop->entry_edges);
9708 if (flags & LOOP_EXIT_EDGES)
9710 /* Find edges which exit the loop. */
9711 loop->num_exits
9712 = flow_loop_exit_edges_find (loop->nodes,
9713 &loop->exit_edges);
9716 if (flags & LOOP_EXITS_DOMS)
9718 int j;
9720 /* Determine which loop nodes dominate all the exits
9721 of the loop. */
9722 loop->exits_doms = sbitmap_alloc (n_basic_blocks);
9723 sbitmap_copy (loop->exits_doms, loop->nodes);
9724 for (j = 0; j < loop->num_exits; j++)
9725 sbitmap_a_and_b (loop->exits_doms, loop->exits_doms,
9726 loops->cfg.dom[loop->exit_edges[j]->src->index]);
9728 /* The header of a natural loop must dominate
9729 all exits. */
9730 if (! TEST_BIT (loop->exits_doms, loop->header->index))
9731 abort ();
9734 if (flags & LOOP_PRE_HEADER)
9736 /* Look to see if the loop has a pre-header node. */
9737 loop->pre_header
9738 = flow_loop_pre_header_find (loop->header, loops->cfg.dom);
9740 /* Find the blocks within the extended basic block of
9741 the loop pre-header. */
9742 flow_loop_pre_header_scan (loop);
9744 return 1;
9748 /* Find all the natural loops in the function and save in LOOPS structure
9749 and recalculate loop_depth information in basic block structures.
9750 FLAGS controls which loop information is collected.
9751 Return the number of natural loops found. */
9754 flow_loops_find (loops, flags)
9755 struct loops *loops;
9756 int flags;
9758 int i;
9759 int b;
9760 int num_loops;
9761 edge e;
9762 sbitmap headers;
9763 sbitmap *dom;
9764 int *dfs_order;
9765 int *rc_order;
9767 /* This function cannot be repeatedly called with different
9768 flags to build up the loop information. The loop tree
9769 must always be built if this function is called. */
9770 if (! (flags & LOOP_TREE))
9771 abort ();
9773 memset (loops, 0, sizeof (*loops));
9775 /* Taking care of this degenerate case makes the rest of
9776 this code simpler. */
9777 if (n_basic_blocks == 0)
9778 return 0;
9780 dfs_order = NULL;
9781 rc_order = NULL;
9783 /* Compute the dominators. */
9784 dom = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
9785 calculate_dominance_info (NULL, dom, CDI_DOMINATORS);
9787 /* Count the number of loop edges (back edges). This should be the
9788 same as the number of natural loops. */
9790 num_loops = 0;
9791 for (b = 0; b < n_basic_blocks; b++)
9793 basic_block header;
9795 header = BASIC_BLOCK (b);
9796 header->loop_depth = 0;
9798 for (e = header->pred; e; e = e->pred_next)
9800 basic_block latch = e->src;
9802 /* Look for back edges where a predecessor is dominated
9803 by this block. A natural loop has a single entry
9804 node (header) that dominates all the nodes in the
9805    loop.  It also has a single back edge to the header
9806 from a latch node. Note that multiple natural loops
9807 may share the same header. */
9808 if (b != header->index)
9809 abort ();
9811 if (latch != ENTRY_BLOCK_PTR && TEST_BIT (dom[latch->index], b))
9812 num_loops++;
9816 if (num_loops)
9818 /* Compute depth first search order of the CFG so that outer
9819 natural loops will be found before inner natural loops. */
9820 dfs_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
9821 rc_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
9822 flow_depth_first_order_compute (dfs_order, rc_order);
9824 /* Save CFG derived information to avoid recomputing it. */
9825 loops->cfg.dom = dom;
9826 loops->cfg.dfs_order = dfs_order;
9827 loops->cfg.rc_order = rc_order;
9829 /* Allocate loop structures. */
9830 loops->array
9831 = (struct loop *) xcalloc (num_loops, sizeof (struct loop));
9833 headers = sbitmap_alloc (n_basic_blocks);
9834 sbitmap_zero (headers);
9836 loops->shared_headers = sbitmap_alloc (n_basic_blocks);
9837 sbitmap_zero (loops->shared_headers);
9839 /* Find and record information about all the natural loops
9840 in the CFG. */
9841 num_loops = 0;
9842 for (b = 0; b < n_basic_blocks; b++)
9844 basic_block header;
9846 /* Search the nodes of the CFG in reverse completion order
9847 so that we can find outer loops first. */
9848 header = BASIC_BLOCK (rc_order[b]);
9850 /* Look for all the possible latch blocks for this header. */
9851 for (e = header->pred; e; e = e->pred_next)
9853 basic_block latch = e->src;
9855 /* Look for back edges where a predecessor is dominated
9856 by this block. A natural loop has a single entry
9857 node (header) that dominates all the nodes in the
9858    loop.  It also has a single back edge to the header
9859 from a latch node. Note that multiple natural loops
9860 may share the same header. */
9861 if (latch != ENTRY_BLOCK_PTR
9862 && TEST_BIT (dom[latch->index], header->index))
9864 struct loop *loop;
9866 loop = loops->array + num_loops;
9868 loop->header = header;
9869 loop->latch = latch;
9870 loop->num = num_loops;
9872 num_loops++;
9877 for (i = 0; i < num_loops; i++)
9879 struct loop *loop = &loops->array[i];
9881 /* Keep track of blocks that are loop headers so
9882 that we can tell which loops should be merged. */
9883 if (TEST_BIT (headers, loop->header->index))
9884 SET_BIT (loops->shared_headers, loop->header->index);
9885 SET_BIT (headers, loop->header->index);
9887 /* Find nodes contained within the loop. */
9888 loop->nodes = sbitmap_alloc (n_basic_blocks);
9889 loop->num_nodes
9890 = flow_loop_nodes_find (loop->header, loop->latch, loop->nodes);
9892 /* Compute first and last blocks within the loop.
9893 These are often the same as the loop header and
9894 loop latch respectively, but this is not always
9895 the case. */
9896 loop->first
9897 = BASIC_BLOCK (sbitmap_first_set_bit (loop->nodes));
9898 loop->last
9899 = BASIC_BLOCK (sbitmap_last_set_bit (loop->nodes));
9901 flow_loop_scan (loops, loop, flags);
9904 /* Natural loops with shared headers may either be disjoint or
9905 nested. Disjoint loops with shared headers cannot be inner
9906 loops and should be merged. For now just mark loops that share
9907 headers. */
9908 for (i = 0; i < num_loops; i++)
9909 if (TEST_BIT (loops->shared_headers, loops->array[i].header->index))
9910 loops->array[i].shared = 1;
9912 sbitmap_free (headers);
9914 else
9916 sbitmap_vector_free (dom);
9919 loops->num = num_loops;
9921 /* Build the loop hierarchy tree. */
9922 flow_loops_tree_build (loops);
9924 /* Assign the loop nesting depth and enclosed loop level for each
9925 loop. */
9926 loops->levels = flow_loops_level_compute (loops);
9928 return num_loops;
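/* Illustrative sketch only (not part of the original file): a typical
   client asks for the loop tree plus entry and exit edges, dumps the
   result, and releases the storage with flow_loops_free.  */
#if 0
  struct loops loops;

  if (flow_loops_find (&loops, LOOP_TREE | LOOP_ENTRY_EDGES | LOOP_EXIT_EDGES)
      && rtl_dump_file)
    flow_loops_dump (&loops, rtl_dump_file, NULL, 0);

  flow_loops_free (&loops);
#endif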
/* Update the information regarding the loops in the CFG
   specified by LOOPS.  */

int
flow_loops_update (loops, flags)
     struct loops *loops;
     int flags;
{
  /* One day we may want to update the current loop data.  For now
     throw away the old stuff and rebuild what we need.  */
  if (loops->array)
    flow_loops_free (loops);

  return flow_loops_find (loops, flags);
}
/* Return non-zero if edge E enters header of LOOP from outside of LOOP.  */

int
flow_loop_outside_edge_p (loop, e)
     const struct loop *loop;
     edge e;
{
  if (e->dest != loop->header)
    abort ();
  return (e->src == ENTRY_BLOCK_PTR)
    || ! TEST_BIT (loop->nodes, e->src->index);
}
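/* Illustrative sketch (not part of flow.c): flow_loop_outside_edge_p is
   meant to be used while scanning the predecessor edges of a loop header,
   for example to count how many edges enter the loop from outside.  */
#if 0
static int
example_count_loop_entries (loop)
     const struct loop *loop;
{
  edge e;
  int num_entries = 0;

  for (e = loop->header->pred; e; e = e->pred_next)
    if (flow_loop_outside_edge_p (loop, e))
      num_entries++;

  return num_entries;
}
#endif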
/* Clear LOG_LINKS fields of insns in a chain.
   Also clear the global_live_at_{start,end} fields of the basic block
   structures.  */

void
clear_log_links (insns)
     rtx insns;
{
  rtx i;
  int b;

  for (i = insns; i; i = NEXT_INSN (i))
    if (INSN_P (i))
      LOG_LINKS (i) = 0;

  for (b = 0; b < n_basic_blocks; b++)
    {
      basic_block bb = BASIC_BLOCK (b);

      bb->global_live_at_start = NULL;
      bb->global_live_at_end = NULL;
    }

  ENTRY_BLOCK_PTR->global_live_at_end = NULL;
  EXIT_BLOCK_PTR->global_live_at_start = NULL;
}
/* Given a register bitmap, turn on the bits in a HARD_REG_SET that
   correspond to the hard registers, if any, set in that map.  This
   could be done far more efficiently by having all sorts of special-cases
   with moving single words, but probably isn't worth the trouble.  */

void
reg_set_to_hard_reg_set (to, from)
     HARD_REG_SET *to;
     bitmap from;
{
  int i;

  EXECUTE_IF_SET_IN_BITMAP
    (from, 0, i,
     {
       if (i >= FIRST_PSEUDO_REGISTER)
         return;
       SET_HARD_REG_BIT (*to, i);
     });
}
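/* Note that the RETURN inside EXECUTE_IF_SET_IN_BITMAP above leaves
   reg_set_to_hard_reg_set as soon as a pseudo register is reached; the
   bitmap is walked in increasing register-number order, so every remaining
   bit must also be a pseudo.  Illustrative sketch (not part of flow.c): a
   typical use is to extract the hard registers live at the end of some
   basic block; BB below is only an example name.  */
#if 0
  {
    HARD_REG_SET live_at_end;

    /* BB is assumed to be some basic_block of interest.  */
    CLEAR_HARD_REG_SET (live_at_end);
    reg_set_to_hard_reg_set (&live_at_end, bb->global_live_at_end);
  }
#endif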
/* Called once at initialization time.  */

void
init_flow ()
{
  static int initialized;

  if (!initialized)
    {
      gcc_obstack_init (&flow_obstack);
      flow_firstobj = (char *) obstack_alloc (&flow_obstack, 0);
      initialized = 1;
    }
  else
    {
      obstack_free (&flow_obstack, flow_firstobj);
      flow_firstobj = (char *) obstack_alloc (&flow_obstack, 0);
    }
}
/* Assume that the preceding pass has possibly eliminated jump instructions
   or converted the unconditional jumps.  Eliminate the corresponding dead
   edges from the CFG.  */

void
purge_dead_edges (bb)
     basic_block bb;
{
  edge e, next;
  rtx insn = bb->end;

  if (GET_CODE (insn) == JUMP_INSN && !simplejump_p (insn))
    return;

  if (GET_CODE (insn) == JUMP_INSN)
    {
      int removed = 0;
      rtx note;
      edge b, f;

      /* We only care about conditional jumps, return jumps and
         simplejumps.  */
      if (!any_condjump_p (insn)
          && !returnjump_p (insn)
          && !simplejump_p (insn))
        return;

      for (e = bb->succ; e; e = next)
        {
          next = e->succ_next;

          /* Keep the edge if it still has a legitimate purpose.  */
          if ((e->flags & EDGE_FALLTHRU)
              && any_condjump_p (insn))
            continue;
          if (e->dest != EXIT_BLOCK_PTR
              && e->dest->head == JUMP_LABEL (insn))
            continue;
          if (e->dest == EXIT_BLOCK_PTR
              && returnjump_p (insn))
            continue;

          removed = 1;
          remove_edge (e);
        }

      if (!bb->succ || !removed)
        return;

      if (rtl_dump_file)
        fprintf (rtl_dump_file, "Purged edges from bb %i\n", bb->index);

      if (!optimize)
        return;

      /* Redistribute probabilities.  */
      if (!bb->succ->succ_next)
        {
          bb->succ->probability = REG_BR_PROB_BASE;
          bb->succ->count = bb->count;
        }
      else
        {
          note = find_reg_note (insn, REG_BR_PROB, NULL);
          if (!note)
            return;

          b = BRANCH_EDGE (bb);
          f = FALLTHRU_EDGE (bb);
          b->probability = INTVAL (XEXP (note, 0));
          f->probability = REG_BR_PROB_BASE - b->probability;
          b->count = bb->count * b->probability / REG_BR_PROB_BASE;
          f->count = bb->count * f->probability / REG_BR_PROB_BASE;
        }

      return;
    }

  /* If we don't see a jump insn, we don't know exactly why the block would
     have been broken at this point.  Look for a simple, non-fallthru edge,
     as these are only created by conditional branches.  If we find such an
     edge we know that there used to be a jump here and can then safely
     remove all non-fallthru edges.  */
  for (e = bb->succ; e && (e->flags & (EDGE_COMPLEX | EDGE_FALLTHRU));
       e = e->succ_next)
    ;
  if (!e)
    return;

  for (e = bb->succ; e; e = next)
    {
      next = e->succ_next;
      if (!(e->flags & EDGE_FALLTHRU))
        remove_edge (e);
    }

  if (!bb->succ || bb->succ->succ_next)
    abort ();

  bb->succ->probability = REG_BR_PROB_BASE;
  bb->succ->count = bb->count;

  if (rtl_dump_file)
    fprintf (rtl_dump_file, "Purged non-fallthru edges from bb %i\n",
             bb->index);
  return;
}
/* Search all basic blocks for potentially dead edges and purge them.  */

void
purge_all_dead_edges ()
{
  int i;

  for (i = 0; i < n_basic_blocks; i++)
    purge_dead_edges (BASIC_BLOCK (i));
}
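/* Illustrative sketch (not part of flow.c): a pass that has deleted or
   simplified jump instructions would typically purge the now-dead edges
   before relying on the CFG again, either for a single block or for the
   whole function.  BB below is only an example name.  */
#if 0
  /* After simplifying the jump at the end of BB ... */
  purge_dead_edges (bb);

  /* ... or after a pass that may have modified jumps in many blocks.  */
  purge_all_dead_edges ();
#endif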