/* Data flow analysis for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

/* This file contains the data flow analysis pass of the compiler.  It
   computes data flow information which tells combine_instructions
   which insns to consider combining and controls register allocation.

   Additional data flow information that is too bulky to record is
   generated during the analysis, and is used at that time to create
   autoincrement and autodecrement addressing.

   The first step is dividing the function into basic blocks.
   find_basic_blocks does this.  Then life_analysis determines
   where each register is live and where it is dead.

   ** find_basic_blocks **

   find_basic_blocks divides the current function's rtl into basic
   blocks and constructs the CFG.  The blocks are recorded in the
   basic_block_info array; the CFG exists in the edge structures
   referenced by the blocks.

   find_basic_blocks also finds any unreachable loops and deletes them.

   ** life_analysis **

   life_analysis is called immediately after find_basic_blocks.
   It uses the basic block information to determine where each
   hard or pseudo register is live.

   ** live-register info **

   The information about where each register is live is in two parts:
   the REG_NOTES of insns, and the vector basic_block->global_live_at_start.

   basic_block->global_live_at_start has an element for each basic
   block, and the element is a bit-vector with a bit for each hard or
   pseudo register.  The bit is 1 if the register is live at the
   beginning of the basic block.
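
   For instance, whether register number 42 is live on entry to block BB
   can be tested (schematically; REGNO_REG_SET_P is the regset accessor
   declared in basic-block.h):

     if (REGNO_REG_SET_P (bb->global_live_at_start, 42))
       ... register 42 is live at the head of BB ...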

   Two types of elements can be added to an insn's REG_NOTES.
   A REG_DEAD note is added to an insn's REG_NOTES for any register
   that meets both of two conditions:  The value in the register is not
   needed in subsequent insns and the insn does not replace the value in
   the register (in the case of multi-word hard registers, the value in
   each register must be replaced by the insn to avoid a REG_DEAD note).

   In the vast majority of cases, an object in a REG_DEAD note will be
   used somewhere in the insn.  The (rare) exception to this is if an
   insn uses a multi-word hard register and only some of the registers are
   needed in subsequent insns.  In that case, REG_DEAD notes will be
   provided for those hard registers that are not subsequently needed.
   Partial REG_DEAD notes of this type do not occur when an insn sets
   only some of the hard registers used in such a multi-word operand;
   omitting REG_DEAD notes for objects stored in an insn is optional and
   the desire to do so does not justify the complexity of the partial
   REG_DEAD notes.
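
   As an illustration (with irrelevant fields of the insn omitted), if
   the value in pseudo 58 is used for the last time here:

     (insn 42 41 43 (set (reg:SI 60)
	     (plus:SI (reg:SI 58) (const_int 4)))
	  (expr_list:REG_DEAD (reg:SI 58) (nil)))

   the REG_DEAD note records that (reg:SI 58) dies in insn 42.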

   REG_UNUSED notes are added for each register that is set by the insn
   but is unused subsequently (if every register set by the insn is unused
   and the insn does not reference memory or have some other side-effect,
   the insn is deleted instead).  If only part of a multi-word hard
   register is used in a subsequent insn, REG_UNUSED notes are made for
   the parts that will not be used.
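
   Schematically, if an insn computes a value into pseudo 61 that no
   later insn reads, but the insn cannot be deleted (say because its
   source references volatile memory), its REG_NOTES carry

     (expr_list:REG_UNUSED (reg:SI 61) (nil))

   instead of the insn being removed.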

   To determine which registers are live after any insn, one can
   start from the beginning of the basic block and scan insns, noting
   which registers are set by each insn and which die there.
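
   Roughly, such a forward scan maintains a working regset LIVE:

     COPY_REG_SET (live, bb->global_live_at_start);
     for each insn I from bb->head to bb->end:
       mark in LIVE each register set by I, unless I carries a
	 REG_UNUSED note for it;
       clear in LIVE each register named in a REG_DEAD note of I;

   (propagate_block below does the analogous work, scanning backwards.)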

   ** Other actions of life_analysis **

   life_analysis sets up the LOG_LINKS fields of insns because the
   information needed to do so is readily available.

   life_analysis deletes insns whose only effect is to store a value
   that is never used.

   life_analysis notices cases where a reference to a register as
   a memory address can be combined with a preceding or following
   incrementation or decrementation of the register.  The separate
   instruction to increment or decrement is deleted and the address
   is changed to a POST_INC or similar rtx.

   Each time an incrementing or decrementing address is created,
   a REG_INC element is added to the insn's REG_NOTES list.
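
   For example (schematically, on a target with post-increment
   addressing), the two insns

     (set (mem:SI (reg:SI 3)) (reg:SI 10))
     (set (reg:SI 3) (plus:SI (reg:SI 3) (const_int 4)))

   can be combined into the single insn

     (set (mem:SI (post_inc:SI (reg:SI 3))) (reg:SI 10))

   which then carries a REG_INC note for (reg:SI 3).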

   life_analysis fills in certain vectors containing information about
   register usage: REG_N_REFS, REG_N_DEATHS, REG_N_SETS, REG_LIVE_LENGTH,
   REG_N_CALLS_CROSSED and REG_BASIC_BLOCK.

   life_analysis sets current_function_sp_is_unchanging if the function
   doesn't modify the stack pointer.  */

/* TODO:

   Split out from life_analysis:
	- local property discovery (bb->local_live, bb->local_set)
	- global property computation
	- log links creation
	- pre/post modify transformation  */

#include "config.h"
#include "system.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-config.h"
#include "regs.h"
#include "flags.h"
#include "output.h"
#include "function.h"
#include "except.h"
#include "toplev.h"
#include "recog.h"
#include "expr.h"
#include "ssa.h"

#include "obstack.h"
#include "splay-tree.h"

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free

/* EXIT_IGNORE_STACK should be nonzero if, when returning from a function,
   the stack pointer does not matter.  The value is tested only in
   functions that have frame pointers.
   No definition is equivalent to always zero.  */
#ifndef EXIT_IGNORE_STACK
#define EXIT_IGNORE_STACK 0
#endif

#ifndef HAVE_epilogue
#define HAVE_epilogue 0
#endif
#ifndef HAVE_prologue
#define HAVE_prologue 0
#endif
#ifndef HAVE_sibcall_epilogue
#define HAVE_sibcall_epilogue 0
#endif

#ifndef LOCAL_REGNO
#define LOCAL_REGNO(REGNO)  0
#endif
#ifndef EPILOGUE_USES
#define EPILOGUE_USES(REGNO)  0
#endif

#ifdef HAVE_conditional_execution
#ifndef REVERSE_CONDEXEC_PREDICATES_P
#define REVERSE_CONDEXEC_PREDICATES_P(x, y) ((x) == reverse_condition (y))
#endif
#endif

/* The obstack on which the flow graph components are allocated.  */

struct obstack flow_obstack;
static char *flow_firstobj;

/* Number of basic blocks in the current function.  */

int n_basic_blocks;

/* Number of edges in the current function.  */

int n_edges;

/* The basic block array.  */

varray_type basic_block_info;

/* The special entry and exit blocks.  */

struct basic_block_def entry_exit_blocks[2]
= {{NULL,			/* head */
    NULL,			/* end */
    NULL,			/* head_tree */
    NULL,			/* end_tree */
    NULL,			/* pred */
    NULL,			/* succ */
    NULL,			/* local_set */
    NULL,			/* cond_local_set */
    NULL,			/* global_live_at_start */
    NULL,			/* global_live_at_end */
    NULL,			/* aux */
    ENTRY_BLOCK,		/* index */
    0,				/* loop_depth */
    0,				/* count */
    0				/* frequency */
   },
   {
    NULL,			/* head */
    NULL,			/* end */
    NULL,			/* head_tree */
    NULL,			/* end_tree */
    NULL,			/* pred */
    NULL,			/* succ */
    NULL,			/* local_set */
    NULL,			/* cond_local_set */
    NULL,			/* global_live_at_start */
    NULL,			/* global_live_at_end */
    NULL,			/* aux */
    EXIT_BLOCK,			/* index */
    0,				/* loop_depth */
    0,				/* count */
    0				/* frequency */
   }
};

/* Nonzero if the second flow pass has completed.  */
int flow2_completed;

/* Maximum register number used in this function, plus one.  */

int max_regno;

/* Indexed by n, giving various register information */

varray_type reg_n_info;

/* Size of a regset for the current function,
   in (1) bytes and (2) elements.  */

int regset_bytes;
int regset_size;

/* Regset of regs live when calls to `setjmp'-like functions happen.  */
/* ??? Does this exist only for the setjmp-clobbered warning message?  */

regset regs_live_at_setjmp;

/* List made of EXPR_LIST rtx's which gives pairs of pseudo registers
   that have to go in the same hard reg.
   The first two regs in the list are a pair, and the next two
   are another pair, etc.  */
rtx regs_may_share;
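
/* Schematically, with pseudos A, B, C and D, the list

     (expr_list (reg A)
	(expr_list (reg B)
	   (expr_list (reg C)
	      (expr_list (reg D) (nil)))))

   pairs A with B and C with D.  */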

/* Callback that determines if it's ok for a function to have no
   noreturn attribute.  */
int (*lang_missing_noreturn_ok_p) PARAMS ((tree));

/* Set of registers that may be eliminable.  These are handled specially
   in updating regs_ever_live.  */

static HARD_REG_SET elim_reg_set;

/* The basic block structure for every insn, indexed by uid.  */

varray_type basic_block_for_insn;

/* The labels mentioned in non-jump rtl.  Valid during find_basic_blocks.  */
/* ??? Should probably be using LABEL_NUSES instead.  It would take a
   bit of surgery to be able to use or co-opt the routines in jump.  */

static rtx label_value_list;
static rtx tail_recursion_label_list;

/* Holds information for tracking conditional register life information.  */
struct reg_cond_life_info
{
  /* A boolean expression of conditions under which a register is dead.  */
  rtx condition;
  /* Conditions under which a register is dead at the basic block end.  */
  rtx orig_condition;

  /* A boolean expression of conditions under which a register has been
     stored into.  */
  rtx stores;

  /* ??? Could store mask of bytes that are dead, so that we could finally
     track lifetimes of multi-word registers accessed via subregs.  */
};

/* For use in communicating between propagate_block and its subroutines.
   Holds all information needed to compute life and def-use information.  */

struct propagate_block_info
{
  /* The basic block we're considering.  */
  basic_block bb;

  /* Bit N is set if register N is conditionally or unconditionally live.  */
  regset reg_live;

  /* Bit N is set if register N is set this insn.  */
  regset new_set;

  /* Element N is the next insn that uses (hard or pseudo) register N
     within the current basic block; or zero, if there is no such insn.  */
  rtx *reg_next_use;

  /* Contains a list of all the MEMs we are tracking for dead store
     elimination.  */
  rtx mem_set_list;

  /* If non-null, record the set of registers set unconditionally in the
     basic block.  */
  regset local_set;

  /* If non-null, record the set of registers set conditionally in the
     basic block.  */
  regset cond_local_set;

#ifdef HAVE_conditional_execution
  /* Indexed by register number, holds a reg_cond_life_info for each
     register that is not unconditionally live or dead.  */
  splay_tree reg_cond_dead;

  /* Bit N is set if register N is in an expression in reg_cond_dead.  */
  regset reg_cond_reg;
#endif

  /* The length of mem_set_list.  */
  int mem_set_list_len;

  /* Non-zero if the value of CC0 is live.  */
  int cc0_live;

  /* Flags controlling the set of information propagate_block collects.  */
  int flags;
};

/* Maximum length of pbi->mem_set_list before we start dropping
   new elements on the floor.  */
#define MAX_MEM_SET_LIST_LEN	100

/* Store the data structures necessary for depth-first search.  */
struct depth_first_search_dsS {
  /* stack for backtracking during the algorithm */
  basic_block *stack;

  /* number of edges in the stack.  That is, positions 0, ..., sp-1
     have edges.  */
  unsigned int sp;

  /* record of basic blocks already seen by depth-first search */
  sbitmap visited_blocks;
};
typedef struct depth_first_search_dsS *depth_first_search_ds;

/* Have print_rtl_and_abort give the same information that fancy_abort
   does.  */
#define print_rtl_and_abort() \
  print_rtl_and_abort_fcn (__FILE__, __LINE__, __FUNCTION__)

/* Forward declarations */
static bool try_crossjump_to_edge PARAMS ((int, edge, edge));
static bool try_crossjump_bb PARAMS ((int, basic_block));
static bool outgoing_edges_match PARAMS ((basic_block, basic_block));
static int flow_find_cross_jump PARAMS ((int, basic_block, basic_block,
					 rtx *, rtx *));
static int count_basic_blocks PARAMS ((rtx));
static void find_basic_blocks_1 PARAMS ((rtx));
static rtx find_label_refs PARAMS ((rtx, rtx));
static void make_edges PARAMS ((rtx));
static void make_label_edge PARAMS ((sbitmap *, basic_block,
				     rtx, int));
static void make_eh_edge PARAMS ((sbitmap *, basic_block, rtx));

static void commit_one_edge_insertion PARAMS ((edge));

static void delete_unreachable_blocks PARAMS ((void));
static int can_delete_note_p PARAMS ((rtx));
static void expunge_block PARAMS ((basic_block));
static int can_delete_label_p PARAMS ((rtx));
static int tail_recursion_label_p PARAMS ((rtx));
static int merge_blocks_move_predecessor_nojumps PARAMS ((basic_block,
							  basic_block));
static int merge_blocks_move_successor_nojumps PARAMS ((basic_block,
							basic_block));
static int merge_blocks PARAMS ((edge, basic_block, basic_block,
				 int));
static bool try_optimize_cfg PARAMS ((int));
static bool forwarder_block_p PARAMS ((basic_block));
static bool can_fallthru PARAMS ((basic_block, basic_block));
static bool try_redirect_by_replacing_jump PARAMS ((edge, basic_block));
static bool try_simplify_condjump PARAMS ((basic_block));
static bool try_forward_edges PARAMS ((basic_block));
static void tidy_fallthru_edges PARAMS ((void));
static int verify_wide_reg_1 PARAMS ((rtx *, void *));
static void verify_wide_reg PARAMS ((int, rtx, rtx));
static void verify_local_live_at_start PARAMS ((regset, basic_block));
static int noop_move_p PARAMS ((rtx));
static void delete_noop_moves PARAMS ((rtx));
static void notice_stack_pointer_modification_1 PARAMS ((rtx, rtx, void *));
static void notice_stack_pointer_modification PARAMS ((rtx));
static void mark_reg PARAMS ((rtx, void *));
static void mark_regs_live_at_end PARAMS ((regset));
static int set_phi_alternative_reg PARAMS ((rtx, int, int, void *));
static void calculate_global_regs_live PARAMS ((sbitmap, sbitmap, int));
static void propagate_block_delete_insn PARAMS ((basic_block, rtx));
static rtx propagate_block_delete_libcall PARAMS ((basic_block, rtx, rtx));
static int insn_dead_p PARAMS ((struct propagate_block_info *,
				rtx, int, rtx));
static int libcall_dead_p PARAMS ((struct propagate_block_info *,
				   rtx, rtx));
static void mark_set_regs PARAMS ((struct propagate_block_info *,
				   rtx, rtx));
static void mark_set_1 PARAMS ((struct propagate_block_info *,
				enum rtx_code, rtx, rtx,
				rtx, int));
#ifdef HAVE_conditional_execution
static int mark_regno_cond_dead PARAMS ((struct propagate_block_info *,
					 int, rtx));
static void free_reg_cond_life_info PARAMS ((splay_tree_value));
static int flush_reg_cond_reg_1 PARAMS ((splay_tree_node, void *));
static void flush_reg_cond_reg PARAMS ((struct propagate_block_info *,
					int));
static rtx elim_reg_cond PARAMS ((rtx, unsigned int));
static rtx ior_reg_cond PARAMS ((rtx, rtx, int));
static rtx not_reg_cond PARAMS ((rtx));
static rtx and_reg_cond PARAMS ((rtx, rtx, int));
#endif
#ifdef AUTO_INC_DEC
static void attempt_auto_inc PARAMS ((struct propagate_block_info *,
				      rtx, rtx, rtx, rtx, rtx));
static void find_auto_inc PARAMS ((struct propagate_block_info *,
				   rtx, rtx));
static int try_pre_increment_1 PARAMS ((struct propagate_block_info *,
					rtx));
static int try_pre_increment PARAMS ((rtx, rtx, HOST_WIDE_INT));
#endif
static void mark_used_reg PARAMS ((struct propagate_block_info *,
				   rtx, rtx, rtx));
static void mark_used_regs PARAMS ((struct propagate_block_info *,
				    rtx, rtx, rtx));
void dump_flow_info PARAMS ((FILE *));
void debug_flow_info PARAMS ((void));
static void print_rtl_and_abort_fcn PARAMS ((const char *, int,
					     const char *))
					ATTRIBUTE_NORETURN;

static void invalidate_mems_from_autoinc PARAMS ((struct propagate_block_info *,
						  rtx));
static void invalidate_mems_from_set PARAMS ((struct propagate_block_info *,
					      rtx));
static void remove_fake_successors PARAMS ((basic_block));
static void flow_nodes_print PARAMS ((const char *, const sbitmap,
				      FILE *));
static void flow_edge_list_print PARAMS ((const char *, const edge *,
					  int, FILE *));
static void flow_loops_cfg_dump PARAMS ((const struct loops *,
					 FILE *));
static int flow_loop_nested_p PARAMS ((struct loop *,
				       struct loop *));
static int flow_loop_entry_edges_find PARAMS ((basic_block, const sbitmap,
					       edge **));
static int flow_loop_exit_edges_find PARAMS ((const sbitmap, edge **));
static int flow_loop_nodes_find PARAMS ((basic_block, basic_block, sbitmap));
static void flow_dfs_compute_reverse_init
  PARAMS ((depth_first_search_ds));
static void flow_dfs_compute_reverse_add_bb
  PARAMS ((depth_first_search_ds, basic_block));
static basic_block flow_dfs_compute_reverse_execute
  PARAMS ((depth_first_search_ds));
static void flow_dfs_compute_reverse_finish
  PARAMS ((depth_first_search_ds));
static void flow_loop_pre_header_scan PARAMS ((struct loop *));
static basic_block flow_loop_pre_header_find PARAMS ((basic_block,
						      const sbitmap *));
static void flow_loop_tree_node_add PARAMS ((struct loop *, struct loop *));
static void flow_loops_tree_build PARAMS ((struct loops *));
static int flow_loop_level_compute PARAMS ((struct loop *, int));
static int flow_loops_level_compute PARAMS ((struct loops *));
static void allocate_bb_life_data PARAMS ((void));
static void find_sub_basic_blocks PARAMS ((basic_block));
static bool redirect_edge_and_branch PARAMS ((edge, basic_block));
static basic_block redirect_edge_and_branch_force PARAMS ((edge, basic_block));
static rtx block_label PARAMS ((basic_block));

/* Find basic blocks of the current function.
   F is the first insn of the function and NREGS the number of register
   numbers in use.  */

void
find_basic_blocks (f, nregs, file)
     rtx f;
     int nregs ATTRIBUTE_UNUSED;
     FILE *file ATTRIBUTE_UNUSED;
{
  int max_uid;

  /* Flush out existing data.  */
  if (basic_block_info != NULL)
    {
      int i;

      clear_edges ();

      /* Clear bb->aux on all extant basic blocks.  We'll use this as a
	 tag for reuse during create_basic_block, just in case some pass
	 copies around basic block notes improperly.  */
      for (i = 0; i < n_basic_blocks; ++i)
	BASIC_BLOCK (i)->aux = NULL;

      VARRAY_FREE (basic_block_info);
    }

  n_basic_blocks = count_basic_blocks (f);

  /* Size the basic block table.  The actual structures will be allocated
     by find_basic_blocks_1, since we want to keep the structure pointers
     stable across calls to find_basic_blocks.  */
  /* ??? This whole issue would be much simpler if we called find_basic_blocks
     exactly once, and thereafter we don't have a single long chain of
     instructions at all until close to the end of compilation when we
     actually lay them out.  */

  VARRAY_BB_INIT (basic_block_info, n_basic_blocks, "basic_block_info");

  find_basic_blocks_1 (f);

  /* Record the block to which an insn belongs.  */
  /* ??? This should be done another way, by which (perhaps) a label is
     tagged directly with the basic block that it starts.  It is used for
     more than that currently, but IMO that is the only valid use.  */

  max_uid = get_max_uid ();
#ifdef AUTO_INC_DEC
  /* Leave space for insns life_analysis makes in some cases for auto-inc.
     These cases are rare, so we don't need too much space.  */
  max_uid += max_uid / 10;
#endif

  compute_bb_for_insn (max_uid);

  /* Discover the edges of our cfg.  */
  make_edges (label_value_list);

  /* Do very simple cleanup now, for the benefit of code that runs between
     here and cleanup_cfg, e.g. thread_prologue_and_epilogue_insns.  */
  tidy_fallthru_edges ();

  mark_critical_edges ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
}

void
check_function_return_warnings ()
{
  if (warn_missing_noreturn
      && !TREE_THIS_VOLATILE (cfun->decl)
      && EXIT_BLOCK_PTR->pred == NULL
      && (lang_missing_noreturn_ok_p
	  && !lang_missing_noreturn_ok_p (cfun->decl)))
    warning ("function might be possible candidate for attribute `noreturn'");

  /* If we have a path to EXIT, then we do return.  */
  if (TREE_THIS_VOLATILE (cfun->decl)
      && EXIT_BLOCK_PTR->pred != NULL)
    warning ("`noreturn' function does return");

  /* If the clobber_return_insn appears in some basic block, then we
     do reach the end without returning a value.  */
  else if (warn_return_type
	   && cfun->x_clobber_return_insn != NULL
	   && EXIT_BLOCK_PTR->pred != NULL)
    {
      int max_uid = get_max_uid ();

      /* If clobber_return_insn was excised by jump1, then renumber_insns
	 can make max_uid smaller than the number still recorded in our rtx.
	 That's fine, since this is a quick way of verifying that the insn
	 is no longer in the chain.  */
      if (INSN_UID (cfun->x_clobber_return_insn) < max_uid)
	{
	  /* Recompute insn->block mapping, since the initial mapping is
	     set before we delete unreachable blocks.  */
	  compute_bb_for_insn (max_uid);

	  if (BLOCK_FOR_INSN (cfun->x_clobber_return_insn) != NULL)
	    warning ("control reaches end of non-void function");
	}
    }
}

/* Count the basic blocks of the function.  */

static int
count_basic_blocks (f)
     rtx f;
{
  register rtx insn;
  register RTX_CODE prev_code;
  register int count = 0;
  int saw_abnormal_edge = 0;

  prev_code = JUMP_INSN;
  for (insn = f; insn; insn = NEXT_INSN (insn))
    {
      enum rtx_code code = GET_CODE (insn);

      if (code == CODE_LABEL
	  || (GET_RTX_CLASS (code) == 'i'
	      && (prev_code == JUMP_INSN
		  || prev_code == BARRIER
		  || saw_abnormal_edge)))
	{
	  saw_abnormal_edge = 0;
	  count++;
	}

      /* Record whether this insn created an edge.  */
      if (code == CALL_INSN)
	{
	  rtx note;

	  /* If there is a nonlocal goto label and the specified
	     region number isn't -1, we have an edge.  */
	  if (nonlocal_goto_handler_labels
	      && ((note = find_reg_note (insn, REG_EH_REGION, NULL_RTX)) == 0
		  || INTVAL (XEXP (note, 0)) >= 0))
	    saw_abnormal_edge = 1;

	  else if (can_throw_internal (insn))
	    saw_abnormal_edge = 1;
	}
      else if (flag_non_call_exceptions
	       && code == INSN
	       && can_throw_internal (insn))
	saw_abnormal_edge = 1;

      if (code != NOTE)
	prev_code = code;
    }

  /* The rest of the compiler works a bit smoother when we don't have to
     check for the edge case of do-nothing functions with no basic blocks.  */
  if (count == 0)
    {
      emit_insn (gen_rtx_USE (VOIDmode, const0_rtx));
      count = 1;
    }

  return count;
}

/* Scan a list of insns for labels referred to other than by jumps.
   This is used to scan the alternatives of a call placeholder.  */
static rtx
find_label_refs (f, lvl)
     rtx f;
     rtx lvl;
{
  rtx insn;

  for (insn = f; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && GET_CODE (insn) != JUMP_INSN)
      {
	rtx note;

	/* Make a list of all labels referred to other than by jumps
	   (which just don't have the REG_LABEL notes).

	   Make a special exception for labels followed by an ADDR*VEC,
	   as this would be a part of the tablejump setup code.

	   Make a special exception for registers loaded with label
	   values just before jump insns that use them.  */

	for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	  if (REG_NOTE_KIND (note) == REG_LABEL)
	    {
	      rtx lab = XEXP (note, 0), next;

	      if ((next = next_nonnote_insn (lab)) != NULL
		  && GET_CODE (next) == JUMP_INSN
		  && (GET_CODE (PATTERN (next)) == ADDR_VEC
		      || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
		;
	      else if (GET_CODE (lab) == NOTE)
		;
	      else if (GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
		       && find_reg_note (NEXT_INSN (insn), REG_LABEL, lab))
		;
	      else
		lvl = alloc_EXPR_LIST (0, XEXP (note, 0), lvl);
	    }
      }

  return lvl;
}

/* Assume that someone emitted code with control flow instructions to the
   basic block.  Update the data structure.  */
static void
find_sub_basic_blocks (bb)
     basic_block bb;
{
  rtx first_insn = bb->head, insn;
  rtx end = bb->end;
  edge succ_list = bb->succ;
  rtx jump_insn = NULL_RTX;
  int created = 0;
  int barrier = 0;
  edge falltru = 0;
  basic_block first_bb = bb, last_bb;
  int i;

  if (GET_CODE (first_insn) == CODE_LABEL)
    first_insn = NEXT_INSN (first_insn);
  first_insn = NEXT_INSN (first_insn);
  bb->succ = NULL;

  insn = first_insn;
  /* Scan insn chain and try to find new basic block boundaries.  */
  while (insn != end)
    {
      enum rtx_code code = GET_CODE (insn);
      switch (code)
	{
	case JUMP_INSN:
	  /* We need some special care for those expressions.  */
	  if (GET_CODE (PATTERN (insn)) == ADDR_VEC
	      || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
	    abort ();
	  jump_insn = insn;
	  break;
	case BARRIER:
	  if (!jump_insn)
	    abort ();
	  barrier = 1;
	  break;
	/* On code label, split current basic block.  */
	case CODE_LABEL:
	  falltru = split_block (bb, PREV_INSN (insn));
	  if (jump_insn)
	    bb->end = jump_insn;
	  bb = falltru->dest;
	  if (barrier)
	    remove_edge (falltru);
	  barrier = 0;
	  jump_insn = 0;
	  created = 1;
	  if (LABEL_ALTERNATE_NAME (insn))
	    make_edge (NULL, ENTRY_BLOCK_PTR, bb, 0);
	  break;
	case INSN:
	  /* In case we've previously split insn on the JUMP_INSN, move the
	     block header to proper place.  */
	  if (jump_insn)
	    {
	      falltru = split_block (bb, PREV_INSN (insn));
	      bb->end = jump_insn;
	      bb = falltru->dest;
	      if (barrier)
		abort ();
	      jump_insn = 0;
	    }
	default:
	  break;
	}
      insn = NEXT_INSN (insn);
    }

  /* Last basic block must end in the original BB end.  */
  if (jump_insn)
    abort ();

  /* Wire in the original edges for last basic block.  */
  if (created)
    {
      bb->succ = succ_list;
      while (succ_list)
	succ_list->src = bb, succ_list = succ_list->succ_next;
    }
  else
    bb->succ = succ_list;

  /* Now re-scan and wire in all edges.  This expects simple (conditional)
     jumps at the end of each new basic block.  */
  last_bb = bb;
  for (i = first_bb->index; i < last_bb->index; i++)
    {
      bb = BASIC_BLOCK (i);
      if (GET_CODE (bb->end) == JUMP_INSN)
	{
	  mark_jump_label (PATTERN (bb->end), bb->end, 0, 0);
	  make_label_edge (NULL, bb, JUMP_LABEL (bb->end), 0);
	}
      insn = NEXT_INSN (insn);
    }
}

/* Find all basic blocks of the function whose first insn is F.

   Collect and return a list of labels whose addresses are taken.  This
   will be used in make_edges for use with computed gotos.  */

static void
find_basic_blocks_1 (f)
     rtx f;
{
  register rtx insn, next;
  int i = 0;
  rtx bb_note = NULL_RTX;
  rtx lvl = NULL_RTX;
  rtx trll = NULL_RTX;
  rtx head = NULL_RTX;
  rtx end = NULL_RTX;

  /* We process the instructions in a slightly different way than we did
     previously.  This is so that we see a NOTE_BASIC_BLOCK after we have
     closed out the previous block, so that it gets attached at the proper
     place.  Since this form should be equivalent to the previous,
     count_basic_blocks continues to use the old form as a check.  */

  for (insn = f; insn; insn = next)
    {
      enum rtx_code code = GET_CODE (insn);

      next = NEXT_INSN (insn);

      switch (code)
	{
	case NOTE:
	  {
	    int kind = NOTE_LINE_NUMBER (insn);

	    /* Look for basic block notes with which to keep the
	       basic_block_info pointers stable.  Unthread the note now;
	       we'll put it back at the right place in create_basic_block.
	       Or not at all if we've already found a note in this block.  */
	    if (kind == NOTE_INSN_BASIC_BLOCK)
	      {
		if (bb_note == NULL_RTX)
		  bb_note = insn;
		else
		  next = flow_delete_insn (insn);
	      }
	    break;
	  }

	case CODE_LABEL:
	  /* A basic block starts at a label.  If we've closed one off due
	     to a barrier or some such, no need to do it again.  */
	  if (head != NULL_RTX)
	    {
	      create_basic_block (i++, head, end, bb_note);
	      bb_note = NULL_RTX;
	    }

	  head = end = insn;
	  break;

	case JUMP_INSN:
	  /* A basic block ends at a jump.  */
	  if (head == NULL_RTX)
	    head = insn;
	  else
	    {
	      /* ??? Make a special check for table jumps.  The way this
		 happens is truly and amazingly gross.  We are about to
		 create a basic block that contains just a code label and
		 an addr*vec jump insn.  Worse, an addr_diff_vec creates
		 its own natural loop.

		 Prevent this bit of brain damage, pasting things together
		 correctly in make_edges.

		 The correct solution involves emitting the table directly
		 on the tablejump instruction as a note, or JUMP_LABEL.  */

	      if (GET_CODE (PATTERN (insn)) == ADDR_VEC
		  || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
		{
		  head = end = NULL;
		  n_basic_blocks--;
		  break;
		}
	    }
	  end = insn;
	  goto new_bb_inclusive;

	case BARRIER:
	  /* A basic block ends at a barrier.  It may be that an unconditional
	     jump already closed the basic block -- no need to do it again.  */
	  if (head == NULL_RTX)
	    break;
	  goto new_bb_exclusive;

	case CALL_INSN:
	  {
	    /* Record whether this call created an edge.  */
	    rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
	    int region = (note ? INTVAL (XEXP (note, 0)) : 0);

	    if (GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
	      {
		/* Scan each of the alternatives for label refs.  */
		lvl = find_label_refs (XEXP (PATTERN (insn), 0), lvl);
		lvl = find_label_refs (XEXP (PATTERN (insn), 1), lvl);
		lvl = find_label_refs (XEXP (PATTERN (insn), 2), lvl);
		/* Record its tail recursion label, if any.  */
		if (XEXP (PATTERN (insn), 3) != NULL_RTX)
		  trll = alloc_EXPR_LIST (0, XEXP (PATTERN (insn), 3), trll);
	      }

	    /* A basic block ends at a call that can either throw or
	       do a non-local goto.  */
	    if ((nonlocal_goto_handler_labels && region >= 0)
		|| can_throw_internal (insn))
	      {
	      new_bb_inclusive:
		if (head == NULL_RTX)
		  head = insn;
		end = insn;

	      new_bb_exclusive:
		create_basic_block (i++, head, end, bb_note);
		head = end = NULL_RTX;
		bb_note = NULL_RTX;
		break;
	      }
	  }
	  /* Fall through.  */

	case INSN:
	  /* Non-call exceptions generate new blocks just like calls.  */
	  if (flag_non_call_exceptions && can_throw_internal (insn))
	    goto new_bb_inclusive;

	  if (head == NULL_RTX)
	    head = insn;
	  end = insn;
	  break;

	default:
	  abort ();
	}

      if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
	{
	  rtx note;

	  /* Make a list of all labels referred to other than by jumps.

	     Make a special exception for labels followed by an ADDR*VEC,
	     as this would be a part of the tablejump setup code.

	     Make a special exception for registers loaded with label
	     values just before jump insns that use them.  */

	  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	    if (REG_NOTE_KIND (note) == REG_LABEL)
	      {
		rtx lab = XEXP (note, 0), next;

		if ((next = next_nonnote_insn (lab)) != NULL
		    && GET_CODE (next) == JUMP_INSN
		    && (GET_CODE (PATTERN (next)) == ADDR_VEC
			|| GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
		  ;
		else if (GET_CODE (lab) == NOTE)
		  ;
		else if (GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
			 && find_reg_note (NEXT_INSN (insn), REG_LABEL, lab))
		  ;
		else
		  lvl = alloc_EXPR_LIST (0, XEXP (note, 0), lvl);
	      }
	}
    }

  if (head != NULL_RTX)
    create_basic_block (i++, head, end, bb_note);
  else if (bb_note)
    flow_delete_insn (bb_note);

  if (i != n_basic_blocks)
    abort ();

  label_value_list = lvl;
  tail_recursion_label_list = trll;
}

/* Tidy the CFG by deleting unreachable code and whatnot.  */

void
cleanup_cfg (mode)
     int mode;
{
  delete_unreachable_blocks ();
  if (try_optimize_cfg (mode))
    delete_unreachable_blocks ();
  mark_critical_edges ();

  /* Kill the data we won't maintain.  */
  free_EXPR_LIST_list (&label_value_list);
  free_EXPR_LIST_list (&tail_recursion_label_list);
}

/* Create a new basic block consisting of the instructions between
   HEAD and END inclusive.  Reuses the note and basic block struct
   in BB_NOTE, if any.  */

void
create_basic_block (index, head, end, bb_note)
     int index;
     rtx head, end, bb_note;
{
  basic_block bb;

  if (bb_note
      && ! RTX_INTEGRATED_P (bb_note)
      && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
      && bb->aux == NULL)
    {
      /* If we found an existing note, thread it back onto the chain.  */

      rtx after;

      if (GET_CODE (head) == CODE_LABEL)
	after = head;
      else
	{
	  after = PREV_INSN (head);
	  head = bb_note;
	}

      if (after != bb_note && NEXT_INSN (after) != bb_note)
	reorder_insns (bb_note, bb_note, after);
    }
  else
    {
      /* Otherwise we must create a note and a basic block structure.
	 Since we allow basic block structs in rtl, give the struct
	 the same lifetime by allocating it off the function obstack
	 rather than using malloc.  */

      bb = (basic_block) obstack_alloc (&flow_obstack, sizeof (*bb));
      memset (bb, 0, sizeof (*bb));

      if (GET_CODE (head) == CODE_LABEL)
	bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
      else
	{
	  bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
	  head = bb_note;
	}
      NOTE_BASIC_BLOCK (bb_note) = bb;
    }

  /* Always include the bb note in the block.  */
  if (NEXT_INSN (end) == bb_note)
    end = bb_note;

  bb->head = head;
  bb->end = end;
  bb->index = index;
  BASIC_BLOCK (index) = bb;

  /* Tag the block so that we know it has been used when considering
     other basic block notes.  */
  bb->aux = bb;
}

/* Return the INSN immediately following the NOTE_INSN_BASIC_BLOCK
   note associated with the BLOCK.  */

rtx
first_insn_after_basic_block_note (block)
     basic_block block;
{
  rtx insn;

  /* Get the first instruction in the block.  */
  insn = block->head;

  if (insn == NULL_RTX)
    return NULL_RTX;
  if (GET_CODE (insn) == CODE_LABEL)
    insn = NEXT_INSN (insn);
  if (!NOTE_INSN_BASIC_BLOCK_P (insn))
    abort ();

  return NEXT_INSN (insn);
}

/* Records the basic block struct in BB_FOR_INSN, for every instruction
   indexed by INSN_UID.  MAX is the size of the array.  */

void
compute_bb_for_insn (max)
     int max;
{
  int i;

  if (basic_block_for_insn)
    VARRAY_FREE (basic_block_for_insn);
  VARRAY_BB_INIT (basic_block_for_insn, max, "basic_block_for_insn");

  for (i = 0; i < n_basic_blocks; ++i)
    {
      basic_block bb = BASIC_BLOCK (i);
      rtx insn, end;

      end = bb->end;
      insn = bb->head;
      while (1)
	{
	  int uid = INSN_UID (insn);
	  if (uid < max)
	    VARRAY_BB (basic_block_for_insn, uid) = bb;
	  if (insn == end)
	    break;
	  insn = NEXT_INSN (insn);
	}
    }
}

/* Free the memory associated with the edge structures.  */

void
clear_edges ()
{
  int i;
  edge n, e;

  for (i = 0; i < n_basic_blocks; ++i)
    {
      basic_block bb = BASIC_BLOCK (i);

      for (e = bb->succ; e; e = n)
	{
	  n = e->succ_next;
	  free (e);
	}

      bb->succ = 0;
      bb->pred = 0;
    }

  for (e = ENTRY_BLOCK_PTR->succ; e; e = n)
    {
      n = e->succ_next;
      free (e);
    }

  ENTRY_BLOCK_PTR->succ = 0;
  EXIT_BLOCK_PTR->pred = 0;

  n_edges = 0;
}

/* Identify the edges between basic blocks.

   NONLOCAL_LABEL_LIST is a list of non-local labels in the function.  Blocks
   that are otherwise unreachable may be reachable with a non-local goto.

   BB_EH_END is an array indexed by basic block number in which we record
   the list of exception regions active at the end of the basic block.  */

static void
make_edges (label_value_list)
     rtx label_value_list;
{
  int i;
  sbitmap *edge_cache = NULL;

  /* Assume no computed jump; revise as we create edges.  */
  current_function_has_computed_jump = 0;

  /* Heavy use of computed goto in machine-generated code can lead to
     nearly fully-connected CFGs.  In that case we spend a significant
     amount of time searching the edge lists for duplicates.  */
  if (forced_labels || label_value_list)
    {
      edge_cache = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
      sbitmap_vector_zero (edge_cache, n_basic_blocks);
    }

  /* By nature of the way these get numbered, block 0 is always the entry.  */
  make_edge (edge_cache, ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU);

  for (i = 0; i < n_basic_blocks; ++i)
    {
      basic_block bb = BASIC_BLOCK (i);
      rtx insn, x;
      enum rtx_code code;
      int force_fallthru = 0;

      if (GET_CODE (bb->head) == CODE_LABEL
	  && LABEL_ALTERNATE_NAME (bb->head))
	make_edge (NULL, ENTRY_BLOCK_PTR, bb, 0);

      /* Examine the last instruction of the block, and discover the
	 ways we can leave the block.  */

      insn = bb->end;
      code = GET_CODE (insn);

      /* A branch.  */
      if (code == JUMP_INSN)
	{
	  rtx tmp;

	  /* Recognize exception handling placeholders.  */
	  if (GET_CODE (PATTERN (insn)) == RESX)
	    make_eh_edge (edge_cache, bb, insn);

	  /* Recognize a non-local goto as a branch outside the
	     current function.  */
	  else if (find_reg_note (insn, REG_NON_LOCAL_GOTO, NULL_RTX))
	    ;

	  /* ??? Recognize a tablejump and do the right thing.  */
	  else if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
		   && (tmp = NEXT_INSN (tmp)) != NULL_RTX
		   && GET_CODE (tmp) == JUMP_INSN
		   && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
		       || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
	    {
	      rtvec vec;
	      int j;

	      if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
		vec = XVEC (PATTERN (tmp), 0);
	      else
		vec = XVEC (PATTERN (tmp), 1);

	      for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
		make_label_edge (edge_cache, bb,
				 XEXP (RTVEC_ELT (vec, j), 0), 0);

	      /* Some targets (eg, ARM) emit a conditional jump that also
		 contains the out-of-range target.  Scan for these and
		 add an edge if necessary.  */
	      if ((tmp = single_set (insn)) != NULL
		  && SET_DEST (tmp) == pc_rtx
		  && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
		  && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
		make_label_edge (edge_cache, bb,
				 XEXP (XEXP (SET_SRC (tmp), 2), 0), 0);

#ifdef CASE_DROPS_THROUGH
	      /* Silly VAXen.  The ADDR_VEC is going to be in the way of
		 us naturally detecting fallthru into the next block.  */
	      force_fallthru = 1;
#endif
	    }

	  /* If this is a computed jump, then mark it as reaching
	     everything on the label_value_list and forced_labels list.  */
	  else if (computed_jump_p (insn))
	    {
	      current_function_has_computed_jump = 1;

	      for (x = label_value_list; x; x = XEXP (x, 1))
		make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);

	      for (x = forced_labels; x; x = XEXP (x, 1))
		make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
	    }

	  /* A return insn creates an edge to the exit block.  */
	  else if (returnjump_p (insn))
	    make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);

	  /* Otherwise, we have a plain conditional or unconditional jump.  */
	  else
	    {
	      if (! JUMP_LABEL (insn))
		abort ();
	      make_label_edge (edge_cache, bb, JUMP_LABEL (insn), 0);
	    }
	}

      /* If this is a sibling call insn, then this is in effect a
	 combined call and return, and so we need an edge to the
	 exit block.  No need to worry about EH edges, since we
	 wouldn't have created the sibling call in the first place.  */

      if (code == CALL_INSN && SIBLING_CALL_P (insn))
	make_edge (edge_cache, bb, EXIT_BLOCK_PTR,
		   EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);

      /* If this is a CALL_INSN, then mark it as reaching the active EH
	 handler for this CALL_INSN.  If we're handling non-call
	 exceptions then any insn can reach any of the active handlers.

	 Also mark the CALL_INSN as reaching any nonlocal goto handler.  */

      else if (code == CALL_INSN || flag_non_call_exceptions)
	{
	  /* Add any appropriate EH edges.  */
	  make_eh_edge (edge_cache, bb, insn);

	  if (code == CALL_INSN && nonlocal_goto_handler_labels)
	    {
	      /* ??? This could be made smarter: in some cases it's possible
		 to tell that certain calls will not do a nonlocal goto.

		 For example, if the nested functions that do the nonlocal
		 gotos do not have their addresses taken, then only calls to
		 those functions or to other nested functions that use them
		 could possibly do nonlocal gotos.  */
	      /* We do know that a REG_EH_REGION note with a value less
		 than 0 is guaranteed not to perform a non-local goto.  */
	      rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
	      if (!note || INTVAL (XEXP (note, 0)) >= 0)
		for (x = nonlocal_goto_handler_labels; x; x = XEXP (x, 1))
		  make_label_edge (edge_cache, bb, XEXP (x, 0),
				   EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
	    }
	}

      /* Find out if we can drop through to the next block.  */
      insn = next_nonnote_insn (insn);
      if (!insn || (i + 1 == n_basic_blocks && force_fallthru))
	make_edge (edge_cache, bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
      else if (i + 1 < n_basic_blocks)
	{
	  rtx tmp = BLOCK_HEAD (i + 1);
	  if (GET_CODE (tmp) == NOTE)
	    tmp = next_nonnote_insn (tmp);
	  if (force_fallthru || insn == tmp)
	    make_edge (edge_cache, bb, BASIC_BLOCK (i + 1), EDGE_FALLTHRU);
	}
    }

  if (edge_cache)
    sbitmap_vector_free (edge_cache);
}

/* Create an edge between two basic blocks.  FLAGS are auxiliary information
   about the edge that is accumulated between calls.  */

void
make_edge (edge_cache, src, dst, flags)
     sbitmap *edge_cache;
     basic_block src, dst;
     int flags;
{
  int use_edge_cache;
  edge e;

  /* Don't bother with edge cache for ENTRY or EXIT; there aren't that
     many edges to them, and we didn't allocate memory for it.  */
  use_edge_cache = (edge_cache
		    && src != ENTRY_BLOCK_PTR
		    && dst != EXIT_BLOCK_PTR);

  /* Make sure we don't add duplicate edges.  */
  switch (use_edge_cache)
    {
    default:
      /* Quick test for non-existence of the edge.  */
      if (! TEST_BIT (edge_cache[src->index], dst->index))
	break;

      /* The edge exists; early exit if no work to do.  */
      if (flags == 0)
	return;

      /* FALLTHRU */
    case 0:
      for (e = src->succ; e; e = e->succ_next)
	if (e->dest == dst)
	  {
	    e->flags |= flags;
	    return;
	  }
      break;
    }

  e = (edge) xcalloc (1, sizeof (*e));
  n_edges++;

  e->succ_next = src->succ;
  e->pred_next = dst->pred;
  e->src = src;
  e->dest = dst;
  e->flags = flags;

  src->succ = e;
  dst->pred = e;

  if (use_edge_cache)
    SET_BIT (edge_cache[src->index], dst->index);
}

/* Create an edge from a basic block to a label.  */

static void
make_label_edge (edge_cache, src, label, flags)
     sbitmap *edge_cache;
     basic_block src;
     rtx label;
     int flags;
{
  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If the label was never emitted, this insn is junk, but avoid a
     crash trying to refer to BLOCK_FOR_INSN (label).  This can happen
     as a result of a syntax error and a diagnostic has already been
     printed.  */

  if (INSN_UID (label) == 0)
    return;

  make_edge (edge_cache, src, BLOCK_FOR_INSN (label), flags);
}

/* Create the edges generated by INSN in REGION.  */

static void
make_eh_edge (edge_cache, src, insn)
     sbitmap *edge_cache;
     basic_block src;
     rtx insn;
{
  int is_call = (GET_CODE (insn) == CALL_INSN ? EDGE_ABNORMAL_CALL : 0);
  rtx handlers, i;

  handlers = reachable_handlers (insn);

  for (i = handlers; i; i = XEXP (i, 1))
    make_label_edge (edge_cache, src, XEXP (i, 0),
		     EDGE_ABNORMAL | EDGE_EH | is_call);

  free_INSN_LIST_list (&handlers);
}

/* Identify critical edges and set the bits appropriately.  */
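
/* Schematically, in the diamond

	A
       / \
      B   C
       \ /
	D

   no edge is critical; but if A also branched directly to D, that edge
   A->D would be critical, since A has multiple successors and D has
   multiple predecessors.  Instructions cannot be inserted on such an
   edge without first splitting it.  */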

void
mark_critical_edges ()
{
  int i, n = n_basic_blocks;
  basic_block bb;

  /* We begin with the entry block.  This is not terribly important now,
     but could be if a front end (Fortran) implemented alternate entry
     points.  */
  bb = ENTRY_BLOCK_PTR;
  i = -1;

  while (1)
    {
      edge e;

      /* (1) Critical edges must have a source with multiple successors.  */
      if (bb->succ && bb->succ->succ_next)
	{
	  for (e = bb->succ; e; e = e->succ_next)
	    {
	      /* (2) Critical edges must have a destination with multiple
		 predecessors.  Note that we know there is at least one
		 predecessor -- the edge we followed to get here.  */
	      if (e->dest->pred->pred_next)
		e->flags |= EDGE_CRITICAL;
	      else
		e->flags &= ~EDGE_CRITICAL;
	    }
	}
      else
	{
	  for (e = bb->succ; e; e = e->succ_next)
	    e->flags &= ~EDGE_CRITICAL;
	}

      if (++i >= n)
	break;
      bb = BASIC_BLOCK (i);
    }
}

/* Split a block BB after insn INSN creating a new fallthru edge.
   Return the new edge.  Note that to keep other parts of the compiler happy,
   this function renumbers all the basic blocks so that the new
   one has a number one greater than the block split.  */
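
/* For instance (schematically), splitting block 3 of a six-block
   function grows basic_block_info by one, makes the new block number 4,
   and renumbers the old blocks 4 and 5 to 5 and 6.  */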

edge
split_block (bb, insn)
     basic_block bb;
     rtx insn;
{
  basic_block new_bb;
  edge new_edge;
  edge e;
  rtx bb_note;
  int i, j;

  /* There is no point splitting the block after its end.  */
  if (bb->end == insn)
    return 0;

  /* Create the new structures.  */
  new_bb = (basic_block) obstack_alloc (&flow_obstack, sizeof (*new_bb));
  new_edge = (edge) xcalloc (1, sizeof (*new_edge));
  n_edges++;

  memset (new_bb, 0, sizeof (*new_bb));

  new_bb->head = NEXT_INSN (insn);
  new_bb->end = bb->end;
  bb->end = insn;

  new_bb->succ = bb->succ;
  bb->succ = new_edge;
  new_bb->pred = new_edge;
  new_bb->count = bb->count;
  new_bb->frequency = bb->frequency;
  new_bb->loop_depth = bb->loop_depth;

  new_edge->src = bb;
  new_edge->dest = new_bb;
  new_edge->flags = EDGE_FALLTHRU;
  new_edge->probability = REG_BR_PROB_BASE;
  new_edge->count = bb->count;

  /* Redirect the src of the successor edges of bb to point to new_bb.  */
  for (e = new_bb->succ; e; e = e->succ_next)
    e->src = new_bb;

  /* Place the new block just after the block being split.  */
  VARRAY_GROW (basic_block_info, ++n_basic_blocks);

  /* Some parts of the compiler expect blocks to be numbered in
     sequential order so insert the new block immediately after the
     block being split.  */
  j = bb->index;
  for (i = n_basic_blocks - 1; i > j + 1; --i)
    {
      basic_block tmp = BASIC_BLOCK (i - 1);
      BASIC_BLOCK (i) = tmp;
      tmp->index = i;
    }

  BASIC_BLOCK (i) = new_bb;
  new_bb->index = i;

  if (GET_CODE (new_bb->head) == CODE_LABEL)
    {
      /* Create the basic block note.  */
      bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK,
				 new_bb->head);
      NOTE_BASIC_BLOCK (bb_note) = new_bb;
    }
  else
    {
      /* Create the basic block note.  */
      bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK,
				  new_bb->head);
      NOTE_BASIC_BLOCK (bb_note) = new_bb;
      new_bb->head = bb_note;
    }

  update_bb_for_insn (new_bb);

  if (bb->global_live_at_start)
    {
      new_bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
      new_bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
      COPY_REG_SET (new_bb->global_live_at_end, bb->global_live_at_end);

      /* We now have to calculate which registers are live at the end
	 of the split basic block and at the start of the new basic
	 block.  Start with those registers that are known to be live
	 at the end of the original basic block and get
	 propagate_block to determine which registers are live.  */
      COPY_REG_SET (new_bb->global_live_at_start, bb->global_live_at_end);
      propagate_block (new_bb, new_bb->global_live_at_start, NULL, NULL, 0);
      COPY_REG_SET (bb->global_live_at_end,
		    new_bb->global_live_at_start);
    }

  return new_edge;
}

/* Return label in the head of basic block.  Create one if it doesn't exist.  */
static rtx
block_label (block)
     basic_block block;
{
  if (GET_CODE (block->head) != CODE_LABEL)
    block->head = emit_label_before (gen_label_rtx (), block->head);
  return block->head;
}

/* Return true if the block has no effect and only forwards control flow to
   its single destination.  */
static bool
forwarder_block_p (bb)
     basic_block bb;
{
  rtx insn = bb->head;
  if (bb == EXIT_BLOCK_PTR || bb == ENTRY_BLOCK_PTR
      || !bb->succ || bb->succ->succ_next)
    return false;

  while (insn != bb->end)
    {
      if (active_insn_p (insn))
	return false;
      insn = NEXT_INSN (insn);
    }
  return (!active_insn_p (insn)
	  || (GET_CODE (insn) == JUMP_INSN && onlyjump_p (insn)));
}

/* Return nonzero if we can reach target from src by falling through.  */
static bool
can_fallthru (src, target)
     basic_block src, target;
{
  rtx insn = src->end;
  rtx insn2 = target->head;

  if (src->index + 1 == target->index && !active_insn_p (insn2))
    insn2 = next_active_insn (insn2);
  /* ??? Later we may add code to move jump tables offline.  */
  return next_active_insn (insn) == insn2;
}

/* Attempt to perform edge redirection by replacing a possibly complex jump
   instruction by an unconditional jump, or by removing the jump completely.
   This can apply only if all edges now point to the same block.

   The parameters and return values are equivalent to
   redirect_edge_and_branch.  */
static bool
try_redirect_by_replacing_jump (e, target)
     edge e;
     basic_block target;
{
  basic_block src = e->src;
  rtx insn = src->end;
  edge tmp;
  rtx set;
  int fallthru = 0;

  /* Verify that all targets will be TARGET.  */
  for (tmp = src->succ; tmp; tmp = tmp->succ_next)
    if (tmp->dest != target && tmp != e)
      break;
  if (tmp || !onlyjump_p (insn))
    return false;

  /* Avoid removing branch with side effects.  */
  set = single_set (insn);
  if (!set || side_effects_p (set))
    return false;

  /* See if we can create the fallthru edge.  */
  if (can_fallthru (src, target))
    {
      src->end = PREV_INSN (insn);
      if (rtl_dump_file)
	fprintf (rtl_dump_file, "Removing jump %i.\n", INSN_UID (insn));
      flow_delete_insn (insn);
      fallthru = 1;

      /* Selectively unlink whole insn chain.  */
      if (src->end != PREV_INSN (target->head))
	flow_delete_insn_chain (NEXT_INSN (src->end),
				PREV_INSN (target->head));
    }
  /* If this already is simplejump, redirect it.  */
  else if (simplejump_p (insn))
    {
      if (e->dest == target)
	return false;
      if (rtl_dump_file)
	fprintf (rtl_dump_file, "Redirecting jump %i from %i to %i.\n",
		 INSN_UID (insn), e->dest->index, target->index);
      redirect_jump (insn, block_label (target), 0);
    }
  /* Or replace possibly complicated jump insn by simple jump insn.  */
  else
    {
      rtx target_label = block_label (target);
      rtx barrier;

      src->end = PREV_INSN (insn);
      src->end = emit_jump_insn_after (gen_jump (target_label), src->end);
      JUMP_LABEL (src->end) = target_label;
      LABEL_NUSES (target_label)++;
      if (basic_block_for_insn)
	set_block_for_new_insns (src->end, src);
      if (rtl_dump_file)
	fprintf (rtl_dump_file, "Replacing insn %i by jump %i\n",
		 INSN_UID (insn), INSN_UID (src->end));
      flow_delete_insn (insn);
      barrier = next_nonnote_insn (src->end);
      if (!barrier || GET_CODE (barrier) != BARRIER)
	emit_barrier_after (src->end);
    }

  /* Keep only one edge out and set proper flags.  */
  while (src->succ->succ_next)
    remove_edge (src->succ);
  e = src->succ;
  if (fallthru)
    e->flags = EDGE_FALLTHRU;
  else
    e->flags = 0;
  e->probability = REG_BR_PROB_BASE;
  e->count = src->count;

  /* In case we've zapped a conditional jump, we need to kill the cc0
     setter too if available.  */
#ifdef HAVE_cc0
  insn = src->end;
  if (GET_CODE (insn) == JUMP_INSN)
    insn = prev_nonnote_insn (insn);
  if (sets_cc0_p (insn))
    {
      if (insn == src->end)
	src->end = PREV_INSN (insn);
      flow_delete_insn (insn);
    }
#endif

  /* We don't want a block to end on a line-number note since that has
     the potential of changing the code between -g and not -g.  */
  while (GET_CODE (e->src->end) == NOTE
	 && NOTE_LINE_NUMBER (e->src->end) >= 0)
    {
      rtx prev = PREV_INSN (e->src->end);
      flow_delete_insn (e->src->end);
      e->src->end = prev;
    }

  if (e->dest != target)
    redirect_edge_succ (e, target);
  return true;
}

/* Attempt to change code to redirect edge E to TARGET.
   Don't do that at the expense of adding new instructions or reordering
   basic blocks.

   Function can also be called with edge destination equivalent to the
   TARGET.  Then it should try the simplifications and do nothing if
   none is possible.

   Return true if the transformation succeeded.  We still return false in
   case E already pointed to TARGET and we didn't manage to simplify the
   instruction stream.  */
static bool
redirect_edge_and_branch (e, target)
     edge e;
     basic_block target;
{
  rtx tmp;
  rtx old_label = e->dest->head;
  basic_block src = e->src;
  rtx insn = src->end;

  if (try_redirect_by_replacing_jump (e, target))
    return true;
  /* Do this fast path late, as we want above code to simplify for cases
     where called on single edge leaving basic block containing nontrivial
     jump insn.  */
  else if (e->dest == target)
    return false;

  /* We can only redirect non-fallthru edges of jump insn.  */
  if (e->flags & EDGE_FALLTHRU)
    return false;
  if (GET_CODE (insn) != JUMP_INSN)
    return false;

  /* Recognize a tablejump and adjust all matching cases.  */
  if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
      && (tmp = NEXT_INSN (tmp)) != NULL_RTX
      && GET_CODE (tmp) == JUMP_INSN
      && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
	  || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
    {
      rtvec vec;
      int j;
      rtx new_label = block_label (target);

      if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
	vec = XVEC (PATTERN (tmp), 0);
      else
	vec = XVEC (PATTERN (tmp), 1);

      for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
	if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
	  {
	    RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (Pmode, new_label);
	    --LABEL_NUSES (old_label);
	    ++LABEL_NUSES (new_label);
	  }

      /* Handle casesi dispatch insns */
      if ((tmp = single_set (insn)) != NULL
	  && SET_DEST (tmp) == pc_rtx
	  && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
	  && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
	  && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
	{
	  XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (VOIDmode,
						       new_label);
	  --LABEL_NUSES (old_label);
	  ++LABEL_NUSES (new_label);
	}
    }
  else
    {
      /* ??? We may play games with moving the named labels from
	 one basic block to the other in case only one computed_jump is
	 available.  */
      if (computed_jump_p (insn))
	return false;

      /* A return instruction can't be redirected.  */
      if (returnjump_p (insn))
	return false;

      /* If the insn doesn't go where we think, we're confused.  */
      if (JUMP_LABEL (insn) != old_label)
	abort ();
      redirect_jump (insn, block_label (target), 0);
    }

  if (rtl_dump_file)
    fprintf (rtl_dump_file, "Edge %i->%i redirected to %i\n",
	     e->src->index, e->dest->index, target->index);
  if (e->dest != target)
    {
      edge s;
      /* Check whether the edge is already present.  */
      for (s = src->succ; s; s = s->succ_next)
	if (s->dest == target)
	  break;
      if (s)
	{
	  s->flags |= e->flags;
	  s->probability += e->probability;
	  s->count += e->count;
	  remove_edge (e);
	}
      else
	redirect_edge_succ (e, target);
    }
  return true;
}
1868 /* Redirect edge even at the expense of creating new jump insn or
1869 basic block. Return new basic block if created, NULL otherwise.
1870 Abort if converison is impossible. */
1871 static basic_block
1872 redirect_edge_and_branch_force (e, target)
1873 edge e;
1874 basic_block target;
1876 basic_block new_bb;
1877 edge new_edge;
1878 rtx label;
1879 rtx bb_note;
1880 int i, j;
1882 if (redirect_edge_and_branch (e, target))
1883 return NULL;
1884 if (e->dest == target)
1885 return NULL;
1886 if (e->flags & EDGE_ABNORMAL)
1887 abort ();
1888 if (!(e->flags & EDGE_FALLTHRU))
1889 abort ();
1891 e->flags &= ~EDGE_FALLTHRU;
1892 label = block_label (target);
1893 /* Case of the fallthru block. */
1894 if (!e->src->succ->succ_next)
1896 e->src->end = emit_jump_insn_after (gen_jump (label), e->src->end);
1897 JUMP_LABEL (e->src->end) = label;
1898 LABEL_NUSES (label)++;
1899 if (basic_block_for_insn)
1900 set_block_for_insn (e->src->end, e->src);
1901 emit_barrier_after (e->src->end);
1902 if (rtl_dump_file)
1903 fprintf (rtl_dump_file,
1904 "Emitting jump insn %i to redirect edge %i->%i to %i\n",
1905 INSN_UID (e->src->end), e->src->index, e->dest->index,
1906 target->index);
1907 redirect_edge_succ (e, target);
1908 return NULL;
1910 /* Redirecting the fallthru edge of a conditional jump needs extra work. */
1912 if (rtl_dump_file)
1913 fprintf (rtl_dump_file,
1914 "Emitting jump insn %i in new BB to redirect edge %i->%i to %i\n",
1915 INSN_UID (e->src->end), e->src->index, e->dest->index,
1916 target->index);
1918 /* Create the new structures. */
1919 new_bb = (basic_block) obstack_alloc (&flow_obstack, sizeof (*new_bb));
1920 new_edge = (edge) xcalloc (1, sizeof (*new_edge));
1921 n_edges++;
1923 memset (new_bb, 0, sizeof (*new_bb));
1925 new_bb->end = new_bb->head = e->src->end;
1926 new_bb->succ = NULL;
1927 new_bb->pred = new_edge;
1928 new_bb->count = e->count;
1929 new_bb->frequency = e->probability * e->src->frequency / REG_BR_PROB_BASE;
1930 new_bb->loop_depth = e->dest->loop_depth;
1932 new_edge->flags = EDGE_FALLTHRU;
1933 new_edge->probability = e->probability;
1934 new_edge->count = e->count;
1936 /* Wire edge in. */
1937 new_edge->src = e->src;
1938 new_edge->dest = new_bb;
1939 new_edge->succ_next = e->src->succ;
1940 e->src->succ = new_edge;
1941 new_edge->pred_next = NULL;
1943 /* Redirect old edge. */
1944 redirect_edge_succ (e, target);
1945 redirect_edge_pred (e, new_bb);
1946 e->probability = REG_BR_PROB_BASE;
1948 /* Place the new block just after the block being split. */
1949 VARRAY_GROW (basic_block_info, ++n_basic_blocks);
1951 /* Some parts of the compiler expect blocks to be numbered in
1952 sequential order, so insert the new block immediately after the
1953 block being split. */
1954 j = new_edge->src->index;
1955 for (i = n_basic_blocks - 1; i > j + 1; --i)
1957 basic_block tmp = BASIC_BLOCK (i - 1);
1958 BASIC_BLOCK (i) = tmp;
1959 tmp->index = i;
1962 BASIC_BLOCK (i) = new_bb;
1963 new_bb->index = i;
1965 /* Create the basic block note. */
1966 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, new_bb->head);
1967 NOTE_BASIC_BLOCK (bb_note) = new_bb;
1968 new_bb->head = bb_note;
1970 new_bb->end = emit_jump_insn_after (gen_jump (label), new_bb->head);
1971 JUMP_LABEL (new_bb->end) = label;
1972 LABEL_NUSES (label)++;
1973 if (basic_block_for_insn)
1974 set_block_for_insn (new_bb->end, new_bb);
1975 emit_barrier_after (new_bb->end);
1976 return new_bb;
1979 /* Split a (typically critical) edge. Return the new block.
1980 Abort on abnormal edges.
1982 ??? The code generally expects to be called on critical edges.
1983 The case of a block ending in an unconditional jump to a
1984 block with multiple predecessors is not handled optimally. */
1986 basic_block
1987 split_edge (edge_in)
1988 edge edge_in;
1990 basic_block old_pred, bb, old_succ;
1991 edge edge_out;
1992 rtx bb_note;
1993 int i, j;
1995 /* Abnormal edges cannot be split. */
1996 if ((edge_in->flags & EDGE_ABNORMAL) != 0)
1997 abort ();
1999 old_pred = edge_in->src;
2000 old_succ = edge_in->dest;
2002 /* Create the new structures. */
2003 bb = (basic_block) obstack_alloc (&flow_obstack, sizeof (*bb));
2004 edge_out = (edge) xcalloc (1, sizeof (*edge_out));
2005 n_edges++;
2007 memset (bb, 0, sizeof (*bb));
2009 /* ??? This info is likely going to be out of date very soon. */
2010 if (old_succ->global_live_at_start)
2012 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
2013 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
2014 COPY_REG_SET (bb->global_live_at_start, old_succ->global_live_at_start);
2015 COPY_REG_SET (bb->global_live_at_end, old_succ->global_live_at_start);
2018 /* Wire them up. */
2019 bb->succ = edge_out;
2020 bb->count = edge_in->count;
2021 bb->frequency = (edge_in->probability * edge_in->src->frequency
2022 / REG_BR_PROB_BASE);
2024 edge_in->flags &= ~EDGE_CRITICAL;
2026 edge_out->pred_next = old_succ->pred;
2027 edge_out->succ_next = NULL;
2028 edge_out->src = bb;
2029 edge_out->dest = old_succ;
2030 edge_out->flags = EDGE_FALLTHRU;
2031 edge_out->probability = REG_BR_PROB_BASE;
2032 edge_out->count = edge_in->count;
2034 old_succ->pred = edge_out;
2036 /* Tricky case -- if there existed a fallthru into the successor
2037 (and we're not it) we must add a new unconditional jump around
2038 the new block we're actually interested in.
2040 Further, if that edge is critical, this means a second new basic
2041 block must be created to hold it. In order to simplify correct
2042 insn placement, do this before we touch the existing basic block
2043 ordering for the block we were really wanting. */
2044 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
2046 edge e;
2047 for (e = edge_out->pred_next; e; e = e->pred_next)
2048 if (e->flags & EDGE_FALLTHRU)
2049 break;
2051 if (e)
2053 basic_block jump_block;
2054 rtx pos;
2056 if ((e->flags & EDGE_CRITICAL) == 0
2057 && e->src != ENTRY_BLOCK_PTR)
2059 /* Non critical -- we can simply add a jump to the end
2060 of the existing predecessor. */
2061 jump_block = e->src;
2063 else
2065 /* We need a new block to hold the jump. The simplest
2066 way to do the bulk of the work here is to recursively
2067 call ourselves. */
2068 jump_block = split_edge (e);
2069 e = jump_block->succ;
2072 /* Now add the jump insn ... */
2073 pos = emit_jump_insn_after (gen_jump (old_succ->head),
2074 jump_block->end);
2075 jump_block->end = pos;
2076 if (basic_block_for_insn)
2077 set_block_for_insn (pos, jump_block);
2078 emit_barrier_after (pos);
2080 /* ... let jump know that label is in use, ... */
2081 JUMP_LABEL (pos) = old_succ->head;
2082 ++LABEL_NUSES (old_succ->head);
2084 /* ... and clear fallthru on the outgoing edge. */
2085 e->flags &= ~EDGE_FALLTHRU;
2087 /* Continue splitting the interesting edge. */
2091 /* Place the new block just in front of the successor. */
2092 VARRAY_GROW (basic_block_info, ++n_basic_blocks);
2093 if (old_succ == EXIT_BLOCK_PTR)
2094 j = n_basic_blocks - 1;
2095 else
2096 j = old_succ->index;
2097 for (i = n_basic_blocks - 1; i > j; --i)
2099 basic_block tmp = BASIC_BLOCK (i - 1);
2100 BASIC_BLOCK (i) = tmp;
2101 tmp->index = i;
2103 BASIC_BLOCK (i) = bb;
2104 bb->index = i;
2106 /* Create the basic block note.
2108 Where we place the note can have a noticeable impact on the generated
2109 code. Consider this cfg:
2111             E
2112             |
2113             0
2114            / \
2115        +->1-->2--->E
2116        |  |
2117        +--+
2119 If we need to insert an insn on the edge from block 0 to block 1,
2120 we want to ensure the instructions we insert are outside of any
2121 loop notes that physically sit between block 0 and block 1. Otherwise
2122 we confuse the loop optimizer into thinking the loop is a phony. */
2123 if (old_succ != EXIT_BLOCK_PTR
2124 && PREV_INSN (old_succ->head)
2125 && GET_CODE (PREV_INSN (old_succ->head)) == NOTE
2126 && NOTE_LINE_NUMBER (PREV_INSN (old_succ->head)) == NOTE_INSN_LOOP_BEG)
2127 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK,
2128 PREV_INSN (old_succ->head));
2129 else if (old_succ != EXIT_BLOCK_PTR)
2130 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, old_succ->head);
2131 else
2132 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
2133 NOTE_BASIC_BLOCK (bb_note) = bb;
2134 bb->head = bb->end = bb_note;
2136 /* For non-fallthru edges, we must adjust the predecessor's
2137 jump instruction to target our new block. */
2138 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
2140 if (!redirect_edge_and_branch (edge_in, bb))
2141 abort ();
2143 else
2144 redirect_edge_succ (edge_in, bb);
2146 return bb;
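/* A hypothetical sketch (illustration only, not compiled): split every
   critical, non-abnormal edge in the CFG.  The edges are collected
   first because split_edge renumbers the basic blocks as it inserts
   new ones.  */
#if 0
  {
    int i, n = 0;
    edge *crit = (edge *) xmalloc (n_edges * sizeof (edge));

    for (i = 0; i < n_basic_blocks; i++)
      {
	edge e;
	for (e = BASIC_BLOCK (i)->succ; e; e = e->succ_next)
	  if ((e->flags & EDGE_CRITICAL) && ! (e->flags & EDGE_ABNORMAL))
	    crit[n++] = e;
      }
    for (i = 0; i < n; i++)
      split_edge (crit[i]);
    free (crit);
  }
#endif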
2149 /* Queue instructions for insertion on an edge between two basic blocks.
2150 The new instructions and basic blocks (if any) will not appear in the
2151 CFG until commit_edge_insertions is called. */
2153 void
2154 insert_insn_on_edge (pattern, e)
2155 rtx pattern;
2156 edge e;
2158 /* We cannot insert instructions on an abnormal critical edge.
2159 It will be easier to find the culprit if we die now. */
2160 if ((e->flags & (EDGE_ABNORMAL|EDGE_CRITICAL))
2161 == (EDGE_ABNORMAL|EDGE_CRITICAL))
2162 abort ();
2164 if (e->insns == NULL_RTX)
2165 start_sequence ();
2166 else
2167 push_to_sequence (e->insns);
2169 emit_insn (pattern);
2171 e->insns = get_insns ();
2172 end_sequence ();
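/* A hypothetical usage sketch (illustration only, not compiled): queue
   a register copy on each normal edge leaving BB and realize the
   insertions afterwards.  BB, DEST and SRC are assumed variables, and
   gen_move_insn comes from expr.c.  */
#if 0
  {
    edge e;

    for (e = bb->succ; e; e = e->succ_next)
      if (! (e->flags & EDGE_ABNORMAL))
	insert_insn_on_edge (gen_move_insn (dest, src), e);
    commit_edge_insertions ();
  }
#endif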
2175 /* Update the CFG for the instructions queued on edge E. */
2177 static void
2178 commit_one_edge_insertion (e)
2179 edge e;
2181 rtx before = NULL_RTX, after = NULL_RTX, insns, tmp, last;
2182 basic_block bb;
2184 /* Pull the insns off the edge now since the edge might go away. */
2185 insns = e->insns;
2186 e->insns = NULL_RTX;
2188 /* Figure out where to put these things. If the destination has
2189 one predecessor, insert there. Except for the exit block. */
2190 if (e->dest->pred->pred_next == NULL
2191 && e->dest != EXIT_BLOCK_PTR)
2193 bb = e->dest;
2195 /* Get the location correct wrt a code label, and "nice" wrt
2196 a basic block note, and before everything else. */
2197 tmp = bb->head;
2198 if (GET_CODE (tmp) == CODE_LABEL)
2199 tmp = NEXT_INSN (tmp);
2200 if (NOTE_INSN_BASIC_BLOCK_P (tmp))
2201 tmp = NEXT_INSN (tmp);
2202 if (tmp == bb->head)
2203 before = tmp;
2204 else
2205 after = PREV_INSN (tmp);
2208 /* If the source has one successor and the edge is not abnormal,
2209 insert there. Except for the entry block. */
2210 else if ((e->flags & EDGE_ABNORMAL) == 0
2211 && e->src->succ->succ_next == NULL
2212 && e->src != ENTRY_BLOCK_PTR)
2214 bb = e->src;
2215 /* It is possible to have a non-simple jump here. Consider a target
2216 where some forms of unconditional jumps clobber a register. This
2217 happens on the fr30 for example.
2219 We know this block has a single successor, so we can just emit
2220 the queued insns before the jump. */
2221 if (GET_CODE (bb->end) == JUMP_INSN)
2223 before = bb->end;
2225 else
2227 /* We'd better be fallthru, or we've lost track of what's what. */
2228 if ((e->flags & EDGE_FALLTHRU) == 0)
2229 abort ();
2231 after = bb->end;
2235 /* Otherwise we must split the edge. */
2236 else
2238 bb = split_edge (e);
2239 after = bb->end;
2242 /* Now that we've found the spot, do the insertion. */
2244 /* Set the new block number for these insns, if structure is allocated. */
2245 if (basic_block_for_insn)
2247 rtx i;
2248 for (i = insns; i != NULL_RTX; i = NEXT_INSN (i))
2249 set_block_for_insn (i, bb);
2252 if (before)
2254 emit_insns_before (insns, before);
2255 if (before == bb->head)
2256 bb->head = insns;
2258 last = prev_nonnote_insn (before);
2260 else
2262 last = emit_insns_after (insns, after);
2263 if (after == bb->end)
2264 bb->end = last;
2267 if (returnjump_p (last))
2269 /* ??? Remove all outgoing edges from BB and add one for EXIT.
2270 This is not currently a problem because this only happens
2271 for the (single) epilogue, which already has a fallthru edge
2272 to EXIT. */
2274 e = bb->succ;
2275 if (e->dest != EXIT_BLOCK_PTR
2276 || e->succ_next != NULL
2277 || (e->flags & EDGE_FALLTHRU) == 0)
2278 abort ();
2279 e->flags &= ~EDGE_FALLTHRU;
2281 emit_barrier_after (last);
2282 bb->end = last;
2284 if (before)
2285 flow_delete_insn (before);
2287 else if (GET_CODE (last) == JUMP_INSN)
2288 abort ();
2289 find_sub_basic_blocks (bb);
2292 /* Update the CFG for all queued instructions. */
2294 void
2295 commit_edge_insertions ()
2297 int i;
2298 basic_block bb;
2300 #ifdef ENABLE_CHECKING
2301 verify_flow_info ();
2302 #endif
2304 i = -1;
2305 bb = ENTRY_BLOCK_PTR;
2306 while (1)
2308 edge e, next;
2310 for (e = bb->succ; e; e = next)
2312 next = e->succ_next;
2313 if (e->insns)
2314 commit_one_edge_insertion (e);
2317 if (++i >= n_basic_blocks)
2318 break;
2319 bb = BASIC_BLOCK (i);
2323 /* Add fake edges to the function exit for any non-constant calls in
2324 the bitmap of blocks specified by BLOCKS, or to the whole CFG if
2325 BLOCKS is zero. Return the number of blocks that were split. */
2327 int
2328 flow_call_edges_add (blocks)
2329 sbitmap blocks;
2331 int i;
2332 int blocks_split = 0;
2333 int bb_num = 0;
2334 basic_block *bbs;
2336 /* Map bb indices into basic block pointers since split_block
2337 will renumber the basic blocks. */
2339 bbs = xmalloc (n_basic_blocks * sizeof (*bbs));
2341 if (! blocks)
2343 for (i = 0; i < n_basic_blocks; i++)
2344 bbs[bb_num++] = BASIC_BLOCK (i);
2346 else
2348 EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
2350 bbs[bb_num++] = BASIC_BLOCK (i);
2355 /* Now add fake edges to the function exit for any non-constant
2356 calls since there is no way that we can determine if they will
2357 return or not... */
2359 for (i = 0; i < bb_num; i++)
2361 basic_block bb = bbs[i];
2362 rtx insn;
2363 rtx prev_insn;
2365 for (insn = bb->end; ; insn = prev_insn)
2367 prev_insn = PREV_INSN (insn);
2368 if (GET_CODE (insn) == CALL_INSN && ! CONST_CALL_P (insn))
2370 edge e;
2372 /* Note that the following may create a new basic block
2373 and renumber the existing basic blocks. */
2374 e = split_block (bb, insn);
2375 if (e)
2376 blocks_split++;
2378 make_edge (NULL, bb, EXIT_BLOCK_PTR, EDGE_FAKE);
2380 if (insn == bb->head)
2381 break;
2385 if (blocks_split)
2386 verify_flow_info ();
2388 free (bbs);
2389 return blocks_split;
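/* A hypothetical usage sketch (illustration only, not compiled): a
   profiling-style pass that needs every non-constant call to end its
   basic block could run this over the whole CFG before instrumenting.  */
#if 0
  {
    int split = flow_call_edges_add (NULL);

    if (split && rtl_dump_file)
      fprintf (rtl_dump_file, "%d blocks split for call edges\n", split);
  }
#endif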
2392 /* Find unreachable blocks. An unreachable block will have NULL in
2393 block->aux; a non-NULL value indicates the block is reachable. */
2395 void
2396 find_unreachable_blocks ()
2398 edge e;
2399 int i, n;
2400 basic_block *tos, *worklist;
2402 n = n_basic_blocks;
2403 tos = worklist = (basic_block *) xmalloc (sizeof (basic_block) * n);
2405 /* Use basic_block->aux as a marker. Clear them all. */
2407 for (i = 0; i < n; ++i)
2408 BASIC_BLOCK (i)->aux = NULL;
2410 /* Add our starting points to the worklist. Almost always there will
2411 be only one. It isn't inconceivable that we might one day directly
2412 support Fortran alternate entry points. */
2414 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
2416 *tos++ = e->dest;
2418 /* Mark the block with a handy non-null value. */
2419 e->dest->aux = e;
2422 /* Iterate: find everything reachable from what we've already seen. */
2424 while (tos != worklist)
2426 basic_block b = *--tos;
2428 for (e = b->succ; e; e = e->succ_next)
2429 if (!e->dest->aux)
2431 *tos++ = e->dest;
2432 e->dest->aux = e;
2436 free (worklist);
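/* Illustration only (hypothetical, not compiled): once the function
   above has run, the aux marks can be consumed like this, which is
   essentially what delete_unreachable_blocks below does.  */
#if 0
  {
    int i;

    find_unreachable_blocks ();
    for (i = 0; i < n_basic_blocks; i++)
      if (BASIC_BLOCK (i)->aux == NULL)
	{
	  /* BASIC_BLOCK (i) cannot be reached from the entry block.  */
	}
  }
#endif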
2439 /* Delete all unreachable basic blocks. */
2440 static void
2441 delete_unreachable_blocks ()
2443 int i;
2445 find_unreachable_blocks ();
2447 /* Delete all unreachable basic blocks. Count down so that we
2448 don't interfere with the block renumbering that happens in
2449 flow_delete_block. */
2451 for (i = n_basic_blocks - 1; i >= 0; --i)
2453 basic_block b = BASIC_BLOCK (i);
2455 if (b->aux != NULL)
2456 /* This block was found. Tidy up the mark. */
2457 b->aux = NULL;
2458 else
2459 flow_delete_block (b);
2462 tidy_fallthru_edges ();
2465 /* Return true if NOTE is not one of the ones that must be kept paired,
2466 so that we may simply delete them. */
2468 static int
2469 can_delete_note_p (note)
2470 rtx note;
2472 return (NOTE_LINE_NUMBER (note) == NOTE_INSN_DELETED
2473 || NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK);
2476 /* Unlink a chain of insns between START and FINISH, leaving notes
2477 that must be paired. */
2479 void
2480 flow_delete_insn_chain (start, finish)
2481 rtx start, finish;
2483 /* Unchain the insns one by one. It would be quicker to delete all
2484 of these with a single unchaining, rather than one at a time, but
2485 we need to keep the NOTEs. */
2487 rtx next;
2489 while (1)
2491 next = NEXT_INSN (start);
2492 if (GET_CODE (start) == NOTE && !can_delete_note_p (start))
2493 ;
2494 else if (GET_CODE (start) == CODE_LABEL
2495 && ! can_delete_label_p (start))
2497 const char *name = LABEL_NAME (start);
2498 PUT_CODE (start, NOTE);
2499 NOTE_LINE_NUMBER (start) = NOTE_INSN_DELETED_LABEL;
2500 NOTE_SOURCE_FILE (start) = name;
2502 else
2503 next = flow_delete_insn (start);
2505 if (start == finish)
2506 break;
2507 start = next;
2511 /* Delete the insns in a (non-live) block. We physically delete every
2512 non-deleted-note insn, and update the flow graph appropriately.
2514 Return nonzero if we deleted an exception handler. */
2516 /* ??? Preserving all such notes strikes me as wrong. It would be nice
2517 to post-process the stream to remove empty blocks, loops, ranges, etc. */
2519 int
2520 flow_delete_block (b)
2521 basic_block b;
2523 int deleted_handler = 0;
2524 rtx insn, end, tmp;
2526 /* If the head of this block is a CODE_LABEL, then it might be the
2527 label for an exception handler which can't be reached.
2529 We need to remove the label from the exception_handler_label list
2530 and remove the associated NOTE_INSN_EH_REGION_BEG and
2531 NOTE_INSN_EH_REGION_END notes. */
2533 insn = b->head;
2535 never_reached_warning (insn);
2537 if (GET_CODE (insn) == CODE_LABEL)
2538 maybe_remove_eh_handler (insn);
2540 /* Include any jump table following the basic block. */
2541 end = b->end;
2542 if (GET_CODE (end) == JUMP_INSN
2543 && (tmp = JUMP_LABEL (end)) != NULL_RTX
2544 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
2545 && GET_CODE (tmp) == JUMP_INSN
2546 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
2547 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
2548 end = tmp;
2550 /* Include any barrier that may follow the basic block. */
2551 tmp = next_nonnote_insn (end);
2552 if (tmp && GET_CODE (tmp) == BARRIER)
2553 end = tmp;
2555 /* Selectively delete the entire chain. */
2556 flow_delete_insn_chain (insn, end);
2558 /* Remove the edges into and out of this block. Note that there may
2559 indeed be edges in, if we are removing an unreachable loop. */
2561 edge e, next, *q;
2563 for (e = b->pred; e; e = next)
2565 for (q = &e->src->succ; *q != e; q = &(*q)->succ_next)
2566 continue;
2567 *q = e->succ_next;
2568 next = e->pred_next;
2569 n_edges--;
2570 free (e);
2572 for (e = b->succ; e; e = next)
2574 for (q = &e->dest->pred; *q != e; q = &(*q)->pred_next)
2575 continue;
2576 *q = e->pred_next;
2577 next = e->succ_next;
2578 n_edges--;
2579 free (e);
2582 b->pred = NULL;
2583 b->succ = NULL;
2586 /* Remove the basic block from the array, and compact behind it. */
2587 expunge_block (b);
2589 return deleted_handler;
2592 /* Remove block B from the basic block array and compact behind it. */
2594 static void
2595 expunge_block (b)
2596 basic_block b;
2598 int i, n = n_basic_blocks;
2600 for (i = b->index; i + 1 < n; ++i)
2602 basic_block x = BASIC_BLOCK (i + 1);
2603 BASIC_BLOCK (i) = x;
2604 x->index = i;
2607 basic_block_info->num_elements--;
2608 n_basic_blocks--;
2611 /* Delete INSN by patching it out. Return the next insn. */
2613 rtx
2614 flow_delete_insn (insn)
2615 rtx insn;
2617 rtx prev = PREV_INSN (insn);
2618 rtx next = NEXT_INSN (insn);
2619 rtx note;
2621 PREV_INSN (insn) = NULL_RTX;
2622 NEXT_INSN (insn) = NULL_RTX;
2623 INSN_DELETED_P (insn) = 1;
2625 if (prev)
2626 NEXT_INSN (prev) = next;
2627 if (next)
2628 PREV_INSN (next) = prev;
2629 else
2630 set_last_insn (prev);
2632 if (GET_CODE (insn) == CODE_LABEL)
2633 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
2635 /* If deleting a jump, decrement the use count of the label. Deleting
2636 the label itself should happen in the normal course of block merging. */
2637 if (GET_CODE (insn) == JUMP_INSN
2638 && JUMP_LABEL (insn)
2639 && GET_CODE (JUMP_LABEL (insn)) == CODE_LABEL)
2640 LABEL_NUSES (JUMP_LABEL (insn))--;
2642 /* Also if deleting an insn that references a label. */
2643 else if ((note = find_reg_note (insn, REG_LABEL, NULL_RTX)) != NULL_RTX
2644 && GET_CODE (XEXP (note, 0)) == CODE_LABEL)
2645 LABEL_NUSES (XEXP (note, 0))--;
2647 if (GET_CODE (insn) == JUMP_INSN
2648 && (GET_CODE (PATTERN (insn)) == ADDR_VEC
2649 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
2651 rtx pat = PATTERN (insn);
2652 int diff_vec_p = GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC;
2653 int len = XVECLEN (pat, diff_vec_p);
2654 int i;
2656 for (i = 0; i < len; i++)
2657 LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0))--;
2660 return next;
2663 /* True if a given label can be deleted. */
2665 static int
2666 can_delete_label_p (label)
2667 rtx label;
2669 rtx x;
2671 if (LABEL_PRESERVE_P (label))
2672 return 0;
2674 for (x = forced_labels; x; x = XEXP (x, 1))
2675 if (label == XEXP (x, 0))
2676 return 0;
2677 for (x = label_value_list; x; x = XEXP (x, 1))
2678 if (label == XEXP (x, 0))
2679 return 0;
2680 for (x = exception_handler_labels; x; x = XEXP (x, 1))
2681 if (label == XEXP (x, 0))
2682 return 0;
2684 /* User declared labels must be preserved. */
2685 if (LABEL_NAME (label) != 0)
2686 return 0;
2688 return 1;
2691 static int
2692 tail_recursion_label_p (label)
2693 rtx label;
2695 rtx x;
2697 for (x = tail_recursion_label_list; x; x = XEXP (x, 1))
2698 if (label == XEXP (x, 0))
2699 return 1;
2701 return 0;
2704 /* Blocks A and B are to be merged into a single block A. The insns
2705 are already contiguous, hence `nomove'. */
2707 void
2708 merge_blocks_nomove (a, b)
2709 basic_block a, b;
2711 edge e;
2712 rtx b_head, b_end, a_end;
2713 rtx del_first = NULL_RTX, del_last = NULL_RTX;
2714 int b_empty = 0;
2716 /* If there was a CODE_LABEL beginning B, delete it. */
2717 b_head = b->head;
2718 b_end = b->end;
2719 if (GET_CODE (b_head) == CODE_LABEL)
2721 /* Detect basic blocks with nothing but a label. This can happen
2722 in particular at the end of a function. */
2723 if (b_head == b_end)
2724 b_empty = 1;
2725 del_first = del_last = b_head;
2726 b_head = NEXT_INSN (b_head);
2729 /* Delete the basic block note. */
2730 if (NOTE_INSN_BASIC_BLOCK_P (b_head))
2732 if (b_head == b_end)
2733 b_empty = 1;
2734 if (! del_last)
2735 del_first = b_head;
2736 del_last = b_head;
2737 b_head = NEXT_INSN (b_head);
2740 /* If there was a jump out of A, delete it. */
2741 a_end = a->end;
2742 if (GET_CODE (a_end) == JUMP_INSN)
2744 rtx prev;
2746 for (prev = PREV_INSN (a_end); ; prev = PREV_INSN (prev))
2747 if (GET_CODE (prev) != NOTE
2748 || NOTE_LINE_NUMBER (prev) == NOTE_INSN_BASIC_BLOCK
2749 || prev == a->head)
2750 break;
2752 del_first = a_end;
2754 #ifdef HAVE_cc0
2755 /* If this was a conditional jump, we need to also delete
2756 the insn that set cc0. */
2757 if (prev && sets_cc0_p (prev))
2759 rtx tmp = prev;
2760 prev = prev_nonnote_insn (prev);
2761 if (!prev)
2762 prev = a->head;
2763 del_first = tmp;
2765 #endif
2767 a_end = prev;
2769 else if (GET_CODE (NEXT_INSN (a_end)) == BARRIER)
2770 del_first = NEXT_INSN (a_end);
2772 /* Delete everything marked above as well as crap that might be
2773 hanging out between the two blocks. */
2774 flow_delete_insn_chain (del_first, del_last);
2776 /* Normally there should only be one successor of A and that is B, but
2777 partway through the merge of blocks for conditional_execution we'll
2778 be merging a TEST block with THEN and ELSE successors. Free the
2779 whole lot of them and hope the caller knows what they're doing. */
2780 while (a->succ)
2781 remove_edge (a->succ);
2783 /* Adjust the edges out of B for the new owner. */
2784 for (e = b->succ; e; e = e->succ_next)
2785 e->src = a;
2786 a->succ = b->succ;
2788 /* B hasn't quite yet ceased to exist. Attempt to prevent mishap. */
2789 b->pred = b->succ = NULL;
2791 /* Reassociate the insns of B with A. */
2792 if (!b_empty)
2794 if (basic_block_for_insn)
2796 BLOCK_FOR_INSN (b_head) = a;
2797 while (b_head != b_end)
2799 b_head = NEXT_INSN (b_head);
2800 BLOCK_FOR_INSN (b_head) = a;
2803 a_end = b_end;
2805 a->end = a_end;
2807 expunge_block (b);
2810 /* Blocks A and B are to be merged into a single block. A has no incoming
2811 fallthru edge, so it can be moved before B without adding or modifying
2812 any jumps (aside from the jump from A to B). */
2814 static int
2815 merge_blocks_move_predecessor_nojumps (a, b)
2816 basic_block a, b;
2818 rtx start, end, barrier;
2819 int index;
2821 start = a->head;
2822 end = a->end;
2824 barrier = next_nonnote_insn (end);
2825 if (GET_CODE (barrier) != BARRIER)
2826 abort ();
2827 flow_delete_insn (barrier);
2829 /* Move block and loop notes out of the chain so that we do not
2830 disturb their order.
2832 ??? A better solution would be to squeeze out all the non-nested notes
2833 and adjust the block trees appropriately. Even better would be to have
2834 a tighter connection between block trees and rtl so that this is not
2835 necessary. */
2836 start = squeeze_notes (start, end);
2838 /* Scramble the insn chain. */
2839 if (end != PREV_INSN (b->head))
2840 reorder_insns (start, end, PREV_INSN (b->head));
2842 if (rtl_dump_file)
2844 fprintf (rtl_dump_file, "Moved block %d before %d and merged.\n",
2845 a->index, b->index);
2848 /* Swap the records for the two blocks around. Although we are deleting B,
2849 A is now where B was and we want to compact the BB array from where
2850 A used to be. */
2851 BASIC_BLOCK (a->index) = b;
2852 BASIC_BLOCK (b->index) = a;
2853 index = a->index;
2854 a->index = b->index;
2855 b->index = index;
2857 /* Now blocks A and B are contiguous. Merge them. */
2858 merge_blocks_nomove (a, b);
2860 return 1;
2863 /* Blocks A and B are to be merged into a single block. B has no outgoing
2864 fallthru edge, so it can be moved after A without adding or modifying
2865 any jumps (aside from the jump from A to B). */
2867 static int
2868 merge_blocks_move_successor_nojumps (a, b)
2869 basic_block a, b;
2871 rtx start, end, barrier;
2873 start = b->head;
2874 end = b->end;
2875 barrier = NEXT_INSN (end);
2877 /* Recognize a jump table following block B. */
2878 if (barrier
2879 && GET_CODE (barrier) == CODE_LABEL
2880 && NEXT_INSN (barrier)
2881 && GET_CODE (NEXT_INSN (barrier)) == JUMP_INSN
2882 && (GET_CODE (PATTERN (NEXT_INSN (barrier))) == ADDR_VEC
2883 || GET_CODE (PATTERN (NEXT_INSN (barrier))) == ADDR_DIFF_VEC))
2885 end = NEXT_INSN (barrier);
2886 barrier = NEXT_INSN (end);
2889 /* There had better have been a barrier there. Delete it. */
2890 if (barrier && GET_CODE (barrier) == BARRIER)
2891 flow_delete_insn (barrier);
2893 /* Move block and loop notes out of the chain so that we do not
2894 disturb their order.
2896 ??? A better solution would be to squeeze out all the non-nested notes
2897 and adjust the block trees appropriately. Even better would be to have
2898 a tighter connection between block trees and rtl so that this is not
2899 necessary. */
2900 start = squeeze_notes (start, end);
2902 /* Scramble the insn chain. */
2903 reorder_insns (start, end, a->end);
2905 /* Now blocks A and B are contiguous. Merge them. */
2906 merge_blocks_nomove (a, b);
2908 if (rtl_dump_file)
2910 fprintf (rtl_dump_file, "Moved block %d after %d and merged.\n",
2911 b->index, a->index);
2914 return 1;
2917 /* Attempt to merge basic blocks that are potentially non-adjacent.
2918 Return true iff the attempt succeeded. */
2920 static int
2921 merge_blocks (e, b, c, mode)
2922 edge e;
2923 basic_block b, c;
2924 int mode;
2926 /* If C has a tail recursion label, do not merge. There is no
2927 edge recorded from the call_placeholder back to this label, as
2928 that would make optimize_sibling_and_tail_recursive_calls more
2929 complex for no gain. */
2930 if (GET_CODE (c->head) == CODE_LABEL
2931 && tail_recursion_label_p (c->head))
2932 return 0;
2934 /* If B has a fallthru edge to C, no need to move anything. */
2935 if (e->flags & EDGE_FALLTHRU)
2937 merge_blocks_nomove (b, c);
2939 if (rtl_dump_file)
2941 fprintf (rtl_dump_file, "Merged %d and %d without moving.\n",
2942 b->index, c->index);
2945 return 1;
2947 /* Otherwise we will need to move code around. Do that only if expensive
2948 transformations are allowed. */
2949 else if (mode & CLEANUP_EXPENSIVE)
2951 edge tmp_edge, c_fallthru_edge;
2952 int c_has_outgoing_fallthru;
2953 int b_has_incoming_fallthru;
2955 /* We must make sure to not munge nesting of exception regions,
2956 lexical blocks, and loop notes.
2958 The first is taken care of by requiring that the active eh
2959 region at the end of one block always matches the active eh
2960 region at the beginning of the next block.
2962 The latter two are taken care of by squeezing out all the notes. */
2964 /* ??? A throw/catch edge (or any abnormal edge) should be rarely
2965 executed and we may want to treat blocks which have two out
2966 edges, one normal, one abnormal as only having one edge for
2967 block merging purposes. */
2969 for (tmp_edge = c->succ; tmp_edge; tmp_edge = tmp_edge->succ_next)
2970 if (tmp_edge->flags & EDGE_FALLTHRU)
2971 break;
2972 c_has_outgoing_fallthru = (tmp_edge != NULL);
2973 c_fallthru_edge = tmp_edge;
2975 for (tmp_edge = b->pred; tmp_edge; tmp_edge = tmp_edge->pred_next)
2976 if (tmp_edge->flags & EDGE_FALLTHRU)
2977 break;
2978 b_has_incoming_fallthru = (tmp_edge != NULL);
2980 /* If B does not have an incoming fallthru, then it can be moved
2981 immediately before C without introducing or modifying jumps.
2982 C cannot be the first block, so we do not have to worry about
2983 accessing a non-existent block. */
2984 if (! b_has_incoming_fallthru)
2985 return merge_blocks_move_predecessor_nojumps (b, c);
2987 /* Otherwise, we're going to try to move C after B. If C does
2988 not have an outgoing fallthru, then it can be moved
2989 immediately after B without introducing or modifying jumps. */
2990 if (! c_has_outgoing_fallthru)
2991 return merge_blocks_move_successor_nojumps (b, c);
2993 /* Otherwise, we'll need to insert an extra jump, and possibly
2994 a new block to contain it. We can't redirect to EXIT_BLOCK_PTR,
2995 as we don't have explicit return instructions before epilogues
2996 are generated, so give up on that case. */
2998 if (c_fallthru_edge->dest != EXIT_BLOCK_PTR
2999 && merge_blocks_move_successor_nojumps (b, c))
3001 basic_block target = c_fallthru_edge->dest;
3002 rtx barrier;
3003 basic_block new;
3005 /* This is a dirty hack to avoid code duplication.
3007 Set the edge to point to the wrong basic block, so that
3008 redirect_edge_and_branch_force will do the trick
3009 and rewire the edge back to the original location. */
3010 redirect_edge_succ (c_fallthru_edge, ENTRY_BLOCK_PTR);
3011 new = redirect_edge_and_branch_force (c_fallthru_edge, target);
3013 /* We've just created a barrier, but another barrier is already
3014 present in the stream. Avoid the duplicate. */
3015 barrier = next_nonnote_insn (new ? new->end : b->end);
3016 if (GET_CODE (barrier) != BARRIER)
3017 abort ();
3018 flow_delete_insn (barrier);
3021 return 0;
3023 return 0;
3026 /* Simplify a conditional jump around a jump.
3027 Return nonzero in case the optimization matched. */
3029 static bool
3030 try_simplify_condjump (src)
3031 basic_block src;
3033 basic_block final_block, next_block;
3034 rtx insn = src->end;
3035 edge branch, fallthru;
3037 /* Verify that there are exactly two successors. */
3038 if (!src->succ || !src->succ->succ_next || src->succ->succ_next->succ_next
3039 || !any_condjump_p (insn))
3040 return false;
3042 fallthru = FALLTHRU_EDGE (src);
3044 /* The following block must be a simple forwarder block with a single
3045 entry and must not be the last in the stream. */
3046 next_block = fallthru->dest;
3047 if (!forwarder_block_p (next_block)
3048 || next_block->pred->pred_next
3049 || next_block->index == n_basic_blocks - 1)
3050 return false;
3052 /* The branch must target the block just afterwards. */
3053 final_block = BASIC_BLOCK (next_block->index + 1);
3055 branch = BRANCH_EDGE (src);
3057 if (branch->dest != final_block)
3058 return false;
3060 /* Keep jump.c from being overactive in removing unreachable insns. */
3061 LABEL_NUSES (JUMP_LABEL (insn))++;
3062 if (!invert_jump (insn, block_label (next_block->succ->dest), 1))
3064 LABEL_NUSES (JUMP_LABEL (insn))--;
3065 return false;
3067 if (rtl_dump_file)
3068 fprintf (rtl_dump_file, "Simplifying condjump %i around jump %i\n",
3069 INSN_UID (insn), INSN_UID (next_block->end));
3071 redirect_edge_succ (branch, final_block);
3072 redirect_edge_succ (fallthru, next_block->succ->dest);
3074 branch->flags |= EDGE_FALLTHRU;
3075 fallthru->flags &= ~EDGE_FALLTHRU;
3077 flow_delete_block (next_block);
3078 return true;
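/* For illustration (a sketch, not actual compiler output), the
   transformation above rewrites

	SRC:	if (cond) goto FINAL;	-- conditional branch
	NEXT:	goto DEST;		-- forwarder block
	FINAL:	...			-- the block just after NEXT

   into

	SRC:	if (!cond) goto DEST;	-- inverted jump, falls into FINAL
	FINAL:	...

   and deletes the forwarder block NEXT entirely.  */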
3081 /* Attempt to forward edges leaving basic block B.
3082 Return nonzero if successful. */
3084 static bool
3085 try_forward_edges (b)
3086 basic_block b;
3088 bool changed = 0;
3089 edge e;
3090 for (e = b->succ; e; e = e->succ_next)
3092 basic_block target = e->dest, first = e->dest;
3093 int counter = 0;
3095 /* Look for the real destination of the jump.
3096 Avoid looping forever across an empty infinite loop by counting
3097 up to n_basic_blocks. */
3098 while (forwarder_block_p (target)
3099 && target->succ->dest != EXIT_BLOCK_PTR
3100 && counter < n_basic_blocks)
3102 /* Bypass trivial infinite loops. */
3103 if (target == target->succ->dest)
3104 counter = n_basic_blocks;
3105 target = target->succ->dest, counter++;
3108 if (target != first && counter < n_basic_blocks
3109 && redirect_edge_and_branch (e, target))
3111 while (first != target)
3113 first->count -= e->count;
3114 first->succ->count -= e->count;
3115 first->frequency -= ((e->probability * b->frequency
3116 + REG_BR_PROB_BASE / 2)
3117 / REG_BR_PROB_BASE);
3118 first = first->succ->dest;
3120 /* We've possibly removed the edge. */
3121 changed = 1;
3122 e = b->succ;
3124 else if (rtl_dump_file && counter == n_basic_blocks)
3125 fprintf (rtl_dump_file, "Infinite loop in BB %i.\n", target->index);
3126 else if (rtl_dump_file && first != target)
3127 fprintf (rtl_dump_file,
3128 "Forwarding edge %i->%i to %i failed.\n", b->index,
3129 e->dest->index, target->index);
3131 return changed;
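/* For illustration (a hypothetical CFG, not actual output): given a
   chain B -> F1 -> F2 -> T in which F1 and F2 are forwarder blocks,
   the loop above redirects B's outgoing edge straight to T and
   subtracts that edge's count and frequency from F1 and F2, which
   typically become unreachable and are deleted on a later cleanup
   iteration.  */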
3134 /* Compare the instructions before the end of B1 and B2
3135 to find an opportunity for cross jumping.
3136 (This means detecting identical sequences of insns.)
3137 Find the longest possible equivalent sequences,
3138 store the first insns of those sequences into *F1 and *F2,
3139 and return the length of that sequence.
3141 To simplify callers of this function, if all the
3142 instructions were matched, always store bb->head. */
3144 static int
3145 flow_find_cross_jump (mode, bb1, bb2, f1, f2)
3146 int mode;
3147 basic_block bb1, bb2;
3148 rtx *f1, *f2;
3150 rtx i1 = onlyjump_p (bb1->end) ? PREV_INSN (bb1->end): bb1->end;
3151 rtx i2 = onlyjump_p (bb2->end) ? PREV_INSN (bb2->end): bb2->end;
3152 rtx p1, p2;
3153 int lose = 0;
3154 int ninsns = 0;
3155 rtx last1 = bb1->end, last2 = bb2->end;
3156 rtx afterlast1 = bb1->end, afterlast2 = bb2->end;
3158 /* If the basic block ends with a nontrivial jump instruction, count it
3159 as an instruction. Do not count an unconditional jump, as it will be
3160 removed by the basic block reordering pass if it is on the common
3161 path. */
3162 if (bb1->succ->succ_next && bb1->end != i1)
3163 ninsns++;
3165 for (; i1 != bb1->head; i1 = PREV_INSN (i1))
3167 /* Ignore notes. */
3168 if (GET_CODE (i1) == NOTE)
3169 continue;
3170 while ((GET_CODE (i2) == NOTE && i2 != bb2->head))
3171 i2 = PREV_INSN (i2);
3173 if (GET_CODE (i1) != GET_CODE (i2))
3174 break;
3176 p1 = PATTERN (i1);
3177 p2 = PATTERN (i2);
3179 /* If this is a CALL_INSN, compare register usage information.
3180 If we don't check this on stack register machines, the two
3181 CALL_INSNs might be merged leaving reg-stack.c with mismatching
3182 numbers of stack registers in the same basic block.
3183 If we don't check this on machines with delay slots, a delay slot may
3184 be filled that clobbers a parameter expected by the subroutine.
3186 ??? We take the simple route for now and assume that if they're
3187 equal, they were constructed identically. */
3189 if (GET_CODE (i1) == CALL_INSN
3190 && ! rtx_equal_p (CALL_INSN_FUNCTION_USAGE (i1),
3191 CALL_INSN_FUNCTION_USAGE (i2)))
3192 lose = 1;
3194 #ifdef STACK_REGS
3195 /* If cross_jump_death_matters is not 0, the insn's mode
3196 indicates whether or not the insn contains any stack-like
3197 regs. */
3199 if (!lose && (mode & CLEANUP_POST_REGSTACK ) && stack_regs_mentioned (i1))
3201 /* If register stack conversion has already been done, then
3202 death notes must also be compared before it is certain that
3203 the two instruction streams match. */
3205 rtx note;
3206 HARD_REG_SET i1_regset, i2_regset;
3208 CLEAR_HARD_REG_SET (i1_regset);
3209 CLEAR_HARD_REG_SET (i2_regset);
3211 for (note = REG_NOTES (i1); note; note = XEXP (note, 1))
3212 if (REG_NOTE_KIND (note) == REG_DEAD
3213 && STACK_REG_P (XEXP (note, 0)))
3214 SET_HARD_REG_BIT (i1_regset, REGNO (XEXP (note, 0)));
3216 for (note = REG_NOTES (i2); note; note = XEXP (note, 1))
3217 if (REG_NOTE_KIND (note) == REG_DEAD
3218 && STACK_REG_P (XEXP (note, 0)))
3219 SET_HARD_REG_BIT (i2_regset, REGNO (XEXP (note, 0)));
3221 GO_IF_HARD_REG_EQUAL (i1_regset, i2_regset, done);
3223 lose = 1;
3225 done:
3226 ;
3228 #endif
3230 if (lose || GET_CODE (p1) != GET_CODE (p2)
3231 || ! rtx_renumbered_equal_p (p1, p2))
3233 /* The following code helps take care of G++ cleanups. */
3234 rtx equiv1;
3235 rtx equiv2;
3237 if (!lose && GET_CODE (p1) == GET_CODE (p2)
3238 && ((equiv1 = find_reg_note (i1, REG_EQUAL, NULL_RTX)) != 0
3239 || (equiv1 = find_reg_note (i1, REG_EQUIV, NULL_RTX)) != 0)
3240 && ((equiv2 = find_reg_note (i2, REG_EQUAL, NULL_RTX)) != 0
3241 || (equiv2 = find_reg_note (i2, REG_EQUIV, NULL_RTX)) != 0)
3242 /* If the equivalences are not to a constant, they may
3243 reference pseudos that no longer exist, so we can't
3244 use them. */
3245 && CONSTANT_P (XEXP (equiv1, 0))
3246 && rtx_equal_p (XEXP (equiv1, 0), XEXP (equiv2, 0)))
3248 rtx s1 = single_set (i1);
3249 rtx s2 = single_set (i2);
3250 if (s1 != 0 && s2 != 0
3251 && rtx_renumbered_equal_p (SET_DEST (s1), SET_DEST (s2)))
3253 validate_change (i1, &SET_SRC (s1), XEXP (equiv1, 0), 1);
3254 validate_change (i2, &SET_SRC (s2), XEXP (equiv2, 0), 1);
3255 if (! rtx_renumbered_equal_p (p1, p2))
3256 cancel_changes (0);
3257 else if (apply_change_group ())
3258 goto win;
3262 /* Insns fail to match; cross jumping is limited to the following
3263 insns. */
3265 #ifdef HAVE_cc0
3266 /* Don't allow the insn after a compare to be shared by
3267 cross-jumping unless the compare is also shared.
3268 Here, if either of these non-matching insns is a compare,
3269 exclude the following insn from possible cross-jumping. */
3270 if (sets_cc0_p (p1) || sets_cc0_p (p2))
3271 last1 = afterlast1, last2 = afterlast2, ninsns--;
3272 #endif
3273 break;
3276 win:
3277 if (GET_CODE (p1) != USE && GET_CODE (p1) != CLOBBER)
3279 /* Ok, this insn is potentially includable in a cross-jump here. */
3280 afterlast1 = last1, afterlast2 = last2;
3281 last1 = i1, last2 = i2;
3282 ninsns++;
3285 if (i2 == bb2->end)
3286 break;
3287 i2 = PREV_INSN (i2);
3290 /* Skip the notes to reach potential head of basic block. */
3291 while (last1 != bb1->head && GET_CODE (PREV_INSN (last1)) == NOTE)
3292 last1 = PREV_INSN (last1);
3293 if (last1 != bb1->head && GET_CODE (PREV_INSN (last1)) == CODE_LABEL)
3294 last1 = PREV_INSN (last1);
3295 while (last2 != bb2->head && GET_CODE (PREV_INSN (last2)) == NOTE)
3296 last2 = PREV_INSN (last2);
3297 if (last2 != bb2->head && GET_CODE (PREV_INSN (last2)) == CODE_LABEL)
3298 last2 = PREV_INSN (last2);
3300 *f1 = last1;
3301 *f2 = last2;
3302 return ninsns;
3305 /* Return true iff the outgoing edges of BB1 and BB2 match, together with
3306 the branch instruction. This means that if we commonize the control
3307 flow before the end of the basic block, the semantics remain unchanged.
3309 Assume that at least one outgoing edge is forwarded to the same
3310 location. */
3311 static bool
3312 outgoing_edges_match (bb1, bb2)
3313 basic_block bb1;
3314 basic_block bb2;
3316 /* BB1 has one successor, so we are seeing an unconditional jump. */
3317 if (bb1->succ && !bb1->succ->succ_next)
3318 return (bb2->succ && !bb2->succ->succ_next);
3320 /* Match conditional jumps - this may get tricky when fallthru and branch
3321 edges are crossed. */
3322 if (bb1->succ && bb1->succ->succ_next && !bb1->succ->succ_next->succ_next
3323 && any_condjump_p (bb1->end))
3325 edge b1, f1, b2, f2;
3326 bool reverse, match;
3327 rtx set1, set2, cond1, cond2;
3328 enum rtx_code code1, code2;
3330 if (!bb2->succ || !bb2->succ->succ_next
3331 || bb1->succ->succ_next->succ_next || !any_condjump_p (bb2->end))
3332 return false;
3333 b1 = BRANCH_EDGE (bb1);
3334 b2 = BRANCH_EDGE (bb2);
3335 f1 = FALLTHRU_EDGE (bb1);
3336 f2 = FALLTHRU_EDGE (bb2);
3338 /* Get around possible forwarders on fallthru edges. Other cases
3339 should be optimized out already. */
3340 if (forwarder_block_p (f1->dest))
3341 f1 = f1->dest->succ;
3342 if (forwarder_block_p (f2->dest))
3343 f2 = f2->dest->succ;
3345 /* To simplify use of this function, return false if there are
3346 unneeded forwarder blocks. These will get eliminated later
3347 during cleanup_cfg. */
3348 if (forwarder_block_p (f1->dest)
3349 || forwarder_block_p (f2->dest)
3350 || forwarder_block_p (b1->dest)
3351 || forwarder_block_p (b2->dest))
3352 return false;
3354 if (f1->dest == f2->dest && b1->dest == b2->dest)
3355 reverse = false;
3356 else if (f1->dest == b2->dest && b1->dest == f2->dest)
3357 reverse = true;
3358 else
3359 return false;
3361 set1 = pc_set (bb1->end);
3362 set2 = pc_set (bb2->end);
3363 if ((XEXP (SET_SRC (set1), 1) == pc_rtx)
3364 != (XEXP (SET_SRC (set2), 1) == pc_rtx))
3365 reverse = !reverse;
3367 cond1 = XEXP (SET_SRC (set1), 0);
3368 cond2 = XEXP (SET_SRC (set2), 0);
3369 code1 = GET_CODE (cond1);
3370 if (reverse)
3371 code2 = reversed_comparison_code (cond2, bb2->end);
3372 else
3373 code2 = GET_CODE (cond2);
3375 if (code2 == UNKNOWN)
3376 return false;
3378 /* See if we have a (cross) match in the codes and operands. */
3379 match = ((code1 == code2
3380 && rtx_renumbered_equal_p (XEXP (cond1, 0), XEXP (cond2, 0))
3381 && rtx_renumbered_equal_p (XEXP (cond1, 1), XEXP (cond2, 1)))
3382 || (code1 == swap_condition (code2)
3383 && rtx_renumbered_equal_p (XEXP (cond1, 1),
3384 XEXP (cond2, 0))
3385 && rtx_renumbered_equal_p (XEXP (cond1, 0),
3386 XEXP (cond2, 1))));
3387 /* If we return true, we will commonize the control flow.
3388 This also means that the resulting branch will carry only a single
3389 branch prediction note. To match, we require the resulting branch
3390 to still be well predictable. */
3391 if (match && !optimize_size)
3393 rtx note1, note2;
3394 int prob1, prob2;
3395 note1 = find_reg_note (bb1->end, REG_BR_PROB, 0);
3396 note2 = find_reg_note (bb2->end, REG_BR_PROB, 0);
3397 if (!note1 || !note2)
3398 return false;
3399 prob1 = INTVAL (XEXP (note1, 0));
3400 prob2 = INTVAL (XEXP (note2, 0));
3401 if (reverse)
3402 prob2 = REG_BR_PROB_BASE - prob2;
3404 /* ??? Later we should use basic block frequency to allow merging
3405 in the infrequent blocks, but at the moment it is not
3406 available when cleanup_cfg is run. */
3407 if (abs (prob1 - prob2) > REG_BR_PROB_BASE / 90)
3408 return false;
3410 if (rtl_dump_file && match)
3411 fprintf (rtl_dump_file, "Conditionals in bb %i and %i match.\n",
3412 bb1->index, bb2->index);
3413 return match;
3415 /* ??? We can handle computed jumps too. This may be important for
3416 inlined functions containing switch statements. Also jumps w/o
3417 fallthru edges can be handled by simply matching the whole insn. */
3418 return false;
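/* A worked example (hypothetical): a block ending in
	if (a < b) goto L1;  -- fallthru to L2
   matches a block ending in
	if (b > a) goto L1;  -- fallthru to L2
   through the swap_condition case above (LT equals GT with the
   operands exchanged), while
	if (a >= b) goto L2;  -- fallthru to L1
   matches with REVERSE set, through reversed_comparison_code.  */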
3421 /* Assume that E1 and E2 are the edges from the same basic block.
3422 Attempt to find common code on both paths, and forward the control
3423 flow from the first path to the second if such code exists. */
3424 static bool
3425 try_crossjump_to_edge (mode, e1, e2)
3426 int mode;
3427 edge e1, e2;
3429 int nmatch;
3430 basic_block redirect_to;
3431 rtx newpos1, newpos2;
3432 rtx first, last;
3433 edge s;
3434 rtx note;
3435 rtx label;
3436 rtx barrier;
3438 /* Skip forwarder blocks. This is needed to keep forced forwarders
3439 after conditional jumps from making us miss the optimization.
3441 We don't need to worry about multiple entry or chained forwarders, as they
3442 will be optimized out. */
3443 if (e1->src->pred && !e1->src->pred->pred_next
3444 && forwarder_block_p (e1->src))
3445 e1 = e1->src->pred;
3446 if (e2->src->pred && !e2->src->pred->pred_next
3447 && forwarder_block_p (e2->src))
3448 e2 = e2->src->pred;
3450 if (e1->src == ENTRY_BLOCK_PTR || e2->src == ENTRY_BLOCK_PTR)
3451 return false;
3452 if (e1->src == e2->src)
3453 return false;
3455 /* Seeing more than one forwarder block would confuse us later... */
3456 if (forwarder_block_p (e1->dest)
3457 && forwarder_block_p (e1->dest->succ->dest))
3458 return false;
3459 if (forwarder_block_p (e2->dest)
3460 && forwarder_block_p (e2->dest->succ->dest))
3461 return false;
3462 /* ... similarly for seeing dead code... */
3463 if (!e1->src->pred || !e2->src->pred)
3464 return false;
3465 /* ...similarly for non-jump edges. */
3466 if (e1->flags & EDGE_COMPLEX)
3467 return false;
3469 if (!outgoing_edges_match (e1->src, e2->src))
3470 return false;
3471 nmatch = flow_find_cross_jump (mode, e1->src, e2->src, &newpos1, &newpos2);
3472 if (!nmatch)
3473 return false;
3475 /* Avoid splitting if possible. */
3476 if (newpos2 == e2->src->head)
3477 redirect_to = e2->src;
3478 else
3480 if (rtl_dump_file)
3481 fprintf (rtl_dump_file, "Splitting bb %i before %i insns\n",
3482 e2->src->index, nmatch);
3483 redirect_to = split_block (e2->src, PREV_INSN (newpos2))->dest;
3486 if (rtl_dump_file)
3487 fprintf (rtl_dump_file,
3488 "Cross jumping from bb %i to bb %i. %i insn commoized\n",
3489 e1->src->index, e2->src->index, nmatch);
3491 redirect_to->count += e1->src->count;
3492 redirect_to->frequency += e1->src->frequency;
3494 /* Recompute the frequencies and counts of outgoing edges. */
3495 for (s = redirect_to->succ; s; s = s->succ_next)
3497 edge s2;
3498 basic_block d = (forwarder_block_p (s->dest) ? s->dest->succ->dest
3499 : s->dest);
3500 for (s2 = e1->src->succ;; s2 = s2->succ_next)
3502 basic_block d2 =
3503 (forwarder_block_p (s2->dest) ? s2->dest->succ->dest : s2->dest);
3504 if (d == d2)
3505 break;
3507 s->count += s2->count;
3509 /* Take care to update possible forwarder blocks. We verified
3510 that there is no more than one in a chain, so we can't run
3511 into an infinite loop. */
3512 if (forwarder_block_p (s->dest))
3514 s->dest->succ->count += s2->count;
3515 s->dest->count += s2->count;
3516 s->dest->frequency += ((s->probability * s->src->frequency)
3517 / REG_BR_PROB_BASE);
3519 if (forwarder_block_p (s2->dest))
3521 s2->dest->succ->count -= s2->count;
3522 s2->dest->count -= s2->count;
3523 s2->dest->frequency -= ((s->probability * s->src->frequency)
3524 / REG_BR_PROB_BASE);
3526 if (!redirect_to->frequency && !e1->src->frequency)
3527 s->probability = (s->probability + s2->probability) / 2;
3528 else
3529 s->probability =
3530 ((s->probability * redirect_to->frequency +
3531 s2->probability * e1->src->frequency)
3532 / (redirect_to->frequency + e1->src->frequency));
3535 /* FIXME: enable once probabilities are fetched properly at
3536 CFG build time. */
3537 #if 0
3538 note = find_reg_note (redirect_to->end, REG_BR_PROB, 0);
3539 if (note)
3540 XEXP (note, 0) = GEN_INT (BRANCH_EDGE (redirect_to)->probability);
3541 #endif
3543 /* Skip possible basic block header. */
3544 first = newpos1;
3545 if (GET_CODE (first) == CODE_LABEL)
3546 first = NEXT_INSN (first);
3547 if (GET_CODE (first) == NOTE)
3548 first = NEXT_INSN (first);
3550 last = e1->src->end;
3552 /* Now emit the jump insn. */
3553 label = block_label (redirect_to);
3554 e1->src->end = emit_jump_insn_after (gen_jump (label), e1->src->end);
3555 JUMP_LABEL (e1->src->end) = label;
3556 LABEL_NUSES (label)++;
3557 if (basic_block_for_insn)
3558 set_block_for_insn (e1->src->end, e1->src);
3560 flow_delete_insn_chain (first, last);
3562 barrier = next_nonnote_insn (e1->src->end);
3563 if (!barrier || GET_CODE (barrier) != BARRIER)
3564 emit_barrier_after (e1->src->end);
3566 /* Update CFG. */
3567 while (e1->src->succ->succ_next)
3568 remove_edge (e1->src->succ);
3569 e1->src->succ->flags = 0;
3570 redirect_edge_succ (e1->src->succ, redirect_to);
3571 return true;
3574 /* Attempt to implement cross jumping. This means moving one or more
3575 branches to BB earlier, into BB's predecessors, commonizing some code. */
3576 static bool
3577 try_crossjump_bb (mode, bb)
3578 int mode;
3579 basic_block bb;
3581 edge e, e2, nexte2, nexte, fallthru;
3582 bool changed = false;
3584 /* If the basic block has a single predecessor, do nothing. */
3585 if (!bb->pred || !bb->pred->pred_next)
3586 return false;
3588 /* It is always cheapest to cross jump into the fallthru edge. */
3589 for (fallthru = bb->pred; fallthru; fallthru = fallthru->pred_next)
3590 if (fallthru->flags & EDGE_FALLTHRU)
3591 break;
3593 for (e = bb->pred; e; e = nexte)
3595 nexte = e->pred_next;
3596 /* First of all prioritize the fallthru edge, as the cheapest. */
3597 if (e != fallthru && fallthru
3598 && try_crossjump_to_edge (mode, e, fallthru))
3599 changed = true, nexte = bb->pred;
3600 else
3601 /* Try to match in the other incoming edges.
3603 Loop only over the earlier edges to avoid duplicate work, as the
3604 later ones will be examined from the opposite direction. */
3605 for (e2 = bb->pred; e2 != e; e2 = nexte2)
3607 nexte2 = e2->pred_next;
3608 if (e2 != fallthru && try_crossjump_to_edge (mode, e, e2))
3610 changed = true;
3611 nexte = bb->pred;
3613 /* We may have removed the fallthru edge. */
3614 for (fallthru = bb->pred; fallthru;
3615 fallthru = fallthru->pred_next)
3616 if (fallthru->flags & EDGE_FALLTHRU)
3617 break;
3618 break;
3622 return changed;
3625 /* Do simple CFG optimizations - basic block merging, simplifying of jump
3626 instructions etc.
3628 Return nonzero if any optimization matched. */
3630 static bool
3631 try_optimize_cfg (mode)
3632 int mode;
3634 int i;
3635 bool changed_overall = 0;
3636 bool changed;
3637 int iterations = 0;
3639 /* Attempt to merge blocks as made possible by edge removal. If a block
3640 has only one successor, and the successor has only one predecessor,
3641 they may be combined. */
3643 do
3645 changed = 0;
3646 iterations++;
3647 if (rtl_dump_file)
3648 fprintf (rtl_dump_file, "\n\ntry_optimize_cfg iteration %i\n\n",
3649 iterations);
3650 for (i = 0; i < n_basic_blocks;)
3652 basic_block c, b = BASIC_BLOCK (i);
3653 edge s;
3654 int changed_here = 0;
3656 /* Delete trivially dead basic blocks. */
3657 while (b->pred == NULL)
3659 c = BASIC_BLOCK (b->index - 1);
3660 if (rtl_dump_file)
3661 fprintf (rtl_dump_file, "Deleting block %i.\n", b->index);
3662 flow_delete_block (b);
3663 changed = 1;
3664 b = c;
3666 /* The fallthru forwarder block can be deleted. */
3667 if (b->pred->pred_next == NULL
3668 && forwarder_block_p (b)
3669 && n_basic_blocks > 1
3670 && (b->pred->flags & EDGE_FALLTHRU)
3671 && (b->succ->flags & EDGE_FALLTHRU))
3673 if (rtl_dump_file)
3674 fprintf (rtl_dump_file, "Deleting fallthru block %i.\n",
3675 b->index);
3676 c = BASIC_BLOCK (i ? i - 1 : i + 1);
3677 redirect_edge_succ (b->pred, b->succ->dest);
3678 flow_delete_block (b);
3679 changed = 1;
3680 b = c;
3683 /* A loop because chains of blocks might be combinable. */
3684 while ((s = b->succ) != NULL
3685 && s->succ_next == NULL
3686 && (s->flags & EDGE_EH) == 0
3687 && (c = s->dest) != EXIT_BLOCK_PTR
3688 && c->pred->pred_next == NULL
3689 /* If the jump insn has side effects,
3690 we can't kill the edge. */
3691 && (GET_CODE (b->end) != JUMP_INSN
3692 || onlyjump_p (b->end)) && merge_blocks (s, b, c, mode))
3693 changed_here = 1;
3695 if ((mode & CLEANUP_EXPENSIVE) && try_simplify_condjump (b))
3696 changed_here = 1;
3698 /* If a basic block has a single outgoing edge, but ends with a
3699 non-trivial jump instruction, we can replace it by an unconditional
3700 jump, or delete the jump completely. Use the logic of
3701 redirect_edge_and_branch to do the dirty work for us.
3703 We match cases such as conditional jumps jumping to the next block
3704 and dispatch tables. */
3706 if (b->succ
3707 && b->succ->succ_next == NULL
3708 && GET_CODE (b->end) == JUMP_INSN
3709 && b->succ->dest != EXIT_BLOCK_PTR
3710 && redirect_edge_and_branch (b->succ, b->succ->dest))
3711 changed_here = 1;
3713 if (try_forward_edges (b))
3714 changed_here = 1;
3716 if ((mode & CLEANUP_CROSSJUMP) && try_crossjump_bb (mode, b))
3717 changed_here = 1;
3719 /* Don't get confused by the index shift caused by deleting
3720 blocks. */
3721 if (!changed_here)
3722 i = b->index + 1;
3723 else
3724 changed = 1;
3726 if ((mode & CLEANUP_CROSSJUMP) && try_crossjump_bb (mode, EXIT_BLOCK_PTR))
3727 changed = 1;
3728 #ifdef ENABLE_CHECKING
3729 if (changed)
3730 verify_flow_info ();
3731 #endif
3732 changed_overall |= changed;
3734 while (changed);
3735 return changed_overall;
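/* A hypothetical driver sketch (illustration only, not compiled); a
   caller such as cleanup_cfg invokes the routine above in roughly
   this way.  */
#if 0
  {
    delete_unreachable_blocks ();
    if (try_optimize_cfg (CLEANUP_EXPENSIVE | CLEANUP_CROSSJUMP))
      delete_unreachable_blocks ();
  }
#endif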
3738 /* The given edge should potentially be a fallthru edge. If that is in
3739 fact true, delete the jump and barriers that are in the way. */
3741 void
3742 tidy_fallthru_edge (e, b, c)
3743 edge e;
3744 basic_block b, c;
3746 rtx q;
3748 /* ??? In a late-running flow pass, other folks may have deleted basic
3749 blocks by nopping out blocks, leaving multiple BARRIERs between here
3750 and the target label. They ought to be chastized and fixed.
3752 We can also wind up with a sequence of undeletable labels between
3753 one block and the next.
3755 So search through a sequence of barriers, labels, and notes for
3756 the head of block C and assert that we really do fall through. */
3758 if (next_real_insn (b->end) != next_real_insn (PREV_INSN (c->head)))
3759 return;
3761 /* Remove what will soon cease being the jump insn from the source block.
3762 If block B consisted only of this single jump, turn it into a deleted
3763 note. */
3764 q = b->end;
3765 if (GET_CODE (q) == JUMP_INSN
3766 && onlyjump_p (q)
3767 && (any_uncondjump_p (q)
3768 || (b->succ == e && e->succ_next == NULL)))
3770 #ifdef HAVE_cc0
3771 /* If this was a conditional jump, we need to also delete
3772 the insn that set cc0. */
3773 if (any_condjump_p (q) && sets_cc0_p (PREV_INSN (q)))
3774 q = PREV_INSN (q);
3775 #endif
3777 if (b->head == q)
3779 PUT_CODE (q, NOTE);
3780 NOTE_LINE_NUMBER (q) = NOTE_INSN_DELETED;
3781 NOTE_SOURCE_FILE (q) = 0;
3783 else
3785 q = PREV_INSN (q);
3787 /* We don't want a block to end on a line-number note since that has
3788 the potential of changing the code between -g and not -g. */
3789 while (GET_CODE (q) == NOTE && NOTE_LINE_NUMBER (q) >= 0)
3790 q = PREV_INSN (q);
3793 b->end = q;
3796 /* Selectively unlink the sequence. */
3797 if (q != PREV_INSN (c->head))
3798 flow_delete_insn_chain (NEXT_INSN (q), PREV_INSN (c->head));
3800 e->flags |= EDGE_FALLTHRU;
3803 /* Fix up edges that now fall through, or rather should now fall through
3804 but previously required a jump around now deleted blocks. Simplify
3805 the search by only examining blocks numerically adjacent, since this
3806 is how find_basic_blocks created them. */
3808 static void
3809 tidy_fallthru_edges ()
3811 int i;
3813 for (i = 1; i < n_basic_blocks; ++i)
3815 basic_block b = BASIC_BLOCK (i - 1);
3816 basic_block c = BASIC_BLOCK (i);
3817 edge s;
3819 /* We care about simple conditional or unconditional jumps with
3820 a single successor.
3822 If we had a conditional branch to the next instruction when
3823 find_basic_blocks was called, then there will only be one
3824 out edge for the block which ended with the conditional
3825 branch (since we do not create duplicate edges).
3827 Furthermore, the edge will be marked as a fallthru because we
3828 merge the flags for the duplicate edges. So we do not want to
3829 check that the edge is not a FALLTHRU edge. */
3830 if ((s = b->succ) != NULL
3831 && ! (s->flags & EDGE_COMPLEX)
3832 && s->succ_next == NULL
3833 && s->dest == c
3834 /* If the jump insn has side effects, we can't tidy the edge. */
3835 && (GET_CODE (b->end) != JUMP_INSN
3836 || onlyjump_p (b->end)))
3837 tidy_fallthru_edge (s, b, c);
3841 /* Perform data flow analysis.
3842 F is the first insn of the function; FLAGS is a set of PROP_* flags
3843 to be used in accumulating flow info. */
3845 void
3846 life_analysis (f, file, flags)
3847 rtx f;
3848 FILE *file;
3849 int flags;
3851 #ifdef ELIMINABLE_REGS
3852 register int i;
3853 static struct {int from, to; } eliminables[] = ELIMINABLE_REGS;
3854 #endif
3856 /* Record which registers will be eliminated. We use this in
3857 mark_used_regs. */
3859 CLEAR_HARD_REG_SET (elim_reg_set);
3861 #ifdef ELIMINABLE_REGS
3862 for (i = 0; i < (int) ARRAY_SIZE (eliminables); i++)
3863 SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
3864 #else
3865 SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
3866 #endif
3868 if (! optimize)
3869 flags &= ~(PROP_LOG_LINKS | PROP_AUTOINC);
3871 /* The post-reload life analysis has (on a global basis) the same
3872 set of registers live as was computed by reload itself;
3873 otherwise elimination offsets and such may be incorrect.
3875 Reload will mark some registers as live even though they do not
3876 appear in the rtl.
3878 We don't want to create new auto-incs after reload, since they
3879 are unlikely to be useful and can cause problems with shared
3880 stack slots. */
3881 if (reload_completed)
3882 flags &= ~(PROP_REG_INFO | PROP_AUTOINC);
3884 /* We want alias analysis information for local dead store elimination. */
3885 if (optimize && (flags & PROP_SCAN_DEAD_CODE))
3886 init_alias_analysis ();
3888 /* Always remove no-op moves. Do this before other processing so
3889 that we don't have to keep re-scanning them. */
3890 delete_noop_moves (f);
3892 /* Some targets can emit simpler epilogues if they know that sp was
3893 not ever modified during the function. After reload, of course,
3894 we've already emitted the epilogue so there's no sense searching. */
3895 if (! reload_completed)
3896 notice_stack_pointer_modification (f);
3898 /* Allocate and zero out data structures that will record the
3899 data from lifetime analysis. */
3900 allocate_reg_life_data ();
3901 allocate_bb_life_data ();
3903 /* Find the set of registers live on function exit. */
3904 mark_regs_live_at_end (EXIT_BLOCK_PTR->global_live_at_start);
3906 /* "Update" life info from zero. It'd be nice to begin the
3907 relaxation with just the exit and noreturn blocks, but that set
3908 is not immediately handy. */
3910 if (flags & PROP_REG_INFO)
3911 memset (regs_ever_live, 0, sizeof (regs_ever_live));
3912 update_life_info (NULL, UPDATE_LIFE_GLOBAL, flags);
3914 /* Clean up. */
3915 if (optimize && (flags & PROP_SCAN_DEAD_CODE))
3916 end_alias_analysis ();
3918 if (file)
3919 dump_flow_info (file);
3921 free_basic_block_vars (1);
3923 #ifdef ENABLE_CHECKING
3925 rtx insn;
3927 /* Search for any REG_LABEL notes which reference deleted labels. */
3928 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3930 rtx inote = find_reg_note (insn, REG_LABEL, NULL_RTX);
3932 if (inote && GET_CODE (XEXP (inote, 0)) == NOTE)
3933 abort ();
3936 #endif
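/* A sketch of a typical invocation of the pass above, assuming the CFG
   has already been built and that PROP_FINAL (the union of the usual
   PROP_* flags from basic-block.h) is the desired flag set:

     find_basic_blocks (f, max_reg_num (), rtl_dump_file);
     life_analysis (f, rtl_dump_file, PROP_FINAL);

   Illustration only; each caller picks the flags that match what it
   needs recomputed.  */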
3939 /* A subroutine of verify_wide_reg, called through for_each_rtx.
3940 Search for REGNO. If found, abort if it is not wider than word_mode. */
3942 static int
3943 verify_wide_reg_1 (px, pregno)
3944 rtx *px;
3945 void *pregno;
3947 rtx x = *px;
3948 unsigned int regno = *(int *) pregno;
3950 if (GET_CODE (x) == REG && REGNO (x) == regno)
3952 if (GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD)
3953 abort ();
3954 return 1;
3956 return 0;
3959 /* A subroutine of verify_local_live_at_start. Search through insns
3960 between HEAD and END looking for register REGNO. */
3962 static void
3963 verify_wide_reg (regno, head, end)
3964 int regno;
3965 rtx head, end;
3967 while (1)
3969 if (INSN_P (head)
3970 && for_each_rtx (&PATTERN (head), verify_wide_reg_1, &regno))
3971 return;
3972 if (head == end)
3973 break;
3974 head = NEXT_INSN (head);
3977 /* We didn't find the register at all. Something's way screwy. */
3978 if (rtl_dump_file)
3979 fprintf (rtl_dump_file, "Aborting in verify_wide_reg; reg %d\n", regno);
3980 print_rtl_and_abort ();
3983 /* A subroutine of update_life_info. Verify that there are no untoward
3984 changes in live_at_start during a local update. */
3986 static void
3987 verify_local_live_at_start (new_live_at_start, bb)
3988 regset new_live_at_start;
3989 basic_block bb;
3991 if (reload_completed)
3993 /* After reload, there are no pseudos, nor subregs of multi-word
3994 registers. The regsets should exactly match. */
3995 if (! REG_SET_EQUAL_P (new_live_at_start, bb->global_live_at_start))
3997 if (rtl_dump_file)
3999 fprintf (rtl_dump_file,
4000 "live_at_start mismatch in bb %d, aborting\n",
4001 bb->index);
4002 debug_bitmap_file (rtl_dump_file, bb->global_live_at_start);
4003 debug_bitmap_file (rtl_dump_file, new_live_at_start);
4005 print_rtl_and_abort ();
4008 else
4010 int i;
4012 /* Find the set of changed registers. */
4013 XOR_REG_SET (new_live_at_start, bb->global_live_at_start);
4015 EXECUTE_IF_SET_IN_REG_SET (new_live_at_start, 0, i,
4017 /* No registers should die. */
4018 if (REGNO_REG_SET_P (bb->global_live_at_start, i))
4020 if (rtl_dump_file)
4021 fprintf (rtl_dump_file,
4022 "Register %d died unexpectedly in block %d\n", i,
4023 bb->index);
4024 print_rtl_and_abort ();
4027 /* Verify that the now-live register is wider than word_mode. */
4028 verify_wide_reg (i, bb->head, bb->end);
4033 /* Updates life information starting with the basic blocks set in BLOCKS.
4034 If BLOCKS is null, consider it to be the universal set.
4036 If EXTENT is UPDATE_LIFE_LOCAL, such as after splitting or peepholing,
4037 we are only expecting local modifications to basic blocks. If we find
4038 extra registers live at the beginning of a block, then we either killed
4039 useful data, or we have a broken split that wants data not provided.
4040 If we find registers removed from live_at_start, that means we have
4041 a broken peephole that is killing a register it shouldn't.
4043 ??? This is not true in one situation -- when a pre-reload splitter
4044 generates subregs of a multi-word pseudo, current life analysis will
4045 lose the kill. So we _can_ have a pseudo go live. How irritating.
4047 Including PROP_REG_INFO does not properly refresh regs_ever_live
4048 unless the caller resets it to zero. */
4050 void
4051 update_life_info (blocks, extent, prop_flags)
4052 sbitmap blocks;
4053 enum update_life_extent extent;
4054 int prop_flags;
4056 regset tmp;
4057 regset_head tmp_head;
4058 int i;
4060 tmp = INITIALIZE_REG_SET (tmp_head);
4062 /* For a global update, we go through the relaxation process again. */
4063 if (extent != UPDATE_LIFE_LOCAL)
4065 calculate_global_regs_live (blocks, blocks,
4066 prop_flags & PROP_SCAN_DEAD_CODE);
4068 /* If asked, remove notes from the blocks we'll update. */
4069 if (extent == UPDATE_LIFE_GLOBAL_RM_NOTES)
4070 count_or_remove_death_notes (blocks, 1);
4073 if (blocks)
4075 EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
4077 basic_block bb = BASIC_BLOCK (i);
4079 COPY_REG_SET (tmp, bb->global_live_at_end);
4080 propagate_block (bb, tmp, NULL, NULL, prop_flags);
4082 if (extent == UPDATE_LIFE_LOCAL)
4083 verify_local_live_at_start (tmp, bb);
4086 else
4088 for (i = n_basic_blocks - 1; i >= 0; --i)
4090 basic_block bb = BASIC_BLOCK (i);
4092 COPY_REG_SET (tmp, bb->global_live_at_end);
4093 propagate_block (bb, tmp, NULL, NULL, prop_flags);
4095 if (extent == UPDATE_LIFE_LOCAL)
4096 verify_local_live_at_start (tmp, bb);
4100 FREE_REG_SET (tmp);
4102 if (prop_flags & PROP_REG_INFO)
4104 /* The only pseudos that are live at the beginning of the function
4105 are those that were not set anywhere in the function. local-alloc
4106 doesn't know how to handle these correctly, so mark them as not
4107 local to any one basic block. */
4108 EXECUTE_IF_SET_IN_REG_SET (ENTRY_BLOCK_PTR->global_live_at_end,
4109 FIRST_PSEUDO_REGISTER, i,
4110 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
4112 /* We have a problem with any pseudoreg that lives across the setjmp.
4113 ANSI says that if a user variable does not change in value between
4114 the setjmp and the longjmp, then the longjmp preserves it. This
4115 includes longjmp from a place where the pseudo appears dead.
4116 (In principle, the value still exists if it is in scope.)
4117 If the pseudo goes in a hard reg, some other value may occupy
4118 that hard reg where this pseudo is dead, thus clobbering the pseudo.
4119 Conclusion: such a pseudo must not go in a hard reg. */
4120 EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
4121 FIRST_PSEUDO_REGISTER, i,
4123 if (regno_reg_rtx[i] != 0)
4125 REG_LIVE_LENGTH (i) = -1;
4126 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
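/* A sketch of how a pass that modified only a few blocks might refresh
   life information, assuming it recorded the touched blocks in an
   sbitmap.  Illustration only, not taken from an actual caller:

     sbitmap blocks = sbitmap_alloc (n_basic_blocks);
     sbitmap_zero (blocks);
     SET_BIT (blocks, bb->index);
     update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
     sbitmap_free (blocks);

   With UPDATE_LIFE_LOCAL, verify_local_live_at_start above aborts if
   the modification changed liveness at a block boundary.  */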
4132 /* Free the variables allocated by find_basic_blocks.
4134 KEEP_HEAD_END_P is non-zero if basic_block_info is not to be freed. */
4136 void
4137 free_basic_block_vars (keep_head_end_p)
4138 int keep_head_end_p;
4140 if (basic_block_for_insn)
4142 VARRAY_FREE (basic_block_for_insn);
4143 basic_block_for_insn = NULL;
4146 if (! keep_head_end_p)
4148 if (basic_block_info)
4150 clear_edges ();
4151 VARRAY_FREE (basic_block_info);
4153 n_basic_blocks = 0;
4155 ENTRY_BLOCK_PTR->aux = NULL;
4156 ENTRY_BLOCK_PTR->global_live_at_end = NULL;
4157 EXIT_BLOCK_PTR->aux = NULL;
4158 EXIT_BLOCK_PTR->global_live_at_start = NULL;
4162 /* Return nonzero if an insn consists only of SETs, each of which only sets a
4163 value to itself. */
4165 static int
4166 noop_move_p (insn)
4167 rtx insn;
4169 rtx pat = PATTERN (insn);
4171 /* Insns carrying these notes are useful later on. */
4172 if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
4173 return 0;
4175 if (GET_CODE (pat) == SET && set_noop_p (pat))
4176 return 1;
4178 if (GET_CODE (pat) == PARALLEL)
4180 int i;
4181 /* If nothing but SETs of registers to themselves,
4182 this insn can also be deleted. */
4183 for (i = 0; i < XVECLEN (pat, 0); i++)
4185 rtx tem = XVECEXP (pat, 0, i);
4187 if (GET_CODE (tem) == USE
4188 || GET_CODE (tem) == CLOBBER)
4189 continue;
4191 if (GET_CODE (tem) != SET || ! set_noop_p (tem))
4192 return 0;
4195 return 1;
4197 return 0;
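/* For concreteness, noop_move_p accepts

     (set (reg:SI 58) (reg:SI 58))

   and PARALLELs whose SETs are all such self-copies (USEs and CLOBBERs
   are ignored), but rejects any insn carrying a REG_EQUAL note, since
   later passes may still want the recorded equivalence.  The register
   numbers are made up.  */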
4200 /* Delete any insns that copy a register to itself. */
4202 static void
4203 delete_noop_moves (f)
4204 rtx f;
4206 rtx insn;
4207 for (insn = f; insn; insn = NEXT_INSN (insn))
4209 if (GET_CODE (insn) == INSN && noop_move_p (insn))
4211 PUT_CODE (insn, NOTE);
4212 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
4213 NOTE_SOURCE_FILE (insn) = 0;
4218 /* Determine if the stack pointer is constant over the life of the function.
4219 Only useful before prologues have been emitted. */
4221 static void
4222 notice_stack_pointer_modification_1 (x, pat, data)
4223 rtx x;
4224 rtx pat ATTRIBUTE_UNUSED;
4225 void *data ATTRIBUTE_UNUSED;
4227 if (x == stack_pointer_rtx
4228 /* The stack pointer is only modified indirectly as the result
4229 of a push until later in flow. See the comments in rtl.texi
4230 regarding Embedded Side-Effects on Addresses. */
4231 || (GET_CODE (x) == MEM
4232 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == 'a'
4233 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx))
4234 current_function_sp_is_unchanging = 0;
4237 static void
4238 notice_stack_pointer_modification (f)
4239 rtx f;
4241 rtx insn;
4243 /* Assume that the stack pointer is unchanging if alloca hasn't
4244 been used. */
4245 current_function_sp_is_unchanging = !current_function_calls_alloca;
4246 if (! current_function_sp_is_unchanging)
4247 return;
4249 for (insn = f; insn; insn = NEXT_INSN (insn))
4251 if (INSN_P (insn))
4253 /* Check if insn modifies the stack pointer. */
4254 note_stores (PATTERN (insn), notice_stack_pointer_modification_1,
4255 NULL);
4256 if (! current_function_sp_is_unchanging)
4257 return;
4262 /* Mark a register in SET. Hard registers in large modes get all
4263 of their component registers set as well. */
4265 static void
4266 mark_reg (reg, xset)
4267 rtx reg;
4268 void *xset;
4270 regset set = (regset) xset;
4271 int regno = REGNO (reg);
4273 if (GET_MODE (reg) == BLKmode)
4274 abort ();
4276 SET_REGNO_REG_SET (set, regno);
4277 if (regno < FIRST_PSEUDO_REGISTER)
4279 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
4280 while (--n > 0)
4281 SET_REGNO_REG_SET (set, regno + n);
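/* For example, on a 32-bit target where (reg:DI 0) occupies hard
   registers 0 and 1, mark_reg sets bits 0 and 1 in SET; for a pseudo
   such as (reg:DI 70), HARD_REGNO_NREGS is not consulted and only bit
   70 is set.  Hypothetical register numbers.  */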
4285 /* Mark those regs which are needed at the end of the function as live
4286 at the end of the last basic block. */
4288 static void
4289 mark_regs_live_at_end (set)
4290 regset set;
4292 unsigned int i;
4294 /* If exiting needs the right stack value, consider the stack pointer
4295 live at the end of the function. */
4296 if ((HAVE_epilogue && reload_completed)
4297 || ! EXIT_IGNORE_STACK
4298 || (! FRAME_POINTER_REQUIRED
4299 && ! current_function_calls_alloca
4300 && flag_omit_frame_pointer)
4301 || current_function_sp_is_unchanging)
4303 SET_REGNO_REG_SET (set, STACK_POINTER_REGNUM);
4306 /* Mark the frame pointer if needed at the end of the function. If
4307 we end up eliminating it, it will be removed from the live list
4308 of each basic block by reload. */
4310 if (! reload_completed || frame_pointer_needed)
4312 SET_REGNO_REG_SET (set, FRAME_POINTER_REGNUM);
4313 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4314 /* If they are different, also mark the hard frame pointer as live. */
4315 if (! LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
4316 SET_REGNO_REG_SET (set, HARD_FRAME_POINTER_REGNUM);
4317 #endif
4320 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
4321 /* Many architectures have a GP register even without flag_pic.
4322 Assume the pic register is not in use, or will be handled by
4323 other means, if it is not fixed. */
4324 if (PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
4325 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
4326 SET_REGNO_REG_SET (set, PIC_OFFSET_TABLE_REGNUM);
4327 #endif
4329 /* Mark all global registers, and all registers used by the epilogue
4330 as being live at the end of the function since they may be
4331 referenced by our caller. */
4332 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4333 if (global_regs[i] || EPILOGUE_USES (i))
4334 SET_REGNO_REG_SET (set, i);
4336 if (HAVE_epilogue && reload_completed)
4338 /* Mark all call-saved registers that we actually used. */
4339 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4340 if (regs_ever_live[i] && ! call_used_regs[i] && ! LOCAL_REGNO (i))
4341 SET_REGNO_REG_SET (set, i);
4344 #ifdef EH_RETURN_DATA_REGNO
4345 /* Mark the registers that will contain data for the handler. */
4346 if (reload_completed && current_function_calls_eh_return)
4347 for (i = 0; ; ++i)
4349 unsigned regno = EH_RETURN_DATA_REGNO (i);
4350 if (regno == INVALID_REGNUM)
4351 break;
4352 SET_REGNO_REG_SET (set, regno);
4354 #endif
4355 #ifdef EH_RETURN_STACKADJ_RTX
4356 if ((! HAVE_epilogue || ! reload_completed)
4357 && current_function_calls_eh_return)
4359 rtx tmp = EH_RETURN_STACKADJ_RTX;
4360 if (tmp && REG_P (tmp))
4361 mark_reg (tmp, set);
4363 #endif
4364 #ifdef EH_RETURN_HANDLER_RTX
4365 if ((! HAVE_epilogue || ! reload_completed)
4366 && current_function_calls_eh_return)
4368 rtx tmp = EH_RETURN_HANDLER_RTX;
4369 if (tmp && REG_P (tmp))
4370 mark_reg (tmp, set);
4372 #endif
4374 /* Mark function return value. */
4375 diddle_return_value (mark_reg, set);
4378 /* Callback function for for_each_successor_phi. DATA is a regset.
4379 Sets SRC_REGNO, the regno of the phi alternative for phi node
4380 INSN, in that regset. */
4382 static int
4383 set_phi_alternative_reg (insn, dest_regno, src_regno, data)
4384 rtx insn ATTRIBUTE_UNUSED;
4385 int dest_regno ATTRIBUTE_UNUSED;
4386 int src_regno;
4387 void *data;
4389 regset live = (regset) data;
4390 SET_REGNO_REG_SET (live, src_regno);
4391 return 0;
4394 /* Propagate global life info around the graph of basic blocks. Begin
4395 considering blocks with their corresponding bit set in BLOCKS_IN.
4396 If BLOCKS_IN is null, consider it the universal set.
4398 BLOCKS_OUT is set for every block that was changed. */
4400 static void
4401 calculate_global_regs_live (blocks_in, blocks_out, flags)
4402 sbitmap blocks_in, blocks_out;
4403 int flags;
4405 basic_block *queue, *qhead, *qtail, *qend;
4406 regset tmp, new_live_at_end, call_used;
4407 regset_head tmp_head, call_used_head;
4408 regset_head new_live_at_end_head;
4409 int i;
4411 tmp = INITIALIZE_REG_SET (tmp_head);
4412 new_live_at_end = INITIALIZE_REG_SET (new_live_at_end_head);
4413 call_used = INITIALIZE_REG_SET (call_used_head);
4415 /* Inconveniently, this is only readily available in hard reg set form. */
4416 for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
4417 if (call_used_regs[i])
4418 SET_REGNO_REG_SET (call_used, i);
4420 /* Create a worklist. Allocate an extra slot for ENTRY_BLOCK, and one
4421 because the `head == tail' style test for an empty queue doesn't
4422 work with a full queue. */
4423 queue = (basic_block *) xmalloc ((n_basic_blocks + 2) * sizeof (*queue));
4424 qtail = queue;
4425 qhead = qend = queue + n_basic_blocks + 2;
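/* Note that the queue is used as a ring buffer of capacity
   n_basic_blocks + 2.  At most n_basic_blocks + 1 blocks (every real
   block, plus ENTRY_BLOCK queued as a predecessor) can be pending at
   once, so qhead == qtail can only mean "empty", never "full".  */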
4427 /* Queue the blocks set in the initial mask. Do this in reverse block
4428 number order so that the first round is more likely to do useful
4429 work. We use AUX non-null to flag that the block is queued. */
4430 if (blocks_in)
4432 /* Clear out the garbage that might be hanging out in bb->aux. */
4433 for (i = n_basic_blocks - 1; i >= 0; --i)
4434 BASIC_BLOCK (i)->aux = NULL;
4436 EXECUTE_IF_SET_IN_SBITMAP (blocks_in, 0, i,
4438 basic_block bb = BASIC_BLOCK (i);
4439 *--qhead = bb;
4440 bb->aux = bb;
4443 else
4445 for (i = 0; i < n_basic_blocks; ++i)
4447 basic_block bb = BASIC_BLOCK (i);
4448 *--qhead = bb;
4449 bb->aux = bb;
4453 if (blocks_out)
4454 sbitmap_zero (blocks_out);
4456 /* We work through the queue until there are no more blocks. What
4457 is live at the end of this block is precisely the union of what
4458 is live at the beginning of all its successors. So, we set its
4459 GLOBAL_LIVE_AT_END field based on the GLOBAL_LIVE_AT_START field
4460 for its successors. Then, we compute GLOBAL_LIVE_AT_START for
4461 this block by walking through the instructions in this block in
4462 reverse order and updating as we go. If that changed
4463 GLOBAL_LIVE_AT_START, we add the predecessors of the block to the
4464 queue; they will now need to recalculate GLOBAL_LIVE_AT_END.
4466 We are guaranteed to terminate, because GLOBAL_LIVE_AT_START
4467 never shrinks. If a register appears in GLOBAL_LIVE_AT_START, it
4468 must either be live at the end of the block, or used within the
4469 block. In the latter case, it will certainly never disappear
4470 from GLOBAL_LIVE_AT_START. In the former case, the register
4471 could go away only if it disappeared from GLOBAL_LIVE_AT_START
4472 for one of the successor blocks. By induction, that cannot
4473 occur. */
4474 while (qhead != qtail)
4476 int rescan, changed;
4477 basic_block bb;
4478 edge e;
4480 bb = *qhead++;
4481 if (qhead == qend)
4482 qhead = queue;
4483 bb->aux = NULL;
4485 /* Begin by propagating live_at_start from the successor blocks. */
4486 CLEAR_REG_SET (new_live_at_end);
4487 for (e = bb->succ; e; e = e->succ_next)
4489 basic_block sb = e->dest;
4491 /* Call-clobbered registers die across exception and call edges. */
4492 /* ??? Abnormal call edges ignored for the moment, as this gets
4493 confused by sibling call edges, which crashes reg-stack. */
4494 if (e->flags & EDGE_EH)
4496 bitmap_operation (tmp, sb->global_live_at_start,
4497 call_used, BITMAP_AND_COMPL);
4498 IOR_REG_SET (new_live_at_end, tmp);
4500 else
4501 IOR_REG_SET (new_live_at_end, sb->global_live_at_start);
4504 /* The all-important stack pointer must always be live. */
4505 SET_REGNO_REG_SET (new_live_at_end, STACK_POINTER_REGNUM);
4507 /* Before reload, there are a few registers that must be forced
4508 live everywhere -- which might not already be the case for
4509 blocks within infinite loops. */
4510 if (! reload_completed)
4512 /* Any reference to any pseudo before reload is a potential
4513 reference of the frame pointer. */
4514 SET_REGNO_REG_SET (new_live_at_end, FRAME_POINTER_REGNUM);
4516 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4517 /* Pseudos with argument area equivalences may require
4518 reloading via the argument pointer. */
4519 if (fixed_regs[ARG_POINTER_REGNUM])
4520 SET_REGNO_REG_SET (new_live_at_end, ARG_POINTER_REGNUM);
4521 #endif
4523 /* Any constant, or pseudo with constant equivalences, may
4524 require reloading from memory using the pic register. */
4525 if (PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
4526 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
4527 SET_REGNO_REG_SET (new_live_at_end, PIC_OFFSET_TABLE_REGNUM);
4530 /* Regs used in phi nodes are not included in
4531 global_live_at_start, since they are live only along a
4532 particular edge. Set those regs that are live because of a
4533 phi node alternative corresponding to this particular block. */
4534 if (in_ssa_form)
4535 for_each_successor_phi (bb, &set_phi_alternative_reg,
4536 new_live_at_end);
4538 if (bb == ENTRY_BLOCK_PTR)
4540 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
4541 continue;
4544 /* On our first pass through this block, we'll go ahead and continue.
4545 Recognize the first pass by local_set being NULL. On subsequent passes, we
4546 get to skip out early if live_at_end wouldn't have changed. */
4548 if (bb->local_set == NULL)
4550 bb->local_set = OBSTACK_ALLOC_REG_SET (&flow_obstack);
4551 bb->cond_local_set = OBSTACK_ALLOC_REG_SET (&flow_obstack);
4552 rescan = 1;
4554 else
4556 /* If any bits were removed from live_at_end, we'll have to
4557 rescan the block. This wouldn't be necessary if we had
4558 precalculated local_live, however with PROP_SCAN_DEAD_CODE
4559 local_live is really dependent on live_at_end. */
4560 CLEAR_REG_SET (tmp);
4561 rescan = bitmap_operation (tmp, bb->global_live_at_end,
4562 new_live_at_end, BITMAP_AND_COMPL);
4564 if (! rescan)
4566 /* If any of the registers in the new live_at_end set are
4567 conditionally set in this basic block, we must rescan.
4568 This is because conditional lifetimes at the end of the
4569 block do not just take the live_at_end set into account,
4570 but also the liveness at the start of each successor
4571 block. We can miss changes in those sets if we only
4572 compare the new live_at_end against the previous one. */
4573 CLEAR_REG_SET (tmp);
4574 rescan = bitmap_operation (tmp, new_live_at_end,
4575 bb->cond_local_set, BITMAP_AND);
4578 if (! rescan)
4580 /* Find the set of changed bits. Take this opportunity
4581 to notice when this set is empty, and exit early if so. */
4582 CLEAR_REG_SET (tmp);
4583 changed = bitmap_operation (tmp, bb->global_live_at_end,
4584 new_live_at_end, BITMAP_XOR);
4585 if (! changed)
4586 continue;
4588 /* If any of the changed bits overlap with local_set,
4589 we'll have to rescan the block. Detect overlap by
4590 the AND with ~local_set turning off bits. */
4591 rescan = bitmap_operation (tmp, tmp, bb->local_set,
4592 BITMAP_AND_COMPL);
4596 /* Let our caller know that BB changed enough to require its
4597 death notes updated. */
4598 if (blocks_out)
4599 SET_BIT (blocks_out, bb->index);
4601 if (! rescan)
4603 /* Add to live_at_start the set of all registers in
4604 new_live_at_end that aren't in the old live_at_end. */
4606 bitmap_operation (tmp, new_live_at_end, bb->global_live_at_end,
4607 BITMAP_AND_COMPL);
4608 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
4610 changed = bitmap_operation (bb->global_live_at_start,
4611 bb->global_live_at_start,
4612 tmp, BITMAP_IOR);
4613 if (! changed)
4614 continue;
4616 else
4618 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
4620 /* Rescan the block insn by insn to turn (a copy of) live_at_end
4621 into live_at_start. */
4622 propagate_block (bb, new_live_at_end, bb->local_set,
4623 bb->cond_local_set, flags);
4625 /* If live_at_start didn't change, no need to go farther. */
4626 if (REG_SET_EQUAL_P (bb->global_live_at_start, new_live_at_end))
4627 continue;
4629 COPY_REG_SET (bb->global_live_at_start, new_live_at_end);
4632 /* Queue all predecessors of BB so that we may re-examine
4633 their live_at_end. */
4634 for (e = bb->pred; e; e = e->pred_next)
4636 basic_block pb = e->src;
4637 if (pb->aux == NULL)
4639 *qtail++ = pb;
4640 if (qtail == qend)
4641 qtail = queue;
4642 pb->aux = pb;
4647 FREE_REG_SET (tmp);
4648 FREE_REG_SET (new_live_at_end);
4649 FREE_REG_SET (call_used);
4651 if (blocks_out)
4653 EXECUTE_IF_SET_IN_SBITMAP (blocks_out, 0, i,
4655 basic_block bb = BASIC_BLOCK (i);
4656 FREE_REG_SET (bb->local_set);
4657 FREE_REG_SET (bb->cond_local_set);
4660 else
4662 for (i = n_basic_blocks - 1; i >= 0; --i)
4664 basic_block bb = BASIC_BLOCK (i);
4665 FREE_REG_SET (bb->local_set);
4666 FREE_REG_SET (bb->cond_local_set);
4670 free (queue);
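/* A sketch of the fixed point computed above, reduced to its core.
   This is a self-contained model, not the GCC API: blocks are numbered
   0..NBLOCKS-1, register sets are plain bit masks, and each block's USE
   and DEF sets are given up front instead of being recomputed the way
   propagate_block does.  It solves the same equations: live_out(b) is
   the union of live_in(s) over all successors s, and live_in(b) =
   use(b) | (live_out(b) & ~def(b)), iterated to a fixed point.  The
   real code converges faster by requeueing only the predecessors of
   blocks whose live_in changed.  */
#if 0 /* illustration only; not compiled as part of flow.c */
#include <stdio.h>

#define NBLOCKS 3

static const unsigned use_set[NBLOCKS] = { 0x1, 0x2, 0x4 };
static const unsigned def_set[NBLOCKS] = { 0x2, 0x4, 0x1 };
/* succ[b] is terminated by -1; block 2 loops back to block 0.  */
static const int succ[NBLOCKS][2] = { { 1, -1 }, { 2, -1 }, { 0, -1 } };

int
main ()
{
  unsigned live_in[NBLOCKS], live_out[NBLOCKS];
  int b, i, changed;

  for (b = 0; b < NBLOCKS; b++)
    live_in[b] = live_out[b] = 0;

  do
    {
      changed = 0;
      for (b = NBLOCKS - 1; b >= 0; b--)
	{
	  unsigned out = 0, in;

	  for (i = 0; succ[b][i] >= 0; i++)
	    out |= live_in[succ[b][i]];
	  in = use_set[b] | (out & ~def_set[b]);
	  if (in != live_in[b] || out != live_out[b])
	    changed = 1;
	  live_in[b] = in;
	  live_out[b] = out;
	}
    }
  while (changed);

  for (b = 0; b < NBLOCKS; b++)
    printf ("bb %d: live_in = %#x, live_out = %#x\n",
	    b, live_in[b], live_out[b]);
  return 0;
}
#endif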
4673 /* Subroutines of life analysis. */
4675 /* Allocate the permanent data structures that represent the results
4676 of life analysis. */
4678 static void
4679 allocate_bb_life_data ()
4681 register int i;
4683 for (i = 0; i < n_basic_blocks; i++)
4685 basic_block bb = BASIC_BLOCK (i);
4687 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
4688 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
4691 ENTRY_BLOCK_PTR->global_live_at_end
4692 = OBSTACK_ALLOC_REG_SET (&flow_obstack);
4693 EXIT_BLOCK_PTR->global_live_at_start
4694 = OBSTACK_ALLOC_REG_SET (&flow_obstack);
4696 regs_live_at_setjmp = OBSTACK_ALLOC_REG_SET (&flow_obstack);
4699 void
4700 allocate_reg_life_data ()
4702 int i;
4704 max_regno = max_reg_num ();
4706 /* Recalculate the register space, in case it has grown. Old style
4707 vector oriented regsets would set regset_{size,bytes} here also. */
4708 allocate_reg_info (max_regno, FALSE, FALSE);
4710 /* Reset all the data we'll collect in propagate_block and its
4711 subroutines. */
4712 for (i = 0; i < max_regno; i++)
4714 REG_N_SETS (i) = 0;
4715 REG_N_REFS (i) = 0;
4716 REG_N_DEATHS (i) = 0;
4717 REG_N_CALLS_CROSSED (i) = 0;
4718 REG_LIVE_LENGTH (i) = 0;
4719 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
4723 /* Delete dead instructions for propagate_block. */
4725 static void
4726 propagate_block_delete_insn (bb, insn)
4727 basic_block bb;
4728 rtx insn;
4730 rtx inote = find_reg_note (insn, REG_LABEL, NULL_RTX);
4732 /* If the insn referred to a label, and that label was attached to
4733 an ADDR_VEC, it's safe to delete the ADDR_VEC. In fact, it's
4734 pretty much mandatory to delete it, because the ADDR_VEC may be
4735 referencing labels that no longer exist.
4737 INSN may reference a deleted label, particularly when a jump
4738 table has been optimized into a direct jump. There's no
4739 real good way to fix up the reference to the deleted label
4740 when the label is deleted, so we just allow it here.
4742 After dead code elimination is complete, we do search for
4743 any REG_LABEL notes which reference deleted labels as a
4744 sanity check. */
4746 if (inote && GET_CODE (XEXP (inote, 0)) == CODE_LABEL)
4748 rtx label = XEXP (inote, 0);
4749 rtx next;
4751 /* The label may be forced if it has been put in the constant
4752 pool. If that is the only use we must discard the table
4753 jump following it, but not the label itself. */
4754 if (LABEL_NUSES (label) == 1 + LABEL_PRESERVE_P (label)
4755 && (next = next_nonnote_insn (label)) != NULL
4756 && GET_CODE (next) == JUMP_INSN
4757 && (GET_CODE (PATTERN (next)) == ADDR_VEC
4758 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
4760 rtx pat = PATTERN (next);
4761 int diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
4762 int len = XVECLEN (pat, diff_vec_p);
4763 int i;
4765 for (i = 0; i < len; i++)
4766 LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0))--;
4768 flow_delete_insn (next);
4772 if (bb->end == insn)
4773 bb->end = PREV_INSN (insn);
4774 flow_delete_insn (insn);
4777 /* Delete dead libcalls for propagate_block. Return the insn
4778 before the libcall. */
4780 static rtx
4781 propagate_block_delete_libcall (bb, insn, note)
4782 basic_block bb;
4783 rtx insn, note;
4785 rtx first = XEXP (note, 0);
4786 rtx before = PREV_INSN (first);
4788 if (insn == bb->end)
4789 bb->end = before;
4791 flow_delete_insn_chain (first, insn);
4792 return before;
4795 /* Update the life-status of regs for one insn. Return the previous insn. */
4797 rtx
4798 propagate_one_insn (pbi, insn)
4799 struct propagate_block_info *pbi;
4800 rtx insn;
4802 rtx prev = PREV_INSN (insn);
4803 int flags = pbi->flags;
4804 int insn_is_dead = 0;
4805 int libcall_is_dead = 0;
4806 rtx note;
4807 int i;
4809 if (! INSN_P (insn))
4810 return prev;
4812 note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
4813 if (flags & PROP_SCAN_DEAD_CODE)
4815 insn_is_dead = insn_dead_p (pbi, PATTERN (insn), 0, REG_NOTES (insn));
4816 libcall_is_dead = (insn_is_dead && note != 0
4817 && libcall_dead_p (pbi, note, insn));
4820 /* If an instruction consists of just dead store(s) on the final pass,
4821 delete it. */
4822 if ((flags & PROP_KILL_DEAD_CODE) && insn_is_dead)
4824 /* If we're trying to delete a prologue or epilogue instruction
4825 that isn't flagged as possibly being dead, something is wrong.
4826 But if we are keeping the stack pointer depressed, we might well
4827 be deleting insns that are used to compute the amount to update
4828 it by, so they are fine. */
4829 if (reload_completed
4830 && !(TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
4831 && (TYPE_RETURNS_STACK_DEPRESSED
4832 (TREE_TYPE (current_function_decl))))
4833 && (((HAVE_epilogue || HAVE_prologue)
4834 && prologue_epilogue_contains (insn))
4835 || (HAVE_sibcall_epilogue
4836 && sibcall_epilogue_contains (insn)))
4837 && find_reg_note (insn, REG_MAYBE_DEAD, NULL_RTX) == 0)
4838 abort ();
4840 /* Record sets. Do this even for dead instructions, since they
4841 would have killed the values if they hadn't been deleted. */
4842 mark_set_regs (pbi, PATTERN (insn), insn);
4844 /* CC0 is now known to be dead. Either this insn used it,
4845 in which case it doesn't anymore, or clobbered it,
4846 so the next insn can't use it. */
4847 pbi->cc0_live = 0;
4849 if (libcall_is_dead)
4850 prev = propagate_block_delete_libcall (pbi->bb, insn, note);
4851 else
4852 propagate_block_delete_insn (pbi->bb, insn);
4854 return prev;
4857 /* See if this is an increment or decrement that can be merged into
4858 a following memory address. */
4859 #ifdef AUTO_INC_DEC
4861 register rtx x = single_set (insn);
4863 /* Does this instruction increment or decrement a register? */
4864 if ((flags & PROP_AUTOINC)
4865 && x != 0
4866 && GET_CODE (SET_DEST (x)) == REG
4867 && (GET_CODE (SET_SRC (x)) == PLUS
4868 || GET_CODE (SET_SRC (x)) == MINUS)
4869 && XEXP (SET_SRC (x), 0) == SET_DEST (x)
4870 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
4871 /* Ok, look for a following memory ref we can combine with.
4872 If one is found, change the memory ref to a PRE_INC
4873 or PRE_DEC, cancel this insn, and return 1.
4874 Return 0 if nothing has been done. */
4875 && try_pre_increment_1 (pbi, insn))
4876 return prev;
4878 #endif /* AUTO_INC_DEC */
4880 CLEAR_REG_SET (pbi->new_set);
4882 /* If this is not the final pass, and this insn is copying the value of
4883 a library call and it's dead, don't scan the insns that perform the
4884 library call, so that the call's arguments are not marked live. */
4885 if (libcall_is_dead)
4887 /* Record the death of the dest reg. */
4888 mark_set_regs (pbi, PATTERN (insn), insn);
4890 insn = XEXP (note, 0);
4891 return PREV_INSN (insn);
4893 else if (GET_CODE (PATTERN (insn)) == SET
4894 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
4895 && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
4896 && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx
4897 && GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == CONST_INT)
4898 /* We have an insn to pop a constant amount off the stack.
4899 (Such insns use PLUS regardless of the direction of the stack,
4900 and any insn to adjust the stack by a constant is always a pop.)
4901 These insns, if not dead stores, have no effect on life. */
4902 ;
4903 else
4905 /* Any regs live at the time of a call instruction must not go
4906 in a register clobbered by calls. Find all regs now live and
4907 record this for them. */
4909 if (GET_CODE (insn) == CALL_INSN && (flags & PROP_REG_INFO))
4910 EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i,
4911 { REG_N_CALLS_CROSSED (i)++; });
4913 /* Record sets. Do this even for dead instructions, since they
4914 would have killed the values if they hadn't been deleted. */
4915 mark_set_regs (pbi, PATTERN (insn), insn);
4917 if (GET_CODE (insn) == CALL_INSN)
4919 register int i;
4920 rtx note, cond;
4922 cond = NULL_RTX;
4923 if (GET_CODE (PATTERN (insn)) == COND_EXEC)
4924 cond = COND_EXEC_TEST (PATTERN (insn));
4926 /* Non-constant calls clobber memory. */
4927 if (! CONST_CALL_P (insn))
4929 free_EXPR_LIST_list (&pbi->mem_set_list);
4930 pbi->mem_set_list_len = 0;
4933 /* There may be extra registers to be clobbered. */
4934 for (note = CALL_INSN_FUNCTION_USAGE (insn);
4935 note;
4936 note = XEXP (note, 1))
4937 if (GET_CODE (XEXP (note, 0)) == CLOBBER)
4938 mark_set_1 (pbi, CLOBBER, XEXP (XEXP (note, 0), 0),
4939 cond, insn, pbi->flags);
4941 /* Calls clobber all call-used registers that are not global or fixed. */
4942 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4943 if (call_used_regs[i] && ! global_regs[i]
4944 && ! fixed_regs[i])
4946 /* We do not want REG_UNUSED notes for these registers. */
4947 mark_set_1 (pbi, CLOBBER, gen_rtx_REG (reg_raw_mode[i], i),
4948 cond, insn,
4949 pbi->flags & ~(PROP_DEATH_NOTES | PROP_REG_INFO));
4953 /* If an insn doesn't use CC0, it becomes dead since we assume
4954 that every insn clobbers it. So show it dead here;
4955 mark_used_regs will set it live if it is referenced. */
4956 pbi->cc0_live = 0;
4958 /* Record uses. */
4959 if (! insn_is_dead)
4960 mark_used_regs (pbi, PATTERN (insn), NULL_RTX, insn);
4962 /* Sometimes we may have inserted something before INSN (such as a move)
4963 when we make an auto-inc. So ensure we will scan those insns. */
4964 #ifdef AUTO_INC_DEC
4965 prev = PREV_INSN (insn);
4966 #endif
4968 if (! insn_is_dead && GET_CODE (insn) == CALL_INSN)
4970 register int i;
4971 rtx note, cond;
4973 cond = NULL_RTX;
4974 if (GET_CODE (PATTERN (insn)) == COND_EXEC)
4975 cond = COND_EXEC_TEST (PATTERN (insn));
4977 /* Calls use their arguments. */
4978 for (note = CALL_INSN_FUNCTION_USAGE (insn);
4979 note;
4980 note = XEXP (note, 1))
4981 if (GET_CODE (XEXP (note, 0)) == USE)
4982 mark_used_regs (pbi, XEXP (XEXP (note, 0), 0),
4983 cond, insn);
4985 /* The stack ptr is used (honorarily) by a CALL insn. */
4986 SET_REGNO_REG_SET (pbi->reg_live, STACK_POINTER_REGNUM);
4988 /* Calls may also reference any of the global registers,
4989 so they are made live. */
4990 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4991 if (global_regs[i])
4992 mark_used_reg (pbi, gen_rtx_REG (reg_raw_mode[i], i),
4993 cond, insn);
4997 /* On final pass, update counts of how many insns in which each reg
4998 is live. */
4999 if (flags & PROP_REG_INFO)
5000 EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i,
5001 { REG_LIVE_LENGTH (i)++; });
5003 return prev;
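/* A worked example of the call handling above: when the backward scan
   reaches

     (call_insn ... (call (mem:SI (symbol_ref ("foo"))) ...))

   it first clobbers every call-used register that is neither global nor
   fixed (via mark_set_1, with death notes suppressed), and then, if the
   insn is not dead, marks live again the stack pointer, the registers
   named by the USEs in CALL_INSN_FUNCTION_USAGE (the argument
   registers), and all global registers.  Illustrative RTL.  */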
5006 /* Initialize a propagate_block_info struct for public consumption.
5007 Note that the structure itself is opaque outside this file, but that
5008 the user can use the regsets provided here. */
5010 struct propagate_block_info *
5011 init_propagate_block_info (bb, live, local_set, cond_local_set, flags)
5012 basic_block bb;
5013 regset live, local_set, cond_local_set;
5014 int flags;
5016 struct propagate_block_info *pbi = xmalloc (sizeof (*pbi));
5018 pbi->bb = bb;
5019 pbi->reg_live = live;
5020 pbi->mem_set_list = NULL_RTX;
5021 pbi->mem_set_list_len = 0;
5022 pbi->local_set = local_set;
5023 pbi->cond_local_set = cond_local_set;
5024 pbi->cc0_live = 0;
5025 pbi->flags = flags;
5027 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
5028 pbi->reg_next_use = (rtx *) xcalloc (max_reg_num (), sizeof (rtx));
5029 else
5030 pbi->reg_next_use = NULL;
5032 pbi->new_set = BITMAP_XMALLOC ();
5034 #ifdef HAVE_conditional_execution
5035 pbi->reg_cond_dead = splay_tree_new (splay_tree_compare_ints, NULL,
5036 free_reg_cond_life_info);
5037 pbi->reg_cond_reg = BITMAP_XMALLOC ();
5039 /* If this block ends in a conditional branch, for each register live
5040 from one side of the branch and not the other, record the register
5041 as conditionally dead. */
5042 if (GET_CODE (bb->end) == JUMP_INSN
5043 && any_condjump_p (bb->end))
5045 regset_head diff_head;
5046 regset diff = INITIALIZE_REG_SET (diff_head);
5047 basic_block bb_true, bb_false;
5048 rtx cond_true, cond_false, set_src;
5049 int i;
5051 /* Identify the successor blocks. */
5052 bb_true = bb->succ->dest;
5053 if (bb->succ->succ_next != NULL)
5055 bb_false = bb->succ->succ_next->dest;
5057 if (bb->succ->flags & EDGE_FALLTHRU)
5059 basic_block t = bb_false;
5060 bb_false = bb_true;
5061 bb_true = t;
5063 else if (! (bb->succ->succ_next->flags & EDGE_FALLTHRU))
5064 abort ();
5066 else
5068 /* This can happen with a conditional jump to the next insn. */
5069 if (JUMP_LABEL (bb->end) != bb_true->head)
5070 abort ();
5072 /* Simplest way to do nothing. */
5073 bb_false = bb_true;
5076 /* Extract the condition from the branch. */
5077 set_src = SET_SRC (pc_set (bb->end));
5078 cond_true = XEXP (set_src, 0);
5079 cond_false = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond_true)),
5080 GET_MODE (cond_true), XEXP (cond_true, 0),
5081 XEXP (cond_true, 1));
5082 if (GET_CODE (XEXP (set_src, 1)) == PC)
5084 rtx t = cond_false;
5085 cond_false = cond_true;
5086 cond_true = t;
5089 /* Compute which registers lead different lives in the successors. */
5090 if (bitmap_operation (diff, bb_true->global_live_at_start,
5091 bb_false->global_live_at_start, BITMAP_XOR))
5093 rtx reg = XEXP (cond_true, 0);
5095 if (GET_CODE (reg) == SUBREG)
5096 reg = SUBREG_REG (reg);
5098 if (GET_CODE (reg) != REG)
5099 abort ();
5101 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (reg));
5103 /* For each such register, mark it conditionally dead. */
5104 EXECUTE_IF_SET_IN_REG_SET
5105 (diff, 0, i,
5107 struct reg_cond_life_info *rcli;
5108 rtx cond;
5110 rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
5112 if (REGNO_REG_SET_P (bb_true->global_live_at_start, i))
5113 cond = cond_false;
5114 else
5115 cond = cond_true;
5116 rcli->condition = cond;
5117 rcli->stores = const0_rtx;
5118 rcli->orig_condition = cond;
5120 splay_tree_insert (pbi->reg_cond_dead, i,
5121 (splay_tree_value) rcli);
5125 FREE_REG_SET (diff);
5127 #endif
5129 /* If this block has no successors, any stores to the frame that aren't
5130 used later in the block are dead. So make a pass over the block
5131 recording any such that are made and show them dead at the end. We do
5132 a very conservative and simple job here. */
5133 if (optimize
5134 && ! (TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
5135 && (TYPE_RETURNS_STACK_DEPRESSED
5136 (TREE_TYPE (current_function_decl))))
5137 && (flags & PROP_SCAN_DEAD_CODE)
5138 && (bb->succ == NULL
5139 || (bb->succ->succ_next == NULL
5140 && bb->succ->dest == EXIT_BLOCK_PTR
5141 && ! current_function_calls_eh_return)))
5143 rtx insn, set;
5144 for (insn = bb->end; insn != bb->head; insn = PREV_INSN (insn))
5145 if (GET_CODE (insn) == INSN
5146 && (set = single_set (insn))
5147 && GET_CODE (SET_DEST (set)) == MEM)
5149 rtx mem = SET_DEST (set);
5150 rtx canon_mem = canon_rtx (mem);
5152 /* This optimization is performed by faking a store to the
5153 memory at the end of the block. This doesn't work for
5154 unchanging memories because multiple stores to unchanging
5155 memory are illegal and alias analysis doesn't consider them. */
5156 if (RTX_UNCHANGING_P (canon_mem))
5157 continue;
5159 if (XEXP (canon_mem, 0) == frame_pointer_rtx
5160 || (GET_CODE (XEXP (canon_mem, 0)) == PLUS
5161 && XEXP (XEXP (canon_mem, 0), 0) == frame_pointer_rtx
5162 && GET_CODE (XEXP (XEXP (canon_mem, 0), 1)) == CONST_INT))
5164 #ifdef AUTO_INC_DEC
5165 /* Store a copy of mem, otherwise the address may be scrogged
5166 by find_auto_inc. This matters because insn_dead_p uses
5167 an rtx_equal_p check to determine if two addresses are
5168 the same. This works before find_auto_inc, but fails
5169 after find_auto_inc, causing discrepancies between the
5170 set of live registers calculated during the
5171 calculate_global_regs_live phase and what actually exists
5172 after flow completes, leading to aborts. */
5173 if (flags & PROP_AUTOINC)
5174 mem = shallow_copy_rtx (mem);
5175 #endif
5176 pbi->mem_set_list = alloc_EXPR_LIST (0, mem, pbi->mem_set_list);
5177 if (++pbi->mem_set_list_len >= MAX_MEM_SET_LIST_LEN)
5178 break;
5183 return pbi;
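/* An example of the conditional-death setup above.  Suppose the block
   ends in

     (jump_insn (set (pc) (if_then_else (eq (reg:CC 24) (const_int 0))
                                        (label_ref 30)
                                        (pc))))

   and (reg:SI 3) is live at the start of the taken destination but not
   at the start of the fallthrough block.  Then register 3 is entered in
   reg_cond_dead with condition (ne (reg:CC 24) (const_int 0)); that is,
   it is dead exactly when the branch is not taken.  Illustrative RTL
   and register numbers.  */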
5186 /* Release a propagate_block_info struct. */
5188 void
5189 free_propagate_block_info (pbi)
5190 struct propagate_block_info *pbi;
5192 free_EXPR_LIST_list (&pbi->mem_set_list);
5194 BITMAP_XFREE (pbi->new_set);
5196 #ifdef HAVE_conditional_execution
5197 splay_tree_delete (pbi->reg_cond_dead);
5198 BITMAP_XFREE (pbi->reg_cond_reg);
5199 #endif
5201 if (pbi->reg_next_use)
5202 free (pbi->reg_next_use);
5204 free (pbi);
5207 /* Compute the registers live at the beginning of a basic block BB from
5208 those live at the end.
5210 When called, REG_LIVE contains those live at the end. On return, it
5211 contains those live at the beginning.
5213 LOCAL_SET, if non-null, will be set with all registers killed
5214 unconditionally by this basic block.
5215 Likewise, COND_LOCAL_SET, if non-null, will be set with all registers
5216 killed conditionally by this basic block. If there is any unconditional
5217 set of a register, then the corresponding bit will be set in LOCAL_SET
5218 and cleared in COND_LOCAL_SET.
5219 It is valid for LOCAL_SET and COND_LOCAL_SET to be the same set. In this
5220 case, the resulting set will be equal to the union of the two sets that
5221 would otherwise be computed. */
5223 void
5224 propagate_block (bb, live, local_set, cond_local_set, flags)
5225 basic_block bb;
5226 regset live;
5227 regset local_set;
5228 regset cond_local_set;
5229 int flags;
5231 struct propagate_block_info *pbi;
5232 rtx insn, prev;
5234 pbi = init_propagate_block_info (bb, live, local_set, cond_local_set, flags);
5236 if (flags & PROP_REG_INFO)
5238 register int i;
5240 /* Process the regs live at the end of the block.
5241 Mark them as not local to any one basic block. */
5242 EXECUTE_IF_SET_IN_REG_SET (live, 0, i,
5243 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
5246 /* Scan the block an insn at a time from end to beginning. */
5248 for (insn = bb->end;; insn = prev)
5250 /* If this is a call to `setjmp' et al, warn if any
5251 non-volatile datum is live. */
5252 if ((flags & PROP_REG_INFO)
5253 && GET_CODE (insn) == NOTE
5254 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
5255 IOR_REG_SET (regs_live_at_setjmp, pbi->reg_live);
5257 prev = propagate_one_insn (pbi, insn);
5259 if (insn == bb->head)
5260 break;
5263 free_propagate_block_info (pbi);
5266 /* Return 1 if X (the body of an insn, or part of it) is just dead stores
5267 (SET expressions whose destinations are registers dead after the insn).
5268 NEEDED is the regset that says which regs are alive after the insn.
5270 Unless CALL_OK is non-zero, an insn is needed if it contains a CALL.
5272 If X is the entire body of an insn, NOTES contains the reg notes
5273 pertaining to the insn. */
5275 static int
5276 insn_dead_p (pbi, x, call_ok, notes)
5277 struct propagate_block_info *pbi;
5278 rtx x;
5279 int call_ok;
5280 rtx notes ATTRIBUTE_UNUSED;
5282 enum rtx_code code = GET_CODE (x);
5284 #ifdef AUTO_INC_DEC
5285 /* If flow is invoked after reload, we must take existing AUTO_INC
5286 expressions into account. */
5287 if (reload_completed)
5289 for (; notes; notes = XEXP (notes, 1))
5291 if (REG_NOTE_KIND (notes) == REG_INC)
5293 int regno = REGNO (XEXP (notes, 0));
5295 /* Don't delete insns to set global regs. */
5296 if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
5297 || REGNO_REG_SET_P (pbi->reg_live, regno))
5298 return 0;
5302 #endif
5304 /* If setting something that's a reg or part of one,
5305 see if that register's altered value will be live. */
5307 if (code == SET)
5309 rtx r = SET_DEST (x);
5311 #ifdef HAVE_cc0
5312 if (GET_CODE (r) == CC0)
5313 return ! pbi->cc0_live;
5314 #endif
5316 /* A SET that is a subroutine call cannot be dead. */
5317 if (GET_CODE (SET_SRC (x)) == CALL)
5319 if (! call_ok)
5320 return 0;
5323 /* Don't eliminate loads from volatile memory or volatile asms. */
5324 else if (volatile_refs_p (SET_SRC (x)))
5325 return 0;
5327 if (GET_CODE (r) == MEM)
5329 rtx temp;
5331 if (MEM_VOLATILE_P (r))
5332 return 0;
5334 /* Walk the set of memory locations we are currently tracking
5335 and see if one is an identical match to this memory location.
5336 If so, this memory write is dead (remember, we're walking
5337 backwards from the end of the block to the start). Since
5338 rtx_equal_p does not check the alias set or flags, we also
5339 must have the potential for them to conflict (anti_dependence). */
5340 for (temp = pbi->mem_set_list; temp != 0; temp = XEXP (temp, 1))
5341 if (anti_dependence (r, XEXP (temp, 0)))
5343 rtx mem = XEXP (temp, 0);
5345 if (rtx_equal_p (mem, r))
5346 return 1;
5347 #ifdef AUTO_INC_DEC
5348 /* Check if memory reference matches an auto increment. Only
5349 post increment/decrement or modify are valid. */
5350 if (GET_MODE (mem) == GET_MODE (r)
5351 && (GET_CODE (XEXP (mem, 0)) == POST_DEC
5352 || GET_CODE (XEXP (mem, 0)) == POST_INC
5353 || GET_CODE (XEXP (mem, 0)) == POST_MODIFY)
5354 && GET_MODE (XEXP (mem, 0)) == GET_MODE (r)
5355 && rtx_equal_p (XEXP (XEXP (mem, 0), 0), XEXP (r, 0)))
5356 return 1;
5357 #endif
5360 else
5362 while (GET_CODE (r) == SUBREG
5363 || GET_CODE (r) == STRICT_LOW_PART
5364 || GET_CODE (r) == ZERO_EXTRACT)
5365 r = XEXP (r, 0);
5367 if (GET_CODE (r) == REG)
5369 int regno = REGNO (r);
5371 /* Obvious. */
5372 if (REGNO_REG_SET_P (pbi->reg_live, regno))
5373 return 0;
5375 /* If this is a hard register, verify that subsequent
5376 words are not needed. */
5377 if (regno < FIRST_PSEUDO_REGISTER)
5379 int n = HARD_REGNO_NREGS (regno, GET_MODE (r));
5381 while (--n > 0)
5382 if (REGNO_REG_SET_P (pbi->reg_live, regno+n))
5383 return 0;
5386 /* Don't delete insns to set global regs. */
5387 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
5388 return 0;
5390 /* Make sure insns to set the stack pointer aren't deleted. */
5391 if (regno == STACK_POINTER_REGNUM)
5392 return 0;
5394 /* ??? These bits might be redundant with the force live bits
5395 in calculate_global_regs_live. We would delete from
5396 sequential sets; whether this actually affects real code
5397 for anything but the stack pointer I don't know. */
5398 /* Make sure insns to set the frame pointer aren't deleted. */
5399 if (regno == FRAME_POINTER_REGNUM
5400 && (! reload_completed || frame_pointer_needed))
5401 return 0;
5402 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
5403 if (regno == HARD_FRAME_POINTER_REGNUM
5404 && (! reload_completed || frame_pointer_needed))
5405 return 0;
5406 #endif
5408 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
5409 /* Make sure insns to set arg pointer are never deleted
5410 (if the arg pointer isn't fixed, there will be a USE
5411 for it, so we can treat it normally). */
5412 if (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
5413 return 0;
5414 #endif
5416 /* Otherwise, the set is dead. */
5417 return 1;
5422 /* If performing several activities, insn is dead if each activity
5423 is individually dead. Also, CLOBBERs and USEs can be ignored; a
5424 CLOBBER or USE that's inside a PARALLEL doesn't make the insn
5425 worth keeping. */
5426 else if (code == PARALLEL)
5428 int i = XVECLEN (x, 0);
5430 for (i--; i >= 0; i--)
5431 if (GET_CODE (XVECEXP (x, 0, i)) != CLOBBER
5432 && GET_CODE (XVECEXP (x, 0, i)) != USE
5433 && ! insn_dead_p (pbi, XVECEXP (x, 0, i), call_ok, NULL_RTX))
5434 return 0;
5436 return 1;
5439 /* A CLOBBER of a pseudo-register that is dead serves no purpose. That
5440 is not necessarily true for hard registers. */
5441 else if (code == CLOBBER && GET_CODE (XEXP (x, 0)) == REG
5442 && REGNO (XEXP (x, 0)) >= FIRST_PSEUDO_REGISTER
5443 && ! REGNO_REG_SET_P (pbi->reg_live, REGNO (XEXP (x, 0))))
5444 return 1;
5446 /* We do not check other CLOBBER or USE here. An insn consisting of just
5447 a CLOBBER or just a USE should not be deleted. */
5448 return 0;
5451 /* If INSN is the last insn in a libcall, and assuming INSN is dead,
5452 return 1 if the entire library call is dead.
5453 This is true if INSN copies a register (hard or pseudo)
5454 and if the hard return reg of the call insn is dead.
5455 (The caller should have tested the destination of the SET inside
5456 INSN already for death.)
5458 If this insn doesn't just copy a register, then we don't
5459 have an ordinary libcall. In that case, cse could not have
5460 managed to substitute the source for the dest later on,
5461 so we can assume the libcall is dead.
5463 PBI is the block info giving pseudoregs live before this insn.
5464 NOTE is the REG_RETVAL note of the insn. */
5466 static int
5467 libcall_dead_p (pbi, note, insn)
5468 struct propagate_block_info *pbi;
5469 rtx note;
5470 rtx insn;
5472 rtx x = single_set (insn);
5474 if (x)
5476 register rtx r = SET_SRC (x);
5477 if (GET_CODE (r) == REG)
5479 rtx call = XEXP (note, 0);
5480 rtx call_pat;
5481 register int i;
5483 /* Find the call insn. */
5484 while (call != insn && GET_CODE (call) != CALL_INSN)
5485 call = NEXT_INSN (call);
5487 /* If there is none, do nothing special,
5488 since ordinary death handling can understand these insns. */
5489 if (call == insn)
5490 return 0;
5492 /* See if the hard reg holding the value is dead.
5493 If this is a PARALLEL, find the call within it. */
5494 call_pat = PATTERN (call);
5495 if (GET_CODE (call_pat) == PARALLEL)
5497 for (i = XVECLEN (call_pat, 0) - 1; i >= 0; i--)
5498 if (GET_CODE (XVECEXP (call_pat, 0, i)) == SET
5499 && GET_CODE (SET_SRC (XVECEXP (call_pat, 0, i))) == CALL)
5500 break;
5502 /* This may be a library call that is returning a value
5503 via invisible pointer. Do nothing special, since
5504 ordinary death handling can understand these insns. */
5505 if (i < 0)
5506 return 0;
5508 call_pat = XVECEXP (call_pat, 0, i);
5511 return insn_dead_p (pbi, call_pat, 1, REG_NOTES (call));
5514 return 1;
5517 /* Return 1 if register REGNO was used before it was set, i.e. if it is
5518 live at function entry. Don't count global register variables, variables
5519 in registers that can be used for function arg passing, or variables in
5520 fixed hard registers. */
5522 int
5523 regno_uninitialized (regno)
5524 int regno;
5526 if (n_basic_blocks == 0
5527 || (regno < FIRST_PSEUDO_REGISTER
5528 && (global_regs[regno]
5529 || fixed_regs[regno]
5530 || FUNCTION_ARG_REGNO_P (regno))))
5531 return 0;
5533 return REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno);
5536 /* 1 if register REGNO was alive at a place where `setjmp' was called
5537 and was set more than once or is an argument.
5538 Such regs may be clobbered by `longjmp'. */
5540 int
5541 regno_clobbered_at_setjmp (regno)
5542 int regno;
5544 if (n_basic_blocks == 0)
5545 return 0;
5547 return ((REG_N_SETS (regno) > 1
5548 || REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno))
5549 && REGNO_REG_SET_P (regs_live_at_setjmp, regno));
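/* A sketch of the source-level hazard the two predicates above look
   for.  In the function below, x is set more than once and is live at
   the call to setjmp; if it lived in a call-clobbered hard register,
   the value seen after a longjmp out of g would be garbage.  ISO C
   makes such a variable indeterminate after longjmp unless it is
   volatile, and flow correspondingly keeps such pseudos out of hard
   registers.  Illustration only.  */
#if 0 /* illustration only; not compiled as part of flow.c */
#include <setjmp.h>

jmp_buf env;
extern void g (void);	/* may call longjmp (env, 1) */

int
f (void)
{
  int x = 0;

  if (setjmp (env) == 0)
    {
      x = 1;		/* modified after the setjmp */
      g ();
    }
  return x;
}
#endif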
5552 /* INSN references memory, possibly using autoincrement addressing modes.
5553 Find any entries on the mem_set_list that need to be invalidated due
5554 to an address change. */
5556 static void
5557 invalidate_mems_from_autoinc (pbi, insn)
5558 struct propagate_block_info *pbi;
5559 rtx insn;
5561 rtx note;
5562 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
5564 if (REG_NOTE_KIND (note) == REG_INC)
5566 rtx temp = pbi->mem_set_list;
5567 rtx prev = NULL_RTX;
5568 rtx next;
5570 while (temp)
5572 next = XEXP (temp, 1);
5573 if (reg_overlap_mentioned_p (XEXP (note, 0), XEXP (temp, 0)))
5575 /* Splice temp out of list. */
5576 if (prev)
5577 XEXP (prev, 1) = next;
5578 else
5579 pbi->mem_set_list = next;
5580 free_EXPR_LIST_node (temp);
5581 pbi->mem_set_list_len--;
5583 else
5584 prev = temp;
5585 temp = next;
5591 /* EXP is either a MEM or a REG. Remove any dependent entries
5592 from pbi->mem_set_list. */
5594 static void
5595 invalidate_mems_from_set (pbi, exp)
5596 struct propagate_block_info *pbi;
5597 rtx exp;
5599 rtx temp = pbi->mem_set_list;
5600 rtx prev = NULL_RTX;
5601 rtx next;
5603 while (temp)
5605 next = XEXP (temp, 1);
5606 if ((GET_CODE (exp) == MEM
5607 && output_dependence (XEXP (temp, 0), exp))
5608 || (GET_CODE (exp) == REG
5609 && reg_overlap_mentioned_p (exp, XEXP (temp, 0))))
5611 /* Splice this entry out of the list. */
5612 if (prev)
5613 XEXP (prev, 1) = next;
5614 else
5615 pbi->mem_set_list = next;
5616 free_EXPR_LIST_node (temp);
5617 pbi->mem_set_list_len--;
5619 else
5620 prev = temp;
5621 temp = next;
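/* An example of the invalidation above.  Scanning backward, suppose
   (mem:SI (reg:SI 60)) is on mem_set_list as a store known to be dead.
   When the scan reaches an earlier insn that sets (reg:SI 60), the
   recorded address no longer describes the same location in the insns
   above that set, so the entry is spliced out; likewise when EXP is a
   MEM with an output dependence on the entry.  Illustrative RTL.  */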
5625 /* Process the registers that are set within X. Each set is passed on
5626 to mark_set_1, which records it and updates liveness in PBI.
5628 If INSN is nonzero, it is the insn being processed.
5630 The operations to perform are taken from PBI->flags. */
5632 static void
5633 mark_set_regs (pbi, x, insn)
5634 struct propagate_block_info *pbi;
5635 rtx x, insn;
5637 rtx cond = NULL_RTX;
5638 rtx link;
5639 enum rtx_code code;
5641 if (insn)
5642 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
5644 if (REG_NOTE_KIND (link) == REG_INC)
5645 mark_set_1 (pbi, SET, XEXP (link, 0),
5646 (GET_CODE (x) == COND_EXEC
5647 ? COND_EXEC_TEST (x) : NULL_RTX),
5648 insn, pbi->flags);
5650 retry:
5651 switch (code = GET_CODE (x))
5653 case SET:
5654 case CLOBBER:
5655 mark_set_1 (pbi, code, SET_DEST (x), cond, insn, pbi->flags);
5656 return;
5658 case COND_EXEC:
5659 cond = COND_EXEC_TEST (x);
5660 x = COND_EXEC_CODE (x);
5661 goto retry;
5663 case PARALLEL:
5665 register int i;
5666 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
5668 rtx sub = XVECEXP (x, 0, i);
5669 switch (code = GET_CODE (sub))
5671 case COND_EXEC:
5672 if (cond != NULL_RTX)
5673 abort ();
5675 cond = COND_EXEC_TEST (sub);
5676 sub = COND_EXEC_CODE (sub);
5677 if (GET_CODE (sub) != SET && GET_CODE (sub) != CLOBBER)
5678 break;
5679 /* Fall through. */
5681 case SET:
5682 case CLOBBER:
5683 mark_set_1 (pbi, code, SET_DEST (sub), cond, insn, pbi->flags);
5684 break;
5686 default:
5687 break;
5690 break;
5693 default:
5694 break;
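/* For illustration, mark_set_regs dispatches patterns such as

     (cond_exec (ne (reg:CC 24) (const_int 0))
                (set (reg:SI 0) (reg:SI 1)))

   with the test passed along as COND, as well as PARALLELs mixing plain
   and predicated SETs and CLOBBERs.  REG_INC notes on the insn are
   treated as additional (possibly conditional) sets.  Illustrative
   RTL.  */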
5698 /* Process a single set, which appears in INSN. REG (which may not
5699 actually be a REG, it may also be a SUBREG, PARALLEL, etc.) is
5700 being set using the CODE (which may be SET, CLOBBER, or COND_EXEC).
5701 If the set is conditional (because it appears in a COND_EXEC), COND
5702 will be the condition. */
5704 static void
5705 mark_set_1 (pbi, code, reg, cond, insn, flags)
5706 struct propagate_block_info *pbi;
5707 enum rtx_code code;
5708 rtx reg, cond, insn;
5709 int flags;
5711 int regno_first = -1, regno_last = -1;
5712 unsigned long not_dead = 0;
5713 int i;
5715 /* Modifying just one hardware register of a multi-reg value or just a
5716 byte field of a register does not mean the value from before this insn
5717 is now dead. Of course, if it was dead after, it's unused now. */
5719 switch (GET_CODE (reg))
5721 case PARALLEL:
5722 /* Some targets place small structures in registers for return values of
5723 functions. We have to detect this case specially here to get correct
5724 flow information. */
5725 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
5726 if (XEXP (XVECEXP (reg, 0, i), 0) != 0)
5727 mark_set_1 (pbi, code, XEXP (XVECEXP (reg, 0, i), 0), cond, insn,
5728 flags);
5729 return;
5731 case ZERO_EXTRACT:
5732 case SIGN_EXTRACT:
5733 case STRICT_LOW_PART:
5734 /* ??? Assumes STRICT_LOW_PART not used on multi-word registers. */
5735 do
5736 reg = XEXP (reg, 0);
5737 while (GET_CODE (reg) == SUBREG
5738 || GET_CODE (reg) == ZERO_EXTRACT
5739 || GET_CODE (reg) == SIGN_EXTRACT
5740 || GET_CODE (reg) == STRICT_LOW_PART);
5741 if (GET_CODE (reg) == MEM)
5742 break;
5743 not_dead = (unsigned long) REGNO_REG_SET_P (pbi->reg_live, REGNO (reg));
5744 /* Fall through. */
5746 case REG:
5747 regno_last = regno_first = REGNO (reg);
5748 if (regno_first < FIRST_PSEUDO_REGISTER)
5749 regno_last += HARD_REGNO_NREGS (regno_first, GET_MODE (reg)) - 1;
5750 break;
5752 case SUBREG:
5753 if (GET_CODE (SUBREG_REG (reg)) == REG)
5755 enum machine_mode outer_mode = GET_MODE (reg);
5756 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (reg));
5758 /* Identify the range of registers affected. This is moderately
5759 tricky for hard registers. See alter_subreg. */
5761 regno_last = regno_first = REGNO (SUBREG_REG (reg));
5762 if (regno_first < FIRST_PSEUDO_REGISTER)
5764 regno_first += subreg_regno_offset (regno_first, inner_mode,
5765 SUBREG_BYTE (reg),
5766 outer_mode);
5767 regno_last = (regno_first
5768 + HARD_REGNO_NREGS (regno_first, outer_mode) - 1);
5770 /* Since we've just adjusted the register number ranges, make
5771 sure REG matches. Otherwise some_was_live will be clear
5772 when it shouldn't have been, and we'll create incorrect
5773 REG_UNUSED notes. */
5774 reg = gen_rtx_REG (outer_mode, regno_first);
5776 else
5778 /* If the number of words in the subreg is less than the number
5779 of words in the full register, we have a well-defined partial
5780 set. Otherwise the high bits are undefined.
5782 This is only really applicable to pseudos, since we just took
5783 care of multi-word hard registers. */
5784 if (((GET_MODE_SIZE (outer_mode)
5785 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
5786 < ((GET_MODE_SIZE (inner_mode)
5787 + UNITS_PER_WORD - 1) / UNITS_PER_WORD))
5788 not_dead = (unsigned long) REGNO_REG_SET_P (pbi->reg_live,
5789 regno_first);
5791 reg = SUBREG_REG (reg);
5794 else
5795 reg = SUBREG_REG (reg);
5796 break;
5798 default:
5799 break;
5802 /* If this set is a MEM, then it kills any aliased writes.
5803 If this set is a REG, then it kills any MEMs which use the reg. */
5804 if (optimize && (flags & PROP_SCAN_DEAD_CODE))
5806 if (GET_CODE (reg) == MEM || GET_CODE (reg) == REG)
5807 invalidate_mems_from_set (pbi, reg);
5809 /* If the memory reference had embedded side effects (autoincrement
5810 address modes), then we may need to kill some entries on the
5811 memory set list. */
5812 if (insn && GET_CODE (reg) == MEM)
5813 invalidate_mems_from_autoinc (pbi, insn);
5815 if (pbi->mem_set_list_len < MAX_MEM_SET_LIST_LEN
5816 && GET_CODE (reg) == MEM && ! side_effects_p (reg)
5817 /* ??? With more effort we could track conditional memory life. */
5818 && ! cond
5819 /* We do not know the size of a BLKmode store, so we do not track
5820 them for redundant store elimination. */
5821 && GET_MODE (reg) != BLKmode
5822 /* There are no REG_INC notes for SP, so we can't assume we'll see
5823 everything that invalidates it. To be safe, don't eliminate any
5824 stores through SP; none of them should be redundant anyway. */
5825 && ! reg_mentioned_p (stack_pointer_rtx, reg))
5827 #ifdef AUTO_INC_DEC
5828 /* Store a copy of mem, otherwise the address may be
5829 scrogged by find_auto_inc. */
5830 if (flags & PROP_AUTOINC)
5831 reg = shallow_copy_rtx (reg);
5832 #endif
5833 pbi->mem_set_list = alloc_EXPR_LIST (0, reg, pbi->mem_set_list);
5834 pbi->mem_set_list_len++;
5838 if (GET_CODE (reg) == REG
5839 && ! (regno_first == FRAME_POINTER_REGNUM
5840 && (! reload_completed || frame_pointer_needed))
5841 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
5842 && ! (regno_first == HARD_FRAME_POINTER_REGNUM
5843 && (! reload_completed || frame_pointer_needed))
5844 #endif
5845 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
5846 && ! (regno_first == ARG_POINTER_REGNUM && fixed_regs[regno_first])
5847 #endif
5850 int some_was_live = 0, some_was_dead = 0;
5852 for (i = regno_first; i <= regno_last; ++i)
5854 int needed_regno = REGNO_REG_SET_P (pbi->reg_live, i);
5855 if (pbi->local_set)
5857 /* Order of the set operation matters here since both
5858 sets may be the same. */
5859 CLEAR_REGNO_REG_SET (pbi->cond_local_set, i);
5860 if (cond != NULL_RTX
5861 && ! REGNO_REG_SET_P (pbi->local_set, i))
5862 SET_REGNO_REG_SET (pbi->cond_local_set, i);
5863 else
5864 SET_REGNO_REG_SET (pbi->local_set, i);
5866 if (code != CLOBBER)
5867 SET_REGNO_REG_SET (pbi->new_set, i);
5869 some_was_live |= needed_regno;
5870 some_was_dead |= ! needed_regno;
5873 #ifdef HAVE_conditional_execution
5874 /* Consider conditional death in deciding that the register needs
5875 a death note. */
5876 if (some_was_live && ! not_dead
5877 /* The stack pointer is never dead. Well, not strictly true,
5878 but it's very difficult to tell from here. Hopefully
5879 combine_stack_adjustments will fix up the most egregious
5880 errors. */
5881 && regno_first != STACK_POINTER_REGNUM)
5883 for (i = regno_first; i <= regno_last; ++i)
5884 if (! mark_regno_cond_dead (pbi, i, cond))
5885 not_dead |= ((unsigned long) 1) << (i - regno_first);
5887 #endif
5889 /* Additional data to record if this is the final pass. */
5890 if (flags & (PROP_LOG_LINKS | PROP_REG_INFO
5891 | PROP_DEATH_NOTES | PROP_AUTOINC))
5893 register rtx y;
5894 register int blocknum = pbi->bb->index;
5896 y = NULL_RTX;
5897 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
5899 y = pbi->reg_next_use[regno_first];
5901 /* The next use is no longer next, since a store intervenes. */
5902 for (i = regno_first; i <= regno_last; ++i)
5903 pbi->reg_next_use[i] = 0;
5906 if (flags & PROP_REG_INFO)
5908 for (i = regno_first; i <= regno_last; ++i)
5910 /* Count (weighted) references, stores, etc. This counts a
5911 register twice if it is modified, but that is correct. */
5912 REG_N_SETS (i) += 1;
5913 REG_N_REFS (i) += 1;
5914 REG_FREQ (i) += (optimize_size || !pbi->bb->frequency
5915 ? 1 : pbi->bb->frequency);
5917 /* The insns where a reg is live are normally counted
5918 elsewhere, but we want the count to include the insn
5919 where the reg is set, and the normal counting mechanism
5920 would not count it. */
5921 REG_LIVE_LENGTH (i) += 1;
5924 /* If this is a hard reg, record this function uses the reg. */
5925 if (regno_first < FIRST_PSEUDO_REGISTER)
5927 for (i = regno_first; i <= regno_last; i++)
5928 regs_ever_live[i] = 1;
5930 else
5932 /* Keep track of which basic blocks each reg appears in. */
5933 if (REG_BASIC_BLOCK (regno_first) == REG_BLOCK_UNKNOWN)
5934 REG_BASIC_BLOCK (regno_first) = blocknum;
5935 else if (REG_BASIC_BLOCK (regno_first) != blocknum)
5936 REG_BASIC_BLOCK (regno_first) = REG_BLOCK_GLOBAL;
5940 if (! some_was_dead)
5942 if (flags & PROP_LOG_LINKS)
5944 /* Make a logical link from the next following insn
5945 that uses this register, back to this insn.
5946 The following insns have already been processed.
5948 We don't build a LOG_LINK for hard registers contained
5949 in ASM_OPERANDs. If these registers get replaced,
5950 we might wind up changing the semantics of the insn,
5951 even if reload can make what appear to be valid
5952 assignments later. */
5953 if (y && (BLOCK_NUM (y) == blocknum)
5954 && (regno_first >= FIRST_PSEUDO_REGISTER
5955 || asm_noperands (PATTERN (y)) < 0))
5956 LOG_LINKS (y) = alloc_INSN_LIST (insn, LOG_LINKS (y));
5959 else if (not_dead)
5961 else if (! some_was_live)
5963 if (flags & PROP_REG_INFO)
5964 REG_N_DEATHS (regno_first) += 1;
5966 if (flags & PROP_DEATH_NOTES)
5968 /* Note that dead stores have already been deleted
5969 when possible. If we get here, we have found a
5970 dead store that cannot be eliminated (because the
5971 same insn does something useful). Indicate this
5972 by marking the reg being set as dying here. */
5973 REG_NOTES (insn)
5974 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
5977 else
5979 if (flags & PROP_DEATH_NOTES)
5981 /* This is a case where we have a multi-word hard register
5982 and some, but not all, of the words of the register are
5983 needed in subsequent insns. Write REG_UNUSED notes
5984 for those parts that were not needed. This case should
5985 be rare. */
5987 for (i = regno_first; i <= regno_last; ++i)
5988 if (! REGNO_REG_SET_P (pbi->reg_live, i))
5989 REG_NOTES (insn)
5990 = alloc_EXPR_LIST (REG_UNUSED,
5991 gen_rtx_REG (reg_raw_mode[i], i),
5992 REG_NOTES (insn));
5997 /* Mark the register as being dead. */
5998 if (some_was_live
5999 /* The stack pointer is never dead. Well, not strictly true,
6000 but it's very difficult to tell from here. Hopefully
6001 combine_stack_adjustments will fix up the most egregious
6002 errors. */
6003 && regno_first != STACK_POINTER_REGNUM)
6005 for (i = regno_first; i <= regno_last; ++i)
6006 if (!(not_dead & (((unsigned long) 1) << (i - regno_first))))
6007 CLEAR_REGNO_REG_SET (pbi->reg_live, i);
6010 else if (GET_CODE (reg) == REG)
6012 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
6013 pbi->reg_next_use[regno_first] = 0;
6016 /* If this is the last pass and this is a SCRATCH, show it will be dying
6017 here and count it. */
6018 else if (GET_CODE (reg) == SCRATCH)
6020 if (flags & PROP_DEATH_NOTES)
6021 REG_NOTES (insn)
6022 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
6026 #ifdef HAVE_conditional_execution
6027 /* Mark REGNO conditionally dead.
6028 Return true if the register is now unconditionally dead. */
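/* A sketch of the interesting case (schematic RTL): if REGNO is first
   stored under (eq (reg 96) (const_int 0)) and later under
   (ne (reg 96) (const_int 0)), the IOR of the two death conditions
   simplifies to const1_rtx, so the splay-tree entry is removed and the
   register is reported unconditionally dead.  */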
6030 static int
6031 mark_regno_cond_dead (pbi, regno, cond)
6032 struct propagate_block_info *pbi;
6033 int regno;
6034 rtx cond;
6036 /* If this is a store to a predicate register, the value of the
6037 predicate is changing; we no longer know that the predicate as seen
6038 before is the same as that seen after. Flush all dependent
6039 conditions from reg_cond_dead. This will make all such
6040 conditionally live registers unconditionally live. */
6041 if (REGNO_REG_SET_P (pbi->reg_cond_reg, regno))
6042 flush_reg_cond_reg (pbi, regno);
6044 /* If this is an unconditional store, remove any conditional
6045 life that may have existed. */
6046 if (cond == NULL_RTX)
6047 splay_tree_remove (pbi->reg_cond_dead, regno);
6048 else
6050 splay_tree_node node;
6051 struct reg_cond_life_info *rcli;
6052 rtx ncond;
6054 /* Otherwise this is a conditional set. Record that fact.
6055 It may have been conditionally used, or there may be a
6056 subsequent set with a complementary condition. */
6058 node = splay_tree_lookup (pbi->reg_cond_dead, regno);
6059 if (node == NULL)
6061 /* The register was unconditionally live previously.
6062 Record the current condition as the condition under
6063 which it is dead. */
6064 rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
6065 rcli->condition = cond;
6066 rcli->stores = cond;
6067 rcli->orig_condition = const0_rtx;
6068 splay_tree_insert (pbi->reg_cond_dead, regno,
6069 (splay_tree_value) rcli);
6071 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));
6073 /* Not unconditionally dead. */
6074 return 0;
6076 else
6078 /* The register was conditionally live previously.
6079 Add the new condition to the old. */
6080 rcli = (struct reg_cond_life_info *) node->value;
6081 ncond = rcli->condition;
6082 ncond = ior_reg_cond (ncond, cond, 1);
6083 if (rcli->stores == const0_rtx)
6084 rcli->stores = cond;
6085 else if (rcli->stores != const1_rtx)
6086 rcli->stores = ior_reg_cond (rcli->stores, cond, 1);
6088 /* If the register is now unconditionally dead, remove the entry
6089 in the splay_tree. A register is unconditionally dead if the
6090 dead condition ncond is true. A register is also unconditionally
6091 dead if the sum of all conditional stores is an unconditional
6092 store (stores is true), and the dead condition is identical to
6093 the original dead condition initialized at the end of
6094 the block. This is a pointer compare, not an rtx_equal_p
6095 compare. */
6096 if (ncond == const1_rtx
6097 || (ncond == rcli->orig_condition && rcli->stores == const1_rtx))
6098 splay_tree_remove (pbi->reg_cond_dead, regno);
6099 else
6101 rcli->condition = ncond;
6103 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));
6105 /* Not unconditionally dead. */
6106 return 0;
6111 return 1;
6114 /* Called from splay_tree_delete for pbi->reg_cond_dead. */
6116 static void
6117 free_reg_cond_life_info (value)
6118 splay_tree_value value;
6120 struct reg_cond_life_info *rcli = (struct reg_cond_life_info *) value;
6121 free (rcli);
6124 /* Helper function for flush_reg_cond_reg. */
6126 static int
6127 flush_reg_cond_reg_1 (node, data)
6128 splay_tree_node node;
6129 void *data;
6131 struct reg_cond_life_info *rcli;
6132 int *xdata = (int *) data;
6133 unsigned int regno = xdata[0];
6135 /* Don't need to search if last flushed value was farther on in
6136 the in-order traversal. */
6137 if (xdata[1] >= (int) node->key)
6138 return 0;
6140 /* Splice out portions of the expression that refer to regno. */
6141 rcli = (struct reg_cond_life_info *) node->value;
6142 rcli->condition = elim_reg_cond (rcli->condition, regno);
6143 if (rcli->stores != const0_rtx && rcli->stores != const1_rtx)
6144 rcli->stores = elim_reg_cond (rcli->stores, regno);
6146 /* If the entire condition is now false, signal the node to be removed. */
6147 if (rcli->condition == const0_rtx)
6149 xdata[1] = node->key;
6150 return -1;
6152 else if (rcli->condition == const1_rtx)
6153 abort ();
6155 return 0;
6158 /* Flush all (sub) expressions referring to REGNO from REG_COND_DEAD. */
6160 static void
6161 flush_reg_cond_reg (pbi, regno)
6162 struct propagate_block_info *pbi;
6163 int regno;
6165 int pair[2];
6167 pair[0] = regno;
6168 pair[1] = -1;
6169 while (splay_tree_foreach (pbi->reg_cond_dead,
6170 flush_reg_cond_reg_1, pair) == -1)
6171 splay_tree_remove (pbi->reg_cond_dead, pair[1]);
6173 CLEAR_REGNO_REG_SET (pbi->reg_cond_reg, regno);
6176 /* Logical arithmetic on predicate conditions. IOR, NOT and AND.
6177 For ior/and, the ADD flag determines whether we want to add the new
6178 condition X to the old one unconditionally. If it is zero, we will
6179 only return a new expression if X allows us to simplify part of
6180 OLD, otherwise we return OLD unchanged to the caller.
6181 If ADD is nonzero, we will return a new condition in all cases. The
6182 toplevel caller of one of these functions should always pass 1 for
6183 ADD. */
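/* A worked example (illustrative): with OLD == (ne (reg 96) (const_int 0))
   and X == (eq (reg 96) (const_int 0)), ior_reg_cond returns const1_rtx,
   since the two comparisons are reverse predicates of the same register.
   For comparisons of unrelated registers and nonzero ADD, it simply
   builds (ior OLD X) with gen_rtx_IOR.  */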
6185 static rtx
6186 ior_reg_cond (old, x, add)
6187 rtx old, x;
6188 int add;
6190 rtx op0, op1;
6192 if (GET_RTX_CLASS (GET_CODE (old)) == '<')
6194 if (GET_RTX_CLASS (GET_CODE (x)) == '<'
6195 && REVERSE_CONDEXEC_PREDICATES_P (GET_CODE (x), GET_CODE (old))
6196 && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
6197 return const1_rtx;
6198 if (GET_CODE (x) == GET_CODE (old)
6199 && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
6200 return old;
6201 if (! add)
6202 return old;
6203 return gen_rtx_IOR (0, old, x);
6206 switch (GET_CODE (old))
6208 case IOR:
6209 op0 = ior_reg_cond (XEXP (old, 0), x, 0);
6210 op1 = ior_reg_cond (XEXP (old, 1), x, 0);
6211 if (op0 != XEXP (old, 0) || op1 != XEXP (old, 1))
6213 if (op0 == const0_rtx)
6214 return op1;
6215 if (op1 == const0_rtx)
6216 return op0;
6217 if (op0 == const1_rtx || op1 == const1_rtx)
6218 return const1_rtx;
6219 if (op0 == XEXP (old, 0))
6220 op0 = gen_rtx_IOR (0, op0, x);
6221 else
6222 op1 = gen_rtx_IOR (0, op1, x);
6223 return gen_rtx_IOR (0, op0, op1);
6225 if (! add)
6226 return old;
6227 return gen_rtx_IOR (0, old, x);
6229 case AND:
6230 op0 = ior_reg_cond (XEXP (old, 0), x, 0);
6231 op1 = ior_reg_cond (XEXP (old, 1), x, 0);
6232 if (op0 != XEXP (old, 0) || op1 != XEXP (old, 1))
6234 if (op0 == const1_rtx)
6235 return op1;
6236 if (op1 == const1_rtx)
6237 return op0;
6238 if (op0 == const0_rtx || op1 == const0_rtx)
6239 return const0_rtx;
6240 if (op0 == XEXP (old, 0))
6241 op0 = gen_rtx_IOR (0, op0, x);
6242 else
6243 op1 = gen_rtx_IOR (0, op1, x);
6244 return gen_rtx_AND (0, op0, op1);
6246 if (! add)
6247 return old;
6248 return gen_rtx_IOR (0, old, x);
6250 case NOT:
6251 op0 = and_reg_cond (XEXP (old, 0), not_reg_cond (x), 0);
6252 if (op0 != XEXP (old, 0))
6253 return not_reg_cond (op0);
6254 if (! add)
6255 return old;
6256 return gen_rtx_IOR (0, old, x);
6258 default:
6259 abort ();
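/* Return the logical negation of condition X. Constants and NOTs are
   simplified directly, and a comparison of a register against zero is
   negated by reversing the comparison code; anything else is wrapped
   in a NOT rtx.  */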
6263 static rtx
6264 not_reg_cond (x)
6265 rtx x;
6267 enum rtx_code x_code;
6269 if (x == const0_rtx)
6270 return const1_rtx;
6271 else if (x == const1_rtx)
6272 return const0_rtx;
6273 x_code = GET_CODE (x);
6274 if (x_code == NOT)
6275 return XEXP (x, 0);
6276 if (GET_RTX_CLASS (x_code) == '<'
6277 && GET_CODE (XEXP (x, 0)) == REG)
6279 if (XEXP (x, 1) != const0_rtx)
6280 abort ();
6282 return gen_rtx_fmt_ee (reverse_condition (x_code),
6283 VOIDmode, XEXP (x, 0), const0_rtx);
6285 return gen_rtx_NOT (0, x);
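/* Conjoin condition X with condition OLD, simplifying where possible.
   The ADD flag has the same meaning as for ior_reg_cond above.  For
   example (illustrative), and-ing (eq (reg 96) (const_int 0)) with its
   reverse (ne (reg 96) (const_int 0)) yields const0_rtx.  */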
6288 static rtx
6289 and_reg_cond (old, x, add)
6290 rtx old, x;
6291 int add;
6293 rtx op0, op1;
6295 if (GET_RTX_CLASS (GET_CODE (old)) == '<')
6297 if (GET_RTX_CLASS (GET_CODE (x)) == '<'
6298 && GET_CODE (x) == reverse_condition (GET_CODE (old))
6299 && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
6300 return const0_rtx;
6301 if (GET_CODE (x) == GET_CODE (old)
6302 && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
6303 return old;
6304 if (! add)
6305 return old;
6306 return gen_rtx_AND (0, old, x);
6309 switch (GET_CODE (old))
6311 case IOR:
6312 op0 = and_reg_cond (XEXP (old, 0), x, 0);
6313 op1 = and_reg_cond (XEXP (old, 1), x, 0);
6314 if (op0 != XEXP (old, 0) || op1 != XEXP (old, 1))
6316 if (op0 == const0_rtx)
6317 return op1;
6318 if (op1 == const0_rtx)
6319 return op0;
6320 if (op0 == const1_rtx || op1 == const1_rtx)
6321 return const1_rtx;
6322 if (op0 == XEXP (old, 0))
6323 op0 = gen_rtx_AND (0, op0, x);
6324 else
6325 op1 = gen_rtx_AND (0, op1, x);
6326 return gen_rtx_IOR (0, op0, op1);
6328 if (! add)
6329 return old;
6330 return gen_rtx_AND (0, old, x);
6332 case AND:
6333 op0 = and_reg_cond (XEXP (old, 0), x, 0);
6334 op1 = and_reg_cond (XEXP (old, 1), x, 0);
6335 if (op0 != XEXP (old, 0) || op1 != XEXP (old, 1))
6337 if (op0 == const1_rtx)
6338 return op1;
6339 if (op1 == const1_rtx)
6340 return op0;
6341 if (op0 == const0_rtx || op1 == const0_rtx)
6342 return const0_rtx;
6343 if (op0 == XEXP (old, 0))
6344 op0 = gen_rtx_AND (0, op0, x);
6345 else
6346 op1 = gen_rtx_AND (0, op1, x);
6347 return gen_rtx_AND (0, op0, op1);
6349 if (! add)
6350 return old;
6352 /* If X is identical to one of the existing terms of the AND,
6353 then just return what we already have. */
6354 /* ??? There really should be some sort of recursive check here in
6355 case there are nested ANDs. */
6356 if ((GET_CODE (XEXP (old, 0)) == GET_CODE (x)
6357 && REGNO (XEXP (XEXP (old, 0), 0)) == REGNO (XEXP (x, 0)))
6358 || (GET_CODE (XEXP (old, 1)) == GET_CODE (x)
6359 && REGNO (XEXP (XEXP (old, 1), 0)) == REGNO (XEXP (x, 0))))
6360 return old;
6362 return gen_rtx_AND (0, old, x);
6364 case NOT:
6365 op0 = ior_reg_cond (XEXP (old, 0), not_reg_cond (x), 0);
6366 if (op0 != XEXP (old, 0))
6367 return not_reg_cond (op0);
6368 if (! add)
6369 return old;
6370 return gen_rtx_AND (0, old, x);
6372 default:
6373 abort ();
6377 /* Given a condition X, remove references to reg REGNO and return the
6378 new condition. The removal will be done so that all conditions
6379 involving REGNO are considered to evaluate to false. This function
6380 is used when the value of REGNO changes. */
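/* For instance (schematic): eliminating register 96 from
   (ior (ne (reg 96) (const_int 0)) (eq (reg 97) (const_int 0)))
   treats the first arm as false and returns
   (eq (reg 97) (const_int 0)); eliminating register 97 from that
   result would then return const0_rtx.  */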
6382 static rtx
6383 elim_reg_cond (x, regno)
6384 rtx x;
6385 unsigned int regno;
6387 rtx op0, op1;
6389 if (GET_RTX_CLASS (GET_CODE (x)) == '<')
6391 if (REGNO (XEXP (x, 0)) == regno)
6392 return const0_rtx;
6393 return x;
6396 switch (GET_CODE (x))
6398 case AND:
6399 op0 = elim_reg_cond (XEXP (x, 0), regno);
6400 op1 = elim_reg_cond (XEXP (x, 1), regno);
6401 if (op0 == const0_rtx || op1 == const0_rtx)
6402 return const0_rtx;
6403 if (op0 == const1_rtx)
6404 return op1;
6405 if (op1 == const1_rtx)
6406 return op0;
6407 if (op0 == XEXP (x, 0) && op1 == XEXP (x, 1))
6408 return x;
6409 return gen_rtx_AND (0, op0, op1);
6411 case IOR:
6412 op0 = elim_reg_cond (XEXP (x, 0), regno);
6413 op1 = elim_reg_cond (XEXP (x, 1), regno);
6414 if (op0 == const1_rtx || op1 == const1_rtx)
6415 return const1_rtx;
6416 if (op0 == const0_rtx)
6417 return op1;
6418 if (op1 == const0_rtx)
6419 return op0;
6420 if (op0 == XEXP (x, 0) && op1 == XEXP (x, 1))
6421 return x;
6422 return gen_rtx_IOR (0, op0, op1);
6424 case NOT:
6425 op0 = elim_reg_cond (XEXP (x, 0), regno);
6426 if (op0 == const0_rtx)
6427 return const1_rtx;
6428 if (op0 == const1_rtx)
6429 return const0_rtx;
6430 if (op0 != XEXP (x, 0))
6431 return not_reg_cond (op0);
6432 return x;
6434 default:
6435 abort ();
6438 #endif /* HAVE_conditional_execution */
6440 #ifdef AUTO_INC_DEC
6442 /* Try to substitute the auto-inc expression INC as the address inside
6443 MEM which occurs in INSN. Currently, the address of MEM is an expression
6444 involving INCR_REG, and INCR is the next use of INCR_REG; it is an insn
6445 that has a single set whose source is a PLUS of INCR_REG and something
6446 else. */
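/* A sketch of the simple case (schematic RTL): given

       insn:  ... (mem (reg 100)) ...
       incr:  (set (reg 100) (plus (reg 100) (const_int 4)))

   where (reg 100) dies in INCR, the MEM becomes
   (mem (post_inc (reg 100))), INSN gains a REG_INC note, and INCR is
   turned into a deleted note.  */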
6448 static void
6449 attempt_auto_inc (pbi, inc, insn, mem, incr, incr_reg)
6450 struct propagate_block_info *pbi;
6451 rtx inc, insn, mem, incr, incr_reg;
6453 int regno = REGNO (incr_reg);
6454 rtx set = single_set (incr);
6455 rtx q = SET_DEST (set);
6456 rtx y = SET_SRC (set);
6457 int opnum = XEXP (y, 0) == incr_reg ? 0 : 1;
6459 /* Make sure this reg appears only once in this insn. */
6460 if (count_occurrences (PATTERN (insn), incr_reg, 1) != 1)
6461 return;
6463 if (dead_or_set_p (incr, incr_reg)
6464 /* Mustn't autoinc an eliminable register. */
6465 && (regno >= FIRST_PSEUDO_REGISTER
6466 || ! TEST_HARD_REG_BIT (elim_reg_set, regno)))
6468 /* This is the simple case. Try to make the auto-inc. If
6469 we can't, we are done. Otherwise, we will do any
6470 needed updates below. */
6471 if (! validate_change (insn, &XEXP (mem, 0), inc, 0))
6472 return;
6474 else if (GET_CODE (q) == REG
6475 /* PREV_INSN used here to check the semi-open interval
6476 [insn,incr). */
6477 && ! reg_used_between_p (q, PREV_INSN (insn), incr)
6478 /* We must also check for sets of q as q may be
6479 a call clobbered hard register and there may
6480 be a call between PREV_INSN (insn) and incr. */
6481 && ! reg_set_between_p (q, PREV_INSN (insn), incr))
6483 /* We have *p followed sometime later by q = p+size.
6484 Both p and q must be live afterward,
6485 and q is not used between INSN and its assignment.
6486 Change it to q = p, ...*q..., q = q+size.
6487 Then fall into the usual case. */
6488 rtx insns, temp;
6490 start_sequence ();
6491 emit_move_insn (q, incr_reg);
6492 insns = get_insns ();
6493 end_sequence ();
6495 if (basic_block_for_insn)
6496 for (temp = insns; temp; temp = NEXT_INSN (temp))
6497 set_block_for_insn (temp, pbi->bb);
6499 /* If we can't make the auto-inc, or can't make the
6500 replacement into Y, exit. There's no point in making
6501 the change below if we can't do the auto-inc and doing
6502 so is not correct in the pre-inc case. */
6504 XEXP (inc, 0) = q;
6505 validate_change (insn, &XEXP (mem, 0), inc, 1);
6506 validate_change (incr, &XEXP (y, opnum), q, 1);
6507 if (! apply_change_group ())
6508 return;
6510 /* We now know we'll be doing this change, so emit the
6511 new insn(s) and do the updates. */
6512 emit_insns_before (insns, insn);
6514 if (pbi->bb->head == insn)
6515 pbi->bb->head = insns;
6517 /* INCR will become a NOTE and INSN won't contain a
6518 use of INCR_REG. If a use of INCR_REG was just placed in
6519 the insn before INSN, make that the next use.
6520 Otherwise, invalidate it. */
6521 if (GET_CODE (PREV_INSN (insn)) == INSN
6522 && GET_CODE (PATTERN (PREV_INSN (insn))) == SET
6523 && SET_SRC (PATTERN (PREV_INSN (insn))) == incr_reg)
6524 pbi->reg_next_use[regno] = PREV_INSN (insn);
6525 else
6526 pbi->reg_next_use[regno] = 0;
6528 incr_reg = q;
6529 regno = REGNO (q);
6531 /* REGNO is now used in INCR which is below INSN, but
6532 it previously wasn't live here. If we don't mark
6533 it as live, we'll put a REG_DEAD note for it
6534 on this insn, which is incorrect. */
6535 SET_REGNO_REG_SET (pbi->reg_live, regno);
6537 /* If there are any calls between INSN and INCR, show
6538 that REGNO now crosses them. */
6539 for (temp = insn; temp != incr; temp = NEXT_INSN (temp))
6540 if (GET_CODE (temp) == CALL_INSN)
6541 REG_N_CALLS_CROSSED (regno)++;
6543 else
6544 return;
6546 /* If we haven't returned, it means we were able to make the
6547 auto-inc, so update the status. First, record that this insn
6548 has an implicit side effect. */
6550 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, incr_reg, REG_NOTES (insn));
6552 /* Modify the old increment-insn to simply copy
6553 the already-incremented value of our register. */
6554 if (! validate_change (incr, &SET_SRC (set), incr_reg, 0))
6555 abort ();
6557 /* If that makes it a no-op (copying the register into itself) delete
6558 it so it won't appear to be a "use" and a "set" of this
6559 register. */
6560 if (REGNO (SET_DEST (set)) == REGNO (incr_reg))
6562 /* If the original source was dead, it's dead now. */
6563 rtx note;
6565 while ((note = find_reg_note (incr, REG_DEAD, NULL_RTX)) != NULL_RTX)
6567 remove_note (incr, note);
6568 if (XEXP (note, 0) != incr_reg)
6569 CLEAR_REGNO_REG_SET (pbi->reg_live, REGNO (XEXP (note, 0)));
6572 PUT_CODE (incr, NOTE);
6573 NOTE_LINE_NUMBER (incr) = NOTE_INSN_DELETED;
6574 NOTE_SOURCE_FILE (incr) = 0;
6577 if (regno >= FIRST_PSEUDO_REGISTER)
6579 /* Count an extra reference to the reg. When a reg is
6580 incremented, spilling it is worse, so we want to make
6581 that less likely. */
6582 REG_FREQ (regno) += (optimize_size || !pbi->bb->frequency
6583 ? 1 : pbi->bb->frequency);
6585 /* Count the increment as a setting of the register,
6586 even though it isn't a SET in rtl. */
6587 REG_N_SETS (regno)++;
6591 /* X is a MEM found in INSN. See if we can convert it into an auto-increment
6592 reference. */
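/* Hedged example with GET_MODE_SIZE == 4: a use of (mem (reg R)) whose
   next use of R adds 4 maps to POST_INC, while a use of
   (mem (plus (reg R) (const_int 4))) with the same increment maps to
   PRE_INC, because the constant offset equals the increment.  */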
6594 static void
6595 find_auto_inc (pbi, x, insn)
6596 struct propagate_block_info *pbi;
6597 rtx x;
6598 rtx insn;
6600 rtx addr = XEXP (x, 0);
6601 HOST_WIDE_INT offset = 0;
6602 rtx set, y, incr, inc_val;
6603 int regno;
6604 int size = GET_MODE_SIZE (GET_MODE (x));
6606 if (GET_CODE (insn) == JUMP_INSN)
6607 return;
6609 /* Here we detect use of an index register which might be good for
6610 postincrement, postdecrement, preincrement, or predecrement. */
6612 if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
6613 offset = INTVAL (XEXP (addr, 1)), addr = XEXP (addr, 0);
6615 if (GET_CODE (addr) != REG)
6616 return;
6618 regno = REGNO (addr);
6620 /* Is the next use an increment that might make auto-increment? */
6621 incr = pbi->reg_next_use[regno];
6622 if (incr == 0 || BLOCK_NUM (incr) != BLOCK_NUM (insn))
6623 return;
6624 set = single_set (incr);
6625 if (set == 0 || GET_CODE (set) != SET)
6626 return;
6627 y = SET_SRC (set);
6629 if (GET_CODE (y) != PLUS)
6630 return;
6632 if (REG_P (XEXP (y, 0)) && REGNO (XEXP (y, 0)) == REGNO (addr))
6633 inc_val = XEXP (y, 1);
6634 else if (REG_P (XEXP (y, 1)) && REGNO (XEXP (y, 1)) == REGNO (addr))
6635 inc_val = XEXP (y, 0);
6636 else
6637 return;
6639 if (GET_CODE (inc_val) == CONST_INT)
6641 if (HAVE_POST_INCREMENT
6642 && (INTVAL (inc_val) == size && offset == 0))
6643 attempt_auto_inc (pbi, gen_rtx_POST_INC (Pmode, addr), insn, x,
6644 incr, addr);
6645 else if (HAVE_POST_DECREMENT
6646 && (INTVAL (inc_val) == -size && offset == 0))
6647 attempt_auto_inc (pbi, gen_rtx_POST_DEC (Pmode, addr), insn, x,
6648 incr, addr);
6649 else if (HAVE_PRE_INCREMENT
6650 && (INTVAL (inc_val) == size && offset == size))
6651 attempt_auto_inc (pbi, gen_rtx_PRE_INC (Pmode, addr), insn, x,
6652 incr, addr);
6653 else if (HAVE_PRE_DECREMENT
6654 && (INTVAL (inc_val) == -size && offset == -size))
6655 attempt_auto_inc (pbi, gen_rtx_PRE_DEC (Pmode, addr), insn, x,
6656 incr, addr);
6657 else if (HAVE_POST_MODIFY_DISP && offset == 0)
6658 attempt_auto_inc (pbi, gen_rtx_POST_MODIFY (Pmode, addr,
6659 gen_rtx_PLUS (Pmode,
6660 addr,
6661 inc_val)),
6662 insn, x, incr, addr);
6664 else if (GET_CODE (inc_val) == REG
6665 && ! reg_set_between_p (inc_val, PREV_INSN (insn),
6666 NEXT_INSN (incr)))
6669 if (HAVE_POST_MODIFY_REG && offset == 0)
6670 attempt_auto_inc (pbi, gen_rtx_POST_MODIFY (Pmode, addr,
6671 gen_rtx_PLUS (Pmode,
6672 addr,
6673 inc_val)),
6674 insn, x, incr, addr);
6678 #endif /* AUTO_INC_DEC */
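/* Mark the register referenced by REG as used in INSN: set its bits in
   PBI->REG_LIVE, record the next-use and reference-count information
   requested by PBI->FLAGS, and add REG_DEAD notes where some or all of
   the (possibly multi-word hard) register was dead below this insn.
   COND, if nonnull, is the condition under which the use occurs.  */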
6680 static void
6681 mark_used_reg (pbi, reg, cond, insn)
6682 struct propagate_block_info *pbi;
6683 rtx reg;
6684 rtx cond ATTRIBUTE_UNUSED;
6685 rtx insn;
6687 unsigned int regno_first, regno_last, i;
6688 int some_was_live, some_was_dead, some_not_set;
6690 regno_last = regno_first = REGNO (reg);
6691 if (regno_first < FIRST_PSEUDO_REGISTER)
6692 regno_last += HARD_REGNO_NREGS (regno_first, GET_MODE (reg)) - 1;
6694 /* Find out if any of this register is live after this instruction. */
6695 some_was_live = some_was_dead = 0;
6696 for (i = regno_first; i <= regno_last; ++i)
6698 int needed_regno = REGNO_REG_SET_P (pbi->reg_live, i);
6699 some_was_live |= needed_regno;
6700 some_was_dead |= ! needed_regno;
6703 /* Find out if any of the register was set this insn. */
6704 some_not_set = 0;
6705 for (i = regno_first; i <= regno_last; ++i)
6706 some_not_set |= ! REGNO_REG_SET_P (pbi->new_set, i);
6708 if (pbi->flags & (PROP_LOG_LINKS | PROP_AUTOINC))
6710 /* Record where each reg is used, so when the reg is set we know
6711 the next insn that uses it. */
6712 pbi->reg_next_use[regno_first] = insn;
6715 if (pbi->flags & PROP_REG_INFO)
6717 if (regno_first < FIRST_PSEUDO_REGISTER)
6719 /* If this is a register we are going to try to eliminate,
6720 don't mark it live here. If we are successful in
6721 eliminating it, it need not be live unless it is used for
6722 pseudos, in which case it will have been set live when it
6723 was allocated to the pseudos. If the register will not
6724 be eliminated, reload will set it live at that point.
6726 Otherwise, record that this function uses this register. */
6727 /* ??? The PPC backend tries to "eliminate" on the pic
6728 register to itself. This should be fixed. In the mean
6729 time, hack around it. */
6731 if (! (TEST_HARD_REG_BIT (elim_reg_set, regno_first)
6732 && (regno_first == FRAME_POINTER_REGNUM
6733 || regno_first == ARG_POINTER_REGNUM)))
6734 for (i = regno_first; i <= regno_last; ++i)
6735 regs_ever_live[i] = 1;
6737 else
6739 /* Keep track of which basic block each reg appears in. */
6741 register int blocknum = pbi->bb->index;
6742 if (REG_BASIC_BLOCK (regno_first) == REG_BLOCK_UNKNOWN)
6743 REG_BASIC_BLOCK (regno_first) = blocknum;
6744 else if (REG_BASIC_BLOCK (regno_first) != blocknum)
6745 REG_BASIC_BLOCK (regno_first) = REG_BLOCK_GLOBAL;
6747 /* Count (weighted) number of uses of each reg. */
6748 REG_FREQ (regno_first)
6749 += (optimize_size || !pbi->bb->frequency ? 1 : pbi->bb->frequency);
6750 REG_N_REFS (regno_first)++;
6754 /* Record and count the insns in which a reg dies. If it is used in
6755 this insn and was dead below the insn then it dies in this insn.
6756 If it was set in this insn, we do not make a REG_DEAD note;
6757 likewise if we already made such a note. */
6758 if ((pbi->flags & (PROP_DEATH_NOTES | PROP_REG_INFO))
6759 && some_was_dead
6760 && some_not_set)
6762 /* Check for the case where the register dying partially
6763 overlaps the register set by this insn. */
6764 if (regno_first != regno_last)
6765 for (i = regno_first; i <= regno_last; ++i)
6766 some_was_live |= REGNO_REG_SET_P (pbi->new_set, i);
6768 /* If none of the words in X is needed, make a REG_DEAD note.
6769 Otherwise, we must make partial REG_DEAD notes. */
6770 if (! some_was_live)
6772 if ((pbi->flags & PROP_DEATH_NOTES)
6773 && ! find_regno_note (insn, REG_DEAD, regno_first))
6774 REG_NOTES (insn)
6775 = alloc_EXPR_LIST (REG_DEAD, reg, REG_NOTES (insn));
6777 if (pbi->flags & PROP_REG_INFO)
6778 REG_N_DEATHS (regno_first)++;
6780 else
6782 /* Don't make a REG_DEAD note for a part of a register
6783 that is set in the insn. */
6784 for (i = regno_first; i <= regno_last; ++i)
6785 if (! REGNO_REG_SET_P (pbi->reg_live, i)
6786 && ! dead_or_set_regno_p (insn, i))
6787 REG_NOTES (insn)
6788 = alloc_EXPR_LIST (REG_DEAD,
6789 gen_rtx_REG (reg_raw_mode[i], i),
6790 REG_NOTES (insn));
6794 /* Mark the register as being live. */
6795 for (i = regno_first; i <= regno_last; ++i)
6797 SET_REGNO_REG_SET (pbi->reg_live, i);
6799 #ifdef HAVE_conditional_execution
6800 /* If this is a conditional use, record that fact. If it is later
6801 conditionally set, we'll know to kill the register. */
6802 if (cond != NULL_RTX)
6804 splay_tree_node node;
6805 struct reg_cond_life_info *rcli;
6806 rtx ncond;
6808 if (some_was_live)
6810 node = splay_tree_lookup (pbi->reg_cond_dead, i);
6811 if (node == NULL)
6813 /* The register was unconditionally live previously.
6814 No need to do anything. */
6816 else
6818 /* The register was conditionally live previously.
6819 Subtract the new life cond from the old death cond. */
6820 rcli = (struct reg_cond_life_info *) node->value;
6821 ncond = rcli->condition;
6822 ncond = and_reg_cond (ncond, not_reg_cond (cond), 1);
6824 /* If the register is now unconditionally live,
6825 remove the entry in the splay_tree. */
6826 if (ncond == const0_rtx)
6827 splay_tree_remove (pbi->reg_cond_dead, i);
6828 else
6830 rcli->condition = ncond;
6831 SET_REGNO_REG_SET (pbi->reg_cond_reg,
6832 REGNO (XEXP (cond, 0)));
6836 else
6838 /* The register was not previously live at all. Record
6839 the condition under which it is still dead. */
6840 rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
6841 rcli->condition = not_reg_cond (cond);
6842 rcli->stores = const0_rtx;
6843 rcli->orig_condition = const0_rtx;
6844 splay_tree_insert (pbi->reg_cond_dead, i,
6845 (splay_tree_value) rcli);
6847 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));
6850 else if (some_was_live)
6852 /* The register may have been conditionally live previously, but
6853 is now unconditionally live. Remove it from the conditionally
6854 dead list, so that a conditional set won't cause us to think
6855 it dead. */
6856 splay_tree_remove (pbi->reg_cond_dead, i);
6858 #endif
6862 /* Scan expression X and set a 1-bit in PBI->REG_LIVE for each reg it uses.
6863 This is done assuming the registers needed after X are those that
6864 already have 1-bits in PBI->REG_LIVE.
6866 INSN is the containing instruction. If INSN is dead, this function
6867 is not called. */
6869 static void
6870 mark_used_regs (pbi, x, cond, insn)
6871 struct propagate_block_info *pbi;
6872 rtx x, cond, insn;
6874 register RTX_CODE code;
6875 register int regno;
6876 int flags = pbi->flags;
6878 retry:
6879 code = GET_CODE (x);
6880 switch (code)
6882 case LABEL_REF:
6883 case SYMBOL_REF:
6884 case CONST_INT:
6885 case CONST:
6886 case CONST_DOUBLE:
6887 case PC:
6888 case ADDR_VEC:
6889 case ADDR_DIFF_VEC:
6890 return;
6892 #ifdef HAVE_cc0
6893 case CC0:
6894 pbi->cc0_live = 1;
6895 return;
6896 #endif
6898 case CLOBBER:
6899 /* If we are clobbering a MEM, mark any registers inside the address
6900 as being used. */
6901 if (GET_CODE (XEXP (x, 0)) == MEM)
6902 mark_used_regs (pbi, XEXP (XEXP (x, 0), 0), cond, insn);
6903 return;
6905 case MEM:
6906 /* Don't bother watching stores to mems if this is not the
6907 final pass. We'll not be deleting dead stores this round. */
6908 if (optimize && (flags & PROP_SCAN_DEAD_CODE))
6910 /* Invalidate the data for the last MEM stored, but only if MEM is
6911 something that can be stored into. */
6912 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
6913 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
6914 /* Needn't clear the memory set list. */
6916 else
6918 rtx temp = pbi->mem_set_list;
6919 rtx prev = NULL_RTX;
6920 rtx next;
6922 while (temp)
6924 next = XEXP (temp, 1);
6925 if (anti_dependence (XEXP (temp, 0), x))
6927 /* Splice temp out of the list. */
6928 if (prev)
6929 XEXP (prev, 1) = next;
6930 else
6931 pbi->mem_set_list = next;
6932 free_EXPR_LIST_node (temp);
6933 pbi->mem_set_list_len--;
6935 else
6936 prev = temp;
6937 temp = next;
6941 /* If the memory reference had embedded side effects (autoincrement
6942 address modes), then we may need to kill some entries on the
6943 memory set list. */
6944 if (insn)
6945 invalidate_mems_from_autoinc (pbi, insn);
6948 #ifdef AUTO_INC_DEC
6949 if (flags & PROP_AUTOINC)
6950 find_auto_inc (pbi, x, insn);
6951 #endif
6952 break;
6954 case SUBREG:
6955 #ifdef CLASS_CANNOT_CHANGE_MODE
6956 if (GET_CODE (SUBREG_REG (x)) == REG
6957 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER
6958 && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (x),
6959 GET_MODE (SUBREG_REG (x))))
6960 REG_CHANGES_MODE (REGNO (SUBREG_REG (x))) = 1;
6961 #endif
6963 /* While we're here, optimize this case. */
6964 x = SUBREG_REG (x);
6965 if (GET_CODE (x) != REG)
6966 goto retry;
6967 /* Fall through. */
6969 case REG:
6970 /* See a register other than being set => mark it as needed. */
6971 mark_used_reg (pbi, x, cond, insn);
6972 return;
6974 case SET:
6976 register rtx testreg = SET_DEST (x);
6977 int mark_dest = 0;
6979 /* If storing into MEM, don't show it as being used. But do
6980 show the address as being used. */
6981 if (GET_CODE (testreg) == MEM)
6983 #ifdef AUTO_INC_DEC
6984 if (flags & PROP_AUTOINC)
6985 find_auto_inc (pbi, testreg, insn);
6986 #endif
6987 mark_used_regs (pbi, XEXP (testreg, 0), cond, insn);
6988 mark_used_regs (pbi, SET_SRC (x), cond, insn);
6989 return;
6992 /* Storing in STRICT_LOW_PART is like storing in a reg
6993 in that this SET might be dead, so ignore it in TESTREG,
6994 but in some other ways it is like using the reg.
6996 Storing in a SUBREG or a bit field is like storing the entire
6997 register in that if the register's value is not used
6998 then this SET is not needed. */
6999 while (GET_CODE (testreg) == STRICT_LOW_PART
7000 || GET_CODE (testreg) == ZERO_EXTRACT
7001 || GET_CODE (testreg) == SIGN_EXTRACT
7002 || GET_CODE (testreg) == SUBREG)
7004 #ifdef CLASS_CANNOT_CHANGE_MODE
7005 if (GET_CODE (testreg) == SUBREG
7006 && GET_CODE (SUBREG_REG (testreg)) == REG
7007 && REGNO (SUBREG_REG (testreg)) >= FIRST_PSEUDO_REGISTER
7008 && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (SUBREG_REG (testreg)),
7009 GET_MODE (testreg)))
7010 REG_CHANGES_MODE (REGNO (SUBREG_REG (testreg))) = 1;
7011 #endif
7013 /* Modifying a single register in an alternate mode
7014 does not use any of the old value. But these other
7015 ways of storing in a register do use the old value. */
7016 if (GET_CODE (testreg) == SUBREG
7017 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
7019 else
7020 mark_dest = 1;
7022 testreg = XEXP (testreg, 0);
7025 /* If this is a store into a register or group of registers,
7026 recursively scan the value being stored. */
7028 if ((GET_CODE (testreg) == PARALLEL
7029 && GET_MODE (testreg) == BLKmode)
7030 || (GET_CODE (testreg) == REG
7031 && (regno = REGNO (testreg),
7032 ! (regno == FRAME_POINTER_REGNUM
7033 && (! reload_completed || frame_pointer_needed)))
7034 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
7035 && ! (regno == HARD_FRAME_POINTER_REGNUM
7036 && (! reload_completed || frame_pointer_needed))
7037 #endif
7038 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
7039 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
7040 #endif
7043 if (mark_dest)
7044 mark_used_regs (pbi, SET_DEST (x), cond, insn);
7045 mark_used_regs (pbi, SET_SRC (x), cond, insn);
7046 return;
7049 break;
7051 case ASM_OPERANDS:
7052 case UNSPEC_VOLATILE:
7053 case TRAP_IF:
7054 case ASM_INPUT:
7056 /* Traditional and volatile asm instructions must be considered to use
7057 and clobber all hard registers, all pseudo-registers and all of
7058 memory. So must TRAP_IF and UNSPEC_VOLATILE operations.
7060 Consider for instance a volatile asm that changes the fpu rounding
7061 mode. An insn should not be moved across this even if it only uses
7062 pseudo-regs because it might give an incorrectly rounded result.
7064 ?!? Unfortunately, marking all hard registers as live causes massive
7065 problems for the register allocator and marking all pseudos as live
7066 creates mountains of uninitialized variable warnings.
7068 So for now, just clear the memory set list and mark any regs
7069 we can find in ASM_OPERANDS as used. */
7070 if (code != ASM_OPERANDS || MEM_VOLATILE_P (x))
7072 free_EXPR_LIST_list (&pbi->mem_set_list);
7073 pbi->mem_set_list_len = 0;
7076 /* For all ASM_OPERANDS, we must traverse the vector of input operands.
7077 We cannot just fall through here, since then we would be confused
7078 by the ASM_INPUT rtx inside ASM_OPERANDS, which, unlike its normal
7079 usage, does not indicate a traditional asm. */
7080 if (code == ASM_OPERANDS)
7082 int j;
7084 for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
7085 mark_used_regs (pbi, ASM_OPERANDS_INPUT (x, j), cond, insn);
7087 break;
7090 case COND_EXEC:
7091 if (cond != NULL_RTX)
7092 abort ();
7094 mark_used_regs (pbi, COND_EXEC_TEST (x), NULL_RTX, insn);
7096 cond = COND_EXEC_TEST (x);
7097 x = COND_EXEC_CODE (x);
7098 goto retry;
7100 case PHI:
7101 /* We _do_not_ want to scan operands of phi nodes. Operands of
7102 a phi function are evaluated only when control reaches this
7103 block along a particular edge. Therefore, regs that appear
7104 as arguments to phi should not be added to the global live at
7105 start. */
7106 return;
7108 default:
7109 break;
7112 /* Recursively scan the operands of this expression. */
7115 register const char *fmt = GET_RTX_FORMAT (code);
7116 register int i;
7118 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7120 if (fmt[i] == 'e')
7122 /* Tail recursive case: save a function call level. */
7123 if (i == 0)
7125 x = XEXP (x, 0);
7126 goto retry;
7128 mark_used_regs (pbi, XEXP (x, i), cond, insn);
7130 else if (fmt[i] == 'E')
7132 register int j;
7133 for (j = 0; j < XVECLEN (x, i); j++)
7134 mark_used_regs (pbi, XVECEXP (x, i, j), cond, insn);
7140 #ifdef AUTO_INC_DEC
7142 static int
7143 try_pre_increment_1 (pbi, insn)
7144 struct propagate_block_info *pbi;
7145 rtx insn;
7147 /* Find the next use of this reg. If in same basic block,
7148 make it do pre-increment or pre-decrement if appropriate. */
7149 rtx x = single_set (insn);
7150 HOST_WIDE_INT amount = ((GET_CODE (SET_SRC (x)) == PLUS ? 1 : -1)
7151 * INTVAL (XEXP (SET_SRC (x), 1)));
7152 int regno = REGNO (SET_DEST (x));
7153 rtx y = pbi->reg_next_use[regno];
7154 if (y != 0
7155 && SET_DEST (x) != stack_pointer_rtx
7156 && BLOCK_NUM (y) == BLOCK_NUM (insn)
7157 /* Don't do this if the reg dies, or gets set in y; a standard addressing
7158 mode would be better. */
7159 && ! dead_or_set_p (y, SET_DEST (x))
7160 && try_pre_increment (y, SET_DEST (x), amount))
7162 /* We have found a suitable auto-increment and already changed
7163 insn Y to do it. So flush this increment instruction. */
7164 propagate_block_delete_insn (pbi->bb, insn);
7166 /* Count a reference to this reg for the increment insn we are
7167 deleting. When a reg is incremented, spilling it is worse,
7168 so we want to make that less likely. */
7169 if (regno >= FIRST_PSEUDO_REGISTER)
7171 REG_FREQ (regno) += (optimize_size || !pbi->bb->frequency
7172 ? 1 : pbi->bb->frequency);
7173 REG_N_SETS (regno)++;
7176 /* Flush any remembered memories depending on the value of
7177 the incremented register. */
7178 invalidate_mems_from_set (pbi, SET_DEST (x));
7180 return 1;
7182 return 0;
7185 /* Try to change INSN so that it does pre-increment or pre-decrement
7186 addressing on register REG in order to add AMOUNT to REG.
7187 AMOUNT is negative for pre-decrement.
7188 Returns 1 if the change could be made.
7189 This checks all about the validity of the result of modifying INSN. */
7191 static int
7192 try_pre_increment (insn, reg, amount)
7193 rtx insn, reg;
7194 HOST_WIDE_INT amount;
7196 register rtx use;
7198 /* Nonzero if we can try to make a pre-increment or pre-decrement.
7199 For example, addl $4,r1; movl (r1),... can become movl +(r1),... */
7200 int pre_ok = 0;
7201 /* Nonzero if we can try to make a post-increment or post-decrement.
7202 For example, addl $4,r1; movl -4(r1),... can become movl (r1)+,...
7203 It is possible for both PRE_OK and POST_OK to be nonzero if the machine
7204 supports both pre-inc and post-inc, or both pre-dec and post-dec. */
7205 int post_ok = 0;
7207 /* Nonzero if the opportunity actually requires post-inc or post-dec. */
7208 int do_post = 0;
7210 /* From the sign of the increment, see which possibilities are conceivable
7211 on this target machine. */
7212 if (HAVE_PRE_INCREMENT && amount > 0)
7213 pre_ok = 1;
7214 if (HAVE_POST_INCREMENT && amount > 0)
7215 post_ok = 1;
7217 if (HAVE_PRE_DECREMENT && amount < 0)
7218 pre_ok = 1;
7219 if (HAVE_POST_DECREMENT && amount < 0)
7220 post_ok = 1;
7222 if (! (pre_ok || post_ok))
7223 return 0;
7225 /* It is not safe to add a side effect to a jump insn
7226 because if the incremented register is spilled and must be reloaded
7227 there would be no way to store the incremented value back in memory. */
7229 if (GET_CODE (insn) == JUMP_INSN)
7230 return 0;
7232 use = 0;
7233 if (pre_ok)
7234 use = find_use_as_address (PATTERN (insn), reg, 0);
7235 if (post_ok && (use == 0 || use == (rtx) 1))
7237 use = find_use_as_address (PATTERN (insn), reg, -amount);
7238 do_post = 1;
7241 if (use == 0 || use == (rtx) 1)
7242 return 0;
7244 if (GET_MODE_SIZE (GET_MODE (use)) != (amount > 0 ? amount : - amount))
7245 return 0;
7247 /* See if this combination of instruction and addressing mode exists. */
7248 if (! validate_change (insn, &XEXP (use, 0),
7249 gen_rtx_fmt_e (amount > 0
7250 ? (do_post ? POST_INC : PRE_INC)
7251 : (do_post ? POST_DEC : PRE_DEC),
7252 Pmode, reg), 0))
7253 return 0;
7255 /* Record that this insn now has an implicit side effect on X. */
7256 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, reg, REG_NOTES (insn));
7257 return 1;
7260 #endif /* AUTO_INC_DEC */
7262 /* Find the place in the rtx X where REG is used as a memory address.
7263 Return the MEM rtx that so uses it.
7264 If PLUSCONST is nonzero, search instead for a memory address equivalent to
7265 (plus REG (const_int PLUSCONST)).
7267 If such an address does not appear, return 0.
7268 If REG appears more than once, or is used other than in such an address,
7269 return (rtx)1. */
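/* Illustrative: with PLUSCONST == 4,
   (set (reg 100) (mem (plus (reg 101) (const_int 4)))) applied to
   REG == (reg 101) returns the MEM; if (reg 101) also appeared
   elsewhere in X, the result would be (rtx) 1 instead.  */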
7271 rtx
7272 find_use_as_address (x, reg, plusconst)
7273 register rtx x;
7274 rtx reg;
7275 HOST_WIDE_INT plusconst;
7277 enum rtx_code code = GET_CODE (x);
7278 const char *fmt = GET_RTX_FORMAT (code);
7279 register int i;
7280 register rtx value = 0;
7281 register rtx tem;
7283 if (code == MEM && XEXP (x, 0) == reg && plusconst == 0)
7284 return x;
7286 if (code == MEM && GET_CODE (XEXP (x, 0)) == PLUS
7287 && XEXP (XEXP (x, 0), 0) == reg
7288 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
7289 && INTVAL (XEXP (XEXP (x, 0), 1)) == plusconst)
7290 return x;
7292 if (code == SIGN_EXTRACT || code == ZERO_EXTRACT)
7294 /* If REG occurs inside a MEM used in a bit-field reference,
7295 that is unacceptable. */
7296 if (find_use_as_address (XEXP (x, 0), reg, 0) != 0)
7297 return (rtx) (HOST_WIDE_INT) 1;
7300 if (x == reg)
7301 return (rtx) (HOST_WIDE_INT) 1;
7303 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7305 if (fmt[i] == 'e')
7307 tem = find_use_as_address (XEXP (x, i), reg, plusconst);
7308 if (value == 0)
7309 value = tem;
7310 else if (tem != 0)
7311 return (rtx) (HOST_WIDE_INT) 1;
7313 else if (fmt[i] == 'E')
7315 register int j;
7316 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7318 tem = find_use_as_address (XVECEXP (x, i, j), reg, plusconst);
7319 if (value == 0)
7320 value = tem;
7321 else if (tem != 0)
7322 return (rtx) (HOST_WIDE_INT) 1;
7327 return value;
7330 /* Write information about registers and basic blocks into FILE.
7331 This is part of making a debugging dump. */
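/* Typical dump_regset output (illustrative, using x86 register names):
   " 3 [bx] 104 117" -- each set member is printed by number, and hard
   registers also get their name in brackets.  */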
7333 void
7334 dump_regset (r, outf)
7335 regset r;
7336 FILE *outf;
7338 int i;
7339 if (r == NULL)
7341 fputs (" (nil)", outf);
7342 return;
7345 EXECUTE_IF_SET_IN_REG_SET (r, 0, i,
7347 fprintf (outf, " %d", i);
7348 if (i < FIRST_PSEUDO_REGISTER)
7349 fprintf (outf, " [%s]",
7350 reg_names[i]);
7354 /* Print a human-readable representation of R on the standard error
7355 stream. This function is designed to be used from within the
7356 debugger. */
7358 void
7359 debug_regset (r)
7360 regset r;
7362 dump_regset (r, stderr);
7363 putc ('\n', stderr);
7366 void
7367 dump_flow_info (file)
7368 FILE *file;
7370 register int i;
7371 static const char * const reg_class_names[] = REG_CLASS_NAMES;
7373 fprintf (file, "%d registers.\n", max_regno);
7374 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
7375 if (REG_N_REFS (i))
7377 enum reg_class class, altclass;
7378 fprintf (file, "\nRegister %d used %d times across %d insns",
7379 i, REG_N_REFS (i), REG_LIVE_LENGTH (i));
7380 if (REG_BASIC_BLOCK (i) >= 0)
7381 fprintf (file, " in block %d", REG_BASIC_BLOCK (i));
7382 if (REG_N_SETS (i))
7383 fprintf (file, "; set %d time%s", REG_N_SETS (i),
7384 (REG_N_SETS (i) == 1) ? "" : "s");
7385 if (REG_USERVAR_P (regno_reg_rtx[i]))
7386 fprintf (file, "; user var");
7387 if (REG_N_DEATHS (i) != 1)
7388 fprintf (file, "; dies in %d places", REG_N_DEATHS (i));
7389 if (REG_N_CALLS_CROSSED (i) == 1)
7390 fprintf (file, "; crosses 1 call");
7391 else if (REG_N_CALLS_CROSSED (i))
7392 fprintf (file, "; crosses %d calls", REG_N_CALLS_CROSSED (i));
7393 if (PSEUDO_REGNO_BYTES (i) != UNITS_PER_WORD)
7394 fprintf (file, "; %d bytes", PSEUDO_REGNO_BYTES (i));
7395 class = reg_preferred_class (i);
7396 altclass = reg_alternate_class (i);
7397 if (class != GENERAL_REGS || altclass != ALL_REGS)
7399 if (altclass == ALL_REGS || class == ALL_REGS)
7400 fprintf (file, "; pref %s", reg_class_names[(int) class]);
7401 else if (altclass == NO_REGS)
7402 fprintf (file, "; %s or none", reg_class_names[(int) class]);
7403 else
7404 fprintf (file, "; pref %s, else %s",
7405 reg_class_names[(int) class],
7406 reg_class_names[(int) altclass]);
7408 if (REG_POINTER (regno_reg_rtx[i]))
7409 fprintf (file, "; pointer");
7410 fprintf (file, ".\n");
7413 fprintf (file, "\n%d basic blocks, %d edges.\n", n_basic_blocks, n_edges);
7414 for (i = 0; i < n_basic_blocks; i++)
7416 register basic_block bb = BASIC_BLOCK (i);
7417 register edge e;
7419 fprintf (file, "\nBasic block %d: first insn %d, last %d, loop_depth %d, count ",
7420 i, INSN_UID (bb->head), INSN_UID (bb->end), bb->loop_depth);
7421 fprintf (file, HOST_WIDEST_INT_PRINT_DEC, (HOST_WIDEST_INT) bb->count);
7422 fprintf (file, ", freq %i.\n", bb->frequency);
7424 fprintf (file, "Predecessors: ");
7425 for (e = bb->pred; e; e = e->pred_next)
7426 dump_edge_info (file, e, 0);
7428 fprintf (file, "\nSuccessors: ");
7429 for (e = bb->succ; e; e = e->succ_next)
7430 dump_edge_info (file, e, 1);
7432 fprintf (file, "\nRegisters live at start:");
7433 dump_regset (bb->global_live_at_start, file);
7435 fprintf (file, "\nRegisters live at end:");
7436 dump_regset (bb->global_live_at_end, file);
7438 putc ('\n', file);
7441 putc ('\n', file);
7444 void
7445 debug_flow_info ()
7447 dump_flow_info (stderr);
7450 void
7451 dump_edge_info (file, e, do_succ)
7452 FILE *file;
7453 edge e;
7454 int do_succ;
7456 basic_block side = (do_succ ? e->dest : e->src);
7458 if (side == ENTRY_BLOCK_PTR)
7459 fputs (" ENTRY", file);
7460 else if (side == EXIT_BLOCK_PTR)
7461 fputs (" EXIT", file);
7462 else
7463 fprintf (file, " %d", side->index);
7465 if (e->probability)
7466 fprintf (file, " [%.1f%%] ", e->probability * 100.0 / REG_BR_PROB_BASE);
7468 if (e->count)
7470 fprintf (file, " count:");
7471 fprintf (file, HOST_WIDEST_INT_PRINT_DEC, (HOST_WIDEST_INT) e->count);
7474 if (e->flags)
7476 static const char * const bitnames[] = {
7477 "fallthru", "crit", "ab", "abcall", "eh", "fake"
7479 int comma = 0;
7480 int i, flags = e->flags;
7482 fputc (' ', file);
7483 fputc ('(', file);
7484 for (i = 0; flags; i++)
7485 if (flags & (1 << i))
7487 flags &= ~(1 << i);
7489 if (comma)
7490 fputc (',', file);
7491 if (i < (int) ARRAY_SIZE (bitnames))
7492 fputs (bitnames[i], file);
7493 else
7494 fprintf (file, "%d", i);
7495 comma = 1;
7497 fputc (')', file);
7501 /* Print out one basic block with live information at start and end. */
7503 void
7504 dump_bb (bb, outf)
7505 basic_block bb;
7506 FILE *outf;
7508 rtx insn;
7509 rtx last;
7510 edge e;
7512 fprintf (outf, ";; Basic block %d, loop depth %d, count ",
7513 bb->index, bb->loop_depth);
7514 fprintf (outf, HOST_WIDEST_INT_PRINT_DEC, (HOST_WIDEST_INT) bb->count);
7515 putc ('\n', outf);
7517 fputs (";; Predecessors: ", outf);
7518 for (e = bb->pred; e; e = e->pred_next)
7519 dump_edge_info (outf, e, 0);
7520 putc ('\n', outf);
7522 fputs (";; Registers live at start:", outf);
7523 dump_regset (bb->global_live_at_start, outf);
7524 putc ('\n', outf);
7526 for (insn = bb->head, last = NEXT_INSN (bb->end);
7527 insn != last;
7528 insn = NEXT_INSN (insn))
7529 print_rtl_single (outf, insn);
7531 fputs (";; Registers live at end:", outf);
7532 dump_regset (bb->global_live_at_end, outf);
7533 putc ('\n', outf);
7535 fputs (";; Successors: ", outf);
7536 for (e = bb->succ; e; e = e->succ_next)
7537 dump_edge_info (outf, e, 1);
7538 putc ('\n', outf);
7541 void
7542 debug_bb (bb)
7543 basic_block bb;
7545 dump_bb (bb, stderr);
7548 void
7549 debug_bb_n (n)
7550 int n;
7552 dump_bb (BASIC_BLOCK (n), stderr);
7555 /* Like print_rtl, but also print out live information for the start of each
7556 basic block. */
7558 void
7559 print_rtl_with_bb (outf, rtx_first)
7560 FILE *outf;
7561 rtx rtx_first;
7563 register rtx tmp_rtx;
7565 if (rtx_first == 0)
7566 fprintf (outf, "(nil)\n");
7567 else
7569 int i;
7570 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
7571 int max_uid = get_max_uid ();
7572 basic_block *start = (basic_block *)
7573 xcalloc (max_uid, sizeof (basic_block));
7574 basic_block *end = (basic_block *)
7575 xcalloc (max_uid, sizeof (basic_block));
7576 enum bb_state *in_bb_p = (enum bb_state *)
7577 xcalloc (max_uid, sizeof (enum bb_state));
7579 for (i = n_basic_blocks - 1; i >= 0; i--)
7581 basic_block bb = BASIC_BLOCK (i);
7582 rtx x;
7584 start[INSN_UID (bb->head)] = bb;
7585 end[INSN_UID (bb->end)] = bb;
7586 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
7588 enum bb_state state = IN_MULTIPLE_BB;
7589 if (in_bb_p[INSN_UID (x)] == NOT_IN_BB)
7590 state = IN_ONE_BB;
7591 in_bb_p[INSN_UID (x)] = state;
7593 if (x == bb->end)
7594 break;
7598 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
7600 int did_output;
7601 basic_block bb;
7603 if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
7605 fprintf (outf, ";; Start of basic block %d, registers live:",
7606 bb->index);
7607 dump_regset (bb->global_live_at_start, outf);
7608 putc ('\n', outf);
7611 if (in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB
7612 && GET_CODE (tmp_rtx) != NOTE
7613 && GET_CODE (tmp_rtx) != BARRIER)
7614 fprintf (outf, ";; Insn is not within a basic block\n");
7615 else if (in_bb_p[INSN_UID (tmp_rtx)] == IN_MULTIPLE_BB)
7616 fprintf (outf, ";; Insn is in multiple basic blocks\n");
7618 did_output = print_rtl_single (outf, tmp_rtx);
7620 if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
7622 fprintf (outf, ";; End of basic block %d, registers live:\n",
7623 bb->index);
7624 dump_regset (bb->global_live_at_end, outf);
7625 putc ('\n', outf);
7628 if (did_output)
7629 putc ('\n', outf);
7632 free (start);
7633 free (end);
7634 free (in_bb_p);
7637 if (current_function_epilogue_delay_list != 0)
7639 fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
7640 for (tmp_rtx = current_function_epilogue_delay_list; tmp_rtx != 0;
7641 tmp_rtx = XEXP (tmp_rtx, 1))
7642 print_rtl_single (outf, XEXP (tmp_rtx, 0));
7646 /* Dump the rtl into the current debugging dump file, then abort. */
7648 static void
7649 print_rtl_and_abort_fcn (file, line, function)
7650 const char *file;
7651 int line;
7652 const char *function;
7654 if (rtl_dump_file)
7656 print_rtl_with_bb (rtl_dump_file, get_insns ());
7657 fclose (rtl_dump_file);
7660 fancy_abort (file, line, function);
7663 /* Recompute register set/reference counts immediately prior to register
7664 allocation.
7666 This avoids problems with set/reference counts changing to/from values
7667 which have special meanings to the register allocators.
7669 Additionally, the reference counts are the primary component used by the
7670 register allocators to prioritize pseudos for allocation to hard regs.
7671 More accurate reference counts generally lead to better register allocation.
7673 F is the first insn to be scanned.
7675 LOOP_STEP denotes how much loop_depth should be incremented per
7676 loop nesting level in order to increase the ref count more for
7677 references in a loop.
7679 It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and
7680 possibly other information which is used by the register allocators. */
7682 void
7683 recompute_reg_usage (f, loop_step)
7684 rtx f ATTRIBUTE_UNUSED;
7685 int loop_step ATTRIBUTE_UNUSED;
7687 allocate_reg_life_data ();
7688 update_life_info (NULL, UPDATE_LIFE_LOCAL, PROP_REG_INFO);
7691 /* Optionally removes all the REG_DEAD and REG_UNUSED notes from a set of
7692 blocks. If BLOCKS is NULL, assume the universal set. Returns a count
7693 of the number of registers that died. */
7695 int
7696 count_or_remove_death_notes (blocks, kill)
7697 sbitmap blocks;
7698 int kill;
7700 int i, count = 0;
7702 for (i = n_basic_blocks - 1; i >= 0; --i)
7704 basic_block bb;
7705 rtx insn;
7707 if (blocks && ! TEST_BIT (blocks, i))
7708 continue;
7710 bb = BASIC_BLOCK (i);
7712 for (insn = bb->head;; insn = NEXT_INSN (insn))
7714 if (INSN_P (insn))
7716 rtx *pprev = &REG_NOTES (insn);
7717 rtx link = *pprev;
7719 while (link)
7721 switch (REG_NOTE_KIND (link))
7723 case REG_DEAD:
7724 if (GET_CODE (XEXP (link, 0)) == REG)
7726 rtx reg = XEXP (link, 0);
7727 int n;
7729 if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
7730 n = 1;
7731 else
7732 n = HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg));
7733 count += n;
7735 /* Fall through. */
7737 case REG_UNUSED:
7738 if (kill)
7740 rtx next = XEXP (link, 1);
7741 free_EXPR_LIST_node (link);
7742 *pprev = link = next;
7743 break;
7745 /* Fall through. */
7747 default:
7748 pprev = &XEXP (link, 1);
7749 link = *pprev;
7750 break;
7755 if (insn == bb->end)
7756 break;
7760 return count;
7764 /* Update insns block within BB. */
7766 void
7767 update_bb_for_insn (bb)
7768 basic_block bb;
7770 rtx insn;
7772 if (! basic_block_for_insn)
7773 return;
7775 for (insn = bb->head; ; insn = NEXT_INSN (insn))
7777 set_block_for_insn (insn, bb);
7779 if (insn == bb->end)
7780 break;
7785 /* Record INSN's block as BB. */
7787 void
7788 set_block_for_insn (insn, bb)
7789 rtx insn;
7790 basic_block bb;
7792 size_t uid = INSN_UID (insn);
7793 if (uid >= basic_block_for_insn->num_elements)
7795 int new_size;
7797 /* Add one-eighth the size so we don't keep calling xrealloc. */
7798 new_size = uid + (uid + 7) / 8;
7800 VARRAY_GROW (basic_block_for_insn, new_size);
7802 VARRAY_BB (basic_block_for_insn, uid) = bb;
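/* Illustrative arithmetic for the growth policy above: for
   uid == 100 the varray grows to 100 + (100 + 7) / 8 == 113
   elements, leaving room for about a dozen more insn uids before
   the next xrealloc.  */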
7805 /* When a new insn is inserted into an existing block, the insertion
7806 sometimes emits more than a single insn. This routine sets the
7807 block number for the specified insn, then looks backwards in the
7808 insn chain to see if there are any other uninitialized insns
7809 immediately preceding this one, and sets their block number too. */
7811 void
7812 set_block_for_new_insns (insn, bb)
7813 rtx insn;
7814 basic_block bb;
7816 set_block_for_insn (insn, bb);
7818 /* Scan the previous instructions setting the block number until we find
7819 an instruction that has the block number set, or we find a note
7820 of any kind. */
7821 for (insn = PREV_INSN (insn); insn != NULL_RTX; insn = PREV_INSN (insn))
7823 if (GET_CODE (insn) == NOTE)
7824 break;
7825 if (INSN_UID (insn) >= basic_block_for_insn->num_elements
7826 || BLOCK_FOR_INSN (insn) == 0)
7827 set_block_for_insn (insn, bb);
7828 else
7829 break;
7833 /* Verify CFG consistency. This function checks some CFG invariants and
7834 aborts when something is wrong. Hopefully this function will help
7835 convert many optimization passes to keep the CFG consistent.
7837 Currently it performs the following checks:
7839 - test head/end pointers
7840 - overlapping of basic blocks
7841 - edge list correctness
7842 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
7843 - tails of basic blocks (ensure that the boundary is necessary)
7844 - scans body of the basic block for JUMP_INSN, CODE_LABEL
7845 and NOTE_INSN_BASIC_BLOCK
7846 - check that all insns are in the basic blocks
7847 (except the switch handling code, barriers and notes)
7848 - check that all returns are followed by barriers
7850 In the future it can be extended to check a lot of other stuff as well
7851 (reachability of basic blocks, life information, etc. etc.). */
7853 void
7854 verify_flow_info ()
7856 const int max_uid = get_max_uid ();
7857 const rtx rtx_first = get_insns ();
7858 rtx last_head = get_last_insn ();
7859 basic_block *bb_info;
7860 rtx x;
7861 int i, last_bb_num_seen, num_bb_notes, err = 0;
7863 bb_info = (basic_block *) xcalloc (max_uid, sizeof (basic_block));
7865 for (i = n_basic_blocks - 1; i >= 0; i--)
7867 basic_block bb = BASIC_BLOCK (i);
7868 rtx head = bb->head;
7869 rtx end = bb->end;
7871 /* Verify the end of the basic block is in the INSN chain. */
7872 for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
7873 if (x == end)
7874 break;
7875 if (!x)
7877 error ("End insn %d for block %d not found in the insn stream.",
7878 INSN_UID (end), bb->index);
7879 err = 1;
7882 /* Work backwards from the end to the head of the basic block
7883 to verify the head is in the RTL chain. */
7884 for (; x != NULL_RTX; x = PREV_INSN (x))
7886 /* While walking over the insn chain, verify insns appear
7887 in only one basic block and initialize the BB_INFO array
7888 used by other passes. */
7889 if (bb_info[INSN_UID (x)] != NULL)
7891 error ("Insn %d is in multiple basic blocks (%d and %d)",
7892 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
7893 err = 1;
7895 bb_info[INSN_UID (x)] = bb;
7897 if (x == head)
7898 break;
7900 if (!x)
7902 error ("Head insn %d for block %d not found in the insn stream.",
7903 INSN_UID (head), bb->index);
7904 err = 1;
7907 last_head = x;
7910 /* Now check the basic blocks (boundaries etc.) */
7911 for (i = n_basic_blocks - 1; i >= 0; i--)
7913 basic_block bb = BASIC_BLOCK (i);
7914 /* Check correctness of edge lists. */
7915 edge e;
7917 e = bb->succ;
7918 while (e)
7920 if ((e->flags & EDGE_FALLTHRU)
7921 && e->src != ENTRY_BLOCK_PTR
7922 && e->dest != EXIT_BLOCK_PTR
7923 && (e->src->index + 1 != e->dest->index
7924 || !can_fallthru (e->src, e->dest)))
7926 error ("verify_flow_info: Incorrect fallthru edge %i->%i",
7927 e->src->index, e->dest->index);
7928 err = 1;
7931 if (e->src != bb)
7933 error ("verify_flow_info: Basic block %d succ edge is corrupted",
7934 bb->index);
7935 fprintf (stderr, "Predecessor: ");
7936 dump_edge_info (stderr, e, 0);
7937 fprintf (stderr, "\nSuccessor: ");
7938 dump_edge_info (stderr, e, 1);
7939 fprintf (stderr, "\n");
7940 err = 1;
7942 if (e->dest != EXIT_BLOCK_PTR)
7944 edge e2 = e->dest->pred;
7945 while (e2 && e2 != e)
7946 e2 = e2->pred_next;
7947 if (!e2)
7949 error ("Basic block %i edge lists are corrupted", bb->index);
7950 err = 1;
7953 e = e->succ_next;
7956 e = bb->pred;
7957 while (e)
7959 if (e->dest != bb)
7961 error ("Basic block %d pred edge is corrupted", bb->index);
7962 fputs ("Predecessor: ", stderr);
7963 dump_edge_info (stderr, e, 0);
7964 fputs ("\nSuccessor: ", stderr);
7965 dump_edge_info (stderr, e, 1);
7966 fputc ('\n', stderr);
7967 err = 1;
7969 if (e->src != ENTRY_BLOCK_PTR)
7971 edge e2 = e->src->succ;
7972 while (e2 && e2 != e)
7973 e2 = e2->succ_next;
7974 if (!e2)
7976 error ("Basic block %i edge lists are corrupted", bb->index);
7977 err = 1;
7980 e = e->pred_next;
7983 /* OK, pointers are correct. Now check the header of the basic
7984 block. It ought to contain an optional CODE_LABEL followed
7985 by a NOTE_INSN_BASIC_BLOCK note. */
7986 x = bb->head;
7987 if (GET_CODE (x) == CODE_LABEL)
7989 if (bb->end == x)
7991 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
7992 bb->index);
7993 err = 1;
7995 x = NEXT_INSN (x);
7997 if (!NOTE_INSN_BASIC_BLOCK_P (x) || NOTE_BASIC_BLOCK (x) != bb)
7999 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d\n",
8000 bb->index);
8001 err = 1;
8004 if (bb->end == x)
8006 /* Do checks for empty blocks here. */
8008 else
8010 x = NEXT_INSN (x);
8011 while (x)
8013 if (NOTE_INSN_BASIC_BLOCK_P (x))
8015 error ("NOTE_INSN_BASIC_BLOCK %d in the middle of basic block %d",
8016 INSN_UID (x), bb->index);
8017 err = 1;
8020 if (x == bb->end)
8021 break;
8023 if (GET_CODE (x) == JUMP_INSN
8024 || GET_CODE (x) == CODE_LABEL
8025 || GET_CODE (x) == BARRIER)
8027 error ("In basic block %d:", bb->index);
8028 fatal_insn ("Flow control insn inside a basic block", x);
8031 x = NEXT_INSN (x);
8036 last_bb_num_seen = -1;
8037 num_bb_notes = 0;
8038 x = rtx_first;
8039 while (x)
8041 if (NOTE_INSN_BASIC_BLOCK_P (x))
8043 basic_block bb = NOTE_BASIC_BLOCK (x);
8044 num_bb_notes++;
8045 if (bb->index != last_bb_num_seen + 1)
8046 /* Basic blocks not numbered consecutively. */
8047 abort ();
8049 last_bb_num_seen = bb->index;
8052 if (!bb_info[INSN_UID (x)])
8054 switch (GET_CODE (x))
8056 case BARRIER:
8057 case NOTE:
8058 break;
8060 case CODE_LABEL:
8061 /* An addr_vec is placed outside any basic block. */
8062 if (NEXT_INSN (x)
8063 && GET_CODE (NEXT_INSN (x)) == JUMP_INSN
8064 && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
8065 || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC))
8067 x = NEXT_INSN (x);
8070 /* But in any case, non-deletable labels can appear anywhere. */
8071 break;
8073 default:
8074 fatal_insn ("Insn outside basic block", x);
8078 if (INSN_P (x)
8079 && GET_CODE (x) == JUMP_INSN
8080 && returnjump_p (x) && ! condjump_p (x)
8081 && ! (NEXT_INSN (x) && GET_CODE (NEXT_INSN (x)) == BARRIER))
8082 fatal_insn ("Return not followed by barrier", x);
8084 x = NEXT_INSN (x);
8087 if (num_bb_notes != n_basic_blocks)
8088 internal_error
8089 ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
8090 num_bb_notes, n_basic_blocks);
8092 if (err)
8093 abort ();
8095 /* Clean up. */
8096 free (bb_info);
8099 /* Functions to access an edge list with a vector representation.
8100 Enough data is kept such that given an index number, the
8101 pred and succ that edge represents can be determined, or
8102 given a pred and a succ, its index number can be returned.
8103 This allows algorithms which consume a lot of memory to
8104 represent the normally full matrix of edge (pred,succ) with a
8105 single indexed vector, edge (EDGE_INDEX (pred, succ)), with no
8106 wasted space in the client code due to sparse flow graphs. */
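/* A minimal usage sketch (illustrative only): a pass that wants a
   dense numbering of edges, e.g. to key a per-edge bitmap, might do

       struct edge_list *elist = create_edge_list ();
       sbitmap marked = sbitmap_alloc (NUM_EDGES (elist));
       int ix = EDGE_INDEX (elist, pred_bb, succ_bb);

       if (ix != EDGE_INDEX_NO_EDGE)
         SET_BIT (marked, ix);
       ...
       free_edge_list (elist);

   where PRED_BB and SUCC_BB stand for whatever pair of blocks the
   pass is interested in.  */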
8108 /* This function initializes the edge list. Basically the entire
8109 flowgraph is processed, and all edges are assigned a number,
8110 and the data structure is filled in. */
8112 struct edge_list *
8113 create_edge_list ()
8115 struct edge_list *elist;
8116 edge e;
8117 int num_edges;
8118 int x;
8119 int block_count;
8121 block_count = n_basic_blocks + 2; /* Include the entry and exit blocks. */
8123 num_edges = 0;
8125 /* Determine the number of edges in the flow graph by counting successor
8126 edges on each basic block. */
8127 for (x = 0; x < n_basic_blocks; x++)
8129 basic_block bb = BASIC_BLOCK (x);
8131 for (e = bb->succ; e; e = e->succ_next)
8132 num_edges++;
8134 /* Don't forget successors of the entry block. */
8135 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
8136 num_edges++;
8138 elist = (struct edge_list *) xmalloc (sizeof (struct edge_list));
8139 elist->num_blocks = block_count;
8140 elist->num_edges = num_edges;
8141 elist->index_to_edge = (edge *) xmalloc (sizeof (edge) * num_edges);
8143 num_edges = 0;
8145 /* Follow successors of the entry block, and register these edges. */
8146 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
8148 elist->index_to_edge[num_edges] = e;
8149 num_edges++;
8152 for (x = 0; x < n_basic_blocks; x++)
8154 basic_block bb = BASIC_BLOCK (x);
8156 /* Follow all successors of blocks, and register these edges. */
8157 for (e = bb->succ; e; e = e->succ_next)
8159 elist->index_to_edge[num_edges] = e;
8160 num_edges++;
8163 return elist;
8166 /* This function frees the memory associated with an edge list. */
8168 void
8169 free_edge_list (elist)
8170 struct edge_list *elist;
8172 if (elist)
8174 free (elist->index_to_edge);
8175 free (elist);
8179 /* This function provides debug output showing an edge list. */
8181 void
8182 print_edge_list (f, elist)
8183 FILE *f;
8184 struct edge_list *elist;
8186 int x;
8187 fprintf (f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
8188 elist->num_blocks - 2, elist->num_edges);
8190 for (x = 0; x < elist->num_edges; x++)
8192 fprintf (f, " %-4d - edge(", x);
8193 if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR)
8194 fprintf (f, "entry,");
8195 else
8196 fprintf (f, "%d,", INDEX_EDGE_PRED_BB (elist, x)->index);
8198 if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR)
8199 fprintf (f, "exit)\n");
8200 else
8201 fprintf (f, "%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
8205 /* This function provides an internal consistency check of an edge list,
8206 verifying that all edges are present, and that there are no
8207 extra edges. */
8209 void
8210 verify_edge_list (f, elist)
8211 FILE *f;
8212 struct edge_list *elist;
8214 int x, pred, succ, index;
8215 edge e;
8217 for (x = 0; x < n_basic_blocks; x++)
8219 basic_block bb = BASIC_BLOCK (x);
8221 for (e = bb->succ; e; e = e->succ_next)
8223 pred = e->src->index;
8224 succ = e->dest->index;
8225 index = EDGE_INDEX (elist, e->src, e->dest);
8226 if (index == EDGE_INDEX_NO_EDGE)
8228 fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
8229 continue;
8231 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
8232 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
8233 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
8234 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
8235 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
8236 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
8239 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
8241 pred = e->src->index;
8242 succ = e->dest->index;
8243 index = EDGE_INDEX (elist, e->src, e->dest);
8244 if (index == EDGE_INDEX_NO_EDGE)
8246 fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
8247 continue;
8249 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
8250 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
8251 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
8252 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
8253 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
8254 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
8256 /* We've verified that all the edges are in the list, now let's make sure
8257 there are no spurious edges in the list. */
8259 for (pred = 0; pred < n_basic_blocks; pred++)
8260 for (succ = 0; succ < n_basic_blocks; succ++)
8262 basic_block p = BASIC_BLOCK (pred);
8263 basic_block s = BASIC_BLOCK (succ);
8265 int found_edge = 0;
8267 for (e = p->succ; e; e = e->succ_next)
8268 if (e->dest == s)
8270 found_edge = 1;
8271 break;
8273 for (e = s->pred; e; e = e->pred_next)
8274 if (e->src == p)
8276 found_edge = 1;
8277 break;
8279 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
8280 == EDGE_INDEX_NO_EDGE && found_edge != 0)
8281 fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
8282 pred, succ);
8283 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
8284 != EDGE_INDEX_NO_EDGE && found_edge == 0)
8285 fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
8286 pred, succ, EDGE_INDEX (elist, BASIC_BLOCK (pred),
8287 BASIC_BLOCK (succ)));
8289 for (succ = 0; succ < n_basic_blocks; succ++)
8291 basic_block p = ENTRY_BLOCK_PTR;
8292 basic_block s = BASIC_BLOCK (succ);
8294 int found_edge = 0;
8296 for (e = p->succ; e; e = e->succ_next)
8297 if (e->dest == s)
8299 found_edge = 1;
8300 break;
8302 for (e = s->pred; e; e = e->pred_next)
8303 if (e->src == p)
8305 found_edge = 1;
8306 break;
8308 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
8309 == EDGE_INDEX_NO_EDGE && found_edge != 0)
8310 fprintf (f, "*** Edge (entry, %d) appears to not have an index\n",
8311 succ);
8312 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
8313 != EDGE_INDEX_NO_EDGE && found_edge == 0)
8314 fprintf (f, "*** Edge (entry, %d) has index %d, but no edge exists\n",
8315 succ, EDGE_INDEX (elist, ENTRY_BLOCK_PTR,
8316 BASIC_BLOCK (succ)));
8318 for (pred = 0; pred < n_basic_blocks; pred++)
8320 basic_block p = BASIC_BLOCK (pred);
8321 basic_block s = EXIT_BLOCK_PTR;
8323 int found_edge = 0;
8325 for (e = p->succ; e; e = e->succ_next)
8326 if (e->dest == s)
8328 found_edge = 1;
8329 break;
8331 for (e = s->pred; e; e = e->pred_next)
8332 if (e->src == p)
8334 found_edge = 1;
8335 break;
8337 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
8338 == EDGE_INDEX_NO_EDGE && found_edge != 0)
8339 fprintf (f, "*** Edge (%d, exit) appears to not have an index\n",
8340 pred);
8341 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
8342 != EDGE_INDEX_NO_EDGE && found_edge == 0)
8343 fprintf (f, "*** Edge (%d, exit) has index %d, but no edge exists\n",
8344 pred, EDGE_INDEX (elist, BASIC_BLOCK (pred),
8345 EXIT_BLOCK_PTR));
8349 /* This routine will determine what, if any, edge there is between
8350 a specified predecessor and successor. */
8353 find_edge_index (edge_list, pred, succ)
8354 struct edge_list *edge_list;
8355 basic_block pred, succ;
8357 int x;
8358 for (x = 0; x < NUM_EDGES (edge_list); x++)
8360 if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
8361 && INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
8362 return x;
8364 return (EDGE_INDEX_NO_EDGE);
8367 /* This function will remove an edge from the flow graph. */
8369 void
8370 remove_edge (e)
8371 edge e;
8373 edge last_pred = NULL;
8374 edge last_succ = NULL;
8375 edge tmp;
8376 basic_block src, dest;
8377 src = e->src;
8378 dest = e->dest;
8379 for (tmp = src->succ; tmp && tmp != e; tmp = tmp->succ_next)
8380 last_succ = tmp;
8382 if (!tmp)
8383 abort ();
8384 if (last_succ)
8385 last_succ->succ_next = e->succ_next;
8386 else
8387 src->succ = e->succ_next;
8389 for (tmp = dest->pred; tmp && tmp != e; tmp = tmp->pred_next)
8390 last_pred = tmp;
8392 if (!tmp)
8393 abort ();
8394 if (last_pred)
8395 last_pred->pred_next = e->pred_next;
8396 else
8397 dest->pred = e->pred_next;
8399 n_edges--;
8400 free (e);
8403 /* This routine will remove any fake successor edges for a basic block.
8404 When the edge is removed, it is also removed from whatever predecessor
8405 list it is in. */
8407 static void
8408 remove_fake_successors (bb)
8409 basic_block bb;
8411 edge e;
8412 for (e = bb->succ; e;)
8414 edge tmp = e;
8415 e = e->succ_next;
8416 if ((tmp->flags & EDGE_FAKE) == EDGE_FAKE)
8417 remove_edge (tmp);
8421 /* This routine will remove all fake edges from the flow graph. If
8422 we remove all fake successors, it will automatically remove all
8423 fake predecessors. */
8425 void
8426 remove_fake_edges ()
8428 int x;
8430 for (x = 0; x < n_basic_blocks; x++)
8431 remove_fake_successors (BASIC_BLOCK (x));
8433 /* We've handled all successors except the entry block's. */
8434 remove_fake_successors (ENTRY_BLOCK_PTR);
8437 /* This function will add a fake edge from any block which has no
8438 successors to the exit block. Some data flow equations require these
8439 edges to exist. */
8441 void
8442 add_noreturn_fake_exit_edges ()
8444 int x;
8446 for (x = 0; x < n_basic_blocks; x++)
8447 if (BASIC_BLOCK (x)->succ == NULL)
8448 make_edge (NULL, BASIC_BLOCK (x), EXIT_BLOCK_PTR, EDGE_FAKE);
8451 /* This function adds a fake edge from each infinite loop to the
8452 exit block. Some optimizations require a path from each node to
8453 the exit node.
8455 See also Morgan, Figure 3.10, pp. 82-83.
8457 The current implementation is ugly, not attempting to minimize the
8458 number of inserted fake edges. To reduce the number of fake edges
8459 to insert, add fake edges from _innermost_ loops containing only
8460 nodes not reachable from the exit block. */
8462 void
8463 connect_infinite_loops_to_exit ()
8465 basic_block unvisited_block;
8467 /* Perform depth-first search in the reverse graph to find nodes
8468 reachable from the exit block. */
8469 struct depth_first_search_dsS dfs_ds;
8471 flow_dfs_compute_reverse_init (&dfs_ds);
8472 flow_dfs_compute_reverse_add_bb (&dfs_ds, EXIT_BLOCK_PTR);
8474 /* Repeatedly add fake edges, updating the unreachable nodes. */
8475 while (1)
8477 unvisited_block = flow_dfs_compute_reverse_execute (&dfs_ds);
8478 if (!unvisited_block)
8479 break;
8480 make_edge (NULL, unvisited_block, EXIT_BLOCK_PTR, EDGE_FAKE);
8481 flow_dfs_compute_reverse_add_bb (&dfs_ds, unvisited_block);
8484 flow_dfs_compute_reverse_finish (&dfs_ds);
8486 return;
8489 /* Redirect an edge's successor from one block to another. */
8491 void
8492 redirect_edge_succ (e, new_succ)
8493 edge e;
8494 basic_block new_succ;
8496 edge *pe;
8498 /* Disconnect the edge from the old successor block. */
8499 for (pe = &e->dest->pred; *pe != e; pe = &(*pe)->pred_next)
8500 continue;
8501 *pe = (*pe)->pred_next;
8503 /* Reconnect the edge to the new successor block. */
8504 e->pred_next = new_succ->pred;
8505 new_succ->pred = e;
8506 e->dest = new_succ;
8509 /* Redirect an edge's predecessor from one block to another. */
8511 void
8512 redirect_edge_pred (e, new_pred)
8513 edge e;
8514 basic_block new_pred;
8516 edge *pe;
8518 /* Disconnect the edge from the old predecessor block. */
8519 for (pe = &e->src->succ; *pe != e; pe = &(*pe)->succ_next)
8520 continue;
8521 *pe = (*pe)->succ_next;
8523 /* Reconnect the edge to the new predecessor block. */
8524 e->succ_next = new_pred->succ;
8525 new_pred->succ = e;
8526 e->src = new_pred;
8529 /* Dump the list of basic blocks in the bitmap NODES. */
8531 static void
8532 flow_nodes_print (str, nodes, file)
8533 const char *str;
8534 const sbitmap nodes;
8535 FILE *file;
8537 int node;
8539 if (! nodes)
8540 return;
8542 fprintf (file, "%s { ", str);
8543 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {fprintf (file, "%d ", node);});
8544 fputs ("}\n", file);
8548 /* Dump the list of edges in the array EDGE_LIST. */
8550 static void
8551 flow_edge_list_print (str, edge_list, num_edges, file)
8552 const char *str;
8553 const edge *edge_list;
8554 int num_edges;
8555 FILE *file;
8557 int i;
8559 if (! edge_list)
8560 return;
8562 fprintf (file, "%s { ", str);
8563 for (i = 0; i < num_edges; i++)
8564 fprintf (file, "%d->%d ", edge_list[i]->src->index,
8565 edge_list[i]->dest->index);
8566 fputs ("}\n", file);
8570 /* Dump loop related CFG information. */
8572 static void
8573 flow_loops_cfg_dump (loops, file)
8574 const struct loops *loops;
8575 FILE *file;
8577 int i;
8579 if (! loops->num || ! file || ! loops->cfg.dom)
8580 return;
8582 for (i = 0; i < n_basic_blocks; i++)
8584 edge succ;
8586 fprintf (file, ";; %d succs { ", i);
8587 for (succ = BASIC_BLOCK (i)->succ; succ; succ = succ->succ_next)
8588 fprintf (file, "%d ", succ->dest->index);
8589 flow_nodes_print ("} dom", loops->cfg.dom[i], file);
8592 /* Dump the DFS node order. */
8593 if (loops->cfg.dfs_order)
8595 fputs (";; DFS order: ", file);
8596 for (i = 0; i < n_basic_blocks; i++)
8597 fprintf (file, "%d ", loops->cfg.dfs_order[i]);
8598 fputs ("\n", file);
8600 /* Dump the reverse completion node order. */
8601 if (loops->cfg.rc_order)
8603 fputs (";; RC order: ", file);
8604 for (i = 0; i < n_basic_blocks; i++)
8605 fprintf (file, "%d ", loops->cfg.rc_order[i]);
8606 fputs ("\n", file);
8610 /* Return non-zero if the nodes of LOOP are a subset of OUTER. */
8612 static int
8613 flow_loop_nested_p (outer, loop)
8614 struct loop *outer;
8615 struct loop *loop;
8617 return sbitmap_a_subset_b_p (loop->nodes, outer->nodes);
8621 /* Dump the loop information specified by LOOP to the stream FILE
8622 using the auxiliary dump callback function LOOP_DUMP_AUX if non-null. */
8623 void
8624 flow_loop_dump (loop, file, loop_dump_aux, verbose)
8625 const struct loop *loop;
8626 FILE *file;
8627 void (*loop_dump_aux) PARAMS((const struct loop *, FILE *, int));
8628 int verbose;
8630 if (! loop || ! loop->header)
8631 return;
8633 fprintf (file, ";;\n;; Loop %d (%d to %d):%s%s\n",
8634 loop->num, INSN_UID (loop->first->head),
8635 INSN_UID (loop->last->end),
8636 loop->shared ? " shared" : "",
8637 loop->invalid ? " invalid" : "");
8638 fprintf (file, ";; header %d, latch %d, pre-header %d, first %d, last %d\n",
8639 loop->header->index, loop->latch->index,
8640 loop->pre_header ? loop->pre_header->index : -1,
8641 loop->first->index, loop->last->index);
8642 fprintf (file, ";; depth %d, level %d, outer %ld\n",
8643 loop->depth, loop->level,
8644 (long) (loop->outer ? loop->outer->num : -1));
8646 if (loop->pre_header_edges)
8647 flow_edge_list_print (";; pre-header edges", loop->pre_header_edges,
8648 loop->num_pre_header_edges, file);
8649 flow_edge_list_print (";; entry edges", loop->entry_edges,
8650 loop->num_entries, file);
8651 fprintf (file, ";; %d", loop->num_nodes);
8652 flow_nodes_print (" nodes", loop->nodes, file);
8653 flow_edge_list_print (";; exit edges", loop->exit_edges,
8654 loop->num_exits, file);
8655 if (loop->exits_doms)
8656 flow_nodes_print (";; exit doms", loop->exits_doms, file);
8657 if (loop_dump_aux)
8658 loop_dump_aux (loop, file, verbose);
8662 /* Dump the loop information specified by LOOPS to the stream FILE,
8663 using the auxiliary dump callback function LOOP_DUMP_AUX if non-null. */
8664 void
8665 flow_loops_dump (loops, file, loop_dump_aux, verbose)
8666 const struct loops *loops;
8667 FILE *file;
8668 void (*loop_dump_aux) PARAMS((const struct loop *, FILE *, int));
8669 int verbose;
8671 int i;
8672 int num_loops;
8674 num_loops = loops->num;
8675 if (! num_loops || ! file)
8676 return;
8678 fprintf (file, ";; %d loops found, %d levels\n",
8679 num_loops, loops->levels);
8681 for (i = 0; i < num_loops; i++)
8683 struct loop *loop = &loops->array[i];
8685 flow_loop_dump (loop, file, loop_dump_aux, verbose);
8687 if (loop->shared)
8689 int j;
8691 for (j = 0; j < i; j++)
8693 struct loop *oloop = &loops->array[j];
8695 if (loop->header == oloop->header)
8697 int disjoint;
8698 int smaller;
8700 smaller = loop->num_nodes < oloop->num_nodes;
8702 /* If the union of LOOP and OLOOP is different than
8703 the larger of LOOP and OLOOP then LOOP and OLOOP
8704 must be disjoint. */
8705 disjoint = ! flow_loop_nested_p (smaller ? loop : oloop,
8706 smaller ? oloop : loop);
8707 fprintf (file,
8708 ";; loop header %d shared by loops %d, %d %s\n",
8709 loop->header->index, i, j,
8710 disjoint ? "disjoint" : "nested");
8716 if (verbose)
8717 flow_loops_cfg_dump (loops, file);
8721 /* Free all the memory allocated for LOOPS. */
8723 void
8724 flow_loops_free (loops)
8725 struct loops *loops;
8727 if (loops->array)
8729 int i;
8731 if (! loops->num)
8732 abort ();
8734 /* Free the loop descriptors. */
8735 for (i = 0; i < loops->num; i++)
8737 struct loop *loop = &loops->array[i];
8739 if (loop->pre_header_edges)
8740 free (loop->pre_header_edges);
8741 if (loop->nodes)
8742 sbitmap_free (loop->nodes);
8743 if (loop->entry_edges)
8744 free (loop->entry_edges);
8745 if (loop->exit_edges)
8746 free (loop->exit_edges);
8747 if (loop->exits_doms)
8748 sbitmap_free (loop->exits_doms);
8750 free (loops->array);
8751 loops->array = NULL;
8753 if (loops->cfg.dom)
8754 sbitmap_vector_free (loops->cfg.dom);
8755 if (loops->cfg.dfs_order)
8756 free (loops->cfg.dfs_order);
8758 if (loops->shared_headers)
8759 sbitmap_free (loops->shared_headers);
8764 /* Find the entry edges into the loop with header HEADER and nodes
8765 NODES and store in ENTRY_EDGES array. Return the number of entry
8766 edges into the loop. */
8768 static int
8769 flow_loop_entry_edges_find (header, nodes, entry_edges)
8770 basic_block header;
8771 const sbitmap nodes;
8772 edge **entry_edges;
8774 edge e;
8775 int num_entries;
8777 *entry_edges = NULL;
8779 num_entries = 0;
8780 for (e = header->pred; e; e = e->pred_next)
8782 basic_block src = e->src;
8784 if (src == ENTRY_BLOCK_PTR || ! TEST_BIT (nodes, src->index))
8785 num_entries++;
8788 if (! num_entries)
8789 abort ();
8791 *entry_edges = (edge *) xmalloc (num_entries * sizeof (edge));
8793 num_entries = 0;
8794 for (e = header->pred; e; e = e->pred_next)
8796 basic_block src = e->src;
8798 if (src == ENTRY_BLOCK_PTR || ! TEST_BIT (nodes, src->index))
8799 (*entry_edges)[num_entries++] = e;
8802 return num_entries;
8806 /* Find the exit edges from the loop using the bitmap of loop nodes
8807 NODES and store in EXIT_EDGES array. Return the number of
8808 exit edges from the loop. */
8810 static int
8811 flow_loop_exit_edges_find (nodes, exit_edges)
8812 const sbitmap nodes;
8813 edge **exit_edges;
8815 edge e;
8816 int node;
8817 int num_exits;
8819 *exit_edges = NULL;
8821 /* Check all nodes within the loop to see if there are any
8822 successors not in the loop. Note that a node may have multiple
8823 exiting edges: a block ending in a conditional jump has both a
8824 jump edge and a fallthru edge, either of which may leave the loop. */
8825 num_exits = 0;
8826 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
8827 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
8829 basic_block dest = e->dest;
8831 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
8832 num_exits++;
8836 if (! num_exits)
8837 return 0;
8839 *exit_edges = (edge *) xmalloc (num_exits * sizeof (edge));
8841 /* Store all exiting edges into an array. */
8842 num_exits = 0;
8843 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
8844 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
8846 basic_block dest = e->dest;
8848 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
8849 (*exit_edges)[num_exits++] = e;
8853 return num_exits;
8857 /* Find the nodes contained within the loop with header HEADER and
8858 latch LATCH and store in NODES. Return the number of nodes within
8859 the loop. */
8861 static int
8862 flow_loop_nodes_find (header, latch, nodes)
8863 basic_block header;
8864 basic_block latch;
8865 sbitmap nodes;
8867 basic_block *stack;
8868 int sp;
8869 int num_nodes = 0;
8871 stack = (basic_block *) xmalloc (n_basic_blocks * sizeof (basic_block));
8872 sp = 0;
8874 /* Start with only the loop header in the set of loop nodes. */
8875 sbitmap_zero (nodes);
8876 SET_BIT (nodes, header->index);
8877 num_nodes++;
8878 header->loop_depth++;
8880 /* Push the loop latch on to the stack. */
8881 if (! TEST_BIT (nodes, latch->index))
8883 SET_BIT (nodes, latch->index);
8884 latch->loop_depth++;
8885 num_nodes++;
8886 stack[sp++] = latch;
8889 while (sp)
8891 basic_block node;
8892 edge e;
8894 node = stack[--sp];
8895 for (e = node->pred; e; e = e->pred_next)
8897 basic_block ancestor = e->src;
8899 /* If an ancestor is not yet marked as part of the loop, add it to
8900 the set of loop nodes and push it onto the stack. */
8901 if (ancestor != ENTRY_BLOCK_PTR
8902 && ! TEST_BIT (nodes, ancestor->index))
8904 SET_BIT (nodes, ancestor->index);
8905 ancestor->loop_depth++;
8906 num_nodes++;
8907 stack[sp++] = ancestor;
8911 free (stack);
8912 return num_nodes;
8915 /* Compute the depth first search order and store it in the array
8916 DFS_ORDER if that is non-zero. If RC_ORDER is non-zero, store the
8917 reverse completion number of each node in it. Returns the number
8918 of nodes visited. A depth first search tries to get as far away
8919 from the starting point as quickly as possible.
8923 flow_depth_first_order_compute (dfs_order, rc_order)
8924 int *dfs_order;
8925 int *rc_order;
8927 edge *stack;
8928 int sp;
8929 int dfsnum = 0;
8930 int rcnum = n_basic_blocks - 1;
8931 sbitmap visited;
8933 /* Allocate stack for back-tracking up CFG. */
8934 stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
8935 sp = 0;
8937 /* Allocate bitmap to track nodes that have been visited. */
8938 visited = sbitmap_alloc (n_basic_blocks);
8940 /* None of the nodes in the CFG have been visited yet. */
8941 sbitmap_zero (visited);
8943 /* Push the first edge on to the stack. */
8944 stack[sp++] = ENTRY_BLOCK_PTR->succ;
8946 while (sp)
8948 edge e;
8949 basic_block src;
8950 basic_block dest;
8952 /* Look at the edge on the top of the stack. */
8953 e = stack[sp - 1];
8954 src = e->src;
8955 dest = e->dest;
8957 /* Check if the edge destination has been visited yet. */
8958 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
8960 /* Mark that we have visited the destination. */
8961 SET_BIT (visited, dest->index);
8963 if (dfs_order)
8964 dfs_order[dfsnum++] = dest->index;
8966 if (dest->succ)
8968 /* Since the DEST node has been visited for the first
8969 time, check its successors. */
8970 stack[sp++] = dest->succ;
8972 else
8974 /* There are no successors for the DEST node so assign
8975 its reverse completion number. */
8976 if (rc_order)
8977 rc_order[rcnum--] = dest->index;
8980 else
8982 if (! e->succ_next && src != ENTRY_BLOCK_PTR)
8984 /* There are no more successors for the SRC node
8985 so assign its reverse completion number. */
8986 if (rc_order)
8987 rc_order[rcnum--] = src->index;
8990 if (e->succ_next)
8991 stack[sp - 1] = e->succ_next;
8992 else
8993 sp--;
8997 free (stack);
8998 sbitmap_free (visited);
9000 /* The number of nodes visited should not be greater than
9001 n_basic_blocks. */
9002 if (dfsnum > n_basic_blocks)
9003 abort ();
9005 /* If fewer nodes were visited, some blocks in the CFG are unreachable. */
9006 if (dfsnum < n_basic_blocks)
9007 abort ();
9008 return dfsnum;
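/* For example (illustrative), on an if-then-else diamond (block 0
   branching to blocks 1 and 2, which both transfer to block 3),
   block 3 completes first and block 0 last, so RC_ORDER lists 0
   first and 3 last. For an acyclic CFG this reverse completion
   order is a topological order.  */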
9011 /* Perform a depth first search on the _reverse_ graph, i.e.
9012 following predecessor rather than successor edges, marking the
9013 nodes visited along the way.
9015 The computation is split into four pieces:
9017 flow_dfs_compute_reverse_init () creates the necessary data
9018 structures.
9020 flow_dfs_compute_reverse_add_bb () adds a basic block to the data
9021 structures. The block will start the search.
9023 flow_dfs_compute_reverse_execute () continues (or starts) the
9024 search using the block on the top of the stack, stopping when the
9025 stack is empty.
9027 flow_dfs_compute_reverse_finish () destroys the necessary data
9028 structures.
9030 Thus, the user will probably call ..._init(), call ..._add_bb() to
9031 add a beginning basic block to the stack, call ..._execute(),
9032 possibly add another bb to the stack and again call ..._execute(),
9033 ..., and finally call _finish(). */
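/* For instance, connect_infinite_loops_to_exit above follows exactly
   this pattern:

       struct depth_first_search_dsS dfs_ds;
       basic_block bb;

       flow_dfs_compute_reverse_init (&dfs_ds);
       flow_dfs_compute_reverse_add_bb (&dfs_ds, EXIT_BLOCK_PTR);
       while ((bb = flow_dfs_compute_reverse_execute (&dfs_ds)))
         flow_dfs_compute_reverse_add_bb (&dfs_ds, bb);
       flow_dfs_compute_reverse_finish (&dfs_ds);

   (In the real caller a fake edge is also made from each block
   returned by ..._execute () to the exit block.)  */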
9035 /* Initialize the data structures used for depth-first search on the
9036 reverse graph. DATA is the current depth-first search context.
9037 The stack starts out empty, so the caller must add at least one
9038 starting block with flow_dfs_compute_reverse_add_bb () before
9039 running flow_dfs_compute_reverse_execute (). */
9041 static void
9042 flow_dfs_compute_reverse_init (data)
9043 depth_first_search_ds data;
9045 /* Allocate stack for back-tracking up CFG. */
9046 data->stack =
9047 (basic_block *) xmalloc ((n_basic_blocks - (INVALID_BLOCK + 1))
9048 * sizeof (basic_block));
9049 data->sp = 0;
9051 /* Allocate bitmap to track nodes that have been visited. */
9052 data->visited_blocks = sbitmap_alloc (n_basic_blocks - (INVALID_BLOCK + 1));
9054 /* None of the nodes in the CFG have been visited yet. */
9055 sbitmap_zero (data->visited_blocks);
9057 return;
9060 /* Add the specified basic block to the top of the dfs data
9061 structures. When the search continues, it will start at the
9062 block. */
9064 static void
9065 flow_dfs_compute_reverse_add_bb (data, bb)
9066 depth_first_search_ds data;
9067 basic_block bb;
9069 data->stack[data->sp++] = bb;
9070 return;
9073 /* Continue the depth-first search through the reverse graph starting
9074 with the block at the stack's top and ending when the stack is
9075 empty. Visited nodes are marked. Returns an unvisited basic
9076 block, or NULL if there is none available. */
9078 static basic_block
9079 flow_dfs_compute_reverse_execute (data)
9080 depth_first_search_ds data;
9082 basic_block bb;
9083 edge e;
9084 int i;
9086 while (data->sp > 0)
9088 bb = data->stack[--data->sp];
9090 /* Mark that we have visited this node. */
9091 if (!TEST_BIT (data->visited_blocks, bb->index - (INVALID_BLOCK + 1)))
9093 SET_BIT (data->visited_blocks, bb->index - (INVALID_BLOCK + 1));
9095 /* Perform depth-first search on adjacent vertices. */
9096 for (e = bb->pred; e; e = e->pred_next)
9097 flow_dfs_compute_reverse_add_bb (data, e->src);
9101 /* Determine if there are unvisited basic blocks. */
9102 for (i = n_basic_blocks - (INVALID_BLOCK + 1); --i >= 0;)
9103 if (!TEST_BIT (data->visited_blocks, i))
9104 return BASIC_BLOCK (i + (INVALID_BLOCK + 1));
9105 return NULL;
9108 /* Destroy the data structures needed for depth-first search on the
9109 reverse graph. */
9111 static void
9112 flow_dfs_compute_reverse_finish (data)
9113 depth_first_search_ds data;
9115 free (data->stack);
9116 sbitmap_free (data->visited_blocks);
9117 return;
9121 /* Find the root node of the loop pre-header extended basic block and
9122 the edges along the trace from the root node to the loop header. */
9124 static void
9125 flow_loop_pre_header_scan (loop)
9126 struct loop *loop;
9128 int num = 0;
9129 basic_block ebb;
9131 loop->num_pre_header_edges = 0;
9133 if (loop->num_entries != 1)
9134 return;
9136 ebb = loop->entry_edges[0]->src;
9138 if (ebb != ENTRY_BLOCK_PTR)
9140 edge e;
9142 /* Count number of edges along trace from loop header to
9143 root of pre-header extended basic block. Usually this is
9144 only one or two edges. */
9145 num++;
9146 while (ebb->pred->src != ENTRY_BLOCK_PTR && ! ebb->pred->pred_next)
9148 ebb = ebb->pred->src;
9149 num++;
9152 loop->pre_header_edges = (edge *) xmalloc (num * sizeof (edge));
9153 loop->num_pre_header_edges = num;
9155 /* Store the edges in the order that they are followed. The source
9156 of the first edge is the root node of the pre-header extended
9157 basic block and the destination of the last edge is
9158 the loop header. */
9159 for (e = loop->entry_edges[0]; num; e = e->src->pred)
9161 loop->pre_header_edges[--num] = e;
9167 /* Return the block for the pre-header of the loop with header
9168 HEADER where DOM specifies the dominator information. Return NULL if
9169 there is no pre-header. */
9171 static basic_block
9172 flow_loop_pre_header_find (header, dom)
9173 basic_block header;
9174 const sbitmap *dom;
9176 basic_block pre_header;
9177 edge e;
9179 /* If block P is a predecessor of the header and is the only
9180 predecessor that the header does not dominate, then it is the pre-header. */
9181 pre_header = NULL;
9182 for (e = header->pred; e; e = e->pred_next)
9184 basic_block node = e->src;
9186 if (node != ENTRY_BLOCK_PTR
9187 && ! TEST_BIT (dom[node->index], header->index))
9189 if (pre_header == NULL)
9190 pre_header = node;
9191 else
9193 /* There are multiple edges into the header from outside
9194 the loop so there is no pre-header block. */
9195 pre_header = NULL;
9196 break;
9200 return pre_header;
9203 /* Add LOOP to the loop hierarchy tree where PREVLOOP was the loop
9204 previously added. The insertion algorithm assumes that the loops
9205 are added in the order found by a depth first search of the CFG. */
9207 static void
9208 flow_loop_tree_node_add (prevloop, loop)
9209 struct loop *prevloop;
9210 struct loop *loop;
9213 if (flow_loop_nested_p (prevloop, loop))
9215 prevloop->inner = loop;
9216 loop->outer = prevloop;
9217 return;
9220 while (prevloop->outer)
9222 if (flow_loop_nested_p (prevloop->outer, loop))
9224 prevloop->next = loop;
9225 loop->outer = prevloop->outer;
9226 return;
9228 prevloop = prevloop->outer;
9231 prevloop->next = loop;
9232 loop->outer = NULL;
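/* Worked example (illustrative): if the DFS discovers loop A
   (outermost), then loop B nested inside A, then loop C inside A but
   disjoint from B, the calls

       flow_loop_tree_node_add (A, B);
       flow_loop_tree_node_add (B, C);

   leave A->inner == B and B->next == C, with both B->outer and
   C->outer pointing to A.  */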
9235 /* Build the loop hierarchy tree for LOOPS. */
9237 static void
9238 flow_loops_tree_build (loops)
9239 struct loops *loops;
9241 int i;
9242 int num_loops;
9244 num_loops = loops->num;
9245 if (! num_loops)
9246 return;
9248 /* Root the loop hierarchy tree with the first loop found.
9249 Since we used a depth first search this should be the
9250 outermost loop. */
9251 loops->tree_root = &loops->array[0];
9252 loops->tree_root->outer = loops->tree_root->inner = loops->tree_root->next = NULL;
9254 /* Add the remaining loops to the tree. */
9255 for (i = 1; i < num_loops; i++)
9256 flow_loop_tree_node_add (&loops->array[i - 1], &loops->array[i]);
9259 /* Helper function to compute loop nesting depth and enclosed loop level
9260 for the natural loop specified by LOOP at the loop depth DEPTH.
9261 Returns the loop level. */
9263 static int
9264 flow_loop_level_compute (loop, depth)
9265 struct loop *loop;
9266 int depth;
9268 struct loop *inner;
9269 int level = 1;
9271 if (! loop)
9272 return 0;
9274 /* Traverse loop tree assigning depth and computing level as the
9275 maximum level of all the inner loops of this loop. The loop
9276 level is equivalent to the height of the loop in the loop tree
9277 and corresponds to the number of enclosed loop levels (including
9278 itself). */
9279 for (inner = loop->inner; inner; inner = inner->next)
9281 int ilevel;
9283 ilevel = flow_loop_level_compute (inner, depth + 1) + 1;
9285 if (ilevel > level)
9286 level = ilevel;
9288 loop->level = level;
9289 loop->depth = depth;
9290 return level;
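/* Worked example (illustrative): for a loop A containing loop B,
   which in turn contains loop C, the traversal assigns

       A: depth 1, level 3
       B: depth 2, level 2
       C: depth 3, level 1

   i.e. depth grows downwards from the tree root while level counts
   the enclosed loop levels, including the loop itself.  */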
9293 /* Compute the loop nesting depth and enclosed loop level for the loop
9294 hierarchy tree specified by LOOPS. Return the maximum enclosed loop
9295 level. */
9297 static int
9298 flow_loops_level_compute (loops)
9299 struct loops *loops;
9301 struct loop *loop;
9302 int level;
9303 int levels = 0;
9305 /* Traverse all the outer level loops. */
9306 for (loop = loops->tree_root; loop; loop = loop->next)
9308 level = flow_loop_level_compute (loop, 1);
9309 if (level > levels)
9310 levels = level;
9312 return levels;
9316 /* Scan a single natural loop specified by LOOP collecting information
9317 about it specified by FLAGS. */
9320 flow_loop_scan (loops, loop, flags)
9321 struct loops *loops;
9322 struct loop *loop;
9323 int flags;
9325 /* Determine prerequisites. */
9326 if ((flags & LOOP_EXITS_DOMS) && ! loop->exit_edges)
9327 flags |= LOOP_EXIT_EDGES;
9329 if (flags & LOOP_ENTRY_EDGES)
9331 /* Find edges which enter the loop header.
9332 Note that the entry edges should only
9333 enter the header of a natural loop. */
9334 loop->num_entries
9335 = flow_loop_entry_edges_find (loop->header,
9336 loop->nodes,
9337 &loop->entry_edges);
9340 if (flags & LOOP_EXIT_EDGES)
9342 /* Find edges which exit the loop. */
9343 loop->num_exits
9344 = flow_loop_exit_edges_find (loop->nodes,
9345 &loop->exit_edges);
9348 if (flags & LOOP_EXITS_DOMS)
9350 int j;
9352 /* Determine which loop nodes dominate all the exits
9353 of the loop. */
9354 loop->exits_doms = sbitmap_alloc (n_basic_blocks);
9355 sbitmap_copy (loop->exits_doms, loop->nodes);
9356 for (j = 0; j < loop->num_exits; j++)
9357 sbitmap_a_and_b (loop->exits_doms, loop->exits_doms,
9358 loops->cfg.dom[loop->exit_edges[j]->src->index]);
9360 /* The header of a natural loop must dominate
9361 all exits. */
9362 if (! TEST_BIT (loop->exits_doms, loop->header->index))
9363 abort ();
9366 if (flags & LOOP_PRE_HEADER)
9368 /* Look to see if the loop has a pre-header node. */
9369 loop->pre_header
9370 = flow_loop_pre_header_find (loop->header, loops->cfg.dom);
9372 /* Find the blocks within the extended basic block of
9373 the loop pre-header. */
9374 flow_loop_pre_header_scan (loop);
9376 return 1;
9380 /* Find all the natural loops in the function and save in LOOPS structure
9381 and recalculate loop_depth information in basic block structures.
9382 FLAGS controls which loop information is collected.
9383 Return the number of natural loops found. */
9386 flow_loops_find (loops, flags)
9387 struct loops *loops;
9388 int flags;
9390 int i;
9391 int b;
9392 int num_loops;
9393 edge e;
9394 sbitmap headers;
9395 sbitmap *dom;
9396 int *dfs_order;
9397 int *rc_order;
9399 /* This function cannot be repeatedly called with different
9400 flags to build up the loop information. The loop tree
9401 must always be built if this function is called. */
9402 if (! (flags & LOOP_TREE))
9403 abort ();
9405 memset (loops, 0, sizeof (*loops));
9407 /* Taking care of this degenerate case makes the rest of
9408 this code simpler. */
9409 if (n_basic_blocks == 0)
9410 return 0;
9412 dfs_order = NULL;
9413 rc_order = NULL;
9415 /* Compute the dominators. */
9416 dom = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
9417 calculate_dominance_info (NULL, dom, CDI_DOMINATORS);
9419 /* Count the number of loop edges (back edges). This should be the
9420 same as the number of natural loops. */
9422 num_loops = 0;
9423 for (b = 0; b < n_basic_blocks; b++)
9425 basic_block header;
9427 header = BASIC_BLOCK (b);
9428 header->loop_depth = 0;
9430 for (e = header->pred; e; e = e->pred_next)
9432 basic_block latch = e->src;
9434 /* Look for back edges where a predecessor is dominated
9435 by this block. A natural loop has a single entry
9436 node (header) that dominates all the nodes in the
9437 loop. It also has a single back edge to the header
9438 from a latch node. Note that multiple natural loops
9439 may share the same header. */
9440 if (b != header->index)
9441 abort ();
9443 if (latch != ENTRY_BLOCK_PTR && TEST_BIT (dom[latch->index], b))
9444 num_loops++;
9448 if (num_loops)
9450 /* Compute depth first search order of the CFG so that outer
9451 natural loops will be found before inner natural loops. */
9452 dfs_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
9453 rc_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
9454 flow_depth_first_order_compute (dfs_order, rc_order);
9456 /* Save CFG derived information to avoid recomputing it. */
9457 loops->cfg.dom = dom;
9458 loops->cfg.dfs_order = dfs_order;
9459 loops->cfg.rc_order = rc_order;
9461 /* Allocate loop structures. */
9462 loops->array
9463 = (struct loop *) xcalloc (num_loops, sizeof (struct loop));
9465 headers = sbitmap_alloc (n_basic_blocks);
9466 sbitmap_zero (headers);
9468 loops->shared_headers = sbitmap_alloc (n_basic_blocks);
9469 sbitmap_zero (loops->shared_headers);
9471 /* Find and record information about all the natural loops
9472 in the CFG. */
9473 num_loops = 0;
9474 for (b = 0; b < n_basic_blocks; b++)
9476 basic_block header;
9478 /* Search the nodes of the CFG in reverse completion order
9479 so that we can find outer loops first. */
9480 header = BASIC_BLOCK (rc_order[b]);
9482 /* Look for all the possible latch blocks for this header. */
9483 for (e = header->pred; e; e = e->pred_next)
9485 basic_block latch = e->src;
9487 /* Look for back edges where a predecessor is dominated
9488 by this block. A natural loop has a single entry
9489 node (header) that dominates all the nodes in the
9490 loop. It also has a single back edge to the header
9491 from a latch node. Note that multiple natural loops
9492 may share the same header. */
9493 if (latch != ENTRY_BLOCK_PTR
9494 && TEST_BIT (dom[latch->index], header->index))
9496 struct loop *loop;
9498 loop = loops->array + num_loops;
9500 loop->header = header;
9501 loop->latch = latch;
9502 loop->num = num_loops;
9504 num_loops++;
9509 for (i = 0; i < num_loops; i++)
9511 struct loop *loop = &loops->array[i];
9513 /* Keep track of blocks that are loop headers so
9514 that we can tell which loops should be merged. */
9515 if (TEST_BIT (headers, loop->header->index))
9516 SET_BIT (loops->shared_headers, loop->header->index);
9517 SET_BIT (headers, loop->header->index);
9519 /* Find nodes contained within the loop. */
9520 loop->nodes = sbitmap_alloc (n_basic_blocks);
9521 loop->num_nodes
9522 = flow_loop_nodes_find (loop->header, loop->latch, loop->nodes);
9524 /* Compute first and last blocks within the loop.
9525 These are often the same as the loop header and
9526 loop latch respectively, but this is not always
9527 the case. */
9528 loop->first
9529 = BASIC_BLOCK (sbitmap_first_set_bit (loop->nodes));
9530 loop->last
9531 = BASIC_BLOCK (sbitmap_last_set_bit (loop->nodes));
9533 flow_loop_scan (loops, loop, flags);
9536 /* Natural loops with shared headers may either be disjoint or
9537 nested. Disjoint loops with shared headers cannot be inner
9538 loops and should be merged. For now just mark loops that share
9539 headers. */
9540 for (i = 0; i < num_loops; i++)
9541 if (TEST_BIT (loops->shared_headers, loops->array[i].header->index))
9542 loops->array[i].shared = 1;
9544 sbitmap_free (headers);
9546 else
9548 sbitmap_vector_free (dom);
9551 loops->num = num_loops;
9553 /* Build the loop hierarchy tree. */
9554 flow_loops_tree_build (loops);
9556 /* Assign the loop nesting depth and enclosed loop level for each
9557 loop. */
9558 loops->levels = flow_loops_level_compute (loops);
9560 return num_loops;
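/* A typical invocation (sketch, assumptions: which flags a pass wants
   is up to the pass): a caller wanting the loop tree plus entry and
   exit edge information might do

       struct loops loops;

       flow_loops_find (&loops, LOOP_TREE | LOOP_ENTRY_EDGES
                                | LOOP_EXIT_EDGES);
       ...
       flow_loops_free (&loops);

   LOOP_TREE must always be included, since the loop tree is always
   built.  */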
9564 /* Update the information regarding the loops in the CFG
9565 specified by LOOPS. */
9567 flow_loops_update (loops, flags)
9568 struct loops *loops;
9569 int flags;
9571 /* One day we may want to update the current loop data. For now
9572 throw away the old stuff and rebuild what we need. */
9573 if (loops->array)
9574 flow_loops_free (loops);
9576 return flow_loops_find (loops, flags);
9580 /* Return non-zero if edge E enters header of LOOP from outside of LOOP. */
9583 flow_loop_outside_edge_p (loop, e)
9584 const struct loop *loop;
9585 edge e;
9587 if (e->dest != loop->header)
9588 abort ();
9589 return (e->src == ENTRY_BLOCK_PTR)
9590 || ! TEST_BIT (loop->nodes, e->src->index);
9593 /* Clear LOG_LINKS fields of insns in a chain.
9594 Also clear the global_live_at_{start,end} fields of the basic block
9595 structures. */
9597 void
9598 clear_log_links (insns)
9599 rtx insns;
9601 rtx i;
9602 int b;
9604 for (i = insns; i; i = NEXT_INSN (i))
9605 if (INSN_P (i))
9606 LOG_LINKS (i) = 0;
9608 for (b = 0; b < n_basic_blocks; b++)
9610 basic_block bb = BASIC_BLOCK (b);
9612 bb->global_live_at_start = NULL;
9613 bb->global_live_at_end = NULL;
9616 ENTRY_BLOCK_PTR->global_live_at_end = NULL;
9617 EXIT_BLOCK_PTR->global_live_at_start = NULL;
9620 /* Given a register bitmap, turn on the bits in a HARD_REG_SET that
9621 correspond to the hard registers, if any, set in that map. This
9622 could be done far more efficiently by having all sorts of special-cases
9623 with moving single words, but probably isn't worth the trouble. */
9625 void
9626 reg_set_to_hard_reg_set (to, from)
9627 HARD_REG_SET *to;
9628 bitmap from;
9630 int i;
9632 EXECUTE_IF_SET_IN_BITMAP
9633 (from, 0, i,
9635 if (i >= FIRST_PSEUDO_REGISTER)
9636 return;
9637 SET_HARD_REG_BIT (*to, i);
9641 /* Called once at initialization time. */
9643 void
9644 init_flow ()
9646 static int initialized;
9648 if (!initialized)
9650 gcc_obstack_init (&flow_obstack);
9651 flow_firstobj = (char *) obstack_alloc (&flow_obstack, 0);
9652 initialized = 1;
9654 else
9656 obstack_free (&flow_obstack, flow_firstobj);
9657 flow_firstobj = (char *) obstack_alloc (&flow_obstack, 0);