/* Data flow analysis for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
/* This file contains the data flow analysis pass of the compiler.  It
   computes data flow information which tells combine_instructions
   which insns to consider combining and controls register allocation.

   Additional data flow information that is too bulky to record is
   generated during the analysis, and is used at that time to create
   autoincrement and autodecrement addressing.

   The first step is dividing the function into basic blocks.
   find_basic_blocks does this.  Then life_analysis determines
   where each register is live and where it is dead.

   ** find_basic_blocks **

   find_basic_blocks divides the current function's rtl into basic
   blocks and constructs the CFG.  The blocks are recorded in the
   basic_block_info array; the CFG exists in the edge structures
   referenced by the blocks.

   find_basic_blocks also finds any unreachable loops and deletes them.

   ** life_analysis **

   life_analysis is called immediately after find_basic_blocks.
   It uses the basic block information to determine where each
   hard or pseudo register is live.

   ** live-register info **

   The information about where each register is live is in two parts:
   the REG_NOTES of insns, and the vector basic_block->global_live_at_start.

   basic_block->global_live_at_start has an element for each basic
   block, and the element is a bit-vector with a bit for each hard or
   pseudo register.  The bit is 1 if the register is live at the
   beginning of the basic block.
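
   For example (an illustrative sketch, not code from this file), a
   pass holding a basic block BB could test whether pseudo register 42
   is live on entry using the regset accessor used throughout flow.c:

	if (REGNO_REG_SET_P (bb->global_live_at_start, 42))
	  ... register 42 is live at the start of BB ...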

   Two types of elements can be added to an insn's REG_NOTES.
   A REG_DEAD note is added to an insn's REG_NOTES for any register
   that meets both of two conditions:  The value in the register is not
   needed in subsequent insns and the insn does not replace the value in
   the register (in the case of multi-word hard registers, the value in
   each register must be replaced by the insn to avoid a REG_DEAD note).

   In the vast majority of cases, an object in a REG_DEAD note will be
   used somewhere in the insn.  The (rare) exception to this is if an
   insn uses a multi-word hard register and only some of the registers are
   needed in subsequent insns.  In that case, REG_DEAD notes will be
   provided for those hard registers that are not subsequently needed.
   Partial REG_DEAD notes of this type do not occur when an insn sets
   only some of the hard registers used in such a multi-word operand;
   omitting REG_DEAD notes for objects stored in an insn is optional and
   the desire to do so does not justify the complexity of the partial
   REG_DEAD notes.

   REG_UNUSED notes are added for each register that is set by the insn
   but is unused subsequently (if every register set by the insn is unused
   and the insn does not reference memory or have some other side-effect,
   the insn is deleted instead).  If only part of a multi-word hard
   register is used in a subsequent insn, REG_UNUSED notes are made for
   the parts that will not be used.
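
   As an illustration (the register numbers here are made up), an insn
   that makes the last use of pseudo 42 and computes a result into
   pseudo 43 that is never referenced again would carry both kinds of
   notes in its REG_NOTES:

	(expr_list:REG_DEAD (reg:SI 42)
	   (expr_list:REG_UNUSED (reg:SI 43)
	      (nil)))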

   To determine which registers are live after any insn, one can
   start from the beginning of the basic block and scan insns, noting
   which registers are set by each insn and which die there.
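
   Schematically (an outline only, not the exact code of this file):

	live = copy of bb->global_live_at_start;
	for each insn in the block, in order:
	  clear the bits for registers in the insn's REG_DEAD notes;
	  set the bits for registers set by the insn;
	  clear the bits for registers in the insn's REG_UNUSED notes;
	  -- LIVE is now the set of registers live after this insn.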

   ** Other actions of life_analysis **

   life_analysis sets up the LOG_LINKS fields of insns because the
   information needed to do so is readily available.

   life_analysis deletes insns whose only effect is to store a value
   that is never used.

   life_analysis notices cases where a reference to a register as
   a memory address can be combined with a preceding or following
   incrementation or decrementation of the register.  The separate
   instruction to increment or decrement is deleted and the address
   is changed to a POST_INC or similar rtx.

   Each time an incrementing or decrementing address is created,
   a REG_INC element is added to the insn's REG_NOTES list.
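
   For instance (a hedged sketch in rtl terms, with a made-up register),
   the pair

	(set (mem:SI (reg:SI 42)) ...)
	(set (reg:SI 42) (plus:SI (reg:SI 42) (const_int 4)))

   can be collapsed into the single insn

	(set (mem:SI (post_inc:SI (reg:SI 42))) ...)

   which then carries a REG_INC note mentioning (reg:SI 42).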

   life_analysis fills in certain vectors containing information about
   register usage: REG_N_REFS, REG_N_DEATHS, REG_N_SETS, REG_LIVE_LENGTH,
   REG_N_CALLS_CROSSED and REG_BASIC_BLOCK.

   life_analysis sets current_function_sp_is_unchanging if the function
   doesn't modify the stack pointer.  */

/* TODO:

   Split out from life_analysis:
	- local property discovery (bb->local_live, bb->local_set)
	- global property computation
	- log links creation
	- pre/post modify transformation
 */

#include "config.h"
#include "system.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-config.h"
#include "regs.h"
#include "flags.h"
#include "output.h"
#include "function.h"
#include "except.h"
#include "toplev.h"
#include "recog.h"
#include "insn-flags.h"
#include "expr.h"
#include "ssa.h"

#include "obstack.h"
#include "splay-tree.h"

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free

/* EXIT_IGNORE_STACK should be nonzero if, when returning from a function,
   the stack pointer does not matter.  The value is tested only in
   functions that have frame pointers.
   No definition is equivalent to always zero.  */
#ifndef EXIT_IGNORE_STACK
#define EXIT_IGNORE_STACK 0
#endif

#ifndef HAVE_epilogue
#define HAVE_epilogue 0
#endif
#ifndef HAVE_prologue
#define HAVE_prologue 0
#endif
#ifndef HAVE_sibcall_epilogue
#define HAVE_sibcall_epilogue 0
#endif

#ifndef LOCAL_REGNO
#define LOCAL_REGNO(REGNO)  0
#endif
#ifndef EPILOGUE_USES
#define EPILOGUE_USES(REGNO)  0
#endif

/* The obstack on which the flow graph components are allocated.  */

struct obstack flow_obstack;
static char *flow_firstobj;

/* Number of basic blocks in the current function.  */

int n_basic_blocks;

/* Number of edges in the current function.  */

int n_edges;

/* The basic block array.  */

varray_type basic_block_info;

/* The special entry and exit blocks.  */
struct basic_block_def entry_exit_blocks[2]
= {{NULL,			/* head */
    NULL,			/* end */
    NULL,			/* pred */
    NULL,			/* succ */
    NULL,			/* local_set */
    NULL,			/* cond_local_set */
    NULL,			/* global_live_at_start */
    NULL,			/* global_live_at_end */
    NULL,			/* aux */
    ENTRY_BLOCK,		/* index */
    0,				/* loop_depth */
    -1, -1,			/* eh_beg, eh_end */
    0				/* count */
   },
   {
    NULL,			/* head */
    NULL,			/* end */
    NULL,			/* pred */
    NULL,			/* succ */
    NULL,			/* local_set */
    NULL,			/* cond_local_set */
    NULL,			/* global_live_at_start */
    NULL,			/* global_live_at_end */
    NULL,			/* aux */
    EXIT_BLOCK,			/* index */
    0,				/* loop_depth */
    -1, -1,			/* eh_beg, eh_end */
    0				/* count */
   }
};

/* Nonzero if the second flow pass has completed.  */
int flow2_completed;

/* Maximum register number used in this function, plus one.  */

int max_regno;

/* Indexed by n, giving various register information */

varray_type reg_n_info;

/* Size of a regset for the current function,
   in (1) bytes and (2) elements.  */

int regset_bytes;
int regset_size;

/* Regset of regs live when calls to `setjmp'-like functions happen.  */
/* ??? Does this exist only for the setjmp-clobbered warning message?  */

regset regs_live_at_setjmp;

/* List made of EXPR_LIST rtx's which gives pairs of pseudo registers
   that have to go in the same hard reg.
   The first two regs in the list are a pair, and the next two
   are another pair, etc.  */
rtx regs_may_share;
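/* For instance (an illustrative sketch, not actual data), the list
   (r5 r7 r12 r13) would mean that r5 and r7 must share a hard reg,
   as must r12 and r13.  */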

/* Callback that determines if it's ok for a function to have no
   noreturn attribute.  */
int (*lang_missing_noreturn_ok_p) PARAMS ((tree));

/* Set of registers that may be eliminable.  These are handled specially
   in updating regs_ever_live.  */

static HARD_REG_SET elim_reg_set;

/* The basic block structure for every insn, indexed by uid.  */

varray_type basic_block_for_insn;

/* The labels mentioned in non-jump rtl.  Valid during find_basic_blocks.  */
/* ??? Should probably be using LABEL_NUSES instead.  It would take a
   bit of surgery to be able to use or co-opt the routines in jump.  */

static rtx label_value_list;
static rtx tail_recursion_label_list;

/* Holds information for tracking conditional register life information.  */
struct reg_cond_life_info
{
  /* A boolean expression of conditions under which a register is dead.  */
  rtx condition;
  /* Conditions under which a register is dead at the basic block end.  */
  rtx orig_condition;

  /* A boolean expression of conditions under which a register has been
     stored into.  */
  rtx stores;

  /* ??? Could store mask of bytes that are dead, so that we could finally
     track lifetimes of multi-word registers accessed via subregs.  */
};

/* For use in communicating between propagate_block and its subroutines.
   Holds all information needed to compute life and def-use information.  */

struct propagate_block_info
{
  /* The basic block we're considering.  */
  basic_block bb;

  /* Bit N is set if register N is conditionally or unconditionally live.  */
  regset reg_live;

  /* Bit N is set if register N is set this insn.  */
  regset new_set;

  /* Element N is the next insn that uses (hard or pseudo) register N
     within the current basic block; or zero, if there is no such insn.  */
  rtx *reg_next_use;

  /* Contains a list of all the MEMs we are tracking for dead store
     elimination.  */
  rtx mem_set_list;

  /* If non-null, record the set of registers set unconditionally in the
     basic block.  */
  regset local_set;

  /* If non-null, record the set of registers set conditionally in the
     basic block.  */
  regset cond_local_set;

#ifdef HAVE_conditional_execution
  /* Indexed by register number, holds a reg_cond_life_info for each
     register that is not unconditionally live or dead.  */
  splay_tree reg_cond_dead;

  /* Bit N is set if register N is in an expression in reg_cond_dead.  */
  regset reg_cond_reg;
#endif

  /* The length of mem_set_list.  */
  int mem_set_list_len;

  /* Non-zero if the value of CC0 is live.  */
  int cc0_live;

  /* Flags controlling the set of information propagate_block collects.  */
  int flags;
};

/* Maximum length of pbi->mem_set_list before we start dropping
   new elements on the floor.  */
#define MAX_MEM_SET_LIST_LEN	100

/* Store the data structures necessary for depth-first search.  */
struct depth_first_search_dsS {
  /* stack for backtracking during the algorithm */
  basic_block *stack;

  /* number of edges in the stack.  That is, positions 0, ..., sp-1
     have edges.  */
  unsigned int sp;

  /* record of basic blocks already seen by depth-first search */
  sbitmap visited_blocks;
};
typedef struct depth_first_search_dsS *depth_first_search_ds;

/* Forward declarations */
static int count_basic_blocks PARAMS ((rtx));
static void find_basic_blocks_1 PARAMS ((rtx));
static rtx find_label_refs PARAMS ((rtx, rtx));
static void clear_edges PARAMS ((void));
static void make_edges PARAMS ((rtx));
static void make_label_edge PARAMS ((sbitmap *, basic_block, rtx, int));
static void make_eh_edge PARAMS ((sbitmap *, eh_nesting_info *,
				  basic_block, rtx, int));
static void mark_critical_edges PARAMS ((void));
static void move_stray_eh_region_notes PARAMS ((void));
static void record_active_eh_regions PARAMS ((rtx));

static void commit_one_edge_insertion PARAMS ((edge));

static void delete_unreachable_blocks PARAMS ((void));
static void delete_eh_regions PARAMS ((void));
static int can_delete_note_p PARAMS ((rtx));
static void expunge_block PARAMS ((basic_block));
static int can_delete_label_p PARAMS ((rtx));
static int tail_recursion_label_p PARAMS ((rtx));
static int merge_blocks_move_predecessor_nojumps PARAMS ((basic_block,
							  basic_block));
static int merge_blocks_move_successor_nojumps PARAMS ((basic_block,
							basic_block));
static int merge_blocks PARAMS ((edge, basic_block, basic_block));
static void try_merge_blocks PARAMS ((void));
static void tidy_fallthru_edges PARAMS ((void));
static int verify_wide_reg_1 PARAMS ((rtx *, void *));
static void verify_wide_reg PARAMS ((int, rtx, rtx));
static void verify_local_live_at_start PARAMS ((regset, basic_block));
static int set_noop_p PARAMS ((rtx));
static int noop_move_p PARAMS ((rtx));
static void delete_noop_moves PARAMS ((rtx));
static void notice_stack_pointer_modification_1 PARAMS ((rtx, rtx, void *));
static void notice_stack_pointer_modification PARAMS ((rtx));
static void mark_reg PARAMS ((rtx, void *));
static void mark_regs_live_at_end PARAMS ((regset));
static int set_phi_alternative_reg PARAMS ((rtx, int, int, void *));
static void calculate_global_regs_live PARAMS ((sbitmap, sbitmap, int));
static void propagate_block_delete_insn PARAMS ((basic_block, rtx));
static rtx propagate_block_delete_libcall PARAMS ((basic_block, rtx, rtx));
static int insn_dead_p PARAMS ((struct propagate_block_info *,
				rtx, int, rtx));
static int libcall_dead_p PARAMS ((struct propagate_block_info *,
				   rtx, rtx));
static void mark_set_regs PARAMS ((struct propagate_block_info *,
				   rtx, rtx));
static void mark_set_1 PARAMS ((struct propagate_block_info *,
				enum rtx_code, rtx, rtx,
				rtx, int));
#ifdef HAVE_conditional_execution
static int mark_regno_cond_dead PARAMS ((struct propagate_block_info *,
					 int, rtx));
static void free_reg_cond_life_info PARAMS ((splay_tree_value));
static int flush_reg_cond_reg_1 PARAMS ((splay_tree_node, void *));
static void flush_reg_cond_reg PARAMS ((struct propagate_block_info *,
					int));
static rtx elim_reg_cond PARAMS ((rtx, unsigned int));
static rtx ior_reg_cond PARAMS ((rtx, rtx, int));
static rtx not_reg_cond PARAMS ((rtx));
static rtx and_reg_cond PARAMS ((rtx, rtx, int));
#endif
#ifdef AUTO_INC_DEC
static void attempt_auto_inc PARAMS ((struct propagate_block_info *,
				      rtx, rtx, rtx, rtx, rtx));
static void find_auto_inc PARAMS ((struct propagate_block_info *,
				   rtx, rtx));
static int try_pre_increment_1 PARAMS ((struct propagate_block_info *,
					rtx));
static int try_pre_increment PARAMS ((rtx, rtx, HOST_WIDE_INT));
#endif
static void mark_used_reg PARAMS ((struct propagate_block_info *,
				   rtx, rtx, rtx));
static void mark_used_regs PARAMS ((struct propagate_block_info *,
				    rtx, rtx, rtx));
void dump_flow_info PARAMS ((FILE *));
void debug_flow_info PARAMS ((void));
static void dump_edge_info PARAMS ((FILE *, edge, int));
static void print_rtl_and_abort PARAMS ((void));

static void invalidate_mems_from_autoinc PARAMS ((struct propagate_block_info *,
						  rtx));
static void invalidate_mems_from_set PARAMS ((struct propagate_block_info *,
					      rtx));
static void remove_fake_successors PARAMS ((basic_block));
static void flow_nodes_print PARAMS ((const char *, const sbitmap, FILE *));
static void flow_edge_list_print PARAMS ((const char *, const edge *,
					  int, FILE *));
static void flow_loops_cfg_dump PARAMS ((const struct loops *, FILE *));
static int flow_loop_nested_p PARAMS ((struct loop *, struct loop *));
static int flow_loop_entry_edges_find PARAMS ((basic_block, const sbitmap,
					       edge **));
static int flow_loop_exit_edges_find PARAMS ((const sbitmap, edge **));
static int flow_loop_nodes_find PARAMS ((basic_block, basic_block, sbitmap));
static int flow_depth_first_order_compute PARAMS ((int *, int *));
static void flow_dfs_compute_reverse_init PARAMS ((depth_first_search_ds));
static void flow_dfs_compute_reverse_add_bb PARAMS ((depth_first_search_ds,
						     basic_block));
static basic_block flow_dfs_compute_reverse_execute
  PARAMS ((depth_first_search_ds));
static void flow_dfs_compute_reverse_finish PARAMS ((depth_first_search_ds));
static void flow_loop_pre_header_scan PARAMS ((struct loop *));
static basic_block flow_loop_pre_header_find PARAMS ((basic_block,
						      const sbitmap *));
static void flow_loop_tree_node_add PARAMS ((struct loop *, struct loop *));
static void flow_loops_tree_build PARAMS ((struct loops *));
static int flow_loop_level_compute PARAMS ((struct loop *, int));
static int flow_loops_level_compute PARAMS ((struct loops *));
static void allocate_bb_life_data PARAMS ((void));

/* Find basic blocks of the current function.
   F is the first insn of the function and NREGS the number of register
   numbers in use.  */

void
find_basic_blocks (f, nregs, file)
     rtx f;
     int nregs ATTRIBUTE_UNUSED;
     FILE *file ATTRIBUTE_UNUSED;
{
  int max_uid;

  /* Flush out existing data.  */
  if (basic_block_info != NULL)
    {
      int i;

      clear_edges ();

      /* Clear bb->aux on all extant basic blocks.  We'll use this as a
	 tag for reuse during create_basic_block, just in case some pass
	 copies around basic block notes improperly.  */
      for (i = 0; i < n_basic_blocks; ++i)
	BASIC_BLOCK (i)->aux = NULL;

      VARRAY_FREE (basic_block_info);
    }

  n_basic_blocks = count_basic_blocks (f);

  /* Size the basic block table.  The actual structures will be allocated
     by find_basic_blocks_1, since we want to keep the structure pointers
     stable across calls to find_basic_blocks.  */
  /* ??? This whole issue would be much simpler if we called find_basic_blocks
     exactly once, and thereafter we don't have a single long chain of
     instructions at all until close to the end of compilation when we
     actually lay them out.  */

  VARRAY_BB_INIT (basic_block_info, n_basic_blocks, "basic_block_info");

  find_basic_blocks_1 (f);

  /* Record the block to which an insn belongs.  */
  /* ??? This should be done another way, by which (perhaps) a label is
     tagged directly with the basic block that it starts.  It is used for
     more than that currently, but IMO that is the only valid use.  */

  max_uid = get_max_uid ();
#ifdef AUTO_INC_DEC
  /* Leave space for insns life_analysis makes in some cases for auto-inc.
     These cases are rare, so we don't need too much space.  */
  max_uid += max_uid / 10;
#endif

  compute_bb_for_insn (max_uid);

  /* Discover the edges of our cfg.  */
  record_active_eh_regions (f);
  make_edges (label_value_list);

  /* Do very simple cleanup now, for the benefit of code that runs between
     here and cleanup_cfg, e.g. thread_prologue_and_epilogue_insns.  */
  tidy_fallthru_edges ();

  mark_critical_edges ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
}

void
check_function_return_warnings ()
{
  if (warn_missing_noreturn
      && !TREE_THIS_VOLATILE (cfun->decl)
      && EXIT_BLOCK_PTR->pred == NULL
      && (lang_missing_noreturn_ok_p
	  && !lang_missing_noreturn_ok_p (cfun->decl)))
    warning ("function might be possible candidate for attribute `noreturn'");

  /* If we have a path to EXIT, then we do return.  */
  if (TREE_THIS_VOLATILE (cfun->decl)
      && EXIT_BLOCK_PTR->pred != NULL)
    warning ("`noreturn' function does return");

  /* If the clobber_return_insn appears in some basic block, then we
     do reach the end without returning a value.  */
  else if (warn_return_type
	   && cfun->x_clobber_return_insn != NULL
	   && EXIT_BLOCK_PTR->pred != NULL)
    {
      int max_uid = get_max_uid ();

      /* If clobber_return_insn was excised by jump1, then renumber_insns
	 can make max_uid smaller than the number still recorded in our rtx.
	 That's fine, since this is a quick way of verifying that the insn
	 is no longer in the chain.  */
      if (INSN_UID (cfun->x_clobber_return_insn) < max_uid)
	{
	  /* Recompute insn->block mapping, since the initial mapping is
	     set before we delete unreachable blocks.  */
	  compute_bb_for_insn (max_uid);

	  if (BLOCK_FOR_INSN (cfun->x_clobber_return_insn) != NULL)
	    warning ("control reaches end of non-void function");
	}
    }
}

/* Count the basic blocks of the function.  */

static int
count_basic_blocks (f)
     rtx f;
{
  register rtx insn;
  register RTX_CODE prev_code;
  register int count = 0;
  int eh_region = 0;
  int call_had_abnormal_edge = 0;

  prev_code = JUMP_INSN;
  for (insn = f; insn; insn = NEXT_INSN (insn))
    {
      register RTX_CODE code = GET_CODE (insn);

      if (code == CODE_LABEL
	  || (GET_RTX_CLASS (code) == 'i'
	      && (prev_code == JUMP_INSN
		  || prev_code == BARRIER
		  || (prev_code == CALL_INSN && call_had_abnormal_edge))))
	count++;

      /* Record whether this call created an edge.  */
      if (code == CALL_INSN)
	{
	  rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
	  int region = (note ? INTVAL (XEXP (note, 0)) : 1);

	  call_had_abnormal_edge = 0;

	  /* If there is an EH region or rethrow, we have an edge.  */
	  if ((eh_region && region > 0)
	      || find_reg_note (insn, REG_EH_RETHROW, NULL_RTX))
	    call_had_abnormal_edge = 1;
	  else if (nonlocal_goto_handler_labels && region >= 0)
	    /* If there is a nonlocal goto label and the specified
	       region number isn't -1, we have an edge.  (0 means
	       no throw, but might have a nonlocal goto).  */
	    call_had_abnormal_edge = 1;
	}

      if (code != NOTE)
	prev_code = code;
      else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
	++eh_region;
      else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
	--eh_region;
    }

  /* The rest of the compiler works a bit smoother when we don't have to
     check for the edge case of do-nothing functions with no basic blocks.  */
  if (count == 0)
    {
      emit_insn (gen_rtx_USE (VOIDmode, const0_rtx));
      count = 1;
    }

  return count;
}
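
/* As an illustration of the counting rule above (a made-up stream, not
   from the sources): in

	(code_label 1) (insn a) (jump_insn b) (code_label 2) (insn c)

   count_basic_blocks returns 2 -- one block begins at code_label 1,
   and a second begins at code_label 2, the first insn after a jump.  */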

/* Scan a list of insns for labels referred to other than by jumps.
   This is used to scan the alternatives of a call placeholder.  */
static rtx
find_label_refs (f, lvl)
     rtx f;
     rtx lvl;
{
  rtx insn;

  for (insn = f; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && GET_CODE (insn) != JUMP_INSN)
      {
	rtx note;

	/* Make a list of all labels referred to other than by jumps
	   (which just don't have the REG_LABEL notes).

	   Make a special exception for labels followed by an ADDR*VEC,
	   as this would be a part of the tablejump setup code.

	   Make a special exception for the eh_return_stub_label, which
	   we know isn't part of any otherwise visible control flow.

	   Make a special exception to registers loaded with label
	   values just before jump insns that use them.  */

	for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	  if (REG_NOTE_KIND (note) == REG_LABEL)
	    {
	      rtx lab = XEXP (note, 0), next;

	      if (lab == eh_return_stub_label)
		;
	      else if ((next = next_nonnote_insn (lab)) != NULL
		       && GET_CODE (next) == JUMP_INSN
		       && (GET_CODE (PATTERN (next)) == ADDR_VEC
			   || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
		;
	      else if (GET_CODE (lab) == NOTE)
		;
	      else if (GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
		       && find_reg_note (NEXT_INSN (insn), REG_LABEL, lab))
		;
	      else
		lvl = alloc_EXPR_LIST (0, XEXP (note, 0), lvl);
	    }
      }

  return lvl;
}

/* Find all basic blocks of the function whose first insn is F.

   Collect and return a list of labels whose addresses are taken.  This
   will be used in make_edges for use with computed gotos.  */

static void
find_basic_blocks_1 (f)
     rtx f;
{
  register rtx insn, next;
  int i = 0;
  rtx bb_note = NULL_RTX;
  rtx eh_list = NULL_RTX;
  rtx lvl = NULL_RTX;
  rtx trll = NULL_RTX;
  rtx head = NULL_RTX;
  rtx end = NULL_RTX;

  /* We process the instructions in a slightly different way than we did
     previously.  This is so that we see a NOTE_BASIC_BLOCK after we have
     closed out the previous block, so that it gets attached at the proper
     place.  Since this form should be equivalent to the previous,
     count_basic_blocks continues to use the old form as a check.  */

  for (insn = f; insn; insn = next)
    {
      enum rtx_code code = GET_CODE (insn);

      next = NEXT_INSN (insn);

      switch (code)
	{
	case NOTE:
	  {
	    int kind = NOTE_LINE_NUMBER (insn);

	    /* Keep a LIFO list of the currently active exception notes.  */
	    if (kind == NOTE_INSN_EH_REGION_BEG)
	      eh_list = alloc_INSN_LIST (insn, eh_list);
	    else if (kind == NOTE_INSN_EH_REGION_END)
	      {
		rtx t = eh_list;

		eh_list = XEXP (eh_list, 1);
		free_INSN_LIST_node (t);
	      }

	    /* Look for basic block notes with which to keep the
	       basic_block_info pointers stable.  Unthread the note now;
	       we'll put it back at the right place in create_basic_block.
	       Or not at all if we've already found a note in this block.  */
	    else if (kind == NOTE_INSN_BASIC_BLOCK)
	      {
		if (bb_note == NULL_RTX)
		  bb_note = insn;
		else
		  next = flow_delete_insn (insn);
	      }
	    break;
	  }

	case CODE_LABEL:
	  /* A basic block starts at a label.  If we've closed one off due
	     to a barrier or some such, no need to do it again.  */
	  if (head != NULL_RTX)
	    {
	      /* While we now have edge lists with which other portions of
		 the compiler might determine a call ending a basic block
		 does not imply an abnormal edge, it will be a bit before
		 everything can be updated.  So continue to emit a noop at
		 the end of such a block.  */
	      if (GET_CODE (end) == CALL_INSN && ! SIBLING_CALL_P (end))
		{
		  rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
		  end = emit_insn_after (nop, end);
		}

	      create_basic_block (i++, head, end, bb_note);
	      bb_note = NULL_RTX;
	    }

	  head = end = insn;
	  break;

	case JUMP_INSN:
	  /* A basic block ends at a jump.  */
	  if (head == NULL_RTX)
	    head = insn;
	  else
	    {
	      /* ??? Make a special check for table jumps.  The way this
		 happens is truly and amazingly gross.  We are about to
		 create a basic block that contains just a code label and
		 an addr*vec jump insn.  Worse, an addr_diff_vec creates
		 its own natural loop.

		 Prevent this bit of brain damage, pasting things together
		 correctly in make_edges.

		 The correct solution involves emitting the table directly
		 on the tablejump instruction as a note, or JUMP_LABEL.  */

	      if (GET_CODE (PATTERN (insn)) == ADDR_VEC
		  || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
		{
		  head = end = NULL;
		  n_basic_blocks--;
		  break;
		}
	    }
	  end = insn;
	  goto new_bb_inclusive;

	case BARRIER:
	  /* A basic block ends at a barrier.  It may be that an unconditional
	     jump already closed the basic block -- no need to do it again.  */
	  if (head == NULL_RTX)
	    break;

	  /* While we now have edge lists with which other portions of the
	     compiler might determine a call ending a basic block does not
	     imply an abnormal edge, it will be a bit before everything can
	     be updated.  So continue to emit a noop at the end of such a
	     block.  */
	  if (GET_CODE (end) == CALL_INSN && ! SIBLING_CALL_P (end))
	    {
	      rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
	      end = emit_insn_after (nop, end);
	    }
	  goto new_bb_exclusive;

	case CALL_INSN:
	  {
	    /* Record whether this call created an edge.  */
	    rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
	    int region = (note ? INTVAL (XEXP (note, 0)) : 1);
	    int call_has_abnormal_edge = 0;

	    if (GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
	      {
		/* Scan each of the alternatives for label refs.  */
		lvl = find_label_refs (XEXP (PATTERN (insn), 0), lvl);
		lvl = find_label_refs (XEXP (PATTERN (insn), 1), lvl);
		lvl = find_label_refs (XEXP (PATTERN (insn), 2), lvl);
		/* Record its tail recursion label, if any.  */
		if (XEXP (PATTERN (insn), 3) != NULL_RTX)
		  trll = alloc_EXPR_LIST (0, XEXP (PATTERN (insn), 3), trll);
	      }

	    /* If there is an EH region or rethrow, we have an edge.  */
	    if ((eh_list && region > 0)
		|| find_reg_note (insn, REG_EH_RETHROW, NULL_RTX))
	      call_has_abnormal_edge = 1;
	    else if (nonlocal_goto_handler_labels && region >= 0)
	      /* If there is a nonlocal goto label and the specified
		 region number isn't -1, we have an edge.  (0 means
		 no throw, but might have a nonlocal goto).  */
	      call_has_abnormal_edge = 1;

	    /* A basic block ends at a call that can either throw or
	       do a non-local goto.  */
	    if (call_has_abnormal_edge)
	      {
	      new_bb_inclusive:
		if (head == NULL_RTX)
		  head = insn;
		end = insn;

	      new_bb_exclusive:
		create_basic_block (i++, head, end, bb_note);
		head = end = NULL_RTX;
		bb_note = NULL_RTX;
		break;
	      }
	  }
	  /* Fall through.  */

	default:
	  if (GET_RTX_CLASS (code) == 'i')
	    {
	      if (head == NULL_RTX)
		head = insn;
	      end = insn;
	    }
	  break;
	}

      if (GET_RTX_CLASS (code) == 'i'
	  && GET_CODE (insn) != JUMP_INSN)
	{
	  rtx note;

	  /* Make a list of all labels referred to other than by jumps.

	     Make a special exception for labels followed by an ADDR*VEC,
	     as this would be a part of the tablejump setup code.

	     Make a special exception for the eh_return_stub_label, which
	     we know isn't part of any otherwise visible control flow.

	     Make a special exception to registers loaded with label
	     values just before jump insns that use them.  */

	  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	    if (REG_NOTE_KIND (note) == REG_LABEL)
	      {
		rtx lab = XEXP (note, 0), next;

		if (lab == eh_return_stub_label)
		  ;
		else if ((next = next_nonnote_insn (lab)) != NULL
			 && GET_CODE (next) == JUMP_INSN
			 && (GET_CODE (PATTERN (next)) == ADDR_VEC
			     || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
		  ;
		else if (GET_CODE (lab) == NOTE)
		  ;
		else if (GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
			 && find_reg_note (NEXT_INSN (insn), REG_LABEL, lab))
		  ;
		else
		  lvl = alloc_EXPR_LIST (0, XEXP (note, 0), lvl);
	      }
	}
    }

  if (head != NULL_RTX)
    create_basic_block (i++, head, end, bb_note);
  else if (bb_note)
    flow_delete_insn (bb_note);

  if (i != n_basic_blocks)
    abort ();

  label_value_list = lvl;
  tail_recursion_label_list = trll;
}

/* Tidy the CFG by deleting unreachable code and whatnot.  */

void
cleanup_cfg (f)
     rtx f;
{
  delete_unreachable_blocks ();
  move_stray_eh_region_notes ();
  record_active_eh_regions (f);
  try_merge_blocks ();
  mark_critical_edges ();

  /* Kill the data we won't maintain.  */
  free_EXPR_LIST_list (&label_value_list);
  free_EXPR_LIST_list (&tail_recursion_label_list);
}

/* Create a new basic block consisting of the instructions between
   HEAD and END inclusive.  Reuses the note and basic block struct
   in BB_NOTE, if any.  */

void
create_basic_block (index, head, end, bb_note)
     int index;
     rtx head, end, bb_note;
{
  basic_block bb;

  if (bb_note
      && ! RTX_INTEGRATED_P (bb_note)
      && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
      && bb->aux == NULL)
    {
      /* If we found an existing note, thread it back onto the chain.  */

      rtx after;

      if (GET_CODE (head) == CODE_LABEL)
	after = head;
      else
	{
	  after = PREV_INSN (head);
	  head = bb_note;
	}

      if (after != bb_note && NEXT_INSN (after) != bb_note)
	reorder_insns (bb_note, bb_note, after);
    }
  else
    {
      /* Otherwise we must create a note and a basic block structure.
	 Since we allow basic block structs in rtl, give the struct
	 the same lifetime by allocating it off the function obstack
	 rather than using malloc.  */

      bb = (basic_block) obstack_alloc (&flow_obstack, sizeof (*bb));
      memset (bb, 0, sizeof (*bb));

      if (GET_CODE (head) == CODE_LABEL)
	bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
      else
	{
	  bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
	  head = bb_note;
	}
      NOTE_BASIC_BLOCK (bb_note) = bb;
    }

  /* Always include the bb note in the block.  */
  if (NEXT_INSN (end) == bb_note)
    end = bb_note;

  bb->head = head;
  bb->end = end;
  bb->index = index;
  BASIC_BLOCK (index) = bb;

  /* Tag the block so that we know it has been used when considering
     other basic block notes.  */
  bb->aux = bb;
}

/* Records the basic block struct in BB_FOR_INSN, for every instruction
   indexed by INSN_UID.  MAX is the size of the array.  */

void
compute_bb_for_insn (max)
     int max;
{
  int i;

  if (basic_block_for_insn)
    VARRAY_FREE (basic_block_for_insn);
  VARRAY_BB_INIT (basic_block_for_insn, max, "basic_block_for_insn");

  for (i = 0; i < n_basic_blocks; ++i)
    {
      basic_block bb = BASIC_BLOCK (i);
      rtx insn, end;

      end = bb->end;
      insn = bb->head;
      while (1)
	{
	  int uid = INSN_UID (insn);
	  if (uid < max)
	    VARRAY_BB (basic_block_for_insn, uid) = bb;
	  if (insn == end)
	    break;
	  insn = NEXT_INSN (insn);
	}
    }
}

/* Free the memory associated with the edge structures.  */

static void
clear_edges ()
{
  int i;
  edge n, e;

  for (i = 0; i < n_basic_blocks; ++i)
    {
      basic_block bb = BASIC_BLOCK (i);

      for (e = bb->succ; e; e = n)
	{
	  n = e->succ_next;
	  free (e);
	}

      bb->succ = 0;
      bb->pred = 0;
    }

  for (e = ENTRY_BLOCK_PTR->succ; e; e = n)
    {
      n = e->succ_next;
      free (e);
    }

  ENTRY_BLOCK_PTR->succ = 0;
  EXIT_BLOCK_PTR->pred = 0;

  n_edges = 0;
}

/* Identify the edges between basic blocks.

   NONLOCAL_LABEL_LIST is a list of non-local labels in the function.  Blocks
   that are otherwise unreachable may be reachable with a non-local goto.

   BB_EH_END is an array indexed by basic block number in which we record
   the list of exception regions active at the end of the basic block.  */

static void
make_edges (label_value_list)
     rtx label_value_list;
{
  int i;
  eh_nesting_info *eh_nest_info = init_eh_nesting_info ();
  sbitmap *edge_cache = NULL;

  /* Assume no computed jump; revise as we create edges.  */
  current_function_has_computed_jump = 0;

  /* Heavy use of computed goto in machine-generated code can lead to
     nearly fully-connected CFGs.  In that case we spend a significant
     amount of time searching the edge lists for duplicates.  */
  if (forced_labels || label_value_list)
    {
      edge_cache = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
      sbitmap_vector_zero (edge_cache, n_basic_blocks);
    }

  /* By nature of the way these get numbered, block 0 is always the entry.  */
  make_edge (edge_cache, ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU);

  for (i = 0; i < n_basic_blocks; ++i)
    {
      basic_block bb = BASIC_BLOCK (i);
      rtx insn, x;
      enum rtx_code code;
      int force_fallthru = 0;

      /* Examine the last instruction of the block, and discover the
	 ways we can leave the block.  */

      insn = bb->end;
      code = GET_CODE (insn);

      /* A branch.  */
      if (code == JUMP_INSN)
	{
	  rtx tmp;

	  /* Recognize a non-local goto as a branch outside the
	     current function.  */
	  if (find_reg_note (insn, REG_NON_LOCAL_GOTO, NULL_RTX))
	    ;

	  /* ??? Recognize a tablejump and do the right thing.  */
	  else if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
		   && (tmp = NEXT_INSN (tmp)) != NULL_RTX
		   && GET_CODE (tmp) == JUMP_INSN
		   && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
		       || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
	    {
	      rtvec vec;
	      int j;

	      if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
		vec = XVEC (PATTERN (tmp), 0);
	      else
		vec = XVEC (PATTERN (tmp), 1);

	      for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
		make_label_edge (edge_cache, bb,
				 XEXP (RTVEC_ELT (vec, j), 0), 0);

	      /* Some targets (eg, ARM) emit a conditional jump that also
		 contains the out-of-range target.  Scan for these and
		 add an edge if necessary.  */
	      if ((tmp = single_set (insn)) != NULL
		  && SET_DEST (tmp) == pc_rtx
		  && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
		  && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
		make_label_edge (edge_cache, bb,
				 XEXP (XEXP (SET_SRC (tmp), 2), 0), 0);

#ifdef CASE_DROPS_THROUGH
	      /* Silly VAXen.  The ADDR_VEC is going to be in the way of
		 us naturally detecting fallthru into the next block.  */
	      force_fallthru = 1;
#endif
	    }

	  /* If this is a computed jump, then mark it as reaching
	     everything on the label_value_list and forced_labels list.  */
	  else if (computed_jump_p (insn))
	    {
	      current_function_has_computed_jump = 1;

	      for (x = label_value_list; x; x = XEXP (x, 1))
		make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);

	      for (x = forced_labels; x; x = XEXP (x, 1))
		make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
	    }

	  /* A return insn creates an exit edge.  */
	  else if (returnjump_p (insn))
	    make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);

	  /* Otherwise, we have a plain conditional or unconditional jump.  */
	  else
	    {
	      if (! JUMP_LABEL (insn))
		abort ();
	      make_label_edge (edge_cache, bb, JUMP_LABEL (insn), 0);
	    }
	}

      /* If this is a sibling call insn, then this is in effect a
	 combined call and return, and so we need an edge to the
	 exit block.  No need to worry about EH edges, since we
	 wouldn't have created the sibling call in the first place.  */

      if (code == CALL_INSN && SIBLING_CALL_P (insn))
	make_edge (edge_cache, bb, EXIT_BLOCK_PTR,
		   EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);

      /* If this is a CALL_INSN, then mark it as reaching the active EH
	 handler for this CALL_INSN.  If we're handling asynchronous
	 exceptions then any insn can reach any of the active handlers.

	 Also mark the CALL_INSN as reaching any nonlocal goto handler.  */

      else if (code == CALL_INSN || asynchronous_exceptions)
	{
	  /* Add any appropriate EH edges.  We do this unconditionally
	     since there may be a REG_EH_REGION or REG_EH_RETHROW note
	     on the call, and this needn't be within an EH region.  */
	  make_eh_edge (edge_cache, eh_nest_info, bb, insn, bb->eh_end);

	  /* If we have asynchronous exceptions, do the same for *all*
	     exception regions active in the block.  */
	  if (asynchronous_exceptions
	      && bb->eh_beg != bb->eh_end)
	    {
	      if (bb->eh_beg >= 0)
		make_eh_edge (edge_cache, eh_nest_info, bb,
			      NULL_RTX, bb->eh_beg);

	      for (x = bb->head; x != bb->end; x = NEXT_INSN (x))
		if (GET_CODE (x) == NOTE
		    && (NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_BEG
			|| NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_END))
		  {
		    int region = NOTE_EH_HANDLER (x);
		    make_eh_edge (edge_cache, eh_nest_info, bb,
				  NULL_RTX, region);
		  }
	    }

	  if (code == CALL_INSN && nonlocal_goto_handler_labels)
	    {
	      /* ??? This could be made smarter: in some cases it's possible
		 to tell that certain calls will not do a nonlocal goto.

		 For example, if the nested functions that do the nonlocal
		 gotos do not have their addresses taken, then only calls to
		 those functions or to other nested functions that use them
		 could possibly do nonlocal gotos.  */
	      /* We do know that a REG_EH_REGION note with a value less
		 than 0 is guaranteed not to perform a non-local goto.  */
	      rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
	      if (!note || INTVAL (XEXP (note, 0)) >= 0)
		for (x = nonlocal_goto_handler_labels; x; x = XEXP (x, 1))
		  make_label_edge (edge_cache, bb, XEXP (x, 0),
				   EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
	    }
	}

      /* We know something about the structure of the function __throw in
	 libgcc2.c.  It is the only function that ever contains eh_stub
	 labels.  It modifies its return address so that the last block
	 returns to one of the eh_stub labels within it.  So we have to
	 make additional edges in the flow graph.  */
      if (i + 1 == n_basic_blocks && eh_return_stub_label != 0)
	make_label_edge (edge_cache, bb, eh_return_stub_label, EDGE_EH);

      /* Find out if we can drop through to the next block.  */
      insn = next_nonnote_insn (insn);
      if (!insn || (i + 1 == n_basic_blocks && force_fallthru))
	make_edge (edge_cache, bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
      else if (i + 1 < n_basic_blocks)
	{
	  rtx tmp = BLOCK_HEAD (i + 1);
	  if (GET_CODE (tmp) == NOTE)
	    tmp = next_nonnote_insn (tmp);
	  if (force_fallthru || insn == tmp)
	    make_edge (edge_cache, bb, BASIC_BLOCK (i + 1), EDGE_FALLTHRU);
	}
    }

  free_eh_nesting_info (eh_nest_info);
  if (edge_cache)
    sbitmap_vector_free (edge_cache);
}

/* Create an edge between two basic blocks.  FLAGS are auxiliary information
   about the edge that is accumulated between calls.  */

void
make_edge (edge_cache, src, dst, flags)
     sbitmap *edge_cache;
     basic_block src, dst;
     int flags;
{
  int use_edge_cache;
  edge e;

  /* Don't bother with edge cache for ENTRY or EXIT; there aren't that
     many edges to them, and we didn't allocate memory for it.  */
  use_edge_cache = (edge_cache
		    && src != ENTRY_BLOCK_PTR
		    && dst != EXIT_BLOCK_PTR);

  /* Make sure we don't add duplicate edges.  */
  switch (use_edge_cache)
    {
    default:
      /* Quick test for non-existence of the edge.  */
      if (! TEST_BIT (edge_cache[src->index], dst->index))
	break;

      /* The edge exists; early exit if no work to do.  */
      if (flags == 0)
	return;

      /* FALLTHRU */
    case 0:
      for (e = src->succ; e; e = e->succ_next)
	if (e->dest == dst)
	  {
	    e->flags |= flags;
	    return;
	  }
      break;
    }

  e = (edge) xcalloc (1, sizeof (*e));
  n_edges++;

  e->succ_next = src->succ;
  e->pred_next = dst->pred;
  e->src = src;
  e->dest = dst;
  e->flags = flags;

  src->succ = e;
  dst->pred = e;

  if (use_edge_cache)
    SET_BIT (edge_cache[src->index], dst->index);
}
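
/* Note that make_edge is idempotent up to FLAGS: a repeated call with
   the same SRC and DST does not create a duplicate edge, it just ORs
   the new FLAGS into the existing edge.  E.g. (an illustrative call)

	make_edge (edge_cache, bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);

   issued twice leaves exactly one fallthru edge from BB to EXIT.  */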

/* Create an edge from a basic block to a label.  */

static void
make_label_edge (edge_cache, src, label, flags)
     sbitmap *edge_cache;
     basic_block src;
     rtx label;
     int flags;
{
  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If the label was never emitted, this insn is junk, but avoid a
     crash trying to refer to BLOCK_FOR_INSN (label).  This can happen
     as a result of a syntax error and a diagnostic has already been
     printed.  */

  if (INSN_UID (label) == 0)
    return;

  make_edge (edge_cache, src, BLOCK_FOR_INSN (label), flags);
}

/* Create the edges generated by INSN in REGION.  */

static void
make_eh_edge (edge_cache, eh_nest_info, src, insn, region)
     sbitmap *edge_cache;
     eh_nesting_info *eh_nest_info;
     basic_block src;
     rtx insn;
     int region;
{
  handler_info **handler_list;
  int num, is_call;

  is_call = (insn && GET_CODE (insn) == CALL_INSN ? EDGE_ABNORMAL_CALL : 0);
  num = reachable_handlers (region, eh_nest_info, insn, &handler_list);
  while (--num >= 0)
    {
      make_label_edge (edge_cache, src, handler_list[num]->handler_label,
		       EDGE_ABNORMAL | EDGE_EH | is_call);
    }
}

/* EH_REGION notes appearing between basic blocks are ambiguous, and even
   dangerous if we intend to move basic blocks around.  Move such notes
   into the following block.  */

static void
move_stray_eh_region_notes ()
{
  int i;
  basic_block b1, b2;

  if (n_basic_blocks < 2)
    return;

  b2 = BASIC_BLOCK (n_basic_blocks - 1);
  for (i = n_basic_blocks - 2; i >= 0; --i, b2 = b1)
    {
      rtx insn, next, list = NULL_RTX;

      b1 = BASIC_BLOCK (i);
      for (insn = NEXT_INSN (b1->end); insn != b2->head; insn = next)
	{
	  next = NEXT_INSN (insn);
	  if (GET_CODE (insn) == NOTE
	      && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG
		  || NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
	    {
	      /* Unlink from the insn chain.  */
	      NEXT_INSN (PREV_INSN (insn)) = next;
	      PREV_INSN (next) = PREV_INSN (insn);

	      /* Queue it.  */
	      NEXT_INSN (insn) = list;
	      list = insn;
	    }
	}

      if (list == NULL_RTX)
	continue;

      /* Find where to insert these things.  */
      insn = b2->head;
      if (GET_CODE (insn) == CODE_LABEL)
	insn = NEXT_INSN (insn);

      while (list)
	{
	  next = NEXT_INSN (list);
	  add_insn_after (list, insn);
	  list = next;
	}
    }
}

/* Recompute eh_beg/eh_end for each basic block.  */

static void
record_active_eh_regions (f)
     rtx f;
{
  rtx insn, eh_list = NULL_RTX;
  int i = 0;
  basic_block bb = BASIC_BLOCK (0);

  for (insn = f; insn; insn = NEXT_INSN (insn))
    {
      if (bb->head == insn)
	bb->eh_beg = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);

      if (GET_CODE (insn) == NOTE)
	{
	  int kind = NOTE_LINE_NUMBER (insn);
	  if (kind == NOTE_INSN_EH_REGION_BEG)
	    eh_list = alloc_INSN_LIST (insn, eh_list);
	  else if (kind == NOTE_INSN_EH_REGION_END)
	    {
	      rtx t = XEXP (eh_list, 1);
	      free_INSN_LIST_node (eh_list);
	      eh_list = t;
	    }
	}

      if (bb->end == insn)
	{
	  bb->eh_end = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);
	  i += 1;
	  if (i == n_basic_blocks)
	    break;
	  bb = BASIC_BLOCK (i);
	}
    }
}

/* Identify critical edges and set the bits appropriately.  */

static void
mark_critical_edges ()
{
  int i, n = n_basic_blocks;
  basic_block bb;

  /* We begin with the entry block.  This is not terribly important now,
     but could be if a front end (Fortran) implemented alternate entry
     points.  */
  bb = ENTRY_BLOCK_PTR;
  i = -1;

  while (1)
    {
      edge e;

      /* (1) Critical edges must have a source with multiple successors.  */
      if (bb->succ && bb->succ->succ_next)
	{
	  for (e = bb->succ; e; e = e->succ_next)
	    {
	      /* (2) Critical edges must have a destination with multiple
		 predecessors.  Note that we know there is at least one
		 predecessor -- the edge we followed to get here.  */
	      if (e->dest->pred->pred_next)
		e->flags |= EDGE_CRITICAL;
	      else
		e->flags &= ~EDGE_CRITICAL;
	    }
	}
      else
	{
	  for (e = bb->succ; e; e = e->succ_next)
	    e->flags &= ~EDGE_CRITICAL;
	}

      if (++i >= n)
	break;
      bb = BASIC_BLOCK (i);
    }
}
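
/* For intuition (an illustrative diagram, not from the original file):

	A   B
       / \ /
      D   C

   A has two successors and C has two predecessors, so the edge A->C
   satisfies both conditions above and is marked EDGE_CRITICAL; it can
   only be split by inserting a new block between A and C.  */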

/* Split a block BB after insn INSN creating a new fallthru edge.
   Return the new edge.  Note that to keep other parts of the compiler happy,
   this function renumbers all the basic blocks so that the new
   one has a number one greater than the block split.  */

edge
split_block (bb, insn)
     basic_block bb;
     rtx insn;
{
  basic_block new_bb;
  edge new_edge;
  edge e;
  rtx bb_note;
  int i, j;

  /* There is no point splitting the block after its end.  */
  if (bb->end == insn)
    return 0;

  /* Create the new structures.  */
  new_bb = (basic_block) obstack_alloc (&flow_obstack, sizeof (*new_bb));
  new_edge = (edge) xcalloc (1, sizeof (*new_edge));
  n_edges++;

  memset (new_bb, 0, sizeof (*new_bb));

  new_bb->head = NEXT_INSN (insn);
  new_bb->end = bb->end;
  bb->end = insn;

  new_bb->succ = bb->succ;
  bb->succ = new_edge;
  new_bb->pred = new_edge;
  new_bb->count = bb->count;
  new_bb->loop_depth = bb->loop_depth;

  new_edge->src = bb;
  new_edge->dest = new_bb;
  new_edge->flags = EDGE_FALLTHRU;
  new_edge->probability = REG_BR_PROB_BASE;
  new_edge->count = bb->count;

  /* Redirect the src of the successor edges of bb to point to new_bb.  */
  for (e = new_bb->succ; e; e = e->succ_next)
    e->src = new_bb;

  /* Place the new block just after the block being split.  */
  VARRAY_GROW (basic_block_info, ++n_basic_blocks);

  /* Some parts of the compiler expect blocks to be numbered in
     sequential order so insert the new block immediately after the
     block being split.  */
  j = bb->index;
  for (i = n_basic_blocks - 1; i > j + 1; --i)
    {
      basic_block tmp = BASIC_BLOCK (i - 1);
      BASIC_BLOCK (i) = tmp;
      tmp->index = i;
    }

  BASIC_BLOCK (i) = new_bb;
  new_bb->index = i;

  /* Create the basic block note.  */
  bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK,
			      new_bb->head);
  NOTE_BASIC_BLOCK (bb_note) = new_bb;
  new_bb->head = bb_note;

  update_bb_for_insn (new_bb);

  if (bb->global_live_at_start)
    {
      new_bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
      new_bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
      COPY_REG_SET (new_bb->global_live_at_end, bb->global_live_at_end);

      /* We now have to calculate which registers are live at the end
	 of the split basic block and at the start of the new basic
	 block.  Start with those registers that are known to be live
	 at the end of the original basic block and get
	 propagate_block to determine which registers are live.  */
      COPY_REG_SET (new_bb->global_live_at_start, bb->global_live_at_end);
      propagate_block (new_bb, new_bb->global_live_at_start, NULL, NULL, 0);
      COPY_REG_SET (bb->global_live_at_end,
		    new_bb->global_live_at_start);
    }

  return new_edge;
}

/* Split a (typically critical) edge.  Return the new block.
   Abort on abnormal edges.

   ??? The code generally expects to be called on critical edges.
   The case of a block ending in an unconditional jump to a
   block with multiple predecessors is not handled optimally.  */

basic_block
split_edge (edge_in)
     edge edge_in;
{
  basic_block old_pred, bb, old_succ;
  edge edge_out;
  rtx bb_note;
  int i, j;

  /* Abnormal edges cannot be split.  */
  if ((edge_in->flags & EDGE_ABNORMAL) != 0)
    abort ();

  old_pred = edge_in->src;
  old_succ = edge_in->dest;

  /* Remove the existing edge from the destination's pred list.  */
  {
    edge *pp;
    for (pp = &old_succ->pred; *pp != edge_in; pp = &(*pp)->pred_next)
      continue;
    *pp = edge_in->pred_next;
    edge_in->pred_next = NULL;
  }

  /* Create the new structures.  */
  bb = (basic_block) obstack_alloc (&flow_obstack, sizeof (*bb));
  edge_out = (edge) xcalloc (1, sizeof (*edge_out));
  n_edges++;

  memset (bb, 0, sizeof (*bb));

  /* ??? This info is likely going to be out of date very soon.  */
  if (old_succ->global_live_at_start)
    {
      bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
      bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
      COPY_REG_SET (bb->global_live_at_start, old_succ->global_live_at_start);
      COPY_REG_SET (bb->global_live_at_end, old_succ->global_live_at_start);
    }

  /* Wire them up.  */
  bb->pred = edge_in;
  bb->succ = edge_out;
  bb->count = edge_in->count;

  edge_in->dest = bb;
  edge_in->flags &= ~EDGE_CRITICAL;

  edge_out->pred_next = old_succ->pred;
  edge_out->succ_next = NULL;
  edge_out->src = bb;
  edge_out->dest = old_succ;
  edge_out->flags = EDGE_FALLTHRU;
  edge_out->probability = REG_BR_PROB_BASE;
  edge_out->count = edge_in->count;

  old_succ->pred = edge_out;

  /* Tricky case -- if there existed a fallthru into the successor
     (and we're not it) we must add a new unconditional jump around
     the new block we're actually interested in.

     Further, if that edge is critical, this means a second new basic
     block must be created to hold it.  In order to simplify correct
     insn placement, do this before we touch the existing basic block
     ordering for the block we were really wanting.  */
  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
    {
      edge e;
      for (e = edge_out->pred_next; e; e = e->pred_next)
	if (e->flags & EDGE_FALLTHRU)
	  break;

      if (e)
	{
	  basic_block jump_block;
	  rtx pos;

	  if ((e->flags & EDGE_CRITICAL) == 0
	      && e->src != ENTRY_BLOCK_PTR)
	    {
	      /* Non critical -- we can simply add a jump to the end
		 of the existing predecessor.  */
	      jump_block = e->src;
	    }
	  else
	    {
	      /* We need a new block to hold the jump.  The simplest
		 way to do the bulk of the work here is to recursively
		 call ourselves.  */
	      jump_block = split_edge (e);
	      e = jump_block->succ;
	    }

	  /* Now add the jump insn ...  */
	  pos = emit_jump_insn_after (gen_jump (old_succ->head),
				      jump_block->end);
	  jump_block->end = pos;
	  if (basic_block_for_insn)
	    set_block_for_insn (pos, jump_block);
	  emit_barrier_after (pos);

	  /* ... let jump know that label is in use, ...  */
	  JUMP_LABEL (pos) = old_succ->head;
	  ++LABEL_NUSES (old_succ->head);

	  /* ... and clear fallthru on the outgoing edge.  */
	  e->flags &= ~EDGE_FALLTHRU;

	  /* Continue splitting the interesting edge.  */
	}
    }

  /* Place the new block just in front of the successor.  */
  VARRAY_GROW (basic_block_info, ++n_basic_blocks);
  if (old_succ == EXIT_BLOCK_PTR)
    j = n_basic_blocks - 1;
  else
    j = old_succ->index;
  for (i = n_basic_blocks - 1; i > j; --i)
    {
      basic_block tmp = BASIC_BLOCK (i - 1);
      BASIC_BLOCK (i) = tmp;
      tmp->index = i;
    }
  BASIC_BLOCK (i) = bb;
  bb->index = i;

  /* Create the basic block note.

     Where we place the note can have a noticeable impact on the generated
     code.  Consider this cfg:

		        E
			|
			0
		       / \
		   +->1-->2--->E
		   |  |
		   +--+

     If we need to insert an insn on the edge from block 0 to block 1,
     we want to ensure the instructions we insert are outside of any
     loop notes that physically sit between block 0 and block 1.  Otherwise
     we confuse the loop optimizer into thinking the loop is a phony.  */
  if (old_succ != EXIT_BLOCK_PTR
      && PREV_INSN (old_succ->head)
      && GET_CODE (PREV_INSN (old_succ->head)) == NOTE
      && NOTE_LINE_NUMBER (PREV_INSN (old_succ->head)) == NOTE_INSN_LOOP_BEG)
    bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK,
				PREV_INSN (old_succ->head));
  else if (old_succ != EXIT_BLOCK_PTR)
    bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, old_succ->head);
  else
    bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
  NOTE_BASIC_BLOCK (bb_note) = bb;
  bb->head = bb->end = bb_note;

  /* Not quite simple -- for non-fallthru edges, we must adjust the
     predecessor's jump instruction to target our new block.  */
  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
    {
      rtx tmp, insn = old_pred->end;
      rtx old_label = old_succ->head;
      rtx new_label = gen_label_rtx ();

      if (GET_CODE (insn) != JUMP_INSN)
	abort ();

      /* ??? Recognize a tablejump and adjust all matching cases.  */
      if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
	  && (tmp = NEXT_INSN (tmp)) != NULL_RTX
	  && GET_CODE (tmp) == JUMP_INSN
	  && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
	      || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
	{
	  rtvec vec;
	  int j;

	  if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
	    vec = XVEC (PATTERN (tmp), 0);
	  else
	    vec = XVEC (PATTERN (tmp), 1);

	  for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
	    if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
	      {
		RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (VOIDmode, new_label);
		--LABEL_NUSES (old_label);
		++LABEL_NUSES (new_label);
	      }

	  /* Handle casesi dispatch insns.  */
	  if ((tmp = single_set (insn)) != NULL
	      && SET_DEST (tmp) == pc_rtx
	      && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
	      && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
	      && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
	    {
	      XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (VOIDmode,
							   new_label);
	      --LABEL_NUSES (old_label);
	      ++LABEL_NUSES (new_label);
	    }
	}
      else
	{
	  /* This would have indicated an abnormal edge.  */
	  if (computed_jump_p (insn))
	    abort ();

	  /* A return instruction can't be redirected.  */
	  if (returnjump_p (insn))
	    abort ();

	  /* If the insn doesn't go where we think, we're confused.  */
	  if (JUMP_LABEL (insn) != old_label)
	    abort ();

	  redirect_jump (insn, new_label, 0);
	}

      emit_label_before (new_label, bb_note);
      bb->head = new_label;
    }

  return bb;
}
1851 /* Queue instructions for insertion on an edge between two basic blocks.
1852 The new instructions and basic blocks (if any) will not appear in the
1853 CFG until commit_edge_insertions is called. */
1855 void
1856 insert_insn_on_edge (pattern, e)
1857 rtx pattern;
1858 edge e;
1860 /* We cannot insert instructions on an abnormal critical edge.
1861 It will be easier to find the culprit if we die now. */
1862 if ((e->flags & (EDGE_ABNORMAL|EDGE_CRITICAL))
1863 == (EDGE_ABNORMAL|EDGE_CRITICAL))
1864 abort ();
1866 if (e->insns == NULL_RTX)
1867 start_sequence ();
1868 else
1869 push_to_sequence (e->insns);
1871 emit_insn (pattern);
1873 e->insns = get_insns ();
1874 end_sequence ();
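/* Usage sketch (hypothetical pass; the register number and mode are
   made up): queue an insn clearing a hard register on each normal
   edge into BB, then realize all the queued insertions at once.  */
#if 0
static void
clear_reg_on_preds (bb)
     basic_block bb;
{
  edge e;

  for (e = bb->pred; e; e = e->pred_next)
    if (! (e->flags & EDGE_ABNORMAL))
      insert_insn_on_edge (gen_move_insn (gen_rtx_REG (SImode, 1),
					  const0_rtx), e);

  /* Splits edges and creates new blocks as needed.  */
  commit_edge_insertions ();
}
#endif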
1877 /* Update the CFG for the instructions queued on edge E. */
1879 static void
1880 commit_one_edge_insertion (e)
1881 edge e;
1883 rtx before = NULL_RTX, after = NULL_RTX, insns, tmp, last;
1884 basic_block bb;
1886 /* Pull the insns off the edge now since the edge might go away. */
1887 insns = e->insns;
1888 e->insns = NULL_RTX;
1890 /* Figure out where to put these things. If the destination has
1891 one predecessor, insert there. Except for the exit block. */
1892 if (e->dest->pred->pred_next == NULL
1893 && e->dest != EXIT_BLOCK_PTR)
1895 bb = e->dest;
1897 /* Get the location correct wrt a code label, and "nice" wrt
1898 a basic block note, and before everything else. */
1899 tmp = bb->head;
1900 if (GET_CODE (tmp) == CODE_LABEL)
1901 tmp = NEXT_INSN (tmp);
1902 if (NOTE_INSN_BASIC_BLOCK_P (tmp))
1903 tmp = NEXT_INSN (tmp);
1904 if (tmp == bb->head)
1905 before = tmp;
1906 else
1907 after = PREV_INSN (tmp);
1910 /* If the source has one successor and the edge is not abnormal,
1911 insert there. Except for the entry block. */
1912 else if ((e->flags & EDGE_ABNORMAL) == 0
1913 && e->src->succ->succ_next == NULL
1914 && e->src != ENTRY_BLOCK_PTR)
1916 bb = e->src;
1917 /* It is possible to have a non-simple jump here. Consider a target
1918 where some forms of unconditional jumps clobber a register. This
1919 happens on the fr30 for example.
1921 We know this block has a single successor, so we can just emit
1922 the queued insns before the jump. */
1923 if (GET_CODE (bb->end) == JUMP_INSN)
1925 before = bb->end;
1927 else
1929 /* We'd better be fallthru, or we've lost track of what's what. */
1930 if ((e->flags & EDGE_FALLTHRU) == 0)
1931 abort ();
1933 after = bb->end;
1937 /* Otherwise we must split the edge. */
1938 else
1940 bb = split_edge (e);
1941 after = bb->end;
1944 /* Now that we've found the spot, do the insertion. */
1946 /* Set the new block number for these insns, if structure is allocated. */
1947 if (basic_block_for_insn)
1949 rtx i;
1950 for (i = insns; i != NULL_RTX; i = NEXT_INSN (i))
1951 set_block_for_insn (i, bb);
1954 if (before)
1956 emit_insns_before (insns, before);
1957 if (before == bb->head)
1958 bb->head = insns;
1960 last = prev_nonnote_insn (before);
1962 else
1964 last = emit_insns_after (insns, after);
1965 if (after == bb->end)
1966 bb->end = last;
1969 if (returnjump_p (last))
1971 /* ??? Remove all outgoing edges from BB and add one for EXIT.
1972 This is not currently a problem because this only happens
1973 for the (single) epilogue, which already has a fallthru edge
1974 to EXIT. */
1976 e = bb->succ;
1977 if (e->dest != EXIT_BLOCK_PTR
1978 || e->succ_next != NULL
1979 || (e->flags & EDGE_FALLTHRU) == 0)
1980 abort ();
1981 e->flags &= ~EDGE_FALLTHRU;
1983 emit_barrier_after (last);
1984 bb->end = last;
1986 if (before)
1987 flow_delete_insn (before);
1989 else if (GET_CODE (last) == JUMP_INSN)
1990 abort ();
1993 /* Update the CFG for all queued instructions. */
1995 void
1996 commit_edge_insertions ()
1998 int i;
1999 basic_block bb;
2001 #ifdef ENABLE_CHECKING
2002 verify_flow_info ();
2003 #endif
2005 i = -1;
2006 bb = ENTRY_BLOCK_PTR;
2007 while (1)
2009 edge e, next;
2011 for (e = bb->succ; e; e = next)
2013 next = e->succ_next;
2014 if (e->insns)
2015 commit_one_edge_insertion (e);
2018 if (++i >= n_basic_blocks)
2019 break;
2020 bb = BASIC_BLOCK (i);
/* Add fake edges to the function exit for any non-constant calls in
the bitmap of blocks specified by BLOCKS, or to the whole CFG if
BLOCKS is zero. Return the number of blocks that were split. */
int
flow_call_edges_add (blocks)
2030 sbitmap blocks;
2032 int i;
2033 int blocks_split = 0;
2034 int bb_num = 0;
2035 basic_block *bbs;
/* Map bb indices into basic block pointers since split_block
2038 will renumber the basic blocks. */
2040 bbs = xmalloc (n_basic_blocks * sizeof (*bbs));
2042 if (! blocks)
2044 for (i = 0; i < n_basic_blocks; i++)
2045 bbs[bb_num++] = BASIC_BLOCK (i);
2047 else
2049 EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
2051 bbs[bb_num++] = BASIC_BLOCK (i);
/* Now add fake edges to the function exit for any non-constant
2057 calls since there is no way that we can determine if they will
2058 return or not... */
2060 for (i = 0; i < bb_num; i++)
2062 basic_block bb = bbs[i];
2063 rtx insn;
2064 rtx prev_insn;
2066 for (insn = bb->end; ; insn = prev_insn)
2068 prev_insn = PREV_INSN (insn);
2069 if (GET_CODE (insn) == CALL_INSN && ! CONST_CALL_P (insn))
2071 edge e;
2073 /* Note that the following may create a new basic block
2074 and renumber the existing basic blocks. */
2075 e = split_block (bb, insn);
2076 if (e)
2077 blocks_split++;
2079 make_edge (NULL, bb, EXIT_BLOCK_PTR, EDGE_FAKE);
2081 if (insn == bb->head)
2082 break;
2086 if (blocks_split)
2087 verify_flow_info ();
2089 free (bbs);
2090 return blocks_split;
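/* Usage sketch (hypothetical caller; the function name is made up):
   a null BLOCKS argument means "the whole CFG", per the comment
   above.  */
#if 0
static void
add_fake_call_edges_everywhere ()
{
  int n_split = flow_call_edges_add (NULL);

  if (rtl_dump_file)
    fprintf (rtl_dump_file, "%d blocks split adding fake call edges\n",
	     n_split);
}
#endif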
2093 /* Delete all unreachable basic blocks. */
2095 static void
2096 delete_unreachable_blocks ()
2098 basic_block *worklist, *tos;
2099 int deleted_handler;
2100 edge e;
2101 int i, n;
2103 n = n_basic_blocks;
2104 tos = worklist = (basic_block *) xmalloc (sizeof (basic_block) * n);
2106 /* Use basic_block->aux as a marker. Clear them all. */
2108 for (i = 0; i < n; ++i)
2109 BASIC_BLOCK (i)->aux = NULL;
2111 /* Add our starting points to the worklist. Almost always there will
be only one. It isn't inconceivable that we might one day directly
2113 support Fortran alternate entry points. */
2115 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
2117 *tos++ = e->dest;
2119 /* Mark the block with a handy non-null value. */
2120 e->dest->aux = e;
2123 /* Iterate: find everything reachable from what we've already seen. */
2125 while (tos != worklist)
2127 basic_block b = *--tos;
2129 for (e = b->succ; e; e = e->succ_next)
2130 if (!e->dest->aux)
2132 *tos++ = e->dest;
2133 e->dest->aux = e;
2137 /* Delete all unreachable basic blocks. Count down so that we don't
2138 interfere with the block renumbering that happens in flow_delete_block. */
2140 deleted_handler = 0;
2142 for (i = n - 1; i >= 0; --i)
2144 basic_block b = BASIC_BLOCK (i);
2146 if (b->aux != NULL)
2147 /* This block was found. Tidy up the mark. */
2148 b->aux = NULL;
2149 else
2150 deleted_handler |= flow_delete_block (b);
2153 tidy_fallthru_edges ();
2155 /* If we deleted an exception handler, we may have EH region begin/end
2156 blocks to remove as well. */
2157 if (deleted_handler)
2158 delete_eh_regions ();
2160 free (worklist);
2163 /* Find EH regions for which there is no longer a handler, and delete them. */
2165 static void
2166 delete_eh_regions ()
2168 rtx insn;
2170 update_rethrow_references ();
2172 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2173 if (GET_CODE (insn) == NOTE)
2175 if ((NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
2176 || (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
2178 int num = NOTE_EH_HANDLER (insn);
2179 /* A NULL handler indicates a region is no longer needed,
2180 as long as its rethrow label isn't used. */
2181 if (get_first_handler (num) == NULL && ! rethrow_used (num))
2183 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2184 NOTE_SOURCE_FILE (insn) = 0;
2190 /* Return true if NOTE is not one of the ones that must be kept paired,
2191 so that we may simply delete them. */
2193 static int
2194 can_delete_note_p (note)
2195 rtx note;
2197 return (NOTE_LINE_NUMBER (note) == NOTE_INSN_DELETED
2198 || NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK);
2201 /* Unlink a chain of insns between START and FINISH, leaving notes
2202 that must be paired. */
2204 void
2205 flow_delete_insn_chain (start, finish)
2206 rtx start, finish;
2208 /* Unchain the insns one by one. It would be quicker to delete all
2209 of these with a single unchaining, rather than one at a time, but
we need to keep the NOTEs. */
2212 rtx next;
2214 while (1)
2216 next = NEXT_INSN (start);
2217 if (GET_CODE (start) == NOTE && !can_delete_note_p (start))
2219 else if (GET_CODE (start) == CODE_LABEL
2220 && ! can_delete_label_p (start))
2222 const char *name = LABEL_NAME (start);
2223 PUT_CODE (start, NOTE);
2224 NOTE_LINE_NUMBER (start) = NOTE_INSN_DELETED_LABEL;
2225 NOTE_SOURCE_FILE (start) = name;
2227 else
2228 next = flow_delete_insn (start);
2230 if (start == finish)
2231 break;
2232 start = next;
2236 /* Delete the insns in a (non-live) block. We physically delete every
2237 non-deleted-note insn, and update the flow graph appropriately.
2239 Return nonzero if we deleted an exception handler. */
2241 /* ??? Preserving all such notes strikes me as wrong. It would be nice
2242 to post-process the stream to remove empty blocks, loops, ranges, etc. */
int
flow_delete_block (b)
2246 basic_block b;
2248 int deleted_handler = 0;
2249 rtx insn, end, tmp;
2251 /* If the head of this block is a CODE_LABEL, then it might be the
2252 label for an exception handler which can't be reached.
2254 We need to remove the label from the exception_handler_label list
2255 and remove the associated NOTE_INSN_EH_REGION_BEG and
2256 NOTE_INSN_EH_REGION_END notes. */
2258 insn = b->head;
2260 never_reached_warning (insn);
2262 if (GET_CODE (insn) == CODE_LABEL)
2264 rtx x, *prev = &exception_handler_labels;
2266 for (x = exception_handler_labels; x; x = XEXP (x, 1))
2268 if (XEXP (x, 0) == insn)
2270 /* Found a match, splice this label out of the EH label list. */
2271 *prev = XEXP (x, 1);
2272 XEXP (x, 1) = NULL_RTX;
2273 XEXP (x, 0) = NULL_RTX;
/* Remove the handler from all regions.  */
2276 remove_handler (insn);
2277 deleted_handler = 1;
2278 break;
2280 prev = &XEXP (x, 1);
2284 /* Include any jump table following the basic block. */
2285 end = b->end;
2286 if (GET_CODE (end) == JUMP_INSN
2287 && (tmp = JUMP_LABEL (end)) != NULL_RTX
2288 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
2289 && GET_CODE (tmp) == JUMP_INSN
2290 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
2291 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
2292 end = tmp;
2294 /* Include any barrier that may follow the basic block. */
2295 tmp = next_nonnote_insn (end);
2296 if (tmp && GET_CODE (tmp) == BARRIER)
2297 end = tmp;
2299 /* Selectively delete the entire chain. */
2300 flow_delete_insn_chain (insn, end);
2302 /* Remove the edges into and out of this block. Note that there may
2303 indeed be edges in, if we are removing an unreachable loop. */
2305 edge e, next, *q;
2307 for (e = b->pred; e; e = next)
2309 for (q = &e->src->succ; *q != e; q = &(*q)->succ_next)
2310 continue;
2311 *q = e->succ_next;
2312 next = e->pred_next;
2313 n_edges--;
2314 free (e);
2316 for (e = b->succ; e; e = next)
2318 for (q = &e->dest->pred; *q != e; q = &(*q)->pred_next)
2319 continue;
2320 *q = e->pred_next;
2321 next = e->succ_next;
2322 n_edges--;
2323 free (e);
2326 b->pred = NULL;
2327 b->succ = NULL;
2330 /* Remove the basic block from the array, and compact behind it. */
2331 expunge_block (b);
2333 return deleted_handler;
2336 /* Remove block B from the basic block array and compact behind it. */
2338 static void
2339 expunge_block (b)
2340 basic_block b;
2342 int i, n = n_basic_blocks;
2344 for (i = b->index; i + 1 < n; ++i)
2346 basic_block x = BASIC_BLOCK (i + 1);
2347 BASIC_BLOCK (i) = x;
2348 x->index = i;
2351 basic_block_info->num_elements--;
2352 n_basic_blocks--;
2355 /* Delete INSN by patching it out. Return the next insn. */
rtx
flow_delete_insn (insn)
2359 rtx insn;
2361 rtx prev = PREV_INSN (insn);
2362 rtx next = NEXT_INSN (insn);
2363 rtx note;
2365 PREV_INSN (insn) = NULL_RTX;
2366 NEXT_INSN (insn) = NULL_RTX;
2367 INSN_DELETED_P (insn) = 1;
2369 if (prev)
2370 NEXT_INSN (prev) = next;
2371 if (next)
2372 PREV_INSN (next) = prev;
2373 else
2374 set_last_insn (prev);
2376 if (GET_CODE (insn) == CODE_LABEL)
2377 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
2379 /* If deleting a jump, decrement the use count of the label. Deleting
2380 the label itself should happen in the normal course of block merging. */
2381 if (GET_CODE (insn) == JUMP_INSN
2382 && JUMP_LABEL (insn)
2383 && GET_CODE (JUMP_LABEL (insn)) == CODE_LABEL)
2384 LABEL_NUSES (JUMP_LABEL (insn))--;
2386 /* Also if deleting an insn that references a label. */
2387 else if ((note = find_reg_note (insn, REG_LABEL, NULL_RTX)) != NULL_RTX
2388 && GET_CODE (XEXP (note, 0)) == CODE_LABEL)
2389 LABEL_NUSES (XEXP (note, 0))--;
2391 return next;
2394 /* True if a given label can be deleted. */
2396 static int
2397 can_delete_label_p (label)
2398 rtx label;
2400 rtx x;
2402 if (LABEL_PRESERVE_P (label))
2403 return 0;
2405 for (x = forced_labels; x; x = XEXP (x, 1))
2406 if (label == XEXP (x, 0))
2407 return 0;
2408 for (x = label_value_list; x; x = XEXP (x, 1))
2409 if (label == XEXP (x, 0))
2410 return 0;
2411 for (x = exception_handler_labels; x; x = XEXP (x, 1))
2412 if (label == XEXP (x, 0))
2413 return 0;
2415 /* User declared labels must be preserved. */
2416 if (LABEL_NAME (label) != 0)
2417 return 0;
2419 return 1;
2422 static int
2423 tail_recursion_label_p (label)
2424 rtx label;
2426 rtx x;
2428 for (x = tail_recursion_label_list; x; x = XEXP (x, 1))
2429 if (label == XEXP (x, 0))
2430 return 1;
2432 return 0;
2435 /* Blocks A and B are to be merged into a single block A. The insns
2436 are already contiguous, hence `nomove'. */
2438 void
2439 merge_blocks_nomove (a, b)
2440 basic_block a, b;
2442 edge e;
2443 rtx b_head, b_end, a_end;
2444 rtx del_first = NULL_RTX, del_last = NULL_RTX;
2445 int b_empty = 0;
2447 /* If there was a CODE_LABEL beginning B, delete it. */
2448 b_head = b->head;
2449 b_end = b->end;
2450 if (GET_CODE (b_head) == CODE_LABEL)
2452 /* Detect basic blocks with nothing but a label. This can happen
2453 in particular at the end of a function. */
2454 if (b_head == b_end)
2455 b_empty = 1;
2456 del_first = del_last = b_head;
2457 b_head = NEXT_INSN (b_head);
2460 /* Delete the basic block note. */
2461 if (NOTE_INSN_BASIC_BLOCK_P (b_head))
2463 if (b_head == b_end)
2464 b_empty = 1;
2465 if (! del_last)
2466 del_first = b_head;
2467 del_last = b_head;
2468 b_head = NEXT_INSN (b_head);
2471 /* If there was a jump out of A, delete it. */
2472 a_end = a->end;
2473 if (GET_CODE (a_end) == JUMP_INSN)
2475 rtx prev;
2477 for (prev = PREV_INSN (a_end); ; prev = PREV_INSN (prev))
2478 if (GET_CODE (prev) != NOTE
2479 || NOTE_LINE_NUMBER (prev) == NOTE_INSN_BASIC_BLOCK
2480 || prev == a->head)
2481 break;
2483 del_first = a_end;
2485 #ifdef HAVE_cc0
2486 /* If this was a conditional jump, we need to also delete
2487 the insn that set cc0. */
2488 if (prev && sets_cc0_p (prev))
2490 rtx tmp = prev;
2491 prev = prev_nonnote_insn (prev);
2492 if (!prev)
2493 prev = a->head;
2494 del_first = tmp;
2496 #endif
2498 a_end = prev;
2500 else if (GET_CODE (NEXT_INSN (a_end)) == BARRIER)
2501 del_first = NEXT_INSN (a_end);
2503 /* Delete everything marked above as well as crap that might be
2504 hanging out between the two blocks. */
2505 flow_delete_insn_chain (del_first, del_last);
2507 /* Normally there should only be one successor of A and that is B, but
partway through the merge of blocks for conditional_execution we'll
2509 be merging a TEST block with THEN and ELSE successors. Free the
2510 whole lot of them and hope the caller knows what they're doing. */
2511 while (a->succ)
2512 remove_edge (a->succ);
2514 /* Adjust the edges out of B for the new owner. */
2515 for (e = b->succ; e; e = e->succ_next)
2516 e->src = a;
2517 a->succ = b->succ;
2519 /* B hasn't quite yet ceased to exist. Attempt to prevent mishap. */
2520 b->pred = b->succ = NULL;
2522 /* Reassociate the insns of B with A. */
2523 if (!b_empty)
2525 if (basic_block_for_insn)
2527 BLOCK_FOR_INSN (b_head) = a;
2528 while (b_head != b_end)
2530 b_head = NEXT_INSN (b_head);
2531 BLOCK_FOR_INSN (b_head) = a;
2534 a_end = b_end;
2536 a->end = a_end;
2538 expunge_block (b);
2541 /* Blocks A and B are to be merged into a single block. A has no incoming
2542 fallthru edge, so it can be moved before B without adding or modifying
2543 any jumps (aside from the jump from A to B). */
2545 static int
2546 merge_blocks_move_predecessor_nojumps (a, b)
2547 basic_block a, b;
2549 rtx start, end, barrier;
2550 int index;
2552 start = a->head;
2553 end = a->end;
2555 barrier = next_nonnote_insn (end);
2556 if (GET_CODE (barrier) != BARRIER)
2557 abort ();
2558 flow_delete_insn (barrier);
2560 /* Move block and loop notes out of the chain so that we do not
2561 disturb their order.
2563 ??? A better solution would be to squeeze out all the non-nested notes
2564 and adjust the block trees appropriately. Even better would be to have
2565 a tighter connection between block trees and rtl so that this is not
2566 necessary. */
2567 start = squeeze_notes (start, end);
2569 /* Scramble the insn chain. */
2570 if (end != PREV_INSN (b->head))
2571 reorder_insns (start, end, PREV_INSN (b->head));
2573 if (rtl_dump_file)
2575 fprintf (rtl_dump_file, "Moved block %d before %d and merged.\n",
2576 a->index, b->index);
2579 /* Swap the records for the two blocks around. Although we are deleting B,
2580 A is now where B was and we want to compact the BB array from where
2581 A used to be. */
2582 BASIC_BLOCK (a->index) = b;
2583 BASIC_BLOCK (b->index) = a;
2584 index = a->index;
2585 a->index = b->index;
2586 b->index = index;
2588 /* Now blocks A and B are contiguous. Merge them. */
2589 merge_blocks_nomove (a, b);
2591 return 1;
2594 /* Blocks A and B are to be merged into a single block. B has no outgoing
2595 fallthru edge, so it can be moved after A without adding or modifying
2596 any jumps (aside from the jump from A to B). */
2598 static int
2599 merge_blocks_move_successor_nojumps (a, b)
2600 basic_block a, b;
2602 rtx start, end, barrier;
2604 start = b->head;
2605 end = b->end;
2606 barrier = NEXT_INSN (end);
2608 /* Recognize a jump table following block B. */
2609 if (GET_CODE (barrier) == CODE_LABEL
2610 && NEXT_INSN (barrier)
2611 && GET_CODE (NEXT_INSN (barrier)) == JUMP_INSN
2612 && (GET_CODE (PATTERN (NEXT_INSN (barrier))) == ADDR_VEC
2613 || GET_CODE (PATTERN (NEXT_INSN (barrier))) == ADDR_DIFF_VEC))
2615 end = NEXT_INSN (barrier);
2616 barrier = NEXT_INSN (end);
2619 /* There had better have been a barrier there. Delete it. */
2620 if (GET_CODE (barrier) != BARRIER)
2621 abort ();
2622 flow_delete_insn (barrier);
2624 /* Move block and loop notes out of the chain so that we do not
2625 disturb their order.
2627 ??? A better solution would be to squeeze out all the non-nested notes
2628 and adjust the block trees appropriately. Even better would be to have
2629 a tighter connection between block trees and rtl so that this is not
2630 necessary. */
2631 start = squeeze_notes (start, end);
2633 /* Scramble the insn chain. */
2634 reorder_insns (start, end, a->end);
2636 /* Now blocks A and B are contiguous. Merge them. */
2637 merge_blocks_nomove (a, b);
2639 if (rtl_dump_file)
2641 fprintf (rtl_dump_file, "Moved block %d after %d and merged.\n",
2642 b->index, a->index);
2645 return 1;
2648 /* Attempt to merge basic blocks that are potentially non-adjacent.
2649 Return true iff the attempt succeeded. */
2651 static int
2652 merge_blocks (e, b, c)
2653 edge e;
2654 basic_block b, c;
2656 /* If C has a tail recursion label, do not merge. There is no
2657 edge recorded from the call_placeholder back to this label, as
2658 that would make optimize_sibling_and_tail_recursive_calls more
2659 complex for no gain. */
2660 if (GET_CODE (c->head) == CODE_LABEL
2661 && tail_recursion_label_p (c->head))
2662 return 0;
2664 /* If B has a fallthru edge to C, no need to move anything. */
2665 if (e->flags & EDGE_FALLTHRU)
2667 merge_blocks_nomove (b, c);
2669 if (rtl_dump_file)
2671 fprintf (rtl_dump_file, "Merged %d and %d without moving.\n",
2672 b->index, c->index);
2675 return 1;
2677 else
2679 edge tmp_edge;
2680 basic_block d;
2681 int c_has_outgoing_fallthru;
2682 int b_has_incoming_fallthru;
/* We must make sure not to munge the nesting of exception regions,
2685 lexical blocks, and loop notes.
2687 The first is taken care of by requiring that the active eh
2688 region at the end of one block always matches the active eh
2689 region at the beginning of the next block.
The latter two are taken care of by squeezing out all the notes. */
2693 /* ??? A throw/catch edge (or any abnormal edge) should be rarely
2694 executed and we may want to treat blocks which have two out
2695 edges, one normal, one abnormal as only having one edge for
2696 block merging purposes. */
2698 for (tmp_edge = c->succ; tmp_edge; tmp_edge = tmp_edge->succ_next)
2699 if (tmp_edge->flags & EDGE_FALLTHRU)
2700 break;
2701 c_has_outgoing_fallthru = (tmp_edge != NULL);
2703 for (tmp_edge = b->pred; tmp_edge; tmp_edge = tmp_edge->pred_next)
2704 if (tmp_edge->flags & EDGE_FALLTHRU)
2705 break;
2706 b_has_incoming_fallthru = (tmp_edge != NULL);
2708 /* If B does not have an incoming fallthru, and the exception regions
2709 match, then it can be moved immediately before C without introducing
2710 or modifying jumps.
C cannot be the first block, so we do not have to worry about
2713 accessing a non-existent block. */
2714 d = BASIC_BLOCK (c->index - 1);
2715 if (! b_has_incoming_fallthru
2716 && d->eh_end == b->eh_beg
2717 && b->eh_end == c->eh_beg)
2718 return merge_blocks_move_predecessor_nojumps (b, c);
2720 /* Otherwise, we're going to try to move C after B. Make sure the
2721 exception regions match.
2723 If B is the last basic block, then we must not try to access the
2724 block structure for block B + 1. Luckily in that case we do not
2725 need to worry about matching exception regions. */
2726 d = (b->index + 1 < n_basic_blocks ? BASIC_BLOCK (b->index + 1) : NULL);
2727 if (b->eh_end == c->eh_beg
2728 && (d == NULL || c->eh_end == d->eh_beg))
2730 /* If C does not have an outgoing fallthru, then it can be moved
2731 immediately after B without introducing or modifying jumps. */
2732 if (! c_has_outgoing_fallthru)
2733 return merge_blocks_move_successor_nojumps (b, c);
2735 /* Otherwise, we'll need to insert an extra jump, and possibly
2736 a new block to contain it. */
2737 /* ??? Not implemented yet. */
2740 return 0;
2744 /* Top level driver for merge_blocks. */
2746 static void
2747 try_merge_blocks ()
2749 int i;
2751 /* Attempt to merge blocks as made possible by edge removal. If a block
2752 has only one successor, and the successor has only one predecessor,
2753 they may be combined. */
2755 for (i = 0; i < n_basic_blocks;)
2757 basic_block c, b = BASIC_BLOCK (i);
2758 edge s;
/* A loop because chains of blocks might be combinable. */
2761 while ((s = b->succ) != NULL
2762 && s->succ_next == NULL
2763 && (s->flags & EDGE_EH) == 0
2764 && (c = s->dest) != EXIT_BLOCK_PTR
2765 && c->pred->pred_next == NULL
2766 /* If the jump insn has side effects, we can't kill the edge. */
2767 && (GET_CODE (b->end) != JUMP_INSN
2768 || onlyjump_p (b->end))
2769 && merge_blocks (s, b, c))
2770 continue;
2772 /* Don't get confused by the index shift caused by deleting blocks. */
2773 i = b->index + 1;
2777 /* The given edge should potentially be a fallthru edge. If that is in
2778 fact true, delete the jump and barriers that are in the way. */
2780 void
2781 tidy_fallthru_edge (e, b, c)
2782 edge e;
2783 basic_block b, c;
2785 rtx q;
2787 /* ??? In a late-running flow pass, other folks may have deleted basic
2788 blocks by nopping out blocks, leaving multiple BARRIERs between here
and the target label. They ought to be chastised and fixed.
2791 We can also wind up with a sequence of undeletable labels between
2792 one block and the next.
2794 So search through a sequence of barriers, labels, and notes for
2795 the head of block C and assert that we really do fall through. */
2797 if (next_real_insn (b->end) != next_real_insn (PREV_INSN (c->head)))
2798 return;
2800 /* Remove what will soon cease being the jump insn from the source block.
2801 If block B consisted only of this single jump, turn it into a deleted
2802 note. */
2803 q = b->end;
2804 if (GET_CODE (q) == JUMP_INSN
2805 && onlyjump_p (q)
2806 && (any_uncondjump_p (q)
2807 || (b->succ == e && e->succ_next == NULL)))
2809 #ifdef HAVE_cc0
2810 /* If this was a conditional jump, we need to also delete
2811 the insn that set cc0. */
2812 if (any_condjump_p (q) && sets_cc0_p (PREV_INSN (q)))
2813 q = PREV_INSN (q);
2814 #endif
2816 if (b->head == q)
2818 PUT_CODE (q, NOTE);
2819 NOTE_LINE_NUMBER (q) = NOTE_INSN_DELETED;
2820 NOTE_SOURCE_FILE (q) = 0;
2822 else
2824 q = PREV_INSN (q);
2826 /* We don't want a block to end on a line-number note since that has
2827 the potential of changing the code between -g and not -g. */
2828 while (GET_CODE (q) == NOTE && NOTE_LINE_NUMBER (q) >= 0)
2829 q = PREV_INSN (q);
2832 b->end = q;
2835 /* Selectively unlink the sequence. */
2836 if (q != PREV_INSN (c->head))
2837 flow_delete_insn_chain (NEXT_INSN (q), PREV_INSN (c->head));
2839 e->flags |= EDGE_FALLTHRU;
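/* For concreteness (illustrative insn stream, label made up): given

	... last real insn of block B ...
	(jump_insn (set (pc) (label_ref L)))
	(barrier)
     L: (code_label ...)		<- head of block C

   the jump and barrier are removed (the jump is instead turned into
   a NOTE_INSN_DELETED when it is all of B), and E is marked
   EDGE_FALLTHRU.  */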
2842 /* Fix up edges that now fall through, or rather should now fall through
2843 but previously required a jump around now deleted blocks. Simplify
2844 the search by only examining blocks numerically adjacent, since this
2845 is how find_basic_blocks created them. */
2847 static void
2848 tidy_fallthru_edges ()
2850 int i;
2852 for (i = 1; i < n_basic_blocks; ++i)
2854 basic_block b = BASIC_BLOCK (i - 1);
2855 basic_block c = BASIC_BLOCK (i);
2856 edge s;
2858 /* We care about simple conditional or unconditional jumps with
2859 a single successor.
2861 If we had a conditional branch to the next instruction when
2862 find_basic_blocks was called, then there will only be one
2863 out edge for the block which ended with the conditional
2864 branch (since we do not create duplicate edges).
2866 Furthermore, the edge will be marked as a fallthru because we
2867 merge the flags for the duplicate edges. So we do not want to
2868 check that the edge is not a FALLTHRU edge. */
2869 if ((s = b->succ) != NULL
2870 && s->succ_next == NULL
2871 && s->dest == c
2872 /* If the jump insn has side effects, we can't tidy the edge. */
2873 && (GET_CODE (b->end) != JUMP_INSN
2874 || onlyjump_p (b->end)))
2875 tidy_fallthru_edge (s, b, c);
2879 /* Perform data flow analysis.
2880 F is the first insn of the function; FLAGS is a set of PROP_* flags
2881 to be used in accumulating flow info. */
2883 void
2884 life_analysis (f, file, flags)
2885 rtx f;
2886 FILE *file;
2887 int flags;
2889 #ifdef ELIMINABLE_REGS
2890 register int i;
2891 static struct {int from, to; } eliminables[] = ELIMINABLE_REGS;
2892 #endif
2894 /* Record which registers will be eliminated. We use this in
2895 mark_used_regs. */
2897 CLEAR_HARD_REG_SET (elim_reg_set);
2899 #ifdef ELIMINABLE_REGS
2900 for (i = 0; i < (int) ARRAY_SIZE (eliminables); i++)
2901 SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
2902 #else
2903 SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
2904 #endif
2906 if (! optimize)
2907 flags &= ~(PROP_LOG_LINKS | PROP_AUTOINC);
/* The post-reload life analysis has (on a global basis) the same
registers live as were computed by reload itself; otherwise the
elimination offsets and such may be incorrect.

Reload will mark some registers live even though they do not
2914 appear in the rtl.
2916 We don't want to create new auto-incs after reload, since they
2917 are unlikely to be useful and can cause problems with shared
2918 stack slots. */
2919 if (reload_completed)
2920 flags &= ~(PROP_REG_INFO | PROP_AUTOINC);
2922 /* We want alias analysis information for local dead store elimination. */
2923 if (optimize && (flags & PROP_SCAN_DEAD_CODE))
2924 init_alias_analysis ();
2926 /* Always remove no-op moves. Do this before other processing so
2927 that we don't have to keep re-scanning them. */
2928 delete_noop_moves (f);
2930 /* Some targets can emit simpler epilogues if they know that sp was
2931 not ever modified during the function. After reload, of course,
2932 we've already emitted the epilogue so there's no sense searching. */
2933 if (! reload_completed)
2934 notice_stack_pointer_modification (f);
2936 /* Allocate and zero out data structures that will record the
2937 data from lifetime analysis. */
2938 allocate_reg_life_data ();
2939 allocate_bb_life_data ();
2941 /* Find the set of registers live on function exit. */
2942 mark_regs_live_at_end (EXIT_BLOCK_PTR->global_live_at_start);
2944 /* "Update" life info from zero. It'd be nice to begin the
2945 relaxation with just the exit and noreturn blocks, but that set
2946 is not immediately handy. */
2948 if (flags & PROP_REG_INFO)
2949 memset (regs_ever_live, 0, sizeof (regs_ever_live));
2950 update_life_info (NULL, UPDATE_LIFE_GLOBAL, flags);
2952 /* Clean up. */
2953 if (optimize && (flags & PROP_SCAN_DEAD_CODE))
2954 end_alias_analysis ();
2956 if (file)
2957 dump_flow_info (file);
2959 free_basic_block_vars (1);
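/* Driver-level sketch (illustrative; mirrors how toplev-style callers
   sequence these passes -- the flag choice is a plausible one, not a
   prescription):  */
#if 0
find_basic_blocks (get_insns (), max_reg_num (), rtl_dump_file);
life_analysis (get_insns (), rtl_dump_file, PROP_FINAL);
#endif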
2962 /* A subroutine of verify_wide_reg, called through for_each_rtx.
2963 Search for REGNO. If found, abort if it is not wider than word_mode. */
2965 static int
2966 verify_wide_reg_1 (px, pregno)
2967 rtx *px;
2968 void *pregno;
2970 rtx x = *px;
2971 unsigned int regno = *(int *) pregno;
2973 if (GET_CODE (x) == REG && REGNO (x) == regno)
2975 if (GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD)
2976 abort ();
2977 return 1;
2979 return 0;
2982 /* A subroutine of verify_local_live_at_start. Search through insns
2983 between HEAD and END looking for register REGNO. */
2985 static void
2986 verify_wide_reg (regno, head, end)
2987 int regno;
2988 rtx head, end;
2990 while (1)
2992 if (INSN_P (head)
2993 && for_each_rtx (&PATTERN (head), verify_wide_reg_1, &regno))
2994 return;
2995 if (head == end)
2996 break;
2997 head = NEXT_INSN (head);
3000 /* We didn't find the register at all. Something's way screwy. */
3001 if (rtl_dump_file)
3002 fprintf (rtl_dump_file, "Aborting in verify_wide_reg; reg %d\n", regno);
3003 print_rtl_and_abort ();
3006 /* A subroutine of update_life_info. Verify that there are no untoward
3007 changes in live_at_start during a local update. */
3009 static void
3010 verify_local_live_at_start (new_live_at_start, bb)
3011 regset new_live_at_start;
3012 basic_block bb;
3014 if (reload_completed)
3016 /* After reload, there are no pseudos, nor subregs of multi-word
3017 registers. The regsets should exactly match. */
3018 if (! REG_SET_EQUAL_P (new_live_at_start, bb->global_live_at_start))
3020 if (rtl_dump_file)
3022 fprintf (rtl_dump_file,
3023 "live_at_start mismatch in bb %d, aborting\n",
3024 bb->index);
3025 debug_bitmap_file (rtl_dump_file, bb->global_live_at_start);
3026 debug_bitmap_file (rtl_dump_file, new_live_at_start);
3028 print_rtl_and_abort ();
3031 else
3033 int i;
3035 /* Find the set of changed registers. */
3036 XOR_REG_SET (new_live_at_start, bb->global_live_at_start);
3038 EXECUTE_IF_SET_IN_REG_SET (new_live_at_start, 0, i,
3040 /* No registers should die. */
3041 if (REGNO_REG_SET_P (bb->global_live_at_start, i))
3043 if (rtl_dump_file)
3044 fprintf (rtl_dump_file,
3045 "Register %d died unexpectedly in block %d\n", i,
3046 bb->index);
3047 print_rtl_and_abort ();
3050 /* Verify that the now-live register is wider than word_mode. */
3051 verify_wide_reg (i, bb->head, bb->end);
3056 /* Updates life information starting with the basic blocks set in BLOCKS.
3057 If BLOCKS is null, consider it to be the universal set.
If EXTENT is UPDATE_LIFE_LOCAL, such as after splitting or peepholing,
3060 we are only expecting local modifications to basic blocks. If we find
3061 extra registers live at the beginning of a block, then we either killed
3062 useful data, or we have a broken split that wants data not provided.
3063 If we find registers removed from live_at_start, that means we have
3064 a broken peephole that is killing a register it shouldn't.
3066 ??? This is not true in one situation -- when a pre-reload splitter
3067 generates subregs of a multi-word pseudo, current life analysis will
3068 lose the kill. So we _can_ have a pseudo go live. How irritating.
3070 Including PROP_REG_INFO does not properly refresh regs_ever_live
3071 unless the caller resets it to zero. */
3073 void
3074 update_life_info (blocks, extent, prop_flags)
3075 sbitmap blocks;
3076 enum update_life_extent extent;
3077 int prop_flags;
3079 regset tmp;
3080 regset_head tmp_head;
3081 int i;
3083 tmp = INITIALIZE_REG_SET (tmp_head);
3085 /* For a global update, we go through the relaxation process again. */
3086 if (extent != UPDATE_LIFE_LOCAL)
3088 calculate_global_regs_live (blocks, blocks,
3089 prop_flags & PROP_SCAN_DEAD_CODE);
3091 /* If asked, remove notes from the blocks we'll update. */
3092 if (extent == UPDATE_LIFE_GLOBAL_RM_NOTES)
3093 count_or_remove_death_notes (blocks, 1);
3096 if (blocks)
3098 EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
3100 basic_block bb = BASIC_BLOCK (i);
3102 COPY_REG_SET (tmp, bb->global_live_at_end);
3103 propagate_block (bb, tmp, NULL, NULL, prop_flags);
3105 if (extent == UPDATE_LIFE_LOCAL)
3106 verify_local_live_at_start (tmp, bb);
3109 else
3111 for (i = n_basic_blocks - 1; i >= 0; --i)
3113 basic_block bb = BASIC_BLOCK (i);
3115 COPY_REG_SET (tmp, bb->global_live_at_end);
3116 propagate_block (bb, tmp, NULL, NULL, prop_flags);
3118 if (extent == UPDATE_LIFE_LOCAL)
3119 verify_local_live_at_start (tmp, bb);
3123 FREE_REG_SET (tmp);
3125 if (prop_flags & PROP_REG_INFO)
3127 /* The only pseudos that are live at the beginning of the function
3128 are those that were not set anywhere in the function. local-alloc
3129 doesn't know how to handle these correctly, so mark them as not
3130 local to any one basic block. */
3131 EXECUTE_IF_SET_IN_REG_SET (ENTRY_BLOCK_PTR->global_live_at_end,
3132 FIRST_PSEUDO_REGISTER, i,
3133 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
3135 /* We have a problem with any pseudoreg that lives across the setjmp.
3136 ANSI says that if a user variable does not change in value between
3137 the setjmp and the longjmp, then the longjmp preserves it. This
3138 includes longjmp from a place where the pseudo appears dead.
3139 (In principle, the value still exists if it is in scope.)
3140 If the pseudo goes in a hard reg, some other value may occupy
3141 that hard reg where this pseudo is dead, thus clobbering the pseudo.
3142 Conclusion: such a pseudo must not go in a hard reg. */
3143 EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
3144 FIRST_PSEUDO_REGISTER, i,
3146 if (regno_reg_rtx[i] != 0)
3148 REG_LIVE_LENGTH (i) = -1;
3149 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
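/* Usage sketch (hypothetical pass; the function name is made up):
   after locally rewriting block BB, refresh life information for just
   that block.  UPDATE_LIFE_LOCAL also makes
   verify_local_live_at_start check the result.  */
#if 0
static void
refresh_one_block (bb)
     basic_block bb;
{
  sbitmap blocks = sbitmap_alloc (n_basic_blocks);

  sbitmap_zero (blocks);
  SET_BIT (blocks, bb->index);
  update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
  sbitmap_free (blocks);
}
#endif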
3155 /* Free the variables allocated by find_basic_blocks.
3157 KEEP_HEAD_END_P is non-zero if basic_block_info is not to be freed. */
3159 void
3160 free_basic_block_vars (keep_head_end_p)
3161 int keep_head_end_p;
3163 if (basic_block_for_insn)
3165 VARRAY_FREE (basic_block_for_insn);
3166 basic_block_for_insn = NULL;
3169 if (! keep_head_end_p)
3171 clear_edges ();
3172 VARRAY_FREE (basic_block_info);
3173 n_basic_blocks = 0;
3175 ENTRY_BLOCK_PTR->aux = NULL;
3176 ENTRY_BLOCK_PTR->global_live_at_end = NULL;
3177 EXIT_BLOCK_PTR->aux = NULL;
3178 EXIT_BLOCK_PTR->global_live_at_start = NULL;
3182 /* Return nonzero if the destination of SET equals the source. */
3184 static int
3185 set_noop_p (set)
3186 rtx set;
3188 rtx src = SET_SRC (set);
3189 rtx dst = SET_DEST (set);
3191 if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
3193 if (SUBREG_WORD (src) != SUBREG_WORD (dst))
3194 return 0;
3195 src = SUBREG_REG (src);
3196 dst = SUBREG_REG (dst);
3199 return (GET_CODE (src) == REG && GET_CODE (dst) == REG
3200 && REGNO (src) == REGNO (dst));
3203 /* Return nonzero if an insn consists only of SETs, each of which only sets a
3204 value to itself. */
3206 static int
3207 noop_move_p (insn)
3208 rtx insn;
3210 rtx pat = PATTERN (insn);
3212 /* Insns carrying these notes are useful later on. */
3213 if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
3214 return 0;
3216 if (GET_CODE (pat) == SET && set_noop_p (pat))
3217 return 1;
3219 if (GET_CODE (pat) == PARALLEL)
3221 int i;
3222 /* If nothing but SETs of registers to themselves,
3223 this insn can also be deleted. */
3224 for (i = 0; i < XVECLEN (pat, 0); i++)
3226 rtx tem = XVECEXP (pat, 0, i);
3228 if (GET_CODE (tem) == USE
3229 || GET_CODE (tem) == CLOBBER)
3230 continue;
3232 if (GET_CODE (tem) != SET || ! set_noop_p (tem))
3233 return 0;
3236 return 1;
3238 return 0;
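/* For concreteness (illustrative RTL, register numbers made up):
   noop_move_p accepts

	(set (reg:SI 65) (reg:SI 65))

   and a PARALLEL whose non-USE, non-CLOBBER elements are all such
   sets, but it rejects any insn carrying a REG_EQUAL note, since
   those notes are useful later on.  */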
3241 /* Delete any insns that copy a register to itself. */
3243 static void
3244 delete_noop_moves (f)
3245 rtx f;
3247 rtx insn;
3248 for (insn = f; insn; insn = NEXT_INSN (insn))
3250 if (GET_CODE (insn) == INSN && noop_move_p (insn))
3252 PUT_CODE (insn, NOTE);
3253 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
3254 NOTE_SOURCE_FILE (insn) = 0;
3259 /* Determine if the stack pointer is constant over the life of the function.
3260 Only useful before prologues have been emitted. */
3262 static void
3263 notice_stack_pointer_modification_1 (x, pat, data)
3264 rtx x;
3265 rtx pat ATTRIBUTE_UNUSED;
3266 void *data ATTRIBUTE_UNUSED;
3268 if (x == stack_pointer_rtx
3269 /* The stack pointer is only modified indirectly as the result
3270 of a push until later in flow. See the comments in rtl.texi
3271 regarding Embedded Side-Effects on Addresses. */
3272 || (GET_CODE (x) == MEM
3273 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == 'a'
3274 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx))
3275 current_function_sp_is_unchanging = 0;
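/* E.g. (illustrative): a push on a downward-growing stack,

	(set (mem:SI (pre_dec:SI (reg:SI sp))) (reg:SI 0))

   modifies sp only through the autoincrement address, which is what
   the GET_RTX_CLASS == 'a' test above catches.  */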
3278 static void
3279 notice_stack_pointer_modification (f)
3280 rtx f;
3282 rtx insn;
3284 /* Assume that the stack pointer is unchanging if alloca hasn't
3285 been used. */
3286 current_function_sp_is_unchanging = !current_function_calls_alloca;
3287 if (! current_function_sp_is_unchanging)
3288 return;
3290 for (insn = f; insn; insn = NEXT_INSN (insn))
3292 if (INSN_P (insn))
3294 /* Check if insn modifies the stack pointer. */
3295 note_stores (PATTERN (insn), notice_stack_pointer_modification_1,
3296 NULL);
3297 if (! current_function_sp_is_unchanging)
3298 return;
3303 /* Mark a register in SET. Hard registers in large modes get all
3304 of their component registers set as well. */
3306 static void
3307 mark_reg (reg, xset)
3308 rtx reg;
3309 void *xset;
3311 regset set = (regset) xset;
3312 int regno = REGNO (reg);
3314 if (GET_MODE (reg) == BLKmode)
3315 abort ();
3317 SET_REGNO_REG_SET (set, regno);
3318 if (regno < FIRST_PSEUDO_REGISTER)
3320 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
3321 while (--n > 0)
3322 SET_REGNO_REG_SET (set, regno + n);
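/* E.g. (illustrative, 32-bit target): a DImode value in hard reg 0
   occupies regs 0 and 1; HARD_REGNO_NREGS returns 2, so both bits
   end up set.  */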
3326 /* Mark those regs which are needed at the end of the function as live
3327 at the end of the last basic block. */
3329 static void
3330 mark_regs_live_at_end (set)
3331 regset set;
3333 int i;
3335 /* If exiting needs the right stack value, consider the stack pointer
3336 live at the end of the function. */
3337 if ((HAVE_epilogue && reload_completed)
3338 || ! EXIT_IGNORE_STACK
3339 || (! FRAME_POINTER_REQUIRED
3340 && ! current_function_calls_alloca
3341 && flag_omit_frame_pointer)
3342 || current_function_sp_is_unchanging)
3344 SET_REGNO_REG_SET (set, STACK_POINTER_REGNUM);
3347 /* Mark the frame pointer if needed at the end of the function. If
3348 we end up eliminating it, it will be removed from the live list
3349 of each basic block by reload. */
3351 if (! reload_completed || frame_pointer_needed)
3353 SET_REGNO_REG_SET (set, FRAME_POINTER_REGNUM);
3354 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3355 /* If they are different, also mark the hard frame pointer as live. */
3356 if (! LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
3357 SET_REGNO_REG_SET (set, HARD_FRAME_POINTER_REGNUM);
3358 #endif
3361 #ifdef PIC_OFFSET_TABLE_REGNUM
3362 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
3363 /* Many architectures have a GP register even without flag_pic.
3364 Assume the pic register is not in use, or will be handled by
3365 other means, if it is not fixed. */
3366 if (fixed_regs[PIC_OFFSET_TABLE_REGNUM])
3367 SET_REGNO_REG_SET (set, PIC_OFFSET_TABLE_REGNUM);
3368 #endif
3369 #endif
3371 /* Mark all global registers, and all registers used by the epilogue
3372 as being live at the end of the function since they may be
3373 referenced by our caller. */
3374 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3375 if (global_regs[i] || EPILOGUE_USES (i))
3376 SET_REGNO_REG_SET (set, i);
/* Mark all call-saved registers that we actually used. */
3379 if (HAVE_epilogue && reload_completed)
3381 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3382 if (regs_ever_live[i] && ! call_used_regs[i] && ! LOCAL_REGNO (i))
3383 SET_REGNO_REG_SET (set, i);
3386 /* Mark function return value. */
3387 diddle_return_value (mark_reg, set);
3390 /* Callback function for for_each_successor_phi. DATA is a regset.
3391 Sets the SRC_REGNO, the regno of the phi alternative for phi node
3392 INSN, in the regset. */
3394 static int
3395 set_phi_alternative_reg (insn, dest_regno, src_regno, data)
3396 rtx insn ATTRIBUTE_UNUSED;
3397 int dest_regno ATTRIBUTE_UNUSED;
3398 int src_regno;
3399 void *data;
3401 regset live = (regset) data;
3402 SET_REGNO_REG_SET (live, src_regno);
3403 return 0;
3406 /* Propagate global life info around the graph of basic blocks. Begin
3407 considering blocks with their corresponding bit set in BLOCKS_IN.
3408 If BLOCKS_IN is null, consider it the universal set.
3410 BLOCKS_OUT is set for every block that was changed. */
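/* In textbook terms (a sketch, not lifted verbatim from this file),
   the relaxation solves the backward dataflow equations

	live_at_end (B)   = UNION over successors S of live_at_start (S)
	live_at_start (B) = use (B) | (live_at_end (B) & ~def (B))

   with propagate_block acting as the use/def transfer function,
   scanning B's insns from last to first.  */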
3412 static void
3413 calculate_global_regs_live (blocks_in, blocks_out, flags)
3414 sbitmap blocks_in, blocks_out;
3415 int flags;
3417 basic_block *queue, *qhead, *qtail, *qend;
3418 regset tmp, new_live_at_end;
3419 regset_head tmp_head;
3420 regset_head new_live_at_end_head;
3421 int i;
3423 tmp = INITIALIZE_REG_SET (tmp_head);
3424 new_live_at_end = INITIALIZE_REG_SET (new_live_at_end_head);
3426 /* Create a worklist. Allocate an extra slot for ENTRY_BLOCK, and one
3427 because the `head == tail' style test for an empty queue doesn't
3428 work with a full queue. */
3429 queue = (basic_block *) xmalloc ((n_basic_blocks + 2) * sizeof (*queue));
3430 qtail = queue;
3431 qhead = qend = queue + n_basic_blocks + 2;
3433 /* Queue the blocks set in the initial mask. Do this in reverse block
number order so that the first round is more likely to do
3435 useful work. We use AUX non-null to flag that the block is queued. */
3436 if (blocks_in)
3438 /* Clear out the garbage that might be hanging out in bb->aux. */
3439 for (i = n_basic_blocks - 1; i >= 0; --i)
3440 BASIC_BLOCK (i)->aux = NULL;
3442 EXECUTE_IF_SET_IN_SBITMAP (blocks_in, 0, i,
3444 basic_block bb = BASIC_BLOCK (i);
3445 *--qhead = bb;
3446 bb->aux = bb;
3449 else
3451 for (i = 0; i < n_basic_blocks; ++i)
3453 basic_block bb = BASIC_BLOCK (i);
3454 *--qhead = bb;
3455 bb->aux = bb;
3459 if (blocks_out)
3460 sbitmap_zero (blocks_out);
3462 while (qhead != qtail)
3464 int rescan, changed;
3465 basic_block bb;
3466 edge e;
3468 bb = *qhead++;
3469 if (qhead == qend)
3470 qhead = queue;
3471 bb->aux = NULL;
/* Begin by propagating live_at_start from the successor blocks. */
3474 CLEAR_REG_SET (new_live_at_end);
3475 for (e = bb->succ; e; e = e->succ_next)
3477 basic_block sb = e->dest;
3478 IOR_REG_SET (new_live_at_end, sb->global_live_at_start);
3481 /* The all-important stack pointer must always be live. */
3482 SET_REGNO_REG_SET (new_live_at_end, STACK_POINTER_REGNUM);
3484 /* Before reload, there are a few registers that must be forced
3485 live everywhere -- which might not already be the case for
3486 blocks within infinite loops. */
3487 if (! reload_completed)
3489 /* Any reference to any pseudo before reload is a potential
3490 reference of the frame pointer. */
3491 SET_REGNO_REG_SET (new_live_at_end, FRAME_POINTER_REGNUM);
3493 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3494 /* Pseudos with argument area equivalences may require
3495 reloading via the argument pointer. */
3496 if (fixed_regs[ARG_POINTER_REGNUM])
3497 SET_REGNO_REG_SET (new_live_at_end, ARG_POINTER_REGNUM);
3498 #endif
3500 #ifdef PIC_OFFSET_TABLE_REGNUM
3501 /* Any constant, or pseudo with constant equivalences, may
3502 require reloading from memory using the pic register. */
3503 if (fixed_regs[PIC_OFFSET_TABLE_REGNUM])
3504 SET_REGNO_REG_SET (new_live_at_end, PIC_OFFSET_TABLE_REGNUM);
3505 #endif
3508 /* Regs used in phi nodes are not included in
3509 global_live_at_start, since they are live only along a
3510 particular edge. Set those regs that are live because of a
3511 phi node alternative corresponding to this particular block. */
3512 if (in_ssa_form)
3513 for_each_successor_phi (bb, &set_phi_alternative_reg,
3514 new_live_at_end);
3516 if (bb == ENTRY_BLOCK_PTR)
3518 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3519 continue;
3522 /* On our first pass through this block, we'll go ahead and continue.
Recognize the first pass by local_set being NULL. On subsequent passes, we
3524 get to skip out early if live_at_end wouldn't have changed. */
3526 if (bb->local_set == NULL)
3528 bb->local_set = OBSTACK_ALLOC_REG_SET (&flow_obstack);
3529 bb->cond_local_set = OBSTACK_ALLOC_REG_SET (&flow_obstack);
3530 rescan = 1;
3532 else
3534 /* If any bits were removed from live_at_end, we'll have to
3535 rescan the block. This wouldn't be necessary if we had
3536 precalculated local_live, however with PROP_SCAN_DEAD_CODE
3537 local_live is really dependent on live_at_end. */
3538 CLEAR_REG_SET (tmp);
3539 rescan = bitmap_operation (tmp, bb->global_live_at_end,
3540 new_live_at_end, BITMAP_AND_COMPL);
3542 if (! rescan)
3544 /* If any of the registers in the new live_at_end set are
3545 conditionally set in this basic block, we must rescan.
3546 This is because conditional lifetimes at the end of the
3547 block do not just take the live_at_end set into account,
3548 but also the liveness at the start of each successor
3549 block. We can miss changes in those sets if we only
3550 compare the new live_at_end against the previous one. */
3551 CLEAR_REG_SET (tmp);
3552 rescan = bitmap_operation (tmp, new_live_at_end,
3553 bb->cond_local_set, BITMAP_AND);
3556 if (! rescan)
3558 /* Find the set of changed bits. Take this opportunity
to notice that this set is empty and exit early. */
3560 CLEAR_REG_SET (tmp);
3561 changed = bitmap_operation (tmp, bb->global_live_at_end,
3562 new_live_at_end, BITMAP_XOR);
3563 if (! changed)
3564 continue;
3566 /* If any of the changed bits overlap with local_set,
3567 we'll have to rescan the block. Detect overlap by
3568 the AND with ~local_set turning off bits. */
3569 rescan = bitmap_operation (tmp, tmp, bb->local_set,
3570 BITMAP_AND_COMPL);
3574 /* Let our caller know that BB changed enough to require its
3575 death notes updated. */
3576 if (blocks_out)
3577 SET_BIT (blocks_out, bb->index);
3579 if (! rescan)
3581 /* Add to live_at_start the set of all registers in
3582 new_live_at_end that aren't in the old live_at_end. */
3584 bitmap_operation (tmp, new_live_at_end, bb->global_live_at_end,
3585 BITMAP_AND_COMPL);
3586 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3588 changed = bitmap_operation (bb->global_live_at_start,
3589 bb->global_live_at_start,
3590 tmp, BITMAP_IOR);
3591 if (! changed)
3592 continue;
3594 else
3596 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3598 /* Rescan the block insn by insn to turn (a copy of) live_at_end
3599 into live_at_start. */
3600 propagate_block (bb, new_live_at_end, bb->local_set,
3601 bb->cond_local_set, flags);
/* If live_at_start didn't change, no need to go farther. */
3604 if (REG_SET_EQUAL_P (bb->global_live_at_start, new_live_at_end))
3605 continue;
3607 COPY_REG_SET (bb->global_live_at_start, new_live_at_end);
3610 /* Queue all predecessors of BB so that we may re-examine
3611 their live_at_end. */
3612 for (e = bb->pred; e; e = e->pred_next)
3614 basic_block pb = e->src;
3615 if (pb->aux == NULL)
3617 *qtail++ = pb;
3618 if (qtail == qend)
3619 qtail = queue;
3620 pb->aux = pb;
3625 FREE_REG_SET (tmp);
3626 FREE_REG_SET (new_live_at_end);
3628 if (blocks_out)
3630 EXECUTE_IF_SET_IN_SBITMAP (blocks_out, 0, i,
3632 basic_block bb = BASIC_BLOCK (i);
3633 FREE_REG_SET (bb->local_set);
3634 FREE_REG_SET (bb->cond_local_set);
3637 else
3639 for (i = n_basic_blocks - 1; i >= 0; --i)
3641 basic_block bb = BASIC_BLOCK (i);
3642 FREE_REG_SET (bb->local_set);
3643 FREE_REG_SET (bb->cond_local_set);
3647 free (queue);
3650 /* Subroutines of life analysis. */
3652 /* Allocate the permanent data structures that represent the results
3653 of life analysis. Not static since used also for stupid life analysis. */
3655 static void
3656 allocate_bb_life_data ()
3658 register int i;
3660 for (i = 0; i < n_basic_blocks; i++)
3662 basic_block bb = BASIC_BLOCK (i);
3664 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
3665 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
3668 ENTRY_BLOCK_PTR->global_live_at_end
3669 = OBSTACK_ALLOC_REG_SET (&flow_obstack);
3670 EXIT_BLOCK_PTR->global_live_at_start
3671 = OBSTACK_ALLOC_REG_SET (&flow_obstack);
3673 regs_live_at_setjmp = OBSTACK_ALLOC_REG_SET (&flow_obstack);
3676 void
3677 allocate_reg_life_data ()
3679 int i;
3681 max_regno = max_reg_num ();
3683 /* Recalculate the register space, in case it has grown. Old style
3684 vector oriented regsets would set regset_{size,bytes} here also. */
3685 allocate_reg_info (max_regno, FALSE, FALSE);
3687 /* Reset all the data we'll collect in propagate_block and its
3688 subroutines. */
3689 for (i = 0; i < max_regno; i++)
3691 REG_N_SETS (i) = 0;
3692 REG_N_REFS (i) = 0;
3693 REG_N_DEATHS (i) = 0;
3694 REG_N_CALLS_CROSSED (i) = 0;
3695 REG_LIVE_LENGTH (i) = 0;
3696 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
3700 /* Delete dead instructions for propagate_block. */
3702 static void
3703 propagate_block_delete_insn (bb, insn)
3704 basic_block bb;
3705 rtx insn;
3707 rtx inote = find_reg_note (insn, REG_LABEL, NULL_RTX);
3709 /* If the insn referred to a label, and that label was attached to
3710 an ADDR_VEC, it's safe to delete the ADDR_VEC. In fact, it's
3711 pretty much mandatory to delete it, because the ADDR_VEC may be
3712 referencing labels that no longer exist. */
3714 if (inote)
3716 rtx label = XEXP (inote, 0);
3717 rtx next;
3719 if (LABEL_NUSES (label) == 1
3720 && (next = next_nonnote_insn (label)) != NULL
3721 && GET_CODE (next) == JUMP_INSN
3722 && (GET_CODE (PATTERN (next)) == ADDR_VEC
3723 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
3725 rtx pat = PATTERN (next);
3726 int diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
3727 int len = XVECLEN (pat, diff_vec_p);
3728 int i;
3730 for (i = 0; i < len; i++)
3731 LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0))--;
3733 flow_delete_insn (next);
3737 if (bb->end == insn)
3738 bb->end = PREV_INSN (insn);
3739 flow_delete_insn (insn);
3742 /* Delete dead libcalls for propagate_block. Return the insn
3743 before the libcall. */
3745 static rtx
3746 propagate_block_delete_libcall (bb, insn, note)
3747 basic_block bb;
3748 rtx insn, note;
3750 rtx first = XEXP (note, 0);
3751 rtx before = PREV_INSN (first);
3753 if (insn == bb->end)
3754 bb->end = before;
3756 flow_delete_insn_chain (first, insn);
3757 return before;
3760 /* Update the life-status of regs for one insn. Return the previous insn. */
rtx
propagate_one_insn (pbi, insn)
3764 struct propagate_block_info *pbi;
3765 rtx insn;
3767 rtx prev = PREV_INSN (insn);
3768 int flags = pbi->flags;
3769 int insn_is_dead = 0;
3770 int libcall_is_dead = 0;
3771 rtx note;
3772 int i;
3774 if (! INSN_P (insn))
3775 return prev;
3777 note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
3778 if (flags & PROP_SCAN_DEAD_CODE)
3780 insn_is_dead = insn_dead_p (pbi, PATTERN (insn), 0, REG_NOTES (insn));
3781 libcall_is_dead = (insn_is_dead && note != 0
3782 && libcall_dead_p (pbi, note, insn));
3785 /* If an instruction consists of just dead store(s) on final pass,
3786 delete it. */
3787 if ((flags & PROP_KILL_DEAD_CODE) && insn_is_dead)
3789 /* If we're trying to delete a prologue or epilogue instruction
3790 that isn't flagged as possibly being dead, something is wrong.
3791 But if we are keeping the stack pointer depressed, we might well
3792 be deleting insns that are used to compute the amount to update
3793 it by, so they are fine. */
3794 if (reload_completed
3795 && !(TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
3796 && (TYPE_RETURNS_STACK_DEPRESSED
3797 (TREE_TYPE (current_function_decl))))
3798 && (((HAVE_epilogue || HAVE_prologue)
3799 && prologue_epilogue_contains (insn))
3800 || (HAVE_sibcall_epilogue
3801 && sibcall_epilogue_contains (insn)))
3802 && find_reg_note (insn, REG_MAYBE_DEAD, NULL_RTX) == 0)
3803 abort ();
3805 /* Record sets. Do this even for dead instructions, since they
3806 would have killed the values if they hadn't been deleted. */
3807 mark_set_regs (pbi, PATTERN (insn), insn);
3809 /* CC0 is now known to be dead. Either this insn used it,
3810 in which case it doesn't anymore, or clobbered it,
3811 so the next insn can't use it. */
3812 pbi->cc0_live = 0;
3814 if (libcall_is_dead)
3816 prev = propagate_block_delete_libcall (pbi->bb, insn, note);
3817 insn = NEXT_INSN (prev);
3819 else
3820 propagate_block_delete_insn (pbi->bb, insn);
3822 return prev;
3825 /* See if this is an increment or decrement that can be merged into
3826 a following memory address. */
3827 #ifdef AUTO_INC_DEC
3829 register rtx x = single_set (insn);
3831 /* Does this instruction increment or decrement a register? */
3832 if ((flags & PROP_AUTOINC)
3833 && x != 0
3834 && GET_CODE (SET_DEST (x)) == REG
3835 && (GET_CODE (SET_SRC (x)) == PLUS
3836 || GET_CODE (SET_SRC (x)) == MINUS)
3837 && XEXP (SET_SRC (x), 0) == SET_DEST (x)
3838 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
3839 /* Ok, look for a following memory ref we can combine with.
3840 If one is found, change the memory ref to a PRE_INC
3841 or PRE_DEC, cancel this insn, and return 1.
3842 Return 0 if nothing has been done. */
3843 && try_pre_increment_1 (pbi, insn))
3844 return prev;
3846 #endif /* AUTO_INC_DEC */
3848 CLEAR_REG_SET (pbi->new_set);
3850 /* If this is not the final pass, and this insn is copying the value of
3851 a library call and it's dead, don't scan the insns that perform the
3852 library call, so that the call's arguments are not marked live. */
3853 if (libcall_is_dead)
3855 /* Record the death of the dest reg. */
3856 mark_set_regs (pbi, PATTERN (insn), insn);
3858 insn = XEXP (note, 0);
3859 return PREV_INSN (insn);
3861 else if (GET_CODE (PATTERN (insn)) == SET
3862 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
3863 && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
3864 && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx
3865 && GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == CONST_INT)
3866 /* We have an insn to pop a constant amount off the stack.
3867 (Such insns use PLUS regardless of the direction of the stack,
3868 and any insn to adjust the stack by a constant is always a pop.)
3869 These insns, if not dead stores, have no effect on life. */
3871 else
3873 /* Any regs live at the time of a call instruction must not go
3874 in a register clobbered by calls. Find all regs now live and
3875 record this for them. */
3877 if (GET_CODE (insn) == CALL_INSN && (flags & PROP_REG_INFO))
3878 EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i,
3879 { REG_N_CALLS_CROSSED (i)++; });
3881 /* Record sets. Do this even for dead instructions, since they
3882 would have killed the values if they hadn't been deleted. */
3883 mark_set_regs (pbi, PATTERN (insn), insn);
3885 if (GET_CODE (insn) == CALL_INSN)
3887 register int i;
3888 rtx note, cond;
3890 cond = NULL_RTX;
3891 if (GET_CODE (PATTERN (insn)) == COND_EXEC)
3892 cond = COND_EXEC_TEST (PATTERN (insn));
3894 /* Non-constant calls clobber memory. */
3895 if (! CONST_CALL_P (insn))
3897 free_EXPR_LIST_list (&pbi->mem_set_list);
3898 pbi->mem_set_list_len = 0;
3901 /* There may be extra registers to be clobbered. */
3902 for (note = CALL_INSN_FUNCTION_USAGE (insn);
3903 note;
3904 note = XEXP (note, 1))
3905 if (GET_CODE (XEXP (note, 0)) == CLOBBER)
3906 mark_set_1 (pbi, CLOBBER, XEXP (XEXP (note, 0), 0),
3907 cond, insn, pbi->flags);
3909 /* Calls change all call-used and global registers. */
3910 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3911 if (call_used_regs[i] && ! global_regs[i]
3912 && ! fixed_regs[i])
3914 /* We do not want REG_UNUSED notes for these registers. */
3915 mark_set_1 (pbi, CLOBBER, gen_rtx_REG (reg_raw_mode[i], i),
3916 cond, insn,
3917 pbi->flags & ~(PROP_DEATH_NOTES | PROP_REG_INFO));
3921 /* If an insn doesn't use CC0, it becomes dead since we assume
3922 that every insn clobbers it. So show it dead here;
3923 mark_used_regs will set it live if it is referenced. */
3924 pbi->cc0_live = 0;
3926 /* Record uses. */
3927 if (! insn_is_dead)
3928 mark_used_regs (pbi, PATTERN (insn), NULL_RTX, insn);
3930 /* Sometimes we may have inserted something before INSN (such as a move)
3931 when we make an auto-inc. So ensure we will scan those insns. */
3932 #ifdef AUTO_INC_DEC
3933 prev = PREV_INSN (insn);
3934 #endif
3936 if (! insn_is_dead && GET_CODE (insn) == CALL_INSN)
3938 register int i;
3939 rtx note, cond;
3941 cond = NULL_RTX;
3942 if (GET_CODE (PATTERN (insn)) == COND_EXEC)
3943 cond = COND_EXEC_TEST (PATTERN (insn));
3945 /* Calls use their arguments. */
3946 for (note = CALL_INSN_FUNCTION_USAGE (insn);
3947 note;
3948 note = XEXP (note, 1))
3949 if (GET_CODE (XEXP (note, 0)) == USE)
3950 mark_used_regs (pbi, XEXP (XEXP (note, 0), 0),
3951 cond, insn);
3953 /* The stack ptr is used (honorarily) by a CALL insn. */
3954 SET_REGNO_REG_SET (pbi->reg_live, STACK_POINTER_REGNUM);
3956 /* Calls may also reference any of the global registers,
3957 so they are made live. */
3958 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3959 if (global_regs[i])
3960 mark_used_reg (pbi, gen_rtx_REG (reg_raw_mode[i], i),
3961 cond, insn);
3965 /* On final pass, update counts of the number of insns in which
3966 each reg is live. */
3967 if (flags & PROP_REG_INFO)
3968 EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i,
3969 { REG_LIVE_LENGTH (i)++; });
3971 return prev;
3974 /* Initialize a propagate_block_info struct for public consumption.
3975 Note that the structure itself is opaque outside this file, but that
3976 the user can use the regsets provided here. */
3978 struct propagate_block_info *
3979 init_propagate_block_info (bb, live, local_set, cond_local_set, flags)
3980 basic_block bb;
3981 regset live, local_set, cond_local_set;
3982 int flags;
3984 struct propagate_block_info *pbi = xmalloc (sizeof (*pbi));
3986 pbi->bb = bb;
3987 pbi->reg_live = live;
3988 pbi->mem_set_list = NULL_RTX;
3989 pbi->mem_set_list_len = 0;
3990 pbi->local_set = local_set;
3991 pbi->cond_local_set = cond_local_set;
3992 pbi->cc0_live = 0;
3993 pbi->flags = flags;
3995 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
3996 pbi->reg_next_use = (rtx *) xcalloc (max_reg_num (), sizeof (rtx));
3997 else
3998 pbi->reg_next_use = NULL;
4000 pbi->new_set = BITMAP_XMALLOC ();
4002 #ifdef HAVE_conditional_execution
4003 pbi->reg_cond_dead = splay_tree_new (splay_tree_compare_ints, NULL,
4004 free_reg_cond_life_info);
4005 pbi->reg_cond_reg = BITMAP_XMALLOC ();
4007 /* If this block ends in a conditional branch, for each register live
4008 from one side of the branch and not the other, record the register
4009 as conditionally dead. */
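/* Sketch of the idea (hypothetical registers): if the block ends in a
   branch on (ne (reg 1) (const_int 0)) and (reg 100) is live at the
   start of the taken successor only, (reg 100) is recorded below as
   dead under the complementary condition (eq (reg 1) (const_int 0)).  */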
4010 if (GET_CODE (bb->end) == JUMP_INSN
4011 && any_condjump_p (bb->end))
4013 regset_head diff_head;
4014 regset diff = INITIALIZE_REG_SET (diff_head);
4015 basic_block bb_true, bb_false;
4016 rtx cond_true, cond_false, set_src;
4017 int i;
4019 /* Identify the successor blocks. */
4020 bb_true = bb->succ->dest;
4021 if (bb->succ->succ_next != NULL)
4023 bb_false = bb->succ->succ_next->dest;
4025 if (bb->succ->flags & EDGE_FALLTHRU)
4027 basic_block t = bb_false;
4028 bb_false = bb_true;
4029 bb_true = t;
4031 else if (! (bb->succ->succ_next->flags & EDGE_FALLTHRU))
4032 abort ();
4034 else
4036 /* This can happen with a conditional jump to the next insn. */
4037 if (JUMP_LABEL (bb->end) != bb_true->head)
4038 abort ();
4040 /* Simplest way to do nothing. */
4041 bb_false = bb_true;
4044 /* Extract the condition from the branch. */
4045 set_src = SET_SRC (pc_set (bb->end));
4046 cond_true = XEXP (set_src, 0);
4047 cond_false = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond_true)),
4048 GET_MODE (cond_true), XEXP (cond_true, 0),
4049 XEXP (cond_true, 1));
4050 if (GET_CODE (XEXP (set_src, 1)) == PC)
4052 rtx t = cond_false;
4053 cond_false = cond_true;
4054 cond_true = t;
4057 /* Compute which registers lead different lives in the successors. */
4058 if (bitmap_operation (diff, bb_true->global_live_at_start,
4059 bb_false->global_live_at_start, BITMAP_XOR))
4061 rtx reg = XEXP (cond_true, 0);
4063 if (GET_CODE (reg) == SUBREG)
4064 reg = SUBREG_REG (reg);
4066 if (GET_CODE (reg) != REG)
4067 abort ();
4069 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (reg));
4071 /* For each such register, mark it conditionally dead. */
4072 EXECUTE_IF_SET_IN_REG_SET
4073 (diff, 0, i,
4075 struct reg_cond_life_info *rcli;
4076 rtx cond;
4078 rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
4080 if (REGNO_REG_SET_P (bb_true->global_live_at_start, i))
4081 cond = cond_false;
4082 else
4083 cond = cond_true;
4084 rcli->condition = cond;
4085 rcli->stores = const0_rtx;
4086 rcli->orig_condition = cond;
4088 splay_tree_insert (pbi->reg_cond_dead, i,
4089 (splay_tree_value) rcli);
4093 FREE_REG_SET (diff);
4095 #endif
4097 /* If this block has no successors, any stores to the frame that aren't
4098 used later in the block are dead. So make a pass over the block
4099 recording any such that are made and show them dead at the end. We do
4100 a very conservative and simple job here. */
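/* E.g. (hypothetical frame offset): in a block that ends the function,
   a store such as
       (set (mem:SI (plus:SI (reg fp) (const_int -4))) (reg 100))
   with no later read of that slot is recorded below, so insn_dead_p
   can later prove the store dead.  */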
4101 if (optimize
4102 && ! (TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
4103 && (TYPE_RETURNS_STACK_DEPRESSED
4104 (TREE_TYPE (current_function_decl))))
4105 && (flags & PROP_SCAN_DEAD_CODE)
4106 && (bb->succ == NULL
4107 || (bb->succ->succ_next == NULL
4108 && bb->succ->dest == EXIT_BLOCK_PTR)))
4110 rtx insn;
4111 for (insn = bb->end; insn != bb->head; insn = PREV_INSN (insn))
4112 if (GET_CODE (insn) == INSN
4113 && GET_CODE (PATTERN (insn)) == SET
4114 && GET_CODE (SET_DEST (PATTERN (insn))) == MEM)
4116 rtx mem = SET_DEST (PATTERN (insn));
4118 /* This optimization is performed by faking a store to the
4119 memory at the end of the block. This doesn't work for
4120 unchanging memories because multiple stores to unchanging
4121 memory are illegal and alias analysis doesn't consider it. */
4122 if (RTX_UNCHANGING_P (mem))
4123 continue;
4125 if (XEXP (mem, 0) == frame_pointer_rtx
4126 || (GET_CODE (XEXP (mem, 0)) == PLUS
4127 && XEXP (XEXP (mem, 0), 0) == frame_pointer_rtx
4128 && GET_CODE (XEXP (XEXP (mem, 0), 1)) == CONST_INT))
4130 #ifdef AUTO_INC_DEC
4131 /* Store a copy of mem, otherwise the address may be scrogged
4132 by find_auto_inc. This matters because insn_dead_p uses
4133 an rtx_equal_p check to determine if two addresses are
4134 the same. This works before find_auto_inc, but fails
4135 after find_auto_inc, causing discrepancies between the
4136 set of live registers calculated during the
4137 calculate_global_regs_live phase and what actually exists
4138 after flow completes, leading to aborts. */
4139 if (flags & PROP_AUTOINC)
4140 mem = shallow_copy_rtx (mem);
4141 #endif
4142 pbi->mem_set_list = alloc_EXPR_LIST (0, mem, pbi->mem_set_list);
4143 if (++pbi->mem_set_list_len >= MAX_MEM_SET_LIST_LEN)
4144 break;
4149 return pbi;
4152 /* Release a propagate_block_info struct. */
4154 void
4155 free_propagate_block_info (pbi)
4156 struct propagate_block_info *pbi;
4158 free_EXPR_LIST_list (&pbi->mem_set_list);
4160 BITMAP_XFREE (pbi->new_set);
4162 #ifdef HAVE_conditional_execution
4163 splay_tree_delete (pbi->reg_cond_dead);
4164 BITMAP_XFREE (pbi->reg_cond_reg);
4165 #endif
4167 if (pbi->reg_next_use)
4168 free (pbi->reg_next_use);
4170 free (pbi);
4173 /* Compute the registers live at the beginning of a basic block BB from
4174 those live at the end.
4176 When called, REG_LIVE contains those live at the end. On return, it
4177 contains those live at the beginning.
4179 LOCAL_SET, if non-null, will be set with all registers killed
4180 unconditionally by this basic block.
4181 Likewise, COND_LOCAL_SET, if non-null, will be set with all registers
4182 killed conditionally by this basic block. If there is any unconditional
4183 set of a register, then the corresponding bit will be set in LOCAL_SET
4184 and cleared in COND_LOCAL_SET.
4185 It is valid for LOCAL_SET and COND_LOCAL_SET to be the same set. In this
4186 case, the resulting set will be equal to the union of the two sets that
4187 would otherwise be computed. */
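/* For example: a register assigned by a plain SET in the block gets its
   bit in LOCAL_SET, while one assigned only under a COND_EXEC predicate
   gets its bit in COND_LOCAL_SET; any unconditional set moves the bit
   into LOCAL_SET alone.  */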
4189 void
4190 propagate_block (bb, live, local_set, cond_local_set, flags)
4191 basic_block bb;
4192 regset live;
4193 regset local_set;
4194 regset cond_local_set;
4195 int flags;
4197 struct propagate_block_info *pbi;
4198 rtx insn, prev;
4200 pbi = init_propagate_block_info (bb, live, local_set, cond_local_set, flags);
4202 if (flags & PROP_REG_INFO)
4204 register int i;
4206 /* Process the regs live at the end of the block.
4207 Mark them as not local to any one basic block. */
4208 EXECUTE_IF_SET_IN_REG_SET (live, 0, i,
4209 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
4212 /* Scan the block an insn at a time from end to beginning. */
4214 for (insn = bb->end;; insn = prev)
4216 /* If this is a call to `setjmp' et al, warn if any
4217 non-volatile datum is live. */
4218 if ((flags & PROP_REG_INFO)
4219 && GET_CODE (insn) == NOTE
4220 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
4221 IOR_REG_SET (regs_live_at_setjmp, pbi->reg_live);
4223 prev = propagate_one_insn (pbi, insn);
4225 if (insn == bb->head)
4226 break;
4229 free_propagate_block_info (pbi);
4232 /* Return 1 if X (the body of an insn, or part of it) is just dead stores
4233 (SET expressions whose destinations are registers dead after the insn).
4234 The regset PBI->REG_LIVE says which regs are alive after the insn.
4236 Unless CALL_OK is non-zero, an insn is needed if it contains a CALL.
4238 If X is the entire body of an insn, NOTES contains the reg notes
4239 pertaining to the insn. */
4241 static int
4242 insn_dead_p (pbi, x, call_ok, notes)
4243 struct propagate_block_info *pbi;
4244 rtx x;
4245 int call_ok;
4246 rtx notes ATTRIBUTE_UNUSED;
4248 enum rtx_code code = GET_CODE (x);
4250 #ifdef AUTO_INC_DEC
4251 /* If flow is invoked after reload, we must take existing AUTO_INC
4252 expressions into account. */
4253 if (reload_completed)
4255 for (; notes; notes = XEXP (notes, 1))
4257 if (REG_NOTE_KIND (notes) == REG_INC)
4259 int regno = REGNO (XEXP (notes, 0));
4261 /* Don't delete insns to set global regs. */
4262 if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
4263 || REGNO_REG_SET_P (pbi->reg_live, regno))
4264 return 0;
4268 #endif
4270 /* If setting something that's a reg or part of one,
4271 see if that register's altered value will be live. */
4273 if (code == SET)
4275 rtx r = SET_DEST (x);
4277 #ifdef HAVE_cc0
4278 if (GET_CODE (r) == CC0)
4279 return ! pbi->cc0_live;
4280 #endif
4282 /* A SET that is a subroutine call cannot be dead. */
4283 if (GET_CODE (SET_SRC (x)) == CALL)
4285 if (! call_ok)
4286 return 0;
4289 /* Don't eliminate loads from volatile memory or volatile asms. */
4290 else if (volatile_refs_p (SET_SRC (x)))
4291 return 0;
4293 if (GET_CODE (r) == MEM)
4295 rtx temp;
4297 if (MEM_VOLATILE_P (r))
4298 return 0;
4300 /* Walk the set of memory locations we are currently tracking
4301 and see if one is an identical match to this memory location.
4302 If so, this memory write is dead (remember, we're walking
4303 backwards from the end of the block to the start). Since
4304 rtx_equal_p does not check the alias set or flags, we also
4305 must have the potential for them to conflict (anti_dependence). */
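/* E.g. (hypothetical pseudos), in program order:
       (set (mem:SI (reg 100)) (reg 101))
       (set (mem:SI (reg 100)) (reg 102))
   Scanning backwards, the second store is already on mem_set_list when
   the first is reached; rtx_equal_p matches and the first store is
   dead, assuming no intervening read.  */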
4306 for (temp = pbi->mem_set_list; temp != 0; temp = XEXP (temp, 1))
4307 if (anti_dependence (r, XEXP (temp, 0)))
4309 rtx mem = XEXP (temp, 0);
4311 if (rtx_equal_p (mem, r))
4312 return 1;
4313 #ifdef AUTO_INC_DEC
4314 /* Check if memory reference matches an auto increment. Only
4315 post increment/decrement or modify are valid. */
4316 if (GET_MODE (mem) == GET_MODE (r)
4317 && (GET_CODE (XEXP (mem, 0)) == POST_DEC
4318 || GET_CODE (XEXP (mem, 0)) == POST_INC
4319 || GET_CODE (XEXP (mem, 0)) == POST_MODIFY)
4320 && GET_MODE (XEXP (mem, 0)) == GET_MODE (r)
4321 && rtx_equal_p (XEXP (XEXP (mem, 0), 0), XEXP (r, 0)))
4322 return 1;
4323 #endif
4326 else
4328 while (GET_CODE (r) == SUBREG
4329 || GET_CODE (r) == STRICT_LOW_PART
4330 || GET_CODE (r) == ZERO_EXTRACT)
4331 r = XEXP (r, 0);
4333 if (GET_CODE (r) == REG)
4335 int regno = REGNO (r);
4337 /* Obvious. */
4338 if (REGNO_REG_SET_P (pbi->reg_live, regno))
4339 return 0;
4341 /* If this is a hard register, verify that subsequent
4342 words are not needed. */
4343 if (regno < FIRST_PSEUDO_REGISTER)
4345 int n = HARD_REGNO_NREGS (regno, GET_MODE (r));
4347 while (--n > 0)
4348 if (REGNO_REG_SET_P (pbi->reg_live, regno+n))
4349 return 0;
4352 /* Don't delete insns to set global regs. */
4353 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
4354 return 0;
4356 /* Make sure insns to set the stack pointer aren't deleted. */
4357 if (regno == STACK_POINTER_REGNUM)
4358 return 0;
4360 /* ??? These bits might be redundant with the force live bits
4361 in calculate_global_regs_live. We would delete from
4362 sequential sets; whether this actually affects real code
4363 for anything but the stack pointer I don't know. */
4364 /* Make sure insns to set the frame pointer aren't deleted. */
4365 if (regno == FRAME_POINTER_REGNUM
4366 && (! reload_completed || frame_pointer_needed))
4367 return 0;
4368 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4369 if (regno == HARD_FRAME_POINTER_REGNUM
4370 && (! reload_completed || frame_pointer_needed))
4371 return 0;
4372 #endif
4374 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4375 /* Make sure insns to set arg pointer are never deleted
4376 (if the arg pointer isn't fixed, there will be a USE
4377 for it, so we can treat it normally). */
4378 if (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
4379 return 0;
4380 #endif
4382 /* Otherwise, the set is dead. */
4383 return 1;
4388 /* If performing several activities, insn is dead if each activity
4389 is individually dead. Also, CLOBBERs and USEs can be ignored; a
4390 CLOBBER or USE that's inside a PARALLEL doesn't make the insn
4391 worth keeping. */
4392 else if (code == PARALLEL)
4394 int i = XVECLEN (x, 0);
4396 for (i--; i >= 0; i--)
4397 if (GET_CODE (XVECEXP (x, 0, i)) != CLOBBER
4398 && GET_CODE (XVECEXP (x, 0, i)) != USE
4399 && ! insn_dead_p (pbi, XVECEXP (x, 0, i), call_ok, NULL_RTX))
4400 return 0;
4402 return 1;
4405 /* A CLOBBER of a pseudo-register that is dead serves no purpose. That
4406 is not necessarily true for hard registers. */
4407 else if (code == CLOBBER && GET_CODE (XEXP (x, 0)) == REG
4408 && REGNO (XEXP (x, 0)) >= FIRST_PSEUDO_REGISTER
4409 && ! REGNO_REG_SET_P (pbi->reg_live, REGNO (XEXP (x, 0))))
4410 return 1;
4412 /* We do not check other CLOBBER or USE here. An insn consisting of just
4413 a CLOBBER or just a USE should not be deleted. */
4414 return 0;
4417 /* If INSN is the last insn in a libcall, and assuming INSN is dead,
4418 return 1 if the entire library call is dead.
4419 This is true if INSN copies a register (hard or pseudo)
4420 and if the hard return reg of the call insn is dead.
4421 (The caller should have tested the destination of the SET inside
4422 INSN already for death.)
4424 If this insn doesn't just copy a register, then we don't
4425 have an ordinary libcall. In that case, cse could not have
4426 managed to substitute the source for the dest later on,
4427 so we can assume the libcall is dead.
4429 PBI is the block info giving pseudoregs live before this insn.
4430 NOTE is the REG_RETVAL note of the insn. */
4432 static int
4433 libcall_dead_p (pbi, note, insn)
4434 struct propagate_block_info *pbi;
4435 rtx note;
4436 rtx insn;
4438 rtx x = single_set (insn);
4440 if (x)
4442 register rtx r = SET_SRC (x);
4443 if (GET_CODE (r) == REG)
4445 rtx call = XEXP (note, 0);
4446 rtx call_pat;
4447 register int i;
4449 /* Find the call insn. */
4450 while (call != insn && GET_CODE (call) != CALL_INSN)
4451 call = NEXT_INSN (call);
4453 /* If there is none, do nothing special,
4454 since ordinary death handling can understand these insns. */
4455 if (call == insn)
4456 return 0;
4458 /* See if the hard reg holding the value is dead.
4459 If this is a PARALLEL, find the call within it. */
4460 call_pat = PATTERN (call);
4461 if (GET_CODE (call_pat) == PARALLEL)
4463 for (i = XVECLEN (call_pat, 0) - 1; i >= 0; i--)
4464 if (GET_CODE (XVECEXP (call_pat, 0, i)) == SET
4465 && GET_CODE (SET_SRC (XVECEXP (call_pat, 0, i))) == CALL)
4466 break;
4468 /* This may be a library call that is returning a value
4469 via invisible pointer. Do nothing special, since
4470 ordinary death handling can understand these insns. */
4471 if (i < 0)
4472 return 0;
4474 call_pat = XVECEXP (call_pat, 0, i);
4477 return insn_dead_p (pbi, call_pat, 1, REG_NOTES (call));
4480 return 1;
4483 /* Return 1 if register REGNO was used before it was set, i.e. if it is
4484 live at function entry. Don't count global register variables, variables
4485 in registers that can be used for function arg passing, or variables in
4486 fixed hard registers. */
4488 int
4489 regno_uninitialized (regno)
4490 int regno;
4492 if (n_basic_blocks == 0
4493 || (regno < FIRST_PSEUDO_REGISTER
4494 && (global_regs[regno]
4495 || fixed_regs[regno]
4496 || FUNCTION_ARG_REGNO_P (regno))))
4497 return 0;
4499 return REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno);
4502 /* 1 if register REGNO was alive at a place where `setjmp' was called
4503 and was set more than once or is an argument.
4504 Such regs may be clobbered by `longjmp'. */
4506 int
4507 regno_clobbered_at_setjmp (regno)
4508 int regno;
4510 if (n_basic_blocks == 0)
4511 return 0;
4513 return ((REG_N_SETS (regno) > 1
4514 || REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno))
4515 && REGNO_REG_SET_P (regs_live_at_setjmp, regno));
4518 /* INSN references memory, possibly using autoincrement addressing modes.
4519 Find any entries on the mem_set_list that need to be invalidated due
4520 to an address change. */
4522 static void
4523 invalidate_mems_from_autoinc (pbi, insn)
4524 struct propagate_block_info *pbi;
4525 rtx insn;
4527 rtx note;
4528 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
4530 if (REG_NOTE_KIND (note) == REG_INC)
4532 rtx temp = pbi->mem_set_list;
4533 rtx prev = NULL_RTX;
4534 rtx next;
4536 while (temp)
4538 next = XEXP (temp, 1);
4539 if (reg_overlap_mentioned_p (XEXP (note, 0), XEXP (temp, 0)))
4541 /* Splice temp out of list. */
4542 if (prev)
4543 XEXP (prev, 1) = next;
4544 else
4545 pbi->mem_set_list = next;
4546 free_EXPR_LIST_node (temp);
4547 pbi->mem_set_list_len--;
4549 else
4550 prev = temp;
4551 temp = next;
4557 /* EXP is either a MEM or a REG. Remove any dependent entries
4558 from pbi->mem_set_list. */
4560 static void
4561 invalidate_mems_from_set (pbi, exp)
4562 struct propagate_block_info *pbi;
4563 rtx exp;
4565 rtx temp = pbi->mem_set_list;
4566 rtx prev = NULL_RTX;
4567 rtx next;
4569 while (temp)
4571 next = XEXP (temp, 1);
4572 if ((GET_CODE (exp) == MEM
4573 && output_dependence (XEXP (temp, 0), exp))
4574 || (GET_CODE (exp) == REG
4575 && reg_overlap_mentioned_p (exp, XEXP (temp, 0))))
4577 /* Splice this entry out of the list. */
4578 if (prev)
4579 XEXP (prev, 1) = next;
4580 else
4581 pbi->mem_set_list = next;
4582 free_EXPR_LIST_node (temp);
4583 pbi->mem_set_list_len--;
4585 else
4586 prev = temp;
4587 temp = next;
4591 /* Process the registers that are set within X. Their bits are set to
4592 1 in PBI->NEW_SET, because they are dead prior to this insn.
4594 If INSN is nonzero, it is the insn being processed.
4596 FLAGS is the set of operations to perform. */
4598 static void
4599 mark_set_regs (pbi, x, insn)
4600 struct propagate_block_info *pbi;
4601 rtx x, insn;
4603 rtx cond = NULL_RTX;
4604 rtx link;
4605 enum rtx_code code;
4607 if (insn)
4608 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
4610 if (REG_NOTE_KIND (link) == REG_INC)
4611 mark_set_1 (pbi, SET, XEXP (link, 0),
4612 (GET_CODE (x) == COND_EXEC
4613 ? COND_EXEC_TEST (x) : NULL_RTX),
4614 insn, pbi->flags);
4616 retry:
4617 switch (code = GET_CODE (x))
4619 case SET:
4620 case CLOBBER:
4621 mark_set_1 (pbi, code, SET_DEST (x), cond, insn, pbi->flags);
4622 return;
4624 case COND_EXEC:
4625 cond = COND_EXEC_TEST (x);
4626 x = COND_EXEC_CODE (x);
4627 goto retry;
4629 case PARALLEL:
4631 register int i;
4632 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
4634 rtx sub = XVECEXP (x, 0, i);
4635 switch (code = GET_CODE (sub))
4637 case COND_EXEC:
4638 if (cond != NULL_RTX)
4639 abort ();
4641 cond = COND_EXEC_TEST (sub);
4642 sub = COND_EXEC_CODE (sub);
4643 if (GET_CODE (sub) != SET && GET_CODE (sub) != CLOBBER)
4644 break;
4645 /* Fall through. */
4647 case SET:
4648 case CLOBBER:
4649 mark_set_1 (pbi, code, SET_DEST (sub), cond, insn, pbi->flags);
4650 break;
4652 default:
4653 break;
4656 break;
4659 default:
4660 break;
4664 /* Process a single set or clobber: CODE is SET or CLOBBER, REG is the destination. */
4666 static void
4667 mark_set_1 (pbi, code, reg, cond, insn, flags)
4668 struct propagate_block_info *pbi;
4669 enum rtx_code code;
4670 rtx reg, cond, insn;
4671 int flags;
4673 int regno_first = -1, regno_last = -1;
4674 unsigned long not_dead = 0;
4675 int i;
4677 /* Modifying just one hardware register of a multi-reg value or just a
4678 byte field of a register does not mean the value from before this insn
4679 is now dead. Of course, if it was dead afterward, it's unused now. */
4681 switch (GET_CODE (reg))
4683 case PARALLEL:
4684 /* Some targets place small structures in registers for return values of
4685 functions. We have to detect this case specially here to get correct
4686 flow information. */
4687 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
4688 if (XEXP (XVECEXP (reg, 0, i), 0) != 0)
4689 mark_set_1 (pbi, code, XEXP (XVECEXP (reg, 0, i), 0), cond, insn,
4690 flags);
4691 return;
4693 case ZERO_EXTRACT:
4694 case SIGN_EXTRACT:
4695 case STRICT_LOW_PART:
4696 /* ??? Assumes STRICT_LOW_PART not used on multi-word registers. */
4697 do
4698 reg = XEXP (reg, 0);
4699 while (GET_CODE (reg) == SUBREG
4700 || GET_CODE (reg) == ZERO_EXTRACT
4701 || GET_CODE (reg) == SIGN_EXTRACT
4702 || GET_CODE (reg) == STRICT_LOW_PART);
4703 if (GET_CODE (reg) == MEM)
4704 break;
4705 not_dead = (unsigned long) REGNO_REG_SET_P (pbi->reg_live, REGNO (reg));
4706 /* Fall through. */
4708 case REG:
4709 regno_last = regno_first = REGNO (reg);
4710 if (regno_first < FIRST_PSEUDO_REGISTER)
4711 regno_last += HARD_REGNO_NREGS (regno_first, GET_MODE (reg)) - 1;
4712 break;
4714 case SUBREG:
4715 if (GET_CODE (SUBREG_REG (reg)) == REG)
4717 enum machine_mode outer_mode = GET_MODE (reg);
4718 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (reg));
4720 /* Identify the range of registers affected. This is moderately
4721 tricky for hard registers. See alter_subreg. */
4723 regno_last = regno_first = REGNO (SUBREG_REG (reg));
4724 if (regno_first < FIRST_PSEUDO_REGISTER)
4726 #ifdef ALTER_HARD_SUBREG
4727 regno_first = ALTER_HARD_SUBREG (outer_mode, SUBREG_WORD (reg),
4728 inner_mode, regno_first);
4729 #else
4730 regno_first += SUBREG_WORD (reg);
4731 #endif
4732 regno_last = (regno_first
4733 + HARD_REGNO_NREGS (regno_first, outer_mode) - 1);
4735 /* Since we've just adjusted the register number ranges, make
4736 sure REG matches. Otherwise some_was_live will be clear
4737 when it shouldn't have been, and we'll create incorrect
4738 REG_UNUSED notes. */
4739 reg = gen_rtx_REG (outer_mode, regno_first);
4741 else
4743 /* If the number of words in the subreg is less than the number
4744 of words in the full register, we have a well-defined partial
4745 set. Otherwise the high bits are undefined.
4747 This is only really applicable to pseudos, since we just took
4748 care of multi-word hard registers. */
4749 if (((GET_MODE_SIZE (outer_mode)
4750 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
4751 < ((GET_MODE_SIZE (inner_mode)
4752 + UNITS_PER_WORD - 1) / UNITS_PER_WORD))
4753 not_dead = (unsigned long) REGNO_REG_SET_P (pbi->reg_live,
4754 regno_first);
4756 reg = SUBREG_REG (reg);
4759 else
4760 reg = SUBREG_REG (reg);
4761 break;
4763 default:
4764 break;
4767 /* If this set is a MEM, then it kills any aliased writes.
4768 If this set is a REG, then it kills any MEMs which use the reg. */
4769 if (optimize && (flags & PROP_SCAN_DEAD_CODE))
4771 if (GET_CODE (reg) == MEM || GET_CODE (reg) == REG)
4772 invalidate_mems_from_set (pbi, reg);
4774 /* If the memory reference has embedded side effects (autoincrement
4775 address modes), then we may need to kill some entries on the
4776 memory set list. */
4777 if (insn && GET_CODE (reg) == MEM)
4778 invalidate_mems_from_autoinc (pbi, insn);
4780 if (pbi->mem_set_list_len < MAX_MEM_SET_LIST_LEN
4781 && GET_CODE (reg) == MEM && ! side_effects_p (reg)
4782 /* ??? With more effort we could track conditional memory life. */
4783 && ! cond
4784 /* We do not know the size of a BLKmode store, so we do not track
4785 them for redundant store elimination. */
4786 && GET_MODE (reg) != BLKmode
4787 /* There are no REG_INC notes for SP, so we can't assume we'll see
4788 everything that invalidates it. To be safe, don't eliminate any
4789 stores through SP; none of them should be redundant anyway. */
4790 && ! reg_mentioned_p (stack_pointer_rtx, reg))
4792 #ifdef AUTO_INC_DEC
4793 /* Store a copy of mem, otherwise the address may be
4794 scrogged by find_auto_inc. */
4795 if (flags & PROP_AUTOINC)
4796 reg = shallow_copy_rtx (reg);
4797 #endif
4798 pbi->mem_set_list = alloc_EXPR_LIST (0, reg, pbi->mem_set_list);
4799 pbi->mem_set_list_len++;
4803 if (GET_CODE (reg) == REG
4804 && ! (regno_first == FRAME_POINTER_REGNUM
4805 && (! reload_completed || frame_pointer_needed))
4806 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4807 && ! (regno_first == HARD_FRAME_POINTER_REGNUM
4808 && (! reload_completed || frame_pointer_needed))
4809 #endif
4810 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4811 && ! (regno_first == ARG_POINTER_REGNUM && fixed_regs[regno_first])
4812 #endif
4815 int some_was_live = 0, some_was_dead = 0;
4817 for (i = regno_first; i <= regno_last; ++i)
4819 int needed_regno = REGNO_REG_SET_P (pbi->reg_live, i);
4820 if (pbi->local_set)
4822 /* Order of the set operation matters here since both
4823 sets may be the same. */
4824 CLEAR_REGNO_REG_SET (pbi->cond_local_set, i);
4825 if (cond != NULL_RTX
4826 && ! REGNO_REG_SET_P (pbi->local_set, i))
4827 SET_REGNO_REG_SET (pbi->cond_local_set, i);
4828 else
4829 SET_REGNO_REG_SET (pbi->local_set, i);
4831 if (code != CLOBBER)
4832 SET_REGNO_REG_SET (pbi->new_set, i);
4834 some_was_live |= needed_regno;
4835 some_was_dead |= ! needed_regno;
4838 #ifdef HAVE_conditional_execution
4839 /* Consider conditional death in deciding that the register needs
4840 a death note. */
4841 if (some_was_live && ! not_dead
4842 /* The stack pointer is never dead. Well, not strictly true,
4843 but it's very difficult to tell from here. Hopefully
4844 combine_stack_adjustments will fix up the most egregious
4845 errors. */
4846 && regno_first != STACK_POINTER_REGNUM)
4848 for (i = regno_first; i <= regno_last; ++i)
4849 if (! mark_regno_cond_dead (pbi, i, cond))
4850 not_dead |= ((unsigned long) 1) << (i - regno_first);
4852 #endif
4854 /* Additional data to record if this is the final pass. */
4855 if (flags & (PROP_LOG_LINKS | PROP_REG_INFO
4856 | PROP_DEATH_NOTES | PROP_AUTOINC))
4858 register rtx y;
4859 register int blocknum = pbi->bb->index;
4861 y = NULL_RTX;
4862 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4864 y = pbi->reg_next_use[regno_first];
4866 /* The next use is no longer next, since a store intervenes. */
4867 for (i = regno_first; i <= regno_last; ++i)
4868 pbi->reg_next_use[i] = 0;
4871 if (flags & PROP_REG_INFO)
4873 for (i = regno_first; i <= regno_last; ++i)
4875 /* Count (weighted) references, stores, etc. This counts a
4876 register twice if it is modified, but that is correct. */
4877 REG_N_SETS (i) += 1;
4878 REG_N_REFS (i) += (optimize_size ? 1
4879 : pbi->bb->loop_depth + 1);
4881 /* The insns where a reg is live are normally counted
4882 elsewhere, but we want the count to include the insn
4883 where the reg is set, and the normal counting mechanism
4884 would not count it. */
4885 REG_LIVE_LENGTH (i) += 1;
4888 /* If this is a hard reg, record this function uses the reg. */
4889 if (regno_first < FIRST_PSEUDO_REGISTER)
4891 for (i = regno_first; i <= regno_last; i++)
4892 regs_ever_live[i] = 1;
4894 else
4896 /* Keep track of which basic blocks each reg appears in. */
4897 if (REG_BASIC_BLOCK (regno_first) == REG_BLOCK_UNKNOWN)
4898 REG_BASIC_BLOCK (regno_first) = blocknum;
4899 else if (REG_BASIC_BLOCK (regno_first) != blocknum)
4900 REG_BASIC_BLOCK (regno_first) = REG_BLOCK_GLOBAL;
4904 if (! some_was_dead)
4906 if (flags & PROP_LOG_LINKS)
4908 /* Make a logical link from the next following insn
4909 that uses this register, back to this insn.
4910 The following insns have already been processed.
4912 We don't build a LOG_LINK for hard registers contained
4913 in ASM_OPERANDs. If these registers get replaced,
4914 we might wind up changing the semantics of the insn,
4915 even if reload can make what appear to be valid
4916 assignments later. */
4917 if (y && (BLOCK_NUM (y) == blocknum)
4918 && (regno_first >= FIRST_PSEUDO_REGISTER
4919 || asm_noperands (PATTERN (y)) < 0))
4920 LOG_LINKS (y) = alloc_INSN_LIST (insn, LOG_LINKS (y));
4923 else if (not_dead)
4925 else if (! some_was_live)
4927 if (flags & PROP_REG_INFO)
4928 REG_N_DEATHS (regno_first) += 1;
4930 if (flags & PROP_DEATH_NOTES)
4932 /* Note that dead stores have already been deleted
4933 when possible. If we get here, we have found a
4934 dead store that cannot be eliminated (because the
4935 same insn does something useful). Indicate this
4936 by marking the reg being set as dying here. */
4937 REG_NOTES (insn)
4938 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
4941 else
4943 if (flags & PROP_DEATH_NOTES)
4945 /* This is a case where we have a multi-word hard register
4946 and some, but not all, of the words of the register are
4947 needed in subsequent insns. Write REG_UNUSED notes
4948 for those parts that were not needed. This case should
4949 be rare. */
4951 for (i = regno_first; i <= regno_last; ++i)
4952 if (! REGNO_REG_SET_P (pbi->reg_live, i))
4953 REG_NOTES (insn)
4954 = alloc_EXPR_LIST (REG_UNUSED,
4955 gen_rtx_REG (reg_raw_mode[i], i),
4956 REG_NOTES (insn));
4961 /* Mark the register as being dead. */
4962 if (some_was_live
4963 /* The stack pointer is never dead. Well, not strictly true,
4964 but it's very difficult to tell from here. Hopefully
4965 combine_stack_adjustments will fix up the most egregious
4966 errors. */
4967 && regno_first != STACK_POINTER_REGNUM)
4969 for (i = regno_first; i <= regno_last; ++i)
4970 if (!(not_dead & (((unsigned long) 1) << (i - regno_first))))
4971 CLEAR_REGNO_REG_SET (pbi->reg_live, i);
4974 else if (GET_CODE (reg) == REG)
4976 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4977 pbi->reg_next_use[regno_first] = 0;
4980 /* If this is the last pass and this is a SCRATCH, show it will be dying
4981 here and count it. */
4982 else if (GET_CODE (reg) == SCRATCH)
4984 if (flags & PROP_DEATH_NOTES)
4985 REG_NOTES (insn)
4986 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
4990 #ifdef HAVE_conditional_execution
4991 /* Mark REGNO conditionally dead.
4992 Return true if the register is now unconditionally dead. */
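/* Sketch (hypothetical predicate register): once a set under
   (eq (reg 1) (const_int 0)) has been recorded, a further set under
   (ne (reg 1) (const_int 0)) makes ior_reg_cond fold the death
   condition to const1_rtx, so the register is unconditionally dead.  */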
4994 static int
4995 mark_regno_cond_dead (pbi, regno, cond)
4996 struct propagate_block_info *pbi;
4997 int regno;
4998 rtx cond;
5000 /* If this is a store to a predicate register, the value of the
5001 predicate is changing; we don't know that the predicate as seen
5002 before is the same as that seen after. Flush all dependent
5003 conditions from reg_cond_dead. This will make all such
5004 conditionally live registers unconditionally live. */
5005 if (REGNO_REG_SET_P (pbi->reg_cond_reg, regno))
5006 flush_reg_cond_reg (pbi, regno);
5008 /* If this is an unconditional store, remove any conditional
5009 life that may have existed. */
5010 if (cond == NULL_RTX)
5011 splay_tree_remove (pbi->reg_cond_dead, regno);
5012 else
5014 splay_tree_node node;
5015 struct reg_cond_life_info *rcli;
5016 rtx ncond;
5018 /* Otherwise this is a conditional set. Record that fact.
5019 It may have been conditionally used, or there may be a
5020 subsequent set with a complementary condition. */
5022 node = splay_tree_lookup (pbi->reg_cond_dead, regno);
5023 if (node == NULL)
5025 /* The register was unconditionally live previously.
5026 Record the current condition as the condition under
5027 which it is dead. */
5028 rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
5029 rcli->condition = cond;
5030 rcli->stores = cond;
5031 rcli->orig_condition = const0_rtx;
5032 splay_tree_insert (pbi->reg_cond_dead, regno,
5033 (splay_tree_value) rcli);
5035 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));
5037 /* Not unconditionally dead. */
5038 return 0;
5040 else
5042 /* The register was conditionally live previously.
5043 Add the new condition to the old. */
5044 rcli = (struct reg_cond_life_info *) node->value;
5045 ncond = rcli->condition;
5046 ncond = ior_reg_cond (ncond, cond, 1);
5047 if (rcli->stores == const0_rtx)
5048 rcli->stores = cond;
5049 else if (rcli->stores != const1_rtx)
5050 rcli->stores = ior_reg_cond (rcli->stores, cond, 1);
5052 /* If the register is now unconditionally dead, remove the entry
5053 in the splay_tree. A register is unconditionally dead if the
5054 dead condition ncond is true. A register is also unconditionally
5055 dead if the sum of all conditional stores is an unconditional
5056 store (stores is true), and the dead condition is identically the
5057 same as the original dead condition initialized at the end of
5058 the block. This is a pointer compare, not an rtx_equal_p
5059 compare. */
5060 if (ncond == const1_rtx
5061 || (ncond == rcli->orig_condition && rcli->stores == const1_rtx))
5062 splay_tree_remove (pbi->reg_cond_dead, regno);
5063 else
5065 rcli->condition = ncond;
5067 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));
5069 /* Not unconditionally dead. */
5070 return 0;
5075 return 1;
5078 /* Called from splay_tree_delete for pbi->reg_cond_dead. */
5080 static void
5081 free_reg_cond_life_info (value)
5082 splay_tree_value value;
5084 struct reg_cond_life_info *rcli = (struct reg_cond_life_info *) value;
5085 free (rcli);
5088 /* Helper function for flush_reg_cond_reg. */
5090 static int
5091 flush_reg_cond_reg_1 (node, data)
5092 splay_tree_node node;
5093 void *data;
5095 struct reg_cond_life_info *rcli;
5096 int *xdata = (int *) data;
5097 unsigned int regno = xdata[0];
5099 /* Don't need to search if last flushed value was farther on in
5100 the in-order traversal. */
5101 if (xdata[1] >= (int) node->key)
5102 return 0;
5104 /* Splice out portions of the expression that refer to regno. */
5105 rcli = (struct reg_cond_life_info *) node->value;
5106 rcli->condition = elim_reg_cond (rcli->condition, regno);
5107 if (rcli->stores != const0_rtx && rcli->stores != const1_rtx)
5108 rcli->stores = elim_reg_cond (rcli->stores, regno);
5110 /* If the entire condition is now false, signal the node to be removed. */
5111 if (rcli->condition == const0_rtx)
5113 xdata[1] = node->key;
5114 return -1;
5116 else if (rcli->condition == const1_rtx)
5117 abort ();
5119 return 0;
5122 /* Flush all (sub) expressions referring to REGNO from REG_COND_DEAD. */
5124 static void
5125 flush_reg_cond_reg (pbi, regno)
5126 struct propagate_block_info *pbi;
5127 int regno;
5129 int pair[2];
5131 pair[0] = regno;
5132 pair[1] = -1;
5133 while (splay_tree_foreach (pbi->reg_cond_dead,
5134 flush_reg_cond_reg_1, pair) == -1)
5135 splay_tree_remove (pbi->reg_cond_dead, pair[1]);
5137 CLEAR_REGNO_REG_SET (pbi->reg_cond_reg, regno);
5140 /* Logical arithmetic on predicate conditions. IOR, NOT and AND.
5141 For ior/and, the ADD flag determines whether we want to add the new
5142 condition X to the old one unconditionally. If it is zero, we will
5143 only return a new expression if X allows us to simplify part of
5144 OLD, otherwise we return OLD unchanged to the caller.
5145 If ADD is nonzero, we will return a new condition in all cases. The
5146 toplevel caller of one of these functions should always pass 1 for
5147 ADD. */
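/* Worked example (hypothetical register): ior_reg_cond of
   OLD = (ne (reg 1) (const_int 0)) with X = (eq (reg 1) (const_int 0))
   yields const1_rtx, since X is the reverse of OLD; with
   OLD = (eq (reg 1) (const_int 0)), the same X returns OLD unchanged.  */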
5149 static rtx
5150 ior_reg_cond (old, x, add)
5151 rtx old, x;
5152 int add;
5154 rtx op0, op1;
5156 if (GET_RTX_CLASS (GET_CODE (old)) == '<')
5158 if (GET_RTX_CLASS (GET_CODE (x)) == '<'
5159 && GET_CODE (x) == reverse_condition (GET_CODE (old))
5160 && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
5161 return const1_rtx;
5162 if (GET_CODE (x) == GET_CODE (old)
5163 && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
5164 return old;
5165 if (! add)
5166 return old;
5167 return gen_rtx_IOR (0, old, x);
5170 switch (GET_CODE (old))
5172 case IOR:
5173 op0 = ior_reg_cond (XEXP (old, 0), x, 0);
5174 op1 = ior_reg_cond (XEXP (old, 1), x, 0);
5175 if (op0 != XEXP (old, 0) || op1 != XEXP (old, 1))
5177 if (op0 == const0_rtx)
5178 return op1;
5179 if (op1 == const0_rtx)
5180 return op0;
5181 if (op0 == const1_rtx || op1 == const1_rtx)
5182 return const1_rtx;
5183 if (op0 == XEXP (old, 0))
5184 op0 = gen_rtx_IOR (0, op0, x);
5185 else
5186 op1 = gen_rtx_IOR (0, op1, x);
5187 return gen_rtx_IOR (0, op0, op1);
5189 if (! add)
5190 return old;
5191 return gen_rtx_IOR (0, old, x);
5193 case AND:
5194 op0 = ior_reg_cond (XEXP (old, 0), x, 0);
5195 op1 = ior_reg_cond (XEXP (old, 1), x, 0);
5196 if (op0 != XEXP (old, 0) || op1 != XEXP (old, 1))
5198 if (op0 == const1_rtx)
5199 return op1;
5200 if (op1 == const1_rtx)
5201 return op0;
5202 if (op0 == const0_rtx || op1 == const0_rtx)
5203 return const0_rtx;
5204 if (op0 == XEXP (old, 0))
5205 op0 = gen_rtx_IOR (0, op0, x);
5206 else
5207 op1 = gen_rtx_IOR (0, op1, x);
5208 return gen_rtx_AND (0, op0, op1);
5210 if (! add)
5211 return old;
5212 return gen_rtx_IOR (0, old, x);
5214 case NOT:
5215 op0 = and_reg_cond (XEXP (old, 0), not_reg_cond (x), 0);
5216 if (op0 != XEXP (old, 0))
5217 return not_reg_cond (op0);
5218 if (! add)
5219 return old;
5220 return gen_rtx_IOR (0, old, x);
5222 default:
5223 abort ();
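/* Negate a predicate condition.  E.g. (hypothetical register), NOT of
   (eq (reg 1) (const_int 0)) is built as (ne (reg 1) (const_int 0))
   rather than wrapping it in a NOT rtx.  */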
5227 static rtx
5228 not_reg_cond (x)
5229 rtx x;
5231 enum rtx_code x_code;
5233 if (x == const0_rtx)
5234 return const1_rtx;
5235 else if (x == const1_rtx)
5236 return const0_rtx;
5237 x_code = GET_CODE (x);
5238 if (x_code == NOT)
5239 return XEXP (x, 0);
5240 if (GET_RTX_CLASS (x_code) == '<'
5241 && GET_CODE (XEXP (x, 0)) == REG)
5243 if (XEXP (x, 1) != const0_rtx)
5244 abort ();
5246 return gen_rtx_fmt_ee (reverse_condition (x_code),
5247 VOIDmode, XEXP (x, 0), const0_rtx);
5249 return gen_rtx_NOT (0, x);
5252 static rtx
5253 and_reg_cond (old, x, add)
5254 rtx old, x;
5255 int add;
5257 rtx op0, op1;
5259 if (GET_RTX_CLASS (GET_CODE (old)) == '<')
5261 if (GET_RTX_CLASS (GET_CODE (x)) == '<'
5262 && GET_CODE (x) == reverse_condition (GET_CODE (old))
5263 && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
5264 return const0_rtx;
5265 if (GET_CODE (x) == GET_CODE (old)
5266 && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
5267 return old;
5268 if (! add)
5269 return old;
5270 return gen_rtx_AND (0, old, x);
5273 switch (GET_CODE (old))
5275 case IOR:
5276 op0 = and_reg_cond (XEXP (old, 0), x, 0);
5277 op1 = and_reg_cond (XEXP (old, 1), x, 0);
5278 if (op0 != XEXP (old, 0) || op1 != XEXP (old, 1))
5280 if (op0 == const0_rtx)
5281 return op1;
5282 if (op1 == const0_rtx)
5283 return op0;
5284 if (op0 == const1_rtx || op1 == const1_rtx)
5285 return const1_rtx;
5286 if (op0 == XEXP (old, 0))
5287 op0 = gen_rtx_AND (0, op0, x);
5288 else
5289 op1 = gen_rtx_AND (0, op1, x);
5290 return gen_rtx_IOR (0, op0, op1);
5292 if (! add)
5293 return old;
5294 return gen_rtx_AND (0, old, x);
5296 case AND:
5297 op0 = and_reg_cond (XEXP (old, 0), x, 0);
5298 op1 = and_reg_cond (XEXP (old, 1), x, 0);
5299 if (op0 != XEXP (old, 0) || op1 != XEXP (old, 1))
5301 if (op0 == const1_rtx)
5302 return op1;
5303 if (op1 == const1_rtx)
5304 return op0;
5305 if (op0 == const0_rtx || op1 == const0_rtx)
5306 return const0_rtx;
5307 if (op0 == XEXP (old, 0))
5308 op0 = gen_rtx_AND (0, op0, x);
5309 else
5310 op1 = gen_rtx_AND (0, op1, x);
5311 return gen_rtx_AND (0, op0, op1);
5313 if (! add)
5314 return old;
5316 /* If X is identical to one of the existing terms of the AND,
5317 then just return what we already have. */
5318 /* ??? There really should be some sort of recursive check here in
5319 case there are nested ANDs. */
5320 if ((GET_CODE (XEXP (old, 0)) == GET_CODE (x)
5321 && REGNO (XEXP (XEXP (old, 0), 0)) == REGNO (XEXP (x, 0)))
5322 || (GET_CODE (XEXP (old, 1)) == GET_CODE (x)
5323 && REGNO (XEXP (XEXP (old, 1), 0)) == REGNO (XEXP (x, 0))))
5324 return old;
5326 return gen_rtx_AND (0, old, x);
5328 case NOT:
5329 op0 = ior_reg_cond (XEXP (old, 0), not_reg_cond (x), 0);
5330 if (op0 != XEXP (old, 0))
5331 return not_reg_cond (op0);
5332 if (! add)
5333 return old;
5334 return gen_rtx_AND (0, old, x);
5336 default:
5337 abort ();
5341 /* Given a condition X, remove references to reg REGNO and return the
5342 new condition. The removal will be done so that all conditions
5343 involving REGNO are considered to evaluate to false. This function
5344 is used when the value of REGNO changes. */
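/* E.g. (hypothetical registers): eliminating register 1 from
   (ior (eq (reg 1) (const_int 0)) (ne (reg 2) (const_int 0)))
   treats the first arm as false and returns (ne (reg 2) (const_int 0)).  */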
5346 static rtx
5347 elim_reg_cond (x, regno)
5348 rtx x;
5349 unsigned int regno;
5351 rtx op0, op1;
5353 if (GET_RTX_CLASS (GET_CODE (x)) == '<')
5355 if (REGNO (XEXP (x, 0)) == regno)
5356 return const0_rtx;
5357 return x;
5360 switch (GET_CODE (x))
5362 case AND:
5363 op0 = elim_reg_cond (XEXP (x, 0), regno);
5364 op1 = elim_reg_cond (XEXP (x, 1), regno);
5365 if (op0 == const0_rtx || op1 == const0_rtx)
5366 return const0_rtx;
5367 if (op0 == const1_rtx)
5368 return op1;
5369 if (op1 == const1_rtx)
5370 return op0;
5371 if (op0 == XEXP (x, 0) && op1 == XEXP (x, 1))
5372 return x;
5373 return gen_rtx_AND (0, op0, op1);
5375 case IOR:
5376 op0 = elim_reg_cond (XEXP (x, 0), regno);
5377 op1 = elim_reg_cond (XEXP (x, 1), regno);
5378 if (op0 == const1_rtx || op1 == const1_rtx)
5379 return const1_rtx;
5380 if (op0 == const0_rtx)
5381 return op1;
5382 if (op1 == const0_rtx)
5383 return op0;
5384 if (op0 == XEXP (x, 0) && op1 == XEXP (x, 1))
5385 return x;
5386 return gen_rtx_IOR (0, op0, op1);
5388 case NOT:
5389 op0 = elim_reg_cond (XEXP (x, 0), regno);
5390 if (op0 == const0_rtx)
5391 return const1_rtx;
5392 if (op0 == const1_rtx)
5393 return const0_rtx;
5394 if (op0 != XEXP (x, 0))
5395 return not_reg_cond (op0);
5396 return x;
5398 default:
5399 abort ();
5402 #endif /* HAVE_conditional_execution */
5404 #ifdef AUTO_INC_DEC
5406 /* Try to substitute the auto-inc expression INC as the address inside
5407 MEM which occurs in INSN. Currently, the address of MEM is an expression
5408 involving INCR_REG, and INCR is the next use of INCR_REG; it is an insn
5409 that has a single set whose source is a PLUS of INCR_REG and something
5410 else. */
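/* In the simplest case (hypothetical pseudo P, SImode), the pair
       ... (mem:SI (reg P)) ...
       (set (reg P) (plus:SI (reg P) (const_int 4)))
   becomes ... (mem:SI (post_inc:SI (reg P))) ...; the increment insn is
   rewritten into a register copy and deleted when that copy is a no-op.  */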
5412 static void
5413 attempt_auto_inc (pbi, inc, insn, mem, incr, incr_reg)
5414 struct propagate_block_info *pbi;
5415 rtx inc, insn, mem, incr, incr_reg;
5417 int regno = REGNO (incr_reg);
5418 rtx set = single_set (incr);
5419 rtx q = SET_DEST (set);
5420 rtx y = SET_SRC (set);
5421 int opnum = XEXP (y, 0) == incr_reg ? 0 : 1;
5423 /* Make sure this reg appears only once in this insn. */
5424 if (count_occurrences (PATTERN (insn), incr_reg, 1) != 1)
5425 return;
5427 if (dead_or_set_p (incr, incr_reg)
5428 /* Mustn't autoinc an eliminable register. */
5429 && (regno >= FIRST_PSEUDO_REGISTER
5430 || ! TEST_HARD_REG_BIT (elim_reg_set, regno)))
5432 /* This is the simple case. Try to make the auto-inc. If
5433 we can't, we are done. Otherwise, we will do any
5434 needed updates below. */
5435 if (! validate_change (insn, &XEXP (mem, 0), inc, 0))
5436 return;
5438 else if (GET_CODE (q) == REG
5439 /* PREV_INSN used here to check the semi-open interval
5440 [insn,incr). */
5441 && ! reg_used_between_p (q, PREV_INSN (insn), incr)
5442 /* We must also check for sets of q as q may be
5443 a call clobbered hard register and there may
5444 be a call between PREV_INSN (insn) and incr. */
5445 && ! reg_set_between_p (q, PREV_INSN (insn), incr))
5447 /* We have *p followed sometime later by q = p+size.
5448 Both p and q must be live afterward,
5449 and q is not used between INSN and its assignment.
5450 Change it to q = p, ...*q..., q = q+size.
5451 Then fall into the usual case. */
5452 rtx insns, temp;
5454 start_sequence ();
5455 emit_move_insn (q, incr_reg);
5456 insns = get_insns ();
5457 end_sequence ();
5459 if (basic_block_for_insn)
5460 for (temp = insns; temp; temp = NEXT_INSN (temp))
5461 set_block_for_insn (temp, pbi->bb);
5463 /* If we can't make the auto-inc, or can't make the
5464 replacement into Y, exit. There's no point in making
5465 the change below if we can't do the auto-inc and doing
5466 so is not correct in the pre-inc case. */
5468 XEXP (inc, 0) = q;
5469 validate_change (insn, &XEXP (mem, 0), inc, 1);
5470 validate_change (incr, &XEXP (y, opnum), q, 1);
5471 if (! apply_change_group ())
5472 return;
5474 /* We now know we'll be doing this change, so emit the
5475 new insn(s) and do the updates. */
5476 emit_insns_before (insns, insn);
5478 if (pbi->bb->head == insn)
5479 pbi->bb->head = insns;
5481 /* INCR will become a NOTE and INSN won't contain a
5482 use of INCR_REG. If a use of INCR_REG was just placed in
5483 the insn before INSN, make that the next use.
5484 Otherwise, invalidate it. */
5485 if (GET_CODE (PREV_INSN (insn)) == INSN
5486 && GET_CODE (PATTERN (PREV_INSN (insn))) == SET
5487 && SET_SRC (PATTERN (PREV_INSN (insn))) == incr_reg)
5488 pbi->reg_next_use[regno] = PREV_INSN (insn);
5489 else
5490 pbi->reg_next_use[regno] = 0;
5492 incr_reg = q;
5493 regno = REGNO (q);
5495 /* REGNO is now used in INCR which is below INSN, but
5496 it previously wasn't live here. If we don't mark
5497 it as live, we'll put a REG_DEAD note for it
5498 on this insn, which is incorrect. */
5499 SET_REGNO_REG_SET (pbi->reg_live, regno);
5501 /* If there are any calls between INSN and INCR, show
5502 that REGNO now crosses them. */
5503 for (temp = insn; temp != incr; temp = NEXT_INSN (temp))
5504 if (GET_CODE (temp) == CALL_INSN)
5505 REG_N_CALLS_CROSSED (regno)++;
5507 else
5508 return;
5510 /* If we haven't returned, it means we were able to make the
5511 auto-inc, so update the status. First, record that this insn
5512 has an implicit side effect. */
5514 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, incr_reg, REG_NOTES (insn));
5516 /* Modify the old increment-insn to simply copy
5517 the already-incremented value of our register. */
5518 if (! validate_change (incr, &SET_SRC (set), incr_reg, 0))
5519 abort ();
5521 /* If that makes it a no-op (copying the register into itself) delete
5522 it so it won't appear to be a "use" and a "set" of this
5523 register. */
5524 if (REGNO (SET_DEST (set)) == REGNO (incr_reg))
5526 /* If the original source was dead, it's dead now. */
5527 rtx note;
5529 while ((note = find_reg_note (incr, REG_DEAD, NULL_RTX)) != NULL_RTX)
5531 remove_note (incr, note);
5532 if (XEXP (note, 0) != incr_reg)
5533 CLEAR_REGNO_REG_SET (pbi->reg_live, REGNO (XEXP (note, 0)));
5536 PUT_CODE (incr, NOTE);
5537 NOTE_LINE_NUMBER (incr) = NOTE_INSN_DELETED;
5538 NOTE_SOURCE_FILE (incr) = 0;
5541 if (regno >= FIRST_PSEUDO_REGISTER)
5543 /* Count an extra reference to the reg. When a reg is
5544 incremented, spilling it is worse, so we want to make
5545 that less likely. */
5546 REG_N_REFS (regno) += (optimize_size ? 1 : pbi->bb->loop_depth + 1);
5548 /* Count the increment as a setting of the register,
5549 even though it isn't a SET in rtl. */
5550 REG_N_SETS (regno)++;
5554 /* X is a MEM found in INSN. See if we can convert it into an auto-increment
5555 reference. */
5557 static void
5558 find_auto_inc (pbi, x, insn)
5559 struct propagate_block_info *pbi;
5560 rtx x;
5561 rtx insn;
5563 rtx addr = XEXP (x, 0);
5564 HOST_WIDE_INT offset = 0;
5565 rtx set, y, incr, inc_val;
5566 int regno;
5567 int size = GET_MODE_SIZE (GET_MODE (x));
5569 if (GET_CODE (insn) == JUMP_INSN)
5570 return;
5572 /* Here we detect use of an index register which might be good for
5573 postincrement, postdecrement, preincrement, or predecrement. */
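/* E.g. (hypothetical pseudo P, SImode, size 4): a use of
   (mem:SI (plus:SI (reg P) (const_int 4))) whose next use of P is
   P := P + 4 matches the PRE_INC case below (offset == size), while
   (mem:SI (reg P)) with the same increment matches POST_INC
   (offset == 0).  */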
5575 if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
5576 offset = INTVAL (XEXP (addr, 1)), addr = XEXP (addr, 0);
5578 if (GET_CODE (addr) != REG)
5579 return;
5581 regno = REGNO (addr);
5583 /* Is the next use an increment that might make auto-increment? */
5584 incr = pbi->reg_next_use[regno];
5585 if (incr == 0 || BLOCK_NUM (incr) != BLOCK_NUM (insn))
5586 return;
5587 set = single_set (incr);
5588 if (set == 0 || GET_CODE (set) != SET)
5589 return;
5590 y = SET_SRC (set);
5592 if (GET_CODE (y) != PLUS)
5593 return;
5595 if (REG_P (XEXP (y, 0)) && REGNO (XEXP (y, 0)) == REGNO (addr))
5596 inc_val = XEXP (y, 1);
5597 else if (REG_P (XEXP (y, 1)) && REGNO (XEXP (y, 1)) == REGNO (addr))
5598 inc_val = XEXP (y, 0);
5599 else
5600 return;
5602 if (GET_CODE (inc_val) == CONST_INT)
5604 if (HAVE_POST_INCREMENT
5605 && (INTVAL (inc_val) == size && offset == 0))
5606 attempt_auto_inc (pbi, gen_rtx_POST_INC (Pmode, addr), insn, x,
5607 incr, addr);
5608 else if (HAVE_POST_DECREMENT
5609 && (INTVAL (inc_val) == -size && offset == 0))
5610 attempt_auto_inc (pbi, gen_rtx_POST_DEC (Pmode, addr), insn, x,
5611 incr, addr);
5612 else if (HAVE_PRE_INCREMENT
5613 && (INTVAL (inc_val) == size && offset == size))
5614 attempt_auto_inc (pbi, gen_rtx_PRE_INC (Pmode, addr), insn, x,
5615 incr, addr);
5616 else if (HAVE_PRE_DECREMENT
5617 && (INTVAL (inc_val) == -size && offset == -size))
5618 attempt_auto_inc (pbi, gen_rtx_PRE_DEC (Pmode, addr), insn, x,
5619 incr, addr);
5620 else if (HAVE_POST_MODIFY_DISP && offset == 0)
5621 attempt_auto_inc (pbi, gen_rtx_POST_MODIFY (Pmode, addr,
5622 gen_rtx_PLUS (Pmode,
5623 addr,
5624 inc_val)),
5625 insn, x, incr, addr);
5627 else if (GET_CODE (inc_val) == REG
5628 && ! reg_set_between_p (inc_val, PREV_INSN (insn),
5629 NEXT_INSN (incr)))
5632 if (HAVE_POST_MODIFY_REG && offset == 0)
5633 attempt_auto_inc (pbi, gen_rtx_POST_MODIFY (Pmode, addr,
5634 gen_rtx_PLUS (Pmode,
5635 addr,
5636 inc_val)),
5637 insn, x, incr, addr);
5641 #endif /* AUTO_INC_DEC */
5643 static void
5644 mark_used_reg (pbi, reg, cond, insn)
5645 struct propagate_block_info *pbi;
5646 rtx reg;
5647 rtx cond ATTRIBUTE_UNUSED;
5648 rtx insn;
5650 int regno = REGNO (reg);
5651 int some_was_live = REGNO_REG_SET_P (pbi->reg_live, regno);
5652 int some_was_dead = ! some_was_live;
5653 int some_not_set;
5654 int n;
5656 /* A hard reg in a wide mode may really be multiple registers.
5657 If so, mark all of them just like the first. */
5658 if (regno < FIRST_PSEUDO_REGISTER)
5660 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
5661 while (--n > 0)
5663 int needed_regno = REGNO_REG_SET_P (pbi->reg_live, regno + n);
5664 some_was_live |= needed_regno;
5665 some_was_dead |= ! needed_regno;
5669 if (pbi->flags & (PROP_LOG_LINKS | PROP_AUTOINC))
5671 /* Record where each reg is used, so when the reg is set we know
5672 the next insn that uses it. */
5673 pbi->reg_next_use[regno] = insn;
5676 if (pbi->flags & PROP_REG_INFO)
5678 if (regno < FIRST_PSEUDO_REGISTER)
5680 /* If this is a register we are going to try to eliminate,
5681 don't mark it live here. If we are successful in
5682 eliminating it, it need not be live unless it is used for
5683 pseudos, in which case it will have been set live when it
5684 was allocated to the pseudos. If the register will not
5685 be eliminated, reload will set it live at that point.
5687 Otherwise, record that this function uses this register. */
5688 /* ??? The PPC backend tries to "eliminate" on the pic
5689 register to itself. This should be fixed. In the
5690 meantime, hack around it. */
5692 if (! (TEST_HARD_REG_BIT (elim_reg_set, regno)
5693 && (regno == FRAME_POINTER_REGNUM
5694 || regno == ARG_POINTER_REGNUM)))
5696 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
5697 do
5698 regs_ever_live[regno + --n] = 1;
5699 while (n > 0);
5702 else
5704 /* Keep track of which basic block each reg appears in. */
5706 register int blocknum = pbi->bb->index;
5707 if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
5708 REG_BASIC_BLOCK (regno) = blocknum;
5709 else if (REG_BASIC_BLOCK (regno) != blocknum)
5710 REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
5712 /* Count (weighted) number of uses of each reg. */
5713 REG_N_REFS (regno) += (optimize_size ? 1
5714 : pbi->bb->loop_depth + 1);
5718 /* Find out if any part of the register was set in this insn. */
5719 some_not_set = ! REGNO_REG_SET_P (pbi->new_set, regno);
5720 if (regno < FIRST_PSEUDO_REGISTER)
5722 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
5723 while (--n > 0)
5724 some_not_set |= ! REGNO_REG_SET_P (pbi->new_set, regno + n);
5727 /* Record and count the insns in which a reg dies. If it is used in
5728 this insn and was dead below the insn then it dies in this insn.
5729 If it was set in this insn, we do not make a REG_DEAD note;
5730 likewise if we already made such a note. */
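/* E.g. (hypothetical hard register pair R4/R5 holding a DImode value):
   if this insn sets only R4 while R5 dies here, the code below emits a
   REG_DEAD note for R5 alone rather than for the whole DImode value.  */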
5731 if ((pbi->flags & (PROP_DEATH_NOTES | PROP_REG_INFO))
5732 && some_was_dead
5733 && some_not_set)
5735 /* Check for the case where the register dying partially
5736 overlaps the register set by this insn. */
5737 if (regno < FIRST_PSEUDO_REGISTER
5738 && HARD_REGNO_NREGS (regno, GET_MODE (reg)) > 1)
5740 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
5741 while (--n >= 0)
5742 some_was_live |= REGNO_REG_SET_P (pbi->new_set, regno + n);
5745 /* If none of the words in X is needed, make a REG_DEAD note.
5746 Otherwise, we must make partial REG_DEAD notes. */
5747 if (! some_was_live)
5749 if ((pbi->flags & PROP_DEATH_NOTES)
5750 && ! find_regno_note (insn, REG_DEAD, regno))
5751 REG_NOTES (insn)
5752 = alloc_EXPR_LIST (REG_DEAD, reg, REG_NOTES (insn));
5754 if (pbi->flags & PROP_REG_INFO)
5755 REG_N_DEATHS (regno)++;
5757 else
5759 /* Don't make a REG_DEAD note for a part of a register
5760 that is set in the insn. */
5762 n = regno + HARD_REGNO_NREGS (regno, GET_MODE (reg)) - 1;
5763 for (; n >= regno; n--)
5764 if (! REGNO_REG_SET_P (pbi->reg_live, n)
5765 && ! dead_or_set_regno_p (insn, n))
5766 REG_NOTES (insn)
5767 = alloc_EXPR_LIST (REG_DEAD,
5768 gen_rtx_REG (reg_raw_mode[n], n),
5769 REG_NOTES (insn));
5773 SET_REGNO_REG_SET (pbi->reg_live, regno);
5774 if (regno < FIRST_PSEUDO_REGISTER)
5776 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
5777 while (--n > 0)
5778 SET_REGNO_REG_SET (pbi->reg_live, regno + n);
5781 #ifdef HAVE_conditional_execution
5782 /* If this is a conditional use, record that fact. If it is later
5783 conditionally set, we'll know to kill the register. */
5784 if (cond != NULL_RTX)
5786 splay_tree_node node;
5787 struct reg_cond_life_info *rcli;
5788 rtx ncond;
5790 if (some_was_live)
5792 node = splay_tree_lookup (pbi->reg_cond_dead, regno);
5793 if (node == NULL)
5795 /* The register was unconditionally live previously.
5796 No need to do anything. */
5798 else
5800 /* The register was conditionally live previously.
5801 Subtract the new life cond from the old death cond. */
5802 rcli = (struct reg_cond_life_info *) node->value;
5803 ncond = rcli->condition;
5804 ncond = and_reg_cond (ncond, not_reg_cond (cond), 1);
5806 /* If the register is now unconditionally live, remove the
5807 entry in the splay_tree. */
5808 if (ncond == const0_rtx)
5809 splay_tree_remove (pbi->reg_cond_dead, regno);
5810 else
5812 rcli->condition = ncond;
5813 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));
5817 else
5819 /* The register was not previously live at all. Record
5820 the condition under which it is still dead. */
5821 rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
5822 rcli->condition = not_reg_cond (cond);
5823 rcli->stores = const0_rtx;
5824 rcli->orig_condition = const0_rtx;
5825 splay_tree_insert (pbi->reg_cond_dead, regno,
5826 (splay_tree_value) rcli);
5828 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));
5831 else if (some_was_live)
5833 splay_tree_node node;
5835 node = splay_tree_lookup (pbi->reg_cond_dead, regno);
5836 if (node != NULL)
5838 /* The register was conditionally live previously, but is now
5839 unconditionally so. Remove it from the conditionally dead
5840 list, so that a conditional set won't cause us to think
5841 it dead. */
5842 splay_tree_remove (pbi->reg_cond_dead, regno);
5846 #endif
5849 /* Scan expression X and set a 1-bit in PBI->REG_LIVE for each reg it uses.
5850 This is done assuming the registers needed from X are those that
5851 have 1-bits in PBI->REG_LIVE.
5853 INSN is the containing instruction. If INSN is dead, this function
5854 is not called. */
5856 static void
5857 mark_used_regs (pbi, x, cond, insn)
5858 struct propagate_block_info *pbi;
5859 rtx x, cond, insn;
5861 register RTX_CODE code;
5862 register int regno;
5863 int flags = pbi->flags;
5865 retry:
5866 code = GET_CODE (x);
5867 switch (code)
5869 case LABEL_REF:
5870 case SYMBOL_REF:
5871 case CONST_INT:
5872 case CONST:
5873 case CONST_DOUBLE:
5874 case PC:
5875 case ADDR_VEC:
5876 case ADDR_DIFF_VEC:
5877 return;
5879 #ifdef HAVE_cc0
5880 case CC0:
5881 pbi->cc0_live = 1;
5882 return;
5883 #endif
5885 case CLOBBER:
5886 /* If we are clobbering a MEM, mark any registers inside the address
5887 as being used. */
5888 if (GET_CODE (XEXP (x, 0)) == MEM)
5889 mark_used_regs (pbi, XEXP (XEXP (x, 0), 0), cond, insn);
5890 return;
5892 case MEM:
5893 /* Don't bother watching stores to mems if this is not the
5894 final pass. We'll not be deleting dead stores this round. */
5895 if (optimize && (flags & PROP_SCAN_DEAD_CODE))
5897 /* Invalidate the data for the last MEM stored, but only if MEM is
5898 something that can be stored into. */
5899 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
5900 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
5901 /* Needn't clear the memory set list. */
5903 else
5905 rtx temp = pbi->mem_set_list;
5906 rtx prev = NULL_RTX;
5907 rtx next;
5909 while (temp)
5911 next = XEXP (temp, 1);
5912 if (anti_dependence (XEXP (temp, 0), x))
5914 /* Splice temp out of the list. */
5915 if (prev)
5916 XEXP (prev, 1) = next;
5917 else
5918 pbi->mem_set_list = next;
5919 free_EXPR_LIST_node (temp);
5920 pbi->mem_set_list_len--;
5922 else
5923 prev = temp;
5924 temp = next;
5928 /* If the memory reference had embedded side effects (autoincrement
5929 address modes), then we may need to kill some entries on the
5930 memory set list. */
5931 if (insn)
5932 invalidate_mems_from_autoinc (pbi, insn);
5935 #ifdef AUTO_INC_DEC
5936 if (flags & PROP_AUTOINC)
5937 find_auto_inc (pbi, x, insn);
5938 #endif
5939 break;
5941 case SUBREG:
5942 #ifdef CLASS_CANNOT_CHANGE_MODE
5943 if (GET_CODE (SUBREG_REG (x)) == REG
5944 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER
5945 && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (x),
5946 GET_MODE (SUBREG_REG (x))))
5947 REG_CHANGES_MODE (REGNO (SUBREG_REG (x))) = 1;
5948 #endif
5950 /* While we're here, optimize this case. */
5951 x = SUBREG_REG (x);
5952 if (GET_CODE (x) != REG)
5953 goto retry;
5954 /* Fall through. */
5956 case REG:
5957 /* We see a use of a register (not a set of it) => mark it as needed. */
5958 mark_used_reg (pbi, x, cond, insn);
5959 return;
5961 case SET:
5963 register rtx testreg = SET_DEST (x);
5964 int mark_dest = 0;
5966 /* If storing into MEM, don't show it as being used. But do
5967 show the address as being used. */
5968 if (GET_CODE (testreg) == MEM)
5970 #ifdef AUTO_INC_DEC
5971 if (flags & PROP_AUTOINC)
5972 find_auto_inc (pbi, testreg, insn);
5973 #endif
5974 mark_used_regs (pbi, XEXP (testreg, 0), cond, insn);
5975 mark_used_regs (pbi, SET_SRC (x), cond, insn);
5976 return;
5979 /* Storing in STRICT_LOW_PART is like storing in a reg
5980 in that this SET might be dead, so ignore it in TESTREG,
5981 but in some other ways it is like using the reg.
5983 Storing in a SUBREG or a bit field is like storing the entire
5984 register in that if the register's value is not used
5985 then this SET is not needed. */
5986 while (GET_CODE (testreg) == STRICT_LOW_PART
5987 || GET_CODE (testreg) == ZERO_EXTRACT
5988 || GET_CODE (testreg) == SIGN_EXTRACT
5989 || GET_CODE (testreg) == SUBREG)
5991 #ifdef CLASS_CANNOT_CHANGE_MODE
5992 if (GET_CODE (testreg) == SUBREG
5993 && GET_CODE (SUBREG_REG (testreg)) == REG
5994 && REGNO (SUBREG_REG (testreg)) >= FIRST_PSEUDO_REGISTER
5995 && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (SUBREG_REG (testreg)),
5996 GET_MODE (testreg)))
5997 REG_CHANGES_MODE (REGNO (SUBREG_REG (testreg))) = 1;
5998 #endif
6000 /* Modifying a single register in an alternate mode
6001 does not use any of the old value. But these other
6002 ways of storing in a register do use the old value. */
6003 if (GET_CODE (testreg) == SUBREG
6004 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
6006 else
6007 mark_dest = 1;
6009 testreg = XEXP (testreg, 0);
6012 /* If this is a store into a register or group of registers,
6013 recursively scan the value being stored. */
6015 if ((GET_CODE (testreg) == PARALLEL
6016 && GET_MODE (testreg) == BLKmode)
6017 || (GET_CODE (testreg) == REG
6018 && (regno = REGNO (testreg),
6019 ! (regno == FRAME_POINTER_REGNUM
6020 && (! reload_completed || frame_pointer_needed)))
6021 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
6022 && ! (regno == HARD_FRAME_POINTER_REGNUM
6023 && (! reload_completed || frame_pointer_needed))
6024 #endif
6025 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
6026 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
6027 #endif
6030 if (mark_dest)
6031 mark_used_regs (pbi, SET_DEST (x), cond, insn);
6032 mark_used_regs (pbi, SET_SRC (x), cond, insn);
6033 return;
6036 break;
6038 case ASM_OPERANDS:
6039 case UNSPEC_VOLATILE:
6040 case TRAP_IF:
6041 case ASM_INPUT:
6043 /* Traditional and volatile asm instructions must be considered to use
6044 and clobber all hard registers, all pseudo-registers and all of
6045 memory. So must TRAP_IF and UNSPEC_VOLATILE operations.
6047 Consider for instance a volatile asm that changes the fpu rounding
6048 mode. An insn should not be moved across this even if it only uses
6049 pseudo-regs because it might give an incorrectly rounded result.
6051 ?!? Unfortunately, marking all hard registers as live causes massive
6052 problems for the register allocator and marking all pseudos as live
6053 creates mountains of uninitialized variable warnings.
6055 So for now, just clear the memory set list and mark any regs
6056 we can find in ASM_OPERANDS as used. */
6057 if (code != ASM_OPERANDS || MEM_VOLATILE_P (x))
6059 free_EXPR_LIST_list (&pbi->mem_set_list);
6060 pbi->mem_set_list_len = 0;
6063 /* For all ASM_OPERANDS, we must traverse the vector of input operands.
6064 We cannot just fall through here, since then we would be confused
6065 by the ASM_INPUT rtx inside ASM_OPERANDS, which does not indicate
6066 a traditional asm, unlike its normal usage. */
6067 if (code == ASM_OPERANDS)
6069 int j;
6071 for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
6072 mark_used_regs (pbi, ASM_OPERANDS_INPUT (x, j), cond, insn);
6074 break;
6077 case COND_EXEC:
6078 if (cond != NULL_RTX)
6079 abort ();
6081 mark_used_regs (pbi, COND_EXEC_TEST (x), NULL_RTX, insn);
6083 cond = COND_EXEC_TEST (x);
6084 x = COND_EXEC_CODE (x);
6085 goto retry;
6087 case PHI:
6088 /* We _do_not_ want to scan operands of phi nodes. Operands of
6089 a phi function are evaluated only when control reaches this
6090 block along a particular edge. Therefore, regs that appear
6091 as arguments to phi should not be added to the global live at
6092 start. */
6093 return;
6095 default:
6096 break;
6099 /* Recursively scan the operands of this expression. */
6102 register const char *fmt = GET_RTX_FORMAT (code);
6103 register int i;
6105 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6107 if (fmt[i] == 'e')
6109 /* Tail recursive case: save a function call level. */
6110 if (i == 0)
6112 x = XEXP (x, 0);
6113 goto retry;
6115 mark_used_regs (pbi, XEXP (x, i), cond, insn);
6117 else if (fmt[i] == 'E')
6119 register int j;
6120 for (j = 0; j < XVECLEN (x, i); j++)
6121 mark_used_regs (pbi, XVECEXP (x, i, j), cond, insn);
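/* Illustrative sketch (hypothetical helper, not in the original
   sources) of the generic operand walk that mark_used_regs performs
   above: every rtx code has a format string in which 'e' marks a
   sub-expression and 'E' a vector of sub-expressions.  mark_used_regs
   additionally turns the i == 0 case into a goto to save a call
   frame.  */

static void
walk_rtx_operands (x)
     rtx x;
{
  RTX_CODE code = GET_CODE (x);
  const char *fmt = GET_RTX_FORMAT (code);
  int i, j;

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        walk_rtx_operands (XEXP (x, i));
      else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          walk_rtx_operands (XVECEXP (x, i, j));
    }
}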
6127 #ifdef AUTO_INC_DEC
6129 static int
6130 try_pre_increment_1 (pbi, insn)
6131 struct propagate_block_info *pbi;
6132 rtx insn;
6134 /* Find the next use of this reg. If it is in the same basic block,
6135 make it do pre-increment or pre-decrement if appropriate. */
6136 rtx x = single_set (insn);
6137 HOST_WIDE_INT amount = ((GET_CODE (SET_SRC (x)) == PLUS ? 1 : -1)
6138 * INTVAL (XEXP (SET_SRC (x), 1)));
6139 int regno = REGNO (SET_DEST (x));
6140 rtx y = pbi->reg_next_use[regno];
6141 if (y != 0
6142 && SET_DEST (x) != stack_pointer_rtx
6143 && BLOCK_NUM (y) == BLOCK_NUM (insn)
6144 /* Don't do this if the reg dies, or gets set in y; a standard addressing
6145 mode would be better. */
6146 && ! dead_or_set_p (y, SET_DEST (x))
6147 && try_pre_increment (y, SET_DEST (x), amount))
6149 /* We have found a suitable auto-increment and already changed
6150 insn Y to do it. So flush this increment instruction. */
6151 propagate_block_delete_insn (pbi->bb, insn);
6153 /* Count a reference to this reg for the increment insn we are
6154 deleting. When a reg is incremented, spilling it is worse,
6155 so we want to make that less likely. */
6156 if (regno >= FIRST_PSEUDO_REGISTER)
6158 REG_N_REFS (regno) += (optimize_size ? 1
6159 : pbi->bb->loop_depth + 1);
6160 REG_N_SETS (regno)++;
6163 /* Flush any remembered memories depending on the value of
6164 the incremented register. */
6165 invalidate_mems_from_set (pbi, SET_DEST (x));
6167 return 1;
6169 return 0;
6172 /* Try to change INSN so that it does pre-increment or pre-decrement
6173 addressing on register REG in order to add AMOUNT to REG.
6174 AMOUNT is negative for pre-decrement.
6175 Returns 1 if the change could be made.
6176 This performs all the checks on the validity of the result of modifying INSN. */
6178 static int
6179 try_pre_increment (insn, reg, amount)
6180 rtx insn, reg;
6181 HOST_WIDE_INT amount;
6183 register rtx use;
6185 /* Nonzero if we can try to make a pre-increment or pre-decrement.
6186 For example, addl $4,r1; movl (r1),... can become movl +(r1),... */
6187 int pre_ok = 0;
6188 /* Nonzero if we can try to make a post-increment or post-decrement.
6189 For example, addl $4,r1; movl -4(r1),... can become movl (r1)+,...
6190 It is possible for both PRE_OK and POST_OK to be nonzero if the machine
6191 supports both pre-inc and post-inc, or both pre-dec and post-dec. */
6192 int post_ok = 0;
6194 /* Nonzero if the opportunity actually requires post-inc or post-dec. */
6195 int do_post = 0;
6197 /* From the sign of the increment, see which possibilities are conceivable
6198 on this target machine. */
6199 if (HAVE_PRE_INCREMENT && amount > 0)
6200 pre_ok = 1;
6201 if (HAVE_POST_INCREMENT && amount > 0)
6202 post_ok = 1;
6204 if (HAVE_PRE_DECREMENT && amount < 0)
6205 pre_ok = 1;
6206 if (HAVE_POST_DECREMENT && amount < 0)
6207 post_ok = 1;
6209 if (! (pre_ok || post_ok))
6210 return 0;
6212 /* It is not safe to add a side effect to a jump insn
6213 because if the incremented register is spilled and must be reloaded
6214 there would be no way to store the incremented value back in memory. */
6216 if (GET_CODE (insn) == JUMP_INSN)
6217 return 0;
6219 use = 0;
6220 if (pre_ok)
6221 use = find_use_as_address (PATTERN (insn), reg, 0);
6222 if (post_ok && (use == 0 || use == (rtx) 1))
6224 use = find_use_as_address (PATTERN (insn), reg, -amount);
6225 do_post = 1;
6228 if (use == 0 || use == (rtx) 1)
6229 return 0;
6231 if (GET_MODE_SIZE (GET_MODE (use)) != (amount > 0 ? amount : - amount))
6232 return 0;
6234 /* See if this combination of instruction and addressing mode exists. */
6235 if (! validate_change (insn, &XEXP (use, 0),
6236 gen_rtx_fmt_e (amount > 0
6237 ? (do_post ? POST_INC : PRE_INC)
6238 : (do_post ? POST_DEC : PRE_DEC),
6239 Pmode, reg), 0))
6240 return 0;
6242 /* Record that this insn now has an implicit side effect on REG. */
6243 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, reg, REG_NOTES (insn));
6244 return 1;
6247 #endif /* AUTO_INC_DEC */
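/* Shape of the transformation performed above, as an illustrative
   sketch (register numbers and modes are made up; Pmode is assumed
   to be SImode).  The increment insn

       (set (reg:SI 60) (plus:SI (reg:SI 60) (const_int 4)))

   is deleted, and a later address use of the register in the same
   block

       (mem:SI (reg:SI 60))  becomes  (mem:SI (pre_inc:SI (reg:SI 60)))

   with a REG_INC note recording the implicit side effect.  */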
6249 /* Find the place in the rtx X where REG is used as a memory address.
6250 Return the MEM rtx that so uses it.
6251 If PLUSCONST is nonzero, search instead for a memory address equivalent to
6252 (plus REG (const_int PLUSCONST)).
6254 If such an address does not appear, return 0.
6255 If REG appears more than once, or is used other than in such an address,
6256 return (rtx)1. */
6258 rtx
6259 find_use_as_address (x, reg, plusconst)
6260 register rtx x;
6261 rtx reg;
6262 HOST_WIDE_INT plusconst;
6264 enum rtx_code code = GET_CODE (x);
6265 const char *fmt = GET_RTX_FORMAT (code);
6266 register int i;
6267 register rtx value = 0;
6268 register rtx tem;
6270 if (code == MEM && XEXP (x, 0) == reg && plusconst == 0)
6271 return x;
6273 if (code == MEM && GET_CODE (XEXP (x, 0)) == PLUS
6274 && XEXP (XEXP (x, 0), 0) == reg
6275 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
6276 && INTVAL (XEXP (XEXP (x, 0), 1)) == plusconst)
6277 return x;
6279 if (code == SIGN_EXTRACT || code == ZERO_EXTRACT)
6281 /* If REG occurs inside a MEM used in a bit-field reference,
6282 that is unacceptable. */
6283 if (find_use_as_address (XEXP (x, 0), reg, 0) != 0)
6284 return (rtx) (HOST_WIDE_INT) 1;
6287 if (x == reg)
6288 return (rtx) (HOST_WIDE_INT) 1;
6290 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6292 if (fmt[i] == 'e')
6294 tem = find_use_as_address (XEXP (x, i), reg, plusconst);
6295 if (value == 0)
6296 value = tem;
6297 else if (tem != 0)
6298 return (rtx) (HOST_WIDE_INT) 1;
6300 else if (fmt[i] == 'E')
6302 register int j;
6303 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6305 tem = find_use_as_address (XVECEXP (x, i, j), reg, plusconst);
6306 if (value == 0)
6307 value = tem;
6308 else if (tem != 0)
6309 return (rtx) (HOST_WIDE_INT) 1;
6314 return value;
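/* Usage sketch (hypothetical helper): interpreting the three-way
   result of find_use_as_address.  0 means REG was never used as an
   address; (rtx) 1 means it was used more than once or outside an
   address; anything else is the unique MEM whose address uses it.  */

static int
reg_used_once_as_address_p (insn, reg)
     rtx insn, reg;
{
  rtx use = find_use_as_address (PATTERN (insn), reg, 0);

  return use != 0 && use != (rtx) 1;
}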
6317 /* Write information about registers and basic blocks into FILE.
6318 This is part of making a debugging dump. */
6320 void
6321 dump_regset (r, outf)
6322 regset r;
6323 FILE *outf;
6325 int i;
6326 if (r == NULL)
6328 fputs (" (nil)", outf);
6329 return;
6332 EXECUTE_IF_SET_IN_REG_SET (r, 0, i,
6334 fprintf (outf, " %d", i);
6335 if (i < FIRST_PSEUDO_REGISTER)
6336 fprintf (outf, " [%s]",
6337 reg_names[i]);
6341 void
6342 debug_regset (r)
6343 regset r;
6345 dump_regset (r, stderr);
6346 putc ('\n', stderr);
6349 void
6350 dump_flow_info (file)
6351 FILE *file;
6353 register int i;
6354 static const char * const reg_class_names[] = REG_CLASS_NAMES;
6356 fprintf (file, "%d registers.\n", max_regno);
6357 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
6358 if (REG_N_REFS (i))
6360 enum reg_class class, altclass;
6361 fprintf (file, "\nRegister %d used %d times across %d insns",
6362 i, REG_N_REFS (i), REG_LIVE_LENGTH (i));
6363 if (REG_BASIC_BLOCK (i) >= 0)
6364 fprintf (file, " in block %d", REG_BASIC_BLOCK (i));
6365 if (REG_N_SETS (i))
6366 fprintf (file, "; set %d time%s", REG_N_SETS (i),
6367 (REG_N_SETS (i) == 1) ? "" : "s");
6368 if (REG_USERVAR_P (regno_reg_rtx[i]))
6369 fprintf (file, "; user var");
6370 if (REG_N_DEATHS (i) != 1)
6371 fprintf (file, "; dies in %d places", REG_N_DEATHS (i));
6372 if (REG_N_CALLS_CROSSED (i) == 1)
6373 fprintf (file, "; crosses 1 call");
6374 else if (REG_N_CALLS_CROSSED (i))
6375 fprintf (file, "; crosses %d calls", REG_N_CALLS_CROSSED (i));
6376 if (PSEUDO_REGNO_BYTES (i) != UNITS_PER_WORD)
6377 fprintf (file, "; %d bytes", PSEUDO_REGNO_BYTES (i));
6378 class = reg_preferred_class (i);
6379 altclass = reg_alternate_class (i);
6380 if (class != GENERAL_REGS || altclass != ALL_REGS)
6382 if (altclass == ALL_REGS || class == ALL_REGS)
6383 fprintf (file, "; pref %s", reg_class_names[(int) class]);
6384 else if (altclass == NO_REGS)
6385 fprintf (file, "; %s or none", reg_class_names[(int) class]);
6386 else
6387 fprintf (file, "; pref %s, else %s",
6388 reg_class_names[(int) class],
6389 reg_class_names[(int) altclass]);
6391 if (REG_POINTER (regno_reg_rtx[i]))
6392 fprintf (file, "; pointer");
6393 fprintf (file, ".\n");
6396 fprintf (file, "\n%d basic blocks, %d edges.\n", n_basic_blocks, n_edges);
6397 for (i = 0; i < n_basic_blocks; i++)
6399 register basic_block bb = BASIC_BLOCK (i);
6400 register edge e;
6402 fprintf (file, "\nBasic block %d: first insn %d, last %d, loop_depth %d, count %d.\n",
6403 i, INSN_UID (bb->head), INSN_UID (bb->end), bb->loop_depth, bb->count);
6405 fprintf (file, "Predecessors: ");
6406 for (e = bb->pred; e; e = e->pred_next)
6407 dump_edge_info (file, e, 0);
6409 fprintf (file, "\nSuccessors: ");
6410 for (e = bb->succ; e; e = e->succ_next)
6411 dump_edge_info (file, e, 1);
6413 fprintf (file, "\nRegisters live at start:");
6414 dump_regset (bb->global_live_at_start, file);
6416 fprintf (file, "\nRegisters live at end:");
6417 dump_regset (bb->global_live_at_end, file);
6419 putc ('\n', file);
6422 putc ('\n', file);
6425 void
6426 debug_flow_info ()
6428 dump_flow_info (stderr);
6431 static void
6432 dump_edge_info (file, e, do_succ)
6433 FILE *file;
6434 edge e;
6435 int do_succ;
6437 basic_block side = (do_succ ? e->dest : e->src);
6439 if (side == ENTRY_BLOCK_PTR)
6440 fputs (" ENTRY", file);
6441 else if (side == EXIT_BLOCK_PTR)
6442 fputs (" EXIT", file);
6443 else
6444 fprintf (file, " %d", side->index);
6446 if (e->count)
6447 fprintf (file, " count:%d", e->count);
6449 if (e->flags)
6451 static const char * const bitnames[] = {
6452 "fallthru", "crit", "ab", "abcall", "eh", "fake"
6454 int comma = 0;
6455 int i, flags = e->flags;
6457 fputc (' ', file);
6458 fputc ('(', file);
6459 for (i = 0; flags; i++)
6460 if (flags & (1 << i))
6462 flags &= ~(1 << i);
6464 if (comma)
6465 fputc (',', file);
6466 if (i < (int) ARRAY_SIZE (bitnames))
6467 fputs (bitnames[i], file);
6468 else
6469 fprintf (file, "%d", i);
6470 comma = 1;
6472 fputc (')', file);
6476 /* Print out one basic block with live information at start and end. */
6478 void
6479 dump_bb (bb, outf)
6480 basic_block bb;
6481 FILE *outf;
6483 rtx insn;
6484 rtx last;
6485 edge e;
6487 fprintf (outf, ";; Basic block %d, loop depth %d, count %d",
6488 bb->index, bb->loop_depth, bb->count);
6489 if (bb->eh_beg != -1 || bb->eh_end != -1)
6490 fprintf (outf, ", eh regions %d/%d", bb->eh_beg, bb->eh_end);
6491 putc ('\n', outf);
6493 fputs (";; Predecessors: ", outf);
6494 for (e = bb->pred; e; e = e->pred_next)
6495 dump_edge_info (outf, e, 0);
6496 putc ('\n', outf);
6498 fputs (";; Registers live at start:", outf);
6499 dump_regset (bb->global_live_at_start, outf);
6500 putc ('\n', outf);
6502 for (insn = bb->head, last = NEXT_INSN (bb->end);
6503 insn != last;
6504 insn = NEXT_INSN (insn))
6505 print_rtl_single (outf, insn);
6507 fputs (";; Registers live at end:", outf);
6508 dump_regset (bb->global_live_at_end, outf);
6509 putc ('\n', outf);
6511 fputs (";; Successors: ", outf);
6512 for (e = bb->succ; e; e = e->succ_next)
6513 dump_edge_info (outf, e, 1);
6514 putc ('\n', outf);
6517 void
6518 debug_bb (bb)
6519 basic_block bb;
6521 dump_bb (bb, stderr);
6524 void
6525 debug_bb_n (n)
6526 int n;
6528 dump_bb (BASIC_BLOCK (n), stderr);
6531 /* Like print_rtl, but also print out live information for the start of each
6532 basic block. */
6534 void
6535 print_rtl_with_bb (outf, rtx_first)
6536 FILE *outf;
6537 rtx rtx_first;
6539 register rtx tmp_rtx;
6541 if (rtx_first == 0)
6542 fprintf (outf, "(nil)\n");
6543 else
6545 int i;
6546 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
6547 int max_uid = get_max_uid ();
6548 basic_block *start = (basic_block *)
6549 xcalloc (max_uid, sizeof (basic_block));
6550 basic_block *end = (basic_block *)
6551 xcalloc (max_uid, sizeof (basic_block));
6552 enum bb_state *in_bb_p = (enum bb_state *)
6553 xcalloc (max_uid, sizeof (enum bb_state));
6555 for (i = n_basic_blocks - 1; i >= 0; i--)
6557 basic_block bb = BASIC_BLOCK (i);
6558 rtx x;
6560 start[INSN_UID (bb->head)] = bb;
6561 end[INSN_UID (bb->end)] = bb;
6562 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
6564 enum bb_state state = IN_MULTIPLE_BB;
6565 if (in_bb_p[INSN_UID (x)] == NOT_IN_BB)
6566 state = IN_ONE_BB;
6567 in_bb_p[INSN_UID (x)] = state;
6569 if (x == bb->end)
6570 break;
6574 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
6576 int did_output;
6577 basic_block bb;
6579 if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
6581 fprintf (outf, ";; Start of basic block %d, registers live:",
6582 bb->index);
6583 dump_regset (bb->global_live_at_start, outf);
6584 putc ('\n', outf);
6587 if (in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB
6588 && GET_CODE (tmp_rtx) != NOTE
6589 && GET_CODE (tmp_rtx) != BARRIER)
6590 fprintf (outf, ";; Insn is not within a basic block\n");
6591 else if (in_bb_p[INSN_UID (tmp_rtx)] == IN_MULTIPLE_BB)
6592 fprintf (outf, ";; Insn is in multiple basic blocks\n");
6594 did_output = print_rtl_single (outf, tmp_rtx);
6596 if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
6598 fprintf (outf, ";; End of basic block %d, registers live:\n",
6599 bb->index);
6600 dump_regset (bb->global_live_at_end, outf);
6601 putc ('\n', outf);
6604 if (did_output)
6605 putc ('\n', outf);
6608 free (start);
6609 free (end);
6610 free (in_bb_p);
6613 if (current_function_epilogue_delay_list != 0)
6615 fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
6616 for (tmp_rtx = current_function_epilogue_delay_list; tmp_rtx != 0;
6617 tmp_rtx = XEXP (tmp_rtx, 1))
6618 print_rtl_single (outf, XEXP (tmp_rtx, 0));
6622 /* Dump the rtl into the current debugging dump file, then abort. */
6623 static void
6624 print_rtl_and_abort ()
6626 if (rtl_dump_file)
6628 print_rtl_with_bb (rtl_dump_file, get_insns ());
6629 fclose (rtl_dump_file);
6631 abort ();
6634 /* Recompute register set/reference counts immediately prior to register
6635 allocation.
6637 This avoids problems with set/reference counts changing to/from values
6638 which have special meanings to the register allocators.
6640 Additionally, the reference counts are the primary component used by the
6641 register allocators to prioritize pseudos for allocation to hard regs.
6642 More accurate reference counts generally lead to better register allocation.
6644 F is the first insn to be scanned.
6646 LOOP_STEP denotes how much loop_depth should be incremented per
6647 loop nesting level in order to increase the ref count more for
6648 references in a loop.
6650 It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and
6651 possibly other information which is used by the register allocators. */
6653 void
6654 recompute_reg_usage (f, loop_step)
6655 rtx f ATTRIBUTE_UNUSED;
6656 int loop_step ATTRIBUTE_UNUSED;
6658 allocate_reg_life_data ();
6659 update_life_info (NULL, UPDATE_LIFE_LOCAL, PROP_REG_INFO);
6662 /* Optionally removes all the REG_DEAD and REG_UNUSED notes from a set of
6663 blocks. If BLOCKS is NULL, assume the universal set. Returns a count
6664 of the number of registers that died. */
6666 int
6667 count_or_remove_death_notes (blocks, kill)
6668 sbitmap blocks;
6669 int kill;
6671 int i, count = 0;
6673 for (i = n_basic_blocks - 1; i >= 0; --i)
6675 basic_block bb;
6676 rtx insn;
6678 if (blocks && ! TEST_BIT (blocks, i))
6679 continue;
6681 bb = BASIC_BLOCK (i);
6683 for (insn = bb->head;; insn = NEXT_INSN (insn))
6685 if (INSN_P (insn))
6687 rtx *pprev = &REG_NOTES (insn);
6688 rtx link = *pprev;
6690 while (link)
6692 switch (REG_NOTE_KIND (link))
6694 case REG_DEAD:
6695 if (GET_CODE (XEXP (link, 0)) == REG)
6697 rtx reg = XEXP (link, 0);
6698 int n;
6700 if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
6701 n = 1;
6702 else
6703 n = HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg));
6704 count += n;
6706 /* Fall through. */
6708 case REG_UNUSED:
6709 if (kill)
6711 rtx next = XEXP (link, 1);
6712 free_EXPR_LIST_node (link);
6713 *pprev = link = next;
6714 break;
6716 /* Fall through. */
6718 default:
6719 pprev = &XEXP (link, 1);
6720 link = *pprev;
6721 break;
6726 if (insn == bb->end)
6727 break;
6731 return count;
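/* Usage sketch (hypothetical helper): with KILL == 0 the routine
   above only counts the registers with REG_DEAD notes; with
   KILL == 1 it also strips the REG_DEAD and REG_UNUSED notes in
   BLOCKS (or everywhere, when BLOCKS is NULL).  */

static int
strip_death_notes (blocks)
     sbitmap blocks;
{
  return count_or_remove_death_notes (blocks, 1);
}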
6735 /* Update the block pointer of each insn within BB. */
6737 void
6738 update_bb_for_insn (bb)
6739 basic_block bb;
6741 rtx insn;
6743 if (! basic_block_for_insn)
6744 return;
6746 for (insn = bb->head; ; insn = NEXT_INSN (insn))
6748 set_block_for_insn (insn, bb);
6750 if (insn == bb->end)
6751 break;
6756 /* Record INSN's block as BB. */
6758 void
6759 set_block_for_insn (insn, bb)
6760 rtx insn;
6761 basic_block bb;
6763 size_t uid = INSN_UID (insn);
6764 if (uid >= basic_block_for_insn->num_elements)
6766 int new_size;
6768 /* Add one-eighth the size so we don't keep calling xrealloc. */
6769 new_size = uid + (uid + 7) / 8;
6771 VARRAY_GROW (basic_block_for_insn, new_size);
6773 VARRAY_BB (basic_block_for_insn, uid) = bb;
6776 /* Record INSN's block number as BB. */
6777 /* ??? This has got to go. */
6779 void
6780 set_block_num (insn, bb)
6781 rtx insn;
6782 int bb;
6784 set_block_for_insn (insn, BASIC_BLOCK (bb));
6787 /* Verify CFG consistency. This function checks some CFG invariants and
6788 aborts when something is wrong. The hope is that this function will help
6789 convert many optimization passes to keep the CFG consistent.
6791 Currently it performs the following checks:
6793 - test head/end pointers
6794 - overlapping of basic blocks
6795 - edge list correctness
6796 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
6797 - tails of basic blocks (ensure that the boundary is necessary)
6798 - scans the body of each basic block for JUMP_INSN, CODE_LABEL
6799 and NOTE_INSN_BASIC_BLOCK
6800 - check that all insns are in basic blocks
6801 (except the switch handling code, barriers and notes)
6802 - check that all returns are followed by barriers
6804 In the future it can be extended to check a lot of other stuff as well
6805 (reachability of basic blocks, life information, etc. etc.). */
6807 void
6808 verify_flow_info ()
6810 const int max_uid = get_max_uid ();
6811 const rtx rtx_first = get_insns ();
6812 rtx last_head = get_last_insn ();
6813 basic_block *bb_info;
6814 rtx x;
6815 int i, last_bb_num_seen, num_bb_notes, err = 0;
6817 bb_info = (basic_block *) xcalloc (max_uid, sizeof (basic_block));
6819 for (i = n_basic_blocks - 1; i >= 0; i--)
6821 basic_block bb = BASIC_BLOCK (i);
6822 rtx head = bb->head;
6823 rtx end = bb->end;
6825 /* Verify the end of the basic block is in the INSN chain. */
6826 for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
6827 if (x == end)
6828 break;
6829 if (!x)
6831 error ("End insn %d for block %d not found in the insn stream.",
6832 INSN_UID (end), bb->index);
6833 err = 1;
6836 /* Work backwards from the end to the head of the basic block
6837 to verify the head is in the RTL chain. */
6838 for (; x != NULL_RTX; x = PREV_INSN (x))
6840 /* While walking over the insn chain, verify insns appear
6841 in only one basic block and initialize the BB_INFO array
6842 used by other passes. */
6843 if (bb_info[INSN_UID (x)] != NULL)
6845 error ("Insn %d is in multiple basic blocks (%d and %d)",
6846 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
6847 err = 1;
6849 bb_info[INSN_UID (x)] = bb;
6851 if (x == head)
6852 break;
6854 if (!x)
6856 error ("Head insn %d for block %d not found in the insn stream.",
6857 INSN_UID (head), bb->index);
6858 err = 1;
6861 last_head = x;
6864 /* Now check the basic blocks (boundaries etc.) */
6865 for (i = n_basic_blocks - 1; i >= 0; i--)
6867 basic_block bb = BASIC_BLOCK (i);
6868 /* Check correctness of edge lists. */
6869 edge e;
6871 e = bb->succ;
6872 while (e)
6874 if (e->src != bb)
6876 fprintf (stderr,
6877 "verify_flow_info: Basic block %d succ edge is corrupted\n",
6878 bb->index);
6879 fprintf (stderr, "Predecessor: ");
6880 dump_edge_info (stderr, e, 0);
6881 fprintf (stderr, "\nSuccessor: ");
6882 dump_edge_info (stderr, e, 1);
6883 fflush (stderr);
6884 err = 1;
6886 if (e->dest != EXIT_BLOCK_PTR)
6888 edge e2 = e->dest->pred;
6889 while (e2 && e2 != e)
6890 e2 = e2->pred_next;
6891 if (!e2)
6893 error ("Basic block %i edge lists are corrupted", bb->index);
6894 err = 1;
6897 e = e->succ_next;
6900 e = bb->pred;
6901 while (e)
6903 if (e->dest != bb)
6905 error ("Basic block %d pred edge is corrupted", bb->index);
6906 fputs ("Predecessor: ", stderr);
6907 dump_edge_info (stderr, e, 0);
6908 fputs ("\nSuccessor: ", stderr);
6909 dump_edge_info (stderr, e, 1);
6910 fputc ('\n', stderr);
6911 err = 1;
6913 if (e->src != ENTRY_BLOCK_PTR)
6915 edge e2 = e->src->succ;
6916 while (e2 && e2 != e)
6917 e2 = e2->succ_next;
6918 if (!e2)
6920 error ("Basic block %i edge lists are corrupted", bb->index);
6921 err = 1;
6924 e = e->pred_next;
6927 /* OK, pointers are correct. Now check the header of the basic
6928 block. It ought to contain an optional CODE_LABEL followed
6929 by a NOTE_INSN_BASIC_BLOCK note. */
6930 x = bb->head;
6931 if (GET_CODE (x) == CODE_LABEL)
6933 if (bb->end == x)
6935 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
6936 bb->index);
6937 err = 1;
6939 x = NEXT_INSN (x);
6941 if (!NOTE_INSN_BASIC_BLOCK_P (x) || NOTE_BASIC_BLOCK (x) != bb)
6943 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d\n",
6944 bb->index);
6945 err = 1;
6948 if (bb->end == x)
6950 /* Do checks for empty blocks here */
6952 else
6954 x = NEXT_INSN (x);
6955 while (x)
6957 if (NOTE_INSN_BASIC_BLOCK_P (x))
6959 error ("NOTE_INSN_BASIC_BLOCK %d in the middle of basic block %d",
6960 INSN_UID (x), bb->index);
6961 err = 1;
6964 if (x == bb->end)
6965 break;
6967 if (GET_CODE (x) == JUMP_INSN
6968 || GET_CODE (x) == CODE_LABEL
6969 || GET_CODE (x) == BARRIER)
6971 error ("In basic block %d:", bb->index);
6972 fatal_insn ("Flow control insn inside a basic block", x);
6975 x = NEXT_INSN (x);
6980 last_bb_num_seen = -1;
6981 num_bb_notes = 0;
6982 x = rtx_first;
6983 while (x)
6985 if (NOTE_INSN_BASIC_BLOCK_P (x))
6987 basic_block bb = NOTE_BASIC_BLOCK (x);
6988 num_bb_notes++;
6989 if (bb->index != last_bb_num_seen + 1)
6990 /* Basic blocks not numbered consecutively. */
6991 abort ();
6993 last_bb_num_seen = bb->index;
6996 if (!bb_info[INSN_UID (x)])
6998 switch (GET_CODE (x))
7000 case BARRIER:
7001 case NOTE:
7002 break;
7004 case CODE_LABEL:
7005 /* An addr_vec is placed outside any basic block. */
7006 if (NEXT_INSN (x)
7007 && GET_CODE (NEXT_INSN (x)) == JUMP_INSN
7008 && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
7009 || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC))
7011 x = NEXT_INSN (x);
7014 /* But in any case, non-deletable labels can appear anywhere. */
7015 break;
7017 default:
7018 fatal_insn ("Insn outside basic block", x);
7022 if (INSN_P (x)
7023 && GET_CODE (x) == JUMP_INSN
7024 && returnjump_p (x) && ! condjump_p (x)
7025 && ! (NEXT_INSN (x) && GET_CODE (NEXT_INSN (x)) == BARRIER))
7026 fatal_insn ("Return not followed by barrier", x);
7028 x = NEXT_INSN (x);
7031 if (num_bb_notes != n_basic_blocks)
7032 internal_error
7033 ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
7034 num_bb_notes, n_basic_blocks);
7036 if (err)
7037 abort ();
7039 /* Clean up. */
7040 free (bb_info);
7043 /* Functions to access an edge list with a vector representation.
7044 Enough data is kept such that given an index number, the
7045 pred and succ that edge represents can be determined, or
7046 given a pred and a succ, its index number can be returned.
7047 This allows algorithms which would otherwise consume a lot of memory to
7048 represent the normally full matrix of edge (pred,succ) with a
7049 single indexed vector, edge (EDGE_INDEX (pred, succ)), with no
7050 wasted space in the client code due to sparse flow graphs. */
7052 /* This function initializes the edge list. Basically the entire
7053 flowgraph is processed, and all edges are assigned a number,
7054 and the data structure is filled in. */
7056 struct edge_list *
7057 create_edge_list ()
7059 struct edge_list *elist;
7060 edge e;
7061 int num_edges;
7062 int x;
7063 int block_count;
7065 block_count = n_basic_blocks + 2; /* Include the entry and exit blocks. */
7067 num_edges = 0;
7069 /* Determine the number of edges in the flow graph by counting successor
7070 edges on each basic block. */
7071 for (x = 0; x < n_basic_blocks; x++)
7073 basic_block bb = BASIC_BLOCK (x);
7075 for (e = bb->succ; e; e = e->succ_next)
7076 num_edges++;
7078 /* Don't forget successors of the entry block. */
7079 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
7080 num_edges++;
7082 elist = (struct edge_list *) xmalloc (sizeof (struct edge_list));
7083 elist->num_blocks = block_count;
7084 elist->num_edges = num_edges;
7085 elist->index_to_edge = (edge *) xmalloc (sizeof (edge) * num_edges);
7087 num_edges = 0;
7089 /* Follow successors of the entry block, and register these edges. */
7090 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
7092 elist->index_to_edge[num_edges] = e;
7093 num_edges++;
7096 for (x = 0; x < n_basic_blocks; x++)
7098 basic_block bb = BASIC_BLOCK (x);
7100 /* Follow all successors of blocks, and register these edges. */
7101 for (e = bb->succ; e; e = e->succ_next)
7103 elist->index_to_edge[num_edges] = e;
7104 num_edges++;
7107 return elist;
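/* Usage sketch (hypothetical helper): once built, the compressed list
   lets a client visit every edge by index through the INDEX_EDGE
   accessor instead of walking each block's successor lists.  */

static void
visit_all_edges (elist)
     struct edge_list *elist;
{
  int ix;

  for (ix = 0; ix < NUM_EDGES (elist); ix++)
    {
      edge e = INDEX_EDGE (elist, ix);

      /* E->src and E->dest are the endpoints; the entry and exit
         blocks appear as ENTRY_BLOCK_PTR and EXIT_BLOCK_PTR.  */
      if (e->src == ENTRY_BLOCK_PTR)
        continue;
    }
}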
7110 /* This function frees the memory associated with an edge list. */
7112 void
7113 free_edge_list (elist)
7114 struct edge_list *elist;
7116 if (elist)
7118 free (elist->index_to_edge);
7119 free (elist);
7123 /* This function provides debug output showing an edge list. */
7125 void
7126 print_edge_list (f, elist)
7127 FILE *f;
7128 struct edge_list *elist;
7130 int x;
7131 fprintf (f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
7132 elist->num_blocks - 2, elist->num_edges);
7134 for (x = 0; x < elist->num_edges; x++)
7136 fprintf (f, " %-4d - edge(", x);
7137 if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR)
7138 fprintf (f, "entry,");
7139 else
7140 fprintf (f, "%d,", INDEX_EDGE_PRED_BB (elist, x)->index);
7142 if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR)
7143 fprintf (f, "exit)\n");
7144 else
7145 fprintf (f, "%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
7149 /* This function provides an internal consistency check of an edge list,
7150 verifying that all edges are present, and that there are no
7151 extra edges. */
7153 void
7154 verify_edge_list (f, elist)
7155 FILE *f;
7156 struct edge_list *elist;
7158 int x, pred, succ, index;
7159 edge e;
7161 for (x = 0; x < n_basic_blocks; x++)
7163 basic_block bb = BASIC_BLOCK (x);
7165 for (e = bb->succ; e; e = e->succ_next)
7167 pred = e->src->index;
7168 succ = e->dest->index;
7169 index = EDGE_INDEX (elist, e->src, e->dest);
7170 if (index == EDGE_INDEX_NO_EDGE)
7172 fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
7173 continue;
7175 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
7176 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
7177 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
7178 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
7179 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
7180 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
7183 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
7185 pred = e->src->index;
7186 succ = e->dest->index;
7187 index = EDGE_INDEX (elist, e->src, e->dest);
7188 if (index == EDGE_INDEX_NO_EDGE)
7190 fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
7191 continue;
7193 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
7194 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
7195 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
7196 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
7197 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
7198 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
7200 /* We've verified that all the edges are in the list, now let's make sure
7201 there are no spurious edges in the list. */
7203 for (pred = 0; pred < n_basic_blocks; pred++)
7204 for (succ = 0; succ < n_basic_blocks; succ++)
7206 basic_block p = BASIC_BLOCK (pred);
7207 basic_block s = BASIC_BLOCK (succ);
7209 int found_edge = 0;
7211 for (e = p->succ; e; e = e->succ_next)
7212 if (e->dest == s)
7214 found_edge = 1;
7215 break;
7217 for (e = s->pred; e; e = e->pred_next)
7218 if (e->src == p)
7220 found_edge = 1;
7221 break;
7223 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
7224 == EDGE_INDEX_NO_EDGE && found_edge != 0)
7225 fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
7226 pred, succ);
7227 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
7228 != EDGE_INDEX_NO_EDGE && found_edge == 0)
7229 fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
7230 pred, succ, EDGE_INDEX (elist, BASIC_BLOCK (pred),
7231 BASIC_BLOCK (succ)));
7233 for (succ = 0; succ < n_basic_blocks; succ++)
7235 basic_block p = ENTRY_BLOCK_PTR;
7236 basic_block s = BASIC_BLOCK (succ);
7238 int found_edge = 0;
7240 for (e = p->succ; e; e = e->succ_next)
7241 if (e->dest == s)
7243 found_edge = 1;
7244 break;
7246 for (e = s->pred; e; e = e->pred_next)
7247 if (e->src == p)
7249 found_edge = 1;
7250 break;
7252 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
7253 == EDGE_INDEX_NO_EDGE && found_edge != 0)
7254 fprintf (f, "*** Edge (entry, %d) appears to not have an index\n",
7255 succ);
7256 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
7257 != EDGE_INDEX_NO_EDGE && found_edge == 0)
7258 fprintf (f, "*** Edge (entry, %d) has index %d, but no edge exists\n",
7259 succ, EDGE_INDEX (elist, ENTRY_BLOCK_PTR,
7260 BASIC_BLOCK (succ)));
7262 for (pred = 0; pred < n_basic_blocks; pred++)
7264 basic_block p = BASIC_BLOCK (pred);
7265 basic_block s = EXIT_BLOCK_PTR;
7267 int found_edge = 0;
7269 for (e = p->succ; e; e = e->succ_next)
7270 if (e->dest == s)
7272 found_edge = 1;
7273 break;
7275 for (e = s->pred; e; e = e->pred_next)
7276 if (e->src == p)
7278 found_edge = 1;
7279 break;
7281 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
7282 == EDGE_INDEX_NO_EDGE && found_edge != 0)
7283 fprintf (f, "*** Edge (%d, exit) appears to not have an index\n",
7284 pred);
7285 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
7286 != EDGE_INDEX_NO_EDGE && found_edge == 0)
7287 fprintf (f, "*** Edge (%d, exit) has index %d, but no edge exists\n",
7288 pred, EDGE_INDEX (elist, BASIC_BLOCK (pred),
7289 EXIT_BLOCK_PTR));
7293 /* This routine will determine what, if any, edge there is between
7294 a specified predecessor and successor. */
7296 int
7297 find_edge_index (edge_list, pred, succ)
7298 struct edge_list *edge_list;
7299 basic_block pred, succ;
7301 int x;
7302 for (x = 0; x < NUM_EDGES (edge_list); x++)
7304 if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
7305 && INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
7306 return x;
7308 return (EDGE_INDEX_NO_EDGE);
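/* Sketch (hypothetical helper): since EDGE_INDEX_NO_EDGE is the
   sentinel return value, testing for the presence of an edge reduces
   to a single comparison.  */

static int
edge_present_p (edge_list, pred, succ)
     struct edge_list *edge_list;
     basic_block pred, succ;
{
  return find_edge_index (edge_list, pred, succ) != EDGE_INDEX_NO_EDGE;
}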
7311 /* This function will remove an edge from the flow graph. */
7313 void
7314 remove_edge (e)
7315 edge e;
7317 edge last_pred = NULL;
7318 edge last_succ = NULL;
7319 edge tmp;
7320 basic_block src, dest;
7321 src = e->src;
7322 dest = e->dest;
7323 for (tmp = src->succ; tmp && tmp != e; tmp = tmp->succ_next)
7324 last_succ = tmp;
7326 if (!tmp)
7327 abort ();
7328 if (last_succ)
7329 last_succ->succ_next = e->succ_next;
7330 else
7331 src->succ = e->succ_next;
7333 for (tmp = dest->pred; tmp && tmp != e; tmp = tmp->pred_next)
7334 last_pred = tmp;
7336 if (!tmp)
7337 abort ();
7338 if (last_pred)
7339 last_pred->pred_next = e->pred_next;
7340 else
7341 dest->pred = e->pred_next;
7343 n_edges--;
7344 free (e);
7347 /* This routine will remove any fake successor edges for a basic block.
7348 When the edge is removed, it is also removed from whatever predecessor
7349 list it is in. */
7351 static void
7352 remove_fake_successors (bb)
7353 basic_block bb;
7355 edge e;
7356 for (e = bb->succ; e;)
7358 edge tmp = e;
7359 e = e->succ_next;
7360 if ((tmp->flags & EDGE_FAKE) == EDGE_FAKE)
7361 remove_edge (tmp);
7365 /* This routine will remove all fake edges from the flow graph. If
7366 we remove all fake successors, it will automatically remove all
7367 fake predecessors. */
7369 void
7370 remove_fake_edges ()
7372 int x;
7374 for (x = 0; x < n_basic_blocks; x++)
7375 remove_fake_successors (BASIC_BLOCK (x));
7377 /* We've handled all successors except the entry block's. */
7378 remove_fake_successors (ENTRY_BLOCK_PTR);
7381 /* This function will add a fake edge between any block which has no
7382 successors, and the exit block. Some data flow equations require these
7383 edges to exist. */
7385 void
7386 add_noreturn_fake_exit_edges ()
7388 int x;
7390 for (x = 0; x < n_basic_blocks; x++)
7391 if (BASIC_BLOCK (x)->succ == NULL)
7392 make_edge (NULL, BASIC_BLOCK (x), EXIT_BLOCK_PTR, EDGE_FAKE);
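/* Typical pairing, as a hedged sketch (the driver function is
   hypothetical): fake exit edges are added before running a problem
   that needs every block to reach the exit, then removed so the real
   CFG is restored.  */

static void
analyze_with_fake_exits ()
{
  add_noreturn_fake_exit_edges ();
  /* ... run the backward dataflow problem here ...  */
  remove_fake_edges ();
}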
7395 /* This function adds a fake edge from any infinite loop to the
7396 exit block. Some optimizations require a path from each node to
7397 the exit node.
7399 See also Morgan, Figure 3.10, pp. 82-83.
7401 The current implementation is ugly, not attempting to minimize the
7402 number of inserted fake edges. To reduce the number of fake edges
7403 to insert, add fake edges from _innermost_ loops containing only
7404 nodes not reachable from the exit block. */
7406 void
7407 connect_infinite_loops_to_exit ()
7409 basic_block unvisited_block;
7411 /* Perform depth-first search in the reverse graph to find nodes
7412 reachable from the exit block. */
7413 struct depth_first_search_dsS dfs_ds;
7415 flow_dfs_compute_reverse_init (&dfs_ds);
7416 flow_dfs_compute_reverse_add_bb (&dfs_ds, EXIT_BLOCK_PTR);
7418 /* Repeatedly add fake edges, updating the unreachable nodes. */
7419 while (1)
7421 unvisited_block = flow_dfs_compute_reverse_execute (&dfs_ds);
7422 if (!unvisited_block)
7423 break;
7424 make_edge (NULL, unvisited_block, EXIT_BLOCK_PTR, EDGE_FAKE);
7425 flow_dfs_compute_reverse_add_bb (&dfs_ds, unvisited_block);
7428 flow_dfs_compute_reverse_finish (&dfs_ds);
7430 return;
7433 /* Redirect an edge's successor from one block to another. */
7435 void
7436 redirect_edge_succ (e, new_succ)
7437 edge e;
7438 basic_block new_succ;
7440 edge *pe;
7442 /* Disconnect the edge from the old successor block. */
7443 for (pe = &e->dest->pred; *pe != e; pe = &(*pe)->pred_next)
7444 continue;
7445 *pe = (*pe)->pred_next;
7447 /* Reconnect the edge to the new successor block. */
7448 e->pred_next = new_succ->pred;
7449 new_succ->pred = e;
7450 e->dest = new_succ;
7453 /* Redirect an edge's predecessor from one block to another. */
7455 void
7456 redirect_edge_pred (e, new_pred)
7457 edge e;
7458 basic_block new_pred;
7460 edge *pe;
7462 /* Disconnect the edge from the old predecessor block. */
7463 for (pe = &e->src->succ; *pe != e; pe = &(*pe)->succ_next)
7464 continue;
7465 *pe = (*pe)->succ_next;
7467 /* Reconnect the edge to the new predecessor block. */
7468 e->succ_next = new_pred->succ;
7469 new_pred->succ = e;
7470 e->src = new_pred;
7473 /* Dump the list of basic blocks in the bitmap NODES. */
7475 static void
7476 flow_nodes_print (str, nodes, file)
7477 const char *str;
7478 const sbitmap nodes;
7479 FILE *file;
7481 int node;
7483 if (! nodes)
7484 return;
7486 fprintf (file, "%s { ", str);
7487 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {fprintf (file, "%d ", node);});
7488 fputs ("}\n", file);
7492 /* Dump the list of edges in the array EDGE_LIST. */
7494 static void
7495 flow_edge_list_print (str, edge_list, num_edges, file)
7496 const char *str;
7497 const edge *edge_list;
7498 int num_edges;
7499 FILE *file;
7501 int i;
7503 if (! edge_list)
7504 return;
7506 fprintf (file, "%s { ", str);
7507 for (i = 0; i < num_edges; i++)
7508 fprintf (file, "%d->%d ", edge_list[i]->src->index,
7509 edge_list[i]->dest->index);
7510 fputs ("}\n", file);
7514 /* Dump loop related CFG information. */
7516 static void
7517 flow_loops_cfg_dump (loops, file)
7518 const struct loops *loops;
7519 FILE *file;
7521 int i;
7523 if (! loops->num || ! file || ! loops->cfg.dom)
7524 return;
7526 for (i = 0; i < n_basic_blocks; i++)
7528 edge succ;
7530 fprintf (file, ";; %d succs { ", i);
7531 for (succ = BASIC_BLOCK (i)->succ; succ; succ = succ->succ_next)
7532 fprintf (file, "%d ", succ->dest->index);
7533 flow_nodes_print ("} dom", loops->cfg.dom[i], file);
7536 /* Dump the DFS node order. */
7537 if (loops->cfg.dfs_order)
7539 fputs (";; DFS order: ", file);
7540 for (i = 0; i < n_basic_blocks; i++)
7541 fprintf (file, "%d ", loops->cfg.dfs_order[i]);
7542 fputs ("\n", file);
7544 /* Dump the reverse completion node order. */
7545 if (loops->cfg.rc_order)
7547 fputs (";; RC order: ", file);
7548 for (i = 0; i < n_basic_blocks; i++)
7549 fprintf (file, "%d ", loops->cfg.rc_order[i]);
7550 fputs ("\n", file);
7554 /* Return non-zero if the nodes of LOOP are a subset of OUTER. */
7556 static int
7557 flow_loop_nested_p (outer, loop)
7558 struct loop *outer;
7559 struct loop *loop;
7561 return sbitmap_a_subset_b_p (loop->nodes, outer->nodes);
7565 /* Dump the loop information specified by LOOP to the stream FILE
7566 using auxiliary dump callback function LOOP_DUMP_AUX if non-null. */
7567 void
7568 flow_loop_dump (loop, file, loop_dump_aux, verbose)
7569 const struct loop *loop;
7570 FILE *file;
7571 void (*loop_dump_aux) PARAMS((const struct loop *, FILE *, int));
7572 int verbose;
7574 if (! loop || ! loop->header)
7575 return;
7577 fprintf (file, ";;\n;; Loop %d (%d to %d):%s%s\n",
7578 loop->num, INSN_UID (loop->first->head),
7579 INSN_UID (loop->last->end),
7580 loop->shared ? " shared" : "",
7581 loop->invalid ? " invalid" : "");
7582 fprintf (file, ";; header %d, latch %d, pre-header %d, first %d, last %d\n",
7583 loop->header->index, loop->latch->index,
7584 loop->pre_header ? loop->pre_header->index : -1,
7585 loop->first->index, loop->last->index);
7586 fprintf (file, ";; depth %d, level %d, outer %ld\n",
7587 loop->depth, loop->level,
7588 (long) (loop->outer ? loop->outer->num : -1));
7590 if (loop->pre_header_edges)
7591 flow_edge_list_print (";; pre-header edges", loop->pre_header_edges,
7592 loop->num_pre_header_edges, file);
7593 flow_edge_list_print (";; entry edges", loop->entry_edges,
7594 loop->num_entries, file);
7595 fprintf (file, ";; %d", loop->num_nodes);
7596 flow_nodes_print (" nodes", loop->nodes, file);
7597 flow_edge_list_print (";; exit edges", loop->exit_edges,
7598 loop->num_exits, file);
7599 if (loop->exits_doms)
7600 flow_nodes_print (";; exit doms", loop->exits_doms, file);
7601 if (loop_dump_aux)
7602 loop_dump_aux (loop, file, verbose);
7606 /* Dump the loop information specified by LOOPS to the stream FILE,
7607 using auxiliary dump callback function LOOP_DUMP_AUX if non-null. */
7608 void
7609 flow_loops_dump (loops, file, loop_dump_aux, verbose)
7610 const struct loops *loops;
7611 FILE *file;
7612 void (*loop_dump_aux) PARAMS((const struct loop *, FILE *, int));
7613 int verbose;
7615 int i;
7616 int num_loops;
7618 num_loops = loops->num;
7619 if (! num_loops || ! file)
7620 return;
7622 fprintf (file, ";; %d loops found, %d levels\n",
7623 num_loops, loops->levels);
7625 for (i = 0; i < num_loops; i++)
7627 struct loop *loop = &loops->array[i];
7629 flow_loop_dump (loop, file, loop_dump_aux, verbose);
7631 if (loop->shared)
7633 int j;
7635 for (j = 0; j < i; j++)
7637 struct loop *oloop = &loops->array[j];
7639 if (loop->header == oloop->header)
7641 int disjoint;
7642 int smaller;
7644 smaller = loop->num_nodes < oloop->num_nodes;
7646 /* If the union of LOOP and OLOOP is different from
7647 the larger of LOOP and OLOOP, then LOOP and OLOOP
7648 must be disjoint. */
7649 disjoint = ! flow_loop_nested_p (smaller ? loop : oloop,
7650 smaller ? oloop : loop);
7651 fprintf (file,
7652 ";; loop header %d shared by loops %d, %d %s\n",
7653 loop->header->index, i, j,
7654 disjoint ? "disjoint" : "nested");
7660 if (verbose)
7661 flow_loops_cfg_dump (loops, file);
7665 /* Free all the memory allocated for LOOPS. */
7667 void
7668 flow_loops_free (loops)
7669 struct loops *loops;
7671 if (loops->array)
7673 int i;
7675 if (! loops->num)
7676 abort ();
7678 /* Free the loop descriptors. */
7679 for (i = 0; i < loops->num; i++)
7681 struct loop *loop = &loops->array[i];
7683 if (loop->pre_header_edges)
7684 free (loop->pre_header_edges);
7685 if (loop->nodes)
7686 sbitmap_free (loop->nodes);
7687 if (loop->entry_edges)
7688 free (loop->entry_edges);
7689 if (loop->exit_edges)
7690 free (loop->exit_edges);
7691 if (loop->exits_doms)
7692 sbitmap_free (loop->exits_doms);
7694 free (loops->array);
7695 loops->array = NULL;
7697 if (loops->cfg.dom)
7698 sbitmap_vector_free (loops->cfg.dom);
7699 if (loops->cfg.dfs_order)
7700 free (loops->cfg.dfs_order);
7702 if (loops->shared_headers)
7703 sbitmap_free (loops->shared_headers);
7708 /* Find the entry edges into the loop with header HEADER and nodes
7709 NODES and store them in the ENTRY_EDGES array. Return the number
7710 of entry edges into the loop. */
7712 static int
7713 flow_loop_entry_edges_find (header, nodes, entry_edges)
7714 basic_block header;
7715 const sbitmap nodes;
7716 edge **entry_edges;
7718 edge e;
7719 int num_entries;
7721 *entry_edges = NULL;
7723 num_entries = 0;
7724 for (e = header->pred; e; e = e->pred_next)
7726 basic_block src = e->src;
7728 if (src == ENTRY_BLOCK_PTR || ! TEST_BIT (nodes, src->index))
7729 num_entries++;
7732 if (! num_entries)
7733 abort ();
7735 *entry_edges = (edge *) xmalloc (num_entries * sizeof (edge *));
7737 num_entries = 0;
7738 for (e = header->pred; e; e = e->pred_next)
7740 basic_block src = e->src;
7742 if (src == ENTRY_BLOCK_PTR || ! TEST_BIT (nodes, src->index))
7743 (*entry_edges)[num_entries++] = e;
7746 return num_entries;
7750 /* Find the exit edges from the loop using the bitmap of loop nodes
7751 NODES and store them in the EXIT_EDGES array. Return the number of
7752 exit edges from the loop. */
7754 static int
7755 flow_loop_exit_edges_find (nodes, exit_edges)
7756 const sbitmap nodes;
7757 edge **exit_edges;
7759 edge e;
7760 int node;
7761 int num_exits;
7763 *exit_edges = NULL;
7765 /* Check all nodes within the loop to see if there are any
7766 successors not in the loop. Note that a node may have multiple
7767 exiting edges, though a node with one jump edge and one fallthru
7768 edge can have only one of them exit the loop. */
7769 num_exits = 0;
7770 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
7771 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
7773 basic_block dest = e->dest;
7775 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
7776 num_exits++;
7780 if (! num_exits)
7781 return 0;
7783 *exit_edges = (edge *) xmalloc (num_exits * sizeof (edge *));
7785 /* Store all exiting edges into an array. */
7786 num_exits = 0;
7787 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
7788 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
7790 basic_block dest = e->dest;
7792 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
7793 (*exit_edges)[num_exits++] = e;
7797 return num_exits;
7801 /* Find the nodes contained within the loop with header HEADER and
7802 latch LATCH and store in NODES. Return the number of nodes within
7803 the loop. */
7805 static int
7806 flow_loop_nodes_find (header, latch, nodes)
7807 basic_block header;
7808 basic_block latch;
7809 sbitmap nodes;
7811 basic_block *stack;
7812 int sp;
7813 int num_nodes = 0;
7815 stack = (basic_block *) xmalloc (n_basic_blocks * sizeof (basic_block));
7816 sp = 0;
7818 /* Start with only the loop header in the set of loop nodes. */
7819 sbitmap_zero (nodes);
7820 SET_BIT (nodes, header->index);
7821 num_nodes++;
7822 header->loop_depth++;
7824 /* Push the loop latch on to the stack. */
7825 if (! TEST_BIT (nodes, latch->index))
7827 SET_BIT (nodes, latch->index);
7828 latch->loop_depth++;
7829 num_nodes++;
7830 stack[sp++] = latch;
7833 while (sp)
7835 basic_block node;
7836 edge e;
7838 node = stack[--sp];
7839 for (e = node->pred; e; e = e->pred_next)
7841 basic_block ancestor = e->src;
7843 /* If this ancestor has not been marked as part of the loop, add it
7844 to the set of loop nodes and push it onto the stack. */
7845 if (ancestor != ENTRY_BLOCK_PTR
7846 && ! TEST_BIT (nodes, ancestor->index))
7848 SET_BIT (nodes, ancestor->index);
7849 ancestor->loop_depth++;
7850 num_nodes++;
7851 stack[sp++] = ancestor;
7855 free (stack);
7856 return num_nodes;
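/* Sketch (hypothetical helper): collect the natural loop of the back
   edge LATCH->HEADER into a fresh bitmap.  Note that
   flow_loop_nodes_find also increments the loop_depth of each member
   block as a side effect.  */

static sbitmap
natural_loop_members (header, latch)
     basic_block header, latch;
{
  sbitmap nodes = sbitmap_alloc (n_basic_blocks);
  int n_members = flow_loop_nodes_find (header, latch, nodes);

  /* The header is always a member, so N_MEMBERS is at least 1.  */
  if (n_members == 0)
    abort ();

  return nodes;
}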
7859 /* Compute the depth first search order and store it in the array
7860 DFS_ORDER if non-zero. If RC_ORDER is non-zero, store the
7861 reverse completion number for each node there. Returns the number
7862 of nodes visited. A depth first search tries to get as far away
7863 from the starting point as quickly as possible. */
7866 static int
7867 flow_depth_first_order_compute (dfs_order, rc_order)
7868 int *dfs_order;
7869 int *rc_order;
7871 edge *stack;
7872 int sp;
7873 int dfsnum = 0;
7874 int rcnum = n_basic_blocks - 1;
7875 sbitmap visited;
7877 /* Allocate stack for back-tracking up CFG. */
7878 stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
7879 sp = 0;
7881 /* Allocate bitmap to track nodes that have been visited. */
7882 visited = sbitmap_alloc (n_basic_blocks);
7884 /* None of the nodes in the CFG have been visited yet. */
7885 sbitmap_zero (visited);
7887 /* Push the first edge on to the stack. */
7888 stack[sp++] = ENTRY_BLOCK_PTR->succ;
7890 while (sp)
7892 edge e;
7893 basic_block src;
7894 basic_block dest;
7896 /* Look at the edge on the top of the stack. */
7897 e = stack[sp - 1];
7898 src = e->src;
7899 dest = e->dest;
7901 /* Check if the edge destination has been visited yet. */
7902 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
7904 /* Mark that we have visited the destination. */
7905 SET_BIT (visited, dest->index);
7907 if (dfs_order)
7908 dfs_order[dfsnum++] = dest->index;
7910 if (dest->succ)
7912 /* Since the DEST node has been visited for the first
7913 time, check its successors. */
7914 stack[sp++] = dest->succ;
7916 else
7918 /* There are no successors for the DEST node so assign
7919 its reverse completion number. */
7920 if (rc_order)
7921 rc_order[rcnum--] = dest->index;
7924 else
7926 if (! e->succ_next && src != ENTRY_BLOCK_PTR)
7928 /* There are no more successors for the SRC node
7929 so assign its reverse completion number. */
7930 if (rc_order)
7931 rc_order[rcnum--] = src->index;
7934 if (e->succ_next)
7935 stack[sp - 1] = e->succ_next;
7936 else
7937 sp--;
7941 free (stack);
7942 sbitmap_free (visited);
7944 /* The number of nodes visited should not be greater than
7945 n_basic_blocks. */
7946 if (dfsnum > n_basic_blocks)
7947 abort ();
7949 /* There are some nodes left in the CFG that are unreachable. */
7950 if (dfsnum < n_basic_blocks)
7951 abort ();
7952 return dfsnum;
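/* Usage sketch (hypothetical caller): RC_ORDER[0] names the node
   whose search completed last, so RC_ORDER is a reverse completion
   ordering suitable for iterating forward dataflow problems.  */

static void
compute_block_orders ()
{
  int *dfs_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
  int *rc_order = (int *) xmalloc (n_basic_blocks * sizeof (int));

  flow_depth_first_order_compute (dfs_order, rc_order);
  /* ... consume DFS_ORDER and RC_ORDER here ...  */
  free (rc_order);
  free (dfs_order);
}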
7955 /* Compute the depth first search order on the _reverse_ graph and
7956 store in the array DFS_ORDER, marking the nodes visited in VISITED.
7957 Returns the number of nodes visited.
7959 The computation is split into three pieces:
7961 flow_dfs_compute_reverse_init () creates the necessary data
7962 structures.
7964 flow_dfs_compute_reverse_add_bb () adds a basic block to the data
7965 structures. The block will start the search.
7967 flow_dfs_compute_reverse_execute () continues (or starts) the
7968 search using the block on the top of the stack, stopping when the
7969 stack is empty.
7971 flow_dfs_compute_reverse_finish () destroys the necessary data
7972 structures.
7974 Thus, the user will probably call ..._init(), call ..._add_bb() to
7975 add a beginning basic block to the stack, call ..._execute(),
7976 possibly add another bb to the stack and again call ..._execute(),
7977 ..., and finally call _finish(). */
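/* The call sequence just described, rendered as a minimal sketch
   (the driver function is hypothetical; compare
   connect_infinite_loops_to_exit earlier in this file for the real
   client).  */

static basic_block
first_block_unreachable_from_exit ()
{
  struct depth_first_search_dsS dfs_ds;
  basic_block bb;

  flow_dfs_compute_reverse_init (&dfs_ds);
  flow_dfs_compute_reverse_add_bb (&dfs_ds, EXIT_BLOCK_PTR);
  bb = flow_dfs_compute_reverse_execute (&dfs_ds);
  flow_dfs_compute_reverse_finish (&dfs_ds);

  /* BB is NULL when every block reaches the exit.  */
  return bb;
}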
7979 /* Initialize the data structures used for depth-first search on the
7980 reverse graph. DATA is the current depth-first search context;
7981 starting blocks are added by the caller via
7982 flow_dfs_compute_reverse_add_bb. */
7985 static void
7986 flow_dfs_compute_reverse_init (data)
7987 depth_first_search_ds data;
7989 /* Allocate stack for back-tracking up CFG. */
7990 data->stack =
7991 (basic_block *) xmalloc ((n_basic_blocks - (INVALID_BLOCK + 1))
7992 * sizeof (basic_block));
7993 data->sp = 0;
7995 /* Allocate bitmap to track nodes that have been visited. */
7996 data->visited_blocks = sbitmap_alloc (n_basic_blocks - (INVALID_BLOCK + 1));
7998 /* None of the nodes in the CFG have been visited yet. */
7999 sbitmap_zero (data->visited_blocks);
8001 return;
8004 /* Add the specified basic block to the top of the dfs data
8005 structures. When the search continues, it will start at the
8006 block. */
8008 static void
8009 flow_dfs_compute_reverse_add_bb (data, bb)
8010 depth_first_search_ds data;
8011 basic_block bb;
8013 data->stack[data->sp++] = bb;
8014 return;
8017 /* Continue the depth-first search through the reverse graph starting
8018 with the block at the stack's top and ending when the stack is
8019 empty. Visited nodes are marked. Returns an unvisited basic
8020 block, or NULL if there is none available. */
8022 static basic_block
8023 flow_dfs_compute_reverse_execute (data)
8024 depth_first_search_ds data;
8026 basic_block bb;
8027 edge e;
8028 int i;
8030 while (data->sp > 0)
8032 bb = data->stack[--data->sp];
8034 /* Mark that we have visited this node. */
8035 if (!TEST_BIT (data->visited_blocks, bb->index - (INVALID_BLOCK + 1)))
8037 SET_BIT (data->visited_blocks, bb->index - (INVALID_BLOCK + 1));
8039 /* Perform depth-first search on adjacent vertices. */
8040 for (e = bb->pred; e; e = e->pred_next)
8041 flow_dfs_compute_reverse_add_bb (data, e->src);
8045 /* Determine if there are unvisited basic blocks. */
8046 for (i = n_basic_blocks - (INVALID_BLOCK + 1); --i >= 0;)
8047 if (!TEST_BIT (data->visited_blocks, i))
8048 return BASIC_BLOCK (i + (INVALID_BLOCK + 1));
8049 return NULL;
8052 /* Destroy the data structures needed for depth-first search on the
8053 reverse graph. */
8055 static void
8056 flow_dfs_compute_reverse_finish (data)
8057 depth_first_search_ds data;
8059 free (data->stack);
8060 sbitmap_free (data->visited_blocks);
8061 return;
8065 /* Find the root node of the loop pre-header extended basic block and
8066 the edges along the trace from the root node to the loop header. */
8068 static void
8069 flow_loop_pre_header_scan (loop)
8070 struct loop *loop;
8072 int num = 0;
8073 basic_block ebb;
8075 loop->num_pre_header_edges = 0;
8077 if (loop->num_entries != 1)
8078 return;
8080 ebb = loop->entry_edges[0]->src;
8082 if (ebb != ENTRY_BLOCK_PTR)
8084 edge e;
8086 /* Count number of edges along trace from loop header to
8087 root of pre-header extended basic block. Usually this is
8088 only one or two edges. */
8089 num++;
8090 while (ebb->pred->src != ENTRY_BLOCK_PTR && ! ebb->pred->pred_next)
8092 ebb = ebb->pred->src;
8093 num++;
8096 loop->pre_header_edges = (edge *) xmalloc (num * sizeof (edge));
8097 loop->num_pre_header_edges = num;
8099 /* Store the edges in the order that they are followed. The source
8100 of the first edge is the root node of the pre-header extended
8101 basic block and the destination of the last edge is
8102 the loop header. */
8103 for (e = loop->entry_edges[0]; num; e = e->src->pred)
8105 loop->pre_header_edges[--num] = e;
8111 /* Return the block for the pre-header of the loop with header
8112 HEADER where DOM specifies the dominator information. Return NULL if
8113 there is no pre-header. */
8115 static basic_block
8116 flow_loop_pre_header_find (header, dom)
8117 basic_block header;
8118 const sbitmap *dom;
8120 basic_block pre_header;
8121 edge e;
8123 /* If block P is a predecessor of the header and is the only
8124 predecessor that the header does not dominate, then it is the pre-header. */
8125 pre_header = NULL;
8126 for (e = header->pred; e; e = e->pred_next)
8128 basic_block node = e->src;
8130 if (node != ENTRY_BLOCK_PTR
8131 && ! TEST_BIT (dom[node->index], header->index))
8133 if (pre_header == NULL)
8134 pre_header = node;
8135 else
8137 /* There are multiple edges into the header from outside
8138 the loop so there is no pre-header block. */
8139 pre_header = NULL;
8140 break;
8144 return pre_header;
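/* Editorial illustration (not part of the original source): in the
   fragment below, B1 is the only predecessor of the header B2 that
   B2 does not dominate, so B1 is returned as the pre-header.  Had a
   second edge entered B2 from outside the loop, the function above
   would have returned NULL.

        B1                (pre-header)
        |
        v
        B2 <------+       (loop header)
        |         |
        v         |
        B3 -------+       (latch)
        |
        v
        B4                (after the loop)  */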
8147 /* Add LOOP to the loop hierarchy tree where PREVLOOP was the loop
8148 previously added. The insertion algorithm assumes that the loops
8149 are added in the order found by a depth first search of the CFG. */
8151 static void
8152 flow_loop_tree_node_add (prevloop, loop)
8153 struct loop *prevloop;
8154 struct loop *loop;
8157 if (flow_loop_nested_p (prevloop, loop))
8159 prevloop->inner = loop;
8160 loop->outer = prevloop;
8161 return;
8164 while (prevloop->outer)
8166 if (flow_loop_nested_p (prevloop->outer, loop))
8168 prevloop->next = loop;
8169 loop->outer = prevloop->outer;
8170 return;
8172 prevloop = prevloop->outer;
8175 prevloop->next = loop;
8176 loop->outer = NULL;
8179 /* Build the loop hierarchy tree for LOOPS. */
8181 static void
8182 flow_loops_tree_build (loops)
8183 struct loops *loops;
8185 int i;
8186 int num_loops;
8188 num_loops = loops->num;
8189 if (! num_loops)
8190 return;
8192 /* Root the loop hierarchy tree with the first loop found.
8193 Since we used a depth first search this should be the
8194 outermost loop. */
8195 loops->tree = &loops->array[0];
8196 loops->tree->outer = loops->tree->inner = loops->tree->next = NULL;
8198 /* Add the remaining loops to the tree. */
8199 for (i = 1; i < num_loops; i++)
8200 flow_loop_tree_node_add (&loops->array[i - 1], &loops->array[i]);
8203 /* Helper function to compute loop nesting depth and enclosed loop level
8204 for the natural loop specified by LOOP at the loop depth DEPTH.
8205 Returns the loop level. */
8207 static int
8208 flow_loop_level_compute (loop, depth)
8209 struct loop *loop;
8210 int depth;
8212 struct loop *inner;
8213 int level = 1;
8215 if (! loop)
8216 return 0;
8218 /* Traverse loop tree assigning depth and computing level as the
8219 maximum level of all the inner loops of this loop. The loop
8220 level is equivalent to the height of the loop in the loop tree
8221 and corresponds to the number of enclosed loop levels (including
8222 itself). */
8223 for (inner = loop->inner; inner; inner = inner->next)
8225 int ilevel;
8227 ilevel = flow_loop_level_compute (inner, depth + 1) + 1;
8229 if (ilevel > level)
8230 level = ilevel;
8232 loop->level = level;
8233 loop->depth = depth;
8234 return level;
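/* Editorial illustration (not part of the original source): for a
   nest in which loop A contains loops B and D, and B contains C,

        A        depth 1, level 3
          B      depth 2, level 2
            C    depth 3, level 1
          D      depth 2, level 1

   the depth counts the loops enclosing a loop (itself included),
   while the level is the height of the loop's subtree in the loop
   tree, i.e. the number of loop levels it encloses.  */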
8237 /* Compute the loop nesting depth and enclosed loop level for the loop
8238 hierarchy tree specified by LOOPS. Return the maximum enclosed loop
8239 level. */
8241 static int
8242 flow_loops_level_compute (loops)
8243 struct loops *loops;
8245 struct loop *loop;
8246 int level;
8247 int levels = 0;
8249 /* Traverse all the outer level loops. */
8250 for (loop = loops->tree; loop; loop = loop->next)
8252 level = flow_loop_level_compute (loop, 1);
8253 if (level > levels)
8254 levels = level;
8256 return levels;
8260 /* Scan a single natural loop specified by LOOP, collecting the
8261 information about it requested by FLAGS. */
8264 flow_loop_scan (loops, loop, flags)
8265 struct loops *loops;
8266 struct loop *loop;
8267 int flags;
8269 /* Determine prerequisites. */
8270 if ((flags & LOOP_EXITS_DOMS) && ! loop->exit_edges)
8271 flags |= LOOP_EXIT_EDGES;
8273 if (flags & LOOP_ENTRY_EDGES)
8275 /* Find edges which enter the loop header.
8276 Note that the entry edges should only
8277 enter the header of a natural loop. */
8278 loop->num_entries
8279 = flow_loop_entry_edges_find (loop->header,
8280 loop->nodes,
8281 &loop->entry_edges);
8284 if (flags & LOOP_EXIT_EDGES)
8286 /* Find edges which exit the loop. */
8287 loop->num_exits
8288 = flow_loop_exit_edges_find (loop->nodes,
8289 &loop->exit_edges);
8292 if (flags & LOOP_EXITS_DOMS)
8294 int j;
8296 /* Determine which loop nodes dominate all the exits
8297 of the loop. */
8298 loop->exits_doms = sbitmap_alloc (n_basic_blocks);
8299 sbitmap_copy (loop->exits_doms, loop->nodes);
8300 for (j = 0; j < loop->num_exits; j++)
8301 sbitmap_a_and_b (loop->exits_doms, loop->exits_doms,
8302 loops->cfg.dom[loop->exit_edges[j]->src->index]);
8304 /* The header of a natural loop must dominate
8305 all exits. */
8306 if (! TEST_BIT (loop->exits_doms, loop->header->index))
8307 abort ();
8310 if (flags & LOOP_PRE_HEADER)
8312 /* Look to see if the loop has a pre-header node. */
8313 loop->pre_header
8314 = flow_loop_pre_header_find (loop->header, loops->cfg.dom);
8316 /* Find the blocks within the extended basic block of
8317 the loop pre-header. */
8318 flow_loop_pre_header_scan (loop);
8320 return 1;
8324 /* Find all the natural loops in the function, save them in the LOOPS
8325 structure, and recalculate loop_depth in the basic block structures.
8326 FLAGS controls which loop information is collected.
8327 Return the number of natural loops found. */
8330 flow_loops_find (loops, flags)
8331 struct loops *loops;
8332 int flags;
8334 int i;
8335 int b;
8336 int num_loops;
8337 edge e;
8338 sbitmap headers;
8339 sbitmap *dom;
8340 int *dfs_order;
8341 int *rc_order;
8343 /* This function cannot be repeatedly called with different
8344 flags to build up the loop information. The loop tree
8345 must always be built if this function is called. */
8346 if (! (flags & LOOP_TREE))
8347 abort ();
8349 memset (loops, 0, sizeof (*loops));
8351 /* Taking care of this degenerate case makes the rest of
8352 this code simpler. */
8353 if (n_basic_blocks == 0)
8354 return 0;
8356 dfs_order = NULL;
8357 rc_order = NULL;
8359 /* Compute the dominators. */
8360 dom = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
8361 calculate_dominance_info (NULL, dom, CDI_DOMINATORS);
8363 /* Count the number of loop edges (back edges). This should be the
8364 same as the number of natural loops. */
8366 num_loops = 0;
8367 for (b = 0; b < n_basic_blocks; b++)
8369 basic_block header;
8371 header = BASIC_BLOCK (b);
8372 header->loop_depth = 0;
8374 for (e = header->pred; e; e = e->pred_next)
8376 basic_block latch = e->src;
8378 /* Look for back edges where a predecessor is dominated
8379 by this block. A natural loop has a single entry
8380 node (header) that dominates all the nodes in the
8381 loop. It also has a single back edge to the header
8382 from a latch node. Note that multiple natural loops
8383 may share the same header. */
8384 if (b != header->index)
8385 abort ();
8387 if (latch != ENTRY_BLOCK_PTR && TEST_BIT (dom[latch->index], b))
8388 num_loops++;
8392 if (num_loops)
8394 /* Compute depth first search order of the CFG so that outer
8395 natural loops will be found before inner natural loops. */
8396 dfs_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
8397 rc_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
8398 flow_depth_first_order_compute (dfs_order, rc_order);
8400 /* Save CFG derived information to avoid recomputing it. */
8401 loops->cfg.dom = dom;
8402 loops->cfg.dfs_order = dfs_order;
8403 loops->cfg.rc_order = rc_order;
8405 /* Allocate loop structures. */
8406 loops->array
8407 = (struct loop *) xcalloc (num_loops, sizeof (struct loop));
8409 headers = sbitmap_alloc (n_basic_blocks);
8410 sbitmap_zero (headers);
8412 loops->shared_headers = sbitmap_alloc (n_basic_blocks);
8413 sbitmap_zero (loops->shared_headers);
8415 /* Find and record information about all the natural loops
8416 in the CFG. */
8417 num_loops = 0;
8418 for (b = 0; b < n_basic_blocks; b++)
8420 basic_block header;
8422 /* Search the nodes of the CFG in reverse completion order
8423 so that we can find outer loops first. */
8424 header = BASIC_BLOCK (rc_order[b]);
8426 /* Look for all the possible latch blocks for this header. */
8427 for (e = header->pred; e; e = e->pred_next)
8429 basic_block latch = e->src;
8431 /* Look for back edges where a predecessor is dominated
8432 by this block. A natural loop has a single entry
8433 node (header) that dominates all the nodes in the
8434 loop. It also has a single back edge to the header
8435 from a latch node. Note that multiple natural loops
8436 may share the same header. */
8437 if (latch != ENTRY_BLOCK_PTR
8438 && TEST_BIT (dom[latch->index], header->index))
8440 struct loop *loop;
8442 loop = loops->array + num_loops;
8444 loop->header = header;
8445 loop->latch = latch;
8446 loop->num = num_loops;
8448 num_loops++;
8453 for (i = 0; i < num_loops; i++)
8455 struct loop *loop = &loops->array[i];
8457 /* Keep track of blocks that are loop headers so
8458 that we can tell which loops should be merged. */
8459 if (TEST_BIT (headers, loop->header->index))
8460 SET_BIT (loops->shared_headers, loop->header->index);
8461 SET_BIT (headers, loop->header->index);
8463 /* Find nodes contained within the loop. */
8464 loop->nodes = sbitmap_alloc (n_basic_blocks);
8465 loop->num_nodes
8466 = flow_loop_nodes_find (loop->header, loop->latch, loop->nodes);
8468 /* Compute first and last blocks within the loop.
8469 These are often the same as the loop header and
8470 loop latch respectively, but this is not always
8471 the case. */
8472 loop->first
8473 = BASIC_BLOCK (sbitmap_first_set_bit (loop->nodes));
8474 loop->last
8475 = BASIC_BLOCK (sbitmap_last_set_bit (loop->nodes));
8477 flow_loop_scan (loops, loop, flags);
8480 /* Natural loops with shared headers may either be disjoint or
8481 nested. Disjoint loops with shared headers cannot be inner
8482 loops and should be merged. For now just mark loops that share
8483 headers. */
8484 for (i = 0; i < num_loops; i++)
8485 if (TEST_BIT (loops->shared_headers, loops->array[i].header->index))
8486 loops->array[i].shared = 1;
8488 sbitmap_free (headers);
8491 loops->num = num_loops;
8493 /* Build the loop hierarchy tree. */
8494 flow_loops_tree_build (loops);
8496 /* Assign the loop nesting depth and enclosed loop level for each
8497 loop. */
8498 loops->levels = flow_loops_level_compute (loops);
8500 return num_loops;
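/* Editorial sketch (not part of the original source): one plausible
   way a pass might drive flow_loops_find.  The flag combination is
   illustrative; LOOP_TREE itself is mandatory, as enforced above.  */

static void
example_scan_loops ()
{
  struct loops loops;
  int i;

  if (flow_loops_find (&loops, LOOP_TREE | LOOP_ENTRY_EDGES
                       | LOOP_EXIT_EDGES | LOOP_PRE_HEADER) > 0)
    {
      for (i = 0; i < loops.num; i++)
        {
          struct loop *loop = &loops.array[i];

          /* Loops lacking a pre-header, or sharing their header
             with another loop, may need special handling.  */
          if (loop->pre_header == NULL || loop->shared)
            continue;

          /* ... transform LOOP here ...  */
        }

      flow_loops_free (&loops);
    }
}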
8504 /* Update the information regarding the loops in the CFG
8505 specified by LOOPS. */
8507 flow_loops_update (loops, flags)
8508 struct loops *loops;
8509 int flags;
8511 /* One day we may want to update the current loop data. For now
8512 throw away the old stuff and rebuild what we need. */
8513 if (loops->array)
8514 flow_loops_free (loops);
8516 return flow_loops_find (loops, flags);
8520 /* Return non-zero if edge E enters the header of LOOP from outside of LOOP. */
8523 flow_loop_outside_edge_p (loop, e)
8524 const struct loop *loop;
8525 edge e;
8527 if (e->dest != loop->header)
8528 abort ();
8529 return (e->src == ENTRY_BLOCK_PTR)
8530 || ! TEST_BIT (loop->nodes, e->src->index);
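/* Editorial sketch (not part of the original source): counting the
   edges that enter LOOP from outside by querying
   flow_loop_outside_edge_p on each predecessor edge of the header;
   this mirrors what flow_loop_entry_edges_find records when
   LOOP_ENTRY_EDGES is requested.  */

static int
example_count_entry_edges (loop)
     const struct loop *loop;
{
  edge e;
  int num_entries = 0;

  for (e = loop->header->pred; e; e = e->pred_next)
    if (flow_loop_outside_edge_p (loop, e))
      num_entries++;

  return num_entries;
}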
8533 /* Clear LOG_LINKS fields of insns in a chain.
8534 Also clear the global_live_at_{start,end} fields of the basic block
8535 structures. */
8537 void
8538 clear_log_links (insns)
8539 rtx insns;
8541 rtx i;
8542 int b;
8544 for (i = insns; i; i = NEXT_INSN (i))
8545 if (INSN_P (i))
8546 LOG_LINKS (i) = 0;
8548 for (b = 0; b < n_basic_blocks; b++)
8550 basic_block bb = BASIC_BLOCK (b);
8552 bb->global_live_at_start = NULL;
8553 bb->global_live_at_end = NULL;
8556 ENTRY_BLOCK_PTR->global_live_at_end = NULL;
8557 EXIT_BLOCK_PTR->global_live_at_start = NULL;
8560 /* Given a register bitmap, turn on the bits in a HARD_REG_SET that
8561 correspond to the hard registers, if any, set in that map. This
8562 could be done far more efficiently with all sorts of special cases
8563 for moving whole words at a time, but probably isn't worth the trouble. */
8565 void
8566 reg_set_to_hard_reg_set (to, from)
8567 HARD_REG_SET *to;
8568 bitmap from;
8570 int i;
8572 EXECUTE_IF_SET_IN_BITMAP
8573 (from, 0, i,
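/* The bitmap is walked in increasing register-number order and
   pseudos are numbered after the hard registers, so the first
   pseudo seen means no hard register bits remain.  */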
8575 if (i >= FIRST_PSEUDO_REGISTER)
8576 return;
8577 SET_HARD_REG_BIT (*to, i);
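/* Editorial sketch (not part of the original source): callers are
   expected to clear the destination first, since the function above
   only turns bits on.  FROM here stands for any register bitmap,
   e.g. a block's global_live_at_start.  */

static void
example_live_hard_regs (from, to)
     bitmap from;
     HARD_REG_SET *to;
{
  CLEAR_HARD_REG_SET (*to);
  reg_set_to_hard_reg_set (to, from);
}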
8581 /* Called once at initialization time. */
8583 void
8584 init_flow ()
8586 static int initialized;
8588 if (!initialized)
8590 gcc_obstack_init (&flow_obstack);
8591 flow_firstobj = (char *) obstack_alloc (&flow_obstack, 0);
8592 initialized = 1;
8594 else
8596 obstack_free (&flow_obstack, flow_firstobj);
8597 flow_firstobj = (char *) obstack_alloc (&flow_obstack, 0);