/* RTL dead code elimination.
   Copyright (C) 2005, 2006, 2007 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "hard-reg-set.h"
#include "tree-pass.h"
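
/* Declares heap allocation support for vectors of ints (see vec.h).  */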
DEF_VEC_ALLOC_I(int,heap);

/* -------------------------------------------------------------------------
   Core mark/delete routines
   ------------------------------------------------------------------------- */
/* True if this pass is run from inside the df framework (see
   run_fast_df_dce below); in that case we must not reenter the df
   machinery.  */
static bool df_in_progress = false;
/* True if we deleted at least one instruction.  */
static bool something_changed;
/* Instructions that have been marked but whose dependencies have not
   yet been processed.  */
static VEC(rtx,heap) *worklist;
static bitmap_obstack dce_blocks_bitmap_obstack;
static bitmap_obstack dce_tmp_bitmap_obstack;
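
/* A bitmap of instructions that have been marked as needed, indexed
   by INSN_UID.  */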
static sbitmap marked = NULL;

/* A subroutine of deletable_insn_p.  BODY is the part of the
   instruction being tested: either the top-level pattern or an
   element of a PARALLEL.  The instruction is known not to be a bare
   USE or CLOBBER.  */
deletable_insn_p_1 (rtx body)
  switch (GET_CODE (body))

      /* The UNSPEC case was added here because the ia-64 claims that
         USEs do not work after reload and generates UNSPECS rather
         than USEs.  Since dce is run after reload we need to avoid
         deleting these even if they are dead.  If it turns out that
         USEs really do work after reload, the ia-64 should be
         changed, and the UNSPEC case can be removed.  */

      if (volatile_insn_p (body))

      if (flag_non_call_exceptions && may_trap_p (body))

/* Return true if INSN is a normal instruction that can be deleted by
   the DCE pass.  */

deletable_insn_p (rtx insn, bool fast)
  if (!NONJUMP_INSN_P (insn))

  body = PATTERN (insn);
  switch (GET_CODE (body))
      /* A CLOBBER of a dead pseudo register serves no purpose.
         That is not necessarily true for hard registers until
         after reload.  */
      return REG_P (x) && (!HARD_REGISTER_P (x) || reload_completed);

      /* Because of the way that use-def chains are built, it is not
         possible to tell if the clobber is dead because it can
         never be the target of a use-def chain.  */
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        if (!deletable_insn_p_1 (XVECEXP (body, 0, i)))

      return deletable_insn_p_1 (body);

/* Return true if INSN has been marked as needed.  */

marked_insn_p (rtx insn)
  return TEST_BIT (marked, INSN_UID (insn));

  /* Artificial defs are always needed and they do not have an
     insn.  */

/* If INSN has not yet been marked as needed, mark it now, and add it to
   the worklist.  */

mark_insn (rtx insn, bool fast)
  if (!marked_insn_p (insn))
      VEC_safe_push (rtx, heap, worklist, insn);
      SET_BIT (marked, INSN_UID (insn));
        fprintf (dump_file, " Adding insn %d to worklist\n", INSN_UID (insn));

/* A note_stores callback used by mark_nonreg_stores.  DATA is the
   instruction containing DEST.  */

mark_nonreg_stores_1 (rtx dest, const_rtx pattern, void *data)
  if (GET_CODE (pattern) != CLOBBER && !REG_P (dest))
    mark_insn ((rtx) data, true);

/* A note_stores callback used by mark_nonreg_stores.  DATA is the
   instruction containing DEST.  */

mark_nonreg_stores_2 (rtx dest, const_rtx pattern, void *data)
  if (GET_CODE (pattern) != CLOBBER && !REG_P (dest))
    mark_insn ((rtx) data, false);
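
/* The two note_stores callbacks above differ only in the FAST flag that
   they pass to mark_insn.  */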

/* Mark INSN if BODY stores to a non-register destination.  */

mark_nonreg_stores (rtx body, rtx insn, bool fast)
    note_stores (body, mark_nonreg_stores_1, insn);

    note_stores (body, mark_nonreg_stores_2, insn);

/* Initialize global variables for a new DCE pass.  */

    df_chain_add_problem (DF_UD_CHAIN);

  bitmap_obstack_initialize (&dce_blocks_bitmap_obstack);
  bitmap_obstack_initialize (&dce_tmp_bitmap_obstack);
  marked = sbitmap_alloc (get_max_uid () + 1);
  sbitmap_zero (marked);

/* Delete all REG_EQUAL notes of the registers INSN writes, to prevent
   bad dangling REG_EQUAL notes.  */

delete_corresponding_reg_eq_notes (rtx insn)
  struct df_ref **def_rec;
  for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
      struct df_ref *def = *def_rec;
      unsigned int regno = DF_REF_REGNO (def);

      /* This loop is a little tricky.  We cannot just go down the
         chain because it is being modified by the actions in the
         loop.  So we just get the head.  We plan to drain the list
         anyway.  */
      while (DF_REG_EQ_USE_CHAIN (regno))
          struct df_ref *eq_use = DF_REG_EQ_USE_CHAIN (regno);
          rtx noted_insn = DF_REF_INSN (eq_use);
          rtx note = find_reg_note (noted_insn, REG_EQUAL, NULL_RTX);
            note = find_reg_note (noted_insn, REG_EQUIV, NULL_RTX);

          /* This assert is generally triggered when someone deletes a
             REG_EQUAL or REG_EQUIV note by hacking the list manually
             rather than calling remove_note.  */

          remove_note (noted_insn, note);

/* Delete every instruction that hasn't been marked.  */

delete_unmarked_insns (void)
  something_changed = false;
  FOR_BB_INSNS_SAFE (bb, insn, next)
      if (noop_move_p (insn))
          /* Note that this code does not handle the case where
             the last insn of a libcall is deleted.  As it turns out,
             that case is excluded by the call to noop_move_p.  */
          rtx note = find_reg_note (insn, REG_LIBCALL, NULL_RTX);
          if (note && (XEXP (note, 0) != insn))
              rtx new_libcall_insn = next_real_insn (insn);
              rtx retval_note = find_reg_note (XEXP (note, 0),
                                               REG_RETVAL, NULL_RTX);
              REG_NOTES (new_libcall_insn)
                = gen_rtx_INSN_LIST (REG_LIBCALL, XEXP (note, 0),
                                     REG_NOTES (new_libcall_insn));
              XEXP (retval_note, 0) = new_libcall_insn;
      else if (marked_insn_p (insn))

          /* WARNING, this debugging can itself cause problems if the
             edge of the counter causes part of a libcall to be
             deleted but not all of it.  */
            fprintf (dump_file, "DCE: Deleting insn %d\n", INSN_UID (insn));

          /* Before we delete the insn, we have to delete the REG_EQUAL
             notes of the destination regs of the deleted insn, to prevent
             dangling REG_EQUAL notes.  */
          delete_corresponding_reg_eq_notes (insn);

          delete_insn_and_edges (insn);
          something_changed = true;

/* Mark all insns using DELETE_PARM in the libcall that contains
   START_INSN.  */

mark_libcall (rtx start_insn, bool delete_parm)
  rtx note = find_reg_note (start_insn, REG_LIBCALL_ID, NULL_RTX);
  int id = INTVAL (XEXP (note, 0));

  mark_insn (start_insn, delete_parm);
  insn = NEXT_INSN (start_insn);

  /* There are tales, long ago and far away, of the mystical nested
     libcall.  No one alive has actually seen one, but other parts of
     the compiler support them so we will here.  */
  for (insn = NEXT_INSN (start_insn); insn; insn = NEXT_INSN (insn))
      /* Stay in the loop as long as we are in any libcall.  */
      if ((note = find_reg_note (insn, REG_LIBCALL_ID, NULL_RTX)))
          if (id == INTVAL (XEXP (note, 0)))
              mark_insn (insn, delete_parm);
                fprintf (dump_file, "matching forward libcall %d[%d]\n",
                         INSN_UID (insn), id);

  for (insn = PREV_INSN (start_insn); insn; insn = PREV_INSN (insn))
      /* Stay in the loop as long as we are in any libcall.  */
      if ((note = find_reg_note (insn, REG_LIBCALL_ID, NULL_RTX)))
          if (id == INTVAL (XEXP (note, 0)))
              mark_insn (insn, delete_parm);
                fprintf (dump_file, "matching backward libcall %d[%d]\n",
                         INSN_UID (insn), id);

/* Go through the instructions and mark those whose necessity is not
   dependent on inter-instruction information.  Make sure all other
   instructions are not marked.  */

prescan_insns_for_dce (bool fast)
    fprintf (dump_file, "Finding needed instructions:\n");

  FOR_BB_INSNS (bb, insn)
      rtx note = find_reg_note (insn, REG_LIBCALL_ID, NULL_RTX);
        mark_libcall (insn, fast);
      else if (deletable_insn_p (insn, fast))
        mark_nonreg_stores (PATTERN (insn), insn, fast);
        mark_insn (insn, fast);

    fprintf (dump_file, "Finished finding needed instructions:\n");

/* UD-based DCE routines.  */

/* Mark instructions that define artificially-used registers, such as
   the frame pointer and the stack pointer.  */

mark_artificial_uses (void)
  struct df_link *defs;
  struct df_ref **use_rec;

      for (use_rec = df_get_artificial_uses (bb->index);
           *use_rec; use_rec++)
        for (defs = DF_REF_CHAIN (*use_rec); defs; defs = defs->next)
          mark_insn (DF_REF_INSN (defs->ref), false);

/* Mark every instruction that defines a register value that INSN uses.  */

mark_reg_dependencies (rtx insn)
  struct df_link *defs;
  struct df_ref **use_rec;

  /* If this is part of a libcall, mark the entire libcall.  */
  if (find_reg_note (insn, REG_LIBCALL_ID, NULL_RTX))
    mark_libcall (insn, false);

  for (use_rec = DF_INSN_USES (insn); *use_rec; use_rec++)
      struct df_ref *use = *use_rec;
          fprintf (dump_file, "Processing use of ");
          print_simple_rtl (dump_file, DF_REF_REG (use));
          fprintf (dump_file, " in insn %d:\n", INSN_UID (insn));
      for (defs = DF_REF_CHAIN (use); defs; defs = defs->next)
        mark_insn (DF_REF_INSN (defs->ref), false);
  sbitmap_free (marked);
  gcc_assert (VEC_empty (rtx, worklist));

/* UD-chain based DCE.  */

rest_of_handle_ud_dce (void)
  df_in_progress = false;

  prescan_insns_for_dce (false);
  mark_artificial_uses ();
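
  /* Propagate the marks backwards: pop a needed insn from the worklist
     and mark every insn that defines a register value it uses, until the
     worklist is empty.  */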
  while (VEC_length (rtx, worklist) > 0)
      insn = VEC_pop (rtx, worklist);
      mark_reg_dependencies (insn);

  /* Before any insns are deleted, we must remove the chains since
     they are not bidirectional.  */
  df_remove_problem (df_chain);
  delete_unmarked_insns ();
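
/* Gate for the UD-chain based DCE pass: requires optimize > 1 and
   flag_dce.  */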
  return optimize > 1 && flag_dce;

struct tree_opt_pass pass_ud_rtl_dce =
  gate_ud_dce,                          /* gate */
  rest_of_handle_ud_dce,                /* execute */
  0,                                    /* static_pass_number */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_ggc_collect,                     /* todo_flags_finish */

/* -------------------------------------------------------------------------
   Fast DCE functions
   ------------------------------------------------------------------------- */
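
/* Unlike the UD-chain pass above, the fast pass works from the DF_LR
   live-register sets: each block is scanned backwards, an insn is marked
   as needed as soon as one of its defs is seen to be live, and everything
   left unmarked is deleted afterwards.  */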

/* Free the data allocated by init_dce.  */

  sbitmap_free (marked);
  bitmap_obstack_release (&dce_blocks_bitmap_obstack);
  bitmap_obstack_release (&dce_tmp_bitmap_obstack);
  df_in_progress = false;

/* Process basic block BB.  Return true if the live_in set has
   changed.  */

dce_process_block (basic_block bb, bool redo_out)
  bitmap local_live = BITMAP_ALLOC (&dce_tmp_bitmap_obstack);
  struct df_ref **def_rec, **use_rec;
  unsigned int bb_index = bb->index;

      /* We need to redo the live_out set of this block when one of
         its successors has had a change in its live_in set.  */
      df_confluence_function_n con_fun_n = df_lr->problem->con_fun_n;
      bitmap_clear (DF_LR_OUT (bb));
      FOR_EACH_EDGE (e, ei, bb->succs)

      fprintf (dump_file, "processing block %d live out = ", bb->index);
      df_print_regset (dump_file, DF_LR_OUT (bb));

  bitmap_copy (local_live, DF_LR_OUT (bb));

  /* Process the artificial defs and uses at the bottom of the block.  */
  for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
      struct df_ref *def = *def_rec;
      if (((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
          && (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL))))
        bitmap_clear_bit (local_live, DF_REF_REGNO (def));

  for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
      struct df_ref *use = *use_rec;
      if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0)
        bitmap_set_bit (local_live, DF_REF_REGNO (use));

  FOR_BB_INSNS_REVERSE (bb, insn)
        /* If this is a recursive call, the libcall will have already
           been marked.  */
        if (!marked_insn_p (insn))
            /* The insn is needed if there is someone who uses the output.  */
            for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
              if (bitmap_bit_p (local_live, DF_REF_REGNO (*def_rec)))
                  rtx note = find_reg_note (insn, REG_LIBCALL_ID, NULL_RTX);

                  /* If we need to mark an insn in the middle of a
                     libcall, we need to back up to mark the entire
                     libcall.  Given that libcalls are rare, rescanning
                     the block is a reasonable alternative to trying to
                     figure out how to back up.  */
                      fprintf (dump_file, "needed libcall %d\n", INSN_UID (insn));
                    mark_libcall (insn, true);
                    BITMAP_FREE (local_live);
                    return dce_process_block (bb, false);
              mark_insn (insn, true);

        /* Whether or not the instruction is needed, we remove any
           regno in its defs from the live set.  */
        df_simulate_defs (insn, local_live);

        /* On the other hand, we do not allow the dead uses to set
           anything in local_live.  */
        if (marked_insn_p (insn))
          df_simulate_uses (insn, local_live);

  for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
      struct df_ref *def = *def_rec;
      if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP)
          && (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL))))
        bitmap_clear_bit (local_live, DF_REF_REGNO (def));

  /* Process the uses that are live into an exception handler.  */
  for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
      /* Add use to set of uses in this BB.  */
      struct df_ref *use = *use_rec;
      if (DF_REF_FLAGS (use) & DF_REF_AT_TOP)
        bitmap_set_bit (local_live, DF_REF_REGNO (use));

  block_changed = !bitmap_equal_p (local_live, DF_LR_IN (bb));
    bitmap_copy (DF_LR_IN (bb), local_live);

  BITMAP_FREE (local_live);
  return block_changed;

  int *postorder = df_get_postorder (DF_BACKWARD);
  int n_blocks = df_get_n_blocks (DF_BACKWARD);

  /* The set of blocks that have been seen on this iteration.  */
  bitmap processed = BITMAP_ALLOC (&dce_blocks_bitmap_obstack);
  /* The set of blocks that need to have the out vectors reset because
     the in of one of their successors has changed.  */
  bitmap redo_out = BITMAP_ALLOC (&dce_blocks_bitmap_obstack);
  bitmap all_blocks = BITMAP_ALLOC (&dce_blocks_bitmap_obstack);
  bool global_changed = true;

  prescan_insns_for_dce (true);

  for (i = 0; i < n_blocks; i++)
    bitmap_set_bit (all_blocks, postorder[i]);
  while (global_changed)
      global_changed = false;
      for (i = 0; i < n_blocks; i++)
          int index = postorder[i];
          basic_block bb = BASIC_BLOCK (index);

          if (index < NUM_FIXED_BLOCKS)
              bitmap_set_bit (processed, index);

            = dce_process_block (bb, bitmap_bit_p (redo_out, index));
          bitmap_set_bit (processed, index);

              FOR_EACH_EDGE (e, ei, bb->preds)
                if (bitmap_bit_p (processed, e->src->index))
                  /* Be tricky about when we need to iterate the
                     analysis.  We only have to redo the analysis when
                     the bitmaps change at the top of a block one of
                     whose predecessors has already been processed on
                     this iteration.  */
                  global_changed = true;
                  bitmap_set_bit (redo_out, e->src->index);

      /* Turn off the RUN_DCE flag to prevent recursive calls to
         dce.  */
      int old_flag = df_clear_flags (DF_LR_RUN_DCE);

      /* So something was deleted that requires a redo.  Do it on
         the cheap.  */
      delete_unmarked_insns ();
      sbitmap_zero (marked);
      bitmap_clear (processed);
      bitmap_clear (redo_out);

      /* We do not need to rescan any instructions.  We only need
         to redo the dataflow equations for the blocks that had a
         change at the top of the block.  Then we need to redo the
         iteration.  */
      df_analyze_problem (df_lr, all_blocks, postorder, n_blocks);

      if (old_flag & DF_LR_RUN_DCE)
        df_set_flags (DF_LR_RUN_DCE);
      prescan_insns_for_dce (true);

  delete_unmarked_insns ();

  BITMAP_FREE (processed);
  BITMAP_FREE (redo_out);
  BITMAP_FREE (all_blocks);

/* Callback for running pass_fast_rtl_dce.  */

rest_of_handle_fast_dce (void)
  df_in_progress = false;

/* This is an internal call that is used by the df live register
   problem to run fast dce as a side effect of creating the live
   information.  The stack is organized so that the lr problem is run,
   this pass is run, which updates the live info and the df scanning
   info, and then returns to allow the rest of the problems to be run.

   This can be called from elsewhere, but it will not update the bit
   vectors for any problems other than LR.  */

run_fast_df_dce (void)
  /* If dce is able to delete something, it has to happen
     immediately.  Otherwise there will be problems handling the
     REG_EQUAL notes.  */
  enum df_changeable_flags old_flags
    = df_clear_flags (DF_DEFER_INSN_RESCAN + DF_NO_INSN_RESCAN);

  df_in_progress = true;
  rest_of_handle_fast_dce ();
  df_set_flags (old_flags);
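
/* Gate for the fast RTL DCE pass: requires optimize > 0 and flag_dce.  */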
  return optimize > 0 && flag_dce;

/* Run a fast DCE pass and return true if any instructions were
   deleted.  */

  return gate_fast_dce () && (rest_of_handle_fast_dce (), something_changed);

struct tree_opt_pass pass_fast_rtl_dce =
  gate_fast_dce,                        /* gate */
  rest_of_handle_fast_dce,              /* execute */
  0,                                    /* static_pass_number */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_ggc_collect,                     /* todo_flags_finish */
, /* todo_flags_finish */