/* Extraction residue from the web view of the repository, kept as a comment:
   mangle.c (java_mangle_decl): Remove dead check.
   [official-gcc.git] / gcc / dce.c
   blob 70b9e2265149cc8fb74d1cfc0f52d5f83b24b891  */
1 /* RTL dead code elimination.
2 Copyright (C) 2005, 2006, 2007 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "hashtab.h"
24 #include "tm.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "regs.h"
28 #include "hard-reg-set.h"
29 #include "flags.h"
30 #include "df.h"
31 #include "cselib.h"
32 #include "dce.h"
33 #include "timevar.h"
34 #include "tree-pass.h"
35 #include "dbgcnt.h"
37 DEF_VEC_I(int);
38 DEF_VEC_ALLOC_I(int,heap);
41 /* -------------------------------------------------------------------------
42 Core mark/delete routines
43 ------------------------------------------------------------------------- */
45 /* True if we are invoked while the df engine is running; in this case,
46 we don't want to reenter it. */
47 static bool df_in_progress = false;
49 /* Instructions that have been marked but whose dependencies have not
50 yet been processed. */
51 static VEC(rtx,heap) *worklist;
53 /* Bitmap of instructions marked as needed indexed by INSN_UID. */
54 static sbitmap marked;
56 /* Bitmap obstacks used for block processing by the fast algorithm. */
57 static bitmap_obstack dce_blocks_bitmap_obstack;
58 static bitmap_obstack dce_tmp_bitmap_obstack;
61 /* A subroutine for which BODY is part of the instruction being tested;
62 either the top-level pattern, or an element of a PARALLEL. The
63 instruction is known not to be a bare USE or CLOBBER. */
65 static bool
66 deletable_insn_p_1 (rtx body)
68 switch (GET_CODE (body))
70 case PREFETCH:
71 case TRAP_IF:
72 /* The UNSPEC case was added here because the ia-64 claims that
73 USEs do not work after reload and generates UNSPECS rather
74 than USEs. Since dce is run after reload we need to avoid
75 deleting these even if they are dead. If it turns out that
76 USEs really do work after reload, the ia-64 should be
77 changed, and the UNSPEC case can be removed. */
78 case UNSPEC:
79 return false;
81 default:
82 if (volatile_refs_p (body))
83 return false;
85 if (flag_non_call_exceptions && may_trap_p (body))
86 return false;
88 return true;
93 /* Return true if INSN is a normal instruction that can be deleted by
94 the DCE pass. */
96 static bool
97 deletable_insn_p (rtx insn, bool fast)
99 rtx body, x;
100 int i;
102 if (!NONJUMP_INSN_P (insn))
103 return false;
105 body = PATTERN (insn);
106 switch (GET_CODE (body))
108 case USE:
109 return false;
111 case CLOBBER:
112 if (fast)
114 /* A CLOBBER of a dead pseudo register serves no purpose.
115 That is not necessarily true for hard registers until
116 after reload. */
117 x = XEXP (body, 0);
118 return REG_P (x) && (!HARD_REGISTER_P (x) || reload_completed);
120 else
121 /* Because of the way that use-def chains are built, it is not
122 possible to tell if the clobber is dead because it can
123 never be the target of a use-def chain. */
124 return false;
126 case PARALLEL:
127 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
128 if (!deletable_insn_p_1 (XVECEXP (body, 0, i)))
129 return false;
130 return true;
132 default:
133 return deletable_insn_p_1 (body);
138 /* Return true if INSN has been marked as needed. */
140 static inline int
141 marked_insn_p (rtx insn)
143 if (insn)
144 return TEST_BIT (marked, INSN_UID (insn));
145 else
146 /* Artificial defs are always needed and they do not have an
147 insn. */
148 return true;
152 /* If INSN has not yet been marked as needed, mark it now, and add it to
153 the worklist. */
155 static void
156 mark_insn (rtx insn, bool fast)
158 if (!marked_insn_p (insn))
160 if (!fast)
161 VEC_safe_push (rtx, heap, worklist, insn);
162 SET_BIT (marked, INSN_UID (insn));
163 if (dump_file)
164 fprintf (dump_file, " Adding insn %d to worklist\n", INSN_UID (insn));
169 /* A note_stores callback used by mark_nonreg_stores. DATA is the
170 instruction containing DEST. */
172 static void
173 mark_nonreg_stores_1 (rtx dest, const_rtx pattern, void *data)
175 if (GET_CODE (pattern) != CLOBBER && !REG_P (dest))
176 mark_insn ((rtx) data, true);
180 /* A note_stores callback used by mark_nonreg_stores. DATA is the
181 instruction containing DEST. */
183 static void
184 mark_nonreg_stores_2 (rtx dest, const_rtx pattern, void *data)
186 if (GET_CODE (pattern) != CLOBBER && !REG_P (dest))
187 mark_insn ((rtx) data, false);
191 /* Mark INSN if BODY stores to a non-register destination. */
193 static void
194 mark_nonreg_stores (rtx body, rtx insn, bool fast)
196 if (fast)
197 note_stores (body, mark_nonreg_stores_1, insn);
198 else
199 note_stores (body, mark_nonreg_stores_2, insn);
203 /* Return true if the entire libcall sequence starting at INSN is dead.
204 NOTE is the REG_LIBCALL note attached to INSN.
206 A libcall sequence is a block of insns with no side-effects, i.e.
207 that is only used for its return value. The terminology derives
208 from that of a call, but a libcall sequence need not contain one.
209 It is only defined by a pair of REG_LIBCALL/REG_RETVAL notes.
211 From a dataflow viewpoint, a libcall sequence has the property that
212 no UD chain can enter it from the outside. As a consequence, if a
213 libcall sequence has a dead return value, it is effectively dead.
214 This is both enforced by CSE (cse_extended_basic_block) and relied
215 upon by delete_trivially_dead_insns.
217 However, in practice, the return value business is a tricky one and
218 only checking the liveness of the last insn is not sufficient to
219 decide whether the whole sequence is dead (e.g. PR middle-end/19551)
220 so we check the liveness of every insn starting from the call. */
222 static bool
223 libcall_dead_p (rtx insn, rtx note)
225 rtx last = XEXP (note, 0);
227 /* Find the call insn. */
228 while (insn != last && !CALL_P (insn))
229 insn = NEXT_INSN (insn);
231 /* If there is none, do nothing special, since ordinary death handling
232 can understand these insns. */
233 if (!CALL_P (insn))
234 return false;
236 /* If this is a call that returns a value via an invisible pointer, the
237 dataflow engine cannot see it so it has been marked unconditionally.
238 Skip it unless it has been made the last insn in the libcall, for
239 example by the combiner, in which case we're left with no easy way
240 of asserting its liveness. */
241 if (!single_set (insn))
243 if (insn == last)
244 return false;
245 insn = NEXT_INSN (insn);
248 while (insn != NEXT_INSN (last))
250 if (INSN_P (insn) && marked_insn_p (insn))
251 return false;
252 insn = NEXT_INSN (insn);
255 return true;
259 /* Delete all REG_EQUAL notes of the registers INSN writes, to prevent
260 bad dangling REG_EQUAL notes. */
262 static void
263 delete_corresponding_reg_eq_notes (rtx insn)
265 struct df_ref **def_rec;
266 for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
268 struct df_ref *def = *def_rec;
269 unsigned int regno = DF_REF_REGNO (def);
270 /* This loop is a little tricky. We cannot just go down the
271 chain because it is being modified by the actions in the
272 loop. So we just get the head. We plan to drain the list
273 anyway. */
274 while (DF_REG_EQ_USE_CHAIN (regno))
276 struct df_ref *eq_use = DF_REG_EQ_USE_CHAIN (regno);
277 rtx noted_insn = DF_REF_INSN (eq_use);
278 rtx note = find_reg_note (noted_insn, REG_EQUAL, NULL_RTX);
279 if (!note)
280 note = find_reg_note (noted_insn, REG_EQUIV, NULL_RTX);
282 /* This assert is generally triggered when someone deletes a
283 REG_EQUAL or REG_EQUIV note by hacking the list manually
284 rather than calling remove_note. */
285 gcc_assert (note);
286 remove_note (noted_insn, note);
292 /* Delete every instruction that hasn't been marked. */
294 static void
295 delete_unmarked_insns (void)
297 basic_block bb;
298 rtx insn, next;
300 FOR_EACH_BB (bb)
301 FOR_BB_INSNS_SAFE (bb, insn, next)
302 if (INSN_P (insn))
304 rtx note = find_reg_note (insn, REG_LIBCALL, NULL_RTX);
306 /* Always delete no-op moves. */
307 if (noop_move_p (insn))
310 /* Try to delete libcall sequences as a whole. */
311 else if (note && libcall_dead_p (insn, note))
313 rtx last = XEXP (note, 0);
315 if (!dbg_cnt (dce))
316 continue;
318 if (dump_file)
319 fprintf (dump_file, "DCE: Deleting libcall %d-%d\n",
320 INSN_UID (insn), INSN_UID (last));
322 next = NEXT_INSN (last);
323 delete_insn_chain_and_edges (insn, last);
324 continue;
327 /* Otherwise rely only on the DCE algorithm. */
328 else if (marked_insn_p (insn))
329 continue;
331 if (!dbg_cnt (dce))
332 continue;
334 if (dump_file)
335 fprintf (dump_file, "DCE: Deleting insn %d\n", INSN_UID (insn));
337 /* Before we delete the insn we have to delete REG_EQUAL notes
338 for the destination regs in order to avoid dangling notes. */
339 delete_corresponding_reg_eq_notes (insn);
341 /* If we're about to delete the first insn of a libcall, then
342 move the REG_LIBCALL note to the next real insn and update
343 the REG_RETVAL note. */
344 if (note && (XEXP (note, 0) != insn))
346 rtx new_libcall_insn = next_real_insn (insn);
347 rtx retval_note = find_reg_note (XEXP (note, 0),
348 REG_RETVAL, NULL_RTX);
349 /* If the RETVAL and LIBCALL notes would land on the same
350 insn just remove them. */
351 if (XEXP (note, 0) == new_libcall_insn)
352 remove_note (new_libcall_insn, retval_note);
353 else
355 REG_NOTES (new_libcall_insn)
356 = gen_rtx_INSN_LIST (REG_LIBCALL, XEXP (note, 0),
357 REG_NOTES (new_libcall_insn));
358 XEXP (retval_note, 0) = new_libcall_insn;
362 /* If the insn contains a REG_RETVAL note and is dead, but the
363 libcall as a whole is not dead, then we want to remove the
364 insn, but not the whole libcall sequence. However, we also
365 need to remove the dangling REG_LIBCALL note in order to
366 avoid mismatched notes. We could find a new location for
367 the REG_RETVAL note, but it hardly seems worth the effort. */
368 note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
369 if (note && (XEXP (note, 0) != insn))
371 rtx libcall_note
372 = find_reg_note (XEXP (note, 0), REG_LIBCALL, NULL_RTX);
373 remove_note (XEXP (note, 0), libcall_note);
376 /* Now delete the insn. */
377 delete_insn_and_edges (insn);
382 /* Helper function for prescan_insns_for_dce: prescan the entire libcall
383 sequence starting at INSN and return the insn following the libcall.
384 NOTE is the REG_LIBCALL note attached to INSN. */
386 static rtx
387 prescan_libcall_for_dce (rtx insn, rtx note, bool fast)
389 rtx last = XEXP (note, 0);
391 /* A libcall is never necessary on its own but we need to mark the stores
392 to a non-register destination. */
393 while (insn != last && !CALL_P (insn))
395 if (INSN_P (insn))
396 mark_nonreg_stores (PATTERN (insn), insn, fast);
397 insn = NEXT_INSN (insn);
400 /* If this is a call that returns a value via an invisible pointer, the
401 dataflow engine cannot see it so it has to be marked unconditionally. */
402 if (CALL_P (insn) && !single_set (insn))
404 mark_insn (insn, fast);
405 insn = NEXT_INSN (insn);
408 while (insn != NEXT_INSN (last))
410 if (INSN_P (insn))
411 mark_nonreg_stores (PATTERN (insn), insn, fast);
412 insn = NEXT_INSN (insn);
415 return insn;
419 /* Go through the instructions and mark those whose necessity is not
420 dependent on inter-instruction information. Make sure all other
421 instructions are not marked. */
423 static void
424 prescan_insns_for_dce (bool fast)
426 basic_block bb;
427 rtx insn, next;
429 if (dump_file)
430 fprintf (dump_file, "Finding needed instructions:\n");
432 FOR_EACH_BB (bb)
433 FOR_BB_INSNS_SAFE (bb, insn, next)
434 if (INSN_P (insn))
436 rtx note = find_reg_note (insn, REG_LIBCALL, NULL_RTX);
437 if (note)
438 next = prescan_libcall_for_dce (insn, note, fast);
439 else if (deletable_insn_p (insn, fast))
440 mark_nonreg_stores (PATTERN (insn), insn, fast);
441 else
442 mark_insn (insn, fast);
445 if (dump_file)
446 fprintf (dump_file, "Finished finding needed instructions:\n");
450 /* UD-based DSE routines. */
452 /* Mark instructions that define artificially-used registers, such as
453 the frame pointer and the stack pointer. */
455 static void
456 mark_artificial_uses (void)
458 basic_block bb;
459 struct df_link *defs;
460 struct df_ref **use_rec;
462 FOR_ALL_BB (bb)
464 for (use_rec = df_get_artificial_uses (bb->index);
465 *use_rec; use_rec++)
466 for (defs = DF_REF_CHAIN (*use_rec); defs; defs = defs->next)
467 mark_insn (DF_REF_INSN (defs->ref), false);
472 /* Mark every instruction that defines a register value that INSN uses. */
474 static void
475 mark_reg_dependencies (rtx insn)
477 struct df_link *defs;
478 struct df_ref **use_rec;
480 for (use_rec = DF_INSN_USES (insn); *use_rec; use_rec++)
482 struct df_ref *use = *use_rec;
483 if (dump_file)
485 fprintf (dump_file, "Processing use of ");
486 print_simple_rtl (dump_file, DF_REF_REG (use));
487 fprintf (dump_file, " in insn %d:\n", INSN_UID (insn));
489 for (defs = DF_REF_CHAIN (use); defs; defs = defs->next)
490 mark_insn (DF_REF_INSN (defs->ref), false);
495 /* Initialize global variables for a new DCE pass. */
497 static void
498 init_dce (bool fast)
500 if (!df_in_progress)
502 if (!fast)
503 df_chain_add_problem (DF_UD_CHAIN);
504 df_analyze ();
507 if (dump_file)
508 df_dump (dump_file);
510 if (fast)
512 bitmap_obstack_initialize (&dce_blocks_bitmap_obstack);
513 bitmap_obstack_initialize (&dce_tmp_bitmap_obstack);
516 marked = sbitmap_alloc (get_max_uid () + 1);
517 sbitmap_zero (marked);
521 /* Free the data allocated by init_dce. */
523 static void
524 fini_dce (bool fast)
526 sbitmap_free (marked);
528 if (fast)
530 bitmap_obstack_release (&dce_blocks_bitmap_obstack);
531 bitmap_obstack_release (&dce_tmp_bitmap_obstack);
536 /* UD-chain based DCE. */
538 static unsigned int
539 rest_of_handle_ud_dce (void)
541 rtx insn;
543 init_dce (false);
545 prescan_insns_for_dce (false);
546 mark_artificial_uses ();
547 while (VEC_length (rtx, worklist) > 0)
549 insn = VEC_pop (rtx, worklist);
550 mark_reg_dependencies (insn);
553 /* Before any insns are deleted, we must remove the chains since
554 they are not bidirectional. */
555 df_remove_problem (df_chain);
556 delete_unmarked_insns ();
558 fini_dce (false);
559 return 0;
563 static bool
564 gate_ud_dce (void)
566 return optimize > 1 && flag_dce
567 && dbg_cnt (dce_ud);
570 struct tree_opt_pass pass_ud_rtl_dce =
572 "dce", /* name */
573 gate_ud_dce, /* gate */
574 rest_of_handle_ud_dce, /* execute */
575 NULL, /* sub */
576 NULL, /* next */
577 0, /* static_pass_number */
578 TV_DCE, /* tv_id */
579 0, /* properties_required */
580 0, /* properties_provided */
581 0, /* properties_destroyed */
582 0, /* todo_flags_start */
583 TODO_dump_func |
584 TODO_df_finish | TODO_verify_rtl_sharing |
585 TODO_ggc_collect, /* todo_flags_finish */
586 'w' /* letter */
590 /* -------------------------------------------------------------------------
591 Fast DCE functions
592 ------------------------------------------------------------------------- */
594 /* Process basic block BB. Return true if the live_in set has changed. */
596 static bool
597 dce_process_block (basic_block bb, bool redo_out)
599 bitmap local_live = BITMAP_ALLOC (&dce_tmp_bitmap_obstack);
600 bitmap au;
601 rtx insn;
602 bool block_changed;
603 struct df_ref **def_rec, **use_rec;
604 unsigned int bb_index = bb->index;
606 if (redo_out)
608 /* Need to redo the live_out set of this block if when one of
609 the succs of this block has had a change in it live in
610 set. */
611 edge e;
612 edge_iterator ei;
613 df_confluence_function_n con_fun_n = df_lr->problem->con_fun_n;
614 bitmap_clear (DF_LR_OUT (bb));
615 FOR_EACH_EDGE (e, ei, bb->succs)
616 (*con_fun_n) (e);
619 if (dump_file)
621 fprintf (dump_file, "processing block %d live out = ", bb->index);
622 df_print_regset (dump_file, DF_LR_OUT (bb));
625 bitmap_copy (local_live, DF_LR_OUT (bb));
627 /* Process the artificial defs and uses at the bottom of the block. */
628 for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
630 struct df_ref *def = *def_rec;
631 if (((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
632 && (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL))))
633 bitmap_clear_bit (local_live, DF_REF_REGNO (def));
636 for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
638 struct df_ref *use = *use_rec;
639 if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0)
640 bitmap_set_bit (local_live, DF_REF_REGNO (use));
643 /* These regs are considered always live so if they end up dying
644 because of some def, we need to bring the back again.
645 Calling df_simulate_fixup_sets has the disadvantage of calling
646 bb_has_eh_pred once per insn, so we cache the information here. */
647 if (bb_has_eh_pred (bb))
648 au = df->eh_block_artificial_uses;
649 else
650 au = df->regular_block_artificial_uses;
652 FOR_BB_INSNS_REVERSE (bb, insn)
653 if (INSN_P (insn))
655 bool needed = false;
657 /* The insn is needed if there is someone who uses the output. */
658 for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
659 if (bitmap_bit_p (local_live, DF_REF_REGNO (*def_rec))
660 || bitmap_bit_p (au, DF_REF_REGNO (*def_rec)))
662 needed = true;
663 break;
666 if (needed)
667 mark_insn (insn, true);
669 /* No matter if the instruction is needed or not, we remove
670 any regno in the defs from the live set. */
671 df_simulate_defs (insn, local_live);
673 /* On the other hand, we do not allow the dead uses to set
674 anything in local_live. */
675 if (marked_insn_p (insn))
676 df_simulate_uses (insn, local_live);
679 for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
681 struct df_ref *def = *def_rec;
682 if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP)
683 && (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL))))
684 bitmap_clear_bit (local_live, DF_REF_REGNO (def));
687 #ifdef EH_USES
688 /* Process the uses that are live into an exception handler. */
689 for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
691 /* Add use to set of uses in this BB. */
692 struct df_ref *use = *use_rec;
693 if (DF_REF_FLAGS (use) & DF_REF_AT_TOP)
694 bitmap_set_bit (local_live, DF_REF_REGNO (use));
696 #endif
698 block_changed = !bitmap_equal_p (local_live, DF_LR_IN (bb));
699 if (block_changed)
700 bitmap_copy (DF_LR_IN (bb), local_live);
702 BITMAP_FREE (local_live);
703 return block_changed;
707 /* Perform fast DCE once initialization is done. */
709 static void
710 fast_dce (void)
712 int *postorder = df_get_postorder (DF_BACKWARD);
713 int n_blocks = df_get_n_blocks (DF_BACKWARD);
714 /* The set of blocks that have been seen on this iteration. */
715 bitmap processed = BITMAP_ALLOC (&dce_blocks_bitmap_obstack);
716 /* The set of blocks that need to have the out vectors reset because
717 the in of one of their successors has changed. */
718 bitmap redo_out = BITMAP_ALLOC (&dce_blocks_bitmap_obstack);
719 bitmap all_blocks = BITMAP_ALLOC (&dce_blocks_bitmap_obstack);
720 bool global_changed = true;
721 int i;
723 prescan_insns_for_dce (true);
725 for (i = 0; i < n_blocks; i++)
726 bitmap_set_bit (all_blocks, postorder[i]);
728 while (global_changed)
730 global_changed = false;
732 for (i = 0; i < n_blocks; i++)
734 int index = postorder[i];
735 basic_block bb = BASIC_BLOCK (index);
736 bool local_changed;
738 if (index < NUM_FIXED_BLOCKS)
740 bitmap_set_bit (processed, index);
741 continue;
744 local_changed
745 = dce_process_block (bb, bitmap_bit_p (redo_out, index));
746 bitmap_set_bit (processed, index);
748 if (local_changed)
750 edge e;
751 edge_iterator ei;
752 FOR_EACH_EDGE (e, ei, bb->preds)
753 if (bitmap_bit_p (processed, e->src->index))
754 /* Be tricky about when we need to iterate the
755 analysis. We only have redo the analysis if the
756 bitmaps change at the top of a block that is the
757 entry to a loop. */
758 global_changed = true;
759 else
760 bitmap_set_bit (redo_out, e->src->index);
764 if (global_changed)
766 /* Turn off the RUN_DCE flag to prevent recursive calls to
767 dce. */
768 int old_flag = df_clear_flags (DF_LR_RUN_DCE);
770 /* So something was deleted that requires a redo. Do it on
771 the cheap. */
772 delete_unmarked_insns ();
773 sbitmap_zero (marked);
774 bitmap_clear (processed);
775 bitmap_clear (redo_out);
777 /* We do not need to rescan any instructions. We only need
778 to redo the dataflow equations for the blocks that had a
779 change at the top of the block. Then we need to redo the
780 iteration. */
781 df_analyze_problem (df_lr, all_blocks, postorder, n_blocks);
783 if (old_flag & DF_LR_RUN_DCE)
784 df_set_flags (DF_LR_RUN_DCE);
786 prescan_insns_for_dce (true);
790 delete_unmarked_insns ();
792 BITMAP_FREE (processed);
793 BITMAP_FREE (redo_out);
794 BITMAP_FREE (all_blocks);
798 /* Fast DCE. */
800 static unsigned int
801 rest_of_handle_fast_dce (void)
803 init_dce (true);
804 fast_dce ();
805 fini_dce (true);
806 return 0;
810 /* This is an internal call that is used by the df live register
811 problem to run fast dce as a side effect of creating the live
812 information. The stack is organized so that the lr problem is run,
813 this pass is run, which updates the live info and the df scanning
814 info, and then returns to allow the rest of the problems to be run.
816 This can be called by elsewhere but it will not update the bit
817 vectors for any other problems than LR. */
819 void
820 run_fast_df_dce (void)
822 if (flag_dce)
824 /* If dce is able to delete something, it has to happen
825 immediately. Otherwise there will be problems handling the
826 eq_notes. */
827 enum df_changeable_flags old_flags
828 = df_clear_flags (DF_DEFER_INSN_RESCAN + DF_NO_INSN_RESCAN);
830 df_in_progress = true;
831 rest_of_handle_fast_dce ();
832 df_in_progress = false;
834 df_set_flags (old_flags);
839 /* Run a fast DCE pass. */
841 void
842 run_fast_dce (void)
844 if (flag_dce)
845 rest_of_handle_fast_dce ();
849 static bool
850 gate_fast_dce (void)
852 return optimize > 0 && flag_dce
853 && dbg_cnt (dce_fast);
856 struct tree_opt_pass pass_fast_rtl_dce =
858 "dce", /* name */
859 gate_fast_dce, /* gate */
860 rest_of_handle_fast_dce, /* execute */
861 NULL, /* sub */
862 NULL, /* next */
863 0, /* static_pass_number */
864 TV_DCE, /* tv_id */
865 0, /* properties_required */
866 0, /* properties_provided */
867 0, /* properties_destroyed */
868 0, /* todo_flags_start */
869 TODO_dump_func |
870 TODO_df_finish | TODO_verify_rtl_sharing |
871 TODO_ggc_collect, /* todo_flags_finish */
872 'w' /* letter */