/* RTL dead code elimination.
   Copyright (C) 2005, 2006, 2007 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "hashtab.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "df.h"
#include "cselib.h"
#include "dce.h"
#include "timevar.h"
#include "tree-pass.h"
#include "dbgcnt.h"

DEF_VEC_I(int);
DEF_VEC_ALLOC_I(int,heap);


/* -------------------------------------------------------------------------
   Core mark/delete routines
   ------------------------------------------------------------------------- */

/* True if we are invoked while the df engine is running; in this case,
   we don't want to reenter it.  */
static bool df_in_progress = false;

/* Instructions that have been marked but whose dependencies have not
   yet been processed.  */
static VEC(rtx,heap) *worklist;

/* Bitmap of instructions marked as needed indexed by INSN_UID.  */
static sbitmap marked;

/* Bitmap obstacks used for block processing by the fast algorithm.  */
static bitmap_obstack dce_blocks_bitmap_obstack;
static bitmap_obstack dce_tmp_bitmap_obstack;
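
/* Two DCE variants live in this file: a UD-chain based pass
   (rest_of_handle_ud_dce) that marks needed instructions transitively
   via the worklist above, and a "fast" pass (fast_dce) that works
   block-locally on the LR (live registers) problem.  MARKED is shared
   by both; the worklist is used only by the UD-chain variant and the
   obstacks only by the fast one.  */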


/* A subroutine of deletable_insn_p.  BODY is part of the instruction
   being tested; either the top-level pattern, or an element of a
   PARALLEL.  The instruction is known not to be a bare USE or
   CLOBBER.  */

static bool
deletable_insn_p_1 (rtx body)
{
  switch (GET_CODE (body))
    {
    case PREFETCH:
    case TRAP_IF:
      /* The UNSPEC case was added here because the ia-64 claims that
         USEs do not work after reload and generates UNSPECS rather
         than USEs.  Since dce is run after reload we need to avoid
         deleting these even if they are dead.  If it turns out that
         USEs really do work after reload, the ia-64 should be
         changed, and the UNSPEC case can be removed.  */
    case UNSPEC:
      return false;

    default:
      if (volatile_refs_p (body))
        return false;

      if (flag_non_call_exceptions && may_trap_p (body))
        return false;

      return true;
    }
}


/* Return true if INSN is a normal instruction that can be deleted by
   the DCE pass.  */

static bool
deletable_insn_p (rtx insn, bool fast)
{
  rtx body, x;
  int i;

  if (!NONJUMP_INSN_P (insn))
    return false;

  body = PATTERN (insn);
  switch (GET_CODE (body))
    {
    case USE:
      return false;

    case CLOBBER:
      if (fast)
        {
          /* A CLOBBER of a dead pseudo register serves no purpose.
             That is not necessarily true for hard registers until
             after reload.  */
          x = XEXP (body, 0);
          return REG_P (x) && (!HARD_REGISTER_P (x) || reload_completed);
        }
      else
        /* Because of the way that use-def chains are built, it is not
           possible to tell if the clobber is dead because it can
           never be the target of a use-def chain.  */
        return false;

    case PARALLEL:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        if (!deletable_insn_p_1 (XVECEXP (body, 0, i)))
          return false;
      return true;

    default:
      return deletable_insn_p_1 (body);
    }
}


/* Return true if INSN has been marked as needed.  */

static inline int
marked_insn_p (rtx insn)
{
  if (insn)
    return TEST_BIT (marked, INSN_UID (insn));
  else
    /* Artificial defs are always needed and they do not have an
       insn.  */
    return true;
}


/* If INSN has not yet been marked as needed, mark it now, and add it to
   the worklist.  */

static void
mark_insn (rtx insn, bool fast)
{
  if (!marked_insn_p (insn))
    {
      if (!fast)
        VEC_safe_push (rtx, heap, worklist, insn);
      SET_BIT (marked, INSN_UID (insn));
      if (dump_file)
        fprintf (dump_file, "  Adding insn %d to worklist\n", INSN_UID (insn));
    }
}
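
/* Note that the worklist is only used by the UD-chain algorithm: in
   the fast algorithm the insns feeding a needed insn are found during
   the backward walk of each block, so mark_insn merely sets the bit
   in MARKED.  */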


/* A note_stores callback used by mark_nonreg_stores.  DATA is the
   instruction containing DEST.  */

static void
mark_nonreg_stores_1 (rtx dest, const_rtx pattern, void *data)
{
  if (GET_CODE (pattern) != CLOBBER && !REG_P (dest))
    mark_insn ((rtx) data, true);
}


/* A note_stores callback used by mark_nonreg_stores.  DATA is the
   instruction containing DEST.  */

static void
mark_nonreg_stores_2 (rtx dest, const_rtx pattern, void *data)
{
  if (GET_CODE (pattern) != CLOBBER && !REG_P (dest))
    mark_insn ((rtx) data, false);
}


/* Mark INSN if BODY stores to a non-register destination.  */

static void
mark_nonreg_stores (rtx body, rtx insn, bool fast)
{
  if (fast)
    note_stores (body, mark_nonreg_stores_1, insn);
  else
    note_stores (body, mark_nonreg_stores_2, insn);
}
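
/* DCE does not track values stored in memory, so any store whose
   destination is not a bare REG keeps its instruction alive; e.g. an
   insn such as (set (mem:SI (reg:SI 100)) (reg:SI 101)) is always
   marked as needed by the callbacks above (the register numbers are
   purely illustrative).  */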


/* Return true if the entire libcall sequence starting at INSN is dead.
   NOTE is the REG_LIBCALL note attached to INSN.

   A libcall sequence is a block of insns with no side-effects, i.e.
   that is only used for its return value.  The terminology derives
   from that of a call, but a libcall sequence need not contain one.
   It is only defined by a pair of REG_LIBCALL/REG_RETVAL notes.

   From a dataflow viewpoint, a libcall sequence has the property that
   no UD chain can enter it from the outside.  As a consequence, if a
   libcall sequence has a dead return value, it is effectively dead.
   This is both enforced by CSE (cse_extended_basic_block) and relied
   upon by delete_trivially_dead_insns.

   However, in practice, the return value business is a tricky one and
   only checking the liveness of the last insn is not sufficient to
   decide whether the whole sequence is dead (e.g. PR middle-end/19551)
   so we check the liveness of every insn starting from the call.  */

static bool
libcall_dead_p (rtx insn, rtx note)
{
  rtx last = XEXP (note, 0);

  /* Find the call insn.  */
  while (insn != last && !CALL_P (insn))
    insn = NEXT_INSN (insn);

  /* If there is none, do nothing special, since ordinary death handling
     can understand these insns.  */
  if (!CALL_P (insn))
    return false;

  /* If this is a call that returns a value via an invisible pointer, the
     dataflow engine cannot see it so it has been marked unconditionally.
     Skip it unless it has been made the last insn in the libcall, for
     example by the combiner, in which case we're left with no easy way
     of asserting its liveness.  */
  if (!single_set (insn))
    {
      if (insn == last)
        return false;
      insn = NEXT_INSN (insn);
    }

  while (insn != NEXT_INSN (last))
    {
      if (INSN_P (insn) && marked_insn_p (insn))
        return false;
      insn = NEXT_INSN (insn);
    }

  return true;
}


/* Delete all REG_EQUAL notes of the registers INSN writes, to prevent
   bad dangling REG_EQUAL notes.  */

static void
delete_corresponding_reg_eq_notes (rtx insn)
{
  struct df_ref **def_rec;
  for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
    {
      struct df_ref *def = *def_rec;
      unsigned int regno = DF_REF_REGNO (def);
      /* This loop is a little tricky.  We cannot just go down the
         chain because it is being modified by the actions in the
         loop.  So we just get the head.  We plan to drain the list
         anyway.  */
      while (DF_REG_EQ_USE_CHAIN (regno))
        {
          struct df_ref *eq_use = DF_REG_EQ_USE_CHAIN (regno);
          rtx noted_insn = DF_REF_INSN (eq_use);
          rtx note = find_reg_note (noted_insn, REG_EQUAL, NULL_RTX);
          if (!note)
            note = find_reg_note (noted_insn, REG_EQUIV, NULL_RTX);

          /* This assert is generally triggered when someone deletes a
             REG_EQUAL or REG_EQUIV note by hacking the list manually
             rather than calling remove_note.  */
          gcc_assert (note);
          remove_note (noted_insn, note);
        }
    }
}
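
/* Once INSN is gone, a REG_EQUAL or REG_EQUIV note on another insn
   that mentions one of the registers INSN sets may no longer describe
   the value that register actually holds, which is why every such
   note is dropped before the deletion.  */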


/* Delete every instruction that hasn't been marked.  */

static void
delete_unmarked_insns (void)
{
  basic_block bb;
  rtx insn, next;

  FOR_EACH_BB (bb)
    FOR_BB_INSNS_SAFE (bb, insn, next)
      if (INSN_P (insn))
        {
          rtx note = find_reg_note (insn, REG_LIBCALL, NULL_RTX);

          /* Always delete no-op moves.  */
          if (noop_move_p (insn))
            ;

          /* Try to delete libcall sequences as a whole.  */
          else if (note && libcall_dead_p (insn, note))
            {
              rtx last = XEXP (note, 0);

              if (!dbg_cnt (dce))
                continue;

              if (dump_file)
                fprintf (dump_file, "DCE: Deleting libcall %d-%d\n",
                         INSN_UID (insn), INSN_UID (last));

              next = NEXT_INSN (last);
              delete_insn_chain_and_edges (insn, last);
              continue;
            }

          /* Otherwise rely only on the DCE algorithm.  */
          else if (marked_insn_p (insn))
            continue;

          if (!dbg_cnt (dce))
            continue;

          if (dump_file)
            fprintf (dump_file, "DCE: Deleting insn %d\n", INSN_UID (insn));

          /* Before we delete the insn we have to delete REG_EQUAL notes
             for the destination regs in order to avoid dangling notes.  */
          delete_corresponding_reg_eq_notes (insn);

          /* If we're about to delete the first insn of a libcall, then
             move the REG_LIBCALL note to the next real insn and update
             the REG_RETVAL note.  */
          if (note && (XEXP (note, 0) != insn))
            {
              rtx new_libcall_insn = next_real_insn (insn);
              rtx retval_note = find_reg_note (XEXP (note, 0),
                                               REG_RETVAL, NULL_RTX);
              /* If the RETVAL and LIBCALL notes would land on the same
                 insn just remove them.  */
              if (XEXP (note, 0) == new_libcall_insn)
                remove_note (new_libcall_insn, retval_note);
              else
                {
                  REG_NOTES (new_libcall_insn)
                    = gen_rtx_INSN_LIST (REG_LIBCALL, XEXP (note, 0),
                                         REG_NOTES (new_libcall_insn));
                  XEXP (retval_note, 0) = new_libcall_insn;
                }
            }

          /* If the insn contains a REG_RETVAL note and is dead, but the
             libcall as a whole is not dead, then we want to remove the
             insn, but not the whole libcall sequence.  However, we also
             need to remove the dangling REG_LIBCALL note in order to
             avoid mismatched notes.  We could find a new location for
             the REG_RETVAL note, but it hardly seems worth the effort.  */
          note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
          if (note && (XEXP (note, 0) != insn))
            {
              rtx libcall_note
                = find_reg_note (XEXP (note, 0), REG_LIBCALL, NULL_RTX);
              remove_note (XEXP (note, 0), libcall_note);
            }

          /* Now delete the insn.  */
          delete_insn_and_edges (insn);
        }
}


/* Helper function for prescan_insns_for_dce: prescan the entire libcall
   sequence starting at INSN and return the insn following the libcall.
   NOTE is the REG_LIBCALL note attached to INSN.  */

static rtx
prescan_libcall_for_dce (rtx insn, rtx note, bool fast)
{
  rtx last = XEXP (note, 0);

  /* A libcall is never necessary on its own but we need to mark the stores
     to a non-register destination.  */
  while (insn != last && !CALL_P (insn))
    {
      if (INSN_P (insn))
        mark_nonreg_stores (PATTERN (insn), insn, fast);
      insn = NEXT_INSN (insn);
    }

  /* If this is a call that returns a value via an invisible pointer, the
     dataflow engine cannot see it so it has to be marked unconditionally.  */
  if (CALL_P (insn) && !single_set (insn))
    {
      mark_insn (insn, fast);
      insn = NEXT_INSN (insn);
    }

  while (insn != NEXT_INSN (last))
    {
      if (INSN_P (insn))
        mark_nonreg_stores (PATTERN (insn), insn, fast);
      insn = NEXT_INSN (insn);
    }

  return insn;
}


/* Go through the instructions and mark those whose necessity is not
   dependent on inter-instruction information.  Make sure all other
   instructions are not marked.  */

static void
prescan_insns_for_dce (bool fast)
{
  basic_block bb;
  rtx insn, next;

  if (dump_file)
    fprintf (dump_file, "Finding needed instructions:\n");

  FOR_EACH_BB (bb)
    FOR_BB_INSNS_SAFE (bb, insn, next)
      if (INSN_P (insn))
        {
          rtx note = find_reg_note (insn, REG_LIBCALL, NULL_RTX);
          if (note)
            next = prescan_libcall_for_dce (insn, note, fast);
          else if (deletable_insn_p (insn, fast))
            mark_nonreg_stores (PATTERN (insn), insn, fast);
          else
            mark_insn (insn, fast);
        }

  if (dump_file)
    fprintf (dump_file, "Finished finding needed instructions:\n");
}


/* UD-based DCE routines.  */

/* Mark instructions that define artificially-used registers, such as
   the frame pointer and the stack pointer.  */

static void
mark_artificial_uses (void)
{
  basic_block bb;
  struct df_link *defs;
  struct df_ref **use_rec;

  FOR_ALL_BB (bb)
    {
      for (use_rec = df_get_artificial_uses (bb->index);
           *use_rec; use_rec++)
        for (defs = DF_REF_CHAIN (*use_rec); defs; defs = defs->next)
          mark_insn (DF_REF_INSN (defs->ref), false);
    }
}


/* Mark every instruction that defines a register value that INSN uses.  */

static void
mark_reg_dependencies (rtx insn)
{
  struct df_link *defs;
  struct df_ref **use_rec;

  for (use_rec = DF_INSN_USES (insn); *use_rec; use_rec++)
    {
      struct df_ref *use = *use_rec;
      if (dump_file)
        {
          fprintf (dump_file, "Processing use of ");
          print_simple_rtl (dump_file, DF_REF_REG (use));
          fprintf (dump_file, " in insn %d:\n", INSN_UID (insn));
        }
      for (defs = DF_REF_CHAIN (use); defs; defs = defs->next)
        mark_insn (DF_REF_INSN (defs->ref), false);
    }
}
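
/* Together with the worklist in rest_of_handle_ud_dce, this yields the
   transitive closure of neededness: marking an insn queues it, and
   popping it marks every insn that feeds one of its uses through a
   UD chain.  */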


/* Initialize global variables for a new DCE pass.  */

static void
init_dce (bool fast)
{
  if (!df_in_progress)
    {
      if (!fast)
        df_chain_add_problem (DF_UD_CHAIN);
      df_analyze ();
    }

  if (dump_file)
    df_dump (dump_file);

  if (fast)
    {
      bitmap_obstack_initialize (&dce_blocks_bitmap_obstack);
      bitmap_obstack_initialize (&dce_tmp_bitmap_obstack);
    }

  marked = sbitmap_alloc (get_max_uid () + 1);
  sbitmap_zero (marked);
}


/* Free the data allocated by init_dce.  */

static void
fini_dce (bool fast)
{
  sbitmap_free (marked);

  if (fast)
    {
      bitmap_obstack_release (&dce_blocks_bitmap_obstack);
      bitmap_obstack_release (&dce_tmp_bitmap_obstack);
    }
}


/* UD-chain based DCE.  */

static unsigned int
rest_of_handle_ud_dce (void)
{
  rtx insn;

  init_dce (false);

  prescan_insns_for_dce (false);
  mark_artificial_uses ();
  while (VEC_length (rtx, worklist) > 0)
    {
      insn = VEC_pop (rtx, worklist);
      mark_reg_dependencies (insn);
    }

  /* Before any insns are deleted, we must remove the chains since
     they are not bidirectional.  */
  df_remove_problem (df_chain);
  delete_unmarked_insns ();

  fini_dce (false);
  return 0;
}


static bool
gate_ud_dce (void)
{
  return optimize > 1 && flag_dce;
}
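
/* The UD-chain based pass is only enabled at -O2 and above; the fast
   pass below (gate_fast_dce) runs at any optimization level whenever
   DCE itself is enabled.  */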

struct tree_opt_pass pass_ud_rtl_dce =
{
  "dce",                                /* name */
  gate_ud_dce,                          /* gate */
  rest_of_handle_ud_dce,                /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_DCE,                               /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func |
  TODO_df_finish | TODO_verify_rtl_sharing |
  TODO_ggc_collect,                     /* todo_flags_finish */
  'w'                                   /* letter */
};


/* -------------------------------------------------------------------------
   Fast DCE functions
   ------------------------------------------------------------------------- */

/* Process basic block BB.  Return true if the live_in set has changed.  */

static bool
dce_process_block (basic_block bb, bool redo_out)
{
  bitmap local_live = BITMAP_ALLOC (&dce_tmp_bitmap_obstack);
  bitmap au;
  rtx insn;
  bool block_changed;
  struct df_ref **def_rec, **use_rec;
  unsigned int bb_index = bb->index;

  if (redo_out)
    {
      /* We need to redo the live_out set of this block if one of its
         successors has had a change in its live_in set.  */
      edge e;
      edge_iterator ei;
      df_confluence_function_n con_fun_n = df_lr->problem->con_fun_n;
      bitmap_clear (DF_LR_OUT (bb));
      FOR_EACH_EDGE (e, ei, bb->succs)
        (*con_fun_n) (e);
    }

  if (dump_file)
    {
      fprintf (dump_file, "processing block %d live out = ", bb->index);
      df_print_regset (dump_file, DF_LR_OUT (bb));
    }

  bitmap_copy (local_live, DF_LR_OUT (bb));

  /* Process the artificial defs and uses at the bottom of the block.  */
  for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
    {
      struct df_ref *def = *def_rec;
      if (((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
          && (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL))))
        bitmap_clear_bit (local_live, DF_REF_REGNO (def));
    }

  for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
    {
      struct df_ref *use = *use_rec;
      if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0)
        bitmap_set_bit (local_live, DF_REF_REGNO (use));
    }

  /* These regs are considered always live so if they end up dying
     because of some def, we need to bring them back again.  Calling
     df_simulate_fixup_sets has the disadvantage of calling
     bb_has_eh_pred once per insn, so we cache the information here.  */
  if (bb_has_eh_pred (bb))
    au = df->eh_block_artificial_uses;
  else
    au = df->regular_block_artificial_uses;
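
  /* Now simulate liveness backwards through the block: an insn is
     needed exactly when one of its defs is still live below it (or is
     in the always-live set AU); needed insns feed their uses back into
     the live set, dead ones do not.  */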
  FOR_BB_INSNS_REVERSE (bb, insn)
    if (INSN_P (insn))
      {
        bool needed = false;

        /* The insn is needed if there is someone who uses the output.  */
        for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
          if (bitmap_bit_p (local_live, DF_REF_REGNO (*def_rec))
              || bitmap_bit_p (au, DF_REF_REGNO (*def_rec)))
            {
              needed = true;
              break;
            }

        if (needed)
          mark_insn (insn, true);

        /* No matter if the instruction is needed or not, we remove
           any regno in the defs from the live set.  */
        df_simulate_defs (insn, local_live);

        /* On the other hand, we do not allow the dead uses to set
           anything in local_live.  */
        if (marked_insn_p (insn))
          df_simulate_uses (insn, local_live);
      }

  for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
    {
      struct df_ref *def = *def_rec;
      if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP)
          && (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL))))
        bitmap_clear_bit (local_live, DF_REF_REGNO (def));
    }

#ifdef EH_USES
  /* Process the uses that are live into an exception handler.  */
  for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
    {
      /* Add use to set of uses in this BB.  */
      struct df_ref *use = *use_rec;
      if (DF_REF_FLAGS (use) & DF_REF_AT_TOP)
        bitmap_set_bit (local_live, DF_REF_REGNO (use));
    }
#endif

  block_changed = !bitmap_equal_p (local_live, DF_LR_IN (bb));
  if (block_changed)
    bitmap_copy (DF_LR_IN (bb), local_live);

  BITMAP_FREE (local_live);
  return block_changed;
}


/* Perform fast DCE once initialization is done.  */

static void
fast_dce (void)
{
  int *postorder = df_get_postorder (DF_BACKWARD);
  int n_blocks = df_get_n_blocks (DF_BACKWARD);
  /* The set of blocks that have been seen on this iteration.  */
  bitmap processed = BITMAP_ALLOC (&dce_blocks_bitmap_obstack);
  /* The set of blocks that need to have their out vectors reset because
     the live_in of one of their successors has changed.  */
  bitmap redo_out = BITMAP_ALLOC (&dce_blocks_bitmap_obstack);
  bitmap all_blocks = BITMAP_ALLOC (&dce_blocks_bitmap_obstack);
  bool global_changed = true;
  int i;

  prescan_insns_for_dce (true);

  for (i = 0; i < n_blocks; i++)
    bitmap_set_bit (all_blocks, postorder[i]);
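
  /* Visiting the blocks in postorder means that a single sweep usually
     converges; we only iterate again when liveness changes at the top
     of a block whose predecessor has already been processed, i.e.
     around a loop.  */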
  while (global_changed)
    {
      global_changed = false;

      for (i = 0; i < n_blocks; i++)
        {
          int index = postorder[i];
          basic_block bb = BASIC_BLOCK (index);
          bool local_changed;

          if (index < NUM_FIXED_BLOCKS)
            {
              bitmap_set_bit (processed, index);
              continue;
            }

          local_changed
            = dce_process_block (bb, bitmap_bit_p (redo_out, index));
          bitmap_set_bit (processed, index);

          if (local_changed)
            {
              edge e;
              edge_iterator ei;
              FOR_EACH_EDGE (e, ei, bb->preds)
                if (bitmap_bit_p (processed, e->src->index))
                  /* Be tricky about when we need to iterate the
                     analysis.  We only have to redo the analysis if
                     the bitmaps change at the top of a block that is
                     the entry to a loop.  */
                  global_changed = true;
                else
                  bitmap_set_bit (redo_out, e->src->index);
            }
        }

      if (global_changed)
        {
          /* Turn off the RUN_DCE flag to prevent recursive calls to
             dce.  */
          int old_flag = df_clear_flags (DF_LR_RUN_DCE);

          /* So something was deleted that requires a redo.  Do it on
             the cheap.  */
          delete_unmarked_insns ();
          sbitmap_zero (marked);
          bitmap_clear (processed);
          bitmap_clear (redo_out);

          /* We do not need to rescan any instructions.  We only need
             to redo the dataflow equations for the blocks that had a
             change at the top of the block.  Then we need to redo the
             iteration.  */
          df_analyze_problem (df_lr, all_blocks, postorder, n_blocks);

          if (old_flag & DF_LR_RUN_DCE)
            df_set_flags (DF_LR_RUN_DCE);

          prescan_insns_for_dce (true);
        }
    }

  delete_unmarked_insns ();

  BITMAP_FREE (processed);
  BITMAP_FREE (redo_out);
  BITMAP_FREE (all_blocks);
}


/* Fast DCE.  */

static unsigned int
rest_of_handle_fast_dce (void)
{
  init_dce (true);
  fast_dce ();
  fini_dce (true);
  return 0;
}


/* This is an internal call that is used by the df live register
   problem to run fast dce as a side effect of creating the live
   information.  The stack is organized so that the lr problem is run,
   this pass is run, which updates the live info and the df scanning
   info, and then returns to allow the rest of the problems to be run.

   This can be called from elsewhere, but it will not update the bit
   vectors for any problems other than LR.  */

void
run_fast_df_dce (void)
{
  if (flag_dce)
    {
      /* If dce is able to delete something, it has to happen
         immediately.  Otherwise there will be problems handling the
         eq_notes.  */
      enum df_changeable_flags old_flags
        = df_clear_flags (DF_DEFER_INSN_RESCAN + DF_NO_INSN_RESCAN);

      df_in_progress = true;
      rest_of_handle_fast_dce ();
      df_in_progress = false;

      df_set_flags (old_flags);
    }
}


/* Run a fast DCE pass.  */

void
run_fast_dce (void)
{
  if (flag_dce)
    rest_of_handle_fast_dce ();
}


static bool
gate_fast_dce (void)
{
  return optimize > 0 && flag_dce;
}

struct tree_opt_pass pass_fast_rtl_dce =
{
  "dce",                                /* name */
  gate_fast_dce,                        /* gate */
  rest_of_handle_fast_dce,              /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_DCE,                               /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func |
  TODO_df_finish | TODO_verify_rtl_sharing |
  TODO_ggc_collect,                     /* todo_flags_finish */
  'w'                                   /* letter */
};