/* RTL dead code elimination.
   Copyright (C) 2005, 2006, 2007, 2008 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "hashtab.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "df.h"
#include "cselib.h"
#include "dce.h"
#include "timevar.h"
#include "tree-pass.h"
#include "dbgcnt.h"

DEF_VEC_I(int);
DEF_VEC_ALLOC_I(int,heap);
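
/* Overview: this file provides two styles of dead code elimination on
   RTL.  pass_ud_rtl_dce builds use-def chains and propagates
   "neededness" from trivially required insns through those chains,
   while pass_fast_rtl_dce and pass_fast_rtl_byte_dce avoid chains and
   instead simulate register liveness backwards through each block.
   Both styles share the marking helpers and delete_unmarked_insns
   below.  */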

/* -------------------------------------------------------------------------
   Core mark/delete routines
   ------------------------------------------------------------------------- */

/* True if we are invoked while the df engine is running; in this case,
   we don't want to reenter it.  */
static bool df_in_progress = false;

/* Instructions that have been marked but whose dependencies have not
   yet been processed.  */
static VEC(rtx,heap) *worklist;

/* Bitmap of instructions marked as needed indexed by INSN_UID.  */
static sbitmap marked;

/* Bitmap obstacks used for block processing by the fast algorithm.  */
static bitmap_obstack dce_blocks_bitmap_obstack;
static bitmap_obstack dce_tmp_bitmap_obstack;

/* A subroutine of deletable_insn_p; BODY is part of the instruction
   being tested, either the top-level pattern or an element of a
   PARALLEL.  The instruction is known not to be a bare USE or
   CLOBBER.  */

static bool
deletable_insn_p_1 (rtx body)
{
  switch (GET_CODE (body))
    {
    case PREFETCH:
    case TRAP_IF:
      /* The UNSPEC case was added here because the ia-64 claims that
         USEs do not work after reload and generates UNSPECS rather
         than USEs.  Since dce is run after reload we need to avoid
         deleting these even if they are dead.  If it turns out that
         USEs really do work after reload, the ia-64 should be
         changed, and the UNSPEC case can be removed.  */
    case UNSPEC:
      return false;

    default:
      if (volatile_refs_p (body))
        return false;

      if (flag_non_call_exceptions && may_trap_p (body))
        return false;

      return true;
    }
}

/* Return true if INSN is a normal instruction that can be deleted by
   the DCE pass.  */

static bool
deletable_insn_p (rtx insn, bool fast)
{
  rtx body, x;
  int i;

  if (CALL_P (insn)
      /* We cannot delete calls inside of the recursive dce because
         this may cause basic blocks to be deleted and this messes up
         the rest of the stack of optimization passes.  */
      && (!df_in_progress)
      /* We cannot delete pure or const sibling calls because it is
         hard to see the result.  */
      && (!SIBLING_CALL_P (insn))
      /* We can delete dead const or pure calls as long as they do not
         infinite loop.  */
      && (RTL_CONST_OR_PURE_CALL_P (insn)
          && !RTL_LOOPING_CONST_OR_PURE_CALL_P (insn)))
    return true;

  if (!NONJUMP_INSN_P (insn))
    return false;

  body = PATTERN (insn);
  switch (GET_CODE (body))
    {
    case USE:
      return false;

    case CLOBBER:
      if (fast)
        {
          /* A CLOBBER of a dead pseudo register serves no purpose.
             That is not necessarily true for hard registers until
             after reload.  */
          x = XEXP (body, 0);
          return REG_P (x) && (!HARD_REGISTER_P (x) || reload_completed);
        }
      else
        /* Because of the way that use-def chains are built, it is not
           possible to tell if the clobber is dead because it can
           never be the target of a use-def chain.  */
        return false;

    case PARALLEL:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        if (!deletable_insn_p_1 (XVECEXP (body, 0, i)))
          return false;
      return true;

    default:
      return deletable_insn_p_1 (body);
    }
}

/* Return true if INSN has been marked as needed.  */

static inline int
marked_insn_p (rtx insn)
{
  /* Artificial defs are always needed and they do not have an insn.
     We should never see them here.  */
  gcc_assert (insn);
  return TEST_BIT (marked, INSN_UID (insn));
}

/* If INSN has not yet been marked as needed, mark it now, and add it to
   the worklist.  */

static void
mark_insn (rtx insn, bool fast)
{
  if (!marked_insn_p (insn))
    {
      if (!fast)
        VEC_safe_push (rtx, heap, worklist, insn);
      SET_BIT (marked, INSN_UID (insn));
      if (dump_file)
        fprintf (dump_file, "  Adding insn %d to worklist\n", INSN_UID (insn));
    }
}

/* A note_stores callback used by mark_nonreg_stores.  DATA is the
   instruction containing DEST.  */

static void
mark_nonreg_stores_1 (rtx dest, const_rtx pattern, void *data)
{
  if (GET_CODE (pattern) != CLOBBER && !REG_P (dest))
    mark_insn ((rtx) data, true);
}

/* A note_stores callback used by mark_nonreg_stores.  DATA is the
   instruction containing DEST.  */

static void
mark_nonreg_stores_2 (rtx dest, const_rtx pattern, void *data)
{
  if (GET_CODE (pattern) != CLOBBER && !REG_P (dest))
    mark_insn ((rtx) data, false);
}
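
/* Neither the use-def chain problem nor the liveness simulation below
   can show that a store to memory (or any other non-register
   destination) is dead, so insns performing such stores are
   conservatively marked as needed by the routine below.  */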

/* Mark INSN if BODY stores to a non-register destination.  */

static void
mark_nonreg_stores (rtx body, rtx insn, bool fast)
{
  if (fast)
    note_stores (body, mark_nonreg_stores_1, insn);
  else
    note_stores (body, mark_nonreg_stores_2, insn);
}
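
/* As an illustration: if an insn that sets pseudo 100 is about to be
   deleted while some other insn still carries a note such as
   (REG_EQUAL (plus:SI (reg:SI 100) (const_int 4))), that note would
   describe a value that is no longer computed.  The routine below
   therefore drops every REG_EQUAL/REG_EQUIV note mentioning a register
   defined by the doomed insn.  (The pseudo number and the expression
   above are only an example.)  */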

/* Delete all REG_EQUAL notes of the registers INSN writes, to prevent
   bad dangling REG_EQUAL notes.  */

static void
delete_corresponding_reg_eq_notes (rtx insn)
{
  struct df_ref **def_rec;
  for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
    {
      struct df_ref *def = *def_rec;
      unsigned int regno = DF_REF_REGNO (def);
      /* This loop is a little tricky.  We cannot just go down the
         chain because it is being modified by the actions in the
         loop.  So we just get the head.  We plan to drain the list
         anyway.  */
      while (DF_REG_EQ_USE_CHAIN (regno))
        {
          struct df_ref *eq_use = DF_REG_EQ_USE_CHAIN (regno);
          rtx noted_insn = DF_REF_INSN (eq_use);
          rtx note = find_reg_note (noted_insn, REG_EQUAL, NULL_RTX);
          if (!note)
            note = find_reg_note (noted_insn, REG_EQUIV, NULL_RTX);

          /* This assert is generally triggered when someone deletes a
             REG_EQUAL or REG_EQUIV note by hacking the list manually
             rather than calling remove_note.  */
          gcc_assert (note);
          remove_note (noted_insn, note);
        }
    }
}

/* Delete every instruction that hasn't been marked.  */

static void
delete_unmarked_insns (void)
{
  basic_block bb;
  rtx insn, next;
  bool must_clean = false;

  FOR_EACH_BB (bb)
    FOR_BB_INSNS_SAFE (bb, insn, next)
      if (INSN_P (insn))
        {
          /* Always delete no-op moves.  */
          if (noop_move_p (insn))
            ;

          /* Otherwise rely only on the DCE algorithm.  */
          else if (marked_insn_p (insn))
            continue;

          if (!dbg_cnt (dce))
            continue;

          if (dump_file)
            fprintf (dump_file, "DCE: Deleting insn %d\n", INSN_UID (insn));

          /* Before we delete the insn we have to delete REG_EQUAL notes
             for the destination regs in order to avoid dangling notes.  */
          delete_corresponding_reg_eq_notes (insn);

          /* If a pure or const call is deleted, this may make the cfg
             have unreachable blocks.  We remember this and call
             delete_unreachable_blocks at the end.  */
          if (CALL_P (insn))
            must_clean = true;

          /* Now delete the insn.  */
          delete_insn_and_edges (insn);
        }

  /* Deleted a pure or const call.  */
  if (must_clean)
    delete_unreachable_blocks ();
}

/* Go through the instructions and mark those whose necessity is not
   dependent on inter-instruction information.  Make sure all other
   instructions are not marked.  */

static void
prescan_insns_for_dce (bool fast)
{
  basic_block bb;
  rtx insn, next;

  if (dump_file)
    fprintf (dump_file, "Finding needed instructions:\n");

  FOR_EACH_BB (bb)
    FOR_BB_INSNS_SAFE (bb, insn, next)
      if (INSN_P (insn))
        {
          if (deletable_insn_p (insn, fast))
            mark_nonreg_stores (PATTERN (insn), insn, fast);
          else
            mark_insn (insn, fast);
        }

  if (dump_file)
    fprintf (dump_file, "Finished finding needed instructions:\n");
}

/* UD-based DCE routines.  */

/* Mark instructions that define artificially-used registers, such as
   the frame pointer and the stack pointer.  */

static void
mark_artificial_uses (void)
{
  basic_block bb;
  struct df_link *defs;
  struct df_ref **use_rec;

  FOR_ALL_BB (bb)
    {
      for (use_rec = df_get_artificial_uses (bb->index);
           *use_rec; use_rec++)
        for (defs = DF_REF_CHAIN (*use_rec); defs; defs = defs->next)
          if (! DF_REF_IS_ARTIFICIAL (defs->ref))
            mark_insn (DF_REF_INSN (defs->ref), false);
    }
}

/* Mark every instruction that defines a register value that INSN uses.  */

static void
mark_reg_dependencies (rtx insn)
{
  struct df_link *defs;
  struct df_ref **use_rec;

  for (use_rec = DF_INSN_USES (insn); *use_rec; use_rec++)
    {
      struct df_ref *use = *use_rec;
      if (dump_file)
        {
          fprintf (dump_file, "Processing use of ");
          print_simple_rtl (dump_file, DF_REF_REG (use));
          fprintf (dump_file, " in insn %d:\n", INSN_UID (insn));
        }
      for (defs = DF_REF_CHAIN (use); defs; defs = defs->next)
        if (! DF_REF_IS_ARTIFICIAL (defs->ref))
          mark_insn (DF_REF_INSN (defs->ref), false);
    }
}

/* Initialize global variables for a new DCE pass.  */

static void
init_dce (bool fast)
{
  if (!df_in_progress)
    {
      if (!fast)
        df_chain_add_problem (DF_UD_CHAIN);
      df_analyze ();
    }

  if (dump_file)
    df_dump (dump_file);

  if (fast)
    {
      bitmap_obstack_initialize (&dce_blocks_bitmap_obstack);
      bitmap_obstack_initialize (&dce_tmp_bitmap_obstack);
    }

  marked = sbitmap_alloc (get_max_uid () + 1);
  sbitmap_zero (marked);
}

/* Free the data allocated by init_dce.  */

static void
fini_dce (bool fast)
{
  sbitmap_free (marked);

  if (fast)
    {
      bitmap_obstack_release (&dce_blocks_bitmap_obstack);
      bitmap_obstack_release (&dce_tmp_bitmap_obstack);
    }
}

/* UD-chain based DCE.  */
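
/* The pass below proceeds in three steps: prescan_insns_for_dce marks
   (and queues) the insns that are needed regardless of data flow, the
   worklist loop then propagates neededness backwards along the
   use-def chains, and delete_unmarked_insns finally removes whatever
   was never marked.  */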

static unsigned int
rest_of_handle_ud_dce (void)
{
  rtx insn;

  init_dce (false);

  prescan_insns_for_dce (false);
  mark_artificial_uses ();
  while (VEC_length (rtx, worklist) > 0)
    {
      insn = VEC_pop (rtx, worklist);
      mark_reg_dependencies (insn);
    }

  /* Before any insns are deleted, we must remove the chains since
     they are not bidirectional.  */
  df_remove_problem (df_chain);
  delete_unmarked_insns ();

  fini_dce (false);
  return 0;
}

static bool
gate_ud_dce (void)
{
  return optimize > 1 && flag_dce
    && dbg_cnt (dce_ud);
}

struct rtl_opt_pass pass_ud_rtl_dce =
{
 {
  RTL_PASS,
  "dce",                                /* name */
  gate_ud_dce,                          /* gate */
  rest_of_handle_ud_dce,                /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_DCE,                               /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func |
  TODO_df_finish | TODO_verify_rtl_sharing |
  TODO_ggc_collect                      /* todo_flags_finish */
 }
};

/* -------------------------------------------------------------------------
   Fast DCE functions
   ------------------------------------------------------------------------- */
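
/* The fast algorithm never builds use-def chains.  Each block is
   walked backwards while register liveness is simulated: an insn is
   marked as needed when one of its defs is still live (or is an
   artificial use) at that point, and insns that remain unmarked do
   not make their own uses live.  fast_dce only iterates again when
   the live-in set at the top of a block changes and that block is
   reached through a back edge.  */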

/* Process basic block BB.  Return true if the live_in set has
   changed.  REDO_OUT is true if the info at the bottom of the block
   needs to be recalculated before starting.  AU is the proper set of
   artificial uses.  */

static bool
byte_dce_process_block (basic_block bb, bool redo_out, bitmap au)
{
  bitmap local_live = BITMAP_ALLOC (&dce_tmp_bitmap_obstack);
  rtx insn;
  bool block_changed;
  struct df_ref **def_rec;

  if (redo_out)
    {
      /* Need to redo the live_out set of this block if one of the
         successors of this block has had a change in its live_in
         set.  */
      edge e;
      edge_iterator ei;
      df_confluence_function_n con_fun_n = df_byte_lr->problem->con_fun_n;
      bitmap_clear (DF_BYTE_LR_OUT (bb));
      FOR_EACH_EDGE (e, ei, bb->succs)
        (*con_fun_n) (e);
    }

  if (dump_file)
    {
      fprintf (dump_file, "processing block %d live out = ", bb->index);
      df_print_byte_regset (dump_file, DF_BYTE_LR_OUT (bb));
    }

  bitmap_copy (local_live, DF_BYTE_LR_OUT (bb));

  df_byte_lr_simulate_artificial_refs_at_end (bb, local_live);

  FOR_BB_INSNS_REVERSE (bb, insn)
    if (INSN_P (insn))
      {
        /* The insn is needed if there is someone who uses the output.  */
        for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
          {
            struct df_ref *def = *def_rec;
            unsigned int last;
            unsigned int dregno = DF_REF_REGNO (def);
            unsigned int start = df_byte_lr_get_regno_start (dregno);
            unsigned int len = df_byte_lr_get_regno_len (dregno);

            unsigned int sb;
            unsigned int lb;
            /* This is one of the only places where DF_MM_MAY should
               be used for defs.  Need to make sure that we are
               checking for all of the bits that may be used.  */

            if (!df_compute_accessed_bytes (def, DF_MM_MAY, &sb, &lb))
              {
                start += sb;
                len = lb - sb;
              }

            if (bitmap_bit_p (au, dregno))
              {
                mark_insn (insn, true);
                goto quickexit;
              }

            last = start + len;
            while (start < last)
              if (bitmap_bit_p (local_live, start++))
                {
                  mark_insn (insn, true);
                  goto quickexit;
                }
          }

      quickexit:

        /* No matter if the instruction is needed or not, we remove
           any regno in the defs from the live set.  */
        df_byte_lr_simulate_defs (insn, local_live);

        /* On the other hand, we do not allow the dead uses to set
           anything in local_live.  */
        if (marked_insn_p (insn))
          df_byte_lr_simulate_uses (insn, local_live);

        if (dump_file)
          {
            fprintf (dump_file, "finished processing insn %d live out = ",
                     INSN_UID (insn));
            df_print_byte_regset (dump_file, local_live);
          }
      }

  df_byte_lr_simulate_artificial_refs_at_top (bb, local_live);

  block_changed = !bitmap_equal_p (local_live, DF_BYTE_LR_IN (bb));
  if (block_changed)
    bitmap_copy (DF_BYTE_LR_IN (bb), local_live);
  BITMAP_FREE (local_live);
  return block_changed;
}

/* Process basic block BB.  Return true if the live_in set has
   changed.  REDO_OUT is true if the info at the bottom of the block
   needs to be recalculated before starting.  AU is the proper set of
   artificial uses.  */

static bool
dce_process_block (basic_block bb, bool redo_out, bitmap au)
{
  bitmap local_live = BITMAP_ALLOC (&dce_tmp_bitmap_obstack);
  rtx insn;
  bool block_changed;
  struct df_ref **def_rec;

  if (redo_out)
    {
      /* Need to redo the live_out set of this block if one of the
         successors of this block has had a change in its live_in
         set.  */
      edge e;
      edge_iterator ei;
      df_confluence_function_n con_fun_n = df_lr->problem->con_fun_n;
      bitmap_clear (DF_LR_OUT (bb));
      FOR_EACH_EDGE (e, ei, bb->succs)
        (*con_fun_n) (e);
    }

  if (dump_file)
    {
      fprintf (dump_file, "processing block %d live out = ", bb->index);
      df_print_regset (dump_file, DF_LR_OUT (bb));
    }

  bitmap_copy (local_live, DF_LR_OUT (bb));

  df_simulate_artificial_refs_at_end (bb, local_live);

  FOR_BB_INSNS_REVERSE (bb, insn)
    if (INSN_P (insn))
      {
        bool needed = false;

        /* The insn is needed if there is someone who uses the output.  */
        for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
          if (bitmap_bit_p (local_live, DF_REF_REGNO (*def_rec))
              || bitmap_bit_p (au, DF_REF_REGNO (*def_rec)))
            {
              needed = true;
              break;
            }

        if (needed)
          mark_insn (insn, true);

        /* No matter if the instruction is needed or not, we remove
           any regno in the defs from the live set.  */
        df_simulate_defs (insn, local_live);

        /* On the other hand, we do not allow the dead uses to set
           anything in local_live.  */
        if (marked_insn_p (insn))
          df_simulate_uses (insn, local_live);
      }

  df_simulate_artificial_refs_at_top (bb, local_live);

  block_changed = !bitmap_equal_p (local_live, DF_LR_IN (bb));
  if (block_changed)
    bitmap_copy (DF_LR_IN (bb), local_live);

  BITMAP_FREE (local_live);
  return block_changed;
}

/* Perform fast DCE once initialization is done.  If BYTE_LEVEL is
   true, use the byte level dce, otherwise do it at the pseudo
   level.  */

static void
fast_dce (bool byte_level)
{
  int *postorder = df_get_postorder (DF_BACKWARD);
  int n_blocks = df_get_n_blocks (DF_BACKWARD);

  /* The set of blocks that have been seen on this iteration.  */
  bitmap processed = BITMAP_ALLOC (&dce_blocks_bitmap_obstack);

  /* The set of blocks that need to have the out vectors reset because
     the in of one of their successors has changed.  */
  bitmap redo_out = BITMAP_ALLOC (&dce_blocks_bitmap_obstack);
  bitmap all_blocks = BITMAP_ALLOC (&dce_blocks_bitmap_obstack);
  bool global_changed = true;

  /* These regs are considered always live so if they end up dying
     because of some def, we need to bring them back again.  Calling
     df_simulate_fixup_sets has the disadvantage of calling
     bb_has_eh_pred once per insn, so we cache the information
     here.  */
  bitmap au = df->regular_block_artificial_uses;
  bitmap au_eh = df->eh_block_artificial_uses;
  int i;

  prescan_insns_for_dce (true);

  for (i = 0; i < n_blocks; i++)
    bitmap_set_bit (all_blocks, postorder[i]);

  while (global_changed)
    {
      global_changed = false;

      for (i = 0; i < n_blocks; i++)
        {
          int index = postorder[i];
          basic_block bb = BASIC_BLOCK (index);
          bool local_changed;

          if (index < NUM_FIXED_BLOCKS)
            {
              bitmap_set_bit (processed, index);
              continue;
            }

          if (byte_level)
            local_changed
              = byte_dce_process_block (bb, bitmap_bit_p (redo_out, index),
                                        bb_has_eh_pred (bb) ? au_eh : au);
          else
            local_changed
              = dce_process_block (bb, bitmap_bit_p (redo_out, index),
                                   bb_has_eh_pred (bb) ? au_eh : au);
          bitmap_set_bit (processed, index);

          if (local_changed)
            {
              edge e;
              edge_iterator ei;
              FOR_EACH_EDGE (e, ei, bb->preds)
                if (bitmap_bit_p (processed, e->src->index))
                  /* Be tricky about when we need to iterate the
                     analysis.  We only have to redo the analysis if
                     the bitmaps change at the top of a block that is
                     the entry to a loop.  */
                  global_changed = true;
                else
                  bitmap_set_bit (redo_out, e->src->index);
            }
        }

      if (global_changed)
        {
          /* Turn off the RUN_DCE flag to prevent recursive calls to
             dce.  */
          int old_flag = df_clear_flags (DF_LR_RUN_DCE);

          /* So something was deleted that requires a redo.  Do it on
             the cheap.  */
          delete_unmarked_insns ();
          sbitmap_zero (marked);
          bitmap_clear (processed);
          bitmap_clear (redo_out);

          /* We do not need to rescan any instructions.  We only need
             to redo the dataflow equations for the blocks that had a
             change at the top of the block.  Then we need to redo the
             iteration.  */
          if (byte_level)
            df_analyze_problem (df_byte_lr, all_blocks, postorder, n_blocks);
          else
            df_analyze_problem (df_lr, all_blocks, postorder, n_blocks);

          if (old_flag & DF_LR_RUN_DCE)
            df_set_flags (DF_LR_RUN_DCE);

          prescan_insns_for_dce (true);
        }
    }

  delete_unmarked_insns ();

  BITMAP_FREE (processed);
  BITMAP_FREE (redo_out);
  BITMAP_FREE (all_blocks);
}

/* Fast register level DCE.  */

static unsigned int
rest_of_handle_fast_dce (void)
{
  init_dce (true);
  fast_dce (false);
  fini_dce (true);
  return 0;
}
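
/* The byte level variant below uses the df_byte_lr problem, so
   liveness is tracked per byte of each register rather than per
   register.  This lets it delete an insn whose defined bytes are all
   dead even when other bytes of the same (multi-word) register are
   still live.  */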

/* Fast byte level DCE.  */

static unsigned int
rest_of_handle_fast_byte_dce (void)
{
  df_byte_lr_add_problem ();
  init_dce (true);
  fast_dce (true);
  fini_dce (true);
  return 0;
}

/* This is an internal call that is used by the df live register
   problem to run fast dce as a side effect of creating the live
   information.  The stack is organized so that the lr problem is run,
   this pass is run, which updates the live info and the df scanning
   info, and then returns to allow the rest of the problems to be run.

   This can be called from elsewhere, but it will not update the bit
   vectors for any problems other than LR.  */

void
run_fast_df_dce (void)
{
  if (flag_dce)
    {
      /* If dce is able to delete something, it has to happen
         immediately.  Otherwise there will be problems handling the
         eq_notes.  */
      enum df_changeable_flags old_flags
        = df_clear_flags (DF_DEFER_INSN_RESCAN + DF_NO_INSN_RESCAN);

      df_in_progress = true;
      rest_of_handle_fast_dce ();
      df_in_progress = false;

      df_set_flags (old_flags);
    }
}

/* Run a fast DCE pass.  */

void
run_fast_dce (void)
{
  if (flag_dce)
    rest_of_handle_fast_dce ();
}

static bool
gate_fast_dce (void)
{
  return optimize > 0 && flag_dce
    && dbg_cnt (dce_fast);
}

struct rtl_opt_pass pass_fast_rtl_dce =
{
 {
  RTL_PASS,
  "dce",                                /* name */
  gate_fast_dce,                        /* gate */
  rest_of_handle_fast_dce,              /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_DCE,                               /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func |
  TODO_df_finish | TODO_verify_rtl_sharing |
  TODO_ggc_collect                      /* todo_flags_finish */
 }
};

struct rtl_opt_pass pass_fast_rtl_byte_dce =
{
 {
  RTL_PASS,
  "byte-dce",                           /* name */
  gate_fast_dce,                        /* gate */
  rest_of_handle_fast_byte_dce,         /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_DCE,                               /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func |
  TODO_df_finish | TODO_verify_rtl_sharing |
  TODO_ggc_collect                      /* todo_flags_finish */
 }
};