/* RTL dead code elimination.
   Copyright (C) 2005, 2006, 2007, 2008 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "hashtab.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "df.h"
#include "cselib.h"
#include "dce.h"
#include "timevar.h"
#include "tree-pass.h"
#include "dbgcnt.h"

DEF_VEC_I(int);
DEF_VEC_ALLOC_I(int,heap);

/* -------------------------------------------------------------------------
   Core mark/delete routines
   ------------------------------------------------------------------------- */

/* True if we are invoked while the df engine is running; in this case,
   we don't want to reenter it.  */
static bool df_in_progress = false;

/* Instructions that have been marked but whose dependencies have not
   yet been processed.  */
static VEC(rtx,heap) *worklist;

/* Bitmap of instructions marked as needed indexed by INSN_UID.  */
static sbitmap marked;

/* Bitmap obstacks used for block processing by the fast algorithm.  */
static bitmap_obstack dce_blocks_bitmap_obstack;
static bitmap_obstack dce_tmp_bitmap_obstack;
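
/* Of these two obstacks, dce_blocks_bitmap_obstack backs the per-pass
   block sets used by fast_dce (processed, redo_out, all_blocks), while
   dce_tmp_bitmap_obstack backs the short-lived local_live bitmaps that
   the per-block processing routines allocate and free.  */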

/* A subroutine for which BODY is part of the instruction being tested;
   either the top-level pattern, or an element of a PARALLEL.  The
   instruction is known not to be a bare USE or CLOBBER.  */

static bool
deletable_insn_p_1 (rtx body)
{
  switch (GET_CODE (body))
    {
    case PREFETCH:
    case TRAP_IF:
      /* The UNSPEC case was added here because the ia-64 claims that
         USEs do not work after reload and generates UNSPECS rather
         than USEs.  Since dce is run after reload we need to avoid
         deleting these even if they are dead.  If it turns out that
         USEs really do work after reload, the ia-64 should be
         changed, and the UNSPEC case can be removed.  */
    case UNSPEC:
      return false;

    default:
      if (volatile_refs_p (body))
        return false;

      if (flag_non_call_exceptions && may_trap_p (body))
        return false;

      return true;
    }
}

/* Return true if INSN is a normal instruction that can be deleted by
   the DCE pass.  */

static bool
deletable_insn_p (rtx insn, bool fast)
{
  rtx body, x;
  int i;

  if (CALL_P (insn)
      /* We cannot delete calls inside of the recursive dce because
         this may cause basic blocks to be deleted and this messes up
         the rest of the stack of optimization passes.  */
      && (!df_in_progress)
      /* We cannot delete pure or const sibling calls because it is
         hard to see the result.  */
      && (!SIBLING_CALL_P (insn))
      /* We can delete dead const or pure calls as long as they do not
         loop infinitely.  */
      && (RTL_CONST_OR_PURE_CALL_P (insn)
          && !RTL_LOOPING_CONST_OR_PURE_CALL_P (insn)))
    return true;

  if (!NONJUMP_INSN_P (insn))
    return false;

  body = PATTERN (insn);
  switch (GET_CODE (body))
    {
    case USE:
      return false;

    case CLOBBER:
      if (fast)
        {
          /* A CLOBBER of a dead pseudo register serves no purpose.
             That is not necessarily true for hard registers until
             after reload.  */
          x = XEXP (body, 0);
          return REG_P (x) && (!HARD_REGISTER_P (x) || reload_completed);
        }
      else
        /* Because of the way that use-def chains are built, it is not
           possible to tell if the clobber is dead because it can
           never be the target of a use-def chain.  */
        return false;

    case PARALLEL:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        if (!deletable_insn_p_1 (XVECEXP (body, 0, i)))
          return false;
      return true;

    default:
      return deletable_insn_p_1 (body);
    }
}

/* Return true if INSN has been marked as needed.  */

static inline int
marked_insn_p (rtx insn)
{
  /* Artificial defs are always needed and they do not have an insn.
     We should never see them here.  */
  gcc_assert (insn);
  return TEST_BIT (marked, INSN_UID (insn));
}

/* If INSN has not yet been marked as needed, mark it now, and add it to
   the worklist.  */

static void
mark_insn (rtx insn, bool fast)
{
  if (!marked_insn_p (insn))
    {
      if (!fast)
        VEC_safe_push (rtx, heap, worklist, insn);
      SET_BIT (marked, INSN_UID (insn));
      if (dump_file)
        fprintf (dump_file, "  Adding insn %d to worklist\n", INSN_UID (insn));
    }
}
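
/* Note that in the fast algorithm the worklist is never populated:
   neededness is propagated by the backwards liveness simulation in
   dce_process_block and byte_dce_process_block rather than by walking
   use-def chains from the worklist.  */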

/* A note_stores callback used by mark_nonreg_stores.  DATA is the
   instruction containing DEST.  */

static void
mark_nonreg_stores_1 (rtx dest, const_rtx pattern, void *data)
{
  if (GET_CODE (pattern) != CLOBBER && !REG_P (dest))
    mark_insn ((rtx) data, true);
}


/* A note_stores callback used by mark_nonreg_stores.  DATA is the
   instruction containing DEST.  */

static void
mark_nonreg_stores_2 (rtx dest, const_rtx pattern, void *data)
{
  if (GET_CODE (pattern) != CLOBBER && !REG_P (dest))
    mark_insn ((rtx) data, false);
}


/* Mark INSN if BODY stores to a non-register destination.  */

static void
mark_nonreg_stores (rtx body, rtx insn, bool fast)
{
  if (fast)
    note_stores (body, mark_nonreg_stores_1, insn);
  else
    note_stores (body, mark_nonreg_stores_2, insn);
}
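
/* Stores to memory and other non-register destinations are therefore
   always treated as needed: neither algorithm in this file tracks
   memory, so only instructions whose visible effect is writing
   registers are candidates for deletion.  */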

/* Delete all REG_EQUAL notes of the registers INSN writes, to prevent
   bad dangling REG_EQUAL notes.  */

static void
delete_corresponding_reg_eq_notes (rtx insn)
{
  struct df_ref **def_rec;
  for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
    {
      struct df_ref *def = *def_rec;
      unsigned int regno = DF_REF_REGNO (def);
      /* This loop is a little tricky.  We cannot just go down the
         chain because it is being modified by the actions in the
         loop.  So we just get the head.  We plan to drain the list
         anyway.  */
      while (DF_REG_EQ_USE_CHAIN (regno))
        {
          struct df_ref *eq_use = DF_REG_EQ_USE_CHAIN (regno);
          rtx noted_insn = DF_REF_INSN (eq_use);
          rtx note = find_reg_note (noted_insn, REG_EQUAL, NULL_RTX);
          if (!note)
            note = find_reg_note (noted_insn, REG_EQUIV, NULL_RTX);

          /* This assert is generally triggered when someone deletes a
             REG_EQUAL or REG_EQUIV note by hacking the list manually
             rather than calling remove_note.  */
          gcc_assert (note);
          remove_note (noted_insn, note);
        }
    }
}

/* Delete every instruction that hasn't been marked.  */

static void
delete_unmarked_insns (void)
{
  basic_block bb;
  rtx insn, next;
  bool must_clean = false;

  FOR_EACH_BB (bb)
    FOR_BB_INSNS_SAFE (bb, insn, next)
      if (INSN_P (insn))
        {
          /* Always delete no-op moves.  */
          if (noop_move_p (insn))
            ;

          /* Otherwise rely only on the DCE algorithm.  */
          else if (marked_insn_p (insn))
            continue;

          if (!dbg_cnt (dce))
            continue;

          if (dump_file)
            fprintf (dump_file, "DCE: Deleting insn %d\n", INSN_UID (insn));

          /* Before we delete the insn we have to delete REG_EQUAL notes
             for the destination regs in order to avoid dangling notes.  */
          delete_corresponding_reg_eq_notes (insn);

          /* If a pure or const call is deleted, this may make the cfg
             have unreachable blocks.  We remember this and call
             delete_unreachable_blocks at the end.  */
          if (CALL_P (insn))
            must_clean = true;

          /* Now delete the insn.  */
          delete_insn_and_edges (insn);
        }

  /* Deleted a pure or const call.  */
  if (must_clean)
    delete_unreachable_blocks ();
}

/* Go through the instructions and mark those whose necessity is not
   dependent on inter-instruction information.  Make sure all other
   instructions are not marked.  */

static void
prescan_insns_for_dce (bool fast)
{
  basic_block bb;
  rtx insn, next;

  if (dump_file)
    fprintf (dump_file, "Finding needed instructions:\n");

  FOR_EACH_BB (bb)
    FOR_BB_INSNS_SAFE (bb, insn, next)
      if (INSN_P (insn))
        {
          if (deletable_insn_p (insn, fast))
            mark_nonreg_stores (PATTERN (insn), insn, fast);
          else
            mark_insn (insn, fast);
        }

  if (dump_file)
    fprintf (dump_file, "Finished finding needed instructions:\n");
}

/* UD-based DCE routines.  */

/* Mark instructions that define artificially-used registers, such as
   the frame pointer and the stack pointer.  */

static void
mark_artificial_uses (void)
{
  basic_block bb;
  struct df_link *defs;
  struct df_ref **use_rec;

  FOR_ALL_BB (bb)
    {
      for (use_rec = df_get_artificial_uses (bb->index);
           *use_rec; use_rec++)
        for (defs = DF_REF_CHAIN (*use_rec); defs; defs = defs->next)
          if (! DF_REF_IS_ARTIFICIAL (defs->ref))
            mark_insn (DF_REF_INSN (defs->ref), false);
    }
}

/* Mark every instruction that defines a register value that INSN uses.  */

static void
mark_reg_dependencies (rtx insn)
{
  struct df_link *defs;
  struct df_ref **use_rec;

  for (use_rec = DF_INSN_USES (insn); *use_rec; use_rec++)
    {
      struct df_ref *use = *use_rec;
      if (dump_file)
        {
          fprintf (dump_file, "Processing use of ");
          print_simple_rtl (dump_file, DF_REF_REG (use));
          fprintf (dump_file, " in insn %d:\n", INSN_UID (insn));
        }
      for (defs = DF_REF_CHAIN (use); defs; defs = defs->next)
        if (! DF_REF_IS_ARTIFICIAL (defs->ref))
          mark_insn (DF_REF_INSN (defs->ref), false);
    }
}
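
/* This routine is the consumer of the worklist built by mark_insn:
   rest_of_handle_ud_dce pops each newly marked insn and calls
   mark_reg_dependencies on it, so neededness flows backwards along the
   use-def chains until the worklist drains.  */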

/* Initialize global variables for a new DCE pass.  */

static void
init_dce (bool fast)
{
  if (!df_in_progress)
    {
      if (!fast)
        df_chain_add_problem (DF_UD_CHAIN);
      df_analyze ();
    }

  if (dump_file)
    df_dump (dump_file);

  if (fast)
    {
      bitmap_obstack_initialize (&dce_blocks_bitmap_obstack);
      bitmap_obstack_initialize (&dce_tmp_bitmap_obstack);
    }

  marked = sbitmap_alloc (get_max_uid () + 1);
  sbitmap_zero (marked);
}

/* Free the data allocated by init_dce.  */

static void
fini_dce (bool fast)
{
  sbitmap_free (marked);

  if (fast)
    {
      bitmap_obstack_release (&dce_blocks_bitmap_obstack);
      bitmap_obstack_release (&dce_tmp_bitmap_obstack);
    }
}

/* UD-chain based DCE.  */

static unsigned int
rest_of_handle_ud_dce (void)
{
  rtx insn;

  init_dce (false);

  prescan_insns_for_dce (false);
  mark_artificial_uses ();
  while (VEC_length (rtx, worklist) > 0)
    {
      insn = VEC_pop (rtx, worklist);
      mark_reg_dependencies (insn);
    }
  VEC_free (rtx, heap, worklist);

  /* Before any insns are deleted, we must remove the chains since
     they are not bidirectional.  */
  df_remove_problem (df_chain);
  delete_unmarked_insns ();

  fini_dce (false);
  return 0;
}

static bool
gate_ud_dce (void)
{
  return optimize > 1 && flag_dce
    && dbg_cnt (dce_ud);
}

struct rtl_opt_pass pass_ud_rtl_dce =
{
 {
  RTL_PASS,
  "dce",                                /* name */
  gate_ud_dce,                          /* gate */
  rest_of_handle_ud_dce,                /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_DCE,                               /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func |
  TODO_df_finish | TODO_verify_rtl_sharing |
  TODO_ggc_collect                      /* todo_flags_finish */
 }
};

/* -------------------------------------------------------------------------
   Fast DCE functions
   ------------------------------------------------------------------------- */

/* Process basic block BB.  Return true if the live_in set has
   changed.  REDO_OUT is true if the info at the bottom of the block
   needs to be recalculated before starting.  AU is the proper set of
   artificial uses.  */

static bool
byte_dce_process_block (basic_block bb, bool redo_out, bitmap au)
{
  bitmap local_live = BITMAP_ALLOC (&dce_tmp_bitmap_obstack);
  rtx insn;
  bool block_changed;
  struct df_ref **def_rec;

  if (redo_out)
    {
      /* We need to redo the live_out set of this block if one of its
         successors has had a change in its live_in set.  */
      edge e;
      edge_iterator ei;
      df_confluence_function_n con_fun_n = df_byte_lr->problem->con_fun_n;
      bitmap_clear (DF_BYTE_LR_OUT (bb));
      FOR_EACH_EDGE (e, ei, bb->succs)
        (*con_fun_n) (e);
    }

  if (dump_file)
    {
      fprintf (dump_file, "processing block %d live out = ", bb->index);
      df_print_byte_regset (dump_file, DF_BYTE_LR_OUT (bb));
    }

  bitmap_copy (local_live, DF_BYTE_LR_OUT (bb));

  df_byte_lr_simulate_artificial_refs_at_end (bb, local_live);

  FOR_BB_INSNS_REVERSE (bb, insn)
    if (INSN_P (insn))
      {
        /* The insn is needed if there is someone who uses the output.  */
        for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
          {
            struct df_ref *def = *def_rec;
            unsigned int last;
            unsigned int dregno = DF_REF_REGNO (def);
            unsigned int start = df_byte_lr_get_regno_start (dregno);
            unsigned int len = df_byte_lr_get_regno_len (dregno);

            unsigned int sb;
            unsigned int lb;
            /* This is one of the only places where DF_MM_MAY should
               be used for defs.  Need to make sure that we are
               checking for all of the bits that may be used.  */

            if (!df_compute_accessed_bytes (def, DF_MM_MAY, &sb, &lb))
              {
                start += sb;
                len = lb - sb;
              }

            if (bitmap_bit_p (au, dregno))
              {
                mark_insn (insn, true);
                goto quickexit;
              }

            last = start + len;
            while (start < last)
              if (bitmap_bit_p (local_live, start++))
                {
                  mark_insn (insn, true);
                  goto quickexit;
                }
          }

      quickexit:

        /* No matter if the instruction is needed or not, we remove
           any regno in the defs from the live set.  */
        df_byte_lr_simulate_defs (insn, local_live);

        /* On the other hand, we do not allow the dead uses to set
           anything in local_live.  */
        if (marked_insn_p (insn))
          df_byte_lr_simulate_uses (insn, local_live);

        if (dump_file)
          {
            fprintf (dump_file, "finished processing insn %d live out = ",
                     INSN_UID (insn));
            df_print_byte_regset (dump_file, local_live);
          }
      }

  df_byte_lr_simulate_artificial_refs_at_top (bb, local_live);

  block_changed = !bitmap_equal_p (local_live, DF_BYTE_LR_IN (bb));
  if (block_changed)
    bitmap_copy (DF_BYTE_LR_IN (bb), local_live);
  BITMAP_FREE (local_live);
  return block_changed;
}
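
/* byte_dce_process_block mirrors dce_process_block below, but tracks
   liveness at the byte granularity provided by the df_byte_lr problem,
   so a def whose register is live can still be found dead when none of
   the bytes it actually writes are live.  */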

/* Process basic block BB.  Return true if the live_in set has
   changed.  REDO_OUT is true if the info at the bottom of the block
   needs to be recalculated before starting.  AU is the proper set of
   artificial uses.  */

static bool
dce_process_block (basic_block bb, bool redo_out, bitmap au)
{
  bitmap local_live = BITMAP_ALLOC (&dce_tmp_bitmap_obstack);
  rtx insn;
  bool block_changed;
  struct df_ref **def_rec;

  if (redo_out)
    {
      /* We need to redo the live_out set of this block if one of its
         successors has had a change in its live_in set.  */
      edge e;
      edge_iterator ei;
      df_confluence_function_n con_fun_n = df_lr->problem->con_fun_n;
      bitmap_clear (DF_LR_OUT (bb));
      FOR_EACH_EDGE (e, ei, bb->succs)
        (*con_fun_n) (e);
    }

  if (dump_file)
    {
      fprintf (dump_file, "processing block %d live out = ", bb->index);
      df_print_regset (dump_file, DF_LR_OUT (bb));
    }

  bitmap_copy (local_live, DF_LR_OUT (bb));

  df_simulate_artificial_refs_at_end (bb, local_live);

  FOR_BB_INSNS_REVERSE (bb, insn)
    if (INSN_P (insn))
      {
        bool needed = false;

        /* The insn is needed if there is someone who uses the output.  */
        for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
          if (bitmap_bit_p (local_live, DF_REF_REGNO (*def_rec))
              || bitmap_bit_p (au, DF_REF_REGNO (*def_rec)))
            {
              needed = true;
              break;
            }

        if (needed)
          mark_insn (insn, true);

        /* No matter if the instruction is needed or not, we remove
           any regno in the defs from the live set.  */
        df_simulate_defs (insn, local_live);

        /* On the other hand, we do not allow the dead uses to set
           anything in local_live.  */
        if (marked_insn_p (insn))
          df_simulate_uses (insn, local_live);
      }

  df_simulate_artificial_refs_at_top (bb, local_live);

  block_changed = !bitmap_equal_p (local_live, DF_LR_IN (bb));
  if (block_changed)
    bitmap_copy (DF_LR_IN (bb), local_live);

  BITMAP_FREE (local_live);
  return block_changed;
}
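
/* The boolean returned here (and by byte_dce_process_block) reports
   whether the block's live_in set changed; fast_dce uses it to decide
   which predecessor blocks need their live_out recomputed and whether
   another global iteration is required.  */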

/* Perform fast DCE once initialization is done.  If BYTE_LEVEL is
   true, use the byte level dce, otherwise do it at the pseudo
   level.  */

static void
fast_dce (bool byte_level)
{
  int *postorder = df_get_postorder (DF_BACKWARD);
  int n_blocks = df_get_n_blocks (DF_BACKWARD);

  /* The set of blocks that have been seen on this iteration.  */
  bitmap processed = BITMAP_ALLOC (&dce_blocks_bitmap_obstack);
  /* The set of blocks that need to have the out vectors reset because
     the in of one of their successors has changed.  */
  bitmap redo_out = BITMAP_ALLOC (&dce_blocks_bitmap_obstack);
  bitmap all_blocks = BITMAP_ALLOC (&dce_blocks_bitmap_obstack);
  bool global_changed = true;

  /* These regs are considered always live so if they end up dying
     because of some def, we need to bring them back again.  Calling
     df_simulate_fixup_sets has the disadvantage of calling
     bb_has_eh_pred once per insn, so we cache the information
     here.  */
  bitmap au = df->regular_block_artificial_uses;
  bitmap au_eh = df->eh_block_artificial_uses;
  int i;

  prescan_insns_for_dce (true);

  for (i = 0; i < n_blocks; i++)
    bitmap_set_bit (all_blocks, postorder[i]);

  while (global_changed)
    {
      global_changed = false;

      for (i = 0; i < n_blocks; i++)
        {
          int index = postorder[i];
          basic_block bb = BASIC_BLOCK (index);
          bool local_changed;

          if (index < NUM_FIXED_BLOCKS)
            {
              bitmap_set_bit (processed, index);
              continue;
            }

          if (byte_level)
            local_changed
              = byte_dce_process_block (bb, bitmap_bit_p (redo_out, index),
                                        bb_has_eh_pred (bb) ? au_eh : au);
          else
            local_changed
              = dce_process_block (bb, bitmap_bit_p (redo_out, index),
                                   bb_has_eh_pred (bb) ? au_eh : au);
          bitmap_set_bit (processed, index);

          if (local_changed)
            {
              edge e;
              edge_iterator ei;
              FOR_EACH_EDGE (e, ei, bb->preds)
                if (bitmap_bit_p (processed, e->src->index))
                  /* Be tricky about when we need to iterate the
                     analysis.  We only have to redo the analysis if the
                     bitmaps change at the top of a block that is the
                     entry to a loop.  */
                  global_changed = true;
                else
                  bitmap_set_bit (redo_out, e->src->index);
            }
        }

      if (global_changed)
        {
          /* Turn off the RUN_DCE flag to prevent recursive calls to
             dce.  */
          int old_flag = df_clear_flags (DF_LR_RUN_DCE);

          /* So something was deleted that requires a redo.  Do it on
             the cheap.  */
          delete_unmarked_insns ();
          sbitmap_zero (marked);
          bitmap_clear (processed);
          bitmap_clear (redo_out);

          /* We do not need to rescan any instructions.  We only need
             to redo the dataflow equations for the blocks that had a
             change at the top of the block.  Then we need to redo the
             iteration.  */
          if (byte_level)
            df_analyze_problem (df_byte_lr, all_blocks, postorder, n_blocks);
          else
            df_analyze_problem (df_lr, all_blocks, postorder, n_blocks);

          if (old_flag & DF_LR_RUN_DCE)
            df_set_flags (DF_LR_RUN_DCE);

          prescan_insns_for_dce (true);
        }
    }

  delete_unmarked_insns ();

  BITMAP_FREE (processed);
  BITMAP_FREE (redo_out);
  BITMAP_FREE (all_blocks);
}

/* Fast register level DCE.  */

static unsigned int
rest_of_handle_fast_dce (void)
{
  init_dce (true);
  fast_dce (false);
  fini_dce (true);
  return 0;
}

/* Fast byte level DCE.  */

static unsigned int
rest_of_handle_fast_byte_dce (void)
{
  df_byte_lr_add_problem ();
  init_dce (true);
  fast_dce (true);
  fini_dce (true);
  return 0;
}

/* This is an internal call that is used by the df live register
   problem to run fast dce as a side effect of creating the live
   information.  The stack is organized so that the lr problem is run,
   this pass is run, which updates the live info and the df scanning
   info, and then returns to allow the rest of the problems to be run.

   This can be called from elsewhere but it will not update the bit
   vectors for any other problems than LR.  */

void
run_fast_df_dce (void)
{
  if (flag_dce)
    {
      /* If dce is able to delete something, it has to happen
         immediately.  Otherwise there will be problems handling the
         eq_notes.  */
      enum df_changeable_flags old_flags
        = df_clear_flags (DF_DEFER_INSN_RESCAN + DF_NO_INSN_RESCAN);

      df_in_progress = true;
      rest_of_handle_fast_dce ();
      df_in_progress = false;

      df_set_flags (old_flags);
    }
}

/* Run a fast DCE pass.  */

void
run_fast_dce (void)
{
  if (flag_dce)
    rest_of_handle_fast_dce ();
}

static bool
gate_fast_dce (void)
{
  return optimize > 0 && flag_dce
    && dbg_cnt (dce_fast);
}

struct rtl_opt_pass pass_fast_rtl_dce =
{
 {
  RTL_PASS,
  "dce",                                /* name */
  gate_fast_dce,                        /* gate */
  rest_of_handle_fast_dce,              /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_DCE,                               /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func |
  TODO_df_finish | TODO_verify_rtl_sharing |
  TODO_ggc_collect                      /* todo_flags_finish */
 }
};

struct rtl_opt_pass pass_fast_rtl_byte_dce =
{
 {
  RTL_PASS,
  "byte-dce",                           /* name */
  gate_fast_dce,                        /* gate */
  rest_of_handle_fast_byte_dce,         /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_DCE,                               /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func |
  TODO_df_finish | TODO_verify_rtl_sharing |
  TODO_ggc_collect                      /* todo_flags_finish */
 }
};