/* RTL dead code elimination.
   Copyright (C) 2005, 2006, 2007, 2008, 2009 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
22 #include "coretypes.h"
28 #include "hard-reg-set.h"
35 #include "tree-pass.h"
40 /* -------------------------------------------------------------------------
41 Core mark/delete routines
42 ------------------------------------------------------------------------- */
44 /* True if we are invoked while the df engine is running; in this case,
45 we don't want to reenter it. */
46 static bool df_in_progress
= false;
48 /* Instructions that have been marked but whose dependencies have not
49 yet been processed. */
50 static VEC(rtx
,heap
) *worklist
;
52 /* Bitmap of instructions marked as needed indexed by INSN_UID. */
53 static sbitmap marked
;
55 /* Bitmap obstacks used for block processing by the fast algorithm. */
56 static bitmap_obstack dce_blocks_bitmap_obstack
;
57 static bitmap_obstack dce_tmp_bitmap_obstack
;
59 static bool find_call_stack_args (rtx
, bool, bool, bitmap
);
61 /* A subroutine for which BODY is part of the instruction being tested;
62 either the top-level pattern, or an element of a PARALLEL. The
63 instruction is known not to be a bare USE or CLOBBER. */
66 deletable_insn_p_1 (rtx body
)
68 switch (GET_CODE (body
))
72 /* The UNSPEC case was added here because the ia-64 claims that
73 USEs do not work after reload and generates UNSPECS rather
74 than USEs. Since dce is run after reload we need to avoid
75 deleting these even if they are dead. If it turns out that
76 USEs really do work after reload, the ia-64 should be
77 changed, and the UNSPEC case can be removed. */
82 if (volatile_refs_p (body
))
85 if (flag_non_call_exceptions
&& may_trap_p (body
))
93 /* Return true if INSN is a normal instruction that can be deleted by
97 deletable_insn_p (rtx insn
, bool fast
, bitmap arg_stores
)
103 /* We cannot delete calls inside of the recursive dce because
104 this may cause basic blocks to be deleted and this messes up
105 the rest of the stack of optimization passes. */
107 /* We cannot delete pure or const sibling calls because it is
108 hard to see the result. */
109 && (!SIBLING_CALL_P (insn
))
110 /* We can delete dead const or pure calls as long as they do not
112 && (RTL_CONST_OR_PURE_CALL_P (insn
)
113 && !RTL_LOOPING_CONST_OR_PURE_CALL_P (insn
)))
114 return find_call_stack_args (insn
, false, fast
, arg_stores
);
116 if (!NONJUMP_INSN_P (insn
))
119 /* Similarly, we cannot delete other insns that can throw either. */
120 if (df_in_progress
&& flag_non_call_exceptions
&& can_throw_internal (insn
))
123 body
= PATTERN (insn
);
124 switch (GET_CODE (body
))
133 /* A CLOBBER of a dead pseudo register serves no purpose.
134 That is not necessarily true for hard registers until
137 return REG_P (x
) && (!HARD_REGISTER_P (x
) || reload_completed
);
140 /* Because of the way that use-def chains are built, it is not
141 possible to tell if the clobber is dead because it can
142 never be the target of a use-def chain. */
146 for (i
= XVECLEN (body
, 0) - 1; i
>= 0; i
--)
147 if (!deletable_insn_p_1 (XVECEXP (body
, 0, i
)))
152 return deletable_insn_p_1 (body
);
157 /* Return true if INSN has been marked as needed. */
160 marked_insn_p (rtx insn
)
162 /* Artificial defs are always needed and they do not have an insn.
163 We should never see them here. */
165 return TEST_BIT (marked
, INSN_UID (insn
));
169 /* If INSN has not yet been marked as needed, mark it now, and add it to
173 mark_insn (rtx insn
, bool fast
)
175 if (!marked_insn_p (insn
))
178 VEC_safe_push (rtx
, heap
, worklist
, insn
);
179 SET_BIT (marked
, INSN_UID (insn
));
181 fprintf (dump_file
, " Adding insn %d to worklist\n", INSN_UID (insn
));
184 && !SIBLING_CALL_P (insn
)
185 && (RTL_CONST_OR_PURE_CALL_P (insn
)
186 && !RTL_LOOPING_CONST_OR_PURE_CALL_P (insn
)))
187 find_call_stack_args (insn
, true, fast
, NULL
);
192 /* A note_stores callback used by mark_nonreg_stores. DATA is the
193 instruction containing DEST. */
196 mark_nonreg_stores_1 (rtx dest
, const_rtx pattern
, void *data
)
198 if (GET_CODE (pattern
) != CLOBBER
&& !REG_P (dest
))
199 mark_insn ((rtx
) data
, true);
203 /* A note_stores callback used by mark_nonreg_stores. DATA is the
204 instruction containing DEST. */
207 mark_nonreg_stores_2 (rtx dest
, const_rtx pattern
, void *data
)
209 if (GET_CODE (pattern
) != CLOBBER
&& !REG_P (dest
))
210 mark_insn ((rtx
) data
, false);
214 /* Mark INSN if BODY stores to a non-register destination. */
217 mark_nonreg_stores (rtx body
, rtx insn
, bool fast
)
220 note_stores (body
, mark_nonreg_stores_1
, insn
);
222 note_stores (body
, mark_nonreg_stores_2
, insn
);
226 /* Try to find all stack stores of CALL_INSN arguments if
227 ACCUMULATE_OUTGOING_ARGS. If all stack stores have been found
228 and it is therefore safe to eliminate the call, return true,
229 otherwise return false. This function should be first called
230 with DO_MARK false, and only when the CALL_INSN is actually
231 going to be marked called again with DO_MARK true. */
234 find_call_stack_args (rtx call_insn
, bool do_mark
, bool fast
,
237 rtx p
, insn
, prev_insn
;
239 HOST_WIDE_INT min_sp_off
, max_sp_off
;
242 gcc_assert (CALL_P (call_insn
));
243 if (!ACCUMULATE_OUTGOING_ARGS
)
248 gcc_assert (arg_stores
);
249 bitmap_clear (arg_stores
);
252 min_sp_off
= INTTYPE_MAXIMUM (HOST_WIDE_INT
);
255 /* First determine the minimum and maximum offset from sp for
257 for (p
= CALL_INSN_FUNCTION_USAGE (call_insn
); p
; p
= XEXP (p
, 1))
258 if (GET_CODE (XEXP (p
, 0)) == USE
259 && MEM_P (XEXP (XEXP (p
, 0), 0)))
261 rtx mem
= XEXP (XEXP (p
, 0), 0), addr
, size
;
262 HOST_WIDE_INT off
= 0;
263 size
= MEM_SIZE (mem
);
264 if (size
== NULL_RTX
)
266 addr
= XEXP (mem
, 0);
267 if (GET_CODE (addr
) == PLUS
268 && REG_P (XEXP (addr
, 0))
269 && CONST_INT_P (XEXP (addr
, 1)))
271 off
= INTVAL (XEXP (addr
, 1));
272 addr
= XEXP (addr
, 0);
274 if (addr
!= stack_pointer_rtx
)
278 /* If not fast, use chains to see if addr wasn't set to
283 struct df_link
*defs
;
286 for (use_rec
= DF_INSN_USES (call_insn
); *use_rec
; use_rec
++)
287 if (rtx_equal_p (addr
, DF_REF_REG (*use_rec
)))
290 if (*use_rec
== NULL
)
293 for (defs
= DF_REF_CHAIN (*use_rec
); defs
; defs
= defs
->next
)
294 if (! DF_REF_IS_ARTIFICIAL (defs
->ref
))
300 set
= single_set (DF_REF_INSN (defs
->ref
));
304 if (GET_CODE (SET_SRC (set
)) != PLUS
305 || XEXP (SET_SRC (set
), 0) != stack_pointer_rtx
306 || !CONST_INT_P (XEXP (SET_SRC (set
), 1)))
309 off
+= INTVAL (XEXP (SET_SRC (set
), 1));
314 min_sp_off
= MIN (min_sp_off
, off
);
315 max_sp_off
= MAX (max_sp_off
, off
+ INTVAL (size
));
318 if (min_sp_off
>= max_sp_off
)
320 sp_bytes
= BITMAP_ALLOC (NULL
);
322 /* Set bits in SP_BYTES bitmap for bytes relative to sp + min_sp_off
323 which contain arguments. Checking has been done in the previous
325 for (p
= CALL_INSN_FUNCTION_USAGE (call_insn
); p
; p
= XEXP (p
, 1))
326 if (GET_CODE (XEXP (p
, 0)) == USE
327 && MEM_P (XEXP (XEXP (p
, 0), 0)))
329 rtx mem
= XEXP (XEXP (p
, 0), 0), addr
;
330 HOST_WIDE_INT off
= 0, byte
;
331 addr
= XEXP (mem
, 0);
332 if (GET_CODE (addr
) == PLUS
333 && REG_P (XEXP (addr
, 0))
334 && CONST_INT_P (XEXP (addr
, 1)))
336 off
= INTVAL (XEXP (addr
, 1));
337 addr
= XEXP (addr
, 0);
339 if (addr
!= stack_pointer_rtx
)
342 struct df_link
*defs
;
345 for (use_rec
= DF_INSN_USES (call_insn
); *use_rec
; use_rec
++)
346 if (rtx_equal_p (addr
, DF_REF_REG (*use_rec
)))
349 for (defs
= DF_REF_CHAIN (*use_rec
); defs
; defs
= defs
->next
)
350 if (! DF_REF_IS_ARTIFICIAL (defs
->ref
))
353 set
= single_set (DF_REF_INSN (defs
->ref
));
354 off
+= INTVAL (XEXP (SET_SRC (set
), 1));
356 for (byte
= off
; byte
< off
+ INTVAL (MEM_SIZE (mem
)); byte
++)
358 if (!bitmap_set_bit (sp_bytes
, byte
- min_sp_off
))
363 /* Walk backwards, looking for argument stores. The search stops
364 when seeing another call, sp adjustment or memory store other than
367 for (insn
= PREV_INSN (call_insn
); insn
; insn
= prev_insn
)
370 HOST_WIDE_INT off
, byte
;
372 if (insn
== BB_HEAD (BLOCK_FOR_INSN (call_insn
)))
373 prev_insn
= NULL_RTX
;
375 prev_insn
= PREV_INSN (insn
);
383 set
= single_set (insn
);
384 if (!set
|| SET_DEST (set
) == stack_pointer_rtx
)
387 if (!MEM_P (SET_DEST (set
)))
390 mem
= SET_DEST (set
);
391 addr
= XEXP (mem
, 0);
393 if (GET_CODE (addr
) == PLUS
394 && REG_P (XEXP (addr
, 0))
395 && CONST_INT_P (XEXP (addr
, 1)))
397 off
= INTVAL (XEXP (addr
, 1));
398 addr
= XEXP (addr
, 0);
400 if (addr
!= stack_pointer_rtx
)
407 struct df_link
*defs
;
410 for (use_rec
= DF_INSN_USES (insn
); *use_rec
; use_rec
++)
411 if (rtx_equal_p (addr
, DF_REF_REG (*use_rec
)))
414 if (*use_rec
== NULL
)
417 for (defs
= DF_REF_CHAIN (*use_rec
); defs
; defs
= defs
->next
)
418 if (! DF_REF_IS_ARTIFICIAL (defs
->ref
))
424 set
= single_set (DF_REF_INSN (defs
->ref
));
428 if (GET_CODE (SET_SRC (set
)) != PLUS
429 || XEXP (SET_SRC (set
), 0) != stack_pointer_rtx
430 || !CONST_INT_P (XEXP (SET_SRC (set
), 1)))
433 off
+= INTVAL (XEXP (SET_SRC (set
), 1));
439 if (GET_MODE_SIZE (GET_MODE (mem
)) == 0)
442 for (byte
= off
; byte
< off
+ GET_MODE_SIZE (GET_MODE (mem
)); byte
++)
444 if (byte
< min_sp_off
445 || byte
>= max_sp_off
446 || !bitmap_clear_bit (sp_bytes
, byte
- min_sp_off
))
450 if (!deletable_insn_p (insn
, fast
, NULL
))
454 mark_insn (insn
, fast
);
456 bitmap_set_bit (arg_stores
, INSN_UID (insn
));
458 if (bitmap_empty_p (sp_bytes
))
465 BITMAP_FREE (sp_bytes
);
466 if (!ret
&& arg_stores
)
467 bitmap_clear (arg_stores
);
473 /* Delete all REG_EQUAL notes of the registers INSN writes, to prevent
474 bad dangling REG_EQUAL notes. */
477 delete_corresponding_reg_eq_notes (rtx insn
)
480 for (def_rec
= DF_INSN_DEFS (insn
); *def_rec
; def_rec
++)
482 df_ref def
= *def_rec
;
483 unsigned int regno
= DF_REF_REGNO (def
);
484 /* This loop is a little tricky. We cannot just go down the
485 chain because it is being modified by the actions in the
486 loop. So we just get the head. We plan to drain the list
488 while (DF_REG_EQ_USE_CHAIN (regno
))
490 df_ref eq_use
= DF_REG_EQ_USE_CHAIN (regno
);
491 rtx noted_insn
= DF_REF_INSN (eq_use
);
492 rtx note
= find_reg_note (noted_insn
, REG_EQUAL
, NULL_RTX
);
494 note
= find_reg_note (noted_insn
, REG_EQUIV
, NULL_RTX
);
496 /* This assert is generally triggered when someone deletes a
497 REG_EQUAL or REG_EQUIV note by hacking the list manually
498 rather than calling remove_note. */
500 remove_note (noted_insn
, note
);
506 /* Delete every instruction that hasn't been marked. */
509 delete_unmarked_insns (void)
513 bool must_clean
= false;
515 FOR_EACH_BB_REVERSE (bb
)
516 FOR_BB_INSNS_REVERSE_SAFE (bb
, insn
, next
)
519 /* Always delete no-op moves. */
520 if (noop_move_p (insn
))
523 /* Otherwise rely only on the DCE algorithm. */
524 else if (marked_insn_p (insn
))
527 /* Beware that reaching a dbg counter limit here can result
528 in miscompiled file. This occurs when a group of insns
529 must be deleted together, typically because the kept insn
530 depends on the output from the deleted insn. Deleting
531 this insns in reverse order (both at the bb level and
532 when looking at the blocks) minimizes this, but does not
533 eliminate it, since it is possible for the using insn to
534 be top of a block and the producer to be at the bottom of
535 the block. However, in most cases this will only result
536 in an uninitialized use of an insn that is dead anyway.
538 However, there is one rare case that will cause a
539 miscompile: deletion of non-looping pure and constant
540 calls on a machine where ACCUMULATE_OUTGOING_ARGS is true.
541 In this case it is possible to remove the call, but leave
542 the argument pushes to the stack. Because of the changes
543 to the stack pointer, this will almost always lead to a
549 fprintf (dump_file
, "DCE: Deleting insn %d\n", INSN_UID (insn
));
551 /* Before we delete the insn we have to delete REG_EQUAL notes
552 for the destination regs in order to avoid dangling notes. */
553 delete_corresponding_reg_eq_notes (insn
);
555 /* If a pure or const call is deleted, this may make the cfg
556 have unreachable blocks. We rememeber this and call
557 delete_unreachable_blocks at the end. */
561 /* Now delete the insn. */
562 delete_insn_and_edges (insn
);
565 /* Deleted a pure or const call. */
567 delete_unreachable_blocks ();
571 /* Go through the instructions and mark those whose necessity is not
572 dependent on inter-instruction information. Make sure all other
573 instructions are not marked. */
576 prescan_insns_for_dce (bool fast
)
580 bitmap arg_stores
= NULL
;
583 fprintf (dump_file
, "Finding needed instructions:\n");
585 if (!df_in_progress
&& ACCUMULATE_OUTGOING_ARGS
)
586 arg_stores
= BITMAP_ALLOC (NULL
);
590 FOR_BB_INSNS_REVERSE_SAFE (bb
, insn
, prev
)
593 /* Don't mark argument stores now. They will be marked
594 if needed when the associated CALL is marked. */
595 if (arg_stores
&& bitmap_bit_p (arg_stores
, INSN_UID (insn
)))
597 if (deletable_insn_p (insn
, fast
, arg_stores
))
598 mark_nonreg_stores (PATTERN (insn
), insn
, fast
);
600 mark_insn (insn
, fast
);
602 /* find_call_stack_args only looks at argument stores in the
605 bitmap_clear (arg_stores
);
609 BITMAP_FREE (arg_stores
);
612 fprintf (dump_file
, "Finished finding needed instructions:\n");
616 /* UD-based DSE routines. */
618 /* Mark instructions that define artificially-used registers, such as
619 the frame pointer and the stack pointer. */
622 mark_artificial_uses (void)
625 struct df_link
*defs
;
630 for (use_rec
= df_get_artificial_uses (bb
->index
);
632 for (defs
= DF_REF_CHAIN (*use_rec
); defs
; defs
= defs
->next
)
633 if (! DF_REF_IS_ARTIFICIAL (defs
->ref
))
634 mark_insn (DF_REF_INSN (defs
->ref
), false);
639 /* Mark every instruction that defines a register value that INSN uses. */
642 mark_reg_dependencies (rtx insn
)
644 struct df_link
*defs
;
647 if (DEBUG_INSN_P (insn
))
650 for (use_rec
= DF_INSN_USES (insn
); *use_rec
; use_rec
++)
652 df_ref use
= *use_rec
;
655 fprintf (dump_file
, "Processing use of ");
656 print_simple_rtl (dump_file
, DF_REF_REG (use
));
657 fprintf (dump_file
, " in insn %d:\n", INSN_UID (insn
));
659 for (defs
= DF_REF_CHAIN (use
); defs
; defs
= defs
->next
)
660 if (! DF_REF_IS_ARTIFICIAL (defs
->ref
))
661 mark_insn (DF_REF_INSN (defs
->ref
), false);
666 /* Initialize global variables for a new DCE pass. */
674 df_chain_add_problem (DF_UD_CHAIN
);
683 bitmap_obstack_initialize (&dce_blocks_bitmap_obstack
);
684 bitmap_obstack_initialize (&dce_tmp_bitmap_obstack
);
687 marked
= sbitmap_alloc (get_max_uid () + 1);
688 sbitmap_zero (marked
);
692 /* Free the data allocated by init_dce. */
697 sbitmap_free (marked
);
701 bitmap_obstack_release (&dce_blocks_bitmap_obstack
);
702 bitmap_obstack_release (&dce_tmp_bitmap_obstack
);
707 /* UD-chain based DCE. */
710 rest_of_handle_ud_dce (void)
716 prescan_insns_for_dce (false);
717 mark_artificial_uses ();
718 while (VEC_length (rtx
, worklist
) > 0)
720 insn
= VEC_pop (rtx
, worklist
);
721 mark_reg_dependencies (insn
);
723 VEC_free (rtx
, heap
, worklist
);
725 /* Before any insns are deleted, we must remove the chains since
726 they are not bidirectional. */
727 df_remove_problem (df_chain
);
728 delete_unmarked_insns ();
738 return optimize
> 1 && flag_dce
742 struct rtl_opt_pass pass_ud_rtl_dce
=
747 gate_ud_dce
, /* gate */
748 rest_of_handle_ud_dce
, /* execute */
751 0, /* static_pass_number */
753 0, /* properties_required */
754 0, /* properties_provided */
755 0, /* properties_destroyed */
756 0, /* todo_flags_start */
758 TODO_df_finish
| TODO_verify_rtl_sharing
|
759 TODO_ggc_collect
/* todo_flags_finish */
/* -------------------------------------------------------------------------
   Fast DCE functions
   ------------------------------------------------------------------------- */
768 /* Process basic block BB. Return true if the live_in set has
769 changed. REDO_OUT is true if the info at the bottom of the block
770 needs to be recalculated before starting. AU is the proper set of
774 byte_dce_process_block (basic_block bb
, bool redo_out
, bitmap au
)
776 bitmap local_live
= BITMAP_ALLOC (&dce_tmp_bitmap_obstack
);
783 /* Need to redo the live_out set of this block if when one of
784 the succs of this block has had a change in it live in
788 df_confluence_function_n con_fun_n
= df_byte_lr
->problem
->con_fun_n
;
789 bitmap_clear (DF_BYTE_LR_OUT (bb
));
790 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
796 fprintf (dump_file
, "processing block %d live out = ", bb
->index
);
797 df_print_byte_regset (dump_file
, DF_BYTE_LR_OUT (bb
));
800 bitmap_copy (local_live
, DF_BYTE_LR_OUT (bb
));
802 df_byte_lr_simulate_artificial_refs_at_end (bb
, local_live
);
804 FOR_BB_INSNS_REVERSE (bb
, insn
)
807 /* The insn is needed if there is someone who uses the output. */
808 for (def_rec
= DF_INSN_DEFS (insn
); *def_rec
; def_rec
++)
810 df_ref def
= *def_rec
;
812 unsigned int dregno
= DF_REF_REGNO (def
);
813 unsigned int start
= df_byte_lr_get_regno_start (dregno
);
814 unsigned int len
= df_byte_lr_get_regno_len (dregno
);
818 /* This is one of the only places where DF_MM_MAY should
819 be used for defs. Need to make sure that we are
820 checking for all of the bits that may be used. */
822 if (!df_compute_accessed_bytes (def
, DF_MM_MAY
, &sb
, &lb
))
828 if (bitmap_bit_p (au
, dregno
))
830 mark_insn (insn
, true);
836 if (bitmap_bit_p (local_live
, start
++))
838 mark_insn (insn
, true);
845 /* No matter if the instruction is needed or not, we remove
846 any regno in the defs from the live set. */
847 df_byte_lr_simulate_defs (insn
, local_live
);
849 /* On the other hand, we do not allow the dead uses to set
850 anything in local_live. */
851 if (marked_insn_p (insn
))
852 df_byte_lr_simulate_uses (insn
, local_live
);
856 fprintf (dump_file
, "finished processing insn %d live out = ",
858 df_print_byte_regset (dump_file
, local_live
);
862 df_byte_lr_simulate_artificial_refs_at_top (bb
, local_live
);
864 block_changed
= !bitmap_equal_p (local_live
, DF_BYTE_LR_IN (bb
));
866 bitmap_copy (DF_BYTE_LR_IN (bb
), local_live
);
867 BITMAP_FREE (local_live
);
868 return block_changed
;
872 /* Process basic block BB. Return true if the live_in set has
873 changed. REDO_OUT is true if the info at the bottom of the block
874 needs to be recalculated before starting. AU is the proper set of
878 dce_process_block (basic_block bb
, bool redo_out
, bitmap au
)
880 bitmap local_live
= BITMAP_ALLOC (&dce_tmp_bitmap_obstack
);
887 /* Need to redo the live_out set of this block if when one of
888 the succs of this block has had a change in it live in
892 df_confluence_function_n con_fun_n
= df_lr
->problem
->con_fun_n
;
893 bitmap_clear (DF_LR_OUT (bb
));
894 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
900 fprintf (dump_file
, "processing block %d lr out = ", bb
->index
);
901 df_print_regset (dump_file
, DF_LR_OUT (bb
));
904 bitmap_copy (local_live
, DF_LR_OUT (bb
));
906 df_simulate_initialize_backwards (bb
, local_live
);
908 FOR_BB_INSNS_REVERSE (bb
, insn
)
913 /* The insn is needed if there is someone who uses the output. */
914 for (def_rec
= DF_INSN_DEFS (insn
); *def_rec
; def_rec
++)
915 if (bitmap_bit_p (local_live
, DF_REF_REGNO (*def_rec
))
916 || bitmap_bit_p (au
, DF_REF_REGNO (*def_rec
)))
923 mark_insn (insn
, true);
925 /* No matter if the instruction is needed or not, we remove
926 any regno in the defs from the live set. */
927 df_simulate_defs (insn
, local_live
);
929 /* On the other hand, we do not allow the dead uses to set
930 anything in local_live. */
931 if (marked_insn_p (insn
))
932 df_simulate_uses (insn
, local_live
);
935 df_simulate_finalize_backwards (bb
, local_live
);
937 block_changed
= !bitmap_equal_p (local_live
, DF_LR_IN (bb
));
939 bitmap_copy (DF_LR_IN (bb
), local_live
);
941 BITMAP_FREE (local_live
);
942 return block_changed
;
946 /* Perform fast DCE once initialization is done. If BYTE_LEVEL is
947 true, use the byte level dce, otherwise do it at the pseudo
951 fast_dce (bool byte_level
)
953 int *postorder
= df_get_postorder (DF_BACKWARD
);
954 int n_blocks
= df_get_n_blocks (DF_BACKWARD
);
955 /* The set of blocks that have been seen on this iteration. */
956 bitmap processed
= BITMAP_ALLOC (&dce_blocks_bitmap_obstack
);
957 /* The set of blocks that need to have the out vectors reset because
958 the in of one of their successors has changed. */
959 bitmap redo_out
= BITMAP_ALLOC (&dce_blocks_bitmap_obstack
);
960 bitmap all_blocks
= BITMAP_ALLOC (&dce_blocks_bitmap_obstack
);
961 bool global_changed
= true;
963 /* These regs are considered always live so if they end up dying
964 because of some def, we need to bring the back again. Calling
965 df_simulate_fixup_sets has the disadvantage of calling
966 bb_has_eh_pred once per insn, so we cache the information
968 bitmap au
= df
->regular_block_artificial_uses
;
969 bitmap au_eh
= df
->eh_block_artificial_uses
;
972 prescan_insns_for_dce (true);
974 for (i
= 0; i
< n_blocks
; i
++)
975 bitmap_set_bit (all_blocks
, postorder
[i
]);
977 while (global_changed
)
979 global_changed
= false;
981 for (i
= 0; i
< n_blocks
; i
++)
983 int index
= postorder
[i
];
984 basic_block bb
= BASIC_BLOCK (index
);
987 if (index
< NUM_FIXED_BLOCKS
)
989 bitmap_set_bit (processed
, index
);
995 = byte_dce_process_block (bb
, bitmap_bit_p (redo_out
, index
),
996 bb_has_eh_pred (bb
) ? au_eh
: au
);
999 = dce_process_block (bb
, bitmap_bit_p (redo_out
, index
),
1000 bb_has_eh_pred (bb
) ? au_eh
: au
);
1001 bitmap_set_bit (processed
, index
);
1007 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
1008 if (bitmap_bit_p (processed
, e
->src
->index
))
1009 /* Be tricky about when we need to iterate the
1010 analysis. We only have redo the analysis if the
1011 bitmaps change at the top of a block that is the
1013 global_changed
= true;
1015 bitmap_set_bit (redo_out
, e
->src
->index
);
1021 /* Turn off the RUN_DCE flag to prevent recursive calls to
1023 int old_flag
= df_clear_flags (DF_LR_RUN_DCE
);
1025 /* So something was deleted that requires a redo. Do it on
1027 delete_unmarked_insns ();
1028 sbitmap_zero (marked
);
1029 bitmap_clear (processed
);
1030 bitmap_clear (redo_out
);
1032 /* We do not need to rescan any instructions. We only need
1033 to redo the dataflow equations for the blocks that had a
1034 change at the top of the block. Then we need to redo the
1037 df_analyze_problem (df_byte_lr
, all_blocks
, postorder
, n_blocks
);
1039 df_analyze_problem (df_lr
, all_blocks
, postorder
, n_blocks
);
1041 if (old_flag
& DF_LR_RUN_DCE
)
1042 df_set_flags (DF_LR_RUN_DCE
);
1044 prescan_insns_for_dce (true);
1048 delete_unmarked_insns ();
1050 BITMAP_FREE (processed
);
1051 BITMAP_FREE (redo_out
);
1052 BITMAP_FREE (all_blocks
);
1056 /* Fast register level DCE. */
1059 rest_of_handle_fast_dce (void)
1068 /* Fast byte level DCE. */
1071 rest_of_handle_fast_byte_dce (void)
1073 df_byte_lr_add_problem ();
1081 /* This is an internal call that is used by the df live register
1082 problem to run fast dce as a side effect of creating the live
1083 information. The stack is organized so that the lr problem is run,
1084 this pass is run, which updates the live info and the df scanning
1085 info, and then returns to allow the rest of the problems to be run.
1087 This can be called by elsewhere but it will not update the bit
1088 vectors for any other problems than LR. */
1091 run_fast_df_dce (void)
1095 /* If dce is able to delete something, it has to happen
1096 immediately. Otherwise there will be problems handling the
1099 df_clear_flags (DF_DEFER_INSN_RESCAN
+ DF_NO_INSN_RESCAN
);
1101 df_in_progress
= true;
1102 rest_of_handle_fast_dce ();
1103 df_in_progress
= false;
1105 df_set_flags (old_flags
);
1110 /* Run a fast DCE pass. */
1116 rest_of_handle_fast_dce ();
1121 gate_fast_dce (void)
1123 return optimize
> 0 && flag_dce
1124 && dbg_cnt (dce_fast
);
1127 struct rtl_opt_pass pass_fast_rtl_dce
=
1132 gate_fast_dce
, /* gate */
1133 rest_of_handle_fast_dce
, /* execute */
1136 0, /* static_pass_number */
1138 0, /* properties_required */
1139 0, /* properties_provided */
1140 0, /* properties_destroyed */
1141 0, /* todo_flags_start */
1143 TODO_df_finish
| TODO_verify_rtl_sharing
|
1144 TODO_ggc_collect
/* todo_flags_finish */
1148 struct rtl_opt_pass pass_fast_rtl_byte_dce
=
1152 "byte-dce", /* name */
1153 gate_fast_dce
, /* gate */
1154 rest_of_handle_fast_byte_dce
, /* execute */
1157 0, /* static_pass_number */
1159 0, /* properties_required */
1160 0, /* properties_provided */
1161 0, /* properties_destroyed */
1162 0, /* todo_flags_start */
1164 TODO_df_finish
| TODO_verify_rtl_sharing
|
1165 TODO_ggc_collect
/* todo_flags_finish */