/* RTL dead code elimination.
   Copyright (C) 2005-2019 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "df.h"
#include "emit-rtl.h"  /* FIXME: Can go away once crtl is moved to rtl.h.  */
#include "cfgcleanup.h"
#include "dce.h"
#include "valtrack.h"
#include "tree-pass.h"
#include "dbgcnt.h"

/* -------------------------------------------------------------------------
   Core mark/delete routines
   ------------------------------------------------------------------------- */

/* True if we are invoked while the df engine is running; in this case,
   we don't want to reenter it.  */
static bool df_in_progress = false;

/* True if we are allowed to alter the CFG in this pass.  */
static bool can_alter_cfg = false;

/* Instructions that have been marked but whose dependencies have not
   yet been processed.  */
static vec<rtx_insn *> worklist;

/* Bitmap of instructions marked as needed indexed by INSN_UID.  */
static sbitmap marked;

/* Bitmap obstacks used for block processing by the fast algorithm.  */
static bitmap_obstack dce_blocks_bitmap_obstack;
static bitmap_obstack dce_tmp_bitmap_obstack;

static bool find_call_stack_args (rtx_call_insn *, bool, bool, bitmap);
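
/* A rough sketch of the overall scheme (see rest_of_handle_ud_dce and
   fast_dce below): prescan_insns_for_dce marks instructions that are
   obviously necessary, marking an insn pushes it on WORKLIST (in the
   UD-chain variant), the worklist is drained by marking everything each
   popped insn depends on, and delete_unmarked_insns finally sweeps away
   whatever never got its bit set in MARKED.  */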

/* A subroutine for which BODY is part of the instruction being tested;
   either the top-level pattern, or an element of a PARALLEL.  The
   instruction is known not to be a bare USE or CLOBBER.  */

static bool
deletable_insn_p_1 (rtx body)
{
  switch (GET_CODE (body))
    {
      /* The UNSPEC case was added here because the ia-64 claims that
         USEs do not work after reload and generates UNSPECS rather
         than USEs.  Since dce is run after reload we need to avoid
         deleting these even if they are dead.  If it turns out that
         USEs really do work after reload, the ia-64 should be
         changed, and the UNSPEC case can be removed.  */
    case UNSPEC:
      return false;

    default:
      return !volatile_refs_p (body);
    }
}

/* Return true if INSN is a normal instruction that can be deleted by
   the DCE pass.  */

static bool
deletable_insn_p (rtx_insn *insn, bool fast, bitmap arg_stores)
{
  rtx body, x;
  int i;
  df_ref def;

  if (CALL_P (insn)
      /* We cannot delete calls inside of the recursive dce because
         this may cause basic blocks to be deleted and this messes up
         the rest of the stack of optimization passes.  */
      && (!df_in_progress)
      /* We cannot delete pure or const sibling calls because it is
         hard to see the result.  */
      && (!SIBLING_CALL_P (insn))
      /* We can delete dead const or pure calls as long as they do not
         infinite loop.  */
      && (RTL_CONST_OR_PURE_CALL_P (insn)
          && !RTL_LOOPING_CONST_OR_PURE_CALL_P (insn))
      /* Don't delete calls that may throw if we cannot do so.  */
      && ((cfun->can_delete_dead_exceptions && can_alter_cfg)
          || insn_nothrow_p (insn)))
    return find_call_stack_args (as_a <rtx_call_insn *> (insn), false,
                                 fast, arg_stores);

  /* Don't delete jumps, notes and the like.  */
  if (!NONJUMP_INSN_P (insn))
    return false;

  /* Don't delete insns that may throw if we cannot do so.  */
  if (!(cfun->can_delete_dead_exceptions && can_alter_cfg)
      && !insn_nothrow_p (insn))
    return false;

  /* If INSN sets a global_reg, leave it untouched.  */
  FOR_EACH_INSN_DEF (def, insn)
    if (HARD_REGISTER_NUM_P (DF_REF_REGNO (def))
        && global_regs[DF_REF_REGNO (def)])
      return false;
    /* Initialization of pseudo PIC register should never be removed.  */
    else if (DF_REF_REG (def) == pic_offset_table_rtx
             && REGNO (pic_offset_table_rtx) >= FIRST_PSEUDO_REGISTER)
      return false;

  /* Callee-save restores are needed.  */
  if (RTX_FRAME_RELATED_P (insn)
      && crtl->shrink_wrapped_separate
      && find_reg_note (insn, REG_CFA_RESTORE, NULL))
    return false;

  body = PATTERN (insn);
  switch (GET_CODE (body))
    {
    case USE:
    case VAR_LOCATION:
      return false;

    case CLOBBER:
      if (fast)
        {
          /* A CLOBBER of a dead pseudo register serves no purpose.
             That is not necessarily true for hard registers until
             after reload.  */
          x = XEXP (body, 0);
          return REG_P (x) && (!HARD_REGISTER_P (x) || reload_completed);
        }
      else
        /* Because of the way that use-def chains are built, it is not
           possible to tell if the clobber is dead because it can
           never be the target of a use-def chain.  */
        return false;

    case PARALLEL:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        if (!deletable_insn_p_1 (XVECEXP (body, 0, i)))
          return false;
      return true;

    default:
      return deletable_insn_p_1 (body);
    }
}

/* Return true if INSN has been marked as needed.  */

static bool
marked_insn_p (rtx_insn *insn)
{
  /* Artificial defs are always needed and they do not have an insn.
     We should never see them here.  */
  gcc_assert (insn);
  return bitmap_bit_p (marked, INSN_UID (insn));
}

/* If INSN has not yet been marked as needed, mark it now, and add it to
   the worklist.  */

static void
mark_insn (rtx_insn *insn, bool fast)
{
  if (!marked_insn_p (insn))
    {
      if (!fast)
        worklist.safe_push (insn);
      bitmap_set_bit (marked, INSN_UID (insn));
      if (dump_file)
        fprintf (dump_file, " Adding insn %d to worklist\n", INSN_UID (insn));
      if (CALL_P (insn)
          && !SIBLING_CALL_P (insn)
          && (RTL_CONST_OR_PURE_CALL_P (insn)
              && !RTL_LOOPING_CONST_OR_PURE_CALL_P (insn))
          && ((cfun->can_delete_dead_exceptions && can_alter_cfg)
              || insn_nothrow_p (insn)))
        find_call_stack_args (as_a <rtx_call_insn *> (insn), true, fast, NULL);
    }
}

/* A note_stores callback used by mark_nonreg_stores.  DATA is the
   instruction containing DEST.  */

static void
mark_nonreg_stores_1 (rtx dest, const_rtx pattern, void *data)
{
  if (GET_CODE (pattern) != CLOBBER && !REG_P (dest))
    {
      gcc_checking_assert (GET_CODE (pattern) != CLOBBER_HIGH);
      mark_insn ((rtx_insn *) data, true);
    }
}


/* A note_stores callback used by mark_nonreg_stores.  DATA is the
   instruction containing DEST.  */

static void
mark_nonreg_stores_2 (rtx dest, const_rtx pattern, void *data)
{
  if (GET_CODE (pattern) != CLOBBER && !REG_P (dest))
    {
      gcc_checking_assert (GET_CODE (pattern) != CLOBBER_HIGH);
      mark_insn ((rtx_insn *) data, false);
    }
}


/* Mark INSN if BODY stores to a non-register destination.  */

static void
mark_nonreg_stores (rtx body, rtx_insn *insn, bool fast)
{
  if (fast)
    note_stores (body, mark_nonreg_stores_1, insn);
  else
    note_stores (body, mark_nonreg_stores_2, insn);
}

/* Return true if a store to SIZE bytes, starting OFF bytes from stack pointer,
   is a call argument store, and clear corresponding bits from SP_BYTES
   bitmap if it is.  */

static bool
check_argument_store (HOST_WIDE_INT size, HOST_WIDE_INT off,
                      HOST_WIDE_INT min_sp_off, HOST_WIDE_INT max_sp_off,
                      bitmap sp_bytes)
{
  HOST_WIDE_INT byte;

  for (byte = off; byte < off + size; byte++)
    {
      if (byte < min_sp_off
          || byte >= max_sp_off
          || !bitmap_clear_bit (sp_bytes, byte - min_sp_off))
        return false;
    }
  return true;
}
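
/* An illustrative (made-up) example: for a call whose argument area spans
   sp+0 .. sp+15, SP_BYTES starts with bits 0..15 set.  A store of 8 bytes
   at sp+8 clears bits 8..15; once every byte of the argument area has been
   matched by a store, SP_BYTES becomes empty and find_call_stack_args
   below knows it has found all of the argument stores.  */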

/* Try to find all stack stores of CALL_INSN arguments if
   ACCUMULATE_OUTGOING_ARGS.  If all stack stores have been found
   and it is therefore safe to eliminate the call, return true,
   otherwise return false.  This function should be called first
   with DO_MARK false, and, only when the CALL_INSN is actually
   going to be marked, called again with DO_MARK true.  */

static bool
find_call_stack_args (rtx_call_insn *call_insn, bool do_mark, bool fast,
                      bitmap arg_stores)
{
  rtx p;
  rtx_insn *insn, *prev_insn;
  bool ret;
  HOST_WIDE_INT min_sp_off, max_sp_off;
  bitmap sp_bytes;

  gcc_assert (CALL_P (call_insn));
  if (!ACCUMULATE_OUTGOING_ARGS)
    return true;

  if (!do_mark)
    {
      gcc_assert (arg_stores);
      bitmap_clear (arg_stores);
    }

  min_sp_off = INTTYPE_MAXIMUM (HOST_WIDE_INT);
  max_sp_off = 0;

  /* First determine the minimum and maximum offset from sp for
     stored arguments.  */
  for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
    if (GET_CODE (XEXP (p, 0)) == USE
        && MEM_P (XEXP (XEXP (p, 0), 0)))
      {
        rtx mem = XEXP (XEXP (p, 0), 0), addr;
        HOST_WIDE_INT off = 0, size;
        if (!MEM_SIZE_KNOWN_P (mem) || !MEM_SIZE (mem).is_constant (&size))
          return false;
        addr = XEXP (mem, 0);
        if (GET_CODE (addr) == PLUS
            && REG_P (XEXP (addr, 0))
            && CONST_INT_P (XEXP (addr, 1)))
          {
            off = INTVAL (XEXP (addr, 1));
            addr = XEXP (addr, 0);
          }
        if (addr != stack_pointer_rtx)
          {
            /* If not fast, use chains to see if addr wasn't set to
               sp + offset.  */
            if (!fast)
              {
                df_ref use;
                struct df_link *defs;
                rtx set;

                FOR_EACH_INSN_USE (use, call_insn)
                  if (rtx_equal_p (addr, DF_REF_REG (use)))
                    break;

                if (use == NULL)
                  return false;

                for (defs = DF_REF_CHAIN (use); defs; defs = defs->next)
                  if (! DF_REF_IS_ARTIFICIAL (defs->ref))
                    break;

                if (defs == NULL)
                  return false;

                set = single_set (DF_REF_INSN (defs->ref));
                if (!set)
                  return false;

                if (GET_CODE (SET_SRC (set)) != PLUS
                    || XEXP (SET_SRC (set), 0) != stack_pointer_rtx
                    || !CONST_INT_P (XEXP (SET_SRC (set), 1)))
                  return false;

                off += INTVAL (XEXP (SET_SRC (set), 1));
              }
            else
              return false;
          }
        min_sp_off = MIN (min_sp_off, off);
        max_sp_off = MAX (max_sp_off, off + size);
      }

  if (min_sp_off >= max_sp_off)
    return true;
  sp_bytes = BITMAP_ALLOC (NULL);

  /* Set bits in SP_BYTES bitmap for bytes relative to sp + min_sp_off
     which contain arguments.  Checking has been done in the previous
     iteration.  */
  for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
    if (GET_CODE (XEXP (p, 0)) == USE
        && MEM_P (XEXP (XEXP (p, 0), 0)))
      {
        rtx mem = XEXP (XEXP (p, 0), 0), addr;
        HOST_WIDE_INT off = 0, byte, size;
        /* Checked in the previous iteration.  */
        size = MEM_SIZE (mem).to_constant ();
        addr = XEXP (mem, 0);
        if (GET_CODE (addr) == PLUS
            && REG_P (XEXP (addr, 0))
            && CONST_INT_P (XEXP (addr, 1)))
          {
            off = INTVAL (XEXP (addr, 1));
            addr = XEXP (addr, 0);
          }
        if (addr != stack_pointer_rtx)
          {
            df_ref use;
            struct df_link *defs;
            rtx set;

            FOR_EACH_INSN_USE (use, call_insn)
              if (rtx_equal_p (addr, DF_REF_REG (use)))
                break;

            for (defs = DF_REF_CHAIN (use); defs; defs = defs->next)
              if (! DF_REF_IS_ARTIFICIAL (defs->ref))
                break;

            set = single_set (DF_REF_INSN (defs->ref));
            off += INTVAL (XEXP (SET_SRC (set), 1));
          }
        for (byte = off; byte < off + size; byte++)
          {
            if (!bitmap_set_bit (sp_bytes, byte - min_sp_off))
              gcc_unreachable ();
          }
      }

  /* Walk backwards, looking for argument stores.  The search stops
     when seeing another call, sp adjustment or memory store other than
     argument store.  */
  ret = false;
  for (insn = PREV_INSN (call_insn); insn; insn = prev_insn)
    {
      rtx set, mem, addr;
      HOST_WIDE_INT off;

      if (insn == BB_HEAD (BLOCK_FOR_INSN (call_insn)))
        prev_insn = NULL;
      else
        prev_insn = PREV_INSN (insn);

      if (CALL_P (insn))
        break;

      if (!NONDEBUG_INSN_P (insn))
        continue;

      set = single_set (insn);
      if (!set || SET_DEST (set) == stack_pointer_rtx)
        break;

      if (!MEM_P (SET_DEST (set)))
        continue;

      mem = SET_DEST (set);
      addr = XEXP (mem, 0);
      off = 0;
      if (GET_CODE (addr) == PLUS
          && REG_P (XEXP (addr, 0))
          && CONST_INT_P (XEXP (addr, 1)))
        {
          off = INTVAL (XEXP (addr, 1));
          addr = XEXP (addr, 0);
        }
      if (addr != stack_pointer_rtx)
        {
          if (!fast)
            {
              df_ref use;
              struct df_link *defs;
              rtx set;

              FOR_EACH_INSN_USE (use, insn)
                if (rtx_equal_p (addr, DF_REF_REG (use)))
                  break;

              if (use == NULL)
                break;

              for (defs = DF_REF_CHAIN (use); defs; defs = defs->next)
                if (! DF_REF_IS_ARTIFICIAL (defs->ref))
                  break;

              if (defs == NULL)
                break;

              set = single_set (DF_REF_INSN (defs->ref));
              if (!set)
                break;

              if (GET_CODE (SET_SRC (set)) != PLUS
                  || XEXP (SET_SRC (set), 0) != stack_pointer_rtx
                  || !CONST_INT_P (XEXP (SET_SRC (set), 1)))
                break;

              off += INTVAL (XEXP (SET_SRC (set), 1));
            }
          else
            break;
        }

      HOST_WIDE_INT size;
      if (!MEM_SIZE_KNOWN_P (mem)
          || !MEM_SIZE (mem).is_constant (&size)
          || !check_argument_store (size, off, min_sp_off,
                                    max_sp_off, sp_bytes))
        break;

      if (!deletable_insn_p (insn, fast, NULL))
        break;

      if (do_mark)
        mark_insn (insn, fast);
      else
        bitmap_set_bit (arg_stores, INSN_UID (insn));

      if (bitmap_empty_p (sp_bytes))
        {
          ret = true;
          break;
        }
    }

  BITMAP_FREE (sp_bytes);
  if (!ret && arg_stores)
    bitmap_clear (arg_stores);

  return ret;
}
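
/* Note the two-phase protocol documented above: deletable_insn_p first
   calls this with DO_MARK false to verify that every argument store can
   be found, recording them in ARG_STORES so prescan_insns_for_dce does
   not mark them on their own; if the call later turns out to be needed,
   mark_insn calls back in with DO_MARK true so the argument stores are
   marked (kept) together with the call.  */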

/* Remove all REG_EQUAL and REG_EQUIV notes referring to the registers INSN
   defines.  */

static void
remove_reg_equal_equiv_notes_for_defs (rtx_insn *insn)
{
  df_ref def;

  FOR_EACH_INSN_DEF (def, insn)
    remove_reg_equal_equiv_notes_for_regno (DF_REF_REGNO (def));
}

/* Scan all BBs for debug insns and reset those that reference values
   defined in unmarked insns.  */

static void
reset_unmarked_insns_debug_uses (void)
{
  basic_block bb;
  rtx_insn *insn, *next;

  FOR_EACH_BB_REVERSE_FN (bb, cfun)
    FOR_BB_INSNS_REVERSE_SAFE (bb, insn, next)
      if (DEBUG_INSN_P (insn))
        {
          df_ref use;

          FOR_EACH_INSN_USE (use, insn)
            {
              struct df_link *defs;
              for (defs = DF_REF_CHAIN (use); defs; defs = defs->next)
                {
                  rtx_insn *ref_insn;
                  if (DF_REF_IS_ARTIFICIAL (defs->ref))
                    continue;
                  ref_insn = DF_REF_INSN (defs->ref);
                  if (!marked_insn_p (ref_insn))
                    break;
                }
              if (!defs)
                continue;
              /* ??? FIXME could we propagate the values assigned to
                 each of the defs?  */
              INSN_VAR_LOCATION_LOC (insn) = gen_rtx_UNKNOWN_VAR_LOC ();
              df_insn_rescan_debug_internal (insn);
              break;
            }
        }
}

/* Delete every instruction that hasn't been marked.  */

static void
delete_unmarked_insns (void)
{
  basic_block bb;
  rtx_insn *insn, *next;
  bool must_clean = false;

  FOR_EACH_BB_REVERSE_FN (bb, cfun)
    FOR_BB_INSNS_REVERSE_SAFE (bb, insn, next)
      if (NONDEBUG_INSN_P (insn))
        {
          rtx turn_into_use = NULL_RTX;

          /* Always delete no-op moves.  */
          if (noop_move_p (insn))
            {
              if (RTX_FRAME_RELATED_P (insn))
                turn_into_use
                  = find_reg_note (insn, REG_CFA_RESTORE, NULL);
              if (turn_into_use && REG_P (XEXP (turn_into_use, 0)))
                turn_into_use = XEXP (turn_into_use, 0);
              else
                turn_into_use = NULL_RTX;
            }

          /* Otherwise rely only on the DCE algorithm.  */
          else if (marked_insn_p (insn))
            continue;

          /* Beware that reaching a dbg counter limit here can result
             in a miscompiled file.  This occurs when a group of insns
             must be deleted together, typically because the kept insn
             depends on the output from the deleted insn.  Deleting
             these insns in reverse order (both at the bb level and
             when looking at the blocks) minimizes this, but does not
             eliminate it, since it is possible for the using insn to
             be top of a block and the producer to be at the bottom of
             the block.  However, in most cases this will only result
             in an uninitialized use of an insn that is dead anyway.

             However, there is one rare case that will cause a
             miscompile: deletion of non-looping pure and constant
             calls on a machine where ACCUMULATE_OUTGOING_ARGS is true.
             In this case it is possible to remove the call, but leave
             the argument pushes to the stack.  Because of the changes
             to the stack pointer, this will almost always lead to a
             miscompile.  */
          if (!dbg_cnt (dce))
            continue;

          if (dump_file)
            fprintf (dump_file, "DCE: Deleting insn %d\n", INSN_UID (insn));

          /* Before we delete the insn we have to remove the REG_EQUAL notes
             for the destination regs in order to avoid dangling notes.  */
          remove_reg_equal_equiv_notes_for_defs (insn);

          /* If a pure or const call is deleted, this may make the cfg
             have unreachable blocks.  We remember this and call
             delete_unreachable_blocks at the end.  */
          if (CALL_P (insn))
            must_clean = true;

          if (turn_into_use)
            {
              /* Don't remove frame related noop moves if they carry a
                 REG_CFA_RESTORE note: while we don't need to emit any code,
                 we need it to emit the CFI restore note.  */
              PATTERN (insn)
                = gen_rtx_USE (GET_MODE (turn_into_use), turn_into_use);
              INSN_CODE (insn) = -1;
              df_insn_rescan (insn);
            }
          else
            /* Now delete the insn.  */
            delete_insn_and_edges (insn);
        }

  /* Deleted a pure or const call.  */
  if (must_clean)
    delete_unreachable_blocks ();
}

/* Go through the instructions and mark those whose necessity is not
   dependent on inter-instruction information.  Make sure all other
   instructions are not marked.  */

static void
prescan_insns_for_dce (bool fast)
{
  basic_block bb;
  rtx_insn *insn, *prev;
  bitmap arg_stores = NULL;

  if (dump_file)
    fprintf (dump_file, "Finding needed instructions:\n");

  if (!df_in_progress && ACCUMULATE_OUTGOING_ARGS)
    arg_stores = BITMAP_ALLOC (NULL);

  FOR_EACH_BB_FN (bb, cfun)
    {
      FOR_BB_INSNS_REVERSE_SAFE (bb, insn, prev)
        if (NONDEBUG_INSN_P (insn))
          {
            /* Don't mark argument stores now.  They will be marked
               if needed when the associated CALL is marked.  */
            if (arg_stores && bitmap_bit_p (arg_stores, INSN_UID (insn)))
              continue;
            if (deletable_insn_p (insn, fast, arg_stores))
              mark_nonreg_stores (PATTERN (insn), insn, fast);
            else
              mark_insn (insn, fast);
          }
      /* find_call_stack_args only looks at argument stores in the
         same bb.  */
      if (arg_stores)
        bitmap_clear (arg_stores);
    }

  if (arg_stores)
    BITMAP_FREE (arg_stores);

  if (dump_file)
    fprintf (dump_file, "Finished finding needed instructions:\n");
}

/* UD-based DSE routines.  */

/* Mark instructions that define artificially-used registers, such as
   the frame pointer and the stack pointer.  */

static void
mark_artificial_uses (void)
{
  basic_block bb;
  struct df_link *defs;
  df_ref use;

  FOR_ALL_BB_FN (bb, cfun)
    FOR_EACH_ARTIFICIAL_USE (use, bb->index)
      for (defs = DF_REF_CHAIN (use); defs; defs = defs->next)
        if (!DF_REF_IS_ARTIFICIAL (defs->ref))
          mark_insn (DF_REF_INSN (defs->ref), false);
}

/* Mark every instruction that defines a register value that INSN uses.  */

static void
mark_reg_dependencies (rtx_insn *insn)
{
  struct df_link *defs;
  df_ref use;

  if (DEBUG_INSN_P (insn))
    return;

  FOR_EACH_INSN_USE (use, insn)
    {
      if (dump_file)
        {
          fprintf (dump_file, "Processing use of ");
          print_simple_rtl (dump_file, DF_REF_REG (use));
          fprintf (dump_file, " in insn %d:\n", INSN_UID (insn));
        }
      for (defs = DF_REF_CHAIN (use); defs; defs = defs->next)
        if (! DF_REF_IS_ARTIFICIAL (defs->ref))
          mark_insn (DF_REF_INSN (defs->ref), false);
    }
}
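
/* In other words, needed-ness propagates backwards along UD chains: for
   every register USE in a marked insn, each reaching DEF's insn is marked
   in turn and pushed on the worklist, until a fixed point is reached.
   Artificial defs (e.g. the incoming value of a hard register at a block
   entry) have no insn and are skipped.  */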

/* Initialize global variables for a new DCE pass.  */

static void
init_dce (bool fast)
{
  if (!df_in_progress)
    {
      if (!fast)
        {
          df_set_flags (DF_RD_PRUNE_DEAD_DEFS);
          df_chain_add_problem (DF_UD_CHAIN);
        }
      df_analyze ();
    }

  if (dump_file)
    fprintf (dump_file, "init_dce called\n");

  if (fast)
    {
      bitmap_obstack_initialize (&dce_blocks_bitmap_obstack);
      bitmap_obstack_initialize (&dce_tmp_bitmap_obstack);
      can_alter_cfg = false;
    }
  else
    can_alter_cfg = true;

  marked = sbitmap_alloc (get_max_uid () + 1);
  bitmap_clear (marked);
}

/* Free the data allocated by init_dce.  */

static void
fini_dce (bool fast)
{
  sbitmap_free (marked);

  if (fast)
    {
      bitmap_obstack_release (&dce_blocks_bitmap_obstack);
      bitmap_obstack_release (&dce_tmp_bitmap_obstack);
    }
}

/* UD-chain based DCE.  */

static unsigned int
rest_of_handle_ud_dce (void)
{
  rtx_insn *insn;

  init_dce (false);

  prescan_insns_for_dce (false);
  mark_artificial_uses ();
  while (worklist.length () > 0)
    {
      insn = worklist.pop ();
      mark_reg_dependencies (insn);
    }
  worklist.release ();

  if (MAY_HAVE_DEBUG_BIND_INSNS)
    reset_unmarked_insns_debug_uses ();

  /* Before any insns are deleted, we must remove the chains since
     they are not bidirectional.  */
  df_remove_problem (df_chain);
  delete_unmarked_insns ();

  fini_dce (false);
  return 0;
}

namespace {

const pass_data pass_data_ud_rtl_dce =
{
  RTL_PASS, /* type */
  "ud_dce", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_DCE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};

class pass_ud_rtl_dce : public rtl_opt_pass
{
public:
  pass_ud_rtl_dce (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_ud_rtl_dce, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      return optimize > 1 && flag_dce && dbg_cnt (dce_ud);
    }

  virtual unsigned int execute (function *)
    {
      return rest_of_handle_ud_dce ();
    }

}; // class pass_ud_rtl_dce

} // anon namespace

rtl_opt_pass *
make_pass_ud_rtl_dce (gcc::context *ctxt)
{
  return new pass_ud_rtl_dce (ctxt);
}

/* -------------------------------------------------------------------------
   Fast DCE functions
   ------------------------------------------------------------------------- */

/* Process basic block BB.  Return true if the live_in set has
   changed.  REDO_OUT is true if the info at the bottom of the block
   needs to be recalculated before starting.  Track global substitution
   of uses of dead pseudos in debug insns using GLOBAL_DEBUG.  */

static bool
word_dce_process_block (basic_block bb, bool redo_out,
                        struct dead_debug_global *global_debug)
{
  bitmap local_live = BITMAP_ALLOC (&dce_tmp_bitmap_obstack);
  rtx_insn *insn;
  bool block_changed;
  struct dead_debug_local debug;

  if (redo_out)
    {
      /* Need to redo the live_out set of this block if one of
         the succs of this block has had a change in its live in
         set.  */
      edge e;
      edge_iterator ei;
      df_confluence_function_n con_fun_n = df_word_lr->problem->con_fun_n;
      bitmap_clear (DF_WORD_LR_OUT (bb));
      FOR_EACH_EDGE (e, ei, bb->succs)
        (*con_fun_n) (e);
    }

  if (dump_file)
    {
      fprintf (dump_file, "processing block %d live out = ", bb->index);
      df_print_word_regset (dump_file, DF_WORD_LR_OUT (bb));
    }

  bitmap_copy (local_live, DF_WORD_LR_OUT (bb));
  dead_debug_local_init (&debug, NULL, global_debug);

  FOR_BB_INSNS_REVERSE (bb, insn)
    if (DEBUG_INSN_P (insn))
      {
        df_ref use;
        FOR_EACH_INSN_USE (use, insn)
          if (DF_REF_REGNO (use) >= FIRST_PSEUDO_REGISTER
              && known_eq (GET_MODE_SIZE (GET_MODE (DF_REF_REAL_REG (use))),
                           2 * UNITS_PER_WORD)
              && !bitmap_bit_p (local_live, 2 * DF_REF_REGNO (use))
              && !bitmap_bit_p (local_live, 2 * DF_REF_REGNO (use) + 1))
            dead_debug_add (&debug, use, DF_REF_REGNO (use));
      }
    else if (INSN_P (insn))
      {
        bool any_changed;

        /* No matter if the instruction is needed or not, we remove
           any regno in the defs from the live set.  */
        any_changed = df_word_lr_simulate_defs (insn, local_live);
        if (any_changed)
          mark_insn (insn, true);

        /* On the other hand, we do not allow the dead uses to set
           anything in local_live.  */
        if (marked_insn_p (insn))
          df_word_lr_simulate_uses (insn, local_live);

        /* Insert debug temps for dead REGs used in subsequent debug
           insns.  We may have to emit a debug temp even if the insn
           was marked, in case the debug use was after the point of
           death.  */
        if (debug.used && !bitmap_empty_p (debug.used))
          {
            df_ref def;

            FOR_EACH_INSN_DEF (def, insn)
              dead_debug_insert_temp (&debug, DF_REF_REGNO (def), insn,
                                      marked_insn_p (insn)
                                      && !control_flow_insn_p (insn)
                                      ? DEBUG_TEMP_AFTER_WITH_REG_FORCE
                                      : DEBUG_TEMP_BEFORE_WITH_VALUE);
          }

        if (dump_file)
          {
            fprintf (dump_file, "finished processing insn %d live out = ",
                     INSN_UID (insn));
            df_print_word_regset (dump_file, local_live);
          }
      }

  block_changed = !bitmap_equal_p (local_live, DF_WORD_LR_IN (bb));
  if (block_changed)
    bitmap_copy (DF_WORD_LR_IN (bb), local_live);

  dead_debug_local_finish (&debug, NULL);
  BITMAP_FREE (local_live);
  return block_changed;
}
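
/* The word-level problem tracks liveness of double-word pseudos at word
   granularity: bit 2*REGNO of the live set stands for the low word of
   pseudo REGNO and bit 2*REGNO+1 for its high word (the layout used by
   the DF_WORD_LR problem), which is why the debug-use test above checks
   both bits before treating the register as dead.  */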

/* Process basic block BB.  Return true if the live_in set has
   changed.  REDO_OUT is true if the info at the bottom of the block
   needs to be recalculated before starting.  AU is the proper set of
   artificial uses.  Track global substitution of uses of dead pseudos
   in debug insns using GLOBAL_DEBUG.  */

static bool
dce_process_block (basic_block bb, bool redo_out, bitmap au,
                   struct dead_debug_global *global_debug)
{
  bitmap local_live = BITMAP_ALLOC (&dce_tmp_bitmap_obstack);
  rtx_insn *insn;
  bool block_changed;
  df_ref def;
  struct dead_debug_local debug;

  if (redo_out)
    {
      /* Need to redo the live_out set of this block if one of
         the succs of this block has had a change in its live in
         set.  */
      edge e;
      edge_iterator ei;
      df_confluence_function_n con_fun_n = df_lr->problem->con_fun_n;
      bitmap_clear (DF_LR_OUT (bb));
      FOR_EACH_EDGE (e, ei, bb->succs)
        (*con_fun_n) (e);
    }

  if (dump_file)
    {
      fprintf (dump_file, "processing block %d lr out = ", bb->index);
      df_print_regset (dump_file, DF_LR_OUT (bb));
    }

  bitmap_copy (local_live, DF_LR_OUT (bb));

  df_simulate_initialize_backwards (bb, local_live);
  dead_debug_local_init (&debug, NULL, global_debug);

  FOR_BB_INSNS_REVERSE (bb, insn)
    if (DEBUG_INSN_P (insn))
      {
        df_ref use;
        FOR_EACH_INSN_USE (use, insn)
          if (!bitmap_bit_p (local_live, DF_REF_REGNO (use))
              && !bitmap_bit_p (au, DF_REF_REGNO (use)))
            dead_debug_add (&debug, use, DF_REF_REGNO (use));
      }
    else if (INSN_P (insn))
      {
        bool needed = marked_insn_p (insn);

        /* The insn is needed if there is someone who uses the output.  */
        if (!needed)
          FOR_EACH_INSN_DEF (def, insn)
            if (bitmap_bit_p (local_live, DF_REF_REGNO (def))
                || bitmap_bit_p (au, DF_REF_REGNO (def)))
              {
                needed = true;
                mark_insn (insn, true);
                break;
              }

        /* No matter if the instruction is needed or not, we remove
           any regno in the defs from the live set.  */
        df_simulate_defs (insn, local_live);

        /* On the other hand, we do not allow the dead uses to set
           anything in local_live.  */
        if (needed)
          df_simulate_uses (insn, local_live);

        /* Insert debug temps for dead REGs used in subsequent debug
           insns.  We may have to emit a debug temp even if the insn
           was marked, in case the debug use was after the point of
           death.  */
        if (debug.used && !bitmap_empty_p (debug.used))
          FOR_EACH_INSN_DEF (def, insn)
            dead_debug_insert_temp (&debug, DF_REF_REGNO (def), insn,
                                    needed && !control_flow_insn_p (insn)
                                    ? DEBUG_TEMP_AFTER_WITH_REG_FORCE
                                    : DEBUG_TEMP_BEFORE_WITH_VALUE);
      }

  dead_debug_local_finish (&debug, NULL);
  df_simulate_finalize_backwards (bb, local_live);

  block_changed = !bitmap_equal_p (local_live, DF_LR_IN (bb));
  if (block_changed)
    bitmap_copy (DF_LR_IN (bb), local_live);

  BITMAP_FREE (local_live);
  return block_changed;
}

/* Perform fast DCE once initialization is done.  If WORD_LEVEL is
   true, use the word level dce, otherwise do it at the pseudo
   level.  */

static void
fast_dce (bool word_level)
{
  int *postorder = df_get_postorder (DF_BACKWARD);
  int n_blocks = df_get_n_blocks (DF_BACKWARD);
  /* The set of blocks that have been seen on this iteration.  */
  bitmap processed = BITMAP_ALLOC (&dce_blocks_bitmap_obstack);
  /* The set of blocks that need to have the out vectors reset because
     the in of one of their successors has changed.  */
  bitmap redo_out = BITMAP_ALLOC (&dce_blocks_bitmap_obstack);
  bitmap all_blocks = BITMAP_ALLOC (&dce_blocks_bitmap_obstack);
  bool global_changed = true;

  /* These regs are considered always live so if they end up dying
     because of some def, we need to bring them back again.  Calling
     df_simulate_fixup_sets has the disadvantage of calling
     bb_has_eh_pred once per insn, so we cache the information
     here.  */
  bitmap au = &df->regular_block_artificial_uses;
  bitmap au_eh = &df->eh_block_artificial_uses;
  int i;
  struct dead_debug_global global_debug;

  prescan_insns_for_dce (true);

  for (i = 0; i < n_blocks; i++)
    bitmap_set_bit (all_blocks, postorder[i]);

  dead_debug_global_init (&global_debug, NULL);

  while (global_changed)
    {
      global_changed = false;

      for (i = 0; i < n_blocks; i++)
        {
          int index = postorder[i];
          basic_block bb = BASIC_BLOCK_FOR_FN (cfun, index);
          bool local_changed;

          if (index < NUM_FIXED_BLOCKS)
            {
              bitmap_set_bit (processed, index);
              continue;
            }

          if (word_level)
            local_changed
              = word_dce_process_block (bb, bitmap_bit_p (redo_out, index),
                                        &global_debug);
          else
            local_changed
              = dce_process_block (bb, bitmap_bit_p (redo_out, index),
                                   bb_has_eh_pred (bb) ? au_eh : au,
                                   &global_debug);
          bitmap_set_bit (processed, index);

          if (local_changed)
            {
              edge e;
              edge_iterator ei;
              FOR_EACH_EDGE (e, ei, bb->preds)
                if (bitmap_bit_p (processed, e->src->index))
                  /* Be tricky about when we need to iterate the
                     analysis.  We only have to redo the analysis if the
                     bitmaps change at the top of a block that is the
                     entry to a loop.  */
                  global_changed = true;
                else
                  bitmap_set_bit (redo_out, e->src->index);
            }
        }

      if (global_changed)
        {
          /* Turn off the RUN_DCE flag to prevent recursive calls to
             dce.  */
          int old_flag = df_clear_flags (DF_LR_RUN_DCE);

          /* So something was deleted that requires a redo.  Do it on
             the cheap.  */
          delete_unmarked_insns ();
          bitmap_clear (marked);
          bitmap_clear (processed);
          bitmap_clear (redo_out);

          /* We do not need to rescan any instructions.  We only need
             to redo the dataflow equations for the blocks that had a
             change at the top of the block.  Then we need to redo the
             iteration.  */
          if (word_level)
            df_analyze_problem (df_word_lr, all_blocks, postorder, n_blocks);
          else
            df_analyze_problem (df_lr, all_blocks, postorder, n_blocks);

          if (old_flag & DF_LR_RUN_DCE)
            df_set_flags (DF_LR_RUN_DCE);

          prescan_insns_for_dce (true);
        }
    }

  dead_debug_global_finish (&global_debug, NULL);

  delete_unmarked_insns ();

  BITMAP_FREE (processed);
  BITMAP_FREE (redo_out);
  BITMAP_FREE (all_blocks);
}

/* Fast register level DCE.  */

static unsigned int
rest_of_handle_fast_dce (void)
{
  init_dce (true);
  fast_dce (false);
  fini_dce (true);
  return 0;
}


/* Fast byte level DCE.  */

void
run_word_dce (void)
{
  int old_flags;

  if (!flag_dce)
    return;

  timevar_push (TV_DCE);
  old_flags = df_clear_flags (DF_DEFER_INSN_RESCAN + DF_NO_INSN_RESCAN);
  df_word_lr_add_problem ();
  init_dce (true);
  fast_dce (true);
  fini_dce (true);
  df_set_flags (old_flags);
  timevar_pop (TV_DCE);
}

/* This is an internal call that is used by the df live register
   problem to run fast dce as a side effect of creating the live
   information.  The stack is organized so that the lr problem is run,
   this pass is run, which updates the live info and the df scanning
   info, and then returns to allow the rest of the problems to be run.

   This can be called from elsewhere but it will not update the bit
   vectors for any other problems than LR.  */

void
run_fast_df_dce (void)
{
  if (flag_dce)
    {
      /* If dce is able to delete something, it has to happen
         immediately.  Otherwise there will be problems handling the
         eq_notes.  */
      int old_flags
        = df_clear_flags (DF_DEFER_INSN_RESCAN + DF_NO_INSN_RESCAN);

      df_in_progress = true;
      rest_of_handle_fast_dce ();
      df_in_progress = false;

      df_set_flags (old_flags);
    }
}

/* Run a fast DCE pass.  */

void
run_fast_dce (void)
{
  if (flag_dce)
    rest_of_handle_fast_dce ();
}


namespace {

const pass_data pass_data_fast_rtl_dce =
{
  RTL_PASS, /* type */
  "rtl_dce", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_DCE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};

class pass_fast_rtl_dce : public rtl_opt_pass
{
public:
  pass_fast_rtl_dce (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_fast_rtl_dce, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      return optimize > 0 && flag_dce && dbg_cnt (dce_fast);
    }

  virtual unsigned int execute (function *)
    {
      return rest_of_handle_fast_dce ();
    }

}; // class pass_fast_rtl_dce

} // anon namespace

rtl_opt_pass *
make_pass_fast_rtl_dce (gcc::context *ctxt)
{
  return new pass_fast_rtl_dce (ctxt);
}