/* RTL dead code elimination.
   Copyright (C) 2005-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "hashtab.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "except.h"
#include "df.h"
#include "cselib.h"
#include "dce.h"
#include "valtrack.h"
#include "tree-pass.h"
#include "dbgcnt.h"
#include "tm_p.h"
#include "emit-rtl.h"  /* FIXME: Can go away once crtl is moved to rtl.h.  */

/* -------------------------------------------------------------------------
   Core mark/delete routines
   ------------------------------------------------------------------------- */

/* True if we are invoked while the df engine is running; in this case,
   we don't want to reenter it.  */
static bool df_in_progress = false;

/* True if we are allowed to alter the CFG in this pass.  */
static bool can_alter_cfg = false;

/* Instructions that have been marked but whose dependencies have not
   yet been processed.  */
static vec<rtx_insn *> worklist;

/* Bitmap of instructions marked as needed indexed by INSN_UID.  */
static sbitmap marked;

/* Bitmap obstacks used for block processing by the fast algorithm.  */
static bitmap_obstack dce_blocks_bitmap_obstack;
static bitmap_obstack dce_tmp_bitmap_obstack;

static bool find_call_stack_args (rtx_call_insn *, bool, bool, bitmap);

/* A subroutine for which BODY is part of the instruction being tested;
   either the top-level pattern, or an element of a PARALLEL.  The
   instruction is known not to be a bare USE or CLOBBER.  */

static bool
deletable_insn_p_1 (rtx body)
{
  switch (GET_CODE (body))
    {
    case PREFETCH:
    case TRAP_IF:
      /* The UNSPEC case was added here because the ia-64 claims that
	 USEs do not work after reload and generates UNSPECS rather
	 than USEs.  Since dce is run after reload we need to avoid
	 deleting these even if they are dead.  If it turns out that
	 USEs really do work after reload, the ia-64 should be
	 changed, and the UNSPEC case can be removed.  */
    case UNSPEC:
      return false;

    default:
      return !volatile_refs_p (body);
    }
}

/* Return true if INSN is a normal instruction that can be deleted by
   the DCE pass.  */

static bool
deletable_insn_p (rtx_insn *insn, bool fast, bitmap arg_stores)
{
  rtx body, x;
  int i;
  df_ref def;

  if (CALL_P (insn)
      /* We cannot delete calls inside of the recursive dce because
	 this may cause basic blocks to be deleted and this messes up
	 the rest of the stack of optimization passes.  */
      && (!df_in_progress)
      /* We cannot delete pure or const sibling calls because it is
	 hard to see the result.  */
      && (!SIBLING_CALL_P (insn))
      /* We can delete dead const or pure calls as long as they do not
	 infinite loop.  */
      && (RTL_CONST_OR_PURE_CALL_P (insn)
	  && !RTL_LOOPING_CONST_OR_PURE_CALL_P (insn)))
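    /* DO_MARK is false here: this call only checks whether all of the
       call's argument stores can be found.  If the call itself is later
       marked as needed, mark_insn reruns this check with DO_MARK true.  */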
    return find_call_stack_args (as_a <rtx_call_insn *> (insn), false,
				 fast, arg_stores);

  /* Don't delete jumps, notes and the like.  */
  if (!NONJUMP_INSN_P (insn))
    return false;

  /* Don't delete insns that may throw if we cannot do so.  */
  if (!(cfun->can_delete_dead_exceptions && can_alter_cfg)
      && !insn_nothrow_p (insn))
    return false;

  /* If INSN sets a global_reg, leave it untouched.  */
  FOR_EACH_INSN_DEF (def, insn)
    if (HARD_REGISTER_NUM_P (DF_REF_REGNO (def))
	&& global_regs[DF_REF_REGNO (def)])
      return false;

  body = PATTERN (insn);
  switch (GET_CODE (body))
    {
    case USE:
    case VAR_LOCATION:
      return false;

    case CLOBBER:
      if (fast)
	{
	  /* A CLOBBER of a dead pseudo register serves no purpose.
	     That is not necessarily true for hard registers until
	     after reload.  */
	  x = XEXP (body, 0);
	  return REG_P (x) && (!HARD_REGISTER_P (x) || reload_completed);
	}
      else
	/* Because of the way that use-def chains are built, it is not
	   possible to tell if the clobber is dead because it can
	   never be the target of a use-def chain.  */
	return false;

    case PARALLEL:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
	if (!deletable_insn_p_1 (XVECEXP (body, 0, i)))
	  return false;
      return true;

    default:
      return deletable_insn_p_1 (body);
    }
}

/* Return true if INSN has been marked as needed.  */

static inline int
marked_insn_p (rtx_insn *insn)
{
  /* Artificial defs are always needed and they do not have an insn.
     We should never see them here.  */
  gcc_assert (insn);
  return bitmap_bit_p (marked, INSN_UID (insn));
}

/* If INSN has not yet been marked as needed, mark it now, and add it to
   the worklist.  */

static void
mark_insn (rtx_insn *insn, bool fast)
{
  if (!marked_insn_p (insn))
    {
      if (!fast)
	worklist.safe_push (insn);
      bitmap_set_bit (marked, INSN_UID (insn));
      if (dump_file)
	fprintf (dump_file, "  Adding insn %d to worklist\n", INSN_UID (insn));
      if (CALL_P (insn)
	  && !df_in_progress
	  && !SIBLING_CALL_P (insn)
	  && (RTL_CONST_OR_PURE_CALL_P (insn)
	      && !RTL_LOOPING_CONST_OR_PURE_CALL_P (insn)))
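	/* The const/pure call is now known to be needed, so this time
	   its argument stores really are marked (DO_MARK is true).  */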
	find_call_stack_args (as_a <rtx_call_insn *> (insn), true, fast, NULL);
    }
}

/* A note_stores callback used by mark_nonreg_stores.  DATA is the
   instruction containing DEST.  */

static void
mark_nonreg_stores_1 (rtx dest, const_rtx pattern, void *data)
{
  if (GET_CODE (pattern) != CLOBBER && !REG_P (dest))
    mark_insn ((rtx_insn *) data, true);
}


/* A note_stores callback used by mark_nonreg_stores.  DATA is the
   instruction containing DEST.  */

static void
mark_nonreg_stores_2 (rtx dest, const_rtx pattern, void *data)
{
  if (GET_CODE (pattern) != CLOBBER && !REG_P (dest))
    mark_insn ((rtx_insn *) data, false);
}


/* Mark INSN if BODY stores to a non-register destination.  */

static void
mark_nonreg_stores (rtx body, rtx_insn *insn, bool fast)
{
  if (fast)
    note_stores (body, mark_nonreg_stores_1, insn);
  else
    note_stores (body, mark_nonreg_stores_2, insn);
}

/* Return true if store to MEM, starting OFF bytes from stack pointer,
   is a call argument store, and clear corresponding bits from SP_BYTES
   bitmap if it is.  */

static bool
check_argument_store (rtx mem, HOST_WIDE_INT off, HOST_WIDE_INT min_sp_off,
		      HOST_WIDE_INT max_sp_off, bitmap sp_bytes)
{
  HOST_WIDE_INT byte;
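  /* Every byte of the store must fall inside the argument area computed
     by the caller and must not have been matched already; clearing the
     bit records that this byte of the argument area is now covered.  */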
  for (byte = off; byte < off + GET_MODE_SIZE (GET_MODE (mem)); byte++)
    {
      if (byte < min_sp_off
	  || byte >= max_sp_off
	  || !bitmap_clear_bit (sp_bytes, byte - min_sp_off))
	return false;
    }

  return true;
}

/* Try to find all stack stores of CALL_INSN arguments if
   ACCUMULATE_OUTGOING_ARGS.  If all stack stores have been found
   and it is therefore safe to eliminate the call, return true,
   otherwise return false.  This function should first be called
   with DO_MARK false, and called again with DO_MARK true only when
   CALL_INSN is actually going to be marked.  */
static bool
find_call_stack_args (rtx_call_insn *call_insn, bool do_mark, bool fast,
		      bitmap arg_stores)
{
  rtx p;
  rtx_insn *insn, *prev_insn;
  bool ret;
  HOST_WIDE_INT min_sp_off, max_sp_off;
  bitmap sp_bytes;

  gcc_assert (CALL_P (call_insn));
  if (!ACCUMULATE_OUTGOING_ARGS)
    return true;

  if (!do_mark)
    {
      gcc_assert (arg_stores);
      bitmap_clear (arg_stores);
    }

  min_sp_off = INTTYPE_MAXIMUM (HOST_WIDE_INT);
  max_sp_off = 0;

  /* First determine the minimum and maximum offset from sp for
     stored arguments.  */
  for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
    if (GET_CODE (XEXP (p, 0)) == USE
	&& MEM_P (XEXP (XEXP (p, 0), 0)))
      {
	rtx mem = XEXP (XEXP (p, 0), 0), addr;
	HOST_WIDE_INT off = 0, size;
	if (!MEM_SIZE_KNOWN_P (mem))
	  return false;
	size = MEM_SIZE (mem);
	addr = XEXP (mem, 0);
	if (GET_CODE (addr) == PLUS
	    && REG_P (XEXP (addr, 0))
	    && CONST_INT_P (XEXP (addr, 1)))
	  {
	    off = INTVAL (XEXP (addr, 1));
	    addr = XEXP (addr, 0);
	  }
	if (addr != stack_pointer_rtx)
	  {
	    if (!REG_P (addr))
	      return false;
	    /* If not fast, use chains to see if addr wasn't set to
	       sp + offset.  */
	    if (!fast)
	      {
		df_ref use;
		struct df_link *defs;
		rtx set;

		FOR_EACH_INSN_USE (use, call_insn)
		  if (rtx_equal_p (addr, DF_REF_REG (use)))
		    break;

		if (use == NULL)
		  return false;

		for (defs = DF_REF_CHAIN (use); defs; defs = defs->next)
		  if (! DF_REF_IS_ARTIFICIAL (defs->ref))
		    break;

		if (defs == NULL)
		  return false;

		set = single_set (DF_REF_INSN (defs->ref));
		if (!set)
		  return false;

		if (GET_CODE (SET_SRC (set)) != PLUS
		    || XEXP (SET_SRC (set), 0) != stack_pointer_rtx
		    || !CONST_INT_P (XEXP (SET_SRC (set), 1)))
		  return false;

		off += INTVAL (XEXP (SET_SRC (set), 1));
	      }
	    else
	      return false;
	  }
	min_sp_off = MIN (min_sp_off, off);
	max_sp_off = MAX (max_sp_off, off + size);
      }

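  /* If no stack argument stores were found, nothing blocks deleting
     the call.  */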
  if (min_sp_off >= max_sp_off)
    return true;
  sp_bytes = BITMAP_ALLOC (NULL);

  /* Set bits in SP_BYTES bitmap for bytes relative to sp + min_sp_off
     which contain arguments.  Checking has been done in the previous
     loop.  */
  for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
    if (GET_CODE (XEXP (p, 0)) == USE
	&& MEM_P (XEXP (XEXP (p, 0), 0)))
      {
	rtx mem = XEXP (XEXP (p, 0), 0), addr;
	HOST_WIDE_INT off = 0, byte;
	addr = XEXP (mem, 0);
	if (GET_CODE (addr) == PLUS
	    && REG_P (XEXP (addr, 0))
	    && CONST_INT_P (XEXP (addr, 1)))
	  {
	    off = INTVAL (XEXP (addr, 1));
	    addr = XEXP (addr, 0);
	  }
	if (addr != stack_pointer_rtx)
	  {
	    df_ref use;
	    struct df_link *defs;
	    rtx set;

	    FOR_EACH_INSN_USE (use, call_insn)
	      if (rtx_equal_p (addr, DF_REF_REG (use)))
		break;

	    for (defs = DF_REF_CHAIN (use); defs; defs = defs->next)
	      if (! DF_REF_IS_ARTIFICIAL (defs->ref))
		break;

	    set = single_set (DF_REF_INSN (defs->ref));
	    off += INTVAL (XEXP (SET_SRC (set), 1));
	  }
	for (byte = off; byte < off + MEM_SIZE (mem); byte++)
	  {
	    if (!bitmap_set_bit (sp_bytes, byte - min_sp_off))
	      gcc_unreachable ();
	  }
      }

  /* Walk backwards, looking for argument stores.  The search stops
     when seeing another call, sp adjustment or memory store other than
     argument store.  */
  ret = false;
  for (insn = PREV_INSN (call_insn); insn; insn = prev_insn)
    {
      rtx set, mem, addr;
      HOST_WIDE_INT off;

      if (insn == BB_HEAD (BLOCK_FOR_INSN (call_insn)))
	prev_insn = NULL;
      else
	prev_insn = PREV_INSN (insn);

      if (CALL_P (insn))
	break;

      if (!NONDEBUG_INSN_P (insn))
	continue;

      set = single_set (insn);
      if (!set || SET_DEST (set) == stack_pointer_rtx)
	break;

      if (!MEM_P (SET_DEST (set)))
	continue;

      mem = SET_DEST (set);
      addr = XEXP (mem, 0);
      off = 0;
      if (GET_CODE (addr) == PLUS
	  && REG_P (XEXP (addr, 0))
	  && CONST_INT_P (XEXP (addr, 1)))
	{
	  off = INTVAL (XEXP (addr, 1));
	  addr = XEXP (addr, 0);
	}
      if (addr != stack_pointer_rtx)
	{
	  if (!REG_P (addr))
	    break;
	  if (!fast)
	    {
	      df_ref use;
	      struct df_link *defs;
	      rtx set;

	      FOR_EACH_INSN_USE (use, insn)
		if (rtx_equal_p (addr, DF_REF_REG (use)))
		  break;

	      if (use == NULL)
		break;

	      for (defs = DF_REF_CHAIN (use); defs; defs = defs->next)
		if (! DF_REF_IS_ARTIFICIAL (defs->ref))
		  break;

	      if (defs == NULL)
		break;

	      set = single_set (DF_REF_INSN (defs->ref));
	      if (!set)
		break;

	      if (GET_CODE (SET_SRC (set)) != PLUS
		  || XEXP (SET_SRC (set), 0) != stack_pointer_rtx
		  || !CONST_INT_P (XEXP (SET_SRC (set), 1)))
		break;

	      off += INTVAL (XEXP (SET_SRC (set), 1));
	    }
	  else
	    break;
	}

      if (GET_MODE_SIZE (GET_MODE (mem)) == 0
	  || !check_argument_store (mem, off, min_sp_off,
				    max_sp_off, sp_bytes))
	break;

      if (!deletable_insn_p (insn, fast, NULL))
	break;

      if (do_mark)
	mark_insn (insn, fast);
      else
	bitmap_set_bit (arg_stores, INSN_UID (insn));

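      /* Once every byte of the argument area has been matched to a
	 store, all argument stores have been found.  */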
      if (bitmap_empty_p (sp_bytes))
	{
	  ret = true;
	  break;
	}
    }

  BITMAP_FREE (sp_bytes);
  if (!ret && arg_stores)
    bitmap_clear (arg_stores);

  return ret;
}

/* Remove all REG_EQUAL and REG_EQUIV notes referring to the registers INSN
   writes to.  */

static void
remove_reg_equal_equiv_notes_for_defs (rtx_insn *insn)
{
  df_ref def;

  FOR_EACH_INSN_DEF (def, insn)
    remove_reg_equal_equiv_notes_for_regno (DF_REF_REGNO (def));
}

/* Scan all BBs for debug insns and reset those that reference values
   defined in unmarked insns.  */

static void
reset_unmarked_insns_debug_uses (void)
{
  basic_block bb;
  rtx_insn *insn, *next;

  FOR_EACH_BB_REVERSE_FN (bb, cfun)
    FOR_BB_INSNS_REVERSE_SAFE (bb, insn, next)
      if (DEBUG_INSN_P (insn))
	{
	  df_ref use;

	  FOR_EACH_INSN_USE (use, insn)
	    {
	      struct df_link *defs;
	      for (defs = DF_REF_CHAIN (use); defs; defs = defs->next)
		{
		  rtx_insn *ref_insn;
		  if (DF_REF_IS_ARTIFICIAL (defs->ref))
		    continue;
		  ref_insn = DF_REF_INSN (defs->ref);
		  if (!marked_insn_p (ref_insn))
		    break;
		}
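	      /* DEFS is non-null here only if the walk above stopped at
		 an unmarked def; if it ran to completion, every reaching
		 def is being kept and the debug insn stays valid.  */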
	      if (!defs)
		continue;
	      /* ??? FIXME could we propagate the values assigned to
		 each of the DEFs?  */
	      INSN_VAR_LOCATION_LOC (insn) = gen_rtx_UNKNOWN_VAR_LOC ();
	      df_insn_rescan_debug_internal (insn);
	      break;
	    }
	}
}

/* Delete every instruction that hasn't been marked.  */

static void
delete_unmarked_insns (void)
{
  basic_block bb;
  rtx_insn *insn, *next;
  bool must_clean = false;

  FOR_EACH_BB_REVERSE_FN (bb, cfun)
    FOR_BB_INSNS_REVERSE_SAFE (bb, insn, next)
      if (NONDEBUG_INSN_P (insn))
	{
	  /* Always delete no-op moves.  */
	  if (noop_move_p (insn))
	    ;

	  /* Otherwise rely only on the DCE algorithm.  */
	  else if (marked_insn_p (insn))
	    continue;

	  /* Beware that reaching a dbg counter limit here can result
	     in a miscompiled file.  This occurs when a group of insns
	     must be deleted together, typically because the kept insn
	     depends on the output from the deleted insn.  Deleting
	     these insns in reverse order (both at the bb level and
	     when looking at the blocks) minimizes this, but does not
	     eliminate it, since it is possible for the using insn to
	     be at the top of a block and the producer to be at the
	     bottom of the block.  However, in most cases this will
	     only result in an uninitialized use of an insn that is
	     dead anyway.

	     However, there is one rare case that will cause a
	     miscompile: deletion of non-looping pure and constant
	     calls on a machine where ACCUMULATE_OUTGOING_ARGS is true.
	     In this case it is possible to remove the call, but leave
	     the argument pushes to the stack.  Because of the changes
	     to the stack pointer, this will almost always lead to a
	     miscompile.  */
	  if (!dbg_cnt (dce))
	    continue;

	  if (dump_file)
	    fprintf (dump_file, "DCE: Deleting insn %d\n", INSN_UID (insn));

	  /* Before we delete the insn we have to remove the REG_EQUAL notes
	     for the destination regs in order to avoid dangling notes.  */
	  remove_reg_equal_equiv_notes_for_defs (insn);

	  /* If a pure or const call is deleted, this may make the cfg
	     have unreachable blocks.  We remember this and call
	     delete_unreachable_blocks at the end.  */
	  if (CALL_P (insn))
	    must_clean = true;

	  /* Now delete the insn.  */
	  delete_insn_and_edges (insn);
	}

  /* Deleted a pure or const call.  */
  if (must_clean)
    delete_unreachable_blocks ();
}

/* Go through the instructions and mark those whose necessity is not
   dependent on inter-instruction information.  Make sure all other
   instructions are not marked.  */

static void
prescan_insns_for_dce (bool fast)
{
  basic_block bb;
  rtx_insn *insn, *prev;
  bitmap arg_stores = NULL;

  if (dump_file)
    fprintf (dump_file, "Finding needed instructions:\n");

  if (!df_in_progress && ACCUMULATE_OUTGOING_ARGS)
    arg_stores = BITMAP_ALLOC (NULL);

  FOR_EACH_BB_FN (bb, cfun)
    {
      FOR_BB_INSNS_REVERSE_SAFE (bb, insn, prev)
	if (NONDEBUG_INSN_P (insn))
	  {
	    /* Don't mark argument stores now.  They will be marked
	       if needed when the associated CALL is marked.  */
	    if (arg_stores && bitmap_bit_p (arg_stores, INSN_UID (insn)))
	      continue;
	    if (deletable_insn_p (insn, fast, arg_stores))
	      mark_nonreg_stores (PATTERN (insn), insn, fast);
	    else
	      mark_insn (insn, fast);
	  }
      /* find_call_stack_args only looks at argument stores in the
	 same bb.  */
      if (arg_stores)
	bitmap_clear (arg_stores);
    }

  if (arg_stores)
    BITMAP_FREE (arg_stores);

  if (dump_file)
    fprintf (dump_file, "Finished finding needed instructions:\n");
}

/* UD-based DCE routines.  */

/* Mark instructions that define artificially-used registers, such as
   the frame pointer and the stack pointer.  */

static void
mark_artificial_uses (void)
{
  basic_block bb;
  struct df_link *defs;
  df_ref use;

  FOR_ALL_BB_FN (bb, cfun)
    FOR_EACH_ARTIFICIAL_USE (use, bb->index)
      for (defs = DF_REF_CHAIN (use); defs; defs = defs->next)
	if (!DF_REF_IS_ARTIFICIAL (defs->ref))
	  mark_insn (DF_REF_INSN (defs->ref), false);
}

/* Mark every instruction that defines a register value that INSN uses.  */

static void
mark_reg_dependencies (rtx_insn *insn)
{
  struct df_link *defs;
  df_ref use;

  if (DEBUG_INSN_P (insn))
    return;

  FOR_EACH_INSN_USE (use, insn)
    {
      if (dump_file)
	{
	  fprintf (dump_file, "Processing use of ");
	  print_simple_rtl (dump_file, DF_REF_REG (use));
	  fprintf (dump_file, " in insn %d:\n", INSN_UID (insn));
	}
      for (defs = DF_REF_CHAIN (use); defs; defs = defs->next)
	if (! DF_REF_IS_ARTIFICIAL (defs->ref))
	  mark_insn (DF_REF_INSN (defs->ref), false);
    }
}

/* Initialize global variables for a new DCE pass.  */

static void
init_dce (bool fast)
{
  if (!df_in_progress)
    {
      if (!fast)
	{
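	  /* The marking walk below follows use-def chains;
	     DF_RD_PRUNE_DEAD_DEFS asks the df engine to leave defs that
	     are dead in LR out of the reaching-definitions info those
	     chains are built from.  */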
	  df_set_flags (DF_RD_PRUNE_DEAD_DEFS);
	  df_chain_add_problem (DF_UD_CHAIN);
	}
      df_analyze ();
    }

  if (dump_file)
    df_dump (dump_file);

  if (fast)
    {
      bitmap_obstack_initialize (&dce_blocks_bitmap_obstack);
      bitmap_obstack_initialize (&dce_tmp_bitmap_obstack);
      can_alter_cfg = false;
    }
  else
    can_alter_cfg = true;

  marked = sbitmap_alloc (get_max_uid () + 1);
  bitmap_clear (marked);
}

/* Free the data allocated by init_dce.  */

static void
fini_dce (bool fast)
{
  sbitmap_free (marked);

  if (fast)
    {
      bitmap_obstack_release (&dce_blocks_bitmap_obstack);
      bitmap_obstack_release (&dce_tmp_bitmap_obstack);
    }
}

/* UD-chain based DCE.  */

static unsigned int
rest_of_handle_ud_dce (void)
{
  rtx_insn *insn;

  init_dce (false);

  prescan_insns_for_dce (false);
  mark_artificial_uses ();
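
  /* Propagate neededness: each insn taken off the worklist marks the
     producers of the values it uses, which may in turn be pushed onto
     the worklist.  */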
  while (worklist.length () > 0)
    {
      insn = worklist.pop ();
      mark_reg_dependencies (insn);
    }
  worklist.release ();

  if (MAY_HAVE_DEBUG_INSNS)
    reset_unmarked_insns_debug_uses ();

  /* Before any insns are deleted, we must remove the chains since
     they are not bidirectional.  */
  df_remove_problem (df_chain);
  delete_unmarked_insns ();

  fini_dce (false);
  return 0;
}

namespace {

const pass_data pass_data_ud_rtl_dce =
{
  RTL_PASS, /* type */
  "ud_dce", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_DCE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};

class pass_ud_rtl_dce : public rtl_opt_pass
{
public:
  pass_ud_rtl_dce (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_ud_rtl_dce, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      return optimize > 1 && flag_dce && dbg_cnt (dce_ud);
    }

  virtual unsigned int execute (function *)
    {
      return rest_of_handle_ud_dce ();
    }

}; // class pass_ud_rtl_dce

} // anon namespace

rtl_opt_pass *
make_pass_ud_rtl_dce (gcc::context *ctxt)
{
  return new pass_ud_rtl_dce (ctxt);
}

/* -------------------------------------------------------------------------
   Fast DCE functions
   ------------------------------------------------------------------------- */

/* Process basic block BB.  Return true if the live_in set has
   changed.  REDO_OUT is true if the info at the bottom of the block
   needs to be recalculated before starting.  Track global
   substitution of uses of dead pseudos in debug insns using
   GLOBAL_DEBUG.  */

static bool
word_dce_process_block (basic_block bb, bool redo_out,
			struct dead_debug_global *global_debug)
{
  bitmap local_live = BITMAP_ALLOC (&dce_tmp_bitmap_obstack);
  rtx_insn *insn;
  bool block_changed;
  struct dead_debug_local debug;

  if (redo_out)
    {
      /* Need to redo the live_out set of this block if one of the
	 successors of this block has had a change in its live in
	 set.  */
      edge e;
      edge_iterator ei;
      df_confluence_function_n con_fun_n = df_word_lr->problem->con_fun_n;
      bitmap_clear (DF_WORD_LR_OUT (bb));
      FOR_EACH_EDGE (e, ei, bb->succs)
	(*con_fun_n) (e);
    }

  if (dump_file)
    {
      fprintf (dump_file, "processing block %d live out = ", bb->index);
      df_print_word_regset (dump_file, DF_WORD_LR_OUT (bb));
    }

  bitmap_copy (local_live, DF_WORD_LR_OUT (bb));
  dead_debug_local_init (&debug, NULL, global_debug);

  FOR_BB_INSNS_REVERSE (bb, insn)
    if (DEBUG_INSN_P (insn))
      {
	df_ref use;
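	/* A debug use is recorded as dead here only for double-word
	   pseudos whose word-level liveness bits (2 * regno and
	   2 * regno + 1) are both clear, i.e. neither word is live
	   below this point.  */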
	FOR_EACH_INSN_USE (use, insn)
	  if (DF_REF_REGNO (use) >= FIRST_PSEUDO_REGISTER
	      && (GET_MODE_SIZE (GET_MODE (DF_REF_REAL_REG (use)))
		  == 2 * UNITS_PER_WORD)
	      && !bitmap_bit_p (local_live, 2 * DF_REF_REGNO (use))
	      && !bitmap_bit_p (local_live, 2 * DF_REF_REGNO (use) + 1))
	    dead_debug_add (&debug, use, DF_REF_REGNO (use));
      }
    else if (INSN_P (insn))
      {
	bool any_changed;

	/* No matter if the instruction is needed or not, we remove
	   any regno in the defs from the live set.  */
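	/* df_word_lr_simulate_defs also reports whether any defined
	   word was still live; if so, the insn has a visible effect
	   and must be kept.  */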
	any_changed = df_word_lr_simulate_defs (insn, local_live);
	if (any_changed)
	  mark_insn (insn, true);

	/* On the other hand, we do not allow the dead uses to set
	   anything in local_live.  */
	if (marked_insn_p (insn))
	  df_word_lr_simulate_uses (insn, local_live);

	/* Insert debug temps for dead REGs used in subsequent debug
	   insns.  We may have to emit a debug temp even if the insn
	   was marked, in case the debug use was after the point of
	   death.  */
	if (debug.used && !bitmap_empty_p (debug.used))
	  {
	    df_ref def;

	    FOR_EACH_INSN_DEF (def, insn)
	      dead_debug_insert_temp (&debug, DF_REF_REGNO (def), insn,
				      marked_insn_p (insn)
				      && !control_flow_insn_p (insn)
				      ? DEBUG_TEMP_AFTER_WITH_REG_FORCE
				      : DEBUG_TEMP_BEFORE_WITH_VALUE);
	  }

	if (dump_file)
	  {
	    fprintf (dump_file, "finished processing insn %d live out = ",
		     INSN_UID (insn));
	    df_print_word_regset (dump_file, local_live);
	  }
      }

  block_changed = !bitmap_equal_p (local_live, DF_WORD_LR_IN (bb));
  if (block_changed)
    bitmap_copy (DF_WORD_LR_IN (bb), local_live);

  dead_debug_local_finish (&debug, NULL);
  BITMAP_FREE (local_live);
  return block_changed;
}

/* Process basic block BB.  Return true if the live_in set has
   changed.  REDO_OUT is true if the info at the bottom of the block
   needs to be recalculated before starting.  AU is the proper set of
   artificial uses.  Track global substitution of uses of dead pseudos
   in debug insns using GLOBAL_DEBUG.  */

static bool
dce_process_block (basic_block bb, bool redo_out, bitmap au,
		   struct dead_debug_global *global_debug)
{
  bitmap local_live = BITMAP_ALLOC (&dce_tmp_bitmap_obstack);
  rtx_insn *insn;
  bool block_changed;
  df_ref def;
  struct dead_debug_local debug;

  if (redo_out)
    {
      /* Need to redo the live_out set of this block if one of the
	 successors of this block has had a change in its live in
	 set.  */
      edge e;
      edge_iterator ei;
      df_confluence_function_n con_fun_n = df_lr->problem->con_fun_n;
      bitmap_clear (DF_LR_OUT (bb));
      FOR_EACH_EDGE (e, ei, bb->succs)
	(*con_fun_n) (e);
    }

  if (dump_file)
    {
      fprintf (dump_file, "processing block %d lr out = ", bb->index);
      df_print_regset (dump_file, DF_LR_OUT (bb));
    }

  bitmap_copy (local_live, DF_LR_OUT (bb));

  df_simulate_initialize_backwards (bb, local_live);
  dead_debug_local_init (&debug, NULL, global_debug);

  FOR_BB_INSNS_REVERSE (bb, insn)
    if (DEBUG_INSN_P (insn))
      {
	df_ref use;
	FOR_EACH_INSN_USE (use, insn)
	  if (!bitmap_bit_p (local_live, DF_REF_REGNO (use))
	      && !bitmap_bit_p (au, DF_REF_REGNO (use)))
	    dead_debug_add (&debug, use, DF_REF_REGNO (use));
      }
    else if (INSN_P (insn))
      {
	bool needed = marked_insn_p (insn);

	/* The insn is needed if there is someone who uses the output.  */
	if (!needed)
	  FOR_EACH_INSN_DEF (def, insn)
	    if (bitmap_bit_p (local_live, DF_REF_REGNO (def))
		|| bitmap_bit_p (au, DF_REF_REGNO (def)))
	      {
		needed = true;
		mark_insn (insn, true);
		break;
	      }

	/* No matter if the instruction is needed or not, we remove
	   any regno in the defs from the live set.  */
	df_simulate_defs (insn, local_live);

	/* On the other hand, we do not allow the dead uses to set
	   anything in local_live.  */
	if (needed)
	  df_simulate_uses (insn, local_live);

	/* Insert debug temps for dead REGs used in subsequent debug
	   insns.  We may have to emit a debug temp even if the insn
	   was marked, in case the debug use was after the point of
	   death.  */
	if (debug.used && !bitmap_empty_p (debug.used))
	  FOR_EACH_INSN_DEF (def, insn)
	    dead_debug_insert_temp (&debug, DF_REF_REGNO (def), insn,
				    needed && !control_flow_insn_p (insn)
				    ? DEBUG_TEMP_AFTER_WITH_REG_FORCE
				    : DEBUG_TEMP_BEFORE_WITH_VALUE);
      }

  dead_debug_local_finish (&debug, NULL);
  df_simulate_finalize_backwards (bb, local_live);

  block_changed = !bitmap_equal_p (local_live, DF_LR_IN (bb));
  if (block_changed)
    bitmap_copy (DF_LR_IN (bb), local_live);

  BITMAP_FREE (local_live);
  return block_changed;
}

/* Perform fast DCE once initialization is done.  If WORD_LEVEL is
   true, use the word level dce, otherwise do it at the pseudo
   level.  */

static void
fast_dce (bool word_level)
{
  int *postorder = df_get_postorder (DF_BACKWARD);
  int n_blocks = df_get_n_blocks (DF_BACKWARD);
  /* The set of blocks that have been seen on this iteration.  */
  bitmap processed = BITMAP_ALLOC (&dce_blocks_bitmap_obstack);
  /* The set of blocks that need to have the out vectors reset because
     the in of one of their successors has changed.  */
  bitmap redo_out = BITMAP_ALLOC (&dce_blocks_bitmap_obstack);
  bitmap all_blocks = BITMAP_ALLOC (&dce_blocks_bitmap_obstack);
  bool global_changed = true;

  /* These regs are considered always live so if they end up dying
     because of some def, we need to bring them back again.  Calling
     df_simulate_fixup_sets has the disadvantage of calling
     bb_has_eh_pred once per insn, so we cache the information
     here.  */
  bitmap au = &df->regular_block_artificial_uses;
  bitmap au_eh = &df->eh_block_artificial_uses;
  int i;
  struct dead_debug_global global_debug;

  prescan_insns_for_dce (true);

  for (i = 0; i < n_blocks; i++)
    bitmap_set_bit (all_blocks, postorder[i]);

  dead_debug_global_init (&global_debug, NULL);

  while (global_changed)
    {
      global_changed = false;

      for (i = 0; i < n_blocks; i++)
	{
	  int index = postorder[i];
	  basic_block bb = BASIC_BLOCK_FOR_FN (cfun, index);
	  bool local_changed;

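	  /* The fixed entry and exit blocks contain no insns; just note
	     them as processed.  */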
	  if (index < NUM_FIXED_BLOCKS)
	    {
	      bitmap_set_bit (processed, index);
	      continue;
	    }

	  if (word_level)
	    local_changed
	      = word_dce_process_block (bb, bitmap_bit_p (redo_out, index),
					&global_debug);
	  else
	    local_changed
	      = dce_process_block (bb, bitmap_bit_p (redo_out, index),
				   bb_has_eh_pred (bb) ? au_eh : au,
				   &global_debug);
	  bitmap_set_bit (processed, index);

	  if (local_changed)
	    {
	      edge e;
	      edge_iterator ei;
	      FOR_EACH_EDGE (e, ei, bb->preds)
		if (bitmap_bit_p (processed, e->src->index))
		  /* Be tricky about when we need to iterate the
		     analysis.  We only have to redo the analysis if the
		     bitmaps change at the top of a block that is the
		     entry to a loop.  */
		  global_changed = true;
		else
		  bitmap_set_bit (redo_out, e->src->index);
	    }
	}

      if (global_changed)
	{
	  /* Turn off the RUN_DCE flag to prevent recursive calls to
	     dce.  */
	  int old_flag = df_clear_flags (DF_LR_RUN_DCE);

	  /* So something was deleted that requires a redo.  Do it on
	     the cheap.  */
	  delete_unmarked_insns ();
	  bitmap_clear (marked);
	  bitmap_clear (processed);
	  bitmap_clear (redo_out);

	  /* We do not need to rescan any instructions.  We only need
	     to redo the dataflow equations for the blocks that had a
	     change at the top of the block.  Then we need to redo the
	     iteration.  */
	  if (word_level)
	    df_analyze_problem (df_word_lr, all_blocks, postorder, n_blocks);
	  else
	    df_analyze_problem (df_lr, all_blocks, postorder, n_blocks);

	  if (old_flag & DF_LR_RUN_DCE)
	    df_set_flags (DF_LR_RUN_DCE);

	  prescan_insns_for_dce (true);
	}
    }

  dead_debug_global_finish (&global_debug, NULL);

  delete_unmarked_insns ();

  BITMAP_FREE (processed);
  BITMAP_FREE (redo_out);
  BITMAP_FREE (all_blocks);
}

/* Fast register level DCE.  */

static unsigned int
rest_of_handle_fast_dce (void)
{
  init_dce (true);
  fast_dce (false);
  fini_dce (true);
  return 0;
}

/* Fast word level DCE.  */

void
run_word_dce (void)
{
  int old_flags;

  if (!flag_dce)
    return;

  timevar_push (TV_DCE);
  old_flags = df_clear_flags (DF_DEFER_INSN_RESCAN + DF_NO_INSN_RESCAN);
  df_word_lr_add_problem ();
  init_dce (true);
  fast_dce (true);
  fini_dce (true);
  df_set_flags (old_flags);
  timevar_pop (TV_DCE);
}

/* This is an internal call that is used by the df live register
   problem to run fast dce as a side effect of creating the live
   information.  The stack is organized so that the lr problem is run,
   then this pass is run, which updates the live info and the df
   scanning info, and then control returns to allow the rest of the
   problems to be run.

   This can be called from elsewhere, but it will not update the bit
   vectors for any problems other than LR.  */

void
run_fast_df_dce (void)
{
  if (flag_dce)
    {
      /* If dce is able to delete something, it has to happen
	 immediately.  Otherwise there will be problems handling the
	 eq_notes.  */
      int old_flags =
	df_clear_flags (DF_DEFER_INSN_RESCAN + DF_NO_INSN_RESCAN);

      df_in_progress = true;
      rest_of_handle_fast_dce ();
      df_in_progress = false;

      df_set_flags (old_flags);
    }
}

/* Run a fast DCE pass.  */

void
run_fast_dce (void)
{
  if (flag_dce)
    rest_of_handle_fast_dce ();
}

namespace {

const pass_data pass_data_fast_rtl_dce =
{
  RTL_PASS, /* type */
  "rtl_dce", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_DCE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};

class pass_fast_rtl_dce : public rtl_opt_pass
{
public:
  pass_fast_rtl_dce (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_fast_rtl_dce, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      return optimize > 0 && flag_dce && dbg_cnt (dce_fast);
    }

  virtual unsigned int execute (function *)
    {
      return rest_of_handle_fast_dce ();
    }

}; // class pass_fast_rtl_dce

} // anon namespace

rtl_opt_pass *
make_pass_fast_rtl_dce (gcc::context *ctxt)
{
  return new pass_fast_rtl_dce (ctxt);
}