/* RTL dead code elimination.
   Copyright (C) 2005-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "hashtab.h"
#include "tm.h"
#include "rtl.h"
#include "hash-set.h"
#include "machmode.h"
#include "vec.h"
#include "double-int.h"
#include "input.h"
#include "alias.h"
#include "symtab.h"
#include "wide-int.h"
#include "inchash.h"
#include "tree.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "except.h"
#include "dominance.h"
#include "cfg.h"
#include "cfgrtl.h"
#include "cfgbuild.h"
#include "cfgcleanup.h"
#include "predict.h"
#include "basic-block.h"
#include "df.h"
#include "cselib.h"
#include "dce.h"
#include "valtrack.h"
#include "tree-pass.h"
#include "dbgcnt.h"
#include "tm_p.h"
#include "emit-rtl.h"  /* FIXME: Can go away once crtl is moved to rtl.h.  */

/* -------------------------------------------------------------------------
   Core mark/delete routines
   ------------------------------------------------------------------------- */

/* True if we are invoked while the df engine is running; in this case,
   we don't want to reenter it.  */
static bool df_in_progress = false;

/* True if we are allowed to alter the CFG in this pass.  */
static bool can_alter_cfg = false;

/* Instructions that have been marked but whose dependencies have not
   yet been processed.  */
static vec<rtx_insn *> worklist;

/* Bitmap of instructions marked as needed indexed by INSN_UID.  */
static sbitmap marked;

/* Bitmap obstacks used for block processing by the fast algorithm.  */
static bitmap_obstack dce_blocks_bitmap_obstack;
static bitmap_obstack dce_tmp_bitmap_obstack;

static bool find_call_stack_args (rtx_call_insn *, bool, bool, bitmap);

/* A subroutine for which BODY is part of the instruction being tested;
   either the top-level pattern, or an element of a PARALLEL.  The
   instruction is known not to be a bare USE or CLOBBER.  */

static bool
deletable_insn_p_1 (rtx body)
{
  switch (GET_CODE (body))
    {
    case PREFETCH:
    case TRAP_IF:
      /* The UNSPEC case was added here because the ia-64 claims that
         USEs do not work after reload and generates UNSPECS rather
         than USEs.  Since dce is run after reload we need to avoid
         deleting these even if they are dead.  If it turns out that
         USEs really do work after reload, the ia-64 should be
         changed, and the UNSPEC case can be removed.  */
    case UNSPEC:
      return false;

    default:
      return !volatile_refs_p (body);
    }
}

/* Return true if INSN is a normal instruction that can be deleted by
   the DCE pass.  */

static bool
deletable_insn_p (rtx_insn *insn, bool fast, bitmap arg_stores)
{
  rtx body, x;
  int i;
  df_ref def;

  if (CALL_P (insn)
      /* We cannot delete calls inside of the recursive dce because
         this may cause basic blocks to be deleted and this messes up
         the rest of the stack of optimization passes.  */
      && (!df_in_progress)
      /* We cannot delete pure or const sibling calls because it is
         hard to see the result.  */
      && (!SIBLING_CALL_P (insn))
      /* We can delete dead const or pure calls as long as they do not
         loop infinitely.  */
      && (RTL_CONST_OR_PURE_CALL_P (insn)
          && !RTL_LOOPING_CONST_OR_PURE_CALL_P (insn)))
    return find_call_stack_args (as_a <rtx_call_insn *> (insn), false,
                                 fast, arg_stores);

  /* Don't delete jumps, notes and the like.  */
  if (!NONJUMP_INSN_P (insn))
    return false;

  /* Don't delete insns that may throw if we cannot do so.  */
  if (!(cfun->can_delete_dead_exceptions && can_alter_cfg)
      && !insn_nothrow_p (insn))
    return false;

  /* If INSN sets a global_reg, leave it untouched.  */
  FOR_EACH_INSN_DEF (def, insn)
    if (HARD_REGISTER_NUM_P (DF_REF_REGNO (def))
        && global_regs[DF_REF_REGNO (def)])
      return false;
    /* Initialization of the pseudo PIC register should never be removed.  */
    else if (DF_REF_REG (def) == pic_offset_table_rtx
             && REGNO (pic_offset_table_rtx) >= FIRST_PSEUDO_REGISTER)
      return false;

  body = PATTERN (insn);
  switch (GET_CODE (body))
    {
    case USE:
    case VAR_LOCATION:
      return false;

    case CLOBBER:
      if (fast)
        {
          /* A CLOBBER of a dead pseudo register serves no purpose.
             That is not necessarily true for hard registers until
             after reload.  */
          x = XEXP (body, 0);
          return REG_P (x) && (!HARD_REGISTER_P (x) || reload_completed);
        }
      else
        /* Because of the way that use-def chains are built, it is not
           possible to tell if the clobber is dead because it can
           never be the target of a use-def chain.  */
        return false;

    case PARALLEL:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        if (!deletable_insn_p_1 (XVECEXP (body, 0, i)))
          return false;
      return true;

    default:
      return deletable_insn_p_1 (body);
    }
}

/* Return true if INSN has been marked as needed.  */

static inline int
marked_insn_p (rtx_insn *insn)
{
  /* Artificial defs are always needed and they do not have an insn.
     We should never see them here.  */
  gcc_assert (insn);
  return bitmap_bit_p (marked, INSN_UID (insn));
}

/* If INSN has not yet been marked as needed, mark it now, and add it to
   the worklist.  */

static void
mark_insn (rtx_insn *insn, bool fast)
{
  if (!marked_insn_p (insn))
    {
      if (!fast)
        worklist.safe_push (insn);
      bitmap_set_bit (marked, INSN_UID (insn));
      if (dump_file)
        fprintf (dump_file, "  Adding insn %d to worklist\n", INSN_UID (insn));
      if (CALL_P (insn)
          && !df_in_progress
          && !SIBLING_CALL_P (insn)
          && (RTL_CONST_OR_PURE_CALL_P (insn)
              && !RTL_LOOPING_CONST_OR_PURE_CALL_P (insn)))
        find_call_stack_args (as_a <rtx_call_insn *> (insn), true, fast, NULL);
    }
}

/* A note_stores callback used by mark_nonreg_stores.  DATA is the
   instruction containing DEST.  */

static void
mark_nonreg_stores_1 (rtx dest, const_rtx pattern, void *data)
{
  if (GET_CODE (pattern) != CLOBBER && !REG_P (dest))
    mark_insn ((rtx_insn *) data, true);
}

/* A note_stores callback used by mark_nonreg_stores.  DATA is the
   instruction containing DEST.  */

static void
mark_nonreg_stores_2 (rtx dest, const_rtx pattern, void *data)
{
  if (GET_CODE (pattern) != CLOBBER && !REG_P (dest))
    mark_insn ((rtx_insn *) data, false);
}

/* Mark INSN if BODY stores to a non-register destination.  */

static void
mark_nonreg_stores (rtx body, rtx_insn *insn, bool fast)
{
  if (fast)
    note_stores (body, mark_nonreg_stores_1, insn);
  else
    note_stores (body, mark_nonreg_stores_2, insn);
}

/* Return true if a store to MEM, starting OFF bytes from the stack pointer,
   is a call argument store, and clear the corresponding bits from the
   SP_BYTES bitmap if it is.  */

static bool
check_argument_store (rtx mem, HOST_WIDE_INT off, HOST_WIDE_INT min_sp_off,
                      HOST_WIDE_INT max_sp_off, bitmap sp_bytes)
{
  HOST_WIDE_INT byte;
  for (byte = off; byte < off + GET_MODE_SIZE (GET_MODE (mem)); byte++)
    {
      if (byte < min_sp_off
          || byte >= max_sp_off
          || !bitmap_clear_bit (sp_bytes, byte - min_sp_off))
        return false;
    }
  return true;
}

/* Try to find all stack stores of CALL_INSN arguments if
   ACCUMULATE_OUTGOING_ARGS.  If all stack stores have been found
   and it is therefore safe to eliminate the call, return true,
   otherwise return false.  This function should first be called
   with DO_MARK false, and called again with DO_MARK true only when
   the CALL_INSN is actually going to be marked.  */
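/* In this file the two-phase protocol is: deletable_insn_p calls this
   with DO_MARK false to test whether every argument store of a dead
   const/pure call can be found (recording the stores in ARG_STORES so
   that prescan_insns_for_dce does not mark them as needed), and
   mark_insn calls it again with DO_MARK true when the call turns out
   to be needed, so that the same argument stores are kept as well.  */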
static bool
find_call_stack_args (rtx_call_insn *call_insn, bool do_mark, bool fast,
                      bitmap arg_stores)
{
  rtx p;
  rtx_insn *insn, *prev_insn;
  bool ret;
  HOST_WIDE_INT min_sp_off, max_sp_off;
  bitmap sp_bytes;

  gcc_assert (CALL_P (call_insn));
  if (!ACCUMULATE_OUTGOING_ARGS)
    return true;

  if (!do_mark)
    {
      gcc_assert (arg_stores);
      bitmap_clear (arg_stores);
    }

  min_sp_off = INTTYPE_MAXIMUM (HOST_WIDE_INT);
  max_sp_off = 0;

  /* First determine the minimum and maximum offset from sp for
     stored arguments.  */
  for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
    if (GET_CODE (XEXP (p, 0)) == USE
        && MEM_P (XEXP (XEXP (p, 0), 0)))
      {
        rtx mem = XEXP (XEXP (p, 0), 0), addr;
        HOST_WIDE_INT off = 0, size;
        if (!MEM_SIZE_KNOWN_P (mem))
          return false;
        size = MEM_SIZE (mem);
        addr = XEXP (mem, 0);
        if (GET_CODE (addr) == PLUS
            && REG_P (XEXP (addr, 0))
            && CONST_INT_P (XEXP (addr, 1)))
          {
            off = INTVAL (XEXP (addr, 1));
            addr = XEXP (addr, 0);
          }
        if (addr != stack_pointer_rtx)
          {
            if (!REG_P (addr))
              return false;
            /* If not fast, use chains to see if addr wasn't set to
               sp + offset.  */
            if (!fast)
              {
                df_ref use;
                struct df_link *defs;
                rtx set;

                FOR_EACH_INSN_USE (use, call_insn)
                  if (rtx_equal_p (addr, DF_REF_REG (use)))
                    break;

                if (use == NULL)
                  return false;

                for (defs = DF_REF_CHAIN (use); defs; defs = defs->next)
                  if (! DF_REF_IS_ARTIFICIAL (defs->ref))
                    break;

                if (defs == NULL)
                  return false;

                set = single_set (DF_REF_INSN (defs->ref));
                if (!set)
                  return false;

                if (GET_CODE (SET_SRC (set)) != PLUS
                    || XEXP (SET_SRC (set), 0) != stack_pointer_rtx
                    || !CONST_INT_P (XEXP (SET_SRC (set), 1)))
                  return false;

                off += INTVAL (XEXP (SET_SRC (set), 1));
              }
            else
              return false;
          }
        min_sp_off = MIN (min_sp_off, off);
        max_sp_off = MAX (max_sp_off, off + size);
      }

  if (min_sp_off >= max_sp_off)
    return true;
  sp_bytes = BITMAP_ALLOC (NULL);

  /* Set bits in SP_BYTES bitmap for bytes relative to sp + min_sp_off
     which contain arguments.  Checking has been done in the previous
     loop.  */
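  /* For example, if the call uses argument slots at sp + 8 (4 bytes) and
     sp + 16 (8 bytes), then min_sp_off is 8, max_sp_off is 24, and bits
     0-3 and 8-15 of SP_BYTES get set here.  */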
  for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
    if (GET_CODE (XEXP (p, 0)) == USE
        && MEM_P (XEXP (XEXP (p, 0), 0)))
      {
        rtx mem = XEXP (XEXP (p, 0), 0), addr;
        HOST_WIDE_INT off = 0, byte;
        addr = XEXP (mem, 0);
        if (GET_CODE (addr) == PLUS
            && REG_P (XEXP (addr, 0))
            && CONST_INT_P (XEXP (addr, 1)))
          {
            off = INTVAL (XEXP (addr, 1));
            addr = XEXP (addr, 0);
          }
        if (addr != stack_pointer_rtx)
          {
            df_ref use;
            struct df_link *defs;
            rtx set;

            FOR_EACH_INSN_USE (use, call_insn)
              if (rtx_equal_p (addr, DF_REF_REG (use)))
                break;

            for (defs = DF_REF_CHAIN (use); defs; defs = defs->next)
              if (! DF_REF_IS_ARTIFICIAL (defs->ref))
                break;

            set = single_set (DF_REF_INSN (defs->ref));
            off += INTVAL (XEXP (SET_SRC (set), 1));
          }
        for (byte = off; byte < off + MEM_SIZE (mem); byte++)
          {
            if (!bitmap_set_bit (sp_bytes, byte - min_sp_off))
              gcc_unreachable ();
          }
      }

  /* Walk backwards, looking for argument stores.  The search stops
     when seeing another call, an sp adjustment, or a memory store
     other than an argument store.  */
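  /* Each store found below clears its bytes from SP_BYTES via
     check_argument_store; the walk succeeds (RET becomes true) only once
     every recorded argument byte has been matched by such a store.  */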
  ret = false;
  for (insn = PREV_INSN (call_insn); insn; insn = prev_insn)
    {
      rtx set, mem, addr;
      HOST_WIDE_INT off;

      if (insn == BB_HEAD (BLOCK_FOR_INSN (call_insn)))
        prev_insn = NULL;
      else
        prev_insn = PREV_INSN (insn);

      if (CALL_P (insn))
        break;

      if (!NONDEBUG_INSN_P (insn))
        continue;

      set = single_set (insn);
      if (!set || SET_DEST (set) == stack_pointer_rtx)
        break;

      if (!MEM_P (SET_DEST (set)))
        continue;

      mem = SET_DEST (set);
      addr = XEXP (mem, 0);
      off = 0;
      if (GET_CODE (addr) == PLUS
          && REG_P (XEXP (addr, 0))
          && CONST_INT_P (XEXP (addr, 1)))
        {
          off = INTVAL (XEXP (addr, 1));
          addr = XEXP (addr, 0);
        }
      if (addr != stack_pointer_rtx)
        {
          if (!REG_P (addr))
            break;
          if (!fast)
            {
              df_ref use;
              struct df_link *defs;
              rtx set;

              FOR_EACH_INSN_USE (use, insn)
                if (rtx_equal_p (addr, DF_REF_REG (use)))
                  break;

              if (use == NULL)
                break;

              for (defs = DF_REF_CHAIN (use); defs; defs = defs->next)
                if (! DF_REF_IS_ARTIFICIAL (defs->ref))
                  break;

              if (defs == NULL)
                break;

              set = single_set (DF_REF_INSN (defs->ref));
              if (!set)
                break;

              if (GET_CODE (SET_SRC (set)) != PLUS
                  || XEXP (SET_SRC (set), 0) != stack_pointer_rtx
                  || !CONST_INT_P (XEXP (SET_SRC (set), 1)))
                break;

              off += INTVAL (XEXP (SET_SRC (set), 1));
            }
          else
            break;
        }

      if (GET_MODE_SIZE (GET_MODE (mem)) == 0
          || !check_argument_store (mem, off, min_sp_off,
                                    max_sp_off, sp_bytes))
        break;

      if (!deletable_insn_p (insn, fast, NULL))
        break;

      if (do_mark)
        mark_insn (insn, fast);
      else
        bitmap_set_bit (arg_stores, INSN_UID (insn));

      if (bitmap_empty_p (sp_bytes))
        {
          ret = true;
          break;
        }
    }

  BITMAP_FREE (sp_bytes);
  if (!ret && arg_stores)
    bitmap_clear (arg_stores);

  return ret;
}

/* Remove all REG_EQUAL and REG_EQUIV notes referring to the registers INSN
   writes to.  */

static void
remove_reg_equal_equiv_notes_for_defs (rtx_insn *insn)
{
  df_ref def;

  FOR_EACH_INSN_DEF (def, insn)
    remove_reg_equal_equiv_notes_for_regno (DF_REF_REGNO (def));
}

/* Scan all BBs for debug insns and reset those that reference values
   defined in unmarked insns.  */

static void
reset_unmarked_insns_debug_uses (void)
{
  basic_block bb;
  rtx_insn *insn, *next;

  FOR_EACH_BB_REVERSE_FN (bb, cfun)
    FOR_BB_INSNS_REVERSE_SAFE (bb, insn, next)
      if (DEBUG_INSN_P (insn))
        {
          df_ref use;

          FOR_EACH_INSN_USE (use, insn)
            {
              struct df_link *defs;
              for (defs = DF_REF_CHAIN (use); defs; defs = defs->next)
                {
                  rtx_insn *ref_insn;
                  if (DF_REF_IS_ARTIFICIAL (defs->ref))
                    continue;
                  ref_insn = DF_REF_INSN (defs->ref);
                  if (!marked_insn_p (ref_insn))
                    break;
                }
              if (!defs)
                continue;
              /* ??? FIXME could we propagate the values assigned to
                 each of the DEFs?  */
              INSN_VAR_LOCATION_LOC (insn) = gen_rtx_UNKNOWN_VAR_LOC ();
              df_insn_rescan_debug_internal (insn);
              break;
            }
        }
}

/* Delete every instruction that hasn't been marked.  */

static void
delete_unmarked_insns (void)
{
  basic_block bb;
  rtx_insn *insn, *next;
  bool must_clean = false;

  FOR_EACH_BB_REVERSE_FN (bb, cfun)
    FOR_BB_INSNS_REVERSE_SAFE (bb, insn, next)
      if (NONDEBUG_INSN_P (insn))
        {
          /* Always delete no-op moves.  */
          if (noop_move_p (insn))
            ;

          /* Otherwise rely only on the DCE algorithm.  */
          else if (marked_insn_p (insn))
            continue;

          /* Beware that reaching a dbg counter limit here can result
             in a miscompiled file.  This occurs when a group of insns
             must be deleted together, typically because the kept insn
             depends on the output from the deleted insn.  Deleting
             these insns in reverse order (both at the bb level and
             when looking at the blocks) minimizes this, but does not
             eliminate it, since it is possible for the using insn to
             be at the top of a block and the producer to be at the
             bottom of the block.  However, in most cases this will
             only result in an uninitialized use of an insn that is
             dead anyway.

             However, there is one rare case that will cause a
             miscompile: deletion of non-looping pure and constant
             calls on a machine where ACCUMULATE_OUTGOING_ARGS is true.
             In this case it is possible to remove the call, but leave
             the argument pushes to the stack.  Because of the changes
             to the stack pointer, this will almost always lead to a
             miscompile.  */
          if (!dbg_cnt (dce))
            continue;

          if (dump_file)
            fprintf (dump_file, "DCE: Deleting insn %d\n", INSN_UID (insn));

          /* Before we delete the insn we have to remove the REG_EQUAL notes
             for the destination regs in order to avoid dangling notes.  */
          remove_reg_equal_equiv_notes_for_defs (insn);

          /* If a pure or const call is deleted, this may make the cfg
             have unreachable blocks.  We remember this and call
             delete_unreachable_blocks at the end.  */
          if (CALL_P (insn))
            must_clean = true;

          /* Now delete the insn.  */
          delete_insn_and_edges (insn);
        }

  /* Deleted a pure or const call.  */
  if (must_clean)
    delete_unreachable_blocks ();
}

/* Go through the instructions and mark those whose necessity is not
   dependent on inter-instruction information.  Make sure all other
   instructions are not marked.  */

static void
prescan_insns_for_dce (bool fast)
{
  basic_block bb;
  rtx_insn *insn, *prev;
  bitmap arg_stores = NULL;

  if (dump_file)
    fprintf (dump_file, "Finding needed instructions:\n");

  if (!df_in_progress && ACCUMULATE_OUTGOING_ARGS)
    arg_stores = BITMAP_ALLOC (NULL);

  FOR_EACH_BB_FN (bb, cfun)
    {
      FOR_BB_INSNS_REVERSE_SAFE (bb, insn, prev)
        if (NONDEBUG_INSN_P (insn))
          {
            /* Don't mark argument stores now.  They will be marked
               if needed when the associated CALL is marked.  */
            if (arg_stores && bitmap_bit_p (arg_stores, INSN_UID (insn)))
              continue;
            if (deletable_insn_p (insn, fast, arg_stores))
              mark_nonreg_stores (PATTERN (insn), insn, fast);
            else
              mark_insn (insn, fast);
          }

      /* find_call_stack_args only looks at argument stores in the
         same bb.  */
      if (arg_stores)
        bitmap_clear (arg_stores);
    }

  if (arg_stores)
    BITMAP_FREE (arg_stores);

  if (dump_file)
    fprintf (dump_file, "Finished finding needed instructions:\n");
}

/* UD-based DCE routines.  */

/* Mark instructions that define artificially-used registers, such as
   the frame pointer and the stack pointer.  */

static void
mark_artificial_uses (void)
{
  basic_block bb;
  struct df_link *defs;
  df_ref use;

  FOR_ALL_BB_FN (bb, cfun)
    FOR_EACH_ARTIFICIAL_USE (use, bb->index)
      for (defs = DF_REF_CHAIN (use); defs; defs = defs->next)
        if (!DF_REF_IS_ARTIFICIAL (defs->ref))
          mark_insn (DF_REF_INSN (defs->ref), false);
}

/* Mark every instruction that defines a register value that INSN uses.  */

static void
mark_reg_dependencies (rtx_insn *insn)
{
  struct df_link *defs;
  df_ref use;

  if (DEBUG_INSN_P (insn))
    return;

  FOR_EACH_INSN_USE (use, insn)
    {
      if (dump_file)
        {
          fprintf (dump_file, "Processing use of ");
          print_simple_rtl (dump_file, DF_REF_REG (use));
          fprintf (dump_file, " in insn %d:\n", INSN_UID (insn));
        }
      for (defs = DF_REF_CHAIN (use); defs; defs = defs->next)
        if (! DF_REF_IS_ARTIFICIAL (defs->ref))
          mark_insn (DF_REF_INSN (defs->ref), false);
    }
}

/* Initialize global variables for a new DCE pass.  */

static void
init_dce (bool fast)
{
  if (!df_in_progress)
    {
      if (!fast)
        {
          df_set_flags (DF_RD_PRUNE_DEAD_DEFS);
          df_chain_add_problem (DF_UD_CHAIN);
        }
      df_analyze ();
    }

  if (dump_file)
    df_dump (dump_file);

  if (fast)
    {
      bitmap_obstack_initialize (&dce_blocks_bitmap_obstack);
      bitmap_obstack_initialize (&dce_tmp_bitmap_obstack);
      can_alter_cfg = false;
    }
  else
    can_alter_cfg = true;

  marked = sbitmap_alloc (get_max_uid () + 1);
  bitmap_clear (marked);
}

/* Free the data allocated by init_dce.  */

static void
fini_dce (bool fast)
{
  sbitmap_free (marked);

  if (fast)
    {
      bitmap_obstack_release (&dce_blocks_bitmap_obstack);
      bitmap_obstack_release (&dce_tmp_bitmap_obstack);
    }
}

/* UD-chain based DCE.  */

static unsigned int
rest_of_handle_ud_dce (void)
{
  rtx_insn *insn;

  init_dce (false);

  prescan_insns_for_dce (false);
  mark_artificial_uses ();
  while (worklist.length () > 0)
    {
      insn = worklist.pop ();
      mark_reg_dependencies (insn);
    }
  worklist.release ();

  if (MAY_HAVE_DEBUG_INSNS)
    reset_unmarked_insns_debug_uses ();

  /* Before any insns are deleted, we must remove the chains since
     they are not bidirectional.  */
  df_remove_problem (df_chain);
  delete_unmarked_insns ();

  fini_dce (false);
  return 0;
}

namespace {

const pass_data pass_data_ud_rtl_dce =
{
  RTL_PASS, /* type */
  "ud_dce", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_DCE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};

class pass_ud_rtl_dce : public rtl_opt_pass
{
public:
  pass_ud_rtl_dce (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_ud_rtl_dce, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      return optimize > 1 && flag_dce && dbg_cnt (dce_ud);
    }

  virtual unsigned int execute (function *)
    {
      return rest_of_handle_ud_dce ();
    }

}; // class pass_ud_rtl_dce

} // anon namespace

rtl_opt_pass *
make_pass_ud_rtl_dce (gcc::context *ctxt)
{
  return new pass_ud_rtl_dce (ctxt);
}


/* -------------------------------------------------------------------------
   Fast DCE functions
   ------------------------------------------------------------------------- */

/* Process basic block BB.  Return true if the live_in set has
   changed.  REDO_OUT is true if the info at the bottom of the block
   needs to be recalculated before starting.  Track global substitution
   of uses of dead pseudos in debug insns using GLOBAL_DEBUG.  */
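/* Note on the representation used below: the word-level LR problem tracks
   each double-word pseudo with two bits, 2 * REGNO and 2 * REGNO + 1, one
   per word half, which is why the debug-use check below tests both bits
   of LOCAL_LIVE before treating the register as dead.  */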
static bool
word_dce_process_block (basic_block bb, bool redo_out,
                        struct dead_debug_global *global_debug)
{
  bitmap local_live = BITMAP_ALLOC (&dce_tmp_bitmap_obstack);
  rtx_insn *insn;
  bool block_changed;
  struct dead_debug_local debug;

  if (redo_out)
    {
      /* Need to redo the live_out set of this block if one of the
         succs of this block has had a change in its live_in set.  */
      edge e;
      edge_iterator ei;
      df_confluence_function_n con_fun_n = df_word_lr->problem->con_fun_n;
      bitmap_clear (DF_WORD_LR_OUT (bb));
      FOR_EACH_EDGE (e, ei, bb->succs)
        (*con_fun_n) (e);
    }

  if (dump_file)
    {
      fprintf (dump_file, "processing block %d live out = ", bb->index);
      df_print_word_regset (dump_file, DF_WORD_LR_OUT (bb));
    }

  bitmap_copy (local_live, DF_WORD_LR_OUT (bb));
  dead_debug_local_init (&debug, NULL, global_debug);

  FOR_BB_INSNS_REVERSE (bb, insn)
    if (DEBUG_INSN_P (insn))
      {
        df_ref use;
        FOR_EACH_INSN_USE (use, insn)
          if (DF_REF_REGNO (use) >= FIRST_PSEUDO_REGISTER
              && (GET_MODE_SIZE (GET_MODE (DF_REF_REAL_REG (use)))
                  == 2 * UNITS_PER_WORD)
              && !bitmap_bit_p (local_live, 2 * DF_REF_REGNO (use))
              && !bitmap_bit_p (local_live, 2 * DF_REF_REGNO (use) + 1))
            dead_debug_add (&debug, use, DF_REF_REGNO (use));
      }
    else if (INSN_P (insn))
      {
        bool any_changed;

        /* No matter if the instruction is needed or not, we remove
           any regno in the defs from the live set.  */
        any_changed = df_word_lr_simulate_defs (insn, local_live);
        if (any_changed)
          mark_insn (insn, true);

        /* On the other hand, we do not allow the dead uses to set
           anything in local_live.  */
        if (marked_insn_p (insn))
          df_word_lr_simulate_uses (insn, local_live);

        /* Insert debug temps for dead REGs used in subsequent debug
           insns.  We may have to emit a debug temp even if the insn
           was marked, in case the debug use was after the point of
           death.  */
        if (debug.used && !bitmap_empty_p (debug.used))
          {
            df_ref def;

            FOR_EACH_INSN_DEF (def, insn)
              dead_debug_insert_temp (&debug, DF_REF_REGNO (def), insn,
                                      marked_insn_p (insn)
                                      && !control_flow_insn_p (insn)
                                      ? DEBUG_TEMP_AFTER_WITH_REG_FORCE
                                      : DEBUG_TEMP_BEFORE_WITH_VALUE);
          }

        if (dump_file)
          {
            fprintf (dump_file, "finished processing insn %d live out = ",
                     INSN_UID (insn));
            df_print_word_regset (dump_file, local_live);
          }
      }

  block_changed = !bitmap_equal_p (local_live, DF_WORD_LR_IN (bb));
  if (block_changed)
    bitmap_copy (DF_WORD_LR_IN (bb), local_live);

  dead_debug_local_finish (&debug, NULL);
  BITMAP_FREE (local_live);
  return block_changed;
}

/* Process basic block BB.  Return true if the live_in set has
   changed.  REDO_OUT is true if the info at the bottom of the block
   needs to be recalculated before starting.  AU is the proper set of
   artificial uses.  Track global substitution of uses of dead pseudos
   in debug insns using GLOBAL_DEBUG.  */
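/* AU is passed in from fast_dce: the EH-block artificial-use set for
   blocks with an EH predecessor, and the regular artificial-use set
   otherwise (see the au/au_eh bitmaps cached there).  */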
static bool
dce_process_block (basic_block bb, bool redo_out, bitmap au,
                   struct dead_debug_global *global_debug)
{
  bitmap local_live = BITMAP_ALLOC (&dce_tmp_bitmap_obstack);
  rtx_insn *insn;
  bool block_changed;
  df_ref def;
  struct dead_debug_local debug;

  if (redo_out)
    {
      /* Need to redo the live_out set of this block if one of the
         succs of this block has had a change in its live_in set.  */
      edge e;
      edge_iterator ei;
      df_confluence_function_n con_fun_n = df_lr->problem->con_fun_n;
      bitmap_clear (DF_LR_OUT (bb));
      FOR_EACH_EDGE (e, ei, bb->succs)
        (*con_fun_n) (e);
    }

  if (dump_file)
    {
      fprintf (dump_file, "processing block %d lr out = ", bb->index);
      df_print_regset (dump_file, DF_LR_OUT (bb));
    }

  bitmap_copy (local_live, DF_LR_OUT (bb));

  df_simulate_initialize_backwards (bb, local_live);
  dead_debug_local_init (&debug, NULL, global_debug);

  FOR_BB_INSNS_REVERSE (bb, insn)
    if (DEBUG_INSN_P (insn))
      {
        df_ref use;
        FOR_EACH_INSN_USE (use, insn)
          if (!bitmap_bit_p (local_live, DF_REF_REGNO (use))
              && !bitmap_bit_p (au, DF_REF_REGNO (use)))
            dead_debug_add (&debug, use, DF_REF_REGNO (use));
      }
    else if (INSN_P (insn))
      {
        bool needed = marked_insn_p (insn);

        /* The insn is needed if there is someone who uses the output.  */
        if (!needed)
          FOR_EACH_INSN_DEF (def, insn)
            if (bitmap_bit_p (local_live, DF_REF_REGNO (def))
                || bitmap_bit_p (au, DF_REF_REGNO (def)))
              {
                needed = true;
                mark_insn (insn, true);
                break;
              }

        /* No matter if the instruction is needed or not, we remove
           any regno in the defs from the live set.  */
        df_simulate_defs (insn, local_live);

        /* On the other hand, we do not allow the dead uses to set
           anything in local_live.  */
        if (needed)
          df_simulate_uses (insn, local_live);

        /* Insert debug temps for dead REGs used in subsequent debug
           insns.  We may have to emit a debug temp even if the insn
           was marked, in case the debug use was after the point of
           death.  */
        if (debug.used && !bitmap_empty_p (debug.used))
          FOR_EACH_INSN_DEF (def, insn)
            dead_debug_insert_temp (&debug, DF_REF_REGNO (def), insn,
                                    needed && !control_flow_insn_p (insn)
                                    ? DEBUG_TEMP_AFTER_WITH_REG_FORCE
                                    : DEBUG_TEMP_BEFORE_WITH_VALUE);
      }

  dead_debug_local_finish (&debug, NULL);
  df_simulate_finalize_backwards (bb, local_live);

  block_changed = !bitmap_equal_p (local_live, DF_LR_IN (bb));
  if (block_changed)
    bitmap_copy (DF_LR_IN (bb), local_live);

  BITMAP_FREE (local_live);
  return block_changed;
}

/* Perform fast DCE once initialization is done.  If WORD_LEVEL is
   true, use the word level dce, otherwise do it at the pseudo
   level.  */
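/* The driver below iterates over the blocks in the df backward postorder.
   Whenever the live_in of a block changes and one of its predecessors has
   already been processed in this sweep (i.e. the block is a loop entry),
   the deletions so far are committed, the LR (or word LR) problem is
   re-solved for all blocks, and the whole scan is repeated.  */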
static void
fast_dce (bool word_level)
{
  int *postorder = df_get_postorder (DF_BACKWARD);
  int n_blocks = df_get_n_blocks (DF_BACKWARD);
  /* The set of blocks that have been seen on this iteration.  */
  bitmap processed = BITMAP_ALLOC (&dce_blocks_bitmap_obstack);
  /* The set of blocks that need to have the out vectors reset because
     the in of one of their successors has changed.  */
  bitmap redo_out = BITMAP_ALLOC (&dce_blocks_bitmap_obstack);
  bitmap all_blocks = BITMAP_ALLOC (&dce_blocks_bitmap_obstack);
  bool global_changed = true;

  /* These regs are considered always live so if they end up dying
     because of some def, we need to bring them back again.  Calling
     df_simulate_fixup_sets has the disadvantage of calling
     bb_has_eh_pred once per insn, so we cache the information
     here.  */
  bitmap au = &df->regular_block_artificial_uses;
  bitmap au_eh = &df->eh_block_artificial_uses;
  int i;
  struct dead_debug_global global_debug;

  prescan_insns_for_dce (true);

  for (i = 0; i < n_blocks; i++)
    bitmap_set_bit (all_blocks, postorder[i]);

  dead_debug_global_init (&global_debug, NULL);

  while (global_changed)
    {
      global_changed = false;

      for (i = 0; i < n_blocks; i++)
        {
          int index = postorder[i];
          basic_block bb = BASIC_BLOCK_FOR_FN (cfun, index);
          bool local_changed;

          if (index < NUM_FIXED_BLOCKS)
            {
              bitmap_set_bit (processed, index);
              continue;
            }

          if (word_level)
            local_changed
              = word_dce_process_block (bb, bitmap_bit_p (redo_out, index),
                                        &global_debug);
          else
            local_changed
              = dce_process_block (bb, bitmap_bit_p (redo_out, index),
                                   bb_has_eh_pred (bb) ? au_eh : au,
                                   &global_debug);
          bitmap_set_bit (processed, index);

          if (local_changed)
            {
              edge e;
              edge_iterator ei;
              FOR_EACH_EDGE (e, ei, bb->preds)
                if (bitmap_bit_p (processed, e->src->index))
                  /* Be tricky about when we need to iterate the
                     analysis.  We only have to redo the analysis if the
                     bitmaps change at the top of a block that is the
                     entry to a loop.  */
                  global_changed = true;
                else
                  bitmap_set_bit (redo_out, e->src->index);
            }
        }

      if (global_changed)
        {
          /* Turn off the RUN_DCE flag to prevent recursive calls to
             dce.  */
          int old_flag = df_clear_flags (DF_LR_RUN_DCE);

          /* So something was deleted that requires a redo.  Do it on
             the cheap.  */
          delete_unmarked_insns ();
          bitmap_clear (marked);
          bitmap_clear (processed);
          bitmap_clear (redo_out);

          /* We do not need to rescan any instructions.  We only need
             to redo the dataflow equations for the blocks that had a
             change at the top of the block.  Then we need to redo the
             iteration.  */
          if (word_level)
            df_analyze_problem (df_word_lr, all_blocks, postorder, n_blocks);
          else
            df_analyze_problem (df_lr, all_blocks, postorder, n_blocks);

          if (old_flag & DF_LR_RUN_DCE)
            df_set_flags (DF_LR_RUN_DCE);

          prescan_insns_for_dce (true);
        }
    }

  dead_debug_global_finish (&global_debug, NULL);

  delete_unmarked_insns ();

  BITMAP_FREE (processed);
  BITMAP_FREE (redo_out);
  BITMAP_FREE (all_blocks);
}

/* Fast register level DCE.  */

static unsigned int
rest_of_handle_fast_dce (void)
{
  init_dce (true);
  fast_dce (false);
  fini_dce (true);
  return 0;
}

/* Fast word level DCE.  */

void
run_word_dce (void)
{
  int old_flags;

  if (!flag_dce)
    return;

  timevar_push (TV_DCE);
  old_flags = df_clear_flags (DF_DEFER_INSN_RESCAN + DF_NO_INSN_RESCAN);
  df_word_lr_add_problem ();
  init_dce (true);
  fast_dce (true);
  fini_dce (true);
  df_set_flags (old_flags);
  timevar_pop (TV_DCE);
}

/* This is an internal call that is used by the df live register
   problem to run fast dce as a side effect of creating the live
   information.  The stack is organized so that the lr problem is run,
   this pass is run, which updates the live info and the df scanning
   info, and then returns to allow the rest of the problems to be run.

   This can be called from elsewhere but it will not update the bit
   vectors for any other problems than LR.  */

void
run_fast_df_dce (void)
{
  if (flag_dce)
    {
      /* If dce is able to delete something, it has to happen
         immediately.  Otherwise there will be problems handling the
         eq_notes.  */
      int old_flags
        = df_clear_flags (DF_DEFER_INSN_RESCAN + DF_NO_INSN_RESCAN);

      df_in_progress = true;
      rest_of_handle_fast_dce ();
      df_in_progress = false;

      df_set_flags (old_flags);
    }
}

/* Run a fast DCE pass.  */

void
run_fast_dce (void)
{
  if (flag_dce)
    rest_of_handle_fast_dce ();
}

namespace {

const pass_data pass_data_fast_rtl_dce =
{
  RTL_PASS, /* type */
  "rtl_dce", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_DCE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};

class pass_fast_rtl_dce : public rtl_opt_pass
{
public:
  pass_fast_rtl_dce (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_fast_rtl_dce, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      return optimize > 0 && flag_dce && dbg_cnt (dce_fast);
    }

  virtual unsigned int execute (function *)
    {
      return rest_of_handle_fast_dce ();
    }

}; // class pass_fast_rtl_dce

} // anon namespace

rtl_opt_pass *
make_pass_fast_rtl_dce (gcc::context *ctxt)
{
  return new pass_fast_rtl_dce (ctxt);
}