/* RTL dead code elimination.
   Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "hashtab.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "except.h"
#include "df.h"
#include "cselib.h"
#include "dce.h"
#include "valtrack.h"
#include "tree-pass.h"
#include "dbgcnt.h"
#include "tm_p.h"
#include "emit-rtl.h"  /* FIXME: Can go away once crtl is moved to rtl.h.  */

/* -------------------------------------------------------------------------
   Core mark/delete routines
   ------------------------------------------------------------------------- */
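
/* The code below implements a mark-and-sweep scheme: prescan_insns_for_dce
   marks the instructions that are inherently necessary, that necessity is
   propagated to the producers of their inputs -- via use-def chains in the
   UD-chain pass, or via backward liveness simulation in the fast pass --
   and delete_unmarked_insns finally removes whatever is still unmarked.  */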

/* True if we are invoked while the df engine is running; in this case,
   we don't want to reenter it.  */
static bool df_in_progress = false;

/* True if we are allowed to alter the CFG in this pass.  */
static bool can_alter_cfg = false;

/* Instructions that have been marked but whose dependencies have not
   yet been processed.  */
static vec<rtx> worklist;

/* Bitmap of instructions marked as needed indexed by INSN_UID.  */
static sbitmap marked;

/* Bitmap obstacks used for block processing by the fast algorithm.  */
static bitmap_obstack dce_blocks_bitmap_obstack;
static bitmap_obstack dce_tmp_bitmap_obstack;

static bool find_call_stack_args (rtx, bool, bool, bitmap);


/* A subroutine for which BODY is part of the instruction being tested;
   either the top-level pattern, or an element of a PARALLEL.  The
   instruction is known not to be a bare USE or CLOBBER.  */

static bool
deletable_insn_p_1 (rtx body)
{
  switch (GET_CODE (body))
    {
    case PREFETCH:
    case TRAP_IF:
      /* The UNSPEC case was added here because the ia-64 claims that
         USEs do not work after reload and generates UNSPECs rather
         than USEs.  Since dce is run after reload we need to avoid
         deleting these even if they are dead.  If it turns out that
         USEs really do work after reload, the ia-64 should be
         changed, and the UNSPEC case can be removed.  */
    case UNSPEC:
      return false;

    default:
      return !volatile_refs_p (body);
    }
}

/* Return true if INSN is a normal instruction that can be deleted by
   the DCE pass.  */

static bool
deletable_insn_p (rtx insn, bool fast, bitmap arg_stores)
{
  rtx body, x;
  int i;

  if (CALL_P (insn)
      /* We cannot delete calls inside of the recursive dce because
         this may cause basic blocks to be deleted and this messes up
         the rest of the stack of optimization passes.  */
      && (!df_in_progress)
      /* We cannot delete pure or const sibling calls because it is
         hard to see the result.  */
      && (!SIBLING_CALL_P (insn))
      /* We can delete dead const or pure calls as long as they do not
         infinite loop.  */
      && (RTL_CONST_OR_PURE_CALL_P (insn)
          && !RTL_LOOPING_CONST_OR_PURE_CALL_P (insn)))
    return find_call_stack_args (insn, false, fast, arg_stores);

  /* Don't delete jumps, notes and the like.  */
  if (!NONJUMP_INSN_P (insn))
    return false;

  /* Don't delete insns that may throw if we cannot do so.  */
  if (!(cfun->can_delete_dead_exceptions && can_alter_cfg)
      && !insn_nothrow_p (insn))
    return false;

  /* If INSN sets a global_reg, leave it untouched.  */
  for (df_ref *def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
    if (HARD_REGISTER_NUM_P (DF_REF_REGNO (*def_rec))
        && global_regs[DF_REF_REGNO (*def_rec)])
      return false;

  body = PATTERN (insn);
  switch (GET_CODE (body))
    {
    case USE:
    case VAR_LOCATION:
      return false;

    case CLOBBER:
      if (fast)
        {
          /* A CLOBBER of a dead pseudo register serves no purpose.
             That is not necessarily true for hard registers until
             after reload.  */
          x = XEXP (body, 0);
          return REG_P (x) && (!HARD_REGISTER_P (x) || reload_completed);
        }
      else
        /* Because of the way that use-def chains are built, it is not
           possible to tell if the clobber is dead because it can
           never be the target of a use-def chain.  */
        return false;

    case PARALLEL:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        if (!deletable_insn_p_1 (XVECEXP (body, 0, i)))
          return false;
      return true;

    default:
      return deletable_insn_p_1 (body);
    }
}
166 static inline int
167 marked_insn_p (rtx insn)
169 /* Artificial defs are always needed and they do not have an insn.
170 We should never see them here. */
171 gcc_assert (insn);
172 return bitmap_bit_p (marked, INSN_UID (insn));

/* If INSN has not yet been marked as needed, mark it now, and add it to
   the worklist.  */

static void
mark_insn (rtx insn, bool fast)
{
  if (!marked_insn_p (insn))
    {
      if (!fast)
        worklist.safe_push (insn);
      bitmap_set_bit (marked, INSN_UID (insn));
      if (dump_file)
        fprintf (dump_file, " Adding insn %d to worklist\n", INSN_UID (insn));
      if (CALL_P (insn)
          && !df_in_progress
          && !SIBLING_CALL_P (insn)
          && (RTL_CONST_OR_PURE_CALL_P (insn)
              && !RTL_LOOPING_CONST_OR_PURE_CALL_P (insn)))
        find_call_stack_args (insn, true, fast, NULL);
    }
}

/* A note_stores callback used by mark_nonreg_stores.  DATA is the
   instruction containing DEST.  */

static void
mark_nonreg_stores_1 (rtx dest, const_rtx pattern, void *data)
{
  if (GET_CODE (pattern) != CLOBBER && !REG_P (dest))
    mark_insn ((rtx) data, true);
}


/* A note_stores callback used by mark_nonreg_stores.  DATA is the
   instruction containing DEST.  */

static void
mark_nonreg_stores_2 (rtx dest, const_rtx pattern, void *data)
{
  if (GET_CODE (pattern) != CLOBBER && !REG_P (dest))
    mark_insn ((rtx) data, false);
}


/* Mark INSN if BODY stores to a non-register destination.  */

static void
mark_nonreg_stores (rtx body, rtx insn, bool fast)
{
  if (fast)
    note_stores (body, mark_nonreg_stores_1, insn);
  else
    note_stores (body, mark_nonreg_stores_2, insn);
}

/* Return true if a store to MEM, starting OFF bytes from the stack pointer,
   is a call argument store, and clear the corresponding bits from the
   SP_BYTES bitmap if it is.  */

static bool
check_argument_store (rtx mem, HOST_WIDE_INT off, HOST_WIDE_INT min_sp_off,
                      HOST_WIDE_INT max_sp_off, bitmap sp_bytes)
{
  HOST_WIDE_INT byte;
  for (byte = off; byte < off + GET_MODE_SIZE (GET_MODE (mem)); byte++)
    {
      if (byte < min_sp_off
          || byte >= max_sp_off
          || !bitmap_clear_bit (sp_bytes, byte - min_sp_off))
        return false;
    }
  return true;
}
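
/* For instance, with MIN_SP_OFF == 0 a 4-byte argument store at sp + 8
   clears bits 8..11 of SP_BYTES; a store touching a byte outside
   [MIN_SP_OFF, MAX_SP_OFF) or a byte that was already cleared is rejected,
   since it cannot be purely an argument store for this call.  */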

/* Try to find all stack stores of CALL_INSN arguments if
   ACCUMULATE_OUTGOING_ARGS.  If all stack stores have been found
   and it is therefore safe to eliminate the call, return true,
   otherwise return false.  This function should first be called
   with DO_MARK false, and called again with DO_MARK true only when
   the CALL_INSN is actually going to be marked.  */

static bool
find_call_stack_args (rtx call_insn, bool do_mark, bool fast,
                      bitmap arg_stores)
{
  rtx p, insn, prev_insn;
  bool ret;
  HOST_WIDE_INT min_sp_off, max_sp_off;
  bitmap sp_bytes;

  gcc_assert (CALL_P (call_insn));
  if (!ACCUMULATE_OUTGOING_ARGS)
    return true;

  if (!do_mark)
    {
      gcc_assert (arg_stores);
      bitmap_clear (arg_stores);
    }

  min_sp_off = INTTYPE_MAXIMUM (HOST_WIDE_INT);
  max_sp_off = 0;

  /* First determine the minimum and maximum offset from sp for
     stored arguments.  */
  for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
    if (GET_CODE (XEXP (p, 0)) == USE
        && MEM_P (XEXP (XEXP (p, 0), 0)))
      {
        rtx mem = XEXP (XEXP (p, 0), 0), addr;
        HOST_WIDE_INT off = 0, size;
        if (!MEM_SIZE_KNOWN_P (mem))
          return false;
        size = MEM_SIZE (mem);
        addr = XEXP (mem, 0);
        if (GET_CODE (addr) == PLUS
            && REG_P (XEXP (addr, 0))
            && CONST_INT_P (XEXP (addr, 1)))
          {
            off = INTVAL (XEXP (addr, 1));
            addr = XEXP (addr, 0);
          }
        if (addr != stack_pointer_rtx)
          {
            if (!REG_P (addr))
              return false;
            /* If not fast, use chains to see if addr wasn't set to
               sp + offset.  */
            if (!fast)
              {
                df_ref *use_rec;
                struct df_link *defs;
                rtx set;

                for (use_rec = DF_INSN_USES (call_insn); *use_rec; use_rec++)
                  if (rtx_equal_p (addr, DF_REF_REG (*use_rec)))
                    break;

                if (*use_rec == NULL)
                  return false;

                for (defs = DF_REF_CHAIN (*use_rec); defs; defs = defs->next)
                  if (! DF_REF_IS_ARTIFICIAL (defs->ref))
                    break;

                if (defs == NULL)
                  return false;

                set = single_set (DF_REF_INSN (defs->ref));
                if (!set)
                  return false;

                if (GET_CODE (SET_SRC (set)) != PLUS
                    || XEXP (SET_SRC (set), 0) != stack_pointer_rtx
                    || !CONST_INT_P (XEXP (SET_SRC (set), 1)))
                  return false;

                off += INTVAL (XEXP (SET_SRC (set), 1));
              }
            else
              return false;
          }
        min_sp_off = MIN (min_sp_off, off);
        max_sp_off = MAX (max_sp_off, off + size);
      }

  if (min_sp_off >= max_sp_off)
    return true;
  sp_bytes = BITMAP_ALLOC (NULL);

  /* Set bits in SP_BYTES bitmap for bytes relative to sp + min_sp_off
     which contain arguments.  Checking has been done in the previous
     loop.  */
  for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
    if (GET_CODE (XEXP (p, 0)) == USE
        && MEM_P (XEXP (XEXP (p, 0), 0)))
      {
        rtx mem = XEXP (XEXP (p, 0), 0), addr;
        HOST_WIDE_INT off = 0, byte;
        addr = XEXP (mem, 0);
        if (GET_CODE (addr) == PLUS
            && REG_P (XEXP (addr, 0))
            && CONST_INT_P (XEXP (addr, 1)))
          {
            off = INTVAL (XEXP (addr, 1));
            addr = XEXP (addr, 0);
          }
        if (addr != stack_pointer_rtx)
          {
            df_ref *use_rec;
            struct df_link *defs;
            rtx set;

            for (use_rec = DF_INSN_USES (call_insn); *use_rec; use_rec++)
              if (rtx_equal_p (addr, DF_REF_REG (*use_rec)))
                break;

            for (defs = DF_REF_CHAIN (*use_rec); defs; defs = defs->next)
              if (! DF_REF_IS_ARTIFICIAL (defs->ref))
                break;

            set = single_set (DF_REF_INSN (defs->ref));
            off += INTVAL (XEXP (SET_SRC (set), 1));
          }
        for (byte = off; byte < off + MEM_SIZE (mem); byte++)
          {
            if (!bitmap_set_bit (sp_bytes, byte - min_sp_off))
              gcc_unreachable ();
          }
      }

  /* Walk backwards, looking for argument stores.  The search stops
     when seeing another call, sp adjustment or memory store other than
     argument store.  */
  ret = false;
  for (insn = PREV_INSN (call_insn); insn; insn = prev_insn)
    {
      rtx set, mem, addr;
      HOST_WIDE_INT off;

      if (insn == BB_HEAD (BLOCK_FOR_INSN (call_insn)))
        prev_insn = NULL_RTX;
      else
        prev_insn = PREV_INSN (insn);

      if (CALL_P (insn))
        break;

      if (!NONDEBUG_INSN_P (insn))
        continue;

      set = single_set (insn);
      if (!set || SET_DEST (set) == stack_pointer_rtx)
        break;

      if (!MEM_P (SET_DEST (set)))
        continue;

      mem = SET_DEST (set);
      addr = XEXP (mem, 0);
      off = 0;
      if (GET_CODE (addr) == PLUS
          && REG_P (XEXP (addr, 0))
          && CONST_INT_P (XEXP (addr, 1)))
        {
          off = INTVAL (XEXP (addr, 1));
          addr = XEXP (addr, 0);
        }
      if (addr != stack_pointer_rtx)
        {
          if (!REG_P (addr))
            break;
          if (!fast)
            {
              df_ref *use_rec;
              struct df_link *defs;
              rtx set;

              for (use_rec = DF_INSN_USES (insn); *use_rec; use_rec++)
                if (rtx_equal_p (addr, DF_REF_REG (*use_rec)))
                  break;

              if (*use_rec == NULL)
                break;

              for (defs = DF_REF_CHAIN (*use_rec); defs; defs = defs->next)
                if (! DF_REF_IS_ARTIFICIAL (defs->ref))
                  break;

              if (defs == NULL)
                break;

              set = single_set (DF_REF_INSN (defs->ref));
              if (!set)
                break;

              if (GET_CODE (SET_SRC (set)) != PLUS
                  || XEXP (SET_SRC (set), 0) != stack_pointer_rtx
                  || !CONST_INT_P (XEXP (SET_SRC (set), 1)))
                break;

              off += INTVAL (XEXP (SET_SRC (set), 1));
            }
          else
            break;
        }

      if (GET_MODE_SIZE (GET_MODE (mem)) == 0
          || !check_argument_store (mem, off, min_sp_off,
                                    max_sp_off, sp_bytes))
        break;

      if (!deletable_insn_p (insn, fast, NULL))
        break;

      if (do_mark)
        mark_insn (insn, fast);
      else
        bitmap_set_bit (arg_stores, INSN_UID (insn));

      if (bitmap_empty_p (sp_bytes))
        {
          ret = true;
          break;
        }
    }

  BITMAP_FREE (sp_bytes);
  if (!ret && arg_stores)
    bitmap_clear (arg_stores);

  return ret;
}
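
/* Schematically (the precise RTL is target-dependent), the pattern the
   function above looks for when ACCUMULATE_OUTGOING_ARGS is:

       (insn ... (set (mem:SI (plus (reg sp) (const_int 4))) (reg 101)))
       (insn ... (set (mem:SI (reg sp)) (reg 100)))
       (call_insn ...)
          with CALL_INSN_FUNCTION_USAGE containing
          (use (mem:SI (reg sp))) and
          (use (mem:SI (plus (reg sp) (const_int 4))))

   The USEs describe the argument slots; only the stores that exactly cover
   those slots may be deleted (or recorded in ARG_STORES) together with the
   call.  */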

/* Remove all REG_EQUAL and REG_EQUIV notes referring to the registers INSN
   writes to.  */

static void
remove_reg_equal_equiv_notes_for_defs (rtx insn)
{
  df_ref *def_rec;

  for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
    remove_reg_equal_equiv_notes_for_regno (DF_REF_REGNO (*def_rec));
}

/* Scan all BBs for debug insns and reset those that reference values
   defined in unmarked insns.  */

static void
reset_unmarked_insns_debug_uses (void)
{
  basic_block bb;
  rtx insn, next;

  FOR_EACH_BB_REVERSE (bb)
    FOR_BB_INSNS_REVERSE_SAFE (bb, insn, next)
      if (DEBUG_INSN_P (insn))
        {
          df_ref *use_rec;

          for (use_rec = DF_INSN_USES (insn); *use_rec; use_rec++)
            {
              df_ref use = *use_rec;
              struct df_link *defs;
              for (defs = DF_REF_CHAIN (use); defs; defs = defs->next)
                {
                  rtx ref_insn;
                  if (DF_REF_IS_ARTIFICIAL (defs->ref))
                    continue;
                  ref_insn = DF_REF_INSN (defs->ref);
                  if (!marked_insn_p (ref_insn))
                    break;
                }
              if (!defs)
                continue;
              /* ??? FIXME could we propagate the values assigned to
                 each of the DEFs?  */
              INSN_VAR_LOCATION_LOC (insn) = gen_rtx_UNKNOWN_VAR_LOC ();
              df_insn_rescan_debug_internal (insn);
              break;
            }
        }
}

/* Delete every instruction that hasn't been marked.  */

static void
delete_unmarked_insns (void)
{
  basic_block bb;
  rtx insn, next;
  bool must_clean = false;

  FOR_EACH_BB_REVERSE (bb)
    FOR_BB_INSNS_REVERSE_SAFE (bb, insn, next)
      if (NONDEBUG_INSN_P (insn))
        {
          /* Always delete no-op moves.  */
          if (noop_move_p (insn))
            ;

          /* Otherwise rely only on the DCE algorithm.  */
          else if (marked_insn_p (insn))
            continue;

          /* Beware that reaching a dbg counter limit here can result
             in a miscompiled file.  This occurs when a group of insns
             must be deleted together, typically because the kept insn
             depends on the output of the deleted insn.  Deleting
             these insns in reverse order (both at the bb level and
             when looking at the blocks) minimizes this, but does not
             eliminate it, since it is possible for the using insn to
             be at the top of a block and the producer to be at the
             bottom of the block.  However, in most cases this will
             only result in an uninitialized use of an insn that is
             dead anyway.

             However, there is one rare case that will cause a
             miscompile: deletion of non-looping pure and constant
             calls on a machine where ACCUMULATE_OUTGOING_ARGS is true.
             In this case it is possible to remove the call, but leave
             the argument pushes to the stack.  Because of the changes
             to the stack pointer, this will almost always lead to a
             miscompile.  */
          if (!dbg_cnt (dce))
            continue;

          if (dump_file)
            fprintf (dump_file, "DCE: Deleting insn %d\n", INSN_UID (insn));

          /* Before we delete the insn we have to remove the REG_EQUAL notes
             for the destination regs in order to avoid dangling notes.  */
          remove_reg_equal_equiv_notes_for_defs (insn);

          /* If a pure or const call is deleted, this may make the cfg
             have unreachable blocks.  We remember this and call
             delete_unreachable_blocks at the end.  */
          if (CALL_P (insn))
            must_clean = true;

          /* Now delete the insn.  */
          delete_insn_and_edges (insn);
        }

  /* Deleted a pure or const call.  */
  if (must_clean)
    delete_unreachable_blocks ();
}
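
/* The dce debug counter above is typically driven with -fdbg-cnt=dce:N;
   the miscompile scenario described in the comment can therefore show up
   when bisecting deletions with that option.  */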

/* Go through the instructions and mark those whose necessity is not
   dependent on inter-instruction information.  Make sure all other
   instructions are not marked.  */

static void
prescan_insns_for_dce (bool fast)
{
  basic_block bb;
  rtx insn, prev;
  bitmap arg_stores = NULL;

  if (dump_file)
    fprintf (dump_file, "Finding needed instructions:\n");

  if (!df_in_progress && ACCUMULATE_OUTGOING_ARGS)
    arg_stores = BITMAP_ALLOC (NULL);

  FOR_EACH_BB (bb)
    {
      FOR_BB_INSNS_REVERSE_SAFE (bb, insn, prev)
        if (NONDEBUG_INSN_P (insn))
          {
            /* Don't mark argument stores now.  They will be marked
               if needed when the associated CALL is marked.  */
            if (arg_stores && bitmap_bit_p (arg_stores, INSN_UID (insn)))
              continue;
            if (deletable_insn_p (insn, fast, arg_stores))
              mark_nonreg_stores (PATTERN (insn), insn, fast);
            else
              mark_insn (insn, fast);
          }
      /* find_call_stack_args only looks at argument stores in the
         same bb.  */
      if (arg_stores)
        bitmap_clear (arg_stores);
    }

  if (arg_stores)
    BITMAP_FREE (arg_stores);

  if (dump_file)
    fprintf (dump_file, "Finished finding needed instructions:\n");
}

/* UD-based DCE routines.  */

/* Mark instructions that define artificially-used registers, such as
   the frame pointer and the stack pointer.  */

static void
mark_artificial_uses (void)
{
  basic_block bb;
  struct df_link *defs;
  df_ref *use_rec;

  FOR_ALL_BB (bb)
    {
      for (use_rec = df_get_artificial_uses (bb->index);
           *use_rec; use_rec++)
        for (defs = DF_REF_CHAIN (*use_rec); defs; defs = defs->next)
          if (! DF_REF_IS_ARTIFICIAL (defs->ref))
            mark_insn (DF_REF_INSN (defs->ref), false);
    }
}

/* Mark every instruction that defines a register value that INSN uses.  */

static void
mark_reg_dependencies (rtx insn)
{
  struct df_link *defs;
  df_ref *use_rec;

  if (DEBUG_INSN_P (insn))
    return;

  for (use_rec = DF_INSN_USES (insn); *use_rec; use_rec++)
    {
      df_ref use = *use_rec;
      if (dump_file)
        {
          fprintf (dump_file, "Processing use of ");
          print_simple_rtl (dump_file, DF_REF_REG (use));
          fprintf (dump_file, " in insn %d:\n", INSN_UID (insn));
        }
      for (defs = DF_REF_CHAIN (use); defs; defs = defs->next)
        if (! DF_REF_IS_ARTIFICIAL (defs->ref))
          mark_insn (DF_REF_INSN (defs->ref), false);
    }
}

/* Initialize global variables for a new DCE pass.  */

static void
init_dce (bool fast)
{
  if (!df_in_progress)
    {
      if (!fast)
        {
          df_set_flags (DF_RD_PRUNE_DEAD_DEFS);
          df_chain_add_problem (DF_UD_CHAIN);
        }
      df_analyze ();
    }

  if (dump_file)
    df_dump (dump_file);

  if (fast)
    {
      bitmap_obstack_initialize (&dce_blocks_bitmap_obstack);
      bitmap_obstack_initialize (&dce_tmp_bitmap_obstack);
      can_alter_cfg = false;
    }
  else
    can_alter_cfg = true;

  marked = sbitmap_alloc (get_max_uid () + 1);
  bitmap_clear (marked);
}

/* Free the data allocated by init_dce.  */

static void
fini_dce (bool fast)
{
  sbitmap_free (marked);

  if (fast)
    {
      bitmap_obstack_release (&dce_blocks_bitmap_obstack);
      bitmap_obstack_release (&dce_tmp_bitmap_obstack);
    }
}

/* UD-chain based DCE.  */

static unsigned int
rest_of_handle_ud_dce (void)
{
  rtx insn;

  init_dce (false);

  prescan_insns_for_dce (false);
  mark_artificial_uses ();
  while (worklist.length () > 0)
    {
      insn = worklist.pop ();
      mark_reg_dependencies (insn);
    }
  worklist.release ();

  if (MAY_HAVE_DEBUG_INSNS)
    reset_unmarked_insns_debug_uses ();

  /* Before any insns are deleted, we must remove the chains since
     they are not bidirectional.  */
  df_remove_problem (df_chain);
  delete_unmarked_insns ();

  fini_dce (false);
  return 0;
}

static bool
gate_ud_dce (void)
{
  return optimize > 1 && flag_dce
    && dbg_cnt (dce_ud);
}

struct rtl_opt_pass pass_ud_rtl_dce =
{
 {
  RTL_PASS,
  "ud_dce",                             /* name */
  OPTGROUP_NONE,                        /* optinfo_flags */
  gate_ud_dce,                          /* gate */
  rest_of_handle_ud_dce,                /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_DCE,                               /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_df_finish | TODO_verify_rtl_sharing |
  TODO_ggc_collect                      /* todo_flags_finish */
 }
};

/* -------------------------------------------------------------------------
   Fast DCE functions
   ------------------------------------------------------------------------- */
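
/* Both block processors below simulate liveness backwards through a block,
   starting from the block's live-out set: an insn is marked as needed if it
   defines something still live (or was marked by the prescan), and only
   needed insns feed their uses back into the live set.  fast_dce iterates
   this over the CFG until no block's live-in set changes.  */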

/* Process basic block BB.  Return true if the live_in set has
   changed.  REDO_OUT is true if the info at the bottom of the block
   needs to be recalculated before starting.  Track global substitution
   of uses of dead pseudos in debug insns using GLOBAL_DEBUG.  */

static bool
word_dce_process_block (basic_block bb, bool redo_out,
                        struct dead_debug_global *global_debug)
{
  bitmap local_live = BITMAP_ALLOC (&dce_tmp_bitmap_obstack);
  rtx insn;
  bool block_changed;
  struct dead_debug_local debug;

  if (redo_out)
    {
      /* Need to redo the live_out set of this block if one of
         the successors of this block has had a change in its live_in
         set.  */
      edge e;
      edge_iterator ei;
      df_confluence_function_n con_fun_n = df_word_lr->problem->con_fun_n;
      bitmap_clear (DF_WORD_LR_OUT (bb));
      FOR_EACH_EDGE (e, ei, bb->succs)
        (*con_fun_n) (e);
    }

  if (dump_file)
    {
      fprintf (dump_file, "processing block %d live out = ", bb->index);
      df_print_word_regset (dump_file, DF_WORD_LR_OUT (bb));
    }

  bitmap_copy (local_live, DF_WORD_LR_OUT (bb));
  dead_debug_local_init (&debug, NULL, global_debug);

  FOR_BB_INSNS_REVERSE (bb, insn)
    if (DEBUG_INSN_P (insn))
      {
        df_ref *use_rec;
        for (use_rec = DF_INSN_USES (insn); *use_rec; use_rec++)
          if (DF_REF_REGNO (*use_rec) >= FIRST_PSEUDO_REGISTER
              && (GET_MODE_SIZE (GET_MODE (DF_REF_REAL_REG (*use_rec)))
                  == 2 * UNITS_PER_WORD)
              && !bitmap_bit_p (local_live, 2 * DF_REF_REGNO (*use_rec))
              && !bitmap_bit_p (local_live, 2 * DF_REF_REGNO (*use_rec) + 1))
            dead_debug_add (&debug, *use_rec, DF_REF_REGNO (*use_rec));
      }
    else if (INSN_P (insn))
      {
        bool any_changed;

        /* No matter if the instruction is needed or not, we remove
           any regno in the defs from the live set.  */
        any_changed = df_word_lr_simulate_defs (insn, local_live);
        if (any_changed)
          mark_insn (insn, true);

        /* On the other hand, we do not allow the dead uses to set
           anything in local_live.  */
        if (marked_insn_p (insn))
          df_word_lr_simulate_uses (insn, local_live);

        /* Insert debug temps for dead REGs used in subsequent debug
           insns.  We may have to emit a debug temp even if the insn
           was marked, in case the debug use was after the point of
           death.  */
        if (debug.used && !bitmap_empty_p (debug.used))
          {
            df_ref *def_rec;

            for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
              dead_debug_insert_temp (&debug, DF_REF_REGNO (*def_rec), insn,
                                      marked_insn_p (insn)
                                      && !control_flow_insn_p (insn)
                                      ? DEBUG_TEMP_AFTER_WITH_REG_FORCE
                                      : DEBUG_TEMP_BEFORE_WITH_VALUE);
          }

        if (dump_file)
          {
            fprintf (dump_file, "finished processing insn %d live out = ",
                     INSN_UID (insn));
            df_print_word_regset (dump_file, local_live);
          }
      }

  block_changed = !bitmap_equal_p (local_live, DF_WORD_LR_IN (bb));
  if (block_changed)
    bitmap_copy (DF_WORD_LR_IN (bb), local_live);

  dead_debug_local_finish (&debug, NULL);
  BITMAP_FREE (local_live);
  return block_changed;
}
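
/* Note that the word-level problem tracks each pseudo of size
   2 * UNITS_PER_WORD with two bits -- 2 * REGNO for the low word and
   2 * REGNO + 1 for the high word (see the debug-insn handling above) --
   so the liveness of the two halves of a double-word register is
   maintained separately.  */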

/* Process basic block BB.  Return true if the live_in set has
   changed.  REDO_OUT is true if the info at the bottom of the block
   needs to be recalculated before starting.  AU is the proper set of
   artificial uses.  Track global substitution of uses of dead pseudos
   in debug insns using GLOBAL_DEBUG.  */

static bool
dce_process_block (basic_block bb, bool redo_out, bitmap au,
                   struct dead_debug_global *global_debug)
{
  bitmap local_live = BITMAP_ALLOC (&dce_tmp_bitmap_obstack);
  rtx insn;
  bool block_changed;
  df_ref *def_rec;
  struct dead_debug_local debug;

  if (redo_out)
    {
      /* Need to redo the live_out set of this block if one of
         the successors of this block has had a change in its live_in
         set.  */
      edge e;
      edge_iterator ei;
      df_confluence_function_n con_fun_n = df_lr->problem->con_fun_n;
      bitmap_clear (DF_LR_OUT (bb));
      FOR_EACH_EDGE (e, ei, bb->succs)
        (*con_fun_n) (e);
    }

  if (dump_file)
    {
      fprintf (dump_file, "processing block %d lr out = ", bb->index);
      df_print_regset (dump_file, DF_LR_OUT (bb));
    }

  bitmap_copy (local_live, DF_LR_OUT (bb));

  df_simulate_initialize_backwards (bb, local_live);
  dead_debug_local_init (&debug, NULL, global_debug);

  FOR_BB_INSNS_REVERSE (bb, insn)
    if (DEBUG_INSN_P (insn))
      {
        df_ref *use_rec;
        for (use_rec = DF_INSN_USES (insn); *use_rec; use_rec++)
          if (!bitmap_bit_p (local_live, DF_REF_REGNO (*use_rec))
              && !bitmap_bit_p (au, DF_REF_REGNO (*use_rec)))
            dead_debug_add (&debug, *use_rec, DF_REF_REGNO (*use_rec));
      }
    else if (INSN_P (insn))
      {
        bool needed = marked_insn_p (insn);

        /* The insn is needed if there is someone who uses the output.  */
        if (!needed)
          for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
            if (bitmap_bit_p (local_live, DF_REF_REGNO (*def_rec))
                || bitmap_bit_p (au, DF_REF_REGNO (*def_rec)))
              {
                needed = true;
                mark_insn (insn, true);
                break;
              }

        /* No matter if the instruction is needed or not, we remove
           any regno in the defs from the live set.  */
        df_simulate_defs (insn, local_live);

        /* On the other hand, we do not allow the dead uses to set
           anything in local_live.  */
        if (needed)
          df_simulate_uses (insn, local_live);

        /* Insert debug temps for dead REGs used in subsequent debug
           insns.  We may have to emit a debug temp even if the insn
           was marked, in case the debug use was after the point of
           death.  */
        if (debug.used && !bitmap_empty_p (debug.used))
          for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
            dead_debug_insert_temp (&debug, DF_REF_REGNO (*def_rec), insn,
                                    needed && !control_flow_insn_p (insn)
                                    ? DEBUG_TEMP_AFTER_WITH_REG_FORCE
                                    : DEBUG_TEMP_BEFORE_WITH_VALUE);
      }

  dead_debug_local_finish (&debug, NULL);
  df_simulate_finalize_backwards (bb, local_live);

  block_changed = !bitmap_equal_p (local_live, DF_LR_IN (bb));
  if (block_changed)
    bitmap_copy (DF_LR_IN (bb), local_live);

  BITMAP_FREE (local_live);
  return block_changed;
}

/* Perform fast DCE once initialization is done.  If WORD_LEVEL is
   true, use the word level dce, otherwise do it at the pseudo
   level.  */

static void
fast_dce (bool word_level)
{
  int *postorder = df_get_postorder (DF_BACKWARD);
  int n_blocks = df_get_n_blocks (DF_BACKWARD);
  /* The set of blocks that have been seen on this iteration.  */
  bitmap processed = BITMAP_ALLOC (&dce_blocks_bitmap_obstack);
  /* The set of blocks that need to have the out vectors reset because
     the in of one of their successors has changed.  */
  bitmap redo_out = BITMAP_ALLOC (&dce_blocks_bitmap_obstack);
  bitmap all_blocks = BITMAP_ALLOC (&dce_blocks_bitmap_obstack);
  bool global_changed = true;

  /* These regs are considered always live so if they end up dying
     because of some def, we need to bring them back again.  Calling
     df_simulate_fixup_sets has the disadvantage of calling
     bb_has_eh_pred once per insn, so we cache the information
     here.  */
  bitmap au = &df->regular_block_artificial_uses;
  bitmap au_eh = &df->eh_block_artificial_uses;
  int i;
  struct dead_debug_global global_debug;

  prescan_insns_for_dce (true);

  for (i = 0; i < n_blocks; i++)
    bitmap_set_bit (all_blocks, postorder[i]);

  dead_debug_global_init (&global_debug, NULL);

  while (global_changed)
    {
      global_changed = false;

      for (i = 0; i < n_blocks; i++)
        {
          int index = postorder[i];
          basic_block bb = BASIC_BLOCK (index);
          bool local_changed;

          if (index < NUM_FIXED_BLOCKS)
            {
              bitmap_set_bit (processed, index);
              continue;
            }

          if (word_level)
            local_changed
              = word_dce_process_block (bb, bitmap_bit_p (redo_out, index),
                                        &global_debug);
          else
            local_changed
              = dce_process_block (bb, bitmap_bit_p (redo_out, index),
                                   bb_has_eh_pred (bb) ? au_eh : au,
                                   &global_debug);
          bitmap_set_bit (processed, index);

          if (local_changed)
            {
              edge e;
              edge_iterator ei;
              FOR_EACH_EDGE (e, ei, bb->preds)
                if (bitmap_bit_p (processed, e->src->index))
                  /* Be tricky about when we need to iterate the
                     analysis.  We only have to redo the analysis if
                     the bitmaps change at the top of a block that is
                     the entry to a loop.  */
                  global_changed = true;
                else
                  bitmap_set_bit (redo_out, e->src->index);
            }
        }

      if (global_changed)
        {
          /* Turn off the RUN_DCE flag to prevent recursive calls to
             dce.  */
          int old_flag = df_clear_flags (DF_LR_RUN_DCE);

          /* So something was deleted that requires a redo.  Do it on
             the cheap.  */
          delete_unmarked_insns ();
          bitmap_clear (marked);
          bitmap_clear (processed);
          bitmap_clear (redo_out);

          /* We do not need to rescan any instructions.  We only need
             to redo the dataflow equations for the blocks that had a
             change at the top of the block.  Then we need to redo the
             iteration.  */
          if (word_level)
            df_analyze_problem (df_word_lr, all_blocks, postorder, n_blocks);
          else
            df_analyze_problem (df_lr, all_blocks, postorder, n_blocks);

          if (old_flag & DF_LR_RUN_DCE)
            df_set_flags (DF_LR_RUN_DCE);

          prescan_insns_for_dce (true);
        }
    }

  dead_debug_global_finish (&global_debug, NULL);

  delete_unmarked_insns ();

  BITMAP_FREE (processed);
  BITMAP_FREE (redo_out);
  BITMAP_FREE (all_blocks);
}

/* Fast register level DCE.  */

static unsigned int
rest_of_handle_fast_dce (void)
{
  init_dce (true);
  fast_dce (false);
  fini_dce (true);
  return 0;
}

/* Fast word level DCE.  */

void
run_word_dce (void)
{
  int old_flags;

  if (!flag_dce)
    return;

  timevar_push (TV_DCE);
  old_flags = df_clear_flags (DF_DEFER_INSN_RESCAN + DF_NO_INSN_RESCAN);
  df_word_lr_add_problem ();
  init_dce (true);
  fast_dce (true);
  fini_dce (true);
  df_set_flags (old_flags);
  timevar_pop (TV_DCE);
}

/* This is an internal call that is used by the df live register
   problem to run fast dce as a side effect of creating the live
   information.  The stack is organized so that the lr problem is run,
   this pass is run, which updates the live info and the df scanning
   info, and then returns to allow the rest of the problems to be run.

   This can be called from elsewhere, but it will not update the bit
   vectors for any problems other than LR.  */

void
run_fast_df_dce (void)
{
  if (flag_dce)
    {
      /* If dce is able to delete something, it has to happen
         immediately.  Otherwise there will be problems handling the
         eq_notes.  */
      int old_flags =
        df_clear_flags (DF_DEFER_INSN_RESCAN + DF_NO_INSN_RESCAN);

      df_in_progress = true;
      rest_of_handle_fast_dce ();
      df_in_progress = false;

      df_set_flags (old_flags);
    }
}

/* Run a fast DCE pass.  */

void
run_fast_dce (void)
{
  if (flag_dce)
    rest_of_handle_fast_dce ();
}

static bool
gate_fast_dce (void)
{
  return optimize > 0 && flag_dce
    && dbg_cnt (dce_fast);
}

struct rtl_opt_pass pass_fast_rtl_dce =
{
 {
  RTL_PASS,
  "rtl_dce",                            /* name */
  OPTGROUP_NONE,                        /* optinfo_flags */
  gate_fast_dce,                        /* gate */
  rest_of_handle_fast_dce,              /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_DCE,                               /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_df_finish | TODO_verify_rtl_sharing |
  TODO_ggc_collect                      /* todo_flags_finish */
 }
};