/* RTL dead code elimination.
   Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010, 2011
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "hashtab.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "except.h"
#include "df.h"
#include "cselib.h"
#include "dce.h"
#include "timevar.h"
#include "tree-pass.h"
#include "dbgcnt.h"
#include "tm_p.h"
#include "emit-rtl.h"  /* FIXME: Can go away once crtl is moved to rtl.h.  */

/* -------------------------------------------------------------------------
   Core mark/delete routines
   ------------------------------------------------------------------------- */

/* True if we are invoked while the df engine is running; in this case,
   we don't want to reenter it.  */
static bool df_in_progress = false;

/* Instructions that have been marked but whose dependencies have not
   yet been processed.  */
static VEC(rtx,heap) *worklist;

/* Bitmap of instructions marked as needed indexed by INSN_UID.  */
static sbitmap marked;

/* Bitmap obstacks used for block processing by the fast algorithm.  */
static bitmap_obstack dce_blocks_bitmap_obstack;
static bitmap_obstack dce_tmp_bitmap_obstack;

static bool find_call_stack_args (rtx, bool, bool, bitmap);


/* A subroutine for which BODY is part of the instruction being tested;
   either the top-level pattern, or an element of a PARALLEL.  The
   instruction is known not to be a bare USE or CLOBBER.  */
static bool
deletable_insn_p_1 (rtx body)
{
  switch (GET_CODE (body))
    {
    case PREFETCH:
    case TRAP_IF:
      /* The UNSPEC case was added here because the ia-64 claims that
         USEs do not work after reload and generates UNSPECS rather
         than USEs.  Since dce is run after reload we need to avoid
         deleting these even if they are dead.  If it turns out that
         USEs really do work after reload, the ia-64 should be
         changed, and the UNSPEC case can be removed.  */
    case UNSPEC:
      return false;

    default:
      return !volatile_refs_p (body);
    }
}


/* Return true if INSN is a normal instruction that can be deleted by
   the DCE pass.  */

static bool
deletable_insn_p (rtx insn, bool fast, bitmap arg_stores)
{
  rtx body, x;
  int i;

  if (CALL_P (insn)
      /* We cannot delete calls inside of the recursive dce because
         this may cause basic blocks to be deleted and this messes up
         the rest of the stack of optimization passes.  */
      && (!df_in_progress)
      /* We cannot delete pure or const sibling calls because it is
         hard to see the result.  */
      && (!SIBLING_CALL_P (insn))
      /* We can delete dead const or pure calls as long as they do not
         infinite loop.  */
      && (RTL_CONST_OR_PURE_CALL_P (insn)
          && !RTL_LOOPING_CONST_OR_PURE_CALL_P (insn)))
    return find_call_stack_args (insn, false, fast, arg_stores);

  /* Don't delete jumps, notes and the like.  */
  if (!NONJUMP_INSN_P (insn))
    return false;

  /* Don't delete insns that can throw.  */
  if (!insn_nothrow_p (insn))
    return false;

  body = PATTERN (insn);
  switch (GET_CODE (body))
    {
    case USE:
    case VAR_LOCATION:
      return false;

    case CLOBBER:
      if (fast)
        {
          /* A CLOBBER of a dead pseudo register serves no purpose.
             That is not necessarily true for hard registers until
             after reload.  */
          x = XEXP (body, 0);
          return REG_P (x) && (!HARD_REGISTER_P (x) || reload_completed);
        }
      else
        /* Because of the way that use-def chains are built, it is not
           possible to tell if the clobber is dead because it can
           never be the target of a use-def chain.  */
        return false;

    case PARALLEL:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        if (!deletable_insn_p_1 (XVECEXP (body, 0, i)))
          return false;
      return true;

    default:
      return deletable_insn_p_1 (body);
    }
}


/* Return true if INSN has been marked as needed.  */

static inline int
marked_insn_p (rtx insn)
{
  /* Artificial defs are always needed and they do not have an insn.
     We should never see them here.  */
  gcc_assert (insn);
  return TEST_BIT (marked, INSN_UID (insn));
}


/* If INSN has not yet been marked as needed, mark it now, and add it to
   the worklist.  */

static void
mark_insn (rtx insn, bool fast)
{
  if (!marked_insn_p (insn))
    {
      if (!fast)
        VEC_safe_push (rtx, heap, worklist, insn);
      SET_BIT (marked, INSN_UID (insn));
      if (dump_file)
        fprintf (dump_file, "  Adding insn %d to worklist\n", INSN_UID (insn));
      if (CALL_P (insn)
          && !df_in_progress
          && !SIBLING_CALL_P (insn)
          && (RTL_CONST_OR_PURE_CALL_P (insn)
              && !RTL_LOOPING_CONST_OR_PURE_CALL_P (insn)))
        find_call_stack_args (insn, true, fast, NULL);
    }
}


/* A note_stores callback used by mark_nonreg_stores.  DATA is the
   instruction containing DEST.  */

static void
mark_nonreg_stores_1 (rtx dest, const_rtx pattern, void *data)
{
  if (GET_CODE (pattern) != CLOBBER && !REG_P (dest))
    mark_insn ((rtx) data, true);
}


/* A note_stores callback used by mark_nonreg_stores.  DATA is the
   instruction containing DEST.  */

static void
mark_nonreg_stores_2 (rtx dest, const_rtx pattern, void *data)
{
  if (GET_CODE (pattern) != CLOBBER && !REG_P (dest))
    mark_insn ((rtx) data, false);
}


/* Mark INSN if BODY stores to a non-register destination.  */

static void
mark_nonreg_stores (rtx body, rtx insn, bool fast)
{
  if (fast)
    note_stores (body, mark_nonreg_stores_1, insn);
  else
    note_stores (body, mark_nonreg_stores_2, insn);
}


/* Return true if store to MEM, starting OFF bytes from stack pointer,
   is a call argument store, and clear corresponding bits from SP_BYTES
   bitmap if it is.  */

static bool
check_argument_store (rtx mem, HOST_WIDE_INT off, HOST_WIDE_INT min_sp_off,
                      HOST_WIDE_INT max_sp_off, bitmap sp_bytes)
{
  HOST_WIDE_INT byte;
  for (byte = off; byte < off + GET_MODE_SIZE (GET_MODE (mem)); byte++)
    {
      if (byte < min_sp_off
          || byte >= max_sp_off
          || !bitmap_clear_bit (sp_bytes, byte - min_sp_off))
        return false;
    }
  return true;
}


/* Try to find all stack stores of CALL_INSN arguments if
   ACCUMULATE_OUTGOING_ARGS.  If all stack stores have been found
   and it is therefore safe to eliminate the call, return true,
   otherwise return false.  This function should first be called
   with DO_MARK false, and called again with DO_MARK true only when
   the CALL_INSN is actually going to be marked.  */

static bool
find_call_stack_args (rtx call_insn, bool do_mark, bool fast,
                      bitmap arg_stores)
{
  rtx p, insn, prev_insn;
  bool ret;
  HOST_WIDE_INT min_sp_off, max_sp_off;
  bitmap sp_bytes;

  gcc_assert (CALL_P (call_insn));
  if (!ACCUMULATE_OUTGOING_ARGS)
    return true;

  if (!do_mark)
    {
      gcc_assert (arg_stores);
      bitmap_clear (arg_stores);
    }

  min_sp_off = INTTYPE_MAXIMUM (HOST_WIDE_INT);
  max_sp_off = 0;

  /* First determine the minimum and maximum offset from sp for
     stored arguments.  */
  for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
    if (GET_CODE (XEXP (p, 0)) == USE
        && MEM_P (XEXP (XEXP (p, 0), 0)))
      {
        rtx mem = XEXP (XEXP (p, 0), 0), addr, size;
        HOST_WIDE_INT off = 0;
        size = MEM_SIZE (mem);
        if (size == NULL_RTX)
          return false;
        addr = XEXP (mem, 0);
        if (GET_CODE (addr) == PLUS
            && REG_P (XEXP (addr, 0))
            && CONST_INT_P (XEXP (addr, 1)))
          {
            off = INTVAL (XEXP (addr, 1));
            addr = XEXP (addr, 0);
          }
        if (addr != stack_pointer_rtx)
          {
            if (!REG_P (addr))
              return false;
            /* If not fast, use chains to see if addr wasn't set to
               sp + offset.  */
            if (!fast)
              {
                df_ref *use_rec;
                struct df_link *defs;
                rtx set;

                for (use_rec = DF_INSN_USES (call_insn); *use_rec; use_rec++)
                  if (rtx_equal_p (addr, DF_REF_REG (*use_rec)))
                    break;

                if (*use_rec == NULL)
                  return false;

                for (defs = DF_REF_CHAIN (*use_rec); defs; defs = defs->next)
                  if (! DF_REF_IS_ARTIFICIAL (defs->ref))
                    break;

                if (defs == NULL)
                  return false;

                set = single_set (DF_REF_INSN (defs->ref));
                if (!set)
                  return false;

                if (GET_CODE (SET_SRC (set)) != PLUS
                    || XEXP (SET_SRC (set), 0) != stack_pointer_rtx
                    || !CONST_INT_P (XEXP (SET_SRC (set), 1)))
                  return false;

                off += INTVAL (XEXP (SET_SRC (set), 1));
              }
            else
              return false;
          }
        min_sp_off = MIN (min_sp_off, off);
        max_sp_off = MAX (max_sp_off, off + INTVAL (size));
      }

  if (min_sp_off >= max_sp_off)
    return true;
  sp_bytes = BITMAP_ALLOC (NULL);
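
  /* As a worked example (hypothetical numbers): a call whose
     CALL_INSN_FUNCTION_USAGE lists two 4-byte argument stores at sp+8
     and sp+12 gives min_sp_off 8 and max_sp_off 16; SP_BYTES then
     tracks the 8 bytes relative to sp + min_sp_off, which the loop
     below sets and the backward walk afterwards clears as it finds
     the matching stores.  */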

  /* Set bits in SP_BYTES bitmap for bytes relative to sp + min_sp_off
     which contain arguments.  Checking has been done in the previous
     loop.  */
  for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
    if (GET_CODE (XEXP (p, 0)) == USE
        && MEM_P (XEXP (XEXP (p, 0), 0)))
      {
        rtx mem = XEXP (XEXP (p, 0), 0), addr;
        HOST_WIDE_INT off = 0, byte;
        addr = XEXP (mem, 0);
        if (GET_CODE (addr) == PLUS
            && REG_P (XEXP (addr, 0))
            && CONST_INT_P (XEXP (addr, 1)))
          {
            off = INTVAL (XEXP (addr, 1));
            addr = XEXP (addr, 0);
          }
        if (addr != stack_pointer_rtx)
          {
            df_ref *use_rec;
            struct df_link *defs;
            rtx set;

            for (use_rec = DF_INSN_USES (call_insn); *use_rec; use_rec++)
              if (rtx_equal_p (addr, DF_REF_REG (*use_rec)))
                break;

            for (defs = DF_REF_CHAIN (*use_rec); defs; defs = defs->next)
              if (! DF_REF_IS_ARTIFICIAL (defs->ref))
                break;

            set = single_set (DF_REF_INSN (defs->ref));
            off += INTVAL (XEXP (SET_SRC (set), 1));
          }
        for (byte = off; byte < off + INTVAL (MEM_SIZE (mem)); byte++)
          {
            if (!bitmap_set_bit (sp_bytes, byte - min_sp_off))
              gcc_unreachable ();
          }
      }

  /* Walk backwards, looking for argument stores.  The search stops
     when seeing another call, sp adjustment or memory store other than
     argument store.  */
  ret = false;
  for (insn = PREV_INSN (call_insn); insn; insn = prev_insn)
    {
      rtx set, mem, addr;
      HOST_WIDE_INT off;

      if (insn == BB_HEAD (BLOCK_FOR_INSN (call_insn)))
        prev_insn = NULL_RTX;
      else
        prev_insn = PREV_INSN (insn);

      if (CALL_P (insn))
        break;

      if (!NONDEBUG_INSN_P (insn))
        continue;

      set = single_set (insn);
      if (!set || SET_DEST (set) == stack_pointer_rtx)
        break;

      if (!MEM_P (SET_DEST (set)))
        continue;

      mem = SET_DEST (set);
      addr = XEXP (mem, 0);
      off = 0;
      if (GET_CODE (addr) == PLUS
          && REG_P (XEXP (addr, 0))
          && CONST_INT_P (XEXP (addr, 1)))
        {
          off = INTVAL (XEXP (addr, 1));
          addr = XEXP (addr, 0);
        }
      if (addr != stack_pointer_rtx)
        {
          if (!REG_P (addr))
            break;
          if (!fast)
            {
              df_ref *use_rec;
              struct df_link *defs;
              rtx set;

              for (use_rec = DF_INSN_USES (insn); *use_rec; use_rec++)
                if (rtx_equal_p (addr, DF_REF_REG (*use_rec)))
                  break;

              if (*use_rec == NULL)
                break;

              for (defs = DF_REF_CHAIN (*use_rec); defs; defs = defs->next)
                if (! DF_REF_IS_ARTIFICIAL (defs->ref))
                  break;

              if (defs == NULL)
                break;

              set = single_set (DF_REF_INSN (defs->ref));
              if (!set)
                break;

              if (GET_CODE (SET_SRC (set)) != PLUS
                  || XEXP (SET_SRC (set), 0) != stack_pointer_rtx
                  || !CONST_INT_P (XEXP (SET_SRC (set), 1)))
                break;

              off += INTVAL (XEXP (SET_SRC (set), 1));
            }
          else
            break;
        }

      if (GET_MODE_SIZE (GET_MODE (mem)) == 0
          || !check_argument_store (mem, off, min_sp_off,
                                    max_sp_off, sp_bytes))
        break;

      if (!deletable_insn_p (insn, fast, NULL))
        break;

      if (do_mark)
        mark_insn (insn, fast);
      else
        bitmap_set_bit (arg_stores, INSN_UID (insn));

      if (bitmap_empty_p (sp_bytes))
        {
          ret = true;
          break;
        }
    }

  BITMAP_FREE (sp_bytes);
  if (!ret && arg_stores)
    bitmap_clear (arg_stores);

  return ret;
}


/* Remove all REG_EQUAL and REG_EQUIV notes referring to the registers INSN
   writes to.  */

static void
remove_reg_equal_equiv_notes_for_defs (rtx insn)
{
  df_ref *def_rec;

  for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
    remove_reg_equal_equiv_notes_for_regno (DF_REF_REGNO (*def_rec));
}


/* Delete every instruction that hasn't been marked.  */

static void
delete_unmarked_insns (void)
{
  basic_block bb;
  rtx insn, next;
  bool must_clean = false;

  FOR_EACH_BB_REVERSE (bb)
    FOR_BB_INSNS_REVERSE_SAFE (bb, insn, next)
      if (INSN_P (insn))
        {
          /* Always delete no-op moves.  */
          if (noop_move_p (insn))
            ;

          /* Otherwise rely only on the DCE algorithm.  */
          else if (marked_insn_p (insn))
            continue;

          /* Beware that reaching a dbg counter limit here can result
             in a miscompiled file.  This occurs when a group of insns
             must be deleted together, typically because the kept insn
             depends on the output from the deleted insn.  Deleting
             these insns in reverse order (both at the bb level and
             when looking at the blocks) minimizes this, but does not
             eliminate it, since it is possible for the using insn to
             be at the top of a block and the producer to be at the
             bottom of the block.  However, in most cases this will
             only result in an uninitialized use of an insn that is
             dead anyway.

             However, there is one rare case that will cause a
             miscompile: deletion of non-looping pure and constant
             calls on a machine where ACCUMULATE_OUTGOING_ARGS is true.
             In this case it is possible to remove the call, but leave
             the argument pushes to the stack.  Because of the changes
             to the stack pointer, this will almost always lead to a
             miscompile.  */
          if (!dbg_cnt (dce))
            continue;

          if (dump_file)
            fprintf (dump_file, "DCE: Deleting insn %d\n", INSN_UID (insn));

          /* Before we delete the insn we have to remove the REG_EQUAL notes
             for the destination regs in order to avoid dangling notes.  */
          remove_reg_equal_equiv_notes_for_defs (insn);

          /* If a pure or const call is deleted, this may make the cfg
             have unreachable blocks.  We remember this and call
             delete_unreachable_blocks at the end.  */
          if (CALL_P (insn))
            must_clean = true;

          /* Now delete the insn.  */
          delete_insn_and_edges (insn);
        }

  /* Deleted a pure or const call.  */
  if (must_clean)
    delete_unreachable_blocks ();
}


/* Go through the instructions and mark those whose necessity is not
   dependent on inter-instruction information.  Make sure all other
   instructions are not marked.  */

static void
prescan_insns_for_dce (bool fast)
{
  basic_block bb;
  rtx insn, prev;
  bitmap arg_stores = NULL;

  if (dump_file)
    fprintf (dump_file, "Finding needed instructions:\n");

  if (!df_in_progress && ACCUMULATE_OUTGOING_ARGS)
    arg_stores = BITMAP_ALLOC (NULL);

  FOR_EACH_BB (bb)
    {
      FOR_BB_INSNS_REVERSE_SAFE (bb, insn, prev)
        if (INSN_P (insn))
          {
            /* Don't mark argument stores now.  They will be marked
               if needed when the associated CALL is marked.  */
            if (arg_stores && bitmap_bit_p (arg_stores, INSN_UID (insn)))
              continue;
            if (deletable_insn_p (insn, fast, arg_stores))
              mark_nonreg_stores (PATTERN (insn), insn, fast);
            else
              mark_insn (insn, fast);
          }
      /* find_call_stack_args only looks at argument stores in the
         same bb.  */
      if (arg_stores)
        bitmap_clear (arg_stores);
    }

  if (arg_stores)
    BITMAP_FREE (arg_stores);

  if (dump_file)
    fprintf (dump_file, "Finished finding needed instructions:\n");
}


/* UD-chain based DCE routines.  */

/* Mark instructions that define artificially-used registers, such as
   the frame pointer and the stack pointer.  */

static void
mark_artificial_uses (void)
{
  basic_block bb;
  struct df_link *defs;
  df_ref *use_rec;

  FOR_ALL_BB (bb)
    {
      for (use_rec = df_get_artificial_uses (bb->index);
           *use_rec; use_rec++)
        for (defs = DF_REF_CHAIN (*use_rec); defs; defs = defs->next)
          if (! DF_REF_IS_ARTIFICIAL (defs->ref))
            mark_insn (DF_REF_INSN (defs->ref), false);
    }
}


/* Mark every instruction that defines a register value that INSN uses.  */

static void
mark_reg_dependencies (rtx insn)
{
  struct df_link *defs;
  df_ref *use_rec;

  if (DEBUG_INSN_P (insn))
    return;

  for (use_rec = DF_INSN_USES (insn); *use_rec; use_rec++)
    {
      df_ref use = *use_rec;
      if (dump_file)
        {
          fprintf (dump_file, "Processing use of ");
          print_simple_rtl (dump_file, DF_REF_REG (use));
          fprintf (dump_file, " in insn %d:\n", INSN_UID (insn));
        }
      for (defs = DF_REF_CHAIN (use); defs; defs = defs->next)
        if (! DF_REF_IS_ARTIFICIAL (defs->ref))
          mark_insn (DF_REF_INSN (defs->ref), false);
    }
}


/* Initialize global variables for a new DCE pass.  */

static void
init_dce (bool fast)
{
  if (!df_in_progress)
    {
      if (!fast)
        df_chain_add_problem (DF_UD_CHAIN);
      df_analyze ();
    }

  if (dump_file)
    df_dump (dump_file);

  if (fast)
    {
      bitmap_obstack_initialize (&dce_blocks_bitmap_obstack);
      bitmap_obstack_initialize (&dce_tmp_bitmap_obstack);
    }

  marked = sbitmap_alloc (get_max_uid () + 1);
  sbitmap_zero (marked);
}


/* Free the data allocated by init_dce.  */

static void
fini_dce (bool fast)
{
  sbitmap_free (marked);

  if (fast)
    {
      bitmap_obstack_release (&dce_blocks_bitmap_obstack);
      bitmap_obstack_release (&dce_tmp_bitmap_obstack);
    }
}


/* UD-chain based DCE.  */

static unsigned int
rest_of_handle_ud_dce (void)
{
  rtx insn;

  init_dce (false);

  prescan_insns_for_dce (false);
  mark_artificial_uses ();
  while (VEC_length (rtx, worklist) > 0)
    {
      insn = VEC_pop (rtx, worklist);
      mark_reg_dependencies (insn);
    }
  VEC_free (rtx, heap, worklist);

  /* Before any insns are deleted, we must remove the chains since
     they are not bidirectional.  */
  df_remove_problem (df_chain);
  delete_unmarked_insns ();

  fini_dce (false);
  return 0;
}
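

/* Gate function for the UD-chain based DCE pass.  */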
static bool
gate_ud_dce (void)
{
  return optimize > 1 && flag_dce
    && dbg_cnt (dce_ud);
}

struct rtl_opt_pass pass_ud_rtl_dce =
{
 {
  RTL_PASS,
  "ud dce",                             /* name */
  gate_ud_dce,                          /* gate */
  rest_of_handle_ud_dce,                /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_DCE,                               /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func |
  TODO_df_finish | TODO_verify_rtl_sharing |
  TODO_ggc_collect                      /* todo_flags_finish */
 }
};


/* -------------------------------------------------------------------------
   Fast DCE functions
   ------------------------------------------------------------------------- */

/* Process basic block BB.  Return true if the live_in set has
   changed.  REDO_OUT is true if the info at the bottom of the block
   needs to be recalculated before starting.  */

static bool
word_dce_process_block (basic_block bb, bool redo_out)
{
  bitmap local_live = BITMAP_ALLOC (&dce_tmp_bitmap_obstack);
  rtx insn;
  bool block_changed;

  if (redo_out)
    {
      /* We need to redo the live_out set of this block if one of
         the successors of this block has had a change in its live_in
         set.  */
      edge e;
      edge_iterator ei;
      df_confluence_function_n con_fun_n = df_word_lr->problem->con_fun_n;
      bitmap_clear (DF_WORD_LR_OUT (bb));
      FOR_EACH_EDGE (e, ei, bb->succs)
        (*con_fun_n) (e);
    }

  if (dump_file)
    {
      fprintf (dump_file, "processing block %d live out = ", bb->index);
      df_print_word_regset (dump_file, DF_WORD_LR_OUT (bb));
    }

  bitmap_copy (local_live, DF_WORD_LR_OUT (bb));

  FOR_BB_INSNS_REVERSE (bb, insn)
    if (NONDEBUG_INSN_P (insn))
      {
        bool any_changed;
        /* No matter if the instruction is needed or not, we remove
           any regno in the defs from the live set.  */
        any_changed = df_word_lr_simulate_defs (insn, local_live);
        if (any_changed)
          mark_insn (insn, true);

        /* On the other hand, we do not allow the dead uses to set
           anything in local_live.  */
        if (marked_insn_p (insn))
          df_word_lr_simulate_uses (insn, local_live);

        if (dump_file)
          {
            fprintf (dump_file, "finished processing insn %d live out = ",
                     INSN_UID (insn));
            df_print_word_regset (dump_file, local_live);
          }
      }

  block_changed = !bitmap_equal_p (local_live, DF_WORD_LR_IN (bb));
  if (block_changed)
    bitmap_copy (DF_WORD_LR_IN (bb), local_live);

  BITMAP_FREE (local_live);
  return block_changed;
}


/* Process basic block BB.  Return true if the live_in set has
   changed.  REDO_OUT is true if the info at the bottom of the block
   needs to be recalculated before starting.  AU is the proper set of
   artificial uses.  */

static bool
dce_process_block (basic_block bb, bool redo_out, bitmap au)
{
  bitmap local_live = BITMAP_ALLOC (&dce_tmp_bitmap_obstack);
  rtx insn;
  bool block_changed;
  df_ref *def_rec;

  if (redo_out)
    {
      /* We need to redo the live_out set of this block if one of
         the successors of this block has had a change in its live_in
         set.  */
      edge e;
      edge_iterator ei;
      df_confluence_function_n con_fun_n = df_lr->problem->con_fun_n;
      bitmap_clear (DF_LR_OUT (bb));
      FOR_EACH_EDGE (e, ei, bb->succs)
        (*con_fun_n) (e);
    }

  if (dump_file)
    {
      fprintf (dump_file, "processing block %d lr out = ", bb->index);
      df_print_regset (dump_file, DF_LR_OUT (bb));
    }

  bitmap_copy (local_live, DF_LR_OUT (bb));

  df_simulate_initialize_backwards (bb, local_live);

  FOR_BB_INSNS_REVERSE (bb, insn)
    if (INSN_P (insn))
      {
        bool needed = marked_insn_p (insn);

        /* The insn is needed if there is someone who uses the output.  */
        if (!needed)
          for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
            if (bitmap_bit_p (local_live, DF_REF_REGNO (*def_rec))
                || bitmap_bit_p (au, DF_REF_REGNO (*def_rec)))
              {
                needed = true;
                mark_insn (insn, true);
                break;
              }

        /* No matter if the instruction is needed or not, we remove
           any regno in the defs from the live set.  */
        df_simulate_defs (insn, local_live);

        /* On the other hand, we do not allow the dead uses to set
           anything in local_live.  */
        if (needed)
          df_simulate_uses (insn, local_live);
      }

  df_simulate_finalize_backwards (bb, local_live);

  block_changed = !bitmap_equal_p (local_live, DF_LR_IN (bb));
  if (block_changed)
    bitmap_copy (DF_LR_IN (bb), local_live);

  BITMAP_FREE (local_live);
  return block_changed;
}


/* Perform fast DCE once initialization is done.  If WORD_LEVEL is
   true, use the word level dce, otherwise do it at the pseudo
   level.  */

static void
fast_dce (bool word_level)
{
  int *postorder = df_get_postorder (DF_BACKWARD);
  int n_blocks = df_get_n_blocks (DF_BACKWARD);
  /* The set of blocks that have been seen on this iteration.  */
  bitmap processed = BITMAP_ALLOC (&dce_blocks_bitmap_obstack);
  /* The set of blocks that need to have the out vectors reset because
     the in of one of their successors has changed.  */
  bitmap redo_out = BITMAP_ALLOC (&dce_blocks_bitmap_obstack);
  bitmap all_blocks = BITMAP_ALLOC (&dce_blocks_bitmap_obstack);
  bool global_changed = true;

  /* These regs are considered always live so if they end up dying
     because of some def, we need to bring them back again.  Calling
     df_simulate_fixup_sets has the disadvantage of calling
     bb_has_eh_pred once per insn, so we cache the information
     here.  */
  bitmap au = &df->regular_block_artificial_uses;
  bitmap au_eh = &df->eh_block_artificial_uses;
  int i;

  prescan_insns_for_dce (true);

  for (i = 0; i < n_blocks; i++)
    bitmap_set_bit (all_blocks, postorder[i]);

  while (global_changed)
    {
      global_changed = false;

      for (i = 0; i < n_blocks; i++)
        {
          int index = postorder[i];
          basic_block bb = BASIC_BLOCK (index);
          bool local_changed;

          if (index < NUM_FIXED_BLOCKS)
            {
              bitmap_set_bit (processed, index);
              continue;
            }

          if (word_level)
            local_changed
              = word_dce_process_block (bb, bitmap_bit_p (redo_out, index));
          else
            local_changed
              = dce_process_block (bb, bitmap_bit_p (redo_out, index),
                                   bb_has_eh_pred (bb) ? au_eh : au);
          bitmap_set_bit (processed, index);

          if (local_changed)
            {
              edge e;
              edge_iterator ei;
              FOR_EACH_EDGE (e, ei, bb->preds)
                if (bitmap_bit_p (processed, e->src->index))
                  /* Be tricky about when we need to iterate the
                     analysis.  We only have to redo the analysis if
                     the bitmaps change at the top of a block that is
                     the entry to a loop.  */
                  global_changed = true;
                else
                  bitmap_set_bit (redo_out, e->src->index);
            }
        }

      if (global_changed)
        {
          /* Turn off the RUN_DCE flag to prevent recursive calls to
             dce.  */
          int old_flag = df_clear_flags (DF_LR_RUN_DCE);

          /* So something was deleted that requires a redo.  Do it on
             the cheap.  */
          delete_unmarked_insns ();
          sbitmap_zero (marked);
          bitmap_clear (processed);
          bitmap_clear (redo_out);

          /* We do not need to rescan any instructions.  We only need
             to redo the dataflow equations for the blocks that had a
             change at the top of the block.  Then we need to redo the
             iteration.  */
          if (word_level)
            df_analyze_problem (df_word_lr, all_blocks, postorder, n_blocks);
          else
            df_analyze_problem (df_lr, all_blocks, postorder, n_blocks);

          if (old_flag & DF_LR_RUN_DCE)
            df_set_flags (DF_LR_RUN_DCE);

          prescan_insns_for_dce (true);
        }
    }

  delete_unmarked_insns ();

  BITMAP_FREE (processed);
  BITMAP_FREE (redo_out);
  BITMAP_FREE (all_blocks);
}


/* Fast register level DCE.  */

static unsigned int
rest_of_handle_fast_dce (void)
{
  init_dce (true);
  fast_dce (false);
  fini_dce (true);
  return 0;
}


/* Fast word level DCE.  */

void
run_word_dce (void)
{
  int old_flags;

  if (!flag_dce)
    return;

  timevar_push (TV_DCE);
  old_flags = df_clear_flags (DF_DEFER_INSN_RESCAN + DF_NO_INSN_RESCAN);
  df_word_lr_add_problem ();
  init_dce (true);
  fast_dce (true);
  fini_dce (true);
  df_set_flags (old_flags);
  timevar_pop (TV_DCE);
}


/* This is an internal call that is used by the df live register
   problem to run fast dce as a side effect of creating the live
   information.  The stack is organized so that the lr problem is run,
   this pass is run, which updates the live info and the df scanning
   info, and then returns to allow the rest of the problems to be run.

   This can be called from elsewhere, but it will not update the bit
   vectors for any problems other than LR.  */

void
run_fast_df_dce (void)
{
  if (flag_dce)
    {
      /* If dce is able to delete something, it has to happen
         immediately.  Otherwise there will be problems handling the
         eq_notes.  */
      int old_flags =
        df_clear_flags (DF_DEFER_INSN_RESCAN + DF_NO_INSN_RESCAN);

      df_in_progress = true;
      rest_of_handle_fast_dce ();
      df_in_progress = false;

      df_set_flags (old_flags);
    }
}


/* Run a fast DCE pass.  */

void
run_fast_dce (void)
{
  if (flag_dce)
    rest_of_handle_fast_dce ();
}
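

/* Gate function for the fast RTL DCE pass.  */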
static bool
gate_fast_dce (void)
{
  return optimize > 0 && flag_dce
    && dbg_cnt (dce_fast);
}

struct rtl_opt_pass pass_fast_rtl_dce =
{
 {
  RTL_PASS,
  "rtl dce",                            /* name */
  gate_fast_dce,                        /* gate */
  rest_of_handle_fast_dce,              /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_DCE,                               /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func |
  TODO_df_finish | TODO_verify_rtl_sharing |
  TODO_ggc_collect                      /* todo_flags_finish */
 }
};