/* gcc/dce.c from the official GCC tree
   (blob 3e1dd47f3a4b456d9fd618d2f35efb41908b9ac7).  */
1 /* RTL dead code elimination.
2 Copyright (C) 2005, 2006, 2007, 2008, 2009 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "hashtab.h"
24 #include "tm.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "regs.h"
28 #include "hard-reg-set.h"
29 #include "flags.h"
30 #include "except.h"
31 #include "df.h"
32 #include "cselib.h"
33 #include "dce.h"
34 #include "timevar.h"
35 #include "tree-pass.h"
36 #include "dbgcnt.h"
37 #include "tm_p.h"
/* -------------------------------------------------------------------------
   Core mark/delete routines
   ------------------------------------------------------------------------- */

/* True if we are invoked while the df engine is running; in this case,
   we don't want to reenter it.  */
static bool df_in_progress = false;

/* Instructions that have been marked but whose dependencies have not
   yet been processed.  Only used by the UD-chain (non-fast) algorithm;
   the fast algorithm never pushes onto it.  */
static VEC(rtx,heap) *worklist;

/* Bitmap of instructions marked as needed indexed by INSN_UID.  */
static sbitmap marked;

/* Bitmap obstacks used for block processing by the fast algorithm.  */
static bitmap_obstack dce_blocks_bitmap_obstack;
static bitmap_obstack dce_tmp_bitmap_obstack;

/* Forward declaration: defined below, but needed by deletable_insn_p
   and mark_insn.  */
static bool find_call_stack_args (rtx, bool, bool, bitmap);
/* A subroutine for which BODY is part of the instruction being tested;
   either the top-level pattern, or an element of a PARALLEL.  The
   instruction is known not to be a bare USE or CLOBBER.  Return true
   if BODY by itself would permit deletion of the insn.  */

static bool
deletable_insn_p_1 (rtx body)
{
  switch (GET_CODE (body))
    {
    case PREFETCH:
    case TRAP_IF:
      /* The UNSPEC case was added here because the ia-64 claims that
	 USEs do not work after reload and generates UNSPECS rather
	 than USEs.  Since dce is run after reload we need to avoid
	 deleting these even if they are dead.  If it turns out that
	 USEs really do work after reload, the ia-64 should be
	 changed, and the UNSPEC case can be removed.  */
    case UNSPEC:
      return false;

    default:
      /* Volatile accesses have side effects that must be kept.  */
      if (volatile_refs_p (body))
	return false;

      /* With -fnon-call-exceptions a trapping insn may be an EH edge
	 source, so it is not dead even if its value is unused.  */
      if (flag_non_call_exceptions && may_trap_p (body))
	return false;

      return true;
    }
}
/* Return true if INSN is a normal instruction that can be deleted by
   the DCE pass.  FAST is true for the non-chain algorithm; ARG_STORES,
   when non-NULL, collects the UIDs of stack stores feeding a deletable
   const/pure call (see find_call_stack_args).  */

static bool
deletable_insn_p (rtx insn, bool fast, bitmap arg_stores)
{
  rtx body, x;
  int i;

  if (CALL_P (insn)
      /* We cannot delete calls inside of the recursive dce because
	 this may cause basic blocks to be deleted and this messes up
	 the rest of the stack of optimization passes.  */
      && (!df_in_progress)
      /* We cannot delete pure or const sibling calls because it is
	 hard to see the result.  */
      && (!SIBLING_CALL_P (insn))
      /* We can delete dead const or pure calls as long as they do not
	 infinite loop.  */
      && (RTL_CONST_OR_PURE_CALL_P (insn)
	  && !RTL_LOOPING_CONST_OR_PURE_CALL_P (insn)))
    /* The call itself is deletable only if every argument store that
       feeds it can also be found and deleted.  */
    return find_call_stack_args (insn, false, fast, arg_stores);

  if (!NONJUMP_INSN_P (insn))
    return false;

  /* Similarly, we cannot delete other insns that can throw either.  */
  if (df_in_progress && flag_non_call_exceptions && can_throw_internal (insn))
    return false;

  body = PATTERN (insn);
  switch (GET_CODE (body))
    {
    case USE:
    case VAR_LOCATION:
      return false;

    case CLOBBER:
      if (fast)
	{
	  /* A CLOBBER of a dead pseudo register serves no purpose.
	     That is not necessarily true for hard registers until
	     after reload.  */
	  x = XEXP (body, 0);
	  return REG_P (x) && (!HARD_REGISTER_P (x) || reload_completed);
	}
      else
	/* Because of the way that use-def chains are built, it is not
	   possible to tell if the clobber is dead because it can
	   never be the target of a use-def chain.  */
	return false;

    case PARALLEL:
      /* Every element of the PARALLEL must be independently deletable.  */
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
	if (!deletable_insn_p_1 (XVECEXP (body, 0, i)))
	  return false;
      return true;

    default:
      return deletable_insn_p_1 (body);
    }
}
/* Return true if INSN has been marked as needed.  */

static inline int
marked_insn_p (rtx insn)
{
  /* Artificial defs are always needed and they do not have an insn.
     We should never see them here.  */
  gcc_assert (insn);
  return TEST_BIT (marked, INSN_UID (insn));
}
/* If INSN has not yet been marked as needed, mark it now, and add it to
   the worklist.  FAST selects the fast algorithm, which does not use
   the worklist.  Marking a deletable const/pure call also marks its
   argument stores so that they are kept or deleted together.  */

static void
mark_insn (rtx insn, bool fast)
{
  if (!marked_insn_p (insn))
    {
      if (!fast)
	VEC_safe_push (rtx, heap, worklist, insn);
      SET_BIT (marked, INSN_UID (insn));
      if (dump_file)
	fprintf (dump_file, "  Adding insn %d to worklist\n", INSN_UID (insn));
      /* Same condition as in deletable_insn_p: this call was kept only
	 together with its argument stores, so mark those too.  */
      if (CALL_P (insn)
	  && !df_in_progress
	  && !SIBLING_CALL_P (insn)
	  && (RTL_CONST_OR_PURE_CALL_P (insn)
	      && !RTL_LOOPING_CONST_OR_PURE_CALL_P (insn)))
	find_call_stack_args (insn, true, fast, NULL);
    }
}
/* A note_stores callback used by mark_nonreg_stores.  DATA is the
   instruction containing DEST.  Marks the insn (fast variant) when it
   stores to a non-register destination via something other than a
   CLOBBER.  */

static void
mark_nonreg_stores_1 (rtx dest, const_rtx pattern, void *data)
{
  if (GET_CODE (pattern) != CLOBBER && !REG_P (dest))
    mark_insn ((rtx) data, true);
}
/* A note_stores callback used by mark_nonreg_stores.  DATA is the
   instruction containing DEST.  Identical to mark_nonreg_stores_1
   except that it marks with FAST == false (UD-chain algorithm).  */

static void
mark_nonreg_stores_2 (rtx dest, const_rtx pattern, void *data)
{
  if (GET_CODE (pattern) != CLOBBER && !REG_P (dest))
    mark_insn ((rtx) data, false);
}
214 /* Mark INSN if BODY stores to a non-register destination. */
216 static void
217 mark_nonreg_stores (rtx body, rtx insn, bool fast)
219 if (fast)
220 note_stores (body, mark_nonreg_stores_1, insn);
221 else
222 note_stores (body, mark_nonreg_stores_2, insn);
/* Try to find all stack stores of CALL_INSN arguments if
   ACCUMULATE_OUTGOING_ARGS.  If all stack stores have been found
   and it is therefore safe to eliminate the call, return true,
   otherwise return false.  This function should be first called
   with DO_MARK false, and only when the CALL_INSN is actually
   going to be marked called again with DO_MARK true.

   The function works in three phases:
     1. compute [min_sp_off, max_sp_off), the sp-relative byte range
	covered by the argument MEMs recorded in
	CALL_INSN_FUNCTION_USAGE;
     2. build SP_BYTES, a bitmap with one bit per argument byte;
     3. walk backwards from the call clearing bytes as the matching
	stores are found; success when every byte is accounted for.  */

static bool
find_call_stack_args (rtx call_insn, bool do_mark, bool fast,
		      bitmap arg_stores)
{
  rtx p, insn, prev_insn;
  bool ret;
  HOST_WIDE_INT min_sp_off, max_sp_off;
  bitmap sp_bytes;

  gcc_assert (CALL_P (call_insn));
  if (!ACCUMULATE_OUTGOING_ARGS)
    return true;

  if (!do_mark)
    {
      gcc_assert (arg_stores);
      bitmap_clear (arg_stores);
    }

  min_sp_off = INTTYPE_MAXIMUM (HOST_WIDE_INT);
  max_sp_off = 0;

  /* First determine the minimum and maximum offset from sp for
     stored arguments.  */
  for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
    if (GET_CODE (XEXP (p, 0)) == USE
	&& MEM_P (XEXP (XEXP (p, 0), 0)))
      {
	rtx mem = XEXP (XEXP (p, 0), 0), addr, size;
	HOST_WIDE_INT off = 0;
	size = MEM_SIZE (mem);
	if (size == NULL_RTX)
	  return false;
	addr = XEXP (mem, 0);
	if (GET_CODE (addr) == PLUS
	    && REG_P (XEXP (addr, 0))
	    && CONST_INT_P (XEXP (addr, 1)))
	  {
	    off = INTVAL (XEXP (addr, 1));
	    addr = XEXP (addr, 0);
	  }
	if (addr != stack_pointer_rtx)
	  {
	    if (!REG_P (addr))
	      return false;
	    /* If not fast, use chains to see if addr wasn't set to
	       sp + offset.  */
	    if (!fast)
	      {
		df_ref *use_rec;
		struct df_link *defs;
		rtx set;

		for (use_rec = DF_INSN_USES (call_insn); *use_rec; use_rec++)
		  if (rtx_equal_p (addr, DF_REF_REG (*use_rec)))
		    break;

		if (*use_rec == NULL)
		  return false;

		for (defs = DF_REF_CHAIN (*use_rec); defs; defs = defs->next)
		  if (! DF_REF_IS_ARTIFICIAL (defs->ref))
		    break;

		if (defs == NULL)
		  return false;

		set = single_set (DF_REF_INSN (defs->ref));
		if (!set)
		  return false;

		if (GET_CODE (SET_SRC (set)) != PLUS
		    || XEXP (SET_SRC (set), 0) != stack_pointer_rtx
		    || !CONST_INT_P (XEXP (SET_SRC (set), 1)))
		  return false;

		off += INTVAL (XEXP (SET_SRC (set), 1));
	      }
	    else
	      return false;
	  }
	min_sp_off = MIN (min_sp_off, off);
	max_sp_off = MAX (max_sp_off, off + INTVAL (size));
      }

  /* No stack-passed arguments at all: the call is trivially safe.  */
  if (min_sp_off >= max_sp_off)
    return true;
  sp_bytes = BITMAP_ALLOC (NULL);

  /* Set bits in SP_BYTES bitmap for bytes relative to sp + min_sp_off
     which contain arguments.  Checking has been done in the previous
     loop.  */
  for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
    if (GET_CODE (XEXP (p, 0)) == USE
	&& MEM_P (XEXP (XEXP (p, 0), 0)))
      {
	rtx mem = XEXP (XEXP (p, 0), 0), addr;
	HOST_WIDE_INT off = 0, byte;
	addr = XEXP (mem, 0);
	if (GET_CODE (addr) == PLUS
	    && REG_P (XEXP (addr, 0))
	    && CONST_INT_P (XEXP (addr, 1)))
	  {
	    off = INTVAL (XEXP (addr, 1));
	    addr = XEXP (addr, 0);
	  }
	if (addr != stack_pointer_rtx)
	  {
	    df_ref *use_rec;
	    struct df_link *defs;
	    rtx set;

	    /* The first loop already validated these lookups, so no
	       failure checks are repeated here.  */
	    for (use_rec = DF_INSN_USES (call_insn); *use_rec; use_rec++)
	      if (rtx_equal_p (addr, DF_REF_REG (*use_rec)))
		break;

	    for (defs = DF_REF_CHAIN (*use_rec); defs; defs = defs->next)
	      if (! DF_REF_IS_ARTIFICIAL (defs->ref))
		break;

	    set = single_set (DF_REF_INSN (defs->ref));
	    off += INTVAL (XEXP (SET_SRC (set), 1));
	  }
	for (byte = off; byte < off + INTVAL (MEM_SIZE (mem)); byte++)
	  {
	    /* Overlapping argument bytes would mean the USAGE list is
	       inconsistent.  */
	    if (!bitmap_set_bit (sp_bytes, byte - min_sp_off))
	      gcc_unreachable ();
	  }
      }

  /* Walk backwards, looking for argument stores.  The search stops
     when seeing another call, sp adjustment or memory store other than
     argument store.  */
  ret = false;
  for (insn = PREV_INSN (call_insn); insn; insn = prev_insn)
    {
      rtx set, mem, addr;
      HOST_WIDE_INT off, byte;

      /* Only search within the call's own basic block.  */
      if (insn == BB_HEAD (BLOCK_FOR_INSN (call_insn)))
	prev_insn = NULL_RTX;
      else
	prev_insn = PREV_INSN (insn);

      if (CALL_P (insn))
	break;

      if (!INSN_P (insn))
	continue;

      set = single_set (insn);
      if (!set || SET_DEST (set) == stack_pointer_rtx)
	break;

      if (!MEM_P (SET_DEST (set)))
	continue;

      mem = SET_DEST (set);
      addr = XEXP (mem, 0);
      off = 0;
      if (GET_CODE (addr) == PLUS
	  && REG_P (XEXP (addr, 0))
	  && CONST_INT_P (XEXP (addr, 1)))
	{
	  off = INTVAL (XEXP (addr, 1));
	  addr = XEXP (addr, 0);
	}
      if (addr != stack_pointer_rtx)
	{
	  if (!REG_P (addr))
	    break;
	  /* As above, resolve ADDR to sp + offset through the use-def
	     chains when they are available.  */
	  if (!fast)
	    {
	      df_ref *use_rec;
	      struct df_link *defs;
	      rtx set;

	      for (use_rec = DF_INSN_USES (insn); *use_rec; use_rec++)
		if (rtx_equal_p (addr, DF_REF_REG (*use_rec)))
		  break;

	      if (*use_rec == NULL)
		break;

	      for (defs = DF_REF_CHAIN (*use_rec); defs; defs = defs->next)
		if (! DF_REF_IS_ARTIFICIAL (defs->ref))
		  break;

	      if (defs == NULL)
		break;

	      set = single_set (DF_REF_INSN (defs->ref));
	      if (!set)
		break;

	      if (GET_CODE (SET_SRC (set)) != PLUS
		  || XEXP (SET_SRC (set), 0) != stack_pointer_rtx
		  || !CONST_INT_P (XEXP (SET_SRC (set), 1)))
		break;

	      off += INTVAL (XEXP (SET_SRC (set), 1));
	    }
	  else
	    break;
	}

      if (GET_MODE_SIZE (GET_MODE (mem)) == 0)
	break;

      /* Clear the bytes this store covers; a byte outside the argument
	 area or already cleared means this is not an argument store.  */
      for (byte = off; byte < off + GET_MODE_SIZE (GET_MODE (mem)); byte++)
	{
	  if (byte < min_sp_off
	      || byte >= max_sp_off
	      || !bitmap_clear_bit (sp_bytes, byte - min_sp_off))
	    break;
	}

      if (!deletable_insn_p (insn, fast, NULL))
	break;

      if (do_mark)
	mark_insn (insn, fast);
      else
	bitmap_set_bit (arg_stores, INSN_UID (insn));

      /* All argument bytes accounted for: the call may be deleted.  */
      if (bitmap_empty_p (sp_bytes))
	{
	  ret = true;
	  break;
	}
    }

  BITMAP_FREE (sp_bytes);
  if (!ret && arg_stores)
    bitmap_clear (arg_stores);

  return ret;
}
/* Delete all REG_EQUAL and REG_EQUIV notes that mention the registers
   INSN writes, to prevent bad dangling notes once INSN is deleted.  */

static void
delete_corresponding_reg_eq_notes (rtx insn)
{
  df_ref *def_rec;
  for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
    {
      df_ref def = *def_rec;
      unsigned int regno = DF_REF_REGNO (def);
      /* This loop is a little tricky.  We cannot just go down the
	 chain because it is being modified by the actions in the
	 loop.  So we just get the head.  We plan to drain the list
	 anyway.  */
      while (DF_REG_EQ_USE_CHAIN (regno))
	{
	  df_ref eq_use = DF_REG_EQ_USE_CHAIN (regno);
	  rtx noted_insn = DF_REF_INSN (eq_use);
	  rtx note = find_reg_note (noted_insn, REG_EQUAL, NULL_RTX);
	  if (!note)
	    note = find_reg_note (noted_insn, REG_EQUIV, NULL_RTX);

	  /* This assert is generally triggered when someone deletes a
	     REG_EQUAL or REG_EQUIV note by hacking the list manually
	     rather than calling remove_note.  */
	  gcc_assert (note);
	  /* remove_note also updates the df eq-use chain, which is how
	     the while condition makes progress.  */
	  remove_note (noted_insn, note);
	}
    }
}
/* Delete every instruction that hasn't been marked.  */

static void
delete_unmarked_insns (void)
{
  basic_block bb;
  rtx insn, next;
  bool must_clean = false;

  FOR_EACH_BB_REVERSE (bb)
    FOR_BB_INSNS_REVERSE_SAFE (bb, insn, next)
      if (INSN_P (insn))
	{
	  /* Always delete no-op moves.  */
	  if (noop_move_p (insn))
	    ;

	  /* Otherwise rely only on the DCE algorithm.  */
	  else if (marked_insn_p (insn))
	    continue;

	  /* Beware that reaching a dbg counter limit here can result
	     in miscompiled file.  This occurs when a group of insns
	     must be deleted together, typically because the kept insn
	     depends on the output from the deleted insn.  Deleting
	     this insns in reverse order (both at the bb level and
	     when looking at the blocks) minimizes this, but does not
	     eliminate it, since it is possible for the using insn to
	     be top of a block and the producer to be at the bottom of
	     the block.  However, in most cases this will only result
	     in an uninitialized use of an insn that is dead anyway.

	     However, there is one rare case that will cause a
	     miscompile: deletion of non-looping pure and constant
	     calls on a machine where ACCUMULATE_OUTGOING_ARGS is true.
	     In this case it is possible to remove the call, but leave
	     the argument pushes to the stack.  Because of the changes
	     to the stack pointer, this will almost always lead to a
	     miscompile.  */
	  if (!dbg_cnt (dce))
	    continue;

	  if (dump_file)
	    fprintf (dump_file, "DCE: Deleting insn %d\n", INSN_UID (insn));

	  /* Before we delete the insn we have to delete REG_EQUAL notes
	     for the destination regs in order to avoid dangling notes.  */
	  delete_corresponding_reg_eq_notes (insn);

	  /* If a pure or const call is deleted, this may make the cfg
	     have unreachable blocks.  We rememeber this and call
	     delete_unreachable_blocks at the end.  */
	  if (CALL_P (insn))
	    must_clean = true;

	  /* Now delete the insn.  */
	  delete_insn_and_edges (insn);
	}

  /* Deleted a pure or const call.  */
  if (must_clean)
    delete_unreachable_blocks ();
}
/* Go through the instructions and mark those whose necessity is not
   dependent on inter-instruction information.  Make sure all other
   instructions are not marked.  FAST selects which marking mode the
   helpers use.  */

static void
prescan_insns_for_dce (bool fast)
{
  basic_block bb;
  rtx insn, prev;
  bitmap arg_stores = NULL;

  if (dump_file)
    fprintf (dump_file, "Finding needed instructions:\n");

  /* Argument-store tracking is only meaningful when calls may be
     deleted (not during recursive dce) and arguments go on the stack.  */
  if (!df_in_progress && ACCUMULATE_OUTGOING_ARGS)
    arg_stores = BITMAP_ALLOC (NULL);

  FOR_EACH_BB (bb)
    {
      FOR_BB_INSNS_REVERSE_SAFE (bb, insn, prev)
	if (INSN_P (insn))
	  {
	    /* Don't mark argument stores now.  They will be marked
	       if needed when the associated CALL is marked.  */
	    if (arg_stores && bitmap_bit_p (arg_stores, INSN_UID (insn)))
	      continue;
	    if (deletable_insn_p (insn, fast, arg_stores))
	      mark_nonreg_stores (PATTERN (insn), insn, fast);
	    else
	      mark_insn (insn, fast);
	  }
      /* find_call_stack_args only looks at argument stores in the
	 same bb.  */
      if (arg_stores)
	bitmap_clear (arg_stores);
    }

  if (arg_stores)
    BITMAP_FREE (arg_stores);

  if (dump_file)
    fprintf (dump_file, "Finished finding needed instructions:\n");
}
/* UD-based DSE routines.  */

/* Mark instructions that define artificially-used registers, such as
   the frame pointer and the stack pointer.  Walks the artificial uses
   of every block and marks the (non-artificial) defs reaching them.  */

static void
mark_artificial_uses (void)
{
  basic_block bb;
  struct df_link *defs;
  df_ref *use_rec;

  FOR_ALL_BB (bb)
    {
      for (use_rec = df_get_artificial_uses (bb->index);
	   *use_rec; use_rec++)
	for (defs = DF_REF_CHAIN (*use_rec); defs; defs = defs->next)
	  if (! DF_REF_IS_ARTIFICIAL (defs->ref))
	    mark_insn (DF_REF_INSN (defs->ref), false);
    }
}
/* Mark every instruction that defines a register value that INSN uses.
   Debug insns are skipped so that they never keep other insns alive.  */

static void
mark_reg_dependencies (rtx insn)
{
  struct df_link *defs;
  df_ref *use_rec;

  if (DEBUG_INSN_P (insn))
    return;

  for (use_rec = DF_INSN_USES (insn); *use_rec; use_rec++)
    {
      df_ref use = *use_rec;
      if (dump_file)
	{
	  fprintf (dump_file, "Processing use of ");
	  print_simple_rtl (dump_file, DF_REF_REG (use));
	  fprintf (dump_file, " in insn %d:\n", INSN_UID (insn));
	}
      for (defs = DF_REF_CHAIN (use); defs; defs = defs->next)
	if (! DF_REF_IS_ARTIFICIAL (defs->ref))
	  mark_insn (DF_REF_INSN (defs->ref), false);
    }
}
/* Initialize global variables for a new DCE pass.  FAST selects the
   fast algorithm; the UD-chain algorithm additionally requires the
   DF_UD_CHAIN problem to be computed.  */

static void
init_dce (bool fast)
{
  if (!df_in_progress)
    {
      if (!fast)
	df_chain_add_problem (DF_UD_CHAIN);
      df_analyze ();
    }

  if (dump_file)
    df_dump (dump_file);

  if (fast)
    {
      bitmap_obstack_initialize (&dce_blocks_bitmap_obstack);
      bitmap_obstack_initialize (&dce_tmp_bitmap_obstack);
    }

  /* One bit per possible INSN_UID.  */
  marked = sbitmap_alloc (get_max_uid () + 1);
  sbitmap_zero (marked);
}
/* Free the data allocated by init_dce.  FAST must match the value
   passed to init_dce.  */

static void
fini_dce (bool fast)
{
  sbitmap_free (marked);

  if (fast)
    {
      bitmap_obstack_release (&dce_blocks_bitmap_obstack);
      bitmap_obstack_release (&dce_tmp_bitmap_obstack);
    }
}
/* UD-chain based DCE.  Marks needed insns, then propagates need
   through use-def chains via the worklist, then deletes the rest.  */

static unsigned int
rest_of_handle_ud_dce (void)
{
  rtx insn;

  init_dce (false);

  prescan_insns_for_dce (false);
  mark_artificial_uses ();
  /* Drain the worklist: each marked insn marks the defs it uses.  */
  while (VEC_length (rtx, worklist) > 0)
    {
      insn = VEC_pop (rtx, worklist);
      mark_reg_dependencies (insn);
    }
  VEC_free (rtx, heap, worklist);

  /* Before any insns are deleted, we must remove the chains since
     they are not bidirectional.  */
  df_remove_problem (df_chain);
  delete_unmarked_insns ();

  fini_dce (false);
  return 0;
}
735 static bool
736 gate_ud_dce (void)
738 return optimize > 1 && flag_dce
739 && dbg_cnt (dce_ud);
/* Pass descriptor for the UD-chain based RTL DCE pass.  */

struct rtl_opt_pass pass_ud_rtl_dce =
{
 {
  RTL_PASS,
  "dce",                                /* name */
  gate_ud_dce,                          /* gate */
  rest_of_handle_ud_dce,                /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_DCE,                               /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func |
  TODO_df_finish | TODO_verify_rtl_sharing |
  TODO_ggc_collect                      /* todo_flags_finish */
 }
};
/* -------------------------------------------------------------------------
   Fast DCE functions
   ------------------------------------------------------------------------- */

/* Process basic block BB at the byte level.  Return true if the
   live_in set has changed.  REDO_OUT is true if the info at the bottom
   of the block needs to be recalculated before starting.  AU is the
   proper set of artificial uses.  */

static bool
byte_dce_process_block (basic_block bb, bool redo_out, bitmap au)
{
  bitmap local_live = BITMAP_ALLOC (&dce_tmp_bitmap_obstack);
  rtx insn;
  bool block_changed;
  df_ref *def_rec;

  if (redo_out)
    {
      /* Need to redo the live_out set of this block if when one of
	 the succs of this block has had a change in it live in
	 set.  */
      edge e;
      edge_iterator ei;
      df_confluence_function_n con_fun_n = df_byte_lr->problem->con_fun_n;
      bitmap_clear (DF_BYTE_LR_OUT (bb));
      FOR_EACH_EDGE (e, ei, bb->succs)
	(*con_fun_n) (e);
    }

  if (dump_file)
    {
      fprintf (dump_file, "processing block %d live out = ", bb->index);
      df_print_byte_regset (dump_file, DF_BYTE_LR_OUT (bb));
    }

  bitmap_copy (local_live, DF_BYTE_LR_OUT (bb));

  df_byte_lr_simulate_artificial_refs_at_end (bb, local_live);

  FOR_BB_INSNS_REVERSE (bb, insn)
    if (INSN_P (insn))
      {
	/* The insn is needed if there is someone who uses the output.  */
	for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
	  {
	    df_ref def = *def_rec;
	    unsigned int last;
	    unsigned int dregno = DF_REF_REGNO (def);
	    unsigned int start = df_byte_lr_get_regno_start (dregno);
	    unsigned int len = df_byte_lr_get_regno_len (dregno);

	    unsigned int sb;
	    unsigned int lb;
	    /* This is one of the only places where DF_MM_MAY should
	       be used for defs.  Need to make sure that we are
	       checking for all of the bits that may be used.  */

	    if (!df_compute_accessed_bytes (def, DF_MM_MAY, &sb, &lb))
	      {
		/* Narrow [start, start+len) to the bytes actually
		   accessed by this def.  */
		start += sb;
		len = lb - sb;
	      }

	    if (bitmap_bit_p (au, dregno))
	      {
		mark_insn (insn, true);
		goto quickexit;
	      }

	    /* Any live byte in the def's range keeps the insn.  */
	    last = start + len;
	    while (start < last)
	      if (bitmap_bit_p (local_live, start++))
		{
		  mark_insn (insn, true);
		  goto quickexit;
		}
	  }

      quickexit:

	/* No matter if the instruction is needed or not, we remove
	   any regno in the defs from the live set.  */
	df_byte_lr_simulate_defs (insn, local_live);

	/* On the other hand, we do not allow the dead uses to set
	   anything in local_live.  */
	if (marked_insn_p (insn))
	  df_byte_lr_simulate_uses (insn, local_live);

	if (dump_file)
	  {
	    fprintf (dump_file, "finished processing insn %d live out = ",
		     INSN_UID (insn));
	    df_print_byte_regset (dump_file, local_live);
	  }
      }

  df_byte_lr_simulate_artificial_refs_at_top (bb, local_live);

  block_changed = !bitmap_equal_p (local_live, DF_BYTE_LR_IN (bb));
  if (block_changed)
    bitmap_copy (DF_BYTE_LR_IN (bb), local_live);
  BITMAP_FREE (local_live);
  return block_changed;
}
/* Process basic block BB at the whole-register level.  Return true if
   the live_in set has changed.  REDO_OUT is true if the info at the
   bottom of the block needs to be recalculated before starting.  AU is
   the proper set of artificial uses.  */

static bool
dce_process_block (basic_block bb, bool redo_out, bitmap au)
{
  bitmap local_live = BITMAP_ALLOC (&dce_tmp_bitmap_obstack);
  rtx insn;
  bool block_changed;
  df_ref *def_rec;

  if (redo_out)
    {
      /* Need to redo the live_out set of this block if when one of
	 the succs of this block has had a change in it live in
	 set.  */
      edge e;
      edge_iterator ei;
      df_confluence_function_n con_fun_n = df_lr->problem->con_fun_n;
      bitmap_clear (DF_LR_OUT (bb));
      FOR_EACH_EDGE (e, ei, bb->succs)
	(*con_fun_n) (e);
    }

  if (dump_file)
    {
      fprintf (dump_file, "processing block %d lr out = ", bb->index);
      df_print_regset (dump_file, DF_LR_OUT (bb));
    }

  bitmap_copy (local_live, DF_LR_OUT (bb));

  df_simulate_initialize_backwards (bb, local_live);

  FOR_BB_INSNS_REVERSE (bb, insn)
    if (INSN_P (insn))
      {
	bool needed = false;

	/* The insn is needed if there is someone who uses the output.  */
	for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
	  if (bitmap_bit_p (local_live, DF_REF_REGNO (*def_rec))
	      || bitmap_bit_p (au, DF_REF_REGNO (*def_rec)))
	    {
	      needed = true;
	      break;
	    }

	if (needed)
	  mark_insn (insn, true);

	/* No matter if the instruction is needed or not, we remove
	   any regno in the defs from the live set.  */
	df_simulate_defs (insn, local_live);

	/* On the other hand, we do not allow the dead uses to set
	   anything in local_live.  */
	if (marked_insn_p (insn))
	  df_simulate_uses (insn, local_live);
      }

  df_simulate_finalize_backwards (bb, local_live);

  block_changed = !bitmap_equal_p (local_live, DF_LR_IN (bb));
  if (block_changed)
    bitmap_copy (DF_LR_IN (bb), local_live);

  BITMAP_FREE (local_live);
  return block_changed;
}
/* Perform fast DCE once initialization is done.  If BYTE_LEVEL is
   true, use the byte level dce, otherwise do it at the pseudo
   level.  Iterates backwards-dataflow over the blocks until the
   live-in sets stabilize, deleting unmarked insns between rounds.  */

static void
fast_dce (bool byte_level)
{
  int *postorder = df_get_postorder (DF_BACKWARD);
  int n_blocks = df_get_n_blocks (DF_BACKWARD);
  /* The set of blocks that have been seen on this iteration.  */
  bitmap processed = BITMAP_ALLOC (&dce_blocks_bitmap_obstack);
  /* The set of blocks that need to have the out vectors reset because
     the in of one of their successors has changed.  */
  bitmap redo_out = BITMAP_ALLOC (&dce_blocks_bitmap_obstack);
  bitmap all_blocks = BITMAP_ALLOC (&dce_blocks_bitmap_obstack);
  bool global_changed = true;

  /* These regs are considered always live so if they end up dying
     because of some def, we need to bring the back again.  Calling
     df_simulate_fixup_sets has the disadvantage of calling
     bb_has_eh_pred once per insn, so we cache the information
     here.  */
  bitmap au = df->regular_block_artificial_uses;
  bitmap au_eh = df->eh_block_artificial_uses;
  int i;

  prescan_insns_for_dce (true);

  for (i = 0; i < n_blocks; i++)
    bitmap_set_bit (all_blocks, postorder[i]);

  while (global_changed)
    {
      global_changed = false;

      for (i = 0; i < n_blocks; i++)
	{
	  int index = postorder[i];
	  basic_block bb = BASIC_BLOCK (index);
	  bool local_changed;

	  /* Entry/exit blocks carry no insns to process.  */
	  if (index < NUM_FIXED_BLOCKS)
	    {
	      bitmap_set_bit (processed, index);
	      continue;
	    }

	  if (byte_level)
	    local_changed
	      = byte_dce_process_block (bb, bitmap_bit_p (redo_out, index),
					bb_has_eh_pred (bb) ? au_eh : au);
	  else
	    local_changed
	      = dce_process_block (bb, bitmap_bit_p (redo_out, index),
				   bb_has_eh_pred (bb) ? au_eh : au);
	  bitmap_set_bit (processed, index);

	  if (local_changed)
	    {
	      edge e;
	      edge_iterator ei;
	      FOR_EACH_EDGE (e, ei, bb->preds)
		if (bitmap_bit_p (processed, e->src->index))
		  /* Be tricky about when we need to iterate the
		     analysis.  We only have redo the analysis if the
		     bitmaps change at the top of a block that is the
		     entry to a loop.  */
		  global_changed = true;
		else
		  bitmap_set_bit (redo_out, e->src->index);
	    }
	}

      if (global_changed)
	{
	  /* Turn off the RUN_DCE flag to prevent recursive calls to
	     dce.  */
	  int old_flag = df_clear_flags (DF_LR_RUN_DCE);

	  /* So something was deleted that requires a redo.  Do it on
	     the cheap.  */
	  delete_unmarked_insns ();
	  sbitmap_zero (marked);
	  bitmap_clear (processed);
	  bitmap_clear (redo_out);

	  /* We do not need to rescan any instructions.  We only need
	     to redo the dataflow equations for the blocks that had a
	     change at the top of the block.  Then we need to redo the
	     iteration.  */
	  if (byte_level)
	    df_analyze_problem (df_byte_lr, all_blocks, postorder, n_blocks);
	  else
	    df_analyze_problem (df_lr, all_blocks, postorder, n_blocks);

	  /* Restore the flag only if it was set on entry.  */
	  if (old_flag & DF_LR_RUN_DCE)
	    df_set_flags (DF_LR_RUN_DCE);

	  prescan_insns_for_dce (true);
	}
    }

  delete_unmarked_insns ();

  BITMAP_FREE (processed);
  BITMAP_FREE (redo_out);
  BITMAP_FREE (all_blocks);
}
/* Fast register level DCE.  */

static unsigned int
rest_of_handle_fast_dce (void)
{
  init_dce (true);
  fast_dce (false);
  fini_dce (true);
  return 0;
}
/* Fast byte level DCE.  Requires the byte-level live register problem
   to be computed first.  */

static unsigned int
rest_of_handle_fast_byte_dce (void)
{
  df_byte_lr_add_problem ();
  init_dce (true);
  fast_dce (true);
  fini_dce (true);
  return 0;
}
/* This is an internal call that is used by the df live register
   problem to run fast dce as a side effect of creating the live
   information.  The stack is organized so that the lr problem is run,
   this pass is run, which updates the live info and the df scanning
   info, and then returns to allow the rest of the problems to be run.

   This can be called by elsewhere but it will not update the bit
   vectors for any other problems than LR.  */

void
run_fast_df_dce (void)
{
  if (flag_dce)
    {
      /* If dce is able to delete something, it has to happen
	 immediately.  Otherwise there will be problems handling the
	 eq_notes.  */
      int old_flags =
	df_clear_flags (DF_DEFER_INSN_RESCAN + DF_NO_INSN_RESCAN);

      /* Set df_in_progress so the recursive invocation does not try to
	 reenter the df engine or delete calls.  */
      df_in_progress = true;
      rest_of_handle_fast_dce ();
      df_in_progress = false;

      df_set_flags (old_flags);
    }
}
1110 /* Run a fast DCE pass. */
1112 void
1113 run_fast_dce (void)
1115 if (flag_dce)
1116 rest_of_handle_fast_dce ();
1120 static bool
1121 gate_fast_dce (void)
1123 return optimize > 0 && flag_dce
1124 && dbg_cnt (dce_fast);
/* Pass descriptor for the fast register-level RTL DCE pass.  */

struct rtl_opt_pass pass_fast_rtl_dce =
{
 {
  RTL_PASS,
  "dce",                                /* name */
  gate_fast_dce,                        /* gate */
  rest_of_handle_fast_dce,              /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_DCE,                               /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func |
  TODO_df_finish | TODO_verify_rtl_sharing |
  TODO_ggc_collect                      /* todo_flags_finish */
 }
};
/* Pass descriptor for the fast byte-level RTL DCE pass.  Shares the
   gate with the register-level fast pass.  */

struct rtl_opt_pass pass_fast_rtl_byte_dce =
{
 {
  RTL_PASS,
  "byte-dce",                           /* name */
  gate_fast_dce,                        /* gate */
  rest_of_handle_fast_byte_dce,         /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_DCE,                               /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func |
  TODO_df_finish | TODO_verify_rtl_sharing |
  TODO_ggc_collect                      /* todo_flags_finish */
 }
};