gcc/dce.c
/* RTL dead code elimination.
   Copyright (C) 2005-2018 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "predict.h"
#include "df.h"
#include "memmodel.h"
#include "tm_p.h"
#include "emit-rtl.h"  /* FIXME: Can go away once crtl is moved to rtl.h.  */
#include "cfgrtl.h"
#include "cfgbuild.h"
#include "cfgcleanup.h"
#include "dce.h"
#include "valtrack.h"
#include "tree-pass.h"
#include "dbgcnt.h"

/* -------------------------------------------------------------------------
   Core mark/delete routines
   ------------------------------------------------------------------------- */

/* True if we are invoked while the df engine is running; in this case,
   we don't want to reenter it.  */
static bool df_in_progress = false;

/* True if we are allowed to alter the CFG in this pass.  */
static bool can_alter_cfg = false;

/* Instructions that have been marked but whose dependencies have not
   yet been processed.  */
static vec<rtx_insn *> worklist;

/* Bitmap of instructions marked as needed indexed by INSN_UID.  */
static sbitmap marked;

/* Bitmap obstacks used for block processing by the fast algorithm.  */
static bitmap_obstack dce_blocks_bitmap_obstack;
static bitmap_obstack dce_tmp_bitmap_obstack;

static bool find_call_stack_args (rtx_call_insn *, bool, bool, bitmap);

/* A subroutine for which BODY is part of the instruction being tested;
   either the top-level pattern, or an element of a PARALLEL.  The
   instruction is known not to be a bare USE or CLOBBER.  */

static bool
deletable_insn_p_1 (rtx body)
{
  switch (GET_CODE (body))
    {
    case PREFETCH:
    case TRAP_IF:
      /* The UNSPEC case was added here because the ia-64 claims that
         USEs do not work after reload and generates UNSPECS rather
         than USEs.  Since dce is run after reload we need to avoid
         deleting these even if they are dead.  If it turns out that
         USEs really do work after reload, the ia-64 should be
         changed, and the UNSPEC case can be removed.  */
    case UNSPEC:
      return false;

    default:
      return !volatile_refs_p (body);
    }
}

/* Return true if INSN is a normal instruction that can be deleted by
   the DCE pass.  */

static bool
deletable_insn_p (rtx_insn *insn, bool fast, bitmap arg_stores)
{
  rtx body, x;
  int i;
  df_ref def;

  if (CALL_P (insn)
      /* We cannot delete calls inside of the recursive dce because
         this may cause basic blocks to be deleted and this messes up
         the rest of the stack of optimization passes.  */
      && (!df_in_progress)
      /* We cannot delete pure or const sibling calls because it is
         hard to see the result.  */
      && (!SIBLING_CALL_P (insn))
      /* We can delete dead const or pure calls as long as they do not
         infinite loop.  */
      && (RTL_CONST_OR_PURE_CALL_P (insn)
          && !RTL_LOOPING_CONST_OR_PURE_CALL_P (insn)))
    return find_call_stack_args (as_a <rtx_call_insn *> (insn), false,
                                 fast, arg_stores);

  /* Don't delete jumps, notes and the like.  */
  if (!NONJUMP_INSN_P (insn))
    return false;

  /* Don't delete insns that may throw if we cannot do so.  */
  if (!(cfun->can_delete_dead_exceptions && can_alter_cfg)
      && !insn_nothrow_p (insn))
    return false;

  /* If INSN sets a global_reg, leave it untouched.  */
  FOR_EACH_INSN_DEF (def, insn)
    if (HARD_REGISTER_NUM_P (DF_REF_REGNO (def))
        && global_regs[DF_REF_REGNO (def)])
      return false;
    /* Initialization of pseudo PIC register should never be removed.  */
    else if (DF_REF_REG (def) == pic_offset_table_rtx
             && REGNO (pic_offset_table_rtx) >= FIRST_PSEUDO_REGISTER)
      return false;

  body = PATTERN (insn);
  switch (GET_CODE (body))
    {
    case USE:
    case VAR_LOCATION:
      return false;

    case CLOBBER:
      if (fast)
        {
          /* A CLOBBER of a dead pseudo register serves no purpose.
             That is not necessarily true for hard registers until
             after reload.  */
          x = XEXP (body, 0);
          return REG_P (x) && (!HARD_REGISTER_P (x) || reload_completed);
        }
      else
        /* Because of the way that use-def chains are built, it is not
           possible to tell if the clobber is dead because it can
           never be the target of a use-def chain.  */
        return false;

    case PARALLEL:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        if (!deletable_insn_p_1 (XVECEXP (body, 0, i)))
          return false;
      return true;

    default:
      return deletable_insn_p_1 (body);
    }
}

/* Return true if INSN has been marked as needed.  */

static inline int
marked_insn_p (rtx_insn *insn)
{
  /* Artificial defs are always needed and they do not have an insn.
     We should never see them here.  */
  gcc_assert (insn);
  return bitmap_bit_p (marked, INSN_UID (insn));
}


/* If INSN has not yet been marked as needed, mark it now, and add it to
   the worklist.  */

static void
mark_insn (rtx_insn *insn, bool fast)
{
  if (!marked_insn_p (insn))
    {
      if (!fast)
        worklist.safe_push (insn);
      bitmap_set_bit (marked, INSN_UID (insn));
      if (dump_file)
        fprintf (dump_file, " Adding insn %d to worklist\n", INSN_UID (insn));
      if (CALL_P (insn)
          && !df_in_progress
          && !SIBLING_CALL_P (insn)
          && (RTL_CONST_OR_PURE_CALL_P (insn)
              && !RTL_LOOPING_CONST_OR_PURE_CALL_P (insn)))
        find_call_stack_args (as_a <rtx_call_insn *> (insn), true, fast, NULL);
    }
}


/* A note_stores callback used by mark_nonreg_stores.  DATA is the
   instruction containing DEST.  */

static void
mark_nonreg_stores_1 (rtx dest, const_rtx pattern, void *data)
{
  if (GET_CODE (pattern) != CLOBBER && !REG_P (dest))
    mark_insn ((rtx_insn *) data, true);
}


/* A note_stores callback used by mark_nonreg_stores.  DATA is the
   instruction containing DEST.  */

static void
mark_nonreg_stores_2 (rtx dest, const_rtx pattern, void *data)
{
  if (GET_CODE (pattern) != CLOBBER && !REG_P (dest))
    mark_insn ((rtx_insn *) data, false);
}


/* Mark INSN if BODY stores to a non-register destination.  */

static void
mark_nonreg_stores (rtx body, rtx_insn *insn, bool fast)
{
  if (fast)
    note_stores (body, mark_nonreg_stores_1, insn);
  else
    note_stores (body, mark_nonreg_stores_2, insn);
}

/* Return true if a store to SIZE bytes, starting OFF bytes from stack pointer,
   is a call argument store, and clear corresponding bits from SP_BYTES
   bitmap if it is.  */
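
/* For instance, if the call's arguments occupy [sp + 16, sp + 48),
   MIN_SP_OFF is 16, MAX_SP_OFF is 48 and SP_BYTES holds one bit per byte
   in that range.  An 8-byte store at OFF 24 clears bits 8..15; a store
   that touches any byte outside the range, or a byte whose bit has
   already been cleared by an earlier store, is rejected.  */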

static bool
check_argument_store (HOST_WIDE_INT size, HOST_WIDE_INT off,
                      HOST_WIDE_INT min_sp_off, HOST_WIDE_INT max_sp_off,
                      bitmap sp_bytes)
{
  HOST_WIDE_INT byte;
  for (byte = off; byte < off + size; byte++)
    {
      if (byte < min_sp_off
          || byte >= max_sp_off
          || !bitmap_clear_bit (sp_bytes, byte - min_sp_off))
        return false;
    }
  return true;
}

/* Try to find all stack stores of CALL_INSN arguments if
   ACCUMULATE_OUTGOING_ARGS.  If all stack stores have been found
   and it is therefore safe to eliminate the call, return true,
   otherwise return false.  This function should first be called
   with DO_MARK false, and called again with DO_MARK true only when
   the CALL_INSN is actually going to be marked.  */
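
/* In outline: the CALL_INSN_FUNCTION_USAGE list is scanned twice, first
   to compute MIN_SP_OFF/MAX_SP_OFF and then to populate SP_BYTES with one
   bit per argument byte; afterwards the insns before the call are walked
   backwards and each store that check_argument_store accepts is either
   marked (DO_MARK) or recorded in ARG_STORES, until every byte in
   SP_BYTES has been accounted for.  */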

static bool
find_call_stack_args (rtx_call_insn *call_insn, bool do_mark, bool fast,
                      bitmap arg_stores)
{
  rtx p;
  rtx_insn *insn, *prev_insn;
  bool ret;
  HOST_WIDE_INT min_sp_off, max_sp_off;
  bitmap sp_bytes;

  gcc_assert (CALL_P (call_insn));
  if (!ACCUMULATE_OUTGOING_ARGS)
    return true;

  if (!do_mark)
    {
      gcc_assert (arg_stores);
      bitmap_clear (arg_stores);
    }

  min_sp_off = INTTYPE_MAXIMUM (HOST_WIDE_INT);
  max_sp_off = 0;

  /* First determine the minimum and maximum offset from sp for
     stored arguments.  */
  for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
    if (GET_CODE (XEXP (p, 0)) == USE
        && MEM_P (XEXP (XEXP (p, 0), 0)))
      {
        rtx mem = XEXP (XEXP (p, 0), 0), addr;
        HOST_WIDE_INT off = 0, size;
        if (!MEM_SIZE_KNOWN_P (mem) || !MEM_SIZE (mem).is_constant (&size))
          return false;
        addr = XEXP (mem, 0);
        if (GET_CODE (addr) == PLUS
            && REG_P (XEXP (addr, 0))
            && CONST_INT_P (XEXP (addr, 1)))
          {
            off = INTVAL (XEXP (addr, 1));
            addr = XEXP (addr, 0);
          }
        if (addr != stack_pointer_rtx)
          {
            if (!REG_P (addr))
              return false;
            /* If not fast, use chains to see if addr wasn't set to
               sp + offset.  */
            if (!fast)
              {
                df_ref use;
                struct df_link *defs;
                rtx set;

                FOR_EACH_INSN_USE (use, call_insn)
                  if (rtx_equal_p (addr, DF_REF_REG (use)))
                    break;

                if (use == NULL)
                  return false;

                for (defs = DF_REF_CHAIN (use); defs; defs = defs->next)
                  if (! DF_REF_IS_ARTIFICIAL (defs->ref))
                    break;

                if (defs == NULL)
                  return false;

                set = single_set (DF_REF_INSN (defs->ref));
                if (!set)
                  return false;

                if (GET_CODE (SET_SRC (set)) != PLUS
                    || XEXP (SET_SRC (set), 0) != stack_pointer_rtx
                    || !CONST_INT_P (XEXP (SET_SRC (set), 1)))
                  return false;

                off += INTVAL (XEXP (SET_SRC (set), 1));
              }
            else
              return false;
          }
        min_sp_off = MIN (min_sp_off, off);
        max_sp_off = MAX (max_sp_off, off + size);
      }

  if (min_sp_off >= max_sp_off)
    return true;
  sp_bytes = BITMAP_ALLOC (NULL);

  /* Set bits in SP_BYTES bitmap for bytes relative to sp + min_sp_off
     which contain arguments.  Checking has been done in the previous
     loop.  */
  for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
    if (GET_CODE (XEXP (p, 0)) == USE
        && MEM_P (XEXP (XEXP (p, 0), 0)))
      {
        rtx mem = XEXP (XEXP (p, 0), 0), addr;
        HOST_WIDE_INT off = 0, byte, size;
        /* Checked in the previous iteration.  */
        size = MEM_SIZE (mem).to_constant ();
        addr = XEXP (mem, 0);
        if (GET_CODE (addr) == PLUS
            && REG_P (XEXP (addr, 0))
            && CONST_INT_P (XEXP (addr, 1)))
          {
            off = INTVAL (XEXP (addr, 1));
            addr = XEXP (addr, 0);
          }
        if (addr != stack_pointer_rtx)
          {
            df_ref use;
            struct df_link *defs;
            rtx set;

            FOR_EACH_INSN_USE (use, call_insn)
              if (rtx_equal_p (addr, DF_REF_REG (use)))
                break;

            for (defs = DF_REF_CHAIN (use); defs; defs = defs->next)
              if (! DF_REF_IS_ARTIFICIAL (defs->ref))
                break;

            set = single_set (DF_REF_INSN (defs->ref));
            off += INTVAL (XEXP (SET_SRC (set), 1));
          }
        for (byte = off; byte < off + size; byte++)
          {
            if (!bitmap_set_bit (sp_bytes, byte - min_sp_off))
              gcc_unreachable ();
          }
      }

  /* Walk backwards, looking for argument stores.  The search stops
     when seeing another call, sp adjustment or memory store other than
     argument store.  */
  ret = false;
  for (insn = PREV_INSN (call_insn); insn; insn = prev_insn)
    {
      rtx set, mem, addr;
      HOST_WIDE_INT off;

      if (insn == BB_HEAD (BLOCK_FOR_INSN (call_insn)))
        prev_insn = NULL;
      else
        prev_insn = PREV_INSN (insn);

      if (CALL_P (insn))
        break;

      if (!NONDEBUG_INSN_P (insn))
        continue;

      set = single_set (insn);
      if (!set || SET_DEST (set) == stack_pointer_rtx)
        break;

      if (!MEM_P (SET_DEST (set)))
        continue;

      mem = SET_DEST (set);
      addr = XEXP (mem, 0);
      off = 0;
      if (GET_CODE (addr) == PLUS
          && REG_P (XEXP (addr, 0))
          && CONST_INT_P (XEXP (addr, 1)))
        {
          off = INTVAL (XEXP (addr, 1));
          addr = XEXP (addr, 0);
        }
      if (addr != stack_pointer_rtx)
        {
          if (!REG_P (addr))
            break;
          if (!fast)
            {
              df_ref use;
              struct df_link *defs;
              rtx set;

              FOR_EACH_INSN_USE (use, insn)
                if (rtx_equal_p (addr, DF_REF_REG (use)))
                  break;

              if (use == NULL)
                break;

              for (defs = DF_REF_CHAIN (use); defs; defs = defs->next)
                if (! DF_REF_IS_ARTIFICIAL (defs->ref))
                  break;

              if (defs == NULL)
                break;

              set = single_set (DF_REF_INSN (defs->ref));
              if (!set)
                break;

              if (GET_CODE (SET_SRC (set)) != PLUS
                  || XEXP (SET_SRC (set), 0) != stack_pointer_rtx
                  || !CONST_INT_P (XEXP (SET_SRC (set), 1)))
                break;

              off += INTVAL (XEXP (SET_SRC (set), 1));
            }
          else
            break;
        }

      HOST_WIDE_INT size;
      if (!MEM_SIZE_KNOWN_P (mem)
          || !MEM_SIZE (mem).is_constant (&size)
          || !check_argument_store (size, off, min_sp_off,
                                    max_sp_off, sp_bytes))
        break;

      if (!deletable_insn_p (insn, fast, NULL))
        break;

      if (do_mark)
        mark_insn (insn, fast);
      else
        bitmap_set_bit (arg_stores, INSN_UID (insn));

      if (bitmap_empty_p (sp_bytes))
        {
          ret = true;
          break;
        }
    }

  BITMAP_FREE (sp_bytes);
  if (!ret && arg_stores)
    bitmap_clear (arg_stores);

  return ret;
}

/* Remove all REG_EQUAL and REG_EQUIV notes referring to the registers INSN
   writes to.  */

static void
remove_reg_equal_equiv_notes_for_defs (rtx_insn *insn)
{
  df_ref def;

  FOR_EACH_INSN_DEF (def, insn)
    remove_reg_equal_equiv_notes_for_regno (DF_REF_REGNO (def));
}

/* Scan all BBs for debug insns and reset those that reference values
   defined in unmarked insns.  */

static void
reset_unmarked_insns_debug_uses (void)
{
  basic_block bb;
  rtx_insn *insn, *next;

  FOR_EACH_BB_REVERSE_FN (bb, cfun)
    FOR_BB_INSNS_REVERSE_SAFE (bb, insn, next)
      if (DEBUG_INSN_P (insn))
        {
          df_ref use;

          FOR_EACH_INSN_USE (use, insn)
            {
              struct df_link *defs;
              for (defs = DF_REF_CHAIN (use); defs; defs = defs->next)
                {
                  rtx_insn *ref_insn;
                  if (DF_REF_IS_ARTIFICIAL (defs->ref))
                    continue;
                  ref_insn = DF_REF_INSN (defs->ref);
                  if (!marked_insn_p (ref_insn))
                    break;
                }
              if (!defs)
                continue;
              /* ??? FIXME could we propagate the values assigned to
                 each of the DEFs?  */
              INSN_VAR_LOCATION_LOC (insn) = gen_rtx_UNKNOWN_VAR_LOC ();
              df_insn_rescan_debug_internal (insn);
              break;
            }
        }
}

/* Delete every instruction that hasn't been marked.  */

static void
delete_unmarked_insns (void)
{
  basic_block bb;
  rtx_insn *insn, *next;
  bool must_clean = false;

  FOR_EACH_BB_REVERSE_FN (bb, cfun)
    FOR_BB_INSNS_REVERSE_SAFE (bb, insn, next)
      if (NONDEBUG_INSN_P (insn))
        {
          /* Always delete no-op moves.  */
          if (noop_move_p (insn))
            ;

          /* Otherwise rely only on the DCE algorithm.  */
          else if (marked_insn_p (insn))
            continue;

          /* Beware that reaching a dbg counter limit here can result
             in a miscompiled file.  This occurs when a group of insns
             must be deleted together, typically because the kept insn
             depends on the output from the deleted insn.  Deleting
             these insns in reverse order (both at the bb level and
             when looking at the blocks) minimizes this, but does not
             eliminate it, since it is possible for the using insn to
             be top of a block and the producer to be at the bottom of
             the block.  However, in most cases this will only result
             in an uninitialized use of an insn that is dead anyway.

             However, there is one rare case that will cause a
             miscompile: deletion of non-looping pure and constant
             calls on a machine where ACCUMULATE_OUTGOING_ARGS is true.
             In this case it is possible to remove the call, but leave
             the argument pushes to the stack.  Because of the changes
             to the stack pointer, this will almost always lead to a
             miscompile.  */
          if (!dbg_cnt (dce))
            continue;

          if (crtl->shrink_wrapped_separate
              && find_reg_note (insn, REG_CFA_RESTORE, NULL))
            {
              if (dump_file)
                fprintf (dump_file, "DCE: NOT deleting insn %d, it's a "
                         "callee-save restore\n", INSN_UID (insn));
              continue;
            }

          if (dump_file)
            fprintf (dump_file, "DCE: Deleting insn %d\n", INSN_UID (insn));

          /* Before we delete the insn we have to remove the REG_EQUAL notes
             for the destination regs in order to avoid dangling notes.  */
          remove_reg_equal_equiv_notes_for_defs (insn);

          /* If a pure or const call is deleted, this may make the cfg
             have unreachable blocks.  We remember this and call
             delete_unreachable_blocks at the end.  */
          if (CALL_P (insn))
            must_clean = true;

          /* Now delete the insn.  */
          delete_insn_and_edges (insn);
        }

  /* Deleted a pure or const call.  */
  if (must_clean)
    delete_unreachable_blocks ();
}

/* Go through the instructions and mark those whose necessity is not
   dependent on inter-instruction information.  Make sure all other
   instructions are not marked.  */

static void
prescan_insns_for_dce (bool fast)
{
  basic_block bb;
  rtx_insn *insn, *prev;
  bitmap arg_stores = NULL;

  if (dump_file)
    fprintf (dump_file, "Finding needed instructions:\n");

  if (!df_in_progress && ACCUMULATE_OUTGOING_ARGS)
    arg_stores = BITMAP_ALLOC (NULL);

  FOR_EACH_BB_FN (bb, cfun)
    {
      FOR_BB_INSNS_REVERSE_SAFE (bb, insn, prev)
        if (NONDEBUG_INSN_P (insn))
          {
            /* Don't mark argument stores now.  They will be marked
               if needed when the associated CALL is marked.  */
            if (arg_stores && bitmap_bit_p (arg_stores, INSN_UID (insn)))
              continue;
            if (deletable_insn_p (insn, fast, arg_stores))
              mark_nonreg_stores (PATTERN (insn), insn, fast);
            else
              mark_insn (insn, fast);
          }
      /* find_call_stack_args only looks at argument stores in the
         same bb.  */
      if (arg_stores)
        bitmap_clear (arg_stores);
    }

  if (arg_stores)
    BITMAP_FREE (arg_stores);

  if (dump_file)
    fprintf (dump_file, "Finished finding needed instructions:\n");
}

/* UD-based DSE routines.  */

/* Mark instructions that define artificially-used registers, such as
   the frame pointer and the stack pointer.  */

static void
mark_artificial_uses (void)
{
  basic_block bb;
  struct df_link *defs;
  df_ref use;

  FOR_ALL_BB_FN (bb, cfun)
    FOR_EACH_ARTIFICIAL_USE (use, bb->index)
      for (defs = DF_REF_CHAIN (use); defs; defs = defs->next)
        if (!DF_REF_IS_ARTIFICIAL (defs->ref))
          mark_insn (DF_REF_INSN (defs->ref), false);
}


/* Mark every instruction that defines a register value that INSN uses.  */

static void
mark_reg_dependencies (rtx_insn *insn)
{
  struct df_link *defs;
  df_ref use;

  if (DEBUG_INSN_P (insn))
    return;

  FOR_EACH_INSN_USE (use, insn)
    {
      if (dump_file)
        {
          fprintf (dump_file, "Processing use of ");
          print_simple_rtl (dump_file, DF_REF_REG (use));
          fprintf (dump_file, " in insn %d:\n", INSN_UID (insn));
        }
      for (defs = DF_REF_CHAIN (use); defs; defs = defs->next)
        if (! DF_REF_IS_ARTIFICIAL (defs->ref))
          mark_insn (DF_REF_INSN (defs->ref), false);
    }
}

/* Initialize global variables for a new DCE pass.  */

static void
init_dce (bool fast)
{
  if (!df_in_progress)
    {
      if (!fast)
        {
          df_set_flags (DF_RD_PRUNE_DEAD_DEFS);
          df_chain_add_problem (DF_UD_CHAIN);
        }
      df_analyze ();
    }

  if (dump_file)
    df_dump (dump_file);

  if (fast)
    {
      bitmap_obstack_initialize (&dce_blocks_bitmap_obstack);
      bitmap_obstack_initialize (&dce_tmp_bitmap_obstack);
      can_alter_cfg = false;
    }
  else
    can_alter_cfg = true;

  marked = sbitmap_alloc (get_max_uid () + 1);
  bitmap_clear (marked);
}


/* Free the data allocated by init_dce.  */

static void
fini_dce (bool fast)
{
  sbitmap_free (marked);

  if (fast)
    {
      bitmap_obstack_release (&dce_blocks_bitmap_obstack);
      bitmap_obstack_release (&dce_tmp_bitmap_obstack);
    }
}

/* UD-chain based DCE.  */

static unsigned int
rest_of_handle_ud_dce (void)
{
  rtx_insn *insn;

  init_dce (false);

  prescan_insns_for_dce (false);
  mark_artificial_uses ();
  while (worklist.length () > 0)
    {
      insn = worklist.pop ();
      mark_reg_dependencies (insn);
    }
  worklist.release ();

  if (MAY_HAVE_DEBUG_BIND_INSNS)
    reset_unmarked_insns_debug_uses ();

  /* Before any insns are deleted, we must remove the chains since
     they are not bidirectional.  */
  df_remove_problem (df_chain);
  delete_unmarked_insns ();

  fini_dce (false);
  return 0;
}

namespace {

const pass_data pass_data_ud_rtl_dce =
{
  RTL_PASS, /* type */
  "ud_dce", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_DCE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};

class pass_ud_rtl_dce : public rtl_opt_pass
{
public:
  pass_ud_rtl_dce (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_ud_rtl_dce, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      return optimize > 1 && flag_dce && dbg_cnt (dce_ud);
    }

  virtual unsigned int execute (function *)
    {
      return rest_of_handle_ud_dce ();
    }

}; // class pass_ud_rtl_dce

} // anon namespace

rtl_opt_pass *
make_pass_ud_rtl_dce (gcc::context *ctxt)
{
  return new pass_ud_rtl_dce (ctxt);
}

/* -------------------------------------------------------------------------
   Fast DCE functions
   ------------------------------------------------------------------------- */

/* Process basic block BB.  Return true if the live_in set has
   changed.  REDO_OUT is true if the info at the bottom of the block
   needs to be recalculated before starting.  Track global substitution
   of uses of dead pseudos in debug insns using GLOBAL_DEBUG.  */

static bool
word_dce_process_block (basic_block bb, bool redo_out,
                        struct dead_debug_global *global_debug)
{
  bitmap local_live = BITMAP_ALLOC (&dce_tmp_bitmap_obstack);
  rtx_insn *insn;
  bool block_changed;
  struct dead_debug_local debug;

  if (redo_out)
    {
      /* Need to redo the live_out set of this block when one of
         the succs of this block has had a change in its live_in
         set.  */
      edge e;
      edge_iterator ei;
      df_confluence_function_n con_fun_n = df_word_lr->problem->con_fun_n;
      bitmap_clear (DF_WORD_LR_OUT (bb));
      FOR_EACH_EDGE (e, ei, bb->succs)
        (*con_fun_n) (e);
    }

  if (dump_file)
    {
      fprintf (dump_file, "processing block %d live out = ", bb->index);
      df_print_word_regset (dump_file, DF_WORD_LR_OUT (bb));
    }

  bitmap_copy (local_live, DF_WORD_LR_OUT (bb));
  dead_debug_local_init (&debug, NULL, global_debug);
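
  /* DF_WORD_LR tracks liveness at word granularity: a double-word pseudo
     REGNO occupies two bits, 2 * REGNO for the low word and 2 * REGNO + 1
     for the high word.  A debug use is recorded below only when both
     words are dead at this point.  */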

  FOR_BB_INSNS_REVERSE (bb, insn)
    if (DEBUG_INSN_P (insn))
      {
        df_ref use;
        FOR_EACH_INSN_USE (use, insn)
          if (DF_REF_REGNO (use) >= FIRST_PSEUDO_REGISTER
              && known_eq (GET_MODE_SIZE (GET_MODE (DF_REF_REAL_REG (use))),
                           2 * UNITS_PER_WORD)
              && !bitmap_bit_p (local_live, 2 * DF_REF_REGNO (use))
              && !bitmap_bit_p (local_live, 2 * DF_REF_REGNO (use) + 1))
            dead_debug_add (&debug, use, DF_REF_REGNO (use));
      }
    else if (INSN_P (insn))
      {
        bool any_changed;

        /* No matter if the instruction is needed or not, we remove
           any regno in the defs from the live set.  */
        any_changed = df_word_lr_simulate_defs (insn, local_live);
        if (any_changed)
          mark_insn (insn, true);

        /* On the other hand, we do not allow the dead uses to set
           anything in local_live.  */
        if (marked_insn_p (insn))
          df_word_lr_simulate_uses (insn, local_live);

        /* Insert debug temps for dead REGs used in subsequent debug
           insns.  We may have to emit a debug temp even if the insn
           was marked, in case the debug use was after the point of
           death.  */
        if (debug.used && !bitmap_empty_p (debug.used))
          {
            df_ref def;

            FOR_EACH_INSN_DEF (def, insn)
              dead_debug_insert_temp (&debug, DF_REF_REGNO (def), insn,
                                      marked_insn_p (insn)
                                      && !control_flow_insn_p (insn)
                                      ? DEBUG_TEMP_AFTER_WITH_REG_FORCE
                                      : DEBUG_TEMP_BEFORE_WITH_VALUE);
          }

        if (dump_file)
          {
            fprintf (dump_file, "finished processing insn %d live out = ",
                     INSN_UID (insn));
            df_print_word_regset (dump_file, local_live);
          }
      }

  block_changed = !bitmap_equal_p (local_live, DF_WORD_LR_IN (bb));
  if (block_changed)
    bitmap_copy (DF_WORD_LR_IN (bb), local_live);

  dead_debug_local_finish (&debug, NULL);
  BITMAP_FREE (local_live);
  return block_changed;
}

/* Process basic block BB.  Return true if the live_in set has
   changed.  REDO_OUT is true if the info at the bottom of the block
   needs to be recalculated before starting.  AU is the proper set of
   artificial uses.  Track global substitution of uses of dead pseudos
   in debug insns using GLOBAL_DEBUG.  */

static bool
dce_process_block (basic_block bb, bool redo_out, bitmap au,
                   struct dead_debug_global *global_debug)
{
  bitmap local_live = BITMAP_ALLOC (&dce_tmp_bitmap_obstack);
  rtx_insn *insn;
  bool block_changed;
  df_ref def;
  struct dead_debug_local debug;

  if (redo_out)
    {
      /* Need to redo the live_out set of this block when one of
         the succs of this block has had a change in its live_in
         set.  */
      edge e;
      edge_iterator ei;
      df_confluence_function_n con_fun_n = df_lr->problem->con_fun_n;
      bitmap_clear (DF_LR_OUT (bb));
      FOR_EACH_EDGE (e, ei, bb->succs)
        (*con_fun_n) (e);
    }

  if (dump_file)
    {
      fprintf (dump_file, "processing block %d lr out = ", bb->index);
      df_print_regset (dump_file, DF_LR_OUT (bb));
    }

  bitmap_copy (local_live, DF_LR_OUT (bb));

  df_simulate_initialize_backwards (bb, local_live);
  dead_debug_local_init (&debug, NULL, global_debug);
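
  /* Scan the block backwards.  A nondebug insn is needed if it was
     already marked or if one of its defs is still live (or artificially
     used) below this point; its defs are then removed from LOCAL_LIVE,
     and its uses are added back only if the insn is needed.  */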

  FOR_BB_INSNS_REVERSE (bb, insn)
    if (DEBUG_INSN_P (insn))
      {
        df_ref use;
        FOR_EACH_INSN_USE (use, insn)
          if (!bitmap_bit_p (local_live, DF_REF_REGNO (use))
              && !bitmap_bit_p (au, DF_REF_REGNO (use)))
            dead_debug_add (&debug, use, DF_REF_REGNO (use));
      }
    else if (INSN_P (insn))
      {
        bool needed = marked_insn_p (insn);

        /* The insn is needed if there is someone who uses the output.  */
        if (!needed)
          FOR_EACH_INSN_DEF (def, insn)
            if (bitmap_bit_p (local_live, DF_REF_REGNO (def))
                || bitmap_bit_p (au, DF_REF_REGNO (def)))
              {
                needed = true;
                mark_insn (insn, true);
                break;
              }

        /* No matter if the instruction is needed or not, we remove
           any regno in the defs from the live set.  */
        df_simulate_defs (insn, local_live);

        /* On the other hand, we do not allow the dead uses to set
           anything in local_live.  */
        if (needed)
          df_simulate_uses (insn, local_live);

        /* Insert debug temps for dead REGs used in subsequent debug
           insns.  We may have to emit a debug temp even if the insn
           was marked, in case the debug use was after the point of
           death.  */
        if (debug.used && !bitmap_empty_p (debug.used))
          FOR_EACH_INSN_DEF (def, insn)
            dead_debug_insert_temp (&debug, DF_REF_REGNO (def), insn,
                                    needed && !control_flow_insn_p (insn)
                                    ? DEBUG_TEMP_AFTER_WITH_REG_FORCE
                                    : DEBUG_TEMP_BEFORE_WITH_VALUE);
      }

  dead_debug_local_finish (&debug, NULL);
  df_simulate_finalize_backwards (bb, local_live);

  block_changed = !bitmap_equal_p (local_live, DF_LR_IN (bb));
  if (block_changed)
    bitmap_copy (DF_LR_IN (bb), local_live);

  BITMAP_FREE (local_live);
  return block_changed;
}

/* Perform fast DCE once initialization is done.  If WORD_LEVEL is
   true, use the word level dce, otherwise do it at the pseudo
   level.  */

static void
fast_dce (bool word_level)
{
  int *postorder = df_get_postorder (DF_BACKWARD);
  int n_blocks = df_get_n_blocks (DF_BACKWARD);
  /* The set of blocks that have been seen on this iteration.  */
  bitmap processed = BITMAP_ALLOC (&dce_blocks_bitmap_obstack);
  /* The set of blocks that need to have the out vectors reset because
     the in of one of their successors has changed.  */
  bitmap redo_out = BITMAP_ALLOC (&dce_blocks_bitmap_obstack);
  bitmap all_blocks = BITMAP_ALLOC (&dce_blocks_bitmap_obstack);
  bool global_changed = true;

  /* These regs are considered always live so if they end up dying
     because of some def, we need to bring them back again.  Calling
     df_simulate_fixup_sets has the disadvantage of calling
     bb_has_eh_pred once per insn, so we cache the information
     here.  */
  bitmap au = &df->regular_block_artificial_uses;
  bitmap au_eh = &df->eh_block_artificial_uses;
  int i;
  struct dead_debug_global global_debug;

  prescan_insns_for_dce (true);

  for (i = 0; i < n_blocks; i++)
    bitmap_set_bit (all_blocks, postorder[i]);

  dead_debug_global_init (&global_debug, NULL);
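
  /* Iterate to a fixed point.  Blocks are visited in postorder; when a
     block's live_in changes, a predecessor that has not been processed
     yet only needs its live_out recomputed (REDO_OUT), while an
     already-processed predecessor (a back edge) forces another full
     pass after deleting what has been proven dead so far.  */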

  while (global_changed)
    {
      global_changed = false;

      for (i = 0; i < n_blocks; i++)
        {
          int index = postorder[i];
          basic_block bb = BASIC_BLOCK_FOR_FN (cfun, index);
          bool local_changed;

          if (index < NUM_FIXED_BLOCKS)
            {
              bitmap_set_bit (processed, index);
              continue;
            }

          if (word_level)
            local_changed
              = word_dce_process_block (bb, bitmap_bit_p (redo_out, index),
                                        &global_debug);
          else
            local_changed
              = dce_process_block (bb, bitmap_bit_p (redo_out, index),
                                   bb_has_eh_pred (bb) ? au_eh : au,
                                   &global_debug);
          bitmap_set_bit (processed, index);

          if (local_changed)
            {
              edge e;
              edge_iterator ei;
              FOR_EACH_EDGE (e, ei, bb->preds)
                if (bitmap_bit_p (processed, e->src->index))
                  /* Be tricky about when we need to iterate the
                     analysis.  We only have to redo the analysis if the
                     bitmaps change at the top of a block that is the
                     entry to a loop.  */
                  global_changed = true;
                else
                  bitmap_set_bit (redo_out, e->src->index);
            }
        }

      if (global_changed)
        {
          /* Turn off the RUN_DCE flag to prevent recursive calls to
             dce.  */
          int old_flag = df_clear_flags (DF_LR_RUN_DCE);

          /* So something was deleted that requires a redo.  Do it on
             the cheap.  */
          delete_unmarked_insns ();
          bitmap_clear (marked);
          bitmap_clear (processed);
          bitmap_clear (redo_out);

          /* We do not need to rescan any instructions.  We only need
             to redo the dataflow equations for the blocks that had a
             change at the top of the block.  Then we need to redo the
             iteration.  */
          if (word_level)
            df_analyze_problem (df_word_lr, all_blocks, postorder, n_blocks);
          else
            df_analyze_problem (df_lr, all_blocks, postorder, n_blocks);

          if (old_flag & DF_LR_RUN_DCE)
            df_set_flags (DF_LR_RUN_DCE);

          prescan_insns_for_dce (true);
        }
    }

  dead_debug_global_finish (&global_debug, NULL);

  delete_unmarked_insns ();

  BITMAP_FREE (processed);
  BITMAP_FREE (redo_out);
  BITMAP_FREE (all_blocks);
}

/* Fast register level DCE.  */

static unsigned int
rest_of_handle_fast_dce (void)
{
  init_dce (true);
  fast_dce (false);
  fini_dce (true);
  return 0;
}

/* Fast word level DCE.  */

void
run_word_dce (void)
{
  int old_flags;

  if (!flag_dce)
    return;

  timevar_push (TV_DCE);
  old_flags = df_clear_flags (DF_DEFER_INSN_RESCAN + DF_NO_INSN_RESCAN);
  df_word_lr_add_problem ();
  init_dce (true);
  fast_dce (true);
  fini_dce (true);
  df_set_flags (old_flags);
  timevar_pop (TV_DCE);
}

/* This is an internal call that is used by the df live register
   problem to run fast dce as a side effect of creating the live
   information.  The stack is organized so that the lr problem is run,
   this pass is run, which updates the live info and the df scanning
   info, and then returns to allow the rest of the problems to be run.

   This can be called from elsewhere, but it will not update the bit
   vectors for any other problems than LR.  */

void
run_fast_df_dce (void)
{
  if (flag_dce)
    {
      /* If dce is able to delete something, it has to happen
         immediately.  Otherwise there will be problems handling the
         eq_notes.  */
      int old_flags =
        df_clear_flags (DF_DEFER_INSN_RESCAN + DF_NO_INSN_RESCAN);

      df_in_progress = true;
      rest_of_handle_fast_dce ();
      df_in_progress = false;

      df_set_flags (old_flags);
    }
}

/* Run a fast DCE pass.  */

void
run_fast_dce (void)
{
  if (flag_dce)
    rest_of_handle_fast_dce ();
}

namespace {

const pass_data pass_data_fast_rtl_dce =
{
  RTL_PASS, /* type */
  "rtl_dce", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_DCE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};

class pass_fast_rtl_dce : public rtl_opt_pass
{
public:
  pass_fast_rtl_dce (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_fast_rtl_dce, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      return optimize > 0 && flag_dce && dbg_cnt (dce_fast);
    }

  virtual unsigned int execute (function *)
    {
      return rest_of_handle_fast_dce ();
    }

}; // class pass_fast_rtl_dce

} // anon namespace

rtl_opt_pass *
make_pass_fast_rtl_dce (gcc::context *ctxt)
{
  return new pass_fast_rtl_dce (ctxt);
}