/* Dead-code elimination pass for the GNU compiler.
   Copyright (C) 2000 Free Software Foundation, Inc.
   Written by Jeffrey D. Oldham <oldham@codesourcery.com>.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 2, or (at your option) any
later version.

GNU CC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
/* Dead-code elimination is the removal of instructions which have no
   impact on the program's output.  "Dead instructions" have no impact
   on the program's output, while "necessary instructions" may have
   an impact on the output.

   The algorithm consists of three phases:
   1) marking as necessary all instructions known to be necessary,
      e.g., writing a value to memory,
   2) propagating necessity, e.g., to the instructions giving values
      to operands of necessary instructions, and
   3) removing dead instructions (except replacing dead conditionals
      with unconditional jumps).

   Side Effects:
   The last step can require adding labels, deleting insns, and
   modifying basic block structures.  Some conditional jumps may be
   converted to unconditional jumps, so the control-flow graph may be
   out-of-date.

   Edges from some infinite loops to the exit block can be added to
   the control-flow graph.

   It Does Not Perform:
   We decided not to perform jump optimization and dead loop removal
   simultaneously with dead-code elimination.  Thus, all jump
   instructions originally present remain after dead-code elimination:
   1) unnecessary conditional jump instructions are changed to
   unconditional jump instructions and 2) all unconditional jump
   instructions remain.

   Assumptions:
   1) SSA has been performed.
   2) The basic block and control-flow graph structures are accurate.
   3) The flow graph permits constructing an edge_list.
   4) Note rtxes should be saved.

   Unfinished:
   When replacing unnecessary conditional jumps with unconditional
   jumps, the control-flow graph is not updated.  It should be.

   References:
   Building an Optimizing Compiler
   Robert Morgan
   Butterworth-Heinemann, 1998
   Section 8.9  */
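
/* As a hypothetical illustration (not drawn from any particular test
   case), consider the pseudo-RTL sequence

	r100 = r101 + r102
	r103 = r100 * 2
	mem[x] = r103

   The store is marked necessary in phase 1; phase 2 then marks the
   definition of r103 and, in turn, the definition of r100 as
   necessary; phase 3 deletes whatever insns remain marked dead.  */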

#include "config.h"
#include "system.h"

#include "rtl.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "ssa.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"

/* A map from blocks to the edges on which they are control dependent.  */
typedef struct {
  /* A dynamically allocated array.  The Nth element corresponds to
     the block with index N + (INVALID_BLOCK+1).  The Ith bit in the
     bitmap is set if that block is dependent on the Ith edge.  */
  bitmap *data;
  /* The number of elements in the array.  */
  int length;
} control_dependent_block_to_edge_map_s, *control_dependent_block_to_edge_map;
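
/* Put differently (mirroring the arithmetic used throughout this
   file), the bitmap for the block with index B lives in slot
   B - (INVALID_BLOCK+1) of DATA, so even the negatively numbered
   entry and exit blocks get a slot.  */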

/* Local function prototypes.  */
static control_dependent_block_to_edge_map control_dependent_block_to_edge_map_create
  PARAMS ((size_t num_basic_blocks));
static void set_control_dependent_block_to_edge_map_bit
  PARAMS ((control_dependent_block_to_edge_map c, basic_block bb,
           int edge_index));
static void control_dependent_block_to_edge_map_free
  PARAMS ((control_dependent_block_to_edge_map c));
static void find_all_control_dependences
  PARAMS ((struct edge_list *el, int *pdom,
           control_dependent_block_to_edge_map cdbte));
static void find_control_dependence
  PARAMS ((struct edge_list *el, int edge_index, int *pdom,
           control_dependent_block_to_edge_map cdbte));
static basic_block find_pdom
  PARAMS ((int *pdom, basic_block block));
static int inherently_necessary_register_1
  PARAMS ((rtx *current_rtx, void *data));
static int inherently_necessary_register
  PARAMS ((rtx current_rtx));
static int find_inherently_necessary
  PARAMS ((rtx current_rtx));
static int propagate_necessity_through_operand
  PARAMS ((rtx *current_rtx, void *data));

/* Unnecessary insns are indicated using insns' in_struct bit.  */

/* Indicate INSN is dead-code; returns nothing.  */
#define KILL_INSN(INSN)		INSN_DEAD_CODE_P(INSN) = 1
/* Indicate INSN is necessary, i.e., not dead-code; returns nothing.  */
#define RESURRECT_INSN(INSN)	INSN_DEAD_CODE_P(INSN) = 0
/* Return nonzero if INSN is unnecessary.  */
#define UNNECESSARY_P(INSN)	INSN_DEAD_CODE_P(INSN)
static void mark_all_insn_unnecessary
  PARAMS ((void));
/* Execute CODE with free variable INSN for all unnecessary insns in
   an unspecified order, producing no output.  */
#define EXECUTE_IF_UNNECESSARY(INSN, CODE)	\
{ \
  rtx INSN; \
  for (INSN = get_insns (); INSN != NULL_RTX; INSN = NEXT_INSN (INSN)) \
    if (INSN_DEAD_CODE_P (INSN)) { \
      CODE; \
    } \
}
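/* A typical use, sketched from eliminate_dead_code below, runs a
   braced statement over each dead insn:

	EXECUTE_IF_UNNECESSARY (insn,
	{
	  ...examine or delete INSN here...;
	});  */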
/* Find the label beginning block BB.  */
static rtx find_block_label
  PARAMS ((basic_block bb));
/* Remove INSN, updating its basic block structure.  */
static void delete_insn_bb
  PARAMS ((rtx insn));

/* Recording which blocks are control dependent on which edges.  We
   expect each block to be control dependent on very few edges so we
   use a bitmap for each block recording its edges.  An array holds
   the bitmaps.  Its position 0 entry holds the bitmap for block
   INVALID_BLOCK+1 so that all blocks, including the entry and exit
   blocks, can participate in the data structure.  */

/* Create a control_dependent_block_to_edge_map, given the number
   NUM_BASIC_BLOCKS of non-entry, non-exit basic blocks, e.g.,
   n_basic_blocks.  This memory must be released using
   control_dependent_block_to_edge_map_free ().  */

static control_dependent_block_to_edge_map
control_dependent_block_to_edge_map_create (num_basic_blocks)
     size_t num_basic_blocks;
{
  int i;
  control_dependent_block_to_edge_map c
    = xmalloc (sizeof (control_dependent_block_to_edge_map_s));
  c->length = num_basic_blocks - (INVALID_BLOCK+1);
  c->data = xmalloc ((size_t) c->length * sizeof (bitmap));
  for (i = 0; i < c->length; ++i)
    c->data[i] = BITMAP_XMALLOC ();

  return c;
}

/* Indicate block BB is control dependent on an edge with index
   EDGE_INDEX in the mapping C of blocks to edges on which they are
   control-dependent.  */

static void
set_control_dependent_block_to_edge_map_bit (c, bb, edge_index)
     control_dependent_block_to_edge_map c;
     basic_block bb;
     int edge_index;
{
  if (bb->index - (INVALID_BLOCK+1) >= c->length)
    abort ();

  bitmap_set_bit (c->data[bb->index - (INVALID_BLOCK+1)],
                  edge_index);
}

/* Execute CODE for each edge (given number EDGE_NUMBER within the
   CODE) for which the block containing INSN is control dependent,
   returning no output.  CDBTE is the mapping of blocks to edges on
   which they are control-dependent.  */

#define EXECUTE_IF_CONTROL_DEPENDENT(CDBTE, INSN, EDGE_NUMBER, CODE) \
  EXECUTE_IF_SET_IN_BITMAP \
    (CDBTE->data[BLOCK_NUM (INSN) - (INVALID_BLOCK+1)], 0, \
     EDGE_NUMBER, CODE)
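/* CODE here is ordinarily a braced statement; EXECUTE_IF_SET_IN_BITMAP
   binds EDGE_NUMBER to each set bit in turn, i.e., to the index of
   each edge on which INSN's block is control dependent.  See the use
   in eliminate_dead_code below.  */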

/* Destroy a control_dependent_block_to_edge_map C.  */

static void
control_dependent_block_to_edge_map_free (c)
     control_dependent_block_to_edge_map c;
{
  int i;
  for (i = 0; i < c->length; ++i)
    BITMAP_XFREE (c->data[i]);
  free ((PTR) c);
}

/* Record all blocks' control dependences on all edges in the edge
   list EL, ala Morgan, Section 3.6.  The mapping PDOM of blocks to
   their postdominators is used, and results are stored in CDBTE,
   which should be empty.  */

static void
find_all_control_dependences (el, pdom, cdbte)
     struct edge_list *el;
     int *pdom;
     control_dependent_block_to_edge_map cdbte;
{
  int i;

  for (i = 0; i < NUM_EDGES (el); ++i)
    find_control_dependence (el, i, pdom, cdbte);
}

/* Determine all blocks' control dependences on the given edge with
   edge_list EL index EDGE_INDEX, ala Morgan, Section 3.6.  The
   mapping PDOM of blocks to their postdominators is used, and
   results are stored in CDBTE, which is assumed to be initialized
   with zeros in each (block b', edge) position.  */
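
/* In Morgan's formulation, a block B is control dependent on an edge
   (U, V) exactly when B postdominates V but does not strictly
   postdominate U.  The walk below starts at V and climbs the
   postdominator chain, stopping before U's immediate postdominator
   (or before BASIC_BLOCK (0) when U is the entry block).  */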

static void
find_control_dependence (el, edge_index, pdom, cdbte)
     struct edge_list *el;
     int edge_index;
     int *pdom;
     control_dependent_block_to_edge_map cdbte;
{
  basic_block current_block;
  basic_block ending_block;

  if (INDEX_EDGE_PRED_BB (el, edge_index) == EXIT_BLOCK_PTR)
    abort ();
  ending_block =
    (INDEX_EDGE_PRED_BB (el, edge_index) == ENTRY_BLOCK_PTR)
    ? BASIC_BLOCK (0)
    : find_pdom (pdom, INDEX_EDGE_PRED_BB (el, edge_index));

  for (current_block = INDEX_EDGE_SUCC_BB (el, edge_index);
       current_block != ending_block && current_block != EXIT_BLOCK_PTR;
       current_block = find_pdom (pdom, current_block))
    {
      set_control_dependent_block_to_edge_map_bit (cdbte,
                                                   current_block,
                                                   edge_index);
    }
}

/* Find the immediate postdominator of the specified basic block
   BLOCK, using the mapping PDOM.  This function is necessary because
   some blocks have negative numbers.  */

static basic_block
find_pdom (pdom, block)
     int *pdom;
     basic_block block;
{
  if (!block)
    abort ();
  if (block->index == INVALID_BLOCK)
    abort ();

  if (block == ENTRY_BLOCK_PTR)
    return BASIC_BLOCK (0);
  else if (block == EXIT_BLOCK_PTR || pdom[block->index] == EXIT_BLOCK)
    return EXIT_BLOCK_PTR;
  else
    return BASIC_BLOCK (pdom[block->index]);
}

/* Determine if the given CURRENT_RTX uses a hard register not
   converted to SSA.  Returns nonzero only if it uses such a hard
   register.  DATA is not used.

   The program counter (PC) is not considered inherently necessary
   since code should be position-independent and thus not depend on
   particular PC values.  */

static int
inherently_necessary_register_1 (current_rtx, data)
     rtx *current_rtx;
     void *data ATTRIBUTE_UNUSED;
{
  rtx x = *current_rtx;

  if (x == NULL_RTX)
    return 0;
  switch (GET_CODE (x))
    {
    case CLOBBER:
      /* Do not traverse the rest of the clobber.  */
      return -1;
      break;
    case PC:
      return 0;
      break;
    case REG:
      if (CONVERT_REGISTER_TO_SSA_P (REGNO (x)) || x == pc_rtx)
        return 0;
      else
        return !0;
      break;
    default:
      return 0;
      break;
    }
}

/* Return nonzero if the insn CURRENT_RTX is inherently necessary.  */

static int
inherently_necessary_register (current_rtx)
     rtx current_rtx;
{
  return for_each_rtx (&current_rtx,
                       &inherently_necessary_register_1, NULL);
}

/* Mark X as inherently necessary if appropriate.  For example,
   function calls and storing values into memory are inherently
   necessary.  This function is to be used with for_each_rtx ().
   Return nonzero iff inherently necessary.  */
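
/* For instance (the register numbers are made up), an INSN whose
   pattern is (set (mem:SI (reg:SI 100)) (reg:SI 101)) is inherently
   necessary because it stores to memory, whereas
   (set (reg:SI 102) (reg:SI 103)) is not and stays dead unless some
   necessary insn uses register 102.  */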

static int
find_inherently_necessary (x)
     rtx x;
{
  rtx pattern;
  if (x == NULL_RTX)
    return 0;
  else if (inherently_necessary_register (x))
    return !0;
  else
    switch (GET_CODE (x))
      {
      case CALL_INSN:
      case CODE_LABEL:
      case NOTE:
      case BARRIER:
        return !0;
        break;
      case JUMP_INSN:
        return JUMP_TABLE_DATA_P (x) || computed_jump_p (x) != 0;
        break;
      case INSN:
        pattern = PATTERN (x);
        switch (GET_CODE (pattern))
          {
          case SET:
          case PRE_DEC:
          case PRE_INC:
          case POST_DEC:
          case POST_INC:
            return GET_CODE (SET_DEST (pattern)) == MEM;
          case CALL:
          case RETURN:
          case USE:
          case CLOBBER:
            return !0;
            break;
          case ASM_INPUT:
            /* We treat assembler instructions as inherently
               necessary, and we hope that their operands do not need
               to be propagated.  */
            return !0;
            break;
          default:
            return 0;
          }
      default:
        /* Found an impossible insn type.  */
        abort ();
        break;
      }
}

/* Propagate necessity through REG and SUBREG operands of CURRENT_RTX.
   This function is called with for_each_rtx () on necessary
   instructions.  The DATA must be a varray of unprocessed
   instructions.  */
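
/* Because the function is in SSA form (see the assumptions at the top
   of the file), each register converted to SSA has a unique defining
   insn, recorded in the ssa_definition varray.  Resurrecting that one
   definition and queueing it is therefore enough to keep every input
   of a necessary instruction alive.  */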

static int
propagate_necessity_through_operand (current_rtx, data)
     rtx *current_rtx;
     void *data;
{
  rtx x = *current_rtx;
  varray_type *unprocessed_instructions = (varray_type *) data;

  if (x == NULL_RTX)
    return 0;
  switch (GET_CODE (x))
    {
    case REG:
      if (CONVERT_REGISTER_TO_SSA_P (REGNO (x)))
        {
          rtx insn = VARRAY_RTX (ssa_definition, REGNO (x));
          if (insn != NULL_RTX && UNNECESSARY_P (insn))
            {
              RESURRECT_INSN (insn);
              VARRAY_PUSH_RTX (*unprocessed_instructions, insn);
            }
        }
      return 0;

    default:
      return 0;
    }
}

/* Indicate that all insns are initially assumed to be unnecessary.  */

static void
mark_all_insn_unnecessary ()
{
  rtx insn;
  for (insn = get_insns (); insn != NULL_RTX; insn = NEXT_INSN (insn))
    KILL_INSN (insn);
}

/* Find the label beginning block BB, adding one if necessary.  */

static rtx
find_block_label (bb)
     basic_block bb;
{
  rtx insn = bb->head;
  if (LABEL_P (insn))
    return insn;
  else
    {
      rtx new_label = emit_label_before (gen_label_rtx (), insn);
      if (insn == bb->head)
        bb->head = new_label;
      return new_label;
    }
}

/* Remove INSN, updating its basic block structure.  */

static void
delete_insn_bb (insn)
     rtx insn;
{
  basic_block bb;
  if (!insn)
    abort ();
  bb = BLOCK_FOR_INSN (insn);
  if (!bb)
    abort ();
  if (bb->head == bb->end)
    {
      /* Delete the insn by converting it to a note.  */
      PUT_CODE (insn, NOTE);
      NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
      return;
    }
  else if (insn == bb->head)
    bb->head = NEXT_INSN (insn);
  else if (insn == bb->end)
    bb->end = PREV_INSN (insn);
  delete_insn (insn);
}

/* Perform the dead-code elimination.  */
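
/* The driver below follows the outline at the top of the file:
   initialize, compute control dependence, seed the worklist with
   inherently necessary insns, propagate necessity through operands
   and control-dependence edges, and finally delete what remains dead,
   turning dead conditional jumps into unconditional ones.  */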

void
eliminate_dead_code ()
{
  int i;
  rtx insn;
  /* Necessary instructions with operands to explore.  */
  varray_type unprocessed_instructions;
  /* Map element (b,e) is nonzero if the block is control dependent on
     the edge.  "cdbte" abbreviates control dependent block to edge.  */
  control_dependent_block_to_edge_map cdbte;
  /* Element I is the immediate postdominator of block I.  */
  int *pdom;
  struct edge_list *el;

  int max_insn_uid = get_max_uid ();

  /* Initialize the data structures.  */
  mark_all_insn_unnecessary ();
  VARRAY_RTX_INIT (unprocessed_instructions, 64,
                   "unprocessed instructions");
  cdbte = control_dependent_block_to_edge_map_create (n_basic_blocks);

  /* Prepare for use of BLOCK_NUM ().  */
  connect_infinite_loops_to_exit ();
  /* Be careful not to clear the added edges.  */
  compute_bb_for_insn (max_insn_uid);

  /* Compute control dependence.  */
  pdom = (int *) xmalloc (n_basic_blocks * sizeof (int));
  for (i = 0; i < n_basic_blocks; ++i)
    pdom[i] = INVALID_BLOCK;
  calculate_dominance_info (pdom, NULL, CDI_POST_DOMINATORS);
  /* Assume there is a path from each node to the exit block.  */
  for (i = 0; i < n_basic_blocks; ++i)
    if (pdom[i] == INVALID_BLOCK)
      pdom[i] = EXIT_BLOCK;
  el = create_edge_list ();
  find_all_control_dependences (el, pdom, cdbte);

  /* Find inherently necessary instructions.  */
  for (insn = get_insns (); insn != NULL_RTX; insn = NEXT_INSN (insn))
    if (find_inherently_necessary (insn))
      {
        RESURRECT_INSN (insn);
        VARRAY_PUSH_RTX (unprocessed_instructions, insn);
      }

  /* Propagate necessity using the operands of necessary instructions.  */
  while (VARRAY_ACTIVE_SIZE (unprocessed_instructions) > 0)
    {
      rtx current_instruction;
      int edge_number;

      current_instruction = VARRAY_TOP_RTX (unprocessed_instructions);
      VARRAY_POP (unprocessed_instructions);

      /* Make the corresponding control dependent edges necessary.  */
      /* Assume the only JUMP_INSN is the block's last insn.  It appears
         that the last instruction of the program need not be a
         JUMP_INSN.  */

      if (INSN_P (current_instruction)
          && !JUMP_TABLE_DATA_P (current_instruction))
        {
          /* Notes and labels contain no interesting operands.  */
          EXECUTE_IF_CONTROL_DEPENDENT
            (cdbte, current_instruction, edge_number,
             {
               rtx jump_insn = (INDEX_EDGE_PRED_BB (el, edge_number))->end;
               if (GET_CODE (jump_insn) == JUMP_INSN
                   && UNNECESSARY_P (jump_insn))
                 {
                   RESURRECT_INSN (jump_insn);
                   VARRAY_PUSH_RTX (unprocessed_instructions, jump_insn);
                 }
             });

          /* Propagate through the operands.  */
          for_each_rtx (&current_instruction,
                        &propagate_necessity_through_operand,
                        (PTR) &unprocessed_instructions);
        }
    }

  /* Remove the unnecessary instructions.  */
  EXECUTE_IF_UNNECESSARY (insn,
  {
    if (any_condjump_p (insn))
      {
        /* Convert an unnecessary conditional insn to an unconditional
           jump to the immediate postdominator block.  */
        rtx old_label = JUMP_LABEL (insn);
        int pdom_block_number =
          find_pdom (pdom, BLOCK_FOR_INSN (insn))->index;

        /* Prevent the conditional jump's label from being deleted so
           we do not have to modify the basic block structure.  */
        ++LABEL_NUSES (old_label);

        if (pdom_block_number != EXIT_BLOCK
            && pdom_block_number != INVALID_BLOCK)
          {
            rtx lbl = find_block_label (BASIC_BLOCK (pdom_block_number));
            rtx new_jump = emit_jump_insn_before (gen_jump (lbl), insn);

            /* Let the jump know that the label is in use.  */
            JUMP_LABEL (new_jump) = lbl;
            ++LABEL_NUSES (lbl);

            delete_insn_bb (insn);

            /* A conditional branch is unnecessary if and only if every
               block control-dependent on it is unnecessary.  Thus,
               any phi nodes in these unnecessary blocks are also
               removed and these nodes need not be updated.  */

            /* A barrier must follow any unconditional jump.  Barriers
               are not in basic blocks so this must occur after
               deleting the conditional jump.  */
            emit_barrier_after (new_jump);
          }
        else
          /* The block drops off the end of the function and the
             ending conditional jump is not needed.  */
          delete_insn_bb (insn);
      }
    else if (!JUMP_P (insn))
      delete_insn_bb (insn);
  });

  /* Release the allocated memory.  */
  for (insn = get_insns (); insn != NULL_RTX; insn = NEXT_INSN (insn))
    RESURRECT_INSN (insn);
  if (VARRAY_ACTIVE_SIZE (unprocessed_instructions) != 0)
    abort ();
  VARRAY_FREE (unprocessed_instructions);
  control_dependent_block_to_edge_map_free (cdbte);
  free ((PTR) pdom);
  free_edge_list (el);
}