* configure.in: Add ${libgcj} to noconfigdirs for xtensa-*-* targets.
[official-gcc.git] / gcc / ssa-ccp.c
blobc093cc463e74c5bb5ca6839e1477783efd255e15
1 /* Conditional constant propagation pass for the GNU compiler.
2 Copyright (C) 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
3 Original framework by Daniel Berlin <dan@cgsoftware.com>
4 Fleshed out and major cleanups by Jeff Law <law@redhat.com>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
21 02111-1307, USA. */
23 /* Conditional constant propagation.
25 References:
27 Constant propagation with conditional branches,
28 Wegman and Zadeck, ACM TOPLAS 13(2):181-210.
30 Building an Optimizing Compiler,
31 Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.
33 Advanced Compiler Design and Implementation,
34 Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6
36 The overall structure is as follows:
38 1. Run a simple SSA based DCE pass to remove any dead code.
39 2. Run CCP to compute what registers are known constants
40 and what edges are not executable. Remove unexecutable
41 edges from the CFG and simplify PHI nodes.
42 3. Replace registers with constants where possible.
43 4. Remove unreachable blocks computed in step #2.
44 5. Another simple SSA DCE pass to remove dead code exposed
45 by CCP.
47 When we exit, we are still in SSA form.
50 Potential further enhancements:
52 1. Handle SUBREGs, STRICT_LOW_PART, etc in destinations more
53 gracefully.
55 2. Handle insns with multiple outputs more gracefully.
57 3. Handle CONST_DOUBLE and symbolic constants.
59 4. Fold expressions after performing constant substitutions. */
62 #include "config.h"
63 #include "system.h"
64 #include "coretypes.h"
65 #include "tm.h"
67 #include "rtl.h"
68 #include "hard-reg-set.h"
69 #include "basic-block.h"
70 #include "ssa.h"
71 #include "insn-config.h"
72 #include "recog.h"
73 #include "output.h"
74 #include "errors.h"
75 #include "ggc.h"
76 #include "df.h"
77 #include "function.h"
79 /* Possible lattice values. */
/* Standard CCP lattice: a register starts UNDEFINED (optimistic), may be
   raised to CONSTANT, and can only fall further to VARYING.  A value never
   moves back up the lattice, which guarantees the fixpoint terminates.  */
81 typedef enum
83 UNDEFINED,
84 CONSTANT,
85 VARYING
86 } latticevalue;
88 /* Main structure for CCP.
90 Contains the lattice value and, if it's a constant, the constant
91 value. */
/* const_value is only meaningful when lattice_val == CONSTANT; otherwise it
   is NULL or stale and must not be read.  */
92 typedef struct
94 latticevalue lattice_val;
95 rtx const_value;
96 } value;
98 /* Array of values indexed by register number. */
99 static value *values;
101 /* A bitmap to keep track of executable blocks in the CFG. */
102 static sbitmap executable_blocks;
104 /* A bitmap for all executable edges in the CFG. */
105 static sbitmap executable_edges;
107 /* Array of edges on the work list. */
/* edge_info[i] is the "next" link for edge index i: the executable-edge
   worklist is a singly linked list threaded through this array, with
   FLOW_EDGES below as its head.  */
108 static edge *edge_info;
110 /* We need an edge list to be able to get indexes easily. */
111 static struct edge_list *edges;
113 /* For building/following use-def and def-use chains. */
114 static struct df *df_analyzer;
116 /* Current edge we are operating on, from the worklist */
117 static edge flow_edges;
119 /* Bitmap of SSA edges which will need reexamination as their definition
120 has changed. */
121 static sbitmap ssa_edges;
123 /* Simple macros to simplify code */
/* SSA_NAME: register number defined by a (set ...) pattern X.
   EIE: index of the edge from block X to block Y in the global edge list.  */
124 #define SSA_NAME(x) REGNO (SET_DEST (x))
125 #define EIE(x,y) EDGE_INDEX (edges, x, y)
/* Forward declarations; every helper in this pass is file-static.  */
127 static void visit_phi_node (rtx, basic_block);
128 static void visit_expression (rtx, basic_block);
129 static void defs_to_undefined (rtx);
130 static void defs_to_varying (rtx);
131 static void examine_flow_edges (void);
132 static int mark_references (rtx *, void *);
133 static void follow_def_use_chains (void);
134 static void optimize_unexecutable_edges (struct edge_list *, sbitmap);
135 static void ssa_ccp_substitute_constants (void);
136 static void ssa_ccp_df_delete_unreachable_insns (void);
137 static void ssa_fast_dce (struct df *);
139 /* Loop through the PHI_NODE's parameters for BLOCK and compare their
140 lattice values to determine PHI_NODE's lattice value. */
/* Meet rule: any VARYING operand makes the PHI VARYING; two distinct
   constants make it VARYING; a single constant over an UNDEFINED
   accumulator makes it CONSTANT.  Operands arriving over edges not yet
   proved executable are ignored entirely (the optimistic assumption).  */
141 static void
142 visit_phi_node (rtx phi_node, basic_block block)
144 unsigned int i;
145 rtx phi_node_expr = NULL;
146 unsigned int phi_node_name = SSA_NAME (PATTERN (phi_node));
147 latticevalue phi_node_lattice_val = UNDEFINED;
148 rtx pat = PATTERN (phi_node);
149 rtvec phi_vec = XVEC (SET_SRC (pat), 0);
150 unsigned int num_elem = GET_NUM_ELEM (phi_vec);
/* phi_vec stores (value, source-block-index) pairs, hence the step of 2;
   element i+1 is the index of the predecessor block the value flows from.  */
152 for (i = 0; i < num_elem; i += 2)
154 if (TEST_BIT (executable_edges,
155 EIE (BASIC_BLOCK (INTVAL (RTVEC_ELT (phi_vec, i + 1))),
156 block)))
158 unsigned int current_parm
159 = REGNO (RTVEC_ELT (phi_vec, i));
161 latticevalue current_parm_lattice_val
162 = values[current_parm].lattice_val;
164 /* If any node is VARYING, then new value of PHI_NODE
165 is VARYING. */
166 if (current_parm_lattice_val == VARYING)
168 phi_node_lattice_val = VARYING;
169 phi_node_expr = NULL;
170 break;
173 /* If we have more than one distinct constant, then the new
174 value of PHI_NODE is VARYING. */
175 if (current_parm_lattice_val == CONSTANT
176 && phi_node_lattice_val == CONSTANT
177 && values[current_parm].const_value != phi_node_expr)
179 phi_node_lattice_val = VARYING;
180 phi_node_expr = NULL;
181 break;
184 /* If the current value of PHI_NODE is UNDEFINED and one
185 node in PHI_NODE is CONSTANT, then the new value of the
186 PHI is that CONSTANT. Note this can turn into VARYING
187 if we find another distinct constant later. */
188 if (phi_node_lattice_val == UNDEFINED
189 && phi_node_expr == NULL
190 && current_parm_lattice_val == CONSTANT)
192 phi_node_expr = values[current_parm].const_value;
193 phi_node_lattice_val = CONSTANT;
194 continue;
199 /* If the value of PHI_NODE changed, then we will need to
200 re-execute uses of the output of PHI_NODE. */
201 if (phi_node_lattice_val != values[phi_node_name].lattice_val)
203 values[phi_node_name].lattice_val = phi_node_lattice_val;
204 values[phi_node_name].const_value = phi_node_expr;
205 SET_BIT (ssa_edges, phi_node_name);
209 /* Sets all defs in an insn to UNDEFINED. */
/* Called when an operand of INSN is itself still UNDEFINED, so the result
   is kept optimistically UNDEFINED.  A register is queued on SSA_EDGES for
   re-simulation only when its lattice value actually changes.  */
210 static void
211 defs_to_undefined (rtx insn)
213 struct df_link *currdef;
214 for (currdef = DF_INSN_DEFS (df_analyzer, insn); currdef;
215 currdef = currdef->next)
217 if (values[DF_REF_REGNO (currdef->ref)].lattice_val != UNDEFINED)
218 SET_BIT (ssa_edges, DF_REF_REGNO (currdef->ref));
219 values[DF_REF_REGNO (currdef->ref)].lattice_val = UNDEFINED;
223 /* Sets all defs in an insn to VARYING. */
/* Lowers every register defined by INSN to lattice bottom.  As in
   defs_to_undefined, SSA_EDGES is only marked on an actual change, so
   already-VARYING defs are not re-queued.  */
224 static void
225 defs_to_varying (rtx insn)
227 struct df_link *currdef;
228 for (currdef = DF_INSN_DEFS (df_analyzer, insn); currdef;
229 currdef = currdef->next)
231 if (values[DF_REF_REGNO (currdef->ref)].lattice_val != VARYING)
232 SET_BIT (ssa_edges, DF_REF_REGNO (currdef->ref));
233 values[DF_REF_REGNO (currdef->ref)].lattice_val = VARYING;
237 /* Go through the expression, call the appropriate evaluation routines
238 to attempt cprop */
/* Simulate one insn INSN in BLOCK: classify its single_set (constant load,
   register copy, conditional/unconditional jump, or a simplifiable unary/
   binary/ternary/relational expression), update the lattice for the defined
   register, and push newly-executable successor edges onto FLOW_EDGES.  */
239 static void
240 visit_expression (rtx insn, basic_block block)
242 rtx src, dest, set;
245 /* Ugh. CALL_INSNs may end a basic block and have multiple edges
246 leading out from them.
248 Mark all the outgoing edges as executable, then fall into the
249 normal processing below. */
250 if (GET_CODE (insn) == CALL_INSN && block->end == insn)
252 edge curredge;
254 for (curredge = block->succ; curredge;
255 curredge = curredge->succ_next)
257 int index = EIE (curredge->src, curredge->dest);
259 if (TEST_BIT (executable_edges, index))
260 continue;
262 SET_BIT (executable_edges, index);
263 edge_info[index] = flow_edges;
264 flow_edges = curredge;
/* Anything that is not a single_set (parallels, multiple outputs, ...) is
   beyond this pass; conservatively lower all its defs to VARYING.  */
268 set = single_set (insn);
269 if (! set)
271 defs_to_varying (insn);
272 return;
275 src = SET_SRC (set);
276 dest = SET_DEST (set);
278 /* We may want to refine this some day. */
279 if (GET_CODE (dest) != REG && dest != pc_rtx)
281 defs_to_varying (insn);
282 return;
285 /* Hard registers are not put in SSA form and thus we must consider
286 them varying. All the more reason to avoid hard registers in
287 RTL until as late as possible in the compilation. */
288 if (GET_CODE (dest) == REG && REGNO (dest) < FIRST_PSEUDO_REGISTER)
290 defs_to_varying (insn);
291 return;
294 /* If this is assigning DEST to a constant, record that fact. */
/* Only plain INSNs record constants here; JUMP_INSNs are handled in the
   jump branch below even when their source is constant.  */
295 if (GET_CODE (src) == CONST_INT && GET_CODE (insn) == INSN)
297 unsigned int resultreg = REGNO (dest);
299 values[resultreg].lattice_val = CONSTANT;
300 values[resultreg].const_value = SET_SRC (PATTERN (insn));
301 SET_BIT (ssa_edges, resultreg);
304 /* If this is a copy operation, then we can copy the lattice values. */
305 else if (GET_CODE (src) == REG && GET_CODE (dest) == REG)
307 unsigned int old_value = REGNO (src);
308 latticevalue old_lattice_value = values[old_value].lattice_val;
309 unsigned int new_value = REGNO (dest);
311 /* Unless the lattice value is going to change, don't bother
312 adding the "new value" into the worklist. */
313 if (values[new_value].lattice_val != old_lattice_value
314 || values[new_value].const_value != values[old_value].const_value)
315 SET_BIT (ssa_edges, new_value);
317 /* Copy the old lattice node info into the new value lattice node. */
318 values[new_value].lattice_val = old_lattice_value;
319 values[new_value].const_value = values[old_value].const_value;
322 /* Handle jumps. */
323 else if (GET_CODE (insn) == JUMP_INSN)
/* NOTE(review): the pc_set result stored into X here appears dead -- X is
   unused in the non-IF_THEN_ELSE path and is overwritten by
   simplify_ternary_operation before any use in the other path; verify.  */
325 rtx x = pc_set (insn);
326 if (GET_CODE (src) != IF_THEN_ELSE)
328 edge curredge;
330 /* This is a computed jump, table jump, or an unconditional
331 jump. For all these cases we want to mark all successor
332 blocks as executable if they have not already been
333 marked.
335 One day we may try do better with switch tables and
336 other computed jumps. */
337 for (curredge = block->succ; curredge;
338 curredge = curredge->succ_next)
340 int index = EIE (curredge->src, curredge->dest);
342 if (TEST_BIT (executable_edges, index))
343 continue;
345 SET_BIT (executable_edges, index);
346 edge_info[index] = flow_edges;
347 flow_edges = curredge;
350 else
352 edge curredge;
353 enum rtx_code comparison_code;
354 rtx comparison_src0;
355 rtx comparison_src1;
357 comparison_code = GET_CODE (XEXP (src, 0));
358 comparison_src0 = XEXP (XEXP (src, 0), 0);
359 comparison_src1 = XEXP (XEXP (src, 0), 1);
361 /* If either operand is undefined, then there is nothing to
362 do right now. If/when operands are later defined we will
363 reevaluate the condition and take the appropriate action. */
364 if ((GET_CODE (comparison_src0) == REG
365 && values[REGNO (comparison_src0)].lattice_val == UNDEFINED)
366 || (GET_CODE (comparison_src1) == REG
367 && values[REGNO (comparison_src1)].lattice_val == UNDEFINED))
368 return;
370 /* If either operand is varying, then we must consider all
371 paths as executable. */
372 if ((GET_CODE (comparison_src0) == REG
373 && values[REGNO (comparison_src0)].lattice_val == VARYING)
374 || (GET_CODE (comparison_src1) == REG
375 && values[REGNO (comparison_src1)].lattice_val == VARYING))
377 for (curredge = block->succ; curredge;
378 curredge = curredge->succ_next)
380 int index = EIE (curredge->src, curredge->dest);
382 if (TEST_BIT (executable_edges, index))
383 continue;
385 SET_BIT (executable_edges, index);
386 edge_info[index] = flow_edges;
387 flow_edges = curredge;
389 return;
392 /* Try to simplify the comparison. */
393 if (GET_CODE (comparison_src0) == REG
394 && values[REGNO (comparison_src0)].lattice_val == CONSTANT)
395 comparison_src0 = values[REGNO (comparison_src0)].const_value;
397 if (GET_CODE (comparison_src1) == REG
398 && values[REGNO (comparison_src1)].lattice_val == CONSTANT)
399 comparison_src1 = values[REGNO (comparison_src1)].const_value;
401 x = simplify_ternary_operation (IF_THEN_ELSE,
402 VOIDmode,
403 GET_MODE (XEXP (src, 0)),
404 gen_rtx (comparison_code,
405 GET_MODE (XEXP (src, 0)),
406 comparison_src0,
407 comparison_src1),
408 XEXP (src, 1),
409 XEXP (src, 2));
411 /* Walk through all the outgoing edges from this block and see
412 which (if any) we should mark as executable. */
413 for (curredge = block->succ; curredge;
414 curredge = curredge->succ_next)
416 int index = EIE (curredge->src, curredge->dest);
418 if (TEST_BIT (executable_edges, index))
419 continue;
421 /* If we were unable to simplify the expression at this
422 point, it's highly unlikely we'll be able to simplify
423 it later. So consider all edges as executable if the
424 expression did not simplify.
426 If the expression simplified to (pc), then we know we
427 will take the fall-thru edge, so mark it. Similarly,
428 if the expression simplified to (label_ref ...), then
429 we know the branch will be taken and we mark that
430 edge as taken. */
431 if (!x
432 || (x == pc_rtx
433 && (curredge->flags & EDGE_FALLTHRU))
434 || (GET_CODE (x) == LABEL_REF
435 && ! (curredge->flags & EDGE_FALLTHRU)))
437 SET_BIT (executable_edges, index);
438 edge_info[index] = flow_edges;
439 flow_edges = curredge;
444 else if (!PHI_NODE_P (insn))
446 rtx simplified = NULL;
448 /* We've got some kind of INSN. If it's simple, try to evaluate
449 it and record the results.
451 We already know this insn is a single_set and that it sets
452 a pseudo register. So we just need to extract the source
453 arguments, simplify them to constants if possible, then
454 simplify the expression as a whole if possible. */
/* Dispatch on the RTX class of SRC: '<' relational, '1' unary, '2'/'c'
   binary (commutative or not), '3'/'b' ternary/bitfield.  */
455 switch (GET_RTX_CLASS (GET_CODE (src)))
457 case '<':
459 rtx src0 = XEXP (src, 0);
460 rtx src1 = XEXP (src, 1);
461 enum machine_mode mode;
463 /* If either is undefined, then the result is undefined. */
464 if ((GET_CODE (src0) == REG
465 && values[REGNO (src0)].lattice_val == UNDEFINED)
466 || (GET_CODE (src1) == REG
467 && values[REGNO (src1)].lattice_val == UNDEFINED))
469 defs_to_undefined (insn);
470 break;
473 /* Determine the mode for the operation before we simplify
474 our arguments to constants. */
475 mode = GET_MODE (src0);
476 if (mode == VOIDmode)
477 mode = GET_MODE (src1);
479 /* Simplify source operands to whatever known values they
480 may have. */
481 if (GET_CODE (src0) == REG
482 && values[REGNO (src0)].lattice_val == CONSTANT)
483 src0 = values[REGNO (src0)].const_value;
485 if (GET_CODE (src1) == REG
486 && values[REGNO (src1)].lattice_val == CONSTANT)
487 src1 = values[REGNO (src1)].const_value;
489 /* See if the simplifier can determine if this operation
490 computes a constant value. */
491 simplified = simplify_relational_operation (GET_CODE (src),
492 mode, src0, src1);
493 break;
497 case '1':
499 rtx src0 = XEXP (src, 0);
500 enum machine_mode mode0 = GET_MODE (src0);
502 /* If the operand is undefined, then the result is undefined. */
503 if (GET_CODE (src0) == REG
504 && values[REGNO (src0)].lattice_val == UNDEFINED)
506 defs_to_undefined (insn);
507 break;
510 /* Simplify source operands to whatever known values they
511 may have. */
512 if (GET_CODE (src0) == REG
513 && values[REGNO (src0)].lattice_val == CONSTANT)
514 src0 = values[REGNO (src0)].const_value;
516 /* See if the simplifier can determine if this operation
517 computes a constant value. */
518 simplified = simplify_unary_operation (GET_CODE (src),
519 GET_MODE (src),
520 src0,
521 mode0);
522 break;
525 case '2':
526 case 'c':
528 rtx src0 = XEXP (src, 0);
529 rtx src1 = XEXP (src, 1);
531 /* If either is undefined, then the result is undefined. */
532 if ((GET_CODE (src0) == REG
533 && values[REGNO (src0)].lattice_val == UNDEFINED)
534 || (GET_CODE (src1) == REG
535 && values[REGNO (src1)].lattice_val == UNDEFINED))
537 defs_to_undefined (insn);
538 break;
541 /* Simplify source operands to whatever known values they
542 may have. */
543 if (GET_CODE (src0) == REG
544 && values[REGNO (src0)].lattice_val == CONSTANT)
545 src0 = values[REGNO (src0)].const_value;
547 if (GET_CODE (src1) == REG
548 && values[REGNO (src1)].lattice_val == CONSTANT)
549 src1 = values[REGNO (src1)].const_value;
551 /* See if the simplifier can determine if this operation
552 computes a constant value. */
553 simplified = simplify_binary_operation (GET_CODE (src),
554 GET_MODE (src),
555 src0, src1);
556 break;
559 case '3':
560 case 'b':
562 rtx src0 = XEXP (src, 0);
563 rtx src1 = XEXP (src, 1);
564 rtx src2 = XEXP (src, 2);
566 /* If either is undefined, then the result is undefined. */
567 if ((GET_CODE (src0) == REG
568 && values[REGNO (src0)].lattice_val == UNDEFINED)
569 || (GET_CODE (src1) == REG
570 && values[REGNO (src1)].lattice_val == UNDEFINED)
571 || (GET_CODE (src2) == REG
572 && values[REGNO (src2)].lattice_val == UNDEFINED))
574 defs_to_undefined (insn);
575 break;
578 /* Simplify source operands to whatever known values they
579 may have. */
580 if (GET_CODE (src0) == REG
581 && values[REGNO (src0)].lattice_val == CONSTANT)
582 src0 = values[REGNO (src0)].const_value;
584 if (GET_CODE (src1) == REG
585 && values[REGNO (src1)].lattice_val == CONSTANT)
586 src1 = values[REGNO (src1)].const_value;
588 if (GET_CODE (src2) == REG
589 && values[REGNO (src2)].lattice_val == CONSTANT)
590 src2 = values[REGNO (src2)].const_value;
592 /* See if the simplifier can determine if this operation
593 computes a constant value. */
594 simplified = simplify_ternary_operation (GET_CODE (src),
595 GET_MODE (src),
596 GET_MODE (src),
597 src0, src1, src2);
598 break;
601 default:
602 defs_to_varying (insn);
/* Record the simplified constant, queueing DEST for re-simulation only if
   its lattice entry actually changed; anything unsimplified is VARYING.  */
605 if (simplified && GET_CODE (simplified) == CONST_INT)
607 if (values[REGNO (dest)].lattice_val != CONSTANT
608 || values[REGNO (dest)].const_value != simplified)
609 SET_BIT (ssa_edges, REGNO (dest));
611 values[REGNO (dest)].lattice_val = CONSTANT;
612 values[REGNO (dest)].const_value = simplified;
614 else
615 defs_to_varying (insn);
619 /* Iterate over the FLOW_EDGES work list. Simulate the target block
620 for each edge. */
/* PHI nodes in the target are always re-simulated; the straight-line insns
   of a block are simulated only the first time the block becomes
   executable (tracked in EXECUTABLE_BLOCKS).  */
621 static void
622 examine_flow_edges (void)
624 while (flow_edges != NULL)
626 basic_block succ_block;
627 rtx curr_phi_node;
629 /* Pull the next block to simulate off the worklist. */
/* Popping = follow the edge_info[] link stored when the edge was pushed. */
630 succ_block = flow_edges->dest;
631 flow_edges = edge_info[EIE (flow_edges->src, flow_edges->dest)];
633 /* There is nothing to do for the exit block. */
634 if (succ_block == EXIT_BLOCK_PTR)
635 continue;
637 /* Always simulate PHI nodes, even if we have simulated this block
638 before. Note that all PHI nodes are consecutive within a block. */
639 for (curr_phi_node = first_insn_after_basic_block_note (succ_block);
640 PHI_NODE_P (curr_phi_node);
641 curr_phi_node = NEXT_INSN (curr_phi_node))
642 visit_phi_node (curr_phi_node, succ_block);
644 /* If this is the first time we've simulated this block, then we
645 must simulate each of its insns. */
646 if (!TEST_BIT (executable_blocks, succ_block->index))
648 rtx currinsn;
649 edge succ_edge = succ_block->succ;
651 /* Note that we have simulated this block. */
652 SET_BIT (executable_blocks, succ_block->index);
654 /* Simulate each insn within the block. */
655 currinsn = succ_block->head;
656 while (currinsn != succ_block->end)
658 if (INSN_P (currinsn))
659 visit_expression (currinsn, succ_block);
661 currinsn = NEXT_INSN (currinsn);
664 /* Don't forget the last insn in the block. */
665 if (INSN_P (currinsn))
666 visit_expression (currinsn, succ_block);
668 /* If we haven't looked at the next block, and it has a
669 single successor, add it onto the worklist. This is because
670 if we only have one successor, we know it gets executed,
671 so we don't have to wait for cprop to tell us. */
672 if (succ_edge != NULL
673 && succ_edge->succ_next == NULL
674 && !TEST_BIT (executable_edges,
675 EIE (succ_edge->src, succ_edge->dest)))
677 SET_BIT (executable_edges,
678 EIE (succ_edge->src, succ_edge->dest));
679 edge_info[EIE (succ_edge->src, succ_edge->dest)] = flow_edges;
680 flow_edges = succ_edge;
686 /* Follow the def-use chains for each definition on the worklist and
687 simulate the uses of the definition. */
/* Uses are only re-simulated when their containing block has already been
   proved executable; unreached uses will be picked up when (if) their
   block is first simulated by examine_flow_edges.  */
689 static void
690 follow_def_use_chains (void)
692 /* Iterate over all the entries on the SSA_EDGES worklist. */
693 while (sbitmap_first_set_bit (ssa_edges) >= 0)
695 int member;
696 struct df_link *curruse;
698 /* Pick an entry off the worklist (it does not matter which
699 entry we pick). */
700 member = sbitmap_first_set_bit (ssa_edges);
701 RESET_BIT (ssa_edges, member);
703 /* Iterate through all the uses of this entry. */
704 for (curruse = df_analyzer->regs[member].uses; curruse;
705 curruse = curruse->next)
707 rtx useinsn;
709 useinsn = DF_REF_INSN (curruse->ref);
710 if (PHI_NODE_P (useinsn))
712 if (TEST_BIT (executable_blocks, BLOCK_NUM (useinsn)))
713 visit_phi_node (useinsn, BLOCK_FOR_INSN (useinsn));
715 else
717 if (TEST_BIT (executable_blocks, BLOCK_NUM (useinsn)))
718 visit_expression (useinsn, BLOCK_FOR_INSN (useinsn));
724 /* Examine each edge to see if we were able to prove any were
725 not executable.
727 If an edge is not executable, then we can remove its alternative
728 in PHI nodes as the destination of the edge, we can simplify the
729 conditional branch at the source of the edge, and we can remove
730 the edge from the CFG. Note we do not delete unreachable blocks
731 yet as the DF analyzer can not deal with that yet. */
732 static void
733 optimize_unexecutable_edges (struct edge_list *edges,
734 sbitmap executable_edges)
736 int i;
737 basic_block bb;
739 for (i = 0; i < NUM_EDGES (edges); i++)
741 if (!TEST_BIT (executable_edges, i))
/* NOTE(review): local variable "edge" shadows the typedef name "edge";
   legal C but confusing -- consider renaming if this code is touched.  */
743 edge edge = INDEX_EDGE (edges, i);
/* Abnormal edges (EH, nonlocal goto) must not be removed.  */
745 if (edge->flags & EDGE_ABNORMAL)
746 continue;
748 /* We found an edge that is not executable. First simplify
749 the PHI nodes in the target block. */
750 if (edge->dest != EXIT_BLOCK_PTR)
752 rtx insn = first_insn_after_basic_block_note (edge->dest);
754 while (PHI_NODE_P (insn))
756 remove_phi_alternative (PATTERN (insn), edge->src);
757 if (rtl_dump_file)
758 fprintf (rtl_dump_file,
759 "Removing alternative for bb %d of phi %d\n",
760 edge->src->index, SSA_NAME (PATTERN (insn)));
761 insn = NEXT_INSN (insn);
764 if (rtl_dump_file)
765 fprintf (rtl_dump_file,
766 "Removing unexecutable edge from %d to %d\n",
767 edge->src->index, edge->dest->index);
768 /* Since the edge was not executable, remove it from the CFG. */
769 remove_edge (edge);
773 /* We have removed all the unexecutable edges from the CFG. Fix up
774 the conditional jumps at the end of any affected block.
776 We have three cases to deal with:
778 a. Both outgoing edges are not executable. This happens if the
779 source block is not reachable. We will deal with this by
780 deleting all the insns in the block later.
782 b. The fall-thru edge is not executable. In this case we
783 change the conditional jump into an unconditional jump and
784 add a BARRIER after the unconditional jump. Note that since
785 we are working on generic RTL we can change the jump in-place
786 instead of dealing with the headache of reemitting the jump.
788 c. The branch taken edge is not executable. In this case
789 we turn the jump into (set (pc) (pc)) which is a nop-jump
790 and we will remove the unrecognizable insn later.
792 In cases B & C we are removing uses of registers, so make sure
793 to note those changes for the DF analyzer. */
795 FOR_EACH_BB (bb)
797 rtx insn = bb->end;
798 edge edge = bb->succ;
800 /* If we have no predecessors, then this block is unreachable and
801 will be cleaned up when we remove unreachable blocks. */
802 if (bb->pred == NULL || GET_CODE (insn) != JUMP_INSN)
803 continue;
805 /* If this block ends in a conditional jump, but only has one
806 successor, then the jump needs adjustment. */
807 if (condjump_p (insn) && ! simplejump_p (insn)
808 && bb->succ && bb->succ->succ_next == NULL)
810 /* If the fallthru edge is the executable edge, then turn
811 this jump into a nop jump, otherwise make it an unconditional
812 jump to its target. */
813 if (edge->flags & EDGE_FALLTHRU)
815 PUT_CODE (insn, NOTE);
816 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
818 else
820 SET_SRC (PATTERN (insn)) = gen_rtx_LABEL_REF (Pmode,
821 JUMP_LABEL (insn));
822 emit_barrier_after (insn);
/* Force re-recognition of the rewritten jump.  */
823 INSN_CODE (insn) = -1;
826 /* Inform the DF analyzer that this insn changed. */
827 df_insn_modify (df_analyzer, BLOCK_FOR_INSN (insn), insn);
832 /* Perform substitution of known values for pseudo registers.
834 ??? Note we do not do simplifications or constant folding here, it
835 is unlikely that any significant simplifications can be done here
836 anyway. Consider that if the simplification would result in an
837 expression that produces a constant value that the value would
838 have been discovered and recorded already.
840 We perform two transformations. First, we initialize pseudos to their
841 known constant values at their definition point. Second, we try to
842 replace uses with the known constant value. */
844 static void
845 ssa_ccp_substitute_constants (void)
847 unsigned int i;
/* Only pseudos have SSA definitions; hard regs were forced VARYING.  */
849 for (i = FIRST_PSEUDO_REGISTER; i < VARRAY_SIZE (ssa_definition); i++)
851 if (values[i].lattice_val == CONSTANT)
853 rtx def = VARRAY_RTX (ssa_definition, i);
854 rtx set = single_set (def);
855 struct df_link *curruse;
857 if (! set)
858 continue;
860 /* Do not try to simplify PHI nodes down to a constant load.
861 That will be done later as we translate out of SSA. Also,
862 doing that here could violate the rule that all PHI nodes
863 are consecutive at the start of the basic block.
865 Don't do anything to nodes that were already sets to
866 constants. */
867 if (! PHI_NODE_P (def)
868 && ! ((GET_CODE (def) == INSN
869 && GET_CODE (SET_SRC (set)) == CONST_INT)))
871 if (rtl_dump_file)
872 fprintf (rtl_dump_file,
873 "Register %d is now set to a constant\n",
874 SSA_NAME (PATTERN (def)));
875 SET_SRC (set) = values[i].const_value;
876 INSN_CODE (def) = -1;
877 df_insn_modify (df_analyzer, BLOCK_FOR_INSN (def), def);
880 /* Iterate through all the uses of this entry and try replacements
881 there too. Note it is not particularly profitable to try
882 and fold/simplify expressions here as most of the common
883 cases were handled above. */
884 for (curruse = df_analyzer->regs[i].uses;
885 curruse;
886 curruse = curruse->next)
888 rtx useinsn;
890 useinsn = DF_REF_INSN (curruse->ref);
/* Skip insns already deleted (either via INSN_DELETED_P or by having
   been turned into a NOTE_INSN_DELETED); only real INSNs and
   JUMP_INSNs are candidates for replacement.  */
892 if (!INSN_DELETED_P (useinsn)
893 && ! (GET_CODE (useinsn) == NOTE
894 && NOTE_LINE_NUMBER (useinsn) == NOTE_INSN_DELETED)
895 && (GET_CODE (useinsn) == INSN
896 || GET_CODE (useinsn) == JUMP_INSN))
/* validate_replace_src only commits the change if the insn still
   matches a recognizable pattern afterwards.  */
899 if (validate_replace_src (regno_reg_rtx [i],
900 values[i].const_value,
901 useinsn))
903 if (rtl_dump_file)
904 fprintf (rtl_dump_file,
905 "Register %d in insn %d replaced with constant\n",
906 i, INSN_UID (useinsn));
907 INSN_CODE (useinsn) = -1;
908 df_insn_modify (df_analyzer,
909 BLOCK_FOR_INSN (useinsn),
910 useinsn);
919 /* Now find all unreachable basic blocks. All the insns in those
920 blocks are unreachable, so delete them and mark any necessary
921 updates for the DF analyzer. */
923 static void
924 ssa_ccp_df_delete_unreachable_insns (void)
926 basic_block b;
928 /* Use the CFG to find all the reachable blocks. */
929 find_unreachable_blocks ();
931 /* Now we know what blocks are not reachable. Mark all the insns
932 in those blocks as deleted for the DF analyzer. We'll let the
933 normal flow code actually remove the unreachable blocks. */
934 FOR_EACH_BB_REVERSE (b)
936 if (!(b->flags & BB_REACHABLE))
938 rtx start = b->head;
939 rtx end = b->end;
940 rtx tmp;
942 /* Include any jump table following the basic block. */
/* NOTE(review): this assignment repeats the initializer of END above;
   harmless but redundant.  */
943 end = b->end;
944 if (tablejump_p (end, NULL, &tmp))
945 end = tmp;
/* Walk [start, end], telling DF about each deleted real insn.  NEXT is
   captured before any mutation so the walk stays well defined.  */
947 while (1)
949 rtx next = NEXT_INSN (start);
951 if (GET_CODE (start) == INSN
952 || GET_CODE (start) == CALL_INSN
953 || GET_CODE (start) == JUMP_INSN)
954 df_insn_delete (df_analyzer, BLOCK_FOR_INSN (start), start);
956 if (start == end)
957 break;
958 start = next;
965 /* Main entry point for SSA Conditional Constant Propagation.
967 Long term it should accept as input the specific flow graph to
968 operate on so that it can be called for sub-graphs. */
/* Pipeline: fast DCE, then the CCP fixpoint (examine_flow_edges /
   follow_def_use_chains), then constant substitution, dead-edge and
   unreachable-insn cleanup, and a final fast DCE pass.  */
970 void
971 ssa_const_prop (void)
973 unsigned int i;
974 edge curredge;
976 /* We need alias analysis (for what?) */
977 init_alias_analysis ();
979 df_analyzer = df_init ();
980 df_analyse (df_analyzer, 0,
981 DF_RD_CHAIN | DF_RU_CHAIN | DF_REG_INFO | DF_HARD_REGS);
983 /* Perform a quick and dirty dead code elimination pass. This is not
984 as aggressive as it could be, but it's good enough to clean up a
985 lot of unwanted junk and it is fast. */
986 ssa_fast_dce (df_analyzer);
988 /* Build an edge list from the CFG. */
989 edges = create_edge_list ();
991 /* Initialize the values array with everything as undefined. */
/* Hard registers start at VARYING (they are not in SSA form); pseudos
   start optimistically at UNDEFINED.  */
992 values = (value *) xmalloc (VARRAY_SIZE (ssa_definition) * sizeof (value));
993 for (i = 0; i < VARRAY_SIZE (ssa_definition); i++)
995 if (i < FIRST_PSEUDO_REGISTER)
996 values[i].lattice_val = VARYING;
997 else
998 values[i].lattice_val = UNDEFINED;
999 values[i].const_value = NULL;
1002 ssa_edges = sbitmap_alloc (VARRAY_SIZE (ssa_definition));
1003 sbitmap_zero (ssa_edges);
1005 executable_blocks = sbitmap_alloc (last_basic_block);
1006 sbitmap_zero (executable_blocks);
1008 executable_edges = sbitmap_alloc (NUM_EDGES (edges));
1009 sbitmap_zero (executable_edges);
1011 edge_info = (edge *) xmalloc (NUM_EDGES (edges) * sizeof (edge));
1012 flow_edges = ENTRY_BLOCK_PTR->succ;
1014 /* Add the successors of the entry block to the edge worklist. That
1015 is enough of a seed to get SSA-CCP started. */
/* The initial worklist links each seeded edge to the next entry-block
   successor via edge_info[], matching the threaded-list convention.  */
1016 for (curredge = ENTRY_BLOCK_PTR->succ; curredge;
1017 curredge = curredge->succ_next)
1019 int index = EIE (curredge->src, curredge->dest);
1020 SET_BIT (executable_edges, index);
1021 edge_info[index] = curredge->succ_next;
1024 /* Iterate until the worklists are empty. */
1027 examine_flow_edges ();
1028 follow_def_use_chains ();
1030 while (flow_edges != NULL);
1032 /* Now perform substitutions based on the known constant values. */
1033 ssa_ccp_substitute_constants ();
1035 /* Remove unexecutable edges from the CFG and make appropriate
1036 adjustments to PHI nodes. */
1037 optimize_unexecutable_edges (edges, executable_edges);
1039 /* Now remove all unreachable insns and update the DF information.
1040 as appropriate. */
1041 ssa_ccp_df_delete_unreachable_insns ();
1043 #if 0
1044 /* The DF analyzer expects the number of blocks to remain constant,
1045 so we can't remove unreachable blocks.
1047 Code the DF analyzer calls expects there to be no unreachable
1048 blocks in the CFG. So we can't leave unreachable blocks in the
1049 CFG.
1051 So, there is no way to do an incremental update of the DF data
1052 at this point. */
1053 df_analyse (df_analyzer, 0,
1054 DF_RD_CHAIN | DF_RU_CHAIN | DF_REG_INFO | DF_HARD_REGS);
1055 #endif
1057 /* Clean up any dead code exposed by SSA-CCP, do this after updating
1058 the dataflow information! */
1059 ssa_fast_dce (df_analyzer);
1061 free (values);
1062 values = NULL;
1064 free (edge_info);
1065 edge_info = NULL;
1067 sbitmap_free (executable_blocks);
1068 executable_blocks = NULL;
1070 sbitmap_free (ssa_edges);
1071 ssa_edges = NULL;
1073 free_edge_list (edges);
1074 edges = NULL;
1076 sbitmap_free (executable_edges);
1077 executable_edges = NULL;
1079 df_finish (df_analyzer);
1080 end_alias_analysis ();
/* for_each_rtx callback used by ssa_fast_dce: set the bit in the sbitmap
   DATA for every register read by *CURRENT_RTX.  Returns -1 to tell
   for_each_rtx not to descend into the current subexpression, 0 to
   continue the walk normally.  Registers that are merely (partially)
   written -- via STRICT_LOW_PART, SUBREG, SIGN/ZERO_EXTRACT -- are also
   marked, since such a write reads the old value; CLOBBERed operands are
   deliberately skipped.  */
1083 static int
1084 mark_references (rtx *current_rtx, void *data)
1086 rtx x = *current_rtx;
1087 sbitmap worklist = (sbitmap) data;
1089 if (x == NULL_RTX)
1090 return 0;
1092 if (GET_CODE (x) == SET)
1094 rtx dest = SET_DEST (x);
1096 if (GET_CODE (dest) == STRICT_LOW_PART
1097 || GET_CODE (dest) == SUBREG
1098 || GET_CODE (dest) == SIGN_EXTRACT
1099 || GET_CODE (dest) == ZERO_EXTRACT)
1101 rtx reg;
1103 reg = dest;
/* Strip nested wrappers down to the underlying REG (if any).  */
1105 while (GET_CODE (reg) == STRICT_LOW_PART
1106 || GET_CODE (reg) == SUBREG
1107 || GET_CODE (reg) == SIGN_EXTRACT
1108 || GET_CODE (reg) == ZERO_EXTRACT)
1109 reg = XEXP (reg, 0);
1111 if (GET_CODE (reg) == REG
1112 SET_BIT (worklist, REGNO (reg));
1115 if (GET_CODE (dest) == REG
1117 for_each_rtx (&SET_SRC (x), mark_references, data);
1118 return -1;
1121 return 0;
1123 else if (GET_CODE (x) == REG)
1125 SET_BIT (worklist, REGNO (x));
1126 return -1;
1128 else if (GET_CODE (x) == CLOBBER)
1129 return -1;
1130 else
1131 return 0;
/* Quick worklist-driven dead code elimination over the SSA definitions.
   A pseudo's definition is deleted when no surviving insn (other than the
   definition itself) uses the register; registers referenced by a deleted
   insn are pushed back on the worklist so chains of dead code unravel.
   Hard regs, deleted insns, and insns with side effects are never
   candidates.  */
1134 static void
1135 ssa_fast_dce (struct df *df)
1137 sbitmap worklist = sbitmap_alloc (VARRAY_SIZE (ssa_definition));
1138 sbitmap_ones (worklist);
1140 /* Iterate on the worklist until there's no definitions left to
1141 examine. */
1142 while (sbitmap_first_set_bit (worklist) >= 0)
1144 struct df_link *curruse;
1145 int reg, found_use;
1147 /* Remove an item from the worklist. */
1148 reg = sbitmap_first_set_bit (worklist);
1149 RESET_BIT (worklist, reg);
1151 /* We never consider deleting assignments to hard regs or things
1152 which do not have SSA definitions, or things we have already
1153 deleted, or things with unusual side effects. */
1154 if (reg < FIRST_PSEUDO_REGISTER
1155 || ! VARRAY_RTX (ssa_definition, reg)
1156 || INSN_DELETED_P (VARRAY_RTX (ssa_definition, reg))
1157 || (GET_CODE (VARRAY_RTX (ssa_definition, reg)) == NOTE
1158 && (NOTE_LINE_NUMBER (VARRAY_RTX (ssa_definition, reg))
1159 == NOTE_INSN_DELETED))
1160 || side_effects_p (PATTERN (VARRAY_RTX (ssa_definition, reg))))
1161 continue;
1163 /* Iterate over the uses of this register. If we can not find
1164 any uses that have not been deleted, then the definition of
1165 this register is dead. */
1166 found_use = 0;
1167 for (curruse = df->regs[reg].uses; curruse; curruse = curruse->next)
/* A self-use inside the defining insn does not keep it alive.  */
1169 if (curruse->ref
1170 && DF_REF_INSN (curruse->ref)
1171 && ! INSN_DELETED_P (DF_REF_INSN (curruse->ref))
1172 && ! (GET_CODE (DF_REF_INSN (curruse->ref)) == NOTE
1173 && (NOTE_LINE_NUMBER (DF_REF_INSN (curruse->ref))
1174 == NOTE_INSN_DELETED))
1175 && DF_REF_INSN (curruse->ref) != VARRAY_RTX (ssa_definition, reg))
1177 found_use = 1;
1178 break;
1182 /* If we did not find a use of this register, then the definition
1183 of this register is dead. */
1185 if (! found_use)
1187 rtx def = VARRAY_RTX (ssa_definition, reg);
1189 /* Add all registers referenced by INSN to the work
1190 list. */
1191 for_each_rtx (&PATTERN (def), mark_references, worklist);
1193 /* Inform the analyzer that this insn is going to be
1194 deleted. */
1195 df_insn_delete (df, BLOCK_FOR_INSN (def), def);
1197 VARRAY_RTX (ssa_definition, reg) = NULL;
1201 sbitmap_free (worklist);
1203 /* Update the use-def chains in the df_analyzer as needed. */
/* NOTE(review): this uses the global df_analyzer rather than the DF
   parameter; callers pass df_analyzer so they coincide today, but the
   inconsistency would bite if this were ever called with another df.  */
1204 df_analyse (df_analyzer, 0,
1205 DF_RD_CHAIN | DF_RU_CHAIN | DF_REG_INFO | DF_HARD_REGS);