Record revision number.
[official-gcc.git] / gcc / tree-ssa-pre.c
blob425d2204a3c2442dd00e39f9d0371fc67fba977f
1 /* SSA-PRE for trees.
2 Copyright (C) 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dan@dberlin.org> and Steven Bosscher
4 <stevenb@suse.de>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
11 any later version.
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to
20 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
21 Boston, MA 02110-1301, USA. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "ggc.h"
28 #include "tree.h"
29 #include "basic-block.h"
30 #include "diagnostic.h"
31 #include "tree-inline.h"
32 #include "tree-flow.h"
33 #include "tree-gimple.h"
34 #include "tree-dump.h"
35 #include "timevar.h"
36 #include "fibheap.h"
37 #include "hashtab.h"
38 #include "tree-iterator.h"
39 #include "real.h"
40 #include "alloc-pool.h"
41 #include "tree-pass.h"
42 #include "flags.h"
43 #include "bitmap.h"
44 #include "langhooks.h"
45 #include "cfgloop.h"
47 /* TODO:
49 1. Avail sets can be shared by making an avail_find_leader that
50 walks up the dominator tree and looks in those avail sets.
51 This might affect code optimality, it's unclear right now.
52 2. Load motion can be performed by value numbering the loads the
53 same as we do other expressions. This requires iterative
54 hashing the vuses into the values. Right now we simply assign
55 a new value every time we see a statement with a vuse.
56 3. Strength reduction can be performed by anticipating expressions
57 we can repair later on.
58 4. We can do back-substitution or smarter value numbering to catch
59 commutative expressions split up over multiple statements.
60 */
62 /* For ease of terminology, "expression node" in the below refers to
63 every expression node but MODIFY_EXPR, because MODIFY_EXPR's represent
64 the actual statement containing the expressions we care about, and
65 we cache the value number by putting it in the expression. */
67 /* Basic algorithm
69 First we walk the statements to generate the AVAIL sets, the
70 EXP_GEN sets, and the tmp_gen sets. EXP_GEN sets represent the
71 generation of values/expressions by a given block. We use them
72 when computing the ANTIC sets. The AVAIL sets consist of
73 SSA_NAME's that represent values, so we know what values are
74 available in what blocks. AVAIL is a forward dataflow problem. In
75 SSA, values are never killed, so we don't need a kill set, or a
76 fixpoint iteration, in order to calculate the AVAIL sets. In
77 traditional parlance, AVAIL sets tell us the downsafety of the
78 expressions/values.
80 Next, we generate the ANTIC sets. These sets represent the
81 anticipatable expressions. ANTIC is a backwards dataflow
82 problem.An expression is anticipatable in a given block if it could
83 be generated in that block. This means that if we had to perform
84 an insertion in that block, of the value of that expression, we
85 could. Calculating the ANTIC sets requires phi translation of
86 expressions, because the flow goes backwards through phis. We must
87 iterate to a fixpoint of the ANTIC sets, because we have a kill
88 set. Even in SSA form, values are not live over the entire
89 function, only from their definition point onwards. So we have to
90 remove values from the ANTIC set once we go past the definition
91 point of the leaders that make them up.
92 compute_antic/compute_antic_aux performs this computation.
94 Third, we perform insertions to make partially redundant
95 expressions fully redundant.
97 An expression is partially redundant (excluding partial
98 anticipation) if:
100 1. It is AVAIL in some, but not all, of the predecessors of a
101 given block.
102 2. It is ANTIC in all the predecessors.
104 In order to make it fully redundant, we insert the expression into
105 the predecessors where it is not available, but is ANTIC.
106 insert/insert_aux performs this insertion.
108 Fourth, we eliminate fully redundant expressions.
109 This is a simple statement walk that replaces redundant
110 calculations with the now available values. */
112 /* Representations of value numbers:
114 Value numbers are represented using the "value handle" approach.
115 This means that each SSA_NAME (and for other reasons to be
116 disclosed in a moment, expression nodes) has a value handle that
117 can be retrieved through get_value_handle. This value handle, *is*
118 the value number of the SSA_NAME. You can pointer compare the
119 value handles for equivalence purposes.
121 For debugging reasons, the value handle is internally more than
122 just a number, it is a VAR_DECL named "value.x", where x is a
123 unique number for each value number in use. This allows
124 expressions with SSA_NAMES replaced by value handles to still be
125 pretty printed in a sane way. They simply print as "value.3 *
126 value.5", etc.
128 Expression nodes have value handles associated with them as a
129 cache. Otherwise, we'd have to look them up again in the hash
130 table This makes significant difference (factor of two or more) on
131 some test cases. They can be thrown away after the pass is
132 finished. */
134 /* Representation of expressions on value numbers:
136 In some portions of this code, you will notice we allocate "fake"
137 analogues to the expression we are value numbering, and replace the
138 operands with the values of the expression. Since we work on
139 values, and not just names, we canonicalize expressions to value
140 expressions for use in the ANTIC sets, the EXP_GEN set, etc.
142 This is theoretically unnecessary, it just saves a bunch of
143 repeated get_value_handle and find_leader calls in the remainder of
144 the code, trading off temporary memory usage for speed. The tree
145 nodes aren't actually creating more garbage, since they are
146 allocated in a special pools which are thrown away at the end of
147 this pass.
149 All of this also means that if you print the EXP_GEN or ANTIC sets,
150 you will see "value.5 + value.7" in the set, instead of "a_55 +
151 b_66" or something. The only thing that actually cares about
152 seeing the value leaders is phi translation, and it needs to be
153 able to find the leader for a value in an arbitrary block, so this
154 "value expression" form is perfect for it (otherwise you'd do
155 get_value_handle->find_leader->translate->get_value_handle->find_leader).*/
158 /* Representation of sets:
160 There are currently two types of sets used, hopefully to be unified soon.
161 The AVAIL sets do not need to be sorted in any particular order,
162 and thus, are simply represented as two bitmaps, one that keeps
163 track of values present in the set, and one that keeps track of
164 expressions present in the set.
166 The other sets are represented as doubly linked lists kept in topological
167 order, with an optional supporting bitmap of values present in the
168 set. The sets represent values, and the elements can be values or
169 expressions. The elements can appear in different sets, but each
170 element can only appear once in each set.
172 Since each node in the set represents a value, we also want to be
173 able to map expression, set pairs to something that tells us
174 whether the value is present is a set. We use a per-set bitmap for
175 that. The value handles also point to a linked list of the
176 expressions they represent via a tree annotation. This is mainly
177 useful only for debugging, since we don't do identity lookups. */
180 static bool in_fre = false;
182 /* A value set element. Basically a single linked list of
183 expressions/values. */
184 typedef struct value_set_node
186 /* An expression. */
187 tree expr;
189 /* A pointer to the next element of the value set. */
190 struct value_set_node *next;
191 } *value_set_node_t;
194 /* A value set. This is a singly linked list of value_set_node
195 elements with a possible bitmap that tells us what values exist in
196 the set. This set must be kept in topologically sorted order. */
197 typedef struct value_set
199 /* The head of the list. Used for iterating over the list in
200 order. */
201 value_set_node_t head;
203 /* The tail of the list. Used for tail insertions, which are
204 necessary to keep the set in topologically sorted order because
205 of how the set is built. */
206 value_set_node_t tail;
208 /* The length of the list. */
209 size_t length;
211 /* True if the set is indexed, which means it contains a backing
212 bitmap for quick determination of whether certain values exist in the
213 set. */
214 bool indexed;
216 /* The bitmap of values that exist in the set. May be NULL in an
217 empty or non-indexed set. */
218 bitmap values;
220 } *value_set_t;
223 /* An unordered bitmap set. One bitmap tracks values, the other,
224 expressions. */
225 typedef struct bitmap_set
227 bitmap expressions;
228 bitmap values;
229 } *bitmap_set_t;
231 /* Sets that we need to keep track of. */
232 typedef struct bb_value_sets
234 /* The EXP_GEN set, which represents expressions/values generated in
235 a basic block. */
236 value_set_t exp_gen;
238 /* The PHI_GEN set, which represents PHI results generated in a
239 basic block. */
240 bitmap_set_t phi_gen;
242 /* The TMP_GEN set, which represents results/temporaries generated
243 in a basic block. IE the LHS of an expression. */
244 bitmap_set_t tmp_gen;
246 /* The AVAIL_OUT set, which represents which values are available in
247 a given basic block. */
248 bitmap_set_t avail_out;
250 /* The ANTIC_IN set, which represents which values are anticiptable
251 in a given basic block. */
252 value_set_t antic_in;
254 /* The NEW_SETS set, which is used during insertion to augment the
255 AVAIL_OUT set of blocks with the new insertions performed during
256 the current iteration. */
257 bitmap_set_t new_sets;
258 } *bb_value_sets_t;
260 #define EXP_GEN(BB) ((bb_value_sets_t) ((BB)->aux))->exp_gen
261 #define PHI_GEN(BB) ((bb_value_sets_t) ((BB)->aux))->phi_gen
262 #define TMP_GEN(BB) ((bb_value_sets_t) ((BB)->aux))->tmp_gen
263 #define AVAIL_OUT(BB) ((bb_value_sets_t) ((BB)->aux))->avail_out
264 #define ANTIC_IN(BB) ((bb_value_sets_t) ((BB)->aux))->antic_in
265 #define NEW_SETS(BB) ((bb_value_sets_t) ((BB)->aux))->new_sets
267 /* This structure is used to keep track of statistics on what
268 optimization PRE was able to perform. */
269 static struct
271 /* The number of RHS computations eliminated by PRE. */
272 int eliminations;
274 /* The number of new expressions/temporaries generated by PRE. */
275 int insertions;
277 /* The number of new PHI nodes added by PRE. */
278 int phis;
280 /* The number of values found constant. */
281 int constified;
283 } pre_stats;
286 static tree bitmap_find_leader (bitmap_set_t, tree);
287 static tree find_leader (value_set_t, tree);
288 static void value_insert_into_set (value_set_t, tree);
289 static void bitmap_value_insert_into_set (bitmap_set_t, tree);
290 static void bitmap_value_replace_in_set (bitmap_set_t, tree);
291 static void insert_into_set (value_set_t, tree);
292 static void bitmap_set_copy (bitmap_set_t, bitmap_set_t);
293 static bool bitmap_set_contains_value (bitmap_set_t, tree);
294 static bitmap_set_t bitmap_set_new (void);
295 static value_set_t set_new (bool);
296 static bool is_undefined_value (tree);
297 static tree create_expression_by_pieces (basic_block, tree, tree);
300 /* We can add and remove elements and entries to and from sets
301 and hash tables, so we use alloc pools for them. */
303 static alloc_pool value_set_pool;
304 static alloc_pool bitmap_set_pool;
305 static alloc_pool value_set_node_pool;
306 static alloc_pool binary_node_pool;
307 static alloc_pool unary_node_pool;
308 static alloc_pool reference_node_pool;
309 static alloc_pool comparison_node_pool;
310 static alloc_pool expression_node_pool;
311 static alloc_pool list_node_pool;
312 static bitmap_obstack grand_bitmap_obstack;
314 /* Set of blocks with statements that have had its EH information
315 cleaned up. */
316 static bitmap need_eh_cleanup;
318 /* The phi_translate_table caches phi translations for a given
319 expression and predecessor. */
321 static htab_t phi_translate_table;
323 /* A three tuple {e, pred, v} used to cache phi translations in the
324 phi_translate_table. */
326 typedef struct expr_pred_trans_d
328 /* The expression. */
329 tree e;
331 /* The predecessor block along which we translated the expression. */
332 basic_block pred;
334 /* The value that resulted from the translation. */
335 tree v;
337 /* The hashcode for the expression, pred pair. This is cached for
338 speed reasons. */
339 hashval_t hashcode;
340 } *expr_pred_trans_t;
342 /* Return the hash value for a phi translation table entry. */
344 static hashval_t
345 expr_pred_trans_hash (const void *p)
347 const expr_pred_trans_t ve = (expr_pred_trans_t) p;
348 return ve->hashcode;
351 /* Return true if two phi translation table entries are the same.
352 P1 and P2 should point to the expr_pred_trans_t's to be compared.*/
354 static int
355 expr_pred_trans_eq (const void *p1, const void *p2)
357 const expr_pred_trans_t ve1 = (expr_pred_trans_t) p1;
358 const expr_pred_trans_t ve2 = (expr_pred_trans_t) p2;
359 basic_block b1 = ve1->pred;
360 basic_block b2 = ve2->pred;
363 /* If they are not translations for the same basic block, they can't
364 be equal. */
365 if (b1 != b2)
366 return false;
368 /* If they are for the same basic block, determine if the
369 expressions are equal. */
370 if (expressions_equal_p (ve1->e, ve2->e))
371 return true;
373 return false;
376 /* Search in the phi translation table for the translation of
377 expression E in basic block PRED. Return the translated value, if
378 found, NULL otherwise. */
380 static inline tree
381 phi_trans_lookup (tree e, basic_block pred)
383 void **slot;
384 struct expr_pred_trans_d ept;
385 ept.e = e;
386 ept.pred = pred;
387 ept.hashcode = vn_compute (e, (unsigned long) pred, NULL);
388 slot = htab_find_slot_with_hash (phi_translate_table, &ept, ept.hashcode,
389 NO_INSERT);
390 if (!slot)
391 return NULL;
392 else
393 return ((expr_pred_trans_t) *slot)->v;
397 /* Add the tuple mapping from {expression E, basic block PRED} to
398 value V, to the phi translation table. */
400 static inline void
401 phi_trans_add (tree e, tree v, basic_block pred)
403 void **slot;
404 expr_pred_trans_t new_pair = xmalloc (sizeof (*new_pair));
405 new_pair->e = e;
406 new_pair->pred = pred;
407 new_pair->v = v;
408 new_pair->hashcode = vn_compute (e, (unsigned long) pred, NULL);
409 slot = htab_find_slot_with_hash (phi_translate_table, new_pair,
410 new_pair->hashcode, INSERT);
411 if (*slot)
412 free (*slot);
413 *slot = (void *) new_pair;
417 /* Add expression E to the expression set of value V. */
419 void
420 add_to_value (tree v, tree e)
422 /* Constants have no expression sets. */
423 if (is_gimple_min_invariant (v))
424 return;
426 if (VALUE_HANDLE_EXPR_SET (v) == NULL)
427 VALUE_HANDLE_EXPR_SET (v) = set_new (false);
429 insert_into_set (VALUE_HANDLE_EXPR_SET (v), e);
433 /* Return true if value V exists in the bitmap for SET. */
435 static inline bool
436 value_exists_in_set_bitmap (value_set_t set, tree v)
438 if (!set->values)
439 return false;
441 return bitmap_bit_p (set->values, VALUE_HANDLE_ID (v));
445 /* Remove value V from the bitmap for SET. */
447 static void
448 value_remove_from_set_bitmap (value_set_t set, tree v)
450 gcc_assert (set->indexed);
452 if (!set->values)
453 return;
455 bitmap_clear_bit (set->values, VALUE_HANDLE_ID (v));
459 /* Insert the value number V into the bitmap of values existing in
460 SET. */
462 static inline void
463 value_insert_into_set_bitmap (value_set_t set, tree v)
465 gcc_assert (set->indexed);
467 if (set->values == NULL)
468 set->values = BITMAP_ALLOC (&grand_bitmap_obstack);
470 bitmap_set_bit (set->values, VALUE_HANDLE_ID (v));
474 /* Create a new bitmap set and return it. */
476 static bitmap_set_t
477 bitmap_set_new (void)
479 bitmap_set_t ret = pool_alloc (bitmap_set_pool);
480 ret->expressions = BITMAP_ALLOC (&grand_bitmap_obstack);
481 ret->values = BITMAP_ALLOC (&grand_bitmap_obstack);
482 return ret;
485 /* Create a new set. */
487 static value_set_t
488 set_new (bool indexed)
490 value_set_t ret;
491 ret = pool_alloc (value_set_pool);
492 ret->head = ret->tail = NULL;
493 ret->length = 0;
494 ret->indexed = indexed;
495 ret->values = NULL;
496 return ret;
499 /* Insert an expression EXPR into a bitmapped set. */
501 static void
502 bitmap_insert_into_set (bitmap_set_t set, tree expr)
504 tree val;
505 /* XXX: For now, we only let SSA_NAMES into the bitmap sets. */
506 gcc_assert (TREE_CODE (expr) == SSA_NAME);
507 val = get_value_handle (expr);
509 gcc_assert (val);
510 if (!is_gimple_min_invariant (val))
512 bitmap_set_bit (set->values, VALUE_HANDLE_ID (val));
513 bitmap_set_bit (set->expressions, SSA_NAME_VERSION (expr));
517 /* Insert EXPR into SET. */
519 static void
520 insert_into_set (value_set_t set, tree expr)
522 value_set_node_t newnode = pool_alloc (value_set_node_pool);
523 tree val = get_value_handle (expr);
524 gcc_assert (val);
526 if (is_gimple_min_invariant (val))
527 return;
529 /* For indexed sets, insert the value into the set value bitmap.
530 For all sets, add it to the linked list and increment the list
531 length. */
532 if (set->indexed)
533 value_insert_into_set_bitmap (set, val);
535 newnode->next = NULL;
536 newnode->expr = expr;
537 set->length ++;
538 if (set->head == NULL)
540 set->head = set->tail = newnode;
542 else
544 set->tail->next = newnode;
545 set->tail = newnode;
549 /* Copy a bitmapped set ORIG, into bitmapped set DEST. */
551 static void
552 bitmap_set_copy (bitmap_set_t dest, bitmap_set_t orig)
554 bitmap_copy (dest->expressions, orig->expressions);
555 bitmap_copy (dest->values, orig->values);
558 /* Copy the set ORIG to the set DEST. */
560 static void
561 set_copy (value_set_t dest, value_set_t orig)
563 value_set_node_t node;
565 if (!orig || !orig->head)
566 return;
568 for (node = orig->head;
569 node;
570 node = node->next)
572 insert_into_set (dest, node->expr);
576 /* Remove EXPR from SET. */
578 static void
579 set_remove (value_set_t set, tree expr)
581 value_set_node_t node, prev;
583 /* Remove the value of EXPR from the bitmap, decrement the set
584 length, and remove it from the actual double linked list. */
585 value_remove_from_set_bitmap (set, get_value_handle (expr));
586 set->length--;
587 prev = NULL;
588 for (node = set->head;
589 node != NULL;
590 prev = node, node = node->next)
592 if (node->expr == expr)
594 if (prev == NULL)
595 set->head = node->next;
596 else
597 prev->next= node->next;
599 if (node == set->tail)
600 set->tail = prev;
601 pool_free (value_set_node_pool, node);
602 return;
607 /* Return true if SET contains the value VAL. */
609 static bool
610 set_contains_value (value_set_t set, tree val)
612 /* All constants are in every set. */
613 if (is_gimple_min_invariant (val))
614 return true;
616 if (set->length == 0)
617 return false;
619 return value_exists_in_set_bitmap (set, val);
622 /* Return true if bitmapped set SET contains the expression EXPR. */
623 static bool
624 bitmap_set_contains (bitmap_set_t set, tree expr)
626 /* All constants are in every set. */
627 if (is_gimple_min_invariant (get_value_handle (expr)))
628 return true;
630 /* XXX: Bitmapped sets only contain SSA_NAME's for now. */
631 if (TREE_CODE (expr) != SSA_NAME)
632 return false;
633 return bitmap_bit_p (set->expressions, SSA_NAME_VERSION (expr));
637 /* Return true if bitmapped set SET contains the value VAL. */
639 static bool
640 bitmap_set_contains_value (bitmap_set_t set, tree val)
642 if (is_gimple_min_invariant (val))
643 return true;
644 return bitmap_bit_p (set->values, VALUE_HANDLE_ID (val));
647 /* Replace an instance of value LOOKFOR with expression EXPR in SET. */
649 static void
650 bitmap_set_replace_value (bitmap_set_t set, tree lookfor, tree expr)
652 value_set_t exprset;
653 value_set_node_t node;
654 if (is_gimple_min_invariant (lookfor))
655 return;
656 if (!bitmap_set_contains_value (set, lookfor))
657 return;
659 /* The number of expressions having a given value is usually
660 significantly less than the total number of expressions in SET.
661 Thus, rather than check, for each expression in SET, whether it
662 has the value LOOKFOR, we walk the reverse mapping that tells us
663 what expressions have a given value, and see if any of those
664 expressions are in our set. For large testcases, this is about
665 5-10x faster than walking the bitmap. If this is somehow a
666 significant lose for some cases, we can choose which set to walk
667 based on the set size. */
668 exprset = VALUE_HANDLE_EXPR_SET (lookfor);
669 for (node = exprset->head; node; node = node->next)
671 if (TREE_CODE (node->expr) == SSA_NAME)
673 if (bitmap_bit_p (set->expressions, SSA_NAME_VERSION (node->expr)))
675 bitmap_clear_bit (set->expressions, SSA_NAME_VERSION (node->expr));
676 bitmap_set_bit (set->expressions, SSA_NAME_VERSION (expr));
677 return;
683 /* Subtract bitmapped set B from value set A, and return the new set. */
685 static value_set_t
686 bitmap_set_subtract_from_value_set (value_set_t a, bitmap_set_t b,
687 bool indexed)
689 value_set_t ret = set_new (indexed);
690 value_set_node_t node;
691 for (node = a->head;
692 node;
693 node = node->next)
695 if (!bitmap_set_contains (b, node->expr))
696 insert_into_set (ret, node->expr);
698 return ret;
701 /* Return true if two sets are equal. */
703 static bool
704 set_equal (value_set_t a, value_set_t b)
706 value_set_node_t node;
708 if (a->length != b->length)
709 return false;
710 for (node = a->head;
711 node;
712 node = node->next)
714 if (!set_contains_value (b, get_value_handle (node->expr)))
715 return false;
717 return true;
720 /* Replace an instance of EXPR's VALUE with EXPR in SET if it exists,
721 and add it otherwise. */
723 static void
724 bitmap_value_replace_in_set (bitmap_set_t set, tree expr)
726 tree val = get_value_handle (expr);
727 if (bitmap_set_contains_value (set, val))
728 bitmap_set_replace_value (set, val, expr);
729 else
730 bitmap_insert_into_set (set, expr);
733 /* Insert EXPR into SET if EXPR's value is not already present in
734 SET. */
736 static void
737 bitmap_value_insert_into_set (bitmap_set_t set, tree expr)
739 tree val = get_value_handle (expr);
741 if (is_gimple_min_invariant (val))
742 return;
744 if (!bitmap_set_contains_value (set, val))
745 bitmap_insert_into_set (set, expr);
748 /* Insert the value for EXPR into SET, if it doesn't exist already. */
750 static void
751 value_insert_into_set (value_set_t set, tree expr)
753 tree val = get_value_handle (expr);
755 /* Constant and invariant values exist everywhere, and thus,
756 actually keeping them in the sets is pointless. */
757 if (is_gimple_min_invariant (val))
758 return;
760 if (!set_contains_value (set, val))
761 insert_into_set (set, expr);
765 /* Print out SET to OUTFILE. */
767 static void
768 bitmap_print_value_set (FILE *outfile, bitmap_set_t set,
769 const char *setname, int blockindex)
771 fprintf (outfile, "%s[%d] := { ", setname, blockindex);
772 if (set)
774 bool first = true;
775 unsigned i;
776 bitmap_iterator bi;
778 EXECUTE_IF_SET_IN_BITMAP (set->expressions, 0, i, bi)
780 if (!first)
781 fprintf (outfile, ", ");
782 first = false;
783 print_generic_expr (outfile, ssa_name (i), 0);
785 fprintf (outfile, " (");
786 print_generic_expr (outfile, get_value_handle (ssa_name (i)), 0);
787 fprintf (outfile, ") ");
790 fprintf (outfile, " }\n");
792 /* Print out the value_set SET to OUTFILE. */
794 static void
795 print_value_set (FILE *outfile, value_set_t set,
796 const char *setname, int blockindex)
798 value_set_node_t node;
799 fprintf (outfile, "%s[%d] := { ", setname, blockindex);
800 if (set)
802 for (node = set->head;
803 node;
804 node = node->next)
806 print_generic_expr (outfile, node->expr, 0);
808 fprintf (outfile, " (");
809 print_generic_expr (outfile, get_value_handle (node->expr), 0);
810 fprintf (outfile, ") ");
812 if (node->next)
813 fprintf (outfile, ", ");
817 fprintf (outfile, " }\n");
820 /* Print out the expressions that have VAL to OUTFILE. */
822 void
823 print_value_expressions (FILE *outfile, tree val)
825 if (VALUE_HANDLE_EXPR_SET (val))
827 char s[10];
828 sprintf (s, "VH.%04d", VALUE_HANDLE_ID (val));
829 print_value_set (outfile, VALUE_HANDLE_EXPR_SET (val), s, 0);
834 void
835 debug_value_expressions (tree val)
837 print_value_expressions (stderr, val);
841 void debug_value_set (value_set_t, const char *, int);
843 void
844 debug_value_set (value_set_t set, const char *setname, int blockindex)
846 print_value_set (stderr, set, setname, blockindex);
849 /* Return the folded version of T if T, when folded, is a gimple
850 min_invariant. Otherwise, return T. */
852 static tree
853 fully_constant_expression (tree t)
855 tree folded;
856 folded = fold (t);
857 if (folded && is_gimple_min_invariant (folded))
858 return folded;
859 return t;
862 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
863 For example, this can copy a list made of TREE_LIST nodes.
864 Allocates the nodes in list_node_pool*/
866 static tree
867 pool_copy_list (tree list)
869 tree head;
870 tree prev, next;
872 if (list == 0)
873 return 0;
874 head = pool_alloc (list_node_pool);
876 memcpy (head, list, tree_size (list));
877 prev = head;
879 next = TREE_CHAIN (list);
880 while (next)
882 TREE_CHAIN (prev) = pool_alloc (list_node_pool);
883 memcpy (TREE_CHAIN (prev), next, tree_size (next));
884 prev = TREE_CHAIN (prev);
885 next = TREE_CHAIN (next);
887 return head;
891 /* Translate EXPR using phis in PHIBLOCK, so that it has the values of
892 the phis in PRED. Return NULL if we can't find a leader for each
893 part of the translated expression. */
895 static tree
896 phi_translate (tree expr, value_set_t set, basic_block pred,
897 basic_block phiblock)
899 tree phitrans = NULL;
900 tree oldexpr = expr;
902 if (expr == NULL)
903 return NULL;
905 if (is_gimple_min_invariant (expr))
906 return expr;
908 /* Phi translations of a given expression don't change. */
909 phitrans = phi_trans_lookup (expr, pred);
910 if (phitrans)
911 return phitrans;
913 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
915 case tcc_expression:
917 if (TREE_CODE (expr) != CALL_EXPR)
918 return NULL;
919 else
921 tree oldop0 = TREE_OPERAND (expr, 0);
922 tree oldarglist = TREE_OPERAND (expr, 1);
923 tree oldop2 = TREE_OPERAND (expr, 2);
924 tree newop0;
925 tree newarglist;
926 tree newop2 = NULL;
927 tree oldwalker;
928 tree newwalker;
929 tree newexpr;
930 bool listchanged = false;
932 /* Call expressions are kind of weird because they have an
933 argument list. We don't want to value number the list
934 as one value number, because that doesn't make much
935 sense, and just breaks the support functions we call,
936 which expect TREE_OPERAND (call_expr, 2) to be a
937 TREE_LIST. */
939 newop0 = phi_translate (find_leader (set, oldop0),
940 set, pred, phiblock);
941 if (newop0 == NULL)
942 return NULL;
943 if (oldop2)
945 newop2 = phi_translate (find_leader (set, oldop2),
946 set, pred, phiblock);
947 if (newop2 == NULL)
948 return NULL;
951 /* phi translate the argument list piece by piece.
953 We could actually build the list piece by piece here,
954 but it's likely to not be worth the memory we will save,
955 unless you have millions of call arguments. */
957 newarglist = pool_copy_list (oldarglist);
958 for (oldwalker = oldarglist, newwalker = newarglist;
959 oldwalker && newwalker;
960 oldwalker = TREE_CHAIN (oldwalker),
961 newwalker = TREE_CHAIN (newwalker))
964 tree oldval = TREE_VALUE (oldwalker);
965 tree newval;
966 if (oldval)
968 newval = phi_translate (find_leader (set, oldval),
969 set, pred, phiblock);
970 if (newval == NULL)
971 return NULL;
972 if (newval != oldval)
974 listchanged = true;
975 TREE_VALUE (newwalker) = get_value_handle (newval);
979 if (listchanged)
980 vn_lookup_or_add (newarglist, NULL);
982 if (listchanged || (newop0 != oldop0) || (oldop2 != newop2))
984 newexpr = pool_alloc (expression_node_pool);
985 memcpy (newexpr, expr, tree_size (expr));
986 TREE_OPERAND (newexpr, 0) = newop0 == oldop0 ? oldop0 : get_value_handle (newop0);
987 TREE_OPERAND (newexpr, 1) = listchanged ? newarglist : oldarglist;
988 TREE_OPERAND (newexpr, 2) = newop2 == oldop2 ? oldop2 : get_value_handle (newop2);
989 create_tree_ann (newexpr);
990 vn_lookup_or_add (newexpr, NULL);
991 expr = newexpr;
992 phi_trans_add (oldexpr, newexpr, pred);
996 return expr;
998 case tcc_reference:
999 /* XXX: Until we have PRE of loads working, none will be ANTIC. */
1000 return NULL;
1002 case tcc_binary:
1003 case tcc_comparison:
1005 tree oldop1 = TREE_OPERAND (expr, 0);
1006 tree oldop2 = TREE_OPERAND (expr, 1);
1007 tree newop1;
1008 tree newop2;
1009 tree newexpr;
1011 newop1 = phi_translate (find_leader (set, oldop1),
1012 set, pred, phiblock);
1013 if (newop1 == NULL)
1014 return NULL;
1015 newop2 = phi_translate (find_leader (set, oldop2),
1016 set, pred, phiblock);
1017 if (newop2 == NULL)
1018 return NULL;
1019 if (newop1 != oldop1 || newop2 != oldop2)
1021 tree t;
1022 newexpr = pool_alloc (binary_node_pool);
1023 memcpy (newexpr, expr, tree_size (expr));
1024 TREE_OPERAND (newexpr, 0) = newop1 == oldop1 ? oldop1 : get_value_handle (newop1);
1025 TREE_OPERAND (newexpr, 1) = newop2 == oldop2 ? oldop2 : get_value_handle (newop2);
1026 t = fully_constant_expression (newexpr);
1027 if (t != newexpr)
1029 pool_free (binary_node_pool, newexpr);
1030 newexpr = t;
1032 else
1034 create_tree_ann (newexpr);
1035 vn_lookup_or_add (newexpr, NULL);
1037 expr = newexpr;
1038 phi_trans_add (oldexpr, newexpr, pred);
1041 return expr;
1043 case tcc_unary:
1045 tree oldop1 = TREE_OPERAND (expr, 0);
1046 tree newop1;
1047 tree newexpr;
1049 newop1 = phi_translate (find_leader (set, oldop1),
1050 set, pred, phiblock);
1051 if (newop1 == NULL)
1052 return NULL;
1053 if (newop1 != oldop1)
1055 tree t;
1056 newexpr = pool_alloc (unary_node_pool);
1057 memcpy (newexpr, expr, tree_size (expr));
1058 TREE_OPERAND (newexpr, 0) = get_value_handle (newop1);
1059 t = fully_constant_expression (newexpr);
1060 if (t != newexpr)
1062 pool_free (unary_node_pool, newexpr);
1063 newexpr = t;
1065 else
1067 create_tree_ann (newexpr);
1068 vn_lookup_or_add (newexpr, NULL);
1070 expr = newexpr;
1071 phi_trans_add (oldexpr, newexpr, pred);
1074 return expr;
1076 case tcc_exceptional:
1078 tree phi = NULL;
1079 edge e;
1080 gcc_assert (TREE_CODE (expr) == SSA_NAME);
1081 if (TREE_CODE (SSA_NAME_DEF_STMT (expr)) == PHI_NODE)
1082 phi = SSA_NAME_DEF_STMT (expr);
1083 else
1084 return expr;
1086 e = find_edge (pred, bb_for_stmt (phi));
1087 if (e)
1089 if (is_undefined_value (PHI_ARG_DEF (phi, e->dest_idx)))
1090 return NULL;
1091 vn_lookup_or_add (PHI_ARG_DEF (phi, e->dest_idx), NULL);
1092 return PHI_ARG_DEF (phi, e->dest_idx);
1095 return expr;
1097 default:
1098 gcc_unreachable ();
1102 /* For each expression in SET, translate the value handles through phi nodes
1103 in PHIBLOCK using edge PHIBLOCK->PRED, and store the resulting
1104 expressions in DEST. */
1106 static void
1107 phi_translate_set (value_set_t dest, value_set_t set, basic_block pred,
1108 basic_block phiblock)
1110 value_set_node_t node;
1111 for (node = set->head;
1112 node;
1113 node = node->next)
1115 tree translated;
1116 translated = phi_translate (node->expr, set, pred, phiblock);
1117 phi_trans_add (node->expr, translated, pred);
1119 if (translated != NULL)
1120 value_insert_into_set (dest, translated);
1124 /* Find the leader for a value (i.e., the name representing that
1125 value) in a given set, and return it. Return NULL if no leader is
1126 found. */
1128 static tree
1129 bitmap_find_leader (bitmap_set_t set, tree val)
1131 if (val == NULL)
1132 return NULL;
1134 if (is_gimple_min_invariant (val))
1135 return val;
1136 if (bitmap_set_contains_value (set, val))
1138 /* Rather than walk the entire bitmap of expressions, and see
1139 whether any of them has the value we are looking for, we look
1140 at the reverse mapping, which tells us the set of expressions
1141 that have a given value (IE value->expressions with that
1142 value) and see if any of those expressions are in our set.
1143 The number of expressions per value is usually significantly
1144 less than the number of expressions in the set. In fact, for
1145 large testcases, doing it this way is roughly 5-10x faster
1146 than walking the bitmap.
1147 If this is somehow a significant lose for some cases, we can
1148 choose which set to walk based on which set is smaller. */
1149 value_set_t exprset;
1150 value_set_node_t node;
1151 exprset = VALUE_HANDLE_EXPR_SET (val);
1152 for (node = exprset->head; node; node = node->next)
1154 if (TREE_CODE (node->expr) == SSA_NAME)
1156 if (bitmap_bit_p (set->expressions,
1157 SSA_NAME_VERSION (node->expr)))
1158 return node->expr;
1162 return NULL;
1166 /* Find the leader for a value (i.e., the name representing that
1167 value) in a given set, and return it. Return NULL if no leader is
1168 found. */
1170 static tree
1171 find_leader (value_set_t set, tree val)
1173 value_set_node_t node;
1175 if (val == NULL)
1176 return NULL;
1178 /* Constants represent themselves. */
1179 if (is_gimple_min_invariant (val))
1180 return val;
1182 if (set->length == 0)
1183 return NULL;
1185 if (value_exists_in_set_bitmap (set, val))
1187 for (node = set->head;
1188 node;
1189 node = node->next)
1191 if (get_value_handle (node->expr) == val)
1192 return node->expr;
1196 return NULL;
1199 /* Determine if the expression EXPR is valid in SET. This means that
1200 we have a leader for each part of the expression (if it consists of
1201 values), or the expression is an SSA_NAME.
1203 NB: We never should run into a case where we have SSA_NAME +
1204 SSA_NAME or SSA_NAME + value. The sets valid_in_set is called on,
1205 the ANTIC sets, will only ever have SSA_NAME's or value expressions
1206 (IE VALUE1 + VALUE2, *VALUE1, VALUE1 < VALUE2) */
1208 static bool
1209 valid_in_set (value_set_t set, tree expr)
1211 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
1213 case tcc_binary:
1214 case tcc_comparison:
1216 tree op1 = TREE_OPERAND (expr, 0);
1217 tree op2 = TREE_OPERAND (expr, 1);
1218 return set_contains_value (set, op1) && set_contains_value (set, op2);
1221 case tcc_unary:
1223 tree op1 = TREE_OPERAND (expr, 0);
1224 return set_contains_value (set, op1);
1227 case tcc_expression:
1229 if (TREE_CODE (expr) == CALL_EXPR)
1231 tree op0 = TREE_OPERAND (expr, 0);
1232 tree arglist = TREE_OPERAND (expr, 1);
1233 tree op2 = TREE_OPERAND (expr, 2);
1235 /* Check the non-list operands first. */
1236 if (!set_contains_value (set, op0)
1237 || (op2 && !set_contains_value (set, op2)))
1238 return false;
1240 /* Now check the operands. */
1241 for (; arglist; arglist = TREE_CHAIN (arglist))
1243 if (!set_contains_value (set, TREE_VALUE (arglist)))
1244 return false;
1246 return true;
1248 return false;
1251 case tcc_reference:
1252 /* XXX: Until PRE of loads works, no reference nodes are ANTIC. */
1253 return false;
1255 case tcc_exceptional:
1256 gcc_assert (TREE_CODE (expr) == SSA_NAME);
1257 return true;
1259 case tcc_declaration:
1260 /* VAR_DECL and PARM_DECL are never anticipatable. */
1261 return false;
1263 default:
1264 /* No other cases should be encountered. */
1265 gcc_unreachable ();
1269 /* Clean the set of expressions that are no longer valid in SET. This
1270 means expressions that are made up of values we have no leaders for
1271 in SET. */
1273 static void
1274 clean (value_set_t set)
1276 value_set_node_t node;
1277 value_set_node_t next;
1278 node = set->head;
1279 while (node)
1281 next = node->next;
1282 if (!valid_in_set (set, node->expr))
1283 set_remove (set, node->expr);
1284 node = next;
1288 DEF_VEC_P (basic_block);
1289 DEF_VEC_ALLOC_P (basic_block, heap);
1290 static sbitmap has_abnormal_preds;
1292 /* Compute the ANTIC set for BLOCK.
1294 If succs(BLOCK) > 1 then
1295 ANTIC_OUT[BLOCK] = intersection of ANTIC_IN[b] for all succ(BLOCK)
1296 else if succs(BLOCK) == 1 then
1297 ANTIC_OUT[BLOCK] = phi_translate (ANTIC_IN[succ(BLOCK)])
1299 ANTIC_IN[BLOCK] = clean(ANTIC_OUT[BLOCK] U EXP_GEN[BLOCK] - TMP_GEN[BLOCK])
1301 XXX: It would be nice to either write a set_clear, and use it for
1302 ANTIC_OUT, or to mark the antic_out set as deleted at the end
1303 of this routine, so that the pool can hand the same memory back out
1304 again for the next ANTIC_OUT. */
1306 static bool
1307 compute_antic_aux (basic_block block, bool block_has_abnormal_pred_edge)
1309 basic_block son;
1310 bool changed = false;
1311 value_set_t S, old, ANTIC_OUT;
1312 value_set_node_t node;
1314 ANTIC_OUT = S = NULL;
1316 /* If any edges from predecessors are abnormal, antic_in is empty,
1317 so do nothing. */
1318 if (block_has_abnormal_pred_edge)
1319 goto maybe_dump_sets;
1321 old = set_new (false);
1322 set_copy (old, ANTIC_IN (block));
1323 ANTIC_OUT = set_new (true);
1325 /* If the block has no successors, ANTIC_OUT is empty. */
1326 if (EDGE_COUNT (block->succs) == 0)
1328 /* If we have one successor, we could have some phi nodes to
1329 translate through. */
1330 else if (single_succ_p (block))
1332 phi_translate_set (ANTIC_OUT, ANTIC_IN (single_succ (block)),
1333 block, single_succ (block));
1335 /* If we have multiple successors, we take the intersection of all of
1336 them. */
1337 else
1339 VEC(basic_block, heap) * worklist;
1340 edge e;
1341 size_t i;
1342 basic_block bprime, first;
1343 edge_iterator ei;
1345 worklist = VEC_alloc (basic_block, heap, EDGE_COUNT (block->succs));
1346 FOR_EACH_EDGE (e, ei, block->succs)
1347 VEC_quick_push (basic_block, worklist, e->dest);
1348 first = VEC_index (basic_block, worklist, 0);
1349 set_copy (ANTIC_OUT, ANTIC_IN (first));
1351 for (i = 1; VEC_iterate (basic_block, worklist, i, bprime); i++)
1353 node = ANTIC_OUT->head;
1354 while (node)
1356 tree val;
1357 value_set_node_t next = node->next;
1358 val = get_value_handle (node->expr);
1359 if (!set_contains_value (ANTIC_IN (bprime), val))
1360 set_remove (ANTIC_OUT, node->expr);
1361 node = next;
1364 VEC_free (basic_block, heap, worklist);
1367 /* Generate ANTIC_OUT - TMP_GEN. */
1368 S = bitmap_set_subtract_from_value_set (ANTIC_OUT, TMP_GEN (block), false);
1370 /* Start ANTIC_IN with EXP_GEN - TMP_GEN */
1371 ANTIC_IN (block) = bitmap_set_subtract_from_value_set (EXP_GEN (block),
1372 TMP_GEN (block),
1373 true);
1375 /* Then union in the ANTIC_OUT - TMP_GEN values,
1376 to get ANTIC_OUT U EXP_GEN - TMP_GEN */
1377 for (node = S->head; node; node = node->next)
1378 value_insert_into_set (ANTIC_IN (block), node->expr);
1380 clean (ANTIC_IN (block));
1381 if (!set_equal (old, ANTIC_IN (block)))
1382 changed = true;
1384 maybe_dump_sets:
1385 if (dump_file && (dump_flags & TDF_DETAILS))
1387 if (ANTIC_OUT)
1388 print_value_set (dump_file, ANTIC_OUT, "ANTIC_OUT", block->index);
1389 print_value_set (dump_file, ANTIC_IN (block), "ANTIC_IN", block->index);
1390 if (S)
1391 print_value_set (dump_file, S, "S", block->index);
1394 for (son = first_dom_son (CDI_POST_DOMINATORS, block);
1395 son;
1396 son = next_dom_son (CDI_POST_DOMINATORS, son))
1398 changed |= compute_antic_aux (son,
1399 TEST_BIT (has_abnormal_preds, son->index));
1401 return changed;
1404 /* Compute ANTIC sets. */
1406 static void
1407 compute_antic (void)
1409 bool changed = true;
1410 int num_iterations = 0;
1411 basic_block block;
1413 /* If any predecessor edges are abnormal, we punt, so antic_in is empty.
1414 We pre-build the map of blocks with incoming abnormal edges here. */
1415 has_abnormal_preds = sbitmap_alloc (last_basic_block);
1416 sbitmap_zero (has_abnormal_preds);
1417 FOR_EACH_BB (block)
1419 edge_iterator ei;
1420 edge e;
1422 FOR_EACH_EDGE (e, ei, block->preds)
1423 if (e->flags & EDGE_ABNORMAL)
1425 SET_BIT (has_abnormal_preds, block->index);
1426 break;
1429 /* While we are here, give empty ANTIC_IN sets to each block. */
1430 ANTIC_IN (block) = set_new (true);
1432 /* At the exit block we anticipate nothing. */
1433 ANTIC_IN (EXIT_BLOCK_PTR) = set_new (true);
1435 while (changed)
1437 num_iterations++;
1438 changed = false;
1439 changed = compute_antic_aux (EXIT_BLOCK_PTR, false);
1442 sbitmap_free (has_abnormal_preds);
1444 if (dump_file && (dump_flags & TDF_STATS))
1445 fprintf (dump_file, "compute_antic required %d iterations\n", num_iterations);
1448 static VEC(tree,heap) *inserted_exprs;
1449 /* Find a leader for an expression, or generate one using
1450 create_expression_by_pieces if it's ANTIC but
1451 complex.
1452 BLOCK is the basic_block we are looking for leaders in.
1453 EXPR is the expression to find a leader or generate for.
1454 STMTS is the statement list to put the inserted expressions on.
1455 Returns the SSA_NAME of the LHS of the generated expression or the
1456 leader. */
1458 static tree
1459 find_or_generate_expression (basic_block block, tree expr, tree stmts)
1461 tree genop = bitmap_find_leader (AVAIL_OUT (block), expr);
1463 /* If it's still NULL, it must be a complex expression, so generate
1464 it recursively. */
1465 if (genop == NULL)
1467 genop = VALUE_HANDLE_EXPR_SET (expr)->head->expr;
1468 gcc_assert (UNARY_CLASS_P (genop)
1469 || BINARY_CLASS_P (genop)
1470 || COMPARISON_CLASS_P (genop)
1471 || REFERENCE_CLASS_P (genop)
1472 || TREE_CODE (genop) == CALL_EXPR);
1473 genop = create_expression_by_pieces (block, genop, stmts);
1475 return genop;
1478 #define NECESSARY(stmt) stmt->common.asm_written_flag
1479 /* Create an expression in pieces, so that we can handle very complex
1480 expressions that may be ANTIC, but not necessary GIMPLE.
1481 BLOCK is the basic block the expression will be inserted into,
1482 EXPR is the expression to insert (in value form)
1483 STMTS is a statement list to append the necessary insertions into.
1485 This function will die if we hit some value that shouldn't be
1486 ANTIC but is (IE there is no leader for it, or its components).
1487 This function may also generate expressions that are themselves
1488 partially or fully redundant. Those that are will be either made
1489 fully redundant during the next iteration of insert (for partially
1490 redundant ones), or eliminated by eliminate (for fully redundant
1491 ones). */
1493 static tree
1494 create_expression_by_pieces (basic_block block, tree expr, tree stmts)
1496 tree temp, name;
1497 tree folded, forced_stmts, newexpr;
1498 tree v;
1499 tree_stmt_iterator tsi;
1501 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
1503 case tcc_expression:
1505 tree op0, op2;
1506 tree arglist;
1507 tree genop0, genop2;
1508 tree genarglist;
1509 tree walker, genwalker;
1511 gcc_assert (TREE_CODE (expr) == CALL_EXPR);
1512 genop2 = NULL;
1514 op0 = TREE_OPERAND (expr, 0);
1515 arglist = TREE_OPERAND (expr, 1);
1516 op2 = TREE_OPERAND (expr, 2);
1518 genop0 = find_or_generate_expression (block, op0, stmts);
1519 genarglist = copy_list (arglist);
1520 for (walker = arglist, genwalker = genarglist;
1521 genwalker && walker;
1522 genwalker = TREE_CHAIN (genwalker), walker = TREE_CHAIN (walker))
1524 TREE_VALUE (genwalker) = find_or_generate_expression (block,
1525 TREE_VALUE (walker),
1526 stmts);
1529 if (op2)
1530 genop2 = find_or_generate_expression (block, op2, stmts);
1531 folded = fold_build3 (TREE_CODE (expr), TREE_TYPE (expr),
1532 genop0, genarglist, genop2);
1533 break;
1537 break;
1539 case tcc_binary:
1540 case tcc_comparison:
1542 tree op1 = TREE_OPERAND (expr, 0);
1543 tree op2 = TREE_OPERAND (expr, 1);
1544 tree genop1 = find_or_generate_expression (block, op1, stmts);
1545 tree genop2 = find_or_generate_expression (block, op2, stmts);
1546 folded = fold_build2 (TREE_CODE (expr), TREE_TYPE (expr),
1547 genop1, genop2);
1548 break;
1551 case tcc_unary:
1553 tree op1 = TREE_OPERAND (expr, 0);
1554 tree genop1 = find_or_generate_expression (block, op1, stmts);
1555 folded = fold_build1 (TREE_CODE (expr), TREE_TYPE (expr),
1556 genop1);
1557 break;
1560 default:
1561 gcc_unreachable ();
1564 /* Force the generated expression to be a sequence of GIMPLE
1565 statements.
1566 We have to call unshare_expr because force_gimple_operand may
1567 modify the tree we pass to it. */
1568 newexpr = force_gimple_operand (unshare_expr (folded), &forced_stmts,
1569 false, NULL);
1571 /* If we have any intermediate expressions to the value sets, add them
1572 to the value sets and chain them on in the instruction stream. */
1573 if (forced_stmts)
1575 tsi = tsi_start (forced_stmts);
1576 for (; !tsi_end_p (tsi); tsi_next (&tsi))
1578 tree stmt = tsi_stmt (tsi);
1579 tree forcedname = TREE_OPERAND (stmt, 0);
1580 tree forcedexpr = TREE_OPERAND (stmt, 1);
1581 tree val = vn_lookup_or_add (forcedexpr, NULL);
1583 VEC_safe_push (tree, heap, inserted_exprs, stmt);
1584 vn_add (forcedname, val, NULL);
1585 bitmap_value_replace_in_set (NEW_SETS (block), forcedname);
1586 bitmap_value_replace_in_set (AVAIL_OUT (block), forcedname);
1588 tsi = tsi_last (stmts);
1589 tsi_link_after (&tsi, forced_stmts, TSI_CONTINUE_LINKING);
1592 /* Build and insert the assignment of the end result to the temporary
1593 that we will return. */
1594 temp = create_tmp_var (TREE_TYPE (expr), "pretmp");
1595 add_referenced_tmp_var (temp);
1596 if (TREE_CODE (TREE_TYPE (expr)) == COMPLEX_TYPE)
1597 DECL_COMPLEX_GIMPLE_REG_P (temp) = 1;
1598 newexpr = build (MODIFY_EXPR, TREE_TYPE (expr), temp, newexpr);
1599 name = make_ssa_name (temp, newexpr);
1600 TREE_OPERAND (newexpr, 0) = name;
1601 NECESSARY (newexpr) = 0;
1602 tsi = tsi_last (stmts);
1603 tsi_link_after (&tsi, newexpr, TSI_CONTINUE_LINKING);
1604 VEC_safe_push (tree, heap, inserted_exprs, newexpr);
1606 /* Add a value handle to the temporary.
1607 The value may already exist in either NEW_SETS, or AVAIL_OUT, because
1608 we are creating the expression by pieces, and this particular piece of
1609 the expression may have been represented. There is no harm in replacing
1610 here. */
1611 v = get_value_handle (expr);
1612 vn_add (name, v, NULL);
1613 bitmap_value_replace_in_set (NEW_SETS (block), name);
1614 bitmap_value_replace_in_set (AVAIL_OUT (block), name);
1616 pre_stats.insertions++;
1617 if (dump_file && (dump_flags & TDF_DETAILS))
1619 fprintf (dump_file, "Inserted ");
1620 print_generic_expr (dump_file, newexpr, 0);
1621 fprintf (dump_file, " in predecessor %d\n", block->index);
1624 return name;
1627 /* Insert the to-be-made-available values of NODE for each predecessor, stored
1628 in AVAIL, into the predecessors of BLOCK, and merge the result with a phi
1629 node, given the same value handle as NODE. The prefix of the phi node is
1630 given with TMPNAME. Return true if we have inserted new stuff. */
1632 static bool
1633 insert_into_preds_of_block (basic_block block, value_set_node_t node,
1634 tree *avail, const char *tmpname)
1636 tree val = get_value_handle (node->expr);
1637 edge pred;
1638 bool insertions = false;
1639 bool nophi = false;
1640 basic_block bprime;
1641 tree eprime;
1642 edge_iterator ei;
1643 tree type = TREE_TYPE (avail[EDGE_PRED (block, 0)->src->index]);
1644 tree temp;
1646 if (dump_file && (dump_flags & TDF_DETAILS))
1648 fprintf (dump_file, "Found partial redundancy for expression ");
1649 print_generic_expr (dump_file, node->expr, 0);
1650 fprintf (dump_file, "\n");
1653 /* Make sure we aren't creating an induction variable. */
1654 if (block->loop_depth > 0 && EDGE_COUNT (block->preds) == 2)
1656 bool firstinsideloop = false;
1657 bool secondinsideloop = false;
1658 firstinsideloop = flow_bb_inside_loop_p (block->loop_father,
1659 EDGE_PRED (block, 0)->src);
1660 secondinsideloop = flow_bb_inside_loop_p (block->loop_father,
1661 EDGE_PRED (block, 1)->src);
1662 /* Induction variables only have one edge inside the loop. */
1663 if (firstinsideloop ^ secondinsideloop)
1665 if (dump_file && (dump_flags & TDF_DETAILS))
1666 fprintf (dump_file, "Skipping insertion of phi for partial redundancy: Looks like an induction variable\n");
1667 nophi = true;
1672 /* Make the necessary insertions. */
1673 FOR_EACH_EDGE (pred, ei, block->preds)
1675 tree stmts = alloc_stmt_list ();
1676 tree builtexpr;
1677 bprime = pred->src;
1678 eprime = avail[bprime->index];
1679 if (BINARY_CLASS_P (eprime)
1680 || COMPARISON_CLASS_P (eprime)
1681 || UNARY_CLASS_P (eprime)
1682 || TREE_CODE (eprime) == CALL_EXPR)
1684 builtexpr = create_expression_by_pieces (bprime,
1685 eprime,
1686 stmts);
1687 bsi_insert_on_edge (pred, stmts);
1688 avail[bprime->index] = builtexpr;
1689 insertions = true;
1692 /* If we didn't want a phi node, and we made insertions, we still have
1693 inserted new stuff, and thus return true. If we didn't want a phi node,
1694 and didn't make insertions, we haven't added anything new, so return
1695 false. */
1696 if (nophi && insertions)
1697 return true;
1698 else if (nophi && !insertions)
1699 return false;
1701 /* Now build a phi for the new variable. */
1702 temp = create_tmp_var (type, tmpname);
1703 add_referenced_tmp_var (temp);
1704 if (TREE_CODE (type) == COMPLEX_TYPE)
1705 DECL_COMPLEX_GIMPLE_REG_P (temp) = 1;
1706 temp = create_phi_node (temp, block);
1707 NECESSARY (temp) = 0;
1708 VEC_safe_push (tree, heap, inserted_exprs, temp);
1709 FOR_EACH_EDGE (pred, ei, block->preds)
1710 add_phi_arg (temp, avail[pred->src->index], pred);
1712 vn_add (PHI_RESULT (temp), val, NULL);
1714 /* The value should *not* exist in PHI_GEN, or else we wouldn't be doing
1715 this insertion, since we test for the existence of this value in PHI_GEN
1716 before proceeding with the partial redundancy checks in insert_aux.
1718 The value may exist in AVAIL_OUT, in particular, it could be represented
1719 by the expression we are trying to eliminate, in which case we want the
1720 replacement to occur. If it's not existing in AVAIL_OUT, we want it
1721 inserted there.
1723 Similarly, to the PHI_GEN case, the value should not exist in NEW_SETS of
1724 this block, because if it did, it would have existed in our dominator's
1725 AVAIL_OUT, and would have been skipped due to the full redundancy check.
1728 bitmap_insert_into_set (PHI_GEN (block),
1729 PHI_RESULT (temp));
1730 bitmap_value_replace_in_set (AVAIL_OUT (block),
1731 PHI_RESULT (temp));
1732 bitmap_insert_into_set (NEW_SETS (block),
1733 PHI_RESULT (temp));
1735 if (dump_file && (dump_flags & TDF_DETAILS))
1737 fprintf (dump_file, "Created phi ");
1738 print_generic_expr (dump_file, temp, 0);
1739 fprintf (dump_file, " in block %d\n", block->index);
1741 pre_stats.phis++;
1742 return true;
1747 /* Perform insertion of partially redundant values.
1748 For BLOCK, do the following:
1749 1. Propagate the NEW_SETS of the dominator into the current block.
1750 If the block has multiple predecessors,
1751 2a. Iterate over the ANTIC expressions for the block to see if
1752 any of them are partially redundant.
1753 2b. If so, insert them into the necessary predecessors to make
1754 the expression fully redundant.
1755 2c. Insert a new PHI merging the values of the predecessors.
1756 2d. Insert the new PHI, and the new expressions, into the
1757 NEW_SETS set.
1758 3. Recursively call ourselves on the dominator children of BLOCK.
1762 static bool
1763 insert_aux (basic_block block)
1765 basic_block son;
1766 bool new_stuff = false;
1768 if (block)
1770 basic_block dom;
1771 dom = get_immediate_dominator (CDI_DOMINATORS, block);
1772 if (dom)
1774 unsigned i;
1775 bitmap_iterator bi;
1776 bitmap_set_t newset = NEW_SETS (dom);
1777 if (newset)
1779 /* Note that we need to value_replace both NEW_SETS, and
1780 AVAIL_OUT. For both the case of NEW_SETS, the value may be
1781 represented by some non-simple expression here that we want
1782 to replace it with. */
1783 EXECUTE_IF_SET_IN_BITMAP (newset->expressions, 0, i, bi)
1785 bitmap_value_replace_in_set (NEW_SETS (block), ssa_name (i));
1786 bitmap_value_replace_in_set (AVAIL_OUT (block), ssa_name (i));
1789 if (!single_pred_p (block))
1791 value_set_node_t node;
1792 for (node = ANTIC_IN (block)->head;
1793 node;
1794 node = node->next)
1796 if (BINARY_CLASS_P (node->expr)
1797 || COMPARISON_CLASS_P (node->expr)
1798 || UNARY_CLASS_P (node->expr)
1799 || TREE_CODE (node->expr) == CALL_EXPR)
1801 tree *avail;
1802 tree val;
1803 bool by_some = false;
1804 bool cant_insert = false;
1805 bool all_same = true;
1806 tree first_s = NULL;
1807 edge pred;
1808 basic_block bprime;
1809 tree eprime = NULL_TREE;
1810 edge_iterator ei;
1812 val = get_value_handle (node->expr);
1813 if (bitmap_set_contains_value (PHI_GEN (block), val))
1814 continue;
1815 if (bitmap_set_contains_value (AVAIL_OUT (dom), val))
1817 if (dump_file && (dump_flags & TDF_DETAILS))
1818 fprintf (dump_file, "Found fully redundant value\n");
1819 continue;
1822 avail = xcalloc (last_basic_block, sizeof (tree));
1823 FOR_EACH_EDGE (pred, ei, block->preds)
1825 tree vprime;
1826 tree edoubleprime;
1828 /* This can happen in the very weird case
1829 that our fake infinite loop edges have caused a
1830 critical edge to appear. */
1831 if (EDGE_CRITICAL_P (pred))
1833 cant_insert = true;
1834 break;
1836 bprime = pred->src;
1837 eprime = phi_translate (node->expr,
1838 ANTIC_IN (block),
1839 bprime, block);
1841 /* eprime will generally only be NULL if the
1842 value of the expression, translated
1843 through the PHI for this predecessor, is
1844 undefined. If that is the case, we can't
1845 make the expression fully redundant,
1846 because its value is undefined along a
1847 predecessor path. We can thus break out
1848 early because it doesn't matter what the
1849 rest of the results are. */
1850 if (eprime == NULL)
1852 cant_insert = true;
1853 break;
1856 eprime = fully_constant_expression (eprime);
1857 vprime = get_value_handle (eprime);
1858 gcc_assert (vprime);
1859 edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime),
1860 vprime);
1861 if (edoubleprime == NULL)
1863 avail[bprime->index] = eprime;
1864 all_same = false;
1866 else
1868 avail[bprime->index] = edoubleprime;
1869 by_some = true;
1870 if (first_s == NULL)
1871 first_s = edoubleprime;
1872 else if (!operand_equal_p (first_s, edoubleprime,
1874 all_same = false;
1877 /* If we can insert it, it's not the same value
1878 already existing along every predecessor, and
1879 it's defined by some predecessor, it is
1880 partially redundant. */
1881 if (!cant_insert && !all_same && by_some)
1883 if (insert_into_preds_of_block (block, node, avail,
1884 "prephitmp"))
1885 new_stuff = true;
1887 /* If all edges produce the same value and that value is
1888 an invariant, then the PHI has the same value on all
1889 edges. Note this. */
1890 else if (!cant_insert && all_same && eprime
1891 && is_gimple_min_invariant (eprime)
1892 && !is_gimple_min_invariant (val))
1894 value_set_t exprset = VALUE_HANDLE_EXPR_SET (val);
1895 value_set_node_t node;
1896 for (node = exprset->head; node; node = node->next)
1898 if (TREE_CODE (node->expr) == SSA_NAME)
1900 vn_add (node->expr, eprime, NULL);
1901 pre_stats.constified++;
1905 free (avail);
1911 for (son = first_dom_son (CDI_DOMINATORS, block);
1912 son;
1913 son = next_dom_son (CDI_DOMINATORS, son))
1915 new_stuff |= insert_aux (son);
1918 return new_stuff;
1921 /* Perform insertion of partially redundant values. */
1923 static void
1924 insert (void)
1926 bool new_stuff = true;
1927 basic_block bb;
1928 int num_iterations = 0;
1930 FOR_ALL_BB (bb)
1931 NEW_SETS (bb) = bitmap_set_new ();
1933 while (new_stuff)
1935 num_iterations++;
1936 new_stuff = false;
1937 new_stuff = insert_aux (ENTRY_BLOCK_PTR);
1939 if (num_iterations > 2 && dump_file && (dump_flags & TDF_STATS))
1940 fprintf (dump_file, "insert required %d iterations\n", num_iterations);
1944 /* Return true if VAR is an SSA variable with no defining statement in
1945 this procedure, *AND* isn't a live-on-entry parameter. */
1947 static bool
1948 is_undefined_value (tree expr)
1950 return (TREE_CODE (expr) == SSA_NAME
1951 && IS_EMPTY_STMT (SSA_NAME_DEF_STMT (expr))
1952 /* PARM_DECLs and hard registers are always defined. */
1953 && TREE_CODE (SSA_NAME_VAR (expr)) != PARM_DECL);
1957 /* Given an SSA variable VAR and an expression EXPR, compute the value
1958 number for EXPR and create a value handle (VAL) for it. If VAR and
1959 EXPR are not the same, associate VAL with VAR. Finally, add VAR to
1960 S1 and its value handle to S2.
1962 VUSES represent the virtual use operands associated with EXPR (if
1963 any). They are used when computing the hash value for EXPR. */
1965 static inline void
1966 add_to_sets (tree var, tree expr, tree stmt, bitmap_set_t s1,
1967 bitmap_set_t s2)
1969 tree val = vn_lookup_or_add (expr, stmt);
1971 /* VAR and EXPR may be the same when processing statements for which
1972 we are not computing value numbers (e.g., non-assignments, or
1973 statements that make aliased stores). In those cases, we are
1974 only interested in making VAR available as its own value. */
1975 if (var != expr)
1976 vn_add (var, val, NULL_TREE);
1978 if (s1)
1979 bitmap_insert_into_set (s1, var);
1980 bitmap_value_insert_into_set (s2, var);
1984 /* Given a unary or binary expression EXPR, create and return a new
1985 expression with the same structure as EXPR but with its operands
1986 replaced with the value handles of each of the operands of EXPR.
1988 VUSES represent the virtual use operands associated with EXPR (if
1989 any). They are used when computing the hash value for EXPR.
1990 Insert EXPR's operands into the EXP_GEN set for BLOCK. */
1992 static inline tree
1993 create_value_expr_from (tree expr, basic_block block, tree stmt)
1995 int i;
1996 enum tree_code code = TREE_CODE (expr);
1997 tree vexpr;
1998 alloc_pool pool;
2000 gcc_assert (TREE_CODE_CLASS (code) == tcc_unary
2001 || TREE_CODE_CLASS (code) == tcc_binary
2002 || TREE_CODE_CLASS (code) == tcc_comparison
2003 || TREE_CODE_CLASS (code) == tcc_reference
2004 || TREE_CODE_CLASS (code) == tcc_expression
2005 || TREE_CODE_CLASS (code) == tcc_exceptional);
2007 if (TREE_CODE_CLASS (code) == tcc_unary)
2008 pool = unary_node_pool;
2009 else if (TREE_CODE_CLASS (code) == tcc_reference)
2010 pool = reference_node_pool;
2011 else if (TREE_CODE_CLASS (code) == tcc_binary)
2012 pool = binary_node_pool;
2013 else if (TREE_CODE_CLASS (code) == tcc_comparison)
2014 pool = comparison_node_pool;
2015 else if (TREE_CODE_CLASS (code) == tcc_exceptional)
2017 gcc_assert (code == TREE_LIST);
2018 pool = list_node_pool;
2020 else
2022 gcc_assert (code == CALL_EXPR);
2023 pool = expression_node_pool;
2026 vexpr = pool_alloc (pool);
2027 memcpy (vexpr, expr, tree_size (expr));
2029 /* This case is only for TREE_LIST's that appear as part of
2030 CALL_EXPR's. Anything else is a bug, but we can't easily verify
2031 this, hence this comment. TREE_LIST is not handled by the
2032 general case below is because they don't have a fixed length, or
2033 operands, so you can't access purpose/value/chain through
2034 TREE_OPERAND macros. */
2036 if (code == TREE_LIST)
2038 tree op = NULL_TREE;
2039 tree temp = NULL_TREE;
2040 if (TREE_CHAIN (vexpr))
2041 temp = create_value_expr_from (TREE_CHAIN (vexpr), block, stmt);
2042 TREE_CHAIN (vexpr) = temp ? temp : TREE_CHAIN (vexpr);
2045 /* Recursively value-numberize reference ops. */
2046 if (REFERENCE_CLASS_P (TREE_VALUE (vexpr)))
2048 tree tempop;
2049 op = TREE_VALUE (vexpr);
2050 tempop = create_value_expr_from (op, block, stmt);
2051 op = tempop ? tempop : op;
2053 TREE_VALUE (vexpr) = vn_lookup_or_add (op, stmt);
2055 else
2057 op = TREE_VALUE (vexpr);
2058 TREE_VALUE (vexpr) = vn_lookup_or_add (TREE_VALUE (vexpr), NULL);
2060 /* This is the equivalent of inserting op into EXP_GEN like we
2061 do below */
2062 if (!is_undefined_value (op))
2063 value_insert_into_set (EXP_GEN (block), op);
2065 return vexpr;
2068 for (i = 0; i < TREE_CODE_LENGTH (code); i++)
2070 tree val, op;
2072 op = TREE_OPERAND (expr, i);
2073 if (op == NULL_TREE)
2074 continue;
2076 /* If OP is a constant that has overflowed, do not value number
2077 this expression. */
2078 if (CONSTANT_CLASS_P (op)
2079 && TREE_OVERFLOW (op))
2081 pool_free (pool, vexpr);
2082 return NULL;
2085 /* Recursively value-numberize reference ops and tree lists. */
2086 if (REFERENCE_CLASS_P (op))
2088 tree tempop = create_value_expr_from (op, block, stmt);
2089 op = tempop ? tempop : op;
2090 val = vn_lookup_or_add (op, stmt);
2092 else if (TREE_CODE (op) == TREE_LIST)
2094 tree tempop;
2096 gcc_assert (TREE_CODE (expr) == CALL_EXPR);
2097 tempop = create_value_expr_from (op, block, stmt);
2099 op = tempop ? tempop : op;
2100 vn_lookup_or_add (op, NULL);
2101 /* Unlike everywhere else, we do *not* want to replace the
2102 TREE_LIST itself with a value number, because support
2103 functions we call will blow up. */
2104 val = op;
2106 else
2107 /* Create a value handle for OP and add it to VEXPR. */
2108 val = vn_lookup_or_add (op, NULL);
2110 if (!is_undefined_value (op) && TREE_CODE (op) != TREE_LIST)
2111 value_insert_into_set (EXP_GEN (block), op);
2113 if (TREE_CODE (val) == VALUE_HANDLE)
2114 TREE_TYPE (val) = TREE_TYPE (TREE_OPERAND (vexpr, i));
2116 TREE_OPERAND (vexpr, i) = val;
2119 return vexpr;
2123 /* Return true if we can value number a call. This is true if we have
2124 a pure or constant call. */
2125 static bool
2126 can_value_number_call (tree stmt)
2128 tree call = get_call_expr_in (stmt);
2130 /* This is a temporary restriction until we translate vuses through
2131 phi nodes. This is only needed for PRE, of course. */
2132 if (!in_fre && !ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS))
2133 return false;
2134 if (call_expr_flags (call) & (ECF_PURE | ECF_CONST))
2135 return true;
2136 return false;
2139 /* Given a statement STMT and its right hand side which is a load, try
2140 to look for the expression stored in the location for the load, and
2141 return true if a useful equivalence was recorded for LHS. */
2143 static bool
2144 try_look_through_load (tree lhs, tree mem_ref, tree stmt, basic_block block)
2146 tree store_stmt = NULL;
2147 tree rhs;
2148 ssa_op_iter i;
2149 tree vuse;
2151 FOR_EACH_SSA_TREE_OPERAND (vuse, stmt, i, SSA_OP_VIRTUAL_USES)
2153 tree def_stmt;
2155 gcc_assert (TREE_CODE (vuse) == SSA_NAME);
2156 def_stmt = SSA_NAME_DEF_STMT (vuse);
2158 /* If there is no useful statement for this VUSE, we'll not find a
2159 useful expression to return either. Likewise, if there is a
2160 statement but it is not a simple assignment or it has virtual
2161 uses, we can stop right here. Note that this means we do
2162 not look through PHI nodes, which is intentional. */
2163 if (!def_stmt
2164 || TREE_CODE (def_stmt) != MODIFY_EXPR
2165 || !ZERO_SSA_OPERANDS (def_stmt, SSA_OP_VIRTUAL_USES))
2166 return false;
2168 /* If this is not the same statement as one we have looked at for
2169 another VUSE of STMT already, we have two statements producing
2170 something that reaches our STMT. */
2171 if (store_stmt && store_stmt != def_stmt)
2172 return false;
2173 else
2175 /* Is this a store to the exact same location as the one we are
2176 loading from in STMT? */
2177 if (!operand_equal_p (TREE_OPERAND (def_stmt, 0), mem_ref, 0))
2178 return false;
2180 /* Otherwise remember this statement and see if all other VUSEs
2181 come from the same statement. */
2182 store_stmt = def_stmt;
2186 /* Alright then, we have visited all VUSEs of STMT and we've determined
2187 that all of them come from the same statement STORE_STMT. See if there
2188 is a useful expression we can deduce from STORE_STMT. */
2189 rhs = TREE_OPERAND (store_stmt, 1);
2190 if ((TREE_CODE (rhs) == SSA_NAME
2191 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs))
2192 || is_gimple_min_invariant (rhs)
2193 || TREE_CODE (rhs) == ADDR_EXPR
2194 || TREE_INVARIANT (rhs))
2197 /* Yay! Compute a value number for the RHS of the statement and
2198 add its value to the AVAIL_OUT set for the block. Add the LHS
2199 to TMP_GEN. */
2200 add_to_sets (lhs, rhs, store_stmt, TMP_GEN (block), AVAIL_OUT (block));
2201 if (TREE_CODE (rhs) == SSA_NAME
2202 && !is_undefined_value (rhs))
2203 value_insert_into_set (EXP_GEN (block), rhs);
2204 return true;
2207 return false;
2210 /* Compute the AVAIL set for all basic blocks.
2212 This function performs value numbering of the statements in each basic
2213 block. The AVAIL sets are built from information we glean while doing
2214 this value numbering, since the AVAIL sets contain only one entry per
2215 value.
2217 AVAIL_IN[BLOCK] = AVAIL_OUT[dom(BLOCK)].
2218 AVAIL_OUT[BLOCK] = AVAIL_IN[BLOCK] U PHI_GEN[BLOCK] U TMP_GEN[BLOCK]. */
2220 static void
2221 compute_avail (void)
2223 basic_block block, son;
2224 basic_block *worklist;
2225 size_t sp = 0;
2226 tree param;
2228 /* For arguments with default definitions, we pretend they are
2229 defined in the entry block. */
2230 for (param = DECL_ARGUMENTS (current_function_decl);
2231 param;
2232 param = TREE_CHAIN (param))
2234 if (default_def (param) != NULL)
2236 tree def = default_def (param);
2237 vn_lookup_or_add (def, NULL);
2238 bitmap_insert_into_set (TMP_GEN (ENTRY_BLOCK_PTR), def);
2239 bitmap_value_insert_into_set (AVAIL_OUT (ENTRY_BLOCK_PTR), def);
2243 /* Allocate the worklist. */
2244 worklist = xmalloc (sizeof (basic_block) * n_basic_blocks);
2246 /* Seed the algorithm by putting the dominator children of the entry
2247 block on the worklist. */
2248 for (son = first_dom_son (CDI_DOMINATORS, ENTRY_BLOCK_PTR);
2249 son;
2250 son = next_dom_son (CDI_DOMINATORS, son))
2251 worklist[sp++] = son;
2253 /* Loop until the worklist is empty. */
2254 while (sp)
2256 block_stmt_iterator bsi;
2257 tree stmt, phi;
2258 basic_block dom;
2260 /* Pick a block from the worklist. */
2261 block = worklist[--sp];
2263 /* Initially, the set of available values in BLOCK is that of
2264 its immediate dominator. */
2265 dom = get_immediate_dominator (CDI_DOMINATORS, block);
2266 if (dom)
2267 bitmap_set_copy (AVAIL_OUT (block), AVAIL_OUT (dom));
2269 /* Generate values for PHI nodes. */
2270 for (phi = phi_nodes (block); phi; phi = PHI_CHAIN (phi))
2271 /* We have no need for virtual phis, as they don't represent
2272 actual computations. */
2273 if (is_gimple_reg (PHI_RESULT (phi)))
2274 add_to_sets (PHI_RESULT (phi), PHI_RESULT (phi), NULL,
2275 PHI_GEN (block), AVAIL_OUT (block));
2277 /* Now compute value numbers and populate value sets with all
2278 the expressions computed in BLOCK. */
2279 for (bsi = bsi_start (block); !bsi_end_p (bsi); bsi_next (&bsi))
2281 stmt_ann_t ann;
2282 ssa_op_iter iter;
2283 tree op;
2285 stmt = bsi_stmt (bsi);
2286 ann = stmt_ann (stmt);
2288 /* We are only interested in assignments of the form
2289 X_i = EXPR, where EXPR represents an "interesting"
2290 computation, it has no volatile operands and X_i
2291 doesn't flow through an abnormal edge. */
2292 if (TREE_CODE (stmt) == MODIFY_EXPR
2293 && !ann->has_volatile_ops
2294 && TREE_CODE (TREE_OPERAND (stmt, 0)) == SSA_NAME
2295 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (stmt, 0)))
2297 tree lhs = TREE_OPERAND (stmt, 0);
2298 tree rhs = TREE_OPERAND (stmt, 1);
2300 /* Try to look through loads. */
2301 if (TREE_CODE (lhs) == SSA_NAME
2302 && !ZERO_SSA_OPERANDS (stmt, SSA_OP_VIRTUAL_USES)
2303 && try_look_through_load (lhs, rhs, stmt, block))
2304 continue;
2306 STRIP_USELESS_TYPE_CONVERSION (rhs);
2307 if (UNARY_CLASS_P (rhs)
2308 || BINARY_CLASS_P (rhs)
2309 || COMPARISON_CLASS_P (rhs)
2310 || REFERENCE_CLASS_P (rhs)
2311 || (TREE_CODE (rhs) == CALL_EXPR
2312 && can_value_number_call (stmt)))
2314 /* For binary, unary, and reference expressions,
2315 create a duplicate expression with the operands
2316 replaced with the value handles of the original
2317 RHS. */
2318 tree newt = create_value_expr_from (rhs, block, stmt);
2319 if (newt)
2321 add_to_sets (lhs, newt, stmt, TMP_GEN (block),
2322 AVAIL_OUT (block));
2323 value_insert_into_set (EXP_GEN (block), newt);
2324 continue;
2327 else if ((TREE_CODE (rhs) == SSA_NAME
2328 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs))
2329 || is_gimple_min_invariant (rhs)
2330 || TREE_CODE (rhs) == ADDR_EXPR
2331 || TREE_INVARIANT (rhs)
2332 || DECL_P (rhs))
2334 /* Compute a value number for the RHS of the statement
2335 and add its value to the AVAIL_OUT set for the block.
2336 Add the LHS to TMP_GEN. */
2337 add_to_sets (lhs, rhs, stmt, TMP_GEN (block),
2338 AVAIL_OUT (block));
2340 if (TREE_CODE (rhs) == SSA_NAME
2341 && !is_undefined_value (rhs))
2342 value_insert_into_set (EXP_GEN (block), rhs);
2343 continue;
2347 /* For any other statement that we don't recognize, simply
2348 make the names generated by the statement available in
2349 AVAIL_OUT and TMP_GEN. */
2350 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF)
2351 add_to_sets (op, op, NULL, TMP_GEN (block), AVAIL_OUT (block));
2353 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
2354 add_to_sets (op, op, NULL, NULL , AVAIL_OUT (block));
2357 /* Put the dominator children of BLOCK on the worklist of blocks
2358 to compute available sets for. */
2359 for (son = first_dom_son (CDI_DOMINATORS, block);
2360 son;
2361 son = next_dom_son (CDI_DOMINATORS, son))
2362 worklist[sp++] = son;
2365 free (worklist);
2369 /* Eliminate fully redundant computations. */
2371 static void
2372 eliminate (void)
2374 basic_block b;
2376 FOR_EACH_BB (b)
2378 block_stmt_iterator i;
2380 for (i = bsi_start (b); !bsi_end_p (i); bsi_next (&i))
2382 tree stmt = bsi_stmt (i);
2384 /* Lookup the RHS of the expression, see if we have an
2385 available computation for it. If so, replace the RHS with
2386 the available computation. */
2387 if (TREE_CODE (stmt) == MODIFY_EXPR
2388 && TREE_CODE (TREE_OPERAND (stmt, 0)) == SSA_NAME
2389 && TREE_CODE (TREE_OPERAND (stmt ,1)) != SSA_NAME
2390 && !is_gimple_min_invariant (TREE_OPERAND (stmt, 1))
2391 && !stmt_ann (stmt)->has_volatile_ops)
2393 tree lhs = TREE_OPERAND (stmt, 0);
2394 tree *rhs_p = &TREE_OPERAND (stmt, 1);
2395 tree sprime;
2397 sprime = bitmap_find_leader (AVAIL_OUT (b),
2398 vn_lookup (lhs, NULL));
2399 if (sprime
2400 && sprime != lhs
2401 && (TREE_CODE (*rhs_p) != SSA_NAME
2402 || may_propagate_copy (*rhs_p, sprime)))
2404 gcc_assert (sprime != *rhs_p);
2406 if (dump_file && (dump_flags & TDF_DETAILS))
2408 fprintf (dump_file, "Replaced ");
2409 print_generic_expr (dump_file, *rhs_p, 0);
2410 fprintf (dump_file, " with ");
2411 print_generic_expr (dump_file, sprime, 0);
2412 fprintf (dump_file, " in ");
2413 print_generic_stmt (dump_file, stmt, 0);
2416 if (TREE_CODE (sprime) == SSA_NAME)
2417 NECESSARY (SSA_NAME_DEF_STMT (sprime)) = 1;
2418 /* We need to make sure the new and old types actually match,
2419 which may require adding a simple cast, which fold_convert
2420 will do for us. */
2421 if (TREE_CODE (*rhs_p) != SSA_NAME
2422 && !tree_ssa_useless_type_conversion_1 (TREE_TYPE (*rhs_p),
2423 TREE_TYPE (sprime)))
2424 sprime = fold_convert (TREE_TYPE (*rhs_p), sprime);
2426 pre_stats.eliminations++;
2427 propagate_tree_value (rhs_p, sprime);
2428 update_stmt (stmt);
2430 /* If we removed EH side effects from the statement, clean
2431 its EH information. */
2432 if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
2434 bitmap_set_bit (need_eh_cleanup,
2435 bb_for_stmt (stmt)->index);
2436 if (dump_file && (dump_flags & TDF_DETAILS))
2437 fprintf (dump_file, " Removed EH side effects.\n");
2445 /* Borrow a bit of tree-ssa-dce.c for the moment.
2446 XXX: In 4.1, we should be able to just run a DCE pass after PRE, though
2447 this may be a bit faster, and we may want critical edges kept split. */
2449 /* If OP's defining statement has not already been determined to be necessary,
2450 mark that statement necessary. Return the stmt, if it is newly
2451 necessary. */
2453 static inline tree
2454 mark_operand_necessary (tree op)
2456 tree stmt;
2458 gcc_assert (op);
2460 stmt = SSA_NAME_DEF_STMT (op);
2461 gcc_assert (stmt);
2463 if (NECESSARY (stmt)
2464 || IS_EMPTY_STMT (stmt))
2465 return NULL;
2467 NECESSARY (stmt) = 1;
2468 return stmt;
2471 /* Because we don't follow exactly the standard PRE algorithm, and decide not
2472 to insert PHI nodes sometimes, and because value numbering of casts isn't
2473 perfect, we sometimes end up inserting dead code. This simple DCE-like
2474 pass removes any insertions we made that weren't actually used. */
2476 static void
2477 remove_dead_inserted_code (void)
2479 VEC(tree,heap) *worklist = NULL;
2480 int i;
2481 tree t;
2483 worklist = VEC_alloc (tree, heap, VEC_length (tree, inserted_exprs));
2484 for (i = 0; VEC_iterate (tree, inserted_exprs, i, t); i++)
2486 if (NECESSARY (t))
2487 VEC_quick_push (tree, worklist, t);
2489 while (VEC_length (tree, worklist) > 0)
2491 t = VEC_pop (tree, worklist);
2492 if (TREE_CODE (t) == PHI_NODE)
2494 /* PHI nodes are somewhat special in that each PHI alternative has
2495 data and control dependencies. All the statements feeding the
2496 PHI node's arguments are always necessary. In aggressive mode,
2497 we also consider the control dependent edges leading to the
2498 predecessor block associated with each PHI alternative as
2499 necessary. */
2500 int k;
2502 VEC_reserve (tree, heap, worklist, PHI_NUM_ARGS (t));
2503 for (k = 0; k < PHI_NUM_ARGS (t); k++)
2505 tree arg = PHI_ARG_DEF (t, k);
2506 if (TREE_CODE (arg) == SSA_NAME)
2508 arg = mark_operand_necessary (arg);
2509 if (arg)
2510 VEC_quick_push (tree, worklist, arg);
2514 else
2516 /* Propagate through the operands. Examine all the USE, VUSE and
2517 V_MAY_DEF operands in this statement. Mark all the statements
2518 which feed this statement's uses as necessary. */
2519 ssa_op_iter iter;
2520 tree use;
2522 /* The operands of V_MAY_DEF expressions are also needed as they
2523 represent potential definitions that may reach this
2524 statement (V_MAY_DEF operands allow us to follow def-def
2525 links). */
2527 FOR_EACH_SSA_TREE_OPERAND (use, t, iter, SSA_OP_ALL_USES)
2529 tree n = mark_operand_necessary (use);
2530 if (n)
2531 VEC_safe_push (tree, heap, worklist, n);
2535 for (i = 0; VEC_iterate (tree, inserted_exprs, i, t); i++)
2537 if (!NECESSARY (t))
2539 block_stmt_iterator bsi;
2540 if (dump_file && (dump_flags & TDF_DETAILS))
2542 fprintf (dump_file, "Removing unnecessary insertion:");
2543 print_generic_stmt (dump_file, t, 0);
2545 if (TREE_CODE (t) == PHI_NODE)
2547 remove_phi_node (t, NULL);
2549 else
2551 bsi = bsi_for_stmt (t);
2552 bsi_remove (&bsi);
2556 VEC_free (tree, heap, worklist);
2558 /* Initialize data structures used by PRE. */
2560 static void
2561 init_pre (bool do_fre)
2563 basic_block bb;
2565 in_fre = do_fre;
2567 inserted_exprs = NULL;
2568 vn_init ();
2569 if (!do_fre)
2570 current_loops = loop_optimizer_init (dump_file);
2571 connect_infinite_loops_to_exit ();
2572 memset (&pre_stats, 0, sizeof (pre_stats));
2574 /* If block 0 has more than one predecessor, it means that its PHI
2575 nodes will have arguments coming from block -1. This creates
2576 problems for several places in PRE that keep local arrays indexed
2577 by block number. To prevent this, we split the edge coming from
2578 ENTRY_BLOCK_PTR (FIXME, if ENTRY_BLOCK_PTR had an index number
2579 different than -1 we wouldn't have to hack this. tree-ssa-dce.c
2580 needs a similar change). */
2581 if (!single_pred_p (single_succ (ENTRY_BLOCK_PTR)))
2582 if (!(single_succ_edge (ENTRY_BLOCK_PTR)->flags & EDGE_ABNORMAL))
2583 split_edge (single_succ_edge (ENTRY_BLOCK_PTR));
2585 FOR_ALL_BB (bb)
2586 bb->aux = xcalloc (1, sizeof (struct bb_value_sets));
2588 bitmap_obstack_initialize (&grand_bitmap_obstack);
2589 phi_translate_table = htab_create (511, expr_pred_trans_hash,
2590 expr_pred_trans_eq, free);
2591 value_set_pool = create_alloc_pool ("Value sets",
2592 sizeof (struct value_set), 30);
2593 bitmap_set_pool = create_alloc_pool ("Bitmap sets",
2594 sizeof (struct bitmap_set), 30);
2595 value_set_node_pool = create_alloc_pool ("Value set nodes",
2596 sizeof (struct value_set_node), 30);
2597 calculate_dominance_info (CDI_POST_DOMINATORS);
2598 calculate_dominance_info (CDI_DOMINATORS);
2599 binary_node_pool = create_alloc_pool ("Binary tree nodes",
2600 tree_code_size (PLUS_EXPR), 30);
2601 unary_node_pool = create_alloc_pool ("Unary tree nodes",
2602 tree_code_size (NEGATE_EXPR), 30);
2603 reference_node_pool = create_alloc_pool ("Reference tree nodes",
2604 tree_code_size (ARRAY_REF), 30);
2605 expression_node_pool = create_alloc_pool ("Expression tree nodes",
2606 tree_code_size (CALL_EXPR), 30);
2607 list_node_pool = create_alloc_pool ("List tree nodes",
2608 tree_code_size (TREE_LIST), 30);
2609 comparison_node_pool = create_alloc_pool ("Comparison tree nodes",
2610 tree_code_size (EQ_EXPR), 30);
2611 FOR_ALL_BB (bb)
2613 EXP_GEN (bb) = set_new (true);
2614 PHI_GEN (bb) = bitmap_set_new ();
2615 TMP_GEN (bb) = bitmap_set_new ();
2616 AVAIL_OUT (bb) = bitmap_set_new ();
2619 need_eh_cleanup = BITMAP_ALLOC (NULL);
2623 /* Deallocate data structures used by PRE. */
2625 static void
2626 fini_pre (bool do_fre)
2628 basic_block bb;
2629 unsigned int i;
2631 VEC_free (tree, heap, inserted_exprs);
2632 bitmap_obstack_release (&grand_bitmap_obstack);
2633 free_alloc_pool (value_set_pool);
2634 free_alloc_pool (bitmap_set_pool);
2635 free_alloc_pool (value_set_node_pool);
2636 free_alloc_pool (binary_node_pool);
2637 free_alloc_pool (reference_node_pool);
2638 free_alloc_pool (unary_node_pool);
2639 free_alloc_pool (list_node_pool);
2640 free_alloc_pool (expression_node_pool);
2641 free_alloc_pool (comparison_node_pool);
2642 htab_delete (phi_translate_table);
2643 remove_fake_exit_edges ();
2645 FOR_ALL_BB (bb)
2647 free (bb->aux);
2648 bb->aux = NULL;
2651 free_dominance_info (CDI_POST_DOMINATORS);
2652 vn_delete ();
2654 if (!bitmap_empty_p (need_eh_cleanup))
2656 tree_purge_all_dead_eh_edges (need_eh_cleanup);
2657 cleanup_tree_cfg ();
2660 BITMAP_FREE (need_eh_cleanup);
2662 /* Wipe out pointers to VALUE_HANDLEs. In the not terribly distant
2663 future we will want them to be persistent though. */
2664 for (i = 0; i < num_ssa_names; i++)
2666 tree name = ssa_name (i);
2668 if (!name)
2669 continue;
2671 if (SSA_NAME_VALUE (name)
2672 && TREE_CODE (SSA_NAME_VALUE (name)) == VALUE_HANDLE)
2673 SSA_NAME_VALUE (name) = NULL;
2675 if (!do_fre && current_loops)
2677 loop_optimizer_finalize (current_loops, dump_file);
2678 current_loops = NULL;
2683 /* Main entry point to the SSA-PRE pass. DO_FRE is true if the caller
2684 only wants to do full redundancy elimination. */
2686 static void
2687 execute_pre (bool do_fre)
2689 init_pre (do_fre);
2691 /* Collect and value number expressions computed in each basic block. */
2692 compute_avail ();
2694 if (dump_file && (dump_flags & TDF_DETAILS))
2696 basic_block bb;
2698 FOR_ALL_BB (bb)
2700 print_value_set (dump_file, EXP_GEN (bb), "exp_gen", bb->index);
2701 bitmap_print_value_set (dump_file, TMP_GEN (bb), "tmp_gen",
2702 bb->index);
2703 bitmap_print_value_set (dump_file, AVAIL_OUT (bb), "avail_out",
2704 bb->index);
2708 /* Insert can get quite slow on an incredibly large number of basic
2709 blocks due to some quadratic behavior. Until this behavior is
2710 fixed, don't run it when he have an incredibly large number of
2711 bb's. If we aren't going to run insert, there is no point in
2712 computing ANTIC, either, even though it's plenty fast. */
2713 if (!do_fre && n_basic_blocks < 4000)
2715 compute_antic ();
2716 insert ();
2719 /* Remove all the redundant expressions. */
2720 eliminate ();
2723 if (dump_file && (dump_flags & TDF_STATS))
2725 fprintf (dump_file, "Insertions: %d\n", pre_stats.insertions);
2726 fprintf (dump_file, "New PHIs: %d\n", pre_stats.phis);
2727 fprintf (dump_file, "Eliminated: %d\n", pre_stats.eliminations);
2728 fprintf (dump_file, "Constified: %d\n", pre_stats.constified);
2731 bsi_commit_edge_inserts ();
2732 if (!do_fre)
2733 remove_dead_inserted_code ();
2734 fini_pre (do_fre);
2739 /* Gate and execute functions for PRE. */
2741 static void
2742 do_pre (void)
2744 execute_pre (false);
2747 static bool
2748 gate_pre (void)
2750 return flag_tree_pre != 0;
2753 struct tree_opt_pass pass_pre =
2755 "pre", /* name */
2756 gate_pre, /* gate */
2757 do_pre, /* execute */
2758 NULL, /* sub */
2759 NULL, /* next */
2760 0, /* static_pass_number */
2761 TV_TREE_PRE, /* tv_id */
2762 PROP_no_crit_edges | PROP_cfg
2763 | PROP_ssa | PROP_alias, /* properties_required */
2764 0, /* properties_provided */
2765 0, /* properties_destroyed */
2766 0, /* todo_flags_start */
2767 TODO_update_ssa | TODO_dump_func | TODO_ggc_collect
2768 | TODO_verify_ssa, /* todo_flags_finish */
2769 0 /* letter */
2773 /* Gate and execute functions for FRE. */
2775 static void
2776 execute_fre (void)
2778 execute_pre (true);
2781 static bool
2782 gate_fre (void)
2784 return flag_tree_fre != 0;
2787 struct tree_opt_pass pass_fre =
2789 "fre", /* name */
2790 gate_fre, /* gate */
2791 execute_fre, /* execute */
2792 NULL, /* sub */
2793 NULL, /* next */
2794 0, /* static_pass_number */
2795 TV_TREE_FRE, /* tv_id */
2796 PROP_cfg | PROP_ssa | PROP_alias, /* properties_required */
2797 0, /* properties_provided */
2798 0, /* properties_destroyed */
2799 0, /* todo_flags_start */
2800 TODO_dump_func | TODO_ggc_collect | TODO_verify_ssa, /* todo_flags_finish */
2801 0 /* letter */
2804 /* Return true if T is a copy statement between two ssa names. */
2806 static bool
2807 is_copy_stmt (tree t)
2809 if (!t || TREE_CODE (t) != MODIFY_EXPR)
2810 return false;
2811 if (TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME
2812 && TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME)
2813 return true;
2814 return false;
2817 /* Starting from START, walk copy statements till we hit a statement with a
2818 VUSE or a non-copy statement. */
2820 static tree
2821 follow_copies_till_vuse (tree start)
2823 if (is_copy_stmt (start) && ZERO_SSA_OPERANDS (start, SSA_OP_VIRTUAL_USES))
2825 tree rhs, defstmt;
2827 rhs = TREE_OPERAND (start, 1);
2828 defstmt = SSA_NAME_DEF_STMT (rhs);
2829 return follow_copies_till_vuse (defstmt);
2831 return start;
2834 /* Gate and execute functions for eliminate useless stores.
2835 The goal here is to recognize the pattern *x = ... *x, and eliminate the
2836 store because the value hasn't changed. Store copy/const prop won't
2837 do this because making *more* loads (IE propagating *x) is not a win, so it
2838 ignores them.
2839 This pass is currently geared completely towards static variable store
2840 elimination. */
2842 static void
2843 do_eustores (void)
2845 basic_block bb;
2846 /* For each basic block
2847 For each statement (STMT) in the block
2848 if STMT is a stores of the pattern *x = y
2849 follow the chain of definitions for y, until we hit a non-copy
2850 statement or a statement with a vuse.
2851 if the statement we arrive at is a vuse of the operand we killed,
2852 accessed through the same memory operation, then we have a
2853 useless store (because it is *x = ... = *x). */
2855 FOR_EACH_BB (bb)
2857 block_stmt_iterator bsi;
2859 for (bsi = bsi_start (bb);
2860 !bsi_end_p (bsi);)
2862 tree stmt = bsi_stmt (bsi);
2863 tree startat;
2864 tree kill;
2865 tree found;
2867 if (NUM_SSA_OPERANDS (stmt, SSA_OP_VMUSTDEF) != 1
2868 || TREE_CODE (stmt) != MODIFY_EXPR
2869 || TREE_CODE (TREE_OPERAND (stmt, 1)) != SSA_NAME)
2871 bsi_next (&bsi);
2872 continue;
2875 kill = MUSTDEF_KILL (MUSTDEF_OPS (stmt));
2876 startat = TREE_OPERAND (stmt, 1);
2877 startat = SSA_NAME_DEF_STMT (startat);
2878 found = follow_copies_till_vuse (startat);
2880 if (found && TREE_CODE (found) == MODIFY_EXPR)
2883 /* We want exactly one virtual use, and it should match up with
2884 the use being killed. */
2886 if (NUM_SSA_OPERANDS (found, SSA_OP_VUSE) != 1
2887 || VUSE_OP (VUSE_OPS (found)) != kill
2888 || !DECL_P (TREE_OPERAND (stmt, 0))
2889 || !operand_equal_p (TREE_OPERAND (found, 1),
2890 TREE_OPERAND (stmt, 0), 0))
2892 bsi_next (&bsi);
2893 continue;
2896 if (dump_file)
2898 fprintf (dump_file, "Eliminating useless store ");
2899 print_generic_stmt (dump_file, stmt, 0);
2901 mark_sym_for_renaming (TREE_OPERAND (stmt, 0));
2902 bsi_remove (&bsi);
2904 else
2906 bsi_next (&bsi);
2907 continue;
2913 static bool
2914 gate_eustores(void)
2916 return flag_unit_at_a_time != 0;
2919 struct tree_opt_pass pass_eliminate_useless_stores =
2921 "eustores", /* name */
2922 gate_eustores, /* gate */
2923 do_eustores, /* execute */
2924 NULL, /* sub */
2925 NULL, /* next */
2926 0, /* static_pass_number */
2927 0, /* tv_id */
2928 PROP_cfg | PROP_ssa | PROP_alias, /* properties_required */
2929 0, /* properties_provided */
2930 0, /* properties_destroyed */
2931 0, /* todo_flags_start */
2932 TODO_update_ssa | TODO_dump_func
2933 | TODO_ggc_collect | TODO_verify_ssa, /* todo_flags_finish */
2934 0 /* letter */