[official-gcc.git] / gcc / tree-ssa-pre.c
blob bf3e5249b77bbe354940b0dad0a68a42bedcf079
1 /* SSA-PRE for trees.
2 Copyright (C) 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dan@dberlin.org> and Steven Bosscher
4 <stevenb@suse.de>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
11 any later version.
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to
20 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
21 Boston, MA 02110-1301, USA. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "ggc.h"
28 #include "tree.h"
29 #include "basic-block.h"
30 #include "diagnostic.h"
31 #include "tree-inline.h"
32 #include "tree-flow.h"
33 #include "tree-gimple.h"
34 #include "tree-dump.h"
35 #include "timevar.h"
36 #include "fibheap.h"
37 #include "hashtab.h"
38 #include "tree-iterator.h"
39 #include "real.h"
40 #include "alloc-pool.h"
41 #include "tree-pass.h"
42 #include "flags.h"
43 #include "bitmap.h"
44 #include "langhooks.h"
45 #include "cfgloop.h"
47 /* TODO:
49 1. Avail sets can be shared by making an avail_find_leader that
50 walks up the dominator tree and looks in those avail sets.
51 This might affect code optimality, it's unclear right now.
52 2. Strength reduction can be performed by anticipating expressions
53 we can repair later on.
54 3. We can do back-substitution or smarter value numbering to catch
55 commutative expressions split up over multiple statements.
56 */
58 /* For ease of terminology, "expression node" in the below refers to
59 every expression node but MODIFY_EXPR, because MODIFY_EXPR's represent
60 the actual statement containing the expressions we care about, and
61 we cache the value number by putting it in the expression. */
63 /* Basic algorithm
65 First we walk the statements to generate the AVAIL sets, the
66 EXP_GEN sets, and the tmp_gen sets. EXP_GEN sets represent the
67 generation of values/expressions by a given block. We use them
68 when computing the ANTIC sets. The AVAIL sets consist of
69 SSA_NAME's that represent values, so we know what values are
70 available in what blocks. AVAIL is a forward dataflow problem. In
71 SSA, values are never killed, so we don't need a kill set, or a
72 fixpoint iteration, in order to calculate the AVAIL sets. In
73 traditional parlance, AVAIL sets tell us the downsafety of the
74 expressions/values.
76 Next, we generate the ANTIC sets. These sets represent the
77 anticipatable expressions. ANTIC is a backwards dataflow
78 problem. An expression is anticipatable in a given block if it could
79 be generated in that block. This means that if we had to perform
80 an insertion in that block, of the value of that expression, we
81 could. Calculating the ANTIC sets requires phi translation of
82 expressions, because the flow goes backwards through phis. We must
83 iterate to a fixpoint of the ANTIC sets, because we have a kill
84 set. Even in SSA form, values are not live over the entire
85 function, only from their definition point onwards. So we have to
86 remove values from the ANTIC set once we go past the definition
87 point of the leaders that make them up.
88 compute_antic/compute_antic_aux performs this computation.
90 Third, we perform insertions to make partially redundant
91 expressions fully redundant.
93 An expression is partially redundant (excluding partial
94 anticipation) if:
96 1. It is AVAIL in some, but not all, of the predecessors of a
97 given block.
98 2. It is ANTIC in all the predecessors.
100 In order to make it fully redundant, we insert the expression into
101 the predecessors where it is not available, but is ANTIC.
102 insert/insert_aux performs this insertion.
104 Fourth, we eliminate fully redundant expressions.
105 This is a simple statement walk that replaces redundant
106 calculations with the now available values. */
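/* A small end-to-end illustration (a sketch only, not taken from any
   particular testcase).  Given

       if (cond)
         x_1 = a_2 + b_3;
       y_4 = a_2 + b_3;

   the expression a_2 + b_3 is available in one predecessor of the
   join block but not the other, and anticipatable in both.  Insertion
   therefore adds a computation of a_2 + b_3 in the predecessor where
   it was missing and merges the two results with a PHI node;
   elimination then rewrites y_4 = a_2 + b_3 to use the PHI result,
   removing the redundant computation on the path through the then
   block.  */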
108 /* Representations of value numbers:
110 Value numbers are represented using the "value handle" approach.
111 This means that each SSA_NAME (and for other reasons to be
112 disclosed in a moment, expression nodes) has a value handle that
113 can be retrieved through get_value_handle. This value handle *is*
114 the value number of the SSA_NAME. You can pointer compare the
115 value handles for equivalence purposes.
117 For debugging reasons, the value handle is internally more than
118 just a number; it is a VAR_DECL named "value.x", where x is a
119 unique number for each value number in use. This allows
120 expressions with SSA_NAMES replaced by value handles to still be
121 pretty printed in a sane way. They simply print as "value.3 *
122 value.5", etc.
124 Expression nodes have value handles associated with them as a
125 cache. Otherwise, we'd have to look them up again in the hash
126 table. This makes a significant difference (a factor of two or more) on
127 some test cases. They can be thrown away after the pass is
128 finished. */
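/* For instance (SSA names and handle numbers invented for the
   example), after value numbering

       c_10 = a_5 + b_7;
       d_11 = a_5 + b_7;

   both right-hand sides get the same value handle, say value.3, and
   c_10 and d_11 therefore carry value.3 as well.  A pointer
   comparison of the handles is enough to see that d_11 recomputes a
   value already computed into c_10.  */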
130 /* Representation of expressions on value numbers:
132 In some portions of this code, you will notice we allocate "fake"
133 analogues to the expression we are value numbering, and replace the
134 operands with the values of the expression. Since we work on
135 values, and not just names, we canonicalize expressions to value
136 expressions for use in the ANTIC sets, the EXP_GEN set, etc.
138 This is theoretically unnecessary; it just saves a bunch of
139 repeated get_value_handle and find_leader calls in the remainder of
140 the code, trading off temporary memory usage for speed. The tree
141 nodes aren't actually creating more garbage, since they are
142 allocated in special pools which are thrown away at the end of
143 this pass.
145 All of this also means that if you print the EXP_GEN or ANTIC sets,
146 you will see "value.5 + value.7" in the set, instead of "a_55 +
147 b_66" or something. The only thing that actually cares about
148 seeing the value leaders is phi translation, and it needs to be
149 able to find the leader for a value in an arbitrary block, so this
150 "value expression" form is perfect for it (otherwise you'd do
151 get_value_handle->find_leader->translate->get_value_handle->find_leader).*/
154 /* Representation of sets:
156 There are currently two types of sets used, hopefully to be unified soon.
157 The AVAIL sets do not need to be sorted in any particular order,
158 and thus, are simply represented as two bitmaps, one that keeps
159 track of values present in the set, and one that keeps track of
160 expressions present in the set.
162 The other sets are represented as singly linked lists kept in topological
163 order, with an optional supporting bitmap of values present in the
164 set. The sets represent values, and the elements can be values or
165 expressions. The elements can appear in different sets, but each
166 element can only appear once in each set.
168 Since each node in the set represents a value, we also want to be
169 able to map (expression, set) pairs to something that tells us
170 whether the value is present in a set. We use a per-set bitmap for
171 that. The value handles also point to a linked list of the
172 expressions they represent via a tree annotation. This is mainly
173 useful for debugging, since we don't do identity lookups. */
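/* Rough picture of the two representations (version and handle
   numbers invented for the example).  A bitmap set holding the
   SSA_NAMEs a_1 and d_4, whose values are value.6 and value.9, is
   simply

     expressions = { 1, 4 }     (SSA_NAME versions)
     values      = { 6, 9 }     (VALUE_HANDLE_IDs)

   while a linked value set holding a_1 and the value expression
   value.6 + value.9 is

     head -> a_1 -> value.6 + value.9 -> NULL

   with a backing values bitmap only if the set is indexed.  The
   bitmap form only ever holds SSA_NAMEs; the list form can hold
   arbitrary value expressions and is kept in topological order.  */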
176 static bool in_fre = false;
178 /* A value set element. Basically a singly linked list of
179 expressions/values. */
180 typedef struct value_set_node
182 /* An expression. */
183 tree expr;
185 /* A pointer to the next element of the value set. */
186 struct value_set_node *next;
187 } *value_set_node_t;
190 /* A value set. This is a singly linked list of value_set_node
191 elements with a possible bitmap that tells us what values exist in
192 the set. This set must be kept in topologically sorted order. */
193 typedef struct value_set
195 /* The head of the list. Used for iterating over the list in
196 order. */
197 value_set_node_t head;
199 /* The tail of the list. Used for tail insertions, which are
200 necessary to keep the set in topologically sorted order because
201 of how the set is built. */
202 value_set_node_t tail;
204 /* The length of the list. */
205 size_t length;
207 /* True if the set is indexed, which means it contains a backing
208 bitmap for quick determination of whether certain values exist in the
209 set. */
210 bool indexed;
212 /* The bitmap of values that exist in the set. May be NULL in an
213 empty or non-indexed set. */
214 bitmap values;
216 } *value_set_t;
219 /* An unordered bitmap set. One bitmap tracks values, the other,
220 expressions. */
221 typedef struct bitmap_set
223 bitmap expressions;
224 bitmap values;
225 } *bitmap_set_t;
227 /* Sets that we need to keep track of. */
228 typedef struct bb_value_sets
230 /* The EXP_GEN set, which represents expressions/values generated in
231 a basic block. */
232 value_set_t exp_gen;
234 /* The PHI_GEN set, which represents PHI results generated in a
235 basic block. */
236 bitmap_set_t phi_gen;
238 /* The TMP_GEN set, which represents results/temporaries generated
239 in a basic block. IE the LHS of an expression. */
240 bitmap_set_t tmp_gen;
242 /* The AVAIL_OUT set, which represents which values are available in
243 a given basic block. */
244 bitmap_set_t avail_out;
246 /* The ANTIC_IN set, which represents which values are anticipatable
247 in a given basic block. */
248 value_set_t antic_in;
250 /* The NEW_SETS set, which is used during insertion to augment the
251 AVAIL_OUT set of blocks with the new insertions performed during
252 the current iteration. */
253 bitmap_set_t new_sets;
255 /* The RVUSE sets, which are used during ANTIC computation to ensure
256 that we don't mark loads ANTIC once they have died. */
257 bitmap rvuse_in;
258 bitmap rvuse_out;
259 bitmap rvuse_gen;
260 bitmap rvuse_kill;
261 } *bb_value_sets_t;
263 #define EXP_GEN(BB) ((bb_value_sets_t) ((BB)->aux))->exp_gen
264 #define PHI_GEN(BB) ((bb_value_sets_t) ((BB)->aux))->phi_gen
265 #define TMP_GEN(BB) ((bb_value_sets_t) ((BB)->aux))->tmp_gen
266 #define AVAIL_OUT(BB) ((bb_value_sets_t) ((BB)->aux))->avail_out
267 #define ANTIC_IN(BB) ((bb_value_sets_t) ((BB)->aux))->antic_in
268 #define RVUSE_IN(BB) ((bb_value_sets_t) ((BB)->aux))->rvuse_in
269 #define RVUSE_GEN(BB) ((bb_value_sets_t) ((BB)->aux))->rvuse_gen
270 #define RVUSE_KILL(BB) ((bb_value_sets_t) ((BB)->aux))->rvuse_kill
271 #define RVUSE_OUT(BB) ((bb_value_sets_t) ((BB)->aux))->rvuse_out
272 #define NEW_SETS(BB) ((bb_value_sets_t) ((BB)->aux))->new_sets
274 /* This structure is used to keep track of statistics on what
275 optimization PRE was able to perform. */
276 static struct
278 /* The number of RHS computations eliminated by PRE. */
279 int eliminations;
281 /* The number of new expressions/temporaries generated by PRE. */
282 int insertions;
284 /* The number of new PHI nodes added by PRE. */
285 int phis;
287 /* The number of values found constant. */
288 int constified;
290 } pre_stats;
293 static tree bitmap_find_leader (bitmap_set_t, tree);
294 static tree find_leader (value_set_t, tree);
295 static void value_insert_into_set (value_set_t, tree);
296 static void bitmap_value_insert_into_set (bitmap_set_t, tree);
297 static void bitmap_value_replace_in_set (bitmap_set_t, tree);
298 static void insert_into_set (value_set_t, tree);
299 static void bitmap_set_copy (bitmap_set_t, bitmap_set_t);
300 static bool bitmap_set_contains_value (bitmap_set_t, tree);
301 static bitmap_set_t bitmap_set_new (void);
302 static value_set_t set_new (bool);
303 static bool is_undefined_value (tree);
304 static tree create_expression_by_pieces (basic_block, tree, tree);
307 /* We can add and remove elements and entries to and from sets
308 and hash tables, so we use alloc pools for them. */
310 static alloc_pool value_set_pool;
311 static alloc_pool bitmap_set_pool;
312 static alloc_pool value_set_node_pool;
313 static alloc_pool binary_node_pool;
314 static alloc_pool unary_node_pool;
315 static alloc_pool reference_node_pool;
316 static alloc_pool comparison_node_pool;
317 static alloc_pool expression_node_pool;
318 static alloc_pool list_node_pool;
319 static alloc_pool modify_expr_node_pool;
320 static bitmap_obstack grand_bitmap_obstack;
322 /* To avoid adding 300 temporary variables when we only need one, we
323 only create one temporary variable, on demand, and build ssa names
324 off that. We do have to change the variable if the types don't
325 match the current variable's type. */
326 static tree pretemp;
327 static tree storetemp;
328 static tree mergephitemp;
329 static tree prephitemp;
331 /* Set of blocks with statements that have had their EH information
332 cleaned up. */
333 static bitmap need_eh_cleanup;
335 /* The phi_translate_table caches phi translations for a given
336 expression and predecessor. */
338 static htab_t phi_translate_table;
340 /* A three tuple {e, pred, v} used to cache phi translations in the
341 phi_translate_table. */
343 typedef struct expr_pred_trans_d
345 /* The expression. */
346 tree e;
348 /* The predecessor block along which we translated the expression. */
349 basic_block pred;
351 /* vuses associated with the expression. */
352 VEC (tree, gc) *vuses;
354 /* The value that resulted from the translation. */
355 tree v;
358 /* The hashcode for the expression, pred pair. This is cached for
359 speed reasons. */
360 hashval_t hashcode;
361 } *expr_pred_trans_t;
363 /* Return the hash value for a phi translation table entry. */
365 static hashval_t
366 expr_pred_trans_hash (const void *p)
368 const expr_pred_trans_t ve = (expr_pred_trans_t) p;
369 return ve->hashcode;
372 /* Return true if two phi translation table entries are the same.
373 P1 and P2 should point to the expr_pred_trans_t's to be compared.*/
375 static int
376 expr_pred_trans_eq (const void *p1, const void *p2)
378 const expr_pred_trans_t ve1 = (expr_pred_trans_t) p1;
379 const expr_pred_trans_t ve2 = (expr_pred_trans_t) p2;
380 basic_block b1 = ve1->pred;
381 basic_block b2 = ve2->pred;
382 int i;
383 tree vuse1;
385 /* If they are not translations for the same basic block, they can't
386 be equal. */
387 if (b1 != b2)
388 return false;
391 /* If they are for the same basic block, determine if the
392 expressions are equal. */
393 if (!expressions_equal_p (ve1->e, ve2->e))
394 return false;
396 /* Make sure the vuses are equivalent. */
397 if (ve1->vuses == ve2->vuses)
398 return true;
400 if (VEC_length (tree, ve1->vuses) != VEC_length (tree, ve2->vuses))
401 return false;
403 for (i = 0; VEC_iterate (tree, ve1->vuses, i, vuse1); i++)
405 if (VEC_index (tree, ve2->vuses, i) != vuse1)
406 return false;
409 return true;
412 /* Search in the phi translation table for the translation of
413 expression E in basic block PRED with vuses VUSES.
414 Return the translated value, if found, NULL otherwise. */
416 static inline tree
417 phi_trans_lookup (tree e, basic_block pred, VEC (tree, gc) *vuses)
419 void **slot;
420 struct expr_pred_trans_d ept;
422 ept.e = e;
423 ept.pred = pred;
424 ept.vuses = vuses;
425 ept.hashcode = vn_compute (e, (unsigned long) pred);
426 slot = htab_find_slot_with_hash (phi_translate_table, &ept, ept.hashcode,
427 NO_INSERT);
428 if (!slot)
429 return NULL;
430 else
431 return ((expr_pred_trans_t) *slot)->v;
435 /* Add the tuple mapping from {expression E, basic block PRED, vuses VUSES} to
436 value V, to the phi translation table. */
438 static inline void
439 phi_trans_add (tree e, tree v, basic_block pred, VEC (tree, gc) *vuses)
441 void **slot;
442 expr_pred_trans_t new_pair = XNEW (struct expr_pred_trans_d);
443 new_pair->e = e;
444 new_pair->pred = pred;
445 new_pair->vuses = vuses;
446 new_pair->v = v;
447 new_pair->hashcode = vn_compute (e, (unsigned long) pred);
448 slot = htab_find_slot_with_hash (phi_translate_table, new_pair,
449 new_pair->hashcode, INSERT);
450 if (*slot)
451 free (*slot);
452 *slot = (void *) new_pair;
456 /* Add expression E to the expression set of value V. */
458 void
459 add_to_value (tree v, tree e)
461 /* Constants have no expression sets. */
462 if (is_gimple_min_invariant (v))
463 return;
465 if (VALUE_HANDLE_EXPR_SET (v) == NULL)
466 VALUE_HANDLE_EXPR_SET (v) = set_new (false);
468 insert_into_set (VALUE_HANDLE_EXPR_SET (v), e);
472 /* Return true if value V exists in the bitmap for SET. */
474 static inline bool
475 value_exists_in_set_bitmap (value_set_t set, tree v)
477 if (!set->values)
478 return false;
480 return bitmap_bit_p (set->values, VALUE_HANDLE_ID (v));
484 /* Remove value V from the bitmap for SET. */
486 static void
487 value_remove_from_set_bitmap (value_set_t set, tree v)
489 gcc_assert (set->indexed);
491 if (!set->values)
492 return;
494 bitmap_clear_bit (set->values, VALUE_HANDLE_ID (v));
498 /* Insert the value number V into the bitmap of values existing in
499 SET. */
501 static inline void
502 value_insert_into_set_bitmap (value_set_t set, tree v)
504 gcc_assert (set->indexed);
506 if (set->values == NULL)
507 set->values = BITMAP_ALLOC (&grand_bitmap_obstack);
509 bitmap_set_bit (set->values, VALUE_HANDLE_ID (v));
513 /* Create a new bitmap set and return it. */
515 static bitmap_set_t
516 bitmap_set_new (void)
518 bitmap_set_t ret = (bitmap_set_t) pool_alloc (bitmap_set_pool);
519 ret->expressions = BITMAP_ALLOC (&grand_bitmap_obstack);
520 ret->values = BITMAP_ALLOC (&grand_bitmap_obstack);
521 return ret;
524 /* Create a new set. */
526 static value_set_t
527 set_new (bool indexed)
529 value_set_t ret;
530 ret = (value_set_t) pool_alloc (value_set_pool);
531 ret->head = ret->tail = NULL;
532 ret->length = 0;
533 ret->indexed = indexed;
534 ret->values = NULL;
535 return ret;
538 /* Insert an expression EXPR into a bitmapped set. */
540 static void
541 bitmap_insert_into_set (bitmap_set_t set, tree expr)
543 tree val;
544 /* XXX: For now, we only let SSA_NAMES into the bitmap sets. */
545 gcc_assert (TREE_CODE (expr) == SSA_NAME);
546 val = get_value_handle (expr);
548 gcc_assert (val);
549 if (!is_gimple_min_invariant (val))
551 bitmap_set_bit (set->values, VALUE_HANDLE_ID (val));
552 bitmap_set_bit (set->expressions, SSA_NAME_VERSION (expr));
556 /* Insert EXPR into SET. */
558 static void
559 insert_into_set (value_set_t set, tree expr)
561 value_set_node_t newnode = (value_set_node_t) pool_alloc (value_set_node_pool);
562 tree val = get_value_handle (expr);
563 gcc_assert (val);
565 if (is_gimple_min_invariant (val))
566 return;
568 /* For indexed sets, insert the value into the set value bitmap.
569 For all sets, add it to the linked list and increment the list
570 length. */
571 if (set->indexed)
572 value_insert_into_set_bitmap (set, val);
574 newnode->next = NULL;
575 newnode->expr = expr;
576 set->length ++;
577 if (set->head == NULL)
579 set->head = set->tail = newnode;
581 else
583 set->tail->next = newnode;
584 set->tail = newnode;
588 /* Copy a bitmapped set ORIG into bitmapped set DEST. */
590 static void
591 bitmap_set_copy (bitmap_set_t dest, bitmap_set_t orig)
593 bitmap_copy (dest->expressions, orig->expressions);
594 bitmap_copy (dest->values, orig->values);
597 /* Perform bitmapped set operation DEST &= ORIG. */
599 static void
600 bitmap_set_and (bitmap_set_t dest, bitmap_set_t orig)
602 bitmap_iterator bi;
603 unsigned int i;
604 bitmap temp = BITMAP_ALLOC (&grand_bitmap_obstack);
606 bitmap_and_into (dest->values, orig->values);
607 bitmap_copy (temp, dest->expressions);
608 EXECUTE_IF_SET_IN_BITMAP (temp, 0, i, bi)
610 tree name = ssa_name (i);
611 tree val = get_value_handle (name);
612 if (!bitmap_bit_p (dest->values, VALUE_HANDLE_ID (val)))
613 bitmap_clear_bit (dest->expressions, i);
618 /* Perform bitmapped value set operation DEST = DEST & ~ORIG. */
620 static void
621 bitmap_set_and_compl (bitmap_set_t dest, bitmap_set_t orig)
623 bitmap_iterator bi;
624 unsigned int i;
625 bitmap temp = BITMAP_ALLOC (&grand_bitmap_obstack);
627 bitmap_and_compl_into (dest->values, orig->values);
628 bitmap_copy (temp, dest->expressions);
629 EXECUTE_IF_SET_IN_BITMAP (temp, 0, i, bi)
631 tree name = ssa_name (i);
632 tree val = get_value_handle (name);
633 if (!bitmap_bit_p (dest->values, VALUE_HANDLE_ID (val)))
634 bitmap_clear_bit (dest->expressions, i);
638 /* Return true if the bitmap set SET is empty. */
640 static bool
641 bitmap_set_empty_p (bitmap_set_t set)
643 return bitmap_empty_p (set->values);
646 /* Copy the set ORIG to the set DEST. */
648 static void
649 set_copy (value_set_t dest, value_set_t orig)
651 value_set_node_t node;
653 if (!orig || !orig->head)
654 return;
656 for (node = orig->head;
657 node;
658 node = node->next)
660 insert_into_set (dest, node->expr);
664 /* Remove EXPR from SET. */
666 static void
667 set_remove (value_set_t set, tree expr)
669 value_set_node_t node, prev;
671 /* Remove the value of EXPR from the bitmap, decrement the set
672 length, and remove it from the actual linked list. */
673 value_remove_from_set_bitmap (set, get_value_handle (expr));
674 set->length--;
675 prev = NULL;
676 for (node = set->head;
677 node != NULL;
678 prev = node, node = node->next)
680 if (node->expr == expr)
682 if (prev == NULL)
683 set->head = node->next;
684 else
685 prev->next = node->next;
687 if (node == set->tail)
688 set->tail = prev;
689 pool_free (value_set_node_pool, node);
690 return;
695 /* Return true if SET contains the value VAL. */
697 static bool
698 set_contains_value (value_set_t set, tree val)
700 /* All constants are in every set. */
701 if (is_gimple_min_invariant (val))
702 return true;
704 if (set->length == 0)
705 return false;
707 return value_exists_in_set_bitmap (set, val);
710 /* Return true if bitmapped set SET contains the expression EXPR. */
711 static bool
712 bitmap_set_contains (bitmap_set_t set, tree expr)
714 /* All constants are in every set. */
715 if (is_gimple_min_invariant (get_value_handle (expr)))
716 return true;
718 /* XXX: Bitmapped sets only contain SSA_NAME's for now. */
719 if (TREE_CODE (expr) != SSA_NAME)
720 return false;
721 return bitmap_bit_p (set->expressions, SSA_NAME_VERSION (expr));
725 /* Return true if bitmapped set SET contains the value VAL. */
727 static bool
728 bitmap_set_contains_value (bitmap_set_t set, tree val)
730 if (is_gimple_min_invariant (val))
731 return true;
732 return bitmap_bit_p (set->values, VALUE_HANDLE_ID (val));
735 /* Replace an instance of value LOOKFOR with expression EXPR in SET. */
737 static void
738 bitmap_set_replace_value (bitmap_set_t set, tree lookfor, tree expr)
740 value_set_t exprset;
741 value_set_node_t node;
742 if (is_gimple_min_invariant (lookfor))
743 return;
744 if (!bitmap_set_contains_value (set, lookfor))
745 return;
747 /* The number of expressions having a given value is usually
748 significantly less than the total number of expressions in SET.
749 Thus, rather than check, for each expression in SET, whether it
750 has the value LOOKFOR, we walk the reverse mapping that tells us
751 what expressions have a given value, and see if any of those
752 expressions are in our set. For large testcases, this is about
753 5-10x faster than walking the bitmap. If this is somehow a
754 significant loss for some cases, we can choose which set to walk
755 based on the set size. */
756 exprset = VALUE_HANDLE_EXPR_SET (lookfor);
757 for (node = exprset->head; node; node = node->next)
759 if (TREE_CODE (node->expr) == SSA_NAME)
761 if (bitmap_bit_p (set->expressions, SSA_NAME_VERSION (node->expr)))
763 bitmap_clear_bit (set->expressions, SSA_NAME_VERSION (node->expr));
764 bitmap_set_bit (set->expressions, SSA_NAME_VERSION (expr));
765 return;
771 /* Subtract bitmapped set B from value set A, and return the new set. */
773 static value_set_t
774 bitmap_set_subtract_from_value_set (value_set_t a, bitmap_set_t b,
775 bool indexed)
777 value_set_t ret = set_new (indexed);
778 value_set_node_t node;
779 for (node = a->head;
780 node;
781 node = node->next)
783 if (!bitmap_set_contains (b, node->expr))
784 insert_into_set (ret, node->expr);
786 return ret;
789 /* Return true if two sets are equal. */
791 static bool
792 set_equal (value_set_t a, value_set_t b)
794 value_set_node_t node;
796 if (a->length != b->length)
797 return false;
798 for (node = a->head;
799 node;
800 node = node->next)
802 if (!set_contains_value (b, get_value_handle (node->expr)))
803 return false;
805 return true;
808 /* Replace an instance of EXPR's VALUE with EXPR in SET if it exists,
809 and add it otherwise. */
811 static void
812 bitmap_value_replace_in_set (bitmap_set_t set, tree expr)
814 tree val = get_value_handle (expr);
815 if (bitmap_set_contains_value (set, val))
816 bitmap_set_replace_value (set, val, expr);
817 else
818 bitmap_insert_into_set (set, expr);
821 /* Insert EXPR into SET if EXPR's value is not already present in
822 SET. */
824 static void
825 bitmap_value_insert_into_set (bitmap_set_t set, tree expr)
827 tree val = get_value_handle (expr);
829 if (is_gimple_min_invariant (val))
830 return;
832 if (!bitmap_set_contains_value (set, val))
833 bitmap_insert_into_set (set, expr);
836 /* Insert the value for EXPR into SET, if it doesn't exist already. */
838 static void
839 value_insert_into_set (value_set_t set, tree expr)
841 tree val = get_value_handle (expr);
843 /* Constant and invariant values exist everywhere, and thus,
844 actually keeping them in the sets is pointless. */
845 if (is_gimple_min_invariant (val))
846 return;
848 if (!set_contains_value (set, val))
849 insert_into_set (set, expr);
853 /* Print out SET to OUTFILE. */
855 static void
856 bitmap_print_value_set (FILE *outfile, bitmap_set_t set,
857 const char *setname, int blockindex)
859 fprintf (outfile, "%s[%d] := { ", setname, blockindex);
860 if (set)
862 bool first = true;
863 unsigned i;
864 bitmap_iterator bi;
866 EXECUTE_IF_SET_IN_BITMAP (set->expressions, 0, i, bi)
868 if (!first)
869 fprintf (outfile, ", ");
870 first = false;
871 print_generic_expr (outfile, ssa_name (i), 0);
873 fprintf (outfile, " (");
874 print_generic_expr (outfile, get_value_handle (ssa_name (i)), 0);
875 fprintf (outfile, ") ");
878 fprintf (outfile, " }\n");
880 /* Print out the value_set SET to OUTFILE. */
882 static void
883 print_value_set (FILE *outfile, value_set_t set,
884 const char *setname, int blockindex)
886 value_set_node_t node;
887 fprintf (outfile, "%s[%d] := { ", setname, blockindex);
888 if (set)
890 for (node = set->head;
891 node;
892 node = node->next)
894 print_generic_expr (outfile, node->expr, 0);
896 fprintf (outfile, " (");
897 print_generic_expr (outfile, get_value_handle (node->expr), 0);
898 fprintf (outfile, ") ");
900 if (node->next)
901 fprintf (outfile, ", ");
905 fprintf (outfile, " }\n");
908 /* Print out the expressions that have VAL to OUTFILE. */
910 void
911 print_value_expressions (FILE *outfile, tree val)
913 if (VALUE_HANDLE_EXPR_SET (val))
915 char s[32];
916 sprintf (s, "VH.%04d", VALUE_HANDLE_ID (val));
917 print_value_set (outfile, VALUE_HANDLE_EXPR_SET (val), s, 0);
922 void
923 debug_value_expressions (tree val)
925 print_value_expressions (stderr, val);
929 void debug_value_set (value_set_t, const char *, int);
931 void
932 debug_value_set (value_set_t set, const char *setname, int blockindex)
934 print_value_set (stderr, set, setname, blockindex);
937 /* Return the folded version of T if T, when folded, is a gimple
938 min_invariant. Otherwise, return T. */
940 static tree
941 fully_constant_expression (tree t)
943 tree folded;
944 folded = fold (t);
945 if (folded && is_gimple_min_invariant (folded))
946 return folded;
947 return t;
950 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
951 For example, this can copy a list made of TREE_LIST nodes.
952 Allocates the nodes in list_node_pool. */
954 static tree
955 pool_copy_list (tree list)
957 tree head;
958 tree prev, next;
960 if (list == 0)
961 return 0;
962 head = (tree) pool_alloc (list_node_pool);
964 memcpy (head, list, tree_size (list));
965 prev = head;
967 next = TREE_CHAIN (list);
968 while (next)
970 TREE_CHAIN (prev) = (tree) pool_alloc (list_node_pool);
971 memcpy (TREE_CHAIN (prev), next, tree_size (next));
972 prev = TREE_CHAIN (prev);
973 next = TREE_CHAIN (next);
975 return head;
978 /* Translate the vuses in the VUSES vector backwards through phi
979 nodes, so that they have the value they would have in BLOCK. */
981 static VEC(tree, gc) *
982 translate_vuses_through_block (VEC (tree, gc) *vuses, basic_block block)
984 tree oldvuse;
985 VEC(tree, gc) *result = NULL;
986 int i;
988 for (i = 0; VEC_iterate (tree, vuses, i, oldvuse); i++)
990 tree phi = SSA_NAME_DEF_STMT (oldvuse);
991 if (TREE_CODE (phi) == PHI_NODE)
993 edge e = find_edge (block, bb_for_stmt (phi));
994 if (e)
996 tree def = PHI_ARG_DEF (phi, e->dest_idx);
997 if (def != oldvuse)
999 if (!result)
1000 result = VEC_copy (tree, gc, vuses);
1001 VEC_replace (tree, result, i, def);
1006 if (result)
1008 sort_vuses (result);
1009 return result;
1011 return vuses;
1014 /* Translate EXPR using phis in PHIBLOCK, so that it has the values of
1015 the phis in PRED. Return NULL if we can't find a leader for each
1016 part of the translated expression. */
1018 static tree
1019 phi_translate (tree expr, value_set_t set, basic_block pred,
1020 basic_block phiblock)
1022 tree phitrans = NULL;
1023 tree oldexpr = expr;
1024 if (expr == NULL)
1025 return NULL;
1027 if (is_gimple_min_invariant (expr))
1028 return expr;
1030 /* Phi translations of a given expression don't change. */
1031 if (EXPR_P (expr))
1033 tree vh;
1035 vh = get_value_handle (expr);
1036 if (vh && TREE_CODE (vh) == VALUE_HANDLE)
1037 phitrans = phi_trans_lookup (expr, pred, VALUE_HANDLE_VUSES (vh));
1038 else
1039 phitrans = phi_trans_lookup (expr, pred, NULL);
1041 else
1042 phitrans = phi_trans_lookup (expr, pred, NULL);
1044 if (phitrans)
1045 return phitrans;
1047 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
1049 case tcc_expression:
1051 if (TREE_CODE (expr) != CALL_EXPR)
1052 return NULL;
1053 else
1055 tree oldop0 = TREE_OPERAND (expr, 0);
1056 tree oldarglist = TREE_OPERAND (expr, 1);
1057 tree oldop2 = TREE_OPERAND (expr, 2);
1058 tree newop0;
1059 tree newarglist;
1060 tree newop2 = NULL;
1061 tree oldwalker;
1062 tree newwalker;
1063 tree newexpr;
1064 tree vh = get_value_handle (expr);
1065 bool listchanged = false;
1066 VEC (tree, gc) *vuses = VALUE_HANDLE_VUSES (vh);
1067 VEC (tree, gc) *tvuses;
1069 /* Call expressions are kind of weird because they have an
1070 argument list. We don't want to value number the list
1071 as one value number, because that doesn't make much
1072 sense, and just breaks the support functions we call,
1073 which expect TREE_OPERAND (call_expr, 1) to be a
1074 TREE_LIST. */
1076 newop0 = phi_translate (find_leader (set, oldop0),
1077 set, pred, phiblock);
1078 if (newop0 == NULL)
1079 return NULL;
1080 if (oldop2)
1082 newop2 = phi_translate (find_leader (set, oldop2),
1083 set, pred, phiblock);
1084 if (newop2 == NULL)
1085 return NULL;
1088 /* phi translate the argument list piece by piece.
1090 We could actually build the list piece by piece here,
1091 but it's likely to not be worth the memory we will save,
1092 unless you have millions of call arguments. */
1094 newarglist = pool_copy_list (oldarglist);
1095 for (oldwalker = oldarglist, newwalker = newarglist;
1096 oldwalker && newwalker;
1097 oldwalker = TREE_CHAIN (oldwalker),
1098 newwalker = TREE_CHAIN (newwalker))
1101 tree oldval = TREE_VALUE (oldwalker);
1102 tree newval;
1103 if (oldval)
1105 newval = phi_translate (find_leader (set, oldval),
1106 set, pred, phiblock);
1107 if (newval == NULL)
1108 return NULL;
1109 if (newval != oldval)
1111 listchanged = true;
1112 TREE_VALUE (newwalker) = get_value_handle (newval);
1116 if (listchanged)
1117 vn_lookup_or_add (newarglist, NULL);
1119 tvuses = translate_vuses_through_block (vuses, pred);
1121 if (listchanged || (newop0 != oldop0) || (oldop2 != newop2)
1122 || vuses != tvuses)
1124 newexpr = (tree) pool_alloc (expression_node_pool);
1125 memcpy (newexpr, expr, tree_size (expr));
1126 TREE_OPERAND (newexpr, 0) = newop0 == oldop0 ? oldop0 : get_value_handle (newop0);
1127 TREE_OPERAND (newexpr, 1) = listchanged ? newarglist : oldarglist;
1128 TREE_OPERAND (newexpr, 2) = newop2 == oldop2 ? oldop2 : get_value_handle (newop2);
1129 create_tree_ann (newexpr);
1130 vn_lookup_or_add_with_vuses (newexpr, tvuses);
1131 expr = newexpr;
1132 phi_trans_add (oldexpr, newexpr, pred, tvuses);
1136 return expr;
1138 case tcc_declaration:
1140 VEC (tree, gc) * oldvuses = NULL;
1141 VEC (tree, gc) * newvuses = NULL;
1143 oldvuses = VALUE_HANDLE_VUSES (get_value_handle (expr));
1144 if (oldvuses)
1145 newvuses = translate_vuses_through_block (oldvuses, pred);
1147 if (oldvuses != newvuses)
1148 vn_lookup_or_add_with_vuses (expr, newvuses);
1150 phi_trans_add (oldexpr, expr, pred, newvuses);
1152 return expr;
1154 case tcc_reference:
1156 tree oldop1 = TREE_OPERAND (expr, 0);
1157 tree newop1;
1158 tree newexpr;
1159 VEC (tree, gc) * oldvuses = NULL;
1160 VEC (tree, gc) * newvuses = NULL;
1162 if (TREE_CODE (expr) != INDIRECT_REF)
1163 return NULL;
1165 newop1 = phi_translate (find_leader (set, oldop1),
1166 set, pred, phiblock);
1167 if (newop1 == NULL)
1168 return NULL;
1170 oldvuses = VALUE_HANDLE_VUSES (get_value_handle (expr));
1171 if (oldvuses)
1172 newvuses = translate_vuses_through_block (oldvuses, pred);
1174 if (newop1 != oldop1 || newvuses != oldvuses)
1176 tree t;
1178 newexpr = pool_alloc (reference_node_pool);
1179 memcpy (newexpr, expr, tree_size (expr));
1180 TREE_OPERAND (newexpr, 0) = get_value_handle (newop1);
1182 t = fully_constant_expression (newexpr);
1184 if (t != newexpr)
1186 pool_free (reference_node_pool, newexpr);
1187 newexpr = t;
1189 else
1191 create_tree_ann (newexpr);
1192 vn_lookup_or_add_with_vuses (newexpr, newvuses);
1194 expr = newexpr;
1195 phi_trans_add (oldexpr, newexpr, pred, newvuses);
1198 return expr;
1199 break;
1201 case tcc_binary:
1202 case tcc_comparison:
1204 tree oldop1 = TREE_OPERAND (expr, 0);
1205 tree oldop2 = TREE_OPERAND (expr, 1);
1206 tree newop1;
1207 tree newop2;
1208 tree newexpr;
1210 newop1 = phi_translate (find_leader (set, oldop1),
1211 set, pred, phiblock);
1212 if (newop1 == NULL)
1213 return NULL;
1214 newop2 = phi_translate (find_leader (set, oldop2),
1215 set, pred, phiblock);
1216 if (newop2 == NULL)
1217 return NULL;
1218 if (newop1 != oldop1 || newop2 != oldop2)
1220 tree t;
1221 newexpr = (tree) pool_alloc (binary_node_pool);
1222 memcpy (newexpr, expr, tree_size (expr));
1223 TREE_OPERAND (newexpr, 0) = newop1 == oldop1 ? oldop1 : get_value_handle (newop1);
1224 TREE_OPERAND (newexpr, 1) = newop2 == oldop2 ? oldop2 : get_value_handle (newop2);
1225 t = fully_constant_expression (newexpr);
1226 if (t != newexpr)
1228 pool_free (binary_node_pool, newexpr);
1229 newexpr = t;
1231 else
1233 create_tree_ann (newexpr);
1234 vn_lookup_or_add (newexpr, NULL);
1236 expr = newexpr;
1237 phi_trans_add (oldexpr, newexpr, pred, NULL);
1240 return expr;
1242 case tcc_unary:
1244 tree oldop1 = TREE_OPERAND (expr, 0);
1245 tree newop1;
1246 tree newexpr;
1248 newop1 = phi_translate (find_leader (set, oldop1),
1249 set, pred, phiblock);
1250 if (newop1 == NULL)
1251 return NULL;
1252 if (newop1 != oldop1)
1254 tree t;
1255 newexpr = (tree) pool_alloc (unary_node_pool);
1256 memcpy (newexpr, expr, tree_size (expr));
1257 TREE_OPERAND (newexpr, 0) = get_value_handle (newop1);
1258 t = fully_constant_expression (newexpr);
1259 if (t != newexpr)
1261 pool_free (unary_node_pool, newexpr);
1262 newexpr = t;
1264 else
1266 create_tree_ann (newexpr);
1267 vn_lookup_or_add (newexpr, NULL);
1269 expr = newexpr;
1270 phi_trans_add (oldexpr, newexpr, pred, NULL);
1273 return expr;
1275 case tcc_exceptional:
1277 tree phi = NULL;
1278 edge e;
1279 gcc_assert (TREE_CODE (expr) == SSA_NAME);
1280 if (TREE_CODE (SSA_NAME_DEF_STMT (expr)) == PHI_NODE)
1281 phi = SSA_NAME_DEF_STMT (expr);
1282 else
1283 return expr;
1285 e = find_edge (pred, bb_for_stmt (phi));
1286 if (e)
1288 if (is_undefined_value (PHI_ARG_DEF (phi, e->dest_idx)))
1289 return NULL;
1290 vn_lookup_or_add (PHI_ARG_DEF (phi, e->dest_idx), NULL);
1291 return PHI_ARG_DEF (phi, e->dest_idx);
1294 return expr;
1296 default:
1297 gcc_unreachable ();
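/* A small phi translation example (block numbers, SSA names and
   handle numbers invented).  Suppose block 5 starts with

       a_1 = PHI <a_2 (3), a_3 (4)>

   and ANTIC_IN (5) contains the value expression value.8 + value.9,
   where value.8 is the value of a_1 and value.9 the value of some
   name not defined by a PHI in block 5.  Translating the expression
   towards predecessor block 3 replaces value.8 with the value of a_2,
   giving, say, value.11 + value.9: the expression that would have to
   be inserted at the end of block 3 to make the original one
   available in block 5.  */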
1301 /* For each expression in SET, translate the value handles through phi nodes
1302 in PHIBLOCK using edge PHIBLOCK->PRED, and store the resulting
1303 expressions in DEST. */
1305 static void
1306 phi_translate_set (value_set_t dest, value_set_t set, basic_block pred,
1307 basic_block phiblock)
1309 value_set_node_t node;
1310 for (node = set->head;
1311 node;
1312 node = node->next)
1314 tree translated;
1316 translated = phi_translate (node->expr, set, pred, phiblock);
1318 /* Don't add constants or empty translations to the cache, since
1319 we won't look them up that way, or use the result, anyway. */
1320 if (translated && !is_gimple_min_invariant (translated))
1322 tree vh = get_value_handle (translated);
1323 VEC (tree, gc) *vuses;
1325 /* The value handle itself may also be an invariant, in
1326 which case, it has no vuses. */
1327 vuses = !is_gimple_min_invariant (vh)
1328 ? VALUE_HANDLE_VUSES (vh) : NULL;
1329 phi_trans_add (node->expr, translated, pred, vuses);
1332 if (translated != NULL)
1333 value_insert_into_set (dest, translated);
1337 /* Find the leader for a value (i.e., the name representing that
1338 value) in a given set, and return it. Return NULL if no leader is
1339 found. */
1341 static tree
1342 bitmap_find_leader (bitmap_set_t set, tree val)
1344 if (val == NULL)
1345 return NULL;
1347 if (is_gimple_min_invariant (val))
1348 return val;
1349 if (bitmap_set_contains_value (set, val))
1351 /* Rather than walk the entire bitmap of expressions, and see
1352 whether any of them has the value we are looking for, we look
1353 at the reverse mapping, which tells us the set of expressions
1354 that have a given value (IE value->expressions with that
1355 value) and see if any of those expressions are in our set.
1356 The number of expressions per value is usually significantly
1357 less than the number of expressions in the set. In fact, for
1358 large testcases, doing it this way is roughly 5-10x faster
1359 than walking the bitmap.
1360 If this is somehow a significant loss for some cases, we can
1361 choose which set to walk based on which set is smaller. */
1362 value_set_t exprset;
1363 value_set_node_t node;
1364 exprset = VALUE_HANDLE_EXPR_SET (val);
1365 for (node = exprset->head; node; node = node->next)
1367 if (TREE_CODE (node->expr) == SSA_NAME)
1369 if (bitmap_bit_p (set->expressions,
1370 SSA_NAME_VERSION (node->expr)))
1371 return node->expr;
1375 return NULL;
1379 /* Find the leader for a value (i.e., the name representing that
1380 value) in a given set, and return it. Return NULL if no leader is
1381 found. */
1383 static tree
1384 find_leader (value_set_t set, tree val)
1386 value_set_node_t node;
1388 if (val == NULL)
1389 return NULL;
1391 /* Constants represent themselves. */
1392 if (is_gimple_min_invariant (val))
1393 return val;
1395 if (set->length == 0)
1396 return NULL;
1398 if (value_exists_in_set_bitmap (set, val))
1400 for (node = set->head;
1401 node;
1402 node = node->next)
1404 if (get_value_handle (node->expr) == val)
1405 return node->expr;
1409 return NULL;
1412 /* Given the vuse representative map, MAP, and an SSA version number,
1413 ID, return the bitmap of names ID represents, or NULL, if none
1414 exists. */
1416 static bitmap
1417 get_representative (bitmap *map, int id)
1419 if (map[id] != NULL)
1420 return map[id];
1421 return NULL;
1424 /* A vuse is anticipatable at the top of block x, from the bottom of the
1425 block, if it reaches the top of the block, and is not killed in the
1426 block. In effect, we are trying to see if the vuse is transparent
1427 backwards in the block. */
1429 static bool
1430 vuses_dies_in_block_x (VEC (tree, gc) *vuses, basic_block block)
1432 int i;
1433 tree vuse;
1435 for (i = 0; VEC_iterate (tree, vuses, i, vuse); i++)
1437 /* Any places where this is too conservative, are places
1438 where we created a new version and shouldn't have. */
1440 if (!bitmap_bit_p (RVUSE_IN (block), SSA_NAME_VERSION (vuse))
1441 || bitmap_bit_p (RVUSE_KILL (block), SSA_NAME_VERSION
1442 (vuse)))
1443 return true;
1445 return false;
1448 /* Determine if the expression EXPR is valid in SET. This means that
1449 we have a leader for each part of the expression (if it consists of
1450 values), or the expression is an SSA_NAME.
1452 NB: We should never run into a case where we have SSA_NAME +
1453 SSA_NAME or SSA_NAME + value. The sets valid_in_set is called on,
1454 the ANTIC sets, will only ever have SSA_NAME's or value expressions
1455 (IE VALUE1 + VALUE2, *VALUE1, VALUE1 < VALUE2) */
1457 static bool
1458 valid_in_set (value_set_t set, tree expr, basic_block block)
1460 tree vh = get_value_handle (expr);
1461 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
1463 case tcc_binary:
1464 case tcc_comparison:
1466 tree op1 = TREE_OPERAND (expr, 0);
1467 tree op2 = TREE_OPERAND (expr, 1);
1468 return set_contains_value (set, op1) && set_contains_value (set, op2);
1471 case tcc_unary:
1473 tree op1 = TREE_OPERAND (expr, 0);
1474 return set_contains_value (set, op1);
1477 case tcc_expression:
1479 if (TREE_CODE (expr) == CALL_EXPR)
1481 tree op0 = TREE_OPERAND (expr, 0);
1482 tree arglist = TREE_OPERAND (expr, 1);
1483 tree op2 = TREE_OPERAND (expr, 2);
1485 /* Check the non-list operands first. */
1486 if (!set_contains_value (set, op0)
1487 || (op2 && !set_contains_value (set, op2)))
1488 return false;
1490 /* Now check the operands. */
1491 for (; arglist; arglist = TREE_CHAIN (arglist))
1493 if (!set_contains_value (set, TREE_VALUE (arglist)))
1494 return false;
1496 return !vuses_dies_in_block_x (VALUE_HANDLE_VUSES (vh), block);
1498 return false;
1501 case tcc_reference:
1503 if (TREE_CODE (expr) == INDIRECT_REF)
1505 tree op0 = TREE_OPERAND (expr, 0);
1506 if (is_gimple_min_invariant (op0)
1507 || TREE_CODE (op0) == VALUE_HANDLE)
1509 bool retval = set_contains_value (set, op0);
1510 if (retval)
1511 return !vuses_dies_in_block_x (VALUE_HANDLE_VUSES (vh),
1512 block);
1513 return false;
1517 return false;
1519 case tcc_exceptional:
1520 gcc_assert (TREE_CODE (expr) == SSA_NAME);
1521 return true;
1523 case tcc_declaration:
1524 return !vuses_dies_in_block_x (VALUE_HANDLE_VUSES (vh), block);
1526 default:
1527 /* No other cases should be encountered. */
1528 gcc_unreachable ();
1532 /* Clean the set of expressions that are no longer valid in SET. This
1533 means expressions that are made up of values we have no leaders for
1534 in SET. */
1536 static void
1537 clean (value_set_t set, basic_block block)
1539 value_set_node_t node;
1540 value_set_node_t next;
1541 node = set->head;
1542 while (node)
1544 next = node->next;
1545 if (!valid_in_set (set, node->expr, block))
1546 set_remove (set, node->expr);
1547 node = next;
1551 static sbitmap has_abnormal_preds;
1553 /* Compute the ANTIC set for BLOCK.
1555 If succs(BLOCK) > 1 then
1556 ANTIC_OUT[BLOCK] = intersection of ANTIC_IN[b] for all succ(BLOCK)
1557 else if succs(BLOCK) == 1 then
1558 ANTIC_OUT[BLOCK] = phi_translate (ANTIC_IN[succ(BLOCK)])
1560 ANTIC_IN[BLOCK] = clean(ANTIC_OUT[BLOCK] U EXP_GEN[BLOCK] - TMP_GEN[BLOCK])
1562 XXX: It would be nice to either write a set_clear, and use it for
1563 ANTIC_OUT, or to mark the antic_out set as deleted at the end
1564 of this routine, so that the pool can hand the same memory back out
1565 again for the next ANTIC_OUT. */
1567 static bool
1568 compute_antic_aux (basic_block block, bool block_has_abnormal_pred_edge)
1570 basic_block son;
1571 bool changed = false;
1572 value_set_t S, old, ANTIC_OUT;
1573 value_set_node_t node;
1575 ANTIC_OUT = S = NULL;
1577 /* If any edges from predecessors are abnormal, antic_in is empty,
1578 so do nothing. */
1579 if (block_has_abnormal_pred_edge)
1580 goto maybe_dump_sets;
1582 old = set_new (false);
1583 set_copy (old, ANTIC_IN (block));
1584 ANTIC_OUT = set_new (true);
1586 /* If the block has no successors, ANTIC_OUT is empty. */
1587 if (EDGE_COUNT (block->succs) == 0)
1589 /* If we have one successor, we could have some phi nodes to
1590 translate through. */
1591 else if (single_succ_p (block))
1593 phi_translate_set (ANTIC_OUT, ANTIC_IN (single_succ (block)),
1594 block, single_succ (block));
1596 /* If we have multiple successors, we take the intersection of all of
1597 them. */
1598 else
1600 VEC(basic_block, heap) * worklist;
1601 edge e;
1602 size_t i;
1603 basic_block bprime, first;
1604 edge_iterator ei;
1606 worklist = VEC_alloc (basic_block, heap, EDGE_COUNT (block->succs));
1607 FOR_EACH_EDGE (e, ei, block->succs)
1608 VEC_quick_push (basic_block, worklist, e->dest);
1609 first = VEC_index (basic_block, worklist, 0);
1610 set_copy (ANTIC_OUT, ANTIC_IN (first));
1612 for (i = 1; VEC_iterate (basic_block, worklist, i, bprime); i++)
1614 node = ANTIC_OUT->head;
1615 while (node)
1617 tree val;
1618 value_set_node_t next = node->next;
1620 val = get_value_handle (node->expr);
1621 if (!set_contains_value (ANTIC_IN (bprime), val))
1622 set_remove (ANTIC_OUT, node->expr);
1623 node = next;
1626 VEC_free (basic_block, heap, worklist);
1629 /* Generate ANTIC_OUT - TMP_GEN. */
1630 S = bitmap_set_subtract_from_value_set (ANTIC_OUT, TMP_GEN (block), false);
1632 /* Start ANTIC_IN with EXP_GEN - TMP_GEN */
1633 ANTIC_IN (block) = bitmap_set_subtract_from_value_set (EXP_GEN (block),
1634 TMP_GEN (block),
1635 true);
1637 /* Then union in the ANTIC_OUT - TMP_GEN values,
1638 to get ANTIC_OUT U EXP_GEN - TMP_GEN */
1639 for (node = S->head; node; node = node->next)
1640 value_insert_into_set (ANTIC_IN (block), node->expr);
1642 clean (ANTIC_IN (block), block);
1643 if (!set_equal (old, ANTIC_IN (block)))
1644 changed = true;
1646 maybe_dump_sets:
1647 if (dump_file && (dump_flags & TDF_DETAILS))
1649 if (ANTIC_OUT)
1650 print_value_set (dump_file, ANTIC_OUT, "ANTIC_OUT", block->index);
1651 print_value_set (dump_file, ANTIC_IN (block), "ANTIC_IN", block->index);
1652 if (S)
1653 print_value_set (dump_file, S, "S", block->index);
1656 for (son = first_dom_son (CDI_POST_DOMINATORS, block);
1657 son;
1658 son = next_dom_son (CDI_POST_DOMINATORS, son))
1660 changed |= compute_antic_aux (son,
1661 TEST_BIT (has_abnormal_preds, son->index));
1663 return changed;
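/* Worked instance of the equations in the comment before
   compute_antic_aux (sets invented for the example).  For a block B
   with a single successor S, no PHI nodes in S, an empty TMP_GEN (B),
   and

     ANTIC_IN (S) = { value.3 + value.4 }
     EXP_GEN (B)  = { value.1 * value.2 }

   we get

     ANTIC_OUT (B) = phi_translate (ANTIC_IN (S)) = { value.3 + value.4 }
     ANTIC_IN (B)  = clean ({ value.3 + value.4, value.1 * value.2 })

   where clean () drops any expression for which one of its operand
   values has no leader in the set.  */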
1666 /* Compute ANTIC sets. */
1668 static void
1669 compute_antic (void)
1671 bool changed = true;
1672 int num_iterations = 0;
1673 basic_block block;
1675 /* If any predecessor edges are abnormal, we punt, so antic_in is empty.
1676 We pre-build the map of blocks with incoming abnormal edges here. */
1677 has_abnormal_preds = sbitmap_alloc (last_basic_block);
1678 sbitmap_zero (has_abnormal_preds);
1679 FOR_EACH_BB (block)
1681 edge_iterator ei;
1682 edge e;
1684 FOR_EACH_EDGE (e, ei, block->preds)
1685 if (e->flags & EDGE_ABNORMAL)
1687 SET_BIT (has_abnormal_preds, block->index);
1688 break;
1691 /* While we are here, give empty ANTIC_IN sets to each block. */
1692 ANTIC_IN (block) = set_new (true);
1694 /* At the exit block we anticipate nothing. */
1695 ANTIC_IN (EXIT_BLOCK_PTR) = set_new (true);
1697 while (changed)
1699 num_iterations++;
1700 changed = false;
1701 changed = compute_antic_aux (EXIT_BLOCK_PTR, false);
1704 sbitmap_free (has_abnormal_preds);
1706 if (dump_file && (dump_flags & TDF_STATS))
1707 fprintf (dump_file, "compute_antic required %d iterations\n", num_iterations);
1710 /* Print the names represented by the bitmap NAMES, to the file OUT. */
1711 static void
1712 dump_bitmap_of_names (FILE *out, bitmap names)
1714 bitmap_iterator bi;
1715 unsigned int i;
1717 fprintf (out, " { ");
1718 EXECUTE_IF_SET_IN_BITMAP (names, 0, i, bi)
1720 print_generic_expr (out, ssa_name (i), 0);
1721 fprintf (out, " ");
1723 fprintf (out, "}\n");
1726 /* Compute a set of representative vuse versions for each phi. This
1727 is so we can compute conservative kill sets in terms of all vuses
1728 that are killed, instead of continually walking chains.
1730 We also have to be able to kill all names associated with a phi when
1731 the phi dies in order to ensure we don't generate overlapping
1732 live ranges, which are not allowed in virtual SSA. */
1734 static bitmap *vuse_names;
1735 static void
1736 compute_vuse_representatives (void)
1738 tree phi;
1739 basic_block bb;
1740 VEC (tree, heap) *phis = NULL;
1741 bool changed = true;
1742 size_t i;
1744 FOR_EACH_BB (bb)
1746 for (phi = phi_nodes (bb);
1747 phi;
1748 phi = PHI_CHAIN (phi))
1749 if (!is_gimple_reg (PHI_RESULT (phi)))
1750 VEC_safe_push (tree, heap, phis, phi);
1753 while (changed)
1755 changed = false;
1757 for (i = 0; VEC_iterate (tree, phis, i, phi); i++)
1759 size_t ver = SSA_NAME_VERSION (PHI_RESULT (phi));
1760 use_operand_p usep;
1761 ssa_op_iter iter;
1763 if (vuse_names[ver] == NULL)
1765 vuse_names[ver] = BITMAP_ALLOC (&grand_bitmap_obstack);
1766 bitmap_set_bit (vuse_names[ver], ver);
1768 FOR_EACH_PHI_ARG (usep, phi, iter, SSA_OP_ALL_USES)
1770 tree use = USE_FROM_PTR (usep);
1771 bitmap usebitmap = get_representative (vuse_names,
1772 SSA_NAME_VERSION (use));
1773 if (usebitmap != NULL)
1775 changed |= bitmap_ior_into (vuse_names[ver],
1776 usebitmap);
1778 else
1780 changed |= !bitmap_bit_p (vuse_names[ver],
1781 SSA_NAME_VERSION (use));
1782 if (changed)
1783 bitmap_set_bit (vuse_names[ver],
1784 SSA_NAME_VERSION (use));
1790 if (dump_file && (dump_flags & TDF_DETAILS))
1791 for (i = 0; VEC_iterate (tree, phis, i, phi); i++)
1793 bitmap reps = get_representative (vuse_names,
1794 SSA_NAME_VERSION (PHI_RESULT (phi)));
1795 if (reps)
1797 print_generic_expr (dump_file, PHI_RESULT (phi), 0);
1798 fprintf (dump_file, " represents ");
1799 dump_bitmap_of_names (dump_file, reps);
1802 VEC_free (tree, heap, phis);
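/* Example of the representative map (version numbers invented).  For
   a virtual PHI

       VAR_7 = PHI <VAR_3 (2), VAR_5 (4)>

   vuse_names[7] starts as { 7 } and, after iterating, contains
   { 7, 3, 5 } plus anything the arguments themselves represent, so a
   kill of the PHI result conservatively kills every version it may
   stand for.  */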
1805 /* Compute reaching vuses. This is a small bit of iterative dataflow
1806 to determine what virtual uses reach what blocks. Because we can't
1807 generate overlapping virtual uses, and virtual uses *do* actually
1808 die, this ends up being faster in most cases than continually
1809 walking the virtual use/def chains to determine whether we are
1810 inside a block where a given virtual is still available to be
1811 used. */
1813 static void
1814 compute_rvuse (void)
1817 size_t i;
1818 tree phi;
1819 basic_block bb;
1820 int *postorder;
1821 bool changed = true;
1823 compute_vuse_representatives ();
1825 FOR_ALL_BB (bb)
1827 RVUSE_IN (bb) = BITMAP_ALLOC (&grand_bitmap_obstack);
1828 RVUSE_GEN (bb) = BITMAP_ALLOC (&grand_bitmap_obstack);
1829 RVUSE_KILL (bb) = BITMAP_ALLOC (&grand_bitmap_obstack);
1830 RVUSE_OUT (bb) = BITMAP_ALLOC (&grand_bitmap_obstack);
1834 /* Mark live on entry */
1835 for (i = 0; i < num_ssa_names; i++)
1837 tree name = ssa_name (i);
1838 if (name && !is_gimple_reg (name)
1839 && IS_EMPTY_STMT (SSA_NAME_DEF_STMT (name)))
1840 bitmap_set_bit (RVUSE_OUT (ENTRY_BLOCK_PTR),
1841 SSA_NAME_VERSION (name));
1844 /* Compute local sets for reaching vuses.
1845 GEN(block) = generated in block and not locally killed.
1846 KILL(block) = set of vuses killed in block.
1849 FOR_EACH_BB (bb)
1851 block_stmt_iterator bsi;
1852 ssa_op_iter iter;
1853 def_operand_p defp;
1854 use_operand_p usep;
1857 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
1859 tree stmt = bsi_stmt (bsi);
1860 FOR_EACH_SSA_USE_OPERAND (usep, stmt, iter, SSA_OP_VIRTUAL_KILLS
1861 | SSA_OP_VMAYUSE)
1863 tree use = USE_FROM_PTR (usep);
1864 bitmap repbit = get_representative (vuse_names,
1865 SSA_NAME_VERSION (use));
1866 if (repbit != NULL)
1868 bitmap_and_compl_into (RVUSE_GEN (bb), repbit);
1869 bitmap_ior_into (RVUSE_KILL (bb), repbit);
1871 else
1873 bitmap_set_bit (RVUSE_KILL (bb), SSA_NAME_VERSION (use));
1874 bitmap_clear_bit (RVUSE_GEN (bb), SSA_NAME_VERSION (use));
1877 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_VIRTUAL_DEFS)
1879 tree def = DEF_FROM_PTR (defp);
1880 bitmap_set_bit (RVUSE_GEN (bb), SSA_NAME_VERSION (def));
1885 FOR_EACH_BB (bb)
1887 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
1889 if (!is_gimple_reg (PHI_RESULT (phi)))
1891 edge e;
1892 edge_iterator ei;
1894 tree def = PHI_RESULT (phi);
1895 /* In reality, the PHI result is generated at the end of
1896 each predecessor block. This will make the value
1897 RVUSE_IN for the bb containing the PHI, which is
1898 correct. */
1899 FOR_EACH_EDGE (e, ei, bb->preds)
1900 bitmap_set_bit (RVUSE_GEN (e->src), SSA_NAME_VERSION (def));
1905 /* Solve reaching vuses.
1907 RVUSE_IN[BB] = Union of RVUSE_OUT of predecessors.
1908 RVUSE_OUT[BB] = RVUSE_GEN[BB] U (RVUSE_IN[BB] - RVUSE_KILL[BB])
1910 postorder = xmalloc (sizeof (int) * (n_basic_blocks - NUM_FIXED_BLOCKS));
1911 pre_and_rev_post_order_compute (NULL, postorder, false);
1913 changed = true;
1914 while (changed)
1916 int j;
1917 changed = false;
1918 for (j = 0; j < n_basic_blocks - NUM_FIXED_BLOCKS; j++)
1920 edge e;
1921 edge_iterator ei;
1922 bb = BASIC_BLOCK (postorder[j]);
1924 FOR_EACH_EDGE (e, ei, bb->preds)
1925 bitmap_ior_into (RVUSE_IN (bb), RVUSE_OUT (e->src));
1927 changed |= bitmap_ior_and_compl (RVUSE_OUT (bb),
1928 RVUSE_GEN (bb),
1929 RVUSE_IN (bb),
1930 RVUSE_KILL (bb));
1933 free (postorder);
1935 if (dump_file && (dump_flags & TDF_DETAILS))
1937 FOR_ALL_BB (bb)
1939 fprintf (dump_file, "RVUSE_IN (%d) =", bb->index);
1940 dump_bitmap_of_names (dump_file, RVUSE_IN (bb));
1942 fprintf (dump_file, "RVUSE_KILL (%d) =", bb->index);
1943 dump_bitmap_of_names (dump_file, RVUSE_KILL (bb));
1945 fprintf (dump_file, "RVUSE_GEN (%d) =", bb->index);
1946 dump_bitmap_of_names (dump_file, RVUSE_GEN (bb));
1948 fprintf (dump_file, "RVUSE_OUT (%d) =", bb->index);
1949 dump_bitmap_of_names (dump_file, RVUSE_OUT (bb));
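/* Small example of the reaching-vuses solution (version numbers
   invented).  Say a virtual operand with SSA version 5 is defined in
   block 2 and killed by a store in block 4 that creates version 6,
   with straight-line control flow 2 -> 3 -> 4 -> 5:

     RVUSE_GEN (2) = { 5 }    RVUSE_OUT (2)  = { 5 }
     RVUSE_IN (3)  = { 5 }    RVUSE_OUT (3)  = { 5 }
     RVUSE_IN (4)  = { 5 }    RVUSE_KILL (4) = { 5 }
     RVUSE_GEN (4) = { 6 }    RVUSE_OUT (4)  = { 6 }
     RVUSE_IN (5)  = { 6 }

   so a load whose vuse is version 5 is not transparent through block
   4, and vuses_dies_in_block_x () will report it as dying there.  */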
1954 /* Return true if we can value number the call in STMT. This is true
1955 if we have a pure or constant call. */
1957 static bool
1958 can_value_number_call (tree stmt)
1960 tree call = get_call_expr_in (stmt);
1962 if (call_expr_flags (call) & (ECF_PURE | ECF_CONST))
1963 return true;
1964 return false;
1967 /* Return true if OP is a tree which we can perform value numbering
1968 on. */
1970 static bool
1971 can_value_number_operation (tree op)
1973 return UNARY_CLASS_P (op)
1974 || BINARY_CLASS_P (op)
1975 || COMPARISON_CLASS_P (op)
1976 || REFERENCE_CLASS_P (op)
1977 || (TREE_CODE (op) == CALL_EXPR
1978 && can_value_number_call (op));
1982 /* Return true if OP is a tree which we can perform PRE on.
1983 This may not match the operations we can value number, but in
1984 a perfect world would. */
1986 static bool
1987 can_PRE_operation (tree op)
1989 return UNARY_CLASS_P (op)
1990 || BINARY_CLASS_P (op)
1991 || COMPARISON_CLASS_P (op)
1992 || TREE_CODE (op) == INDIRECT_REF
1993 || TREE_CODE (op) == CALL_EXPR;
1997 /* Inserted expressions are placed onto this worklist, which is used
1998 for performing quick dead code elimination of insertions we made
1999 that didn't turn out to be necessary. */
2000 static VEC(tree,heap) *inserted_exprs;
2002 /* Pool allocated fake store expressions are placed onto this
2003 worklist, which, after performing dead code elimination, is walked
2004 to see which expressions need to be put into GC'able memory. */
2005 static VEC(tree, heap) *need_creation;
2008 /* Find a leader for an expression, or generate one using
2009 create_expression_by_pieces if it's ANTIC but
2010 complex.
2011 BLOCK is the basic_block we are looking for leaders in.
2012 EXPR is the expression to find a leader or generate for.
2013 STMTS is the statement list to put the inserted expressions on.
2014 Returns the SSA_NAME of the LHS of the generated expression or the
2015 leader. */
2017 static tree
2018 find_or_generate_expression (basic_block block, tree expr, tree stmts)
2020 tree genop = bitmap_find_leader (AVAIL_OUT (block), expr);
2022 /* If it's still NULL, it must be a complex expression, so generate
2023 it recursively. */
2024 if (genop == NULL)
2026 genop = VALUE_HANDLE_EXPR_SET (expr)->head->expr;
2028 gcc_assert (can_PRE_operation (genop));
2029 genop = create_expression_by_pieces (block, genop, stmts);
2031 return genop;
2034 #define NECESSARY(stmt) stmt->common.asm_written_flag
2035 /* Create an expression in pieces, so that we can handle very complex
2036 expressions that may be ANTIC, but not necessarily GIMPLE.
2037 BLOCK is the basic block the expression will be inserted into,
2038 EXPR is the expression to insert (in value form)
2039 STMTS is a statement list to append the necessary insertions into.
2041 This function will die if we hit some value that shouldn't be
2042 ANTIC but is (i.e. there is no leader for it or for its components).
2043 This function may also generate expressions that are themselves
2044 partially or fully redundant. Those that are will be either made
2045 fully redundant during the next iteration of insert (for partially
2046 redundant ones), or eliminated by eliminate (for fully redundant
2047 ones). */
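/* As a rough illustration (the SSA names and value handles below are
   invented for the example): suppose EXPR is the value expression
   VH.1 * VH.2 and carries the value handle VH.5, VH.1 has the leader
   a_3 in AVAIL_OUT (BLOCK), and VH.2 has no leader but can itself be
   generated as VH.3 + VH.4 with leaders b_6 and c_7.  The recursion
   would then append something like

       pretmp.8_9 = b_6 + c_7;
       pretmp.10_11 = a_3 * pretmp.8_9;

   to STMTS and return pretmp.10_11, which is given the value handle
   VH.5 and made available in NEW_SETS and AVAIL_OUT of BLOCK.  */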
2049 static tree
2050 create_expression_by_pieces (basic_block block, tree expr, tree stmts)
2052 tree temp, name;
2053 tree folded, forced_stmts, newexpr;
2054 tree v;
2055 tree_stmt_iterator tsi;
2057 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
2059 case tcc_expression:
2061 tree op0, op2;
2062 tree arglist;
2063 tree genop0, genop2;
2064 tree genarglist;
2065 tree walker, genwalker;
2067 gcc_assert (TREE_CODE (expr) == CALL_EXPR);
2068 genop2 = NULL;
2070 op0 = TREE_OPERAND (expr, 0);
2071 arglist = TREE_OPERAND (expr, 1);
2072 op2 = TREE_OPERAND (expr, 2);
2074 genop0 = find_or_generate_expression (block, op0, stmts);
2075 genarglist = copy_list (arglist);
2076 for (walker = arglist, genwalker = genarglist;
2077 genwalker && walker;
2078 genwalker = TREE_CHAIN (genwalker), walker = TREE_CHAIN (walker))
2080 TREE_VALUE (genwalker)
2081 = find_or_generate_expression (block, TREE_VALUE (walker),
2082 stmts);
2085 if (op2)
2086 genop2 = find_or_generate_expression (block, op2, stmts);
2087 folded = fold_build3 (TREE_CODE (expr), TREE_TYPE (expr),
2088 genop0, genarglist, genop2);
2089 break;
2093 break;
2094 case tcc_reference:
2095 gcc_assert (TREE_CODE (expr) == INDIRECT_REF);
2097 tree op1 = TREE_OPERAND (expr, 0);
2098 tree genop1 = find_or_generate_expression (block, op1, stmts);
2100 folded = fold_build1 (TREE_CODE (expr), TREE_TYPE (expr),
2101 genop1);
2102 break;
2105 case tcc_binary:
2106 case tcc_comparison:
2108 tree op1 = TREE_OPERAND (expr, 0);
2109 tree op2 = TREE_OPERAND (expr, 1);
2110 tree genop1 = find_or_generate_expression (block, op1, stmts);
2111 tree genop2 = find_or_generate_expression (block, op2, stmts);
2112 folded = fold_build2 (TREE_CODE (expr), TREE_TYPE (expr),
2113 genop1, genop2);
2114 break;
2117 case tcc_unary:
2119 tree op1 = TREE_OPERAND (expr, 0);
2120 tree genop1 = find_or_generate_expression (block, op1, stmts);
2121 folded = fold_build1 (TREE_CODE (expr), TREE_TYPE (expr),
2122 genop1);
2123 break;
2126 default:
2127 gcc_unreachable ();
2130 /* Force the generated expression to be a sequence of GIMPLE
2131 statements.
2132 We have to call unshare_expr because force_gimple_operand may
2133 modify the tree we pass to it. */
2134 newexpr = force_gimple_operand (unshare_expr (folded), &forced_stmts,
2135 false, NULL);
2137 /* If the forcing produced any intermediate expressions, add them to
2138 the value sets and chain them into the instruction stream. */
2139 if (forced_stmts)
2141 tsi = tsi_start (forced_stmts);
2142 for (; !tsi_end_p (tsi); tsi_next (&tsi))
2144 tree stmt = tsi_stmt (tsi);
2145 tree forcedname = TREE_OPERAND (stmt, 0);
2146 tree forcedexpr = TREE_OPERAND (stmt, 1);
2147 tree val = vn_lookup_or_add (forcedexpr, NULL);
2149 VEC_safe_push (tree, heap, inserted_exprs, stmt);
2150 vn_add (forcedname, val);
2151 bitmap_value_replace_in_set (NEW_SETS (block), forcedname);
2152 bitmap_value_replace_in_set (AVAIL_OUT (block), forcedname);
2153 mark_new_vars_to_rename (stmt);
2155 tsi = tsi_last (stmts);
2156 tsi_link_after (&tsi, forced_stmts, TSI_CONTINUE_LINKING);
2159 /* Build and insert the assignment of the end result to the temporary
2160 that we will return. */
2161 if (!pretemp || TREE_TYPE (expr) != TREE_TYPE (pretemp))
2163 pretemp = create_tmp_var (TREE_TYPE (expr), "pretmp");
2164 get_var_ann (pretemp);
2167 temp = pretemp;
2168 add_referenced_tmp_var (temp);
2170 if (TREE_CODE (TREE_TYPE (expr)) == COMPLEX_TYPE)
2171 DECL_COMPLEX_GIMPLE_REG_P (temp) = 1;
2173 newexpr = build2 (MODIFY_EXPR, TREE_TYPE (expr), temp, newexpr);
2174 name = make_ssa_name (temp, newexpr);
2175 TREE_OPERAND (newexpr, 0) = name;
2176 NECESSARY (newexpr) = 0;
2178 tsi = tsi_last (stmts);
2179 tsi_link_after (&tsi, newexpr, TSI_CONTINUE_LINKING);
2180 VEC_safe_push (tree, heap, inserted_exprs, newexpr);
2181 mark_new_vars_to_rename (newexpr);
2183 /* Add a value handle to the temporary.
2184 The value may already exist in either NEW_SETS, or AVAIL_OUT, because
2185 we are creating the expression by pieces, and this particular piece of
2186 the expression may already be represented there. There is no harm in
2187 replacing it here. */
2188 v = get_value_handle (expr);
2189 vn_add (name, v);
2190 bitmap_value_replace_in_set (NEW_SETS (block), name);
2191 bitmap_value_replace_in_set (AVAIL_OUT (block), name);
2193 pre_stats.insertions++;
2194 if (dump_file && (dump_flags & TDF_DETAILS))
2196 fprintf (dump_file, "Inserted ");
2197 print_generic_expr (dump_file, newexpr, 0);
2198 fprintf (dump_file, " in predecessor %d\n", block->index);
2201 return name;
2204 /* Insert the to-be-made-available values of NODE for each
2205 predecessor (stored in AVAIL) into the predecessors of BLOCK, and
2206 merge the result with a phi node whose result is given the same
2207 value handle as NODE. Return true if we have inserted new stuff. */
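/* For illustration (block numbers and SSA names are invented): suppose
   NODE's expression is a_1 + b_2, its value has the leader x_9 in the
   predecessor coming from block 3, but no leader in the predecessor
   coming from block 4.  We then generate, on the edge from block 4,

       pretmp.5_6 = a_1 + b_2;

   and create in BLOCK

       prephitmp.7_8 = PHI <x_9(3), pretmp.5_6(4)>

   giving the PHI result the same value handle as NODE, so the value
   becomes fully available in BLOCK.  */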
2209 static bool
2210 insert_into_preds_of_block (basic_block block, value_set_node_t node,
2211 tree *avail)
2213 tree val = get_value_handle (node->expr);
2214 edge pred;
2215 bool insertions = false;
2216 bool nophi = false;
2217 basic_block bprime;
2218 tree eprime;
2219 edge_iterator ei;
2220 tree type = TREE_TYPE (avail[EDGE_PRED (block, 0)->src->index]);
2221 tree temp;
2223 if (dump_file && (dump_flags & TDF_DETAILS))
2225 fprintf (dump_file, "Found partial redundancy for expression ");
2226 print_generic_expr (dump_file, node->expr, 0);
2227 fprintf (dump_file, " (");
2228 print_generic_expr (dump_file, val, 0);
2229 fprintf (dump_file, ")");
2230 fprintf (dump_file, "\n");
2233 /* Make sure we aren't creating an induction variable. */
2234 if (block->loop_depth > 0 && EDGE_COUNT (block->preds) == 2
2235 && TREE_CODE_CLASS (TREE_CODE (node->expr)) != tcc_reference)
2237 bool firstinsideloop = false;
2238 bool secondinsideloop = false;
2239 firstinsideloop = flow_bb_inside_loop_p (block->loop_father,
2240 EDGE_PRED (block, 0)->src);
2241 secondinsideloop = flow_bb_inside_loop_p (block->loop_father,
2242 EDGE_PRED (block, 1)->src);
2243 /* Induction variables only have one edge inside the loop. */
2244 if (firstinsideloop ^ secondinsideloop)
2246 if (dump_file && (dump_flags & TDF_DETAILS))
2247 fprintf (dump_file, "Skipping insertion of phi for partial redundancy: Looks like an induction variable\n");
2248 nophi = true;
2253 /* Make the necessary insertions. */
2254 FOR_EACH_EDGE (pred, ei, block->preds)
2256 tree stmts = alloc_stmt_list ();
2257 tree builtexpr;
2258 bprime = pred->src;
2259 eprime = avail[bprime->index];
2261 if (can_PRE_operation (eprime))
2263 #ifdef ENABLE_CHECKING
2264 tree vh;
2266 /* eprime may be an invariant. */
2267 vh = TREE_CODE (eprime) == VALUE_HANDLE
2268 ? eprime
2269 : get_value_handle (eprime);
2271 /* Ensure that the virtual uses we need reach our block. */
2272 if (TREE_CODE (vh) == VALUE_HANDLE)
2274 int i;
2275 tree vuse;
2276 for (i = 0;
2277 VEC_iterate (tree, VALUE_HANDLE_VUSES (vh), i, vuse);
2278 i++)
2280 size_t id = SSA_NAME_VERSION (vuse);
2281 gcc_assert (bitmap_bit_p (RVUSE_OUT (bprime), id)
2282 || IS_EMPTY_STMT (SSA_NAME_DEF_STMT (vuse)));
2285 #endif
2286 builtexpr = create_expression_by_pieces (bprime,
2287 eprime,
2288 stmts);
2289 bsi_insert_on_edge (pred, stmts);
2290 avail[bprime->index] = builtexpr;
2291 insertions = true;
2294 /* If we didn't want a phi node, and we made insertions, we still have
2295 inserted new stuff, and thus return true. If we didn't want a phi node,
2296 and didn't make insertions, we haven't added anything new, so return
2297 false. */
2298 if (nophi && insertions)
2299 return true;
2300 else if (nophi && !insertions)
2301 return false;
2303 /* Now build a phi for the new variable. */
2304 if (!prephitemp || TREE_TYPE (prephitemp) != type)
2306 prephitemp = create_tmp_var (type, "prephitmp");
2307 get_var_ann (prephitemp);
2310 temp = prephitemp;
2311 add_referenced_tmp_var (temp);
2313 if (TREE_CODE (type) == COMPLEX_TYPE)
2314 DECL_COMPLEX_GIMPLE_REG_P (temp) = 1;
2315 temp = create_phi_node (temp, block);
2317 NECESSARY (temp) = 0;
2318 VEC_safe_push (tree, heap, inserted_exprs, temp);
2319 FOR_EACH_EDGE (pred, ei, block->preds)
2320 add_phi_arg (temp, avail[pred->src->index], pred);
2322 vn_add (PHI_RESULT (temp), val);
2324 /* The value should *not* exist in PHI_GEN, or else we wouldn't be doing
2325 this insertion, since we test for the existence of this value in PHI_GEN
2326 before proceeding with the partial redundancy checks in insert_aux.
2328 The value may exist in AVAIL_OUT, in particular, it could be represented
2329 by the expression we are trying to eliminate, in which case we want the
2330 replacement to occur. If it does not already exist in AVAIL_OUT, we want it
2331 inserted there.
2333 Similarly to the PHI_GEN case, the value should not exist in NEW_SETS of
2334 this block, because if it did, it would have existed in our dominator's
2335 AVAIL_OUT, and would have been skipped due to the full redundancy check.
2338 bitmap_insert_into_set (PHI_GEN (block),
2339 PHI_RESULT (temp));
2340 bitmap_value_replace_in_set (AVAIL_OUT (block),
2341 PHI_RESULT (temp));
2342 bitmap_insert_into_set (NEW_SETS (block),
2343 PHI_RESULT (temp));
2345 if (dump_file && (dump_flags & TDF_DETAILS))
2347 fprintf (dump_file, "Created phi ");
2348 print_generic_expr (dump_file, temp, 0);
2349 fprintf (dump_file, " in block %d\n", block->index);
2351 pre_stats.phis++;
2352 return true;
2357 /* Perform insertion of partially redundant values.
2358 For BLOCK, do the following:
2359 1. Propagate the NEW_SETS of the dominator into the current block.
2360 If the block has multiple predecessors,
2361 2a. Iterate over the ANTIC expressions for the block to see if
2362 any of them are partially redundant.
2363 2b. If so, insert them into the necessary predecessors to make
2364 the expression fully redundant.
2365 2c. Insert a new PHI merging the values of the predecessors.
2366 2d. Insert the new PHI, and the new expressions, into the
2367 NEW_SETS set.
2368 3. Recursively call ourselves on the dominator children of BLOCK.
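/* A rough before/after sketch of the overall effect (SSA names and
   structure invented for the example):

       if (p_1)                         if (p_1)
         x_2 = a_3 + b_4;                 x_2 = a_3 + b_4;
       ...                      ==>     else
       y_5 = a_3 + b_4;                   pretmp.6_7 = a_3 + b_4;
                                        prephitmp.8_9 = PHI <x_2, pretmp.6_7>

   insert_aux only performs the insertion and the PHI creation; the
   later rewrite of y_5 = a_3 + b_4 into y_5 = prephitmp.8_9 is done by
   eliminate, once the expression has become fully redundant.  */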
2372 static bool
2373 insert_aux (basic_block block)
2375 basic_block son;
2376 bool new_stuff = false;
2378 if (block)
2380 basic_block dom;
2381 dom = get_immediate_dominator (CDI_DOMINATORS, block);
2382 if (dom)
2384 unsigned i;
2385 bitmap_iterator bi;
2386 bitmap_set_t newset = NEW_SETS (dom);
2387 if (newset)
2389 /* Note that we need to do a value_replace in both NEW_SETS
2390 and AVAIL_OUT. In either set the value may currently be
2391 represented by some non-simple expression that we want to
2392 replace with this SSA name. */
2393 EXECUTE_IF_SET_IN_BITMAP (newset->expressions, 0, i, bi)
2395 bitmap_value_replace_in_set (NEW_SETS (block), ssa_name (i));
2396 bitmap_value_replace_in_set (AVAIL_OUT (block), ssa_name (i));
2399 if (!single_pred_p (block))
2401 value_set_node_t node;
2402 for (node = ANTIC_IN (block)->head;
2403 node;
2404 node = node->next)
2406 if (can_PRE_operation (node->expr))
2408 tree *avail;
2409 tree val;
2410 bool by_some = false;
2411 bool cant_insert = false;
2412 bool all_same = true;
2413 tree first_s = NULL;
2414 edge pred;
2415 basic_block bprime;
2416 tree eprime = NULL_TREE;
2417 edge_iterator ei;
2419 val = get_value_handle (node->expr);
2420 if (bitmap_set_contains_value (PHI_GEN (block), val))
2421 continue;
2422 if (bitmap_set_contains_value (AVAIL_OUT (dom), val))
2424 if (dump_file && (dump_flags & TDF_DETAILS))
2425 fprintf (dump_file, "Found fully redundant value\n");
2426 continue;
2429 avail = XCNEWVEC (tree, last_basic_block);
2430 FOR_EACH_EDGE (pred, ei, block->preds)
2432 tree vprime;
2433 tree edoubleprime;
2435 /* This can happen in the very weird case
2436 that our fake infinite loop edges have caused a
2437 critical edge to appear. */
2438 if (EDGE_CRITICAL_P (pred))
2440 cant_insert = true;
2441 break;
2443 bprime = pred->src;
2444 eprime = phi_translate (node->expr,
2445 ANTIC_IN (block),
2446 bprime, block);
2448 /* eprime will generally only be NULL if the
2449 value of the expression, translated
2450 through the PHI for this predecessor, is
2451 undefined. If that is the case, we can't
2452 make the expression fully redundant,
2453 because its value is undefined along a
2454 predecessor path. We can thus break out
2455 early because it doesn't matter what the
2456 rest of the results are. */
2457 if (eprime == NULL)
2459 cant_insert = true;
2460 break;
2463 eprime = fully_constant_expression (eprime);
2464 vprime = get_value_handle (eprime);
2465 gcc_assert (vprime);
2466 edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime),
2467 vprime);
2468 if (edoubleprime == NULL)
2470 avail[bprime->index] = eprime;
2471 all_same = false;
2473 else
2475 avail[bprime->index] = edoubleprime;
2476 by_some = true;
2477 if (first_s == NULL)
2478 first_s = edoubleprime;
2479 else if (!operand_equal_p (first_s, edoubleprime,
2481 all_same = false;
2484 /* If we can insert it, it's not the same value
2485 already existing along every predecessor, and
2486 it's defined by some predecessor, then it is
2487 partially redundant. */
2488 if (!cant_insert && !all_same && by_some)
2490 if (insert_into_preds_of_block (block, node, avail))
2491 new_stuff = true;
2493 /* If all edges produce the same value and that value is
2494 an invariant, then the PHI has the same value on all
2495 edges. Note this. */
2496 else if (!cant_insert && all_same && eprime
2497 && is_gimple_min_invariant (eprime)
2498 && !is_gimple_min_invariant (val))
2500 value_set_t exprset = VALUE_HANDLE_EXPR_SET (val);
2501 value_set_node_t node;
2503 for (node = exprset->head; node; node = node->next)
2505 if (TREE_CODE (node->expr) == SSA_NAME)
2507 vn_add (node->expr, eprime);
2508 pre_stats.constified++;
2512 free (avail);
2518 for (son = first_dom_son (CDI_DOMINATORS, block);
2519 son;
2520 son = next_dom_son (CDI_DOMINATORS, son))
2522 new_stuff |= insert_aux (son);
2525 return new_stuff;
2528 /* Perform insertion of partially redundant values. */
2530 static void
2531 insert (void)
2533 bool new_stuff = true;
2534 basic_block bb;
2535 int num_iterations = 0;
2537 FOR_ALL_BB (bb)
2538 NEW_SETS (bb) = bitmap_set_new ();
2540 while (new_stuff)
2542 num_iterations++;
2543 new_stuff = false;
2544 new_stuff = insert_aux (ENTRY_BLOCK_PTR);
2546 if (num_iterations > 2 && dump_file && (dump_flags & TDF_STATS))
2547 fprintf (dump_file, "insert required %d iterations\n", num_iterations);
2551 /* Return true if EXPR is an SSA_NAME with no defining statement in
2552 this procedure, *AND* isn't a live-on-entry parameter. */
2554 static bool
2555 is_undefined_value (tree expr)
2557 return (TREE_CODE (expr) == SSA_NAME
2558 && IS_EMPTY_STMT (SSA_NAME_DEF_STMT (expr))
2559 /* PARM_DECLs and hard registers are always defined. */
2560 && TREE_CODE (SSA_NAME_VAR (expr)) != PARM_DECL);
2564 /* Given an SSA variable VAR and an expression EXPR, compute the value
2565 number for EXPR and create a value handle (VAL) for it. If VAR and
2566 EXPR are not the same, associate VAL with VAR. Finally, add VAR to
2567 S1 and its value handle to S2.
2569 The virtual use operands of STMT (if any) are taken into account
2570 when computing the value number for EXPR. */
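/* For example (names invented): when compute_avail processes the
   statement x_1 = a_2 + b_3, it calls this with VAR = x_1 and EXPR set
   to the value expression built from a_2 + b_3, S1 = TMP_GEN (BLOCK)
   and S2 = AVAIL_OUT (BLOCK); x_1 then shares the expression's value
   handle, is added to TMP_GEN, and is made available in AVAIL_OUT as a
   representative of that value.  */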
2572 static inline void
2573 add_to_sets (tree var, tree expr, tree stmt, bitmap_set_t s1,
2574 bitmap_set_t s2)
2576 tree val = vn_lookup_or_add (expr, stmt);
2578 /* VAR and EXPR may be the same when processing statements for which
2579 we are not computing value numbers (e.g., non-assignments, or
2580 statements that make aliased stores). In those cases, we are
2581 only interested in making VAR available as its own value. */
2582 if (var != expr)
2583 vn_add (var, val);
2585 if (s1)
2586 bitmap_insert_into_set (s1, var);
2587 bitmap_value_insert_into_set (s2, var);
2591 /* Given an expression EXPR, create and return a new
2592 expression with the same structure as EXPR but with its operands
2593 replaced with the value handles of each of the operands of EXPR.
2595 The virtual use operands of STMT (if any) are used when value numbering
2596 the operands. Insert EXPR's operands into the EXP_GEN set for BLOCK. */
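/* For example (SSA names and value handles invented): for the
   expression a_1 + b_2, where a_1 has value handle VH.3 and b_2 has
   value handle VH.4, this returns a pool-allocated PLUS_EXPR whose
   operands are VH.3 and VH.4, and inserts a_1 and b_2 into
   EXP_GEN (BLOCK).  */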
2598 static inline tree
2599 create_value_expr_from (tree expr, basic_block block, tree stmt)
2601 int i;
2602 enum tree_code code = TREE_CODE (expr);
2603 tree vexpr;
2604 alloc_pool pool;
2606 gcc_assert (TREE_CODE_CLASS (code) == tcc_unary
2607 || TREE_CODE_CLASS (code) == tcc_binary
2608 || TREE_CODE_CLASS (code) == tcc_comparison
2609 || TREE_CODE_CLASS (code) == tcc_reference
2610 || TREE_CODE_CLASS (code) == tcc_expression
2611 || TREE_CODE_CLASS (code) == tcc_exceptional
2612 || TREE_CODE_CLASS (code) == tcc_declaration);
2614 if (TREE_CODE_CLASS (code) == tcc_unary)
2615 pool = unary_node_pool;
2616 else if (TREE_CODE_CLASS (code) == tcc_reference)
2617 pool = reference_node_pool;
2618 else if (TREE_CODE_CLASS (code) == tcc_binary)
2619 pool = binary_node_pool;
2620 else if (TREE_CODE_CLASS (code) == tcc_comparison)
2621 pool = comparison_node_pool;
2622 else if (TREE_CODE_CLASS (code) == tcc_exceptional)
2624 gcc_assert (code == TREE_LIST);
2625 pool = list_node_pool;
2627 else
2629 gcc_assert (code == CALL_EXPR);
2630 pool = expression_node_pool;
2633 vexpr = (tree) pool_alloc (pool);
2634 memcpy (vexpr, expr, tree_size (expr));
2636 /* This case is only for TREE_LIST's that appear as part of
2637 CALL_EXPR's. Anything else is a bug, but we can't easily verify
2638 this, hence this comment. TREE_LIST is not handled by the
2639 general case below because it doesn't have a fixed length or
2640 operands, so you can't access purpose/value/chain through the
2641 TREE_OPERAND macros. */
2643 if (code == TREE_LIST)
2645 tree op = NULL_TREE;
2646 tree temp = NULL_TREE;
2647 if (TREE_CHAIN (vexpr))
2648 temp = create_value_expr_from (TREE_CHAIN (vexpr), block, stmt);
2649 TREE_CHAIN (vexpr) = temp ? temp : TREE_CHAIN (vexpr);
2652 /* Recursively value-numberize reference ops. */
2653 if (REFERENCE_CLASS_P (TREE_VALUE (vexpr)))
2655 tree tempop;
2656 op = TREE_VALUE (vexpr);
2657 tempop = create_value_expr_from (op, block, stmt);
2658 op = tempop ? tempop : op;
2660 TREE_VALUE (vexpr) = vn_lookup_or_add (op, stmt);
2662 else
2664 op = TREE_VALUE (vexpr);
2665 TREE_VALUE (vexpr) = vn_lookup_or_add (TREE_VALUE (vexpr), NULL);
2667 /* This is the equivalent of inserting OP into EXP_GEN like we
2668 do below. */
2669 if (!is_undefined_value (op))
2670 value_insert_into_set (EXP_GEN (block), op);
2672 return vexpr;
2675 for (i = 0; i < TREE_CODE_LENGTH (code); i++)
2677 tree val, op;
2679 op = TREE_OPERAND (expr, i);
2680 if (op == NULL_TREE)
2681 continue;
2683 /* If OP is a constant that has overflowed, do not value number
2684 this expression. */
2685 if (CONSTANT_CLASS_P (op)
2686 && TREE_OVERFLOW (op))
2688 pool_free (pool, vexpr);
2689 return NULL;
2692 /* Recursively value-numberize reference ops and tree lists. */
2693 if (REFERENCE_CLASS_P (op))
2695 tree tempop = create_value_expr_from (op, block, stmt);
2696 op = tempop ? tempop : op;
2697 val = vn_lookup_or_add (op, stmt);
2699 else if (TREE_CODE (op) == TREE_LIST)
2701 tree tempop;
2703 gcc_assert (TREE_CODE (expr) == CALL_EXPR);
2704 tempop = create_value_expr_from (op, block, stmt);
2706 op = tempop ? tempop : op;
2707 vn_lookup_or_add (op, NULL);
2708 /* Unlike everywhere else, we do *not* want to replace the
2709 TREE_LIST itself with a value number, because support
2710 functions we call will blow up. */
2711 val = op;
2713 else
2714 /* Create a value handle for OP and add it to VEXPR. */
2715 val = vn_lookup_or_add (op, NULL);
2717 if (!is_undefined_value (op) && TREE_CODE (op) != TREE_LIST)
2718 value_insert_into_set (EXP_GEN (block), op);
2720 if (TREE_CODE (val) == VALUE_HANDLE)
2721 TREE_TYPE (val) = TREE_TYPE (TREE_OPERAND (vexpr, i));
2723 TREE_OPERAND (vexpr, i) = val;
2726 return vexpr;
2731 /* Insert extra phis to merge values that are fully available from
2732 preds of BLOCK, but have no dominating representative coming from
2733 block DOM. */
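/* For illustration (names invented): if the value VH.5 is available as
   x_1 in one predecessor and as y_2 in the other, but no single name
   for it dominates BLOCK, we create

       mergephitmp.3_4 = PHI <x_1, y_2>

   in BLOCK and give its result the value handle VH.5, so that later
   phases have a dominating leader for the value.  */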
2735 static void
2736 insert_extra_phis (basic_block block, basic_block dom)
2739 if (!single_pred_p (block))
2741 edge e;
2742 edge_iterator ei;
2743 bool first = true;
2744 bitmap_set_t tempset = bitmap_set_new ();
2746 FOR_EACH_EDGE (e, ei, block->preds)
2748 if (first)
2750 bitmap_set_copy (tempset, AVAIL_OUT (e->src));
2751 first = false;
2753 else
2754 bitmap_set_and (tempset, AVAIL_OUT (e->src));
2757 if (dom)
2758 bitmap_set_and_compl (tempset, AVAIL_OUT (dom));
2760 if (!bitmap_set_empty_p (tempset))
2762 unsigned int i;
2763 bitmap_iterator bi;
2765 EXECUTE_IF_SET_IN_BITMAP (tempset->expressions, 0, i, bi)
2767 tree name = ssa_name (i);
2768 tree val = get_value_handle (name);
2769 tree temp;
2771 if (!mergephitemp
2772 || TREE_TYPE (name) != TREE_TYPE (mergephitemp))
2774 mergephitemp = create_tmp_var (TREE_TYPE (name),
2775 "mergephitmp");
2776 get_var_ann (mergephitemp);
2778 temp = mergephitemp;
2780 if (dump_file && (dump_flags & TDF_DETAILS))
2782 fprintf (dump_file, "Creating phi ");
2783 print_generic_expr (dump_file, temp, 0);
2784 fprintf (dump_file, " to merge available but not dominating values ");
2787 add_referenced_tmp_var (temp);
2788 temp = create_phi_node (temp, block);
2789 NECESSARY (temp) = 0;
2790 VEC_safe_push (tree, heap, inserted_exprs, temp);
2792 FOR_EACH_EDGE (e, ei, block->preds)
2794 tree leader = bitmap_find_leader (AVAIL_OUT (e->src), val);
2796 gcc_assert (leader);
2797 add_phi_arg (temp, leader, e);
2799 if (dump_file && (dump_flags & TDF_DETAILS))
2801 print_generic_expr (dump_file, leader, 0);
2802 fprintf (dump_file, " in block %d,", e->src->index);
2806 vn_add (PHI_RESULT (temp), val);
2808 if (dump_file && (dump_flags & TDF_DETAILS))
2809 fprintf (dump_file, "\n");
2815 /* Given a statement STMT and its right hand side which is a load, try
2816 to find the expression stored in the location being loaded from, and
2817 return true if a useful equivalence was recorded for LHS. */
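/* For illustration (names invented): given

       *p_1 = b_2;
       ...
       x_3 = *p_1;

   if every VUSE of the load comes from that single store and the store
   writes exactly the location being loaded, we give x_3 the value of
   b_2, so the load can later be treated as redundant.  */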
2819 static bool
2820 try_look_through_load (tree lhs, tree mem_ref, tree stmt, basic_block block)
2822 tree store_stmt = NULL;
2823 tree rhs;
2824 ssa_op_iter i;
2825 tree vuse;
2827 FOR_EACH_SSA_TREE_OPERAND (vuse, stmt, i, SSA_OP_VIRTUAL_USES)
2829 tree def_stmt;
2831 gcc_assert (TREE_CODE (vuse) == SSA_NAME);
2832 def_stmt = SSA_NAME_DEF_STMT (vuse);
2834 /* If there is no useful statement for this VUSE, we'll not find a
2835 useful expression to return either. Likewise, if there is a
2836 statement but it is not a simple assignment or it has virtual
2837 uses, we can stop right here. Note that this means we do
2838 not look through PHI nodes, which is intentional. */
2839 if (!def_stmt
2840 || TREE_CODE (def_stmt) != MODIFY_EXPR
2841 || !ZERO_SSA_OPERANDS (def_stmt, SSA_OP_VIRTUAL_USES))
2842 return false;
2844 /* If this is not the same statement as one we have looked at for
2845 another VUSE of STMT already, we have two statements producing
2846 something that reaches our STMT. */
2847 if (store_stmt && store_stmt != def_stmt)
2848 return false;
2849 else
2851 /* Is this a store to the exact same location as the one we are
2852 loading from in STMT? */
2853 if (!operand_equal_p (TREE_OPERAND (def_stmt, 0), mem_ref, 0))
2854 return false;
2856 /* Otherwise remember this statement and see if all other VUSEs
2857 come from the same statement. */
2858 store_stmt = def_stmt;
2862 /* Alright then, we have visited all VUSEs of STMT and we've determined
2863 that all of them come from the same statement STORE_STMT. See if there
2864 is a useful expression we can deduce from STORE_STMT. */
2865 rhs = TREE_OPERAND (store_stmt, 1);
2866 if ((TREE_CODE (rhs) == SSA_NAME
2867 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs))
2868 || is_gimple_min_invariant (rhs)
2869 || TREE_CODE (rhs) == ADDR_EXPR
2870 || TREE_INVARIANT (rhs))
2873 /* Yay! Compute a value number for the RHS of the statement and
2874 add its value to the AVAIL_OUT set for the block. Add the LHS
2875 to TMP_GEN. */
2876 add_to_sets (lhs, rhs, store_stmt, TMP_GEN (block), AVAIL_OUT (block));
2877 if (TREE_CODE (rhs) == SSA_NAME
2878 && !is_undefined_value (rhs))
2879 value_insert_into_set (EXP_GEN (block), rhs);
2880 return true;
2883 return false;
2886 /* Return a copy of NODE that is stored in the temporary alloc_pools.
2887 This is done recursively, so that the operands are stored in
2888 the pools as well. */
2890 static tree
2891 poolify_tree (tree node)
2893 switch (TREE_CODE (node))
2895 case INDIRECT_REF:
2897 tree temp = pool_alloc (reference_node_pool);
2898 memcpy (temp, node, tree_size (node));
2899 TREE_OPERAND (temp, 0) = poolify_tree (TREE_OPERAND (temp, 0));
2900 return temp;
2902 break;
2903 case MODIFY_EXPR:
2905 tree temp = pool_alloc (modify_expr_node_pool);
2906 memcpy (temp, node, tree_size (node));
2907 TREE_OPERAND (temp, 0) = poolify_tree (TREE_OPERAND (temp, 0));
2908 TREE_OPERAND (temp, 1) = poolify_tree (TREE_OPERAND (temp, 1));
2909 return temp;
2911 break;
2912 case SSA_NAME:
2913 case INTEGER_CST:
2914 case STRING_CST:
2915 case REAL_CST:
2916 case PARM_DECL:
2917 case VAR_DECL:
2918 case RESULT_DECL:
2919 return node;
2920 default:
2921 gcc_unreachable ();
2925 static tree modify_expr_template;
2927 /* Allocate a MODIFY_EXPR with TYPE and operands OP1, OP2 in the
2928 alloc pools, and return it. */
2929 static tree
2930 poolify_modify_expr (tree type, tree op1, tree op2)
2932 if (modify_expr_template == NULL)
2933 modify_expr_template = build2 (MODIFY_EXPR, type, op1, op2);
2935 TREE_OPERAND (modify_expr_template, 0) = op1;
2936 TREE_OPERAND (modify_expr_template, 1) = op2;
2937 TREE_TYPE (modify_expr_template) = type;
2939 return poolify_tree (modify_expr_template);
2943 /* For each real store operation of the form
2944 *a = <value> that we see, create a corresponding fake store of the
2945 form storetmp_<version> = *a.
2947 This enables AVAIL computation to mark the results of stores as
2948 available. Without this, you'd need to do some computation to
2949 mark the result of stores as ANTIC and AVAIL at all the right
2950 points.
2951 To save memory, we keep the store
2952 statements pool allocated until we decide whether they are
2953 necessary or not. */
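/* For illustration (names invented): for the store

       *p_1 = b_2;

   we insert right after it the pool-allocated fake statement

       storetmp.3_4 = *p_1;

   so that the stored value shows up in the AVAIL sets.  If PRE ends up
   not needing it, the fake statement is removed by
   remove_dead_inserted_code; otherwise realify_fake_stores later turns
   it into a real, GC-allocated statement.  */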
2955 static void
2956 insert_fake_stores (void)
2958 basic_block block;
2960 FOR_ALL_BB (block)
2962 block_stmt_iterator bsi;
2963 for (bsi = bsi_start (block); !bsi_end_p (bsi); bsi_next (&bsi))
2965 tree stmt = bsi_stmt (bsi);
2967 /* We can't generate SSA names for stores of complex or
2968 aggregate type. We also want to ignore things whose
2969 virtual uses occur in abnormal phis. */
2971 if (TREE_CODE (stmt) == MODIFY_EXPR
2972 && TREE_CODE (TREE_OPERAND (stmt, 0)) == INDIRECT_REF
2973 && !AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (stmt, 0)))
2974 && TREE_CODE (TREE_TYPE (TREE_OPERAND (stmt, 0))) != COMPLEX_TYPE)
2976 ssa_op_iter iter;
2977 def_operand_p defp;
2978 tree lhs = TREE_OPERAND (stmt, 0);
2979 tree rhs = TREE_OPERAND (stmt, 1);
2980 tree new;
2981 bool notokay = false;
2983 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_VIRTUAL_DEFS)
2985 tree defvar = DEF_FROM_PTR (defp);
2986 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (defvar))
2988 notokay = true;
2989 break;
2993 if (notokay)
2994 continue;
2996 if (!storetemp || TREE_TYPE (rhs) != TREE_TYPE (storetemp))
2998 storetemp = create_tmp_var (TREE_TYPE (rhs), "storetmp");
2999 get_var_ann (storetemp);
3002 new = poolify_modify_expr (TREE_TYPE (stmt), storetemp, lhs);
3004 lhs = make_ssa_name (storetemp, new);
3005 TREE_OPERAND (new, 0) = lhs;
3006 create_ssa_artficial_load_stmt (new, stmt);
3008 NECESSARY (new) = 0;
3009 VEC_safe_push (tree, heap, inserted_exprs, new);
3010 VEC_safe_push (tree, heap, need_creation, new);
3011 bsi_insert_after (&bsi, new, BSI_NEW_STMT);
3017 /* Turn the pool allocated fake stores that we created back into real
3018 GC allocated ones if they turned out to be necessary to PRE some
3019 expressions. */
3021 static void
3022 realify_fake_stores (void)
3024 unsigned int i;
3025 tree stmt;
3027 for (i = 0; VEC_iterate (tree, need_creation, i, stmt); i++)
3029 if (NECESSARY (stmt))
3031 block_stmt_iterator bsi;
3032 tree newstmt;
3034 /* Mark the temp variable as referenced */
3035 add_referenced_tmp_var (SSA_NAME_VAR (TREE_OPERAND (stmt, 0)));
3037 /* Put the new statement in GC memory, fix up the annotation
3038 and SSA_NAME_DEF_STMT on it, and then put it in place of
3039 the old statement in the IR stream. */
3040 newstmt = unshare_expr (stmt);
3041 SSA_NAME_DEF_STMT (TREE_OPERAND (newstmt, 0)) = newstmt;
3043 newstmt->common.ann = stmt->common.ann;
3045 bsi = bsi_for_stmt (stmt);
3046 bsi_replace (&bsi, newstmt, true);
3048 else
3049 release_defs (stmt);
3054 /* Compute the AVAIL set for all basic blocks.
3056 This function performs value numbering of the statements in each basic
3057 block. The AVAIL sets are built from information we glean while doing
3058 this value numbering, since the AVAIL sets contain only one entry per
3059 value.
3061 AVAIL_IN[BLOCK] = AVAIL_OUT[dom(BLOCK)].
3062 AVAIL_OUT[BLOCK] = AVAIL_IN[BLOCK] U PHI_GEN[BLOCK] U TMP_GEN[BLOCK]. */
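/* In other words, availability simply follows the dominator tree: a
   value computed in a block is considered available in every block
   that block dominates.  For example, a value computed before an
   if-then-else is available in both arms and in the join block, since
   all of them are dominated by the block containing the computation.  */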
3064 static void
3065 compute_avail (void)
3067 basic_block block, son;
3068 basic_block *worklist;
3069 size_t sp = 0;
3070 tree param;
3072 /* For arguments with default definitions, we pretend they are
3073 defined in the entry block. */
3074 for (param = DECL_ARGUMENTS (current_function_decl);
3075 param;
3076 param = TREE_CHAIN (param))
3078 if (default_def (param) != NULL)
3080 tree def = default_def (param);
3081 vn_lookup_or_add (def, NULL);
3082 bitmap_insert_into_set (TMP_GEN (ENTRY_BLOCK_PTR), def);
3083 bitmap_value_insert_into_set (AVAIL_OUT (ENTRY_BLOCK_PTR), def);
3087 /* Likewise for the static chain decl. */
3088 if (cfun->static_chain_decl)
3090 param = cfun->static_chain_decl;
3091 if (default_def (param) != NULL)
3093 tree def = default_def (param);
3094 vn_lookup_or_add (def, NULL);
3095 bitmap_insert_into_set (TMP_GEN (ENTRY_BLOCK_PTR), def);
3096 bitmap_value_insert_into_set (AVAIL_OUT (ENTRY_BLOCK_PTR), def);
3100 /* Allocate the worklist. */
3101 worklist = XNEWVEC (basic_block, n_basic_blocks);
3103 /* Seed the algorithm by putting the dominator children of the entry
3104 block on the worklist. */
3105 for (son = first_dom_son (CDI_DOMINATORS, ENTRY_BLOCK_PTR);
3106 son;
3107 son = next_dom_son (CDI_DOMINATORS, son))
3108 worklist[sp++] = son;
3110 /* Loop until the worklist is empty. */
3111 while (sp)
3113 block_stmt_iterator bsi;
3114 tree stmt, phi;
3115 basic_block dom;
3117 /* Pick a block from the worklist. */
3118 block = worklist[--sp];
3120 /* Initially, the set of available values in BLOCK is that of
3121 its immediate dominator. */
3122 dom = get_immediate_dominator (CDI_DOMINATORS, block);
3123 if (dom)
3124 bitmap_set_copy (AVAIL_OUT (block), AVAIL_OUT (dom));
3126 if (!in_fre)
3127 insert_extra_phis (block, dom);
3129 /* Generate values for PHI nodes. */
3130 for (phi = phi_nodes (block); phi; phi = PHI_CHAIN (phi))
3131 /* We have no need for virtual phis, as they don't represent
3132 actual computations. */
3133 if (is_gimple_reg (PHI_RESULT (phi)))
3134 add_to_sets (PHI_RESULT (phi), PHI_RESULT (phi), NULL,
3135 PHI_GEN (block), AVAIL_OUT (block));
3137 /* Now compute value numbers and populate value sets with all
3138 the expressions computed in BLOCK. */
3139 for (bsi = bsi_start (block); !bsi_end_p (bsi); bsi_next (&bsi))
3141 stmt_ann_t ann;
3142 ssa_op_iter iter;
3143 tree op;
3145 stmt = bsi_stmt (bsi);
3146 ann = stmt_ann (stmt);
3148 /* For regular value numbering, we are only interested in
3149 assignments of the form X_i = EXPR, where EXPR represents
3150 an "interesting" computation, it has no volatile operands
3151 and X_i doesn't flow through an abnormal edge. */
3152 if (TREE_CODE (stmt) == MODIFY_EXPR
3153 && !ann->has_volatile_ops
3154 && TREE_CODE (TREE_OPERAND (stmt, 0)) == SSA_NAME
3155 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (stmt, 0)))
3157 tree lhs = TREE_OPERAND (stmt, 0);
3158 tree rhs = TREE_OPERAND (stmt, 1);
3160 /* Try to look through loads. */
3161 if (TREE_CODE (lhs) == SSA_NAME
3162 && !ZERO_SSA_OPERANDS (stmt, SSA_OP_VIRTUAL_USES)
3163 && try_look_through_load (lhs, rhs, stmt, block))
3164 continue;
3166 STRIP_USELESS_TYPE_CONVERSION (rhs);
3167 if (can_value_number_operation (rhs))
3169 /* For a value-numberable operation, create a
3170 duplicate expression with its operands replaced
3171 by the value handles of the original RHS. */
3172 tree newt = create_value_expr_from (rhs, block, stmt);
3173 if (newt)
3175 add_to_sets (lhs, newt, stmt, TMP_GEN (block),
3176 AVAIL_OUT (block));
3177 value_insert_into_set (EXP_GEN (block), newt);
3178 continue;
3181 else if ((TREE_CODE (rhs) == SSA_NAME
3182 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs))
3183 || is_gimple_min_invariant (rhs)
3184 || TREE_CODE (rhs) == ADDR_EXPR
3185 || TREE_INVARIANT (rhs)
3186 || DECL_P (rhs))
3188 /* Compute a value number for the RHS of the statement
3189 and add its value to the AVAIL_OUT set for the block.
3190 Add the LHS to TMP_GEN. */
3191 add_to_sets (lhs, rhs, stmt, TMP_GEN (block),
3192 AVAIL_OUT (block));
3194 if (TREE_CODE (rhs) == SSA_NAME
3195 && !is_undefined_value (rhs))
3196 value_insert_into_set (EXP_GEN (block), rhs);
3197 continue;
3201 /* For any other statement that we don't recognize, simply
3202 make the names generated by the statement available in
3203 AVAIL_OUT and TMP_GEN. */
3204 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF)
3205 add_to_sets (op, op, NULL, TMP_GEN (block), AVAIL_OUT (block));
3207 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
3208 add_to_sets (op, op, NULL, NULL , AVAIL_OUT (block));
3211 /* Put the dominator children of BLOCK on the worklist of blocks
3212 to compute available sets for. */
3213 for (son = first_dom_son (CDI_DOMINATORS, block);
3214 son;
3215 son = next_dom_son (CDI_DOMINATORS, son))
3216 worklist[sp++] = son;
3219 free (worklist);
3223 /* Eliminate fully redundant computations. */
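/* For illustration (names invented): given

       x_1 = a_2 + b_3;
       ...
       y_4 = a_2 + b_3;

   the value of a_2 + b_3 already has the leader x_1 in AVAIL_OUT, so
   the second statement is rewritten to y_4 = x_1, with fold_convert
   adding a cast first if the types don't match.  */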
3225 static void
3226 eliminate (void)
3228 basic_block b;
3230 FOR_EACH_BB (b)
3232 block_stmt_iterator i;
3234 for (i = bsi_start (b); !bsi_end_p (i); bsi_next (&i))
3236 tree stmt = bsi_stmt (i);
3238 /* Look up the RHS of the expression to see if we have an
3239 available computation for it. If so, replace the RHS with
3240 the available computation. */
3241 if (TREE_CODE (stmt) == MODIFY_EXPR
3242 && TREE_CODE (TREE_OPERAND (stmt, 0)) == SSA_NAME
3243 && TREE_CODE (TREE_OPERAND (stmt, 1)) != SSA_NAME
3244 && !is_gimple_min_invariant (TREE_OPERAND (stmt, 1))
3245 && !stmt_ann (stmt)->has_volatile_ops)
3247 tree lhs = TREE_OPERAND (stmt, 0);
3248 tree *rhs_p = &TREE_OPERAND (stmt, 1);
3249 tree sprime;
3251 sprime = bitmap_find_leader (AVAIL_OUT (b),
3252 vn_lookup (lhs, NULL));
3253 if (sprime
3254 && sprime != lhs
3255 && (TREE_CODE (*rhs_p) != SSA_NAME
3256 || may_propagate_copy (*rhs_p, sprime)))
3258 gcc_assert (sprime != *rhs_p);
3260 if (dump_file && (dump_flags & TDF_DETAILS))
3262 fprintf (dump_file, "Replaced ");
3263 print_generic_expr (dump_file, *rhs_p, 0);
3264 fprintf (dump_file, " with ");
3265 print_generic_expr (dump_file, sprime, 0);
3266 fprintf (dump_file, " in ");
3267 print_generic_stmt (dump_file, stmt, 0);
3270 if (TREE_CODE (sprime) == SSA_NAME)
3271 NECESSARY (SSA_NAME_DEF_STMT (sprime)) = 1;
3272 /* We need to make sure the new and old types actually match,
3273 which may require adding a simple cast, which fold_convert
3274 will do for us. */
3275 if (TREE_CODE (*rhs_p) != SSA_NAME
3276 && !tree_ssa_useless_type_conversion_1 (TREE_TYPE (*rhs_p),
3277 TREE_TYPE (sprime)))
3278 sprime = fold_convert (TREE_TYPE (*rhs_p), sprime);
3280 pre_stats.eliminations++;
3281 propagate_tree_value (rhs_p, sprime);
3282 update_stmt (stmt);
3284 /* If we removed EH side effects from the statement, clean
3285 its EH information. */
3286 if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
3288 bitmap_set_bit (need_eh_cleanup,
3289 bb_for_stmt (stmt)->index);
3290 if (dump_file && (dump_flags & TDF_DETAILS))
3291 fprintf (dump_file, " Removed EH side effects.\n");
3299 /* Borrow a bit of tree-ssa-dce.c for the moment.
3300 XXX: In 4.1, we should be able to just run a DCE pass after PRE, though
3301 this may be a bit faster, and we may want critical edges kept split. */
3303 /* If OP's defining statement has not already been determined to be necessary,
3304 mark that statement necessary. Return the stmt if it is newly
3305 necessary, and NULL otherwise. */
3307 static inline tree
3308 mark_operand_necessary (tree op)
3310 tree stmt;
3312 gcc_assert (op);
3314 if (TREE_CODE (op) != SSA_NAME)
3315 return NULL;
3317 stmt = SSA_NAME_DEF_STMT (op);
3318 gcc_assert (stmt);
3320 if (NECESSARY (stmt)
3321 || IS_EMPTY_STMT (stmt))
3322 return NULL;
3324 NECESSARY (stmt) = 1;
3325 return stmt;
3328 /* Because we don't follow the standard PRE algorithm exactly (we sometimes
3329 decide not to insert PHI nodes), and because value numbering of casts
3330 isn't perfect, we sometimes end up inserting dead code. This simple
3331 DCE-like pass removes any insertions we made that weren't actually used. */
3333 static void
3334 remove_dead_inserted_code (void)
3336 VEC(tree,heap) *worklist = NULL;
3337 int i;
3338 tree t;
3340 worklist = VEC_alloc (tree, heap, VEC_length (tree, inserted_exprs));
3341 for (i = 0; VEC_iterate (tree, inserted_exprs, i, t); i++)
3343 if (NECESSARY (t))
3344 VEC_quick_push (tree, worklist, t);
3346 while (VEC_length (tree, worklist) > 0)
3348 t = VEC_pop (tree, worklist);
3350 /* PHI nodes are somewhat special in that each PHI alternative has
3351 data and control dependencies. All the statements feeding the
3352 PHI node's arguments are always necessary. */
3353 if (TREE_CODE (t) == PHI_NODE)
3355 int k;
3357 VEC_reserve (tree, heap, worklist, PHI_NUM_ARGS (t));
3358 for (k = 0; k < PHI_NUM_ARGS (t); k++)
3360 tree arg = PHI_ARG_DEF (t, k);
3361 if (TREE_CODE (arg) == SSA_NAME)
3363 arg = mark_operand_necessary (arg);
3364 if (arg)
3365 VEC_quick_push (tree, worklist, arg);
3369 else
3371 /* Propagate through the operands. Examine all the USE, VUSE and
3372 V_MAY_DEF operands in this statement. Mark all the statements
3373 which feed this statement's uses as necessary. */
3374 ssa_op_iter iter;
3375 tree use;
3377 /* The operands of V_MAY_DEF expressions are also needed as they
3378 represent potential definitions that may reach this
3379 statement (V_MAY_DEF operands allow us to follow def-def
3380 links). */
3382 FOR_EACH_SSA_TREE_OPERAND (use, t, iter, SSA_OP_ALL_USES)
3384 tree n = mark_operand_necessary (use);
3385 if (n)
3386 VEC_safe_push (tree, heap, worklist, n);
3391 for (i = 0; VEC_iterate (tree, inserted_exprs, i, t); i++)
3393 if (!NECESSARY (t))
3395 block_stmt_iterator bsi;
3397 if (dump_file && (dump_flags & TDF_DETAILS))
3399 fprintf (dump_file, "Removing unnecessary insertion:");
3400 print_generic_stmt (dump_file, t, 0);
3403 if (TREE_CODE (t) == PHI_NODE)
3405 remove_phi_node (t, NULL);
3407 else
3409 bsi = bsi_for_stmt (t);
3410 bsi_remove (&bsi, true);
3411 release_defs (t);
3415 VEC_free (tree, heap, worklist);
3418 /* Initialize data structures used by PRE. */
3420 static void
3421 init_pre (bool do_fre)
3423 basic_block bb;
3425 in_fre = do_fre;
3427 inserted_exprs = NULL;
3428 need_creation = NULL;
3429 pretemp = NULL_TREE;
3430 storetemp = NULL_TREE;
3431 mergephitemp = NULL_TREE;
3432 prephitemp = NULL_TREE;
3434 vn_init ();
3435 if (!do_fre)
3436 current_loops = loop_optimizer_init (dump_file);
3438 connect_infinite_loops_to_exit ();
3439 memset (&pre_stats, 0, sizeof (pre_stats));
3441 /* If block 0 has more than one predecessor, it means that its PHI
3442 nodes will have arguments coming from block -1. This creates
3443 problems for several places in PRE that keep local arrays indexed
3444 by block number. To prevent this, we split the edge coming from
3445 ENTRY_BLOCK_PTR (FIXME, if ENTRY_BLOCK_PTR had an index number
3446 different than -1 we wouldn't have to hack this. tree-ssa-dce.c
3447 needs a similar change). */
3448 if (!single_pred_p (single_succ (ENTRY_BLOCK_PTR)))
3449 if (!(single_succ_edge (ENTRY_BLOCK_PTR)->flags & EDGE_ABNORMAL))
3450 split_edge (single_succ_edge (ENTRY_BLOCK_PTR));
3452 FOR_ALL_BB (bb)
3453 bb->aux = xcalloc (1, sizeof (struct bb_value_sets));
3455 bitmap_obstack_initialize (&grand_bitmap_obstack);
3456 phi_translate_table = htab_create (511, expr_pred_trans_hash,
3457 expr_pred_trans_eq, free);
3458 value_set_pool = create_alloc_pool ("Value sets",
3459 sizeof (struct value_set), 30);
3460 bitmap_set_pool = create_alloc_pool ("Bitmap sets",
3461 sizeof (struct bitmap_set), 30);
3462 value_set_node_pool = create_alloc_pool ("Value set nodes",
3463 sizeof (struct value_set_node), 30);
3464 calculate_dominance_info (CDI_POST_DOMINATORS);
3465 calculate_dominance_info (CDI_DOMINATORS);
3466 binary_node_pool = create_alloc_pool ("Binary tree nodes",
3467 tree_code_size (PLUS_EXPR), 30);
3468 unary_node_pool = create_alloc_pool ("Unary tree nodes",
3469 tree_code_size (NEGATE_EXPR), 30);
3470 reference_node_pool = create_alloc_pool ("Reference tree nodes",
3471 tree_code_size (ARRAY_REF), 30);
3472 expression_node_pool = create_alloc_pool ("Expression tree nodes",
3473 tree_code_size (CALL_EXPR), 30);
3474 list_node_pool = create_alloc_pool ("List tree nodes",
3475 tree_code_size (TREE_LIST), 30);
3476 comparison_node_pool = create_alloc_pool ("Comparison tree nodes",
3477 tree_code_size (EQ_EXPR), 30);
3478 modify_expr_node_pool = create_alloc_pool ("MODIFY_EXPR nodes",
3479 tree_code_size (MODIFY_EXPR),
3480 30);
3481 modify_expr_template = NULL;
3483 FOR_ALL_BB (bb)
3485 EXP_GEN (bb) = set_new (true);
3486 PHI_GEN (bb) = bitmap_set_new ();
3487 TMP_GEN (bb) = bitmap_set_new ();
3488 AVAIL_OUT (bb) = bitmap_set_new ();
3491 need_eh_cleanup = BITMAP_ALLOC (NULL);
3495 /* Deallocate data structures used by PRE. */
3497 static void
3498 fini_pre (bool do_fre)
3500 basic_block bb;
3501 unsigned int i;
3503 VEC_free (tree, heap, inserted_exprs);
3504 VEC_free (tree, heap, need_creation);
3505 bitmap_obstack_release (&grand_bitmap_obstack);
3506 free_alloc_pool (value_set_pool);
3507 free_alloc_pool (bitmap_set_pool);
3508 free_alloc_pool (value_set_node_pool);
3509 free_alloc_pool (binary_node_pool);
3510 free_alloc_pool (reference_node_pool);
3511 free_alloc_pool (unary_node_pool);
3512 free_alloc_pool (list_node_pool);
3513 free_alloc_pool (expression_node_pool);
3514 free_alloc_pool (comparison_node_pool);
3515 free_alloc_pool (modify_expr_node_pool);
3516 htab_delete (phi_translate_table);
3517 remove_fake_exit_edges ();
3519 FOR_ALL_BB (bb)
3521 free (bb->aux);
3522 bb->aux = NULL;
3525 free_dominance_info (CDI_POST_DOMINATORS);
3526 vn_delete ();
3528 if (!bitmap_empty_p (need_eh_cleanup))
3530 tree_purge_all_dead_eh_edges (need_eh_cleanup);
3531 cleanup_tree_cfg ();
3534 BITMAP_FREE (need_eh_cleanup);
3536 /* Wipe out pointers to VALUE_HANDLEs. In the not terribly distant
3537 future we will want them to be persistent though. */
3538 for (i = 0; i < num_ssa_names; i++)
3540 tree name = ssa_name (i);
3542 if (!name)
3543 continue;
3545 if (SSA_NAME_VALUE (name)
3546 && TREE_CODE (SSA_NAME_VALUE (name)) == VALUE_HANDLE)
3547 SSA_NAME_VALUE (name) = NULL;
3549 if (!do_fre && current_loops)
3551 loop_optimizer_finalize (current_loops, dump_file);
3552 current_loops = NULL;
3556 /* Main entry point to the SSA-PRE pass. DO_FRE is true if the caller
3557 only wants to do full redundancy elimination. */
3559 static void
3560 execute_pre (bool do_fre)
3562 init_pre (do_fre);
3564 if (!do_fre)
3565 insert_fake_stores ();
3567 /* Collect and value number expressions computed in each basic block. */
3568 compute_avail ();
3570 if (dump_file && (dump_flags & TDF_DETAILS))
3572 basic_block bb;
3574 FOR_ALL_BB (bb)
3576 print_value_set (dump_file, EXP_GEN (bb), "exp_gen", bb->index);
3577 bitmap_print_value_set (dump_file, TMP_GEN (bb), "tmp_gen",
3578 bb->index);
3579 bitmap_print_value_set (dump_file, AVAIL_OUT (bb), "avail_out",
3580 bb->index);
3584 /* Insert can get quite slow on an incredibly large number of basic
3585 blocks due to some quadratic behavior. Until this behavior is
3586 fixed, don't run it when we have an incredibly large number of
3587 bb's. If we aren't going to run insert, there is no point in
3588 computing ANTIC, either, even though it's plenty fast. */
3589 if (!do_fre && n_basic_blocks < 4000)
3591 vuse_names = xcalloc (num_ssa_names, sizeof (bitmap));
3592 compute_rvuse ();
3593 compute_antic ();
3594 insert ();
3595 free (vuse_names);
3598 /* Remove all the redundant expressions. */
3599 eliminate ();
3602 if (dump_file && (dump_flags & TDF_STATS))
3604 fprintf (dump_file, "Insertions: %d\n", pre_stats.insertions);
3605 fprintf (dump_file, "New PHIs: %d\n", pre_stats.phis);
3606 fprintf (dump_file, "Eliminated: %d\n", pre_stats.eliminations);
3607 fprintf (dump_file, "Constified: %d\n", pre_stats.constified);
3610 bsi_commit_edge_inserts ();
3612 if (!do_fre)
3614 remove_dead_inserted_code ();
3615 realify_fake_stores ();
3618 fini_pre (do_fre);
3622 /* Gate and execute functions for PRE. */
3624 static void
3625 do_pre (void)
3627 execute_pre (false);
3630 static bool
3631 gate_pre (void)
3633 return flag_tree_pre != 0;
3636 struct tree_opt_pass pass_pre =
3638 "pre", /* name */
3639 gate_pre, /* gate */
3640 do_pre, /* execute */
3641 NULL, /* sub */
3642 NULL, /* next */
3643 0, /* static_pass_number */
3644 TV_TREE_PRE, /* tv_id */
3645 PROP_no_crit_edges | PROP_cfg
3646 | PROP_ssa | PROP_alias, /* properties_required */
3647 0, /* properties_provided */
3648 0, /* properties_destroyed */
3649 0, /* todo_flags_start */
3650 TODO_update_ssa | TODO_dump_func | TODO_ggc_collect
3651 | TODO_verify_ssa, /* todo_flags_finish */
3652 0 /* letter */
3656 /* Gate and execute functions for FRE. */
3658 static void
3659 execute_fre (void)
3661 execute_pre (true);
3664 static bool
3665 gate_fre (void)
3667 return flag_tree_fre != 0;
3670 struct tree_opt_pass pass_fre =
3672 "fre", /* name */
3673 gate_fre, /* gate */
3674 execute_fre, /* execute */
3675 NULL, /* sub */
3676 NULL, /* next */
3677 0, /* static_pass_number */
3678 TV_TREE_FRE, /* tv_id */
3679 PROP_cfg | PROP_ssa | PROP_alias, /* properties_required */
3680 0, /* properties_provided */
3681 0, /* properties_destroyed */
3682 0, /* todo_flags_start */
3683 TODO_dump_func | TODO_ggc_collect | TODO_verify_ssa, /* todo_flags_finish */
3684 0 /* letter */