* lang.opt (-freduced-reflection): New option.
[official-gcc.git] / gcc / tree-ssa-pre.c
blob305ee3e938b9dfc534788e4d1efdf0db212cf0c5
1 /* SSA-PRE for trees.
2 Copyright (C) 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dan@dberlin.org> and Steven Bosscher
4 <stevenb@suse.de>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
11 any later version.
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to
20 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
21 Boston, MA 02110-1301, USA. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "ggc.h"
28 #include "tree.h"
29 #include "basic-block.h"
30 #include "diagnostic.h"
31 #include "tree-inline.h"
32 #include "tree-flow.h"
33 #include "tree-gimple.h"
34 #include "tree-dump.h"
35 #include "timevar.h"
36 #include "fibheap.h"
37 #include "hashtab.h"
38 #include "tree-iterator.h"
39 #include "real.h"
40 #include "alloc-pool.h"
41 #include "tree-pass.h"
42 #include "flags.h"
43 #include "bitmap.h"
44 #include "langhooks.h"
45 #include "cfgloop.h"
47 /* TODO:
49 1. Avail sets can be shared by making an avail_find_leader that
50 walks up the dominator tree and looks in those avail sets.
51 This might affect code optimality, it's unclear right now.
52 2. Strength reduction can be performed by anticipating expressions
53 we can repair later on.
54 3. We can do back-substitution or smarter value numbering to catch
55 commutative expressions split up over multiple statements.
56 4. ANTIC_SAFE_LOADS could be a lot smarter than it is now.
57 Right now, it is simply calculating loads that occur before
58 any store in a block, instead of loads that occur before
59 stores that affect them. This is relatively more expensive, and
60 it's not clear how much more it will buy us.
61 */
63 /* For ease of terminology, "expression node" in the below refers to
64 every expression node but MODIFY_EXPR, because MODIFY_EXPR's represent
65 the actual statement containing the expressions we care about, and
66 we cache the value number by putting it in the expression. */
68 /* Basic algorithm
70 First we walk the statements to generate the AVAIL sets, the
71 EXP_GEN sets, and the tmp_gen sets. EXP_GEN sets represent the
72 generation of values/expressions by a given block. We use them
73 when computing the ANTIC sets. The AVAIL sets consist of
74 SSA_NAME's that represent values, so we know what values are
75 available in what blocks. AVAIL is a forward dataflow problem. In
76 SSA, values are never killed, so we don't need a kill set, or a
77 fixpoint iteration, in order to calculate the AVAIL sets. In
78 traditional parlance, AVAIL sets tell us the downsafety of the
79 expressions/values.
81 Next, we generate the ANTIC sets. These sets represent the
82 anticipatable expressions. ANTIC is a backwards dataflow
83 problem. An expression is anticipatable in a given block if it could
84 be generated in that block. This means that if we had to perform
85 an insertion in that block, of the value of that expression, we
86 could. Calculating the ANTIC sets requires phi translation of
87 expressions, because the flow goes backwards through phis. We must
88 iterate to a fixpoint of the ANTIC sets, because we have a kill
89 set. Even in SSA form, values are not live over the entire
90 function, only from their definition point onwards. So we have to
91 remove values from the ANTIC set once we go past the definition
92 point of the leaders that make them up.
93 compute_antic/compute_antic_aux performs this computation.
95 Third, we perform insertions to make partially redundant
96 expressions fully redundant.
98 An expression is partially redundant (excluding partial
99 anticipation) if:
101 1. It is AVAIL in some, but not all, of the predecessors of a
102 given block.
103 2. It is ANTIC in all the predecessors.
105 In order to make it fully redundant, we insert the expression into
106 the predecessors where it is not available, but is ANTIC.
107 insert/insert_aux performs this insertion.
109 Fourth, we eliminate fully redundant expressions.
110 This is a simple statement walk that replaces redundant
111 calculations with the now available values. */
113 /* Representations of value numbers:
115 Value numbers are represented using the "value handle" approach.
116 This means that each SSA_NAME (and for other reasons to be
117 disclosed in a moment, expression nodes) has a value handle that
118 can be retrieved through get_value_handle. This value handle, *is*
119 the value number of the SSA_NAME. You can pointer compare the
120 value handles for equivalence purposes.
122 For debugging reasons, the value handle is internally more than
123 just a number, it is a VAR_DECL named "value.x", where x is a
124 unique number for each value number in use. This allows
125 expressions with SSA_NAMES replaced by value handles to still be
126 pretty printed in a sane way. They simply print as "value.3 *
127 value.5", etc.
129 Expression nodes have value handles associated with them as a
130 cache. Otherwise, we'd have to look them up again in the hash
131 table. This makes a significant difference (factor of two or more) on
132 some test cases. They can be thrown away after the pass is
133 finished. */
135 /* Representation of expressions on value numbers:
137 In some portions of this code, you will notice we allocate "fake"
138 analogues to the expression we are value numbering, and replace the
139 operands with the values of the expression. Since we work on
140 values, and not just names, we canonicalize expressions to value
141 expressions for use in the ANTIC sets, the EXP_GEN set, etc.
143 This is theoretically unnecessary, it just saves a bunch of
144 repeated get_value_handle and find_leader calls in the remainder of
145 the code, trading off temporary memory usage for speed. The tree
146 nodes aren't actually creating more garbage, since they are
147 allocated in a special pools which are thrown away at the end of
148 this pass.
150 All of this also means that if you print the EXP_GEN or ANTIC sets,
151 you will see "value.5 + value.7" in the set, instead of "a_55 +
152 b_66" or something. The only thing that actually cares about
153 seeing the value leaders is phi translation, and it needs to be
154 able to find the leader for a value in an arbitrary block, so this
155 "value expression" form is perfect for it (otherwise you'd do
156 get_value_handle->find_leader->translate->get_value_handle->find_leader).*/
159 /* Representation of sets:
161 There are currently two types of sets used, hopefully to be unified soon.
162 The AVAIL sets do not need to be sorted in any particular order,
163 and thus, are simply represented as two bitmaps, one that keeps
164 track of values present in the set, and one that keeps track of
165 expressions present in the set.
167 The other sets are represented as doubly linked lists kept in topological
168 order, with an optional supporting bitmap of values present in the
169 set. The sets represent values, and the elements can be values or
170 expressions. The elements can appear in different sets, but each
171 element can only appear once in each set.
173 Since each node in the set represents a value, we also want to be
174 able to map expression, set pairs to something that tells us
175 whether the value is present in a set. We use a per-set bitmap for
176 that. The value handles also point to a linked list of the
177 expressions they represent via a tree annotation. This is mainly
178 useful only for debugging, since we don't do identity lookups. */
181 static bool in_fre = false;
183 /* A value set element. Basically a single linked list of
184 expressions/values. */
185 typedef struct value_set_node
187 /* An expression. */
188 tree expr;
190 /* A pointer to the next element of the value set. */
191 struct value_set_node *next;
192 } *value_set_node_t;
195 /* A value set. This is a singly linked list of value_set_node
196 elements with a possible bitmap that tells us what values exist in
197 the set. This set must be kept in topologically sorted order. */
198 typedef struct value_set
200 /* The head of the list. Used for iterating over the list in
201 order. */
202 value_set_node_t head;
204 /* The tail of the list. Used for tail insertions, which are
205 necessary to keep the set in topologically sorted order because
206 of how the set is built. */
207 value_set_node_t tail;
209 /* The length of the list. */
210 size_t length;
212 /* True if the set is indexed, which means it contains a backing
213 bitmap for quick determination of whether certain values exist in the
214 set. */
215 bool indexed;
217 /* The bitmap of values that exist in the set. May be NULL in an
218 empty or non-indexed set. */
219 bitmap values;
221 } *value_set_t;
224 /* An unordered bitmap set. One bitmap tracks values, the other,
225 expressions. */
226 typedef struct bitmap_set
228 bitmap expressions;
229 bitmap values;
230 } *bitmap_set_t;
232 /* Sets that we need to keep track of. */
233 typedef struct bb_value_sets
235 /* The EXP_GEN set, which represents expressions/values generated in
236 a basic block. */
237 value_set_t exp_gen;
239 /* The PHI_GEN set, which represents PHI results generated in a
240 basic block. */
241 bitmap_set_t phi_gen;
243 /* The TMP_GEN set, which represents results/temporaries generated
244 in a basic block. IE the LHS of an expression. */
245 bitmap_set_t tmp_gen;
247 /* The AVAIL_OUT set, which represents which values are available in
248 a given basic block. */
249 bitmap_set_t avail_out;
251 /* The ANTIC_IN set, which represents which values are anticipatable
252 in a given basic block. */
253 value_set_t antic_in;
255 /* The NEW_SETS set, which is used during insertion to augment the
256 AVAIL_OUT set of blocks with the new insertions performed during
257 the current iteration. */
258 bitmap_set_t new_sets;
260 /* The RVUSE sets, which are used during ANTIC computation to ensure
261 that we don't mark loads ANTIC once they have died. */
262 bitmap rvuse_in;
263 bitmap rvuse_out;
264 bitmap rvuse_gen;
265 bitmap rvuse_kill;
267 /* For actually occuring loads, as long as they occur before all the
268 other stores in the block, we know they are antic at the top of
269 the block, regardless of RVUSE_KILL. */
270 value_set_t antic_safe_loads;
271 } *bb_value_sets_t;
273 #define EXP_GEN(BB) ((bb_value_sets_t) ((BB)->aux))->exp_gen
274 #define PHI_GEN(BB) ((bb_value_sets_t) ((BB)->aux))->phi_gen
275 #define TMP_GEN(BB) ((bb_value_sets_t) ((BB)->aux))->tmp_gen
276 #define AVAIL_OUT(BB) ((bb_value_sets_t) ((BB)->aux))->avail_out
277 #define ANTIC_IN(BB) ((bb_value_sets_t) ((BB)->aux))->antic_in
278 #define RVUSE_IN(BB) ((bb_value_sets_t) ((BB)->aux))->rvuse_in
279 #define RVUSE_GEN(BB) ((bb_value_sets_t) ((BB)->aux))->rvuse_gen
280 #define RVUSE_KILL(BB) ((bb_value_sets_t) ((BB)->aux))->rvuse_kill
281 #define RVUSE_OUT(BB) ((bb_value_sets_t) ((BB)->aux))->rvuse_out
282 #define NEW_SETS(BB) ((bb_value_sets_t) ((BB)->aux))->new_sets
283 #define ANTIC_SAFE_LOADS(BB) ((bb_value_sets_t) ((BB)->aux))->antic_safe_loads
/* Statistics about what PRE accomplished, for dumping.  */
static struct
{
  /* Number of RHS computations eliminated.  */
  int eliminations;

  /* Number of new expressions/temporaries generated.  */
  int insertions;

  /* Number of new PHI nodes added.  */
  int phis;

  /* Number of values found to be constant.  */
  int constified;
} pre_stats;
304 static tree bitmap_find_leader (bitmap_set_t, tree);
305 static tree find_leader (value_set_t, tree);
306 static void value_insert_into_set (value_set_t, tree);
307 static void bitmap_value_insert_into_set (bitmap_set_t, tree);
308 static void bitmap_value_replace_in_set (bitmap_set_t, tree);
309 static void insert_into_set (value_set_t, tree);
310 static void bitmap_set_copy (bitmap_set_t, bitmap_set_t);
311 static bool bitmap_set_contains_value (bitmap_set_t, tree);
312 static bitmap_set_t bitmap_set_new (void);
313 static value_set_t set_new (bool);
314 static bool is_undefined_value (tree);
315 static tree create_expression_by_pieces (basic_block, tree, tree);
316 static tree find_or_generate_expression (basic_block, tree, tree);
319 /* We can add and remove elements and entries to and from sets
320 and hash tables, so we use alloc pools for them. */
322 static alloc_pool value_set_pool;
323 static alloc_pool bitmap_set_pool;
324 static alloc_pool value_set_node_pool;
325 static alloc_pool binary_node_pool;
326 static alloc_pool unary_node_pool;
327 static alloc_pool reference_node_pool;
328 static alloc_pool comparison_node_pool;
329 static alloc_pool expression_node_pool;
330 static alloc_pool list_node_pool;
331 static alloc_pool modify_expr_node_pool;
332 static bitmap_obstack grand_bitmap_obstack;
334 /* To avoid adding 300 temporary variables when we only need one, we
335 only create one temporary variable, on demand, and build ssa names
336 off that. We do have to change the variable if the types don't
337 match the current variable's type. */
338 static tree pretemp;
339 static tree storetemp;
340 static tree mergephitemp;
341 static tree prephitemp;
343 /* Set of blocks with statements that have had its EH information
344 cleaned up. */
345 static bitmap need_eh_cleanup;
347 /* The phi_translate_table caches phi translations for a given
348 expression and predecessor. */
350 static htab_t phi_translate_table;
352 /* A three tuple {e, pred, v} used to cache phi translations in the
353 phi_translate_table. */
355 typedef struct expr_pred_trans_d
357 /* The expression. */
358 tree e;
360 /* The predecessor block along which we translated the expression. */
361 basic_block pred;
363 /* vuses associated with the expression. */
364 VEC (tree, gc) *vuses;
366 /* The value that resulted from the translation. */
367 tree v;
370 /* The hashcode for the expression, pred pair. This is cached for
371 speed reasons. */
372 hashval_t hashcode;
373 } *expr_pred_trans_t;
375 /* Return the hash value for a phi translation table entry. */
377 static hashval_t
378 expr_pred_trans_hash (const void *p)
380 const expr_pred_trans_t ve = (expr_pred_trans_t) p;
381 return ve->hashcode;
384 /* Return true if two phi translation table entries are the same.
385 P1 and P2 should point to the expr_pred_trans_t's to be compared.*/
387 static int
388 expr_pred_trans_eq (const void *p1, const void *p2)
390 const expr_pred_trans_t ve1 = (expr_pred_trans_t) p1;
391 const expr_pred_trans_t ve2 = (expr_pred_trans_t) p2;
392 basic_block b1 = ve1->pred;
393 basic_block b2 = ve2->pred;
394 int i;
395 tree vuse1;
397 /* If they are not translations for the same basic block, they can't
398 be equal. */
399 if (b1 != b2)
400 return false;
403 /* If they are for the same basic block, determine if the
404 expressions are equal. */
405 if (!expressions_equal_p (ve1->e, ve2->e))
406 return false;
408 /* Make sure the vuses are equivalent. */
409 if (ve1->vuses == ve2->vuses)
410 return true;
412 if (VEC_length (tree, ve1->vuses) != VEC_length (tree, ve2->vuses))
413 return false;
415 for (i = 0; VEC_iterate (tree, ve1->vuses, i, vuse1); i++)
417 if (VEC_index (tree, ve2->vuses, i) != vuse1)
418 return false;
421 return true;
424 /* Search in the phi translation table for the translation of
425 expression E in basic block PRED with vuses VUSES.
426 Return the translated value, if found, NULL otherwise. */
428 static inline tree
429 phi_trans_lookup (tree e, basic_block pred, VEC (tree, gc) *vuses)
431 void **slot;
432 struct expr_pred_trans_d ept;
434 ept.e = e;
435 ept.pred = pred;
436 ept.vuses = vuses;
437 ept.hashcode = vn_compute (e, (unsigned long) pred);
438 slot = htab_find_slot_with_hash (phi_translate_table, &ept, ept.hashcode,
439 NO_INSERT);
440 if (!slot)
441 return NULL;
442 else
443 return ((expr_pred_trans_t) *slot)->v;
447 /* Add the tuple mapping from {expression E, basic block PRED, vuses VUSES} to
448 value V, to the phi translation table. */
450 static inline void
451 phi_trans_add (tree e, tree v, basic_block pred, VEC (tree, gc) *vuses)
453 void **slot;
454 expr_pred_trans_t new_pair = XNEW (struct expr_pred_trans_d);
455 new_pair->e = e;
456 new_pair->pred = pred;
457 new_pair->vuses = vuses;
458 new_pair->v = v;
459 new_pair->hashcode = vn_compute (e, (unsigned long) pred);
460 slot = htab_find_slot_with_hash (phi_translate_table, new_pair,
461 new_pair->hashcode, INSERT);
462 if (*slot)
463 free (*slot);
464 *slot = (void *) new_pair;
468 /* Add expression E to the expression set of value V. */
470 void
471 add_to_value (tree v, tree e)
473 /* Constants have no expression sets. */
474 if (is_gimple_min_invariant (v))
475 return;
477 if (VALUE_HANDLE_EXPR_SET (v) == NULL)
478 VALUE_HANDLE_EXPR_SET (v) = set_new (false);
480 insert_into_set (VALUE_HANDLE_EXPR_SET (v), e);
484 /* Return true if value V exists in the bitmap for SET. */
486 static inline bool
487 value_exists_in_set_bitmap (value_set_t set, tree v)
489 if (!set->values)
490 return false;
492 return bitmap_bit_p (set->values, VALUE_HANDLE_ID (v));
496 /* Remove value V from the bitmap for SET. */
498 static void
499 value_remove_from_set_bitmap (value_set_t set, tree v)
501 gcc_assert (set->indexed);
503 if (!set->values)
504 return;
506 bitmap_clear_bit (set->values, VALUE_HANDLE_ID (v));
510 /* Insert the value number V into the bitmap of values existing in
511 SET. */
513 static inline void
514 value_insert_into_set_bitmap (value_set_t set, tree v)
516 gcc_assert (set->indexed);
518 if (set->values == NULL)
519 set->values = BITMAP_ALLOC (&grand_bitmap_obstack);
521 bitmap_set_bit (set->values, VALUE_HANDLE_ID (v));
525 /* Create a new bitmap set and return it. */
527 static bitmap_set_t
528 bitmap_set_new (void)
530 bitmap_set_t ret = (bitmap_set_t) pool_alloc (bitmap_set_pool);
531 ret->expressions = BITMAP_ALLOC (&grand_bitmap_obstack);
532 ret->values = BITMAP_ALLOC (&grand_bitmap_obstack);
533 return ret;
536 /* Create a new set. */
538 static value_set_t
539 set_new (bool indexed)
541 value_set_t ret;
542 ret = (value_set_t) pool_alloc (value_set_pool);
543 ret->head = ret->tail = NULL;
544 ret->length = 0;
545 ret->indexed = indexed;
546 ret->values = NULL;
547 return ret;
550 /* Insert an expression EXPR into a bitmapped set. */
552 static void
553 bitmap_insert_into_set (bitmap_set_t set, tree expr)
555 tree val;
556 /* XXX: For now, we only let SSA_NAMES into the bitmap sets. */
557 gcc_assert (TREE_CODE (expr) == SSA_NAME);
558 val = get_value_handle (expr);
560 gcc_assert (val);
561 if (!is_gimple_min_invariant (val))
563 bitmap_set_bit (set->values, VALUE_HANDLE_ID (val));
564 bitmap_set_bit (set->expressions, SSA_NAME_VERSION (expr));
568 /* Insert EXPR into SET. */
570 static void
571 insert_into_set (value_set_t set, tree expr)
573 value_set_node_t newnode = (value_set_node_t) pool_alloc (value_set_node_pool);
574 tree val = get_value_handle (expr);
575 gcc_assert (val);
577 if (is_gimple_min_invariant (val))
578 return;
580 /* For indexed sets, insert the value into the set value bitmap.
581 For all sets, add it to the linked list and increment the list
582 length. */
583 if (set->indexed)
584 value_insert_into_set_bitmap (set, val);
586 newnode->next = NULL;
587 newnode->expr = expr;
588 set->length ++;
589 if (set->head == NULL)
591 set->head = set->tail = newnode;
593 else
595 set->tail->next = newnode;
596 set->tail = newnode;
600 /* Copy a bitmapped set ORIG, into bitmapped set DEST. */
602 static void
603 bitmap_set_copy (bitmap_set_t dest, bitmap_set_t orig)
605 bitmap_copy (dest->expressions, orig->expressions);
606 bitmap_copy (dest->values, orig->values);
609 /* Perform bitmapped set operation DEST &= ORIG. */
611 static void
612 bitmap_set_and (bitmap_set_t dest, bitmap_set_t orig)
614 bitmap_iterator bi;
615 unsigned int i;
616 bitmap temp = BITMAP_ALLOC (&grand_bitmap_obstack);
618 bitmap_and_into (dest->values, orig->values);
619 bitmap_copy (temp, dest->expressions);
620 EXECUTE_IF_SET_IN_BITMAP (temp, 0, i, bi)
622 tree name = ssa_name (i);
623 tree val = get_value_handle (name);
624 if (!bitmap_bit_p (dest->values, VALUE_HANDLE_ID (val)))
625 bitmap_clear_bit (dest->expressions, i);
630 /* Perform bitmapped value set operation DEST = DEST & ~ORIG. */
632 static void
633 bitmap_set_and_compl (bitmap_set_t dest, bitmap_set_t orig)
635 bitmap_iterator bi;
636 unsigned int i;
637 bitmap temp = BITMAP_ALLOC (&grand_bitmap_obstack);
639 bitmap_and_compl_into (dest->values, orig->values);
640 bitmap_copy (temp, dest->expressions);
641 EXECUTE_IF_SET_IN_BITMAP (temp, 0, i, bi)
643 tree name = ssa_name (i);
644 tree val = get_value_handle (name);
645 if (!bitmap_bit_p (dest->values, VALUE_HANDLE_ID (val)))
646 bitmap_clear_bit (dest->expressions, i);
650 /* Return true if the bitmap set SET is empty. */
652 static bool
653 bitmap_set_empty_p (bitmap_set_t set)
655 return bitmap_empty_p (set->values);
658 /* Copy the set ORIG to the set DEST. */
660 static void
661 set_copy (value_set_t dest, value_set_t orig)
663 value_set_node_t node;
665 if (!orig || !orig->head)
666 return;
668 for (node = orig->head;
669 node;
670 node = node->next)
672 insert_into_set (dest, node->expr);
676 /* Remove EXPR from SET. */
678 static void
679 set_remove (value_set_t set, tree expr)
681 value_set_node_t node, prev;
683 /* Remove the value of EXPR from the bitmap, decrement the set
684 length, and remove it from the actual double linked list. */
685 value_remove_from_set_bitmap (set, get_value_handle (expr));
686 set->length--;
687 prev = NULL;
688 for (node = set->head;
689 node != NULL;
690 prev = node, node = node->next)
692 if (node->expr == expr)
694 if (prev == NULL)
695 set->head = node->next;
696 else
697 prev->next= node->next;
699 if (node == set->tail)
700 set->tail = prev;
701 pool_free (value_set_node_pool, node);
702 return;
707 /* Return true if SET contains the value VAL. */
709 static bool
710 set_contains_value (value_set_t set, tree val)
712 /* All constants are in every set. */
713 if (is_gimple_min_invariant (val))
714 return true;
716 if (!set || set->length == 0)
717 return false;
719 return value_exists_in_set_bitmap (set, val);
722 /* Return true if bitmapped set SET contains the expression EXPR. */
723 static bool
724 bitmap_set_contains (bitmap_set_t set, tree expr)
726 /* All constants are in every set. */
727 if (is_gimple_min_invariant (get_value_handle (expr)))
728 return true;
730 /* XXX: Bitmapped sets only contain SSA_NAME's for now. */
731 if (TREE_CODE (expr) != SSA_NAME)
732 return false;
733 return bitmap_bit_p (set->expressions, SSA_NAME_VERSION (expr));
737 /* Return true if bitmapped set SET contains the value VAL. */
739 static bool
740 bitmap_set_contains_value (bitmap_set_t set, tree val)
742 if (is_gimple_min_invariant (val))
743 return true;
744 return bitmap_bit_p (set->values, VALUE_HANDLE_ID (val));
747 /* Replace an instance of value LOOKFOR with expression EXPR in SET. */
749 static void
750 bitmap_set_replace_value (bitmap_set_t set, tree lookfor, tree expr)
752 value_set_t exprset;
753 value_set_node_t node;
754 if (is_gimple_min_invariant (lookfor))
755 return;
756 if (!bitmap_set_contains_value (set, lookfor))
757 return;
759 /* The number of expressions having a given value is usually
760 significantly less than the total number of expressions in SET.
761 Thus, rather than check, for each expression in SET, whether it
762 has the value LOOKFOR, we walk the reverse mapping that tells us
763 what expressions have a given value, and see if any of those
764 expressions are in our set. For large testcases, this is about
765 5-10x faster than walking the bitmap. If this is somehow a
766 significant lose for some cases, we can choose which set to walk
767 based on the set size. */
768 exprset = VALUE_HANDLE_EXPR_SET (lookfor);
769 for (node = exprset->head; node; node = node->next)
771 if (TREE_CODE (node->expr) == SSA_NAME)
773 if (bitmap_bit_p (set->expressions, SSA_NAME_VERSION (node->expr)))
775 bitmap_clear_bit (set->expressions, SSA_NAME_VERSION (node->expr));
776 bitmap_set_bit (set->expressions, SSA_NAME_VERSION (expr));
777 return;
783 /* Subtract bitmapped set B from value set A, and return the new set. */
785 static value_set_t
786 bitmap_set_subtract_from_value_set (value_set_t a, bitmap_set_t b,
787 bool indexed)
789 value_set_t ret = set_new (indexed);
790 value_set_node_t node;
791 for (node = a->head;
792 node;
793 node = node->next)
795 if (!bitmap_set_contains (b, node->expr))
796 insert_into_set (ret, node->expr);
798 return ret;
801 /* Return true if two sets are equal. */
803 static bool
804 set_equal (value_set_t a, value_set_t b)
806 value_set_node_t node;
808 if (a->length != b->length)
809 return false;
810 for (node = a->head;
811 node;
812 node = node->next)
814 if (!set_contains_value (b, get_value_handle (node->expr)))
815 return false;
817 return true;
820 /* Replace an instance of EXPR's VALUE with EXPR in SET if it exists,
821 and add it otherwise. */
823 static void
824 bitmap_value_replace_in_set (bitmap_set_t set, tree expr)
826 tree val = get_value_handle (expr);
827 if (bitmap_set_contains_value (set, val))
828 bitmap_set_replace_value (set, val, expr);
829 else
830 bitmap_insert_into_set (set, expr);
833 /* Insert EXPR into SET if EXPR's value is not already present in
834 SET. */
836 static void
837 bitmap_value_insert_into_set (bitmap_set_t set, tree expr)
839 tree val = get_value_handle (expr);
841 if (is_gimple_min_invariant (val))
842 return;
844 if (!bitmap_set_contains_value (set, val))
845 bitmap_insert_into_set (set, expr);
848 /* Insert the value for EXPR into SET, if it doesn't exist already. */
850 static void
851 value_insert_into_set (value_set_t set, tree expr)
853 tree val = get_value_handle (expr);
855 /* Constant and invariant values exist everywhere, and thus,
856 actually keeping them in the sets is pointless. */
857 if (is_gimple_min_invariant (val))
858 return;
860 if (!set_contains_value (set, val))
861 insert_into_set (set, expr);
865 /* Print out SET to OUTFILE. */
867 static void
868 bitmap_print_value_set (FILE *outfile, bitmap_set_t set,
869 const char *setname, int blockindex)
871 fprintf (outfile, "%s[%d] := { ", setname, blockindex);
872 if (set)
874 bool first = true;
875 unsigned i;
876 bitmap_iterator bi;
878 EXECUTE_IF_SET_IN_BITMAP (set->expressions, 0, i, bi)
880 if (!first)
881 fprintf (outfile, ", ");
882 first = false;
883 print_generic_expr (outfile, ssa_name (i), 0);
885 fprintf (outfile, " (");
886 print_generic_expr (outfile, get_value_handle (ssa_name (i)), 0);
887 fprintf (outfile, ") ");
890 fprintf (outfile, " }\n");
892 /* Print out the value_set SET to OUTFILE. */
894 static void
895 print_value_set (FILE *outfile, value_set_t set,
896 const char *setname, int blockindex)
898 value_set_node_t node;
899 fprintf (outfile, "%s[%d] := { ", setname, blockindex);
900 if (set)
902 for (node = set->head;
903 node;
904 node = node->next)
906 print_generic_expr (outfile, node->expr, 0);
908 fprintf (outfile, " (");
909 print_generic_expr (outfile, get_value_handle (node->expr), 0);
910 fprintf (outfile, ") ");
912 if (node->next)
913 fprintf (outfile, ", ");
917 fprintf (outfile, " }\n");
920 /* Print out the expressions that have VAL to OUTFILE. */
922 void
923 print_value_expressions (FILE *outfile, tree val)
925 if (VALUE_HANDLE_EXPR_SET (val))
927 char s[10];
928 sprintf (s, "VH.%04d", VALUE_HANDLE_ID (val));
929 print_value_set (outfile, VALUE_HANDLE_EXPR_SET (val), s, 0);
934 void
935 debug_value_expressions (tree val)
937 print_value_expressions (stderr, val);
941 void debug_value_set (value_set_t, const char *, int);
943 void
944 debug_value_set (value_set_t set, const char *setname, int blockindex)
946 print_value_set (stderr, set, setname, blockindex);
949 /* Return the folded version of T if T, when folded, is a gimple
950 min_invariant. Otherwise, return T. */
952 static tree
953 fully_constant_expression (tree t)
955 tree folded;
956 folded = fold (t);
957 if (folded && is_gimple_min_invariant (folded))
958 return folded;
959 return t;
962 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
963 For example, this can copy a list made of TREE_LIST nodes.
964 Allocates the nodes in list_node_pool*/
966 static tree
967 pool_copy_list (tree list)
969 tree head;
970 tree prev, next;
972 if (list == 0)
973 return 0;
974 head = (tree) pool_alloc (list_node_pool);
976 memcpy (head, list, tree_size (list));
977 prev = head;
979 next = TREE_CHAIN (list);
980 while (next)
982 TREE_CHAIN (prev) = (tree) pool_alloc (list_node_pool);
983 memcpy (TREE_CHAIN (prev), next, tree_size (next));
984 prev = TREE_CHAIN (prev);
985 next = TREE_CHAIN (next);
987 return head;
990 /* Translate the vuses in the VUSES vector backwards through phi
991 nodes, so that they have the value they would have in BLOCK. */
993 static VEC(tree, gc) *
994 translate_vuses_through_block (VEC (tree, gc) *vuses, basic_block block)
996 tree oldvuse;
997 VEC(tree, gc) *result = NULL;
998 int i;
1000 for (i = 0; VEC_iterate (tree, vuses, i, oldvuse); i++)
1002 tree phi = SSA_NAME_DEF_STMT (oldvuse);
1003 if (TREE_CODE (phi) == PHI_NODE)
1005 edge e = find_edge (block, bb_for_stmt (phi));
1006 if (e)
1008 tree def = PHI_ARG_DEF (phi, e->dest_idx);
1009 if (def != oldvuse)
1011 if (!result)
1012 result = VEC_copy (tree, gc, vuses);
1013 VEC_replace (tree, result, i, def);
1018 if (result)
1020 sort_vuses (result);
1021 return result;
1023 return vuses;
1026 /* Translate EXPR using phis in PHIBLOCK, so that it has the values of
1027 the phis in PRED. Return NULL if we can't find a leader for each
1028 part of the translated expression. */
1030 static tree
1031 phi_translate (tree expr, value_set_t set, basic_block pred,
1032 basic_block phiblock)
1034 tree phitrans = NULL;
1035 tree oldexpr = expr;
1036 if (expr == NULL)
1037 return NULL;
/* Constants need no translation; they are the same on every edge.  */
1039 if (is_gimple_min_invariant (expr))
1040 return expr;
1042 /* Phi translations of a given expression don't change. */
1043 if (EXPR_P (expr))
1045 tree vh;
/* Memory expressions key the translation cache on their vuses too.  */
1047 vh = get_value_handle (expr);
1048 if (vh && TREE_CODE (vh) == VALUE_HANDLE)
1049 phitrans = phi_trans_lookup (expr, pred, VALUE_HANDLE_VUSES (vh));
1050 else
1051 phitrans = phi_trans_lookup (expr, pred, NULL);
1053 else
1054 phitrans = phi_trans_lookup (expr, pred, NULL);
/* Cache hit: reuse the previous translation.  */
1056 if (phitrans)
1057 return phitrans;
1059 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
1061 case tcc_expression:
/* Among tcc_expression codes, only calls are translated.  */
1063 if (TREE_CODE (expr) != CALL_EXPR)
1064 return NULL;
1065 else
1067 tree oldop0 = TREE_OPERAND (expr, 0);
1068 tree oldarglist = TREE_OPERAND (expr, 1);
1069 tree oldop2 = TREE_OPERAND (expr, 2);
1070 tree newop0;
1071 tree newarglist;
1072 tree newop2 = NULL;
1073 tree oldwalker;
1074 tree newwalker;
1075 tree newexpr;
1076 tree vh = get_value_handle (expr);
1077 bool listchanged = false;
1078 VEC (tree, gc) *vuses = VALUE_HANDLE_VUSES (vh);
1079 VEC (tree, gc) *tvuses;
1081 /* Call expressions are kind of weird because they have an
1082 argument list. We don't want to value number the list
1083 as one value number, because that doesn't make much
1084 sense, and just breaks the support functions we call,
1085 which expect TREE_OPERAND (call_expr, 2) to be a
1086 TREE_LIST. */
/* Translate operand 0 and, if present, operand 2 of the call;
   failure of either part aborts the whole translation.  */
1088 newop0 = phi_translate (find_leader (set, oldop0),
1089 set, pred, phiblock);
1090 if (newop0 == NULL)
1091 return NULL;
1092 if (oldop2)
1094 newop2 = phi_translate (find_leader (set, oldop2),
1095 set, pred, phiblock);
1096 if (newop2 == NULL)
1097 return NULL;
1100 /* phi translate the argument list piece by piece.
1102 We could actually build the list piece by piece here,
1103 but it's likely to not be worth the memory we will save,
1104 unless you have millions of call arguments. */
1106 newarglist = pool_copy_list (oldarglist);
1107 for (oldwalker = oldarglist, newwalker = newarglist;
1108 oldwalker && newwalker;
1109 oldwalker = TREE_CHAIN (oldwalker),
1110 newwalker = TREE_CHAIN (newwalker))
1113 tree oldval = TREE_VALUE (oldwalker);
1114 tree newval;
1115 if (oldval)
1117 /* This may seem like a weird place for this
1118 check, but it's actually the easiest place to
1119 do it. We can't do it lower on in the
1120 recursion because it's valid for pieces of a
1121 component ref to be of AGGREGATE_TYPE, as long
1122 as the outermost one is not.
1123 To avoid *that* case, we have a check for
1124 AGGREGATE_TYPE_P in insert_aux. However, that
1125 check will *not* catch this case because here
1126 it occurs in the argument list. */
1127 if (AGGREGATE_TYPE_P (TREE_TYPE (oldval)))
1128 return NULL;
1129 newval = phi_translate (find_leader (set, oldval),
1130 set, pred, phiblock);
1131 if (newval == NULL)
1132 return NULL;
/* A changed argument is recorded by its value handle.  */
1133 if (newval != oldval)
1135 listchanged = true;
1136 TREE_VALUE (newwalker) = get_value_handle (newval);
/* A changed argument list gets its own value number.  */
1140 if (listchanged)
1141 vn_lookup_or_add (newarglist, NULL);
1143 tvuses = translate_vuses_through_block (vuses, pred);
/* If any piece changed, build a pool-allocated copy of the call,
   value number it with the translated vuses, and cache the result.  */
1145 if (listchanged || (newop0 != oldop0) || (oldop2 != newop2)
1146 || vuses != tvuses)
1148 newexpr = (tree) pool_alloc (expression_node_pool);
1149 memcpy (newexpr, expr, tree_size (expr));
1150 TREE_OPERAND (newexpr, 0) = newop0 == oldop0 ? oldop0 : get_value_handle (newop0);
1151 TREE_OPERAND (newexpr, 1) = listchanged ? newarglist : oldarglist;
1152 TREE_OPERAND (newexpr, 2) = newop2 == oldop2 ? oldop2 : get_value_handle (newop2);
1153 create_tree_ann (newexpr);
1154 vn_lookup_or_add_with_vuses (newexpr, tvuses);
1155 expr = newexpr;
1156 phi_trans_add (oldexpr, newexpr, pred, tvuses);
1160 return expr;
1162 case tcc_declaration:
/* Plain declarations only need their virtual uses translated.  */
1164 VEC (tree, gc) * oldvuses = NULL;
1165 VEC (tree, gc) * newvuses = NULL;
1167 oldvuses = VALUE_HANDLE_VUSES (get_value_handle (expr));
1168 if (oldvuses)
1169 newvuses = translate_vuses_through_block (oldvuses, pred);
1171 if (oldvuses != newvuses)
1172 vn_lookup_or_add_with_vuses (expr, newvuses);
1174 phi_trans_add (oldexpr, expr, pred, newvuses);
1176 return expr;
1178 case tcc_reference:
/* Only loads of the form *x (INDIRECT_REF) or x.y (COMPONENT_REF)
   are handled here.  */
1180 tree oldop1 = TREE_OPERAND (expr, 0);
1181 tree newop1;
1182 tree newexpr;
1183 VEC (tree, gc) * oldvuses = NULL;
1184 VEC (tree, gc) * newvuses = NULL;
1186 if (TREE_CODE (expr) != INDIRECT_REF
1187 && TREE_CODE (expr) != COMPONENT_REF)
1188 return NULL;
1190 newop1 = phi_translate (find_leader (set, oldop1),
1191 set, pred, phiblock);
1192 if (newop1 == NULL)
1193 return NULL;
1195 oldvuses = VALUE_HANDLE_VUSES (get_value_handle (expr));
1196 if (oldvuses)
1197 newvuses = translate_vuses_through_block (oldvuses, pred);
/* Rebuild the reference if the operand or the vuses changed, folding
   to a constant when fully_constant_expression can simplify it.  */
1199 if (newop1 != oldop1 || newvuses != oldvuses)
1201 tree t;
1203 newexpr = pool_alloc (reference_node_pool);
1204 memcpy (newexpr, expr, tree_size (expr));
1205 TREE_OPERAND (newexpr, 0) = get_value_handle (newop1);
1207 t = fully_constant_expression (newexpr);
1209 if (t != newexpr)
1211 pool_free (reference_node_pool, newexpr);
1212 newexpr = t;
1214 else
1216 create_tree_ann (newexpr);
1217 vn_lookup_or_add_with_vuses (newexpr, newvuses);
1219 expr = newexpr;
1220 phi_trans_add (oldexpr, newexpr, pred, newvuses);
1223 return expr;
1224 break;
1226 case tcc_binary:
1227 case tcc_comparison:
/* Translate both operands; rebuild only if either changed.  */
1229 tree oldop1 = TREE_OPERAND (expr, 0);
1230 tree oldop2 = TREE_OPERAND (expr, 1);
1231 tree newop1;
1232 tree newop2;
1233 tree newexpr;
1235 newop1 = phi_translate (find_leader (set, oldop1),
1236 set, pred, phiblock);
1237 if (newop1 == NULL)
1238 return NULL;
1239 newop2 = phi_translate (find_leader (set, oldop2),
1240 set, pred, phiblock);
1241 if (newop2 == NULL)
1242 return NULL;
1243 if (newop1 != oldop1 || newop2 != oldop2)
1245 tree t;
1246 newexpr = (tree) pool_alloc (binary_node_pool);
1247 memcpy (newexpr, expr, tree_size (expr));
1248 TREE_OPERAND (newexpr, 0) = newop1 == oldop1 ? oldop1 : get_value_handle (newop1);
1249 TREE_OPERAND (newexpr, 1) = newop2 == oldop2 ? oldop2 : get_value_handle (newop2);
1250 t = fully_constant_expression (newexpr);
1251 if (t != newexpr)
1253 pool_free (binary_node_pool, newexpr);
1254 newexpr = t;
1256 else
1258 create_tree_ann (newexpr);
1259 vn_lookup_or_add (newexpr, NULL);
1261 expr = newexpr;
1262 phi_trans_add (oldexpr, newexpr, pred, NULL);
1265 return expr;
1267 case tcc_unary:
/* Same scheme as the binary case, with a single operand.  */
1269 tree oldop1 = TREE_OPERAND (expr, 0);
1270 tree newop1;
1271 tree newexpr;
1273 newop1 = phi_translate (find_leader (set, oldop1),
1274 set, pred, phiblock);
1275 if (newop1 == NULL)
1276 return NULL;
1277 if (newop1 != oldop1)
1279 tree t;
1280 newexpr = (tree) pool_alloc (unary_node_pool);
1281 memcpy (newexpr, expr, tree_size (expr));
1282 TREE_OPERAND (newexpr, 0) = get_value_handle (newop1);
1283 t = fully_constant_expression (newexpr);
1284 if (t != newexpr)
1286 pool_free (unary_node_pool, newexpr);
1287 newexpr = t;
1289 else
1291 create_tree_ann (newexpr);
1292 vn_lookup_or_add (newexpr, NULL);
1294 expr = newexpr;
1295 phi_trans_add (oldexpr, newexpr, pred, NULL);
1298 return expr;
1300 case tcc_exceptional:
/* An SSA_NAME defined by a phi in PHIBLOCK translates to the phi
   argument on the PRED edge; other SSA_NAMEs are unchanged.  */
1302 tree phi = NULL;
1303 edge e;
1304 gcc_assert (TREE_CODE (expr) == SSA_NAME);
1305 if (TREE_CODE (SSA_NAME_DEF_STMT (expr)) == PHI_NODE)
1306 phi = SSA_NAME_DEF_STMT (expr);
1307 else
1308 return expr;
1310 e = find_edge (pred, bb_for_stmt (phi));
1311 if (e)
/* Undefined phi arguments cannot be anticipated.  */
1313 if (is_undefined_value (PHI_ARG_DEF (phi, e->dest_idx)))
1314 return NULL;
1315 vn_lookup_or_add (PHI_ARG_DEF (phi, e->dest_idx), NULL);
1316 return PHI_ARG_DEF (phi, e->dest_idx);
1319 return expr;
1321 default:
1322 gcc_unreachable ();
1326 /* For each expression in SET, translate the value handles through phi nodes
1327 in PHIBLOCK using edge PHIBLOCK->PRED, and store the resulting
1328 expressions in DEST. */
1330 static void
1331 phi_translate_set (value_set_t dest, value_set_t set, basic_block pred,
1332 basic_block phiblock)
1334 value_set_node_t node;
1335 for (node = set->head;
1336 node;
1337 node = node->next)
1339 tree translated;
1341 translated = phi_translate (node->expr, set, pred, phiblock);
1343 /* Don't add constants or empty translations to the cache, since
1344 we won't look them up that way, or use the result, anyway. */
1345 if (translated && !is_gimple_min_invariant (translated))
1347 tree vh = get_value_handle (translated);
1348 VEC (tree, gc) *vuses;
1350 /* The value handle itself may also be an invariant, in
1351 which case, it has no vuses. */
1352 vuses = !is_gimple_min_invariant (vh)
1353 ? VALUE_HANDLE_VUSES (vh) : NULL;
1354 phi_trans_add (node->expr, translated, pred, vuses);
1357 if (translated != NULL)
1358 value_insert_into_set (dest, translated);
1362 /* Find the leader for a value (i.e., the name representing that
1363 value) in a given set, and return it. Return NULL if no leader is
1364 found. */
1366 static tree
1367 bitmap_find_leader (bitmap_set_t set, tree val)
1369 if (val == NULL)
1370 return NULL;
1372 if (is_gimple_min_invariant (val))
1373 return val;
1374 if (bitmap_set_contains_value (set, val))
1376 /* Rather than walk the entire bitmap of expressions, and see
1377 whether any of them has the value we are looking for, we look
1378 at the reverse mapping, which tells us the set of expressions
1379 that have a given value (IE value->expressions with that
1380 value) and see if any of those expressions are in our set.
1381 The number of expressions per value is usually significantly
1382 less than the number of expressions in the set. In fact, for
1383 large testcases, doing it this way is roughly 5-10x faster
1384 than walking the bitmap.
1385 If this is somehow a significant lose for some cases, we can
1386 choose which set to walk based on which set is smaller. */
1387 value_set_t exprset;
1388 value_set_node_t node;
1389 exprset = VALUE_HANDLE_EXPR_SET (val);
1390 for (node = exprset->head; node; node = node->next)
1392 if (TREE_CODE (node->expr) == SSA_NAME)
1394 if (bitmap_bit_p (set->expressions,
1395 SSA_NAME_VERSION (node->expr)))
1396 return node->expr;
1400 return NULL;
1404 /* Find the leader for a value (i.e., the name representing that
1405 value) in a given set, and return it. Return NULL if no leader is
1406 found. */
1408 static tree
1409 find_leader (value_set_t set, tree val)
1411 value_set_node_t node;
1413 if (val == NULL)
1414 return NULL;
1416 /* Constants represent themselves. */
1417 if (is_gimple_min_invariant (val))
1418 return val;
1420 if (set->length == 0)
1421 return NULL;
1423 if (value_exists_in_set_bitmap (set, val))
1425 for (node = set->head;
1426 node;
1427 node = node->next)
1429 if (get_value_handle (node->expr) == val)
1430 return node->expr;
1434 return NULL;
1437 /* Given the vuse representative map, MAP, and an SSA version number,
1438 ID, return the bitmap of names ID represents, or NULL, if none
1439 exists. */
1441 static bitmap
1442 get_representative (bitmap *map, int id)
1444 if (map[id] != NULL)
1445 return map[id];
1446 return NULL;
1449 /* A vuse is anticipable at the top of block x, from the bottom of the
1450 block, if it reaches the top of the block, and is not killed in the
1451 block. In effect, we are trying to see if the vuse is transparent
1452 backwards in the block. */
1454 static bool
1455 vuses_dies_in_block_x (VEC (tree, gc) *vuses, basic_block block)
1457 int i;
1458 tree vuse;
1460 for (i = 0; VEC_iterate (tree, vuses, i, vuse); i++)
1462 /* Any places where this is too conservative, are places
1463 where we created a new version and shouldn't have. */
1465 if (!bitmap_bit_p (RVUSE_IN (block), SSA_NAME_VERSION (vuse))
1466 || bitmap_bit_p (RVUSE_KILL (block), SSA_NAME_VERSION (vuse)))
1467 return true;
1469 return false;
1472 /* Determine if the expression EXPR is valid in SET. This means that
1473 we have a leader for each part of the expression (if it consists of
1474 values), or the expression is an SSA_NAME.
1476 NB: We never should run into a case where we have SSA_NAME +
1477 SSA_NAME or SSA_NAME + value. The sets valid_in_set is called on,
1478 the ANTIC sets, will only ever have SSA_NAME's or value expressions
1479 (IE VALUE1 + VALUE2, *VALUE1, VALUE1 < VALUE2) */
1481 static bool
1482 valid_in_set (value_set_t set, tree expr, basic_block block)
1484 tree vh = get_value_handle (expr);
1485 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
1487 case tcc_binary:
1488 case tcc_comparison:
/* Both operand values must have leaders in SET.  */
1490 tree op1 = TREE_OPERAND (expr, 0);
1491 tree op2 = TREE_OPERAND (expr, 1);
1492 return set_contains_value (set, op1) && set_contains_value (set, op2);
1495 case tcc_unary:
1497 tree op1 = TREE_OPERAND (expr, 0);
1498 return set_contains_value (set, op1);
1501 case tcc_expression:
/* Only calls are valid among tcc_expression codes; all call
   operands must be in SET and the call's vuses must survive
   to the top of BLOCK.  */
1503 if (TREE_CODE (expr) == CALL_EXPR)
1505 tree op0 = TREE_OPERAND (expr, 0);
1506 tree arglist = TREE_OPERAND (expr, 1);
1507 tree op2 = TREE_OPERAND (expr, 2);
1509 /* Check the non-list operands first. */
1510 if (!set_contains_value (set, op0)
1511 || (op2 && !set_contains_value (set, op2)))
1512 return false;
1514 /* Now check the operands. */
1515 for (; arglist; arglist = TREE_CHAIN (arglist))
1517 if (!set_contains_value (set, TREE_VALUE (arglist)))
1518 return false;
1520 return !vuses_dies_in_block_x (VALUE_HANDLE_VUSES (vh), block);
1522 return false;
1525 case tcc_reference:
/* A load is valid if its operand value is in SET and either the
   load is recorded as ANTIC-safe in BLOCK or its vuses survive
   the block.  NOTE(review): this extract appears to be missing a
   continuation line of the set_contains_value call below (its
   second argument) -- verify against the full file.  */
1527 if (TREE_CODE (expr) == INDIRECT_REF
1528 || TREE_CODE (expr) == COMPONENT_REF)
1530 tree op0 = TREE_OPERAND (expr, 0);
1531 if (is_gimple_min_invariant (op0)
1532 || TREE_CODE (op0) == VALUE_HANDLE)
1534 bool retval = set_contains_value (set, op0);
1535 if (retval)
1537 return set_contains_value (ANTIC_SAFE_LOADS (block),
1539 || !vuses_dies_in_block_x (VALUE_HANDLE_VUSES (vh),
1540 block);
1542 return false;
1546 return false;
1548 case tcc_exceptional:
/* SSA_NAMEs are always valid.  */
1549 gcc_assert (TREE_CODE (expr) == SSA_NAME);
1550 return true;
1552 case tcc_declaration:
/* A declaration is a memory access; its vuses must survive BLOCK.  */
1553 return !vuses_dies_in_block_x (VALUE_HANDLE_VUSES (vh), block);
1555 default:
1556 /* No other cases should be encountered. */
1557 gcc_unreachable ();
1561 /* Clean the set of expressions that are no longer valid in SET. This
1562 means expressions that are made up of values we have no leaders for
1563 in SET. */
1565 static void
1566 clean (value_set_t set, basic_block block)
1568 value_set_node_t node;
1569 value_set_node_t next;
1570 node = set->head;
1571 while (node)
1573 next = node->next;
1574 if (!valid_in_set (set, node->expr, block))
1575 set_remove (set, node->expr);
1576 node = next;
1580 static sbitmap has_abnormal_preds;
1582 /* Compute the ANTIC set for BLOCK.
1584 If succs(BLOCK) > 1 then
1585 ANTIC_OUT[BLOCK] = intersection of ANTIC_IN[b] for all succ(BLOCK)
1586 else if succs(BLOCK) == 1 then
1587 ANTIC_OUT[BLOCK] = phi_translate (ANTIC_IN[succ(BLOCK)])
1589 ANTIC_IN[BLOCK] = clean(ANTIC_OUT[BLOCK] U EXP_GEN[BLOCK] - TMP_GEN[BLOCK])
1591 XXX: It would be nice to either write a set_clear, and use it for
1592 ANTIC_OUT, or to mark the antic_out set as deleted at the end
1593 of this routine, so that the pool can hand the same memory back out
1594 again for the next ANTIC_OUT. */
1596 static bool
1597 compute_antic_aux (basic_block block, bool block_has_abnormal_pred_edge)
1599 basic_block son;
1600 bool changed = false;
1601 value_set_t S, old, ANTIC_OUT;
1602 value_set_node_t node;
1604 ANTIC_OUT = S = NULL;
1606 /* If any edges from predecessors are abnormal, antic_in is empty,
1607 so do nothing. */
1608 if (block_has_abnormal_pred_edge)
1609 goto maybe_dump_sets;
/* Save the old ANTIC_IN so we can detect whether this pass changed it.  */
1611 old = set_new (false);
1612 set_copy (old, ANTIC_IN (block));
1613 ANTIC_OUT = set_new (true);
1615 /* If the block has no successors, ANTIC_OUT is empty. */
1616 if (EDGE_COUNT (block->succs) == 0)
1618 /* If we have one successor, we could have some phi nodes to
1619 translate through. */
1620 else if (single_succ_p (block))
1622 phi_translate_set (ANTIC_OUT, ANTIC_IN (single_succ (block)),
1623 block, single_succ (block));
1625 /* If we have multiple successors, we take the intersection of all of
1626 them. */
1627 else
1629 VEC(basic_block, heap) * worklist;
1630 edge e;
1631 size_t i;
1632 basic_block bprime, first;
1633 edge_iterator ei;
1635 worklist = VEC_alloc (basic_block, heap, EDGE_COUNT (block->succs));
1636 FOR_EACH_EDGE (e, ei, block->succs)
1637 VEC_quick_push (basic_block, worklist, e->dest);
1638 first = VEC_index (basic_block, worklist, 0);
1639 set_copy (ANTIC_OUT, ANTIC_IN (first));
/* Remove from ANTIC_OUT every value not anticipated by all the
   remaining successors.  */
1641 for (i = 1; VEC_iterate (basic_block, worklist, i, bprime); i++)
1643 node = ANTIC_OUT->head;
1644 while (node)
1646 tree val;
1647 value_set_node_t next = node->next;
1649 val = get_value_handle (node->expr);
1650 if (!set_contains_value (ANTIC_IN (bprime), val))
1651 set_remove (ANTIC_OUT, node->expr);
1652 node = next;
1655 VEC_free (basic_block, heap, worklist);
1658 /* Generate ANTIC_OUT - TMP_GEN. */
1659 S = bitmap_set_subtract_from_value_set (ANTIC_OUT, TMP_GEN (block), false);
1661 /* Start ANTIC_IN with EXP_GEN - TMP_GEN */
1662 ANTIC_IN (block) = bitmap_set_subtract_from_value_set (EXP_GEN (block),
1663 TMP_GEN (block),
1664 true);
1666 /* Then union in the ANTIC_OUT - TMP_GEN values,
1667 to get ANTIC_OUT U EXP_GEN - TMP_GEN */
1668 for (node = S->head; node; node = node->next)
1669 value_insert_into_set (ANTIC_IN (block), node->expr);
/* Prune expressions whose component values have no leader here.  */
1671 clean (ANTIC_IN (block), block);
1672 if (!set_equal (old, ANTIC_IN (block)))
1673 changed = true;
1675 maybe_dump_sets:
1676 if (dump_file && (dump_flags & TDF_DETAILS))
1678 if (ANTIC_OUT)
1679 print_value_set (dump_file, ANTIC_OUT, "ANTIC_OUT", block->index);
1681 if (ANTIC_SAFE_LOADS (block))
1682 print_value_set (dump_file, ANTIC_SAFE_LOADS (block),
1683 "ANTIC_SAFE_LOADS", block->index);
1684 print_value_set (dump_file, ANTIC_IN (block), "ANTIC_IN", block->index);
1686 if (S)
1687 print_value_set (dump_file, S, "S", block->index);
/* Recurse over the postdominator tree; the ORed result reports
   whether any block's ANTIC_IN changed this iteration.  */
1690 for (son = first_dom_son (CDI_POST_DOMINATORS, block);
1691 son;
1692 son = next_dom_son (CDI_POST_DOMINATORS, son))
1694 changed |= compute_antic_aux (son,
1695 TEST_BIT (has_abnormal_preds, son->index));
1697 return changed;
1700 /* Compute ANTIC sets. */
1702 static void
1703 compute_antic (void)
1705 bool changed = true;
1706 int num_iterations = 0;
1707 basic_block block;
1709 /* If any predecessor edges are abnormal, we punt, so antic_in is empty.
1710 We pre-build the map of blocks with incoming abnormal edges here. */
1711 has_abnormal_preds = sbitmap_alloc (last_basic_block);
1712 sbitmap_zero (has_abnormal_preds);
1713 FOR_EACH_BB (block)
1715 edge_iterator ei;
1716 edge e;
1718 FOR_EACH_EDGE (e, ei, block->preds)
1719 if (e->flags & EDGE_ABNORMAL)
1721 SET_BIT (has_abnormal_preds, block->index);
1722 break;
1725 /* While we are here, give empty ANTIC_IN sets to each block. */
1726 ANTIC_IN (block) = set_new (true);
1728 /* At the exit block we anticipate nothing. */
1729 ANTIC_IN (EXIT_BLOCK_PTR) = set_new (true);
1731 while (changed)
1733 num_iterations++;
1734 changed = false;
1735 changed = compute_antic_aux (EXIT_BLOCK_PTR, false);
1738 sbitmap_free (has_abnormal_preds);
1740 if (dump_file && (dump_flags & TDF_STATS))
1741 fprintf (dump_file, "compute_antic required %d iterations\n", num_iterations);
1744 /* Print the names represented by the bitmap NAMES, to the file OUT. */
1745 static void
1746 dump_bitmap_of_names (FILE *out, bitmap names)
1748 bitmap_iterator bi;
1749 unsigned int i;
1751 fprintf (out, " { ");
1752 EXECUTE_IF_SET_IN_BITMAP (names, 0, i, bi)
1754 print_generic_expr (out, ssa_name (i), 0);
1755 fprintf (out, " ");
1757 fprintf (out, "}\n");
1760 /* Compute a set of representative vuse versions for each phi. This
1761 is so we can compute conservative kill sets in terms of all vuses
1762 that are killed, instead of continually walking chains.
1764 We also have to be able to kill all names associated with a phi when
1765 the phi dies in order to ensure we don't generate overlapping
1766 live ranges, which are not allowed in virtual SSA. */
1768 static bitmap *vuse_names;
1769 static void
1770 compute_vuse_representatives (void)
1772 tree phi;
1773 basic_block bb;
1774 VEC (tree, heap) *phis = NULL;
1775 bool changed = true;
1776 size_t i;
/* Collect every virtual (non-register) phi in the function.  */
1778 FOR_EACH_BB (bb)
1780 for (phi = phi_nodes (bb);
1781 phi;
1782 phi = PHI_CHAIN (phi))
1783 if (!is_gimple_reg (PHI_RESULT (phi)))
1784 VEC_safe_push (tree, heap, phis, phi);
/* Iterate to a fixed point, unioning each phi argument's
   representative names into the set for the phi result.  */
1787 while (changed)
1789 changed = false;
1791 for (i = 0; VEC_iterate (tree, phis, i, phi); i++)
1793 size_t ver = SSA_NAME_VERSION (PHI_RESULT (phi));
1794 use_operand_p usep;
1795 ssa_op_iter iter;
/* Lazily create the set for this phi result, seeded with itself.  */
1797 if (vuse_names[ver] == NULL)
1799 vuse_names[ver] = BITMAP_ALLOC (&grand_bitmap_obstack);
1800 bitmap_set_bit (vuse_names[ver], ver);
1802 FOR_EACH_PHI_ARG (usep, phi, iter, SSA_OP_ALL_USES)
1804 tree use = USE_FROM_PTR (usep);
1805 bitmap usebitmap = get_representative (vuse_names,
1806 SSA_NAME_VERSION (use));
/* Arguments with their own representative set contribute the whole
   set; plain arguments contribute just their version number.  */
1807 if (usebitmap != NULL)
1809 changed |= bitmap_ior_into (vuse_names[ver],
1810 usebitmap);
1812 else
1814 changed |= !bitmap_bit_p (vuse_names[ver],
1815 SSA_NAME_VERSION (use));
1816 if (changed)
1817 bitmap_set_bit (vuse_names[ver],
1818 SSA_NAME_VERSION (use));
/* Dump the final representative sets.  */
1824 if (dump_file && (dump_flags & TDF_DETAILS))
1825 for (i = 0; VEC_iterate (tree, phis, i, phi); i++)
1827 bitmap reps = get_representative (vuse_names,
1828 SSA_NAME_VERSION (PHI_RESULT (phi)));
1829 if (reps)
1831 print_generic_expr (dump_file, PHI_RESULT (phi), 0);
1832 fprintf (dump_file, " represents ");
1833 dump_bitmap_of_names (dump_file, reps);
1836 VEC_free (tree, heap, phis);
1839 /* Compute reaching vuses and antic safe loads. RVUSE computation
1840 is a small bit of iterative dataflow to determine what virtual uses
1841 reach what blocks. Because we can't generate overlapping virtual
1842 uses, and virtual uses *do* actually die, this ends up being faster
1843 in most cases than continually walking the virtual use/def chains
1844 to determine whether we are inside a block where a given virtual is
1845 still available to be used.
1847 ANTIC_SAFE_LOADS are those loads that actually occur before any kill to
1848 their vuses in the block, and thus, are safe at the top of the
1849 block.
1851 An example:
1853 <block begin>
1854 b = *a
1855 *a = 9
1856 <block end>
1858 b = *a is an antic safe load because it still safe to consider it
1859 ANTIC at the top of the block.
1861 We currently compute a conservative approximation to
1862 ANTIC_SAFE_LOADS. We compute those loads that occur before *any*
1863 stores in the block. This is not because it is difficult to
1864 compute the precise answer, but because it is expensive. More
1865 testing is necessary to determine whether it is worth computing the
1866 precise answer. */
1868 static void
1869 compute_rvuse_and_antic_safe (void)
1872 size_t i;
1873 tree phi;
1874 basic_block bb;
1875 int *postorder;
1876 bool changed = true;
1877 unsigned int *first_store_uid;
/* first_store_uid[B] holds the uid of the first statement in B that
   has any virtual operand; 0 means none was seen.  */
1879 first_store_uid = xcalloc (n_basic_blocks, sizeof (unsigned int));
1881 compute_vuse_representatives ();
/* Allocate the per-block dataflow sets.  */
1883 FOR_ALL_BB (bb)
1885 RVUSE_IN (bb) = BITMAP_ALLOC (&grand_bitmap_obstack);
1886 RVUSE_GEN (bb) = BITMAP_ALLOC (&grand_bitmap_obstack);
1887 RVUSE_KILL (bb) = BITMAP_ALLOC (&grand_bitmap_obstack);
1888 RVUSE_OUT (bb) = BITMAP_ALLOC (&grand_bitmap_obstack);
1889 ANTIC_SAFE_LOADS (bb) = NULL;
1892 /* Mark live on entry */
1893 for (i = 0; i < num_ssa_names; i++)
1895 tree name = ssa_name (i);
/* Virtual names whose definition is the empty (default) statement
   are live on function entry.  */
1896 if (name && !is_gimple_reg (name)
1897 && IS_EMPTY_STMT (SSA_NAME_DEF_STMT (name)))
1898 bitmap_set_bit (RVUSE_OUT (ENTRY_BLOCK_PTR),
1899 SSA_NAME_VERSION (name));
1902 /* Compute local sets for reaching vuses.
1903 GEN(block) = generated in block and not locally killed.
1904 KILL(block) = set of vuses killed in block.
1907 FOR_EACH_BB (bb)
1909 block_stmt_iterator bsi;
1910 ssa_op_iter iter;
1911 def_operand_p defp;
1912 use_operand_p usep;
1914 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
1916 tree stmt = bsi_stmt (bsi);
/* Remember the first statement in the block carrying any virtual
   operand; used below for the ANTIC_SAFE_LOADS computation.  */
1918 if (first_store_uid[bb->index] == 0
1919 && !ZERO_SSA_OPERANDS (stmt, SSA_OP_VMAYUSE | SSA_OP_VMAYDEF
1920 | SSA_OP_VMUSTDEF | SSA_OP_VMUSTKILL))
1922 first_store_uid[bb->index] = stmt_ann (stmt)->uid;
/* A killed/used virtual name (and everything its representative
   stands for) leaves GEN and enters KILL.  */
1926 FOR_EACH_SSA_USE_OPERAND (usep, stmt, iter, SSA_OP_VIRTUAL_KILLS
1927 | SSA_OP_VMAYUSE)
1929 tree use = USE_FROM_PTR (usep);
1930 bitmap repbit = get_representative (vuse_names,
1931 SSA_NAME_VERSION (use));
1932 if (repbit != NULL)
1934 bitmap_and_compl_into (RVUSE_GEN (bb), repbit);
1935 bitmap_ior_into (RVUSE_KILL (bb), repbit);
1937 else
1939 bitmap_set_bit (RVUSE_KILL (bb), SSA_NAME_VERSION (use));
1940 bitmap_clear_bit (RVUSE_GEN (bb), SSA_NAME_VERSION (use));
/* Every virtual definition in the block is generated here.  */
1943 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_VIRTUAL_DEFS)
1945 tree def = DEF_FROM_PTR (defp);
1946 bitmap_set_bit (RVUSE_GEN (bb), SSA_NAME_VERSION (def));
/* Treat each virtual phi result as generated at the end of every
   predecessor block.  */
1951 FOR_EACH_BB (bb)
1953 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
1955 if (!is_gimple_reg (PHI_RESULT (phi)))
1957 edge e;
1958 edge_iterator ei;
1960 tree def = PHI_RESULT (phi);
1961 /* In reality, the PHI result is generated at the end of
1962 each predecessor block. This will make the value
1963 LVUSE_IN for the bb containing the PHI, which is
1964 correct. */
1965 FOR_EACH_EDGE (e, ei, bb->preds)
1966 bitmap_set_bit (RVUSE_GEN (e->src), SSA_NAME_VERSION (def));
1971 /* Solve reaching vuses.
1973 RVUSE_IN[BB] = Union of RVUSE_OUT of predecessors.
1974 RVUSE_OUT[BB] = RVUSE_GEN[BB] U (RVUSE_IN[BB] - RVUSE_KILL[BB])
1976 postorder = XNEWVEC (int, n_basic_blocks - NUM_FIXED_BLOCKS);
1977 pre_and_rev_post_order_compute (NULL, postorder, false);
/* Iterate the forward dataflow problem to a fixed point.  */
1979 changed = true;
1980 while (changed)
1982 int j;
1983 changed = false;
1984 for (j = 0; j < n_basic_blocks - NUM_FIXED_BLOCKS; j++)
1986 edge e;
1987 edge_iterator ei;
1988 bb = BASIC_BLOCK (postorder[j]);
1990 FOR_EACH_EDGE (e, ei, bb->preds)
1991 bitmap_ior_into (RVUSE_IN (bb), RVUSE_OUT (e->src));
1993 changed |= bitmap_ior_and_compl (RVUSE_OUT (bb),
1994 RVUSE_GEN (bb),
1995 RVUSE_IN (bb),
1996 RVUSE_KILL (bb));
1999 free (postorder);
2001 if (dump_file && (dump_flags & TDF_DETAILS))
2003 FOR_ALL_BB (bb)
2005 fprintf (dump_file, "RVUSE_IN (%d) =", bb->index);
2006 dump_bitmap_of_names (dump_file, RVUSE_IN (bb));
2008 fprintf (dump_file, "RVUSE_KILL (%d) =", bb->index);
2009 dump_bitmap_of_names (dump_file, RVUSE_KILL (bb));
2011 fprintf (dump_file, "RVUSE_GEN (%d) =", bb->index);
2012 dump_bitmap_of_names (dump_file, RVUSE_GEN (bb));
2014 fprintf (dump_file, "RVUSE_OUT (%d) =", bb->index);
2015 dump_bitmap_of_names (dump_file, RVUSE_OUT (bb));
/* A load in EXP_GEN is ANTIC-safe when its defining statement occurs
   before the first statement with a virtual operand in the block
   (or is a phi).  Blocks with no kills need no such marking.  */
2019 FOR_EACH_BB (bb)
2021 value_set_node_t node;
2022 if (bitmap_empty_p (RVUSE_KILL (bb)))
2023 continue;
2025 for (node = EXP_GEN (bb)->head; node; node = node->next)
2027 if (REFERENCE_CLASS_P (node->expr))
2029 tree vh = get_value_handle (node->expr);
2030 tree maybe = bitmap_find_leader (AVAIL_OUT (bb), vh);
2032 if (maybe)
2034 tree def = SSA_NAME_DEF_STMT (maybe);
2036 if (bb_for_stmt (def) != bb)
2037 continue;
2039 if (TREE_CODE (def) == PHI_NODE
2040 || stmt_ann (def)->uid < first_store_uid[bb->index])
2042 if (ANTIC_SAFE_LOADS (bb) == NULL)
2043 ANTIC_SAFE_LOADS (bb) = set_new (true);
2044 value_insert_into_set (ANTIC_SAFE_LOADS (bb),
2045 node->expr);
2051 free (first_store_uid);
2054 /* Return true if we can value number the call in STMT. This is true
2055 if we have a pure or constant call. */
2057 static bool
2058 can_value_number_call (tree stmt)
2060 tree call = get_call_expr_in (stmt);
2062 if (call_expr_flags (call) & (ECF_PURE | ECF_CONST))
2063 return true;
2064 return false;
2067 /* Return true if OP is a tree which we can perform value numbering
2068 on. */
2070 static bool
2071 can_value_number_operation (tree op)
2073 return UNARY_CLASS_P (op)
2074 || BINARY_CLASS_P (op)
2075 || COMPARISON_CLASS_P (op)
2076 || REFERENCE_CLASS_P (op)
2077 || (TREE_CODE (op) == CALL_EXPR
2078 && can_value_number_call (op));
2082 /* Return true if OP is a tree which we can perform PRE on.
2083 This may not match the operations we can value number, but in
2084 a perfect world would. */
2086 static bool
2087 can_PRE_operation (tree op)
2089 return UNARY_CLASS_P (op)
2090 || BINARY_CLASS_P (op)
2091 || COMPARISON_CLASS_P (op)
2092 || TREE_CODE (op) == INDIRECT_REF
2093 || TREE_CODE (op) == COMPONENT_REF
2094 || TREE_CODE (op) == CALL_EXPR;
2098 /* Inserted expressions are placed onto this worklist, which is used
2099 for performing quick dead code elimination of insertions we made
2100 that didn't turn out to be necessary. */
2101 static VEC(tree,heap) *inserted_exprs;
2103 /* Pool allocated fake store expressions are placed onto this
2104 worklist, which, after performing dead code elimination, is walked
2105 to see which expressions need to be put into GC'able memory */
2106 static VEC(tree, heap) *need_creation;
2108 /* For COMPONENT_REF's, we can't have any intermediates for the
2109 COMPONENT_REF or INDIRECT_REF portion, because we'd end up with
2110 trying to rename aggregates into ssa form directly, which is a no no.
2113 Thus, this routine doesn't create temporaries, it just builds a
2114 single access expression for the array, calling
2115 find_or_generate_expression to build the innermost pieces.
2117 This function is a subroutine of create_expression_by_pieces, and
2118 should not be called on its own unless you really know what you
2119 are doing.
2121 static tree
2122 create_component_ref_by_pieces (basic_block block, tree expr, tree stmts)
2124 tree genop = expr;
2125 tree folded;
2127 if (TREE_CODE (genop) == VALUE_HANDLE)
2129 tree found = bitmap_find_leader (AVAIL_OUT (block), expr);
2130 if (found)
2131 return found;
2134 if (TREE_CODE (genop) == VALUE_HANDLE)
2135 genop = VALUE_HANDLE_EXPR_SET (expr)->head->expr;
2137 switch TREE_CODE (genop)
2139 case COMPONENT_REF:
2141 tree op0;
2142 tree op1;
2143 op0 = create_component_ref_by_pieces (block,
2144 TREE_OPERAND (genop, 0),
2145 stmts);
2146 op1 = VALUE_HANDLE_EXPR_SET (TREE_OPERAND (genop, 1))->head->expr;
2147 folded = fold_build3 (COMPONENT_REF, TREE_TYPE (genop), op0, op1,
2148 NULL_TREE);
2149 return folded;
2151 break;
2152 case INDIRECT_REF:
2154 tree op1 = TREE_OPERAND (genop, 0);
2155 tree genop1 = find_or_generate_expression (block, op1, stmts);
2157 folded = fold_build1 (TREE_CODE (genop), TREE_TYPE (genop),
2158 genop1);
2159 return folded;
2161 break;
2162 case VAR_DECL:
2163 case PARM_DECL:
2164 case RESULT_DECL:
2165 case SSA_NAME:
2166 return genop;
2167 default:
2168 gcc_unreachable ();
2171 return NULL_TREE;
2174 /* Find a leader for an expression, or generate one using
2175 create_expression_by_pieces if it's ANTIC but
2176 complex.
2177 BLOCK is the basic_block we are looking for leaders in.
2178 EXPR is the expression to find a leader or generate for.
2179 STMTS is the statement list to put the inserted expressions on.
2180 Returns the SSA_NAME of the LHS of the generated expression or the
2181 leader. */
2183 static tree
2184 find_or_generate_expression (basic_block block, tree expr, tree stmts)
2186 tree genop = bitmap_find_leader (AVAIL_OUT (block), expr);
2188 /* If it's still NULL, it must be a complex expression, so generate
2189 it recursively. */
2190 if (genop == NULL)
2192 genop = VALUE_HANDLE_EXPR_SET (expr)->head->expr;
2194 gcc_assert (can_PRE_operation (genop));
2195 genop = create_expression_by_pieces (block, genop, stmts);
2197 return genop;
/* Scratch bit used to flag statements that must be kept; reuses the
   asm_written_flag bit on statement nodes.  The argument and the
   expansion are parenthesized so the macro is safe with any
   expression argument and still usable as an lvalue.  */
#define NECESSARY(stmt) ((stmt)->common.asm_written_flag)
2201 /* Create an expression in pieces, so that we can handle very complex
2202 expressions that may be ANTIC, but not necessary GIMPLE.
2203 BLOCK is the basic block the expression will be inserted into,
2204 EXPR is the expression to insert (in value form)
2205 STMTS is a statement list to append the necessary insertions into.
2207 This function will die if we hit some value that shouldn't be
2208 ANTIC but is (IE there is no leader for it, or its components).
2209 This function may also generate expressions that are themselves
2210 partially or fully redundant. Those that are will be either made
2211 fully redundant during the next iteration of insert (for partially
2212 redundant ones), or eliminated by eliminate (for fully redundant
2213 ones). */
2215 static tree
2216 create_expression_by_pieces (basic_block block, tree expr, tree stmts)
2218 tree temp, name;
2219 tree folded, forced_stmts, newexpr;
2220 tree v;
2221 tree_stmt_iterator tsi;
/* Rebuild EXPR in GENERIC form: for each operand (a value handle) we
   find an available leader, or recursively generate one.  */
2223 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
2225 case tcc_expression:
2227 tree op0, op2;
2228 tree arglist;
2229 tree genop0, genop2;
2230 tree genarglist;
2231 tree walker, genwalker;
/* The only tcc_expression we PRE is a call: operand 0 is the callee,
   operand 1 the argument TREE_LIST, operand 2 the static chain.  */
2233 gcc_assert (TREE_CODE (expr) == CALL_EXPR);
2234 genop2 = NULL;
2236 op0 = TREE_OPERAND (expr, 0);
2237 arglist = TREE_OPERAND (expr, 1);
2238 op2 = TREE_OPERAND (expr, 2);
2240 genop0 = find_or_generate_expression (block, op0, stmts);
/* Generate a leader for every argument in the list.  */
2241 genarglist = copy_list (arglist);
2242 for (walker = arglist, genwalker = genarglist;
2243 genwalker && walker;
2244 genwalker = TREE_CHAIN (genwalker), walker = TREE_CHAIN (walker))
2246 TREE_VALUE (genwalker)
2247 = find_or_generate_expression (block, TREE_VALUE (walker),
2248 stmts);
2251 if (op2)
2252 genop2 = find_or_generate_expression (block, op2, stmts);
2253 folded = fold_build3 (TREE_CODE (expr), TREE_TYPE (expr),
2254 genop0, genarglist, genop2);
2255 break;
2259 break;
2260 case tcc_reference:
/* COMPONENT_REFs are rebuilt field by field; any other reference we
   can PRE here has a single operand.  */
2262 if (TREE_CODE (expr) == COMPONENT_REF)
2264 folded = create_component_ref_by_pieces (block, expr, stmts);
2266 else
2268 tree op1 = TREE_OPERAND (expr, 0);
2269 tree genop1 = find_or_generate_expression (block, op1, stmts);
2271 folded = fold_build1 (TREE_CODE (expr), TREE_TYPE (expr),
2272 genop1);
2274 break;
2277 case tcc_binary:
2278 case tcc_comparison:
2280 tree op1 = TREE_OPERAND (expr, 0);
2281 tree op2 = TREE_OPERAND (expr, 1);
2282 tree genop1 = find_or_generate_expression (block, op1, stmts);
2283 tree genop2 = find_or_generate_expression (block, op2, stmts);
2284 folded = fold_build2 (TREE_CODE (expr), TREE_TYPE (expr),
2285 genop1, genop2);
2286 break;
2289 case tcc_unary:
2291 tree op1 = TREE_OPERAND (expr, 0);
2292 tree genop1 = find_or_generate_expression (block, op1, stmts);
2293 folded = fold_build1 (TREE_CODE (expr), TREE_TYPE (expr),
2294 genop1);
2295 break;
2298 default:
2299 gcc_unreachable ();
2302 /* Force the generated expression to be a sequence of GIMPLE
2303 statements.
2304 We have to call unshare_expr because force_gimple_operand may
2305 modify the tree we pass to it. */
2306 newexpr = force_gimple_operand (unshare_expr (folded), &forced_stmts,
2307 false, NULL);
2309 /* If we have any intermediate expressions to the value sets, add them
2310 to the value sets and chain them on in the instruction stream. */
2311 if (forced_stmts)
2313 tsi = tsi_start (forced_stmts)
2314 for (; !tsi_end_p (tsi); tsi_next (&tsi))
2316 tree stmt = tsi_stmt (tsi);
2317 tree forcedname = TREE_OPERAND (stmt, 0);
2318 tree forcedexpr = TREE_OPERAND (stmt, 1);
/* Each forced statement is itself a new computation: value-number it
   and make its LHS available in this block.  */
2319 tree val = vn_lookup_or_add (forcedexpr, NULL);
2321 VEC_safe_push (tree, heap, inserted_exprs, stmt);
2322 vn_add (forcedname, val);
2323 bitmap_value_replace_in_set (NEW_SETS (block), forcedname);
2324 bitmap_value_replace_in_set (AVAIL_OUT (block), forcedname);
2325 mark_new_vars_to_rename (stmt);
2327 tsi = tsi_last (stmts);
2328 tsi_link_after (&tsi, forced_stmts, TSI_CONTINUE_LINKING);
2331 /* Build and insert the assignment of the end result to the temporary
2332 that we will return. */
/* Reuse the previous temporary when the type matches, to limit the
   number of temporaries created.  */
2333 if (!pretemp || TREE_TYPE (expr) != TREE_TYPE (pretemp))
2335 pretemp = create_tmp_var (TREE_TYPE (expr), "pretmp");
2336 get_var_ann (pretemp);
2339 temp = pretemp;
2340 add_referenced_tmp_var (temp);
2342 if (TREE_CODE (TREE_TYPE (expr)) == COMPLEX_TYPE)
2343 DECL_COMPLEX_GIMPLE_REG_P (temp) = 1;
2345 newexpr = build2 (MODIFY_EXPR, TREE_TYPE (expr), temp, newexpr);
2346 name = make_ssa_name (temp, newexpr);
2347 TREE_OPERAND (newexpr, 0) = name;
2348 NECESSARY (newexpr) = 0;
2350 tsi = tsi_last (stmts);
2351 tsi_link_after (&tsi, newexpr, TSI_CONTINUE_LINKING);
2352 VEC_safe_push (tree, heap, inserted_exprs, newexpr);
2353 mark_new_vars_to_rename (newexpr);
2355 /* Add a value handle to the temporary.
2356 The value may already exist in either NEW_SETS, or AVAIL_OUT, because
2357 we are creating the expression by pieces, and this particular piece of
2358 the expression may have been represented. There is no harm in replacing
2359 here. */
2360 v = get_value_handle (expr);
2361 vn_add (name, v);
2362 bitmap_value_replace_in_set (NEW_SETS (block), name);
2363 bitmap_value_replace_in_set (AVAIL_OUT (block), name);
2365 pre_stats.insertions++;
2366 if (dump_file && (dump_flags & TDF_DETAILS))
2368 fprintf (dump_file, "Inserted ");
2369 print_generic_expr (dump_file, newexpr, 0);
2370 fprintf (dump_file, " in predecessor %d\n", block->index);
2373 return name;
2376 /* Insert the to-be-made-available values of NODE for each
2377 predecessor, stored in AVAIL, into the predecessors of BLOCK, and
2378 merge the result with a phi node, given the same value handle as
2379 NODE. Return true if we have inserted new stuff. */
2381 static bool
2382 insert_into_preds_of_block (basic_block block, value_set_node_t node,
2383 tree *avail)
2385 tree val = get_value_handle (node->expr);
2386 edge pred;
2387 bool insertions = false;
2388 bool nophi = false;
2389 basic_block bprime;
2390 tree eprime;
2391 edge_iterator ei;
/* AVAIL is indexed by predecessor block index; each entry is the
   expression that makes VAL available on that edge.  */
2392 tree type = TREE_TYPE (avail[EDGE_PRED (block, 0)->src->index]);
2393 tree temp;
2395 if (dump_file && (dump_flags & TDF_DETAILS))
2397 fprintf (dump_file, "Found partial redundancy for expression ");
2398 print_generic_expr (dump_file, node->expr, 0);
2399 fprintf (dump_file, " (");
2400 print_generic_expr (dump_file, val, 0);
2401 fprintf (dump_file, ")");
2402 fprintf (dump_file, "\n");
2405 /* Make sure we aren't creating an induction variable. */
2406 if (block->loop_depth > 0 && EDGE_COUNT (block->preds) == 2
2407 && TREE_CODE_CLASS (TREE_CODE (node->expr)) != tcc_reference )
2409 bool firstinsideloop = false;
2410 bool secondinsideloop = false;
2411 firstinsideloop = flow_bb_inside_loop_p (block->loop_father,
2412 EDGE_PRED (block, 0)->src);
2413 secondinsideloop = flow_bb_inside_loop_p (block->loop_father,
2414 EDGE_PRED (block, 1)->src);
2415 /* Induction variables only have one edge inside the loop. */
2416 if (firstinsideloop ^ secondinsideloop)
2418 if (dump_file && (dump_flags & TDF_DETAILS))
2419 fprintf (dump_file, "Skipping insertion of phi for partial redundancy: Looks like an induction variable\n")
/* Still perform the insertions in the predecessors below, but
   suppress the merging PHI node.  */
2420 nophi = true;
2425 /* Make the necessary insertions. */
2426 FOR_EACH_EDGE (pred, ei, block->preds)
2428 tree stmts = alloc_stmt_list ();
2429 tree builtexpr;
2430 bprime = pred->src;
2431 eprime = avail[bprime->index];
/* Predecessors that already compute the value keep their leader; the
   others get the expression generated (inserted on the edge) here.  */
2433 if (can_PRE_operation (eprime))
2435 #ifdef ENABLE_CHECKING
2436 tree vh;
2438 /* eprime may be an invariant. */
2439 vh = TREE_CODE (eprime) == VALUE_HANDLE
2440 ? eprime
2441 : get_value_handle (eprime);
2443 /* ensure that the virtual uses we need reach our block. */
2444 if (TREE_CODE (vh) == VALUE_HANDLE)
2446 int i;
2447 tree vuse;
2448 for (i = 0;
2449 VEC_iterate (tree, VALUE_HANDLE_VUSES (vh), i, vuse);
2450 i++)
2452 size_t id = SSA_NAME_VERSION (vuse);
2453 gcc_assert (bitmap_bit_p (RVUSE_OUT (bprime), id)
2454 || IS_EMPTY_STMT (SSA_NAME_DEF_STMT (vuse)));
2457 #endif
2458 builtexpr = create_expression_by_pieces (bprime,
2459 eprime,
2460 stmts);
2461 bsi_insert_on_edge (pred, stmts);
2462 avail[bprime->index] = builtexpr;
2463 insertions = true;
2466 /* If we didn't want a phi node, and we made insertions, we still have
2467 inserted new stuff, and thus return true. If we didn't want a phi node,
2468 and didn't make insertions, we haven't added anything new, so return
2469 false. */
2470 if (nophi && insertions)
2471 return true;
2472 else if (nophi && !insertions)
2473 return false;
2475 /* Now build a phi for the new variable. */
/* As with pretemp, reuse the previous PHI temporary when the type
   matches.  */
2476 if (!prephitemp || TREE_TYPE (prephitemp) != type)
2478 prephitemp = create_tmp_var (type, "prephitmp");
2479 get_var_ann (prephitemp);
2482 temp = prephitemp;
2483 add_referenced_tmp_var (temp);
2485 if (TREE_CODE (type) == COMPLEX_TYPE)
2486 DECL_COMPLEX_GIMPLE_REG_P (temp) = 1;
2487 temp = create_phi_node (temp, block);
2489 NECESSARY (temp) = 0;
2490 VEC_safe_push (tree, heap, inserted_exprs, temp);
2491 FOR_EACH_EDGE (pred, ei, block->preds)
2492 add_phi_arg (temp, avail[pred->src->index], pred);
2494 vn_add (PHI_RESULT (temp), val);
2496 /* The value should *not* exist in PHI_GEN, or else we wouldn't be doing
2497 this insertion, since we test for the existence of this value in PHI_GEN
2498 before proceeding with the partial redundancy checks in insert_aux.
2500 The value may exist in AVAIL_OUT, in particular, it could be represented
2501 by the expression we are trying to eliminate, in which case we want the
2502 replacement to occur. If it's not existing in AVAIL_OUT, we want it
2503 inserted there.
2505 Similarly, to the PHI_GEN case, the value should not exist in NEW_SETS of
2506 this block, because if it did, it would have existed in our dominator's
2507 AVAIL_OUT, and would have been skipped due to the full redundancy check.
2510 bitmap_insert_into_set (PHI_GEN (block),
2511 PHI_RESULT (temp));
2512 bitmap_value_replace_in_set (AVAIL_OUT (block),
2513 PHI_RESULT (temp));
2514 bitmap_insert_into_set (NEW_SETS (block),
2515 PHI_RESULT (temp));
2517 if (dump_file && (dump_flags & TDF_DETAILS))
2519 fprintf (dump_file, "Created phi ");
2520 print_generic_expr (dump_file, temp, 0);
2521 fprintf (dump_file, " in block %d\n", block->index);
2523 pre_stats.phis++;
2524 return true;
2529 /* Perform insertion of partially redundant values.
2530 For BLOCK, do the following:
2531 1. Propagate the NEW_SETS of the dominator into the current block.
2532 If the block has multiple predecessors,
2533 2a. Iterate over the ANTIC expressions for the block to see if
2534 any of them are partially redundant.
2535 2b. If so, insert them into the necessary predecessors to make
2536 the expression fully redundant.
2537 2c. Insert a new PHI merging the values of the predecessors.
2538 2d. Insert the new PHI, and the new expressions, into the
2539 NEW_SETS set.
2540 3. Recursively call ourselves on the dominator children of BLOCK.
2544 static bool
2545 insert_aux (basic_block block)
2547 basic_block son;
2548 bool new_stuff = false;
2550 if (block)
2552 basic_block dom;
2553 dom = get_immediate_dominator (CDI_DOMINATORS, block);
2554 if (dom)
2556 unsigned i;
2557 bitmap_iterator bi;
/* Step 1: propagate the dominator's NEW_SETS into this block.  */
2558 bitmap_set_t newset = NEW_SETS (dom);
2559 if (newset)
2561 /* Note that we need to value_replace both NEW_SETS and
2562 AVAIL_OUT: in either set the value may currently be
2563 represented by some non-simple expression that we want
2564 to replace with the new SSA name. */
2565 EXECUTE_IF_SET_IN_BITMAP (newset->expressions, 0, i, bi)
2567 bitmap_value_replace_in_set (NEW_SETS (block), ssa_name (i));
2568 bitmap_value_replace_in_set (AVAIL_OUT (block), ssa_name (i));
/* Step 2: only merge points (more than one predecessor) can expose
   partial redundancies.  */
2571 if (!single_pred_p (block))
2573 value_set_node_t node;
2574 for (node = ANTIC_IN (block)->head;
2575 node;
2576 node = node->next)
2578 if (can_PRE_operation (node->expr)
2579 && !AGGREGATE_TYPE_P (TREE_TYPE (node->expr)))
2581 tree *avail;
2582 tree val;
2583 bool by_some = false;
2584 bool cant_insert = false;
2585 bool all_same = true;
2586 tree first_s = NULL;
2587 edge pred;
2588 basic_block bprime;
2589 tree eprime = NULL_TREE;
2590 edge_iterator ei;
2592 val = get_value_handle (node->expr);
/* Skip values already merged by a PHI here, or fully redundant
   (available out of the dominator).  */
2593 if (bitmap_set_contains_value (PHI_GEN (block), val))
2594 continue;
2595 if (bitmap_set_contains_value (AVAIL_OUT (dom), val))
2597 if (dump_file && (dump_flags & TDF_DETAILS))
2598 fprintf (dump_file, "Found fully redundant value\n");
2599 continue;
/* For each predecessor, record either the existing leader for the
   value or the translated expression we would have to insert to
   make it available there.  */
2602 avail = XCNEWVEC (tree, last_basic_block);
2603 FOR_EACH_EDGE (pred, ei, block->preds)
2605 tree vprime;
2606 tree edoubleprime;
2608 /* This can happen in the very weird case
2609 that our fake infinite loop edges have caused a
2610 critical edge to appear. */
2611 if (EDGE_CRITICAL_P (pred))
2613 cant_insert = true;
2614 break;
2616 bprime = pred->src;
2617 eprime = phi_translate (node->expr,
2618 ANTIC_IN (block),
2619 bprime, block);
2621 /* eprime will generally only be NULL if the
2622 value of the expression, translated
2623 through the PHI for this predecessor, is
2624 undefined. If that is the case, we can't
2625 make the expression fully redundant,
2626 because its value is undefined along a
2627 predecessor path. We can thus break out
2628 early because it doesn't matter what the
2629 rest of the results are. */
2630 if (eprime == NULL)
2632 cant_insert = true;
2633 break;
2636 eprime = fully_constant_expression (eprime);
2637 vprime = get_value_handle (eprime);
2638 gcc_assert (vprime);
2639 edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime),
2640 vprime);
2641 if (edoubleprime == NULL)
2643 avail[bprime->index] = eprime;
2644 all_same = false;
2646 else
2648 avail[bprime->index] = edoubleprime;
2649 by_some = true;
2650 if (first_s == NULL)
2651 first_s = edoubleprime;
2652 else if (!operand_equal_p (first_s, edoubleprime,
2654 all_same = false;
2657 /* If we can insert it, it's not the same value
2658 already existing along every predecessor, and
2659 it's defined by some predecessor, it is
2660 partially redundant. */
2661 if (!cant_insert && !all_same && by_some)
2663 if (insert_into_preds_of_block (block, node, avail))
2664 new_stuff = true;
2666 /* If all edges produce the same value and that value is
2667 an invariant, then the PHI has the same value on all
2668 edges. Note this. */
2669 else if (!cant_insert && all_same && eprime
2670 && is_gimple_min_invariant (eprime)
2671 && !is_gimple_min_invariant (val))
2673 value_set_t exprset = VALUE_HANDLE_EXPR_SET (val);
2674 value_set_node_t node;
/* "Constify": record the invariant as the value of every SSA name
   carrying this value handle.  */
2676 for (node = exprset->head; node; node = node->next)
2678 if (TREE_CODE (node->expr) == SSA_NAME)
2680 vn_add (node->expr, eprime);
2681 pre_stats.constified++;
2685 free (avail);
/* Step 3: recurse into the dominator children.  */
2691 for (son = first_dom_son (CDI_DOMINATORS, block);
2692 son;
2693 son = next_dom_son (CDI_DOMINATORS, son))
2695 new_stuff |= insert_aux (son);
2698 return new_stuff;
2701 /* Perform insertion of partially redundant values. */
2703 static void
2704 insert (void)
2706 bool new_stuff = true;
2707 basic_block bb;
2708 int num_iterations = 0;
2710 FOR_ALL_BB (bb)
2711 NEW_SETS (bb) = bitmap_set_new ();
2713 while (new_stuff)
2715 num_iterations++;
2716 new_stuff = false;
2717 new_stuff = insert_aux (ENTRY_BLOCK_PTR);
2719 if (num_iterations > 2 && dump_file && (dump_flags & TDF_STATS))
2720 fprintf (dump_file, "insert required %d iterations\n", num_iterations);
2724 /* Return true if VAR is an SSA variable with no defining statement in
2725 this procedure, *AND* isn't a live-on-entry parameter. */
2727 static bool
2728 is_undefined_value (tree expr)
2730 return (TREE_CODE (expr) == SSA_NAME
2731 && IS_EMPTY_STMT (SSA_NAME_DEF_STMT (expr))
2732 /* PARM_DECLs and hard registers are always defined. */
2733 && TREE_CODE (SSA_NAME_VAR (expr)) != PARM_DECL);
2737 /* Given an SSA variable VAR and an expression EXPR, compute the value
2738 number for EXPR and create a value handle (VAL) for it. If VAR and
2739 EXPR are not the same, associate VAL with VAR. Finally, add VAR to
2740 S1 and its value handle to S2.
2742 VUSES represent the virtual use operands associated with EXPR (if
2743 any). */
2745 static inline void
2746 add_to_sets (tree var, tree expr, tree stmt, bitmap_set_t s1,
2747 bitmap_set_t s2)
2749 tree val = vn_lookup_or_add (expr, stmt);
2751 /* VAR and EXPR may be the same when processing statements for which
2752 we are not computing value numbers (e.g., non-assignments, or
2753 statements that make aliased stores). In those cases, we are
2754 only interested in making VAR available as its own value. */
2755 if (var != expr)
2756 vn_add (var, val);
2758 if (s1)
2759 bitmap_insert_into_set (s1, var);
2760 bitmap_value_insert_into_set (s2, var);
2764 /* Given a unary or binary expression EXPR, create and return a new
2765 expression with the same structure as EXPR but with its operands
2766 replaced with the value handles of each of the operands of EXPR.
2768 VUSES represent the virtual use operands associated with EXPR (if
2769 any). Insert EXPR's operands into the EXP_GEN set for BLOCK. */
2771 static inline tree
2772 create_value_expr_from (tree expr, basic_block block, tree stmt)
2774 int i;
2775 enum tree_code code = TREE_CODE (expr);
2776 tree vexpr;
2777 alloc_pool pool;
2779 gcc_assert (TREE_CODE_CLASS (code) == tcc_unary
2780 || TREE_CODE_CLASS (code) == tcc_binary
2781 || TREE_CODE_CLASS (code) == tcc_comparison
2782 || TREE_CODE_CLASS (code) == tcc_reference
2783 || TREE_CODE_CLASS (code) == tcc_expression
2784 || TREE_CODE_CLASS (code) == tcc_exceptional
2785 || TREE_CODE_CLASS (code) == tcc_declaration);
/* Pick the alloc-pool matching the kind of node we are copying.  */
2787 if (TREE_CODE_CLASS (code) == tcc_unary)
2788 pool = unary_node_pool;
2789 else if (TREE_CODE_CLASS (code) == tcc_reference)
2790 pool = reference_node_pool;
2791 else if (TREE_CODE_CLASS (code) == tcc_binary)
2792 pool = binary_node_pool;
2793 else if (TREE_CODE_CLASS (code) == tcc_comparison)
2794 pool = comparison_node_pool;
2795 else if (TREE_CODE_CLASS (code) == tcc_exceptional)
2797 gcc_assert (code == TREE_LIST);
2798 pool = list_node_pool;
2800 else
2802 gcc_assert (code == CALL_EXPR);
2803 pool = expression_node_pool;
2806 vexpr = (tree) pool_alloc (pool);
2807 memcpy (vexpr, expr, tree_size (expr));
2809 /* This case is only for TREE_LIST's that appear as part of
2810 CALL_EXPR's. Anything else is a bug, but we can't easily verify
2811 this, hence this comment. TREE_LIST is not handled by the
2812 general case below because they don't have a fixed length, or
2813 operands, so you can't access purpose/value/chain through
2814 TREE_OPERAND macros. */
2816 if (code == TREE_LIST)
2818 tree op = NULL_TREE;
2819 tree temp = NULL_TREE;
/* Value-ize the rest of the argument list first.  */
2820 if (TREE_CHAIN (vexpr))
2821 temp = create_value_expr_from (TREE_CHAIN (vexpr), block, stmt);
2822 TREE_CHAIN (vexpr) = temp ? temp : TREE_CHAIN (vexpr);
2825 /* Recursively value-numberize reference ops. */
2826 if (REFERENCE_CLASS_P (TREE_VALUE (vexpr)))
2828 tree tempop;
2829 op = TREE_VALUE (vexpr);
2830 tempop = create_value_expr_from (op, block, stmt);
2831 op = tempop ? tempop : op;
2833 TREE_VALUE (vexpr) = vn_lookup_or_add (op, stmt);
2835 else
2837 op = TREE_VALUE (vexpr);
2838 TREE_VALUE (vexpr) = vn_lookup_or_add (TREE_VALUE (vexpr), NULL);
2840 /* This is the equivalent of inserting op into EXP_GEN like we
2841 do below */
2842 if (!is_undefined_value (op))
2843 value_insert_into_set (EXP_GEN (block), op);
2845 return vexpr;
/* General case: replace each operand of the copy with its value
   handle.  */
2848 for (i = 0; i < TREE_CODE_LENGTH (code); i++)
2850 tree val, op;
2852 op = TREE_OPERAND (expr, i);
2853 if (op == NULL_TREE)
2854 continue;
2856 /* If OP is a constant that has overflowed, do not value number
2857 this expression. */
2858 if (CONSTANT_CLASS_P (op)
2859 && TREE_OVERFLOW (op))
2861 pool_free (pool, vexpr);
2862 return NULL;
2865 /* Recursively value-numberize reference ops and tree lists. */
2866 if (REFERENCE_CLASS_P (op))
2868 tree tempop = create_value_expr_from (op, block, stmt);
2869 op = tempop ? tempop : op;
2870 val = vn_lookup_or_add (op, stmt);
2872 else if (TREE_CODE (op) == TREE_LIST)
2874 tree tempop;
2876 gcc_assert (TREE_CODE (expr) == CALL_EXPR);
2877 tempop = create_value_expr_from (op, block, stmt);
2879 op = tempop ? tempop : op;
2880 vn_lookup_or_add (op, NULL);
2881 /* Unlike everywhere else, we do *not* want to replace the
2882 TREE_LIST itself with a value number, because support
2883 functions we call will blow up. */
2884 val = op;
2886 else
2887 /* Create a value handle for OP and add it to VEXPR. */
2888 val = vn_lookup_or_add (op, NULL);
2890 if (!is_undefined_value (op) && TREE_CODE (op) != TREE_LIST)
2891 value_insert_into_set (EXP_GEN (block), op);
2893 if (TREE_CODE (val) == VALUE_HANDLE)
2894 TREE_TYPE (val) = TREE_TYPE (TREE_OPERAND (vexpr, i));
2896 TREE_OPERAND (vexpr, i) = val;
2899 return vexpr;
2904 /* Insert extra phis to merge values that are fully available from
2905 preds of BLOCK, but have no dominating representative coming from
2906 block DOM. */
2908 static void
2909 insert_extra_phis (basic_block block, basic_block dom)
/* Only merge points can have values available from every predecessor
   but not from the dominator.  */
2912 if (!single_pred_p (block))
2914 edge e;
2915 edge_iterator ei;
2916 bool first = true;
/* TEMPSET accumulates the intersection of the AVAIL_OUT sets of all
   predecessors.  */
2917 bitmap_set_t tempset = bitmap_set_new ();
2919 FOR_EACH_EDGE (e, ei, block->preds)
2921 /* We cannot handle abnormal incoming edges correctly. */
2922 if (e->flags & EDGE_ABNORMAL)
2923 return;
2925 if (first)
2927 bitmap_set_copy (tempset, AVAIL_OUT (e->src));
2928 first = false;
2930 else
2931 bitmap_set_and (tempset, AVAIL_OUT (e->src));
/* Remove what the dominator already provides; whatever remains needs
   a merging PHI.  */
2934 if (dom)
2935 bitmap_set_and_compl (tempset, AVAIL_OUT (dom));
2937 if (!bitmap_set_empty_p (tempset))
2939 unsigned int i;
2940 bitmap_iterator bi;
2942 EXECUTE_IF_SET_IN_BITMAP (tempset->expressions, 0, i, bi)
2944 tree name = ssa_name (i);
2945 tree val = get_value_handle (name);
2946 tree temp;
2948 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
2949 continue;
/* Reuse the previous temporary when the type matches.  */
2951 if (!mergephitemp
2952 || TREE_TYPE (name) != TREE_TYPE (mergephitemp))
2954 mergephitemp = create_tmp_var (TREE_TYPE (name),
2955 "mergephitmp");
2956 get_var_ann (mergephitemp);
2958 temp = mergephitemp;
2960 if (dump_file && (dump_flags & TDF_DETAILS))
2962 fprintf (dump_file, "Creating phi ");
2963 print_generic_expr (dump_file, temp, 0);
2964 fprintf (dump_file, " to merge available but not dominating values ");
2967 add_referenced_tmp_var (temp);
2968 temp = create_phi_node (temp, block);
2969 NECESSARY (temp) = 0;
2970 VEC_safe_push (tree, heap, inserted_exprs, temp);
/* Each predecessor must have a leader for VAL, since VAL is in the
   intersection of all predecessors' AVAIL_OUT sets.  */
2972 FOR_EACH_EDGE (e, ei, block->preds)
2974 tree leader = bitmap_find_leader (AVAIL_OUT (e->src), val);
2976 gcc_assert (leader);
2977 add_phi_arg (temp, leader, e);
2979 if (dump_file && (dump_flags & TDF_DETAILS))
2981 print_generic_expr (dump_file, leader, 0);
2982 fprintf (dump_file, " in block %d,", e->src->index);
/* The PHI result carries the same value handle as the merged names.  */
2986 vn_add (PHI_RESULT (temp), val);
2988 if (dump_file && (dump_flags & TDF_DETAILS))
2989 fprintf (dump_file, "\n");
2995 /* Given a statement STMT and its right hand side which is a load, try
2996 to look for the expression stored in the location for the load, and
2997 return true if a useful equivalence was recorded for LHS. */
2999 static bool
3000 try_look_through_load (tree lhs, tree mem_ref, tree stmt, basic_block block)
3002 tree store_stmt = NULL;
3003 tree rhs;
3004 ssa_op_iter i;
3005 tree vuse;
/* Walk all VUSEs of the load; they must all be defined by one and the
   same simple store to MEM_REF for the look-through to be valid.  */
3007 FOR_EACH_SSA_TREE_OPERAND (vuse, stmt, i, SSA_OP_VIRTUAL_USES)
3009 tree def_stmt;
3011 gcc_assert (TREE_CODE (vuse) == SSA_NAME);
3012 def_stmt = SSA_NAME_DEF_STMT (vuse);
3014 /* If there is no useful statement for this VUSE, we'll not find a
3015 useful expression to return either. Likewise, if there is a
3016 statement but it is not a simple assignment or it has virtual
3017 uses, we can stop right here. Note that this means we do
3018 not look through PHI nodes, which is intentional. */
3019 if (!def_stmt
3020 || TREE_CODE (def_stmt) != MODIFY_EXPR
3021 || !ZERO_SSA_OPERANDS (def_stmt, SSA_OP_VIRTUAL_USES))
3022 return false;
3024 /* If this is not the same statement as one we have looked at for
3025 another VUSE of STMT already, we have two statements producing
3026 something that reaches our STMT. */
3027 if (store_stmt && store_stmt != def_stmt)
3028 return false;
3029 else
3031 /* Is this a store to the exact same location as the one we are
3032 loading from in STMT? */
3033 if (!operand_equal_p (TREE_OPERAND (def_stmt, 0), mem_ref, 0))
3034 return false;
3036 /* Otherwise remember this statement and see if all other VUSEs
3037 come from the same statement. */
3038 store_stmt = def_stmt;
3042 /* Alright then, we have visited all VUSEs of STMT and we've determined
3043 that all of them come from the same statement STORE_STMT. See if there
3044 is a useful expression we can deduce from STORE_STMT. */
/* The caller (compute_avail) only calls us when STMT has at least one
   VUSE, so STORE_STMT is non-NULL here.  */
3045 rhs = TREE_OPERAND (store_stmt, 1);
/* Only SSA names not involved in abnormal PHIs, and invariants, are
   safe to propagate as the loaded value.  */
3046 if ((TREE_CODE (rhs) == SSA_NAME
3047 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs))
3048 || is_gimple_min_invariant (rhs)
3049 || TREE_CODE (rhs) == ADDR_EXPR
3050 || TREE_INVARIANT (rhs))
3053 /* Yay! Compute a value number for the RHS of the statement and
3054 add its value to the AVAIL_OUT set for the block. Add the LHS
3055 to TMP_GEN. */
3056 add_to_sets (lhs, rhs, store_stmt, TMP_GEN (block), AVAIL_OUT (block));
3057 if (TREE_CODE (rhs) == SSA_NAME
3058 && !is_undefined_value (rhs))
3059 value_insert_into_set (EXP_GEN (block), rhs);
3060 return true;
3063 return false;
3066 /* Return a copy of NODE that is stored in the temporary alloc_pool's.
3067 This is made recursively true, so that the operands are stored in
3068 the pool as well. */
3070 static tree
3071 poolify_tree (tree node)
3073 switch (TREE_CODE (node))
3075 case INDIRECT_REF:
3077 tree temp = pool_alloc (reference_node_pool);
3078 memcpy (temp, node, tree_size (node));
3079 TREE_OPERAND (temp, 0) = poolify_tree (TREE_OPERAND (temp, 0));
3080 return temp;
3082 break;
3083 case MODIFY_EXPR:
3085 tree temp = pool_alloc (modify_expr_node_pool);
3086 memcpy (temp, node, tree_size (node));
3087 TREE_OPERAND (temp, 0) = poolify_tree (TREE_OPERAND (temp, 0));
3088 TREE_OPERAND (temp, 1) = poolify_tree (TREE_OPERAND (temp, 1));
3089 return temp;
3091 break;
3092 case SSA_NAME:
3093 case INTEGER_CST:
3094 case STRING_CST:
3095 case REAL_CST:
3096 case PARM_DECL:
3097 case VAR_DECL:
3098 case RESULT_DECL:
3099 return node;
3100 default:
3101 gcc_unreachable ();
3105 static tree modify_expr_template;
3107 /* Allocate a MODIFY_EXPR with TYPE, and operands OP1, OP2 in the
3108 alloc pools and return it. */
3109 static tree
3110 poolify_modify_expr (tree type, tree op1, tree op2)
3112 if (modify_expr_template == NULL)
3113 modify_expr_template = build2 (MODIFY_EXPR, type, op1, op2);
3115 TREE_OPERAND (modify_expr_template, 0) = op1;
3116 TREE_OPERAND (modify_expr_template, 1) = op2;
3117 TREE_TYPE (modify_expr_template) = type;
3119 return poolify_tree (modify_expr_template);
3123 /* For each real store operation of the form
3124 *a = <value> that we see, create a corresponding fake store of the
3125 form storetmp_<version> = *a.
3127 This enables AVAIL computation to mark the results of stores as
3128 available. Without this, you'd need to do some computation to
3129 mark the result of stores as ANTIC and AVAIL at all the right
3130 points.
3131 To save memory, we keep the store
3132 statements pool allocated until we decide whether they are
3133 necessary or not. */
3135 static void
3136 insert_fake_stores (void)
3138 basic_block block;
3140 FOR_ALL_BB (block)
3142 block_stmt_iterator bsi;
3143 for (bsi = bsi_start (block); !bsi_end_p (bsi); bsi_next (&bsi))
3145 tree stmt = bsi_stmt (bsi);
3147 /* We can't generate SSA names for stores that are complex
3148 or aggregate. We also want to ignore things whose
3149 virtual uses occur in abnormal phis. */
3151 if (TREE_CODE (stmt) == MODIFY_EXPR
3152 && TREE_CODE (TREE_OPERAND (stmt, 0)) == INDIRECT_REF
3153 && !AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (stmt, 0)))
3154 && TREE_CODE (TREE_TYPE (TREE_OPERAND (stmt, 0))) != COMPLEX_TYPE)
3156 ssa_op_iter iter;
3157 def_operand_p defp;
3158 tree lhs = TREE_OPERAND (stmt, 0);
3159 tree rhs = TREE_OPERAND (stmt, 1);
3160 tree new;
3161 bool notokay = false;
/* Reject stores whose virtual defs participate in abnormal PHIs.  */
3163 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_VIRTUAL_DEFS)
3165 tree defvar = DEF_FROM_PTR (defp);
3166 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (defvar))
3168 notokay = true;
3169 break;
3173 if (notokay)
3174 continue;
/* Reuse the previous temporary when the type matches.  */
3176 if (!storetemp || TREE_TYPE (rhs) != TREE_TYPE (storetemp))
3178 storetemp = create_tmp_var (TREE_TYPE (rhs), "storetmp");
3179 get_var_ann (storetemp);
/* Build "storetmp_N = *a" as a pool-allocated fake load of the
   stored location, and place it right after the store.  */
3182 new = poolify_modify_expr (TREE_TYPE (stmt), storetemp, lhs);
3184 lhs = make_ssa_name (storetemp, new);
3185 TREE_OPERAND (new, 0) = lhs;
3186 create_ssa_artficial_load_stmt (new, stmt);
3188 NECESSARY (new) = 0;
3189 VEC_safe_push (tree, heap, inserted_exprs, new);
3190 VEC_safe_push (tree, heap, need_creation, new);
3191 bsi_insert_after (&bsi, new, BSI_NEW_STMT);
3197 /* Turn the pool allocated fake stores that we created back into real
3198 GC allocated ones if they turned out to be necessary to PRE some
3199 expressions. */
3201 static void
3202 realify_fake_stores (void)
3204 unsigned int i;
3205 tree stmt;
3207 for (i = 0; VEC_iterate (tree, need_creation, i, stmt); i++)
3209 if (NECESSARY (stmt))
3211 block_stmt_iterator bsi;
3212 tree newstmt;
3214 /* Mark the temp variable as referenced */
3215 add_referenced_tmp_var (SSA_NAME_VAR (TREE_OPERAND (stmt, 0)));
3217 /* Put the new statement in GC memory, fix up the annotation
3218 and SSA_NAME_DEF_STMT on it, and then put it in place of
3219 the old statement in the IR stream. */
3220 newstmt = unshare_expr (stmt);
3221 SSA_NAME_DEF_STMT (TREE_OPERAND (newstmt, 0)) = newstmt;
3223 newstmt->common.ann = stmt->common.ann;
3225 bsi = bsi_for_stmt (stmt);
3226 bsi_replace (&bsi, newstmt, true);
3228 else
/* The fake store turned out to be unneeded: release its SSA defs so
   the names can be reused.  */
3229 release_defs (stmt);
3234 /* Compute the AVAIL set for all basic blocks.
3236 This function performs value numbering of the statements in each basic
3237 block. The AVAIL sets are built from information we glean while doing
3238 this value numbering, since the AVAIL sets contain only one entry per
3239 value.
3241 AVAIL_IN[BLOCK] = AVAIL_OUT[dom(BLOCK)].
3242 AVAIL_OUT[BLOCK] = AVAIL_IN[BLOCK] U PHI_GEN[BLOCK] U TMP_GEN[BLOCK]. */
3244 static void
3245 compute_avail (void)
3247 basic_block block, son;
3248 basic_block *worklist;
3249 size_t sp = 0;
3250 tree param;
3251 /* For arguments with default definitions, we pretend they are
3252 defined in the entry block. */
3253 for (param = DECL_ARGUMENTS (current_function_decl);
3254 param;
3255 param = TREE_CHAIN (param))
3257 if (default_def (param) != NULL)
3259 tree def = default_def (param);
3260 vn_lookup_or_add (def, NULL);
3261 bitmap_insert_into_set (TMP_GEN (ENTRY_BLOCK_PTR), def);
3262 bitmap_value_insert_into_set (AVAIL_OUT (ENTRY_BLOCK_PTR), def);
3266 /* Likewise for the static chain decl. */
3267 if (cfun->static_chain_decl)
3269 param = cfun->static_chain_decl;
3270 if (default_def (param) != NULL)
3272 tree def = default_def (param);
3273 vn_lookup_or_add (def, NULL);
3274 bitmap_insert_into_set (TMP_GEN (ENTRY_BLOCK_PTR), def);
3275 bitmap_value_insert_into_set (AVAIL_OUT (ENTRY_BLOCK_PTR), def);
3279 /* Allocate the worklist. */
3280 worklist = XNEWVEC (basic_block, n_basic_blocks);
3282 /* Seed the algorithm by putting the dominator children of the entry
3283 block on the worklist. */
3284 for (son = first_dom_son (CDI_DOMINATORS, ENTRY_BLOCK_PTR);
3285 son;
3286 son = next_dom_son (CDI_DOMINATORS, son))
3287 worklist[sp++] = son;
3289 /* Loop until the worklist is empty. */
/* Blocks are only pushed when their immediate dominator is processed,
   so every block is visited after its dominator -- AVAIL_OUT of the
   dominator is complete when we copy from it below.  */
3290 while (sp)
3292 block_stmt_iterator bsi;
3293 tree stmt, phi;
3294 basic_block dom;
3295 unsigned int stmt_uid = 1;
3297 /* Pick a block from the worklist. */
3298 block = worklist[--sp];
3300 /* Initially, the set of available values in BLOCK is that of
3301 its immediate dominator. */
3302 dom = get_immediate_dominator (CDI_DOMINATORS, block);
3303 if (dom)
3304 bitmap_set_copy (AVAIL_OUT (block), AVAIL_OUT (dom));
3306 if (!in_fre)
3307 insert_extra_phis (block, dom);
3309 /* Generate values for PHI nodes. */
3310 for (phi = phi_nodes (block); phi; phi = PHI_CHAIN (phi))
3311 /* We have no need for virtual phis, as they don't represent
3312 actual computations. */
3313 if (is_gimple_reg (PHI_RESULT (phi)))
3314 add_to_sets (PHI_RESULT (phi), PHI_RESULT (phi), NULL,
3315 PHI_GEN (block), AVAIL_OUT (block));
3317 /* Now compute value numbers and populate value sets with all
3318 the expressions computed in BLOCK. */
3319 for (bsi = bsi_start (block); !bsi_end_p (bsi); bsi_next (&bsi))
3321 stmt_ann_t ann;
3322 ssa_op_iter iter;
3323 tree op;
3325 stmt = bsi_stmt (bsi);
3326 ann = stmt_ann (stmt);
/* Give each statement in the block a sequential uid.  */
3328 ann->uid = stmt_uid++;
3330 /* For regular value numbering, we are only interested in
3331 assignments of the form X_i = EXPR, where EXPR represents
3332 an "interesting" computation, it has no volatile operands
3333 and X_i doesn't flow through an abnormal edge. */
3334 if (TREE_CODE (stmt) == MODIFY_EXPR
3335 && !ann->has_volatile_ops
3336 && TREE_CODE (TREE_OPERAND (stmt, 0)) == SSA_NAME
3337 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (stmt, 0)))
3339 tree lhs = TREE_OPERAND (stmt, 0);
3340 tree rhs = TREE_OPERAND (stmt, 1);
3342 /* Try to look through loads. */
3343 if (TREE_CODE (lhs) == SSA_NAME
3344 && !ZERO_SSA_OPERANDS (stmt, SSA_OP_VIRTUAL_USES)
3345 && try_look_through_load (lhs, rhs, stmt, block))
3346 continue;
3348 STRIP_USELESS_TYPE_CONVERSION (rhs);
3349 if (can_value_number_operation (rhs))
3351 /* For value numberable operation, create a
3352 duplicate expression with the operands replaced
3353 with the value handles of the original RHS. */
3354 tree newt = create_value_expr_from (rhs, block, stmt);
/* NEWT is NULL when an operand is an overflowed constant; in that
   case fall through to the generic handling below.  */
3355 if (newt)
3357 add_to_sets (lhs, newt, stmt, TMP_GEN (block),
3358 AVAIL_OUT (block));
3359 value_insert_into_set (EXP_GEN (block), newt);
3360 continue;
3363 else if ((TREE_CODE (rhs) == SSA_NAME
3364 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs))
3365 || is_gimple_min_invariant (rhs)
3366 || TREE_CODE (rhs) == ADDR_EXPR
3367 || TREE_INVARIANT (rhs)
3368 || DECL_P (rhs))
3370 /* Compute a value number for the RHS of the statement
3371 and add its value to the AVAIL_OUT set for the block.
3372 Add the LHS to TMP_GEN. */
3373 add_to_sets (lhs, rhs, stmt, TMP_GEN (block),
3374 AVAIL_OUT (block));
3376 if (TREE_CODE (rhs) == SSA_NAME
3377 && !is_undefined_value (rhs))
3378 value_insert_into_set (EXP_GEN (block), rhs);
3379 continue;
3383 /* For any other statement that we don't recognize, simply
3384 make the names generated by the statement available in
3385 AVAIL_OUT and TMP_GEN. */
3386 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF)
3387 add_to_sets (op, op, NULL, TMP_GEN (block), AVAIL_OUT (block));
3389 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
3390 add_to_sets (op, op, NULL, NULL , AVAIL_OUT (block));
3393 /* Put the dominator children of BLOCK on the worklist of blocks
3394 to compute available sets for. */
3395 for (son = first_dom_son (CDI_DOMINATORS, block);
3396 son;
3397 son = next_dom_son (CDI_DOMINATORS, son))
3398 worklist[sp++] = son;
3401 free (worklist);
3405 /* Eliminate fully redundant computations. */
3407 static void
3408 eliminate (void)
3410 basic_block b;
3412 FOR_EACH_BB (b)
3414 block_stmt_iterator i;
3416 for (i = bsi_start (b); !bsi_end_p (i); bsi_next (&i))
3418 tree stmt = bsi_stmt (i);
3420 /* Lookup the RHS of the expression, see if we have an
3421 available computation for it. If so, replace the RHS with
3422 the available computation. */
3423 if (TREE_CODE (stmt) == MODIFY_EXPR
3424 && TREE_CODE (TREE_OPERAND (stmt, 0)) == SSA_NAME
3425 && TREE_CODE (TREE_OPERAND (stmt ,1)) != SSA_NAME
3426 && !is_gimple_min_invariant (TREE_OPERAND (stmt, 1))
3427 && !stmt_ann (stmt)->has_volatile_ops)
3429 tree lhs = TREE_OPERAND (stmt, 0);
3430 tree *rhs_p = &TREE_OPERAND (stmt, 1);
3431 tree sprime;
3433 sprime = bitmap_find_leader (AVAIL_OUT (b),
3434 vn_lookup (lhs, NULL));
3435 if (sprime
3436 && sprime != lhs
3437 && (TREE_CODE (*rhs_p) != SSA_NAME
3438 || may_propagate_copy (*rhs_p, sprime)))
3440 gcc_assert (sprime != *rhs_p);
3442 if (dump_file && (dump_flags & TDF_DETAILS))
3444 fprintf (dump_file, "Replaced ");
3445 print_generic_expr (dump_file, *rhs_p, 0);
3446 fprintf (dump_file, " with ");
3447 print_generic_expr (dump_file, sprime, 0);
3448 fprintf (dump_file, " in ");
3449 print_generic_stmt (dump_file, stmt, 0);
3452 if (TREE_CODE (sprime) == SSA_NAME)
3453 NECESSARY (SSA_NAME_DEF_STMT (sprime)) = 1;
3454 /* We need to make sure the new and old types actually match,
3455 which may require adding a simple cast, which fold_convert
3456 will do for us. */
3457 if (TREE_CODE (*rhs_p) != SSA_NAME
3458 && !tree_ssa_useless_type_conversion_1 (TREE_TYPE (*rhs_p),
3459 TREE_TYPE (sprime)))
3460 sprime = fold_convert (TREE_TYPE (*rhs_p), sprime);
3462 pre_stats.eliminations++;
3463 propagate_tree_value (rhs_p, sprime);
3464 update_stmt (stmt);
3466 /* If we removed EH side effects from the statement, clean
3467 its EH information. */
3468 if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
3470 bitmap_set_bit (need_eh_cleanup,
3471 bb_for_stmt (stmt)->index);
3472 if (dump_file && (dump_flags & TDF_DETAILS))
3473 fprintf (dump_file, " Removed EH side effects.\n");
3481 /* Borrow a bit of tree-ssa-dce.c for the moment.
3482 XXX: In 4.1, we should be able to just run a DCE pass after PRE, though
3483 this may be a bit faster, and we may want critical edges kept split. */
3485 /* If OP's defining statement has not already been determined to be necessary,
3486 mark that statement necessary. Return the stmt, if it is newly
3487 necessary. */
3489 static inline tree
3490 mark_operand_necessary (tree op)
3492 tree stmt;
3494 gcc_assert (op);
3496 if (TREE_CODE (op) != SSA_NAME)
3497 return NULL;
3499 stmt = SSA_NAME_DEF_STMT (op);
3500 gcc_assert (stmt);
3502 if (NECESSARY (stmt)
3503 || IS_EMPTY_STMT (stmt))
3504 return NULL;
3506 NECESSARY (stmt) = 1;
3507 return stmt;
3510 /* Because we don't follow exactly the standard PRE algorithm, and decide not
3511 to insert PHI nodes sometimes, and because value numbering of casts isn't
3512 perfect, we sometimes end up inserting dead code. This simple DCE-like
3513 pass removes any insertions we made that weren't actually used. */
3515 static void
3516 remove_dead_inserted_code (void)
3518 VEC(tree,heap) *worklist = NULL;
3519 int i;
3520 tree t;
3522 worklist = VEC_alloc (tree, heap, VEC_length (tree, inserted_exprs));
3523 for (i = 0; VEC_iterate (tree, inserted_exprs, i, t); i++)
3525 if (NECESSARY (t))
3526 VEC_quick_push (tree, worklist, t);
3528 while (VEC_length (tree, worklist) > 0)
3530 t = VEC_pop (tree, worklist);
3532 /* PHI nodes are somewhat special in that each PHI alternative has
3533 data and control dependencies. All the statements feeding the
3534 PHI node's arguments are always necessary. */
3535 if (TREE_CODE (t) == PHI_NODE)
3537 int k;
3539 VEC_reserve (tree, heap, worklist, PHI_NUM_ARGS (t));
3540 for (k = 0; k < PHI_NUM_ARGS (t); k++)
3542 tree arg = PHI_ARG_DEF (t, k);
3543 if (TREE_CODE (arg) == SSA_NAME)
3545 arg = mark_operand_necessary (arg);
3546 if (arg)
3547 VEC_quick_push (tree, worklist, arg);
3551 else
3553 /* Propagate through the operands. Examine all the USE, VUSE and
3554 V_MAY_DEF operands in this statement. Mark all the statements
3555 which feed this statement's uses as necessary. */
3556 ssa_op_iter iter;
3557 tree use;
3559 /* The operands of V_MAY_DEF expressions are also needed as they
3560 represent potential definitions that may reach this
3561 statement (V_MAY_DEF operands allow us to follow def-def
3562 links). */
3564 FOR_EACH_SSA_TREE_OPERAND (use, t, iter, SSA_OP_ALL_USES)
3566 tree n = mark_operand_necessary (use);
3567 if (n)
3568 VEC_safe_push (tree, heap, worklist, n);
3573 for (i = 0; VEC_iterate (tree, inserted_exprs, i, t); i++)
3575 if (!NECESSARY (t))
3577 block_stmt_iterator bsi;
3579 if (dump_file && (dump_flags & TDF_DETAILS))
3581 fprintf (dump_file, "Removing unnecessary insertion:");
3582 print_generic_stmt (dump_file, t, 0);
3585 if (TREE_CODE (t) == PHI_NODE)
3587 remove_phi_node (t, NULL);
3589 else
3591 bsi = bsi_for_stmt (t);
3592 bsi_remove (&bsi, true);
3593 release_defs (t);
3597 VEC_free (tree, heap, worklist);
/* Initialize data structures used by PRE.  DO_FRE is true when the
   caller only wants full redundancy elimination (no insertion), which
   also skips loop-optimizer setup.  */

static void
init_pre (bool do_fre)
{
  basic_block bb;

  in_fre = do_fre;

  /* Reset the per-run globals: the list of expressions we inserted,
     stores needing creation, and the cached temporaries.  */
  inserted_exprs = NULL;
  need_creation = NULL;
  pretemp = NULL_TREE;
  storetemp = NULL_TREE;
  mergephitemp = NULL_TREE;
  prephitemp = NULL_TREE;

  /* Set up value numbering; loop structures are only needed for full
     PRE, not for FRE.  */
  vn_init ();
  if (!do_fre)
    current_loops = loop_optimizer_init (LOOPS_NORMAL);

  connect_infinite_loops_to_exit ();
  memset (&pre_stats, 0, sizeof (pre_stats));

  /* If block 0 has more than one predecessor, it means that its PHI
     nodes will have arguments coming from block -1.  This creates
     problems for several places in PRE that keep local arrays indexed
     by block number.  To prevent this, we split the edge coming from
     ENTRY_BLOCK_PTR (FIXME, if ENTRY_BLOCK_PTR had an index number
     different than -1 we wouldn't have to hack this.  tree-ssa-dce.c
     needs a similar change).  */
  if (!single_pred_p (single_succ (ENTRY_BLOCK_PTR)))
    if (!(single_succ_edge (ENTRY_BLOCK_PTR)->flags & EDGE_ABNORMAL))
      split_edge (single_succ_edge (ENTRY_BLOCK_PTR));

  /* Attach the per-block value sets (EXP_GEN, PHI_GEN, TMP_GEN,
     AVAIL_OUT live in bb->aux).  */
  FOR_ALL_BB (bb)
    bb->aux = xcalloc (1, sizeof (struct bb_value_sets));

  /* Create the allocation pools and tables used throughout the pass.
     The pool element sizes are keyed off representative tree codes of
     each expression class.  */
  bitmap_obstack_initialize (&grand_bitmap_obstack);
  phi_translate_table = htab_create (511, expr_pred_trans_hash,
				     expr_pred_trans_eq, free);
  value_set_pool = create_alloc_pool ("Value sets",
				      sizeof (struct value_set), 30);
  bitmap_set_pool = create_alloc_pool ("Bitmap sets",
				       sizeof (struct bitmap_set), 30);
  value_set_node_pool = create_alloc_pool ("Value set nodes",
					   sizeof (struct value_set_node), 30);
  calculate_dominance_info (CDI_POST_DOMINATORS);
  calculate_dominance_info (CDI_DOMINATORS);
  binary_node_pool = create_alloc_pool ("Binary tree nodes",
					tree_code_size (PLUS_EXPR), 30);
  unary_node_pool = create_alloc_pool ("Unary tree nodes",
				       tree_code_size (NEGATE_EXPR), 30);
  reference_node_pool = create_alloc_pool ("Reference tree nodes",
					   tree_code_size (ARRAY_REF), 30);
  expression_node_pool = create_alloc_pool ("Expression tree nodes",
					    tree_code_size (CALL_EXPR), 30);
  list_node_pool = create_alloc_pool ("List tree nodes",
				      tree_code_size (TREE_LIST), 30);
  comparison_node_pool = create_alloc_pool ("Comparison tree nodes",
					    tree_code_size (EQ_EXPR), 30);
  modify_expr_node_pool = create_alloc_pool ("MODIFY_EXPR nodes",
					     tree_code_size (MODIFY_EXPR),
					     30);
  modify_expr_template = NULL;

  /* Allocate the four dataflow sets for every block.  */
  FOR_ALL_BB (bb)
    {
      EXP_GEN (bb) = set_new (true);
      PHI_GEN (bb) = bitmap_set_new ();
      TMP_GEN (bb) = bitmap_set_new ();
      AVAIL_OUT (bb) = bitmap_set_new ();
    }

  need_eh_cleanup = BITMAP_ALLOC (NULL);
}
/* Deallocate data structures used by PRE.  Mirrors init_pre; DO_FRE
   must match the value passed to init_pre so loop structures are only
   finalized when they were initialized.  */

static void
fini_pre (bool do_fre)
{
  basic_block bb;
  unsigned int i;

  VEC_free (tree, heap, inserted_exprs);
  VEC_free (tree, heap, need_creation);
  bitmap_obstack_release (&grand_bitmap_obstack);
  free_alloc_pool (value_set_pool);
  free_alloc_pool (bitmap_set_pool);
  free_alloc_pool (value_set_node_pool);
  free_alloc_pool (binary_node_pool);
  free_alloc_pool (reference_node_pool);
  free_alloc_pool (unary_node_pool);
  free_alloc_pool (list_node_pool);
  free_alloc_pool (expression_node_pool);
  free_alloc_pool (comparison_node_pool);
  free_alloc_pool (modify_expr_node_pool);
  htab_delete (phi_translate_table);
  remove_fake_exit_edges ();

  /* Release the per-block value sets attached in init_pre.  */
  FOR_ALL_BB (bb)
    {
      free (bb->aux);
      bb->aux = NULL;
    }

  /* NOTE(review): only post-dominance info is freed here; dominance
     info (CDI_DOMINATORS) appears to be kept for later passes —
     confirm against the pass manager's expectations.  */
  free_dominance_info (CDI_POST_DOMINATORS);
  vn_delete ();

  /* If eliminate() removed EH side effects anywhere, purge the now-dead
     EH edges and clean up the CFG.  */
  if (!bitmap_empty_p (need_eh_cleanup))
    {
      tree_purge_all_dead_eh_edges (need_eh_cleanup);
      cleanup_tree_cfg ();
    }

  BITMAP_FREE (need_eh_cleanup);

  /* Wipe out pointers to VALUE_HANDLEs.  In the not terribly distant
     future we will want them to be persistent though.  */
  for (i = 0; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);

      if (!name)
	continue;

      if (SSA_NAME_VALUE (name)
	  && TREE_CODE (SSA_NAME_VALUE (name)) == VALUE_HANDLE)
	SSA_NAME_VALUE (name) = NULL;
    }

  if (!do_fre && current_loops)
    {
      loop_optimizer_finalize (current_loops);
      current_loops = NULL;
    }
}
/* Main entry point to the SSA-PRE pass.  DO_FRE is true if the caller
   only wants to do full redundancy elimination.  */

static void
execute_pre (bool do_fre)
{
  init_pre (do_fre);

  if (!do_fre)
    insert_fake_stores ();

  /* Collect and value number expressions computed in each basic block.  */
  compute_avail ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      basic_block bb;

      FOR_ALL_BB (bb)
	{
	  print_value_set (dump_file, EXP_GEN (bb), "exp_gen", bb->index);
	  bitmap_print_value_set (dump_file, TMP_GEN (bb), "tmp_gen",
				  bb->index);
	  bitmap_print_value_set (dump_file, AVAIL_OUT (bb), "avail_out",
				  bb->index);
	}
    }

  /* Insert can get quite slow on an incredibly large number of basic
     blocks due to some quadratic behavior.  Until this behavior is
     fixed, don't run it when we have an incredibly large number of
     bb's.  If we aren't going to run insert, there is no point in
     computing ANTIC, either, even though it's plenty fast.  */
  if (!do_fre && n_basic_blocks < 4000)
    {
      vuse_names = XCNEWVEC (bitmap, num_ssa_names);
      compute_rvuse_and_antic_safe ();
      compute_antic ();
      insert ();
      free (vuse_names);
    }

  /* Remove all the redundant expressions.  */
  eliminate ();

  if (dump_file && (dump_flags & TDF_STATS))
    {
      fprintf (dump_file, "Insertions: %d\n", pre_stats.insertions);
      fprintf (dump_file, "New PHIs: %d\n", pre_stats.phis);
      fprintf (dump_file, "Eliminated: %d\n", pre_stats.eliminations);
      fprintf (dump_file, "Constified: %d\n", pre_stats.constified);
    }

  bsi_commit_edge_inserts ();

  /* Insertion may have created dead code; clean it up, and turn the
     fake stores inserted above into real ones.  */
  if (!do_fre)
    {
      remove_dead_inserted_code ();
      realify_fake_stores ();
    }

  fini_pre (do_fre);
}
/* Gate and execute functions for PRE.  */

/* Execute hook: run full PRE (insertion plus elimination).  */

static unsigned int
do_pre (void)
{
  execute_pre (false);
  return 0;
}
3813 static bool
3814 gate_pre (void)
3816 return flag_tree_pre != 0;
/* Pass descriptor for PRE, registered with the pass manager.  */

struct tree_opt_pass pass_pre =
{
  "pre",				/* name */
  gate_pre,				/* gate */
  do_pre,				/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_PRE,				/* tv_id */
  PROP_no_crit_edges | PROP_cfg
    | PROP_ssa | PROP_alias,		/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_update_ssa_only_virtuals | TODO_dump_func | TODO_ggc_collect
  | TODO_verify_ssa,			/* todo_flags_finish */
  0					/* letter */
};
/* Gate and execute functions for FRE.  */

/* Execute hook: run redundancy elimination only, with no insertion.  */

static unsigned int
execute_fre (void)
{
  execute_pre (true);
  return 0;
}
3848 static bool
3849 gate_fre (void)
3851 return flag_tree_fre != 0;
/* Pass descriptor for FRE, registered with the pass manager.  */

struct tree_opt_pass pass_fre =
{
  "fre",				/* name */
  gate_fre,				/* gate */
  execute_fre,				/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_FRE,				/* tv_id */
  PROP_cfg | PROP_ssa | PROP_alias,	/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func | TODO_ggc_collect | TODO_verify_ssa, /* todo_flags_finish */
  0					/* letter */
};