* cfgcleanup.c, cfgexpand.c, cgraphunit.c, config/arm/arm.c,
[official-gcc.git] / gcc / tree-ssa-pre.c
blob7ae481b2b7a622997b4054924bfade1aba227453
1 /* SSA-PRE for trees.
2 Copyright (C) 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dan@dberlin.org> and Steven Bosscher
4 <stevenb@suse.de>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
11 any later version.
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to
20 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
21 Boston, MA 02110-1301, USA. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "ggc.h"
28 #include "tree.h"
29 #include "basic-block.h"
30 #include "diagnostic.h"
31 #include "tree-inline.h"
32 #include "tree-flow.h"
33 #include "tree-gimple.h"
34 #include "tree-dump.h"
35 #include "timevar.h"
36 #include "fibheap.h"
37 #include "hashtab.h"
38 #include "tree-iterator.h"
39 #include "real.h"
40 #include "alloc-pool.h"
41 #include "tree-pass.h"
42 #include "flags.h"
43 #include "bitmap.h"
44 #include "langhooks.h"
45 #include "cfgloop.h"
47 /* TODO:
49 1. Avail sets can be shared by making an avail_find_leader that
50 walks up the dominator tree and looks in those avail sets.
51 This might affect code optimality, it's unclear right now.
52 2. Strength reduction can be performed by anticipating expressions
53 we can repair later on.
54 3. We can do back-substitution or smarter value numbering to catch
55 commutative expressions split up over multiple statements.
56 4. ANTIC_SAFE_LOADS could be a lot smarter than it is now.
57 Right now, it is simply calculating loads that occur before
58 any store in a block, instead of loads that occur before
59 stores that affect them. This is relatively more expensive, and
60 it's not clear how much more it will buy us.
61 */
63 /* For ease of terminology, "expression node" in the below refers to
64 every expression node but MODIFY_EXPR, because MODIFY_EXPR's represent
65 the actual statement containing the expressions we care about, and
66 we cache the value number by putting it in the expression. */
68 /* Basic algorithm
70 First we walk the statements to generate the AVAIL sets, the
71 EXP_GEN sets, and the tmp_gen sets. EXP_GEN sets represent the
72 generation of values/expressions by a given block. We use them
73 when computing the ANTIC sets. The AVAIL sets consist of
74 SSA_NAME's that represent values, so we know what values are
75 available in what blocks. AVAIL is a forward dataflow problem. In
76 SSA, values are never killed, so we don't need a kill set, or a
77 fixpoint iteration, in order to calculate the AVAIL sets. In
78 traditional parlance, AVAIL sets tell us the downsafety of the
79 expressions/values.
81 Next, we generate the ANTIC sets. These sets represent the
82 anticipatable expressions. ANTIC is a backwards dataflow
 83    problem. An expression is anticipatable in a given block if it could
84 be generated in that block. This means that if we had to perform
85 an insertion in that block, of the value of that expression, we
86 could. Calculating the ANTIC sets requires phi translation of
87 expressions, because the flow goes backwards through phis. We must
88 iterate to a fixpoint of the ANTIC sets, because we have a kill
89 set. Even in SSA form, values are not live over the entire
90 function, only from their definition point onwards. So we have to
91 remove values from the ANTIC set once we go past the definition
92 point of the leaders that make them up.
93 compute_antic/compute_antic_aux performs this computation.
95 Third, we perform insertions to make partially redundant
96 expressions fully redundant.
98 An expression is partially redundant (excluding partial
99 anticipation) if:
101 1. It is AVAIL in some, but not all, of the predecessors of a
102 given block.
103 2. It is ANTIC in all the predecessors.
105 In order to make it fully redundant, we insert the expression into
106 the predecessors where it is not available, but is ANTIC.
107 insert/insert_aux performs this insertion.
109 Fourth, we eliminate fully redundant expressions.
110 This is a simple statement walk that replaces redundant
111 calculations with the now available values. */
113 /* Representations of value numbers:
115 Value numbers are represented using the "value handle" approach.
116 This means that each SSA_NAME (and for other reasons to be
117 disclosed in a moment, expression nodes) has a value handle that
118 can be retrieved through get_value_handle. This value handle, *is*
119 the value number of the SSA_NAME. You can pointer compare the
120 value handles for equivalence purposes.
122 For debugging reasons, the value handle is internally more than
123 just a number, it is a VAR_DECL named "value.x", where x is a
124 unique number for each value number in use. This allows
125 expressions with SSA_NAMES replaced by value handles to still be
126 pretty printed in a sane way. They simply print as "value.3 *
127 value.5", etc.
129 Expression nodes have value handles associated with them as a
130 cache. Otherwise, we'd have to look them up again in the hash
131    table.  This makes a significant difference (factor of two or more) on
132 some test cases. They can be thrown away after the pass is
133 finished. */
135 /* Representation of expressions on value numbers:
137 In some portions of this code, you will notice we allocate "fake"
138 analogues to the expression we are value numbering, and replace the
139 operands with the values of the expression. Since we work on
140 values, and not just names, we canonicalize expressions to value
141 expressions for use in the ANTIC sets, the EXP_GEN set, etc.
143 This is theoretically unnecessary, it just saves a bunch of
144 repeated get_value_handle and find_leader calls in the remainder of
145 the code, trading off temporary memory usage for speed. The tree
146 nodes aren't actually creating more garbage, since they are
147 allocated in a special pools which are thrown away at the end of
148 this pass.
150 All of this also means that if you print the EXP_GEN or ANTIC sets,
151 you will see "value.5 + value.7" in the set, instead of "a_55 +
152 b_66" or something. The only thing that actually cares about
153 seeing the value leaders is phi translation, and it needs to be
154 able to find the leader for a value in an arbitrary block, so this
155 "value expression" form is perfect for it (otherwise you'd do
156 get_value_handle->find_leader->translate->get_value_handle->find_leader).*/
159 /* Representation of sets:
161 There are currently two types of sets used, hopefully to be unified soon.
162 The AVAIL sets do not need to be sorted in any particular order,
163 and thus, are simply represented as two bitmaps, one that keeps
164 track of values present in the set, and one that keeps track of
165 expressions present in the set.
167 The other sets are represented as doubly linked lists kept in topological
168 order, with an optional supporting bitmap of values present in the
169 set. The sets represent values, and the elements can be values or
170 expressions. The elements can appear in different sets, but each
171 element can only appear once in each set.
173 Since each node in the set represents a value, we also want to be
174 able to map expression, set pairs to something that tells us
175    whether the value is present in a set.  We use a per-set bitmap for
176 that. The value handles also point to a linked list of the
177 expressions they represent via a tree annotation. This is mainly
178 useful only for debugging, since we don't do identity lookups. */
181 static bool in_fre = false;
183 /* A value set element. Basically a single linked list of
184 expressions/values. */
185 typedef struct value_set_node
187 /* An expression. */
188 tree expr;
190 /* A pointer to the next element of the value set. */
191 struct value_set_node *next;
192 } *value_set_node_t;
195 /* A value set. This is a singly linked list of value_set_node
196 elements with a possible bitmap that tells us what values exist in
197 the set. This set must be kept in topologically sorted order. */
198 typedef struct value_set
200 /* The head of the list. Used for iterating over the list in
201 order. */
202 value_set_node_t head;
204 /* The tail of the list. Used for tail insertions, which are
205 necessary to keep the set in topologically sorted order because
206 of how the set is built. */
207 value_set_node_t tail;
209 /* The length of the list. */
210 size_t length;
212 /* True if the set is indexed, which means it contains a backing
213 bitmap for quick determination of whether certain values exist in the
214 set. */
215 bool indexed;
217 /* The bitmap of values that exist in the set. May be NULL in an
218 empty or non-indexed set. */
219 bitmap values;
221 } *value_set_t;
224 /* An unordered bitmap set. One bitmap tracks values, the other,
225 expressions. */
226 typedef struct bitmap_set
228 bitmap expressions;
229 bitmap values;
230 } *bitmap_set_t;
232 /* Sets that we need to keep track of. */
233 typedef struct bb_value_sets
235 /* The EXP_GEN set, which represents expressions/values generated in
236 a basic block. */
237 value_set_t exp_gen;
239 /* The PHI_GEN set, which represents PHI results generated in a
240 basic block. */
241 bitmap_set_t phi_gen;
243 /* The TMP_GEN set, which represents results/temporaries generated
244 in a basic block. IE the LHS of an expression. */
245 bitmap_set_t tmp_gen;
247 /* The AVAIL_OUT set, which represents which values are available in
248 a given basic block. */
249 bitmap_set_t avail_out;
251 /* The ANTIC_IN set, which represents which values are anticipatable
252 in a given basic block. */
253 value_set_t antic_in;
255 /* The NEW_SETS set, which is used during insertion to augment the
256 AVAIL_OUT set of blocks with the new insertions performed during
257 the current iteration. */
258 bitmap_set_t new_sets;
260 /* The RVUSE sets, which are used during ANTIC computation to ensure
261 that we don't mark loads ANTIC once they have died. */
262 bitmap rvuse_in;
263 bitmap rvuse_out;
264 bitmap rvuse_gen;
265 bitmap rvuse_kill;
267 /* For actually occurring loads, as long as they occur before all the
268 other stores in the block, we know they are antic at the top of
269 the block, regardless of RVUSE_KILL. */
270 value_set_t antic_safe_loads;
271 } *bb_value_sets_t;
/* Accessors for the per-block sets stored in BB->aux.  */
#define EXP_GEN(BB)	((bb_value_sets_t) ((BB)->aux))->exp_gen
#define PHI_GEN(BB)	((bb_value_sets_t) ((BB)->aux))->phi_gen
#define TMP_GEN(BB)	((bb_value_sets_t) ((BB)->aux))->tmp_gen
#define AVAIL_OUT(BB)	((bb_value_sets_t) ((BB)->aux))->avail_out
#define ANTIC_IN(BB)	((bb_value_sets_t) ((BB)->aux))->antic_in
#define RVUSE_IN(BB)	((bb_value_sets_t) ((BB)->aux))->rvuse_in
#define RVUSE_GEN(BB)	((bb_value_sets_t) ((BB)->aux))->rvuse_gen
#define RVUSE_KILL(BB)	((bb_value_sets_t) ((BB)->aux))->rvuse_kill
#define RVUSE_OUT(BB)	((bb_value_sets_t) ((BB)->aux))->rvuse_out
#define NEW_SETS(BB)	((bb_value_sets_t) ((BB)->aux))->new_sets
#define ANTIC_SAFE_LOADS(BB)	((bb_value_sets_t) ((BB)->aux))->antic_safe_loads
285 /* This structure is used to keep track of statistics on what
286 optimization PRE was able to perform. */
/* This structure is used to keep track of statistics on what
   optimization PRE was able to perform.  */
static struct
{
  /* The number of RHS computations eliminated by PRE.  */
  int eliminations;

  /* The number of new expressions/temporaries generated by PRE.  */
  int insertions;

  /* The number of new PHI nodes added by PRE.  */
  int phis;

  /* The number of values found constant.  */
  int constified;

} pre_stats;
304 static tree bitmap_find_leader (bitmap_set_t, tree);
305 static tree find_leader (value_set_t, tree);
306 static void value_insert_into_set (value_set_t, tree);
307 static void bitmap_value_insert_into_set (bitmap_set_t, tree);
308 static void bitmap_value_replace_in_set (bitmap_set_t, tree);
309 static void insert_into_set (value_set_t, tree);
310 static void bitmap_set_copy (bitmap_set_t, bitmap_set_t);
311 static bool bitmap_set_contains_value (bitmap_set_t, tree);
312 static bitmap_set_t bitmap_set_new (void);
313 static value_set_t set_new (bool);
314 static bool is_undefined_value (tree);
315 static tree create_expression_by_pieces (basic_block, tree, tree);
316 static tree find_or_generate_expression (basic_block, tree, tree);
319 /* We can add and remove elements and entries to and from sets
320 and hash tables, so we use alloc pools for them. */
322 static alloc_pool value_set_pool;
323 static alloc_pool bitmap_set_pool;
324 static alloc_pool value_set_node_pool;
325 static alloc_pool binary_node_pool;
326 static alloc_pool unary_node_pool;
327 static alloc_pool reference_node_pool;
328 static alloc_pool comparison_node_pool;
329 static alloc_pool expression_node_pool;
330 static alloc_pool list_node_pool;
331 static alloc_pool modify_expr_node_pool;
332 static bitmap_obstack grand_bitmap_obstack;
334 /* To avoid adding 300 temporary variables when we only need one, we
335 only create one temporary variable, on demand, and build ssa names
336 off that. We do have to change the variable if the types don't
337 match the current variable's type. */
338 static tree pretemp;
339 static tree storetemp;
340 static tree mergephitemp;
341 static tree prephitemp;
343 /* Set of blocks with statements that have had its EH information
344 cleaned up. */
345 static bitmap need_eh_cleanup;
347 /* The phi_translate_table caches phi translations for a given
348 expression and predecessor. */
350 static htab_t phi_translate_table;
352 /* A three tuple {e, pred, v} used to cache phi translations in the
353 phi_translate_table. */
355 typedef struct expr_pred_trans_d
357 /* The expression. */
358 tree e;
360 /* The predecessor block along which we translated the expression. */
361 basic_block pred;
363 /* vuses associated with the expression. */
364 VEC (tree, gc) *vuses;
366 /* The value that resulted from the translation. */
367 tree v;
370 /* The hashcode for the expression, pred pair. This is cached for
371 speed reasons. */
372 hashval_t hashcode;
373 } *expr_pred_trans_t;
375 /* Return the hash value for a phi translation table entry. */
377 static hashval_t
378 expr_pred_trans_hash (const void *p)
380 const expr_pred_trans_t ve = (expr_pred_trans_t) p;
381 return ve->hashcode;
384 /* Return true if two phi translation table entries are the same.
385 P1 and P2 should point to the expr_pred_trans_t's to be compared.*/
387 static int
388 expr_pred_trans_eq (const void *p1, const void *p2)
390 const expr_pred_trans_t ve1 = (expr_pred_trans_t) p1;
391 const expr_pred_trans_t ve2 = (expr_pred_trans_t) p2;
392 basic_block b1 = ve1->pred;
393 basic_block b2 = ve2->pred;
394 int i;
395 tree vuse1;
397 /* If they are not translations for the same basic block, they can't
398 be equal. */
399 if (b1 != b2)
400 return false;
403 /* If they are for the same basic block, determine if the
404 expressions are equal. */
405 if (!expressions_equal_p (ve1->e, ve2->e))
406 return false;
408 /* Make sure the vuses are equivalent. */
409 if (ve1->vuses == ve2->vuses)
410 return true;
412 if (VEC_length (tree, ve1->vuses) != VEC_length (tree, ve2->vuses))
413 return false;
415 for (i = 0; VEC_iterate (tree, ve1->vuses, i, vuse1); i++)
417 if (VEC_index (tree, ve2->vuses, i) != vuse1)
418 return false;
421 return true;
424 /* Search in the phi translation table for the translation of
425 expression E in basic block PRED with vuses VUSES.
426 Return the translated value, if found, NULL otherwise. */
428 static inline tree
429 phi_trans_lookup (tree e, basic_block pred, VEC (tree, gc) *vuses)
431 void **slot;
432 struct expr_pred_trans_d ept;
434 ept.e = e;
435 ept.pred = pred;
436 ept.vuses = vuses;
437 ept.hashcode = vn_compute (e, (unsigned long) pred);
438 slot = htab_find_slot_with_hash (phi_translate_table, &ept, ept.hashcode,
439 NO_INSERT);
440 if (!slot)
441 return NULL;
442 else
443 return ((expr_pred_trans_t) *slot)->v;
447 /* Add the tuple mapping from {expression E, basic block PRED, vuses VUSES} to
448 value V, to the phi translation table. */
450 static inline void
451 phi_trans_add (tree e, tree v, basic_block pred, VEC (tree, gc) *vuses)
453 void **slot;
454 expr_pred_trans_t new_pair = XNEW (struct expr_pred_trans_d);
455 new_pair->e = e;
456 new_pair->pred = pred;
457 new_pair->vuses = vuses;
458 new_pair->v = v;
459 new_pair->hashcode = vn_compute (e, (unsigned long) pred);
460 slot = htab_find_slot_with_hash (phi_translate_table, new_pair,
461 new_pair->hashcode, INSERT);
462 if (*slot)
463 free (*slot);
464 *slot = (void *) new_pair;
468 /* Add expression E to the expression set of value V. */
470 void
471 add_to_value (tree v, tree e)
473 /* Constants have no expression sets. */
474 if (is_gimple_min_invariant (v))
475 return;
477 if (VALUE_HANDLE_EXPR_SET (v) == NULL)
478 VALUE_HANDLE_EXPR_SET (v) = set_new (false);
480 insert_into_set (VALUE_HANDLE_EXPR_SET (v), e);
484 /* Return true if value V exists in the bitmap for SET. */
486 static inline bool
487 value_exists_in_set_bitmap (value_set_t set, tree v)
489 if (!set->values)
490 return false;
492 return bitmap_bit_p (set->values, VALUE_HANDLE_ID (v));
496 /* Remove value V from the bitmap for SET. */
498 static void
499 value_remove_from_set_bitmap (value_set_t set, tree v)
501 gcc_assert (set->indexed);
503 if (!set->values)
504 return;
506 bitmap_clear_bit (set->values, VALUE_HANDLE_ID (v));
510 /* Insert the value number V into the bitmap of values existing in
511 SET. */
513 static inline void
514 value_insert_into_set_bitmap (value_set_t set, tree v)
516 gcc_assert (set->indexed);
518 if (set->values == NULL)
519 set->values = BITMAP_ALLOC (&grand_bitmap_obstack);
521 bitmap_set_bit (set->values, VALUE_HANDLE_ID (v));
525 /* Create a new bitmap set and return it. */
527 static bitmap_set_t
528 bitmap_set_new (void)
530 bitmap_set_t ret = (bitmap_set_t) pool_alloc (bitmap_set_pool);
531 ret->expressions = BITMAP_ALLOC (&grand_bitmap_obstack);
532 ret->values = BITMAP_ALLOC (&grand_bitmap_obstack);
533 return ret;
536 /* Create a new set. */
538 static value_set_t
539 set_new (bool indexed)
541 value_set_t ret;
542 ret = (value_set_t) pool_alloc (value_set_pool);
543 ret->head = ret->tail = NULL;
544 ret->length = 0;
545 ret->indexed = indexed;
546 ret->values = NULL;
547 return ret;
550 /* Insert an expression EXPR into a bitmapped set. */
552 static void
553 bitmap_insert_into_set (bitmap_set_t set, tree expr)
555 tree val;
556 /* XXX: For now, we only let SSA_NAMES into the bitmap sets. */
557 gcc_assert (TREE_CODE (expr) == SSA_NAME);
558 val = get_value_handle (expr);
560 gcc_assert (val);
561 if (!is_gimple_min_invariant (val))
563 bitmap_set_bit (set->values, VALUE_HANDLE_ID (val));
564 bitmap_set_bit (set->expressions, SSA_NAME_VERSION (expr));
568 /* Insert EXPR into SET. */
570 static void
571 insert_into_set (value_set_t set, tree expr)
573 value_set_node_t newnode = (value_set_node_t) pool_alloc (value_set_node_pool);
574 tree val = get_value_handle (expr);
575 gcc_assert (val);
577 if (is_gimple_min_invariant (val))
578 return;
580 /* For indexed sets, insert the value into the set value bitmap.
581 For all sets, add it to the linked list and increment the list
582 length. */
583 if (set->indexed)
584 value_insert_into_set_bitmap (set, val);
586 newnode->next = NULL;
587 newnode->expr = expr;
588 set->length ++;
589 if (set->head == NULL)
591 set->head = set->tail = newnode;
593 else
595 set->tail->next = newnode;
596 set->tail = newnode;
600 /* Copy a bitmapped set ORIG, into bitmapped set DEST. */
602 static void
603 bitmap_set_copy (bitmap_set_t dest, bitmap_set_t orig)
605 bitmap_copy (dest->expressions, orig->expressions);
606 bitmap_copy (dest->values, orig->values);
609 /* Perform bitmapped set operation DEST &= ORIG. */
611 static void
612 bitmap_set_and (bitmap_set_t dest, bitmap_set_t orig)
614 bitmap_iterator bi;
615 unsigned int i;
616 bitmap temp = BITMAP_ALLOC (&grand_bitmap_obstack);
618 bitmap_and_into (dest->values, orig->values);
619 bitmap_copy (temp, dest->expressions);
620 EXECUTE_IF_SET_IN_BITMAP (temp, 0, i, bi)
622 tree name = ssa_name (i);
623 tree val = get_value_handle (name);
624 if (!bitmap_bit_p (dest->values, VALUE_HANDLE_ID (val)))
625 bitmap_clear_bit (dest->expressions, i);
630 /* Perform bitmapped value set operation DEST = DEST & ~ORIG. */
632 static void
633 bitmap_set_and_compl (bitmap_set_t dest, bitmap_set_t orig)
635 bitmap_iterator bi;
636 unsigned int i;
637 bitmap temp = BITMAP_ALLOC (&grand_bitmap_obstack);
639 bitmap_and_compl_into (dest->values, orig->values);
640 bitmap_copy (temp, dest->expressions);
641 EXECUTE_IF_SET_IN_BITMAP (temp, 0, i, bi)
643 tree name = ssa_name (i);
644 tree val = get_value_handle (name);
645 if (!bitmap_bit_p (dest->values, VALUE_HANDLE_ID (val)))
646 bitmap_clear_bit (dest->expressions, i);
650 /* Return true if the bitmap set SET is empty. */
652 static bool
653 bitmap_set_empty_p (bitmap_set_t set)
655 return bitmap_empty_p (set->values);
658 /* Copy the set ORIG to the set DEST. */
660 static void
661 set_copy (value_set_t dest, value_set_t orig)
663 value_set_node_t node;
665 if (!orig || !orig->head)
666 return;
668 for (node = orig->head;
669 node;
670 node = node->next)
672 insert_into_set (dest, node->expr);
676 /* Remove EXPR from SET. */
678 static void
679 set_remove (value_set_t set, tree expr)
681 value_set_node_t node, prev;
683 /* Remove the value of EXPR from the bitmap, decrement the set
684 length, and remove it from the actual double linked list. */
685 value_remove_from_set_bitmap (set, get_value_handle (expr));
686 set->length--;
687 prev = NULL;
688 for (node = set->head;
689 node != NULL;
690 prev = node, node = node->next)
692 if (node->expr == expr)
694 if (prev == NULL)
695 set->head = node->next;
696 else
697 prev->next= node->next;
699 if (node == set->tail)
700 set->tail = prev;
701 pool_free (value_set_node_pool, node);
702 return;
707 /* Return true if SET contains the value VAL. */
709 static bool
710 set_contains_value (value_set_t set, tree val)
712 /* All constants are in every set. */
713 if (is_gimple_min_invariant (val))
714 return true;
716 if (!set || set->length == 0)
717 return false;
719 return value_exists_in_set_bitmap (set, val);
722 /* Return true if bitmapped set SET contains the expression EXPR. */
723 static bool
724 bitmap_set_contains (bitmap_set_t set, tree expr)
726 /* All constants are in every set. */
727 if (is_gimple_min_invariant (get_value_handle (expr)))
728 return true;
730 /* XXX: Bitmapped sets only contain SSA_NAME's for now. */
731 if (TREE_CODE (expr) != SSA_NAME)
732 return false;
733 return bitmap_bit_p (set->expressions, SSA_NAME_VERSION (expr));
737 /* Return true if bitmapped set SET contains the value VAL. */
739 static bool
740 bitmap_set_contains_value (bitmap_set_t set, tree val)
742 if (is_gimple_min_invariant (val))
743 return true;
744 return bitmap_bit_p (set->values, VALUE_HANDLE_ID (val));
747 /* Replace an instance of value LOOKFOR with expression EXPR in SET. */
749 static void
750 bitmap_set_replace_value (bitmap_set_t set, tree lookfor, tree expr)
752 value_set_t exprset;
753 value_set_node_t node;
754 if (is_gimple_min_invariant (lookfor))
755 return;
756 if (!bitmap_set_contains_value (set, lookfor))
757 return;
759 /* The number of expressions having a given value is usually
760 significantly less than the total number of expressions in SET.
761 Thus, rather than check, for each expression in SET, whether it
762 has the value LOOKFOR, we walk the reverse mapping that tells us
763 what expressions have a given value, and see if any of those
764 expressions are in our set. For large testcases, this is about
765 5-10x faster than walking the bitmap. If this is somehow a
766 significant lose for some cases, we can choose which set to walk
767 based on the set size. */
768 exprset = VALUE_HANDLE_EXPR_SET (lookfor);
769 for (node = exprset->head; node; node = node->next)
771 if (TREE_CODE (node->expr) == SSA_NAME)
773 if (bitmap_bit_p (set->expressions, SSA_NAME_VERSION (node->expr)))
775 bitmap_clear_bit (set->expressions, SSA_NAME_VERSION (node->expr));
776 bitmap_set_bit (set->expressions, SSA_NAME_VERSION (expr));
777 return;
783 /* Subtract bitmapped set B from value set A, and return the new set. */
785 static value_set_t
786 bitmap_set_subtract_from_value_set (value_set_t a, bitmap_set_t b,
787 bool indexed)
789 value_set_t ret = set_new (indexed);
790 value_set_node_t node;
791 for (node = a->head;
792 node;
793 node = node->next)
795 if (!bitmap_set_contains (b, node->expr))
796 insert_into_set (ret, node->expr);
798 return ret;
801 /* Return true if two sets are equal. */
803 static bool
804 set_equal (value_set_t a, value_set_t b)
806 value_set_node_t node;
808 if (a->length != b->length)
809 return false;
810 for (node = a->head;
811 node;
812 node = node->next)
814 if (!set_contains_value (b, get_value_handle (node->expr)))
815 return false;
817 return true;
820 /* Replace an instance of EXPR's VALUE with EXPR in SET if it exists,
821 and add it otherwise. */
823 static void
824 bitmap_value_replace_in_set (bitmap_set_t set, tree expr)
826 tree val = get_value_handle (expr);
827 if (bitmap_set_contains_value (set, val))
828 bitmap_set_replace_value (set, val, expr);
829 else
830 bitmap_insert_into_set (set, expr);
833 /* Insert EXPR into SET if EXPR's value is not already present in
834 SET. */
836 static void
837 bitmap_value_insert_into_set (bitmap_set_t set, tree expr)
839 tree val = get_value_handle (expr);
841 if (is_gimple_min_invariant (val))
842 return;
844 if (!bitmap_set_contains_value (set, val))
845 bitmap_insert_into_set (set, expr);
848 /* Insert the value for EXPR into SET, if it doesn't exist already. */
850 static void
851 value_insert_into_set (value_set_t set, tree expr)
853 tree val = get_value_handle (expr);
855 /* Constant and invariant values exist everywhere, and thus,
856 actually keeping them in the sets is pointless. */
857 if (is_gimple_min_invariant (val))
858 return;
860 if (!set_contains_value (set, val))
861 insert_into_set (set, expr);
865 /* Print out SET to OUTFILE. */
867 static void
868 bitmap_print_value_set (FILE *outfile, bitmap_set_t set,
869 const char *setname, int blockindex)
871 fprintf (outfile, "%s[%d] := { ", setname, blockindex);
872 if (set)
874 bool first = true;
875 unsigned i;
876 bitmap_iterator bi;
878 EXECUTE_IF_SET_IN_BITMAP (set->expressions, 0, i, bi)
880 if (!first)
881 fprintf (outfile, ", ");
882 first = false;
883 print_generic_expr (outfile, ssa_name (i), 0);
885 fprintf (outfile, " (");
886 print_generic_expr (outfile, get_value_handle (ssa_name (i)), 0);
887 fprintf (outfile, ") ");
890 fprintf (outfile, " }\n");
892 /* Print out the value_set SET to OUTFILE. */
894 static void
895 print_value_set (FILE *outfile, value_set_t set,
896 const char *setname, int blockindex)
898 value_set_node_t node;
899 fprintf (outfile, "%s[%d] := { ", setname, blockindex);
900 if (set)
902 for (node = set->head;
903 node;
904 node = node->next)
906 print_generic_expr (outfile, node->expr, 0);
908 fprintf (outfile, " (");
909 print_generic_expr (outfile, get_value_handle (node->expr), 0);
910 fprintf (outfile, ") ");
912 if (node->next)
913 fprintf (outfile, ", ");
917 fprintf (outfile, " }\n");
920 /* Print out the expressions that have VAL to OUTFILE. */
922 void
923 print_value_expressions (FILE *outfile, tree val)
925 if (VALUE_HANDLE_EXPR_SET (val))
927 char s[10];
928 sprintf (s, "VH.%04d", VALUE_HANDLE_ID (val));
929 print_value_set (outfile, VALUE_HANDLE_EXPR_SET (val), s, 0);
934 void
935 debug_value_expressions (tree val)
937 print_value_expressions (stderr, val);
941 void debug_value_set (value_set_t, const char *, int);
943 void
944 debug_value_set (value_set_t set, const char *setname, int blockindex)
946 print_value_set (stderr, set, setname, blockindex);
949 /* Return the folded version of T if T, when folded, is a gimple
950 min_invariant. Otherwise, return T. */
952 static tree
953 fully_constant_expression (tree t)
955 tree folded;
956 folded = fold (t);
957 if (folded && is_gimple_min_invariant (folded))
958 return folded;
959 return t;
962 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
963 For example, this can copy a list made of TREE_LIST nodes.
964 Allocates the nodes in list_node_pool*/
966 static tree
967 pool_copy_list (tree list)
969 tree head;
970 tree prev, next;
972 if (list == 0)
973 return 0;
974 head = (tree) pool_alloc (list_node_pool);
976 memcpy (head, list, tree_size (list));
977 prev = head;
979 next = TREE_CHAIN (list);
980 while (next)
982 TREE_CHAIN (prev) = (tree) pool_alloc (list_node_pool);
983 memcpy (TREE_CHAIN (prev), next, tree_size (next));
984 prev = TREE_CHAIN (prev);
985 next = TREE_CHAIN (next);
987 return head;
990 /* Translate the vuses in the VUSES vector backwards through phi
991 nodes, so that they have the value they would have in BLOCK. */
993 static VEC(tree, gc) *
994 translate_vuses_through_block (VEC (tree, gc) *vuses, basic_block block)
996 tree oldvuse;
997 VEC(tree, gc) *result = NULL;
998 int i;
1000 for (i = 0; VEC_iterate (tree, vuses, i, oldvuse); i++)
1002 tree phi = SSA_NAME_DEF_STMT (oldvuse);
1003 if (TREE_CODE (phi) == PHI_NODE)
1005 edge e = find_edge (block, bb_for_stmt (phi));
1006 if (e)
1008 tree def = PHI_ARG_DEF (phi, e->dest_idx);
1009 if (def != oldvuse)
1011 if (!result)
1012 result = VEC_copy (tree, gc, vuses);
1013 VEC_replace (tree, result, i, def);
1018 if (result)
1020 sort_vuses (result);
1021 return result;
1023 return vuses;
1026 /* Translate EXPR using phis in PHIBLOCK, so that it has the values of
1027 the phis in PRED. Return NULL if we can't find a leader for each
1028 part of the translated expression. */
1030 static tree
1031 phi_translate (tree expr, value_set_t set, basic_block pred,
1032 basic_block phiblock)
1034 tree phitrans = NULL;
1035 tree oldexpr = expr;
/* A NULL expression needs no translation, and constants represent
   themselves in every block.  */
1036 if (expr == NULL)
1037 return NULL;
1039 if (is_gimple_min_invariant (expr))
1040 return expr;
1042 /* Phi translations of a given expression don't change. */
/* Consult the translation cache first.  Cache entries for value
   expressions are keyed on (expr, pred, vuses); everything else is
   keyed on (expr, pred, NULL).  */
1043 if (EXPR_P (expr))
1045 tree vh;
1047 vh = get_value_handle (expr);
1048 if (vh && TREE_CODE (vh) == VALUE_HANDLE)
1049 phitrans = phi_trans_lookup (expr, pred, VALUE_HANDLE_VUSES (vh));
1050 else
1051 phitrans = phi_trans_lookup (expr, pred, NULL);
1053 else
1054 phitrans = phi_trans_lookup (expr, pred, NULL);
1056 if (phitrans)
1057 return phitrans;
/* Dispatch on the tree code class.  Each case phi-translates the
   operands recursively, and rebuilds the expression (in pool memory)
   only when some operand or vuse actually changed.  */
1059 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
1061 case tcc_expression:
/* Among tcc_expression codes, only CALL_EXPRs are translated.  */
1063 if (TREE_CODE (expr) != CALL_EXPR)
1064 return NULL;
1065 else
1067 tree oldop0 = TREE_OPERAND (expr, 0);
1068 tree oldarglist = TREE_OPERAND (expr, 1);
1069 tree oldop2 = TREE_OPERAND (expr, 2);
1070 tree newop0;
1071 tree newarglist;
1072 tree newop2 = NULL;
1073 tree oldwalker;
1074 tree newwalker;
1075 tree newexpr;
1076 tree vh = get_value_handle (expr);
1077 bool listchanged = false;
1078 VEC (tree, gc) *vuses = VALUE_HANDLE_VUSES (vh);
1079 VEC (tree, gc) *tvuses;
1081 /* Call expressions are kind of weird because they have an
1082 argument list. We don't want to value number the list
1083 as one value number, because that doesn't make much
1084 sense, and just breaks the support functions we call,
1085 which expect TREE_OPERAND (call_expr, 2) to be a
1086 TREE_LIST. */
/* Translate the callee (operand 0) and, if present, the static
   chain (operand 2).  Failing to find a leader for either part
   makes the whole call untranslatable.  */
1088 newop0 = phi_translate (find_leader (set, oldop0),
1089 set, pred, phiblock);
1090 if (newop0 == NULL)
1091 return NULL;
1092 if (oldop2)
1094 newop2 = phi_translate (find_leader (set, oldop2),
1095 set, pred, phiblock);
1096 if (newop2 == NULL)
1097 return NULL;
1100 /* phi translate the argument list piece by piece.
1102 We could actually build the list piece by piece here,
1103 but it's likely to not be worth the memory we will save,
1104 unless you have millions of call arguments. */
1106 newarglist = pool_copy_list (oldarglist);
1107 for (oldwalker = oldarglist, newwalker = newarglist;
1108 oldwalker && newwalker;
1109 oldwalker = TREE_CHAIN (oldwalker),
1110 newwalker = TREE_CHAIN (newwalker))
1113 tree oldval = TREE_VALUE (oldwalker);
1114 tree newval;
1115 if (oldval)
1117 /* This may seem like a weird place for this
1118 check, but it's actually the easiest place to
1119 do it. We can't do it lower on in the
1120 recursion because it's valid for pieces of a
1121 component ref to be of AGGREGATE_TYPE, as long
1122 as the outermost one is not.
1123 To avoid *that* case, we have a check for
1124 AGGREGATE_TYPE_P in insert_aux. However, that
1125 check will *not* catch this case because here
1126 it occurs in the argument list. */
1127 if (AGGREGATE_TYPE_P (TREE_TYPE (oldval)))
1128 return NULL;
1129 newval = phi_translate (find_leader (set, oldval),
1130 set, pred, phiblock);
1131 if (newval == NULL)
1132 return NULL;
1133 if (newval != oldval)
1135 listchanged = true;
1136 TREE_VALUE (newwalker) = get_value_handle (newval);
/* A changed argument list needs its own value number.  */
1140 if (listchanged)
1141 vn_lookup_or_add (newarglist, NULL);
1143 tvuses = translate_vuses_through_block (vuses, pred);
/* Only build a new call node when an operand, the argument list,
   or the vuse vector changed; otherwise the cached EXPR is
   returned unchanged below.  */
1145 if (listchanged || (newop0 != oldop0) || (oldop2 != newop2)
1146 || vuses != tvuses)
1148 newexpr = (tree) pool_alloc (expression_node_pool);
1149 memcpy (newexpr, expr, tree_size (expr));
1150 TREE_OPERAND (newexpr, 0) = newop0 == oldop0 ? oldop0 : get_value_handle (newop0)
1151 TREE_OPERAND (newexpr, 1) = listchanged ? newarglist : oldarglist;
1152 TREE_OPERAND (newexpr, 2) = newop2 == oldop2 ? oldop2 : get_value_handle (newop2);
1153 create_tree_ann (newexpr);
1154 vn_lookup_or_add_with_vuses (newexpr, tvuses);
1155 expr = newexpr;
1156 phi_trans_add (oldexpr, newexpr, pred, tvuses);
1160 return expr;
1162 case tcc_declaration:
/* Bare declarations (loads of a DECL) keep the same tree; only the
   vuses need translating through the phi block.  */
1164 VEC (tree, gc) * oldvuses = NULL;
1165 VEC (tree, gc) * newvuses = NULL;
1167 oldvuses = VALUE_HANDLE_VUSES (get_value_handle (expr));
1168 if (oldvuses)
1169 newvuses = translate_vuses_through_block (oldvuses, pred);
1171 if (oldvuses != newvuses)
1172 vn_lookup_or_add_with_vuses (expr, newvuses);
1174 phi_trans_add (oldexpr, expr, pred, newvuses);
1176 return expr;
1178 case tcc_reference:
/* Memory references: only INDIRECT_REF, COMPONENT_REF and
   ARRAY_REF are handled; other reference codes are not PRE'd.  */
1180 tree oldop0 = TREE_OPERAND (expr, 0);
1181 tree oldop1 = NULL;
1182 tree newop0;
1183 tree newop1 = NULL;
1184 tree oldop2 = NULL;
1185 tree newop2 = NULL;
1186 tree oldop3 = NULL;
1187 tree newop3 = NULL;
1188 tree newexpr;
1189 VEC (tree, gc) * oldvuses = NULL;
1190 VEC (tree, gc) * newvuses = NULL;
1192 if (TREE_CODE (expr) != INDIRECT_REF
1193 && TREE_CODE (expr) != COMPONENT_REF
1194 && TREE_CODE (expr) != ARRAY_REF)
1195 return NULL;
1197 newop0 = phi_translate (find_leader (set, oldop0),
1198 set, pred, phiblock);
1199 if (newop0 == NULL)
1200 return NULL;
/* ARRAY_REFs carry up to three extra operands: index, lower bound
   and element size; each must translate successfully.  */
1202 if (TREE_CODE (expr) == ARRAY_REF)
1204 oldop1 = TREE_OPERAND (expr, 1);
1205 newop1 = phi_translate (find_leader (set, oldop1),
1206 set, pred, phiblock);
1208 if (newop1 == NULL)
1209 return NULL;
1210 oldop2 = TREE_OPERAND (expr, 2);
1211 if (oldop2)
1213 newop2 = phi_translate (find_leader (set, oldop2),
1214 set, pred, phiblock);
1216 if (newop2 == NULL)
1217 return NULL;
1219 oldop3 = TREE_OPERAND (expr, 3);
1220 if (oldop3)
1222 newop3 = phi_translate (find_leader (set, oldop3),
1223 set, pred, phiblock);
1225 if (newop3 == NULL)
1226 return NULL;
1230 oldvuses = VALUE_HANDLE_VUSES (get_value_handle (expr));
1231 if (oldvuses)
1232 newvuses = translate_vuses_through_block (oldvuses, pred);
1234 if (newop0 != oldop0 || newvuses != oldvuses
1235 || newop1 != oldop1
1236 || newop2 != oldop2
1237 || newop3 != oldop3)
1239 tree t;
1241 newexpr = pool_alloc (reference_node_pool);
1242 memcpy (newexpr, expr, tree_size (expr));
1243 TREE_OPERAND (newexpr, 0) = get_value_handle (newop0);
1244 if (TREE_CODE (expr) == ARRAY_REF)
1246 TREE_OPERAND (newexpr, 1) = get_value_handle (newop1);
1247 if (newop2)
1248 TREE_OPERAND (newexpr, 2) = get_value_handle (newop2);
1249 if (newop3)
1250 TREE_OPERAND (newexpr, 3) = get_value_handle (newop3);
/* The rebuilt reference may fold to a constant; if so the pool
   node is returned to the pool instead of being value numbered.  */
1253 t = fully_constant_expression (newexpr);
1255 if (t != newexpr)
1257 pool_free (reference_node_pool, newexpr);
1258 newexpr = t;
1260 else
1262 create_tree_ann (newexpr);
1263 vn_lookup_or_add_with_vuses (newexpr, newvuses);
1265 expr = newexpr;
1266 phi_trans_add (oldexpr, newexpr, pred, newvuses);
1269 return expr;
1270 break;
1272 case tcc_binary:
1273 case tcc_comparison:
1275 tree oldop1 = TREE_OPERAND (expr, 0);
1276 tree oldop2 = TREE_OPERAND (expr, 1);
1277 tree newop1;
1278 tree newop2;
1279 tree newexpr;
1281 newop1 = phi_translate (find_leader (set, oldop1),
1282 set, pred, phiblock);
1283 if (newop1 == NULL)
1284 return NULL;
1285 newop2 = phi_translate (find_leader (set, oldop2),
1286 set, pred, phiblock);
1287 if (newop2 == NULL)
1288 return NULL;
1289 if (newop1 != oldop1 || newop2 != oldop2)
1291 tree t;
1292 newexpr = (tree) pool_alloc (binary_node_pool);
1293 memcpy (newexpr, expr, tree_size (expr));
1294 TREE_OPERAND (newexpr, 0) = newop1 == oldop1 ? oldop1 : get_value_handle (newop1);
1295 TREE_OPERAND (newexpr, 1) = newop2 == oldop2 ? oldop2 : get_value_handle (newop2);
/* Constant-fold the rebuilt expression where possible.  */
1296 t = fully_constant_expression (newexpr);
1297 if (t != newexpr)
1299 pool_free (binary_node_pool, newexpr);
1300 newexpr = t;
1302 else
1304 create_tree_ann (newexpr);
1305 vn_lookup_or_add (newexpr, NULL);
1307 expr = newexpr;
1308 phi_trans_add (oldexpr, newexpr, pred, NULL);
1311 return expr;
1313 case tcc_unary:
1315 tree oldop1 = TREE_OPERAND (expr, 0);
1316 tree newop1;
1317 tree newexpr;
1319 newop1 = phi_translate (find_leader (set, oldop1),
1320 set, pred, phiblock);
1321 if (newop1 == NULL)
1322 return NULL;
1323 if (newop1 != oldop1)
1325 tree t;
1326 newexpr = (tree) pool_alloc (unary_node_pool);
1327 memcpy (newexpr, expr, tree_size (expr));
1328 TREE_OPERAND (newexpr, 0) = get_value_handle (newop1);
1329 t = fully_constant_expression (newexpr);
1330 if (t != newexpr)
1332 pool_free (unary_node_pool, newexpr);
1333 newexpr = t;
1335 else
1337 create_tree_ann (newexpr);
1338 vn_lookup_or_add (newexpr, NULL);
1340 expr = newexpr;
1341 phi_trans_add (oldexpr, newexpr, pred, NULL);
1344 return expr;
1346 case tcc_exceptional:
/* An SSA_NAME defined by a PHI in PHIBLOCK translates to the PHI
   argument on the PRED -> PHIBLOCK edge; any other SSA_NAME passes
   through unchanged.  */
1348 tree phi = NULL;
1349 edge e;
1350 gcc_assert (TREE_CODE (expr) == SSA_NAME);
1351 if (TREE_CODE (SSA_NAME_DEF_STMT (expr)) == PHI_NODE)
1352 phi = SSA_NAME_DEF_STMT (expr);
1353 else
1354 return expr;
1356 e = find_edge (pred, bb_for_stmt (phi));
1357 if (e)
1359 if (is_undefined_value (PHI_ARG_DEF (phi, e->dest_idx)))
1360 return NULL;
1361 vn_lookup_or_add (PHI_ARG_DEF (phi, e->dest_idx), NULL);
1362 return PHI_ARG_DEF (phi, e->dest_idx);
1365 return expr;
1367 default:
1368 gcc_unreachable ();
1372 /* For each expression in SET, translate the value handles through phi nodes
1373 in PHIBLOCK using edge PHIBLOCK->PRED, and store the resulting
1374 expressions in DEST. */
1376 static void
1377 phi_translate_set (value_set_t dest, value_set_t set, basic_block pred,
1378 basic_block phiblock)
1380 value_set_node_t node;
1381 for (node = set->head;
1382 node;
1383 node = node->next)
1385 tree translated;
1387 translated = phi_translate (node->expr, set, pred, phiblock);
1389 /* Don't add constants or empty translations to the cache, since
1390 we won't look them up that way, or use the result, anyway. */
1391 if (translated && !is_gimple_min_invariant (translated))
1393 tree vh = get_value_handle (translated);
1394 VEC (tree, gc) *vuses;
1396 /* The value handle itself may also be an invariant, in
1397 which case, it has no vuses. */
1398 vuses = !is_gimple_min_invariant (vh)
1399 ? VALUE_HANDLE_VUSES (vh) : NULL;
1400 phi_trans_add (node->expr, translated, pred, vuses);
/* A NULL translation means some part of the expression had no
   leader, so it is simply not added to DEST.  */
1403 if (translated != NULL)
1404 value_insert_into_set (dest, translated);
1408 /* Find the leader for a value (i.e., the name representing that
1409 value) in a given set, and return it. Return NULL if no leader is
1410 found. */
1412 static tree
1413 bitmap_find_leader (bitmap_set_t set, tree val)
1415 if (val == NULL)
1416 return NULL;
/* Constants are their own leader in every set.  */
1418 if (is_gimple_min_invariant (val))
1419 return val;
1420 if (bitmap_set_contains_value (set, val))
1422 /* Rather than walk the entire bitmap of expressions, and see
1423 whether any of them has the value we are looking for, we look
1424 at the reverse mapping, which tells us the set of expressions
1425 that have a given value (IE value->expressions with that
1426 value) and see if any of those expressions are in our set.
1427 The number of expressions per value is usually significantly
1428 less than the number of expressions in the set. In fact, for
1429 large testcases, doing it this way is roughly 5-10x faster
1430 than walking the bitmap.
1431 If this is somehow a significant lose for some cases, we can
1432 choose which set to walk based on which set is smaller. */
1433 value_set_t exprset;
1434 value_set_node_t node;
1435 exprset = VALUE_HANDLE_EXPR_SET (val);
1436 for (node = exprset->head; node; node = node->next)
/* Bitmap sets are indexed by SSA version number, so only
   SSA_NAME expressions can be members.  */
1438 if (TREE_CODE (node->expr) == SSA_NAME)
1440 if (bitmap_bit_p (set->expressions,
1441 SSA_NAME_VERSION (node->expr)))
1442 return node->expr;
1446 return NULL;
1450 /* Find the leader for a value (i.e., the name representing that
1451 value) in a given set, and return it. Return NULL if no leader is
1452 found. */
1454 static tree
1455 find_leader (value_set_t set, tree val)
1457 value_set_node_t node;
1459 if (val == NULL)
1460 return NULL;
1462 /* Constants represent themselves. */
1463 if (is_gimple_min_invariant (val))
1464 return val;
/* An empty set trivially has no leader.  */
1466 if (set->length == 0)
1467 return NULL;
/* The bitmap check is a cheap membership filter; only when it says
   the value is present do we pay for the linear walk to find the
   actual expression carrying that value.  */
1469 if (value_exists_in_set_bitmap (set, val))
1471 for (node = set->head;
1472 node;
1473 node = node->next)
1475 if (get_value_handle (node->expr) == val)
1476 return node->expr;
1480 return NULL;
1483 /* Given the vuse representative map, MAP, and an SSA version number,
1484 ID, return the bitmap of names ID represents, or NULL, if none
1485 exists. */
1487 static bitmap
1488 get_representative (bitmap *map, int id)
1490 if (map[id] != NULL)
1491 return map[id];
1492 return NULL;
1495 /* A vuse is anticipable at the top of block x, from the bottom of the
1496 block, if it reaches the top of the block, and is not killed in the
1497 block. In effect, we are trying to see if the vuse is transparent
1498 backwards in the block. */
1500 static bool
1501 vuses_dies_in_block_x (VEC (tree, gc) *vuses, basic_block block)
1503 int i;
1504 tree vuse;
1506 for (i = 0; VEC_iterate (tree, vuses, i, vuse); i++)
1508 /* Any places where this is too conservative, are places
1509 where we created a new version and shouldn't have. */
/* A vuse "dies" if it either does not reach the top of BLOCK or is
   killed somewhere inside BLOCK; one dead vuse kills the whole
   vector.  */
1511 if (!bitmap_bit_p (RVUSE_IN (block), SSA_NAME_VERSION (vuse))
1512 || bitmap_bit_p (RVUSE_KILL (block), SSA_NAME_VERSION (vuse)))
1513 return true;
1515 return false;
1518 /* Determine if the expression EXPR is valid in SET. This means that
1519 we have a leader for each part of the expression (if it consists of
1520 values), or the expression is an SSA_NAME.
1522 NB: We never should run into a case where we have SSA_NAME +
1523 SSA_NAME or SSA_NAME + value. The sets valid_in_set is called on,
1524 the ANTIC sets, will only ever have SSA_NAME's or value expressions
1525 (IE VALUE1 + VALUE2, *VALUE1, VALUE1 < VALUE2) */
1527 static bool
1528 valid_in_set (value_set_t set, tree expr, basic_block block)
1530 tree vh = get_value_handle (expr);
1531 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
1533 case tcc_binary:
1534 case tcc_comparison:
/* Binary and comparison expressions are valid when both operand
   values have a leader in SET.  */
1536 tree op1 = TREE_OPERAND (expr, 0);
1537 tree op2 = TREE_OPERAND (expr, 1);
1538 return set_contains_value (set, op1) && set_contains_value (set, op2);
1541 case tcc_unary:
1543 tree op1 = TREE_OPERAND (expr, 0);
1544 return set_contains_value (set, op1);
1547 case tcc_expression:
1549 if (TREE_CODE (expr) == CALL_EXPR)
1551 tree op0 = TREE_OPERAND (expr, 0);
1552 tree arglist = TREE_OPERAND (expr, 1);
1553 tree op2 = TREE_OPERAND (expr, 2);
1555 /* Check the non-list operands first. */
1556 if (!set_contains_value (set, op0)
1557 || (op2 && !set_contains_value (set, op2)))
1558 return false;
1560 /* Now check the operands. */
1561 for (; arglist; arglist = TREE_CHAIN (arglist))
1563 if (!set_contains_value (set, TREE_VALUE (arglist)))
1564 return false;
/* Even if all operands are available, the call is only valid
   if its virtual uses survive BLOCK.  */
1566 return !vuses_dies_in_block_x (VALUE_HANDLE_VUSES (vh), block);
1568 return false;
1571 case tcc_reference:
1573 if (TREE_CODE (expr) == INDIRECT_REF
1574 || TREE_CODE (expr) == COMPONENT_REF
1575 || TREE_CODE (expr) == ARRAY_REF)
1577 tree op0 = TREE_OPERAND (expr, 0);
1578 gcc_assert (is_gimple_min_invariant (op0)
1579 || TREE_CODE (op0) == VALUE_HANDLE);
1580 if (!set_contains_value (set, op0))
1581 return false;
1582 if (TREE_CODE (expr) == ARRAY_REF)
/* ARRAY_REF also carries index (op1), lower bound (op2) and
   element size (op3); op2/op3 may be absent.  */
1584 tree op1 = TREE_OPERAND (expr, 1);
1585 tree op2 = TREE_OPERAND (expr, 2);
1586 tree op3 = TREE_OPERAND (expr, 3);
1587 gcc_assert (is_gimple_min_invariant (op1)
1588 || TREE_CODE (op1) == VALUE_HANDLE);
1589 if (!set_contains_value (set, op1))
1590 return false;
1591 gcc_assert (!op2 || is_gimple_min_invariant (op2)
1592 || TREE_CODE (op2) == VALUE_HANDLE);
1593 if (op2
1594 && !set_contains_value (set, op2))
1595 return false;
1596 gcc_assert (!op3 || is_gimple_min_invariant (op3)
1597 || TREE_CODE (op3) == VALUE_HANDLE);
1598 if (op3
1599 && !set_contains_value (set, op3))
1600 return false;
/* A load is valid if it is among the loads known safe at the top
   of BLOCK, or its vuses are not killed in BLOCK.  */
1602 return set_contains_value (ANTIC_SAFE_LOADS (block),
1604 || !vuses_dies_in_block_x (VALUE_HANDLE_VUSES (vh),
1605 block);
1608 return false;
1610 case tcc_exceptional:
1611 gcc_assert (TREE_CODE (expr) == SSA_NAME);
1612 return true;
1614 case tcc_declaration:
1615 return !vuses_dies_in_block_x (VALUE_HANDLE_VUSES (vh), block);
1617 default:
1618 /* No other cases should be encountered. */
1619 gcc_unreachable ();
1623 /* Clean the set of expressions that are no longer valid in SET. This
1624 means expressions that are made up of values we have no leaders for
1625 in SET. */
1627 static void
1628 clean (value_set_t set, basic_block block)
1630 value_set_node_t node;
1631 value_set_node_t next;
1632 node = set->head;
1633 while (node)
1635 next = node->next;
1636 if (!valid_in_set (set, node->expr, block))
1637 set_remove (set, node->expr);
1638 node = next;
1642 static sbitmap has_abnormal_preds;
1644 /* Compute the ANTIC set for BLOCK.
1646 If succs(BLOCK) > 1 then
1647 ANTIC_OUT[BLOCK] = intersection of ANTIC_IN[b] for all succ(BLOCK)
1648 else if succs(BLOCK) == 1 then
1649 ANTIC_OUT[BLOCK] = phi_translate (ANTIC_IN[succ(BLOCK)])
1651 ANTIC_IN[BLOCK] = clean(ANTIC_OUT[BLOCK] U EXP_GEN[BLOCK] - TMP_GEN[BLOCK])
1653 XXX: It would be nice to either write a set_clear, and use it for
1654 ANTIC_OUT, or to mark the antic_out set as deleted at the end
1655 of this routine, so that the pool can hand the same memory back out
1656 again for the next ANTIC_OUT. */
1658 static bool
1659 compute_antic_aux (basic_block block, bool block_has_abnormal_pred_edge)
1661 basic_block son;
1662 bool changed = false;
1663 value_set_t S, old, ANTIC_OUT;
1664 value_set_node_t node;
1666 ANTIC_OUT = S = NULL;
1668 /* If any edges from predecessors are abnormal, antic_in is empty,
1669 so do nothing. */
1670 if (block_has_abnormal_pred_edge)
1671 goto maybe_dump_sets;
/* Remember the old ANTIC_IN so we can detect whether this pass over
   BLOCK changed anything.  */
1673 old = set_new (false);
1674 set_copy (old, ANTIC_IN (block));
1675 ANTIC_OUT = set_new (true);
1677 /* If the block has no successors, ANTIC_OUT is empty. */
1678 if (EDGE_COUNT (block->succs) == 0)
1680 /* If we have one successor, we could have some phi nodes to
1681 translate through. */
1682 else if (single_succ_p (block))
1684 phi_translate_set (ANTIC_OUT, ANTIC_IN (single_succ (block)),
1685 block, single_succ (block));
1687 /* If we have multiple successors, we take the intersection of all of
1688 them. */
1689 else
1691 VEC(basic_block, heap) * worklist;
1692 edge e;
1693 size_t i;
1694 basic_block bprime, first;
1695 edge_iterator ei;
1697 worklist = VEC_alloc (basic_block, heap, EDGE_COUNT (block->succs));
1698 FOR_EACH_EDGE (e, ei, block->succs)
1699 VEC_quick_push (basic_block, worklist, e->dest);
/* Seed with the first successor's ANTIC_IN, then delete anything
   not anticipated by every remaining successor.  */
1700 first = VEC_index (basic_block, worklist, 0);
1701 set_copy (ANTIC_OUT, ANTIC_IN (first));
1703 for (i = 1; VEC_iterate (basic_block, worklist, i, bprime); i++)
1705 node = ANTIC_OUT->head;
1706 while (node)
1708 tree val;
1709 value_set_node_t next = node->next;
1711 val = get_value_handle (node->expr);
1712 if (!set_contains_value (ANTIC_IN (bprime), val))
1713 set_remove (ANTIC_OUT, node->expr);
1714 node = next;
1717 VEC_free (basic_block, heap, worklist);
1720 /* Generate ANTIC_OUT - TMP_GEN. */
1721 S = bitmap_set_subtract_from_value_set (ANTIC_OUT, TMP_GEN (block), false);
1723 /* Start ANTIC_IN with EXP_GEN - TMP_GEN */
1724 ANTIC_IN (block) = bitmap_set_subtract_from_value_set (EXP_GEN (block),
1725 TMP_GEN (block),
1726 true);
1728 /* Then union in the ANTIC_OUT - TMP_GEN values,
1729 to get ANTIC_OUT U EXP_GEN - TMP_GEN */
1730 for (node = S->head; node; node = node->next)
1731 value_insert_into_set (ANTIC_IN (block), node->expr);
/* Remove expressions whose component values have no leader, then see
   whether a fixed point was reached for this block.  */
1733 clean (ANTIC_IN (block), block);
1734 if (!set_equal (old, ANTIC_IN (block)))
1735 changed = true;
1737 maybe_dump_sets:
1738 if (dump_file && (dump_flags & TDF_DETAILS))
1740 if (ANTIC_OUT)
1741 print_value_set (dump_file, ANTIC_OUT, "ANTIC_OUT", block->index);
1743 if (ANTIC_SAFE_LOADS (block))
1744 print_value_set (dump_file, ANTIC_SAFE_LOADS (block),
1745 "ANTIC_SAFE_LOADS", block->index);
1746 print_value_set (dump_file, ANTIC_IN (block), "ANTIC_IN", block->index);
1748 if (S)
1749 print_value_set (dump_file, S, "S", block->index);
/* Recurse over the post-dominator tree so every block is visited.  */
1752 for (son = first_dom_son (CDI_POST_DOMINATORS, block);
1753 son;
1754 son = next_dom_son (CDI_POST_DOMINATORS, son))
1756 changed |= compute_antic_aux (son,
1757 TEST_BIT (has_abnormal_preds, son->index));
1759 return changed;
1762 /* Compute ANTIC sets. */
1764 static void
1765 compute_antic (void)
1767 bool changed = true;
1768 int num_iterations = 0;
1769 basic_block block;
1771 /* If any predecessor edges are abnormal, we punt, so antic_in is empty.
1772 We pre-build the map of blocks with incoming abnormal edges here. */
1773 has_abnormal_preds = sbitmap_alloc (last_basic_block);
1774 sbitmap_zero (has_abnormal_preds);
1775 FOR_EACH_BB (block)
1777 edge_iterator ei;
1778 edge e;
1780 FOR_EACH_EDGE (e, ei, block->preds)
1781 if (e->flags & EDGE_ABNORMAL)
1783 SET_BIT (has_abnormal_preds, block->index);
1784 break;
1787 /* While we are here, give empty ANTIC_IN sets to each block. */
1788 ANTIC_IN (block) = set_new (true);
1790 /* At the exit block we anticipate nothing. */
1791 ANTIC_IN (EXIT_BLOCK_PTR) = set_new (true);
/* Iterate compute_antic_aux over the whole CFG (walking the
   post-dominator tree from the exit block) until no ANTIC_IN set
   changes any more.  */
1793 while (changed)
1795 num_iterations++;
1796 changed = false;
1797 changed = compute_antic_aux (EXIT_BLOCK_PTR, false);
1800 sbitmap_free (has_abnormal_preds);
1802 if (dump_file && (dump_flags & TDF_STATS))
1803 fprintf (dump_file, "compute_antic required %d iterations\n", num_iterations);
1806 /* Print the names represented by the bitmap NAMES, to the file OUT. */
1807 static void
1808 dump_bitmap_of_names (FILE *out, bitmap names)
1810 bitmap_iterator bi;
1811 unsigned int i;
1813 fprintf (out, " { ");
/* Each set bit in NAMES is an SSA version number; print the
   corresponding SSA name.  */
1814 EXECUTE_IF_SET_IN_BITMAP (names, 0, i, bi)
1816 print_generic_expr (out, ssa_name (i), 0);
1817 fprintf (out, " ");
1819 fprintf (out, "}\n");
1822 /* Compute a set of representative vuse versions for each phi. This
1823 is so we can compute conservative kill sets in terms of all vuses
1824 that are killed, instead of continually walking chains.
1826 We also have to be able kill all names associated with a phi when
1827 the phi dies in order to ensure we don't generate overlapping
1828 live ranges, which are not allowed in virtual SSA. */
1830 static bitmap *vuse_names;
1831 static void
1832 compute_vuse_representatives (void)
1834 tree phi;
1835 basic_block bb;
1836 VEC (tree, heap) *phis = NULL;
1837 bool changed = true;
1838 size_t i;
/* Collect all virtual (non-register) PHIs in the function.  */
1840 FOR_EACH_BB (bb)
1842 for (phi = phi_nodes (bb);
1843 phi;
1844 phi = PHI_CHAIN (phi))
1845 if (!is_gimple_reg (PHI_RESULT (phi)))
1846 VEC_safe_push (tree, heap, phis, phi);
/* Iterate to a fixed point: each PHI result's representative bitmap
   accumulates its own version plus everything its arguments
   represent (transitively, via repeated passes).  */
1849 while (changed)
1851 changed = false;
1853 for (i = 0; VEC_iterate (tree, phis, i, phi); i++)
1855 size_t ver = SSA_NAME_VERSION (PHI_RESULT (phi));
1856 use_operand_p usep;
1857 ssa_op_iter iter;
1859 if (vuse_names[ver] == NULL)
1861 vuse_names[ver] = BITMAP_ALLOC (&grand_bitmap_obstack);
1862 bitmap_set_bit (vuse_names[ver], ver);
1864 FOR_EACH_PHI_ARG (usep, phi, iter, SSA_OP_ALL_USES)
1866 tree use = USE_FROM_PTR (usep);
1867 bitmap usebitmap = get_representative (vuse_names,
1868 SSA_NAME_VERSION (use));
1869 if (usebitmap != NULL)
1871 changed |= bitmap_ior_into (vuse_names[ver],
1872 usebitmap);
1874 else
1876 changed |= !bitmap_bit_p (vuse_names[ver],
1877 SSA_NAME_VERSION (use));
1878 if (changed)
1879 bitmap_set_bit (vuse_names[ver],
1880 SSA_NAME_VERSION (use));
1886 if (dump_file && (dump_flags & TDF_DETAILS))
1887 for (i = 0; VEC_iterate (tree, phis, i, phi); i++)
1889 bitmap reps = get_representative (vuse_names,
1890 SSA_NAME_VERSION (PHI_RESULT (phi)));
1891 if (reps)
1893 print_generic_expr (dump_file, PHI_RESULT (phi), 0);
1894 fprintf (dump_file, " represents ");
1895 dump_bitmap_of_names (dump_file, reps);
1898 VEC_free (tree, heap, phis);
1901 /* Compute reaching vuses and antic safe loads. RVUSE computation is
1902 is a small bit of iterative dataflow to determine what virtual uses
1903 reach what blocks. Because we can't generate overlapping virtual
1904 uses, and virtual uses *do* actually die, this ends up being faster
1905 in most cases than continually walking the virtual use/def chains
1906 to determine whether we are inside a block where a given virtual is
1907 still available to be used.
1909 ANTIC_SAFE_LOADS are those loads that actually occur before any kill to
1910 their vuses in the block,and thus, are safe at the top of the
1911 block.
1913 An example:
1915 <block begin>
1916 b = *a
1917 *a = 9
1918 <block end>
1920 b = *a is an antic safe load because it still safe to consider it
1921 ANTIC at the top of the block.
1923 We currently compute a conservative approximation to
1924 ANTIC_SAFE_LOADS. We compute those loads that occur before *any*
1925 stores in the block. This is not because it is difficult to
1926 compute the precise answer, but because it is expensive. More
1927 testing is necessary to determine whether it is worth computing the
1928 precise answer. */
1930 static void
1931 compute_rvuse_and_antic_safe (void)
1934 size_t i;
1935 tree phi;
1936 basic_block bb;
1937 int *postorder;
1938 bool changed = true;
/* first_store_uid[B] is the statement uid of the first statement in
   block B that has any virtual operand (0 if none); loads before that
   point are safe at the top of the block.  */
1939 unsigned int *first_store_uid;
1941 first_store_uid = xcalloc (n_basic_blocks, sizeof (unsigned int));
1943 compute_vuse_representatives ();
/* Allocate the per-block dataflow sets.  */
1945 FOR_ALL_BB (bb)
1947 RVUSE_IN (bb) = BITMAP_ALLOC (&grand_bitmap_obstack);
1948 RVUSE_GEN (bb) = BITMAP_ALLOC (&grand_bitmap_obstack);
1949 RVUSE_KILL (bb) = BITMAP_ALLOC (&grand_bitmap_obstack);
1950 RVUSE_OUT (bb) = BITMAP_ALLOC (&grand_bitmap_obstack);
1951 ANTIC_SAFE_LOADS (bb) = NULL;
1954 /* Mark live on entry */
1955 for (i = 0; i < num_ssa_names; i++)
1957 tree name = ssa_name (i);
1958 if (name && !is_gimple_reg (name)
1959 && IS_EMPTY_STMT (SSA_NAME_DEF_STMT (name)))
1960 bitmap_set_bit (RVUSE_OUT (ENTRY_BLOCK_PTR),
1961 SSA_NAME_VERSION (name));
1964 /* Compute local sets for reaching vuses.
1965 GEN(block) = generated in block and not locally killed.
1966 KILL(block) = set of vuses killed in block.
1969 FOR_EACH_BB (bb)
1971 block_stmt_iterator bsi;
1972 ssa_op_iter iter;
1973 def_operand_p defp;
1974 use_operand_p usep;
1976 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
1978 tree stmt = bsi_stmt (bsi);
/* Record the first statement with virtual operands in this block;
   used below when computing ANTIC_SAFE_LOADS.  */
1980 if (first_store_uid[bb->index] == 0
1981 && !ZERO_SSA_OPERANDS (stmt, SSA_OP_VMAYUSE | SSA_OP_VMAYDEF
1982 | SSA_OP_VMUSTDEF | SSA_OP_VMUSTKILL))
1984 first_store_uid[bb->index] = stmt_ann (stmt)->uid;
/* A killing use kills everything its representative bitmap
   covers; without a representative, just the name itself.  */
1988 FOR_EACH_SSA_USE_OPERAND (usep, stmt, iter, SSA_OP_VIRTUAL_KILLS
1989 | SSA_OP_VMAYUSE)
1991 tree use = USE_FROM_PTR (usep);
1992 bitmap repbit = get_representative (vuse_names,
1993 SSA_NAME_VERSION (use));
1994 if (repbit != NULL)
1996 bitmap_and_compl_into (RVUSE_GEN (bb), repbit);
1997 bitmap_ior_into (RVUSE_KILL (bb), repbit);
1999 else
2001 bitmap_set_bit (RVUSE_KILL (bb), SSA_NAME_VERSION (use));
2002 bitmap_clear_bit (RVUSE_GEN (bb), SSA_NAME_VERSION (use));
2005 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_VIRTUAL_DEFS)
2007 tree def = DEF_FROM_PTR (defp);
2008 bitmap_set_bit (RVUSE_GEN (bb), SSA_NAME_VERSION (def));
2013 FOR_EACH_BB (bb)
2015 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
2017 if (!is_gimple_reg (PHI_RESULT (phi)))
2019 edge e;
2020 edge_iterator ei;
2022 tree def = PHI_RESULT (phi);
2023 /* In reality, the PHI result is generated at the end of
2024 each predecessor block. This will make the value
2025 LVUSE_IN for the bb containing the PHI, which is
2026 correct. */
2027 FOR_EACH_EDGE (e, ei, bb->preds)
2028 bitmap_set_bit (RVUSE_GEN (e->src), SSA_NAME_VERSION (def));
2033 /* Solve reaching vuses.
2035 RVUSE_IN[BB] = Union of RVUSE_OUT of predecessors.
2036 RVUSE_OUT[BB] = RVUSE_GEN[BB] U (RVUSE_IN[BB] - RVUSE_KILL[BB])
2038 postorder = XNEWVEC (int, n_basic_blocks - NUM_FIXED_BLOCKS);
2039 pre_and_rev_post_order_compute (NULL, postorder, false);
/* Standard forward iterative dataflow over the blocks in postorder
   until the RVUSE_OUT sets stop changing.  */
2041 changed = true;
2042 while (changed)
2044 int j;
2045 changed = false;
2046 for (j = 0; j < n_basic_blocks - NUM_FIXED_BLOCKS; j++)
2048 edge e;
2049 edge_iterator ei;
2050 bb = BASIC_BLOCK (postorder[j]);
2052 FOR_EACH_EDGE (e, ei, bb->preds)
2053 bitmap_ior_into (RVUSE_IN (bb), RVUSE_OUT (e->src));
2055 changed |= bitmap_ior_and_compl (RVUSE_OUT (bb),
2056 RVUSE_GEN (bb),
2057 RVUSE_IN (bb),
2058 RVUSE_KILL (bb));
2061 free (postorder);
2063 if (dump_file && (dump_flags & TDF_DETAILS))
2065 FOR_ALL_BB (bb)
2067 fprintf (dump_file, "RVUSE_IN (%d) =", bb->index);
2068 dump_bitmap_of_names (dump_file, RVUSE_IN (bb));
2070 fprintf (dump_file, "RVUSE_KILL (%d) =", bb->index);
2071 dump_bitmap_of_names (dump_file, RVUSE_KILL (bb));
2073 fprintf (dump_file, "RVUSE_GEN (%d) =", bb->index);
2074 dump_bitmap_of_names (dump_file, RVUSE_GEN (bb));
2076 fprintf (dump_file, "RVUSE_OUT (%d) =", bb->index);
2077 dump_bitmap_of_names (dump_file, RVUSE_OUT (bb));
/* Compute ANTIC_SAFE_LOADS: loads in EXP_GEN whose defining statement
   occurs in this block before the first statement with virtual
   operands (or is a PHI), in blocks that kill at least one vuse.  */
2081 FOR_EACH_BB (bb)
2083 value_set_node_t node;
2084 if (bitmap_empty_p (RVUSE_KILL (bb)))
2085 continue;
2087 for (node = EXP_GEN (bb)->head; node; node = node->next)
2089 if (REFERENCE_CLASS_P (node->expr))
2091 tree vh = get_value_handle (node->expr);
2092 tree maybe = bitmap_find_leader (AVAIL_OUT (bb), vh);
2094 if (maybe)
2096 tree def = SSA_NAME_DEF_STMT (maybe);
2098 if (bb_for_stmt (def) != bb)
2099 continue;
2101 if (TREE_CODE (def) == PHI_NODE
2102 || stmt_ann (def)->uid < first_store_uid[bb->index])
2104 if (ANTIC_SAFE_LOADS (bb) == NULL)
2105 ANTIC_SAFE_LOADS (bb) = set_new (true);
2106 value_insert_into_set (ANTIC_SAFE_LOADS (bb),
2107 node->expr);
2113 free (first_store_uid);
2116 /* Return true if we can value number the call in STMT. This is true
2117 if we have a pure or constant call. */
2119 static bool
2120 can_value_number_call (tree stmt)
2122 tree call = get_call_expr_in (stmt);
2124 if (call_expr_flags (call) & (ECF_PURE | ECF_CONST))
2125 return true;
2126 return false;
2129 /* Return true if OP is a tree which we can perform value numbering
2130 on. */
2132 static bool
2133 can_value_number_operation (tree op)
2135 return UNARY_CLASS_P (op)
2136 || BINARY_CLASS_P (op)
2137 || COMPARISON_CLASS_P (op)
2138 || REFERENCE_CLASS_P (op)
2139 || (TREE_CODE (op) == CALL_EXPR
2140 && can_value_number_call (op));
2144 /* Return true if OP is a tree which we can perform PRE on
2145 on. This may not match the operations we can value number, but in
2146 a perfect world would. */
2148 static bool
2149 can_PRE_operation (tree op)
2151 return UNARY_CLASS_P (op)
2152 || BINARY_CLASS_P (op)
2153 || COMPARISON_CLASS_P (op)
2154 || TREE_CODE (op) == INDIRECT_REF
2155 || TREE_CODE (op) == COMPONENT_REF
2156 || TREE_CODE (op) == CALL_EXPR
2157 || TREE_CODE (op) == ARRAY_REF;
2161 /* Inserted expressions are placed onto this worklist, which is used
2162 for performing quick dead code elimination of insertions we made
2163 that didn't turn out to be necessary. */
2164 static VEC(tree,heap) *inserted_exprs;
2166 /* Pool allocated fake store expressions are placed onto this
2167 worklist, which, after performing dead code elimination, is walked
2168 to see which expressions need to be put into GC'able memory */
2169 static VEC(tree, heap) *need_creation;
2171 /* For COMPONENT_REF's and ARRAY_REF's, we can't have any intermediates for the
2172 COMPONENT_REF or INDIRECT_REF or ARRAY_REF portion, because we'd end up with
2173 trying to rename aggregates into ssa form directly, which is a no
2176 Thus, this routine doesn't create temporaries, it just builds a
2177 single access expression for the array, calling
2178 find_or_generate_expression to build the innermost pieces.
2180 This function is a subroutine of create_expression_by_pieces, and
2181 should not be called on it's own unless you really know what you
2182 are doing.
2184 static tree
2185 create_component_ref_by_pieces (basic_block block, tree expr, tree stmts)
2187 tree genop = expr;
2188 tree folded;
/* If EXPR is a value handle with an available leader in this block,
   reuse the leader instead of regenerating the reference.  */
2190 if (TREE_CODE (genop) == VALUE_HANDLE)
2192 tree found = bitmap_find_leader (AVAIL_OUT (block), expr);
2193 if (found)
2194 return found;
/* No leader: pick the first expression known to carry this value as
   the template to rebuild from.  */
2197 if (TREE_CODE (genop) == VALUE_HANDLE)
2198 genop = VALUE_HANDLE_EXPR_SET (expr)->head->expr;
2200 switch TREE_CODE (genop)
2202 case ARRAY_REF:
/* Rebuild base recursively; index, lower bound and element size
   operands may themselves be value handles to materialize.  */
2204 tree op0;
2205 tree op1, op2, op3;
2206 op0 = create_component_ref_by_pieces (block,
2207 TREE_OPERAND (genop, 0),
2208 stmts);
2209 op1 = TREE_OPERAND (genop, 1);
2210 if (TREE_CODE (op1) == VALUE_HANDLE)
2211 op1 = find_or_generate_expression (block, op1, stmts);
2212 op2 = TREE_OPERAND (genop, 2);
2213 if (op2 && TREE_CODE (op2) == VALUE_HANDLE)
2214 op2 = find_or_generate_expression (block, op2, stmts);
2215 op3 = TREE_OPERAND (genop, 3);
2216 if (op3 && TREE_CODE (op3) == VALUE_HANDLE)
2217 op3 = find_or_generate_expression (block, op3, stmts);
2218 folded = build4 (ARRAY_REF, TREE_TYPE (genop), op0, op1,
2219 op2, op3);
2220 return folded;
2222 case COMPONENT_REF:
2224 tree op0;
2225 tree op1;
2226 op0 = create_component_ref_by_pieces (block,
2227 TREE_OPERAND (genop, 0),
2228 stmts);
/* The field operand is looked up through its value's expression
   set; the first member is used as the FIELD_DECL.  */
2229 op1 = VALUE_HANDLE_EXPR_SET (TREE_OPERAND (genop, 1))->head->expr;
2230 folded = fold_build3 (COMPONENT_REF, TREE_TYPE (genop), op0, op1,
2231 NULL_TREE);
2232 return folded;
2234 break;
2235 case INDIRECT_REF:
2237 tree op1 = TREE_OPERAND (genop, 0);
2238 tree genop1 = find_or_generate_expression (block, op1, stmts);
2240 folded = fold_build1 (TREE_CODE (genop), TREE_TYPE (genop),
2241 genop1);
2242 return folded;
2244 break;
/* Innermost pieces: decls, SSA names and string constants are used
   directly, no generation needed.  */
2245 case VAR_DECL:
2246 case PARM_DECL:
2247 case RESULT_DECL:
2248 case SSA_NAME:
2249 case STRING_CST:
2250 return genop;
2251 default:
2252 gcc_unreachable ();
2255 return NULL_TREE;
2258 /* Find a leader for an expression, or generate one using
2259 create_expression_by_pieces if it's ANTIC but
2260 complex.
2261 BLOCK is the basic_block we are looking for leaders in.
2262 EXPR is the expression to find a leader or generate for.
2263 STMTS is the statement list to put the inserted expressions on.
2264 Returns the SSA_NAME of the LHS of the generated expression or the
2265 leader. */
2267 static tree
2268 find_or_generate_expression (basic_block block, tree expr, tree stmts)
2270 tree genop = bitmap_find_leader (AVAIL_OUT (block), expr);
2272 /* If it's still NULL, it must be a complex expression, so generate
2273 it recursively. */
2274 if (genop == NULL)
/* Use the first expression carrying this value as the template for
   generation; it must be a form PRE knows how to build.  */
2276 genop = VALUE_HANDLE_EXPR_SET (expr)->head->expr;
2278 gcc_assert (can_PRE_operation (genop));
2279 genop = create_expression_by_pieces (block, genop, stmts);
2281 return genop;
/* PRE reuses the otherwise-unused asm_written_flag on statements to
   record whether an inserted statement turned out to be necessary.  */
#define NECESSARY(stmt)		stmt->common.asm_written_flag

/* Create an expression in pieces, so that we can handle very complex
   expressions that may be ANTIC, but not necessary GIMPLE.
   BLOCK is the basic block the expression will be inserted into,
   EXPR is the expression to insert (in value form)
   STMTS is a statement list to append the necessary insertions into.

   This function will die if we hit some value that shouldn't be
   ANTIC but is (IE there is no leader for it, or its components).
   This function may also generate expressions that are themselves
   partially or fully redundant.  Those that are will be either made
   fully redundant during the next iteration of insert (for partially
   redundant ones), or eliminated by eliminate (for fully redundant
   ones).

   Returns the SSA_NAME holding the computed value.  */

static tree
create_expression_by_pieces (basic_block block, tree expr, tree stmts)
{
  tree temp, name;
  tree folded, forced_stmts, newexpr;
  tree v;
  tree_stmt_iterator tsi;

  /* First, rebuild EXPR as a GENERIC tree, replacing each value-handle
     operand by a leader (found or freshly generated).  */
  switch (TREE_CODE_CLASS (TREE_CODE (expr)))
    {
    case tcc_expression:
      {
	tree op0, op2;
	tree arglist;
	tree genop0, genop2;
	tree genarglist;
	tree walker, genwalker;

	/* The only tcc_expression PRE handles here is a call.  */
	gcc_assert (TREE_CODE (expr) == CALL_EXPR);
	genop2 = NULL;

	op0 = TREE_OPERAND (expr, 0);
	arglist = TREE_OPERAND (expr, 1);
	op2 = TREE_OPERAND (expr, 2);

	genop0 = find_or_generate_expression (block, op0, stmts);
	/* Walk the value-form argument list and the copied list in
	   lockstep, generating a leader for each argument.  */
	genarglist = copy_list (arglist);
	for (walker = arglist, genwalker = genarglist;
	     genwalker && walker;
	     genwalker = TREE_CHAIN (genwalker), walker = TREE_CHAIN (walker))
	  {
	    TREE_VALUE (genwalker)
	      = find_or_generate_expression (block, TREE_VALUE (walker),
					     stmts);
	  }

	if (op2)
	  genop2 = find_or_generate_expression (block, op2, stmts);
	folded = fold_build3 (TREE_CODE (expr), TREE_TYPE (expr),
			      genop0, genarglist, genop2);
	break;
      }
      break;
    case tcc_reference:
      {
	if (TREE_CODE (expr) == COMPONENT_REF
	    || TREE_CODE (expr) == ARRAY_REF)
	  {
	    folded = create_component_ref_by_pieces (block, expr, stmts);
	  }
	else
	  {
	    /* Other references (e.g. INDIRECT_REF) are unary.  */
	    tree op1 = TREE_OPERAND (expr, 0);
	    tree genop1 = find_or_generate_expression (block, op1, stmts);

	    folded = fold_build1 (TREE_CODE (expr), TREE_TYPE (expr),
				  genop1);
	  }
	break;
      }

    case tcc_binary:
    case tcc_comparison:
      {
	tree op1 = TREE_OPERAND (expr, 0);
	tree op2 = TREE_OPERAND (expr, 1);
	tree genop1 = find_or_generate_expression (block, op1, stmts);
	tree genop2 = find_or_generate_expression (block, op2, stmts);
	folded = fold_build2 (TREE_CODE (expr), TREE_TYPE (expr),
			      genop1, genop2);
	break;
      }

    case tcc_unary:
      {
	tree op1 = TREE_OPERAND (expr, 0);
	tree genop1 = find_or_generate_expression (block, op1, stmts);
	folded = fold_build1 (TREE_CODE (expr), TREE_TYPE (expr),
			      genop1);
	break;
      }

    default:
      gcc_unreachable ();
    }

  /* Force the generated expression to be a sequence of GIMPLE
     statements.
     We have to call unshare_expr because force_gimple_operand may
     modify the tree we pass to it.  */
  newexpr = force_gimple_operand (unshare_expr (folded), &forced_stmts,
				  false, NULL);

  /* If we have any intermediate expressions to the value sets, add them
     to the value sets and chain them on in the instruction stream.  */
  if (forced_stmts)
    {
      tsi = tsi_start (forced_stmts);
      for (; !tsi_end_p (tsi); tsi_next (&tsi))
	{
	  tree stmt = tsi_stmt (tsi);
	  tree forcedname = TREE_OPERAND (stmt, 0);
	  tree forcedexpr = TREE_OPERAND (stmt, 1);
	  /* Value-number each forced temporary so later iterations can
	     find it as a leader.  */
	  tree val = vn_lookup_or_add (forcedexpr, NULL);

	  VEC_safe_push (tree, heap, inserted_exprs, stmt);
	  vn_add (forcedname, val);
	  bitmap_value_replace_in_set (NEW_SETS (block), forcedname);
	  bitmap_value_replace_in_set (AVAIL_OUT (block), forcedname);
	  mark_new_vars_to_rename (stmt);
	}
      tsi = tsi_last (stmts);
      tsi_link_after (&tsi, forced_stmts, TSI_CONTINUE_LINKING);
    }

  /* Build and insert the assignment of the end result to the temporary
     that we will return.  The cached PRETEMP is reused as long as the
     type matches, to limit the number of temporaries created.  */
  if (!pretemp || TREE_TYPE (expr) != TREE_TYPE (pretemp))
    {
      pretemp = create_tmp_var (TREE_TYPE (expr), "pretmp");
      get_var_ann (pretemp);
    }

  temp = pretemp;
  add_referenced_var (temp);

  if (TREE_CODE (TREE_TYPE (expr)) == COMPLEX_TYPE)
    DECL_COMPLEX_GIMPLE_REG_P (temp) = 1;

  newexpr = build2 (MODIFY_EXPR, TREE_TYPE (expr), temp, newexpr);
  name = make_ssa_name (temp, newexpr);
  TREE_OPERAND (newexpr, 0) = name;
  NECESSARY (newexpr) = 0;

  tsi = tsi_last (stmts);
  tsi_link_after (&tsi, newexpr, TSI_CONTINUE_LINKING);
  VEC_safe_push (tree, heap, inserted_exprs, newexpr);
  mark_new_vars_to_rename (newexpr);

  /* Add a value handle to the temporary.
     The value may already exist in either NEW_SETS, or AVAIL_OUT, because
     we are creating the expression by pieces, and this particular piece of
     the expression may have been represented.  There is no harm in replacing
     here.  */
  v = get_value_handle (expr);
  vn_add (name, v);
  bitmap_value_replace_in_set (NEW_SETS (block), name);
  bitmap_value_replace_in_set (AVAIL_OUT (block), name);

  pre_stats.insertions++;
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Inserted ");
      print_generic_expr (dump_file, newexpr, 0);
      fprintf (dump_file, " in predecessor %d\n", block->index);
    }

  return name;
}
/* Insert the to-be-made-available values of NODE for each
   predecessor, stored in AVAIL, into the predecessors of BLOCK, and
   merge the result with a phi node, given the same value handle as
   NODE.  Return true if we have inserted new stuff.

   AVAIL is indexed by predecessor basic-block index; each entry is
   either an existing leader (SSA name / invariant) or a value-form
   expression that still needs to be generated in that predecessor.  */

static bool
insert_into_preds_of_block (basic_block block, value_set_node_t node,
			    tree *avail)
{
  tree val = get_value_handle (node->expr);
  edge pred;
  bool insertions = false;
  bool nophi = false;
  basic_block bprime;
  tree eprime;
  edge_iterator ei;
  tree type = TREE_TYPE (avail[EDGE_PRED (block, 0)->src->index]);
  tree temp;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Found partial redundancy for expression ");
      print_generic_expr (dump_file, node->expr, 0);
      fprintf (dump_file, " (");
      print_generic_expr (dump_file, val, 0);
      fprintf (dump_file, ")");
      fprintf (dump_file, "\n");
    }

  /* Make sure we aren't creating an induction variable.  */
  if (block->loop_depth > 0 && EDGE_COUNT (block->preds) == 2
      && TREE_CODE_CLASS (TREE_CODE (node->expr)) != tcc_reference )
    {
      bool firstinsideloop = false;
      bool secondinsideloop = false;
      firstinsideloop = flow_bb_inside_loop_p (block->loop_father,
					       EDGE_PRED (block, 0)->src);
      secondinsideloop = flow_bb_inside_loop_p (block->loop_father,
						EDGE_PRED (block, 1)->src);
      /* Induction variables only have one edge inside the loop.  */
      if (firstinsideloop ^ secondinsideloop)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Skipping insertion of phi for partial redundancy: Looks like an induction variable\n");
	  /* We still do the per-predecessor insertions below; we just
	     refrain from merging them with a PHI.  */
	  nophi = true;
	}
    }

  /* Make the necessary insertions.  */
  FOR_EACH_EDGE (pred, ei, block->preds)
    {
      tree stmts = alloc_stmt_list ();
      tree builtexpr;
      bprime = pred->src;
      eprime = avail[bprime->index];
      /* Entries that are already leaders (SSA names, invariants) need
	 no insertion; only PRE-able value expressions are built.  */
      if (can_PRE_operation (eprime))
	{
#ifdef ENABLE_CHECKING
	  tree vh;

	  /* eprime may be an invariant.  */
	  vh = TREE_CODE (eprime) == VALUE_HANDLE
	    ? eprime
	    : get_value_handle (eprime);

	  /* ensure that the virtual uses we need reach our block.  */
	  if (TREE_CODE (vh) == VALUE_HANDLE)
	    {
	      int i;
	      tree vuse;
	      for (i = 0;
		   VEC_iterate (tree, VALUE_HANDLE_VUSES (vh), i, vuse);
		   i++)
		{
		  size_t id = SSA_NAME_VERSION (vuse);
		  gcc_assert (bitmap_bit_p (RVUSE_OUT (bprime), id)
			      || IS_EMPTY_STMT (SSA_NAME_DEF_STMT (vuse)));
		}
	    }
#endif
	  builtexpr = create_expression_by_pieces (bprime,
						   eprime,
						   stmts);
	  /* Queue the new statements for commit on the incoming edge.  */
	  bsi_insert_on_edge (pred, stmts);
	  avail[bprime->index] = builtexpr;
	  insertions = true;
	}
    }

  /* If we didn't want a phi node, and we made insertions, we still have
     inserted new stuff, and thus return true.  If we didn't want a phi node,
     and didn't make insertions, we haven't added anything new, so return
     false.  */
  if (nophi && insertions)
    return true;
  else if (nophi && !insertions)
    return false;

  /* Now build a phi for the new variable.  PREPHITEMP is cached and
     reused while the type stays the same.  */
  if (!prephitemp || TREE_TYPE (prephitemp) != type)
    {
      prephitemp = create_tmp_var (type, "prephitmp");
      get_var_ann (prephitemp);
    }

  temp = prephitemp;
  add_referenced_var (temp);

  if (TREE_CODE (type) == COMPLEX_TYPE)
    DECL_COMPLEX_GIMPLE_REG_P (temp) = 1;
  temp = create_phi_node (temp, block);

  NECESSARY (temp) = 0;
  VEC_safe_push (tree, heap, inserted_exprs, temp);
  FOR_EACH_EDGE (pred, ei, block->preds)
    add_phi_arg (temp, avail[pred->src->index], pred);

  /* The PHI result carries the same value handle as NODE.  */
  vn_add (PHI_RESULT (temp), val);

  /* The value should *not* exist in PHI_GEN, or else we wouldn't be doing
     this insertion, since we test for the existence of this value in PHI_GEN
     before proceeding with the partial redundancy checks in insert_aux.

     The value may exist in AVAIL_OUT, in particular, it could be represented
     by the expression we are trying to eliminate, in which case we want the
     replacement to occur.  If it's not existing in AVAIL_OUT, we want it
     inserted there.

     Similarly, to the PHI_GEN case, the value should not exist in NEW_SETS of
     this block, because if it did, it would have existed in our dominator's
     AVAIL_OUT, and would have been skipped due to the full redundancy check.
  */

  bitmap_insert_into_set (PHI_GEN (block),
			  PHI_RESULT (temp));
  bitmap_value_replace_in_set (AVAIL_OUT (block),
			       PHI_RESULT (temp));
  bitmap_insert_into_set (NEW_SETS (block),
			  PHI_RESULT (temp));

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Created phi ");
      print_generic_expr (dump_file, temp, 0);
      fprintf (dump_file, " in block %d\n", block->index);
    }
  pre_stats.phis++;
  return true;
}
/* Perform insertion of partially redundant values.
   For BLOCK, do the following:
   1.  Propagate the NEW_SETS of the dominator into the current block.
   If the block has multiple predecessors,
       2a. Iterate over the ANTIC expressions for the block to see if
	   any of them are partially redundant.
       2b. If so, insert them into the necessary predecessors to make
	   the expression fully redundant.
       2c. Insert a new PHI merging the values of the predecessors.
       2d. Insert the new PHI, and the new expressions, into the
	   NEW_SETS set.
   3. Recursively call ourselves on the dominator children of BLOCK.

   Returns true if anything new was inserted (used by insert () to
   iterate to a fixed point).  */

static bool
insert_aux (basic_block block)
{
  basic_block son;
  bool new_stuff = false;

  if (block)
    {
      basic_block dom;
      dom = get_immediate_dominator (CDI_DOMINATORS, block);
      if (dom)
	{
	  unsigned i;
	  bitmap_iterator bi;
	  bitmap_set_t newset = NEW_SETS (dom);
	  if (newset)
	    {
	      /* Note that we need to value_replace both NEW_SETS, and
		 AVAIL_OUT. For both the case of NEW_SETS, the value may be
		 represented by some non-simple expression here that we want
		 to replace it with.  */
	      EXECUTE_IF_SET_IN_BITMAP (newset->expressions, 0, i, bi)
		{
		  bitmap_value_replace_in_set (NEW_SETS (block), ssa_name (i));
		  bitmap_value_replace_in_set (AVAIL_OUT (block), ssa_name (i));
		}
	    }
	  /* Only merge points (blocks with multiple predecessors) can
	     expose partial redundancies.  */
	  if (!single_pred_p (block))
	    {
	      value_set_node_t node;
	      for (node = ANTIC_IN (block)->head;
		   node;
		   node = node->next)
		{
		  if (can_PRE_operation (node->expr)
		      && !AGGREGATE_TYPE_P (TREE_TYPE (node->expr)))
		    {
		      tree *avail;
		      tree val;
		      bool by_some = false;
		      bool cant_insert = false;
		      bool all_same = true;
		      tree first_s = NULL;
		      edge pred;
		      basic_block bprime;
		      tree eprime = NULL_TREE;
		      edge_iterator ei;

		      val = get_value_handle (node->expr);
		      /* A value we already made a PHI for, or one fully
			 available from the dominator, is not partially
			 redundant here.  */
		      if (bitmap_set_contains_value (PHI_GEN (block), val))
			continue;
		      if (bitmap_set_contains_value (AVAIL_OUT (dom), val))
			{
			  if (dump_file && (dump_flags & TDF_DETAILS))
			    fprintf (dump_file, "Found fully redundant value\n");
			  continue;
			}

		      /* AVAIL maps predecessor block index -> leader or
			 translated expression; zero-initialized.  */
		      avail = XCNEWVEC (tree, last_basic_block);
		      FOR_EACH_EDGE (pred, ei, block->preds)
			{
			  tree vprime;
			  tree edoubleprime;

			  /* This can happen in the very weird case
			     that our fake infinite loop edges have caused a
			     critical edge to appear.  */
			  if (EDGE_CRITICAL_P (pred))
			    {
			      cant_insert = true;
			      break;
			    }
			  bprime = pred->src;
			  eprime = phi_translate (node->expr,
						  ANTIC_IN (block),
						  bprime, block);

			  /* eprime will generally only be NULL if the
			     value of the expression, translated
			     through the PHI for this predecessor, is
			     undefined.  If that is the case, we can't
			     make the expression fully redundant,
			     because its value is undefined along a
			     predecessor path.  We can thus break out
			     early because it doesn't matter what the
			     rest of the results are.  */
			  if (eprime == NULL)
			    {
			      cant_insert = true;
			      break;
			    }

			  eprime = fully_constant_expression (eprime);
			  vprime = get_value_handle (eprime);
			  gcc_assert (vprime);
			  edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime),
							     vprime);
			  if (edoubleprime == NULL)
			    {
			      /* Not available in this predecessor: remember
				 the expression we would have to insert.  */
			      avail[bprime->index] = eprime;
			      all_same = false;
			    }
			  else
			    {
			      avail[bprime->index] = edoubleprime;
			      by_some = true;
			      if (first_s == NULL)
				first_s = edoubleprime;
			      else if (!operand_equal_p (first_s, edoubleprime,
							 0))
				all_same = false;
			    }
			}
		      /* If we can insert it, it's not the same value
			 already existing along every predecessor, and
			 it's defined by some predecessor, it is
			 partially redundant.  */
		      if (!cant_insert && !all_same && by_some)
			{
			  if (insert_into_preds_of_block (block, node, avail))
			    new_stuff = true;
			}
		      /* If all edges produce the same value and that value is
			 an invariant, then the PHI has the same value on all
			 edges.  Note this.  */
		      else if (!cant_insert && all_same && eprime
			       && is_gimple_min_invariant (eprime)
			       && !is_gimple_min_invariant (val))
			{
			  value_set_t exprset = VALUE_HANDLE_EXPR_SET (val);
			  value_set_node_t node;

			  /* Constify: make every SSA name with this value
			     number point at the invariant instead.  */
			  for (node = exprset->head; node; node = node->next)
			    {
			      if (TREE_CODE (node->expr) == SSA_NAME)
				{
				  vn_add (node->expr, eprime);
				  pre_stats.constified++;
				}
			    }
			}
		      free (avail);
		    }
		}
	    }
	}
    }
  /* Recurse over the dominator tree.  */
  for (son = first_dom_son (CDI_DOMINATORS, block);
       son;
       son = next_dom_son (CDI_DOMINATORS, son))
    {
      new_stuff |= insert_aux (son);
    }

  return new_stuff;
}
2786 /* Perform insertion of partially redundant values. */
2788 static void
2789 insert (void)
2791 bool new_stuff = true;
2792 basic_block bb;
2793 int num_iterations = 0;
2795 FOR_ALL_BB (bb)
2796 NEW_SETS (bb) = bitmap_set_new ();
2798 while (new_stuff)
2800 num_iterations++;
2801 new_stuff = false;
2802 new_stuff = insert_aux (ENTRY_BLOCK_PTR);
2804 if (num_iterations > 2 && dump_file && (dump_flags & TDF_STATS))
2805 fprintf (dump_file, "insert required %d iterations\n", num_iterations);
2809 /* Return true if VAR is an SSA variable with no defining statement in
2810 this procedure, *AND* isn't a live-on-entry parameter. */
2812 static bool
2813 is_undefined_value (tree expr)
2815 return (TREE_CODE (expr) == SSA_NAME
2816 && IS_EMPTY_STMT (SSA_NAME_DEF_STMT (expr))
2817 /* PARM_DECLs and hard registers are always defined. */
2818 && TREE_CODE (SSA_NAME_VAR (expr)) != PARM_DECL);
2822 /* Given an SSA variable VAR and an expression EXPR, compute the value
2823 number for EXPR and create a value handle (VAL) for it. If VAR and
2824 EXPR are not the same, associate VAL with VAR. Finally, add VAR to
2825 S1 and its value handle to S2.
2827 VUSES represent the virtual use operands associated with EXPR (if
2828 any). */
2830 static inline void
2831 add_to_sets (tree var, tree expr, tree stmt, bitmap_set_t s1,
2832 bitmap_set_t s2)
2834 tree val = vn_lookup_or_add (expr, stmt);
2836 /* VAR and EXPR may be the same when processing statements for which
2837 we are not computing value numbers (e.g., non-assignments, or
2838 statements that make aliased stores). In those cases, we are
2839 only interested in making VAR available as its own value. */
2840 if (var != expr)
2841 vn_add (var, val);
2843 if (s1)
2844 bitmap_insert_into_set (s1, var);
2845 bitmap_value_insert_into_set (s2, var);
/* Given a unary or binary expression EXPR, create and return a new
   expression with the same structure as EXPR but with its operands
   replaced with the value handles of each of the operands of EXPR.
   The copy lives in a per-code-class alloc_pool, not in GC memory.

   VUSES represent the virtual use operands associated with EXPR (if
   any).  Insert EXPR's operands into the EXP_GEN set for BLOCK.  */

static inline tree
create_value_expr_from (tree expr, basic_block block, tree stmt)
{
  int i;
  enum tree_code code = TREE_CODE (expr);
  tree vexpr;
  alloc_pool pool;

  gcc_assert (TREE_CODE_CLASS (code) == tcc_unary
	      || TREE_CODE_CLASS (code) == tcc_binary
	      || TREE_CODE_CLASS (code) == tcc_comparison
	      || TREE_CODE_CLASS (code) == tcc_reference
	      || TREE_CODE_CLASS (code) == tcc_expression
	      || TREE_CODE_CLASS (code) == tcc_exceptional
	      || TREE_CODE_CLASS (code) == tcc_declaration);

  /* Pick the pool sized for this node's code class.  */
  if (TREE_CODE_CLASS (code) == tcc_unary)
    pool = unary_node_pool;
  else if (TREE_CODE_CLASS (code) == tcc_reference)
    pool = reference_node_pool;
  else if (TREE_CODE_CLASS (code) == tcc_binary)
    pool = binary_node_pool;
  else if (TREE_CODE_CLASS (code) == tcc_comparison)
    pool = comparison_node_pool;
  else if (TREE_CODE_CLASS (code) == tcc_exceptional)
    {
      gcc_assert (code == TREE_LIST);
      pool = list_node_pool;
    }
  else
    {
      gcc_assert (code == CALL_EXPR);
      pool = expression_node_pool;
    }

  /* Shallow-copy EXPR into pool memory; operands are then rewritten
     in place below.  */
  vexpr = (tree) pool_alloc (pool);
  memcpy (vexpr, expr, tree_size (expr));

  /* This case is only for TREE_LIST's that appear as part of
     CALL_EXPR's.  Anything else is a bug, but we can't easily verify
     this, hence this comment.  TREE_LIST is not handled by the
     general case below is because they don't have a fixed length, or
     operands, so you can't access purpose/value/chain through
     TREE_OPERAND macros.  */

  if (code == TREE_LIST)
    {
      tree op = NULL_TREE;
      tree temp = NULL_TREE;
      /* Recurse down the chain first, so the whole argument list ends
	 up in pool memory.  */
      if (TREE_CHAIN (vexpr))
	temp = create_value_expr_from (TREE_CHAIN (vexpr), block, stmt);
      TREE_CHAIN (vexpr) = temp ? temp : TREE_CHAIN (vexpr);

      /* Recursively value-numberize reference ops.  */
      if (REFERENCE_CLASS_P (TREE_VALUE (vexpr)))
	{
	  tree tempop;
	  op = TREE_VALUE (vexpr);
	  tempop = create_value_expr_from (op, block, stmt);
	  op = tempop ? tempop : op;

	  TREE_VALUE (vexpr) = vn_lookup_or_add (op, stmt);
	}
      else
	{
	  op = TREE_VALUE (vexpr);
	  TREE_VALUE (vexpr) = vn_lookup_or_add (TREE_VALUE (vexpr), NULL);
	}
      /* This is the equivalent of inserting op into EXP_GEN like we
	 do below */
      if (!is_undefined_value (op))
	value_insert_into_set (EXP_GEN (block), op);

      return vexpr;
    }

  /* General case: rewrite each operand as a value handle.  */
  for (i = 0; i < TREE_CODE_LENGTH (code); i++)
    {
      tree val, op;

      op = TREE_OPERAND (expr, i);
      if (op == NULL_TREE)
	continue;

      /* Recursively value-numberize reference ops and tree lists.  */
      if (REFERENCE_CLASS_P (op))
	{
	  tree tempop = create_value_expr_from (op, block, stmt);
	  op = tempop ? tempop : op;
	  val = vn_lookup_or_add (op, stmt);
	}
      else if (TREE_CODE (op) == TREE_LIST)
	{
	  tree tempop;

	  gcc_assert (TREE_CODE (expr) == CALL_EXPR);
	  tempop = create_value_expr_from (op, block, stmt);

	  op = tempop ? tempop : op;
	  vn_lookup_or_add (op, NULL);
	  /* Unlike everywhere else, we do *not* want to replace the
	     TREE_LIST itself with a value number, because support
	     functions we call will blow up.  */
	  val = op;
	}
      else
	/* Create a value handle for OP and add it to VEXPR.  */
	val = vn_lookup_or_add (op, NULL);

      if (!is_undefined_value (op) && TREE_CODE (op) != TREE_LIST)
	value_insert_into_set (EXP_GEN (block), op);

      /* Give the value handle the operand's type so later rebuilds know
	 what to generate.  */
      if (TREE_CODE (val) == VALUE_HANDLE)
	TREE_TYPE (val) = TREE_TYPE (TREE_OPERAND (vexpr, i));

      TREE_OPERAND (vexpr, i) = val;
    }

  return vexpr;
}
/* Insert extra phis to merge values that are fully available from
   preds of BLOCK, but have no dominating representative coming from
   block DOM.  */

static void
insert_extra_phis (basic_block block, basic_block dom)
{
  /* Only merge points can need such PHIs.  */
  if (!single_pred_p (block))
    {
      edge e;
      edge_iterator ei;
      bool first = true;
      bitmap_set_t tempset = bitmap_set_new ();

      /* TEMPSET := intersection of AVAIL_OUT over all predecessors.  */
      FOR_EACH_EDGE (e, ei, block->preds)
	{
	  /* We cannot handle abnormal incoming edges correctly.  */
	  if (e->flags & EDGE_ABNORMAL)
	    return;

	  if (first)
	    {
	      bitmap_set_copy (tempset, AVAIL_OUT (e->src));
	      first = false;
	    }
	  else
	    bitmap_set_and (tempset, AVAIL_OUT (e->src));
	}

      /* Drop anything already available from the dominator.  */
      if (dom)
	bitmap_set_and_compl (tempset, AVAIL_OUT (dom));

      if (!bitmap_set_empty_p (tempset))
	{
	  unsigned int i;
	  bitmap_iterator bi;

	  EXECUTE_IF_SET_IN_BITMAP (tempset->expressions, 0, i, bi)
	    {
	      tree name = ssa_name (i);
	      tree val = get_value_handle (name);
	      tree temp;

	      if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
		continue;

	      /* MERGEPHITEMP is cached and reused across names of the
		 same type.  */
	      if (!mergephitemp
		  || TREE_TYPE (name) != TREE_TYPE (mergephitemp))
		{
		  mergephitemp = create_tmp_var (TREE_TYPE (name),
						 "mergephitmp");
		  get_var_ann (mergephitemp);
		}
	      temp = mergephitemp;

	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fprintf (dump_file, "Creating phi ");
		  print_generic_expr (dump_file, temp, 0);
		  fprintf (dump_file, " to merge available but not dominating values ");
		}

	      add_referenced_var (temp);
	      temp = create_phi_node (temp, block);
	      NECESSARY (temp) = 0;
	      VEC_safe_push (tree, heap, inserted_exprs, temp);

	      /* Each predecessor must have a leader for VAL, by
		 construction of TEMPSET.  */
	      FOR_EACH_EDGE (e, ei, block->preds)
		{
		  tree leader = bitmap_find_leader (AVAIL_OUT (e->src), val);

		  gcc_assert (leader);
		  add_phi_arg (temp, leader, e);

		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      print_generic_expr (dump_file, leader, 0);
		      fprintf (dump_file, " in block %d,", e->src->index);
		    }
		}

	      vn_add (PHI_RESULT (temp), val);

	      if (dump_file && (dump_flags & TDF_DETAILS))
		fprintf (dump_file, "\n");
	    }
	}
    }
}
/* Given a statement STMT and its right hand side which is a load, try
   to look for the expression stored in the location for the load, and
   return true if a useful equivalence was recorded for LHS.
   MEM_REF is the memory reference being loaded, BLOCK the containing
   basic block.  */

static bool
try_look_through_load (tree lhs, tree mem_ref, tree stmt, basic_block block)
{
  tree store_stmt = NULL;
  tree rhs;
  ssa_op_iter i;
  tree vuse;

  /* Require every VUSE of STMT to be defined by one and the same plain
     store to the exact location we are loading from.  */
  FOR_EACH_SSA_TREE_OPERAND (vuse, stmt, i, SSA_OP_VIRTUAL_USES)
    {
      tree def_stmt;

      gcc_assert (TREE_CODE (vuse) == SSA_NAME);
      def_stmt = SSA_NAME_DEF_STMT (vuse);

      /* If there is no useful statement for this VUSE, we'll not find a
	 useful expression to return either.  Likewise, if there is a
	 statement but it is not a simple assignment or it has virtual
	 uses, we can stop right here.  Note that this means we do
	 not look through PHI nodes, which is intentional.  */
      if (!def_stmt
	  || TREE_CODE (def_stmt) != MODIFY_EXPR
	  || !ZERO_SSA_OPERANDS (def_stmt, SSA_OP_VIRTUAL_USES))
	return false;

      /* If this is not the same statement as one we have looked at for
	 another VUSE of STMT already, we have two statements producing
	 something that reaches our STMT.  */
      if (store_stmt && store_stmt != def_stmt)
	return false;
      else
	{
	  /* Is this a store to the exact same location as the one we are
	     loading from in STMT?  */
	  if (!operand_equal_p (TREE_OPERAND (def_stmt, 0), mem_ref, 0))
	    return false;

	  /* Otherwise remember this statement and see if all other VUSEs
	     come from the same statement.  */
	  store_stmt = def_stmt;
	}
    }

  /* Alright then, we have visited all VUSEs of STMT and we've determined
     that all of them come from the same statement STORE_STMT.  See if there
     is a useful expression we can deduce from STORE_STMT.
     NOTE(review): if STMT had no virtual uses at all, STORE_STMT is still
     NULL here and the dereference below would crash — presumably callers
     only pass loads carrying at least one VUSE; verify at call sites.  */
  rhs = TREE_OPERAND (store_stmt, 1);
  if ((TREE_CODE (rhs) == SSA_NAME
       && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs))
      || is_gimple_min_invariant (rhs)
      || TREE_CODE (rhs) == ADDR_EXPR
      || TREE_INVARIANT (rhs))
    {
      /* Yay!  Compute a value number for the RHS of the statement and
	 add its value to the AVAIL_OUT set for the block.  Add the LHS
	 to TMP_GEN.  */
      add_to_sets (lhs, rhs, store_stmt, TMP_GEN (block), AVAIL_OUT (block));
      if (TREE_CODE (rhs) == SSA_NAME
	  && !is_undefined_value (rhs))
	value_insert_into_set (EXP_GEN (block), rhs);
      return true;
    }

  return false;
}
3142 /* Return a copy of NODE that is stored in the temporary alloc_pool's.
3143 This is made recursively true, so that the operands are stored in
3144 the pool as well. */
3146 static tree
3147 poolify_tree (tree node)
3149 switch (TREE_CODE (node))
3151 case INDIRECT_REF:
3153 tree temp = pool_alloc (reference_node_pool);
3154 memcpy (temp, node, tree_size (node));
3155 TREE_OPERAND (temp, 0) = poolify_tree (TREE_OPERAND (temp, 0));
3156 return temp;
3158 break;
3159 case MODIFY_EXPR:
3161 tree temp = pool_alloc (modify_expr_node_pool);
3162 memcpy (temp, node, tree_size (node));
3163 TREE_OPERAND (temp, 0) = poolify_tree (TREE_OPERAND (temp, 0));
3164 TREE_OPERAND (temp, 1) = poolify_tree (TREE_OPERAND (temp, 1));
3165 return temp;
3167 break;
3168 case SSA_NAME:
3169 case INTEGER_CST:
3170 case STRING_CST:
3171 case REAL_CST:
3172 case PARM_DECL:
3173 case VAR_DECL:
3174 case RESULT_DECL:
3175 return node;
3176 default:
3177 gcc_unreachable ();
3181 static tree modify_expr_template;
3183 /* Allocate a MODIFY_EXPR with TYPE, and operands OP1, OP2 in the
3184 alloc pools and return it. */
3185 static tree
3186 poolify_modify_expr (tree type, tree op1, tree op2)
3188 if (modify_expr_template == NULL)
3189 modify_expr_template = build2 (MODIFY_EXPR, type, op1, op2);
3191 TREE_OPERAND (modify_expr_template, 0) = op1;
3192 TREE_OPERAND (modify_expr_template, 1) = op2;
3193 TREE_TYPE (modify_expr_template) = type;
3195 return poolify_tree (modify_expr_template);
/* For each real store operation of the form
   *a = <value> that we see, create a corresponding fake store of the
   form storetmp_<version> = *a.

   This enables AVAIL computation to mark the results of stores as
   available.  Without this, you'd need to do some computation to
   mark the result of stores as ANTIC and AVAIL at all the right
   points.
   To save memory, we keep the store
   statements pool allocated until we decide whether they are
   necessary or not.  */

static void
insert_fake_stores (void)
{
  basic_block block;

  FOR_ALL_BB (block)
    {
      block_stmt_iterator bsi;
      for (bsi = bsi_start (block); !bsi_end_p (bsi); bsi_next (&bsi))
	{
	  tree stmt = bsi_stmt (bsi);

	  /* We can't generate SSA names for stores that are complex
	     or aggregate.  We also want to ignore things whose
	     virtual uses occur in abnormal phis.  */

	  if (TREE_CODE (stmt) == MODIFY_EXPR
	      && TREE_CODE (TREE_OPERAND (stmt, 0)) == INDIRECT_REF
	      && !AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (stmt, 0)))
	      && TREE_CODE (TREE_TYPE (TREE_OPERAND (stmt, 0))) != COMPLEX_TYPE)
	    {
	      ssa_op_iter iter;
	      def_operand_p defp;
	      tree lhs = TREE_OPERAND (stmt, 0);
	      tree rhs = TREE_OPERAND (stmt, 1);
	      tree new;
	      bool notokay = false;

	      /* Reject stores whose virtual defs appear in abnormal
		 PHIs; we could not rewrite those safely.  */
	      FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_VIRTUAL_DEFS)
		{
		  tree defvar = DEF_FROM_PTR (defp);
		  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (defvar))
		    {
		      notokay = true;
		      break;
		    }
		}

	      if (notokay)
		continue;

	      /* STORETEMP is cached and reused while the stored type
		 stays the same, to limit temporary creation.  */
	      if (!storetemp || TREE_TYPE (rhs) != TREE_TYPE (storetemp))
		{
		  storetemp = create_tmp_var (TREE_TYPE (rhs), "storetmp");
		  get_var_ann (storetemp);
		}

	      /* Build the pool-allocated fake load storetmp = *a and
		 chain it right after the real store.  */
	      new = poolify_modify_expr (TREE_TYPE (stmt), storetemp, lhs);

	      lhs = make_ssa_name (storetemp, new);
	      TREE_OPERAND (new, 0) = lhs;
	      create_ssa_artficial_load_stmt (new, stmt);

	      NECESSARY (new) = 0;
	      VEC_safe_push (tree, heap, inserted_exprs, new);
	      VEC_safe_push (tree, heap, need_creation, new);
	      bsi_insert_after (&bsi, new, BSI_NEW_STMT);
	    }
	}
    }
}
/* Turn the pool allocated fake stores that we created back into real
   GC allocated ones if they turned out to be necessary to PRE some
   expressions.  Unnecessary ones have their SSA defs released
   instead.  */

static void
realify_fake_stores (void)
{
  unsigned int i;
  tree stmt;

  for (i = 0; VEC_iterate (tree, need_creation, i, stmt); i++)
    {
      if (NECESSARY (stmt))
	{
	  block_stmt_iterator bsi;
	  tree newstmt;

	  /* Mark the temp variable as referenced */
	  add_referenced_var (SSA_NAME_VAR (TREE_OPERAND (stmt, 0)));

	  /* Put the new statement in GC memory, fix up the
	     SSA_NAME_DEF_STMT on it, and then put it in place of
	     the old statement before the store in the IR stream
	     as a plain ssa name copy.  */
	  bsi = bsi_for_stmt (stmt);
	  /* Step back to the real store that precedes the fake one;
	     its RHS is the value the fake store was reading back.  */
	  bsi_prev (&bsi);
	  newstmt = build2 (MODIFY_EXPR, void_type_node,
			    TREE_OPERAND (stmt, 0),
			    TREE_OPERAND (bsi_stmt (bsi), 1));
	  SSA_NAME_DEF_STMT (TREE_OPERAND (newstmt, 0)) = newstmt;
	  bsi_insert_before (&bsi, newstmt, BSI_SAME_STMT);
	  /* Remove the pool-allocated fake store.  */
	  bsi = bsi_for_stmt (stmt);
	  bsi_remove (&bsi, true);
	}
      else
	release_defs (stmt);
    }
}
3312 /* Tree-combine a value number expression *EXPR_P that does a type
3313 conversion with the value number expression of its operand.
3314 Returns true, if *EXPR_P simplifies to a value number or
3315 gimple min-invariant expression different from EXPR_P and
3316 sets *EXPR_P to the simplified expression value number.
3317 Otherwise returns false and does not change *EXPR_P. */
3319 static bool
3320 try_combine_conversion (tree *expr_p)
3322 tree expr = *expr_p;
3323 tree t;
3325 if (!((TREE_CODE (expr) == NOP_EXPR
3326 || TREE_CODE (expr) == CONVERT_EXPR)
3327 && TREE_CODE (TREE_OPERAND (expr, 0)) == VALUE_HANDLE
3328 && !VALUE_HANDLE_VUSES (TREE_OPERAND (expr, 0))))
3329 return false;
3331 t = fold_unary (TREE_CODE (expr), TREE_TYPE (expr),
3332 VALUE_HANDLE_EXPR_SET (TREE_OPERAND (expr, 0))->head->expr);
3334 /* Disallow value expressions we have no value number for already, as
3335 we would miss a leader for it here. */
3336 if (t
3337 && !(TREE_CODE (t) == VALUE_HANDLE
3338 || is_gimple_min_invariant (t)))
3339 t = vn_lookup (t, NULL);
3341 if (t && t != expr)
3343 *expr_p = t;
3344 return true;
3346 return false;
/* Compute the AVAIL set for all basic blocks.

   This function performs value numbering of the statements in each basic
   block.  The AVAIL sets are built from information we glean while doing
   this value numbering, since the AVAIL sets contain only one entry per
   value.

   AVAIL_IN[BLOCK] = AVAIL_OUT[dom(BLOCK)].
   AVAIL_OUT[BLOCK] = AVAIL_IN[BLOCK] U PHI_GEN[BLOCK] U TMP_GEN[BLOCK].  */

static void
compute_avail (void)
{
  basic_block block, son;
  basic_block *worklist;
  size_t sp = 0;
  tree param;

  /* For arguments with default definitions, we pretend they are
     defined in the entry block.  */
  for (param = DECL_ARGUMENTS (current_function_decl);
       param;
       param = TREE_CHAIN (param))
    {
      if (default_def (param) != NULL)
	{
	  tree def = default_def (param);

	  /* Give the default definition a value number and make it
	     available on exit from the entry block.  */
	  vn_lookup_or_add (def, NULL);
	  bitmap_insert_into_set (TMP_GEN (ENTRY_BLOCK_PTR), def);
	  bitmap_value_insert_into_set (AVAIL_OUT (ENTRY_BLOCK_PTR), def);
	}
    }

  /* Likewise for the static chain decl.  */
  if (cfun->static_chain_decl)
    {
      param = cfun->static_chain_decl;
      if (default_def (param) != NULL)
	{
	  tree def = default_def (param);

	  vn_lookup_or_add (def, NULL);
	  bitmap_insert_into_set (TMP_GEN (ENTRY_BLOCK_PTR), def);
	  bitmap_value_insert_into_set (AVAIL_OUT (ENTRY_BLOCK_PTR), def);
	}
    }

  /* Allocate the worklist.  The walk below visits each block exactly
     once, so n_basic_blocks entries suffice.  */
  worklist = XNEWVEC (basic_block, n_basic_blocks);

  /* Seed the algorithm by putting the dominator children of the entry
     block on the worklist.  */
  for (son = first_dom_son (CDI_DOMINATORS, ENTRY_BLOCK_PTR);
       son;
       son = next_dom_son (CDI_DOMINATORS, son))
    worklist[sp++] = son;

  /* Loop until the worklist is empty.  This is a preorder walk over
     the dominator tree, so a block's dominator is always processed
     before the block itself.  */
  while (sp)
    {
      block_stmt_iterator bsi;
      tree stmt, phi;
      basic_block dom;
      unsigned int stmt_uid = 1;

      /* Pick a block from the worklist.  */
      block = worklist[--sp];

      /* Initially, the set of available values in BLOCK is that of
	 its immediate dominator.  */
      dom = get_immediate_dominator (CDI_DOMINATORS, block);
      if (dom)
	bitmap_set_copy (AVAIL_OUT (block), AVAIL_OUT (dom));

      if (!in_fre)
	insert_extra_phis (block, dom);

      /* Generate values for PHI nodes.  */
      for (phi = phi_nodes (block); phi; phi = PHI_CHAIN (phi))
	/* We have no need for virtual phis, as they don't represent
	   actual computations.  */
	if (is_gimple_reg (PHI_RESULT (phi)))
	  add_to_sets (PHI_RESULT (phi), PHI_RESULT (phi), NULL,
		       PHI_GEN (block), AVAIL_OUT (block));

      /* Now compute value numbers and populate value sets with all
	 the expressions computed in BLOCK.  */
      for (bsi = bsi_start (block); !bsi_end_p (bsi); bsi_next (&bsi))
	{
	  stmt_ann_t ann;
	  ssa_op_iter iter;
	  tree op;

	  stmt = bsi_stmt (bsi);
	  ann = stmt_ann (stmt);

	  /* Number statements within the block; uid 0 is left unused.  */
	  ann->uid = stmt_uid++;

	  /* For regular value numbering, we are only interested in
	     assignments of the form X_i = EXPR, where EXPR represents
	     an "interesting" computation, it has no volatile operands
	     and X_i doesn't flow through an abnormal edge.  */
	  if (TREE_CODE (stmt) == MODIFY_EXPR
	      && !ann->has_volatile_ops
	      && TREE_CODE (TREE_OPERAND (stmt, 0)) == SSA_NAME
	      && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (stmt, 0)))
	    {
	      tree lhs = TREE_OPERAND (stmt, 0);
	      tree rhs = TREE_OPERAND (stmt, 1);

	      /* Try to look through loads.  Only statements with
		 virtual uses can be loads.  */
	      if (TREE_CODE (lhs) == SSA_NAME
		  && !ZERO_SSA_OPERANDS (stmt, SSA_OP_VIRTUAL_USES)
		  && try_look_through_load (lhs, rhs, stmt, block))
		continue;

	      STRIP_USELESS_TYPE_CONVERSION (rhs);
	      if (can_value_number_operation (rhs))
		{
		  /* For value numberable operation, create a
		     duplicate expression with the operands replaced
		     with the value handles of the original RHS.  */
		  tree newt = create_value_expr_from (rhs, block, stmt);
		  if (newt)
		    {
		      /* If we can combine a conversion expression
			 with the expression for its operand just
			 record the value number for it.  */
		      if (try_combine_conversion (&newt))
			vn_add (lhs, newt);
		      else
			{
			  tree val = vn_lookup_or_add (newt, stmt);
			  vn_add (lhs, val);
			  /* Only un-combined expressions go into
			     EXP_GEN; they are candidate computations
			     for PRE.  */
			  value_insert_into_set (EXP_GEN (block), newt);
			}
		      bitmap_insert_into_set (TMP_GEN (block), lhs);
		      bitmap_value_insert_into_set (AVAIL_OUT (block), lhs);
		      continue;
		    }
		}
	      else if ((TREE_CODE (rhs) == SSA_NAME
			&& !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs))
		       || is_gimple_min_invariant (rhs)
		       || TREE_CODE (rhs) == ADDR_EXPR
		       || TREE_INVARIANT (rhs)
		       || DECL_P (rhs))
		{
		  /* Compute a value number for the RHS of the statement
		     and add its value to the AVAIL_OUT set for the block.
		     Add the LHS to TMP_GEN.  */
		  add_to_sets (lhs, rhs, stmt, TMP_GEN (block),
			       AVAIL_OUT (block));

		  if (TREE_CODE (rhs) == SSA_NAME
		      && !is_undefined_value (rhs))
		    value_insert_into_set (EXP_GEN (block), rhs);
		  continue;
		}
	    }

	  /* For any other statement that we don't recognize, simply
	     make the names generated by the statement available in
	     AVAIL_OUT and TMP_GEN.  */
	  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF)
	    add_to_sets (op, op, NULL, TMP_GEN (block), AVAIL_OUT (block));

	  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
	    add_to_sets (op, op, NULL, NULL , AVAIL_OUT (block));
	}

      /* Put the dominator children of BLOCK on the worklist of blocks
	 to compute available sets for.  */
      for (son = first_dom_son (CDI_DOMINATORS, block);
	   son;
	   son = next_dom_son (CDI_DOMINATORS, son))
	worklist[sp++] = son;
    }

  free (worklist);
}
/* Eliminate fully redundant computations.  Walk every statement and,
   where an available leader exists for the value of a computation,
   replace the computation with the leader.  */

static void
eliminate (void)
{
  basic_block b;

  FOR_EACH_BB (b)
    {
      block_stmt_iterator i;

      for (i = bsi_start (b); !bsi_end_p (i); bsi_next (&i))
	{
	  tree stmt = bsi_stmt (i);

	  /* Lookup the RHS of the expression, see if we have an
	     available computation for it.  If so, replace the RHS with
	     the available computation.  Plain copies from SSA names,
	     constants and volatile statements are skipped.  */
	  if (TREE_CODE (stmt) == MODIFY_EXPR
	      && TREE_CODE (TREE_OPERAND (stmt, 0)) == SSA_NAME
	      && TREE_CODE (TREE_OPERAND (stmt ,1)) != SSA_NAME
	      && !is_gimple_min_invariant (TREE_OPERAND (stmt, 1))
	      && !stmt_ann (stmt)->has_volatile_ops)
	    {
	      tree lhs = TREE_OPERAND (stmt, 0);
	      tree *rhs_p = &TREE_OPERAND (stmt, 1);
	      tree sprime;

	      /* SPRIME is the leader (earliest available computation)
		 for the value of LHS in this block, if any.  */
	      sprime = bitmap_find_leader (AVAIL_OUT (b),
					   vn_lookup (lhs, NULL));
	      if (sprime
		  && sprime != lhs
		  && (TREE_CODE (*rhs_p) != SSA_NAME
		      || may_propagate_copy (*rhs_p, sprime)))
		{
		  gcc_assert (sprime != *rhs_p);

		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fprintf (dump_file, "Replaced ");
		      print_generic_expr (dump_file, *rhs_p, 0);
		      fprintf (dump_file, " with ");
		      print_generic_expr (dump_file, sprime, 0);
		      fprintf (dump_file, " in ");
		      print_generic_stmt (dump_file, stmt, 0);
		    }

		  /* The definition feeding the replacement must be kept
		     by the dead-insertion cleanup pass.  */
		  if (TREE_CODE (sprime) == SSA_NAME)
		    NECESSARY (SSA_NAME_DEF_STMT (sprime)) = 1;
		  /* We need to make sure the new and old types actually match,
		     which may require adding a simple cast, which fold_convert
		     will do for us.  */
		  if (TREE_CODE (*rhs_p) != SSA_NAME
		      && !tree_ssa_useless_type_conversion_1 (TREE_TYPE (*rhs_p),
							      TREE_TYPE (sprime)))
		    sprime = fold_convert (TREE_TYPE (*rhs_p), sprime);

		  pre_stats.eliminations++;
		  propagate_tree_value (rhs_p, sprime);
		  update_stmt (stmt);

		  /* If we removed EH side effects from the statement, clean
		     its EH information.  */
		  if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
		    {
		      bitmap_set_bit (need_eh_cleanup,
				      bb_for_stmt (stmt)->index);
		      if (dump_file && (dump_flags & TDF_DETAILS))
			fprintf (dump_file, "  Removed EH side effects.\n");
		    }
		}
	    }
	}
    }
}
3606 /* Borrow a bit of tree-ssa-dce.c for the moment.
3607 XXX: In 4.1, we should be able to just run a DCE pass after PRE, though
3608 this may be a bit faster, and we may want critical edges kept split. */
3610 /* If OP's defining statement has not already been determined to be necessary,
3611 mark that statement necessary. Return the stmt, if it is newly
3612 necessary. */
3614 static inline tree
3615 mark_operand_necessary (tree op)
3617 tree stmt;
3619 gcc_assert (op);
3621 if (TREE_CODE (op) != SSA_NAME)
3622 return NULL;
3624 stmt = SSA_NAME_DEF_STMT (op);
3625 gcc_assert (stmt);
3627 if (NECESSARY (stmt)
3628 || IS_EMPTY_STMT (stmt))
3629 return NULL;
3631 NECESSARY (stmt) = 1;
3632 return stmt;
/* Because we don't follow exactly the standard PRE algorithm, and decide not
   to insert PHI nodes sometimes, and because value numbering of casts isn't
   perfect, we sometimes end up inserting dead code.  This simple DCE-like
   pass removes any insertions we made that weren't actually used.  */

static void
remove_dead_inserted_code (void)
{
  VEC(tree,heap) *worklist = NULL;
  int i;
  tree t;

  /* Seed the worklist with the inserted statements already known to
     be necessary (marked during elimination).  */
  worklist = VEC_alloc (tree, heap, VEC_length (tree, inserted_exprs));
  for (i = 0; VEC_iterate (tree, inserted_exprs, i, t); i++)
    {
      if (NECESSARY (t))
	VEC_quick_push (tree, worklist, t);
    }

  /* Propagate necessity backwards through operands until a fixpoint.  */
  while (VEC_length (tree, worklist) > 0)
    {
      t = VEC_pop (tree, worklist);

      /* PHI nodes are somewhat special in that each PHI alternative has
	 data and control dependencies.  All the statements feeding the
	 PHI node's arguments are always necessary.  */
      if (TREE_CODE (t) == PHI_NODE)
	{
	  int k;

	  /* Reserve room up front so the quick_push below is safe.  */
	  VEC_reserve (tree, heap, worklist, PHI_NUM_ARGS (t));
	  for (k = 0; k < PHI_NUM_ARGS (t); k++)
	    {
	      tree arg = PHI_ARG_DEF (t, k);
	      if (TREE_CODE (arg) == SSA_NAME)
		{
		  arg = mark_operand_necessary (arg);
		  if (arg)
		    VEC_quick_push (tree, worklist, arg);
		}
	    }
	}
      else
	{
	  /* Propagate through the operands.  Examine all the USE, VUSE and
	     V_MAY_DEF operands in this statement.  Mark all the statements
	     which feed this statement's uses as necessary.  */
	  ssa_op_iter iter;
	  tree use;

	  /* The operands of V_MAY_DEF expressions are also needed as they
	     represent potential definitions that may reach this
	     statement (V_MAY_DEF operands allow us to follow def-def
	     links).  */

	  FOR_EACH_SSA_TREE_OPERAND (use, t, iter, SSA_OP_ALL_USES)
	    {
	      tree n = mark_operand_necessary (use);
	      if (n)
		VEC_safe_push (tree, heap, worklist, n);
	    }
	}
    }

  /* Finally delete every insertion that was never marked necessary.  */
  for (i = 0; VEC_iterate (tree, inserted_exprs, i, t); i++)
    {
      if (!NECESSARY (t))
	{
	  block_stmt_iterator bsi;

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Removing unnecessary insertion:");
	      print_generic_stmt (dump_file, t, 0);
	    }

	  if (TREE_CODE (t) == PHI_NODE)
	    {
	      remove_phi_node (t, NULL);
	    }
	  else
	    {
	      bsi = bsi_for_stmt (t);
	      bsi_remove (&bsi, true);
	      release_defs (t);
	    }
	}
    }
  VEC_free (tree, heap, worklist);
}
/* Initialize data structures used by PRE.  DO_FRE is true when only
   full redundancy elimination will be performed; it suppresses the
   loop optimizer setup and extra PHI insertion machinery.  */

static void
init_pre (bool do_fre)
{
  basic_block bb;

  in_fre = do_fre;

  inserted_exprs = NULL;
  need_creation = NULL;
  pretemp = NULL_TREE;
  storetemp = NULL_TREE;
  mergephitemp = NULL_TREE;
  prephitemp = NULL_TREE;

  vn_init ();
  if (!do_fre)
    current_loops = loop_optimizer_init (LOOPS_NORMAL);

  /* Fake edges to the exit block ensure every block is reachable in
     the post-dominator walk even with infinite loops present.  */
  connect_infinite_loops_to_exit ();
  memset (&pre_stats, 0, sizeof (pre_stats));

  /* If block 0 has more than one predecessor, it means that its PHI
     nodes will have arguments coming from block -1.  This creates
     problems for several places in PRE that keep local arrays indexed
     by block number.  To prevent this, we split the edge coming from
     ENTRY_BLOCK_PTR (FIXME, if ENTRY_BLOCK_PTR had an index number
     different than -1 we wouldn't have to hack this.  tree-ssa-dce.c
     needs a similar change).  */
  if (!single_pred_p (single_succ (ENTRY_BLOCK_PTR)))
    if (!(single_succ_edge (ENTRY_BLOCK_PTR)->flags & EDGE_ABNORMAL))
      split_edge (single_succ_edge (ENTRY_BLOCK_PTR));

  /* Hang the per-block value sets off each block's aux field.  */
  FOR_ALL_BB (bb)
    bb->aux = xcalloc (1, sizeof (struct bb_value_sets));

  bitmap_obstack_initialize (&grand_bitmap_obstack);
  phi_translate_table = htab_create (511, expr_pred_trans_hash,
				     expr_pred_trans_eq, free);
  value_set_pool = create_alloc_pool ("Value sets",
				      sizeof (struct value_set), 30);
  bitmap_set_pool = create_alloc_pool ("Bitmap sets",
				       sizeof (struct bitmap_set), 30);
  value_set_node_pool = create_alloc_pool ("Value set nodes",
					   sizeof (struct value_set_node), 30);
  calculate_dominance_info (CDI_POST_DOMINATORS);
  calculate_dominance_info (CDI_DOMINATORS);
  /* Pools for the temporary value expressions PRE builds; each pool's
     node size is taken from a representative tree code.  */
  binary_node_pool = create_alloc_pool ("Binary tree nodes",
					tree_code_size (PLUS_EXPR), 30);
  unary_node_pool = create_alloc_pool ("Unary tree nodes",
				       tree_code_size (NEGATE_EXPR), 30);
  reference_node_pool = create_alloc_pool ("Reference tree nodes",
					   tree_code_size (ARRAY_REF), 30);
  expression_node_pool = create_alloc_pool ("Expression tree nodes",
					    tree_code_size (CALL_EXPR), 30);
  list_node_pool = create_alloc_pool ("List tree nodes",
				      tree_code_size (TREE_LIST), 30);
  comparison_node_pool = create_alloc_pool ("Comparison tree nodes",
					    tree_code_size (EQ_EXPR), 30);
  modify_expr_node_pool = create_alloc_pool ("MODIFY_EXPR nodes",
					     tree_code_size (MODIFY_EXPR),
					     30);
  modify_expr_template = NULL;

  FOR_ALL_BB (bb)
    {
      EXP_GEN (bb) = set_new (true);
      PHI_GEN (bb) = bitmap_set_new ();
      TMP_GEN (bb) = bitmap_set_new ();
      AVAIL_OUT (bb) = bitmap_set_new ();
    }

  need_eh_cleanup = BITMAP_ALLOC (NULL);
}
/* Deallocate data structures used by PRE.  DO_FRE mirrors the flag
   passed to init_pre and controls whether loop structures are torn
   down.  */

static void
fini_pre (bool do_fre)
{
  basic_block bb;
  unsigned int i;

  VEC_free (tree, heap, inserted_exprs);
  VEC_free (tree, heap, need_creation);
  bitmap_obstack_release (&grand_bitmap_obstack);
  free_alloc_pool (value_set_pool);
  free_alloc_pool (bitmap_set_pool);
  free_alloc_pool (value_set_node_pool);
  free_alloc_pool (binary_node_pool);
  free_alloc_pool (reference_node_pool);
  free_alloc_pool (unary_node_pool);
  free_alloc_pool (list_node_pool);
  free_alloc_pool (expression_node_pool);
  free_alloc_pool (comparison_node_pool);
  free_alloc_pool (modify_expr_node_pool);
  htab_delete (phi_translate_table);
  /* Undo connect_infinite_loops_to_exit from init_pre.  */
  remove_fake_exit_edges ();

  /* Release the per-block value set containers hung off bb->aux.  */
  FOR_ALL_BB (bb)
    {
      free (bb->aux);
      bb->aux = NULL;
    }

  free_dominance_info (CDI_POST_DOMINATORS);
  vn_delete ();

  if (!bitmap_empty_p (need_eh_cleanup))
    {
      tree_purge_all_dead_eh_edges (need_eh_cleanup);
      cleanup_tree_cfg ();
    }

  BITMAP_FREE (need_eh_cleanup);

  /* Wipe out pointers to VALUE_HANDLEs.  In the not terribly distant
     future we will want them to be persistent though.  */
  for (i = 0; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);

      if (!name)
	continue;

      if (SSA_NAME_VALUE (name)
	  && TREE_CODE (SSA_NAME_VALUE (name)) == VALUE_HANDLE)
	SSA_NAME_VALUE (name) = NULL;
    }

  if (!do_fre && current_loops)
    {
      loop_optimizer_finalize (current_loops);
      current_loops = NULL;
    }
}
/* Main entry point to the SSA-PRE pass.  DO_FRE is true if the caller
   only wants to do full redundancy elimination.  */

static void
execute_pre (bool do_fre)
{
  init_pre (do_fre);

  if (!do_fre)
    insert_fake_stores ();

  /* Collect and value number expressions computed in each basic block.  */
  compute_avail ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      basic_block bb;

      FOR_ALL_BB (bb)
	{
	  print_value_set (dump_file, EXP_GEN (bb), "exp_gen", bb->index);
	  bitmap_print_value_set (dump_file, TMP_GEN (bb), "tmp_gen",
				  bb->index);
	  bitmap_print_value_set (dump_file, AVAIL_OUT (bb), "avail_out",
				  bb->index);
	}
    }

  /* Insert can get quite slow on an incredibly large number of basic
     blocks due to some quadratic behavior.  Until this behavior is
     fixed, don't run it when we have an incredibly large number of
     bb's.  If we aren't going to run insert, there is no point in
     computing ANTIC, either, even though it's plenty fast.  */
  if (!do_fre && n_basic_blocks < 4000)
    {
      vuse_names = XCNEWVEC (bitmap, num_ssa_names);
      compute_rvuse_and_antic_safe ();
      compute_antic ();
      insert ();
      free (vuse_names);
    }

  /* Remove all the redundant expressions.  */
  eliminate ();

  if (dump_file && (dump_flags & TDF_STATS))
    {
      fprintf (dump_file, "Insertions: %d\n", pre_stats.insertions);
      fprintf (dump_file, "New PHIs: %d\n", pre_stats.phis);
      fprintf (dump_file, "Eliminated: %d\n", pre_stats.eliminations);
      fprintf (dump_file, "Constified: %d\n", pre_stats.constified);
    }

  bsi_commit_edge_inserts ();

  if (!do_fre)
    {
      /* Clean up any dead code our insertions created, and turn the
	 fake stores added before ANTIC computation into real ones.  */
      remove_dead_inserted_code ();
      realify_fake_stores ();
    }

  fini_pre (do_fre);
}
3929 /* Gate and execute functions for PRE. */
3931 static unsigned int
3932 do_pre (void)
3934 execute_pre (false);
3935 return 0;
3938 static bool
3939 gate_pre (void)
3941 return flag_tree_pre != 0;
3944 struct tree_opt_pass pass_pre =
3946 "pre", /* name */
3947 gate_pre, /* gate */
3948 do_pre, /* execute */
3949 NULL, /* sub */
3950 NULL, /* next */
3951 0, /* static_pass_number */
3952 TV_TREE_PRE, /* tv_id */
3953 PROP_no_crit_edges | PROP_cfg
3954 | PROP_ssa | PROP_alias, /* properties_required */
3955 0, /* properties_provided */
3956 0, /* properties_destroyed */
3957 0, /* todo_flags_start */
3958 TODO_update_ssa_only_virtuals | TODO_dump_func | TODO_ggc_collect
3959 | TODO_verify_ssa, /* todo_flags_finish */
3960 0 /* letter */
3964 /* Gate and execute functions for FRE. */
3966 static unsigned int
3967 execute_fre (void)
3969 execute_pre (true);
3970 return 0;
3973 static bool
3974 gate_fre (void)
3976 return flag_tree_fre != 0;
3979 struct tree_opt_pass pass_fre =
3981 "fre", /* name */
3982 gate_fre, /* gate */
3983 execute_fre, /* execute */
3984 NULL, /* sub */
3985 NULL, /* next */
3986 0, /* static_pass_number */
3987 TV_TREE_FRE, /* tv_id */
3988 PROP_cfg | PROP_ssa | PROP_alias, /* properties_required */
3989 0, /* properties_provided */
3990 0, /* properties_destroyed */
3991 0, /* todo_flags_start */
3992 TODO_dump_func | TODO_ggc_collect | TODO_verify_ssa, /* todo_flags_finish */
3993 0 /* letter */