gcc/tree-ssa-pre.c
1 /* SSA-PRE for trees.
2 Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007
3 Free Software Foundation, Inc.
4 Contributed by Daniel Berlin <dan@dberlin.org> and Steven Bosscher
5 <stevenb@suse.de>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify
10 it under the terms of the GNU General Public License as published by
11 the Free Software Foundation; either version 3, or (at your option)
12 any later version.
14 GCC is distributed in the hope that it will be useful,
15 but WITHOUT ANY WARRANTY; without even the implied warranty of
16 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17 GNU General Public License for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "ggc.h"
28 #include "tree.h"
29 #include "basic-block.h"
30 #include "diagnostic.h"
31 #include "tree-inline.h"
32 #include "tree-flow.h"
33 #include "tree-gimple.h"
34 #include "tree-dump.h"
35 #include "timevar.h"
36 #include "fibheap.h"
37 #include "hashtab.h"
38 #include "tree-iterator.h"
39 #include "real.h"
40 #include "alloc-pool.h"
41 #include "obstack.h"
42 #include "tree-pass.h"
43 #include "flags.h"
44 #include "bitmap.h"
45 #include "langhooks.h"
46 #include "cfgloop.h"
47 #include "tree-ssa-sccvn.h"
48 #include "params.h"
50 /* TODO:
52 1. Avail sets can be shared by making an avail_find_leader that
53 walks up the dominator tree and looks in those avail sets.
54 This might affect code optimality; it's unclear right now.
55 2. Strength reduction can be performed by anticipating expressions
56 we can repair later on.
57 3. We can do back-substitution or smarter value numbering to catch
58 commutative expressions split up over multiple statements.  */
61 /* For ease of terminology, "expression node" in the below refers to
62 every expression node but GIMPLE_MODIFY_STMT, because GIMPLE_MODIFY_STMTs
63 represent the actual statement containing the expressions we care about,
64 and we cache the value number by putting it in the expression. */
66 /* Basic algorithm
68 First we walk the statements to generate the AVAIL sets, the
69 EXP_GEN sets, and the tmp_gen sets. EXP_GEN sets represent the
70 generation of values/expressions by a given block. We use them
71 when computing the ANTIC sets. The AVAIL sets consist of
72 SSA_NAME's that represent values, so we know what values are
73 available in what blocks. AVAIL is a forward dataflow problem. In
74 SSA, values are never killed, so we don't need a kill set, or a
75 fixpoint iteration, in order to calculate the AVAIL sets. In
76 traditional parlance, AVAIL sets tell us the downsafety of the
77 expressions/values.
79 Next, we generate the ANTIC sets. These sets represent the
80 anticipatable expressions. ANTIC is a backwards dataflow
81 problem. An expression is anticipatable in a given block if it could
82 be generated in that block. This means that if we had to perform
83 an insertion in that block of the value of that expression, we
84 could. Calculating the ANTIC sets requires phi translation of
85 expressions, because the flow goes backwards through phis. We must
86 iterate to a fixpoint of the ANTIC sets, because we have a kill
87 set. Even in SSA form, values are not live over the entire
88 function, only from their definition point onwards. So we have to
89 remove values from the ANTIC set once we go past the definition
90 point of the leaders that make them up.
91 compute_antic/compute_antic_aux performs this computation.
93 Third, we perform insertions to make partially redundant
94 expressions fully redundant.
96 An expression is partially redundant (excluding partial
97 anticipation) if:
99 1. It is AVAIL in some, but not all, of the predecessors of a
100 given block.
101 2. It is ANTIC in all the predecessors.
103 In order to make it fully redundant, we insert the expression into
104 the predecessors where it is not available, but is ANTIC.
106 For the partial anticipation case, we only perform insertion if it
107 is partially anticipated in some block, and fully available in all
108 of the predecessors.
110 insert/insert_aux/do_regular_insertion/do_partial_partial_insertion
111 performs these steps.
113 Fourth, we eliminate fully redundant expressions.
114 This is a simple statement walk that replaces redundant
115 calculations with the now available values. */
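/* A hedged, source-level sketch of what these phases accomplish; the two
   functions below are illustrative only and are not part of this pass.
   In example_before, "a + b" is partially redundant: it is available only
   on the then-path, yet anticipatable at the join.  Insertion places the
   computation on the path where it was missing, and elimination then
   replaces the redundant computation with the available value, as in
   example_after.  */

static int
example_before (int a, int b, int c)
{
  int x, y;
  if (c)
    x = a + b;     /* a + b becomes available on this path only.  */
  else
    x = 0;
  y = a + b;       /* partially redundant: recomputed when c is true.  */
  return x + y;
}

static int
example_after (int a, int b, int c)
{
  int x, y, t;
  if (c)
    {
      x = a + b;
      t = x;       /* reuse the value that is already available here.  */
    }
  else
    {
      x = 0;
      t = a + b;   /* insertion makes the expression fully redundant.  */
    }
  y = t;           /* elimination replaces the redundant computation.  */
  return x + y;
}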
117 /* Representations of value numbers:
119 Value numbers are represented using the "value handle" approach.
120 This means that each SSA_NAME (and for other reasons to be
121 disclosed in a moment, expression nodes) has a value handle that
122 can be retrieved through get_value_handle. This value handle *is*
123 the value number of the SSA_NAME. You can pointer compare the
124 value handles for equivalence purposes.
126 For debugging reasons, the value handle is internally more than
127 just a number; it is a VALUE_HANDLE named "VH.x", where x is a
128 unique number for each value number in use. This allows
129 expressions with SSA_NAMES replaced by value handles to still be
130 pretty printed in a sane way. They simply print as "VH.3 *
131 VH.5", etc.
133 Expression nodes have value handles associated with them as a
134 cache. Otherwise, we'd have to look them up again in the hash
135 table.  This makes a significant difference (a factor of two or more) on
136 some test cases. They can be thrown away after the pass is
137 finished. */
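/* A minimal usage sketch of the pointer-comparison property described
   above.  The helper name is hypothetical and not used elsewhere; it only
   assumes get_value_handle, which this file already relies on.  */

static inline bool
values_equal_p (tree e1, tree e2)
{
  tree v1 = get_value_handle (e1);
  tree v2 = get_value_handle (e2);

  /* Two names/expressions have the same value number iff their value
     handles are the very same VALUE_HANDLE node.  */
  return v1 != NULL_TREE && v1 == v2;
}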
139 /* Representation of expressions on value numbers:
141 In some portions of this code, you will notice we allocate "fake"
142 analogues to the expression we are value numbering, and replace the
143 operands with the values of the expression. Since we work on
144 values, and not just names, we canonicalize expressions to value
145 expressions for use in the ANTIC sets, the EXP_GEN set, etc.
147 This is theoretically unnecessary; it just saves a bunch of
148 repeated get_value_handle and find_leader calls in the remainder of
149 the code, trading off temporary memory usage for speed. The tree
150 nodes aren't actually creating more garbage, since they are
151 allocated in special pools which are thrown away at the end of
152 this pass.
154 All of this also means that if you print the EXP_GEN or ANTIC sets,
155 you will see "VH.5 + VH.7" in the set, instead of "a_55 +
156 b_66" or something. The only thing that actually cares about
157 seeing the value leaders is phi translation, and it needs to be
158 able to find the leader for a value in an arbitrary block, so this
159 "value expression" form is perfect for it (otherwise you'd do
160 get_value_handle->find_leader->translate->get_value_handle->find_leader).*/
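/* A hedged sketch of the canonicalization described above: rewrite a
   binary expression over SSA_NAMEs into the equivalent expression over
   value handles, so that set lookups and phi translation can work on
   values.  The helper is illustrative only; it uses build2 to allocate a
   regular tree, whereas the pass itself uses its own allocation pools for
   these temporaries.  */

static tree
value_expression_sketch (tree expr)
{
  tree vh0 = get_value_handle (TREE_OPERAND (expr, 0));
  tree vh1 = get_value_handle (TREE_OPERAND (expr, 1));

  /* E.g. "a_55 + b_66" becomes "VH.5 + VH.7".  */
  return build2 (TREE_CODE (expr), TREE_TYPE (expr), vh0, vh1);
}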
163 /* Representation of sets:
165 There are currently two types of sets used, hopefully to be unified soon.
166 The AVAIL sets do not need to be sorted in any particular order,
167 and thus, are simply represented as two bitmaps, one that keeps
168 track of values present in the set, and one that keeps track of
169 expressions present in the set.
171 The other sets are represented as doubly linked lists kept in topological
172 order, with an optional supporting bitmap of values present in the
173 set. The sets represent values, and the elements can be values or
174 expressions. The elements can appear in different sets, but each
175 element can only appear once in each set.
177 Since each node in the set represents a value, we also want to be
178 able to map expression, set pairs to something that tells us
179 whether the value is present in a set. We use a per-set bitmap for
180 that. The value handles also point to a linked list of the
181 expressions they represent via a tree annotation. This is mainly
182 useful only for debugging, since we don't do identity lookups. */
185 /* Next global expression id number. */
186 static unsigned int next_expression_id;
188 typedef VEC(tree, gc) *vuse_vec;
189 DEF_VEC_P (vuse_vec);
190 DEF_VEC_ALLOC_P (vuse_vec, heap);
192 static VEC(vuse_vec, heap) *expression_vuses;
194 /* Mapping from expression to id number we can use in bitmap sets. */
195 static VEC(tree, heap) *expressions;
197 /* Allocate an expression id for EXPR. */
199 static inline unsigned int
200 alloc_expression_id (tree expr)
202 tree_ann_common_t ann;
204 ann = get_tree_common_ann (expr);
206 /* Make sure we won't overflow. */
207 gcc_assert (next_expression_id + 1 > next_expression_id);
209 ann->aux = XNEW (unsigned int);
210 * ((unsigned int *)ann->aux) = next_expression_id++;
211 VEC_safe_push (tree, heap, expressions, expr);
212 VEC_safe_push (vuse_vec, heap, expression_vuses, NULL);
213 return next_expression_id - 1;
216 /* Return the expression id for tree EXPR. */
218 static inline unsigned int
219 get_expression_id (tree expr)
221 tree_ann_common_t ann = tree_common_ann (expr);
222 gcc_assert (ann);
223 gcc_assert (ann->aux);
225 return *((unsigned int *)ann->aux);
228 /* Return the existing expression id for EXPR, or create one if one
229 does not exist yet. */
231 static inline unsigned int
232 get_or_alloc_expression_id (tree expr)
234 tree_ann_common_t ann = tree_common_ann (expr);
236 if (ann == NULL || !ann->aux)
237 return alloc_expression_id (expr);
239 return get_expression_id (expr);
242 /* Return the expression that has expression id ID */
244 static inline tree
245 expression_for_id (unsigned int id)
247 return VEC_index (tree, expressions, id);
250 /* Return the expression vuses for EXPR, if there are any. */
252 static inline vuse_vec
253 get_expression_vuses (tree expr)
255 unsigned int expr_id = get_or_alloc_expression_id (expr);
256 return VEC_index (vuse_vec, expression_vuses, expr_id);
259 /* Set the expression vuses for EXPR to VUSES. */
261 static inline void
262 set_expression_vuses (tree expr, vuse_vec vuses)
264 unsigned int expr_id = get_or_alloc_expression_id (expr);
265 VEC_replace (vuse_vec, expression_vuses, expr_id, vuses);
269 /* Free the expression id field in all of our expressions,
270 and then destroy the expressions array. */
272 static void
273 clear_expression_ids (void)
275 int i;
276 tree expr;
278 for (i = 0; VEC_iterate (tree, expressions, i, expr); i++)
280 free (tree_common_ann (expr)->aux);
281 tree_common_ann (expr)->aux = NULL;
283 VEC_free (tree, heap, expressions);
284 VEC_free (vuse_vec, heap, expression_vuses);
287 static bool in_fre = false;
289 /* An unordered bitmap set. One bitmap tracks values, the other,
290 expressions. */
291 typedef struct bitmap_set
293 bitmap expressions;
294 bitmap values;
295 } *bitmap_set_t;
297 #define FOR_EACH_EXPR_ID_IN_SET(set, id, bi) \
298 EXECUTE_IF_SET_IN_BITMAP(set->expressions, 0, id, bi)
300 /* Sets that we need to keep track of. */
301 typedef struct bb_bitmap_sets
303 /* The EXP_GEN set, which represents expressions/values generated in
304 a basic block. */
305 bitmap_set_t exp_gen;
307 /* The PHI_GEN set, which represents PHI results generated in a
308 basic block. */
309 bitmap_set_t phi_gen;
311 /* The TMP_GEN set, which represents results/temporaries generated
312 in a basic block. IE the LHS of an expression. */
313 bitmap_set_t tmp_gen;
315 /* The AVAIL_OUT set, which represents which values are available in
316 a given basic block. */
317 bitmap_set_t avail_out;
319 /* The ANTIC_IN set, which represents which values are anticipatable
320 in a given basic block. */
321 bitmap_set_t antic_in;
323 /* The PA_IN set, which represents which values are
324 partially anticipatable in a given basic block. */
325 bitmap_set_t pa_in;
327 /* The NEW_SETS set, which is used during insertion to augment the
328 AVAIL_OUT set of blocks with the new insertions performed during
329 the current iteration. */
330 bitmap_set_t new_sets;
332 /* True if we have visited this block during ANTIC calculation. */
333 unsigned int visited:1;
335 /* True if we have deferred processing this block during ANTIC
336 calculation until its successor is processed. */
337 unsigned int deferred : 1;
338 } *bb_value_sets_t;
340 #define EXP_GEN(BB) ((bb_value_sets_t) ((BB)->aux))->exp_gen
341 #define PHI_GEN(BB) ((bb_value_sets_t) ((BB)->aux))->phi_gen
342 #define TMP_GEN(BB) ((bb_value_sets_t) ((BB)->aux))->tmp_gen
343 #define AVAIL_OUT(BB) ((bb_value_sets_t) ((BB)->aux))->avail_out
344 #define ANTIC_IN(BB) ((bb_value_sets_t) ((BB)->aux))->antic_in
345 #define PA_IN(BB) ((bb_value_sets_t) ((BB)->aux))->pa_in
346 #define NEW_SETS(BB) ((bb_value_sets_t) ((BB)->aux))->new_sets
347 #define BB_VISITED(BB) ((bb_value_sets_t) ((BB)->aux))->visited
348 #define BB_DEFERRED(BB) ((bb_value_sets_t) ((BB)->aux))->deferred
350 /* Maximal set of values, used to initialize the ANTIC problem, which
351 is an intersection problem. */
352 static bitmap_set_t maximal_set;
354 /* Basic block list in postorder. */
355 static int *postorder;
357 /* This structure is used to keep track of statistics on what
358 optimization PRE was able to perform. */
359 static struct
361 /* The number of RHS computations eliminated by PRE. */
362 int eliminations;
364 /* The number of new expressions/temporaries generated by PRE. */
365 int insertions;
367 /* The number of inserts found due to partial anticipation */
368 int pa_insert;
370 /* The number of new PHI nodes added by PRE. */
371 int phis;
373 /* The number of values found constant. */
374 int constified;
376 } pre_stats;
378 static bool do_partial_partial;
379 static tree bitmap_find_leader (bitmap_set_t, tree);
380 static void bitmap_value_insert_into_set (bitmap_set_t, tree);
381 static void bitmap_value_replace_in_set (bitmap_set_t, tree);
382 static void bitmap_set_copy (bitmap_set_t, bitmap_set_t);
383 static bool bitmap_set_contains_value (bitmap_set_t, tree);
384 static void bitmap_insert_into_set (bitmap_set_t, tree);
385 static bitmap_set_t bitmap_set_new (void);
386 static tree create_expression_by_pieces (basic_block, tree, tree);
387 static tree find_or_generate_expression (basic_block, tree, tree);
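/* A hedged usage sketch of the value/expression distinction described in
   the "Representation of sets" comment above, written against routines
   forward-declared here.  The helper name is illustrative and not used
   elsewhere in the pass.  */

static tree
value_leader_sketch (bitmap_set_t set, tree expr)
{
  tree val = get_value_handle (expr);

  /* Record EXPR in SET unless some expression with the same value is
     already present.  */
  bitmap_value_insert_into_set (set, expr);

  /* Ask for a leader, i.e. any expression in SET that computes VAL.  */
  return bitmap_find_leader (set, val);
}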
389 /* We can add and remove elements and entries to and from sets
390 and hash tables, so we use alloc pools for them. */
392 static alloc_pool bitmap_set_pool;
393 static alloc_pool binary_node_pool;
394 static alloc_pool unary_node_pool;
395 static alloc_pool reference_node_pool;
396 static alloc_pool comparison_node_pool;
397 static alloc_pool modify_expr_node_pool;
398 static bitmap_obstack grand_bitmap_obstack;
400 /* We can't use allocation pools to hold temporary CALL_EXPR objects, since
401 they are not of fixed size. Instead, use an obstack. */
403 static struct obstack temp_call_expr_obstack;
406 /* To avoid adding 300 temporary variables when we only need one, we
407 only create one temporary variable, on demand, and build ssa names
408 off that. We do have to change the variable if the types don't
409 match the current variable's type. */
410 static tree pretemp;
411 static tree storetemp;
412 static tree prephitemp;
414 /* Set of blocks with statements that have had their EH information
415 cleaned up. */
416 static bitmap need_eh_cleanup;
418 /* Which expressions have been seen during a given phi translation. */
419 static bitmap seen_during_translate;
421 /* The phi_translate_table caches phi translations for a given
422 expression and predecessor. */
424 static htab_t phi_translate_table;
426 /* A tuple {e, pred, vuses} -> v, used to cache phi translations in the
427 phi_translate_table. */
429 typedef struct expr_pred_trans_d
431 /* The expression. */
432 tree e;
434 /* The predecessor block along which we translated the expression. */
435 basic_block pred;
437 /* vuses associated with the expression. */
438 VEC (tree, gc) *vuses;
440 /* The value that resulted from the translation. */
441 tree v;
443 /* The hashcode for the expression, pred pair. This is cached for
444 speed reasons. */
445 hashval_t hashcode;
446 } *expr_pred_trans_t;
447 typedef const struct expr_pred_trans_d *const_expr_pred_trans_t;
449 /* Return the hash value for a phi translation table entry. */
451 static hashval_t
452 expr_pred_trans_hash (const void *p)
454 const_expr_pred_trans_t const ve = (const_expr_pred_trans_t) p;
455 return ve->hashcode;
458 /* Return true if two phi translation table entries are the same.
459 P1 and P2 should point to the expr_pred_trans_t's to be compared.*/
461 static int
462 expr_pred_trans_eq (const void *p1, const void *p2)
464 const_expr_pred_trans_t const ve1 = (const_expr_pred_trans_t) p1;
465 const_expr_pred_trans_t const ve2 = (const_expr_pred_trans_t) p2;
466 basic_block b1 = ve1->pred;
467 basic_block b2 = ve2->pred;
468 int i;
469 tree vuse1;
471 /* If they are not translations for the same basic block, they can't
472 be equal. */
473 if (b1 != b2)
474 return false;
477 /* If they are for the same basic block, determine if the
478 expressions are equal. */
479 if (!expressions_equal_p (ve1->e, ve2->e))
480 return false;
482 /* Make sure the vuses are equivalent. */
483 if (ve1->vuses == ve2->vuses)
484 return true;
486 if (VEC_length (tree, ve1->vuses) != VEC_length (tree, ve2->vuses))
487 return false;
489 for (i = 0; VEC_iterate (tree, ve1->vuses, i, vuse1); i++)
491 if (VEC_index (tree, ve2->vuses, i) != vuse1)
492 return false;
495 return true;
498 /* Search in the phi translation table for the translation of
499 expression E in basic block PRED with vuses VUSES.
500 Return the translated value, if found, NULL otherwise. */
502 static inline tree
503 phi_trans_lookup (tree e, basic_block pred, VEC (tree, gc) *vuses)
505 void **slot;
506 struct expr_pred_trans_d ept;
508 ept.e = e;
509 ept.pred = pred;
510 ept.vuses = vuses;
511 ept.hashcode = iterative_hash_expr (e, (unsigned long) pred);
512 slot = htab_find_slot_with_hash (phi_translate_table, &ept, ept.hashcode,
513 NO_INSERT);
514 if (!slot)
515 return NULL;
516 else
517 return ((expr_pred_trans_t) *slot)->v;
521 /* Add the tuple mapping from {expression E, basic block PRED, vuses VUSES} to
522 value V, to the phi translation table. */
524 static inline void
525 phi_trans_add (tree e, tree v, basic_block pred, VEC (tree, gc) *vuses)
527 void **slot;
528 expr_pred_trans_t new_pair = XNEW (struct expr_pred_trans_d);
529 new_pair->e = e;
530 new_pair->pred = pred;
531 new_pair->vuses = vuses;
532 new_pair->v = v;
533 new_pair->hashcode = iterative_hash_expr (e, (unsigned long) pred);
534 slot = htab_find_slot_with_hash (phi_translate_table, new_pair,
535 new_pair->hashcode, INSERT);
536 if (*slot)
537 free (*slot);
538 *slot = (void *) new_pair;
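/* A hedged sketch of the intended cache discipline for the two routines
   above (the real caller is phi_translate_1, further down): consult the
   cache first and record a freshly computed translation on a miss.  The
   function name is illustrative, and TRANSLATED stands in for the value
   the caller would otherwise have to compute.  */

static tree
phi_trans_cache_sketch (tree e, tree translated, basic_block pred,
                        VEC (tree, gc) *vuses)
{
  tree cached = phi_trans_lookup (e, pred, vuses);

  if (cached)
    return cached;

  phi_trans_add (e, translated, pred, vuses);
  return translated;
}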
542 /* Return true if V is a value expression that represents itself.
543 In our world, this is *only* non-value handles. */
545 static inline bool
546 constant_expr_p (tree v)
548 return TREE_CODE (v) != VALUE_HANDLE &&
549 (TREE_CODE (v) == FIELD_DECL || is_gimple_min_invariant (v));
552 /* Add expression E to the expression set of value V. */
554 void
555 add_to_value (tree v, tree e)
557 /* Constants have no expression sets. */
558 if (constant_expr_p (v))
559 return;
561 if (VALUE_HANDLE_EXPR_SET (v) == NULL)
562 VALUE_HANDLE_EXPR_SET (v) = bitmap_set_new ();
564 bitmap_insert_into_set (VALUE_HANDLE_EXPR_SET (v), e);
567 /* Create a new bitmap set and return it. */
569 static bitmap_set_t
570 bitmap_set_new (void)
572 bitmap_set_t ret = (bitmap_set_t) pool_alloc (bitmap_set_pool);
573 ret->expressions = BITMAP_ALLOC (&grand_bitmap_obstack);
574 ret->values = BITMAP_ALLOC (&grand_bitmap_obstack);
575 return ret;
578 /* Remove an expression EXPR from a bitmapped set. */
580 static void
581 bitmap_remove_from_set (bitmap_set_t set, tree expr)
583 tree val = get_value_handle (expr);
585 gcc_assert (val);
586 if (!constant_expr_p (val))
588 bitmap_clear_bit (set->values, VALUE_HANDLE_ID (val));
589 bitmap_clear_bit (set->expressions, get_expression_id (expr));
593 /* Insert an expression EXPR into a bitmapped set. */
595 static void
596 bitmap_insert_into_set (bitmap_set_t set, tree expr)
598 tree val = get_value_handle (expr);
600 gcc_assert (val);
601 if (!constant_expr_p (val))
603 bitmap_set_bit (set->values, VALUE_HANDLE_ID (val));
604 bitmap_set_bit (set->expressions, get_or_alloc_expression_id (expr));
608 /* Copy a bitmapped set ORIG, into bitmapped set DEST. */
610 static void
611 bitmap_set_copy (bitmap_set_t dest, bitmap_set_t orig)
613 bitmap_copy (dest->expressions, orig->expressions);
614 bitmap_copy (dest->values, orig->values);
618 /* Free memory used up by SET. */
619 static void
620 bitmap_set_free (bitmap_set_t set)
622 BITMAP_FREE (set->expressions);
623 BITMAP_FREE (set->values);
627 /* A comparison function for use in qsort to top sort a bitmap set. Simply
628 subtracts value handle ids, since they are created in topo-order. */
630 static int
631 vh_compare (const void *pa, const void *pb)
633 const tree vha = get_value_handle (*((const tree *)pa));
634 const tree vhb = get_value_handle (*((const tree *)pb));
636 /* This can happen when we constify things. */
637 if (constant_expr_p (vha))
639 if (constant_expr_p (vhb))
640 return -1;
641 return -1;
643 else if (constant_expr_p (vhb))
644 return 1;
645 return VALUE_HANDLE_ID (vha) - VALUE_HANDLE_ID (vhb);
648 /* Generate a topologically ordered array of bitmap set SET. */
650 static VEC(tree, heap) *
651 sorted_array_from_bitmap_set (bitmap_set_t set)
653 unsigned int i;
654 bitmap_iterator bi;
655 VEC(tree, heap) *result = NULL;
657 FOR_EACH_EXPR_ID_IN_SET (set, i, bi)
658 VEC_safe_push (tree, heap, result, expression_for_id (i));
660 qsort (VEC_address (tree, result), VEC_length (tree, result),
661 sizeof (tree), vh_compare);
663 return result;
666 /* Perform bitmapped set operation DEST &= ORIG. */
668 static void
669 bitmap_set_and (bitmap_set_t dest, bitmap_set_t orig)
671 bitmap_iterator bi;
672 unsigned int i;
674 if (dest != orig)
676 bitmap temp = BITMAP_ALLOC (&grand_bitmap_obstack);
678 bitmap_and_into (dest->values, orig->values);
679 bitmap_copy (temp, dest->expressions);
680 EXECUTE_IF_SET_IN_BITMAP (temp, 0, i, bi)
682 tree expr = expression_for_id (i);
683 tree val = get_value_handle (expr);
684 if (!bitmap_bit_p (dest->values, VALUE_HANDLE_ID (val)))
685 bitmap_clear_bit (dest->expressions, i);
687 BITMAP_FREE (temp);
691 /* Subtract all values and expressions contained in ORIG from DEST. */
693 static bitmap_set_t
694 bitmap_set_subtract (bitmap_set_t dest, bitmap_set_t orig)
696 bitmap_set_t result = bitmap_set_new ();
697 bitmap_iterator bi;
698 unsigned int i;
700 bitmap_and_compl (result->expressions, dest->expressions,
701 orig->expressions);
703 FOR_EACH_EXPR_ID_IN_SET (result, i, bi)
705 tree expr = expression_for_id (i);
706 tree val = get_value_handle (expr);
707 bitmap_set_bit (result->values, VALUE_HANDLE_ID (val));
710 return result;
713 /* Subtract all the values in bitmap set B from bitmap set A. */
715 static void
716 bitmap_set_subtract_values (bitmap_set_t a, bitmap_set_t b)
718 unsigned int i;
719 bitmap_iterator bi;
720 bitmap temp = BITMAP_ALLOC (&grand_bitmap_obstack);
722 bitmap_copy (temp, a->expressions);
723 EXECUTE_IF_SET_IN_BITMAP (temp, 0, i, bi)
725 tree expr = expression_for_id (i);
726 if (bitmap_set_contains_value (b, get_value_handle (expr)))
727 bitmap_remove_from_set (a, expr);
729 BITMAP_FREE (temp);
733 /* Return true if bitmapped set SET contains the value VAL. */
735 static bool
736 bitmap_set_contains_value (bitmap_set_t set, tree val)
738 if (constant_expr_p (val))
739 return true;
741 if (!set || bitmap_empty_p (set->expressions))
742 return false;
744 return bitmap_bit_p (set->values, VALUE_HANDLE_ID (val));
747 static inline bool
748 bitmap_set_contains_expr (bitmap_set_t set, tree expr)
750 return bitmap_bit_p (set->expressions, get_expression_id (expr));
753 /* Replace an instance of value LOOKFOR with expression EXPR in SET. */
755 static void
756 bitmap_set_replace_value (bitmap_set_t set, tree lookfor, tree expr)
758 bitmap_set_t exprset;
759 unsigned int i;
760 bitmap_iterator bi;
762 if (constant_expr_p (lookfor))
763 return;
765 if (!bitmap_set_contains_value (set, lookfor))
766 return;
768 /* The number of expressions having a given value is usually
769 significantly less than the total number of expressions in SET.
770 Thus, rather than check, for each expression in SET, whether it
771 has the value LOOKFOR, we walk the reverse mapping that tells us
772 what expressions have a given value, and see if any of those
773 expressions are in our set. For large testcases, this is about
774 5-10x faster than walking the bitmap. If this is somehow a
775 significant loss for some cases, we can choose which set to walk
776 based on the set size. */
777 exprset = VALUE_HANDLE_EXPR_SET (lookfor);
778 FOR_EACH_EXPR_ID_IN_SET (exprset, i, bi)
780 if (bitmap_bit_p (set->expressions, i))
782 bitmap_clear_bit (set->expressions, i);
783 bitmap_set_bit (set->expressions, get_expression_id (expr));
784 return;
789 /* Return true if two bitmap sets are equal. */
791 static bool
792 bitmap_set_equal (bitmap_set_t a, bitmap_set_t b)
794 return bitmap_equal_p (a->values, b->values);
797 /* Replace an instance of EXPR's VALUE with EXPR in SET if it exists,
798 and add it otherwise. */
800 static void
801 bitmap_value_replace_in_set (bitmap_set_t set, tree expr)
803 tree val = get_value_handle (expr);
805 if (bitmap_set_contains_value (set, val))
806 bitmap_set_replace_value (set, val, expr);
807 else
808 bitmap_insert_into_set (set, expr);
811 /* Insert EXPR into SET if EXPR's value is not already present in
812 SET. */
814 static void
815 bitmap_value_insert_into_set (bitmap_set_t set, tree expr)
817 tree val = get_value_handle (expr);
819 if (constant_expr_p (val))
820 return;
822 if (!bitmap_set_contains_value (set, val))
823 bitmap_insert_into_set (set, expr);
826 /* Print out SET to OUTFILE. */
828 static void
829 print_bitmap_set (FILE *outfile, bitmap_set_t set,
830 const char *setname, int blockindex)
832 fprintf (outfile, "%s[%d] := { ", setname, blockindex);
833 if (set)
835 bool first = true;
836 unsigned i;
837 bitmap_iterator bi;
839 FOR_EACH_EXPR_ID_IN_SET (set, i, bi)
841 tree expr = expression_for_id (i);
843 if (!first)
844 fprintf (outfile, ", ");
845 first = false;
846 print_generic_expr (outfile, expr, 0);
848 fprintf (outfile, " (");
849 print_generic_expr (outfile, get_value_handle (expr), 0);
850 fprintf (outfile, ") ");
853 fprintf (outfile, " }\n");
856 void debug_bitmap_set (bitmap_set_t);
858 void
859 debug_bitmap_set (bitmap_set_t set)
861 print_bitmap_set (stderr, set, "debug", 0);
864 /* Print out the expressions that have VAL to OUTFILE. */
866 void
867 print_value_expressions (FILE *outfile, tree val)
869 if (VALUE_HANDLE_EXPR_SET (val))
871 char s[10];
872 sprintf (s, "VH.%04d", VALUE_HANDLE_ID (val));
873 print_bitmap_set (outfile, VALUE_HANDLE_EXPR_SET (val), s, 0);
878 void
879 debug_value_expressions (tree val)
881 print_value_expressions (stderr, val);
884 /* Return the folded version of T if T, when folded, is a gimple
885 min_invariant. Otherwise, return T. */
887 static tree
888 fully_constant_expression (tree t)
890 tree folded;
891 folded = fold (t);
892 if (folded && is_gimple_min_invariant (folded))
893 return folded;
894 return t;
897 /* Make a temporary copy of a CALL_EXPR object NODE. */
899 static tree
900 temp_copy_call_expr (tree node)
902 return (tree) obstack_copy (&temp_call_expr_obstack, node, tree_size (node));
905 /* Translate the vuses in the VUSES vector backwards through phi nodes
906 in PHIBLOCK, so that they have the value they would have in
907 BLOCK. */
909 static VEC(tree, gc) *
910 translate_vuses_through_block (VEC (tree, gc) *vuses,
911 basic_block phiblock,
912 basic_block block)
914 tree oldvuse;
915 VEC(tree, gc) *result = NULL;
916 int i;
918 for (i = 0; VEC_iterate (tree, vuses, i, oldvuse); i++)
920 tree phi = SSA_NAME_DEF_STMT (oldvuse);
921 if (TREE_CODE (phi) == PHI_NODE
922 && bb_for_stmt (phi) == phiblock)
924 edge e = find_edge (block, bb_for_stmt (phi));
925 if (e)
927 tree def = PHI_ARG_DEF (phi, e->dest_idx);
928 if (def != oldvuse)
930 if (!result)
931 result = VEC_copy (tree, gc, vuses);
932 VEC_replace (tree, result, i, def);
938 /* We avoid creating a new copy of the vuses unless something
939 actually changed, so result can be NULL. */
940 if (result)
942 sort_vuses (result);
943 return result;
945 return vuses;
949 /* Like find_leader, but checks for the value existing in SET1 *or*
950 SET2. This is used to avoid making a set consisting of the union
951 of PA_IN and ANTIC_IN during insert. */
953 static inline tree
954 find_leader_in_sets (tree expr, bitmap_set_t set1, bitmap_set_t set2)
956 tree result;
958 result = bitmap_find_leader (set1, expr);
959 if (!result && set2)
960 result = bitmap_find_leader (set2, expr);
961 return result;
964 /* Translate EXPR using phis in PHIBLOCK, so that it has the values of
965 the phis in PRED. SEEN is a bitmap saying which expressions we have
966 translated since we started translation of the toplevel expression.
967 Return NULL if we can't find a leader for each part of the
968 translated expression. */
970 static tree
971 phi_translate_1 (tree expr, bitmap_set_t set1, bitmap_set_t set2,
972 basic_block pred, basic_block phiblock, bitmap seen)
974 tree phitrans = NULL;
975 tree oldexpr = expr;
977 if (expr == NULL)
978 return NULL;
980 if (constant_expr_p (expr))
981 return expr;
983 /* Phi translations of a given expression don't change. */
984 if (EXPR_P (expr) || GIMPLE_STMT_P (expr))
986 phitrans = phi_trans_lookup (expr, pred, get_expression_vuses (expr));
988 else
989 phitrans = phi_trans_lookup (expr, pred, NULL);
991 if (phitrans)
992 return phitrans;
994 /* Prevent cycles when we have recursively dependent leaders. This
995 can only happen when phi translating the maximal set. */
996 if (seen)
998 unsigned int expr_id = get_expression_id (expr);
999 if (bitmap_bit_p (seen, expr_id))
1000 return NULL;
1001 bitmap_set_bit (seen, expr_id);
1004 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
1006 case tcc_expression:
1007 return NULL;
1009 case tcc_vl_exp:
1011 if (TREE_CODE (expr) != CALL_EXPR)
1012 return NULL;
1013 else
1015 tree oldfn = CALL_EXPR_FN (expr);
1016 tree oldsc = CALL_EXPR_STATIC_CHAIN (expr);
1017 tree newfn, newsc = NULL;
1018 tree newexpr = NULL_TREE;
1019 bool invariantarg = false;
1020 int i, nargs;
1021 VEC (tree, gc) *vuses = get_expression_vuses (expr);
1022 VEC (tree, gc) *tvuses;
1024 newfn = phi_translate_1 (find_leader_in_sets (oldfn, set1, set2),
1025 set1, set2, pred, phiblock, seen);
1026 if (newfn == NULL)
1027 return NULL;
1028 if (newfn != oldfn)
1030 newexpr = temp_copy_call_expr (expr);
1031 CALL_EXPR_FN (newexpr) = get_value_handle (newfn);
1033 if (oldsc)
1035 newsc = phi_translate_1 (find_leader_in_sets (oldsc, set1, set2),
1036 set1, set2, pred, phiblock, seen);
1037 if (newsc == NULL)
1038 return NULL;
1039 if (newsc != oldsc)
1041 if (!newexpr)
1042 newexpr = temp_copy_call_expr (expr);
1043 CALL_EXPR_STATIC_CHAIN (newexpr) = get_value_handle (newsc);
1047 /* phi translate the argument list piece by piece. */
1048 nargs = call_expr_nargs (expr);
1049 for (i = 0; i < nargs; i++)
1051 tree oldval = CALL_EXPR_ARG (expr, i);
1052 tree newval;
1053 if (oldval)
1055 /* This may seem like a weird place for this
1056 check, but it's actually the easiest place to
1057 do it. We can't do it lower on in the
1058 recursion because it's valid for pieces of a
1059 component ref to be of AGGREGATE_TYPE, as long
1060 as the outermost one is not.
1061 To avoid *that* case, we have a check for
1062 AGGREGATE_TYPE_P in insert_aux. However, that
1063 check will *not* catch this case because here
1064 it occurs in the argument list. */
1065 if (AGGREGATE_TYPE_P (TREE_TYPE (oldval)))
1066 return NULL;
1067 oldval = find_leader_in_sets (oldval, set1, set2);
1068 newval = phi_translate_1 (oldval, set1, set2, pred,
1069 phiblock, seen);
1070 if (newval == NULL)
1071 return NULL;
1072 if (newval != oldval)
1074 invariantarg |= is_gimple_min_invariant (newval);
1075 if (!newexpr)
1076 newexpr = temp_copy_call_expr (expr);
1077 CALL_EXPR_ARG (newexpr, i) = get_value_handle (newval);
1082 /* In case of new invariant args we might try to fold the call
1083 again. */
1084 if (invariantarg && !newsc)
1086 tree tmp1 = build_call_array (TREE_TYPE (expr),
1087 newfn, call_expr_nargs (newexpr),
1088 CALL_EXPR_ARGP (newexpr));
1089 tree tmp2 = fold (tmp1);
1090 if (tmp2 != tmp1)
1092 STRIP_TYPE_NOPS (tmp2);
1093 if (is_gimple_min_invariant (tmp2))
1094 return tmp2;
1098 tvuses = translate_vuses_through_block (vuses, phiblock, pred);
1099 if (vuses != tvuses && ! newexpr)
1100 newexpr = temp_copy_call_expr (expr);
1102 if (newexpr)
1104 newexpr->base.ann = NULL;
1105 vn_lookup_or_add_with_vuses (newexpr, tvuses);
1106 expr = newexpr;
1107 set_expression_vuses (newexpr, tvuses);
1109 phi_trans_add (oldexpr, expr, pred, tvuses);
1112 return expr;
1114 case tcc_declaration:
1116 VEC (tree, gc) * oldvuses = NULL;
1117 VEC (tree, gc) * newvuses = NULL;
1119 oldvuses = get_expression_vuses (expr);
1120 if (oldvuses)
1121 newvuses = translate_vuses_through_block (oldvuses, phiblock,
1122 pred);
1124 if (oldvuses != newvuses)
1126 vn_lookup_or_add_with_vuses (expr, newvuses);
1127 set_expression_vuses (expr, newvuses);
1129 phi_trans_add (oldexpr, expr, pred, newvuses);
1131 return expr;
1133 case tcc_reference:
1135 tree oldop0 = TREE_OPERAND (expr, 0);
1136 tree oldop1 = NULL;
1137 tree newop0;
1138 tree newop1 = NULL;
1139 tree oldop2 = NULL;
1140 tree newop2 = NULL;
1141 tree oldop3 = NULL;
1142 tree newop3 = NULL;
1143 tree newexpr;
1144 VEC (tree, gc) * oldvuses = NULL;
1145 VEC (tree, gc) * newvuses = NULL;
1147 if (TREE_CODE (expr) != INDIRECT_REF
1148 && TREE_CODE (expr) != COMPONENT_REF
1149 && TREE_CODE (expr) != ARRAY_REF)
1150 return NULL;
1152 oldop0 = find_leader_in_sets (oldop0, set1, set2);
1153 newop0 = phi_translate_1 (oldop0, set1, set2, pred, phiblock, seen);
1154 if (newop0 == NULL)
1155 return NULL;
1157 if (TREE_CODE (expr) == ARRAY_REF)
1159 oldop1 = TREE_OPERAND (expr, 1);
1160 oldop1 = find_leader_in_sets (oldop1, set1, set2);
1161 newop1 = phi_translate_1 (oldop1, set1, set2, pred, phiblock, seen);
1163 if (newop1 == NULL)
1164 return NULL;
1166 oldop2 = TREE_OPERAND (expr, 2);
1167 if (oldop2)
1169 oldop2 = find_leader_in_sets (oldop2, set1, set2);
1170 newop2 = phi_translate_1 (oldop2, set1, set2, pred, phiblock, seen);
1172 if (newop2 == NULL)
1173 return NULL;
1175 oldop3 = TREE_OPERAND (expr, 3);
1176 if (oldop3)
1178 oldop3 = find_leader_in_sets (oldop3, set1, set2);
1179 newop3 = phi_translate_1 (oldop3, set1, set2, pred, phiblock, seen);
1181 if (newop3 == NULL)
1182 return NULL;
1186 oldvuses = get_expression_vuses (expr);
1187 if (oldvuses)
1188 newvuses = translate_vuses_through_block (oldvuses, phiblock,
1189 pred);
1191 if (newop0 != oldop0 || newvuses != oldvuses
1192 || newop1 != oldop1
1193 || newop2 != oldop2
1194 || newop3 != oldop3)
1196 tree t;
1198 newexpr = (tree) pool_alloc (reference_node_pool);
1199 memcpy (newexpr, expr, tree_size (expr));
1200 TREE_OPERAND (newexpr, 0) = get_value_handle (newop0);
1201 if (TREE_CODE (expr) == ARRAY_REF)
1203 TREE_OPERAND (newexpr, 1) = get_value_handle (newop1);
1204 if (newop2)
1205 TREE_OPERAND (newexpr, 2) = get_value_handle (newop2);
1206 if (newop3)
1207 TREE_OPERAND (newexpr, 3) = get_value_handle (newop3);
1210 t = fully_constant_expression (newexpr);
1212 if (t != newexpr)
1214 pool_free (reference_node_pool, newexpr);
1215 newexpr = t;
1217 else
1219 newexpr->base.ann = NULL;
1220 vn_lookup_or_add_with_vuses (newexpr, newvuses);
1221 set_expression_vuses (newexpr, newvuses);
1223 expr = newexpr;
1225 phi_trans_add (oldexpr, expr, pred, newvuses);
1227 return expr;
1228 break;
1230 case tcc_binary:
1231 case tcc_comparison:
1233 tree oldop1 = TREE_OPERAND (expr, 0);
1234 tree oldval1 = oldop1;
1235 tree oldop2 = TREE_OPERAND (expr, 1);
1236 tree oldval2 = oldop2;
1237 tree newop1;
1238 tree newop2;
1239 tree newexpr;
1241 oldop1 = find_leader_in_sets (oldop1, set1, set2);
1242 newop1 = phi_translate_1 (oldop1, set1, set2, pred, phiblock, seen);
1243 if (newop1 == NULL)
1244 return NULL;
1246 oldop2 = find_leader_in_sets (oldop2, set1, set2);
1247 newop2 = phi_translate_1 (oldop2, set1, set2, pred, phiblock, seen);
1248 if (newop2 == NULL)
1249 return NULL;
1250 if (newop1 != oldop1 || newop2 != oldop2)
1252 tree t;
1253 newexpr = (tree) pool_alloc (binary_node_pool);
1254 memcpy (newexpr, expr, tree_size (expr));
1255 TREE_OPERAND (newexpr, 0) = newop1 == oldop1 ? oldval1 : get_value_handle (newop1);
1256 TREE_OPERAND (newexpr, 1) = newop2 == oldop2 ? oldval2 : get_value_handle (newop2);
1257 t = fully_constant_expression (newexpr);
1258 if (t != newexpr)
1260 pool_free (binary_node_pool, newexpr);
1261 newexpr = t;
1263 else
1265 newexpr->base.ann = NULL;
1266 vn_lookup_or_add (newexpr);
1268 expr = newexpr;
1270 phi_trans_add (oldexpr, expr, pred, NULL);
1272 return expr;
1274 case tcc_unary:
1276 tree oldop1 = TREE_OPERAND (expr, 0);
1277 tree newop1;
1278 tree newexpr;
1280 oldop1 = find_leader_in_sets (oldop1, set1, set2);
1281 newop1 = phi_translate_1 (oldop1, set1, set2, pred, phiblock, seen);
1282 if (newop1 == NULL)
1283 return NULL;
1284 if (newop1 != oldop1)
1286 tree t;
1287 newexpr = (tree) pool_alloc (unary_node_pool);
1288 memcpy (newexpr, expr, tree_size (expr));
1289 TREE_OPERAND (newexpr, 0) = get_value_handle (newop1);
1290 t = fully_constant_expression (newexpr);
1291 if (t != newexpr)
1293 pool_free (unary_node_pool, newexpr);
1294 newexpr = t;
1296 else
1298 newexpr->base.ann = NULL;
1299 vn_lookup_or_add (newexpr);
1301 expr = newexpr;
1303 phi_trans_add (oldexpr, expr, pred, NULL);
1305 return expr;
1307 case tcc_exceptional:
1309 tree phi = NULL;
1310 edge e;
1311 tree def_stmt;
1312 gcc_assert (TREE_CODE (expr) == SSA_NAME);
1314 def_stmt = SSA_NAME_DEF_STMT (expr);
1315 if (TREE_CODE (def_stmt) == PHI_NODE
1316 && bb_for_stmt (def_stmt) == phiblock)
1317 phi = def_stmt;
1318 else
1319 return expr;
1321 e = find_edge (pred, bb_for_stmt (phi));
1322 if (e)
1324 tree val;
1325 tree def = PHI_ARG_DEF (phi, e->dest_idx);
1327 if (is_gimple_min_invariant (def))
1328 return def;
1330 if (TREE_CODE (def) == SSA_NAME && ssa_undefined_value_p (def))
1331 return NULL;
1333 val = get_value_handle (def);
1334 gcc_assert (val);
1335 return def;
1338 return expr;
1340 default:
1341 gcc_unreachable ();
1345 /* Translate EXPR using phis in PHIBLOCK, so that it has the values of
1346 the phis in PRED.
1347 Return NULL if we can't find a leader for each part of the
1348 translated expression. */
1350 static tree
1351 phi_translate (tree expr, bitmap_set_t set1, bitmap_set_t set2,
1352 basic_block pred, basic_block phiblock)
1354 bitmap_clear (seen_during_translate);
1355 return phi_translate_1 (expr, set1, set2, pred, phiblock,
1356 seen_during_translate);
1359 /* For each expression in SET, translate the value handles through phi nodes
1360 in PHIBLOCK using edge PHIBLOCK->PRED, and store the resulting
1361 expressions in DEST. */
1363 static void
1364 phi_translate_set (bitmap_set_t dest, bitmap_set_t set, basic_block pred,
1365 basic_block phiblock)
1367 VEC (tree, heap) *exprs;
1368 tree expr;
1369 int i;
1371 if (!phi_nodes (phiblock))
1373 bitmap_set_copy (dest, set);
1374 return;
1377 exprs = sorted_array_from_bitmap_set (set);
1378 for (i = 0; VEC_iterate (tree, exprs, i, expr); i++)
1380 tree translated;
1381 translated = phi_translate (expr, set, NULL, pred, phiblock);
1383 /* Don't add constants or empty translations to the cache, since
1384 we won't look them up that way, or use the result, anyway. */
1385 if (translated && !is_gimple_min_invariant (translated))
1387 phi_trans_add (expr, translated, pred,
1388 get_expression_vuses (translated));
1391 if (translated != NULL)
1392 bitmap_value_insert_into_set (dest, translated);
1394 VEC_free (tree, heap, exprs);
1397 /* Find the leader for a value (i.e., the name representing that
1398 value) in a given set, and return it. Return NULL if no leader is
1399 found. */
1401 static tree
1402 bitmap_find_leader (bitmap_set_t set, tree val)
1404 if (val == NULL)
1405 return NULL;
1407 if (constant_expr_p (val))
1408 return val;
1410 if (bitmap_set_contains_value (set, val))
1412 /* Rather than walk the entire bitmap of expressions, and see
1413 whether any of them has the value we are looking for, we look
1414 at the reverse mapping, which tells us the set of expressions
1415 that have a given value (IE value->expressions with that
1416 value) and see if any of those expressions are in our set.
1417 The number of expressions per value is usually significantly
1418 less than the number of expressions in the set. In fact, for
1419 large testcases, doing it this way is roughly 5-10x faster
1420 than walking the bitmap.
1421 If this is somehow a significant loss for some cases, we can
1422 choose which set to walk based on which set is smaller. */
1423 unsigned int i;
1424 bitmap_iterator bi;
1425 bitmap_set_t exprset = VALUE_HANDLE_EXPR_SET (val);
1427 EXECUTE_IF_AND_IN_BITMAP (exprset->expressions,
1428 set->expressions, 0, i, bi)
1429 return expression_for_id (i);
1431 return NULL;
1434 /* Determine if EXPR, a memory expression, is ANTIC_IN at the top of
1435 BLOCK by seeing if it is not killed in the block. Note that we are
1436 only determining whether there is a store that kills it. Because
1437 of the order in which clean iterates over values, we are guaranteed
1438 that altered operands will have caused us to be eliminated from the
1439 ANTIC_IN set already. */
1441 static bool
1442 value_dies_in_block_x (tree expr, basic_block block)
1444 int i;
1445 tree vuse;
1446 VEC (tree, gc) *vuses = get_expression_vuses (expr);
1448 /* Conservatively, a value dies if its vuses are defined in this
1449 block, unless they come from phi nodes (which are merge operations,
1450 rather than stores). */
1451 for (i = 0; VEC_iterate (tree, vuses, i, vuse); i++)
1453 tree def = SSA_NAME_DEF_STMT (vuse);
1455 if (bb_for_stmt (def) != block)
1456 continue;
1457 if (TREE_CODE (def) == PHI_NODE)
1458 continue;
1459 return true;
1461 return false;
1464 /* Determine if the expression EXPR is valid in SET1 U SET2.
1465 ONLY SET2 CAN BE NULL.
1466 This means that we have a leader for each part of the expression
1467 (if it consists of values), or the expression is an SSA_NAME.
1468 For loads/calls, we also see if the vuses are killed in this block.
1470 NB: We never should run into a case where we have SSA_NAME +
1471 SSA_NAME or SSA_NAME + value. The sets valid_in_sets is called on,
1472 the ANTIC sets, will only ever have SSA_NAMEs or value expressions
1473 (IE VALUE1 + VALUE2, *VALUE1, VALUE1 < VALUE2).  */
1475 #define union_contains_value(SET1, SET2, VAL) \
1476 (bitmap_set_contains_value ((SET1), (VAL)) \
1477 || ((SET2) && bitmap_set_contains_value ((SET2), (VAL))))
1479 static bool
1480 valid_in_sets (bitmap_set_t set1, bitmap_set_t set2, tree expr,
1481 basic_block block)
1483 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
1485 case tcc_binary:
1486 case tcc_comparison:
1488 tree op1 = TREE_OPERAND (expr, 0);
1489 tree op2 = TREE_OPERAND (expr, 1);
1491 return union_contains_value (set1, set2, op1)
1492 && union_contains_value (set1, set2, op2);
1495 case tcc_unary:
1497 tree op1 = TREE_OPERAND (expr, 0);
1498 return union_contains_value (set1, set2, op1);
1501 case tcc_expression:
1502 return false;
1504 case tcc_vl_exp:
1506 if (TREE_CODE (expr) == CALL_EXPR)
1508 tree fn = CALL_EXPR_FN (expr);
1509 tree sc = CALL_EXPR_STATIC_CHAIN (expr);
1510 tree arg;
1511 call_expr_arg_iterator iter;
1513 /* Check the non-argument operands first. */
1514 if (!union_contains_value (set1, set2, fn)
1515 || (sc && !union_contains_value (set1, set2, sc)))
1516 return false;
1518 /* Now check the operands. */
1519 FOR_EACH_CALL_EXPR_ARG (arg, iter, expr)
1521 if (!union_contains_value (set1, set2, arg))
1522 return false;
1524 return !value_dies_in_block_x (expr, block);
1526 return false;
1529 case tcc_reference:
1531 if (TREE_CODE (expr) == INDIRECT_REF
1532 || TREE_CODE (expr) == COMPONENT_REF
1533 || TREE_CODE (expr) == ARRAY_REF)
1535 tree op0 = TREE_OPERAND (expr, 0);
1536 gcc_assert (is_gimple_min_invariant (op0)
1537 || TREE_CODE (op0) == VALUE_HANDLE);
1538 if (!union_contains_value (set1, set2, op0))
1539 return false;
1540 if (TREE_CODE (expr) == ARRAY_REF)
1542 tree op1 = TREE_OPERAND (expr, 1);
1543 tree op2 = TREE_OPERAND (expr, 2);
1544 tree op3 = TREE_OPERAND (expr, 3);
1545 gcc_assert (is_gimple_min_invariant (op1)
1546 || TREE_CODE (op1) == VALUE_HANDLE);
1547 if (!union_contains_value (set1, set2, op1))
1548 return false;
1549 gcc_assert (!op2 || is_gimple_min_invariant (op2)
1550 || TREE_CODE (op2) == VALUE_HANDLE);
1551 if (op2
1552 && !union_contains_value (set1, set2, op2))
1553 return false;
1554 gcc_assert (!op3 || is_gimple_min_invariant (op3)
1555 || TREE_CODE (op3) == VALUE_HANDLE);
1556 if (op3
1557 && !union_contains_value (set1, set2, op3))
1558 return false;
1560 return !value_dies_in_block_x (expr, block);
1563 return false;
1565 case tcc_exceptional:
1567 gcc_assert (TREE_CODE (expr) == SSA_NAME);
1568 return bitmap_set_contains_expr (AVAIL_OUT (block), expr);
1571 case tcc_declaration:
1572 return !value_dies_in_block_x (expr, block);
1574 default:
1575 /* No other cases should be encountered. */
1576 gcc_unreachable ();
1580 /* Clean the set of expressions that are no longer valid in SET1 or
1581 SET2. This means expressions that are made up of values we have no
1582 leaders for in SET1 or SET2. This version is used for partial
1583 anticipation, which means it is not valid in either ANTIC_IN or
1584 PA_IN. */
1586 static void
1587 dependent_clean (bitmap_set_t set1, bitmap_set_t set2, basic_block block)
1589 VEC (tree, heap) *exprs = sorted_array_from_bitmap_set (set1);
1590 tree expr;
1591 int i;
1593 for (i = 0; VEC_iterate (tree, exprs, i, expr); i++)
1595 if (!valid_in_sets (set1, set2, expr, block))
1596 bitmap_remove_from_set (set1, expr);
1598 VEC_free (tree, heap, exprs);
1601 /* Clean the set of expressions that are no longer valid in SET. This
1602 means expressions that are made up of values we have no leaders for
1603 in SET. */
1605 static void
1606 clean (bitmap_set_t set, basic_block block)
1608 VEC (tree, heap) *exprs = sorted_array_from_bitmap_set (set);
1609 tree expr;
1610 int i;
1612 for (i = 0; VEC_iterate (tree, exprs, i, expr); i++)
1614 if (!valid_in_sets (set, NULL, expr, block))
1615 bitmap_remove_from_set (set, expr);
1617 VEC_free (tree, heap, exprs);
1620 static sbitmap has_abnormal_preds;
1622 /* List of blocks that may have changed during ANTIC computation and
1623 thus need to be iterated over. */
1625 static sbitmap changed_blocks;
1627 /* Decide whether to defer a block for a later iteration, or PHI
1628 translate SOURCE to DEST using phis in PHIBLOCK. Return false if we
1629 should defer the block, and true if we processed it. */
1631 static bool
1632 defer_or_phi_translate_block (bitmap_set_t dest, bitmap_set_t source,
1633 basic_block block, basic_block phiblock)
1635 if (!BB_VISITED (phiblock))
1637 SET_BIT (changed_blocks, block->index);
1638 BB_VISITED (block) = 0;
1639 BB_DEFERRED (block) = 1;
1640 return false;
1642 else
1643 phi_translate_set (dest, source, block, phiblock);
1644 return true;
1647 /* Compute the ANTIC set for BLOCK.
1649 If succs(BLOCK) > 1 then
1650 ANTIC_OUT[BLOCK] = intersection of ANTIC_IN[b] for all succ(BLOCK)
1651 else if succs(BLOCK) == 1 then
1652 ANTIC_OUT[BLOCK] = phi_translate (ANTIC_IN[succ(BLOCK)])
1654 ANTIC_IN[BLOCK] = clean(ANTIC_OUT[BLOCK] U EXP_GEN[BLOCK] - TMP_GEN[BLOCK])  */
1657 static bool
1658 compute_antic_aux (basic_block block, bool block_has_abnormal_pred_edge)
1660 bool changed = false;
1661 bitmap_set_t S, old, ANTIC_OUT;
1662 bitmap_iterator bi;
1663 unsigned int bii;
1664 edge e;
1665 edge_iterator ei;
1667 old = ANTIC_OUT = S = NULL;
1668 BB_VISITED (block) = 1;
1670 /* If any edges from predecessors are abnormal, antic_in is empty,
1671 so do nothing. */
1672 if (block_has_abnormal_pred_edge)
1673 goto maybe_dump_sets;
1675 old = ANTIC_IN (block);
1676 ANTIC_OUT = bitmap_set_new ();
1678 /* If the block has no successors, ANTIC_OUT is empty. */
1679 if (EDGE_COUNT (block->succs) == 0)
1681 /* If we have one successor, we could have some phi nodes to
1682 translate through. */
1683 else if (single_succ_p (block))
1685 basic_block succ_bb = single_succ (block);
1687 /* We trade iterations of the dataflow equations for having to
1688 phi translate the maximal set, which is incredibly slow
1689 (since the maximal set often has 300+ members, even when you
1690 have a small number of blocks).
1691 Basically, we defer the computation of ANTIC for this block
1692 until we have processed its successor, which will inevitably
1693 have a *much* smaller set of values to phi translate once
1694 clean has been run on it.
1695 The cost of doing this is that we technically perform more
1696 iterations; however, they are lower-cost iterations.
1698 Timings for PRE on tramp3d-v4:
1699 without maximal set fix: 11 seconds
1700 with maximal set fix/without deferring: 26 seconds
1701 with maximal set fix/with deferring: 11 seconds  */
1704 if (!defer_or_phi_translate_block (ANTIC_OUT, ANTIC_IN (succ_bb),
1705 block, succ_bb))
1707 changed = true;
1708 goto maybe_dump_sets;
1711 /* If we have multiple successors, we take the intersection of all of
1712 them. Note that in the case of loop exit phi nodes, we may have
1713 phis to translate through. */
1714 else
1716 VEC(basic_block, heap) * worklist;
1717 size_t i;
1718 basic_block bprime, first;
1720 worklist = VEC_alloc (basic_block, heap, EDGE_COUNT (block->succs));
1721 FOR_EACH_EDGE (e, ei, block->succs)
1722 VEC_quick_push (basic_block, worklist, e->dest);
1723 first = VEC_index (basic_block, worklist, 0);
1725 if (phi_nodes (first))
1727 bitmap_set_t from = ANTIC_IN (first);
1729 if (!BB_VISITED (first))
1730 from = maximal_set;
1731 phi_translate_set (ANTIC_OUT, from, block, first);
1733 else
1735 if (!BB_VISITED (first))
1736 bitmap_set_copy (ANTIC_OUT, maximal_set);
1737 else
1738 bitmap_set_copy (ANTIC_OUT, ANTIC_IN (first));
1741 for (i = 1; VEC_iterate (basic_block, worklist, i, bprime); i++)
1743 if (phi_nodes (bprime))
1745 bitmap_set_t tmp = bitmap_set_new ();
1746 bitmap_set_t from = ANTIC_IN (bprime);
1748 if (!BB_VISITED (bprime))
1749 from = maximal_set;
1750 phi_translate_set (tmp, from, block, bprime);
1751 bitmap_set_and (ANTIC_OUT, tmp);
1752 bitmap_set_free (tmp);
1754 else
1756 if (!BB_VISITED (bprime))
1757 bitmap_set_and (ANTIC_OUT, maximal_set);
1758 else
1759 bitmap_set_and (ANTIC_OUT, ANTIC_IN (bprime));
1762 VEC_free (basic_block, heap, worklist);
1765 /* Generate ANTIC_OUT - TMP_GEN. */
1766 S = bitmap_set_subtract (ANTIC_OUT, TMP_GEN (block));
1768 /* Start ANTIC_IN with EXP_GEN - TMP_GEN. */
1769 ANTIC_IN (block) = bitmap_set_subtract (EXP_GEN (block),
1770 TMP_GEN (block));
1772 /* Then union in the ANTIC_OUT - TMP_GEN values,
1773 to get ANTIC_OUT U EXP_GEN - TMP_GEN */
1774 FOR_EACH_EXPR_ID_IN_SET (S, bii, bi)
1775 bitmap_value_insert_into_set (ANTIC_IN (block),
1776 expression_for_id (bii));
1778 clean (ANTIC_IN (block), block);
1780 /* !old->expressions can happen when we deferred a block. */
1781 if (!old->expressions || !bitmap_set_equal (old, ANTIC_IN (block)))
1783 changed = true;
1784 SET_BIT (changed_blocks, block->index);
1785 FOR_EACH_EDGE (e, ei, block->preds)
1786 SET_BIT (changed_blocks, e->src->index);
1788 else
1789 RESET_BIT (changed_blocks, block->index);
1791 maybe_dump_sets:
1792 if (dump_file && (dump_flags & TDF_DETAILS))
1794 if (!BB_DEFERRED (block) || BB_VISITED (block))
1796 if (ANTIC_OUT)
1797 print_bitmap_set (dump_file, ANTIC_OUT, "ANTIC_OUT", block->index);
1799 print_bitmap_set (dump_file, ANTIC_IN (block), "ANTIC_IN",
1800 block->index);
1802 if (S)
1803 print_bitmap_set (dump_file, S, "S", block->index);
1805 else
1807 fprintf (dump_file,
1808 "Block %d was deferred for a future iteration.\n",
1809 block->index);
1812 if (old)
1813 bitmap_set_free (old);
1814 if (S)
1815 bitmap_set_free (S);
1816 if (ANTIC_OUT)
1817 bitmap_set_free (ANTIC_OUT);
1818 return changed;
1821 /* Compute PARTIAL_ANTIC for BLOCK.
1823 If succs(BLOCK) > 1 then
1824 PA_OUT[BLOCK] = value wise union of PA_IN[b] + all ANTIC_IN not
1825 in ANTIC_OUT for all succ(BLOCK)
1826 else if succs(BLOCK) == 1 then
1827 PA_OUT[BLOCK] = phi_translate (PA_IN[succ(BLOCK)])
1829 PA_IN[BLOCK] = dependent_clean(PA_OUT[BLOCK] - TMP_GEN[BLOCK]
1830 - ANTIC_IN[BLOCK])  */
1833 static bool
1834 compute_partial_antic_aux (basic_block block,
1835 bool block_has_abnormal_pred_edge)
1837 bool changed = false;
1838 bitmap_set_t old_PA_IN;
1839 bitmap_set_t PA_OUT;
1840 edge e;
1841 edge_iterator ei;
1842 unsigned long max_pa = PARAM_VALUE (PARAM_MAX_PARTIAL_ANTIC_LENGTH);
1844 old_PA_IN = PA_OUT = NULL;
1846 /* If any edges from predecessors are abnormal, antic_in is empty,
1847 so do nothing. */
1848 if (block_has_abnormal_pred_edge)
1849 goto maybe_dump_sets;
1851 /* If there are too many partially anticipatable values in the
1852 block, phi_translate_set can take an exponential time: stop
1853 before the translation starts. */
1854 if (max_pa
1855 && single_succ_p (block)
1856 && bitmap_count_bits (PA_IN (single_succ (block))->values) > max_pa)
1857 goto maybe_dump_sets;
1859 old_PA_IN = PA_IN (block);
1860 PA_OUT = bitmap_set_new ();
1862 /* If the block has no successors, PA_OUT is empty. */
1863 if (EDGE_COUNT (block->succs) == 0)
1865 /* If we have one successor, we could have some phi nodes to
1866 translate through. Note that we can't phi translate across DFS
1867 back edges in partial antic, because it uses a union operation on
1868 the successors. For recurrences like IVs, we will end up
1869 generating a new value in the set on each go around (i + 3 (VH.1),
1870 VH.1 + 1 (VH.2), VH.2 + 1 (VH.3), etc), forever. */
1871 else if (single_succ_p (block))
1873 basic_block succ = single_succ (block);
1874 if (!(single_succ_edge (block)->flags & EDGE_DFS_BACK))
1875 phi_translate_set (PA_OUT, PA_IN (succ), block, succ);
1877 /* If we have multiple successors, we take the union of all of
1878 them. */
1879 else
1881 VEC(basic_block, heap) * worklist;
1882 size_t i;
1883 basic_block bprime;
1885 worklist = VEC_alloc (basic_block, heap, EDGE_COUNT (block->succs));
1886 FOR_EACH_EDGE (e, ei, block->succs)
1888 if (e->flags & EDGE_DFS_BACK)
1889 continue;
1890 VEC_quick_push (basic_block, worklist, e->dest);
1892 if (VEC_length (basic_block, worklist) > 0)
1894 for (i = 0; VEC_iterate (basic_block, worklist, i, bprime); i++)
1896 unsigned int i;
1897 bitmap_iterator bi;
1899 FOR_EACH_EXPR_ID_IN_SET (ANTIC_IN (bprime), i, bi)
1900 bitmap_value_insert_into_set (PA_OUT,
1901 expression_for_id (i));
1902 if (phi_nodes (bprime))
1904 bitmap_set_t pa_in = bitmap_set_new ();
1905 phi_translate_set (pa_in, PA_IN (bprime), block, bprime);
1906 FOR_EACH_EXPR_ID_IN_SET (pa_in, i, bi)
1907 bitmap_value_insert_into_set (PA_OUT,
1908 expression_for_id (i));
1909 bitmap_set_free (pa_in);
1911 else
1912 FOR_EACH_EXPR_ID_IN_SET (PA_IN (bprime), i, bi)
1913 bitmap_value_insert_into_set (PA_OUT,
1914 expression_for_id (i));
1917 VEC_free (basic_block, heap, worklist);
1920 /* PA_IN starts with PA_OUT - TMP_GEN.
1921 Then we subtract things from ANTIC_IN. */
1922 PA_IN (block) = bitmap_set_subtract (PA_OUT, TMP_GEN (block));
1924 /* For partial antic, we want to put back in the phi results, since
1925 we will properly avoid making them partially antic over backedges. */
1926 bitmap_ior_into (PA_IN (block)->values, PHI_GEN (block)->values);
1927 bitmap_ior_into (PA_IN (block)->expressions, PHI_GEN (block)->expressions);
1929 /* PA_IN[block] = PA_IN[block] - ANTIC_IN[block] */
1930 bitmap_set_subtract_values (PA_IN (block), ANTIC_IN (block));
1932 dependent_clean (PA_IN (block), ANTIC_IN (block), block);
1934 if (!bitmap_set_equal (old_PA_IN, PA_IN (block)))
1936 changed = true;
1937 SET_BIT (changed_blocks, block->index);
1938 FOR_EACH_EDGE (e, ei, block->preds)
1939 SET_BIT (changed_blocks, e->src->index);
1941 else
1942 RESET_BIT (changed_blocks, block->index);
1944 maybe_dump_sets:
1945 if (dump_file && (dump_flags & TDF_DETAILS))
1947 if (PA_OUT)
1948 print_bitmap_set (dump_file, PA_OUT, "PA_OUT", block->index);
1950 print_bitmap_set (dump_file, PA_IN (block), "PA_IN", block->index);
1952 if (old_PA_IN)
1953 bitmap_set_free (old_PA_IN);
1954 if (PA_OUT)
1955 bitmap_set_free (PA_OUT);
1956 return changed;
1959 /* Compute ANTIC and partial ANTIC sets. */
1961 static void
1962 compute_antic (void)
1964 bool changed = true;
1965 int num_iterations = 0;
1966 basic_block block;
1967 int i;
1969 /* If any predecessor edges are abnormal, we punt, so antic_in is empty.
1970 We pre-build the map of blocks with incoming abnormal edges here. */
1971 has_abnormal_preds = sbitmap_alloc (last_basic_block);
1972 sbitmap_zero (has_abnormal_preds);
1974 FOR_EACH_BB (block)
1976 edge_iterator ei;
1977 edge e;
1979 FOR_EACH_EDGE (e, ei, block->preds)
1981 e->flags &= ~EDGE_DFS_BACK;
1982 if (e->flags & EDGE_ABNORMAL)
1984 SET_BIT (has_abnormal_preds, block->index);
1985 break;
1989 BB_VISITED (block) = 0;
1990 BB_DEFERRED (block) = 0;
1991 /* While we are here, give empty ANTIC_IN sets to each block. */
1992 ANTIC_IN (block) = bitmap_set_new ();
1993 PA_IN (block) = bitmap_set_new ();
1996 /* At the exit block we anticipate nothing. */
1997 ANTIC_IN (EXIT_BLOCK_PTR) = bitmap_set_new ();
1998 BB_VISITED (EXIT_BLOCK_PTR) = 1;
1999 PA_IN (EXIT_BLOCK_PTR) = bitmap_set_new ();
2001 changed_blocks = sbitmap_alloc (last_basic_block + 1);
2002 sbitmap_ones (changed_blocks);
2003 while (changed)
2005 if (dump_file && (dump_flags & TDF_DETAILS))
2006 fprintf (dump_file, "Starting iteration %d\n", num_iterations);
2007 num_iterations++;
2008 changed = false;
2009 for (i = 0; i < last_basic_block - NUM_FIXED_BLOCKS; i++)
2011 if (TEST_BIT (changed_blocks, postorder[i]))
2013 basic_block block = BASIC_BLOCK (postorder[i]);
2014 changed |= compute_antic_aux (block,
2015 TEST_BIT (has_abnormal_preds,
2016 block->index));
2019 /* Theoretically possible, but *highly* unlikely. */
2020 gcc_assert (num_iterations < 50);
2023 if (dump_file && (dump_flags & TDF_STATS))
2024 fprintf (dump_file, "compute_antic required %d iterations\n",
2025 num_iterations);
2027 if (do_partial_partial)
2029 sbitmap_ones (changed_blocks);
2030 mark_dfs_back_edges ();
2031 num_iterations = 0;
2032 changed = true;
2033 while (changed)
2035 if (dump_file && (dump_flags & TDF_DETAILS))
2036 fprintf (dump_file, "Starting iteration %d\n", num_iterations);
2037 num_iterations++;
2038 changed = false;
2039 for (i = 0; i < last_basic_block - NUM_FIXED_BLOCKS; i++)
2041 if (TEST_BIT (changed_blocks, postorder[i]))
2043 basic_block block = BASIC_BLOCK (postorder[i]);
2044 changed
2045 |= compute_partial_antic_aux (block,
2046 TEST_BIT (has_abnormal_preds,
2047 block->index));
2050 /* Theoretically possible, but *highly* unlikely. */
2051 gcc_assert (num_iterations < 50);
2053 if (dump_file && (dump_flags & TDF_STATS))
2054 fprintf (dump_file, "compute_partial_antic required %d iterations\n",
2055 num_iterations);
2057 sbitmap_free (has_abnormal_preds);
2058 sbitmap_free (changed_blocks);
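/* The two loops above are a standard chaotic iteration over a postorder
   of the CFG: only blocks flagged in changed_blocks are revisited, and a
   block whose set changed causes its predecessors to be flagged again.
   A stripped-down sketch of that driver pattern follows; it is
   illustrative only, and the data it uses (n_blocks, post, n_preds,
   preds, flagged) and recompute_block, which plays the role of
   compute_antic_aux, are hypothetical stand-ins:

     static int n_blocks;
     static int post[16], n_preds[16], preds[16][4];
     static bool flagged[16];
     static bool recompute_block (int b);

     static void
     toy_fixpoint (void)
     {
       bool changed = true;
       int i, j;

       while (changed)
         {
           changed = false;
           for (i = 0; i < n_blocks; i++)
             {
               int b = post[i];
               if (!flagged[b])
                 continue;
               flagged[b] = false;
               if (recompute_block (b))
                 {
                   changed = true;
                   for (j = 0; j < n_preds[b]; j++)
                     flagged[preds[b][j]] = true;
                 }
             }
         }
     }
*/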
2061 /* Return true if we can value number the call in STMT. This is true
2062 if we have a pure or constant call. */
2064 static bool
2065 can_value_number_call (tree stmt)
2067 tree call = get_call_expr_in (stmt);
2069 if (call_expr_flags (call) & (ECF_PURE | ECF_CONST))
2070 return true;
2071 return false;
2074 /* Return true if OP is an exception handler related operation, such as
2075 FILTER_EXPR or EXC_PTR_EXPR. */
2077 static bool
2078 is_exception_related (tree op)
2080 return TREE_CODE (op) == FILTER_EXPR || TREE_CODE (op) == EXC_PTR_EXPR;
2083 /* Return true if OP is a tree which we can perform value numbering
2084 on. */
2086 static bool
2087 can_value_number_operation (tree op)
2089 return (UNARY_CLASS_P (op)
2090 && !is_exception_related (TREE_OPERAND (op, 0)))
2091 || BINARY_CLASS_P (op)
2092 || COMPARISON_CLASS_P (op)
2093 || REFERENCE_CLASS_P (op)
2094 || (TREE_CODE (op) == CALL_EXPR
2095 && can_value_number_call (op));
2099 /* Return true if OP is a tree which we can perform PRE on.
2100 This may not match the operations we can value number, but in
2101 a perfect world would. */
2103 static bool
2104 can_PRE_operation (tree op)
2106 return UNARY_CLASS_P (op)
2107 || BINARY_CLASS_P (op)
2108 || COMPARISON_CLASS_P (op)
2109 || TREE_CODE (op) == INDIRECT_REF
2110 || TREE_CODE (op) == COMPONENT_REF
2111 || TREE_CODE (op) == CALL_EXPR
2112 || TREE_CODE (op) == ARRAY_REF;
2116 /* Inserted expressions are placed onto this worklist, which is used
2117 for performing quick dead code elimination of insertions we made
2118 that didn't turn out to be necessary. */
2119 static VEC(tree,heap) *inserted_exprs;
2121 /* Pool allocated fake store expressions are placed onto this
2122 worklist, which, after performing dead code elimination, is walked
2123 to see which expressions need to be put into GC'able memory */
2124 static VEC(tree, heap) *need_creation;
2126 /* For COMPONENT_REF's and ARRAY_REF's, we can't have any intermediates for the
2127 COMPONENT_REF or INDIRECT_REF or ARRAY_REF portion, because we'd end up
2128 trying to rename aggregates into ssa form directly, which is a no-no.
2131 Thus, this routine doesn't create temporaries, it just builds a
2132 single access expression for the array, calling
2133 find_or_generate_expression to build the innermost pieces.
2135 This function is a subroutine of create_expression_by_pieces, and
2136 should not be called on its own unless you really know what you
2137 are doing.
2139 static tree
2140 create_component_ref_by_pieces (basic_block block, tree expr, tree stmts)
2142 tree genop = expr;
2143 tree folded;
2145 if (TREE_CODE (genop) == VALUE_HANDLE)
2147 tree found = bitmap_find_leader (AVAIL_OUT (block), expr);
2148 if (found)
2149 return found;
2152 if (TREE_CODE (genop) == VALUE_HANDLE)
2154 bitmap_set_t exprset = VALUE_HANDLE_EXPR_SET (expr);
2155 unsigned int firstbit = bitmap_first_set_bit (exprset->expressions);
2156 genop = expression_for_id (firstbit);
2159 switch (TREE_CODE (genop))
2161 case ARRAY_REF:
2163 tree op0;
2164 tree op1, op2, op3;
2165 op0 = create_component_ref_by_pieces (block,
2166 TREE_OPERAND (genop, 0),
2167 stmts);
2168 op1 = TREE_OPERAND (genop, 1);
2169 if (TREE_CODE (op1) == VALUE_HANDLE)
2170 op1 = find_or_generate_expression (block, op1, stmts);
2171 op2 = TREE_OPERAND (genop, 2);
2172 if (op2 && TREE_CODE (op2) == VALUE_HANDLE)
2173 op2 = find_or_generate_expression (block, op2, stmts);
2174 op3 = TREE_OPERAND (genop, 3);
2175 if (op3 && TREE_CODE (op3) == VALUE_HANDLE)
2176 op3 = find_or_generate_expression (block, op3, stmts);
2177 folded = build4 (ARRAY_REF, TREE_TYPE (genop), op0, op1,
2178 op2, op3);
2179 return folded;
2181 case COMPONENT_REF:
2183 tree op0;
2184 tree op1;
2185 op0 = create_component_ref_by_pieces (block,
2186 TREE_OPERAND (genop, 0),
2187 stmts);
2188 /* op1 should be a FIELD_DECL; FIELD_DECLs are represented by
2189 themselves. */
2190 op1 = TREE_OPERAND (genop, 1);
2191 folded = fold_build3 (COMPONENT_REF, TREE_TYPE (genop), op0, op1,
2192 NULL_TREE);
2193 return folded;
2195 break;
2196 case INDIRECT_REF:
2198 tree op1 = TREE_OPERAND (genop, 0);
2199 tree genop1 = find_or_generate_expression (block, op1, stmts);
2201 folded = fold_build1 (TREE_CODE (genop), TREE_TYPE (genop),
2202 genop1);
2203 return folded;
2205 break;
2206 case VAR_DECL:
2207 case PARM_DECL:
2208 case RESULT_DECL:
2209 case SSA_NAME:
2210 case STRING_CST:
2211 return genop;
2212 default:
2213 gcc_unreachable ();
2216 return NULL_TREE;
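/* A concrete example of the recursion above: suppose we must regenerate
   the value of a reference like a.b[i_1] whose pieces are known only by
   value handle (the SSA version and handles here are invented for the
   example).  The ARRAY_REF case first rebuilds the base a.b through the
   COMPONENT_REF case, whose own base recursion bottoms out at the
   VAR_DECL a; the index is obtained with find_or_generate_expression,
   which either finds a leader for its value or generates one; and the
   ARRAY_REF is then rebuilt around the results, roughly:

     op0 = create_component_ref_by_pieces (block, <a.b>, stmts);
     op1 = find_or_generate_expression (block, <value of i_1>, stmts);
     folded = build4 (ARRAY_REF, TREE_TYPE (genop), op0, op1, op2, op3);

   Only the innermost pieces ever become separate statements; the
   reference itself is built as a single tree, as the comment before this
   function explains.  */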
2219 /* Find a leader for an expression, or generate one using
2220 create_expression_by_pieces if it's ANTIC but
2221 complex.
2222 BLOCK is the basic_block we are looking for leaders in.
2223 EXPR is the expression to find a leader or generate for.
2224 STMTS is the statement list to put the inserted expressions on.
2225 Returns the SSA_NAME of the LHS of the generated expression or the
2226 leader. */
2228 static tree
2229 find_or_generate_expression (basic_block block, tree expr, tree stmts)
2231 tree genop = bitmap_find_leader (AVAIL_OUT (block), expr);
2233 /* If it's still NULL, it must be a complex expression, so generate
2234 it recursively. */
2235 if (genop == NULL)
2237 bitmap_set_t exprset = VALUE_HANDLE_EXPR_SET (expr);
2238 bool handled = false;
2239 bitmap_iterator bi;
2240 unsigned int i;
2242 /* We will hit cases where we have SSA_NAME's in exprset before
2243 other operations, because we may have come up with the SCCVN
2244 value before getting to the RHS of the expression. */
2245 FOR_EACH_EXPR_ID_IN_SET (exprset, i, bi)
2247 genop = expression_for_id (i);
2248 if (can_PRE_operation (genop))
2250 handled = true;
2251 genop = create_expression_by_pieces (block, genop, stmts);
2252 break;
2255 gcc_assert (handled);
2257 return genop;
2260 #define NECESSARY(stmt) stmt->base.asm_written_flag
2261 /* Create an expression in pieces, so that we can handle very complex
2262 expressions that may be ANTIC, but not necessarily GIMPLE.
2263 BLOCK is the basic block the expression will be inserted into,
2264 EXPR is the expression to insert (in value form)
2265 STMTS is a statement list to append the necessary insertions into.
2267 This function will die if we hit some value that shouldn't be
2268 ANTIC but is (i.e. there is no leader for it, or for its components).
2269 This function may also generate expressions that are themselves
2270 partially or fully redundant. Those that are will be either made
2271 fully redundant during the next iteration of insert (for partially
2272 redundant ones), or eliminated by eliminate (for fully redundant
2273 ones). */
2275 static tree
2276 create_expression_by_pieces (basic_block block, tree expr, tree stmts)
2278 tree temp, name;
2279 tree folded, forced_stmts, newexpr;
2280 tree v;
2281 tree_stmt_iterator tsi;
2283 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
2285 case tcc_vl_exp:
2287 tree fn, sc;
2288 tree genfn;
2289 int i, nargs;
2290 tree *buffer;
2292 gcc_assert (TREE_CODE (expr) == CALL_EXPR);
2294 fn = CALL_EXPR_FN (expr);
2295 sc = CALL_EXPR_STATIC_CHAIN (expr);
2297 genfn = find_or_generate_expression (block, fn, stmts);
2299 nargs = call_expr_nargs (expr);
2300 buffer = (tree*) alloca (nargs * sizeof (tree));
2302 for (i = 0; i < nargs; i++)
2304 tree arg = CALL_EXPR_ARG (expr, i);
2305 buffer[i] = find_or_generate_expression (block, arg, stmts);
2308 folded = build_call_array (TREE_TYPE (expr), genfn, nargs, buffer);
2309 if (sc)
2310 CALL_EXPR_STATIC_CHAIN (folded) =
2311 find_or_generate_expression (block, sc, stmts);
2312 folded = fold (folded);
2313 break;
2315 break;
2316 case tcc_reference:
2318 if (TREE_CODE (expr) == COMPONENT_REF
2319 || TREE_CODE (expr) == ARRAY_REF)
2321 folded = create_component_ref_by_pieces (block, expr, stmts);
2323 else
2325 tree op1 = TREE_OPERAND (expr, 0);
2326 tree genop1 = find_or_generate_expression (block, op1, stmts);
2328 folded = fold_build1 (TREE_CODE (expr), TREE_TYPE (expr),
2329 genop1);
2331 break;
2334 case tcc_binary:
2335 case tcc_comparison:
2337 tree op1 = TREE_OPERAND (expr, 0);
2338 tree op2 = TREE_OPERAND (expr, 1);
2339 tree genop1 = find_or_generate_expression (block, op1, stmts);
2340 tree genop2 = find_or_generate_expression (block, op2, stmts);
2341 folded = fold_build2 (TREE_CODE (expr), TREE_TYPE (expr),
2342 genop1, genop2);
2343 break;
2346 case tcc_unary:
2348 tree op1 = TREE_OPERAND (expr, 0);
2349 tree genop1 = find_or_generate_expression (block, op1, stmts);
2350 folded = fold_build1 (TREE_CODE (expr), TREE_TYPE (expr),
2351 genop1);
2352 break;
2355 default:
2356 gcc_unreachable ();
2359 /* Force the generated expression to be a sequence of GIMPLE
2360 statements.
2361 We have to call unshare_expr because force_gimple_operand may
2362 modify the tree we pass to it. */
2363 newexpr = force_gimple_operand (unshare_expr (folded), &forced_stmts,
2364 false, NULL);
2366 /* If forcing the expression into GIMPLE form produced any intermediate
2367 statements, add their results to the value sets and chain them onto STMTS. */
2368 if (forced_stmts)
2370 tsi = tsi_start (forced_stmts);
2371 for (; !tsi_end_p (tsi); tsi_next (&tsi))
2373 tree stmt = tsi_stmt (tsi);
2374 tree forcedname = GIMPLE_STMT_OPERAND (stmt, 0);
2375 tree forcedexpr = GIMPLE_STMT_OPERAND (stmt, 1);
2376 tree val = vn_lookup_or_add (forcedexpr);
2378 VEC_safe_push (tree, heap, inserted_exprs, stmt);
2379 VN_INFO_GET (forcedname)->valnum = forcedname;
2380 vn_add (forcedname, val);
2381 bitmap_value_replace_in_set (NEW_SETS (block), forcedname);
2382 bitmap_value_replace_in_set (AVAIL_OUT (block), forcedname);
2383 mark_symbols_for_renaming (stmt);
2385 tsi = tsi_last (stmts);
2386 tsi_link_after (&tsi, forced_stmts, TSI_CONTINUE_LINKING);
2389 /* Build and insert the assignment of the end result to the temporary
2390 that we will return. */
2391 if (!pretemp || TREE_TYPE (expr) != TREE_TYPE (pretemp))
2393 pretemp = create_tmp_var (TREE_TYPE (expr), "pretmp");
2394 get_var_ann (pretemp);
2397 temp = pretemp;
2398 add_referenced_var (temp);
2400 if (TREE_CODE (TREE_TYPE (expr)) == COMPLEX_TYPE
2401 || TREE_CODE (TREE_TYPE (expr)) == VECTOR_TYPE)
2402 DECL_GIMPLE_REG_P (temp) = 1;
2404 newexpr = build_gimple_modify_stmt (temp, newexpr);
2405 name = make_ssa_name (temp, newexpr);
2406 GIMPLE_STMT_OPERAND (newexpr, 0) = name;
2407 NECESSARY (newexpr) = 0;
2409 tsi = tsi_last (stmts);
2410 tsi_link_after (&tsi, newexpr, TSI_CONTINUE_LINKING);
2411 VEC_safe_push (tree, heap, inserted_exprs, newexpr);
2413 /* All the symbols in NEWEXPR should be put into SSA form. */
2414 mark_symbols_for_renaming (newexpr);
2416 /* Add a value handle to the temporary.
2417 The value may already exist in either NEW_SETS, or AVAIL_OUT, because
2418 we are creating the expression by pieces, and this particular piece of
2419 the expression may have been represented. There is no harm in replacing
2420 here. */
2421 v = get_value_handle (expr);
2422 vn_add (name, v);
2423 VN_INFO_GET (name)->valnum = name;
2424 get_or_alloc_expression_id (name);
2425 bitmap_value_replace_in_set (NEW_SETS (block), name);
2426 bitmap_value_replace_in_set (AVAIL_OUT (block), name);
2428 pre_stats.insertions++;
2429 if (dump_file && (dump_flags & TDF_DETAILS))
2431 fprintf (dump_file, "Inserted ");
2432 print_generic_expr (dump_file, newexpr, 0);
2433 fprintf (dump_file, " in predecessor %d\n", block->index);
2436 return name;
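/* For example, generating the value expression <VH.9 * VH.10>, where
   VH.9 is itself the value of a_1 + b_2 and has no leader in AVAIL_OUT
   while VH.10 is available as c_3, ends up appending two statements to
   STMTS (handle numbers and SSA versions invented for the example):

     pretmp_11 = a_1 + b_2;          <- from the recursive
                                        find_or_generate_expression
     pretmp_12 = pretmp_11 * c_3;    <- the statement built above

   Both new names get the appropriate value handles, are entered into
   NEW_SETS and AVAIL_OUT of BLOCK, and are recorded in inserted_exprs so
   that the DCE-like cleanup can delete them again if they turn out to be
   unnecessary.  */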
2439 /* Insert the to-be-made-available values of expression EXPRNUM for each
2440 predecessor, stored in AVAIL, into the predecessors of BLOCK, and
2441 merge the result with a phi node, given the same value handle as
2442 NODE. Return true if we have inserted new stuff. */
2444 static bool
2445 insert_into_preds_of_block (basic_block block, unsigned int exprnum,
2446 tree *avail)
2448 tree expr = expression_for_id (exprnum);
2449 tree val = get_value_handle (expr);
2450 edge pred;
2451 bool insertions = false;
2452 bool nophi = false;
2453 basic_block bprime;
2454 tree eprime;
2455 edge_iterator ei;
2456 tree type = TREE_TYPE (avail[EDGE_PRED (block, 0)->src->index]);
2457 tree temp;
2459 if (dump_file && (dump_flags & TDF_DETAILS))
2461 fprintf (dump_file, "Found partial redundancy for expression ");
2462 print_generic_expr (dump_file, expr, 0);
2463 fprintf (dump_file, " (");
2464 print_generic_expr (dump_file, val, 0);
2465 fprintf (dump_file, ")");
2466 fprintf (dump_file, "\n");
2469 /* Make sure we aren't creating an induction variable. */
2470 if (block->loop_depth > 0 && EDGE_COUNT (block->preds) == 2
2471 && TREE_CODE_CLASS (TREE_CODE (expr)) != tcc_reference )
2473 bool firstinsideloop = false;
2474 bool secondinsideloop = false;
2475 firstinsideloop = flow_bb_inside_loop_p (block->loop_father,
2476 EDGE_PRED (block, 0)->src);
2477 secondinsideloop = flow_bb_inside_loop_p (block->loop_father,
2478 EDGE_PRED (block, 1)->src);
2479 /* Induction variables only have one edge inside the loop. */
2480 if (firstinsideloop ^ secondinsideloop)
2482 if (dump_file && (dump_flags & TDF_DETAILS))
2483 fprintf (dump_file, "Skipping insertion of phi for partial redundancy: Looks like an induction variable\n");
2484 nophi = true;
2489 /* Make the necessary insertions. */
2490 FOR_EACH_EDGE (pred, ei, block->preds)
2492 tree stmts = alloc_stmt_list ();
2493 tree builtexpr;
2494 bprime = pred->src;
2495 eprime = avail[bprime->index];
2497 if (can_PRE_operation (eprime))
2499 builtexpr = create_expression_by_pieces (bprime,
2500 eprime,
2501 stmts);
2502 gcc_assert (!(pred->flags & EDGE_ABNORMAL));
2503 bsi_insert_on_edge (pred, stmts);
2504 avail[bprime->index] = builtexpr;
2505 insertions = true;
2508 /* If we didn't want a phi node, and we made insertions, we still have
2509 inserted new stuff, and thus return true. If we didn't want a phi node,
2510 and didn't make insertions, we haven't added anything new, so return
2511 false. */
2512 if (nophi && insertions)
2513 return true;
2514 else if (nophi && !insertions)
2515 return false;
2517 /* Now build a phi for the new variable. */
2518 if (!prephitemp || TREE_TYPE (prephitemp) != type)
2520 prephitemp = create_tmp_var (type, "prephitmp");
2521 get_var_ann (prephitemp);
2524 temp = prephitemp;
2525 add_referenced_var (temp);
2528 if (TREE_CODE (type) == COMPLEX_TYPE
2529 || TREE_CODE (type) == VECTOR_TYPE)
2530 DECL_GIMPLE_REG_P (temp) = 1;
2531 temp = create_phi_node (temp, block);
2533 NECESSARY (temp) = 0;
2534 VN_INFO_GET (PHI_RESULT (temp))->valnum = PHI_RESULT (temp);
2536 VEC_safe_push (tree, heap, inserted_exprs, temp);
2537 FOR_EACH_EDGE (pred, ei, block->preds)
2538 add_phi_arg (temp, avail[pred->src->index], pred);
2540 vn_add (PHI_RESULT (temp), val);
2542 /* The value should *not* exist in PHI_GEN, or else we wouldn't be doing
2543 this insertion, since we test for the existence of this value in PHI_GEN
2544 before proceeding with the partial redundancy checks in insert_aux.
2546 The value may exist in AVAIL_OUT, in particular, it could be represented
2547 by the expression we are trying to eliminate, in which case we want the
2548 replacement to occur. If it's not existing in AVAIL_OUT, we want it
2549 inserted there.
2551 Similarly to the PHI_GEN case, the value should not exist in NEW_SETS of
2552 this block, because if it did, it would have existed in our dominator's
2553 AVAIL_OUT, and would have been skipped due to the full redundancy check.
2556 bitmap_insert_into_set (PHI_GEN (block),
2557 PHI_RESULT (temp));
2558 bitmap_value_replace_in_set (AVAIL_OUT (block),
2559 PHI_RESULT (temp));
2560 bitmap_insert_into_set (NEW_SETS (block),
2561 PHI_RESULT (temp));
2563 if (dump_file && (dump_flags & TDF_DETAILS))
2565 fprintf (dump_file, "Created phi ");
2566 print_generic_expr (dump_file, temp, 0);
2567 fprintf (dump_file, " in block %d\n", block->index);
2569 pre_stats.phis++;
2570 return true;
2575 /* Perform insertion of partially redundant values.
2576 For BLOCK, do the following:
2577 1. Propagate the NEW_SETS of the dominator into the current block.
2578 If the block has multiple predecessors,
2579 2a. Iterate over the ANTIC expressions for the block to see if
2580 any of them are partially redundant.
2581 2b. If so, insert them into the necessary predecessors to make
2582 the expression fully redundant.
2583 2c. Insert a new PHI merging the values of the predecessors.
2584 2d. Insert the new PHI, and the new expressions, into the
2585 NEW_SETS set.
2586 3. Recursively call ourselves on the dominator children of BLOCK.
2588 Steps 1, 2a, and 3 are done by insert_aux. 2b, 2c and 2d are done by
2589 do_regular_insertion and do_partial_partial_insertion (see the sketch below).
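/* A small worked example of steps 2a-2d on a diamond (illustrative
   GIMPLE, with block names, SSA versions and temporaries invented for
   the example).  Before PRE, a_3 + b_4 is computed on only one path into
   the join block, so it is partially redundant there:

     bb_then:
       ;
     bb_else:
       x_2 = a_3 + b_4;
     bb_join:
       y_5 = a_3 + b_4;

   do_regular_insertion finds a leader (x_2) along the else edge, inserts
   the computation along the then edge to make the expression fully
   redundant (2b), and merges the two results with a new PHI (2c/2d):

     bb_then:
       pretmp_6 = a_3 + b_4;
     bb_else:
       x_2 = a_3 + b_4;
     bb_join:
       prephitmp_7 = PHI <pretmp_6, x_2>;
       y_5 = a_3 + b_4;

   The later computation of y_5 is now fully redundant and is rewritten
   by eliminate () to use prephitmp_7.  The pretmp/prephitmp names match
   the temporaries created by create_expression_by_pieces and
   insert_into_preds_of_block above.  */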
2593 static bool
2594 do_regular_insertion (basic_block block, basic_block dom)
2596 bool new_stuff = false;
2597 VEC (tree, heap) *exprs = sorted_array_from_bitmap_set (ANTIC_IN (block));
2598 tree expr;
2599 int i;
2601 for (i = 0; VEC_iterate (tree, exprs, i, expr); i++)
2603 if (can_PRE_operation (expr) && !AGGREGATE_TYPE_P (TREE_TYPE (expr)))
2605 tree *avail;
2606 tree val;
2607 bool by_some = false;
2608 bool cant_insert = false;
2609 bool all_same = true;
2610 tree first_s = NULL;
2611 edge pred;
2612 basic_block bprime;
2613 tree eprime = NULL_TREE;
2614 edge_iterator ei;
2616 val = get_value_handle (expr);
2617 if (bitmap_set_contains_value (PHI_GEN (block), val))
2618 continue;
2619 if (bitmap_set_contains_value (AVAIL_OUT (dom), val))
2621 if (dump_file && (dump_flags & TDF_DETAILS))
2622 fprintf (dump_file, "Found fully redundant value\n");
2623 continue;
2626 avail = XCNEWVEC (tree, last_basic_block);
2627 FOR_EACH_EDGE (pred, ei, block->preds)
2629 tree vprime;
2630 tree edoubleprime;
2632 /* This can happen in the very weird case
2633 that our fake infinite loop edges have caused a
2634 critical edge to appear. */
2635 if (EDGE_CRITICAL_P (pred))
2637 cant_insert = true;
2638 break;
2640 bprime = pred->src;
2641 eprime = phi_translate (expr, ANTIC_IN (block), NULL,
2642 bprime, block);
2644 /* eprime will generally only be NULL if the
2645 value of the expression, translated
2646 through the PHI for this predecessor, is
2647 undefined. If that is the case, we can't
2648 make the expression fully redundant,
2649 because its value is undefined along a
2650 predecessor path. We can thus break out
2651 early because it doesn't matter what the
2652 rest of the results are. */
2653 if (eprime == NULL)
2655 cant_insert = true;
2656 break;
2659 eprime = fully_constant_expression (eprime);
2660 vprime = get_value_handle (eprime);
2661 gcc_assert (vprime);
2662 edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime),
2663 vprime);
2664 if (edoubleprime == NULL)
2666 avail[bprime->index] = eprime;
2667 all_same = false;
2669 else
2671 avail[bprime->index] = edoubleprime;
2672 by_some = true;
2673 if (first_s == NULL)
2674 first_s = edoubleprime;
2675 else if (!operand_equal_p (first_s, edoubleprime, 0))
2677 all_same = false;
2680 /* If we can insert it, it's not the same value
2681 already existing along every predecessor, and
2682 it's defined by some predecessor, it is
2683 partially redundant. */
2684 if (!cant_insert && !all_same && by_some)
2686 if (insert_into_preds_of_block (block, get_expression_id (expr),
2687 avail))
2688 new_stuff = true;
2690 /* If all edges produce the same value and that value is
2691 an invariant, then the PHI has the same value on all
2692 edges. Note this. */
2693 else if (!cant_insert && all_same && eprime
2694 && is_gimple_min_invariant (eprime)
2695 && !is_gimple_min_invariant (val))
2697 unsigned int j;
2698 bitmap_iterator bi;
2700 bitmap_set_t exprset = VALUE_HANDLE_EXPR_SET (val);
2701 FOR_EACH_EXPR_ID_IN_SET (exprset, j, bi)
2703 tree expr = expression_for_id (j);
2704 if (TREE_CODE (expr) == SSA_NAME)
2706 vn_add (expr, eprime);
2707 pre_stats.constified++;
2711 free (avail);
2715 VEC_free (tree, heap, exprs);
2716 return new_stuff;
2720 /* Perform insertion for partially anticipatable expressions. There
2721 is only one case we will perform insertion for these. This case is
2722 if the expression is partially anticipatable, and fully available.
2723 In this case, we know that putting it earlier will enable us to
2724 remove the later computation. */
2727 static bool
2728 do_partial_partial_insertion (basic_block block, basic_block dom)
2730 bool new_stuff = false;
2731 VEC (tree, heap) *exprs = sorted_array_from_bitmap_set (PA_IN (block));
2732 tree expr;
2733 int i;
2735 for (i = 0; VEC_iterate (tree, exprs, i, expr); i++)
2737 if (can_PRE_operation (expr) && !AGGREGATE_TYPE_P (TREE_TYPE (expr)))
2739 tree *avail;
2740 tree val;
2741 bool by_all = true;
2742 bool cant_insert = false;
2743 edge pred;
2744 basic_block bprime;
2745 tree eprime = NULL_TREE;
2746 edge_iterator ei;
2748 val = get_value_handle (expr);
2749 if (bitmap_set_contains_value (PHI_GEN (block), val))
2750 continue;
2751 if (bitmap_set_contains_value (AVAIL_OUT (dom), val))
2752 continue;
2754 avail = XCNEWVEC (tree, last_basic_block);
2755 FOR_EACH_EDGE (pred, ei, block->preds)
2757 tree vprime;
2758 tree edoubleprime;
2760 /* This can happen in the very weird case
2761 that our fake infinite loop edges have caused a
2762 critical edge to appear. */
2763 if (EDGE_CRITICAL_P (pred))
2765 cant_insert = true;
2766 break;
2768 bprime = pred->src;
2769 eprime = phi_translate (expr, ANTIC_IN (block),
2770 PA_IN (block),
2771 bprime, block);
2773 /* eprime will generally only be NULL if the
2774 value of the expression, translated
2775 through the PHI for this predecessor, is
2776 undefined. If that is the case, we can't
2777 make the expression fully redundant,
2778 because its value is undefined along a
2779 predecessor path. We can thus break out
2780 early because it doesn't matter what the
2781 rest of the results are. */
2782 if (eprime == NULL)
2784 cant_insert = true;
2785 break;
2788 eprime = fully_constant_expression (eprime);
2789 vprime = get_value_handle (eprime);
2790 gcc_assert (vprime);
2791 edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime),
2792 vprime);
2793 if (edoubleprime == NULL)
2795 by_all = false;
2796 break;
2798 else
2799 avail[bprime->index] = edoubleprime;
2803 /* If we can insert it and its value is already available
2804 along every predecessor, then merging those values with a
2805 PHI makes the expression fully redundant here, and the
2806 later computation can be removed. */
2807 if (!cant_insert && by_all)
2809 pre_stats.pa_insert++;
2810 if (insert_into_preds_of_block (block, get_expression_id (expr),
2811 avail))
2812 new_stuff = true;
2814 free (avail);
2818 VEC_free (tree, heap, exprs);
2819 return new_stuff;
2822 static bool
2823 insert_aux (basic_block block)
2825 basic_block son;
2826 bool new_stuff = false;
2828 if (block)
2830 basic_block dom;
2831 dom = get_immediate_dominator (CDI_DOMINATORS, block);
2832 if (dom)
2834 unsigned i;
2835 bitmap_iterator bi;
2836 bitmap_set_t newset = NEW_SETS (dom);
2837 if (newset)
2839 /* Note that we need to value_replace both NEW_SETS and
2840 AVAIL_OUT. In both sets the value may currently be
2841 represented by some non-simple expression that we want
2842 to replace with the new one. */
2843 FOR_EACH_EXPR_ID_IN_SET (newset, i, bi)
2845 tree expr = expression_for_id (i);
2846 bitmap_value_replace_in_set (NEW_SETS (block), expr);
2847 bitmap_value_replace_in_set (AVAIL_OUT (block), expr);
2850 if (!single_pred_p (block))
2852 new_stuff |= do_regular_insertion (block, dom);
2853 if (do_partial_partial)
2854 new_stuff |= do_partial_partial_insertion (block, dom);
2858 for (son = first_dom_son (CDI_DOMINATORS, block);
2859 son;
2860 son = next_dom_son (CDI_DOMINATORS, son))
2862 new_stuff |= insert_aux (son);
2865 return new_stuff;
2868 /* Perform insertion of partially redundant values. */
2870 static void
2871 insert (void)
2873 bool new_stuff = true;
2874 basic_block bb;
2875 int num_iterations = 0;
2877 FOR_ALL_BB (bb)
2878 NEW_SETS (bb) = bitmap_set_new ();
2880 while (new_stuff)
2882 num_iterations++;
2883 new_stuff = false;
2884 new_stuff = insert_aux (ENTRY_BLOCK_PTR);
2886 if (num_iterations > 2 && dump_file && (dump_flags & TDF_STATS))
2887 fprintf (dump_file, "insert required %d iterations\n", num_iterations);
2891 /* Add OP to EXP_GEN (block), and possibly to the maximal set if it is
2892 not defined by a phi node.
2893 PHI nodes can't go in the maximal sets because they are not in
2894 TMP_GEN, so it is possible to get into non-monotonic situations
2895 during ANTIC calculation, because it will *add* bits. */
2897 static void
2898 add_to_exp_gen (basic_block block, tree op)
2900 if (!in_fre)
2902 if (TREE_CODE (op) == SSA_NAME && ssa_undefined_value_p (op))
2903 return;
2904 bitmap_value_insert_into_set (EXP_GEN (block), op);
2905 if (TREE_CODE (op) != SSA_NAME
2906 || TREE_CODE (SSA_NAME_DEF_STMT (op)) != PHI_NODE)
2907 bitmap_value_insert_into_set (maximal_set, op);
2912 /* Given an SSA variable VAR and an expression EXPR, compute the value
2913 number for EXPR and create a value handle (VAL) for it. If VAR and
2914 EXPR are not the same, associate VAL with VAR. Finally, add VAR to
2915 S1 and its value handle to S2.
2918 VUSES represent the virtual use operands associated with EXPR (if
2919 any). */
2921 static inline void
2922 add_to_sets (tree var, tree expr, VEC(tree, gc) *vuses, bitmap_set_t s1,
2923 bitmap_set_t s2)
2925 tree val;
2926 val = vn_lookup_or_add_with_vuses (expr, vuses);
2928 /* VAR and EXPR may be the same when processing statements for which
2929 we are not computing value numbers (e.g., non-assignments, or
2930 statements that make aliased stores). In those cases, we are
2931 only interested in making VAR available as its own value. */
2932 if (var != expr)
2933 vn_add (var, val);
2935 if (s1)
2936 bitmap_insert_into_set (s1, var);
2938 bitmap_value_insert_into_set (s2, var);
2941 /* Find existing value expression that is the same as T,
2942 and return it if it exists. */
2944 static inline tree
2945 find_existing_value_expr (tree t, VEC (tree, gc) *vuses)
2947 bitmap_iterator bi;
2948 unsigned int bii;
2949 tree vh;
2950 bitmap_set_t exprset;
2952 if (REFERENCE_CLASS_P (t) || TREE_CODE (t) == CALL_EXPR || DECL_P (t))
2953 vh = vn_lookup_with_vuses (t, vuses);
2954 else
2955 vh = vn_lookup (t);
2957 if (!vh)
2958 return NULL;
2959 exprset = VALUE_HANDLE_EXPR_SET (vh);
2960 FOR_EACH_EXPR_ID_IN_SET (exprset, bii, bi)
2962 tree efi = expression_for_id (bii);
2963 if (expressions_equal_p (t, efi))
2964 return efi;
2966 return NULL;
2969 /* Given a unary or binary expression EXPR, create and return a new
2970 expression with the same structure as EXPR but with its operands
2971 replaced with the value handles of each of the operands of EXPR.
2973 VUSES represent the virtual use operands associated with EXPR (if
2974 any). Insert EXPR's operands into the EXP_GEN set for BLOCK. */
2976 static inline tree
2977 create_value_expr_from (tree expr, basic_block block, VEC (tree, gc) *vuses)
2979 int i;
2980 enum tree_code code = TREE_CODE (expr);
2981 tree vexpr;
2982 alloc_pool pool = NULL;
2983 tree efi;
2985 gcc_assert (TREE_CODE_CLASS (code) == tcc_unary
2986 || TREE_CODE_CLASS (code) == tcc_binary
2987 || TREE_CODE_CLASS (code) == tcc_comparison
2988 || TREE_CODE_CLASS (code) == tcc_reference
2989 || TREE_CODE_CLASS (code) == tcc_expression
2990 || TREE_CODE_CLASS (code) == tcc_vl_exp
2991 || TREE_CODE_CLASS (code) == tcc_exceptional
2992 || TREE_CODE_CLASS (code) == tcc_declaration);
2994 if (TREE_CODE_CLASS (code) == tcc_unary)
2995 pool = unary_node_pool;
2996 else if (TREE_CODE_CLASS (code) == tcc_reference)
2997 pool = reference_node_pool;
2998 else if (TREE_CODE_CLASS (code) == tcc_binary)
2999 pool = binary_node_pool;
3000 else if (TREE_CODE_CLASS (code) == tcc_comparison)
3001 pool = comparison_node_pool;
3002 else
3003 gcc_assert (code == CALL_EXPR);
3005 if (code == CALL_EXPR)
3006 vexpr = temp_copy_call_expr (expr);
3007 else
3009 vexpr = (tree) pool_alloc (pool);
3010 memcpy (vexpr, expr, tree_size (expr));
3013 for (i = 0; i < TREE_OPERAND_LENGTH (expr); i++)
3015 tree val = NULL_TREE;
3016 tree op;
3018 op = TREE_OPERAND (expr, i);
3019 if (op == NULL_TREE)
3020 continue;
3022 /* Recursively value-numberize reference ops and tree lists. */
3023 if (REFERENCE_CLASS_P (op))
3025 tree tempop = create_value_expr_from (op, block, vuses);
3026 op = tempop ? tempop : op;
3027 val = vn_lookup_or_add_with_vuses (op, vuses);
3028 set_expression_vuses (op, vuses);
3030 else
3032 val = vn_lookup_or_add (op);
3034 if (TREE_CODE (op) != TREE_LIST)
3035 add_to_exp_gen (block, op);
3037 if (TREE_CODE (val) == VALUE_HANDLE)
3038 TREE_TYPE (val) = TREE_TYPE (TREE_OPERAND (vexpr, i));
3040 TREE_OPERAND (vexpr, i) = val;
3042 efi = find_existing_value_expr (vexpr, vuses);
3043 if (efi)
3044 return efi;
3045 get_or_alloc_expression_id (vexpr);
3046 return vexpr;
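/* For example, when make_values_for_stmt processes x_5 = a_1 + b_3,
   where a_1 has value handle VH.10 and b_3 has value handle VH.11 (all
   names invented for the example), the loop above produces a pooled
   PLUS_EXPR whose operands are the handles themselves:

     VH.10 + VH.11

   and a_1 and b_3 are added to EXP_GEN (block) along the way.  Any other
   statement computing the same value produces an equal value expression,
   which is exactly what the find_existing_value_expr call just above
   detects, so that only one copy is ever allocated.  */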
3049 /* Return a copy of NODE that is stored in the temporary alloc_pool's.
3050 This is made recursively true, so that the operands are stored in
3051 the pool as well. */
3053 static tree
3054 poolify_tree (tree node)
3056 switch (TREE_CODE (node))
3058 case INDIRECT_REF:
3060 tree temp = (tree) pool_alloc (reference_node_pool);
3061 memcpy (temp, node, tree_size (node));
3062 TREE_OPERAND (temp, 0) = poolify_tree (TREE_OPERAND (temp, 0));
3063 return temp;
3065 break;
3066 case GIMPLE_MODIFY_STMT:
3068 tree temp = (tree) pool_alloc (modify_expr_node_pool);
3069 memcpy (temp, node, tree_size (node));
3070 GIMPLE_STMT_OPERAND (temp, 0) =
3071 poolify_tree (GIMPLE_STMT_OPERAND (temp, 0));
3072 GIMPLE_STMT_OPERAND (temp, 1) =
3073 poolify_tree (GIMPLE_STMT_OPERAND (temp, 1));
3074 return temp;
3076 break;
3077 case SSA_NAME:
3078 case INTEGER_CST:
3079 case STRING_CST:
3080 case REAL_CST:
3081 case FIXED_CST:
3082 case PARM_DECL:
3083 case VAR_DECL:
3084 case RESULT_DECL:
3085 return node;
3086 default:
3087 gcc_unreachable ();
3091 static tree modify_expr_template;
3093 /* Allocate a GIMPLE_MODIFY_STMT with TYPE, and operands OP1, OP2 in the
3094 alloc pools and return it. */
3095 static tree
3096 poolify_modify_stmt (tree op1, tree op2)
3098 if (modify_expr_template == NULL)
3099 modify_expr_template = build_gimple_modify_stmt (op1, op2);
3101 GIMPLE_STMT_OPERAND (modify_expr_template, 0) = op1;
3102 GIMPLE_STMT_OPERAND (modify_expr_template, 1) = op2;
3104 return poolify_tree (modify_expr_template);
3108 /* For each real store operation of the form
3109 *a = <value> that we see, create a corresponding fake store of the
3110 form storetmp_<version> = *a.
3112 This enables AVAIL computation to mark the results of stores as
3113 available. Without this, you'd need to do some computation to
3114 mark the result of stores as ANTIC and AVAIL at all the right
3115 points.
3116 To save memory, we keep the store
3117 statements pool allocated until we decide whether they are
3118 necessary or not. */
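/* For example (illustrative GIMPLE, SSA versions invented), a store

     *p_1 = x_2;

   gets a pool-allocated fake load inserted immediately after it:

     storetmp_3 = *p_1;

   so the stored value now has an SSA name that the AVAIL computation can
   treat like any other available value and that later loads of *p_1 can
   use as a leader.  If the fake store turns out to be unnecessary it is
   deleted again by the DCE-like cleanup; otherwise realify_fake_stores
   below turns it into a real, GC-allocated statement.  */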
3120 static void
3121 insert_fake_stores (void)
3123 basic_block block;
3125 FOR_ALL_BB (block)
3127 block_stmt_iterator bsi;
3128 for (bsi = bsi_start (block); !bsi_end_p (bsi); bsi_next (&bsi))
3130 tree stmt = bsi_stmt (bsi);
3132 /* We can't generate SSA names for stores that are complex
3133 or aggregate. We also want to ignore things whose
3134 virtual uses occur in abnormal phis. */
3136 if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
3137 && TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 0)) == INDIRECT_REF
3138 && !AGGREGATE_TYPE_P (TREE_TYPE (GIMPLE_STMT_OPERAND (stmt, 0)))
3139 && TREE_CODE (TREE_TYPE (GIMPLE_STMT_OPERAND
3140 (stmt, 0))) != COMPLEX_TYPE)
3142 ssa_op_iter iter;
3143 def_operand_p defp;
3144 tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
3145 tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
3146 tree new_tree;
3147 bool notokay = false;
3149 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_VIRTUAL_DEFS)
3151 tree defvar = DEF_FROM_PTR (defp);
3152 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (defvar))
3154 notokay = true;
3155 break;
3159 if (notokay)
3160 continue;
3162 if (!storetemp || TREE_TYPE (rhs) != TREE_TYPE (storetemp))
3164 storetemp = create_tmp_var (TREE_TYPE (rhs), "storetmp");
3165 if (TREE_CODE (TREE_TYPE (storetemp)) == VECTOR_TYPE)
3166 DECL_GIMPLE_REG_P (storetemp) = 1;
3167 get_var_ann (storetemp);
3170 new_tree = poolify_modify_stmt (storetemp, lhs);
3172 lhs = make_ssa_name (storetemp, new_tree);
3173 GIMPLE_STMT_OPERAND (new_tree, 0) = lhs;
3174 create_ssa_artificial_load_stmt (new_tree, stmt, false);
3176 NECESSARY (new_tree) = 0;
3177 VEC_safe_push (tree, heap, inserted_exprs, new_tree);
3178 VEC_safe_push (tree, heap, need_creation, new_tree);
3179 bsi_insert_after (&bsi, new_tree, BSI_NEW_STMT);
3185 /* Turn the pool allocated fake stores that we created back into real
3186 GC allocated ones if they turned out to be necessary to PRE some
3187 expressions. */
3189 static void
3190 realify_fake_stores (void)
3192 unsigned int i;
3193 tree stmt;
3195 for (i = 0; VEC_iterate (tree, need_creation, i, stmt); i++)
3197 if (NECESSARY (stmt))
3199 block_stmt_iterator bsi;
3200 tree newstmt, tmp;
3202 /* Mark the temp variable as referenced */
3203 add_referenced_var (SSA_NAME_VAR (GIMPLE_STMT_OPERAND (stmt, 0)));
3205 /* Put the new statement in GC memory, fix up the
3206 SSA_NAME_DEF_STMT on it, and then put it in place of
3207 the old statement before the store in the IR stream
3208 as a plain ssa name copy. */
3209 bsi = bsi_for_stmt (stmt);
3210 bsi_prev (&bsi);
3211 tmp = GIMPLE_STMT_OPERAND (bsi_stmt (bsi), 1);
3212 newstmt = build_gimple_modify_stmt (GIMPLE_STMT_OPERAND (stmt, 0),
3213 tmp);
3214 SSA_NAME_DEF_STMT (GIMPLE_STMT_OPERAND (newstmt, 0)) = newstmt;
3215 bsi_insert_before (&bsi, newstmt, BSI_SAME_STMT);
3216 bsi = bsi_for_stmt (stmt);
3217 bsi_remove (&bsi, true);
3219 else
3220 release_defs (stmt);
3224 /* Given an SSA_NAME, see if SCCVN has a value number for it, and if
3225 so, return the value handle for this value number, creating it if
3226 necessary.
3227 Return NULL if SCCVN has no info for us. */
3229 static tree
3230 get_sccvn_value (tree name)
3232 if (TREE_CODE (name) == SSA_NAME
3233 && VN_INFO (name)->valnum != name
3234 && VN_INFO (name)->valnum != VN_TOP)
3236 tree val = VN_INFO (name)->valnum;
3237 bool is_invariant = is_gimple_min_invariant (val);
3238 tree valvh = !is_invariant ? get_value_handle (val) : NULL_TREE;
3240 /* We may end up with situations where SCCVN has chosen a
3241 representative for the equivalence set that we have not
3242 visited yet. In this case, just create the value handle for
3243 it. */
3244 if (!valvh && !is_invariant)
3246 tree defstmt = SSA_NAME_DEF_STMT (val);
3248 gcc_assert (VN_INFO (val)->valnum == val);
3249 /* PHI nodes can't have vuses and attempts to iterate over
3250 their VUSE operands will crash. */
3251 if (TREE_CODE (defstmt) == PHI_NODE || IS_EMPTY_STMT (defstmt))
3252 defstmt = NULL;
3254 tree defstmt2 = SSA_NAME_DEF_STMT (name);
3255 if (TREE_CODE (defstmt2) != PHI_NODE &&
3256 !ZERO_SSA_OPERANDS (defstmt2, SSA_OP_ALL_VIRTUALS))
3257 gcc_assert (defstmt);
3259 valvh = vn_lookup_or_add_with_stmt (val, defstmt);
3262 if (dump_file && (dump_flags & TDF_DETAILS))
3264 fprintf (dump_file, "SCCVN says ");
3265 print_generic_expr (dump_file, name, 0);
3266 fprintf (dump_file, " value numbers to ");
3267 if (valvh && !is_invariant)
3269 print_generic_expr (dump_file, val, 0);
3270 fprintf (dump_file, " (");
3271 print_generic_expr (dump_file, valvh, 0);
3272 fprintf (dump_file, ")\n");
3274 else
3275 print_generic_stmt (dump_file, val, 0);
3277 if (valvh)
3278 return valvh;
3279 else
3280 return val;
3282 return NULL_TREE;
3285 /* Create value handles for PHI in BLOCK. */
3287 static void
3288 make_values_for_phi (tree phi, basic_block block)
3290 tree result = PHI_RESULT (phi);
3291 /* We have no need for virtual phis, as they don't represent
3292 actual computations. */
3293 if (is_gimple_reg (result))
3295 tree sccvnval = get_sccvn_value (result);
3296 if (sccvnval)
3298 vn_add (result, sccvnval);
3299 bitmap_insert_into_set (PHI_GEN (block), result);
3300 bitmap_value_insert_into_set (AVAIL_OUT (block), result);
3302 else
3303 add_to_sets (result, result, NULL,
3304 PHI_GEN (block), AVAIL_OUT (block));
3308 /* Create value handles for STMT in BLOCK. Return true if we handled
3309 the statement. */
3311 static bool
3312 make_values_for_stmt (tree stmt, basic_block block)
3315 tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
3316 tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
3317 tree valvh = NULL_TREE;
3318 tree lhsval;
3319 VEC (tree, gc) *vuses = NULL;
3321 valvh = get_sccvn_value (lhs);
3323 if (valvh)
3325 vn_add (lhs, valvh);
3326 bitmap_value_insert_into_set (AVAIL_OUT (block), lhs);
3327 /* Shortcut for FRE. We have no need to create value expressions,
3328 just want to know what values are available where. */
3329 if (in_fre)
3330 return true;
3333 else if (in_fre)
3335 /* For FRE, if SCCVN didn't find anything, we aren't going to
3336 either, so just make up a new value number if necessary and
3337 call it a day */
3338 if (get_value_handle (lhs) == NULL)
3339 vn_lookup_or_add (lhs);
3340 bitmap_value_insert_into_set (AVAIL_OUT (block), lhs);
3341 return true;
3344 lhsval = valvh ? valvh : get_value_handle (lhs);
3345 vuses = copy_vuses_from_stmt (stmt);
3346 STRIP_USELESS_TYPE_CONVERSION (rhs);
3347 if (can_value_number_operation (rhs)
3348 && (!lhsval || !is_gimple_min_invariant (lhsval)))
3350 /* For value numberable operation, create a
3351 duplicate expression with the operands replaced
3352 with the value handles of the original RHS. */
3353 tree newt = create_value_expr_from (rhs, block, vuses);
3354 if (newt)
3356 set_expression_vuses (newt, vuses);
3357 /* If we already have a value number for the LHS, reuse
3358 it rather than creating a new one. */
3359 if (lhsval)
3361 set_value_handle (newt, lhsval);
3362 if (!is_gimple_min_invariant (lhsval))
3363 add_to_value (lhsval, newt);
3365 else
3367 tree val = vn_lookup_or_add_with_vuses (newt, vuses);
3368 vn_add (lhs, val);
3371 add_to_exp_gen (block, newt);
3374 bitmap_insert_into_set (TMP_GEN (block), lhs);
3375 bitmap_value_insert_into_set (AVAIL_OUT (block), lhs);
3376 return true;
3378 else if ((TREE_CODE (rhs) == SSA_NAME
3379 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs))
3380 || is_gimple_min_invariant (rhs)
3381 || TREE_CODE (rhs) == ADDR_EXPR
3382 || TREE_INVARIANT (rhs)
3383 || DECL_P (rhs))
3386 if (lhsval)
3388 set_expression_vuses (rhs, vuses);
3389 set_value_handle (rhs, lhsval);
3390 if (!is_gimple_min_invariant (lhsval))
3391 add_to_value (lhsval, rhs);
3392 bitmap_insert_into_set (TMP_GEN (block), lhs);
3393 bitmap_value_insert_into_set (AVAIL_OUT (block), lhs);
3395 else
3397 /* Compute a value number for the RHS of the statement
3398 and add its value to the AVAIL_OUT set for the block.
3399 Add the LHS to TMP_GEN. */
3400 set_expression_vuses (rhs, vuses);
3401 add_to_sets (lhs, rhs, vuses, TMP_GEN (block),
3402 AVAIL_OUT (block));
3404 /* None of the rest of these can be PRE'd. */
3405 if (TREE_CODE (rhs) == SSA_NAME && !ssa_undefined_value_p (rhs))
3406 add_to_exp_gen (block, rhs);
3407 return true;
3409 return false;
3413 /* Compute the AVAIL set for all basic blocks.
3415 This function performs value numbering of the statements in each basic
3416 block. The AVAIL sets are built from information we glean while doing
3417 this value numbering, since the AVAIL sets contain only one entry per
3418 value.
3420 AVAIL_IN[BLOCK] = AVAIL_OUT[dom(BLOCK)].
3421 AVAIL_OUT[BLOCK] = AVAIL_IN[BLOCK] U PHI_GEN[BLOCK] U TMP_GEN[BLOCK]. */
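/* Because AVAIL_IN[BLOCK] is simply AVAIL_OUT of BLOCK's immediate
   dominator, the whole computation is a single walk over the dominator
   tree and needs no fixpoint iteration.  A minimal sketch of that shape
   on toy sets encoded as unsigned masks (illustrative only; avail[],
   phi_gen[], tmp_gen[], n_dom_children[] and dom_children[][] are
   hypothetical stand-ins, and the real code below uses an explicit
   worklist instead of recursion and builds PHI_GEN/TMP_GEN as it value
   numbers the statements):

     static void
     toy_compute_avail (int b, unsigned avail_in)
     {
       unsigned avail_out = avail_in | phi_gen[b] | tmp_gen[b];
       int i;

       avail[b] = avail_out;
       for (i = 0; i < n_dom_children[b]; i++)
         toy_compute_avail (dom_children[b][i], avail_out);
     }
*/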
3423 static void
3424 compute_avail (void)
3426 basic_block block, son;
3427 basic_block *worklist;
3428 size_t sp = 0;
3429 tree param;
3431 /* For arguments with default definitions, we pretend they are
3432 defined in the entry block. */
3433 for (param = DECL_ARGUMENTS (current_function_decl);
3434 param;
3435 param = TREE_CHAIN (param))
3437 if (gimple_default_def (cfun, param) != NULL)
3439 tree def = gimple_default_def (cfun, param);
3441 vn_lookup_or_add (def);
3442 if (!in_fre)
3444 bitmap_insert_into_set (TMP_GEN (ENTRY_BLOCK_PTR), def);
3445 bitmap_value_insert_into_set (maximal_set, def);
3447 bitmap_value_insert_into_set (AVAIL_OUT (ENTRY_BLOCK_PTR), def);
3451 /* Likewise for the static chain decl. */
3452 if (cfun->static_chain_decl)
3454 param = cfun->static_chain_decl;
3455 if (gimple_default_def (cfun, param) != NULL)
3457 tree def = gimple_default_def (cfun, param);
3459 vn_lookup_or_add (def);
3460 if (!in_fre)
3462 bitmap_insert_into_set (TMP_GEN (ENTRY_BLOCK_PTR), def);
3463 bitmap_value_insert_into_set (maximal_set, def);
3465 bitmap_value_insert_into_set (AVAIL_OUT (ENTRY_BLOCK_PTR), def);
3469 /* Allocate the worklist. */
3470 worklist = XNEWVEC (basic_block, n_basic_blocks);
3472 /* Seed the algorithm by putting the dominator children of the entry
3473 block on the worklist. */
3474 for (son = first_dom_son (CDI_DOMINATORS, ENTRY_BLOCK_PTR);
3475 son;
3476 son = next_dom_son (CDI_DOMINATORS, son))
3477 worklist[sp++] = son;
3479 /* Loop until the worklist is empty. */
3480 while (sp)
3482 block_stmt_iterator bsi;
3483 tree stmt, phi;
3484 basic_block dom;
3485 unsigned int stmt_uid = 1;
3487 /* Pick a block from the worklist. */
3488 block = worklist[--sp];
3490 /* Initially, the set of available values in BLOCK is that of
3491 its immediate dominator. */
3492 dom = get_immediate_dominator (CDI_DOMINATORS, block);
3493 if (dom)
3494 bitmap_set_copy (AVAIL_OUT (block), AVAIL_OUT (dom));
3496 /* Generate values for PHI nodes. */
3497 for (phi = phi_nodes (block); phi; phi = PHI_CHAIN (phi))
3498 make_values_for_phi (phi, block);
3500 /* Now compute value numbers and populate value sets with all
3501 the expressions computed in BLOCK. */
3502 for (bsi = bsi_start (block); !bsi_end_p (bsi); bsi_next (&bsi))
3504 stmt_ann_t ann;
3505 ssa_op_iter iter;
3506 tree op;
3508 stmt = bsi_stmt (bsi);
3509 ann = stmt_ann (stmt);
3511 ann->uid = stmt_uid++;
3513 /* For regular value numbering, we are only interested in
3514 assignments of the form X_i = EXPR, where EXPR represents
3515 an "interesting" computation, has no volatile operands,
3516 and X_i doesn't flow through an abnormal edge. */
3517 if (TREE_CODE (stmt) == RETURN_EXPR
3518 && !ann->has_volatile_ops)
3520 tree realstmt = stmt;
3521 tree lhs;
3522 tree rhs;
3524 stmt = TREE_OPERAND (stmt, 0);
3525 if (stmt && TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
3527 lhs = GIMPLE_STMT_OPERAND (stmt, 0);
3528 rhs = GIMPLE_STMT_OPERAND (stmt, 1);
3529 if (TREE_CODE (lhs) == SSA_NAME
3530 && is_gimple_min_invariant (VN_INFO (lhs)->valnum))
3532 if (dump_file && (dump_flags & TDF_DETAILS))
3534 fprintf (dump_file, "SCCVN says ");
3535 print_generic_expr (dump_file, lhs, 0);
3536 fprintf (dump_file, " value numbers to ");
3537 print_generic_stmt (dump_file, VN_INFO (lhs)->valnum,
3540 vn_add (lhs, VN_INFO (lhs)->valnum);
3541 continue;
3544 if (TREE_CODE (rhs) == SSA_NAME)
3545 add_to_exp_gen (block, rhs);
3547 FOR_EACH_SSA_TREE_OPERAND (op, realstmt, iter, SSA_OP_DEF)
3548 add_to_sets (op, op, NULL, TMP_GEN (block),
3549 AVAIL_OUT (block));
3551 continue;
3554 else if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
3555 && !ann->has_volatile_ops
3556 && TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 0)) == SSA_NAME
3557 && (!SSA_NAME_OCCURS_IN_ABNORMAL_PHI
3558 (GIMPLE_STMT_OPERAND (stmt, 0)))
3559 && !tree_could_throw_p (stmt))
3561 if (make_values_for_stmt (stmt, block))
3562 continue;
3566 /* For any other statement that we don't recognize, simply
3567 make the names generated by the statement available in
3568 AVAIL_OUT and TMP_GEN. */
3569 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF)
3570 add_to_sets (op, op, NULL, TMP_GEN (block), AVAIL_OUT (block));
3572 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
3574 add_to_sets (op, op, NULL, NULL , AVAIL_OUT (block));
3575 if (TREE_CODE (op) == SSA_NAME || can_PRE_operation (op))
3576 add_to_exp_gen (block, op);
3580 /* Put the dominator children of BLOCK on the worklist of blocks
3581 to compute available sets for. */
3582 for (son = first_dom_son (CDI_DOMINATORS, block);
3583 son;
3584 son = next_dom_son (CDI_DOMINATORS, son))
3585 worklist[sp++] = son;
3588 free (worklist);
3592 /* Eliminate fully redundant computations. */
3594 static void
3595 eliminate (void)
3597 basic_block b;
3599 FOR_EACH_BB (b)
3601 block_stmt_iterator i;
3603 for (i = bsi_start (b); !bsi_end_p (i); bsi_next (&i))
3605 tree stmt = bsi_stmt (i);
3607 /* Lookup the RHS of the expression, see if we have an
3608 available computation for it. If so, replace the RHS with
3609 the available computation. */
3610 if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
3611 && TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 0)) == SSA_NAME
3612 && TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1)) != SSA_NAME
3613 && !is_gimple_min_invariant (GIMPLE_STMT_OPERAND (stmt, 1))
3614 && !stmt_ann (stmt)->has_volatile_ops)
3616 tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
3617 tree *rhs_p = &GIMPLE_STMT_OPERAND (stmt, 1);
3618 tree sprime;
3620 sprime = bitmap_find_leader (AVAIL_OUT (b),
3621 get_value_handle (lhs));
3623 if (sprime
3624 && sprime != lhs
3625 && (TREE_CODE (*rhs_p) != SSA_NAME
3626 || may_propagate_copy (*rhs_p, sprime)))
3628 gcc_assert (sprime != *rhs_p);
3630 if (dump_file && (dump_flags & TDF_DETAILS))
3632 fprintf (dump_file, "Replaced ");
3633 print_generic_expr (dump_file, *rhs_p, 0);
3634 fprintf (dump_file, " with ");
3635 print_generic_expr (dump_file, sprime, 0);
3636 fprintf (dump_file, " in ");
3637 print_generic_stmt (dump_file, stmt, 0);
3640 if (TREE_CODE (sprime) == SSA_NAME)
3641 NECESSARY (SSA_NAME_DEF_STMT (sprime)) = 1;
3642 /* We need to make sure the new and old types actually match,
3643 which may require adding a simple cast, which fold_convert
3644 will do for us. */
3645 if (TREE_CODE (*rhs_p) != SSA_NAME
3646 && !useless_type_conversion_p (TREE_TYPE (*rhs_p),
3647 TREE_TYPE (sprime)))
3648 sprime = fold_convert (TREE_TYPE (*rhs_p), sprime);
3650 pre_stats.eliminations++;
3651 propagate_tree_value (rhs_p, sprime);
3652 update_stmt (stmt);
3654 /* If we removed EH side effects from the statement, clean
3655 its EH information. */
3656 if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
3658 bitmap_set_bit (need_eh_cleanup,
3659 bb_for_stmt (stmt)->index);
3660 if (dump_file && (dump_flags & TDF_DETAILS))
3661 fprintf (dump_file, " Removed EH side effects.\n");
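/* Continuing the worked example given before do_regular_insertion: once
   insert () has made a_3 + b_4 fully redundant at the join block, the
   loop above finds prephitmp_7 as the AVAIL_OUT leader for y_5's value
   and rewrites

     y_5 = a_3 + b_4;

   into

     y_5 = prephitmp_7;

   (names invented for the example).  Each such replacement is counted in
   pre_stats.eliminations, and any EH information that became stale is
   cleaned up via need_eh_cleanup as above.  */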
3669 /* Borrow a bit of tree-ssa-dce.c for the moment.
3670 XXX: In 4.1, we should be able to just run a DCE pass after PRE, though
3671 this may be a bit faster, and we may want critical edges kept split. */
3673 /* If OP's defining statement has not already been determined to be necessary,
3674 mark that statement necessary. Return the stmt, if it is newly
3675 necessary. */
3677 static inline tree
3678 mark_operand_necessary (tree op)
3680 tree stmt;
3682 gcc_assert (op);
3684 if (TREE_CODE (op) != SSA_NAME)
3685 return NULL;
3687 stmt = SSA_NAME_DEF_STMT (op);
3688 gcc_assert (stmt);
3690 if (NECESSARY (stmt)
3691 || IS_EMPTY_STMT (stmt))
3692 return NULL;
3694 NECESSARY (stmt) = 1;
3695 return stmt;
3698 /* Because we don't follow exactly the standard PRE algorithm, and decide not
3699 to insert PHI nodes sometimes, and because value numbering of casts isn't
3700 perfect, we sometimes end up inserting dead code. This simple DCE-like
3701 pass removes any insertions we made that weren't actually used. */
3703 static void
3704 remove_dead_inserted_code (void)
3706 VEC(tree,heap) *worklist = NULL;
3707 int i;
3708 tree t;
3710 worklist = VEC_alloc (tree, heap, VEC_length (tree, inserted_exprs));
3711 for (i = 0; VEC_iterate (tree, inserted_exprs, i, t); i++)
3713 if (NECESSARY (t))
3714 VEC_quick_push (tree, worklist, t);
3716 while (VEC_length (tree, worklist) > 0)
3718 t = VEC_pop (tree, worklist);
3720 /* PHI nodes are somewhat special in that each PHI alternative has
3721 data and control dependencies. All the statements feeding the
3722 PHI node's arguments are always necessary. */
3723 if (TREE_CODE (t) == PHI_NODE)
3725 int k;
3727 VEC_reserve (tree, heap, worklist, PHI_NUM_ARGS (t));
3728 for (k = 0; k < PHI_NUM_ARGS (t); k++)
3730 tree arg = PHI_ARG_DEF (t, k);
3731 if (TREE_CODE (arg) == SSA_NAME)
3733 arg = mark_operand_necessary (arg);
3734 if (arg)
3735 VEC_quick_push (tree, worklist, arg);
3739 else
3741 /* Propagate through the operands. Examine all the USE, VUSE and
3742 VDEF operands in this statement. Mark all the statements
3743 which feed this statement's uses as necessary. */
3744 ssa_op_iter iter;
3745 tree use;
3747 /* The operands of VDEF expressions are also needed as they
3748 represent potential definitions that may reach this
3749 statement (VDEF operands allow us to follow def-def
3750 links). */
3752 FOR_EACH_SSA_TREE_OPERAND (use, t, iter, SSA_OP_ALL_USES)
3754 tree n = mark_operand_necessary (use);
3755 if (n)
3756 VEC_safe_push (tree, heap, worklist, n);
3761 for (i = 0; VEC_iterate (tree, inserted_exprs, i, t); i++)
3763 if (!NECESSARY (t))
3765 block_stmt_iterator bsi;
3767 if (dump_file && (dump_flags & TDF_DETAILS))
3769 fprintf (dump_file, "Removing unnecessary insertion:");
3770 print_generic_stmt (dump_file, t, 0);
3773 if (TREE_CODE (t) == PHI_NODE)
3775 remove_phi_node (t, NULL, true);
3777 else
3779 bsi = bsi_for_stmt (t);
3780 bsi_remove (&bsi, true);
3781 release_defs (t);
3785 VEC_free (tree, heap, worklist);
3788 /* Initialize data structures used by PRE. */
3790 static void
3791 init_pre (bool do_fre)
3793 basic_block bb;
3795 next_expression_id = 0;
3796 expressions = NULL;
3797 expression_vuses = NULL;
3798 in_fre = do_fre;
3800 inserted_exprs = NULL;
3801 need_creation = NULL;
3802 pretemp = NULL_TREE;
3803 storetemp = NULL_TREE;
3804 prephitemp = NULL_TREE;
3806 if (!do_fre)
3807 loop_optimizer_init (LOOPS_NORMAL);
3809 connect_infinite_loops_to_exit ();
3810 memset (&pre_stats, 0, sizeof (pre_stats));
3813 postorder = XNEWVEC (int, n_basic_blocks - NUM_FIXED_BLOCKS);
3814 post_order_compute (postorder, false, false);
3816 FOR_ALL_BB (bb)
3817 bb->aux = xcalloc (1, sizeof (struct bb_bitmap_sets));
3819 calculate_dominance_info (CDI_POST_DOMINATORS);
3820 calculate_dominance_info (CDI_DOMINATORS);
3822 bitmap_obstack_initialize (&grand_bitmap_obstack);
3823 phi_translate_table = htab_create (5110, expr_pred_trans_hash,
3824 expr_pred_trans_eq, free);
3825 seen_during_translate = BITMAP_ALLOC (&grand_bitmap_obstack);
3826 bitmap_set_pool = create_alloc_pool ("Bitmap sets",
3827 sizeof (struct bitmap_set), 30);
3828 binary_node_pool = create_alloc_pool ("Binary tree nodes",
3829 tree_code_size (PLUS_EXPR), 30);
3830 unary_node_pool = create_alloc_pool ("Unary tree nodes",
3831 tree_code_size (NEGATE_EXPR), 30);
3832 reference_node_pool = create_alloc_pool ("Reference tree nodes",
3833 tree_code_size (ARRAY_REF), 30);
3834 comparison_node_pool = create_alloc_pool ("Comparison tree nodes",
3835 tree_code_size (EQ_EXPR), 30);
3836 modify_expr_node_pool = create_alloc_pool ("GIMPLE_MODIFY_STMT nodes",
3837 tree_code_size (GIMPLE_MODIFY_STMT),
3838 30);
3839 obstack_init (&temp_call_expr_obstack);
3840 modify_expr_template = NULL;
3842 FOR_ALL_BB (bb)
3844 EXP_GEN (bb) = bitmap_set_new ();
3845 PHI_GEN (bb) = bitmap_set_new ();
3846 TMP_GEN (bb) = bitmap_set_new ();
3847 AVAIL_OUT (bb) = bitmap_set_new ();
3849 maximal_set = in_fre ? NULL : bitmap_set_new ();
3851 need_eh_cleanup = BITMAP_ALLOC (NULL);
3855 /* Deallocate data structures used by PRE. */
3857 static void
3858 fini_pre (void)
3860 basic_block bb;
3861 unsigned int i;
3863 free (postorder);
3864 VEC_free (tree, heap, inserted_exprs);
3865 VEC_free (tree, heap, need_creation);
3866 bitmap_obstack_release (&grand_bitmap_obstack);
3867 free_alloc_pool (bitmap_set_pool);
3868 free_alloc_pool (binary_node_pool);
3869 free_alloc_pool (reference_node_pool);
3870 free_alloc_pool (unary_node_pool);
3871 free_alloc_pool (comparison_node_pool);
3872 free_alloc_pool (modify_expr_node_pool);
3873 htab_delete (phi_translate_table);
3874 remove_fake_exit_edges ();
3876 FOR_ALL_BB (bb)
3878 free (bb->aux);
3879 bb->aux = NULL;
3882 free_dominance_info (CDI_POST_DOMINATORS);
3884 if (!bitmap_empty_p (need_eh_cleanup))
3886 tree_purge_all_dead_eh_edges (need_eh_cleanup);
3887 cleanup_tree_cfg ();
3890 BITMAP_FREE (need_eh_cleanup);
3892 /* Wipe out pointers to VALUE_HANDLEs. In the not terribly distant
3893 future we will want them to be persistent though. */
3894 for (i = 0; i < num_ssa_names; i++)
3896 tree name = ssa_name (i);
3898 if (!name)
3899 continue;
3901 if (SSA_NAME_VALUE (name)
3902 && TREE_CODE (SSA_NAME_VALUE (name)) == VALUE_HANDLE)
3903 SSA_NAME_VALUE (name) = NULL;
3905 if (current_loops != NULL)
3906 loop_optimizer_finalize ();
3909 /* Main entry point to the SSA-PRE pass. DO_FRE is true if the caller
3910 only wants to do full redundancy elimination. */
3912 static void
3913 execute_pre (bool do_fre)
3916 do_partial_partial = optimize > 2;
3917 init_pre (do_fre);
3919 if (!do_fre)
3920 insert_fake_stores ();
3922 /* Collect and value number expressions computed in each basic block. */
3923 if (!run_scc_vn ())
3925 if (!do_fre)
3926 remove_dead_inserted_code ();
3927 fini_pre ();
3928 return;
3930 switch_to_PRE_table ();
3931 compute_avail ();
3933 if (dump_file && (dump_flags & TDF_DETAILS))
3935 basic_block bb;
3937 FOR_ALL_BB (bb)
3939 print_bitmap_set (dump_file, EXP_GEN (bb), "exp_gen", bb->index);
3940 print_bitmap_set (dump_file, TMP_GEN (bb), "tmp_gen",
3941 bb->index);
3942 print_bitmap_set (dump_file, AVAIL_OUT (bb), "avail_out",
3943 bb->index);
3947 /* Insert can get quite slow on an incredibly large number of basic
3948 blocks due to some quadratic behavior. Until this behavior is
3949 fixed, don't run it when we have an incredibly large number of
3950 bb's. If we aren't going to run insert, there is no point in
3951 computing ANTIC, either, even though it's plenty fast. */
3952 if (!do_fre && n_basic_blocks < 4000)
3954 compute_antic ();
3955 insert ();
3958 /* Remove all the redundant expressions. */
3959 eliminate ();
3961 if (dump_file && (dump_flags & TDF_STATS))
3963 fprintf (dump_file, "Insertions: %d\n", pre_stats.insertions);
3964 fprintf (dump_file, "PA inserted: %d\n", pre_stats.pa_insert);
3965 fprintf (dump_file, "New PHIs: %d\n", pre_stats.phis);
3966 fprintf (dump_file, "Eliminated: %d\n", pre_stats.eliminations);
3967 fprintf (dump_file, "Constified: %d\n", pre_stats.constified);
3969 bsi_commit_edge_inserts ();
3971 free_scc_vn ();
3972 clear_expression_ids ();
3973 if (!do_fre)
3975 remove_dead_inserted_code ();
3976 realify_fake_stores ();
3979 fini_pre ();
3982 /* Gate and execute functions for PRE. */
3984 static unsigned int
3985 do_pre (void)
3987 execute_pre (false);
3988 return TODO_rebuild_alias;
3991 static bool
3992 gate_pre (void)
3994 return flag_tree_pre != 0;
3997 struct tree_opt_pass pass_pre =
3999 "pre", /* name */
4000 gate_pre, /* gate */
4001 do_pre, /* execute */
4002 NULL, /* sub */
4003 NULL, /* next */
4004 0, /* static_pass_number */
4005 TV_TREE_PRE, /* tv_id */
4006 PROP_no_crit_edges | PROP_cfg
4007 | PROP_ssa | PROP_alias, /* properties_required */
4008 0, /* properties_provided */
4009 0, /* properties_destroyed */
4010 0, /* todo_flags_start */
4011 TODO_update_ssa_only_virtuals | TODO_dump_func | TODO_ggc_collect
4012 | TODO_verify_ssa, /* todo_flags_finish */
4013 0 /* letter */
4017 /* Gate and execute functions for FRE. */
4019 static unsigned int
4020 execute_fre (void)
4022 execute_pre (true);
4023 return 0;
4026 static bool
4027 gate_fre (void)
4029 return flag_tree_fre != 0;
4032 struct tree_opt_pass pass_fre =
4034 "fre", /* name */
4035 gate_fre, /* gate */
4036 execute_fre, /* execute */
4037 NULL, /* sub */
4038 NULL, /* next */
4039 0, /* static_pass_number */
4040 TV_TREE_FRE, /* tv_id */
4041 PROP_cfg | PROP_ssa | PROP_alias, /* properties_required */
4042 0, /* properties_provided */
4043 0, /* properties_destroyed */
4044 0, /* todo_flags_start */
4045 TODO_dump_func | TODO_ggc_collect | TODO_verify_ssa, /* todo_flags_finish */
4046 0 /* letter */