/* SSA-PRE for trees.
   Copyright (C) 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org> and Steven Bosscher
   <stevenb@suse.de>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 51 Franklin Street, Fifth Floor,
Boston, MA 02110-1301, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "ggc.h"
#include "tree.h"
#include "basic-block.h"
#include "diagnostic.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "tree-gimple.h"
#include "tree-dump.h"
#include "timevar.h"
#include "fibheap.h"
#include "hashtab.h"
#include "tree-iterator.h"
#include "real.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "flags.h"
#include "bitmap.h"
#include "langhooks.h"
#include "cfgloop.h"
/* TODO:

   1. Avail sets can be shared by making an avail_find_leader that
      walks up the dominator tree and looks in those avail sets.
      This might affect code optimality, it's unclear right now.
   2. Strength reduction can be performed by anticipating expressions
      we can repair later on.
   3. We can do back-substitution or smarter value numbering to catch
      commutative expressions split up over multiple statements.
   4. ANTIC_SAFE_LOADS could be a lot smarter than it is now.
      Right now, it is simply calculating loads that occur before
      any store in a block, instead of loads that occur before
      stores that affect them.  This is relatively more expensive, and
      it's not clear how much more it will buy us.  */

/* For ease of terminology, "expression node" in the below refers to
   every expression node but GIMPLE_MODIFY_STMT, because GIMPLE_MODIFY_STMTs
   represent the actual statement containing the expressions we care about,
   and we cache the value number by putting it in the expression.  */
/* Basic algorithm

   First we walk the statements to generate the AVAIL sets, the
   EXP_GEN sets, and the tmp_gen sets.  EXP_GEN sets represent the
   generation of values/expressions by a given block.  We use them
   when computing the ANTIC sets.  The AVAIL sets consist of
   SSA_NAME's that represent values, so we know what values are
   available in what blocks.  AVAIL is a forward dataflow problem.  In
   SSA, values are never killed, so we don't need a kill set, or a
   fixpoint iteration, in order to calculate the AVAIL sets.  In
   traditional parlance, AVAIL sets tell us the downsafety of the
   expressions/values.

   Next, we generate the ANTIC sets.  These sets represent the
   anticipatable expressions.  ANTIC is a backwards dataflow
   problem.  An expression is anticipatable in a given block if it could
   be generated in that block.  This means that if we had to perform
   an insertion in that block, of the value of that expression, we
   could.  Calculating the ANTIC sets requires phi translation of
   expressions, because the flow goes backwards through phis.  We must
   iterate to a fixpoint of the ANTIC sets, because we have a kill
   set.  Even in SSA form, values are not live over the entire
   function, only from their definition point onwards.  So we have to
   remove values from the ANTIC set once we go past the definition
   point of the leaders that make them up.
   compute_antic/compute_antic_aux performs this computation.

   Third, we perform insertions to make partially redundant
   expressions fully redundant.

   An expression is partially redundant (excluding partial
   anticipation) if:

   1. It is AVAIL in some, but not all, of the predecessors of a
      given block.
   2. It is ANTIC in all the predecessors.

   In order to make it fully redundant, we insert the expression into
   the predecessors where it is not available, but is ANTIC.

   For the partial anticipation case, we only perform insertion if it
   is partially anticipated in some block, and fully available in all
   of the predecessors.

   insert/insert_aux/do_regular_insertion/do_partial_partial_insertion
   performs these steps.

   Fourth, we eliminate fully redundant expressions.
   This is a simple statement walk that replaces redundant
   calculations with the now available values.  */
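
/* A worked example of the above (purely illustrative; the names are
   made up, not taken from any testcase):

     if (cond)
       x_1 = a_2 + b_3;    <- a_2 + b_3 is AVAIL out of this arm
     y_4 = a_2 + b_3;      <- partially redundant: AVAIL in one
                              predecessor, ANTIC in both

   Insertion places t_5 = a_2 + b_3 on the arm where the expression
   was not available, after which elimination can rewrite y_4's
   computation to use a PHI of x_1 and t_5, so a + b is computed at
   most once on every path.  */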
/* Representations of value numbers:

   Value numbers are represented using the "value handle" approach.
   This means that each SSA_NAME (and for other reasons to be
   disclosed in a moment, expression nodes) has a value handle that
   can be retrieved through get_value_handle.  This value handle *is*
   the value number of the SSA_NAME.  You can pointer compare the
   value handles for equivalence purposes.

   For debugging reasons, the value handle is internally more than
   just a number, it is a VALUE_HANDLE named "VH.x", where x is a
   unique number for each value number in use.  This allows
   expressions with SSA_NAMES replaced by value handles to still be
   pretty printed in a sane way.  They simply print as "VH.3 *
   VH.5", etc.

   Expression nodes have value handles associated with them as a
   cache.  Otherwise, we'd have to look them up again in the hash
   table.  This makes a significant difference (factor of two or more)
   on some test cases.  They can be thrown away after the pass is
   finished.  */
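
/* For instance (an illustrative sketch; the handle numbers are made
   up): after value numbering

     a_5 = b_1 + 1;
     c_9 = b_1 + 1;

   get_value_handle (a_5) and get_value_handle (c_9) return the same
   VALUE_HANDLE, say VH.2, and the expression "b_1 + 1" is cached with
   that same handle, so a single pointer comparison of the handles
   establishes that a_5 and c_9 compute the same value.  */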
/* Representation of expressions on value numbers:

   In some portions of this code, you will notice we allocate "fake"
   analogues to the expression we are value numbering, and replace the
   operands with the values of the expression.  Since we work on
   values, and not just names, we canonicalize expressions to value
   expressions for use in the ANTIC sets, the EXP_GEN set, etc.

   This is theoretically unnecessary; it just saves a bunch of
   repeated get_value_handle and find_leader calls in the remainder of
   the code, trading off temporary memory usage for speed.  The tree
   nodes aren't actually creating more garbage, since they are
   allocated in special pools which are thrown away at the end of
   this pass.

   All of this also means that if you print the EXP_GEN or ANTIC sets,
   you will see "VH.5 + VH.7" in the set, instead of "a_55 +
   b_66" or something.  The only thing that actually cares about
   seeing the value leaders is phi translation, and it needs to be
   able to find the leader for a value in an arbitrary block, so this
   "value expression" form is perfect for it (otherwise you'd do
   get_value_handle->find_leader->translate->get_value_handle->find_leader).  */

/* Representation of sets:

   There are currently two types of sets used, hopefully to be unified soon.
   The AVAIL sets do not need to be sorted in any particular order,
   and thus, are simply represented as two bitmaps, one that keeps
   track of values present in the set, and one that keeps track of
   expressions present in the set.

   The other sets are represented as doubly linked lists kept in topological
   order, with an optional supporting bitmap of values present in the
   set.  The sets represent values, and the elements can be values or
   expressions.  The elements can appear in different sets, but each
   element can only appear once in each set.

   Since each node in the set represents a value, we also want to be
   able to map expression, set pairs to something that tells us
   whether the value is present in a set.  We use a per-set bitmap for
   that.  The value handles also point to a linked list of the
   expressions they represent via a tree annotation.  This is mainly
   useful only for debugging, since we don't do identity lookups.  */
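
/* Concretely (illustrative): if the expressions with ids 5 and 9 both
   have value VH.3, and a set's expressions bitmap has bit 9 set, then
   that set's values bitmap has bit 3 set.  Testing whether a value is
   in the set (bitmap_set_contains_value below) is then a single bit
   test instead of a walk over every expression in the set.  */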
/* Next global expression id number.  */
static unsigned int next_expression_id;

/* Mapping from expression to id number we can use in bitmap sets.  */
static VEC(tree, heap) *expressions;

/* Allocate an expression id for EXPR.  */

static inline unsigned int
alloc_expression_id (tree expr)
{
  tree_ann_common_t ann;

  ann = get_tree_common_ann (expr);

  /* Make sure we won't overflow.  */
  gcc_assert (next_expression_id + 1 > next_expression_id);

  ann->aux = XNEW (unsigned int);
  * ((unsigned int *)ann->aux) = next_expression_id++;
  VEC_safe_push (tree, heap, expressions, expr);
  return next_expression_id - 1;
}

/* Return the expression id for tree EXPR.  */

static inline unsigned int
get_expression_id (tree expr)
{
  tree_ann_common_t ann = tree_common_ann (expr);
  gcc_assert (ann);
  gcc_assert (ann->aux);

  return *((unsigned int *)ann->aux);
}

/* Return the existing expression id for EXPR, or create one if one
   does not exist yet.  */

static inline unsigned int
get_or_alloc_expression_id (tree expr)
{
  tree_ann_common_t ann = tree_common_ann (expr);

  if (ann == NULL || !ann->aux)
    return alloc_expression_id (expr);

  return get_expression_id (expr);
}

/* Return the expression that has expression id ID.  */

static inline tree
expression_for_id (unsigned int id)
{
  return VEC_index (tree, expressions, id);
}

/* Free the expression id field in all of our expressions,
   and then destroy the expressions array.  */

static void
clear_expression_ids (void)
{
  int i;
  tree expr;

  for (i = 0; VEC_iterate (tree, expressions, i, expr); i++)
    {
      free (tree_common_ann (expr)->aux);
      tree_common_ann (expr)->aux = NULL;
    }
  VEC_free (tree, heap, expressions);
}
static bool in_fre = false;

/* An unordered bitmap set.  One bitmap tracks values, the other,
   expressions.  */
typedef struct bitmap_set
{
  bitmap expressions;
  bitmap values;
} *bitmap_set_t;

#define FOR_EACH_EXPR_ID_IN_SET(set, id, bi)		\
  EXECUTE_IF_SET_IN_BITMAP(set->expressions, 0, id, bi)

/* Sets that we need to keep track of.  */
typedef struct bb_bitmap_sets
{
  /* The EXP_GEN set, which represents expressions/values generated in
     a basic block.  */
  bitmap_set_t exp_gen;

  /* The PHI_GEN set, which represents PHI results generated in a
     basic block.  */
  bitmap_set_t phi_gen;

  /* The TMP_GEN set, which represents results/temporaries generated
     in a basic block.  IE the LHS of an expression.  */
  bitmap_set_t tmp_gen;

  /* The AVAIL_OUT set, which represents which values are available in
     a given basic block.  */
  bitmap_set_t avail_out;

  /* The ANTIC_IN set, which represents which values are anticipatable
     in a given basic block.  */
  bitmap_set_t antic_in;

  /* The PA_IN set, which represents which values are
     partially anticipatable in a given basic block.  */
  bitmap_set_t pa_in;

  /* The NEW_SETS set, which is used during insertion to augment the
     AVAIL_OUT set of blocks with the new insertions performed during
     the current iteration.  */
  bitmap_set_t new_sets;

  /* The RVUSE sets, which are used during ANTIC computation to ensure
     that we don't mark loads ANTIC once they have died.  */
  bitmap rvuse_in;
  bitmap rvuse_out;
  bitmap rvuse_gen;
  bitmap rvuse_kill;

  /* For actually occurring loads, as long as they occur before all the
     other stores in the block, we know they are antic at the top of
     the block, regardless of RVUSE_KILL.  */
  bitmap_set_t antic_safe_loads;

  /* True if we have visited this block during ANTIC calculation.  */
  unsigned int visited : 1;

  /* True if we have deferred processing this block during ANTIC
     calculation until its successor is processed.  */
  unsigned int deferred : 1;
} *bb_value_sets_t;

#define EXP_GEN(BB)	((bb_value_sets_t) ((BB)->aux))->exp_gen
#define PHI_GEN(BB)	((bb_value_sets_t) ((BB)->aux))->phi_gen
#define TMP_GEN(BB)	((bb_value_sets_t) ((BB)->aux))->tmp_gen
#define AVAIL_OUT(BB)	((bb_value_sets_t) ((BB)->aux))->avail_out
#define ANTIC_IN(BB)	((bb_value_sets_t) ((BB)->aux))->antic_in
#define PA_IN(BB)	((bb_value_sets_t) ((BB)->aux))->pa_in
#define RVUSE_IN(BB)	((bb_value_sets_t) ((BB)->aux))->rvuse_in
#define RVUSE_GEN(BB)	((bb_value_sets_t) ((BB)->aux))->rvuse_gen
#define RVUSE_KILL(BB)	((bb_value_sets_t) ((BB)->aux))->rvuse_kill
#define RVUSE_OUT(BB)	((bb_value_sets_t) ((BB)->aux))->rvuse_out
#define NEW_SETS(BB)	((bb_value_sets_t) ((BB)->aux))->new_sets
#define ANTIC_SAFE_LOADS(BB) ((bb_value_sets_t) ((BB)->aux))->antic_safe_loads
#define BB_VISITED(BB)	((bb_value_sets_t) ((BB)->aux))->visited
#define BB_DEFERRED(BB) ((bb_value_sets_t) ((BB)->aux))->deferred

/* Maximal set of values, used to initialize the ANTIC problem, which
   is an intersection problem.  */
static bitmap_set_t maximal_set;

/* Basic block list in postorder.  */
static int *postorder;
/* This structure is used to keep track of statistics on what
   optimization PRE was able to perform.  */
static struct
{
  /* The number of RHS computations eliminated by PRE.  */
  int eliminations;

  /* The number of new expressions/temporaries generated by PRE.  */
  int insertions;

  /* The number of inserts found due to partial anticipation.  */
  int pa_insert;

  /* The number of new PHI nodes added by PRE.  */
  int phis;

  /* The number of values found constant.  */
  int constified;

} pre_stats;

static bool do_partial_partial;
static tree bitmap_find_leader (bitmap_set_t, tree);
static void bitmap_value_insert_into_set (bitmap_set_t, tree);
static void bitmap_value_replace_in_set (bitmap_set_t, tree);
static void bitmap_set_copy (bitmap_set_t, bitmap_set_t);
static bool bitmap_set_contains_value (bitmap_set_t, tree);
static void bitmap_insert_into_set (bitmap_set_t, tree);
static bitmap_set_t bitmap_set_new (void);
static bool is_undefined_value (tree);
static tree create_expression_by_pieces (basic_block, tree, tree);
static tree find_or_generate_expression (basic_block, tree, tree);

/* We can add and remove elements and entries to and from sets
   and hash tables, so we use alloc pools for them.  */

static alloc_pool bitmap_set_pool;
static alloc_pool binary_node_pool;
static alloc_pool unary_node_pool;
static alloc_pool reference_node_pool;
static alloc_pool comparison_node_pool;
static alloc_pool expression_node_pool;
static alloc_pool list_node_pool;
static alloc_pool modify_expr_node_pool;
static bitmap_obstack grand_bitmap_obstack;

/* To avoid adding 300 temporary variables when we only need one, we
   only create one temporary variable, on demand, and build ssa names
   off that.  We do have to change the variable if the types don't
   match the current variable's type.  */
static tree pretemp;
static tree storetemp;
static tree mergephitemp;
static tree prephitemp;

/* Set of blocks with statements that have had their EH information
   cleaned up.  */
static bitmap need_eh_cleanup;

/* The phi_translate_table caches phi translations for a given
   expression and predecessor.  */

static htab_t phi_translate_table;
/* A three tuple {e, pred, v} used to cache phi translations in the
   phi_translate_table.  */

typedef struct expr_pred_trans_d
{
  /* The expression.  */
  tree e;

  /* The predecessor block along which we translated the expression.  */
  basic_block pred;

  /* vuses associated with the expression.  */
  VEC (tree, gc) *vuses;

  /* The value that resulted from the translation.  */
  tree v;

  /* The hashcode for the expression, pred pair.  This is cached for
     speed reasons.  */
  hashval_t hashcode;
} *expr_pred_trans_t;
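
/* So, for example (illustrative), the result of translating the value
   expression "VH.5 + VH.7" through the phis of a block along the edge
   from predecessor bb2 is cached under the key {VH.5 + VH.7, bb2,
   vuses}; a later phi_trans_lookup with the same expression,
   predecessor and vuses returns the cached v without redoing the
   translation.  */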
/* Return the hash value for a phi translation table entry.  */

static hashval_t
expr_pred_trans_hash (const void *p)
{
  const expr_pred_trans_t ve = (expr_pred_trans_t) p;
  return ve->hashcode;
}

/* Return true if two phi translation table entries are the same.
   P1 and P2 should point to the expr_pred_trans_t's to be compared.  */

static int
expr_pred_trans_eq (const void *p1, const void *p2)
{
  const expr_pred_trans_t ve1 = (expr_pred_trans_t) p1;
  const expr_pred_trans_t ve2 = (expr_pred_trans_t) p2;
  basic_block b1 = ve1->pred;
  basic_block b2 = ve2->pred;
  int i;
  tree vuse1;

  /* If they are not translations for the same basic block, they can't
     be equal.  */
  if (b1 != b2)
    return false;

  /* If they are for the same basic block, determine if the
     expressions are equal.  */
  if (!expressions_equal_p (ve1->e, ve2->e))
    return false;

  /* Make sure the vuses are equivalent.  */
  if (ve1->vuses == ve2->vuses)
    return true;

  if (VEC_length (tree, ve1->vuses) != VEC_length (tree, ve2->vuses))
    return false;

  for (i = 0; VEC_iterate (tree, ve1->vuses, i, vuse1); i++)
    {
      if (VEC_index (tree, ve2->vuses, i) != vuse1)
	return false;
    }

  return true;
}

/* Search in the phi translation table for the translation of
   expression E in basic block PRED with vuses VUSES.
   Return the translated value, if found, NULL otherwise.  */

static inline tree
phi_trans_lookup (tree e, basic_block pred, VEC (tree, gc) *vuses)
{
  void **slot;
  struct expr_pred_trans_d ept;

  ept.e = e;
  ept.pred = pred;
  ept.vuses = vuses;
  ept.hashcode = vn_compute (e, (unsigned long) pred);
  slot = htab_find_slot_with_hash (phi_translate_table, &ept, ept.hashcode,
				   NO_INSERT);
  if (!slot)
    return NULL;
  else
    return ((expr_pred_trans_t) *slot)->v;
}

/* Add the tuple mapping from {expression E, basic block PRED, vuses VUSES} to
   value V, to the phi translation table.  */

static inline void
phi_trans_add (tree e, tree v, basic_block pred, VEC (tree, gc) *vuses)
{
  void **slot;
  expr_pred_trans_t new_pair = XNEW (struct expr_pred_trans_d);
  new_pair->e = e;
  new_pair->pred = pred;
  new_pair->vuses = vuses;
  new_pair->v = v;
  new_pair->hashcode = vn_compute (e, (unsigned long) pred);
  slot = htab_find_slot_with_hash (phi_translate_table, new_pair,
				   new_pair->hashcode, INSERT);
  if (*slot)
    free (*slot);
  *slot = (void *) new_pair;
}
/* Return true if V is a value expression that represents itself.
   In our world, this is *only* non-value handles.  */

static inline bool
constant_expr_p (tree v)
{
  return TREE_CODE (v) != VALUE_HANDLE && is_gimple_min_invariant (v);
  /* return TREE_CODE (v) != VALUE_HANDLE; */
}

/* Add expression E to the expression set of value V.  */

void
add_to_value (tree v, tree e)
{
  /* Constants have no expression sets.  */
  if (constant_expr_p (v))
    return;

  if (VALUE_HANDLE_EXPR_SET (v) == NULL)
    VALUE_HANDLE_EXPR_SET (v) = bitmap_set_new ();

  bitmap_insert_into_set (VALUE_HANDLE_EXPR_SET (v), e);
}

/* Create a new bitmap set and return it.  */

static bitmap_set_t
bitmap_set_new (void)
{
  bitmap_set_t ret = (bitmap_set_t) pool_alloc (bitmap_set_pool);
  ret->expressions = BITMAP_ALLOC (&grand_bitmap_obstack);
  ret->values = BITMAP_ALLOC (&grand_bitmap_obstack);
  return ret;
}

/* Remove an expression EXPR from a bitmapped set.  */

static void
bitmap_remove_from_set (bitmap_set_t set, tree expr)
{
  tree val = get_value_handle (expr);

  gcc_assert (val);
  if (!constant_expr_p (val))
    {
      bitmap_clear_bit (set->values, VALUE_HANDLE_ID (val));
      bitmap_clear_bit (set->expressions, get_expression_id (expr));
    }
}

/* Insert an expression EXPR into a bitmapped set.  */

static void
bitmap_insert_into_set (bitmap_set_t set, tree expr)
{
  tree val = get_value_handle (expr);

  gcc_assert (val);
  if (!constant_expr_p (val))
    {
      bitmap_set_bit (set->values, VALUE_HANDLE_ID (val));
      bitmap_set_bit (set->expressions, get_or_alloc_expression_id (expr));
    }
}

/* Copy a bitmapped set ORIG, into bitmapped set DEST.  */

static void
bitmap_set_copy (bitmap_set_t dest, bitmap_set_t orig)
{
  bitmap_copy (dest->expressions, orig->expressions);
  bitmap_copy (dest->values, orig->values);
}

/* Free memory used up by SET.  */

static void
bitmap_set_free (bitmap_set_t set)
{
  BITMAP_FREE (set->expressions);
  BITMAP_FREE (set->values);
}

/* A comparison function for use in qsort to top sort a bitmap set.  Simply
   subtracts value handle ids, since they are created in topo-order.  */

static int
vh_compare (const void *pa, const void *pb)
{
  const tree vha = get_value_handle (*((const tree *)pa));
  const tree vhb = get_value_handle (*((const tree *)pb));

  /* This can happen when we constify things.  */
  if (constant_expr_p (vha))
    {
      if (constant_expr_p (vhb))
	return -1;
      return -1;
    }
  else if (constant_expr_p (vhb))
    return 1;
  return VALUE_HANDLE_ID (vha) - VALUE_HANDLE_ID (vhb);
}
/* Generate a topologically-ordered array of bitmap set SET.  */

static VEC(tree, heap) *
sorted_array_from_bitmap_set (bitmap_set_t set)
{
  unsigned int i;
  bitmap_iterator bi;
  VEC(tree, heap) *result = NULL;

  FOR_EACH_EXPR_ID_IN_SET (set, i, bi)
    VEC_safe_push (tree, heap, result, expression_for_id (i));

  qsort (VEC_address (tree, result), VEC_length (tree, result),
	 sizeof (tree), vh_compare);

  return result;
}

/* Perform bitmapped set operation DEST &= ORIG.  */

static void
bitmap_set_and (bitmap_set_t dest, bitmap_set_t orig)
{
  bitmap_iterator bi;
  unsigned int i;

  if (dest != orig)
    {
      bitmap temp = BITMAP_ALLOC (&grand_bitmap_obstack);

      bitmap_and_into (dest->values, orig->values);

      bitmap_copy (temp, dest->expressions);
      EXECUTE_IF_SET_IN_BITMAP (temp, 0, i, bi)
	{
	  tree expr = expression_for_id (i);
	  tree val = get_value_handle (expr);
	  if (!bitmap_bit_p (dest->values, VALUE_HANDLE_ID (val)))
	    bitmap_clear_bit (dest->expressions, i);
	}
      BITMAP_FREE (temp);
    }
}

/* Subtract all values and expressions contained in ORIG from DEST.  */

static bitmap_set_t
bitmap_set_subtract (bitmap_set_t dest, bitmap_set_t orig)
{
  bitmap_set_t result = bitmap_set_new ();
  bitmap_iterator bi;
  unsigned int i;

  bitmap_and_compl (result->expressions, dest->expressions,
		    orig->expressions);

  FOR_EACH_EXPR_ID_IN_SET (result, i, bi)
    {
      tree expr = expression_for_id (i);
      tree val = get_value_handle (expr);
      bitmap_set_bit (result->values, VALUE_HANDLE_ID (val));
    }

  return result;
}

/* Subtract all the values in bitmap set B from bitmap set A.  */

static void
bitmap_set_subtract_values (bitmap_set_t a, bitmap_set_t b)
{
  unsigned int i;
  bitmap_iterator bi;
  bitmap temp = BITMAP_ALLOC (&grand_bitmap_obstack);

  bitmap_copy (temp, a->expressions);
  EXECUTE_IF_SET_IN_BITMAP (temp, 0, i, bi)
    {
      tree expr = expression_for_id (i);
      if (bitmap_set_contains_value (b, get_value_handle (expr)))
	bitmap_remove_from_set (a, expr);
    }
  BITMAP_FREE (temp);
}

/* Return true if bitmapped set SET contains the value VAL.  */

static bool
bitmap_set_contains_value (bitmap_set_t set, tree val)
{
  if (constant_expr_p (val))
    return true;

  if (!set || bitmap_empty_p (set->expressions))
    return false;

  return bitmap_bit_p (set->values, VALUE_HANDLE_ID (val));
}

static inline bool
bitmap_set_contains_expr (bitmap_set_t set, tree expr)
{
  return bitmap_bit_p (set->expressions, get_expression_id (expr));
}
/* Replace an instance of value LOOKFOR with expression EXPR in SET.  */

static void
bitmap_set_replace_value (bitmap_set_t set, tree lookfor, tree expr)
{
  bitmap_set_t exprset;
  unsigned int i;
  bitmap_iterator bi;

  if (constant_expr_p (lookfor))
    return;

  if (!bitmap_set_contains_value (set, lookfor))
    return;

  /* The number of expressions having a given value is usually
     significantly less than the total number of expressions in SET.
     Thus, rather than check, for each expression in SET, whether it
     has the value LOOKFOR, we walk the reverse mapping that tells us
     what expressions have a given value, and see if any of those
     expressions are in our set.  For large testcases, this is about
     5-10x faster than walking the bitmap.  If this is somehow a
     significant lose for some cases, we can choose which set to walk
     based on the set size.  */
  exprset = VALUE_HANDLE_EXPR_SET (lookfor);
  FOR_EACH_EXPR_ID_IN_SET (exprset, i, bi)
    {
      if (bitmap_bit_p (set->expressions, i))
	{
	  bitmap_clear_bit (set->expressions, i);
	  bitmap_set_bit (set->expressions, get_expression_id (expr));
	  return;
	}
    }
}

/* Return true if two bitmap sets are equal.  */

static bool
bitmap_set_equal (bitmap_set_t a, bitmap_set_t b)
{
  return bitmap_equal_p (a->values, b->values);
}

/* Replace an instance of EXPR's VALUE with EXPR in SET if it exists,
   and add it otherwise.  */

static void
bitmap_value_replace_in_set (bitmap_set_t set, tree expr)
{
  tree val = get_value_handle (expr);

  if (bitmap_set_contains_value (set, val))
    bitmap_set_replace_value (set, val, expr);
  else
    bitmap_insert_into_set (set, expr);
}

/* Insert EXPR into SET if EXPR's value is not already present in
   SET.  */

static void
bitmap_value_insert_into_set (bitmap_set_t set, tree expr)
{
  tree val = get_value_handle (expr);

  if (constant_expr_p (val))
    return;

  if (!bitmap_set_contains_value (set, val))
    bitmap_insert_into_set (set, expr);
}
/* Print out SET to OUTFILE.  */

static void
print_bitmap_set (FILE *outfile, bitmap_set_t set,
		  const char *setname, int blockindex)
{
  fprintf (outfile, "%s[%d] := { ", setname, blockindex);
  if (set)
    {
      bool first = true;
      unsigned i;
      bitmap_iterator bi;

      FOR_EACH_EXPR_ID_IN_SET (set, i, bi)
	{
	  tree expr = expression_for_id (i);

	  if (!first)
	    fprintf (outfile, ", ");
	  first = false;
	  print_generic_expr (outfile, expr, 0);

	  fprintf (outfile, " (");
	  print_generic_expr (outfile, get_value_handle (expr), 0);
	  fprintf (outfile, ") ");
	}
    }
  fprintf (outfile, " }\n");
}

void debug_bitmap_set (bitmap_set_t);

void
debug_bitmap_set (bitmap_set_t set)
{
  print_bitmap_set (stderr, set, "debug", 0);
}

/* Print out the expressions that have VAL to OUTFILE.  */

void
print_value_expressions (FILE *outfile, tree val)
{
  if (VALUE_HANDLE_EXPR_SET (val))
    {
      char s[10];
      sprintf (s, "VH.%04d", VALUE_HANDLE_ID (val));
      print_bitmap_set (outfile, VALUE_HANDLE_EXPR_SET (val), s, 0);
    }
}

void
debug_value_expressions (tree val)
{
  print_value_expressions (stderr, val);
}

/* Return the folded version of T if T, when folded, is a gimple
   min_invariant.  Otherwise, return T.  */

static tree
fully_constant_expression (tree t)
{
  tree folded;
  folded = fold (t);
  if (folded && is_gimple_min_invariant (folded))
    return folded;
  return t;
}
/* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
   For example, this can copy a list made of TREE_LIST nodes.
   Allocates the nodes in list_node_pool.  */

static tree
pool_copy_list (tree list)
{
  tree head;
  tree prev, next;

  if (list == 0)
    return 0;
  head = (tree) pool_alloc (list_node_pool);

  memcpy (head, list, tree_size (list));
  prev = head;

  next = TREE_CHAIN (list);
  while (next)
    {
      TREE_CHAIN (prev) = (tree) pool_alloc (list_node_pool);
      memcpy (TREE_CHAIN (prev), next, tree_size (next));
      prev = TREE_CHAIN (prev);
      next = TREE_CHAIN (next);
    }
  return head;
}
/* Translate the vuses in the VUSES vector backwards through phi nodes
   in PHIBLOCK, so that they have the value they would have in
   BLOCK.  */

static VEC(tree, gc) *
translate_vuses_through_block (VEC (tree, gc) *vuses,
			       basic_block phiblock,
			       basic_block block)
{
  tree oldvuse;
  VEC(tree, gc) *result = NULL;
  int i;

  for (i = 0; VEC_iterate (tree, vuses, i, oldvuse); i++)
    {
      tree phi = SSA_NAME_DEF_STMT (oldvuse);
      if (TREE_CODE (phi) == PHI_NODE
	  && bb_for_stmt (phi) == phiblock)
	{
	  edge e = find_edge (block, bb_for_stmt (phi));
	  if (e)
	    {
	      tree def = PHI_ARG_DEF (phi, e->dest_idx);
	      if (def != oldvuse)
		{
		  if (!result)
		    result = VEC_copy (tree, gc, vuses);
		  VEC_replace (tree, result, i, def);
		}
	    }
	}
    }

  /* We avoid creating a new copy of the vuses unless something
     actually changed, so result can be NULL.  */
  if (result)
    {
      sort_vuses (result);
      return result;
    }
  return vuses;
}
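
/* For example (illustrative): if PHIBLOCK contains the virtual phi
   x_5 = PHI <x_3 (bb2), x_4 (bb3)> and BLOCK is bb2, then a vuse of
   x_5 in the vector is rewritten to x_3, i.e., the memory state the
   expression would see at the end of bb2.  */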
/* Like find_leader, but checks for the value existing in SET1 *or*
   SET2.  This is used to avoid making a set consisting of the union
   of PA_IN and ANTIC_IN during insert.  */

static inline tree
find_leader_in_sets (tree expr, bitmap_set_t set1, bitmap_set_t set2)
{
  tree result;

  result = bitmap_find_leader (set1, expr);
  if (!result && set2)
    result = bitmap_find_leader (set2, expr);
  return result;
}

/* Translate EXPR using phis in PHIBLOCK, so that it has the values of
   the phis in PRED.  Return NULL if we can't find a leader for each
   part of the translated expression.  */

static tree
phi_translate (tree expr, bitmap_set_t set1, bitmap_set_t set2,
	       basic_block pred, basic_block phiblock)
{
  tree phitrans = NULL;
  tree oldexpr = expr;

  if (expr == NULL)
    return NULL;

  if (is_gimple_min_invariant (expr))
    return expr;

  /* Phi translations of a given expression don't change.  */
  if (EXPR_P (expr) || GIMPLE_STMT_P (expr))
    {
      tree vh;

      vh = get_value_handle (expr);
      if (vh && TREE_CODE (vh) == VALUE_HANDLE)
	phitrans = phi_trans_lookup (expr, pred, VALUE_HANDLE_VUSES (vh));
      else
	phitrans = phi_trans_lookup (expr, pred, NULL);
    }
  else
    phitrans = phi_trans_lookup (expr, pred, NULL);

  if (phitrans)
    return phitrans;
  switch (TREE_CODE_CLASS (TREE_CODE (expr)))
    {
    case tcc_expression:
      {
	if (TREE_CODE (expr) != CALL_EXPR)
	  return NULL;
	else
	  {
	    tree oldop0 = TREE_OPERAND (expr, 0);
	    tree oldval0 = oldop0;
	    tree oldarglist = TREE_OPERAND (expr, 1);
	    tree oldop2 = TREE_OPERAND (expr, 2);
	    tree oldval2 = oldop2;
	    tree newop0;
	    tree newarglist;
	    tree newop2 = NULL;
	    tree oldwalker;
	    tree newwalker;
	    tree newexpr;
	    tree vh = get_value_handle (expr);
	    bool listchanged = false;
	    bool invariantarg = false;
	    VEC (tree, gc) *vuses = VALUE_HANDLE_VUSES (vh);
	    VEC (tree, gc) *tvuses;

	    /* Call expressions are kind of weird because they have an
	       argument list.  We don't want to value number the list
	       as one value number, because that doesn't make much
	       sense, and just breaks the support functions we call,
	       which expect TREE_OPERAND (call_expr, 2) to be a
	       TREE_LIST.  */
	    oldval0 = find_leader_in_sets (oldop0, set1, set2);
	    newop0 = phi_translate (oldval0, set1, set2, pred, phiblock);
	    if (newop0 == NULL)
	      return NULL;
	    if (oldop2)
	      {
		oldop2 = find_leader_in_sets (oldop2, set1, set2);
		newop2 = phi_translate (oldop2, set1, set2, pred, phiblock);
		if (newop2 == NULL)
		  return NULL;
	      }

	    /* phi translate the argument list piece by piece.

	       We could actually build the list piece by piece here,
	       but it's likely to not be worth the memory we will save,
	       unless you have millions of call arguments.  */
	    newarglist = pool_copy_list (oldarglist);
	    for (oldwalker = oldarglist, newwalker = newarglist;
		 oldwalker && newwalker;
		 oldwalker = TREE_CHAIN (oldwalker),
		 newwalker = TREE_CHAIN (newwalker))
	      {
		tree oldval = TREE_VALUE (oldwalker);
		tree newval;
		if (oldval)
		  {
		    /* This may seem like a weird place for this
		       check, but it's actually the easiest place to
		       do it.  We can't do it lower on in the
		       recursion because it's valid for pieces of a
		       component ref to be of AGGREGATE_TYPE, as long
		       as the outermost one is not.
		       To avoid *that* case, we have a check for
		       AGGREGATE_TYPE_P in insert_aux.  However, that
		       check will *not* catch this case because here
		       it occurs in the argument list.  */
		    if (AGGREGATE_TYPE_P (TREE_TYPE (oldval)))
		      return NULL;
		    oldval = find_leader_in_sets (oldval, set1, set2);
		    newval = phi_translate (oldval, set1, set2, pred,
					    phiblock);
		    if (newval == NULL)
		      return NULL;
		    if (newval != oldval)
		      {
			listchanged = true;
			invariantarg |= is_gimple_min_invariant (newval);
			TREE_VALUE (newwalker) = get_value_handle (newval);
		      }
		  }
	      }

	    /* In case of new invariant args we might try to fold the call
	       again.  */
	    if (invariantarg)
	      {
		tree tmp = fold_ternary (CALL_EXPR, TREE_TYPE (expr),
					 newop0, newarglist, newop2);
		if (tmp)
		  {
		    STRIP_TYPE_NOPS (tmp);
		    if (is_gimple_min_invariant (tmp))
		      return tmp;
		  }
	      }

	    if (listchanged)
	      vn_lookup_or_add (newarglist, NULL);

	    tvuses = translate_vuses_through_block (vuses, phiblock, pred);

	    if (listchanged || (newop0 != oldop0) || (oldop2 != newop2)
		|| vuses != tvuses)
	      {
		newexpr = (tree) pool_alloc (expression_node_pool);
		memcpy (newexpr, expr, tree_size (expr));
		TREE_OPERAND (newexpr, 0) = newop0 == oldop0 ? oldval0 : get_value_handle (newop0);
		TREE_OPERAND (newexpr, 1) = listchanged ? newarglist : oldarglist;
		TREE_OPERAND (newexpr, 2) = newop2 == oldop2 ? oldval2 : get_value_handle (newop2);
		newexpr->base.ann = NULL;
		vn_lookup_or_add_with_vuses (newexpr, tvuses);
		expr = newexpr;
		phi_trans_add (oldexpr, newexpr, pred, tvuses);
	      }
	  }
      }
      return expr;
    case tcc_declaration:
      {
	VEC (tree, gc) * oldvuses = NULL;
	VEC (tree, gc) * newvuses = NULL;

	oldvuses = VALUE_HANDLE_VUSES (get_value_handle (expr));
	if (oldvuses)
	  newvuses = translate_vuses_through_block (oldvuses, phiblock,
						    pred);

	if (oldvuses != newvuses)
	  vn_lookup_or_add_with_vuses (expr, newvuses);

	phi_trans_add (oldexpr, expr, pred, newvuses);
      }
      return expr;

    case tcc_reference:
      {
	tree oldop0 = TREE_OPERAND (expr, 0);
	tree oldop1 = NULL;
	tree newop0;
	tree newop1 = NULL;
	tree oldop2 = NULL;
	tree newop2 = NULL;
	tree oldop3 = NULL;
	tree newop3 = NULL;
	tree newexpr;
	VEC (tree, gc) * oldvuses = NULL;
	VEC (tree, gc) * newvuses = NULL;

	if (TREE_CODE (expr) != INDIRECT_REF
	    && TREE_CODE (expr) != COMPONENT_REF
	    && TREE_CODE (expr) != ARRAY_REF)
	  return NULL;

	oldop0 = find_leader_in_sets (oldop0, set1, set2);
	newop0 = phi_translate (oldop0, set1, set2, pred, phiblock);
	if (newop0 == NULL)
	  return NULL;

	if (TREE_CODE (expr) == ARRAY_REF)
	  {
	    oldop1 = TREE_OPERAND (expr, 1);
	    oldop1 = find_leader_in_sets (oldop1, set1, set2);
	    newop1 = phi_translate (oldop1, set1, set2, pred, phiblock);

	    if (newop1 == NULL)
	      return NULL;

	    oldop2 = TREE_OPERAND (expr, 2);
	    if (oldop2)
	      {
		oldop2 = find_leader_in_sets (oldop2, set1, set2);
		newop2 = phi_translate (oldop2, set1, set2, pred, phiblock);

		if (newop2 == NULL)
		  return NULL;
	      }
	    oldop3 = TREE_OPERAND (expr, 3);
	    if (oldop3)
	      {
		oldop3 = find_leader_in_sets (oldop3, set1, set2);
		newop3 = phi_translate (oldop3, set1, set2, pred, phiblock);

		if (newop3 == NULL)
		  return NULL;
	      }
	  }

	oldvuses = VALUE_HANDLE_VUSES (get_value_handle (expr));
	if (oldvuses)
	  newvuses = translate_vuses_through_block (oldvuses, phiblock,
						    pred);

	if (newop0 != oldop0 || newvuses != oldvuses
	    || newop1 != oldop1
	    || newop2 != oldop2
	    || newop3 != oldop3)
	  {
	    tree t;

	    newexpr = (tree) pool_alloc (reference_node_pool);
	    memcpy (newexpr, expr, tree_size (expr));
	    TREE_OPERAND (newexpr, 0) = get_value_handle (newop0);
	    if (TREE_CODE (expr) == ARRAY_REF)
	      {
		TREE_OPERAND (newexpr, 1) = get_value_handle (newop1);
		if (newop2)
		  TREE_OPERAND (newexpr, 2) = get_value_handle (newop2);
		if (newop3)
		  TREE_OPERAND (newexpr, 3) = get_value_handle (newop3);
	      }

	    t = fully_constant_expression (newexpr);

	    if (t != newexpr)
	      {
		pool_free (reference_node_pool, newexpr);
		newexpr = t;
	      }
	    else
	      {
		newexpr->base.ann = NULL;
		vn_lookup_or_add_with_vuses (newexpr, newvuses);
	      }
	    expr = newexpr;
	    phi_trans_add (oldexpr, newexpr, pred, newvuses);
	  }
      }
      return expr;
      break;
    case tcc_binary:
    case tcc_comparison:
      {
	tree oldop1 = TREE_OPERAND (expr, 0);
	tree oldval1 = oldop1;
	tree oldop2 = TREE_OPERAND (expr, 1);
	tree oldval2 = oldop2;
	tree newop1;
	tree newop2;
	tree newexpr;

	oldop1 = find_leader_in_sets (oldop1, set1, set2);
	newop1 = phi_translate (oldop1, set1, set2, pred, phiblock);
	if (newop1 == NULL)
	  return NULL;

	oldop2 = find_leader_in_sets (oldop2, set1, set2);
	newop2 = phi_translate (oldop2, set1, set2, pred, phiblock);
	if (newop2 == NULL)
	  return NULL;
	if (newop1 != oldop1 || newop2 != oldop2)
	  {
	    tree t;
	    newexpr = (tree) pool_alloc (binary_node_pool);
	    memcpy (newexpr, expr, tree_size (expr));
	    TREE_OPERAND (newexpr, 0) = newop1 == oldop1 ? oldval1 : get_value_handle (newop1);
	    TREE_OPERAND (newexpr, 1) = newop2 == oldop2 ? oldval2 : get_value_handle (newop2);
	    t = fully_constant_expression (newexpr);
	    if (t != newexpr)
	      {
		pool_free (binary_node_pool, newexpr);
		newexpr = t;
	      }
	    else
	      {
		newexpr->base.ann = NULL;
		vn_lookup_or_add (newexpr, NULL);
	      }
	    expr = newexpr;
	    phi_trans_add (oldexpr, newexpr, pred, NULL);
	  }
      }
      return expr;

    case tcc_unary:
      {
	tree oldop1 = TREE_OPERAND (expr, 0);
	tree newop1;
	tree newexpr;

	oldop1 = find_leader_in_sets (oldop1, set1, set2);
	newop1 = phi_translate (oldop1, set1, set2, pred, phiblock);
	if (newop1 == NULL)
	  return NULL;
	if (newop1 != oldop1)
	  {
	    tree t;
	    newexpr = (tree) pool_alloc (unary_node_pool);
	    memcpy (newexpr, expr, tree_size (expr));
	    TREE_OPERAND (newexpr, 0) = get_value_handle (newop1);
	    t = fully_constant_expression (newexpr);
	    if (t != newexpr)
	      {
		pool_free (unary_node_pool, newexpr);
		newexpr = t;
	      }
	    else
	      {
		newexpr->base.ann = NULL;
		vn_lookup_or_add (newexpr, NULL);
	      }
	    expr = newexpr;
	    phi_trans_add (oldexpr, newexpr, pred, NULL);
	  }
      }
      return expr;

    case tcc_exceptional:
      {
	tree phi = NULL;
	edge e;
	tree def_stmt;
	gcc_assert (TREE_CODE (expr) == SSA_NAME);

	def_stmt = SSA_NAME_DEF_STMT (expr);
	if (TREE_CODE (def_stmt) == PHI_NODE
	    && bb_for_stmt (def_stmt) == phiblock)
	  phi = def_stmt;
	else
	  return expr;

	e = find_edge (pred, bb_for_stmt (phi));
	if (e)
	  {
	    if (is_undefined_value (PHI_ARG_DEF (phi, e->dest_idx)))
	      return NULL;
	    vn_lookup_or_add (PHI_ARG_DEF (phi, e->dest_idx), NULL);
	    return PHI_ARG_DEF (phi, e->dest_idx);
	  }
      }
      return expr;

    default:
      gcc_unreachable ();
    }
}
/* For each expression in SET, translate the value handles through phi nodes
   in PHIBLOCK using edge PHIBLOCK->PRED, and store the resulting
   expressions in DEST.  */

static void
phi_translate_set (bitmap_set_t dest, bitmap_set_t set, basic_block pred,
		   basic_block phiblock)
{
  VEC (tree, heap) *exprs;
  tree expr;
  int i;

  if (!phi_nodes (phiblock))
    {
      bitmap_set_copy (dest, set);
      return;
    }

  exprs = sorted_array_from_bitmap_set (set);
  for (i = 0; VEC_iterate (tree, exprs, i, expr); i++)
    {
      tree translated;
      translated = phi_translate (expr, set, NULL, pred, phiblock);

      /* Don't add constants or empty translations to the cache, since
	 we won't look them up that way, or use the result, anyway.  */
      if (translated && !is_gimple_min_invariant (translated))
	{
	  tree vh = get_value_handle (translated);
	  VEC (tree, gc) *vuses;

	  /* The value handle itself may also be an invariant, in
	     which case, it has no vuses.  */
	  vuses = !is_gimple_min_invariant (vh)
	    ? VALUE_HANDLE_VUSES (vh) : NULL;
	  phi_trans_add (expr, translated, pred, vuses);
	}

      if (translated != NULL)
	bitmap_value_insert_into_set (dest, translated);
    }
  VEC_free (tree, heap, exprs);
}

/* Find the leader for a value (i.e., the name representing that
   value) in a given set, and return it.  Return NULL if no leader is
   found.  */

static tree
bitmap_find_leader (bitmap_set_t set, tree val)
{
  if (val == NULL)
    return NULL;

  if (constant_expr_p (val))
    return val;

  if (bitmap_set_contains_value (set, val))
    {
      /* Rather than walk the entire bitmap of expressions, and see
	 whether any of them has the value we are looking for, we look
	 at the reverse mapping, which tells us the set of expressions
	 that have a given value (IE value->expressions with that
	 value) and see if any of those expressions are in our set.
	 The number of expressions per value is usually significantly
	 less than the number of expressions in the set.  In fact, for
	 large testcases, doing it this way is roughly 5-10x faster
	 than walking the bitmap.
	 If this is somehow a significant lose for some cases, we can
	 choose which set to walk based on which set is smaller.  */
      unsigned int i;
      bitmap_iterator bi;
      bitmap_set_t exprset = VALUE_HANDLE_EXPR_SET (val);

      EXECUTE_IF_AND_IN_BITMAP (exprset->expressions,
				set->expressions, 0, i, bi)
	return expression_for_id (i);
    }
  return NULL;
}
/* Given the vuse representative map, MAP, and an SSA version number,
   ID, return the bitmap of names ID represents, or NULL, if none
   exists.  */

static bitmap
get_representative (bitmap *map, int id)
{
  if (map[id] != NULL)
    return map[id];
  return NULL;
}

/* A vuse is anticipable at the top of block x, from the bottom of the
   block, if it reaches the top of the block, and is not killed in the
   block.  In effect, we are trying to see if the vuse is transparent
   backwards in the block.  */

static bool
vuses_dies_in_block_x (VEC (tree, gc) *vuses, basic_block block)
{
  int i;
  tree vuse;

  for (i = 0; VEC_iterate (tree, vuses, i, vuse); i++)
    {
      /* Any places where this is too conservative, are places
	 where we created a new version and shouldn't have.  */

      if (!bitmap_bit_p (RVUSE_IN (block), SSA_NAME_VERSION (vuse))
	  || bitmap_bit_p (RVUSE_KILL (block), SSA_NAME_VERSION (vuse)))
	return true;
    }
  return false;
}
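
/* In other words (illustrative): a load whose vuse is x_3 "dies" in a
   block whose RVUSE_KILL set contains x_3 (some store in the block
   clobbers that memory state), or whose RVUSE_IN set lacks x_3 (the
   state never reaches the block top at all), so the load must not be
   considered anticipatable above that point.  */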
/* Determine if the expression EXPR is valid in SET1 U SET2.
   ONLY SET2 CAN BE NULL.
   This means that we have a leader for each part of the expression
   (if it consists of values), or the expression is an SSA_NAME.

   NB: We never should run into a case where we have SSA_NAME +
   SSA_NAME or SSA_NAME + value.  The sets valid_in_sets is called on,
   the ANTIC sets, will only ever have SSA_NAME's or value expressions
   (IE VALUE1 + VALUE2, *VALUE1, VALUE1 < VALUE2)  */

#define union_contains_value(SET1, SET2, VAL)			\
  (bitmap_set_contains_value ((SET1), (VAL))			\
   || ((SET2) && bitmap_set_contains_value ((SET2), (VAL))))

static bool
valid_in_sets (bitmap_set_t set1, bitmap_set_t set2, tree expr,
	       basic_block block)
{
  tree vh = get_value_handle (expr);
  switch (TREE_CODE_CLASS (TREE_CODE (expr)))
    {
    case tcc_binary:
    case tcc_comparison:
      {
	tree op1 = TREE_OPERAND (expr, 0);
	tree op2 = TREE_OPERAND (expr, 1);

	return union_contains_value (set1, set2, op1)
	  && union_contains_value (set1, set2, op2);
      }

    case tcc_unary:
      {
	tree op1 = TREE_OPERAND (expr, 0);
	return union_contains_value (set1, set2, op1);
      }

    case tcc_expression:
      {
	if (TREE_CODE (expr) == CALL_EXPR)
	  {
	    tree op0 = TREE_OPERAND (expr, 0);
	    tree arglist = TREE_OPERAND (expr, 1);
	    tree op2 = TREE_OPERAND (expr, 2);

	    /* Check the non-list operands first.  */
	    if (!union_contains_value (set1, set2, op0)
		|| (op2 && !union_contains_value (set1, set2, op2)))
	      return false;

	    /* Now check the operands.  */
	    for (; arglist; arglist = TREE_CHAIN (arglist))
	      {
		tree arg = TREE_VALUE (arglist);
		if (!union_contains_value (set1, set2, arg))
		  return false;
	      }
	    return !vuses_dies_in_block_x (VALUE_HANDLE_VUSES (vh), block);
	  }
	return false;
      }

    case tcc_reference:
      {
	if (TREE_CODE (expr) == INDIRECT_REF
	    || TREE_CODE (expr) == COMPONENT_REF
	    || TREE_CODE (expr) == ARRAY_REF)
	  {
	    tree op0 = TREE_OPERAND (expr, 0);
	    gcc_assert (is_gimple_min_invariant (op0)
			|| TREE_CODE (op0) == VALUE_HANDLE);
	    if (!union_contains_value (set1, set2, op0))
	      return false;
	    if (TREE_CODE (expr) == ARRAY_REF)
	      {
		tree op1 = TREE_OPERAND (expr, 1);
		tree op2 = TREE_OPERAND (expr, 2);
		tree op3 = TREE_OPERAND (expr, 3);
		gcc_assert (is_gimple_min_invariant (op1)
			    || TREE_CODE (op1) == VALUE_HANDLE);
		if (!union_contains_value (set1, set2, op1))
		  return false;
		gcc_assert (!op2 || is_gimple_min_invariant (op2)
			    || TREE_CODE (op2) == VALUE_HANDLE);
		if (op2
		    && !union_contains_value (set1, set2, op2))
		  return false;
		gcc_assert (!op3 || is_gimple_min_invariant (op3)
			    || TREE_CODE (op3) == VALUE_HANDLE);
		if (op3
		    && !union_contains_value (set1, set2, op3))
		  return false;
	      }
	    return bitmap_set_contains_value (ANTIC_SAFE_LOADS (block),
					      vh)
	      || !vuses_dies_in_block_x (VALUE_HANDLE_VUSES (vh),
					 block);
	  }
      }
      return false;

    case tcc_exceptional:
      {
	gcc_assert (TREE_CODE (expr) == SSA_NAME);
	return bitmap_set_contains_expr (AVAIL_OUT (block), expr);
      }

    case tcc_declaration:
      return !vuses_dies_in_block_x (VALUE_HANDLE_VUSES (vh), block);

    default:
      /* No other cases should be encountered.  */
      gcc_unreachable ();
    }
}
/* Clean the set of expressions that are no longer valid in SET1 or
   SET2.  This means expressions that are made up of values we have no
   leaders for in SET1 or SET2.  This version is used for partial
   anticipation, which means it is not valid in either ANTIC_IN or
   PA_IN.  */

static void
dependent_clean (bitmap_set_t set1, bitmap_set_t set2, basic_block block)
{
  VEC (tree, heap) *exprs = sorted_array_from_bitmap_set (set1);
  tree expr;
  int i;

  for (i = 0; VEC_iterate (tree, exprs, i, expr); i++)
    {
      if (!valid_in_sets (set1, set2, expr, block))
	bitmap_remove_from_set (set1, expr);
    }
  VEC_free (tree, heap, exprs);
}

/* Clean the set of expressions that are no longer valid in SET.  This
   means expressions that are made up of values we have no leaders for
   in SET.  */

static void
clean (bitmap_set_t set, basic_block block)
{
  VEC (tree, heap) *exprs = sorted_array_from_bitmap_set (set);
  tree expr;
  int i;

  for (i = 0; VEC_iterate (tree, exprs, i, expr); i++)
    {
      if (!valid_in_sets (set, NULL, expr, block))
	bitmap_remove_from_set (set, expr);
    }
  VEC_free (tree, heap, exprs);
}

static sbitmap has_abnormal_preds;

/* List of blocks that may have changed during ANTIC computation and
   thus need to be iterated over.  */

static sbitmap changed_blocks;
/* Compute the ANTIC set for BLOCK.

   If succs(BLOCK) > 1 then
     ANTIC_OUT[BLOCK] = intersection of ANTIC_IN[b] for all succ(BLOCK)
   else if succs(BLOCK) == 1 then
     ANTIC_OUT[BLOCK] = phi_translate (ANTIC_IN[succ(BLOCK)])

   ANTIC_IN[BLOCK] = clean(ANTIC_OUT[BLOCK] U EXP_GEN[BLOCK] - TMP_GEN[BLOCK])
*/
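
/* As a small example of the intersection rule (illustrative): if
   BLOCK has successors s1 and s2 with ANTIC_IN[s1] = { VH.1 + VH.2 }
   and ANTIC_IN[s2] = { VH.1 + VH.2, VH.3 }, then ANTIC_OUT[BLOCK] is
   { VH.1 + VH.2 }: only what every path out of BLOCK could use is
   anticipatable at the end of BLOCK.  */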
static bool
compute_antic_aux (basic_block block, bool block_has_abnormal_pred_edge)
{
  bool changed = false;
  bitmap_set_t S, old, ANTIC_OUT;
  bitmap_iterator bi;
  unsigned int bii;
  edge e;
  edge_iterator ei;

  old = ANTIC_OUT = S = NULL;
  BB_VISITED (block) = 1;

  /* If any edges from predecessors are abnormal, antic_in is empty,
     so do nothing.  */
  if (block_has_abnormal_pred_edge)
    goto maybe_dump_sets;

  old = ANTIC_IN (block);
  ANTIC_OUT = bitmap_set_new ();

  /* If the block has no successors, ANTIC_OUT is empty.  */
  if (EDGE_COUNT (block->succs) == 0)
    ;
  /* If we have one successor, we could have some phi nodes to
     translate through.  */
  else if (single_succ_p (block))
    {
      basic_block succ_bb = single_succ (block);

      /* We trade iterations of the dataflow equations for having to
	 phi translate the maximal set, which is incredibly slow
	 (since the maximal set often has 300+ members, even when you
	 have a small number of blocks).
	 Basically, we defer the computation of ANTIC for this block
	 until we have processed its successor, which will inevitably
	 have a *much* smaller set of values to phi translate once
	 clean has been run on it.
	 The cost of doing this is that we technically perform more
	 iterations, however, they are lower cost iterations.

	 Timings for PRE on tramp3d-v4:
	 without maximal set fix: 11 seconds
	 with maximal set fix/without deferring: 26 seconds
	 with maximal set fix/with deferring: 11 seconds
      */

      if (!BB_VISITED (succ_bb))
	{
	  changed = true;
	  SET_BIT (changed_blocks, block->index);
	  BB_VISITED (block) = 0;
	  BB_DEFERRED (block) = 1;
	  goto maybe_dump_sets;
	}
      else
	phi_translate_set (ANTIC_OUT, ANTIC_IN (succ_bb),
			   block, succ_bb);
    }

  /* If we have multiple successors, we take the intersection of all of
     them.  */
  else
    {
      VEC(basic_block, heap) * worklist;
      size_t i;
      basic_block bprime, first;

      worklist = VEC_alloc (basic_block, heap, EDGE_COUNT (block->succs));
      FOR_EACH_EDGE (e, ei, block->succs)
	VEC_quick_push (basic_block, worklist, e->dest);
      first = VEC_index (basic_block, worklist, 0);

      if (!BB_VISITED (first))
	bitmap_set_copy (ANTIC_OUT, maximal_set);
      else
	bitmap_set_copy (ANTIC_OUT, ANTIC_IN (first));

      for (i = 1; VEC_iterate (basic_block, worklist, i, bprime); i++)
	{
	  if (!BB_VISITED (bprime))
	    bitmap_set_and (ANTIC_OUT, maximal_set);
	  else
	    bitmap_set_and (ANTIC_OUT, ANTIC_IN (bprime));
	}
      VEC_free (basic_block, heap, worklist);
    }

  /* Generate ANTIC_OUT - TMP_GEN.  */
  S = bitmap_set_subtract (ANTIC_OUT, TMP_GEN (block));

  /* Start ANTIC_IN with EXP_GEN - TMP_GEN.  */
  ANTIC_IN (block) = bitmap_set_subtract (EXP_GEN (block),
					  TMP_GEN (block));

  /* Then union in the ANTIC_OUT - TMP_GEN values,
     to get ANTIC_OUT U EXP_GEN - TMP_GEN.  */
  FOR_EACH_EXPR_ID_IN_SET (S, bii, bi)
    bitmap_value_insert_into_set (ANTIC_IN (block),
				  expression_for_id (bii));

  clean (ANTIC_IN (block), block);

  /* !old->expressions can happen when we deferred a block.  */
  if (!old->expressions || !bitmap_set_equal (old, ANTIC_IN (block)))
    {
      changed = true;
      SET_BIT (changed_blocks, block->index);
      FOR_EACH_EDGE (e, ei, block->preds)
	SET_BIT (changed_blocks, e->src->index);
    }
  else
    RESET_BIT (changed_blocks, block->index);

 maybe_dump_sets:
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      if (!BB_DEFERRED (block) || BB_VISITED (block))
	{
	  if (ANTIC_OUT)
	    print_bitmap_set (dump_file, ANTIC_OUT, "ANTIC_OUT", block->index);

	  if (ANTIC_SAFE_LOADS (block))
	    print_bitmap_set (dump_file, ANTIC_SAFE_LOADS (block),
			      "ANTIC_SAFE_LOADS", block->index);
	  print_bitmap_set (dump_file, ANTIC_IN (block), "ANTIC_IN",
			    block->index);

	  if (S)
	    print_bitmap_set (dump_file, S, "S", block->index);
	}
      else
	{
	  fprintf (dump_file,
		   "Block %d was deferred for a future iteration.\n",
		   block->index);
	}
    }
  if (old)
    bitmap_set_free (old);
  if (S)
    bitmap_set_free (S);
  if (ANTIC_OUT)
    bitmap_set_free (ANTIC_OUT);
  return changed;
}
/* Compute PARTIAL_ANTIC for BLOCK.

   If succs(BLOCK) > 1 then
     PA_OUT[BLOCK] = value wise union of PA_IN[b] + all ANTIC_IN not
     in ANTIC_OUT for all succ(BLOCK)
   else if succs(BLOCK) == 1 then
     PA_OUT[BLOCK] = phi_translate (PA_IN[succ(BLOCK)])

   PA_IN[BLOCK] = dependent_clean(PA_OUT[BLOCK] - TMP_GEN[BLOCK]
				  - ANTIC_IN[BLOCK])
*/
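
/* Unlike ANTIC, this is a union (illustrative): with successors s1
   and s2 where PA_IN[s1] = { VH.1 + VH.2 } and PA_IN[s2] = { VH.3 },
   PA_OUT[BLOCK] is { VH.1 + VH.2, VH.3 }, since an expression only
   needs to be anticipatable along *some* path out of BLOCK to be
   partially anticipatable.  */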
static bool
compute_partial_antic_aux (basic_block block,
			   bool block_has_abnormal_pred_edge)
{
  bool changed = false;
  bitmap_set_t old_PA_IN;
  bitmap_set_t PA_OUT;
  edge e;
  edge_iterator ei;

  old_PA_IN = PA_OUT = NULL;

  /* If any edges from predecessors are abnormal, antic_in is empty,
     so do nothing.  */
  if (block_has_abnormal_pred_edge)
    goto maybe_dump_sets;

  old_PA_IN = PA_IN (block);
  PA_OUT = bitmap_set_new ();

  /* If the block has no successors, ANTIC_OUT is empty.  */
  if (EDGE_COUNT (block->succs) == 0)
    ;
  /* If we have one successor, we could have some phi nodes to
     translate through.  Note that we can't phi translate across DFS
     back edges in partial antic, because it uses a union operation
     on the successors.  For recurrences like IV's, we will end up
     generating a new value in the set on each go around (i + 3 (VH.1)
     VH.1 + 1 (VH.2), VH.2 + 1 (VH.3), etc), forever.  */
  else if (single_succ_p (block))
    {
      basic_block succ = single_succ (block);
      if (!(single_succ_edge (block)->flags & EDGE_DFS_BACK))
	phi_translate_set (PA_OUT, PA_IN (succ), block, succ);
    }
  /* If we have multiple successors, we take the union of all of
     them.  */
  else
    {
      VEC(basic_block, heap) * worklist;
      size_t i;
      basic_block bprime;

      worklist = VEC_alloc (basic_block, heap, EDGE_COUNT (block->succs));
      FOR_EACH_EDGE (e, ei, block->succs)
	{
	  if (e->flags & EDGE_DFS_BACK)
	    continue;
	  VEC_quick_push (basic_block, worklist, e->dest);
	}
      if (VEC_length (basic_block, worklist) > 0)
	{
	  for (i = 0; VEC_iterate (basic_block, worklist, i, bprime); i++)
	    {
	      unsigned int i;
	      bitmap_iterator bi;

	      FOR_EACH_EXPR_ID_IN_SET (ANTIC_IN (bprime), i, bi)
		bitmap_value_insert_into_set (PA_OUT,
					      expression_for_id (i));

	      FOR_EACH_EXPR_ID_IN_SET (PA_IN (bprime), i, bi)
		bitmap_value_insert_into_set (PA_OUT,
					      expression_for_id (i));
	    }
	}
      VEC_free (basic_block, heap, worklist);
    }

  /* PA_IN starts with PA_OUT - TMP_GEN.
     Then we subtract things from ANTIC_IN.  */
  PA_IN (block) = bitmap_set_subtract (PA_OUT, TMP_GEN (block));

  /* For partial antic, we want to put back in the phi results, since
     we will properly avoid making them partially antic over backedges.  */
  bitmap_ior_into (PA_IN (block)->values, PHI_GEN (block)->values);
  bitmap_ior_into (PA_IN (block)->expressions, PHI_GEN (block)->expressions);

  /* PA_IN[block] = PA_IN[block] - ANTIC_IN[block] */
  bitmap_set_subtract_values (PA_IN (block), ANTIC_IN (block));

  dependent_clean (PA_IN (block), ANTIC_IN (block), block);

  if (!bitmap_set_equal (old_PA_IN, PA_IN (block)))
    {
      changed = true;
      SET_BIT (changed_blocks, block->index);
      FOR_EACH_EDGE (e, ei, block->preds)
	SET_BIT (changed_blocks, e->src->index);
    }
  else
    RESET_BIT (changed_blocks, block->index);

 maybe_dump_sets:
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      if (PA_OUT)
	print_bitmap_set (dump_file, PA_OUT, "PA_OUT", block->index);

      print_bitmap_set (dump_file, PA_IN (block), "PA_IN", block->index);
    }
  if (old_PA_IN)
    bitmap_set_free (old_PA_IN);
  if (PA_OUT)
    bitmap_set_free (PA_OUT);
  return changed;
}
/* Compute ANTIC and partial ANTIC sets.  */

static void
compute_antic (void)
{
  bool changed = true;
  int num_iterations = 0;
  basic_block block;
  int i;

  /* If any predecessor edges are abnormal, we punt, so antic_in is empty.
     We pre-build the map of blocks with incoming abnormal edges here.  */
  has_abnormal_preds = sbitmap_alloc (last_basic_block);
  sbitmap_zero (has_abnormal_preds);

  FOR_EACH_BB (block)
    {
      edge_iterator ei;
      edge e;

      FOR_EACH_EDGE (e, ei, block->preds)
	{
	  e->flags &= ~EDGE_DFS_BACK;
	  if (e->flags & EDGE_ABNORMAL)
	    {
	      SET_BIT (has_abnormal_preds, block->index);
	      break;
	    }
	}

      BB_VISITED (block) = 0;
      BB_DEFERRED (block) = 0;
      /* While we are here, give empty ANTIC_IN sets to each block.  */
      ANTIC_IN (block) = bitmap_set_new ();
      PA_IN (block) = bitmap_set_new ();
    }

  /* At the exit block we anticipate nothing.  */
  ANTIC_IN (EXIT_BLOCK_PTR) = bitmap_set_new ();
  BB_VISITED (EXIT_BLOCK_PTR) = 1;
  PA_IN (EXIT_BLOCK_PTR) = bitmap_set_new ();

  changed_blocks = sbitmap_alloc (last_basic_block + 1);
  sbitmap_ones (changed_blocks);
  while (changed)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Starting iteration %d\n", num_iterations);
      num_iterations++;
      changed = false;
      for (i = 0; i < last_basic_block - NUM_FIXED_BLOCKS; i++)
	{
	  if (TEST_BIT (changed_blocks, postorder[i]))
	    {
	      basic_block block = BASIC_BLOCK (postorder[i]);
	      changed |= compute_antic_aux (block,
					    TEST_BIT (has_abnormal_preds,
						      block->index));
	    }
	}
      /* Theoretically possible, but *highly* unlikely.  */
      gcc_assert (num_iterations < 50);
    }

  if (dump_file && (dump_flags & TDF_STATS))
    fprintf (dump_file, "compute_antic required %d iterations\n",
	     num_iterations);

  if (do_partial_partial)
    {
      sbitmap_ones (changed_blocks);
      mark_dfs_back_edges ();
      num_iterations = 0;
      changed = true;
      while (changed)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Starting iteration %d\n", num_iterations);
	  num_iterations++;
	  changed = false;
	  for (i = 0; i < last_basic_block - NUM_FIXED_BLOCKS; i++)
	    {
	      if (TEST_BIT (changed_blocks, postorder[i]))
		{
		  basic_block block = BASIC_BLOCK (postorder[i]);
		  changed
		    |= compute_partial_antic_aux (block,
						  TEST_BIT (has_abnormal_preds,
							    block->index));
		}
	    }
	  /* Theoretically possible, but *highly* unlikely.  */
	  gcc_assert (num_iterations < 50);
	}
      if (dump_file && (dump_flags & TDF_STATS))
	fprintf (dump_file, "compute_partial_antic required %d iterations\n",
		 num_iterations);
    }
  sbitmap_free (has_abnormal_preds);
  sbitmap_free (changed_blocks);
}
2006 /* Print the names represented by the bitmap NAMES to the file OUT. */
2008 static void
2009 dump_bitmap_of_names (FILE *out, bitmap names)
2011 bitmap_iterator bi;
2012 unsigned int i;
2014 fprintf (out, " { ");
2015 EXECUTE_IF_SET_IN_BITMAP (names, 0, i, bi)
2017 print_generic_expr (out, ssa_name (i), 0);
2018 fprintf (out, " ");
2020 fprintf (out, "}\n");
2023 /* Compute a set of representative vuse versions for each phi. This
2024 is so we can compute conservative kill sets in terms of all vuses
2025 that are killed, instead of continually walking chains.
2027 We also have to be able to kill all names associated with a phi when
2028 the phi dies in order to ensure we don't generate overlapping
2029 live ranges, which are not allowed in virtual SSA. */
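/* As an illustrative sketch (names invented, not from a real dump):
   given the virtual PHI

     VUSE.3 = PHI <VUSE.1 (bb2), VUSE.2 (bb3)>

   we would record vuse_names[3] = { 1, 2, 3 }, so that a kill of the
   representative VUSE.3 conservatively kills VUSE.1 and VUSE.2 as
   well, without walking the use-def chains each time.  */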
2031 static bitmap *vuse_names;
2032 static void
2033 compute_vuse_representatives (void)
2035 tree phi;
2036 basic_block bb;
2037 VEC (tree, heap) *phis = NULL;
2038 bool changed = true;
2039 size_t i;
2041 FOR_EACH_BB (bb)
2043 for (phi = phi_nodes (bb);
2044 phi;
2045 phi = PHI_CHAIN (phi))
2046 if (!is_gimple_reg (PHI_RESULT (phi)))
2047 VEC_safe_push (tree, heap, phis, phi);
2050 while (changed)
2052 changed = false;
2054 for (i = 0; VEC_iterate (tree, phis, i, phi); i++)
2056 size_t ver = SSA_NAME_VERSION (PHI_RESULT (phi));
2057 use_operand_p usep;
2058 ssa_op_iter iter;
2060 if (vuse_names[ver] == NULL)
2062 vuse_names[ver] = BITMAP_ALLOC (&grand_bitmap_obstack);
2063 bitmap_set_bit (vuse_names[ver], ver);
2065 FOR_EACH_PHI_ARG (usep, phi, iter, SSA_OP_ALL_USES)
2067 tree use = USE_FROM_PTR (usep);
2068 bitmap usebitmap = get_representative (vuse_names,
2069 SSA_NAME_VERSION (use));
2070 if (usebitmap != NULL)
2072 changed |= bitmap_ior_into (vuse_names[ver],
2073 usebitmap);
2075 else
2077 changed |= !bitmap_bit_p (vuse_names[ver],
2078 SSA_NAME_VERSION (use));
2079 if (changed)
2080 bitmap_set_bit (vuse_names[ver],
2081 SSA_NAME_VERSION (use));
2087 if (dump_file && (dump_flags & TDF_DETAILS))
2088 for (i = 0; VEC_iterate (tree, phis, i, phi); i++)
2090 bitmap reps = get_representative (vuse_names,
2091 SSA_NAME_VERSION (PHI_RESULT (phi)));
2092 if (reps)
2094 print_generic_expr (dump_file, PHI_RESULT (phi), 0);
2095 fprintf (dump_file, " represents ");
2096 dump_bitmap_of_names (dump_file, reps);
2099 VEC_free (tree, heap, phis);
2102 /* Compute reaching vuses and antic safe loads. RVUSE computation
2103 is a small bit of iterative dataflow to determine what virtual uses
2104 reach what blocks. Because we can't generate overlapping virtual
2105 uses, and virtual uses *do* actually die, this ends up being faster
2106 in most cases than continually walking the virtual use/def chains
2107 to determine whether we are inside a block where a given virtual is
2108 still available to be used.
2110 ANTIC_SAFE_LOADS are those loads that actually occur before any kill to
2111 their vuses in the block, and thus are safe at the top of the
2112 block.
2114 An example:
2116 <block begin>
2117 b = *a
2118 *a = 9
2119 <block end>
2121 b = *a is an antic safe load because it is still safe to consider it
2122 ANTIC at the top of the block.
2124 We currently compute a conservative approximation to
2125 ANTIC_SAFE_LOADS. We compute those loads that occur before *any*
2126 stores in the block. This is not because it is difficult to
2127 compute the precise answer, but because it is expensive. More
2128 testing is necessary to determine whether it is worth computing the
2129 precise answer. */
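/* A small worked instance of the RVUSE problem (hypothetical
   numbering, for illustration only): if bb2 generates VDEF.1 and does
   not kill it, then RVUSE_GEN (bb2) = { 1 } and version 1 is in
   RVUSE_OUT (bb2).  If a successor bb3 stores through the same
   memory, RVUSE_KILL (bb3) = { 1 }, so version 1 reaches the top of
   bb3 (RVUSE_IN) but not its bottom (RVUSE_OUT).  */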
2131 static void
2132 compute_rvuse_and_antic_safe (void)
2135 unsigned int i;
2136 tree phi;
2137 basic_block bb;
2138 bool changed = true;
2139 unsigned int *first_store_uid;
2141 first_store_uid = xcalloc (n_basic_blocks, sizeof (unsigned int));
2143 compute_vuse_representatives ();
2145 FOR_ALL_BB (bb)
2147 RVUSE_IN (bb) = BITMAP_ALLOC (&grand_bitmap_obstack);
2148 RVUSE_GEN (bb) = BITMAP_ALLOC (&grand_bitmap_obstack);
2149 RVUSE_KILL (bb) = BITMAP_ALLOC (&grand_bitmap_obstack);
2150 RVUSE_OUT (bb) = BITMAP_ALLOC (&grand_bitmap_obstack);
2151 ANTIC_SAFE_LOADS (bb) = NULL;
2154 /* Mark live on entry. */
2155 for (i = 0; i < num_ssa_names; i++)
2157 tree name = ssa_name (i);
2158 if (name && !is_gimple_reg (name)
2159 && IS_EMPTY_STMT (SSA_NAME_DEF_STMT (name)))
2160 bitmap_set_bit (RVUSE_OUT (ENTRY_BLOCK_PTR),
2161 SSA_NAME_VERSION (name));
2164 /* Compute local sets for reaching vuses.
2165 GEN(block) = generated in block and not locally killed.
2166 KILL(block) = set of vuses killed in block.
2169 FOR_EACH_BB (bb)
2171 block_stmt_iterator bsi;
2172 ssa_op_iter iter;
2173 def_operand_p defp;
2174 use_operand_p usep;
2176 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
2178 tree stmt = bsi_stmt (bsi);
2180 if (first_store_uid[bb->index] == 0
2181 && !ZERO_SSA_OPERANDS (stmt, SSA_OP_VMAYUSE | SSA_OP_VDEF))
2183 first_store_uid[bb->index] = stmt_ann (stmt)->uid;
2186 FOR_EACH_SSA_USE_OPERAND (usep, stmt, iter, SSA_OP_VMAYUSE)
2188 tree use = USE_FROM_PTR (usep);
2189 bitmap repbit = get_representative (vuse_names,
2190 SSA_NAME_VERSION (use));
2191 if (repbit != NULL)
2193 bitmap_and_compl_into (RVUSE_GEN (bb), repbit);
2194 bitmap_ior_into (RVUSE_KILL (bb), repbit);
2196 else
2198 bitmap_set_bit (RVUSE_KILL (bb), SSA_NAME_VERSION (use));
2199 bitmap_clear_bit (RVUSE_GEN (bb), SSA_NAME_VERSION (use));
2202 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_VIRTUAL_DEFS)
2204 tree def = DEF_FROM_PTR (defp);
2205 bitmap_set_bit (RVUSE_GEN (bb), SSA_NAME_VERSION (def));
2210 FOR_EACH_BB (bb)
2212 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
2214 if (!is_gimple_reg (PHI_RESULT (phi)))
2216 edge e;
2217 edge_iterator ei;
2219 tree def = PHI_RESULT (phi);
2220 /* In reality, the PHI result is generated at the end of
2221 each predecessor block. This will make the value
2222 RVUSE_IN for the bb containing the PHI, which is
2223 correct. */
2224 FOR_EACH_EDGE (e, ei, bb->preds)
2225 bitmap_set_bit (RVUSE_GEN (e->src), SSA_NAME_VERSION (def));
2230 /* Solve reaching vuses.
2232 RVUSE_IN[BB] = Union of RVUSE_OUT of predecessors.
2233 RVUSE_OUT[BB] = RVUSE_GEN[BB] U (RVUSE_IN[BB] - RVUSE_KILL[BB])
2236 changed = true;
2237 while (changed)
2239 int j;
2240 changed = false;
2241 for (j = n_basic_blocks - NUM_FIXED_BLOCKS - 1; j >= 0; j--)
2243 edge e;
2244 edge_iterator ei;
2245 bb = BASIC_BLOCK (postorder[j]);
2247 FOR_EACH_EDGE (e, ei, bb->preds)
2248 bitmap_ior_into (RVUSE_IN (bb), RVUSE_OUT (e->src));
2250 changed |= bitmap_ior_and_compl (RVUSE_OUT (bb),
2251 RVUSE_GEN (bb),
2252 RVUSE_IN (bb),
2253 RVUSE_KILL (bb));
2257 if (dump_file && (dump_flags & TDF_DETAILS))
2259 FOR_ALL_BB (bb)
2261 fprintf (dump_file, "RVUSE_IN (%d) =", bb->index);
2262 dump_bitmap_of_names (dump_file, RVUSE_IN (bb));
2264 fprintf (dump_file, "RVUSE_KILL (%d) =", bb->index);
2265 dump_bitmap_of_names (dump_file, RVUSE_KILL (bb));
2267 fprintf (dump_file, "RVUSE_GEN (%d) =", bb->index);
2268 dump_bitmap_of_names (dump_file, RVUSE_GEN (bb));
2270 fprintf (dump_file, "RVUSE_OUT (%d) =", bb->index);
2271 dump_bitmap_of_names (dump_file, RVUSE_OUT (bb));
2275 FOR_EACH_BB (bb)
2277 bitmap_iterator bi;
2279 if (bitmap_empty_p (RVUSE_KILL (bb)))
2280 continue;
2282 FOR_EACH_EXPR_ID_IN_SET (EXP_GEN (bb), i, bi)
2284 tree expr = expression_for_id (i);
2285 if (REFERENCE_CLASS_P (expr))
2287 tree vh = get_value_handle (expr);
2288 tree maybe = bitmap_find_leader (AVAIL_OUT (bb), vh);
2290 if (maybe)
2292 tree def = SSA_NAME_DEF_STMT (maybe);
2294 if (bb_for_stmt (def) != bb)
2295 continue;
2297 if (TREE_CODE (def) == PHI_NODE
2298 || stmt_ann (def)->uid < first_store_uid[bb->index])
2300 if (ANTIC_SAFE_LOADS (bb) == NULL)
2301 ANTIC_SAFE_LOADS (bb) = bitmap_set_new ();
2302 bitmap_value_insert_into_set (ANTIC_SAFE_LOADS (bb),
2303 expr);
2309 free (first_store_uid);
2312 /* Return true if we can value number the call in STMT. This is true
2313 if we have a pure or constant call. */
2315 static bool
2316 can_value_number_call (tree stmt)
2318 tree call = get_call_expr_in (stmt);
2320 if (call_expr_flags (call) & (ECF_PURE | ECF_CONST))
2321 return true;
2322 return false;
2325 /* Return true if OP is a tree which we can perform value numbering
2326 on. */
2328 static bool
2329 can_value_number_operation (tree op)
2331 return UNARY_CLASS_P (op)
2332 || BINARY_CLASS_P (op)
2333 || COMPARISON_CLASS_P (op)
2334 || REFERENCE_CLASS_P (op)
2335 || (TREE_CODE (op) == CALL_EXPR
2336 && can_value_number_call (op));
2340 /* Return true if OP is a tree which we can perform PRE on.
2341 This may not match the operations we can value number, but in
2342 a perfect world would. */
2344 static bool
2345 can_PRE_operation (tree op)
2347 return UNARY_CLASS_P (op)
2348 || BINARY_CLASS_P (op)
2349 || COMPARISON_CLASS_P (op)
2350 || TREE_CODE (op) == INDIRECT_REF
2351 || TREE_CODE (op) == COMPONENT_REF
2352 || TREE_CODE (op) == CALL_EXPR
2353 || TREE_CODE (op) == ARRAY_REF;
2357 /* Inserted expressions are placed onto this worklist, which is used
2358 for performing quick dead code elimination of insertions we made
2359 that didn't turn out to be necessary. */
2360 static VEC(tree,heap) *inserted_exprs;
2362 /* Pool allocated fake store expressions are placed onto this
2363 worklist, which, after performing dead code elimination, is walked
2364 to see which expressions need to be put into GC'able memory. */
2365 static VEC(tree, heap) *need_creation;
2367 /* For COMPONENT_REF's and ARRAY_REF's, we can't have any intermediates for the
2368 COMPONENT_REF or INDIRECT_REF or ARRAY_REF portion, because we'd end up
2369 trying to rename aggregates into ssa form directly, which is a no
2370 no.
2372 Thus, this routine doesn't create temporaries, it just builds a
2373 single access expression for the array, calling
2374 find_or_generate_expression to build the innermost pieces.
2376 This function is a subroutine of create_expression_by_pieces, and
2377 should not be called on its own unless you really know what you
2378 are doing. */
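/* For example (a hypothetical value expression, shown only for
   illustration): given the value form

     COMPONENT_REF <INDIRECT_REF <VH.1>, VH.2>

   we recurse on the INDIRECT_REF, find or generate an SSA name for
   VH.1 (say p_3), and rebuild the whole reference as p_3->f, without
   ever materializing the intermediate *p_3 in a temporary.  */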
2380 static tree
2381 create_component_ref_by_pieces (basic_block block, tree expr, tree stmts)
2383 tree genop = expr;
2384 tree folded;
2386 if (TREE_CODE (genop) == VALUE_HANDLE)
2388 tree found = bitmap_find_leader (AVAIL_OUT (block), expr);
2389 if (found)
2390 return found;
2393 if (TREE_CODE (genop) == VALUE_HANDLE)
2395 bitmap_set_t exprset = VALUE_HANDLE_EXPR_SET (expr);
2396 unsigned int firstbit = bitmap_first_set_bit (exprset->expressions);
2397 genop = expression_for_id (firstbit);
2400 switch (TREE_CODE (genop))
2402 case ARRAY_REF:
2404 tree op0;
2405 tree op1, op2, op3;
2406 op0 = create_component_ref_by_pieces (block,
2407 TREE_OPERAND (genop, 0),
2408 stmts);
2409 op1 = TREE_OPERAND (genop, 1);
2410 if (TREE_CODE (op1) == VALUE_HANDLE)
2411 op1 = find_or_generate_expression (block, op1, stmts);
2412 op2 = TREE_OPERAND (genop, 2);
2413 if (op2 && TREE_CODE (op2) == VALUE_HANDLE)
2414 op2 = find_or_generate_expression (block, op2, stmts);
2415 op3 = TREE_OPERAND (genop, 3);
2416 if (op3 && TREE_CODE (op3) == VALUE_HANDLE)
2417 op3 = find_or_generate_expression (block, op3, stmts);
2418 folded = build4 (ARRAY_REF, TREE_TYPE (genop), op0, op1,
2419 op2, op3);
2420 return folded;
2422 case COMPONENT_REF:
2424 bitmap_set_t exprset;
2425 unsigned int firstbit;
2426 tree op0;
2427 tree op1;
2428 op0 = create_component_ref_by_pieces (block,
2429 TREE_OPERAND (genop, 0),
2430 stmts);
2431 exprset = VALUE_HANDLE_EXPR_SET (TREE_OPERAND (genop, 1));
2432 firstbit = bitmap_first_set_bit (exprset->expressions);
2433 op1 = expression_for_id (firstbit);
2434 folded = fold_build3 (COMPONENT_REF, TREE_TYPE (genop), op0, op1,
2435 NULL_TREE);
2436 return folded;
2438 break;
2439 case INDIRECT_REF:
2441 tree op1 = TREE_OPERAND (genop, 0);
2442 tree genop1 = find_or_generate_expression (block, op1, stmts);
2444 folded = fold_build1 (TREE_CODE (genop), TREE_TYPE (genop),
2445 genop1);
2446 return folded;
2448 break;
2449 case VAR_DECL:
2450 case PARM_DECL:
2451 case RESULT_DECL:
2452 case SSA_NAME:
2453 case STRING_CST:
2454 return genop;
2455 default:
2456 gcc_unreachable ();
2459 return NULL_TREE;
2462 /* Find a leader for an expression, or generate one using
2463 create_expression_by_pieces if it's ANTIC but
2464 complex.
2465 BLOCK is the basic_block we are looking for leaders in.
2466 EXPR is the expression to find a leader or generate for.
2467 STMTS is the statement list to put the inserted expressions on.
2468 Returns the SSA_NAME of the LHS of the generated expression or the
2469 leader. */
2471 static tree
2472 find_or_generate_expression (basic_block block, tree expr, tree stmts)
2474 tree genop = bitmap_find_leader (AVAIL_OUT (block), expr);
2476 /* If it's still NULL, it must be a complex expression, so generate
2477 it recursively. */
2478 if (genop == NULL)
2480 bitmap_set_t exprset = VALUE_HANDLE_EXPR_SET (expr);
2481 unsigned int firstbit = bitmap_first_set_bit (exprset->expressions);
2483 genop = expression_for_id (firstbit);
2484 gcc_assert (can_PRE_operation (genop));
2485 genop = create_expression_by_pieces (block, genop, stmts);
2487 return genop;
2490 #define NECESSARY(stmt) stmt->base.asm_written_flag
2491 /* Create an expression in pieces, so that we can handle very complex
2492 expressions that may be ANTIC, but not necessarily GIMPLE.
2493 BLOCK is the basic block the expression will be inserted into,
2494 EXPR is the expression to insert (in value form)
2495 STMTS is a statement list to append the necessary insertions into.
2497 This function will die if we hit some value that shouldn't be
2498 ANTIC but is (IE there is no leader for it, or its components).
2499 This function may also generate expressions that are themselves
2500 partially or fully redundant. Those that are will be either made
2501 fully redundant during the next iteration of insert (for partially
2502 redundant ones), or eliminated by eliminate (for fully redundant
2503 ones). */
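/* For instance (value handles and SSA names invented for
   illustration): to materialize the value expression VH.1 + VH.2 in
   BLOCK, we find or generate leaders for VH.1 and VH.2, say a_1 and
   b_2, and append

     pretmp.4_5 = a_1 + b_2;

   to STMTS, giving the new name pretmp.4_5 the value handle of the
   original expression.  */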
2505 static tree
2506 create_expression_by_pieces (basic_block block, tree expr, tree stmts)
2508 tree temp, name;
2509 tree folded, forced_stmts, newexpr;
2510 tree v;
2511 tree_stmt_iterator tsi;
2513 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
2515 case tcc_expression:
2517 tree op0, op2;
2518 tree arglist;
2519 tree genop0, genop2;
2520 tree genarglist;
2521 tree walker, genwalker;
2523 gcc_assert (TREE_CODE (expr) == CALL_EXPR);
2524 genop2 = NULL;
2526 op0 = TREE_OPERAND (expr, 0);
2527 arglist = TREE_OPERAND (expr, 1);
2528 op2 = TREE_OPERAND (expr, 2);
2530 genop0 = find_or_generate_expression (block, op0, stmts);
2531 genarglist = copy_list (arglist);
2532 for (walker = arglist, genwalker = genarglist;
2533 genwalker && walker;
2534 genwalker = TREE_CHAIN (genwalker), walker = TREE_CHAIN (walker))
2536 TREE_VALUE (genwalker)
2537 = find_or_generate_expression (block, TREE_VALUE (walker),
2538 stmts);
2541 if (op2)
2542 genop2 = find_or_generate_expression (block, op2, stmts);
2543 folded = fold_build3 (TREE_CODE (expr), TREE_TYPE (expr),
2544 genop0, genarglist, genop2);
2545 break;
2549 break;
2550 case tcc_reference:
2552 if (TREE_CODE (expr) == COMPONENT_REF
2553 || TREE_CODE (expr) == ARRAY_REF)
2555 folded = create_component_ref_by_pieces (block, expr, stmts);
2557 else
2559 tree op1 = TREE_OPERAND (expr, 0);
2560 tree genop1 = find_or_generate_expression (block, op1, stmts);
2562 folded = fold_build1 (TREE_CODE (expr), TREE_TYPE (expr),
2563 genop1);
2565 break;
2568 case tcc_binary:
2569 case tcc_comparison:
2571 tree op1 = TREE_OPERAND (expr, 0);
2572 tree op2 = TREE_OPERAND (expr, 1);
2573 tree genop1 = find_or_generate_expression (block, op1, stmts);
2574 tree genop2 = find_or_generate_expression (block, op2, stmts);
2575 folded = fold_build2 (TREE_CODE (expr), TREE_TYPE (expr),
2576 genop1, genop2);
2577 break;
2580 case tcc_unary:
2582 tree op1 = TREE_OPERAND (expr, 0);
2583 tree genop1 = find_or_generate_expression (block, op1, stmts);
2584 folded = fold_build1 (TREE_CODE (expr), TREE_TYPE (expr),
2585 genop1);
2586 break;
2589 default:
2590 gcc_unreachable ();
2593 /* Force the generated expression to be a sequence of GIMPLE
2594 statements.
2595 We have to call unshare_expr because force_gimple_operand may
2596 modify the tree we pass to it. */
2597 newexpr = force_gimple_operand (unshare_expr (folded), &forced_stmts,
2598 false, NULL);
2600 /* If we have any intermediate expressions to the value sets, add them
2601 to the value sets and chain them on in the instruction stream. */
2602 if (forced_stmts)
2604 tsi = tsi_start (forced_stmts);
2605 for (; !tsi_end_p (tsi); tsi_next (&tsi))
2607 tree stmt = tsi_stmt (tsi);
2608 tree forcedname = GIMPLE_STMT_OPERAND (stmt, 0);
2609 tree forcedexpr = GIMPLE_STMT_OPERAND (stmt, 1);
2610 tree val = vn_lookup_or_add (forcedexpr, NULL);
2612 VEC_safe_push (tree, heap, inserted_exprs, stmt);
2613 vn_add (forcedname, val);
2614 bitmap_value_replace_in_set (NEW_SETS (block), forcedname);
2615 bitmap_value_replace_in_set (AVAIL_OUT (block), forcedname);
2616 mark_symbols_for_renaming (stmt);
2618 tsi = tsi_last (stmts);
2619 tsi_link_after (&tsi, forced_stmts, TSI_CONTINUE_LINKING);
2622 /* Build and insert the assignment of the end result to the temporary
2623 that we will return. */
2624 if (!pretemp || TREE_TYPE (expr) != TREE_TYPE (pretemp))
2626 pretemp = create_tmp_var (TREE_TYPE (expr), "pretmp");
2627 get_var_ann (pretemp);
2630 temp = pretemp;
2631 add_referenced_var (temp);
2633 if (TREE_CODE (TREE_TYPE (expr)) == COMPLEX_TYPE
2634 || TREE_CODE (TREE_TYPE (expr)) == VECTOR_TYPE)
2635 DECL_GIMPLE_REG_P (temp) = 1;
2637 newexpr = build2_gimple (GIMPLE_MODIFY_STMT, temp, newexpr);
2638 name = make_ssa_name (temp, newexpr);
2639 GIMPLE_STMT_OPERAND (newexpr, 0) = name;
2640 NECESSARY (newexpr) = 0;
2642 tsi = tsi_last (stmts);
2643 tsi_link_after (&tsi, newexpr, TSI_CONTINUE_LINKING);
2644 VEC_safe_push (tree, heap, inserted_exprs, newexpr);
2646 /* All the symbols in NEWEXPR should be put into SSA form. */
2647 mark_symbols_for_renaming (newexpr);
2649 /* Add a value handle to the temporary.
2650 The value may already exist in either NEW_SETS, or AVAIL_OUT, because
2651 we are creating the expression by pieces, and this particular piece of
2652 the expression may have been represented. There is no harm in replacing
2653 here. */
2654 v = get_value_handle (expr);
2655 vn_add (name, v);
2656 get_or_alloc_expression_id (name);
2657 bitmap_value_replace_in_set (NEW_SETS (block), name);
2658 bitmap_value_replace_in_set (AVAIL_OUT (block), name);
2660 pre_stats.insertions++;
2661 if (dump_file && (dump_flags & TDF_DETAILS))
2663 fprintf (dump_file, "Inserted ");
2664 print_generic_expr (dump_file, newexpr, 0);
2665 fprintf (dump_file, " in predecessor %d\n", block->index);
2668 return name;
2671 /* Insert the to-be-made-available values of expression EXPRNUM for each
2672 predecessor, stored in AVAIL, into the predecessors of BLOCK, and
2673 merge the result with a phi node, given the same value handle as
2674 NODE. Return true if we have inserted new stuff. */
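/* A sketch of the resulting insertion (hypothetical SSA names): if
   the value of a + b is already available in predecessor bb3 as t_7
   but not in predecessor bb4, we emit pretmp_8 = a_4 + b_5 on the bb4
   edge and then create

     prephitmp_9 = PHI <t_7 (bb3), pretmp_8 (bb4)>

   in BLOCK, which makes the expression fully redundant there.  */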
2676 static bool
2677 insert_into_preds_of_block (basic_block block, unsigned int exprnum,
2678 tree *avail)
2680 tree expr = expression_for_id (exprnum);
2681 tree val = get_value_handle (expr);
2682 edge pred;
2683 bool insertions = false;
2684 bool nophi = false;
2685 basic_block bprime;
2686 tree eprime;
2687 edge_iterator ei;
2688 tree type = TREE_TYPE (avail[EDGE_PRED (block, 0)->src->index]);
2689 tree temp;
2691 if (dump_file && (dump_flags & TDF_DETAILS))
2693 fprintf (dump_file, "Found partial redundancy for expression ");
2694 print_generic_expr (dump_file, expr, 0);
2695 fprintf (dump_file, " (");
2696 print_generic_expr (dump_file, val, 0);
2697 fprintf (dump_file, ")");
2698 fprintf (dump_file, "\n");
2701 /* Make sure we aren't creating an induction variable. */
2702 if (block->loop_depth > 0 && EDGE_COUNT (block->preds) == 2
2703 && TREE_CODE_CLASS (TREE_CODE (expr)) != tcc_reference )
2705 bool firstinsideloop = false;
2706 bool secondinsideloop = false;
2707 firstinsideloop = flow_bb_inside_loop_p (block->loop_father,
2708 EDGE_PRED (block, 0)->src);
2709 secondinsideloop = flow_bb_inside_loop_p (block->loop_father,
2710 EDGE_PRED (block, 1)->src);
2711 /* Induction variables only have one edge inside the loop. */
2712 if (firstinsideloop ^ secondinsideloop)
2714 if (dump_file && (dump_flags & TDF_DETAILS))
2715 fprintf (dump_file, "Skipping insertion of phi for partial redundancy: Looks like an induction variable\n");
2716 nophi = true;
2721 /* Make the necessary insertions. */
2722 FOR_EACH_EDGE (pred, ei, block->preds)
2724 tree stmts = alloc_stmt_list ();
2725 tree builtexpr;
2726 bprime = pred->src;
2727 eprime = avail[bprime->index];
2729 if (can_PRE_operation (eprime))
2731 #ifdef ENABLE_CHECKING
2732 tree vh;
2734 /* eprime may be an invariant. */
2735 vh = TREE_CODE (eprime) == VALUE_HANDLE
2736 ? eprime
2737 : get_value_handle (eprime);
2739 /* Ensure that the virtual uses we need reach our block. */
2740 if (TREE_CODE (vh) == VALUE_HANDLE)
2742 int i;
2743 tree vuse;
2744 for (i = 0;
2745 VEC_iterate (tree, VALUE_HANDLE_VUSES (vh), i, vuse);
2746 i++)
2748 size_t id = SSA_NAME_VERSION (vuse);
2749 gcc_assert (bitmap_bit_p (RVUSE_OUT (bprime), id)
2750 || IS_EMPTY_STMT (SSA_NAME_DEF_STMT (vuse)));
2753 #endif
2754 builtexpr = create_expression_by_pieces (bprime,
2755 eprime,
2756 stmts);
2757 gcc_assert (!(pred->flags & EDGE_ABNORMAL));
2758 bsi_insert_on_edge (pred, stmts);
2759 avail[bprime->index] = builtexpr;
2760 insertions = true;
2763 /* If we didn't want a phi node, and we made insertions, we still have
2764 inserted new stuff, and thus return true. If we didn't want a phi node,
2765 and didn't make insertions, we haven't added anything new, so return
2766 false. */
2767 if (nophi && insertions)
2768 return true;
2769 else if (nophi && !insertions)
2770 return false;
2772 /* Now build a phi for the new variable. */
2773 if (!prephitemp || TREE_TYPE (prephitemp) != type)
2775 prephitemp = create_tmp_var (type, "prephitmp");
2776 get_var_ann (prephitemp);
2779 temp = prephitemp;
2780 add_referenced_var (temp);
2783 if (TREE_CODE (type) == COMPLEX_TYPE
2784 || TREE_CODE (type) == VECTOR_TYPE)
2785 DECL_GIMPLE_REG_P (temp) = 1;
2786 temp = create_phi_node (temp, block);
2788 NECESSARY (temp) = 0;
2789 VEC_safe_push (tree, heap, inserted_exprs, temp);
2790 FOR_EACH_EDGE (pred, ei, block->preds)
2791 add_phi_arg (temp, avail[pred->src->index], pred);
2793 vn_add (PHI_RESULT (temp), val);
2795 /* The value should *not* exist in PHI_GEN, or else we wouldn't be doing
2796 this insertion, since we test for the existence of this value in PHI_GEN
2797 before proceeding with the partial redundancy checks in insert_aux.
2799 The value may exist in AVAIL_OUT, in particular, it could be represented
2800 by the expression we are trying to eliminate, in which case we want the
2801 replacement to occur. If it does not exist in AVAIL_OUT, we want it
2802 inserted there.
2804 Similarly to the PHI_GEN case, the value should not exist in NEW_SETS of
2805 this block, because if it did, it would have existed in our dominator's
2806 AVAIL_OUT, and would have been skipped due to the full redundancy check.
2809 bitmap_insert_into_set (PHI_GEN (block),
2810 PHI_RESULT (temp));
2811 bitmap_value_replace_in_set (AVAIL_OUT (block),
2812 PHI_RESULT (temp));
2813 bitmap_insert_into_set (NEW_SETS (block),
2814 PHI_RESULT (temp));
2816 if (dump_file && (dump_flags & TDF_DETAILS))
2818 fprintf (dump_file, "Created phi ");
2819 print_generic_expr (dump_file, temp, 0);
2820 fprintf (dump_file, " in block %d\n", block->index);
2822 pre_stats.phis++;
2823 return true;
2828 /* Perform insertion of partially redundant values.
2829 For BLOCK, do the following:
2830 1. Propagate the NEW_SETS of the dominator into the current block.
2831 If the block has multiple predecessors,
2832 2a. Iterate over the ANTIC expressions for the block to see if
2833 any of them are partially redundant.
2834 2b. If so, insert them into the necessary predecessors to make
2835 the expression fully redundant.
2836 2c. Insert a new PHI merging the values of the predecessors.
2837 2d. Insert the new PHI, and the new expressions, into the
2838 NEW_SETS set.
2839 3. Recursively call ourselves on the dominator children of BLOCK.
2841 Steps 1, 2a, and 3 are done by insert_aux. 2b, 2c and 2d are done by
2842 do_regular_insertion and do_partial_partial_insertion. */
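/* The classic diamond illustrates the steps above (hypothetical
   GIMPLE): in

     if (p)
       x_1 = a + b;
     y_2 = a + b;

   inserting pretmp_3 = a + b on the empty else arm (step 2b) and
   creating prephitmp_4 = PHI <x_1, pretmp_3> (step 2c) make the
   second a + b fully redundant, so eliminate () turns it into
   y_2 = prephitmp_4.  */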
2846 static bool
2847 do_regular_insertion (basic_block block, basic_block dom)
2849 bool new_stuff = false;
2850 VEC (tree, heap) *exprs = sorted_array_from_bitmap_set (ANTIC_IN (block));
2851 tree expr;
2852 int i;
2854 for (i = 0; VEC_iterate (tree, exprs, i, expr); i++)
2856 if (can_PRE_operation (expr) && !AGGREGATE_TYPE_P (TREE_TYPE (expr)))
2858 tree *avail;
2859 tree val;
2860 bool by_some = false;
2861 bool cant_insert = false;
2862 bool all_same = true;
2863 tree first_s = NULL;
2864 edge pred;
2865 basic_block bprime;
2866 tree eprime = NULL_TREE;
2867 edge_iterator ei;
2869 val = get_value_handle (expr);
2870 if (bitmap_set_contains_value (PHI_GEN (block), val))
2871 continue;
2872 if (bitmap_set_contains_value (AVAIL_OUT (dom), val))
2874 if (dump_file && (dump_flags & TDF_DETAILS))
2875 fprintf (dump_file, "Found fully redundant value\n");
2876 continue;
2879 avail = XCNEWVEC (tree, last_basic_block);
2880 FOR_EACH_EDGE (pred, ei, block->preds)
2882 tree vprime;
2883 tree edoubleprime;
2885 /* This can happen in the very weird case
2886 that our fake infinite loop edges have caused a
2887 critical edge to appear. */
2888 if (EDGE_CRITICAL_P (pred))
2890 cant_insert = true;
2891 break;
2893 bprime = pred->src;
2894 eprime = phi_translate (expr, ANTIC_IN (block), NULL,
2895 bprime, block);
2897 /* eprime will generally only be NULL if the
2898 value of the expression, translated
2899 through the PHI for this predecessor, is
2900 undefined. If that is the case, we can't
2901 make the expression fully redundant,
2902 because its value is undefined along a
2903 predecessor path. We can thus break out
2904 early because it doesn't matter what the
2905 rest of the results are. */
2906 if (eprime == NULL)
2908 cant_insert = true;
2909 break;
2912 eprime = fully_constant_expression (eprime);
2913 vprime = get_value_handle (eprime);
2914 gcc_assert (vprime);
2915 edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime),
2916 vprime);
2917 if (edoubleprime == NULL)
2919 avail[bprime->index] = eprime;
2920 all_same = false;
2922 else
2924 avail[bprime->index] = edoubleprime;
2925 by_some = true;
2926 if (first_s == NULL)
2927 first_s = edoubleprime;
2928 else if (!operand_equal_p (first_s, edoubleprime,
2929 0))
2930 all_same = false;
2933 /* If we can insert it, it is not the same value already
2934 available along every predecessor, and it is defined by
2935 some predecessor, then it is partially redundant and
2936 worth inserting. */
2937 if (!cant_insert && !all_same && by_some)
2939 if (insert_into_preds_of_block (block, get_expression_id (expr),
2940 avail))
2941 new_stuff = true;
2943 /* If all edges produce the same value and that value is
2944 an invariant, then the PHI has the same value on all
2945 edges. Note this. */
2946 else if (!cant_insert && all_same && eprime
2947 && is_gimple_min_invariant (eprime)
2948 && !is_gimple_min_invariant (val))
2950 unsigned int j;
2951 bitmap_iterator bi;
2953 bitmap_set_t exprset = VALUE_HANDLE_EXPR_SET (val);
2954 FOR_EACH_EXPR_ID_IN_SET (exprset, j, bi)
2956 tree expr = expression_for_id (j);
2957 if (TREE_CODE (expr) == SSA_NAME)
2959 vn_add (expr, eprime);
2960 pre_stats.constified++;
2964 free (avail);
2968 VEC_free (tree, heap, exprs);
2969 return new_stuff;
2973 /* Perform insertion for partially anticipatable expressions. There
2974 is only one case we will perform insertion for these. This case is
2975 if the expression is partially anticipatable, and fully available.
2976 In this case, we know that putting it earlier will enable us to
2977 remove the later computation. */
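/* A typical case (illustrative only) is a computation executed on
   only some iterations of a loop:

     while (...)
       if (p)
         use (a + b);

   a + b is merely partially anticipatable at the loop header, but if
   its operands are fully available there, inserting the computation
   in the preheader lets us delete the one in the loop body.  */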
2980 static bool
2981 do_partial_partial_insertion (basic_block block, basic_block dom)
2983 bool new_stuff = false;
2984 VEC (tree, heap) *exprs = sorted_array_from_bitmap_set (PA_IN (block));
2985 tree expr;
2986 int i;
2988 for (i = 0; VEC_iterate (tree, exprs, i, expr); i++)
2990 if (can_PRE_operation (expr) && !AGGREGATE_TYPE_P (TREE_TYPE (expr)))
2992 tree *avail;
2993 tree val;
2994 bool by_all = true;
2995 bool cant_insert = false;
2996 edge pred;
2997 basic_block bprime;
2998 tree eprime = NULL_TREE;
2999 edge_iterator ei;
3001 val = get_value_handle (expr);
3002 if (bitmap_set_contains_value (PHI_GEN (block), val))
3003 continue;
3004 if (bitmap_set_contains_value (AVAIL_OUT (dom), val))
3005 continue;
3007 avail = XCNEWVEC (tree, last_basic_block);
3008 FOR_EACH_EDGE (pred, ei, block->preds)
3010 tree vprime;
3011 tree edoubleprime;
3013 /* This can happen in the very weird case
3014 that our fake infinite loop edges have caused a
3015 critical edge to appear. */
3016 if (EDGE_CRITICAL_P (pred))
3018 cant_insert = true;
3019 break;
3021 bprime = pred->src;
3022 eprime = phi_translate (expr, ANTIC_IN (block),
3023 PA_IN (block),
3024 bprime, block);
3026 /* eprime will generally only be NULL if the
3027 value of the expression, translated
3028 through the PHI for this predecessor, is
3029 undefined. If that is the case, we can't
3030 make the expression fully redundant,
3031 because its value is undefined along a
3032 predecessor path. We can thus break out
3033 early because it doesn't matter what the
3034 rest of the results are. */
3035 if (eprime == NULL)
3037 cant_insert = true;
3038 break;
3041 eprime = fully_constant_expression (eprime);
3042 vprime = get_value_handle (eprime);
3043 gcc_assert (vprime);
3044 edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime),
3045 vprime);
3046 if (edoubleprime == NULL)
3048 by_all = false;
3049 break;
3051 else
3052 avail[bprime->index] = edoubleprime;
3056 /* If we can insert it and its value is available along
3057 every predecessor, then the expression is fully available
3058 there, and inserting it earlier will let us remove the
3059 later computation. */
3060 if (!cant_insert && by_all)
3062 pre_stats.pa_insert++;
3063 if (insert_into_preds_of_block (block, get_expression_id (expr),
3064 avail))
3065 new_stuff = true;
3067 free (avail);
3071 VEC_free (tree, heap, exprs);
3072 return new_stuff;
3075 static bool
3076 insert_aux (basic_block block)
3078 basic_block son;
3079 bool new_stuff = false;
3081 if (block)
3083 basic_block dom;
3084 dom = get_immediate_dominator (CDI_DOMINATORS, block);
3085 if (dom)
3087 unsigned i;
3088 bitmap_iterator bi;
3089 bitmap_set_t newset = NEW_SETS (dom);
3090 if (newset)
3092 /* Note that we need to value_replace both NEW_SETS, and
3093 AVAIL_OUT. In both cases, the value may be
3094 represented by some non-simple expression here that we want
3095 to replace with the new name. */
3096 FOR_EACH_EXPR_ID_IN_SET (newset, i, bi)
3098 tree expr = expression_for_id (i);
3099 bitmap_value_replace_in_set (NEW_SETS (block), expr);
3100 bitmap_value_replace_in_set (AVAIL_OUT (block), expr);
3103 if (!single_pred_p (block))
3105 new_stuff |= do_regular_insertion (block, dom);
3106 if (do_partial_partial)
3107 new_stuff |= do_partial_partial_insertion (block, dom);
3111 for (son = first_dom_son (CDI_DOMINATORS, block);
3112 son;
3113 son = next_dom_son (CDI_DOMINATORS, son))
3115 new_stuff |= insert_aux (son);
3118 return new_stuff;
3121 /* Perform insertion of partially redundant values. */
3123 static void
3124 insert (void)
3126 bool new_stuff = true;
3127 basic_block bb;
3128 int num_iterations = 0;
3130 FOR_ALL_BB (bb)
3131 NEW_SETS (bb) = bitmap_set_new ();
3133 while (new_stuff)
3135 num_iterations++;
3136 new_stuff = false;
3137 new_stuff = insert_aux (ENTRY_BLOCK_PTR);
3139 if (num_iterations > 2 && dump_file && (dump_flags & TDF_STATS))
3140 fprintf (dump_file, "insert required %d iterations\n", num_iterations);
3144 /* Return true if EXPR is an SSA variable with no defining statement in
3145 this procedure, *AND* isn't a live-on-entry parameter. */
3147 static bool
3148 is_undefined_value (tree expr)
3150 return (TREE_CODE (expr) == SSA_NAME
3151 && IS_EMPTY_STMT (SSA_NAME_DEF_STMT (expr))
3152 /* PARM_DECLs and hard registers are always defined. */
3153 && TREE_CODE (SSA_NAME_VAR (expr)) != PARM_DECL);
3157 /* Given an SSA variable VAR and an expression EXPR, compute the value
3158 number for EXPR and create a value handle (VAL) for it. If VAR and
3159 EXPR are not the same, associate VAL with VAR. Finally, add VAR to
3160 S1 and its value handle to S2, and to the maximal set unless we
3161 are doing FRE.
3163 VUSES represent the virtual use operands associated with EXPR (if
3164 any). */
3166 static inline void
3167 add_to_sets (tree var, tree expr, tree stmt, bitmap_set_t s1,
3168 bitmap_set_t s2)
3170 tree val = vn_lookup_or_add (expr, stmt);
3172 /* VAR and EXPR may be the same when processing statements for which
3173 we are not computing value numbers (e.g., non-assignments, or
3174 statements that make aliased stores). In those cases, we are
3175 only interested in making VAR available as its own value. */
3176 if (var != expr)
3177 vn_add (var, val);
3179 if (s1)
3180 bitmap_insert_into_set (s1, var);
3182 /* PHI nodes can't go in the maximal sets because they are not in
3183 TMP_GEN, so including them could make the ANTIC calculation
3184 non-monotonic: it would *add* bits on later iterations. */
3185 if (!in_fre && TREE_CODE (SSA_NAME_DEF_STMT (var)) != PHI_NODE)
3186 bitmap_value_insert_into_set (maximal_set, var);
3187 bitmap_value_insert_into_set (s2, var);
3190 /* Find existing value expression that is the same as T,
3191 and return it if it exists. */
3193 static inline tree
3194 find_existing_value_expr (tree t, tree stmt)
3196 bitmap_iterator bi;
3197 unsigned int bii;
3198 tree vh;
3199 bitmap_set_t exprset;
3201 if (REFERENCE_CLASS_P (t))
3202 vh = vn_lookup (t, stmt);
3203 else
3204 vh = vn_lookup (t, NULL);
3206 if (!vh)
3207 return NULL;
3208 exprset = VALUE_HANDLE_EXPR_SET (vh);
3209 FOR_EACH_EXPR_ID_IN_SET (exprset, bii, bi)
3211 tree efi = expression_for_id (bii);
3212 if (expressions_equal_p (t, efi))
3213 return efi;
3215 return NULL;
3218 /* Given a unary or binary expression EXPR, create and return a new
3219 expression with the same structure as EXPR but with its operands
3220 replaced with the value handles of each of the operands of EXPR.
3222 VUSES represent the virtual use operands associated with EXPR (if
3223 any). Insert EXPR's operands into the EXP_GEN set for BLOCK. */
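/* For example (value handles invented for illustration): for the
   expression a_1 + b_2, where a_1 has value handle VH.1 and b_2 has
   VH.2, we return a pool allocated copy of the form VH.1 + VH.2 and
   insert a_1 and b_2 into EXP_GEN (BLOCK).  */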
3225 static inline tree
3226 create_value_expr_from (tree expr, basic_block block, tree stmt)
3228 int i;
3229 enum tree_code code = TREE_CODE (expr);
3230 tree vexpr;
3231 alloc_pool pool;
3232 tree efi;
3234 gcc_assert (TREE_CODE_CLASS (code) == tcc_unary
3235 || TREE_CODE_CLASS (code) == tcc_binary
3236 || TREE_CODE_CLASS (code) == tcc_comparison
3237 || TREE_CODE_CLASS (code) == tcc_reference
3238 || TREE_CODE_CLASS (code) == tcc_expression
3239 || TREE_CODE_CLASS (code) == tcc_exceptional
3240 || TREE_CODE_CLASS (code) == tcc_declaration);
3242 if (TREE_CODE_CLASS (code) == tcc_unary)
3243 pool = unary_node_pool;
3244 else if (TREE_CODE_CLASS (code) == tcc_reference)
3245 pool = reference_node_pool;
3246 else if (TREE_CODE_CLASS (code) == tcc_binary)
3247 pool = binary_node_pool;
3248 else if (TREE_CODE_CLASS (code) == tcc_comparison)
3249 pool = comparison_node_pool;
3250 else if (TREE_CODE_CLASS (code) == tcc_exceptional)
3252 gcc_assert (code == TREE_LIST);
3253 pool = list_node_pool;
3255 else
3257 gcc_assert (code == CALL_EXPR);
3258 pool = expression_node_pool;
3261 vexpr = (tree) pool_alloc (pool);
3262 memcpy (vexpr, expr, tree_size (expr));
3264 /* This case is only for TREE_LIST's that appear as part of
3265 CALL_EXPR's. Anything else is a bug, but we can't easily verify
3266 this, hence this comment. TREE_LIST is not handled by the
3267 general case below because they don't have a fixed length, or
3268 operands, so you can't access purpose/value/chain through
3269 TREE_OPERAND macros. */
3271 if (code == TREE_LIST)
3273 tree op = NULL_TREE;
3274 tree temp = NULL_TREE;
3275 if (TREE_CHAIN (vexpr))
3276 temp = create_value_expr_from (TREE_CHAIN (vexpr), block, stmt);
3277 TREE_CHAIN (vexpr) = temp ? temp : TREE_CHAIN (vexpr);
3280 /* Recursively value-numberize reference ops. */
3281 if (REFERENCE_CLASS_P (TREE_VALUE (vexpr)))
3283 tree tempop;
3284 op = TREE_VALUE (vexpr);
3285 tempop = create_value_expr_from (op, block, stmt);
3286 op = tempop ? tempop : op;
3288 TREE_VALUE (vexpr) = vn_lookup_or_add (op, stmt);
3290 else
3292 op = TREE_VALUE (vexpr);
3293 TREE_VALUE (vexpr) = vn_lookup_or_add (TREE_VALUE (vexpr), NULL);
3295 /* This is the equivalent of inserting op into EXP_GEN like we
3296 do below. */
3297 if (!is_undefined_value (op))
3298 bitmap_value_insert_into_set (EXP_GEN (block), op);
3300 efi = find_existing_value_expr (vexpr, stmt);
3301 if (efi)
3302 return efi;
3303 get_or_alloc_expression_id (vexpr);
3304 return vexpr;
3307 for (i = 0; i < TREE_CODE_LENGTH (code); i++)
3309 tree val, op;
3311 op = TREE_OPERAND (expr, i);
3312 if (op == NULL_TREE)
3313 continue;
3315 /* Recursively value-numberize reference ops and tree lists. */
3316 if (REFERENCE_CLASS_P (op))
3318 tree tempop = create_value_expr_from (op, block, stmt);
3319 op = tempop ? tempop : op;
3320 val = vn_lookup_or_add (op, stmt);
3322 else if (TREE_CODE (op) == TREE_LIST)
3324 tree tempop;
3326 gcc_assert (TREE_CODE (expr) == CALL_EXPR);
3327 tempop = create_value_expr_from (op, block, stmt);
3329 op = tempop ? tempop : op;
3330 vn_lookup_or_add (op, NULL);
3331 /* Unlike everywhere else, we do *not* want to replace the
3332 TREE_LIST itself with a value number, because support
3333 functions we call will blow up. */
3334 val = op;
3336 else
3337 /* Create a value handle for OP and add it to VEXPR. */
3338 val = vn_lookup_or_add (op, NULL);
3340 if (!is_undefined_value (op) && TREE_CODE (op) != TREE_LIST)
3341 bitmap_value_insert_into_set (EXP_GEN (block), op);
3343 if (TREE_CODE (val) == VALUE_HANDLE)
3344 TREE_TYPE (val) = TREE_TYPE (TREE_OPERAND (vexpr, i));
3346 TREE_OPERAND (vexpr, i) = val;
3348 efi = find_existing_value_expr (vexpr, stmt);
3349 if (efi)
3350 return efi;
3351 get_or_alloc_expression_id (vexpr);
3352 return vexpr;
3355 /* Given a statement STMT and its right hand side which is a load, try
3356 to look for the expression stored in the location for the load, and
3357 return true if a useful equivalence was recorded for LHS. */
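/* Concretely (a hypothetical fragment): given

     *p_1 = x_2;
     ...
     y_3 = *p_1;

   where every VUSE of the load comes from the store, we give y_3 the
   same value number as x_2, so the load can later be eliminated in
   favor of x_2.  */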
3359 static bool
3360 try_look_through_load (tree lhs, tree mem_ref, tree stmt, basic_block block)
3362 tree store_stmt = NULL;
3363 tree rhs;
3364 ssa_op_iter i;
3365 tree vuse;
3367 FOR_EACH_SSA_TREE_OPERAND (vuse, stmt, i, SSA_OP_VIRTUAL_USES)
3369 tree def_stmt;
3371 gcc_assert (TREE_CODE (vuse) == SSA_NAME);
3372 def_stmt = SSA_NAME_DEF_STMT (vuse);
3374 /* If there is no useful statement for this VUSE, we'll not find a
3375 useful expression to return either. Likewise, if there is a
3376 statement but it is not a simple assignment or it has virtual
3377 uses, we can stop right here. Note that this means we do
3378 not look through PHI nodes, which is intentional. */
3379 if (!def_stmt
3380 || TREE_CODE (def_stmt) != GIMPLE_MODIFY_STMT
3381 || !ZERO_SSA_OPERANDS (def_stmt, SSA_OP_VIRTUAL_USES))
3382 return false;
3384 /* If this is not the same statement as one we have looked at for
3385 another VUSE of STMT already, we have two statements producing
3386 something that reaches our STMT. */
3387 if (store_stmt && store_stmt != def_stmt)
3388 return false;
3389 else
3391 /* Is this a store to the exact same location as the one we are
3392 loading from in STMT? */
3393 if (!operand_equal_p (GIMPLE_STMT_OPERAND (def_stmt, 0), mem_ref, 0))
3394 return false;
3396 /* Otherwise remember this statement and see if all other VUSEs
3397 come from the same statement. */
3398 store_stmt = def_stmt;
3402 /* Alright then, we have visited all VUSEs of STMT and we've determined
3403 that all of them come from the same statement STORE_STMT. See if there
3404 is a useful expression we can deduce from STORE_STMT. */
3405 rhs = GIMPLE_STMT_OPERAND (store_stmt, 1);
3406 if ((TREE_CODE (rhs) == SSA_NAME
3407 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs))
3408 || is_gimple_min_invariant (rhs)
3409 || TREE_CODE (rhs) == ADDR_EXPR
3410 || TREE_INVARIANT (rhs))
3413 /* Yay! Compute a value number for the RHS of the statement and
3414 add its value to the AVAIL_OUT set for the block. Add the LHS
3415 to TMP_GEN. */
3416 add_to_sets (lhs, rhs, store_stmt, TMP_GEN (block), AVAIL_OUT (block));
3417 if (TREE_CODE (rhs) == SSA_NAME
3418 && !is_undefined_value (rhs))
3419 bitmap_value_insert_into_set (EXP_GEN (block), rhs);
3420 return true;
3423 return false;
3426 /* Return a copy of NODE that is stored in the temporary alloc_pool's.
3427 This is made recursively true, so that the operands are stored in
3428 the pool as well. */
3430 static tree
3431 poolify_tree (tree node)
3433 switch (TREE_CODE (node))
3435 case INDIRECT_REF:
3437 tree temp = (tree) pool_alloc (reference_node_pool);
3438 memcpy (temp, node, tree_size (node));
3439 TREE_OPERAND (temp, 0) = poolify_tree (TREE_OPERAND (temp, 0));
3440 return temp;
3442 break;
3443 case GIMPLE_MODIFY_STMT:
3445 tree temp = (tree) pool_alloc (modify_expr_node_pool);
3446 memcpy (temp, node, tree_size (node));
3447 GIMPLE_STMT_OPERAND (temp, 0) =
3448 poolify_tree (GIMPLE_STMT_OPERAND (temp, 0));
3449 GIMPLE_STMT_OPERAND (temp, 1) =
3450 poolify_tree (GIMPLE_STMT_OPERAND (temp, 1));
3451 return temp;
3453 break;
3454 case SSA_NAME:
3455 case INTEGER_CST:
3456 case STRING_CST:
3457 case REAL_CST:
3458 case PARM_DECL:
3459 case VAR_DECL:
3460 case RESULT_DECL:
3461 return node;
3462 default:
3463 gcc_unreachable ();
3467 static tree modify_expr_template;
3469 /* Allocate a GIMPLE_MODIFY_STMT with TYPE, and operands OP1, OP2 in the
3470 alloc pools and return it. */
3471 static tree
3472 poolify_modify_stmt (tree op1, tree op2)
3474 if (modify_expr_template == NULL)
3475 modify_expr_template = build2_gimple (GIMPLE_MODIFY_STMT, op1, op2);
3477 GIMPLE_STMT_OPERAND (modify_expr_template, 0) = op1;
3478 GIMPLE_STMT_OPERAND (modify_expr_template, 1) = op2;
3480 return poolify_tree (modify_expr_template);
3484 /* For each real store operation of the form
3485 *a = <value> that we see, create a corresponding fake store of the
3486 form storetmp_<version> = *a.
3488 This enables AVAIL computation to mark the results of stores as
3489 available. Without this, you'd need to do some computation to
3490 mark the result of stores as ANTIC and AVAIL at all the right
3491 points.
3492 To save memory, we keep the store
3493 statements pool allocated until we decide whether they are
3494 necessary or not. */
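/* For example (names invented for illustration): after the store

     *p_1 = b_2;

   we insert the pool allocated fake store

     storetmp_3 = *p_1;

   so that the value stored through *p_1 is AVAIL after the store
   without any extra dataflow computation.  */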
3496 static void
3497 insert_fake_stores (void)
3499 basic_block block;
3501 FOR_ALL_BB (block)
3503 block_stmt_iterator bsi;
3504 for (bsi = bsi_start (block); !bsi_end_p (bsi); bsi_next (&bsi))
3506 tree stmt = bsi_stmt (bsi);
3508 /* We can't generate SSA names for stores that are complex
3509 or aggregate. We also want to ignore things whose
3510 virtual uses occur in abnormal phis. */
3512 if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
3513 && TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 0)) == INDIRECT_REF
3514 && !AGGREGATE_TYPE_P (TREE_TYPE (GIMPLE_STMT_OPERAND (stmt, 0)))
3515 && TREE_CODE (TREE_TYPE (GIMPLE_STMT_OPERAND
3516 (stmt, 0))) != COMPLEX_TYPE)
3518 ssa_op_iter iter;
3519 def_operand_p defp;
3520 tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
3521 tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
3522 tree new;
3523 bool notokay = false;
3525 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_VIRTUAL_DEFS)
3527 tree defvar = DEF_FROM_PTR (defp);
3528 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (defvar))
3530 notokay = true;
3531 break;
3535 if (notokay)
3536 continue;
3538 if (!storetemp || TREE_TYPE (rhs) != TREE_TYPE (storetemp))
3540 storetemp = create_tmp_var (TREE_TYPE (rhs), "storetmp");
3541 if (TREE_CODE (TREE_TYPE (storetemp)) == VECTOR_TYPE)
3542 DECL_GIMPLE_REG_P (storetemp) = 1;
3543 get_var_ann (storetemp);
3546 new = poolify_modify_stmt (storetemp, lhs);
3548 lhs = make_ssa_name (storetemp, new);
3549 GIMPLE_STMT_OPERAND (new, 0) = lhs;
3550 create_ssa_artificial_load_stmt (new, stmt);
3552 NECESSARY (new) = 0;
3553 VEC_safe_push (tree, heap, inserted_exprs, new);
3554 VEC_safe_push (tree, heap, need_creation, new);
3555 bsi_insert_after (&bsi, new, BSI_NEW_STMT);
3561 /* Turn the pool allocated fake stores that we created back into real
3562 GC allocated ones if they turned out to be necessary to PRE some
3563 expressions. */
3565 static void
3566 realify_fake_stores (void)
3568 unsigned int i;
3569 tree stmt;
3571 for (i = 0; VEC_iterate (tree, need_creation, i, stmt); i++)
3573 if (NECESSARY (stmt))
3575 block_stmt_iterator bsi;
3576 tree newstmt;
3578 /* Mark the temp variable as referenced. */
3579 add_referenced_var (SSA_NAME_VAR (GIMPLE_STMT_OPERAND (stmt, 0)));
3581 /* Put the new statement in GC memory, fix up the
3582 SSA_NAME_DEF_STMT on it, and then put it in place of
3583 the old statement before the store in the IR stream
3584 as a plain ssa name copy. */
3585 bsi = bsi_for_stmt (stmt);
3586 bsi_prev (&bsi);
3587 newstmt = build2_gimple (GIMPLE_MODIFY_STMT,
3588 GIMPLE_STMT_OPERAND (stmt, 0),
3589 GIMPLE_STMT_OPERAND (bsi_stmt (bsi), 1));
3590 SSA_NAME_DEF_STMT (GIMPLE_STMT_OPERAND (newstmt, 0)) = newstmt;
3591 bsi_insert_before (&bsi, newstmt, BSI_SAME_STMT);
3592 bsi = bsi_for_stmt (stmt);
3593 bsi_remove (&bsi, true);
3595 else
3596 release_defs (stmt);
3600 /* Tree-combine a value number expression *EXPR_P that does a type
3601 conversion with the value number expression of its operand.
3602 Returns true if *EXPR_P simplifies to a value number or
3603 gimple min-invariant expression different from EXPR_P and
3604 sets *EXPR_P to the simplified expression value number.
3605 Otherwise returns false and does not change *EXPR_P. */
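/* For instance (hypothetical handles): if *EXPR_P is (int) VH.1 and
   VH.1's expression set contains a narrower value c_2, fold_unary may
   simplify the conversion of c_2; if the simplified result already
   has a value number, or is a gimple min-invariant, we use it instead
   of creating a fresh value for the conversion.  */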
3607 static bool
3608 try_combine_conversion (tree *expr_p)
3610 tree expr = *expr_p;
3611 tree t;
3612 bitmap_set_t exprset;
3613 unsigned int firstbit;
3615 if (!((TREE_CODE (expr) == NOP_EXPR
3616 || TREE_CODE (expr) == CONVERT_EXPR
3617 || TREE_CODE (expr) == REALPART_EXPR
3618 || TREE_CODE (expr) == IMAGPART_EXPR)
3619 && TREE_CODE (TREE_OPERAND (expr, 0)) == VALUE_HANDLE
3620 && !VALUE_HANDLE_VUSES (TREE_OPERAND (expr, 0))))
3621 return false;
3623 exprset = VALUE_HANDLE_EXPR_SET (TREE_OPERAND (expr, 0));
3624 firstbit = bitmap_first_set_bit (exprset->expressions);
3625 t = fold_unary (TREE_CODE (expr), TREE_TYPE (expr),
3626 expression_for_id (firstbit));
3627 if (!t)
3628 return false;
3630 /* Strip useless type conversions, which is safe in the optimizers but
3631 not generally in fold. */
3632 STRIP_USELESS_TYPE_CONVERSION (t);
3634 /* Disallow value expressions we have no value number for already, as
3635 we would miss a leader for it here. */
3636 if (!(TREE_CODE (t) == VALUE_HANDLE
3637 || is_gimple_min_invariant (t)))
3638 t = vn_lookup (t, NULL);
3640 if (t && t != expr)
3642 *expr_p = t;
3643 return true;
3645 return false;
3648 /* Compute the AVAIL set for all basic blocks.
3650 This function performs value numbering of the statements in each basic
3651 block. The AVAIL sets are built from information we glean while doing
3652 this value numbering, since the AVAIL sets contain only one entry per
3653 value.
3655 AVAIL_IN[BLOCK] = AVAIL_OUT[dom(BLOCK)].
3656 AVAIL_OUT[BLOCK] = AVAIL_IN[BLOCK] U PHI_GEN[BLOCK] U TMP_GEN[BLOCK]. */
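/* E.g. (an illustrative fragment): in a block containing
   x_1 = a_2 + b_3, value numbering adds the mapping from the value
   handle of a_2 + b_3 to x_1 in AVAIL_OUT, puts x_1 in TMP_GEN, and
   puts the value expression for a_2 + b_3 in EXP_GEN.  */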
3658 static void
3659 compute_avail (void)
3661 basic_block block, son;
3662 basic_block *worklist;
3663 size_t sp = 0;
3664 tree param;
3665 /* For arguments with default definitions, we pretend they are
3666 defined in the entry block. */
3667 for (param = DECL_ARGUMENTS (current_function_decl);
3668 param;
3669 param = TREE_CHAIN (param))
3671 if (gimple_default_def (cfun, param) != NULL)
3673 tree def = gimple_default_def (cfun, param);
3675 vn_lookup_or_add (def, NULL);
3676 bitmap_insert_into_set (TMP_GEN (ENTRY_BLOCK_PTR), def);
3677 if (!in_fre)
3678 bitmap_value_insert_into_set (maximal_set, def);
3679 bitmap_value_insert_into_set (AVAIL_OUT (ENTRY_BLOCK_PTR), def);
3683 /* Likewise for the static chain decl. */
3684 if (cfun->static_chain_decl)
3686 param = cfun->static_chain_decl;
3687 if (gimple_default_def (cfun, param) != NULL)
3689 tree def = gimple_default_def (cfun, param);
3691 vn_lookup_or_add (def, NULL);
3692 bitmap_insert_into_set (TMP_GEN (ENTRY_BLOCK_PTR), def);
3693 if (!in_fre)
3694 bitmap_value_insert_into_set (maximal_set, def);
3695 bitmap_value_insert_into_set (AVAIL_OUT (ENTRY_BLOCK_PTR), def);
3699 /* Allocate the worklist. */
3700 worklist = XNEWVEC (basic_block, n_basic_blocks);
3702 /* Seed the algorithm by putting the dominator children of the entry
3703 block on the worklist. */
3704 for (son = first_dom_son (CDI_DOMINATORS, ENTRY_BLOCK_PTR);
3705 son;
3706 son = next_dom_son (CDI_DOMINATORS, son))
3707 worklist[sp++] = son;
3709 /* Loop until the worklist is empty. */
3710 while (sp)
3712 block_stmt_iterator bsi;
3713 tree stmt, phi;
3714 basic_block dom;
3715 unsigned int stmt_uid = 1;
3717 /* Pick a block from the worklist. */
3718 block = worklist[--sp];
3720 /* Initially, the set of available values in BLOCK is that of
3721 its immediate dominator. */
3722 dom = get_immediate_dominator (CDI_DOMINATORS, block);
3723 if (dom)
3724 bitmap_set_copy (AVAIL_OUT (block), AVAIL_OUT (dom));
3726 /* Generate values for PHI nodes. */
3727 for (phi = phi_nodes (block); phi; phi = PHI_CHAIN (phi))
3729 /* We have no need for virtual phis, as they don't represent
3730 actual computations. */
3731 if (is_gimple_reg (PHI_RESULT (phi)))
3733 add_to_sets (PHI_RESULT (phi), PHI_RESULT (phi), NULL,
3734 PHI_GEN (block), AVAIL_OUT (block));
3738 /* Now compute value numbers and populate value sets with all
3739 the expressions computed in BLOCK. */
3740 for (bsi = bsi_start (block); !bsi_end_p (bsi); bsi_next (&bsi))
3742 stmt_ann_t ann;
3743 ssa_op_iter iter;
3744 tree op;
3746 stmt = bsi_stmt (bsi);
3747 ann = stmt_ann (stmt);
3749 ann->uid = stmt_uid++;
3751 /* For regular value numbering, we are only interested in
3752 assignments of the form X_i = EXPR, where EXPR represents
3753 an "interesting" computation, has no volatile operands,
3754 and X_i doesn't flow through an abnormal edge. */
3755 if (TREE_CODE (stmt) == RETURN_EXPR
3756 && !ann->has_volatile_ops)
3758 tree realstmt = stmt;
3759 tree lhs;
3760 tree rhs;
3762 stmt = TREE_OPERAND (stmt, 0);
3763 if (stmt && TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
3765 lhs = GIMPLE_STMT_OPERAND (stmt, 0);
3766 rhs = GIMPLE_STMT_OPERAND (stmt, 1);
3767 if (TREE_CODE (rhs) == SSA_NAME
3768 && !is_undefined_value (rhs))
3769 bitmap_value_insert_into_set (EXP_GEN (block), rhs);
3771 FOR_EACH_SSA_TREE_OPERAND (op, realstmt, iter, SSA_OP_DEF)
3772 add_to_sets (op, op, NULL, TMP_GEN (block),
3773 AVAIL_OUT (block));
3775 continue;
3778 else if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
3779 && !ann->has_volatile_ops
3780 && TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 0)) == SSA_NAME
3781 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI
3782 (GIMPLE_STMT_OPERAND (stmt, 0)))
3784 tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
3785 tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
3787 /* Try to look through loads. */
3788 if (TREE_CODE (lhs) == SSA_NAME
3789 && !ZERO_SSA_OPERANDS (stmt, SSA_OP_VIRTUAL_USES)
3790 && try_look_through_load (lhs, rhs, stmt, block))
3791 continue;
3793 STRIP_USELESS_TYPE_CONVERSION (rhs);
3794 if (can_value_number_operation (rhs))
3796 /* For value numberable operation, create a
3797 duplicate expression with the operands replaced
3798 with the value handles of the original RHS. */
3799 tree newt = create_value_expr_from (rhs, block, stmt);
3800 if (newt)
3802 /* If we can combine a conversion expression
3803 with the expression for its operand just
3804 record the value number for it. */
3805 if (try_combine_conversion (&newt))
3806 vn_add (lhs, newt);
3807 else
3809 tree val = vn_lookup_or_add (newt, stmt);
3810 vn_add (lhs, val);
3811 if (!in_fre)
3812 bitmap_value_insert_into_set (maximal_set, newt);
3813 bitmap_value_insert_into_set (EXP_GEN (block), newt);
3815 bitmap_insert_into_set (TMP_GEN (block), lhs);
3816 bitmap_value_insert_into_set (AVAIL_OUT (block), lhs);
3817 continue;
3820 else if ((TREE_CODE (rhs) == SSA_NAME
3821 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs))
3822 || is_gimple_min_invariant (rhs)
3823 || TREE_CODE (rhs) == ADDR_EXPR
3824 || TREE_INVARIANT (rhs)
3825 || DECL_P (rhs))
3827 /* Compute a value number for the RHS of the statement
3828 and add its value to the AVAIL_OUT set for the block.
3829 Add the LHS to TMP_GEN. */
3830 add_to_sets (lhs, rhs, stmt, TMP_GEN (block),
3831 AVAIL_OUT (block));
3833 if (TREE_CODE (rhs) == SSA_NAME
3834 && !is_undefined_value (rhs))
3835 bitmap_value_insert_into_set (EXP_GEN (block), rhs);
3836 continue;
3840 /* For any other statement that we don't recognize, simply
3841 make the names generated by the statement available in
3842 AVAIL_OUT and TMP_GEN. */
3843 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF)
3844 add_to_sets (op, op, NULL, TMP_GEN (block), AVAIL_OUT (block));
3846 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
3847 add_to_sets (op, op, NULL, NULL , AVAIL_OUT (block));
3850 /* Put the dominator children of BLOCK on the worklist of blocks
3851 to compute available sets for. */
3852 for (son = first_dom_son (CDI_DOMINATORS, block);
3853 son;
3854 son = next_dom_son (CDI_DOMINATORS, son))
3855 worklist[sp++] = son;
3858 free (worklist);
3862 /* Eliminate fully redundant computations. */
3864 static void
3865 eliminate (void)
3867 basic_block b;
3869 FOR_EACH_BB (b)
3871 block_stmt_iterator i;
3873 for (i = bsi_start (b); !bsi_end_p (i); bsi_next (&i))
3875 tree stmt = bsi_stmt (i);
3877 /* Lookup the RHS of the expression, see if we have an
3878 available computation for it. If so, replace the RHS with
3879 the available computation. */
3880 if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
3881 && TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 0)) == SSA_NAME
3882 && TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1)) != SSA_NAME
3883 && !is_gimple_min_invariant (GIMPLE_STMT_OPERAND (stmt, 1))
3884 && !stmt_ann (stmt)->has_volatile_ops)
3886 tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
3887 tree *rhs_p = &GIMPLE_STMT_OPERAND (stmt, 1);
3888 tree sprime;
3890 sprime = bitmap_find_leader (AVAIL_OUT (b),
3891 vn_lookup (lhs, NULL));
3892 if (sprime
3893 && sprime != lhs
3894 && (TREE_CODE (*rhs_p) != SSA_NAME
3895 || may_propagate_copy (*rhs_p, sprime)))
3897 gcc_assert (sprime != *rhs_p);
3899 if (dump_file && (dump_flags & TDF_DETAILS))
3901 fprintf (dump_file, "Replaced ");
3902 print_generic_expr (dump_file, *rhs_p, 0);
3903 fprintf (dump_file, " with ");
3904 print_generic_expr (dump_file, sprime, 0);
3905 fprintf (dump_file, " in ");
3906 print_generic_stmt (dump_file, stmt, 0);
3909 if (TREE_CODE (sprime) == SSA_NAME)
3910 NECESSARY (SSA_NAME_DEF_STMT (sprime)) = 1;
3911 /* We need to make sure the new and old types actually match,
3912 which may require adding a simple cast; fold_convert
3913 will do that for us. */
3914 if (TREE_CODE (*rhs_p) != SSA_NAME
3915 && !tree_ssa_useless_type_conversion_1 (TREE_TYPE (*rhs_p),
3916 TREE_TYPE (sprime)))
3917 sprime = fold_convert (TREE_TYPE (*rhs_p), sprime);
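/* Such a conversion is needed when, for example, an int-typed RHS
   is replaced by an unsigned leader of the same width: the values
   agree, but the conversion between the types is not useless, so an
   explicit cast must be kept in the IL (example types chosen for
   illustration).  */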
3919 pre_stats.eliminations++;
3920 propagate_tree_value (rhs_p, sprime);
3921 update_stmt (stmt);
3923 /* If we removed EH side effects from the statement, clean
3924 its EH information. */
3925 if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
3927 bitmap_set_bit (need_eh_cleanup,
3928 bb_for_stmt (stmt)->index);
3929 if (dump_file && (dump_flags & TDF_DETAILS))
3930 fprintf (dump_file, " Removed EH side effects.\n");
3938 /* Borrow a bit of tree-ssa-dce.c for the moment.
3939 XXX: In 4.1, we should be able to just run a DCE pass after PRE, though
3940 doing it here may be a bit faster, and we may want critical edges kept split. */
3942 /* If OP's defining statement has not already been determined to be necessary,
3943 mark that statement necessary. Return the statement if it is newly
3944 necessary, and NULL otherwise. */
3946 static inline tree
3947 mark_operand_necessary (tree op)
3949 tree stmt;
3951 gcc_assert (op);
3953 if (TREE_CODE (op) != SSA_NAME)
3954 return NULL;
3956 stmt = SSA_NAME_DEF_STMT (op);
3957 gcc_assert (stmt);
3959 if (NECESSARY (stmt)
3960 || IS_EMPTY_STMT (stmt))
3961 return NULL;
3963 NECESSARY (stmt) = 1;
3964 return stmt;
3967 /* Because we don't exactly follow the standard PRE algorithm, sometimes
3968 decide not to insert PHI nodes, and because value numbering of casts isn't
3969 perfect, we sometimes end up inserting dead code. This simple DCE-like
3970 pass removes any insertions we made that weren't actually used. */
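/* For example, if insert speculatively created

     pretmp.6_9 = a_1 + b_2;

   (name invented for illustration) and no surviving statement ever
   uses pretmp.6_9, NECESSARY never gets set on its defining
   statement, and the walk below deletes the insertion again.  */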
3972 static void
3973 remove_dead_inserted_code (void)
3975 VEC(tree,heap) *worklist = NULL;
3976 int i;
3977 tree t;
3979 worklist = VEC_alloc (tree, heap, VEC_length (tree, inserted_exprs));
3980 for (i = 0; VEC_iterate (tree, inserted_exprs, i, t); i++)
3982 if (NECESSARY (t))
3983 VEC_quick_push (tree, worklist, t);
3985 while (VEC_length (tree, worklist) > 0)
3987 t = VEC_pop (tree, worklist);
3989 /* PHI nodes are somewhat special in that each PHI alternative has
3990 data and control dependencies. All the statements feeding the
3991 PHI node's arguments are always necessary. */
3992 if (TREE_CODE (t) == PHI_NODE)
3994 int k;
3996 VEC_reserve (tree, heap, worklist, PHI_NUM_ARGS (t));
3997 for (k = 0; k < PHI_NUM_ARGS (t); k++)
3999 tree arg = PHI_ARG_DEF (t, k);
4000 if (TREE_CODE (arg) == SSA_NAME)
4002 arg = mark_operand_necessary (arg);
4003 if (arg)
4004 VEC_quick_push (tree, worklist, arg);
4008 else
4010 /* Propagate through the operands. Examine all the USE, VUSE and
4011 VDEF operands in this statement. Mark all the statements
4012 which feed this statement's uses as necessary. */
4013 ssa_op_iter iter;
4014 tree use;
4016 /* The operands of VDEF expressions are also needed, as they
4017 represent potential definitions that may reach this
4018 statement (VDEF operands allow us to follow def-def
4019 links). */
4021 FOR_EACH_SSA_TREE_OPERAND (use, t, iter, SSA_OP_ALL_USES)
4023 tree n = mark_operand_necessary (use);
4024 if (n)
4025 VEC_safe_push (tree, heap, worklist, n);
4030 for (i = 0; VEC_iterate (tree, inserted_exprs, i, t); i++)
4032 if (!NECESSARY (t))
4034 block_stmt_iterator bsi;
4036 if (dump_file && (dump_flags & TDF_DETAILS))
4038 fprintf (dump_file, "Removing unnecessary insertion:");
4039 print_generic_stmt (dump_file, t, 0);
4042 if (TREE_CODE (t) == PHI_NODE)
4044 remove_phi_node (t, NULL, true);
4046 else
4048 bsi = bsi_for_stmt (t);
4049 bsi_remove (&bsi, true);
4050 release_defs (t);
4054 VEC_free (tree, heap, worklist);
4057 /* Initialize data structures used by PRE. */
4059 static void
4060 init_pre (bool do_fre)
4062 basic_block bb;
4064 next_expression_id = 0;
4065 expressions = NULL;
4066 in_fre = do_fre;
4068 inserted_exprs = NULL;
4069 need_creation = NULL;
4070 pretemp = NULL_TREE;
4071 storetemp = NULL_TREE;
4072 mergephitemp = NULL_TREE;
4073 prephitemp = NULL_TREE;
4075 vn_init ();
4076 if (!do_fre)
4077 loop_optimizer_init (LOOPS_NORMAL);
4079 connect_infinite_loops_to_exit ();
4080 memset (&pre_stats, 0, sizeof (pre_stats));
4083 postorder = XNEWVEC (int, n_basic_blocks - NUM_FIXED_BLOCKS);
4084 post_order_compute (postorder, false);
4086 FOR_ALL_BB (bb)
4087 bb->aux = xcalloc (1, sizeof (struct bb_bitmap_sets));
4089 calculate_dominance_info (CDI_POST_DOMINATORS);
4090 calculate_dominance_info (CDI_DOMINATORS);
4092 bitmap_obstack_initialize (&grand_bitmap_obstack);
4093 phi_translate_table = htab_create (5110, expr_pred_trans_hash,
4094 expr_pred_trans_eq, free);
4095 bitmap_set_pool = create_alloc_pool ("Bitmap sets",
4096 sizeof (struct bitmap_set), 30);
4097 binary_node_pool = create_alloc_pool ("Binary tree nodes",
4098 tree_code_size (PLUS_EXPR), 30);
4099 unary_node_pool = create_alloc_pool ("Unary tree nodes",
4100 tree_code_size (NEGATE_EXPR), 30);
4101 reference_node_pool = create_alloc_pool ("Reference tree nodes",
4102 tree_code_size (ARRAY_REF), 30);
4103 expression_node_pool = create_alloc_pool ("Expression tree nodes",
4104 tree_code_size (CALL_EXPR), 30);
4105 list_node_pool = create_alloc_pool ("List tree nodes",
4106 tree_code_size (TREE_LIST), 30);
4107 comparison_node_pool = create_alloc_pool ("Comparison tree nodes",
4108 tree_code_size (EQ_EXPR), 30);
4109 modify_expr_node_pool = create_alloc_pool ("GIMPLE_MODIFY_STMT nodes",
4110 tree_code_size (GIMPLE_MODIFY_STMT),
4111 30);
4112 modify_expr_template = NULL;
4114 FOR_ALL_BB (bb)
4116 EXP_GEN (bb) = bitmap_set_new ();
4117 PHI_GEN (bb) = bitmap_set_new ();
4118 TMP_GEN (bb) = bitmap_set_new ();
4119 AVAIL_OUT (bb) = bitmap_set_new ();
4121 maximal_set = in_fre ? NULL : bitmap_set_new ();
4123 need_eh_cleanup = BITMAP_ALLOC (NULL);
4127 /* Deallocate data structures used by PRE. */
4129 static void
4130 fini_pre (bool do_fre)
4132 basic_block bb;
4133 unsigned int i;
4135 free (postorder);
4136 VEC_free (tree, heap, inserted_exprs);
4137 VEC_free (tree, heap, need_creation);
4138 bitmap_obstack_release (&grand_bitmap_obstack);
4139 free_alloc_pool (bitmap_set_pool);
4140 free_alloc_pool (binary_node_pool);
4141 free_alloc_pool (reference_node_pool);
4142 free_alloc_pool (unary_node_pool);
4143 free_alloc_pool (list_node_pool);
4144 free_alloc_pool (expression_node_pool);
4145 free_alloc_pool (comparison_node_pool);
4146 free_alloc_pool (modify_expr_node_pool);
4147 htab_delete (phi_translate_table);
4148 remove_fake_exit_edges ();
4150 FOR_ALL_BB (bb)
4152 free (bb->aux);
4153 bb->aux = NULL;
4156 free_dominance_info (CDI_POST_DOMINATORS);
4157 vn_delete ();
4159 if (!bitmap_empty_p (need_eh_cleanup))
4161 tree_purge_all_dead_eh_edges (need_eh_cleanup);
4162 cleanup_tree_cfg ();
4165 BITMAP_FREE (need_eh_cleanup);
4167 /* Wipe out pointers to VALUE_HANDLEs. In the not-too-distant
4168 future we will want them to be persistent, though. */
4169 for (i = 0; i < num_ssa_names; i++)
4171 tree name = ssa_name (i);
4173 if (!name)
4174 continue;
4176 if (SSA_NAME_VALUE (name)
4177 && TREE_CODE (SSA_NAME_VALUE (name)) == VALUE_HANDLE)
4178 SSA_NAME_VALUE (name) = NULL;
4180 if (!do_fre && current_loops)
4181 loop_optimizer_finalize ();
4184 /* Main entry point to the SSA-PRE pass. DO_FRE is true if the caller
4185 only wants to do full redundancy elimination. */
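/* In outline, the calls below do the following: value number the IL
   and collect expressions per block (compute_avail); for a full PRE
   run, compute ANTIC and insert expressions to turn partial
   redundancies into full ones (insert); rewrite the now fully
   redundant computations (eliminate); and finally remove any
   insertions that turned out to be useless
   (remove_dead_inserted_code).  */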
4187 static void
4188 execute_pre (bool do_fre)
4191 do_partial_partial = optimize > 2;
4192 init_pre (do_fre);
4194 if (!do_fre)
4195 insert_fake_stores ();
4197 /* Collect and value number expressions computed in each basic block. */
4198 compute_avail ();
4200 if (dump_file && (dump_flags & TDF_DETAILS))
4202 basic_block bb;
4204 FOR_ALL_BB (bb)
4206 print_bitmap_set (dump_file, EXP_GEN (bb), "exp_gen", bb->index);
4207 print_bitmap_set (dump_file, TMP_GEN (bb), "tmp_gen",
4208 bb->index);
4209 print_bitmap_set (dump_file, AVAIL_OUT (bb), "avail_out",
4210 bb->index);
4214 /* Insert can get quite slow when there are very many basic
4215 blocks, due to some quadratic behavior. Until that behavior is
4216 fixed, don't run it when we have an incredibly large number of
4217 bb's. If we aren't going to run insert, there is no point in
4218 computing ANTIC either, even though it's plenty fast. */
4219 if (!do_fre && n_basic_blocks < 4000)
4221 vuse_names = XCNEWVEC (bitmap, num_ssa_names);
4222 compute_rvuse_and_antic_safe ();
4223 compute_antic ();
4224 insert ();
4225 free (vuse_names);
4228 /* Remove all the fully redundant expressions. */
4229 eliminate ();
4231 if (dump_file && (dump_flags & TDF_STATS))
4233 fprintf (dump_file, "Insertions: %d\n", pre_stats.insertions);
4234 fprintf (dump_file, "PA inserted: %d\n", pre_stats.pa_insert);
4235 fprintf (dump_file, "New PHIs: %d\n", pre_stats.phis);
4236 fprintf (dump_file, "Eliminated: %d\n", pre_stats.eliminations);
4237 fprintf (dump_file, "Constified: %d\n", pre_stats.constified);
4239 bsi_commit_edge_inserts ();
4241 clear_expression_ids ();
4242 if (!do_fre)
4244 remove_dead_inserted_code ();
4245 realify_fake_stores ();
4248 fini_pre (do_fre);
4251 /* Gate and execute functions for PRE. */
4253 static unsigned int
4254 do_pre (void)
4256 execute_pre (false);
4257 return 0;
4260 static bool
4261 gate_pre (void)
4263 return flag_tree_pre != 0;
4266 struct tree_opt_pass pass_pre =
4268 "pre", /* name */
4269 gate_pre, /* gate */
4270 do_pre, /* execute */
4271 NULL, /* sub */
4272 NULL, /* next */
4273 0, /* static_pass_number */
4274 TV_TREE_PRE, /* tv_id */
4275 PROP_no_crit_edges | PROP_cfg
4276 | PROP_ssa | PROP_alias, /* properties_required */
4277 0, /* properties_provided */
4278 0, /* properties_destroyed */
4279 0, /* todo_flags_start */
4280 TODO_update_ssa_only_virtuals | TODO_dump_func | TODO_ggc_collect
4281 | TODO_verify_ssa, /* todo_flags_finish */
4282 0 /* letter */
4286 /* Gate and execute functions for FRE. */
4288 static unsigned int
4289 execute_fre (void)
4291 execute_pre (true);
4292 return 0;
4295 static bool
4296 gate_fre (void)
4298 return flag_tree_fre != 0;
4301 struct tree_opt_pass pass_fre =
4303 "fre", /* name */
4304 gate_fre, /* gate */
4305 execute_fre, /* execute */
4306 NULL, /* sub */
4307 NULL, /* next */
4308 0, /* static_pass_number */
4309 TV_TREE_FRE, /* tv_id */
4310 PROP_cfg | PROP_ssa | PROP_alias, /* properties_required */
4311 0, /* properties_provided */
4312 0, /* properties_destroyed */
4313 0, /* todo_flags_start */
4314 TODO_dump_func | TODO_ggc_collect | TODO_verify_ssa, /* todo_flags_finish */
4315 0 /* letter */