   Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007
   Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org> and Steven Bosscher

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
25 #include "coretypes.h"
29 #include "basic-block.h"
30 #include "diagnostic.h"
31 #include "tree-inline.h"
32 #include "tree-flow.h"
33 #include "tree-gimple.h"
34 #include "tree-dump.h"
38 #include "tree-iterator.h"
40 #include "alloc-pool.h"
42 #include "tree-pass.h"
45 #include "langhooks.h"
47 #include "tree-ssa-sccvn.h"
/* TODO:

   1. Avail sets can be shared by making an avail_find_leader that
      walks up the dominator tree and looks in those avail sets.
      This might affect code optimality, it's unclear right now.
   2. Strength reduction can be performed by anticipating expressions
      we can repair later on.
   3. We can do back-substitution or smarter value numbering to catch
      commutative expressions split up over multiple statements.
*/
60 /* For ease of terminology, "expression node" in the below refers to
61 every expression node but GIMPLE_MODIFY_STMT, because GIMPLE_MODIFY_STMT's
62 represent the actual statement containing the expressions we care about,
63 and we cache the value number by putting it in the expression. */
67 First we walk the statements to generate the AVAIL sets, the
68 EXP_GEN sets, and the tmp_gen sets. EXP_GEN sets represent the
69 generation of values/expressions by a given block. We use them
70 when computing the ANTIC sets. The AVAIL sets consist of
71 SSA_NAME's that represent values, so we know what values are
72 available in what blocks. AVAIL is a forward dataflow problem. In
73 SSA, values are never killed, so we don't need a kill set, or a
74 fixpoint iteration, in order to calculate the AVAIL sets. In
75 traditional parlance, AVAIL sets tell us the downsafety of the
78 Next, we generate the ANTIC sets. These sets represent the
79 anticipatable expressions. ANTIC is a backwards dataflow
80 problem. An expression is anticipatable in a given block if it could
81 be generated in that block. This means that if we had to perform
82 an insertion in that block, of the value of that expression, we
83 could. Calculating the ANTIC sets requires phi translation of
84 expressions, because the flow goes backwards through phis. We must
85 iterate to a fixpoint of the ANTIC sets, because we have a kill
86 set. Even in SSA form, values are not live over the entire
87 function, only from their definition point onwards. So we have to
88 remove values from the ANTIC set once we go past the definition
89 point of the leaders that make them up.
90 compute_antic/compute_antic_aux performs this computation.
92 Third, we perform insertions to make partially redundant
93 expressions fully redundant.
95 An expression is partially redundant (excluding partial
98 1. It is AVAIL in some, but not all, of the predecessors of a
100 2. It is ANTIC in all the predecessors.
102 In order to make it fully redundant, we insert the expression into
103 the predecessors where it is not available, but is ANTIC.
105 For the partial anticipation case, we only perform insertion if it
106 is partially anticipated in some block, and fully available in all
109 insert/insert_aux/do_regular_insertion/do_partial_partial_insertion
110 performs these steps.
112 Fourth, we eliminate fully redundant expressions.
113 This is a simple statement walk that replaces redundant
114 calculations with the now available values. */
116 /* Representations of value numbers:
118 Value numbers are represented using the "value handle" approach.
119 This means that each SSA_NAME (and for other reasons to be
120 disclosed in a moment, expression nodes) has a value handle that
121 can be retrieved through get_value_handle. This value handle *is*
122 the value number of the SSA_NAME. You can pointer compare the
123 value handles for equivalence purposes.
125 For debugging reasons, the value handle is internally more than
126 just a number, it is a VALUE_HANDLE named "VH.x", where x is a
127 unique number for each value number in use. This allows
128 expressions with SSA_NAMES replaced by value handles to still be
129 pretty printed in a sane way. They simply print as "VH.3 *
132 Expression nodes have value handles associated with them as a
133 cache. Otherwise, we'd have to look them up again in the hash
134 table This makes significant difference (factor of two or more) on
135 some test cases. They can be thrown away after the pass is
138 /* Representation of expressions on value numbers:
140 In some portions of this code, you will notice we allocate "fake"
141 analogues to the expression we are value numbering, and replace the
142 operands with the values of the expression. Since we work on
143 values, and not just names, we canonicalize expressions to value
144 expressions for use in the ANTIC sets, the EXP_GEN set, etc.
146 This is theoretically unnecessary, it just saves a bunch of
147 repeated get_value_handle and find_leader calls in the remainder of
148 the code, trading off temporary memory usage for speed. The tree
149 nodes aren't actually creating more garbage, since they are
150 allocated in a special pools which are thrown away at the end of
153 All of this also means that if you print the EXP_GEN or ANTIC sets,
154 you will see "VH.5 + VH.7" in the set, instead of "a_55 +
155 b_66" or something. The only thing that actually cares about
156 seeing the value leaders is phi translation, and it needs to be
157 able to find the leader for a value in an arbitrary block, so this
158 "value expression" form is perfect for it (otherwise you'd do
159 get_value_handle->find_leader->translate->get_value_handle->find_leader).*/
162 /* Representation of sets:
164 There are currently two types of sets used, hopefully to be unified soon.
165 The AVAIL sets do not need to be sorted in any particular order,
166 and thus, are simply represented as two bitmaps, one that keeps
167 track of values present in the set, and one that keeps track of
168 expressions present in the set.
170 The other sets are represented as doubly linked lists kept in topological
171 order, with an optional supporting bitmap of values present in the
172 set. The sets represent values, and the elements can be values or
173 expressions. The elements can appear in different sets, but each
174 element can only appear once in each set.
176 Since each node in the set represents a value, we also want to be
177 able to map expression, set pairs to something that tells us
178 whether the value is present is a set. We use a per-set bitmap for
179 that. The value handles also point to a linked list of the
180 expressions they represent via a tree annotation. This is mainly
181 useful only for debugging, since we don't do identity lookups. */
/* Next global expression id number.  */
static unsigned int next_expression_id;

/* A vector of vuses, used to record the memory state an expression
   depends on.  */
typedef VEC(tree, gc) *vuse_vec;
DEF_VEC_P (vuse_vec);
DEF_VEC_ALLOC_P (vuse_vec, heap);

/* Per-expression vuse vectors, indexed by expression id (kept
   parallel to the EXPRESSIONS vector below).  */
static VEC(vuse_vec, heap) *expression_vuses;

/* Mapping from expression to id number we can use in bitmap sets.  */
static VEC(tree, heap) *expressions;
/* Allocate an expression id for EXPR.  Returns the new id.  */

static inline unsigned int
alloc_expression_id (tree expr)
{
  tree_ann_common_t ann;

  ann = get_tree_common_ann (expr);

  /* Make sure we won't overflow. */
  gcc_assert (next_expression_id + 1 > next_expression_id);

  /* The id is stashed in the tree annotation's aux field; it is freed
     again by clear_expression_ids.  */
  ann->aux = XNEW (unsigned int);
  * ((unsigned int *)ann->aux) = next_expression_id++;
  VEC_safe_push (tree, heap, expressions, expr);
  /* Keep expression_vuses parallel to expressions; the vuses slot is
     filled in later by set_expression_vuses.  */
  VEC_safe_push (vuse_vec, heap, expression_vuses, NULL);
  return next_expression_id - 1;
}
/* Return the expression id for tree EXPR.  EXPR must already have had
   an id allocated by alloc_expression_id.  */

static inline unsigned int
get_expression_id (tree expr)
{
  tree_ann_common_t ann = tree_common_ann (expr);
  gcc_assert (ann->aux);

  return *((unsigned int *)ann->aux);
}
/* Return the existing expression id for EXPR, or create one if one
   does not exist yet.  */

static inline unsigned int
get_or_alloc_expression_id (tree expr)
{
  tree_ann_common_t ann = tree_common_ann (expr);

  if (ann == NULL || !ann->aux)
    return alloc_expression_id (expr);

  return get_expression_id (expr);
}
/* Return the expression that has expression id ID.  */

static inline tree
expression_for_id (unsigned int id)
{
  return VEC_index (tree, expressions, id);
}
/* Return the expression vuses for EXPR, if there are any.  Allocates
   an expression id for EXPR as a side effect if it had none.  */

static inline vuse_vec
get_expression_vuses (tree expr)
{
  unsigned int expr_id = get_or_alloc_expression_id (expr);
  return VEC_index (vuse_vec, expression_vuses, expr_id);
}
/* Set the expression vuses for EXPR to VUSES.  */

static void
set_expression_vuses (tree expr, vuse_vec vuses)
{
  unsigned int expr_id = get_or_alloc_expression_id (expr);
  VEC_replace (vuse_vec, expression_vuses, expr_id, vuses);
}
/* Free the expression id field in all of our expressions,
   and then destroy the expressions array.  */

static void
clear_expression_ids (void)
{
  int i;
  tree expr;

  for (i = 0; VEC_iterate (tree, expressions, i, expr); i++)
    {
      /* Release the id storage allocated in alloc_expression_id.  */
      free (tree_common_ann (expr)->aux);
      tree_common_ann (expr)->aux = NULL;
    }
  VEC_free (tree, heap, expressions);
  VEC_free (vuse_vec, heap, expression_vuses);
}
/* NOTE(review): presumably true when running FRE (full redundancy
   elimination / value numbering only) rather than full PRE; the
   consumers of this flag are outside this chunk — confirm.  */
static bool in_fre = false;
/* An unordered bitmap set.  One bitmap tracks values, the other,
   expressions.  */
typedef struct bitmap_set
{
  bitmap expressions;
  bitmap values;
} *bitmap_set_t;

/* Iterate over the expression ids present in SET.  */
#define FOR_EACH_EXPR_ID_IN_SET(set, id, bi)		\
  EXECUTE_IF_SET_IN_BITMAP(set->expressions, 0, id, bi)
/* Sets that we need to keep track of.  One instance is hung off each
   basic block's AUX field; use the accessor macros below.  */
typedef struct bb_bitmap_sets
{
  /* The EXP_GEN set, which represents expressions/values generated in
     a basic block.  */
  bitmap_set_t exp_gen;

  /* The PHI_GEN set, which represents PHI results generated in a
     basic block.  */
  bitmap_set_t phi_gen;

  /* The TMP_GEN set, which represents results/temporaries generated
     in a basic block. IE the LHS of an expression.  */
  bitmap_set_t tmp_gen;

  /* The AVAIL_OUT set, which represents which values are available in
     a given basic block.  */
  bitmap_set_t avail_out;

  /* The ANTIC_IN set, which represents which values are anticipatable
     in a given basic block.  */
  bitmap_set_t antic_in;

  /* The PA_IN set, which represents which values are
     partially anticipatable in a given basic block.  */
  bitmap_set_t pa_in;

  /* The NEW_SETS set, which is used during insertion to augment the
     AVAIL_OUT set of blocks with the new insertions performed during
     the current iteration.  */
  bitmap_set_t new_sets;

  /* True if we have visited this block during ANTIC calculation.  */
  unsigned int visited:1;

  /* True we have deferred processing this block during ANTIC
     calculation until its successor is processed.  */
  unsigned int deferred : 1;
} *bb_value_sets_t;

/* Accessors for the per-block sets stored in a block's AUX field.  */
#define EXP_GEN(BB)	((bb_value_sets_t) ((BB)->aux))->exp_gen
#define PHI_GEN(BB)	((bb_value_sets_t) ((BB)->aux))->phi_gen
#define TMP_GEN(BB)	((bb_value_sets_t) ((BB)->aux))->tmp_gen
#define AVAIL_OUT(BB)	((bb_value_sets_t) ((BB)->aux))->avail_out
#define ANTIC_IN(BB)	((bb_value_sets_t) ((BB)->aux))->antic_in
#define PA_IN(BB)	((bb_value_sets_t) ((BB)->aux))->pa_in
#define NEW_SETS(BB)	((bb_value_sets_t) ((BB)->aux))->new_sets
#define BB_VISITED(BB)	((bb_value_sets_t) ((BB)->aux))->visited
#define BB_DEFERRED(BB) ((bb_value_sets_t) ((BB)->aux))->deferred
/* Maximal set of values, used to initialize the ANTIC problem, which
   is an intersection problem.  */
static bitmap_set_t maximal_set;

/* Basic block list in postorder.  */
static int *postorder;
356 /* This structure is used to keep track of statistics on what
357 optimization PRE was able to perform. */
360 /* The number of RHS computations eliminated by PRE. */
363 /* The number of new expressions/temporaries generated by PRE. */
366 /* The number of inserts found due to partial anticipation */
369 /* The number of new PHI nodes added by PRE. */
372 /* The number of values found constant. */
/* NOTE(review): presumably enables the partial-partial (partial
   anticipation) insertion step described in the header comment —
   confirm against pass setup.  */
static bool do_partial_partial;

/* Forward declarations for the set/leader primitives defined below.  */
static tree bitmap_find_leader (bitmap_set_t, tree);
static void bitmap_value_insert_into_set (bitmap_set_t, tree);
static void bitmap_value_replace_in_set (bitmap_set_t, tree);
static void bitmap_set_copy (bitmap_set_t, bitmap_set_t);
static bool bitmap_set_contains_value (bitmap_set_t, tree);
static void bitmap_insert_into_set (bitmap_set_t, tree);
static bitmap_set_t bitmap_set_new (void);
static bool is_undefined_value (tree);
static tree create_expression_by_pieces (basic_block, tree, tree);
static tree find_or_generate_expression (basic_block, tree, tree);
/* We can add and remove elements and entries to and from sets
   and hash tables, so we use alloc pools for them.  */

static alloc_pool bitmap_set_pool;
static alloc_pool binary_node_pool;
static alloc_pool unary_node_pool;
static alloc_pool reference_node_pool;
static alloc_pool comparison_node_pool;
static alloc_pool modify_expr_node_pool;
static bitmap_obstack grand_bitmap_obstack;

/* We can't use allocation pools to hold temporary CALL_EXPR objects, since
   they are not of fixed size.  Instead, use an obstack.  */

static struct obstack temp_call_expr_obstack;

/* To avoid adding 300 temporary variables when we only need one, we
   only create one temporary variable, on demand, and build ssa names
   off that.  We do have to change the variable if the types don't
   match the current variable's type.  */

static tree storetemp;
static tree prephitemp;

/* Set of blocks with statements that have had its EH information
   cleaned up.  */

static bitmap need_eh_cleanup;

/* Which expressions have been seen during a given phi translation.  */

static bitmap seen_during_translate;

/* The phi_translate_table caches phi translations for a given
   expression and predecessor.  */

static htab_t phi_translate_table;
/* A three tuple {e, pred, v} used to cache phi translations in the
   phi_translate_table.  */

typedef struct expr_pred_trans_d
{
  /* The expression.  */
  tree e;

  /* The predecessor block along which we translated the expression.  */
  basic_block pred;

  /* vuses associated with the expression.  */
  VEC (tree, gc) *vuses;

  /* The value that resulted from the translation.  */
  tree v;

  /* The hashcode for the expression, pred pair.  This is cached for
     speed reasons.  */
  hashval_t hashcode;
} *expr_pred_trans_t;
typedef const struct expr_pred_trans_d *const_expr_pred_trans_t;
/* Return the hash value for a phi translation table entry.  */

static hashval_t
expr_pred_trans_hash (const void *p)
{
  const_expr_pred_trans_t const ve = (const_expr_pred_trans_t) p;
  /* The hash was computed and cached when the entry was created.  */
  return ve->hashcode;
}
/* Return true if two phi translation table entries are the same.
   P1 and P2 should point to the expr_pred_trans_t's to be compared.*/

static int
expr_pred_trans_eq (const void *p1, const void *p2)
{
  const_expr_pred_trans_t const ve1 = (const_expr_pred_trans_t) p1;
  const_expr_pred_trans_t const ve2 = (const_expr_pred_trans_t) p2;
  basic_block b1 = ve1->pred;
  basic_block b2 = ve2->pred;
  int i;
  tree vuse1;

  /* If they are not translations for the same basic block, they can't
     be equal.  */
  if (b1 != b2)
    return false;

  /* If they are for the same basic block, determine if the
     expressions are equal.  */
  if (!expressions_equal_p (ve1->e, ve2->e))
    return false;

  /* Make sure the vuses are equivalent.  */
  if (ve1->vuses == ve2->vuses)
    return true;

  if (VEC_length (tree, ve1->vuses) != VEC_length (tree, ve2->vuses))
    return false;

  /* Element-wise comparison of the two vuse vectors.  */
  for (i = 0; VEC_iterate (tree, ve1->vuses, i, vuse1); i++)
    {
      if (VEC_index (tree, ve2->vuses, i) != vuse1)
	return false;
    }

  return true;
}
/* Search in the phi translation table for the translation of
   expression E in basic block PRED with vuses VUSES.
   Return the translated value, if found, NULL otherwise.  */

static tree
phi_trans_lookup (tree e, basic_block pred, VEC (tree, gc) *vuses)
{
  void **slot;
  struct expr_pred_trans_d ept;

  ept.e = e;
  ept.pred = pred;
  ept.vuses = vuses;
  /* Hash over the (expression, predecessor) pair, matching what
     phi_trans_add caches in the hashcode field.  */
  ept.hashcode = iterative_hash_expr (e, (unsigned long) pred);
  slot = htab_find_slot_with_hash (phi_translate_table, &ept, ept.hashcode,
				   NO_INSERT);
  if (!slot)
    return NULL;
  else
    return ((expr_pred_trans_t) *slot)->v;
}
/* Add the tuple mapping from {expression E, basic block PRED, vuses VUSES} to
   value V, to the phi translation table.  */

static void
phi_trans_add (tree e, tree v, basic_block pred, VEC (tree, gc) *vuses)
{
  void **slot;
  expr_pred_trans_t new_pair = XNEW (struct expr_pred_trans_d);
  new_pair->e = e;
  new_pair->pred = pred;
  new_pair->vuses = vuses;
  new_pair->v = v;
  /* Cache the hashcode so equality checks and lookups stay cheap.  */
  new_pair->hashcode = iterative_hash_expr (e, (unsigned long) pred);
  slot = htab_find_slot_with_hash (phi_translate_table, new_pair,
				   new_pair->hashcode, INSERT);
  /* Replace any existing cached translation for this key.  */
  if (*slot)
    free (*slot);
  *slot = (void *) new_pair;
}
/* Return true if V is a value expression that represents itself.
   In our world, this is *only* non-value handles.  */

static bool
constant_expr_p (tree v)
{
  /* FIELD_DECLs count as constants here so COMPONENT_REF pieces are
     treated as self-representing.  */
  return TREE_CODE (v) != VALUE_HANDLE &&
    (TREE_CODE (v) == FIELD_DECL || is_gimple_min_invariant (v));
}
/* Add expression E to the expression set of value V.  */

static void
add_to_value (tree v, tree e)
{
  /* Constants have no expression sets.  */
  if (constant_expr_p (v))
    return;

  /* Lazily allocate the value's expression set.  */
  if (VALUE_HANDLE_EXPR_SET (v) == NULL)
    VALUE_HANDLE_EXPR_SET (v) = bitmap_set_new ();

  bitmap_insert_into_set (VALUE_HANDLE_EXPR_SET (v), e);
}
/* Create a new bitmap set and return it.  Both bitmaps live on the
   pass-wide grand_bitmap_obstack.  */

static bitmap_set_t
bitmap_set_new (void)
{
  bitmap_set_t ret = (bitmap_set_t) pool_alloc (bitmap_set_pool);
  ret->expressions = BITMAP_ALLOC (&grand_bitmap_obstack);
  ret->values = BITMAP_ALLOC (&grand_bitmap_obstack);
  return ret;
}
/* Remove an expression EXPR from a bitmapped set.  */

static void
bitmap_remove_from_set (bitmap_set_t set, tree expr)
{
  tree val = get_value_handle (expr);

  /* Constants are never tracked in sets (see bitmap_insert_into_set),
     so there is nothing to remove for them.  */
  if (!constant_expr_p (val))
    {
      bitmap_clear_bit (set->values, VALUE_HANDLE_ID (val));
      bitmap_clear_bit (set->expressions, get_expression_id (expr));
    }
}
/* Insert an expression EXPR into a bitmapped set.  */

static void
bitmap_insert_into_set (bitmap_set_t set, tree expr)
{
  tree val = get_value_handle (expr);

  /* Constants represent themselves, so they are never put into
     sets.  */
  if (!constant_expr_p (val))
    {
      bitmap_set_bit (set->values, VALUE_HANDLE_ID (val));
      bitmap_set_bit (set->expressions, get_or_alloc_expression_id (expr));
    }
}
/* Copy a bitmapped set ORIG, into bitmapped set DEST.  */

static void
bitmap_set_copy (bitmap_set_t dest, bitmap_set_t orig)
{
  bitmap_copy (dest->expressions, orig->expressions);
  bitmap_copy (dest->values, orig->values);
}
/* Free memory used up by SET's bitmaps (the bitmap_set node itself is
   pool-allocated and reclaimed with the pool).  */

static void
bitmap_set_free (bitmap_set_t set)
{
  BITMAP_FREE (set->expressions);
  BITMAP_FREE (set->values);
}
/* A comparison function for use in qsort to top sort a bitmap set.  Simply
   subtracts value handle ids, since they are created in topo-order.
   NOTE(review): the return constants for the constant-vs-constant and
   constant-vs-handle branches were reconstructed — confirm they sort
   constants before value handles as intended.  */

static int
vh_compare (const void *pa, const void *pb)
{
  const tree vha = get_value_handle (*((const tree *)pa));
  const tree vhb = get_value_handle (*((const tree *)pb));

  /* This can happen when we constify things.  */
  if (constant_expr_p (vha))
    {
      if (constant_expr_p (vhb))
	return -1;
      return -2;
    }
  else if (constant_expr_p (vhb))
    return 2;
  else
    return VALUE_HANDLE_ID (vha) - VALUE_HANDLE_ID (vhb);
}
/* Generate a topological-ordered array of bitmap set SET.  The caller
   owns the returned vector and must VEC_free it.  */

static VEC(tree, heap) *
sorted_array_from_bitmap_set (bitmap_set_t set)
{
  unsigned int i;
  bitmap_iterator bi;
  VEC(tree, heap) *result = NULL;

  FOR_EACH_EXPR_ID_IN_SET (set, i, bi)
    VEC_safe_push (tree, heap, result, expression_for_id (i));

  /* Value handle ids are assigned in topological order, so sorting by
     id (vh_compare) yields a topological sort.  */
  qsort (VEC_address (tree, result), VEC_length (tree, result),
	 sizeof (tree), vh_compare);

  return result;
}
/* Perform bitmapped set operation DEST &= ORIG.  */

static void
bitmap_set_and (bitmap_set_t dest, bitmap_set_t orig)
{
  bitmap_iterator bi;
  unsigned int i;

  if (dest != orig)
    {
      bitmap temp = BITMAP_ALLOC (&grand_bitmap_obstack);

      bitmap_and_into (dest->values, orig->values);

      /* Iterate over a copy, since we clear bits of dest->expressions
	 while walking.  */
      bitmap_copy (temp, dest->expressions);
      EXECUTE_IF_SET_IN_BITMAP (temp, 0, i, bi)
	{
	  tree expr = expression_for_id (i);
	  tree val = get_value_handle (expr);
	  /* Drop expressions whose value did not survive the
	     intersection.  */
	  if (!bitmap_bit_p (dest->values, VALUE_HANDLE_ID (val)))
	    bitmap_clear_bit (dest->expressions, i);
	}
      BITMAP_FREE (temp);
    }
}
/* Subtract all values and expressions contained in ORIG from DEST.
   Returns a freshly allocated set holding the difference.  */

static bitmap_set_t
bitmap_set_subtract (bitmap_set_t dest, bitmap_set_t orig)
{
  bitmap_set_t result = bitmap_set_new ();
  bitmap_iterator bi;
  unsigned int i;

  bitmap_and_compl (result->expressions, dest->expressions,
		    orig->expressions);

  /* Rebuild the value bitmap from the surviving expressions.  */
  FOR_EACH_EXPR_ID_IN_SET (result, i, bi)
    {
      tree expr = expression_for_id (i);
      tree val = get_value_handle (expr);
      bitmap_set_bit (result->values, VALUE_HANDLE_ID (val));
    }

  return result;
}
/* Subtract all the values in bitmap set B from bitmap set A.  Unlike
   bitmap_set_subtract, this modifies A in place and compares by
   value, not by expression.  */

static void
bitmap_set_subtract_values (bitmap_set_t a, bitmap_set_t b)
{
  unsigned int i;
  bitmap_iterator bi;
  bitmap temp = BITMAP_ALLOC (&grand_bitmap_obstack);

  /* Iterate over a copy, since bitmap_remove_from_set mutates A.  */
  bitmap_copy (temp, a->expressions);
  EXECUTE_IF_SET_IN_BITMAP (temp, 0, i, bi)
    {
      tree expr = expression_for_id (i);
      if (bitmap_set_contains_value (b, get_value_handle (expr)))
	bitmap_remove_from_set (a, expr);
    }
  BITMAP_FREE (temp);
}
/* Return true if bitmapped set SET contains the value VAL.  */

static bool
bitmap_set_contains_value (bitmap_set_t set, tree val)
{
  /* Constants are always available, in every set.  */
  if (constant_expr_p (val))
    return true;

  if (!set || bitmap_empty_p (set->expressions))
    return false;

  return bitmap_bit_p (set->values, VALUE_HANDLE_ID (val));
}
/* Return true if bitmapped set SET contains the expression EXPR.  */

static bool
bitmap_set_contains_expr (bitmap_set_t set, tree expr)
{
  return bitmap_bit_p (set->expressions, get_expression_id (expr));
}
/* Replace an instance of value LOOKFOR with expression EXPR in SET.  */

static void
bitmap_set_replace_value (bitmap_set_t set, tree lookfor, tree expr)
{
  bitmap_set_t exprset;
  unsigned int i;
  bitmap_iterator bi;

  /* Constants are never in sets, so there is nothing to replace.  */
  if (constant_expr_p (lookfor))
    return;

  if (!bitmap_set_contains_value (set, lookfor))
    return;

  /* The number of expressions having a given value is usually
     significantly less than the total number of expressions in SET.
     Thus, rather than check, for each expression in SET, whether it
     has the value LOOKFOR, we walk the reverse mapping that tells us
     what expressions have a given value, and see if any of those
     expressions are in our set.  For large testcases, this is about
     5-10x faster than walking the bitmap.  If this is somehow a
     significant lose for some cases, we can choose which set to walk
     based on the set size.  */
  exprset = VALUE_HANDLE_EXPR_SET (lookfor);
  FOR_EACH_EXPR_ID_IN_SET (exprset, i, bi)
    {
      if (bitmap_bit_p (set->expressions, i))
	{
	  /* Swap the old leader for EXPR; at most one expression per
	     value exists in SET, so we can stop at the first hit.  */
	  bitmap_clear_bit (set->expressions, i);
	  bitmap_set_bit (set->expressions, get_expression_id (expr));
	  return;
	}
    }
}
/* Return true if two bitmap sets are equal.  Sets represent values,
   so comparing the value bitmaps suffices.  */

static bool
bitmap_set_equal (bitmap_set_t a, bitmap_set_t b)
{
  return bitmap_equal_p (a->values, b->values);
}
/* Replace an instance of EXPR's VALUE with EXPR in SET if it exists,
   and add it otherwise.  */

static void
bitmap_value_replace_in_set (bitmap_set_t set, tree expr)
{
  tree val = get_value_handle (expr);

  if (bitmap_set_contains_value (set, val))
    bitmap_set_replace_value (set, val, expr);
  else
    bitmap_insert_into_set (set, expr);
}
/* Insert EXPR into SET if EXPR's value is not already present in
   SET.  */

static void
bitmap_value_insert_into_set (bitmap_set_t set, tree expr)
{
  tree val = get_value_handle (expr);

  /* Constants are implicitly in every set.  */
  if (constant_expr_p (val))
    return;

  if (!bitmap_set_contains_value (set, val))
    bitmap_insert_into_set (set, expr);
}
/* Print out SET to OUTFILE, labeled "SETNAME[BLOCKINDEX]".  Each
   element is printed as "expr (value-handle)".  */

static void
print_bitmap_set (FILE *outfile, bitmap_set_t set,
		  const char *setname, int blockindex)
{
  fprintf (outfile, "%s[%d] := { ", setname, blockindex);
  if (set)
    {
      bool first = true;
      unsigned i;
      bitmap_iterator bi;

      FOR_EACH_EXPR_ID_IN_SET (set, i, bi)
	{
	  tree expr = expression_for_id (i);

	  if (!first)
	    fprintf (outfile, ", ");
	  first = false;
	  print_generic_expr (outfile, expr, 0);

	  fprintf (outfile, " (");
	  print_generic_expr (outfile, get_value_handle (expr), 0);
	  fprintf (outfile, ") ");
	}
    }
  fprintf (outfile, " }\n");
}
void debug_bitmap_set (bitmap_set_t);

/* Dump SET to stderr; intended to be called from the debugger.  */

void
debug_bitmap_set (bitmap_set_t set)
{
  print_bitmap_set (stderr, set, "debug", 0);
}
/* Print out the expressions that have VAL to OUTFILE.  */

void
print_value_expressions (FILE *outfile, tree val)
{
  if (VALUE_HANDLE_EXPR_SET (val))
    {
      /* NOTE(review): buffer size assumes the value handle id prints
	 in at most 6 digits ("VH.%04d" + NUL) — confirm id range.  */
      char s[10];
      sprintf (s, "VH.%04d", VALUE_HANDLE_ID (val));
      print_bitmap_set (outfile, VALUE_HANDLE_EXPR_SET (val), s, 0);
    }
}
/* Dump the expressions having value VAL to stderr; intended to be
   called from the debugger.  */

void
debug_value_expressions (tree val)
{
  print_value_expressions (stderr, val);
}
/* Return the folded version of T if T, when folded, is a gimple
   min_invariant.  Otherwise, return T.  */

static tree
fully_constant_expression (tree t)
{
  tree folded;
  folded = fold (t);
  if (folded && is_gimple_min_invariant (folded))
    return folded;
  return t;
}
/* Make a temporary copy of a CALL_EXPR object NODE.  The copy lives
   on temp_call_expr_obstack (CALL_EXPRs are variable-sized, so they
   cannot come from the fixed-size alloc pools).  */

static tree
temp_copy_call_expr (tree node)
{
  return (tree) obstack_copy (&temp_call_expr_obstack, node, tree_size (node));
}
/* Translate the vuses in the VUSES vector backwards through phi nodes
   in PHIBLOCK, so that they have the value they would have in
   BLOCK.  */

static VEC(tree, gc) *
translate_vuses_through_block (VEC (tree, gc) *vuses,
			       basic_block phiblock,
			       basic_block block)
{
  tree oldvuse;
  VEC(tree, gc) *result = NULL;
  int i;

  for (i = 0; VEC_iterate (tree, vuses, i, oldvuse); i++)
    {
      tree phi = SSA_NAME_DEF_STMT (oldvuse);
      /* Only vuses defined by a PHI in PHIBLOCK need translating.  */
      if (TREE_CODE (phi) == PHI_NODE
	  && bb_for_stmt (phi) == phiblock)
	{
	  edge e = find_edge (block, bb_for_stmt (phi));
	  if (e)
	    {
	      tree def = PHI_ARG_DEF (phi, e->dest_idx);
	      if (def != oldvuse)
		{
		  /* Copy-on-write: only clone VUSES once we know a
		     replacement is actually needed.  */
		  if (!result)
		    result = VEC_copy (tree, gc, vuses);
		  VEC_replace (tree, result, i, def);
		}
	    }
	}
    }

  /* We avoid creating a new copy of the vuses unless something
     actually changed, so result can be NULL.  */
  if (result)
    return result;
  else
    return vuses;
}
/* Like find_leader, but checks for the value existing in SET1 *or*
   SET2.  This is used to avoid making a set consisting of the union
   of PA_IN and ANTIC_IN during insert.  */

static tree
find_leader_in_sets (tree expr, bitmap_set_t set1, bitmap_set_t set2)
{
  tree result;

  result = bitmap_find_leader (set1, expr);
  /* Fall back to SET2 only when SET1 had no leader.  */
  if (!result && set2)
    result = bitmap_find_leader (set2, expr);
  return result;
}
964 /* Translate EXPR using phis in PHIBLOCK, so that it has the values of
965 the phis in PRED. SEEN is a bitmap saying which expression we have
966 translated since we started translation of the toplevel expression.
967 Return NULL if we can't find a leader for each part of the
968 translated expression. */
971 phi_translate_1 (tree expr
, bitmap_set_t set1
, bitmap_set_t set2
,
972 basic_block pred
, basic_block phiblock
, bitmap seen
)
974 tree phitrans
= NULL
;
980 if (constant_expr_p (expr
))
983 /* Phi translations of a given expression don't change. */
984 if (EXPR_P (expr
) || GIMPLE_STMT_P (expr
))
986 phitrans
= phi_trans_lookup (expr
, pred
, get_expression_vuses (expr
));
989 phitrans
= phi_trans_lookup (expr
, pred
, NULL
);
994 /* Prevent cycles when we have recursively dependent leaders. This
995 can only happen when phi translating the maximal set. */
998 unsigned int expr_id
= get_expression_id (expr
);
999 if (bitmap_bit_p (seen
, expr_id
))
1001 bitmap_set_bit (seen
, expr_id
);
1004 switch (TREE_CODE_CLASS (TREE_CODE (expr
)))
1006 case tcc_expression
:
1011 if (TREE_CODE (expr
) != CALL_EXPR
)
1015 tree oldfn
= CALL_EXPR_FN (expr
);
1016 tree oldsc
= CALL_EXPR_STATIC_CHAIN (expr
);
1017 tree newfn
, newsc
= NULL
;
1018 tree newexpr
= NULL_TREE
;
1019 bool invariantarg
= false;
1021 VEC (tree
, gc
) *vuses
= get_expression_vuses (expr
);
1022 VEC (tree
, gc
) *tvuses
;
1024 newfn
= phi_translate_1 (find_leader_in_sets (oldfn
, set1
, set2
),
1025 set1
, set2
, pred
, phiblock
, seen
);
1030 newexpr
= temp_copy_call_expr (expr
);
1031 CALL_EXPR_FN (newexpr
) = get_value_handle (newfn
);
1035 newsc
= phi_translate_1 (find_leader_in_sets (oldsc
, set1
, set2
),
1036 set1
, set2
, pred
, phiblock
, seen
);
1042 newexpr
= temp_copy_call_expr (expr
);
1043 CALL_EXPR_STATIC_CHAIN (newexpr
) = get_value_handle (newsc
);
1047 /* phi translate the argument list piece by piece. */
1048 nargs
= call_expr_nargs (expr
);
1049 for (i
= 0; i
< nargs
; i
++)
1051 tree oldval
= CALL_EXPR_ARG (expr
, i
);
1055 /* This may seem like a weird place for this
1056 check, but it's actually the easiest place to
1057 do it. We can't do it lower on in the
1058 recursion because it's valid for pieces of a
1059 component ref to be of AGGREGATE_TYPE, as long
1060 as the outermost one is not.
1061 To avoid *that* case, we have a check for
1062 AGGREGATE_TYPE_P in insert_aux. However, that
1063 check will *not* catch this case because here
1064 it occurs in the argument list. */
1065 if (AGGREGATE_TYPE_P (TREE_TYPE (oldval
)))
1067 oldval
= find_leader_in_sets (oldval
, set1
, set2
);
1068 newval
= phi_translate_1 (oldval
, set1
, set2
, pred
,
1072 if (newval
!= oldval
)
1074 invariantarg
|= is_gimple_min_invariant (newval
);
1076 newexpr
= temp_copy_call_expr (expr
);
1077 CALL_EXPR_ARG (newexpr
, i
) = get_value_handle (newval
);
1082 /* In case of new invariant args we might try to fold the call
1084 if (invariantarg
&& !newsc
)
1086 tree tmp1
= build_call_array (TREE_TYPE (expr
),
1087 newfn
, call_expr_nargs (newexpr
),
1088 CALL_EXPR_ARGP (newexpr
));
1089 tree tmp2
= fold (tmp1
);
1092 STRIP_TYPE_NOPS (tmp2
);
1093 if (is_gimple_min_invariant (tmp2
))
1098 tvuses
= translate_vuses_through_block (vuses
, phiblock
, pred
);
1099 if (vuses
!= tvuses
&& ! newexpr
)
1100 newexpr
= temp_copy_call_expr (expr
);
1104 newexpr
->base
.ann
= NULL
;
1105 vn_lookup_or_add_with_vuses (newexpr
, tvuses
);
1107 set_expression_vuses (newexpr
, tvuses
);
1109 phi_trans_add (oldexpr
, expr
, pred
, tvuses
);
1114 case tcc_declaration
:
1116 VEC (tree
, gc
) * oldvuses
= NULL
;
1117 VEC (tree
, gc
) * newvuses
= NULL
;
1119 oldvuses
= get_expression_vuses (expr
);
1121 newvuses
= translate_vuses_through_block (oldvuses
, phiblock
,
1124 if (oldvuses
!= newvuses
)
1126 vn_lookup_or_add_with_vuses (expr
, newvuses
);
1127 set_expression_vuses (expr
, newvuses
);
1129 phi_trans_add (oldexpr
, expr
, pred
, newvuses
);
1135 tree oldop0
= TREE_OPERAND (expr
, 0);
1144 VEC (tree
, gc
) * oldvuses
= NULL
;
1145 VEC (tree
, gc
) * newvuses
= NULL
;
1147 if (TREE_CODE (expr
) != INDIRECT_REF
1148 && TREE_CODE (expr
) != COMPONENT_REF
1149 && TREE_CODE (expr
) != ARRAY_REF
)
1152 oldop0
= find_leader_in_sets (oldop0
, set1
, set2
);
1153 newop0
= phi_translate_1 (oldop0
, set1
, set2
, pred
, phiblock
, seen
);
1157 if (TREE_CODE (expr
) == ARRAY_REF
)
1159 oldop1
= TREE_OPERAND (expr
, 1);
1160 oldop1
= find_leader_in_sets (oldop1
, set1
, set2
);
1161 newop1
= phi_translate_1 (oldop1
, set1
, set2
, pred
, phiblock
, seen
);
1166 oldop2
= TREE_OPERAND (expr
, 2);
1169 oldop2
= find_leader_in_sets (oldop2
, set1
, set2
);
1170 newop2
= phi_translate_1 (oldop2
, set1
, set2
, pred
, phiblock
, seen
);
1175 oldop3
= TREE_OPERAND (expr
, 3);
1178 oldop3
= find_leader_in_sets (oldop3
, set1
, set2
);
1179 newop3
= phi_translate_1 (oldop3
, set1
, set2
, pred
, phiblock
, seen
);
1186 oldvuses
= get_expression_vuses (expr
);
1188 newvuses
= translate_vuses_through_block (oldvuses
, phiblock
,
1191 if (newop0
!= oldop0
|| newvuses
!= oldvuses
1194 || newop3
!= oldop3
)
1198 newexpr
= (tree
) pool_alloc (reference_node_pool
);
1199 memcpy (newexpr
, expr
, tree_size (expr
));
1200 TREE_OPERAND (newexpr
, 0) = get_value_handle (newop0
);
1201 if (TREE_CODE (expr
) == ARRAY_REF
)
1203 TREE_OPERAND (newexpr
, 1) = get_value_handle (newop1
);
1205 TREE_OPERAND (newexpr
, 2) = get_value_handle (newop2
);
1207 TREE_OPERAND (newexpr
, 3) = get_value_handle (newop3
);
1210 t
= fully_constant_expression (newexpr
);
1214 pool_free (reference_node_pool
, newexpr
);
1219 newexpr
->base
.ann
= NULL
;
1220 vn_lookup_or_add_with_vuses (newexpr
, newvuses
);
1221 set_expression_vuses (newexpr
, newvuses
);
1225 phi_trans_add (oldexpr
, expr
, pred
, newvuses
);
1231 case tcc_comparison
:
1233 tree oldop1
= TREE_OPERAND (expr
, 0);
1234 tree oldval1
= oldop1
;
1235 tree oldop2
= TREE_OPERAND (expr
, 1);
1236 tree oldval2
= oldop2
;
1241 oldop1
= find_leader_in_sets (oldop1
, set1
, set2
);
1242 newop1
= phi_translate_1 (oldop1
, set1
, set2
, pred
, phiblock
, seen
);
1246 oldop2
= find_leader_in_sets (oldop2
, set1
, set2
);
1247 newop2
= phi_translate_1 (oldop2
, set1
, set2
, pred
, phiblock
, seen
);
1250 if (newop1
!= oldop1
|| newop2
!= oldop2
)
1253 newexpr
= (tree
) pool_alloc (binary_node_pool
);
1254 memcpy (newexpr
, expr
, tree_size (expr
));
1255 TREE_OPERAND (newexpr
, 0) = newop1
== oldop1
? oldval1
: get_value_handle (newop1
);
1256 TREE_OPERAND (newexpr
, 1) = newop2
== oldop2
? oldval2
: get_value_handle (newop2
);
1257 t
= fully_constant_expression (newexpr
);
1260 pool_free (binary_node_pool
, newexpr
);
1265 newexpr
->base
.ann
= NULL
;
1266 vn_lookup_or_add (newexpr
);
1270 phi_trans_add (oldexpr
, expr
, pred
, NULL
);
1276 tree oldop1
= TREE_OPERAND (expr
, 0);
1280 oldop1
= find_leader_in_sets (oldop1
, set1
, set2
);
1281 newop1
= phi_translate_1 (oldop1
, set1
, set2
, pred
, phiblock
, seen
);
1284 if (newop1
!= oldop1
)
1287 newexpr
= (tree
) pool_alloc (unary_node_pool
);
1288 memcpy (newexpr
, expr
, tree_size (expr
));
1289 TREE_OPERAND (newexpr
, 0) = get_value_handle (newop1
);
1290 t
= fully_constant_expression (newexpr
);
1293 pool_free (unary_node_pool
, newexpr
);
1298 newexpr
->base
.ann
= NULL
;
1299 vn_lookup_or_add (newexpr
);
1303 phi_trans_add (oldexpr
, expr
, pred
, NULL
);
1307 case tcc_exceptional
:
1312 gcc_assert (TREE_CODE (expr
) == SSA_NAME
);
1314 def_stmt
= SSA_NAME_DEF_STMT (expr
);
1315 if (TREE_CODE (def_stmt
) == PHI_NODE
1316 && bb_for_stmt (def_stmt
) == phiblock
)
1321 e
= find_edge (pred
, bb_for_stmt (phi
));
1325 tree def
= PHI_ARG_DEF (phi
, e
->dest_idx
);
1327 if (is_gimple_min_invariant (def
))
1330 if (is_undefined_value (def
))
1333 val
= get_value_handle (def
);
/* Translate EXPR using phis in PHIBLOCK, so that it has the values of
   the phis in PRED.
   Return NULL if we can't find a leader for each part of the
   translated expression.  */

static tree
phi_translate (tree expr, bitmap_set_t set1, bitmap_set_t set2,
	       basic_block pred, basic_block phiblock)
{
  /* Reset the shared cycle-detection bitmap before starting a fresh
     translation walk; phi_translate_1 marks SSA_NAMEs it has already
     visited there to avoid infinite recursion through phi cycles.  */
  bitmap_clear (seen_during_translate);
  return phi_translate_1 (expr, set1, set2, pred, phiblock,
			  seen_during_translate);
}
/* For each expression in SET, translate the value handles through phi nodes
   in PHIBLOCK using edge PHIBLOCK->PRED, and store the resulting
   expressions in DEST.  */

static void
phi_translate_set (bitmap_set_t dest, bitmap_set_t set, basic_block pred,
		   basic_block phiblock)
{
  VEC (tree, heap) *exprs;
  tree expr;
  int i;

  /* No phis in PHIBLOCK means translation is the identity; just copy
     the whole set.  */
  if (!phi_nodes (phiblock))
    {
      bitmap_set_copy (dest, set);
      return;
    }

  exprs = sorted_array_from_bitmap_set (set);
  for (i = 0; VEC_iterate (tree, exprs, i, expr); i++)
    {
      tree translated;
      translated = phi_translate (expr, set, NULL, pred, phiblock);

      /* Don't add constants or empty translations to the cache, since
	 we won't look them up that way, or use the result, anyway.  */
      if (translated && !is_gimple_min_invariant (translated))
	{
	  phi_trans_add (expr, translated, pred,
			 get_expression_vuses (translated));
	}

      /* A NULL translation means no leader was found for some part of
	 the expression; such expressions are simply dropped.  */
      if (translated != NULL)
	bitmap_value_insert_into_set (dest, translated);
    }
  VEC_free (tree, heap, exprs);
}
/* Find the leader for a value (i.e., the name representing that
   value) in a given set, and return it.  Return NULL if no leader is
   found.  */

static tree
bitmap_find_leader (bitmap_set_t set, tree val)
{
  if (val == NULL)
    return NULL;

  /* Constants are their own leader.  */
  if (constant_expr_p (val))
    return val;

  if (bitmap_set_contains_value (set, val))
    {
      /* Rather than walk the entire bitmap of expressions, and see
	 whether any of them has the value we are looking for, we look
	 at the reverse mapping, which tells us the set of expressions
	 that have a given value (IE value->expressions with that
	 value) and see if any of those expressions are in our set.
	 The number of expressions per value is usually significantly
	 less than the number of expressions in the set.  In fact, for
	 large testcases, doing it this way is roughly 5-10x faster
	 than walking the bitmap.
	 If this is somehow a significant lose for some cases, we can
	 choose which set to walk based on which set is smaller.  */
      unsigned int i;
      bitmap_iterator bi;
      bitmap_set_t exprset = VALUE_HANDLE_EXPR_SET (val);

      /* Return the first expression that is both in EXPRSET and SET;
	 any member works since they all carry the same value.  */
      EXECUTE_IF_AND_IN_BITMAP (exprset->expressions,
				set->expressions, 0, i, bi)
	return expression_for_id (i);
    }

  return NULL;
}
/* Determine if EXPR, a memory expression, is ANTIC_IN at the top of
   BLOCK by seeing if it is not killed in the block.  Note that we are
   only determining whether there is a store that kills it.  Because
   of the order in which clean iterates over values, we are guaranteed
   that altered operands will have caused us to be eliminated from the
   ANTIC_IN set already.  */

static bool
value_dies_in_block_x (tree expr, basic_block block)
{
  int i;
  tree vuse;
  VEC (tree, gc) *vuses = get_expression_vuses (expr);

  /* Conservatively, a value dies if it's vuses are defined in this
     block, unless they come from phi nodes (which are merge operations,
     rather than stores.  */
  for (i = 0; VEC_iterate (tree, vuses, i, vuse); i++)
    {
      tree def = SSA_NAME_DEF_STMT (vuse);

      /* Definitions outside BLOCK cannot kill the value here.  */
      if (bb_for_stmt (def) != block)
	continue;
      /* Phi nodes merge values; they do not store.  */
      if (TREE_CODE (def) == PHI_NODE)
	continue;
      /* Any other defining statement in BLOCK is a potential kill.  */
      return true;
    }
  return false;
}
/* Determine if the expression EXPR is valid in SET1 U SET2.
   ONLY SET2 CAN BE NULL.
   This means that we have a leader for each part of the expression
   (if it consists of values), or the expression is an SSA_NAME.
   For loads/calls, we also see if the vuses are killed in this block.

   NB: We never should run into a case where we have SSA_NAME +
   SSA_NAME or SSA_NAME + value.  The sets valid_in_sets is called on,
   the ANTIC sets, will only ever have SSA_NAME's or value expressions
   (IE VALUE1 + VALUE2, *VALUE1, VALUE1 < VALUE2)  */

/* True iff VAL has a representative in SET1, or in SET2 when SET2 is
   non-NULL.  Arguments may be evaluated more than once.  */
#define union_contains_value(SET1, SET2, VAL)			\
  (bitmap_set_contains_value ((SET1), (VAL))			\
   || ((SET2) && bitmap_set_contains_value ((SET2), (VAL))))
/* Worker for clean/dependent_clean: decide whether EXPR remains valid
   given the leaders available in SET1 U SET2 (see the comment above
   union_contains_value).  NOTE(review): recovered from a mangled
   paste; missing case labels and early returns were reconstructed —
   verify against the upstream tree-ssa-pre.c.  */
static bool
valid_in_sets (bitmap_set_t set1, bitmap_set_t set2, tree expr,
	       basic_block block)
{
  switch (TREE_CODE_CLASS (TREE_CODE (expr)))
    {
    case tcc_binary:
    case tcc_comparison:
      {
	tree op1 = TREE_OPERAND (expr, 0);
	tree op2 = TREE_OPERAND (expr, 1);

	/* Both operand values need leaders.  */
	return union_contains_value (set1, set2, op1)
	  && union_contains_value (set1, set2, op2);
      }

    case tcc_unary:
      {
	tree op1 = TREE_OPERAND (expr, 0);
	return union_contains_value (set1, set2, op1);
      }

    case tcc_expression:
    case tcc_vl_exp:
      {
	if (TREE_CODE (expr) == CALL_EXPR)
	  {
	    tree fn = CALL_EXPR_FN (expr);
	    tree sc = CALL_EXPR_STATIC_CHAIN (expr);
	    tree arg;
	    call_expr_arg_iterator iter;

	    /* Check the non-argument operands first.  */
	    if (!union_contains_value (set1, set2, fn)
		|| (sc && !union_contains_value (set1, set2, sc)))
	      return false;

	    /* Now check the operands.  */
	    FOR_EACH_CALL_EXPR_ARG (arg, iter, expr)
	      {
		if (!union_contains_value (set1, set2, arg))
		  return false;
	      }
	    /* The call reads memory; make sure its vuses are not
	       killed in BLOCK.  */
	    return !value_dies_in_block_x (expr, block);
	  }
	return false;
      }

    case tcc_reference:
      {
	if (TREE_CODE (expr) == INDIRECT_REF
	    || TREE_CODE (expr) == COMPONENT_REF
	    || TREE_CODE (expr) == ARRAY_REF)
	  {
	    tree op0 = TREE_OPERAND (expr, 0);
	    gcc_assert (is_gimple_min_invariant (op0)
			|| TREE_CODE (op0) == VALUE_HANDLE);
	    if (!union_contains_value (set1, set2, op0))
	      return false;
	    if (TREE_CODE (expr) == ARRAY_REF)
	      {
		/* ARRAY_REF carries index, element size, and offset
		   operands; the latter two may be absent.  */
		tree op1 = TREE_OPERAND (expr, 1);
		tree op2 = TREE_OPERAND (expr, 2);
		tree op3 = TREE_OPERAND (expr, 3);
		gcc_assert (is_gimple_min_invariant (op1)
			    || TREE_CODE (op1) == VALUE_HANDLE);
		if (!union_contains_value (set1, set2, op1))
		  return false;
		gcc_assert (!op2 || is_gimple_min_invariant (op2)
			    || TREE_CODE (op2) == VALUE_HANDLE);
		if (op2
		    && !union_contains_value (set1, set2, op2))
		  return false;
		gcc_assert (!op3 || is_gimple_min_invariant (op3)
			    || TREE_CODE (op3) == VALUE_HANDLE);
		if (op3
		    && !union_contains_value (set1, set2, op3))
		  return false;
	      }
	    /* Loads must additionally survive the block's stores.  */
	    return !value_dies_in_block_x (expr, block);
	  }
	return false;
      }

    case tcc_exceptional:
      {
	gcc_assert (TREE_CODE (expr) == SSA_NAME);
	return bitmap_set_contains_expr (AVAIL_OUT (block), expr);
      }

    case tcc_declaration:
      return !value_dies_in_block_x (expr, block);

    default:
      /* No other cases should be encountered.  */
      gcc_unreachable ();
    }
}
/* Clean the set of expressions that are no longer valid in SET1 or
   SET2.  This means expressions that are made up of values we have no
   leaders for in SET1 or SET2.  This version is used for partial
   anticipation, which means it is not valid in either ANTIC_IN or
   PA_IN.  */

static void
dependent_clean (bitmap_set_t set1, bitmap_set_t set2, basic_block block)
{
  /* Work on a sorted snapshot so removals during iteration are safe
     and processing order is deterministic.  */
  VEC (tree, heap) *exprs = sorted_array_from_bitmap_set (set1);
  tree expr;
  int i;

  for (i = 0; VEC_iterate (tree, exprs, i, expr); i++)
    {
      if (!valid_in_sets (set1, set2, expr, block))
	bitmap_remove_from_set (set1, expr);
    }
  VEC_free (tree, heap, exprs);
}
/* Clean the set of expressions that are no longer valid in SET.  This
   means expressions that are made up of values we have no leaders for
   in SET.  */

static void
clean (bitmap_set_t set, basic_block block)
{
  /* Snapshot SET in sorted order; we must not mutate the bitmap while
     iterating over it.  */
  VEC (tree, heap) *exprs = sorted_array_from_bitmap_set (set);
  tree expr;
  int i;

  for (i = 0; VEC_iterate (tree, exprs, i, expr); i++)
    {
      /* Validity against a single set: pass NULL for the second.  */
      if (!valid_in_sets (set, NULL, expr, block))
	bitmap_remove_from_set (set, expr);
    }
  VEC_free (tree, heap, exprs);
}
/* Bit BLOCK->INDEX is set when the block has at least one incoming
   abnormal edge; built once at the start of compute_antic so the
   per-block workers can punt cheaply.  */
static sbitmap has_abnormal_preds;

/* List of blocks that may have changed during ANTIC computation and
   thus need to be iterated over.  */

static sbitmap changed_blocks;
/* Decide whether to defer a block for a later iteration, or PHI
   translate SOURCE to DEST using phis in PHIBLOCK.  Return false if we
   should defer the block, and true if we processed it.  */

static bool
defer_or_phi_translate_block (bitmap_set_t dest, bitmap_set_t source,
			      basic_block block, basic_block phiblock)
{
  if (!BB_VISITED (phiblock))
    {
      /* The successor has not been processed yet; mark BLOCK changed
	 so it is revisited, and flag it deferred so the dumping code
	 knows its sets are not meaningful this iteration.  */
      SET_BIT (changed_blocks, block->index);
      BB_VISITED (block) = 0;
      BB_DEFERRED (block) = 1;
      return false;
    }
  else
    phi_translate_set (dest, source, block, phiblock);
  return true;
}
/* Compute the ANTIC set for BLOCK.

   If succs(BLOCK) > 1 then
     ANTIC_OUT[BLOCK] = intersection of ANTIC_IN[b] for all succ(BLOCK)
   else if succs(BLOCK) == 1 then
     ANTIC_OUT[BLOCK] = phi_translate (ANTIC_IN[succ(BLOCK)])

   ANTIC_IN[BLOCK] = clean(ANTIC_OUT[BLOCK] U EXP_GEN[BLOCK] - TMP_GEN[BLOCK])

   Returns true if the ANTIC_IN set changed.  NOTE(review): recovered
   from a mangled paste; local declarations and several control-flow
   lines were reconstructed — verify against upstream.  */

static bool
compute_antic_aux (basic_block block, bool block_has_abnormal_pred_edge)
{
  bool changed = false;
  bitmap_set_t S, old, ANTIC_OUT;
  bitmap_iterator bi;
  unsigned int bii;
  edge e;
  edge_iterator ei;

  old = ANTIC_OUT = S = NULL;
  BB_VISITED (block) = 1;

  /* If any edges from predecessors are abnormal, antic_in is empty,
     so do nothing.  */
  if (block_has_abnormal_pred_edge)
    goto maybe_dump_sets;

  old = ANTIC_IN (block);
  ANTIC_OUT = bitmap_set_new ();

  /* If the block has no successors, ANTIC_OUT is empty.  */
  if (EDGE_COUNT (block->succs) == 0)
    ;
  /* If we have one successor, we could have some phi nodes to
     translate through.  */
  else if (single_succ_p (block))
    {
      basic_block succ_bb = single_succ (block);

      /* We trade iterations of the dataflow equations for having to
	 phi translate the maximal set, which is incredibly slow
	 (since the maximal set often has 300+ members, even when you
	 have a small number of blocks).
	 Basically, we defer the computation of ANTIC for this block
	 until we have processed it's successor, which will inevitably
	 have a *much* smaller set of values to phi translate once
	 clean has been run on it.
	 The cost of doing this is that we technically perform more
	 iterations, however, they are lower cost iterations.

	 Timings for PRE on tramp3d-v4:
	 without maximal set fix: 11 seconds
	 with maximal set fix/without deferring: 26 seconds
	 with maximal set fix/with deferring: 11 seconds
      */

      if (!defer_or_phi_translate_block (ANTIC_OUT, ANTIC_IN (succ_bb),
					 block, succ_bb))
	{
	  changed = true;
	  goto maybe_dump_sets;
	}
    }
  /* If we have multiple successors, we take the intersection of all of
     them.  Note that in the case of loop exit phi nodes, we may have
     phis to translate through.  */
  else
    {
      VEC(basic_block, heap) * worklist;
      size_t i;
      basic_block bprime, first;

      worklist = VEC_alloc (basic_block, heap, EDGE_COUNT (block->succs));
      FOR_EACH_EDGE (e, ei, block->succs)
	VEC_quick_push (basic_block, worklist, e->dest);
      first = VEC_index (basic_block, worklist, 0);

      /* Seed ANTIC_OUT from the first successor; unvisited successors
	 contribute the maximal set (optimistic initialization).  */
      if (phi_nodes (first))
	{
	  bitmap_set_t from = ANTIC_IN (first);

	  if (!BB_VISITED (first))
	    from = maximal_set;
	  phi_translate_set (ANTIC_OUT, from, block, first);
	}
      else
	{
	  if (!BB_VISITED (first))
	    bitmap_set_copy (ANTIC_OUT, maximal_set);
	  else
	    bitmap_set_copy (ANTIC_OUT, ANTIC_IN (first));
	}

      /* Intersect in the remaining successors.  */
      for (i = 1; VEC_iterate (basic_block, worklist, i, bprime); i++)
	{
	  if (phi_nodes (bprime))
	    {
	      bitmap_set_t tmp = bitmap_set_new ();
	      bitmap_set_t from = ANTIC_IN (bprime);

	      if (!BB_VISITED (bprime))
		from = maximal_set;
	      phi_translate_set (tmp, from, block, bprime);
	      bitmap_set_and (ANTIC_OUT, tmp);
	      bitmap_set_free (tmp);
	    }
	  else
	    {
	      if (!BB_VISITED (bprime))
		bitmap_set_and (ANTIC_OUT, maximal_set);
	      else
		bitmap_set_and (ANTIC_OUT, ANTIC_IN (bprime));
	    }
	}
      VEC_free (basic_block, heap, worklist);
    }

  /* Generate ANTIC_OUT - TMP_GEN.  */
  S = bitmap_set_subtract (ANTIC_OUT, TMP_GEN (block));

  /* Start ANTIC_IN with EXP_GEN - TMP_GEN.  */
  ANTIC_IN (block) = bitmap_set_subtract (EXP_GEN (block),
					  TMP_GEN (block));

  /* Then union in the ANTIC_OUT - TMP_GEN values,
     to get ANTIC_OUT U EXP_GEN - TMP_GEN */
  FOR_EACH_EXPR_ID_IN_SET (S, bii, bi)
    bitmap_value_insert_into_set (ANTIC_IN (block),
				  expression_for_id (bii));

  clean (ANTIC_IN (block), block);

  /* !old->expressions can happen when we deferred a block.  */
  if (!old->expressions || !bitmap_set_equal (old, ANTIC_IN (block)))
    {
      changed = true;
      SET_BIT (changed_blocks, block->index);
      /* Predecessors must be revisited since their ANTIC depends on
	 ours.  */
      FOR_EACH_EDGE (e, ei, block->preds)
	SET_BIT (changed_blocks, e->src->index);
    }
  else
    RESET_BIT (changed_blocks, block->index);

 maybe_dump_sets:
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      if (!BB_DEFERRED (block) || BB_VISITED (block))
	{
	  if (ANTIC_OUT)
	    print_bitmap_set (dump_file, ANTIC_OUT, "ANTIC_OUT", block->index);

	  print_bitmap_set (dump_file, ANTIC_IN (block), "ANTIC_IN",
			    block->index);

	  if (S)
	    print_bitmap_set (dump_file, S, "S", block->index);
	}
      else
	{
	  fprintf (dump_file,
		   "Block %d was deferred for a future iteration.\n",
		   block->index);
	}
    }
  if (old)
    bitmap_set_free (old);
  if (S)
    bitmap_set_free (S);
  if (ANTIC_OUT)
    bitmap_set_free (ANTIC_OUT);
  return changed;
}
/* Compute PARTIAL_ANTIC for BLOCK.

   If succs(BLOCK) > 1 then
     PA_OUT[BLOCK] = value wise union of PA_IN[b] + all ANTIC_IN not
     in ANTIC_OUT for all succ(BLOCK)
   else if succs(BLOCK) == 1 then
     PA_OUT[BLOCK] = phi_translate (PA_IN[succ(BLOCK)])

   PA_IN[BLOCK] = dependent_clean(PA_OUT[BLOCK] - TMP_GEN[BLOCK]
				  - ANTIC_IN[BLOCK])

   Returns true if PA_IN changed.  NOTE(review): recovered from a
   mangled paste; declarations and some braces reconstructed — verify
   against upstream.  */

static bool
compute_partial_antic_aux (basic_block block,
			   bool block_has_abnormal_pred_edge)
{
  bool changed = false;
  bitmap_set_t old_PA_IN;
  bitmap_set_t PA_OUT;
  edge e;
  edge_iterator ei;

  old_PA_IN = PA_OUT = NULL;

  /* If any edges from predecessors are abnormal, antic_in is empty,
     so do nothing.  */
  if (block_has_abnormal_pred_edge)
    goto maybe_dump_sets;

  old_PA_IN = PA_IN (block);
  PA_OUT = bitmap_set_new ();

  /* If the block has no successors, ANTIC_OUT is empty.  */
  if (EDGE_COUNT (block->succs) == 0)
    ;
  /* If we have one successor, we could have some phi nodes to
     translate through.  Note that we can't phi translate across DFS
     back edges in partial antic, because it uses a union operation on
     the successors.  For recurrences like IV's, we will end up
     generating a new value in the set on each go around (i + 3 (VH.1)
     VH.1 + 1 (VH.2), VH.2 + 1 (VH.3), etc), forever.  */
  else if (single_succ_p (block))
    {
      basic_block succ = single_succ (block);
      if (!(single_succ_edge (block)->flags & EDGE_DFS_BACK))
	phi_translate_set (PA_OUT, PA_IN (succ), block, succ);
    }
  /* If we have multiple successors, we take the union of all of
     them.  */
  else
    {
      VEC(basic_block, heap) * worklist;
      size_t i;
      basic_block bprime;

      worklist = VEC_alloc (basic_block, heap, EDGE_COUNT (block->succs));
      FOR_EACH_EDGE (e, ei, block->succs)
	{
	  /* Skip back edges for the reason documented above.  */
	  if (e->flags & EDGE_DFS_BACK)
	    continue;
	  VEC_quick_push (basic_block, worklist, e->dest);
	}
      if (VEC_length (basic_block, worklist) > 0)
	{
	  for (i = 0; VEC_iterate (basic_block, worklist, i, bprime); i++)
	    {
	      unsigned int i;
	      bitmap_iterator bi;

	      /* Anything fully anticipable in a successor is at least
		 partially anticipable here.  */
	      FOR_EACH_EXPR_ID_IN_SET (ANTIC_IN (bprime), i, bi)
		bitmap_value_insert_into_set (PA_OUT,
					      expression_for_id (i));
	      if (phi_nodes (bprime))
		{
		  bitmap_set_t pa_in = bitmap_set_new ();
		  phi_translate_set (pa_in, PA_IN (bprime), block, bprime);
		  FOR_EACH_EXPR_ID_IN_SET (pa_in, i, bi)
		    bitmap_value_insert_into_set (PA_OUT,
						  expression_for_id (i));
		  bitmap_set_free (pa_in);
		}
	      else
		FOR_EACH_EXPR_ID_IN_SET (PA_IN (bprime), i, bi)
		  bitmap_value_insert_into_set (PA_OUT,
						expression_for_id (i));
	    }
	}
      VEC_free (basic_block, heap, worklist);
    }

  /* PA_IN starts with PA_OUT - TMP_GEN.
     Then we subtract things from ANTIC_IN.  */
  PA_IN (block) = bitmap_set_subtract (PA_OUT, TMP_GEN (block));

  /* For partial antic, we want to put back in the phi results, since
     we will properly avoid making them partially antic over backedges.  */
  bitmap_ior_into (PA_IN (block)->values, PHI_GEN (block)->values);
  bitmap_ior_into (PA_IN (block)->expressions, PHI_GEN (block)->expressions);

  /* PA_IN[block] = PA_IN[block] - ANTIC_IN[block] */
  bitmap_set_subtract_values (PA_IN (block), ANTIC_IN (block));

  dependent_clean (PA_IN (block), ANTIC_IN (block), block);

  if (!bitmap_set_equal (old_PA_IN, PA_IN (block)))
    {
      changed = true;
      SET_BIT (changed_blocks, block->index);
      FOR_EACH_EDGE (e, ei, block->preds)
	SET_BIT (changed_blocks, e->src->index);
    }
  else
    RESET_BIT (changed_blocks, block->index);

 maybe_dump_sets:
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      if (PA_OUT)
	print_bitmap_set (dump_file, PA_OUT, "PA_OUT", block->index);

      print_bitmap_set (dump_file, PA_IN (block), "PA_IN", block->index);
    }
  if (old_PA_IN)
    bitmap_set_free (old_PA_IN);
  if (PA_OUT)
    bitmap_set_free (PA_OUT);
  return changed;
}
/* Compute ANTIC and partial ANTIC sets.  Iterates compute_antic_aux
   (and, for -O3/partial-partial PRE, compute_partial_antic_aux) over
   the blocks in postorder until a fixed point is reached, using
   CHANGED_BLOCKS as a worklist.  NOTE(review): recovered from a
   mangled paste; loop headers and declarations reconstructed — verify
   against upstream.  */

static void
compute_antic (void)
{
  bool changed = true;
  int num_iterations = 0;
  basic_block block;
  int i;

  /* If any predecessor edges are abnormal, we punt, so antic_in is empty.
     We pre-build the map of blocks with incoming abnormal edges here.  */
  has_abnormal_preds = sbitmap_alloc (last_basic_block);
  sbitmap_zero (has_abnormal_preds);

  FOR_EACH_BB (block)
    {
      edge_iterator ei;
      edge e;

      FOR_EACH_EDGE (e, ei, block->preds)
	{
	  /* Clear any stale DFS-back-edge marks; they are recomputed
	     below only when partial antic runs.  */
	  e->flags &= ~EDGE_DFS_BACK;
	  if (e->flags & EDGE_ABNORMAL)
	    {
	      SET_BIT (has_abnormal_preds, block->index);
	      break;
	    }
	}

      BB_VISITED (block) = 0;
      BB_DEFERRED (block) = 0;
      /* While we are here, give empty ANTIC_IN sets to each block.  */
      ANTIC_IN (block) = bitmap_set_new ();
      PA_IN (block) = bitmap_set_new ();
    }

  /* At the exit block we anticipate nothing.  */
  ANTIC_IN (EXIT_BLOCK_PTR) = bitmap_set_new ();
  BB_VISITED (EXIT_BLOCK_PTR) = 1;
  PA_IN (EXIT_BLOCK_PTR) = bitmap_set_new ();

  changed_blocks = sbitmap_alloc (last_basic_block + 1);
  sbitmap_ones (changed_blocks);
  while (changed)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Starting iteration %d\n", num_iterations);
      num_iterations++;
      changed = false;
      /* Walk blocks in postorder so successors are (mostly) processed
	 before their predecessors.  */
      for (i = 0; i < last_basic_block - NUM_FIXED_BLOCKS; i++)
	{
	  if (TEST_BIT (changed_blocks, postorder[i]))
	    {
	      basic_block block = BASIC_BLOCK (postorder[i]);
	      changed |= compute_antic_aux (block,
					    TEST_BIT (has_abnormal_preds,
						      block->index));
	    }
	}
      /* Theoretically possible, but *highly* unlikely.  */
      gcc_assert (num_iterations < 50);
    }

  if (dump_file && (dump_flags & TDF_STATS))
    fprintf (dump_file, "compute_antic required %d iterations\n",
	     num_iterations);

  if (do_partial_partial)
    {
      sbitmap_ones (changed_blocks);
      /* Partial antic must not translate across DFS back edges; mark
	 them now.  */
      mark_dfs_back_edges ();
      num_iterations = 0;
      changed = true;
      while (changed)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Starting iteration %d\n", num_iterations);
	  num_iterations++;
	  changed = false;
	  for (i = 0; i < last_basic_block - NUM_FIXED_BLOCKS; i++)
	    {
	      if (TEST_BIT (changed_blocks, postorder[i]))
		{
		  basic_block block = BASIC_BLOCK (postorder[i]);
		  changed
		    |= compute_partial_antic_aux (block,
						  TEST_BIT (has_abnormal_preds,
							    block->index));
		}
	    }
	  /* Theoretically possible, but *highly* unlikely.  */
	  gcc_assert (num_iterations < 50);
	}
      if (dump_file && (dump_flags & TDF_STATS))
	fprintf (dump_file, "compute_partial_antic required %d iterations\n",
		 num_iterations);
    }
  sbitmap_free (has_abnormal_preds);
  sbitmap_free (changed_blocks);
}
/* Return true if we can value number the call in STMT.  This is true
   if we have a pure or constant call.  */

static bool
can_value_number_call (tree stmt)
{
  tree call = get_call_expr_in (stmt);

  /* Only side-effect-free calls have a well-defined value.  */
  if (call_expr_flags (call) & (ECF_PURE | ECF_CONST))
    return true;
  return false;
}
/* Return true if OP is an exception handler related operation, such as
   FILTER_EXPR or EXC_PTR_EXPR.  */

static bool
is_exception_related (tree op)
{
  return TREE_CODE (op) == FILTER_EXPR || TREE_CODE (op) == EXC_PTR_EXPR;
}
/* Return true if OP is a tree which we can perform value numbering
   on.  */

static bool
can_value_number_operation (tree op)
{
  /* Unary ops are fine unless they wrap EH machinery (FILTER_EXPR /
     EXC_PTR_EXPR), which must not be moved.  */
  return (UNARY_CLASS_P (op)
	  && !is_exception_related (TREE_OPERAND (op, 0)))
    || BINARY_CLASS_P (op)
    || COMPARISON_CLASS_P (op)
    || REFERENCE_CLASS_P (op)
    || (TREE_CODE (op) == CALL_EXPR
	&& can_value_number_call (op));
}
/* Return true if OP is a tree which we can perform PRE
   on.  This may not match the operations we can value number, but in
   a perfect world would.  */

static bool
can_PRE_operation (tree op)
{
  return UNARY_CLASS_P (op)
    || BINARY_CLASS_P (op)
    || COMPARISON_CLASS_P (op)
    || TREE_CODE (op) == INDIRECT_REF
    || TREE_CODE (op) == COMPONENT_REF
    || TREE_CODE (op) == CALL_EXPR
    || TREE_CODE (op) == ARRAY_REF;
}
/* Inserted expressions are placed onto this worklist, which is used
   for performing quick dead code elimination of insertions we made
   that didn't turn out to be necessary.  */
static VEC(tree,heap) *inserted_exprs;

/* Pool allocated fake store expressions are placed onto this
   worklist, which, after performing dead code elimination, is walked
   to see which expressions need to be put into GC'able memory  */
static VEC(tree, heap) *need_creation;
/* For COMPONENT_REF's and ARRAY_REF's, we can't have any intermediates for the
   COMPONENT_REF or INDIRECT_REF or ARRAY_REF portion, because we'd end up with
   trying to rename aggregates into ssa form directly, which is a no
   no.

   Thus, this routine doesn't create temporaries, it just builds a
   single access expression for the array, calling
   find_or_generate_expression to build the innermost pieces.

   This function is a subroutine of create_expression_by_pieces, and
   should not be called on it's own unless you really know what you
   are doing.

   NOTE(review): recovered from a mangled paste; the case labels and
   the function tail were reconstructed — verify against upstream.  */

static tree
create_component_ref_by_pieces (basic_block block, tree expr, tree stmts)
{
  tree genop = expr;
  tree folded;

  if (TREE_CODE (genop) == VALUE_HANDLE)
    {
      /* Prefer an existing leader over rebuilding the reference.  */
      tree found = bitmap_find_leader (AVAIL_OUT (block), expr);
      if (found)
	return found;
    }

  if (TREE_CODE (genop) == VALUE_HANDLE)
    {
      /* No leader; pick any expression carrying this value to serve
	 as the template for reconstruction.  */
      bitmap_set_t exprset = VALUE_HANDLE_EXPR_SET (expr);
      unsigned int firstbit = bitmap_first_set_bit (exprset->expressions);
      genop = expression_for_id (firstbit);
    }

  /* NOTE(review): the missing parentheses around the controlling
     expression are in the original; TREE_CODE's expansion is itself
     parenthesized, so this compiles.  */
  switch TREE_CODE (genop)
    {
    case ARRAY_REF:
      {
	tree op0;
	tree op1, op2, op3;
	op0 = create_component_ref_by_pieces (block,
					      TREE_OPERAND (genop, 0),
					      stmts);
	/* Index, element size, and offset operands may be value
	   handles needing their own leaders.  */
	op1 = TREE_OPERAND (genop, 1);
	if (TREE_CODE (op1) == VALUE_HANDLE)
	  op1 = find_or_generate_expression (block, op1, stmts);
	op2 = TREE_OPERAND (genop, 2);
	if (op2 && TREE_CODE (op2) == VALUE_HANDLE)
	  op2 = find_or_generate_expression (block, op2, stmts);
	op3 = TREE_OPERAND (genop, 3);
	if (op3 && TREE_CODE (op3) == VALUE_HANDLE)
	  op3 = find_or_generate_expression (block, op3, stmts);
	folded = build4 (ARRAY_REF, TREE_TYPE (genop), op0, op1,
			 op2, op3);
	return folded;
      }

    case COMPONENT_REF:
      {
	tree op0;
	tree op1;
	op0 = create_component_ref_by_pieces (block,
					      TREE_OPERAND (genop, 0),
					      stmts);
	/* op1 should be a FIELD_DECL, which are represented by
	   themselves.  */
	op1 = TREE_OPERAND (genop, 1);
	folded = fold_build3 (COMPONENT_REF, TREE_TYPE (genop), op0, op1,
			      NULL_TREE);
	return folded;
      }

    case INDIRECT_REF:
      {
	tree op1 = TREE_OPERAND (genop, 0);
	tree genop1 = find_or_generate_expression (block, op1, stmts);

	folded = fold_build1 (TREE_CODE (genop), TREE_TYPE (genop),
			      genop1);
	return folded;
      }

    default:
      gcc_unreachable ();
    }
}
/* Find a leader for an expression, or generate one using
   create_expression_by_pieces if it's ANTIC but
   complex.
   BLOCK is the basic_block we are looking for leaders in.
   EXPR is the expression to find a leader or generate for.
   STMTS is the statement list to put the inserted expressions on.
   Returns the SSA_NAME of the LHS of the generated expression or the
   leader.  */

static tree
find_or_generate_expression (basic_block block, tree expr, tree stmts)
{
  tree genop = bitmap_find_leader (AVAIL_OUT (block), expr);

  /* If it's still NULL, it must be a complex expression, so generate
     it recursively.  */
  if (genop == NULL)
    {
      bitmap_set_t exprset = VALUE_HANDLE_EXPR_SET (expr);
      bool handled = false;
      bitmap_iterator bi;
      unsigned int i;

      /* We will hit cases where we have SSA_NAME's in exprset before
	 other operations, because we may have come up with the SCCVN
	 value before getting to the RHS of the expression.  */
      FOR_EACH_EXPR_ID_IN_SET (exprset, i, bi)
	{
	  genop = expression_for_id (i);
	  if (can_PRE_operation (genop))
	    {
	      handled = true;
	      genop = create_expression_by_pieces (block, genop, stmts);
	      break;
	    }
	}
      /* Something must have been generatable, or EXPR should never
	 have made it into ANTIC.  */
      gcc_assert (handled);
    }
  return genop;
}
/* Reuse the statement's asm_written_flag to record whether a PRE
   insertion turned out to be necessary (consumed by the post-pass
   quick DCE over inserted_exprs).  */
#define NECESSARY(stmt)		stmt->base.asm_written_flag

/* Create an expression in pieces, so that we can handle very complex
   expressions that may be ANTIC, but not necessary GIMPLE.
   BLOCK is the basic block the expression will be inserted into,
   EXPR is the expression to insert (in value form)
   STMTS is a statement list to append the necessary insertions into.

   This function will die if we hit some value that shouldn't be
   ANTIC but is (IE there is no leader for it, or its components).
   This function may also generate expressions that are themselves
   partially or fully redundant.  Those that are will be either made
   fully redundant during the next iteration of insert (for partially
   redundant ones), or eliminated by eliminate (for fully redundant
   ones).

   NOTE(review): recovered from a mangled paste; case labels, locals,
   and a few statements were reconstructed — verify against upstream.  */

static tree
create_expression_by_pieces (basic_block block, tree expr, tree stmts)
{
  tree temp, name;
  tree folded, forced_stmts, newexpr;
  tree v;
  tree_stmt_iterator tsi;

  switch (TREE_CODE_CLASS (TREE_CODE (expr)))
    {
    case tcc_vl_exp:
      {
	tree fn, sc;
	tree genfn;
	int i, nargs;
	tree *buffer;

	gcc_assert (TREE_CODE (expr) == CALL_EXPR);

	fn = CALL_EXPR_FN (expr);
	sc = CALL_EXPR_STATIC_CHAIN (expr);

	genfn = find_or_generate_expression (block, fn, stmts);

	nargs = call_expr_nargs (expr);
	buffer = (tree*) alloca (nargs * sizeof (tree));

	for (i = 0; i < nargs; i++)
	  {
	    tree arg = CALL_EXPR_ARG (expr, i);
	    buffer[i] = find_or_generate_expression (block, arg, stmts);
	  }

	folded = build_call_array (TREE_TYPE (expr), genfn, nargs, buffer);
	if (sc)
	  CALL_EXPR_STATIC_CHAIN (folded) =
	    find_or_generate_expression (block, sc, stmts);
	folded = fold (folded);
	break;
      }

    case tcc_reference:
      {
	/* Aggregate references are rebuilt as one access expression;
	   everything else (e.g. a plain load) takes the unary path.  */
	if (TREE_CODE (expr) == COMPONENT_REF
	    || TREE_CODE (expr) == ARRAY_REF)
	  {
	    folded = create_component_ref_by_pieces (block, expr, stmts);
	  }
	else
	  {
	    tree op1 = TREE_OPERAND (expr, 0);
	    tree genop1 = find_or_generate_expression (block, op1, stmts);

	    folded = fold_build1 (TREE_CODE (expr), TREE_TYPE (expr),
				  genop1);
	  }
	break;
      }

    case tcc_binary:
    case tcc_comparison:
      {
	tree op1 = TREE_OPERAND (expr, 0);
	tree op2 = TREE_OPERAND (expr, 1);
	tree genop1 = find_or_generate_expression (block, op1, stmts);
	tree genop2 = find_or_generate_expression (block, op2, stmts);
	folded = fold_build2 (TREE_CODE (expr), TREE_TYPE (expr),
			      genop1, genop2);
	break;
      }

    case tcc_unary:
      {
	tree op1 = TREE_OPERAND (expr, 0);
	tree genop1 = find_or_generate_expression (block, op1, stmts);
	folded = fold_build1 (TREE_CODE (expr), TREE_TYPE (expr),
			      genop1);
	break;
      }

    default:
      gcc_unreachable ();
    }

  /* Force the generated expression to be a sequence of GIMPLE
     statements.
     We have to call unshare_expr because force_gimple_operand may
     modify the tree we pass to it.  */
  newexpr = force_gimple_operand (unshare_expr (folded), &forced_stmts,
				  false, NULL);

  /* If we have any intermediate expressions to the value sets, add them
     to the value sets and chain them on in the instruction stream.  */
  if (forced_stmts)
    {
      tsi = tsi_start (forced_stmts);
      for (; !tsi_end_p (tsi); tsi_next (&tsi))
	{
	  tree stmt = tsi_stmt (tsi);
	  tree forcedname = GIMPLE_STMT_OPERAND (stmt, 0);
	  tree forcedexpr = GIMPLE_STMT_OPERAND (stmt, 1);
	  tree val = vn_lookup_or_add (forcedexpr);

	  VEC_safe_push (tree, heap, inserted_exprs, stmt);
	  VN_INFO_GET (forcedname)->valnum = forcedname;
	  vn_add (forcedname, val);
	  bitmap_value_replace_in_set (NEW_SETS (block), forcedname);
	  bitmap_value_replace_in_set (AVAIL_OUT (block), forcedname);
	  mark_symbols_for_renaming (stmt);
	}
      tsi = tsi_last (stmts);
      tsi_link_after (&tsi, forced_stmts, TSI_CONTINUE_LINKING);
    }

  /* Build and insert the assignment of the end result to the temporary
     that we will return.  */
  if (!pretemp || TREE_TYPE (expr) != TREE_TYPE (pretemp))
    {
      /* Reuse one shared temporary per type to keep the decl count
	 down.  */
      pretemp = create_tmp_var (TREE_TYPE (expr), "pretmp");
      get_var_ann (pretemp);
    }

  temp = pretemp;
  add_referenced_var (temp);

  if (TREE_CODE (TREE_TYPE (expr)) == COMPLEX_TYPE
      || TREE_CODE (TREE_TYPE (expr)) == VECTOR_TYPE)
    DECL_GIMPLE_REG_P (temp) = 1;

  newexpr = build_gimple_modify_stmt (temp, newexpr);
  name = make_ssa_name (temp, newexpr);
  GIMPLE_STMT_OPERAND (newexpr, 0) = name;
  NECESSARY (newexpr) = 0;

  tsi = tsi_last (stmts);
  tsi_link_after (&tsi, newexpr, TSI_CONTINUE_LINKING);
  VEC_safe_push (tree, heap, inserted_exprs, newexpr);

  /* All the symbols in NEWEXPR should be put into SSA form.  */
  mark_symbols_for_renaming (newexpr);

  /* Add a value handle to the temporary.
     The value may already exist in either NEW_SETS, or AVAIL_OUT, because
     we are creating the expression by pieces, and this particular piece of
     the expression may have been represented.  There is no harm in replacing
     here.  */
  v = get_value_handle (expr);
  vn_add (name, v);
  VN_INFO_GET (name)->valnum = name;
  get_or_alloc_expression_id (name);
  bitmap_value_replace_in_set (NEW_SETS (block), name);
  bitmap_value_replace_in_set (AVAIL_OUT (block), name);

  pre_stats.insertions++;
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Inserted ");
      print_generic_expr (dump_file, newexpr, 0);
      fprintf (dump_file, " in predecessor %d\n", block->index);
    }

  return name;
}
2430 /* Insert the to-be-made-available values of expression EXPRNUM for each
2431 predecessor, stored in AVAIL, into the predecessors of BLOCK, and
2432 merge the result with a phi node, given the same value handle as
2433 NODE. Return true if we have inserted new stuff. */
2436 insert_into_preds_of_block (basic_block block
, unsigned int exprnum
,
2439 tree expr
= expression_for_id (exprnum
);
2440 tree val
= get_value_handle (expr
);
2442 bool insertions
= false;
2447 tree type
= TREE_TYPE (avail
[EDGE_PRED (block
, 0)->src
->index
]);
2450 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2452 fprintf (dump_file
, "Found partial redundancy for expression ");
2453 print_generic_expr (dump_file
, expr
, 0);
2454 fprintf (dump_file
, " (");
2455 print_generic_expr (dump_file
, val
, 0);
2456 fprintf (dump_file
, ")");
2457 fprintf (dump_file
, "\n");
2460 /* Make sure we aren't creating an induction variable. */
2461 if (block
->loop_depth
> 0 && EDGE_COUNT (block
->preds
) == 2
2462 && TREE_CODE_CLASS (TREE_CODE (expr
)) != tcc_reference
)
2464 bool firstinsideloop
= false;
2465 bool secondinsideloop
= false;
2466 firstinsideloop
= flow_bb_inside_loop_p (block
->loop_father
,
2467 EDGE_PRED (block
, 0)->src
);
2468 secondinsideloop
= flow_bb_inside_loop_p (block
->loop_father
,
2469 EDGE_PRED (block
, 1)->src
);
2470 /* Induction variables only have one edge inside the loop. */
2471 if (firstinsideloop
^ secondinsideloop
)
2473 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2474 fprintf (dump_file
, "Skipping insertion of phi for partial redundancy: Looks like an induction variable\n");
2480 /* Make the necessary insertions. */
2481 FOR_EACH_EDGE (pred
, ei
, block
->preds
)
2483 tree stmts
= alloc_stmt_list ();
2486 eprime
= avail
[bprime
->index
];
2488 if (can_PRE_operation (eprime
))
2490 builtexpr
= create_expression_by_pieces (bprime
,
2493 gcc_assert (!(pred
->flags
& EDGE_ABNORMAL
));
2494 bsi_insert_on_edge (pred
, stmts
);
2495 avail
[bprime
->index
] = builtexpr
;
2499 /* If we didn't want a phi node, and we made insertions, we still have
2500 inserted new stuff, and thus return true. If we didn't want a phi node,
2501 and didn't make insertions, we haven't added anything new, so return
2503 if (nophi
&& insertions
)
2505 else if (nophi
&& !insertions
)
2508 /* Now build a phi for the new variable. */
2509 if (!prephitemp
|| TREE_TYPE (prephitemp
) != type
)
2511 prephitemp
= create_tmp_var (type
, "prephitmp");
2512 get_var_ann (prephitemp
);
2516 add_referenced_var (temp
);
2519 if (TREE_CODE (type
) == COMPLEX_TYPE
2520 || TREE_CODE (type
) == VECTOR_TYPE
)
2521 DECL_GIMPLE_REG_P (temp
) = 1;
2522 temp
= create_phi_node (temp
, block
);
2524 NECESSARY (temp
) = 0;
2525 VN_INFO_GET (PHI_RESULT (temp
))->valnum
= PHI_RESULT (temp
);
2527 VEC_safe_push (tree
, heap
, inserted_exprs
, temp
);
2528 FOR_EACH_EDGE (pred
, ei
, block
->preds
)
2529 add_phi_arg (temp
, avail
[pred
->src
->index
], pred
);
2531 vn_add (PHI_RESULT (temp
), val
);
2533 /* The value should *not* exist in PHI_GEN, or else we wouldn't be doing
2534 this insertion, since we test for the existence of this value in PHI_GEN
2535 before proceeding with the partial redundancy checks in insert_aux.
2537 The value may exist in AVAIL_OUT, in particular, it could be represented
2538 by the expression we are trying to eliminate, in which case we want the
2539 replacement to occur. If it's not existing in AVAIL_OUT, we want it
2542 Similarly to the PHI_GEN case, the value should not exist in NEW_SETS of
2543 this block, because if it did, it would have existed in our dominator's
2544 AVAIL_OUT, and would have been skipped due to the full redundancy check.
2547 bitmap_insert_into_set (PHI_GEN (block
),
2549 bitmap_value_replace_in_set (AVAIL_OUT (block
),
2551 bitmap_insert_into_set (NEW_SETS (block
),
2554 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2556 fprintf (dump_file
, "Created phi ");
2557 print_generic_expr (dump_file
, temp
, 0);
2558 fprintf (dump_file
, " in block %d\n", block
->index
);
2566 /* Perform insertion of partially redundant values.
2567 For BLOCK, do the following:
2568 1. Propagate the NEW_SETS of the dominator into the current block.
2569 If the block has multiple predecessors,
2570 2a. Iterate over the ANTIC expressions for the block to see if
2571 any of them are partially redundant.
2572 2b. If so, insert them into the necessary predecessors to make
2573 the expression fully redundant.
2574 2c. Insert a new PHI merging the values of the predecessors.
2575 2d. Insert the new PHI, and the new expressions, into the
2577 3. Recursively call ourselves on the dominator children of BLOCK.
2579 Steps 1, 2a, and 3 are done by insert_aux. 2b, 2c and 2d are done by
2580 do_regular_insertion and do_partial_insertion.
2585 do_regular_insertion (basic_block block
, basic_block dom
)
2587 bool new_stuff
= false;
2588 VEC (tree
, heap
) *exprs
= sorted_array_from_bitmap_set (ANTIC_IN (block
));
2592 for (i
= 0; VEC_iterate (tree
, exprs
, i
, expr
); i
++)
2594 if (can_PRE_operation (expr
) && !AGGREGATE_TYPE_P (TREE_TYPE (expr
)))
2598 bool by_some
= false;
2599 bool cant_insert
= false;
2600 bool all_same
= true;
2601 tree first_s
= NULL
;
2604 tree eprime
= NULL_TREE
;
2607 val
= get_value_handle (expr
);
2608 if (bitmap_set_contains_value (PHI_GEN (block
), val
))
2610 if (bitmap_set_contains_value (AVAIL_OUT (dom
), val
))
2612 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2613 fprintf (dump_file
, "Found fully redundant value\n");
2617 avail
= XCNEWVEC (tree
, last_basic_block
);
2618 FOR_EACH_EDGE (pred
, ei
, block
->preds
)
2623 /* This can happen in the very weird case
2624 that our fake infinite loop edges have caused a
2625 critical edge to appear. */
2626 if (EDGE_CRITICAL_P (pred
))
2632 eprime
= phi_translate (expr
, ANTIC_IN (block
), NULL
,
2635 /* eprime will generally only be NULL if the
2636 value of the expression, translated
2637 through the PHI for this predecessor, is
2638 undefined. If that is the case, we can't
2639 make the expression fully redundant,
2640 because its value is undefined along a
2641 predecessor path. We can thus break out
2642 early because it doesn't matter what the
2643 rest of the results are. */
2650 eprime
= fully_constant_expression (eprime
);
2651 vprime
= get_value_handle (eprime
);
2652 gcc_assert (vprime
);
2653 edoubleprime
= bitmap_find_leader (AVAIL_OUT (bprime
),
2655 if (edoubleprime
== NULL
)
2657 avail
[bprime
->index
] = eprime
;
2662 avail
[bprime
->index
] = edoubleprime
;
2664 if (first_s
== NULL
)
2665 first_s
= edoubleprime
;
2666 else if (!operand_equal_p (first_s
, edoubleprime
,
2671 /* If we can insert it, it's not the same value
2672 already existing along every predecessor, and
2673 it's defined by some predecessor, it is
2674 partially redundant. */
2675 if (!cant_insert
&& !all_same
&& by_some
)
2677 if (insert_into_preds_of_block (block
, get_expression_id (expr
),
2681 /* If all edges produce the same value and that value is
2682 an invariant, then the PHI has the same value on all
2683 edges. Note this. */
2684 else if (!cant_insert
&& all_same
&& eprime
2685 && is_gimple_min_invariant (eprime
)
2686 && !is_gimple_min_invariant (val
))
2691 bitmap_set_t exprset
= VALUE_HANDLE_EXPR_SET (val
);
2692 FOR_EACH_EXPR_ID_IN_SET (exprset
, j
, bi
)
2694 tree expr
= expression_for_id (j
);
2695 if (TREE_CODE (expr
) == SSA_NAME
)
2697 vn_add (expr
, eprime
);
2698 pre_stats
.constified
++;
2706 VEC_free (tree
, heap
, exprs
);
2711 /* Perform insertion for partially anticipatable expressions. There
2712 is only one case we will perform insertion for these. This case is
2713 if the expression is partially anticipatable, and fully available.
2714 In this case, we know that putting it earlier will enable us to
2715 remove the later computation. */
2719 do_partial_partial_insertion (basic_block block
, basic_block dom
)
2721 bool new_stuff
= false;
2722 VEC (tree
, heap
) *exprs
= sorted_array_from_bitmap_set (PA_IN (block
));
2726 for (i
= 0; VEC_iterate (tree
, exprs
, i
, expr
); i
++)
2728 if (can_PRE_operation (expr
) && !AGGREGATE_TYPE_P (TREE_TYPE (expr
)))
2733 bool cant_insert
= false;
2736 tree eprime
= NULL_TREE
;
2739 val
= get_value_handle (expr
);
2740 if (bitmap_set_contains_value (PHI_GEN (block
), val
))
2742 if (bitmap_set_contains_value (AVAIL_OUT (dom
), val
))
2745 avail
= XCNEWVEC (tree
, last_basic_block
);
2746 FOR_EACH_EDGE (pred
, ei
, block
->preds
)
2751 /* This can happen in the very weird case
2752 that our fake infinite loop edges have caused a
2753 critical edge to appear. */
2754 if (EDGE_CRITICAL_P (pred
))
2760 eprime
= phi_translate (expr
, ANTIC_IN (block
),
2764 /* eprime will generally only be NULL if the
2765 value of the expression, translated
2766 through the PHI for this predecessor, is
2767 undefined. If that is the case, we can't
2768 make the expression fully redundant,
2769 because its value is undefined along a
2770 predecessor path. We can thus break out
2771 early because it doesn't matter what the
2772 rest of the results are. */
2779 eprime
= fully_constant_expression (eprime
);
2780 vprime
= get_value_handle (eprime
);
2781 gcc_assert (vprime
);
2782 edoubleprime
= bitmap_find_leader (AVAIL_OUT (bprime
),
2784 if (edoubleprime
== NULL
)
2790 avail
[bprime
->index
] = edoubleprime
;
2794 /* If we can insert it, it's not the same value
2795 already existing along every predecessor, and
2796 it's defined by some predecessor, it is
2797 partially redundant. */
2798 if (!cant_insert
&& by_all
)
2800 pre_stats
.pa_insert
++;
2801 if (insert_into_preds_of_block (block
, get_expression_id (expr
),
2809 VEC_free (tree
, heap
, exprs
);
2814 insert_aux (basic_block block
)
2817 bool new_stuff
= false;
2822 dom
= get_immediate_dominator (CDI_DOMINATORS
, block
);
2827 bitmap_set_t newset
= NEW_SETS (dom
);
2830 /* Note that we need to value_replace both NEW_SETS, and
2831 AVAIL_OUT. For both the case of NEW_SETS, the value may be
2832 represented by some non-simple expression here that we want
2833 to replace it with. */
2834 FOR_EACH_EXPR_ID_IN_SET (newset
, i
, bi
)
2836 tree expr
= expression_for_id (i
);
2837 bitmap_value_replace_in_set (NEW_SETS (block
), expr
);
2838 bitmap_value_replace_in_set (AVAIL_OUT (block
), expr
);
2841 if (!single_pred_p (block
))
2843 new_stuff
|= do_regular_insertion (block
, dom
);
2844 if (do_partial_partial
)
2845 new_stuff
|= do_partial_partial_insertion (block
, dom
);
2849 for (son
= first_dom_son (CDI_DOMINATORS
, block
);
2851 son
= next_dom_son (CDI_DOMINATORS
, son
))
2853 new_stuff
|= insert_aux (son
);
2859 /* Perform insertion of partially redundant values. */
2864 bool new_stuff
= true;
2866 int num_iterations
= 0;
2869 NEW_SETS (bb
) = bitmap_set_new ();
2875 new_stuff
= insert_aux (ENTRY_BLOCK_PTR
);
2877 if (num_iterations
> 2 && dump_file
&& (dump_flags
& TDF_STATS
))
2878 fprintf (dump_file
, "insert required %d iterations\n", num_iterations
);
2882 /* Return true if VAR is an SSA variable with no defining statement in
2883 this procedure, *AND* isn't a live-on-entry parameter. */
2886 is_undefined_value (tree expr
)
2888 return (TREE_CODE (expr
) == SSA_NAME
2889 && IS_EMPTY_STMT (SSA_NAME_DEF_STMT (expr
))
2890 /* PARM_DECLs and hard registers are always defined. */
2891 && TREE_CODE (SSA_NAME_VAR (expr
)) != PARM_DECL
);
2894 /* Add OP to EXP_GEN (block), and possibly to the maximal set if it is
2895 not defined by a phi node.
2896 PHI nodes can't go in the maximal sets because they are not in
2897 TMP_GEN, so it is possible to get into non-monotonic situations
2898 during ANTIC calculation, because it will *add* bits. */
2901 add_to_exp_gen (basic_block block
, tree op
)
2905 if (TREE_CODE (op
) == SSA_NAME
&& is_undefined_value (op
))
2907 bitmap_value_insert_into_set (EXP_GEN (block
), op
);
2908 if (TREE_CODE (op
) != SSA_NAME
2909 || TREE_CODE (SSA_NAME_DEF_STMT (op
)) != PHI_NODE
)
2910 bitmap_value_insert_into_set (maximal_set
, op
);
2915 /* Given an SSA variable VAR and an expression EXPR, compute the value
2916 number for EXPR and create a value handle (VAL) for it. If VAR and
2917 EXPR are not the same, associate VAL with VAR. Finally, add VAR to
2918 S1 and its value handle to S2, and to the maximal set if
2919 ADD_TO_MAXIMAL is true.
2921 VUSES represent the virtual use operands associated with EXPR (if
2925 add_to_sets (tree var
, tree expr
, VEC(tree
, gc
) *vuses
, bitmap_set_t s1
,
2929 val
= vn_lookup_or_add_with_vuses (expr
, vuses
);
2931 /* VAR and EXPR may be the same when processing statements for which
2932 we are not computing value numbers (e.g., non-assignments, or
2933 statements that make aliased stores). In those cases, we are
2934 only interested in making VAR available as its own value. */
2939 bitmap_insert_into_set (s1
, var
);
2941 bitmap_value_insert_into_set (s2
, var
);
2944 /* Find existing value expression that is the same as T,
2945 and return it if it exists. */
2948 find_existing_value_expr (tree t
, VEC (tree
, gc
) *vuses
)
2953 bitmap_set_t exprset
;
2955 if (REFERENCE_CLASS_P (t
) || TREE_CODE (t
) == CALL_EXPR
|| DECL_P (t
))
2956 vh
= vn_lookup_with_vuses (t
, vuses
);
2962 exprset
= VALUE_HANDLE_EXPR_SET (vh
);
2963 FOR_EACH_EXPR_ID_IN_SET (exprset
, bii
, bi
)
2965 tree efi
= expression_for_id (bii
);
2966 if (expressions_equal_p (t
, efi
))
2972 /* Given a unary or binary expression EXPR, create and return a new
2973 expression with the same structure as EXPR but with its operands
2974 replaced with the value handles of each of the operands of EXPR.
2976 VUSES represent the virtual use operands associated with EXPR (if
2977 any). Insert EXPR's operands into the EXP_GEN set for BLOCK. */
2980 create_value_expr_from (tree expr
, basic_block block
, VEC (tree
, gc
) *vuses
)
2983 enum tree_code code
= TREE_CODE (expr
);
2985 alloc_pool pool
= NULL
;
2988 gcc_assert (TREE_CODE_CLASS (code
) == tcc_unary
2989 || TREE_CODE_CLASS (code
) == tcc_binary
2990 || TREE_CODE_CLASS (code
) == tcc_comparison
2991 || TREE_CODE_CLASS (code
) == tcc_reference
2992 || TREE_CODE_CLASS (code
) == tcc_expression
2993 || TREE_CODE_CLASS (code
) == tcc_vl_exp
2994 || TREE_CODE_CLASS (code
) == tcc_exceptional
2995 || TREE_CODE_CLASS (code
) == tcc_declaration
);
2997 if (TREE_CODE_CLASS (code
) == tcc_unary
)
2998 pool
= unary_node_pool
;
2999 else if (TREE_CODE_CLASS (code
) == tcc_reference
)
3000 pool
= reference_node_pool
;
3001 else if (TREE_CODE_CLASS (code
) == tcc_binary
)
3002 pool
= binary_node_pool
;
3003 else if (TREE_CODE_CLASS (code
) == tcc_comparison
)
3004 pool
= comparison_node_pool
;
3006 gcc_assert (code
== CALL_EXPR
);
3008 if (code
== CALL_EXPR
)
3009 vexpr
= temp_copy_call_expr (expr
);
3012 vexpr
= (tree
) pool_alloc (pool
);
3013 memcpy (vexpr
, expr
, tree_size (expr
));
3016 for (i
= 0; i
< TREE_OPERAND_LENGTH (expr
); i
++)
3018 tree val
= NULL_TREE
;
3021 op
= TREE_OPERAND (expr
, i
);
3022 if (op
== NULL_TREE
)
3025 /* Recursively value-numberize reference ops and tree lists. */
3026 if (REFERENCE_CLASS_P (op
))
3028 tree tempop
= create_value_expr_from (op
, block
, vuses
);
3029 op
= tempop
? tempop
: op
;
3030 val
= vn_lookup_or_add_with_vuses (op
, vuses
);
3031 set_expression_vuses (op
, vuses
);
3035 val
= vn_lookup_or_add (op
);
3037 if (TREE_CODE (op
) != TREE_LIST
)
3038 add_to_exp_gen (block
, op
);
3040 if (TREE_CODE (val
) == VALUE_HANDLE
)
3041 TREE_TYPE (val
) = TREE_TYPE (TREE_OPERAND (vexpr
, i
));
3043 TREE_OPERAND (vexpr
, i
) = val
;
3045 efi
= find_existing_value_expr (vexpr
, vuses
);
3048 get_or_alloc_expression_id (vexpr
);
3052 /* Return a copy of NODE that is stored in the temporary alloc_pool's.
3053 This is made recursively true, so that the operands are stored in
3054 the pool as well. */
3057 poolify_tree (tree node
)
3059 switch (TREE_CODE (node
))
3063 tree temp
= (tree
) pool_alloc (reference_node_pool
);
3064 memcpy (temp
, node
, tree_size (node
));
3065 TREE_OPERAND (temp
, 0) = poolify_tree (TREE_OPERAND (temp
, 0));
3069 case GIMPLE_MODIFY_STMT
:
3071 tree temp
= (tree
) pool_alloc (modify_expr_node_pool
);
3072 memcpy (temp
, node
, tree_size (node
));
3073 GIMPLE_STMT_OPERAND (temp
, 0) =
3074 poolify_tree (GIMPLE_STMT_OPERAND (temp
, 0));
3075 GIMPLE_STMT_OPERAND (temp
, 1) =
3076 poolify_tree (GIMPLE_STMT_OPERAND (temp
, 1));
3094 static tree modify_expr_template
;
3096 /* Allocate a GIMPLE_MODIFY_STMT with TYPE, and operands OP1, OP2 in the
3097 alloc pools and return it. */
3099 poolify_modify_stmt (tree op1
, tree op2
)
3101 if (modify_expr_template
== NULL
)
3102 modify_expr_template
= build_gimple_modify_stmt (op1
, op2
);
3104 GIMPLE_STMT_OPERAND (modify_expr_template
, 0) = op1
;
3105 GIMPLE_STMT_OPERAND (modify_expr_template
, 1) = op2
;
3107 return poolify_tree (modify_expr_template
);
3111 /* For each real store operation of the form
3112 *a = <value> that we see, create a corresponding fake store of the
3113 form storetmp_<version> = *a.
3115 This enables AVAIL computation to mark the results of stores as
3116 available. Without this, you'd need to do some computation to
3117 mark the result of stores as ANTIC and AVAIL at all the right
3119 To save memory, we keep the store
3120 statements pool allocated until we decide whether they are
3121 necessary or not. */
3124 insert_fake_stores (void)
3130 block_stmt_iterator bsi
;
3131 for (bsi
= bsi_start (block
); !bsi_end_p (bsi
); bsi_next (&bsi
))
3133 tree stmt
= bsi_stmt (bsi
);
3135 /* We can't generate SSA names for stores that are complex
3136 or aggregate. We also want to ignore things whose
3137 virtual uses occur in abnormal phis. */
3139 if (TREE_CODE (stmt
) == GIMPLE_MODIFY_STMT
3140 && TREE_CODE (GIMPLE_STMT_OPERAND (stmt
, 0)) == INDIRECT_REF
3141 && !AGGREGATE_TYPE_P (TREE_TYPE (GIMPLE_STMT_OPERAND (stmt
, 0)))
3142 && TREE_CODE (TREE_TYPE (GIMPLE_STMT_OPERAND
3143 (stmt
, 0))) != COMPLEX_TYPE
)
3147 tree lhs
= GIMPLE_STMT_OPERAND (stmt
, 0);
3148 tree rhs
= GIMPLE_STMT_OPERAND (stmt
, 1);
3150 bool notokay
= false;
3152 FOR_EACH_SSA_DEF_OPERAND (defp
, stmt
, iter
, SSA_OP_VIRTUAL_DEFS
)
3154 tree defvar
= DEF_FROM_PTR (defp
);
3155 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (defvar
))
3165 if (!storetemp
|| TREE_TYPE (rhs
) != TREE_TYPE (storetemp
))
3167 storetemp
= create_tmp_var (TREE_TYPE (rhs
), "storetmp");
3168 if (TREE_CODE (TREE_TYPE (storetemp
)) == VECTOR_TYPE
)
3169 DECL_GIMPLE_REG_P (storetemp
) = 1;
3170 get_var_ann (storetemp
);
3173 new_tree
= poolify_modify_stmt (storetemp
, lhs
);
3175 lhs
= make_ssa_name (storetemp
, new_tree
);
3176 GIMPLE_STMT_OPERAND (new_tree
, 0) = lhs
;
3177 create_ssa_artificial_load_stmt (new_tree
, stmt
);
3179 NECESSARY (new_tree
) = 0;
3180 VEC_safe_push (tree
, heap
, inserted_exprs
, new_tree
);
3181 VEC_safe_push (tree
, heap
, need_creation
, new_tree
);
3182 bsi_insert_after (&bsi
, new_tree
, BSI_NEW_STMT
);
3188 /* Turn the pool allocated fake stores that we created back into real
3189 GC allocated ones if they turned out to be necessary to PRE some
3193 realify_fake_stores (void)
3198 for (i
= 0; VEC_iterate (tree
, need_creation
, i
, stmt
); i
++)
3200 if (NECESSARY (stmt
))
3202 block_stmt_iterator bsi
;
3205 /* Mark the temp variable as referenced */
3206 add_referenced_var (SSA_NAME_VAR (GIMPLE_STMT_OPERAND (stmt
, 0)));
3208 /* Put the new statement in GC memory, fix up the
3209 SSA_NAME_DEF_STMT on it, and then put it in place of
3210 the old statement before the store in the IR stream
3211 as a plain ssa name copy. */
3212 bsi
= bsi_for_stmt (stmt
);
3214 tmp
= GIMPLE_STMT_OPERAND (bsi_stmt (bsi
), 1);
3215 newstmt
= build_gimple_modify_stmt (GIMPLE_STMT_OPERAND (stmt
, 0),
3217 SSA_NAME_DEF_STMT (GIMPLE_STMT_OPERAND (newstmt
, 0)) = newstmt
;
3218 bsi_insert_before (&bsi
, newstmt
, BSI_SAME_STMT
);
3219 bsi
= bsi_for_stmt (stmt
);
3220 bsi_remove (&bsi
, true);
3223 release_defs (stmt
);
3227 /* Given an SSA_NAME, see if SCCVN has a value number for it, and if
3228 so, return the value handle for this value number, creating it if
3230 Return NULL if SCCVN has no info for us. */
3233 get_sccvn_value (tree name
)
3235 if (TREE_CODE (name
) == SSA_NAME
3236 && VN_INFO (name
)->valnum
!= name
3237 && VN_INFO (name
)->valnum
!= VN_TOP
)
3239 tree val
= VN_INFO (name
)->valnum
;
3240 bool is_invariant
= is_gimple_min_invariant (val
);
3241 tree valvh
= !is_invariant
? get_value_handle (val
) : NULL_TREE
;
3243 /* We may end up with situations where SCCVN has chosen a
3244 representative for the equivalence set that we have not
3245 visited yet. In this case, just create the value handle for
3247 if (!valvh
&& !is_invariant
)
3249 tree defstmt
= SSA_NAME_DEF_STMT (val
);
3251 gcc_assert (VN_INFO (val
)->valnum
== val
);
3252 /* PHI nodes can't have vuses and attempts to iterate over
3253 their VUSE operands will crash. */
3254 if (TREE_CODE (defstmt
) == PHI_NODE
|| IS_EMPTY_STMT (defstmt
))
3257 tree defstmt2
= SSA_NAME_DEF_STMT (name
);
3258 if (TREE_CODE (defstmt2
) != PHI_NODE
&&
3259 !ZERO_SSA_OPERANDS (defstmt2
, SSA_OP_ALL_VIRTUALS
))
3260 gcc_assert (defstmt
);
3262 valvh
= vn_lookup_or_add_with_stmt (val
, defstmt
);
3265 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3267 fprintf (dump_file
, "SCCVN says ");
3268 print_generic_expr (dump_file
, name
, 0);
3269 fprintf (dump_file
, " value numbers to ");
3270 if (valvh
&& !is_invariant
)
3272 print_generic_expr (dump_file
, val
, 0);
3273 fprintf (dump_file
, " (");
3274 print_generic_expr (dump_file
, valvh
, 0);
3275 fprintf (dump_file
, ")\n");
3278 print_generic_stmt (dump_file
, val
, 0);
3288 /* Create value handles for PHI in BLOCK. */
3291 make_values_for_phi (tree phi
, basic_block block
)
3293 tree result
= PHI_RESULT (phi
);
3294 /* We have no need for virtual phis, as they don't represent
3295 actual computations. */
3296 if (is_gimple_reg (result
))
3298 tree sccvnval
= get_sccvn_value (result
);
3301 vn_add (result
, sccvnval
);
3302 bitmap_insert_into_set (PHI_GEN (block
), result
);
3303 bitmap_value_insert_into_set (AVAIL_OUT (block
), result
);
3306 add_to_sets (result
, result
, NULL
,
3307 PHI_GEN (block
), AVAIL_OUT (block
));
3311 /* Create value handles for STMT in BLOCK. Return true if we handled
3315 make_values_for_stmt (tree stmt
, basic_block block
)
3318 tree lhs
= GIMPLE_STMT_OPERAND (stmt
, 0);
3319 tree rhs
= GIMPLE_STMT_OPERAND (stmt
, 1);
3320 tree valvh
= NULL_TREE
;
3322 VEC (tree
, gc
) *vuses
= NULL
;
3324 valvh
= get_sccvn_value (lhs
);
3328 vn_add (lhs
, valvh
);
3329 bitmap_value_insert_into_set (AVAIL_OUT (block
), lhs
);
3330 /* Shortcut for FRE. We have no need to create value expressions,
3331 just want to know what values are available where. */
3338 /* For FRE, if SCCVN didn't find anything, we aren't going to
3339 either, so just make up a new value number if necessary and
3341 if (get_value_handle (lhs
) == NULL
)
3342 vn_lookup_or_add (lhs
);
3343 bitmap_value_insert_into_set (AVAIL_OUT (block
), lhs
);
3347 lhsval
= valvh
? valvh
: get_value_handle (lhs
);
3348 vuses
= copy_vuses_from_stmt (stmt
);
3349 STRIP_USELESS_TYPE_CONVERSION (rhs
);
3350 if (can_value_number_operation (rhs
)
3351 && (!lhsval
|| !is_gimple_min_invariant (lhsval
)))
3353 /* For value numberable operation, create a
3354 duplicate expression with the operands replaced
3355 with the value handles of the original RHS. */
3356 tree newt
= create_value_expr_from (rhs
, block
, vuses
);
3359 set_expression_vuses (newt
, vuses
);
3360 /* If we already have a value number for the LHS, reuse
3361 it rather than creating a new one. */
3364 set_value_handle (newt
, lhsval
);
3365 if (!is_gimple_min_invariant (lhsval
))
3366 add_to_value (lhsval
, newt
);
3370 tree val
= vn_lookup_or_add_with_vuses (newt
, vuses
);
3374 add_to_exp_gen (block
, newt
);
3377 bitmap_insert_into_set (TMP_GEN (block
), lhs
);
3378 bitmap_value_insert_into_set (AVAIL_OUT (block
), lhs
);
3381 else if ((TREE_CODE (rhs
) == SSA_NAME
3382 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs
))
3383 || is_gimple_min_invariant (rhs
)
3384 || TREE_CODE (rhs
) == ADDR_EXPR
3385 || TREE_INVARIANT (rhs
)
3391 set_expression_vuses (rhs
, vuses
);
3392 set_value_handle (rhs
, lhsval
);
3393 if (!is_gimple_min_invariant (lhsval
))
3394 add_to_value (lhsval
, rhs
);
3395 bitmap_insert_into_set (TMP_GEN (block
), lhs
);
3396 bitmap_value_insert_into_set (AVAIL_OUT (block
), lhs
);
3400 /* Compute a value number for the RHS of the statement
3401 and add its value to the AVAIL_OUT set for the block.
3402 Add the LHS to TMP_GEN. */
3403 set_expression_vuses (rhs
, vuses
);
3404 add_to_sets (lhs
, rhs
, vuses
, TMP_GEN (block
),
3407 /* None of the rest of these can be PRE'd. */
3408 if (TREE_CODE (rhs
) == SSA_NAME
&& !is_undefined_value (rhs
))
3409 add_to_exp_gen (block
, rhs
);
3416 /* Compute the AVAIL set for all basic blocks.
3418 This function performs value numbering of the statements in each basic
3419 block. The AVAIL sets are built from information we glean while doing
3420 this value numbering, since the AVAIL sets contain only one entry per
3423 AVAIL_IN[BLOCK] = AVAIL_OUT[dom(BLOCK)].
3424 AVAIL_OUT[BLOCK] = AVAIL_IN[BLOCK] U PHI_GEN[BLOCK] U TMP_GEN[BLOCK]. */
3427 compute_avail (void)
3429 basic_block block
, son
;
3430 basic_block
*worklist
;
3434 /* For arguments with default definitions, we pretend they are
3435 defined in the entry block. */
3436 for (param
= DECL_ARGUMENTS (current_function_decl
);
3438 param
= TREE_CHAIN (param
))
3440 if (gimple_default_def (cfun
, param
) != NULL
)
3442 tree def
= gimple_default_def (cfun
, param
);
3444 vn_lookup_or_add (def
);
3447 bitmap_insert_into_set (TMP_GEN (ENTRY_BLOCK_PTR
), def
);
3448 bitmap_value_insert_into_set (maximal_set
, def
);
3450 bitmap_value_insert_into_set (AVAIL_OUT (ENTRY_BLOCK_PTR
), def
);
3454 /* Likewise for the static chain decl. */
3455 if (cfun
->static_chain_decl
)
3457 param
= cfun
->static_chain_decl
;
3458 if (gimple_default_def (cfun
, param
) != NULL
)
3460 tree def
= gimple_default_def (cfun
, param
);
3462 vn_lookup_or_add (def
);
3465 bitmap_insert_into_set (TMP_GEN (ENTRY_BLOCK_PTR
), def
);
3466 bitmap_value_insert_into_set (maximal_set
, def
);
3468 bitmap_value_insert_into_set (AVAIL_OUT (ENTRY_BLOCK_PTR
), def
);
3472 /* Allocate the worklist. */
3473 worklist
= XNEWVEC (basic_block
, n_basic_blocks
);
3475 /* Seed the algorithm by putting the dominator children of the entry
3476 block on the worklist. */
3477 for (son
= first_dom_son (CDI_DOMINATORS
, ENTRY_BLOCK_PTR
);
3479 son
= next_dom_son (CDI_DOMINATORS
, son
))
3480 worklist
[sp
++] = son
;
3482 /* Loop until the worklist is empty. */
3485 block_stmt_iterator bsi
;
3488 unsigned int stmt_uid
= 1;
3490 /* Pick a block from the worklist. */
3491 block
= worklist
[--sp
];
3493 /* Initially, the set of available values in BLOCK is that of
3494 its immediate dominator. */
3495 dom
= get_immediate_dominator (CDI_DOMINATORS
, block
);
3497 bitmap_set_copy (AVAIL_OUT (block
), AVAIL_OUT (dom
));
3499 /* Generate values for PHI nodes. */
3500 for (phi
= phi_nodes (block
); phi
; phi
= PHI_CHAIN (phi
))
3501 make_values_for_phi (phi
, block
);
3503 /* Now compute value numbers and populate value sets with all
3504 the expressions computed in BLOCK. */
3505 for (bsi
= bsi_start (block
); !bsi_end_p (bsi
); bsi_next (&bsi
))
3511 stmt
= bsi_stmt (bsi
);
3512 ann
= stmt_ann (stmt
);
3514 ann
->uid
= stmt_uid
++;
3516 /* For regular value numbering, we are only interested in
3517 assignments of the form X_i = EXPR, where EXPR represents
3518 an "interesting" computation, it has no volatile operands
3519 and X_i doesn't flow through an abnormal edge. */
3520 if (TREE_CODE (stmt
) == RETURN_EXPR
3521 && !ann
->has_volatile_ops
)
3523 tree realstmt
= stmt
;
3527 stmt
= TREE_OPERAND (stmt
, 0);
3528 if (stmt
&& TREE_CODE (stmt
) == GIMPLE_MODIFY_STMT
)
3530 lhs
= GIMPLE_STMT_OPERAND (stmt
, 0);
3531 rhs
= GIMPLE_STMT_OPERAND (stmt
, 1);
3532 if (TREE_CODE (lhs
) == SSA_NAME
3533 && is_gimple_min_invariant (VN_INFO (lhs
)->valnum
))
3535 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3537 fprintf (dump_file
, "SCCVN says ");
3538 print_generic_expr (dump_file
, lhs
, 0);
3539 fprintf (dump_file
, " value numbers to ");
3540 print_generic_stmt (dump_file
, VN_INFO (lhs
)->valnum
,
3543 vn_add (lhs
, VN_INFO (lhs
)->valnum
);
3547 if (TREE_CODE (rhs
) == SSA_NAME
)
3548 add_to_exp_gen (block
, rhs
);
3550 FOR_EACH_SSA_TREE_OPERAND (op
, realstmt
, iter
, SSA_OP_DEF
)
3551 add_to_sets (op
, op
, NULL
, TMP_GEN (block
),
3557 else if (TREE_CODE (stmt
) == GIMPLE_MODIFY_STMT
3558 && !ann
->has_volatile_ops
3559 && TREE_CODE (GIMPLE_STMT_OPERAND (stmt
, 0)) == SSA_NAME
3560 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI
3561 (GIMPLE_STMT_OPERAND (stmt
, 0)))
3563 if (make_values_for_stmt (stmt
, block
))
3568 /* For any other statement that we don't recognize, simply
3569 make the names generated by the statement available in
3570 AVAIL_OUT and TMP_GEN. */
3571 FOR_EACH_SSA_TREE_OPERAND (op
, stmt
, iter
, SSA_OP_DEF
)
3572 add_to_sets (op
, op
, NULL
, TMP_GEN (block
), AVAIL_OUT (block
));
3574 FOR_EACH_SSA_TREE_OPERAND (op
, stmt
, iter
, SSA_OP_USE
)
3576 add_to_sets (op
, op
, NULL
, NULL
, AVAIL_OUT (block
));
3577 if (TREE_CODE (op
) == SSA_NAME
|| can_PRE_operation (op
))
3578 add_to_exp_gen (block
, op
);
3582 /* Put the dominator children of BLOCK on the worklist of blocks
3583 to compute available sets for. */
3584 for (son
= first_dom_son (CDI_DOMINATORS
, block
);
3586 son
= next_dom_son (CDI_DOMINATORS
, son
))
3587 worklist
[sp
++] = son
;
3594 /* Eliminate fully redundant computations. */
3603 block_stmt_iterator i
;
3605 for (i
= bsi_start (b
); !bsi_end_p (i
); bsi_next (&i
))
3607 tree stmt
= bsi_stmt (i
);
3609 /* Lookup the RHS of the expression, see if we have an
3610 available computation for it. If so, replace the RHS with
3611 the available computation. */
3612 if (TREE_CODE (stmt
) == GIMPLE_MODIFY_STMT
3613 && TREE_CODE (GIMPLE_STMT_OPERAND (stmt
, 0)) == SSA_NAME
3614 && TREE_CODE (GIMPLE_STMT_OPERAND (stmt
, 1)) != SSA_NAME
3615 && !is_gimple_min_invariant (GIMPLE_STMT_OPERAND (stmt
, 1))
3616 && !stmt_ann (stmt
)->has_volatile_ops
)
3618 tree lhs
= GIMPLE_STMT_OPERAND (stmt
, 0);
3619 tree
*rhs_p
= &GIMPLE_STMT_OPERAND (stmt
, 1);
3622 sprime
= bitmap_find_leader (AVAIL_OUT (b
),
3623 get_value_handle (lhs
));
3627 && (TREE_CODE (*rhs_p
) != SSA_NAME
3628 || may_propagate_copy (*rhs_p
, sprime
)))
3630 gcc_assert (sprime
!= *rhs_p
);
3632 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3634 fprintf (dump_file
, "Replaced ");
3635 print_generic_expr (dump_file
, *rhs_p
, 0);
3636 fprintf (dump_file
, " with ");
3637 print_generic_expr (dump_file
, sprime
, 0);
3638 fprintf (dump_file
, " in ");
3639 print_generic_stmt (dump_file
, stmt
, 0);
3642 if (TREE_CODE (sprime
) == SSA_NAME
)
3643 NECESSARY (SSA_NAME_DEF_STMT (sprime
)) = 1;
3644 /* We need to make sure the new and old types actually match,
3645 which may require adding a simple cast, which fold_convert
3647 if (TREE_CODE (*rhs_p
) != SSA_NAME
3648 && !useless_type_conversion_p (TREE_TYPE (*rhs_p
),
3649 TREE_TYPE (sprime
)))
3650 sprime
= fold_convert (TREE_TYPE (*rhs_p
), sprime
);
3652 pre_stats
.eliminations
++;
3653 propagate_tree_value (rhs_p
, sprime
);
3656 /* If we removed EH side effects from the statement, clean
3657 its EH information. */
3658 if (maybe_clean_or_replace_eh_stmt (stmt
, stmt
))
3660 bitmap_set_bit (need_eh_cleanup
,
3661 bb_for_stmt (stmt
)->index
);
3662 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3663 fprintf (dump_file
, " Removed EH side effects.\n");
3671 /* Borrow a bit of tree-ssa-dce.c for the moment.
3672 XXX: In 4.1, we should be able to just run a DCE pass after PRE, though
3673 this may be a bit faster, and we may want critical edges kept split. */
3675 /* If OP's defining statement has not already been determined to be necessary,
3676 mark that statement necessary. Return the stmt, if it is newly
3680 mark_operand_necessary (tree op
)
3686 if (TREE_CODE (op
) != SSA_NAME
)
3689 stmt
= SSA_NAME_DEF_STMT (op
);
3692 if (NECESSARY (stmt
)
3693 || IS_EMPTY_STMT (stmt
))
3696 NECESSARY (stmt
) = 1;
3700 /* Because we don't follow exactly the standard PRE algorithm, and decide not
3701 to insert PHI nodes sometimes, and because value numbering of casts isn't
3702 perfect, we sometimes end up inserting dead code. This simple DCE-like
3703 pass removes any insertions we made that weren't actually used. */
3706 remove_dead_inserted_code (void)
3708 VEC(tree
,heap
) *worklist
= NULL
;
3712 worklist
= VEC_alloc (tree
, heap
, VEC_length (tree
, inserted_exprs
));
3713 for (i
= 0; VEC_iterate (tree
, inserted_exprs
, i
, t
); i
++)
3716 VEC_quick_push (tree
, worklist
, t
);
3718 while (VEC_length (tree
, worklist
) > 0)
3720 t
= VEC_pop (tree
, worklist
);
3722 /* PHI nodes are somewhat special in that each PHI alternative has
3723 data and control dependencies. All the statements feeding the
3724 PHI node's arguments are always necessary. */
3725 if (TREE_CODE (t
) == PHI_NODE
)
3729 VEC_reserve (tree
, heap
, worklist
, PHI_NUM_ARGS (t
));
3730 for (k
= 0; k
< PHI_NUM_ARGS (t
); k
++)
3732 tree arg
= PHI_ARG_DEF (t
, k
);
3733 if (TREE_CODE (arg
) == SSA_NAME
)
3735 arg
= mark_operand_necessary (arg
);
3737 VEC_quick_push (tree
, worklist
, arg
);
3743 /* Propagate through the operands. Examine all the USE, VUSE and
3744 VDEF operands in this statement. Mark all the statements
3745 which feed this statement's uses as necessary. */
3749 /* The operands of VDEF expressions are also needed as they
3750 represent potential definitions that may reach this
3751 statement (VDEF operands allow us to follow def-def
3754 FOR_EACH_SSA_TREE_OPERAND (use
, t
, iter
, SSA_OP_ALL_USES
)
3756 tree n
= mark_operand_necessary (use
);
3758 VEC_safe_push (tree
, heap
, worklist
, n
);
3763 for (i
= 0; VEC_iterate (tree
, inserted_exprs
, i
, t
); i
++)
3767 block_stmt_iterator bsi
;
3769 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3771 fprintf (dump_file
, "Removing unnecessary insertion:");
3772 print_generic_stmt (dump_file
, t
, 0);
3775 if (TREE_CODE (t
) == PHI_NODE
)
3777 remove_phi_node (t
, NULL
, true);
3781 bsi
= bsi_for_stmt (t
);
3782 bsi_remove (&bsi
, true);
3787 VEC_free (tree
, heap
, worklist
);
3790 /* Initialize data structures used by PRE. */
3793 init_pre (bool do_fre
)
3797 next_expression_id
= 0;
3799 expression_vuses
= NULL
;
3802 inserted_exprs
= NULL
;
3803 need_creation
= NULL
;
3804 pretemp
= NULL_TREE
;
3805 storetemp
= NULL_TREE
;
3806 prephitemp
= NULL_TREE
;
3809 loop_optimizer_init (LOOPS_NORMAL
);
3811 connect_infinite_loops_to_exit ();
3812 memset (&pre_stats
, 0, sizeof (pre_stats
));
3815 postorder
= XNEWVEC (int, n_basic_blocks
- NUM_FIXED_BLOCKS
);
3816 post_order_compute (postorder
, false, false);
3819 bb
->aux
= xcalloc (1, sizeof (struct bb_bitmap_sets
));
3821 calculate_dominance_info (CDI_POST_DOMINATORS
);
3822 calculate_dominance_info (CDI_DOMINATORS
);
3824 bitmap_obstack_initialize (&grand_bitmap_obstack
);
3825 phi_translate_table
= htab_create (5110, expr_pred_trans_hash
,
3826 expr_pred_trans_eq
, free
);
3827 seen_during_translate
= BITMAP_ALLOC (&grand_bitmap_obstack
);
3828 bitmap_set_pool
= create_alloc_pool ("Bitmap sets",
3829 sizeof (struct bitmap_set
), 30);
3830 binary_node_pool
= create_alloc_pool ("Binary tree nodes",
3831 tree_code_size (PLUS_EXPR
), 30);
3832 unary_node_pool
= create_alloc_pool ("Unary tree nodes",
3833 tree_code_size (NEGATE_EXPR
), 30);
3834 reference_node_pool
= create_alloc_pool ("Reference tree nodes",
3835 tree_code_size (ARRAY_REF
), 30);
3836 comparison_node_pool
= create_alloc_pool ("Comparison tree nodes",
3837 tree_code_size (EQ_EXPR
), 30);
3838 modify_expr_node_pool
= create_alloc_pool ("GIMPLE_MODIFY_STMT nodes",
3839 tree_code_size (GIMPLE_MODIFY_STMT
),
3841 obstack_init (&temp_call_expr_obstack
);
3842 modify_expr_template
= NULL
;
3846 EXP_GEN (bb
) = bitmap_set_new ();
3847 PHI_GEN (bb
) = bitmap_set_new ();
3848 TMP_GEN (bb
) = bitmap_set_new ();
3849 AVAIL_OUT (bb
) = bitmap_set_new ();
3851 maximal_set
= in_fre
? NULL
: bitmap_set_new ();
3853 need_eh_cleanup
= BITMAP_ALLOC (NULL
);
3857 /* Deallocate data structures used by PRE. */
3866 VEC_free (tree
, heap
, inserted_exprs
);
3867 VEC_free (tree
, heap
, need_creation
);
3868 bitmap_obstack_release (&grand_bitmap_obstack
);
3869 free_alloc_pool (bitmap_set_pool
);
3870 free_alloc_pool (binary_node_pool
);
3871 free_alloc_pool (reference_node_pool
);
3872 free_alloc_pool (unary_node_pool
);
3873 free_alloc_pool (comparison_node_pool
);
3874 free_alloc_pool (modify_expr_node_pool
);
3875 htab_delete (phi_translate_table
);
3876 remove_fake_exit_edges ();
3884 free_dominance_info (CDI_POST_DOMINATORS
);
3886 if (!bitmap_empty_p (need_eh_cleanup
))
3888 tree_purge_all_dead_eh_edges (need_eh_cleanup
);
3889 cleanup_tree_cfg ();
3892 BITMAP_FREE (need_eh_cleanup
);
3894 /* Wipe out pointers to VALUE_HANDLEs. In the not terribly distant
3895 future we will want them to be persistent though. */
3896 for (i
= 0; i
< num_ssa_names
; i
++)
3898 tree name
= ssa_name (i
);
3903 if (SSA_NAME_VALUE (name
)
3904 && TREE_CODE (SSA_NAME_VALUE (name
)) == VALUE_HANDLE
)
3905 SSA_NAME_VALUE (name
) = NULL
;
3907 if (current_loops
!= NULL
)
3908 loop_optimizer_finalize ();
3911 /* Main entry point to the SSA-PRE pass. DO_FRE is true if the caller
3912 only wants to do full redundancy elimination. */
3915 execute_pre (bool do_fre
)
3918 do_partial_partial
= optimize
> 2;
3922 insert_fake_stores ();
3924 /* Collect and value number expressions computed in each basic block. */
3926 switch_to_PRE_table ();
3929 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3935 print_bitmap_set (dump_file
, EXP_GEN (bb
), "exp_gen", bb
->index
);
3936 print_bitmap_set (dump_file
, TMP_GEN (bb
), "tmp_gen",
3938 print_bitmap_set (dump_file
, AVAIL_OUT (bb
), "avail_out",
3943 /* Insert can get quite slow on an incredibly large number of basic
3944 blocks due to some quadratic behavior. Until this behavior is
3945 fixed, don't run it when he have an incredibly large number of
3946 bb's. If we aren't going to run insert, there is no point in
3947 computing ANTIC, either, even though it's plenty fast. */
3948 if (!do_fre
&& n_basic_blocks
< 4000)
3954 /* Remove all the redundant expressions. */
3957 if (dump_file
&& (dump_flags
& TDF_STATS
))
3959 fprintf (dump_file
, "Insertions: %d\n", pre_stats
.insertions
);
3960 fprintf (dump_file
, "PA inserted: %d\n", pre_stats
.pa_insert
);
3961 fprintf (dump_file
, "New PHIs: %d\n", pre_stats
.phis
);
3962 fprintf (dump_file
, "Eliminated: %d\n", pre_stats
.eliminations
);
3963 fprintf (dump_file
, "Constified: %d\n", pre_stats
.constified
);
3965 bsi_commit_edge_inserts ();
3968 clear_expression_ids ();
3971 remove_dead_inserted_code ();
3972 realify_fake_stores ();
3978 /* Gate and execute functions for PRE. */
3983 execute_pre (false);
3984 return TODO_rebuild_alias
;
3990 return flag_tree_pre
!= 0;
/* Pass descriptor for SSA-PRE.
   NOTE(review): fields not visible in the garbled extraction ("pre"
   name, sub/next pointers, trailing letter) were reconstructed and
   should be verified against upstream.  */
struct tree_opt_pass pass_pre =
{
  "pre",				/* name */
  gate_pre,				/* gate */
  do_pre,				/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_PRE,				/* tv_id */
  PROP_no_crit_edges | PROP_cfg
    | PROP_ssa | PROP_alias,		/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_update_ssa_only_virtuals | TODO_dump_func | TODO_ggc_collect
  | TODO_verify_ssa,			/* todo_flags_finish */
  0					/* letter */
};
4013 /* Gate and execute functions for FRE. */
4025 return flag_tree_fre
!= 0;
/* Pass descriptor for FRE (full redundancy elimination only).
   NOTE(review): fields not visible in the garbled extraction ("fre"
   name, sub/next pointers, trailing letter) were reconstructed and
   should be verified against upstream.  */
struct tree_opt_pass pass_fre =
{
  "fre",				/* name */
  gate_fre,				/* gate */
  execute_fre,				/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_FRE,				/* tv_id */
  PROP_cfg | PROP_ssa | PROP_alias,	/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func | TODO_ggc_collect
  | TODO_verify_ssa,			/* todo_flags_finish */
  0					/* letter */
};