2 Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007
3 Free Software Foundation, Inc.
4 Contributed by Daniel Berlin <dan@dberlin.org> and Steven Bosscher
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify
10 it under the terms of the GNU General Public License as published by
11 the Free Software Foundation; either version 2, or (at your option)
14 GCC is distributed in the hope that it will be useful,
15 but WITHOUT ANY WARRANTY; without even the implied warranty of
16 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17 GNU General Public License for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING. If not, write to
21 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
22 Boston, MA 02110-1301, USA. */
26 #include "coretypes.h"
30 #include "basic-block.h"
31 #include "diagnostic.h"
32 #include "tree-inline.h"
33 #include "tree-flow.h"
34 #include "tree-gimple.h"
35 #include "tree-dump.h"
39 #include "tree-iterator.h"
41 #include "alloc-pool.h"
43 #include "tree-pass.h"
46 #include "langhooks.h"
51 1. Avail sets can be shared by making an avail_find_leader that
52 walks up the dominator tree and looks in those avail sets.
53 This might affect code optimality, it's unclear right now.
54 2. Strength reduction can be performed by anticipating expressions
55 we can repair later on.
56 3. We can do back-substitution or smarter value numbering to catch
57 commutative expressions split up over multiple statements.
60 /* For ease of terminology, "expression node" in the below refers to
61 every expression node but GIMPLE_MODIFY_STMT, because GIMPLE_MODIFY_STMT's
62 represent the actual statement containing the expressions we care about,
63 and we cache the value number by putting it in the expression. */
67 First we walk the statements to generate the AVAIL sets, the
68 EXP_GEN sets, and the tmp_gen sets. EXP_GEN sets represent the
69 generation of values/expressions by a given block. We use them
70 when computing the ANTIC sets. The AVAIL sets consist of
71 SSA_NAME's that represent values, so we know what values are
72 available in what blocks. AVAIL is a forward dataflow problem. In
73 SSA, values are never killed, so we don't need a kill set, or a
74 fixpoint iteration, in order to calculate the AVAIL sets. In
75 traditional parlance, AVAIL sets tell us the downsafety of the
78 Next, we generate the ANTIC sets. These sets represent the
79 anticipatable expressions. ANTIC is a backwards dataflow
80 problem. An expression is anticipatable in a given block if it could
81 be generated in that block. This means that if we had to perform
82 an insertion in that block, of the value of that expression, we
83 could. Calculating the ANTIC sets requires phi translation of
84 expressions, because the flow goes backwards through phis. We must
85 iterate to a fixpoint of the ANTIC sets, because we have a kill
86 set. Even in SSA form, values are not live over the entire
87 function, only from their definition point onwards. So we have to
88 remove values from the ANTIC set once we go past the definition
89 point of the leaders that make them up.
90 compute_antic/compute_antic_aux performs this computation.
92 Third, we perform insertions to make partially redundant
93 expressions fully redundant.
95 An expression is partially redundant (excluding partial
98 1. It is AVAIL in some, but not all, of the predecessors of a
100 2. It is ANTIC in all the predecessors.
102 In order to make it fully redundant, we insert the expression into
103 the predecessors where it is not available, but is ANTIC.
105 For the partial anticipation case, we only perform insertion if it
106 is partially anticipated in some block, and fully available in all
109 insert/insert_aux/do_regular_insertion/do_partial_partial_insertion
110 performs these steps.
112 Fourth, we eliminate fully redundant expressions.
113 This is a simple statement walk that replaces redundant
114 calculations with the now available values. */
116 /* Representations of value numbers:
118 Value numbers are represented using the "value handle" approach.
119 This means that each SSA_NAME (and for other reasons to be
120 disclosed in a moment, expression nodes) has a value handle that
121 can be retrieved through get_value_handle. This value handle *is*
122 the value number of the SSA_NAME. You can pointer compare the
123 value handles for equivalence purposes.
125 For debugging reasons, the value handle is internally more than
126 just a number, it is a VALUE_HANDLE named "VH.x", where x is a
127 unique number for each value number in use. This allows
128 expressions with SSA_NAMES replaced by value handles to still be
129 pretty printed in a sane way. They simply print as "VH.3 *
132 Expression nodes have value handles associated with them as a
133 cache. Otherwise, we'd have to look them up again in the hash
134 table This makes significant difference (factor of two or more) on
135 some test cases. They can be thrown away after the pass is
138 /* Representation of expressions on value numbers:
140 In some portions of this code, you will notice we allocate "fake"
141 analogues to the expression we are value numbering, and replace the
142 operands with the values of the expression. Since we work on
143 values, and not just names, we canonicalize expressions to value
144 expressions for use in the ANTIC sets, the EXP_GEN set, etc.
146 This is theoretically unnecessary, it just saves a bunch of
147 repeated get_value_handle and find_leader calls in the remainder of
148 the code, trading off temporary memory usage for speed. The tree
149 nodes aren't actually creating more garbage, since they are
150 allocated in a special pools which are thrown away at the end of
153 All of this also means that if you print the EXP_GEN or ANTIC sets,
154 you will see "VH.5 + VH.7" in the set, instead of "a_55 +
155 b_66" or something. The only thing that actually cares about
156 seeing the value leaders is phi translation, and it needs to be
157 able to find the leader for a value in an arbitrary block, so this
158 "value expression" form is perfect for it (otherwise you'd do
159 get_value_handle->find_leader->translate->get_value_handle->find_leader).*/
162 /* Representation of sets:
164 There are currently two types of sets used, hopefully to be unified soon.
165 The AVAIL sets do not need to be sorted in any particular order,
166 and thus, are simply represented as two bitmaps, one that keeps
167 track of values present in the set, and one that keeps track of
168 expressions present in the set.
170 The other sets are represented as doubly linked lists kept in topological
171 order, with an optional supporting bitmap of values present in the
172 set. The sets represent values, and the elements can be values or
173 expressions. The elements can appear in different sets, but each
174 element can only appear once in each set.
176 Since each node in the set represents a value, we also want to be
177 able to map expression, set pairs to something that tells us
178 whether the value is present is a set. We use a per-set bitmap for
179 that. The value handles also point to a linked list of the
180 expressions they represent via a tree annotation. This is mainly
181 useful only for debugging, since we don't do identity lookups. */
184 /* Next global expression id number. */
185 static unsigned int next_expression_id
;
187 /* Mapping from expression to id number we can use in bitmap sets. */
188 static VEC(tree
, heap
) *expressions
;
190 /* Allocate an expression id for EXPR. */
192 static inline unsigned int
193 alloc_expression_id (tree expr
)
195 tree_ann_common_t ann
;
197 ann
= get_tree_common_ann (expr
);
199 /* Make sure we won't overflow. */
200 gcc_assert (next_expression_id
+ 1 > next_expression_id
);
202 ann
->aux
= XNEW (unsigned int);
203 * ((unsigned int *)ann
->aux
) = next_expression_id
++;
204 VEC_safe_push (tree
, heap
, expressions
, expr
);
205 return next_expression_id
- 1;
208 /* Return the expression id for tree EXPR. */
210 static inline unsigned int
211 get_expression_id (tree expr
)
213 tree_ann_common_t ann
= tree_common_ann (expr
);
215 gcc_assert (ann
->aux
);
217 return *((unsigned int *)ann
->aux
);
220 /* Return the existing expression id for EXPR, or create one if one
221 does not exist yet. */
223 static inline unsigned int
224 get_or_alloc_expression_id (tree expr
)
226 tree_ann_common_t ann
= tree_common_ann (expr
);
228 if (ann
== NULL
|| !ann
->aux
)
229 return alloc_expression_id (expr
);
231 return get_expression_id (expr
);
234 /* Return the expression that has expression id ID */
237 expression_for_id (unsigned int id
)
239 return VEC_index (tree
, expressions
, id
);
242 /* Free the expression id field in all of our expressions,
243 and then destroy the expressions array. */
246 clear_expression_ids (void)
251 for (i
= 0; VEC_iterate (tree
, expressions
, i
, expr
); i
++)
253 free (tree_common_ann (expr
)->aux
);
254 tree_common_ann (expr
)->aux
= NULL
;
256 VEC_free (tree
, heap
, expressions
);
259 static bool in_fre
= false;
261 /* An unordered bitmap set. One bitmap tracks values, the other,
263 typedef struct bitmap_set
269 #define FOR_EACH_EXPR_ID_IN_SET(set, id, bi) \
270 EXECUTE_IF_SET_IN_BITMAP(set->expressions, 0, id, bi)
272 /* Sets that we need to keep track of. */
273 typedef struct bb_bitmap_sets
275 /* The EXP_GEN set, which represents expressions/values generated in
277 bitmap_set_t exp_gen
;
279 /* The PHI_GEN set, which represents PHI results generated in a
281 bitmap_set_t phi_gen
;
283 /* The TMP_GEN set, which represents results/temporaries generated
284 in a basic block. IE the LHS of an expression. */
285 bitmap_set_t tmp_gen
;
287 /* The AVAIL_OUT set, which represents which values are available in
288 a given basic block. */
289 bitmap_set_t avail_out
;
291 /* The ANTIC_IN set, which represents which values are anticipatable
292 in a given basic block. */
293 bitmap_set_t antic_in
;
295 /* The PA_IN set, which represents which values are
296 partially anticipatable in a given basic block. */
299 /* The NEW_SETS set, which is used during insertion to augment the
300 AVAIL_OUT set of blocks with the new insertions performed during
301 the current iteration. */
302 bitmap_set_t new_sets
;
304 /* These are the loads that will be ANTIC_IN at the top of the
305 block, and are actually generated in the block. */
306 bitmap_set_t antic_safe_loads
;
308 /* True if we have visited this block during ANTIC calculation. */
309 unsigned int visited
:1;
311 /* True we have deferred processing this block during ANTIC
312 calculation until its successor is processed. */
313 unsigned int deferred
: 1;
316 #define EXP_GEN(BB) ((bb_value_sets_t) ((BB)->aux))->exp_gen
317 #define PHI_GEN(BB) ((bb_value_sets_t) ((BB)->aux))->phi_gen
318 #define TMP_GEN(BB) ((bb_value_sets_t) ((BB)->aux))->tmp_gen
319 #define AVAIL_OUT(BB) ((bb_value_sets_t) ((BB)->aux))->avail_out
320 #define ANTIC_IN(BB) ((bb_value_sets_t) ((BB)->aux))->antic_in
321 #define PA_IN(BB) ((bb_value_sets_t) ((BB)->aux))->pa_in
322 #define NEW_SETS(BB) ((bb_value_sets_t) ((BB)->aux))->new_sets
323 #define ANTIC_SAFE_LOADS(BB) ((bb_value_sets_t) ((BB)->aux))->antic_safe_loads
324 #define BB_VISITED(BB) ((bb_value_sets_t) ((BB)->aux))->visited
325 #define BB_DEFERRED(BB) ((bb_value_sets_t) ((BB)->aux))->deferred
327 /* Maximal set of values, used to initialize the ANTIC problem, which
328 is an intersection problem. */
329 static bitmap_set_t maximal_set
;
331 /* Basic block list in postorder. */
332 static int *postorder
;
334 /* This structure is used to keep track of statistics on what
335 optimization PRE was able to perform. */
338 /* The number of RHS computations eliminated by PRE. */
341 /* The number of new expressions/temporaries generated by PRE. */
344 /* The number of inserts found due to partial anticipation */
347 /* The number of new PHI nodes added by PRE. */
350 /* The number of values found constant. */
355 static bool do_partial_partial
;
356 static tree
bitmap_find_leader (bitmap_set_t
, tree
);
357 static void bitmap_value_insert_into_set (bitmap_set_t
, tree
);
358 static void bitmap_value_replace_in_set (bitmap_set_t
, tree
);
359 static void bitmap_set_copy (bitmap_set_t
, bitmap_set_t
);
360 static bool bitmap_set_contains_value (bitmap_set_t
, tree
);
361 static void bitmap_insert_into_set (bitmap_set_t
, tree
);
362 static bitmap_set_t
bitmap_set_new (void);
363 static bool is_undefined_value (tree
);
364 static tree
create_expression_by_pieces (basic_block
, tree
, tree
);
365 static tree
find_or_generate_expression (basic_block
, tree
, tree
);
367 /* We can add and remove elements and entries to and from sets
368 and hash tables, so we use alloc pools for them. */
370 static alloc_pool bitmap_set_pool
;
371 static alloc_pool binary_node_pool
;
372 static alloc_pool unary_node_pool
;
373 static alloc_pool reference_node_pool
;
374 static alloc_pool comparison_node_pool
;
375 static alloc_pool modify_expr_node_pool
;
376 static bitmap_obstack grand_bitmap_obstack
;
378 /* We can't use allocation pools to hold temporary CALL_EXPR objects, since
379 they are not of fixed size. Instead, use an obstack. */
381 static struct obstack temp_call_expr_obstack
;
384 /* To avoid adding 300 temporary variables when we only need one, we
385 only create one temporary variable, on demand, and build ssa names
386 off that. We do have to change the variable if the types don't
387 match the current variable's type. */
389 static tree storetemp
;
390 static tree mergephitemp
;
391 static tree prephitemp
;
393 /* Set of blocks with statements that have had its EH information
395 static bitmap need_eh_cleanup
;
397 /* The phi_translate_table caches phi translations for a given
398 expression and predecessor. */
400 static htab_t phi_translate_table
;
402 /* A three tuple {e, pred, v} used to cache phi translations in the
403 phi_translate_table. */
405 typedef struct expr_pred_trans_d
407 /* The expression. */
410 /* The predecessor block along which we translated the expression. */
413 /* vuses associated with the expression. */
414 VEC (tree
, gc
) *vuses
;
416 /* The value that resulted from the translation. */
419 /* The hashcode for the expression, pred pair. This is cached for
422 } *expr_pred_trans_t
;
424 /* Return the hash value for a phi translation table entry. */
427 expr_pred_trans_hash (const void *p
)
429 const expr_pred_trans_t ve
= (expr_pred_trans_t
) p
;
433 /* Return true if two phi translation table entries are the same.
434 P1 and P2 should point to the expr_pred_trans_t's to be compared.*/
437 expr_pred_trans_eq (const void *p1
, const void *p2
)
439 const expr_pred_trans_t ve1
= (expr_pred_trans_t
) p1
;
440 const expr_pred_trans_t ve2
= (expr_pred_trans_t
) p2
;
441 basic_block b1
= ve1
->pred
;
442 basic_block b2
= ve2
->pred
;
446 /* If they are not translations for the same basic block, they can't
452 /* If they are for the same basic block, determine if the
453 expressions are equal. */
454 if (!expressions_equal_p (ve1
->e
, ve2
->e
))
457 /* Make sure the vuses are equivalent. */
458 if (ve1
->vuses
== ve2
->vuses
)
461 if (VEC_length (tree
, ve1
->vuses
) != VEC_length (tree
, ve2
->vuses
))
464 for (i
= 0; VEC_iterate (tree
, ve1
->vuses
, i
, vuse1
); i
++)
466 if (VEC_index (tree
, ve2
->vuses
, i
) != vuse1
)
473 /* Search in the phi translation table for the translation of
474 expression E in basic block PRED with vuses VUSES.
475 Return the translated value, if found, NULL otherwise. */
478 phi_trans_lookup (tree e
, basic_block pred
, VEC (tree
, gc
) *vuses
)
481 struct expr_pred_trans_d ept
;
486 ept
.hashcode
= vn_compute (e
, (unsigned long) pred
);
487 slot
= htab_find_slot_with_hash (phi_translate_table
, &ept
, ept
.hashcode
,
492 return ((expr_pred_trans_t
) *slot
)->v
;
496 /* Add the tuple mapping from {expression E, basic block PRED, vuses VUSES} to
497 value V, to the phi translation table. */
500 phi_trans_add (tree e
, tree v
, basic_block pred
, VEC (tree
, gc
) *vuses
)
503 expr_pred_trans_t new_pair
= XNEW (struct expr_pred_trans_d
);
505 new_pair
->pred
= pred
;
506 new_pair
->vuses
= vuses
;
508 new_pair
->hashcode
= vn_compute (e
, (unsigned long) pred
);
509 slot
= htab_find_slot_with_hash (phi_translate_table
, new_pair
,
510 new_pair
->hashcode
, INSERT
);
513 *slot
= (void *) new_pair
;
517 /* Return true if V is a value expression that represents itself.
518 In our world, this is *only* non-value handles. */
521 constant_expr_p (tree v
)
523 return TREE_CODE (v
) != VALUE_HANDLE
&& is_gimple_min_invariant (v
);
524 /* return TREE_CODE (v) != VALUE_HANDLE; */
527 /* Add expression E to the expression set of value V. */
530 add_to_value (tree v
, tree e
)
532 /* Constants have no expression sets. */
533 if (constant_expr_p (v
))
536 if (VALUE_HANDLE_EXPR_SET (v
) == NULL
)
537 VALUE_HANDLE_EXPR_SET (v
) = bitmap_set_new ();
539 bitmap_insert_into_set (VALUE_HANDLE_EXPR_SET (v
), e
);
542 /* Create a new bitmap set and return it. */
545 bitmap_set_new (void)
547 bitmap_set_t ret
= (bitmap_set_t
) pool_alloc (bitmap_set_pool
);
548 ret
->expressions
= BITMAP_ALLOC (&grand_bitmap_obstack
);
549 ret
->values
= BITMAP_ALLOC (&grand_bitmap_obstack
);
553 /* Remove an expression EXPR from a bitmapped set. */
556 bitmap_remove_from_set (bitmap_set_t set
, tree expr
)
558 tree val
= get_value_handle (expr
);
561 if (!constant_expr_p (val
))
563 bitmap_clear_bit (set
->values
, VALUE_HANDLE_ID (val
));
564 bitmap_clear_bit (set
->expressions
, get_expression_id (expr
));
568 /* Insert an expression EXPR into a bitmapped set. */
571 bitmap_insert_into_set (bitmap_set_t set
, tree expr
)
573 tree val
= get_value_handle (expr
);
576 if (!constant_expr_p (val
))
578 bitmap_set_bit (set
->values
, VALUE_HANDLE_ID (val
));
579 bitmap_set_bit (set
->expressions
, get_or_alloc_expression_id (expr
));
583 /* Copy a bitmapped set ORIG, into bitmapped set DEST. */
586 bitmap_set_copy (bitmap_set_t dest
, bitmap_set_t orig
)
588 bitmap_copy (dest
->expressions
, orig
->expressions
);
589 bitmap_copy (dest
->values
, orig
->values
);
593 /* Free memory used up by SET. */
595 bitmap_set_free (bitmap_set_t set
)
597 BITMAP_FREE (set
->expressions
);
598 BITMAP_FREE (set
->values
);
602 /* A comparison function for use in qsort to top sort a bitmap set. Simply
603 subtracts value handle ids, since they are created in topo-order. */
606 vh_compare (const void *pa
, const void *pb
)
608 const tree vha
= get_value_handle (*((const tree
*)pa
));
609 const tree vhb
= get_value_handle (*((const tree
*)pb
));
611 /* This can happen when we constify things. */
612 if (constant_expr_p (vha
))
614 if (constant_expr_p (vhb
))
618 else if (constant_expr_p (vhb
))
620 return VALUE_HANDLE_ID (vha
) - VALUE_HANDLE_ID (vhb
);
623 /* Generate an topological-ordered array of bitmap set SET. */
625 static VEC(tree
, heap
) *
626 sorted_array_from_bitmap_set (bitmap_set_t set
)
630 VEC(tree
, heap
) *result
= NULL
;
632 FOR_EACH_EXPR_ID_IN_SET (set
, i
, bi
)
633 VEC_safe_push (tree
, heap
, result
, expression_for_id (i
));
635 qsort (VEC_address (tree
, result
), VEC_length (tree
, result
),
636 sizeof (tree
), vh_compare
);
641 /* Perform bitmapped set operation DEST &= ORIG. */
644 bitmap_set_and (bitmap_set_t dest
, bitmap_set_t orig
)
651 bitmap temp
= BITMAP_ALLOC (&grand_bitmap_obstack
);
653 bitmap_and_into (dest
->values
, orig
->values
);
655 bitmap_copy (temp
, dest
->expressions
);
656 EXECUTE_IF_SET_IN_BITMAP (temp
, 0, i
, bi
)
658 tree expr
= expression_for_id (i
);
659 tree val
= get_value_handle (expr
);
660 if (!bitmap_bit_p (dest
->values
, VALUE_HANDLE_ID (val
)))
661 bitmap_clear_bit (dest
->expressions
, i
);
667 /* Subtract all values and expressions contained in ORIG from DEST. */
670 bitmap_set_subtract (bitmap_set_t dest
, bitmap_set_t orig
)
672 bitmap_set_t result
= bitmap_set_new ();
676 bitmap_and_compl (result
->expressions
, dest
->expressions
,
679 FOR_EACH_EXPR_ID_IN_SET (result
, i
, bi
)
681 tree expr
= expression_for_id (i
);
682 tree val
= get_value_handle (expr
);
683 bitmap_set_bit (result
->values
, VALUE_HANDLE_ID (val
));
689 /* Subtract all the values in bitmap set B from bitmap set A. */
692 bitmap_set_subtract_values (bitmap_set_t a
, bitmap_set_t b
)
696 bitmap temp
= BITMAP_ALLOC (&grand_bitmap_obstack
);
698 bitmap_copy (temp
, a
->expressions
);
699 EXECUTE_IF_SET_IN_BITMAP (temp
, 0, i
, bi
)
701 tree expr
= expression_for_id (i
);
702 if (bitmap_set_contains_value (b
, get_value_handle (expr
)))
703 bitmap_remove_from_set (a
, expr
);
709 /* Return true if bitmapped set SET contains the value VAL. */
712 bitmap_set_contains_value (bitmap_set_t set
, tree val
)
714 if (constant_expr_p (val
))
717 if (!set
|| bitmap_empty_p (set
->expressions
))
720 return bitmap_bit_p (set
->values
, VALUE_HANDLE_ID (val
));
724 bitmap_set_contains_expr (bitmap_set_t set
, tree expr
)
726 return bitmap_bit_p (set
->expressions
, get_expression_id (expr
));
729 /* Replace an instance of value LOOKFOR with expression EXPR in SET. */
732 bitmap_set_replace_value (bitmap_set_t set
, tree lookfor
, tree expr
)
734 bitmap_set_t exprset
;
738 if (constant_expr_p (lookfor
))
741 if (!bitmap_set_contains_value (set
, lookfor
))
744 /* The number of expressions having a given value is usually
745 significantly less than the total number of expressions in SET.
746 Thus, rather than check, for each expression in SET, whether it
747 has the value LOOKFOR, we walk the reverse mapping that tells us
748 what expressions have a given value, and see if any of those
749 expressions are in our set. For large testcases, this is about
750 5-10x faster than walking the bitmap. If this is somehow a
751 significant lose for some cases, we can choose which set to walk
752 based on the set size. */
753 exprset
= VALUE_HANDLE_EXPR_SET (lookfor
);
754 FOR_EACH_EXPR_ID_IN_SET (exprset
, i
, bi
)
756 if (bitmap_bit_p (set
->expressions
, i
))
758 bitmap_clear_bit (set
->expressions
, i
);
759 bitmap_set_bit (set
->expressions
, get_expression_id (expr
));
765 /* Return true if two bitmap sets are equal. */
768 bitmap_set_equal (bitmap_set_t a
, bitmap_set_t b
)
770 return bitmap_equal_p (a
->values
, b
->values
);
773 /* Replace an instance of EXPR's VALUE with EXPR in SET if it exists,
774 and add it otherwise. */
777 bitmap_value_replace_in_set (bitmap_set_t set
, tree expr
)
779 tree val
= get_value_handle (expr
);
781 if (bitmap_set_contains_value (set
, val
))
782 bitmap_set_replace_value (set
, val
, expr
);
784 bitmap_insert_into_set (set
, expr
);
787 /* Insert EXPR into SET if EXPR's value is not already present in
791 bitmap_value_insert_into_set (bitmap_set_t set
, tree expr
)
793 tree val
= get_value_handle (expr
);
795 if (constant_expr_p (val
))
798 if (!bitmap_set_contains_value (set
, val
))
799 bitmap_insert_into_set (set
, expr
);
802 /* Print out SET to OUTFILE. */
805 print_bitmap_set (FILE *outfile
, bitmap_set_t set
,
806 const char *setname
, int blockindex
)
808 fprintf (outfile
, "%s[%d] := { ", setname
, blockindex
);
815 FOR_EACH_EXPR_ID_IN_SET (set
, i
, bi
)
817 tree expr
= expression_for_id (i
);
820 fprintf (outfile
, ", ");
822 print_generic_expr (outfile
, expr
, 0);
824 fprintf (outfile
, " (");
825 print_generic_expr (outfile
, get_value_handle (expr
), 0);
826 fprintf (outfile
, ") ");
829 fprintf (outfile
, " }\n");
832 void debug_bitmap_set (bitmap_set_t
);
835 debug_bitmap_set (bitmap_set_t set
)
837 print_bitmap_set (stderr
, set
, "debug", 0);
840 /* Print out the expressions that have VAL to OUTFILE. */
843 print_value_expressions (FILE *outfile
, tree val
)
845 if (VALUE_HANDLE_EXPR_SET (val
))
848 sprintf (s
, "VH.%04d", VALUE_HANDLE_ID (val
));
849 print_bitmap_set (outfile
, VALUE_HANDLE_EXPR_SET (val
), s
, 0);
855 debug_value_expressions (tree val
)
857 print_value_expressions (stderr
, val
);
860 /* Return the folded version of T if T, when folded, is a gimple
861 min_invariant. Otherwise, return T. */
864 fully_constant_expression (tree t
)
868 if (folded
&& is_gimple_min_invariant (folded
))
873 /* Make a temporary copy of a CALL_EXPR object NODE. */
876 temp_copy_call_expr (tree node
)
878 return (tree
) obstack_copy (&temp_call_expr_obstack
, node
, tree_size (node
));
881 /* Translate the vuses in the VUSES vector backwards through phi nodes
882 in PHIBLOCK, so that they have the value they would have in
885 static VEC(tree
, gc
) *
886 translate_vuses_through_block (VEC (tree
, gc
) *vuses
,
887 basic_block phiblock
,
891 VEC(tree
, gc
) *result
= NULL
;
894 for (i
= 0; VEC_iterate (tree
, vuses
, i
, oldvuse
); i
++)
896 tree phi
= SSA_NAME_DEF_STMT (oldvuse
);
897 if (TREE_CODE (phi
) == PHI_NODE
898 && bb_for_stmt (phi
) == phiblock
)
900 edge e
= find_edge (block
, bb_for_stmt (phi
));
903 tree def
= PHI_ARG_DEF (phi
, e
->dest_idx
);
907 result
= VEC_copy (tree
, gc
, vuses
);
908 VEC_replace (tree
, result
, i
, def
);
914 /* We avoid creating a new copy of the vuses unless something
915 actually changed, so result can be NULL. */
925 /* Like find_leader, but checks for the value existing in SET1 *or*
926 SET2. This is used to avoid making a set consisting of the union
927 of PA_IN and ANTIC_IN during insert. */
930 find_leader_in_sets (tree expr
, bitmap_set_t set1
, bitmap_set_t set2
)
934 result
= bitmap_find_leader (set1
, expr
);
936 result
= bitmap_find_leader (set2
, expr
);
940 /* Translate EXPR using phis in PHIBLOCK, so that it has the values of
941 the phis in PRED. Return NULL if we can't find a leader for each
942 part of the translated expression. */
945 phi_translate (tree expr
, bitmap_set_t set1
, bitmap_set_t set2
,
946 basic_block pred
, basic_block phiblock
)
948 tree phitrans
= NULL
;
954 if (is_gimple_min_invariant (expr
))
957 /* Phi translations of a given expression don't change. */
958 if (EXPR_P (expr
) || GIMPLE_STMT_P (expr
))
962 vh
= get_value_handle (expr
);
963 if (vh
&& TREE_CODE (vh
) == VALUE_HANDLE
)
964 phitrans
= phi_trans_lookup (expr
, pred
, VALUE_HANDLE_VUSES (vh
));
966 phitrans
= phi_trans_lookup (expr
, pred
, NULL
);
969 phitrans
= phi_trans_lookup (expr
, pred
, NULL
);
974 switch (TREE_CODE_CLASS (TREE_CODE (expr
)))
981 if (TREE_CODE (expr
) != CALL_EXPR
)
985 tree oldfn
= CALL_EXPR_FN (expr
);
986 tree oldsc
= CALL_EXPR_STATIC_CHAIN (expr
);
987 tree newfn
, newsc
= NULL
;
988 tree newexpr
= NULL_TREE
;
989 tree vh
= get_value_handle (expr
);
990 bool invariantarg
= false;
992 VEC (tree
, gc
) *vuses
= VALUE_HANDLE_VUSES (vh
);
993 VEC (tree
, gc
) *tvuses
;
995 newfn
= phi_translate (find_leader_in_sets (oldfn
, set1
, set2
),
996 set1
, set2
, pred
, phiblock
);
1001 newexpr
= temp_copy_call_expr (expr
);
1002 CALL_EXPR_FN (newexpr
) = get_value_handle (newfn
);
1006 newsc
= phi_translate (find_leader_in_sets (oldsc
, set1
, set2
),
1007 set1
, set2
, pred
, phiblock
);
1013 newexpr
= temp_copy_call_expr (expr
);
1014 CALL_EXPR_STATIC_CHAIN (newexpr
) = get_value_handle (newsc
);
1018 /* phi translate the argument list piece by piece. */
1019 nargs
= call_expr_nargs (expr
);
1020 for (i
= 0; i
< nargs
; i
++)
1022 tree oldval
= CALL_EXPR_ARG (expr
, i
);
1026 /* This may seem like a weird place for this
1027 check, but it's actually the easiest place to
1028 do it. We can't do it lower on in the
1029 recursion because it's valid for pieces of a
1030 component ref to be of AGGREGATE_TYPE, as long
1031 as the outermost one is not.
1032 To avoid *that* case, we have a check for
1033 AGGREGATE_TYPE_P in insert_aux. However, that
1034 check will *not* catch this case because here
1035 it occurs in the argument list. */
1036 if (AGGREGATE_TYPE_P (TREE_TYPE (oldval
)))
1038 oldval
= find_leader_in_sets (oldval
, set1
, set2
);
1039 newval
= phi_translate (oldval
, set1
, set2
, pred
,
1043 if (newval
!= oldval
)
1045 invariantarg
|= is_gimple_min_invariant (newval
);
1047 newexpr
= temp_copy_call_expr (expr
);
1048 CALL_EXPR_ARG (newexpr
, i
) = get_value_handle (newval
);
1053 /* In case of new invariant args we might try to fold the call
1055 if (invariantarg
&& !newsc
)
1057 tree tmp1
= build_call_array (TREE_TYPE (expr
),
1058 newfn
, call_expr_nargs (newexpr
),
1059 CALL_EXPR_ARGP (newexpr
));
1060 tree tmp2
= fold (tmp1
);
1063 STRIP_TYPE_NOPS (tmp2
);
1064 if (is_gimple_min_invariant (tmp2
))
1069 tvuses
= translate_vuses_through_block (vuses
, phiblock
, pred
);
1070 if (vuses
!= tvuses
&& ! newexpr
)
1071 newexpr
= temp_copy_call_expr (expr
);
1075 newexpr
->base
.ann
= NULL
;
1076 vn_lookup_or_add_with_vuses (newexpr
, tvuses
);
1078 phi_trans_add (oldexpr
, newexpr
, pred
, tvuses
);
1084 case tcc_declaration
:
1086 VEC (tree
, gc
) * oldvuses
= NULL
;
1087 VEC (tree
, gc
) * newvuses
= NULL
;
1089 oldvuses
= VALUE_HANDLE_VUSES (get_value_handle (expr
));
1091 newvuses
= translate_vuses_through_block (oldvuses
, phiblock
,
1094 if (oldvuses
!= newvuses
)
1095 vn_lookup_or_add_with_vuses (expr
, newvuses
);
1097 phi_trans_add (oldexpr
, expr
, pred
, newvuses
);
1103 tree oldop0
= TREE_OPERAND (expr
, 0);
1112 VEC (tree
, gc
) * oldvuses
= NULL
;
1113 VEC (tree
, gc
) * newvuses
= NULL
;
1115 if (TREE_CODE (expr
) != INDIRECT_REF
1116 && TREE_CODE (expr
) != COMPONENT_REF
1117 && TREE_CODE (expr
) != ARRAY_REF
)
1120 oldop0
= find_leader_in_sets (oldop0
, set1
, set2
);
1121 newop0
= phi_translate (oldop0
, set1
, set2
, pred
, phiblock
);
1125 if (TREE_CODE (expr
) == ARRAY_REF
)
1127 oldop1
= TREE_OPERAND (expr
, 1);
1128 oldop1
= find_leader_in_sets (oldop1
, set1
, set2
);
1129 newop1
= phi_translate (oldop1
, set1
, set2
, pred
, phiblock
);
1134 oldop2
= TREE_OPERAND (expr
, 2);
1137 oldop2
= find_leader_in_sets (oldop2
, set1
, set2
);
1138 newop2
= phi_translate (oldop2
, set1
, set2
, pred
, phiblock
);
1143 oldop3
= TREE_OPERAND (expr
, 3);
1146 oldop3
= find_leader_in_sets (oldop3
, set1
, set2
);
1147 newop3
= phi_translate (oldop3
, set1
, set2
, pred
, phiblock
);
1154 oldvuses
= VALUE_HANDLE_VUSES (get_value_handle (expr
));
1156 newvuses
= translate_vuses_through_block (oldvuses
, phiblock
,
1159 if (newop0
!= oldop0
|| newvuses
!= oldvuses
1162 || newop3
!= oldop3
)
1166 newexpr
= (tree
) pool_alloc (reference_node_pool
);
1167 memcpy (newexpr
, expr
, tree_size (expr
));
1168 TREE_OPERAND (newexpr
, 0) = get_value_handle (newop0
);
1169 if (TREE_CODE (expr
) == ARRAY_REF
)
1171 TREE_OPERAND (newexpr
, 1) = get_value_handle (newop1
);
1173 TREE_OPERAND (newexpr
, 2) = get_value_handle (newop2
);
1175 TREE_OPERAND (newexpr
, 3) = get_value_handle (newop3
);
1178 t
= fully_constant_expression (newexpr
);
1182 pool_free (reference_node_pool
, newexpr
);
1187 newexpr
->base
.ann
= NULL
;
1188 vn_lookup_or_add_with_vuses (newexpr
, newvuses
);
1191 phi_trans_add (oldexpr
, newexpr
, pred
, newvuses
);
1198 case tcc_comparison
:
1200 tree oldop1
= TREE_OPERAND (expr
, 0);
1201 tree oldval1
= oldop1
;
1202 tree oldop2
= TREE_OPERAND (expr
, 1);
1203 tree oldval2
= oldop2
;
1208 oldop1
= find_leader_in_sets (oldop1
, set1
, set2
);
1209 newop1
= phi_translate (oldop1
, set1
, set2
, pred
, phiblock
);
1213 oldop2
= find_leader_in_sets (oldop2
, set1
, set2
);
1214 newop2
= phi_translate (oldop2
, set1
, set2
, pred
, phiblock
);
1217 if (newop1
!= oldop1
|| newop2
!= oldop2
)
1220 newexpr
= (tree
) pool_alloc (binary_node_pool
);
1221 memcpy (newexpr
, expr
, tree_size (expr
));
1222 TREE_OPERAND (newexpr
, 0) = newop1
== oldop1
? oldval1
: get_value_handle (newop1
);
1223 TREE_OPERAND (newexpr
, 1) = newop2
== oldop2
? oldval2
: get_value_handle (newop2
);
1224 t
= fully_constant_expression (newexpr
);
1227 pool_free (binary_node_pool
, newexpr
);
1232 newexpr
->base
.ann
= NULL
;
1233 vn_lookup_or_add (newexpr
, NULL
);
1236 phi_trans_add (oldexpr
, newexpr
, pred
, NULL
);
1243 tree oldop1
= TREE_OPERAND (expr
, 0);
1247 oldop1
= find_leader_in_sets (oldop1
, set1
, set2
);
1248 newop1
= phi_translate (oldop1
, set1
, set2
, pred
, phiblock
);
1251 if (newop1
!= oldop1
)
1254 newexpr
= (tree
) pool_alloc (unary_node_pool
);
1255 memcpy (newexpr
, expr
, tree_size (expr
));
1256 TREE_OPERAND (newexpr
, 0) = get_value_handle (newop1
);
1257 t
= fully_constant_expression (newexpr
);
1260 pool_free (unary_node_pool
, newexpr
);
1265 newexpr
->base
.ann
= NULL
;
1266 vn_lookup_or_add (newexpr
, NULL
);
1269 phi_trans_add (oldexpr
, newexpr
, pred
, NULL
);
1274 case tcc_exceptional
:
1279 gcc_assert (TREE_CODE (expr
) == SSA_NAME
);
1281 def_stmt
= SSA_NAME_DEF_STMT (expr
);
1282 if (TREE_CODE (def_stmt
) == PHI_NODE
1283 && bb_for_stmt (def_stmt
) == phiblock
)
1288 e
= find_edge (pred
, bb_for_stmt (phi
));
1291 if (is_undefined_value (PHI_ARG_DEF (phi
, e
->dest_idx
)))
1293 vn_lookup_or_add (PHI_ARG_DEF (phi
, e
->dest_idx
), NULL
);
1294 return PHI_ARG_DEF (phi
, e
->dest_idx
);
1304 /* For each expression in SET, translate the value handles through phi nodes
1305 in PHIBLOCK using edge PHIBLOCK->PRED, and store the resulting
1306 expressions in DEST. */
1309 phi_translate_set (bitmap_set_t dest
, bitmap_set_t set
, basic_block pred
,
1310 basic_block phiblock
)
1312 VEC (tree
, heap
) *exprs
;
1316 if (!phi_nodes (phiblock
))
1318 bitmap_set_copy (dest
, set
);
1322 exprs
= sorted_array_from_bitmap_set (set
);
1323 for (i
= 0; VEC_iterate (tree
, exprs
, i
, expr
); i
++)
1326 translated
= phi_translate (expr
, set
, NULL
, pred
, phiblock
);
1328 /* Don't add constants or empty translations to the cache, since
1329 we won't look them up that way, or use the result, anyway. */
1330 if (translated
&& !is_gimple_min_invariant (translated
))
1332 tree vh
= get_value_handle (translated
);
1333 VEC (tree
, gc
) *vuses
;
1335 /* The value handle itself may also be an invariant, in
1336 which case, it has no vuses. */
1337 vuses
= !is_gimple_min_invariant (vh
)
1338 ? VALUE_HANDLE_VUSES (vh
) : NULL
;
1339 phi_trans_add (expr
, translated
, pred
, vuses
);
1342 if (translated
!= NULL
)
1343 bitmap_value_insert_into_set (dest
, translated
);
1345 VEC_free (tree
, heap
, exprs
);
1348 /* Find the leader for a value (i.e., the name representing that
1349 value) in a given set, and return it. Return NULL if no leader is
1353 bitmap_find_leader (bitmap_set_t set
, tree val
)
1358 if (constant_expr_p (val
))
1361 if (bitmap_set_contains_value (set
, val
))
1363 /* Rather than walk the entire bitmap of expressions, and see
1364 whether any of them has the value we are looking for, we look
1365 at the reverse mapping, which tells us the set of expressions
1366 that have a given value (IE value->expressions with that
1367 value) and see if any of those expressions are in our set.
1368 The number of expressions per value is usually significantly
1369 less than the number of expressions in the set. In fact, for
1370 large testcases, doing it this way is roughly 5-10x faster
1371 than walking the bitmap.
1372 If this is somehow a significant lose for some cases, we can
1373 choose which set to walk based on which set is smaller. */
1376 bitmap_set_t exprset
= VALUE_HANDLE_EXPR_SET (val
);
1378 EXECUTE_IF_AND_IN_BITMAP (exprset
->expressions
,
1379 set
->expressions
, 0, i
, bi
)
1380 return expression_for_id (i
);
1385 /* Determine if VALUE, a memory operation, is ANTIC_IN at the top of
1386 BLOCK by seeing if it is not killed in the block. Note that we are
1387 only determining whether there is a store that kills it. Because
1388 of the order in which clean iterates over values, we are guaranteed
1389 that altered operands will have caused us to be eliminated from the
1390 ANTIC_IN set already. */
1393 value_dies_in_block_x (tree vh
, basic_block block
)
1397 VEC (tree
, gc
) *vuses
= VALUE_HANDLE_VUSES (vh
);
1399 /* Conservatively, a value dies if it's vuses are defined in this
1400 block, unless they come from phi nodes (which are merge operations,
1401 rather than stores. */
1402 for (i
= 0; VEC_iterate (tree
, vuses
, i
, vuse
); i
++)
1404 tree def
= SSA_NAME_DEF_STMT (vuse
);
1406 if (bb_for_stmt (def
) != block
)
1408 if (TREE_CODE (def
) == PHI_NODE
)
1415 /* Determine if the expression EXPR is valid in SET1 U SET2.
1416 ONLY SET2 CAN BE NULL.
1417 This means that we have a leader for each part of the expression
1418 (if it consists of values), or the expression is an SSA_NAME.
1419 For loads/calls, we also see if the vuses are killed in this block.
1421 NB: We never should run into a case where we have SSA_NAME +
1422 SSA_NAME or SSA_NAME + value. The sets valid_in_sets is called on,
1423 the ANTIC sets, will only ever have SSA_NAME's or value expressions
1424 (IE VALUE1 + VALUE2, *VALUE1, VALUE1 < VALUE2) */
1426 #define union_contains_value(SET1, SET2, VAL) \
1427 (bitmap_set_contains_value ((SET1), (VAL)) \
1428 || ((SET2) && bitmap_set_contains_value ((SET2), (VAL))))
1431 valid_in_sets (bitmap_set_t set1
, bitmap_set_t set2
, tree expr
,
1434 tree vh
= get_value_handle (expr
);
1435 switch (TREE_CODE_CLASS (TREE_CODE (expr
)))
1438 case tcc_comparison
:
1440 tree op1
= TREE_OPERAND (expr
, 0);
1441 tree op2
= TREE_OPERAND (expr
, 1);
1443 return union_contains_value (set1
, set2
, op1
)
1444 && union_contains_value (set1
, set2
, op2
);
1449 tree op1
= TREE_OPERAND (expr
, 0);
1450 return union_contains_value (set1
, set2
, op1
);
1453 case tcc_expression
:
1458 if (TREE_CODE (expr
) == CALL_EXPR
)
1460 tree fn
= CALL_EXPR_FN (expr
);
1461 tree sc
= CALL_EXPR_STATIC_CHAIN (expr
);
1463 call_expr_arg_iterator iter
;
1465 /* Check the non-argument operands first. */
1466 if (!union_contains_value (set1
, set2
, fn
)
1467 || (sc
&& !union_contains_value (set1
, set2
, sc
)))
1470 /* Now check the operands. */
1471 FOR_EACH_CALL_EXPR_ARG (arg
, iter
, expr
)
1473 if (!union_contains_value (set1
, set2
, arg
))
1476 return !value_dies_in_block_x (vh
, block
);
1483 if (TREE_CODE (expr
) == INDIRECT_REF
1484 || TREE_CODE (expr
) == COMPONENT_REF
1485 || TREE_CODE (expr
) == ARRAY_REF
)
1487 tree op0
= TREE_OPERAND (expr
, 0);
1488 gcc_assert (is_gimple_min_invariant (op0
)
1489 || TREE_CODE (op0
) == VALUE_HANDLE
);
1490 if (!union_contains_value (set1
, set2
, op0
))
1492 if (TREE_CODE (expr
) == ARRAY_REF
)
1494 tree op1
= TREE_OPERAND (expr
, 1);
1495 tree op2
= TREE_OPERAND (expr
, 2);
1496 tree op3
= TREE_OPERAND (expr
, 3);
1497 gcc_assert (is_gimple_min_invariant (op1
)
1498 || TREE_CODE (op1
) == VALUE_HANDLE
);
1499 if (!union_contains_value (set1
, set2
, op1
))
1501 gcc_assert (!op2
|| is_gimple_min_invariant (op2
)
1502 || TREE_CODE (op2
) == VALUE_HANDLE
);
1504 && !union_contains_value (set1
, set2
, op2
))
1506 gcc_assert (!op3
|| is_gimple_min_invariant (op3
)
1507 || TREE_CODE (op3
) == VALUE_HANDLE
);
1509 && !union_contains_value (set1
, set2
, op3
))
1512 return bitmap_set_contains_value (ANTIC_SAFE_LOADS (block
),
1514 || !value_dies_in_block_x (vh
, block
);
1519 case tcc_exceptional
:
1521 gcc_assert (TREE_CODE (expr
) == SSA_NAME
);
1522 return bitmap_set_contains_expr (AVAIL_OUT (block
), expr
);
1525 case tcc_declaration
:
1526 return !value_dies_in_block_x (vh
, block
);
1529 /* No other cases should be encountered. */
1534 /* Clean the set of expressions that are no longer valid in SET1 or
1535 SET2. This means expressions that are made up of values we have no
1536 leaders for in SET1 or SET2. This version is used for partial
1537 anticipation, which means it is not valid in either ANTIC_IN or
1541 dependent_clean (bitmap_set_t set1
, bitmap_set_t set2
, basic_block block
)
1543 VEC (tree
, heap
) *exprs
= sorted_array_from_bitmap_set (set1
);
1547 for (i
= 0; VEC_iterate (tree
, exprs
, i
, expr
); i
++)
1549 if (!valid_in_sets (set1
, set2
, expr
, block
))
1550 bitmap_remove_from_set (set1
, expr
);
1552 VEC_free (tree
, heap
, exprs
);
1555 /* Clean the set of expressions that are no longer valid in SET. This
1556 means expressions that are made up of values we have no leaders for
1560 clean (bitmap_set_t set
, basic_block block
)
1562 VEC (tree
, heap
) *exprs
= sorted_array_from_bitmap_set (set
);
1566 for (i
= 0; VEC_iterate (tree
, exprs
, i
, expr
); i
++)
1568 if (!valid_in_sets (set
, NULL
, expr
, block
))
1569 bitmap_remove_from_set (set
, expr
);
1571 VEC_free (tree
, heap
, exprs
);
1574 static sbitmap has_abnormal_preds
;
1576 /* List of blocks that may have changed during ANTIC computation and
1577 thus need to be iterated over. */
1579 static sbitmap changed_blocks
;
1581 /* Decide whether to defer a block for a later iteration, or PHI
1582 translate SOURCE to DEST using phis in PHIBLOCK. Return false if we
1583 should defer the block, and true if we processed it. */
1586 defer_or_phi_translate_block (bitmap_set_t dest
, bitmap_set_t source
,
1587 basic_block block
, basic_block phiblock
)
1589 if (!BB_VISITED (phiblock
))
1591 SET_BIT (changed_blocks
, block
->index
);
1592 BB_VISITED (block
) = 0;
1593 BB_DEFERRED (block
) = 1;
1597 phi_translate_set (dest
, source
, block
, phiblock
);
1601 /* Compute the ANTIC set for BLOCK.
1603 If succs(BLOCK) > 1 then
1604 ANTIC_OUT[BLOCK] = intersection of ANTIC_IN[b] for all succ(BLOCK)
1605 else if succs(BLOCK) == 1 then
1606 ANTIC_OUT[BLOCK] = phi_translate (ANTIC_IN[succ(BLOCK)])
1608 ANTIC_IN[BLOCK] = clean(ANTIC_OUT[BLOCK] U EXP_GEN[BLOCK] - TMP_GEN[BLOCK])
1612 compute_antic_aux (basic_block block
, bool block_has_abnormal_pred_edge
)
1614 bool changed
= false;
1615 bitmap_set_t S
, old
, ANTIC_OUT
;
1621 old
= ANTIC_OUT
= S
= NULL
;
1622 BB_VISITED (block
) = 1;
1624 /* If any edges from predecessors are abnormal, antic_in is empty,
1626 if (block_has_abnormal_pred_edge
)
1627 goto maybe_dump_sets
;
1629 old
= ANTIC_IN (block
);
1630 ANTIC_OUT
= bitmap_set_new ();
1632 /* If the block has no successors, ANTIC_OUT is empty. */
1633 if (EDGE_COUNT (block
->succs
) == 0)
1635 /* If we have one successor, we could have some phi nodes to
1636 translate through. */
1637 else if (single_succ_p (block
))
1639 basic_block succ_bb
= single_succ (block
);
1641 /* We trade iterations of the dataflow equations for having to
1642 phi translate the maximal set, which is incredibly slow
1643 (since the maximal set often has 300+ members, even when you
1644 have a small number of blocks).
1645 Basically, we defer the computation of ANTIC for this block
1646 until we have processed it's successor, which will inevitably
1647 have a *much* smaller set of values to phi translate once
1648 clean has been run on it.
1649 The cost of doing this is that we technically perform more
1650 iterations, however, they are lower cost iterations.
1652 Timings for PRE on tramp3d-v4:
1653 without maximal set fix: 11 seconds
1654 with maximal set fix/without deferring: 26 seconds
1655 with maximal set fix/with deferring: 11 seconds
1658 if (!defer_or_phi_translate_block (ANTIC_OUT
, ANTIC_IN (succ_bb
),
1662 goto maybe_dump_sets
;
1665 /* If we have multiple successors, we take the intersection of all of
1666 them. Note that in the case of loop exit phi nodes, we may have
1667 phis to translate through. */
1670 VEC(basic_block
, heap
) * worklist
;
1672 basic_block bprime
, first
;
1674 worklist
= VEC_alloc (basic_block
, heap
, EDGE_COUNT (block
->succs
));
1675 FOR_EACH_EDGE (e
, ei
, block
->succs
)
1676 VEC_quick_push (basic_block
, worklist
, e
->dest
);
1677 first
= VEC_index (basic_block
, worklist
, 0);
1679 if (phi_nodes (first
))
1681 bitmap_set_t from
= ANTIC_IN (first
);
1683 if (!BB_VISITED (first
))
1685 phi_translate_set (ANTIC_OUT
, from
, block
, first
);
1689 if (!BB_VISITED (first
))
1690 bitmap_set_copy (ANTIC_OUT
, maximal_set
);
1692 bitmap_set_copy (ANTIC_OUT
, ANTIC_IN (first
));
1695 for (i
= 1; VEC_iterate (basic_block
, worklist
, i
, bprime
); i
++)
1697 if (phi_nodes (bprime
))
1699 bitmap_set_t tmp
= bitmap_set_new ();
1700 bitmap_set_t from
= ANTIC_IN (bprime
);
1702 if (!BB_VISITED (bprime
))
1704 phi_translate_set (tmp
, from
, block
, bprime
);
1705 bitmap_set_and (ANTIC_OUT
, tmp
);
1706 bitmap_set_free (tmp
);
1710 if (!BB_VISITED (bprime
))
1711 bitmap_set_and (ANTIC_OUT
, maximal_set
);
1713 bitmap_set_and (ANTIC_OUT
, ANTIC_IN (bprime
));
1716 VEC_free (basic_block
, heap
, worklist
);
1719 /* Generate ANTIC_OUT - TMP_GEN. */
1720 S
= bitmap_set_subtract (ANTIC_OUT
, TMP_GEN (block
));
1722 /* Start ANTIC_IN with EXP_GEN - TMP_GEN. */
1723 ANTIC_IN (block
) = bitmap_set_subtract (EXP_GEN (block
),
1726 /* Then union in the ANTIC_OUT - TMP_GEN values,
1727 to get ANTIC_OUT U EXP_GEN - TMP_GEN */
1728 FOR_EACH_EXPR_ID_IN_SET (S
, bii
, bi
)
1729 bitmap_value_insert_into_set (ANTIC_IN (block
),
1730 expression_for_id (bii
));
1732 clean (ANTIC_IN (block
), block
);
1734 /* !old->expressions can happen when we deferred a block. */
1735 if (!old
->expressions
|| !bitmap_set_equal (old
, ANTIC_IN (block
)))
1738 SET_BIT (changed_blocks
, block
->index
);
1739 FOR_EACH_EDGE (e
, ei
, block
->preds
)
1740 SET_BIT (changed_blocks
, e
->src
->index
);
1743 RESET_BIT (changed_blocks
, block
->index
);
1746 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1748 if (!BB_DEFERRED (block
) || BB_VISITED (block
))
1751 print_bitmap_set (dump_file
, ANTIC_OUT
, "ANTIC_OUT", block
->index
);
1753 if (ANTIC_SAFE_LOADS (block
))
1754 print_bitmap_set (dump_file
, ANTIC_SAFE_LOADS (block
),
1755 "ANTIC_SAFE_LOADS", block
->index
);
1756 print_bitmap_set (dump_file
, ANTIC_IN (block
), "ANTIC_IN",
1760 print_bitmap_set (dump_file
, S
, "S", block
->index
);
1765 "Block %d was deferred for a future iteration.\n",
1770 bitmap_set_free (old
);
1772 bitmap_set_free (S
);
1774 bitmap_set_free (ANTIC_OUT
);
1778 /* Compute PARTIAL_ANTIC for BLOCK.
1780 If succs(BLOCK) > 1 then
1781 PA_OUT[BLOCK] = value wise union of PA_IN[b] + all ANTIC_IN not
1782 in ANTIC_OUT for all succ(BLOCK)
1783 else if succs(BLOCK) == 1 then
1784 PA_OUT[BLOCK] = phi_translate (PA_IN[succ(BLOCK)])
1786 PA_IN[BLOCK] = dependent_clean(PA_OUT[BLOCK] - TMP_GEN[BLOCK]
1791 compute_partial_antic_aux (basic_block block
,
1792 bool block_has_abnormal_pred_edge
)
1794 bool changed
= false;
1795 bitmap_set_t old_PA_IN
;
1796 bitmap_set_t PA_OUT
;
1800 old_PA_IN
= PA_OUT
= NULL
;
1802 /* If any edges from predecessors are abnormal, antic_in is empty,
1804 if (block_has_abnormal_pred_edge
)
1805 goto maybe_dump_sets
;
1807 old_PA_IN
= PA_IN (block
);
1808 PA_OUT
= bitmap_set_new ();
1810 /* If the block has no successors, ANTIC_OUT is empty. */
1811 if (EDGE_COUNT (block
->succs
) == 0)
1813 /* If we have one successor, we could have some phi nodes to
1814 translate through. Note that we can't phi translate across DFS
1815 back edges in partial antic, because it uses a union operation
1816 on the successors. For recurrences like IV's, we will end up generating a
1817 new value in the set on each go around (i + 3 (VH.1) VH.1 + 1
1818 (VH.2), VH.2 + 1 (VH.3), etc), forever. */
1819 else if (single_succ_p (block
))
1821 basic_block succ
= single_succ (block
);
1822 if (!(single_succ_edge (block
)->flags
& EDGE_DFS_BACK
))
1823 phi_translate_set (PA_OUT
, PA_IN (succ
), block
, succ
);
1825 /* If we have multiple successors, we take the union of all of
1829 VEC(basic_block
, heap
) * worklist
;
1833 worklist
= VEC_alloc (basic_block
, heap
, EDGE_COUNT (block
->succs
));
1834 FOR_EACH_EDGE (e
, ei
, block
->succs
)
1836 if (e
->flags
& EDGE_DFS_BACK
)
1838 VEC_quick_push (basic_block
, worklist
, e
->dest
);
1840 if (VEC_length (basic_block
, worklist
) > 0)
1842 for (i
= 0; VEC_iterate (basic_block
, worklist
, i
, bprime
); i
++)
1847 FOR_EACH_EXPR_ID_IN_SET (ANTIC_IN (bprime
), i
, bi
)
1848 bitmap_value_insert_into_set (PA_OUT
,
1849 expression_for_id (i
));
1850 if (phi_nodes (bprime
))
1852 bitmap_set_t pa_in
= bitmap_set_new ();
1853 phi_translate_set (pa_in
, PA_IN (bprime
), block
, bprime
);
1854 FOR_EACH_EXPR_ID_IN_SET (pa_in
, i
, bi
)
1855 bitmap_value_insert_into_set (PA_OUT
,
1856 expression_for_id (i
));
1857 bitmap_set_free (pa_in
);
1860 FOR_EACH_EXPR_ID_IN_SET (PA_IN (bprime
), i
, bi
)
1861 bitmap_value_insert_into_set (PA_OUT
,
1862 expression_for_id (i
));
1865 VEC_free (basic_block
, heap
, worklist
);
1868 /* PA_IN starts with PA_OUT - TMP_GEN.
1869 Then we subtract things from ANTIC_IN. */
1870 PA_IN (block
) = bitmap_set_subtract (PA_OUT
, TMP_GEN (block
));
1872 /* For partial antic, we want to put back in the phi results, since
1873 we will properly avoid making them partially antic over backedges. */
1874 bitmap_ior_into (PA_IN (block
)->values
, PHI_GEN (block
)->values
);
1875 bitmap_ior_into (PA_IN (block
)->expressions
, PHI_GEN (block
)->expressions
);
1877 /* PA_IN[block] = PA_IN[block] - ANTIC_IN[block] */
1878 bitmap_set_subtract_values (PA_IN (block
), ANTIC_IN (block
));
1880 dependent_clean (PA_IN (block
), ANTIC_IN (block
), block
);
1882 if (!bitmap_set_equal (old_PA_IN
, PA_IN (block
)))
1885 SET_BIT (changed_blocks
, block
->index
);
1886 FOR_EACH_EDGE (e
, ei
, block
->preds
)
1887 SET_BIT (changed_blocks
, e
->src
->index
);
1890 RESET_BIT (changed_blocks
, block
->index
);
1893 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1896 print_bitmap_set (dump_file
, PA_OUT
, "PA_OUT", block
->index
);
1898 print_bitmap_set (dump_file
, PA_IN (block
), "PA_IN", block
->index
);
1901 bitmap_set_free (old_PA_IN
);
1903 bitmap_set_free (PA_OUT
);
1907 /* Compute ANTIC and partial ANTIC sets. */
1910 compute_antic (void)
1912 bool changed
= true;
1913 int num_iterations
= 0;
1917 /* If any predecessor edges are abnormal, we punt, so antic_in is empty.
1918 We pre-build the map of blocks with incoming abnormal edges here. */
1919 has_abnormal_preds
= sbitmap_alloc (last_basic_block
);
1920 sbitmap_zero (has_abnormal_preds
);
1927 FOR_EACH_EDGE (e
, ei
, block
->preds
)
1929 e
->flags
&= ~EDGE_DFS_BACK
;
1930 if (e
->flags
& EDGE_ABNORMAL
)
1932 SET_BIT (has_abnormal_preds
, block
->index
);
1937 BB_VISITED (block
) = 0;
1938 BB_DEFERRED (block
) = 0;
1939 /* While we are here, give empty ANTIC_IN sets to each block. */
1940 ANTIC_IN (block
) = bitmap_set_new ();
1941 PA_IN (block
) = bitmap_set_new ();
1944 /* At the exit block we anticipate nothing. */
1945 ANTIC_IN (EXIT_BLOCK_PTR
) = bitmap_set_new ();
1946 BB_VISITED (EXIT_BLOCK_PTR
) = 1;
1947 PA_IN (EXIT_BLOCK_PTR
) = bitmap_set_new ();
1949 changed_blocks
= sbitmap_alloc (last_basic_block
+ 1);
1950 sbitmap_ones (changed_blocks
);
1953 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1954 fprintf (dump_file
, "Starting iteration %d\n", num_iterations
);
1957 for (i
= 0; i
< last_basic_block
- NUM_FIXED_BLOCKS
; i
++)
1959 if (TEST_BIT (changed_blocks
, postorder
[i
]))
1961 basic_block block
= BASIC_BLOCK (postorder
[i
]);
1962 changed
|= compute_antic_aux (block
,
1963 TEST_BIT (has_abnormal_preds
,
1967 /* Theoretically possible, but *highly* unlikely. */
1968 gcc_assert (num_iterations
< 50);
1971 if (dump_file
&& (dump_flags
& TDF_STATS
))
1972 fprintf (dump_file
, "compute_antic required %d iterations\n",
1975 if (do_partial_partial
)
1977 sbitmap_ones (changed_blocks
);
1978 mark_dfs_back_edges ();
1983 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1984 fprintf (dump_file
, "Starting iteration %d\n", num_iterations
);
1987 for (i
= 0; i
< last_basic_block
- NUM_FIXED_BLOCKS
; i
++)
1989 if (TEST_BIT (changed_blocks
, postorder
[i
]))
1991 basic_block block
= BASIC_BLOCK (postorder
[i
]);
1993 |= compute_partial_antic_aux (block
,
1994 TEST_BIT (has_abnormal_preds
,
1998 /* Theoretically possible, but *highly* unlikely. */
1999 gcc_assert (num_iterations
< 50);
2001 if (dump_file
&& (dump_flags
& TDF_STATS
))
2002 fprintf (dump_file
, "compute_partial_antic required %d iterations\n",
2005 sbitmap_free (has_abnormal_preds
);
2006 sbitmap_free (changed_blocks
);
2010 ANTIC_SAFE_LOADS are those loads generated in a block that actually
2011 occur before any kill to their vuses in the block, and thus, are
2012 safe at the top of the block. This function computes the set by
2013 walking the EXP_GEN set for the block, and checking the VUSES.
2015 This set could be computed as ANTIC calculation is proceeding, but
2016 but because it does not actually change during that computation, it is
2017 quicker to pre-calculate the results and use them than to do it on
2018 the fly (particularly in the presence of multiple iteration). */
2021 compute_antic_safe (void)
2029 FOR_EACH_EXPR_ID_IN_SET (EXP_GEN (bb
), i
, bi
)
2031 tree expr
= expression_for_id (i
);
2032 if (REFERENCE_CLASS_P (expr
))
2034 tree vh
= get_value_handle (expr
);
2035 tree maybe
= bitmap_find_leader (AVAIL_OUT (bb
), vh
);
2043 stmt
= SSA_NAME_DEF_STMT (maybe
);
2045 FOR_EACH_SSA_TREE_OPERAND (vuse
, stmt
, i
,
2046 SSA_OP_VIRTUAL_USES
)
2048 tree def
= SSA_NAME_DEF_STMT (vuse
);
2050 if (bb_for_stmt (def
) != bb
)
2053 /* See if the vuse is defined by a statement that
2054 comes before us in the block. Phi nodes are not
2055 stores, so they do not count. */
2056 if (TREE_CODE (def
) != PHI_NODE
2057 && stmt_ann (def
)->uid
< stmt_ann (stmt
)->uid
)
2065 if (ANTIC_SAFE_LOADS (bb
) == NULL
)
2066 ANTIC_SAFE_LOADS (bb
) = bitmap_set_new ();
2067 bitmap_value_insert_into_set (ANTIC_SAFE_LOADS (bb
),
2075 /* Return true if we can value number the call in STMT. This is true
2076 if we have a pure or constant call. */
2079 can_value_number_call (tree stmt
)
2081 tree call
= get_call_expr_in (stmt
);
2083 if (call_expr_flags (call
) & (ECF_PURE
| ECF_CONST
))
2088 /* Return true if OP is a tree which we can perform value numbering
2092 can_value_number_operation (tree op
)
2094 return UNARY_CLASS_P (op
)
2095 || BINARY_CLASS_P (op
)
2096 || COMPARISON_CLASS_P (op
)
2097 || REFERENCE_CLASS_P (op
)
2098 || (TREE_CODE (op
) == CALL_EXPR
2099 && can_value_number_call (op
));
2103 /* Return true if OP is a tree which we can perform PRE on
2104 on. This may not match the operations we can value number, but in
2105 a perfect world would. */
2108 can_PRE_operation (tree op
)
2110 return UNARY_CLASS_P (op
)
2111 || BINARY_CLASS_P (op
)
2112 || COMPARISON_CLASS_P (op
)
2113 || TREE_CODE (op
) == INDIRECT_REF
2114 || TREE_CODE (op
) == COMPONENT_REF
2115 || TREE_CODE (op
) == CALL_EXPR
2116 || TREE_CODE (op
) == ARRAY_REF
;
2120 /* Inserted expressions are placed onto this worklist, which is used
2121 for performing quick dead code elimination of insertions we made
2122 that didn't turn out to be necessary. */
2123 static VEC(tree
,heap
) *inserted_exprs
;
2125 /* Pool allocated fake store expressions are placed onto this
2126 worklist, which, after performing dead code elimination, is walked
2127 to see which expressions need to be put into GC'able memory */
2128 static VEC(tree
, heap
) *need_creation
;
2130 /* For COMPONENT_REF's and ARRAY_REF's, we can't have any intermediates for the
2131 COMPONENT_REF or INDIRECT_REF or ARRAY_REF portion, because we'd end up with
2132 trying to rename aggregates into ssa form directly, which is a no
2135 Thus, this routine doesn't create temporaries, it just builds a
2136 single access expression for the array, calling
2137 find_or_generate_expression to build the innermost pieces.
2139 This function is a subroutine of create_expression_by_pieces, and
2140 should not be called on it's own unless you really know what you
2144 create_component_ref_by_pieces (basic_block block
, tree expr
, tree stmts
)
2149 if (TREE_CODE (genop
) == VALUE_HANDLE
)
2151 tree found
= bitmap_find_leader (AVAIL_OUT (block
), expr
);
2156 if (TREE_CODE (genop
) == VALUE_HANDLE
)
2158 bitmap_set_t exprset
= VALUE_HANDLE_EXPR_SET (expr
);
2159 unsigned int firstbit
= bitmap_first_set_bit (exprset
->expressions
);
2160 genop
= expression_for_id (firstbit
);
2163 switch TREE_CODE (genop
)
2169 op0
= create_component_ref_by_pieces (block
,
2170 TREE_OPERAND (genop
, 0),
2172 op1
= TREE_OPERAND (genop
, 1);
2173 if (TREE_CODE (op1
) == VALUE_HANDLE
)
2174 op1
= find_or_generate_expression (block
, op1
, stmts
);
2175 op2
= TREE_OPERAND (genop
, 2);
2176 if (op2
&& TREE_CODE (op2
) == VALUE_HANDLE
)
2177 op2
= find_or_generate_expression (block
, op2
, stmts
);
2178 op3
= TREE_OPERAND (genop
, 3);
2179 if (op3
&& TREE_CODE (op3
) == VALUE_HANDLE
)
2180 op3
= find_or_generate_expression (block
, op3
, stmts
);
2181 folded
= build4 (ARRAY_REF
, TREE_TYPE (genop
), op0
, op1
,
2187 bitmap_set_t exprset
;
2188 unsigned int firstbit
;
2191 op0
= create_component_ref_by_pieces (block
,
2192 TREE_OPERAND (genop
, 0),
2194 exprset
= VALUE_HANDLE_EXPR_SET (TREE_OPERAND (genop
, 1));
2195 firstbit
= bitmap_first_set_bit (exprset
->expressions
);
2196 op1
= expression_for_id (firstbit
);
2197 folded
= fold_build3 (COMPONENT_REF
, TREE_TYPE (genop
), op0
, op1
,
2204 tree op1
= TREE_OPERAND (genop
, 0);
2205 tree genop1
= find_or_generate_expression (block
, op1
, stmts
);
2207 folded
= fold_build1 (TREE_CODE (genop
), TREE_TYPE (genop
),
2225 /* Find a leader for an expression, or generate one using
2226 create_expression_by_pieces if it's ANTIC but
2228 BLOCK is the basic_block we are looking for leaders in.
2229 EXPR is the expression to find a leader or generate for.
2230 STMTS is the statement list to put the inserted expressions on.
2231 Returns the SSA_NAME of the LHS of the generated expression or the
2235 find_or_generate_expression (basic_block block
, tree expr
, tree stmts
)
2237 tree genop
= bitmap_find_leader (AVAIL_OUT (block
), expr
);
2239 /* If it's still NULL, it must be a complex expression, so generate
2243 bitmap_set_t exprset
= VALUE_HANDLE_EXPR_SET (expr
);
2244 unsigned int firstbit
= bitmap_first_set_bit (exprset
->expressions
);
2246 genop
= expression_for_id (firstbit
);
2247 gcc_assert (can_PRE_operation (genop
));
2248 genop
= create_expression_by_pieces (block
, genop
, stmts
);
2253 #define NECESSARY(stmt) stmt->base.asm_written_flag
2254 /* Create an expression in pieces, so that we can handle very complex
2255 expressions that may be ANTIC, but not necessary GIMPLE.
2256 BLOCK is the basic block the expression will be inserted into,
2257 EXPR is the expression to insert (in value form)
2258 STMTS is a statement list to append the necessary insertions into.
2260 This function will die if we hit some value that shouldn't be
2261 ANTIC but is (IE there is no leader for it, or its components).
2262 This function may also generate expressions that are themselves
2263 partially or fully redundant. Those that are will be either made
2264 fully redundant during the next iteration of insert (for partially
2265 redundant ones), or eliminated by eliminate (for fully redundant
2269 create_expression_by_pieces (basic_block block
, tree expr
, tree stmts
)
2272 tree folded
, forced_stmts
, newexpr
;
2274 tree_stmt_iterator tsi
;
2276 switch (TREE_CODE_CLASS (TREE_CODE (expr
)))
2285 gcc_assert (TREE_CODE (expr
) == CALL_EXPR
);
2287 fn
= CALL_EXPR_FN (expr
);
2288 sc
= CALL_EXPR_STATIC_CHAIN (expr
);
2290 genfn
= find_or_generate_expression (block
, fn
, stmts
);
2292 nargs
= call_expr_nargs (expr
);
2293 buffer
= alloca (nargs
* sizeof (tree
));
2295 for (i
= 0; i
< nargs
; i
++)
2297 tree arg
= CALL_EXPR_ARG (expr
, i
);
2298 buffer
[i
] = find_or_generate_expression (block
, arg
, stmts
);
2301 folded
= build_call_array (TREE_TYPE (expr
), genfn
, nargs
, buffer
);
2303 CALL_EXPR_STATIC_CHAIN (folded
) =
2304 find_or_generate_expression (block
, sc
, stmts
);
2305 folded
= fold (folded
);
2311 if (TREE_CODE (expr
) == COMPONENT_REF
2312 || TREE_CODE (expr
) == ARRAY_REF
)
2314 folded
= create_component_ref_by_pieces (block
, expr
, stmts
);
2318 tree op1
= TREE_OPERAND (expr
, 0);
2319 tree genop1
= find_or_generate_expression (block
, op1
, stmts
);
2321 folded
= fold_build1 (TREE_CODE (expr
), TREE_TYPE (expr
),
2328 case tcc_comparison
:
2330 tree op1
= TREE_OPERAND (expr
, 0);
2331 tree op2
= TREE_OPERAND (expr
, 1);
2332 tree genop1
= find_or_generate_expression (block
, op1
, stmts
);
2333 tree genop2
= find_or_generate_expression (block
, op2
, stmts
);
2334 folded
= fold_build2 (TREE_CODE (expr
), TREE_TYPE (expr
),
2341 tree op1
= TREE_OPERAND (expr
, 0);
2342 tree genop1
= find_or_generate_expression (block
, op1
, stmts
);
2343 folded
= fold_build1 (TREE_CODE (expr
), TREE_TYPE (expr
),
2352 /* Force the generated expression to be a sequence of GIMPLE
2354 We have to call unshare_expr because force_gimple_operand may
2355 modify the tree we pass to it. */
2356 newexpr
= force_gimple_operand (unshare_expr (folded
), &forced_stmts
,
2359 /* If we have any intermediate expressions to the value sets, add them
2360 to the value sets and chain them on in the instruction stream. */
2363 tsi
= tsi_start (forced_stmts
);
2364 for (; !tsi_end_p (tsi
); tsi_next (&tsi
))
2366 tree stmt
= tsi_stmt (tsi
);
2367 tree forcedname
= GIMPLE_STMT_OPERAND (stmt
, 0);
2368 tree forcedexpr
= GIMPLE_STMT_OPERAND (stmt
, 1);
2369 tree val
= vn_lookup_or_add (forcedexpr
, NULL
);
2371 VEC_safe_push (tree
, heap
, inserted_exprs
, stmt
);
2372 vn_add (forcedname
, val
);
2373 bitmap_value_replace_in_set (NEW_SETS (block
), forcedname
);
2374 bitmap_value_replace_in_set (AVAIL_OUT (block
), forcedname
);
2375 mark_symbols_for_renaming (stmt
);
2377 tsi
= tsi_last (stmts
);
2378 tsi_link_after (&tsi
, forced_stmts
, TSI_CONTINUE_LINKING
);
2381 /* Build and insert the assignment of the end result to the temporary
2382 that we will return. */
2383 if (!pretemp
|| TREE_TYPE (expr
) != TREE_TYPE (pretemp
))
2385 pretemp
= create_tmp_var (TREE_TYPE (expr
), "pretmp");
2386 get_var_ann (pretemp
);
2390 add_referenced_var (temp
);
2392 if (TREE_CODE (TREE_TYPE (expr
)) == COMPLEX_TYPE
2393 || TREE_CODE (TREE_TYPE (expr
)) == VECTOR_TYPE
)
2394 DECL_GIMPLE_REG_P (temp
) = 1;
2396 newexpr
= build_gimple_modify_stmt (temp
, newexpr
);
2397 name
= make_ssa_name (temp
, newexpr
);
2398 GIMPLE_STMT_OPERAND (newexpr
, 0) = name
;
2399 NECESSARY (newexpr
) = 0;
2401 tsi
= tsi_last (stmts
);
2402 tsi_link_after (&tsi
, newexpr
, TSI_CONTINUE_LINKING
);
2403 VEC_safe_push (tree
, heap
, inserted_exprs
, newexpr
);
2405 /* All the symbols in NEWEXPR should be put into SSA form. */
2406 mark_symbols_for_renaming (newexpr
);
2408 /* Add a value handle to the temporary.
2409 The value may already exist in either NEW_SETS, or AVAIL_OUT, because
2410 we are creating the expression by pieces, and this particular piece of
2411 the expression may have been represented. There is no harm in replacing
2413 v
= get_value_handle (expr
);
2415 get_or_alloc_expression_id (name
);
2416 bitmap_value_replace_in_set (NEW_SETS (block
), name
);
2417 bitmap_value_replace_in_set (AVAIL_OUT (block
), name
);
2419 pre_stats
.insertions
++;
2420 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2422 fprintf (dump_file
, "Inserted ");
2423 print_generic_expr (dump_file
, newexpr
, 0);
2424 fprintf (dump_file
, " in predecessor %d\n", block
->index
);
/* NOTE(review): this region is a damaged extraction -- each original source
   line is split across several physical lines and the embedded numbering
   shows gaps (e.g. 2434-2438, 2441, 2443-2446 are missing), so braces,
   declarations and returns are absent from view.  Only comments were added
   here; every original token is unchanged.  Re-extract the pristine file
   before attempting code-level edits.  */
/* Inserts the per-predecessor available expressions from AVAIL into the
   predecessors of BLOCK, then merges them with a new PHI node that is given
   the same value handle.  Returns true if anything new was inserted.  */
2430 /* Insert the to-be-made-available values of expression EXPRNUM for each
2431 predecessor, stored in AVAIL, into the predecessors of BLOCK, and
2432 merge the result with a phi node, given the same value handle as
2433 NODE. Return true if we have inserted new stuff. */
2436 insert_into_preds_of_block (basic_block block
, unsigned int exprnum
,
2439 tree expr
= expression_for_id (exprnum
);
2440 tree val
= get_value_handle (expr
);
2442 bool insertions
= false;
/* The type is taken from the first predecessor's available expression;
   presumably all predecessors agree on type -- TODO confirm in pristine
   source.  */
2447 tree type
= TREE_TYPE (avail
[EDGE_PRED (block
, 0)->src
->index
]);
2450 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2452 fprintf (dump_file
, "Found partial redundancy for expression ");
2453 print_generic_expr (dump_file
, expr
, 0);
2454 fprintf (dump_file
, " (");
2455 print_generic_expr (dump_file
, val
, 0);
2456 fprintf (dump_file
, ")");
2457 fprintf (dump_file
, "\n");
2460 /* Make sure we aren't creating an induction variable. */
2461 if (block
->loop_depth
> 0 && EDGE_COUNT (block
->preds
) == 2
2462 && TREE_CODE_CLASS (TREE_CODE (expr
)) != tcc_reference
)
2464 bool firstinsideloop
= false;
2465 bool secondinsideloop
= false;
2466 firstinsideloop
= flow_bb_inside_loop_p (block
->loop_father
,
2467 EDGE_PRED (block
, 0)->src
);
2468 secondinsideloop
= flow_bb_inside_loop_p (block
->loop_father
,
2469 EDGE_PRED (block
, 1)->src
);
2470 /* Induction variables only have one edge inside the loop. */
/* XOR: exactly one of the two predecessors is inside the loop.  */
2471 if (firstinsideloop
^ secondinsideloop
)
2473 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2474 fprintf (dump_file
, "Skipping insertion of phi for partial redundancy: Looks like an induction variable\n");
2480 /* Make the necessary insertions. */
2481 FOR_EACH_EDGE (pred
, ei
, block
->preds
)
2483 tree stmts
= alloc_stmt_list ();
2486 eprime
= avail
[bprime
->index
];
2488 if (can_PRE_operation (eprime
))
2490 builtexpr
= create_expression_by_pieces (bprime
,
/* Stmts built for the predecessor are placed on the edge; abnormal
   edges must never receive insertions.  */
2493 gcc_assert (!(pred
->flags
& EDGE_ABNORMAL
));
2494 bsi_insert_on_edge (pred
, stmts
);
2495 avail
[bprime
->index
] = builtexpr
;
2499 /* If we didn't want a phi node, and we made insertions, we still have
2500 inserted new stuff, and thus return true. If we didn't want a phi node,
2501 and didn't make insertions, we haven't added anything new, so return
2503 if (nophi
&& insertions
)
2505 else if (nophi
&& !insertions
)
2508 /* Now build a phi for the new variable. */
/* Reuse the cached temporary when its type matches; otherwise make a
   fresh "prephitmp" variable.  */
2509 if (!prephitemp
|| TREE_TYPE (prephitemp
) != type
)
2511 prephitemp
= create_tmp_var (type
, "prephitmp");
2512 get_var_ann (prephitemp
);
2516 add_referenced_var (temp
);
2519 if (TREE_CODE (type
) == COMPLEX_TYPE
2520 || TREE_CODE (type
) == VECTOR_TYPE
)
2521 DECL_GIMPLE_REG_P (temp
) = 1;
2522 temp
= create_phi_node (temp
, block
);
2524 NECESSARY (temp
) = 0;
2525 VEC_safe_push (tree
, heap
, inserted_exprs
, temp
);
2526 FOR_EACH_EDGE (pred
, ei
, block
->preds
)
2527 add_phi_arg (temp
, avail
[pred
->src
->index
], pred
);
2529 vn_add (PHI_RESULT (temp
), val
);
2531 /* The value should *not* exist in PHI_GEN, or else we wouldn't be doing
2532 this insertion, since we test for the existence of this value in PHI_GEN
2533 before proceeding with the partial redundancy checks in insert_aux.
2535 The value may exist in AVAIL_OUT, in particular, it could be represented
2536 by the expression we are trying to eliminate, in which case we want the
2537 replacement to occur. If it's not existing in AVAIL_OUT, we want it
2540 Similarly, to the PHI_GEN case, the value should not exist in NEW_SETS of
2541 this block, because if it did, it would have existed in our dominator's
2542 AVAIL_OUT, and would have been skipped due to the full redundancy check.
2545 bitmap_insert_into_set (PHI_GEN (block
),
2547 bitmap_value_replace_in_set (AVAIL_OUT (block
),
2549 bitmap_insert_into_set (NEW_SETS (block
),
2552 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2554 fprintf (dump_file
, "Created phi ");
2555 print_generic_expr (dump_file
, temp
, 0);
2556 fprintf (dump_file
, " in block %d\n", block
->index
);
2564 /* Perform insertion of partially redundant values.
2565 For BLOCK, do the following:
2566 1. Propagate the NEW_SETS of the dominator into the current block.
2567 If the block has multiple predecessors,
2568 2a. Iterate over the ANTIC expressions for the block to see if
2569 any of them are partially redundant.
2570 2b. If so, insert them into the necessary predecessors to make
2571 the expression fully redundant.
2572 2c. Insert a new PHI merging the values of the predecessors.
2573 2d. Insert the new PHI, and the new expressions, into the
2575 3. Recursively call ourselves on the dominator children of BLOCK.
2577 Steps 1, 2a, and 3 are done by insert_aux. 2b, 2c and 2d are done by
2578 do_regular_insertion and do_partial_insertion.
/* NOTE(review): damaged extraction -- statements split across lines, gaps in
   the embedded numbering (2584, 2587-2589, 2591, 2593-2595, ... missing).
   Comments only; all original tokens unchanged.  */
/* Step 2a/2b of insertion: walk the sorted ANTIC_IN expressions of BLOCK,
   find those partially redundant across BLOCK's predecessors, and make them
   fully redundant via insert_into_preds_of_block.  DOM is used for the full
   redundancy check against AVAIL_OUT (dom).  */
2583 do_regular_insertion (basic_block block
, basic_block dom
)
2585 bool new_stuff
= false;
2586 VEC (tree
, heap
) *exprs
= sorted_array_from_bitmap_set (ANTIC_IN (block
));
2590 for (i
= 0; VEC_iterate (tree
, exprs
, i
, expr
); i
++)
/* Only PRE-able, non-aggregate expressions are candidates.  */
2592 if (can_PRE_operation (expr
) && !AGGREGATE_TYPE_P (TREE_TYPE (expr
)))
2596 bool by_some
= false;
2597 bool cant_insert
= false;
2598 bool all_same
= true;
2599 tree first_s
= NULL
;
2602 tree eprime
= NULL_TREE
;
2605 val
= get_value_handle (expr
);
/* Skip values already generated by a PHI in this block, and values
   fully redundant with the dominator's AVAIL_OUT.  */
2606 if (bitmap_set_contains_value (PHI_GEN (block
), val
))
2608 if (bitmap_set_contains_value (AVAIL_OUT (dom
), val
))
2610 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2611 fprintf (dump_file
, "Found fully redundant value\n");
/* Per-predecessor leader table, indexed by src block index.  */
2615 avail
= XCNEWVEC (tree
, last_basic_block
);
2616 FOR_EACH_EDGE (pred
, ei
, block
->preds
)
2621 /* This can happen in the very weird case
2622 that our fake infinite loop edges have caused a
2623 critical edge to appear. */
2624 if (EDGE_CRITICAL_P (pred
))
2630 eprime
= phi_translate (expr
, ANTIC_IN (block
), NULL
,
2633 /* eprime will generally only be NULL if the
2634 value of the expression, translated
2635 through the PHI for this predecessor, is
2636 undefined. If that is the case, we can't
2637 make the expression fully redundant,
2638 because its value is undefined along a
2639 predecessor path. We can thus break out
2640 early because it doesn't matter what the
2641 rest of the results are. */
2648 eprime
= fully_constant_expression (eprime
);
2649 vprime
= get_value_handle (eprime
);
2650 gcc_assert (vprime
);
/* Look for an existing leader for the translated value in the
   predecessor's AVAIL_OUT.  */
2651 edoubleprime
= bitmap_find_leader (AVAIL_OUT (bprime
),
2653 if (edoubleprime
== NULL
)
2655 avail
[bprime
->index
] = eprime
;
2660 avail
[bprime
->index
] = edoubleprime
;
2662 if (first_s
== NULL
)
2663 first_s
= edoubleprime
;
2664 else if (!operand_equal_p (first_s
, edoubleprime
,
2669 /* If we can insert it, it's not the same value
2670 already existing along every predecessor, and
2671 it's defined by some predecessor, it is
2672 partially redundant. */
2673 if (!cant_insert
&& !all_same
&& by_some
)
2675 if (insert_into_preds_of_block (block
, get_expression_id (expr
),
2679 /* If all edges produce the same value and that value is
2680 an invariant, then the PHI has the same value on all
2681 edges. Note this. */
2682 else if (!cant_insert
&& all_same
&& eprime
2683 && is_gimple_min_invariant (eprime
)
2684 && !is_gimple_min_invariant (val
))
/* Constify: record the invariant as the value of every SSA name
   in the value handle's expression set.  */
2689 bitmap_set_t exprset
= VALUE_HANDLE_EXPR_SET (val
);
2690 FOR_EACH_EXPR_ID_IN_SET (exprset
, j
, bi
)
2692 tree expr
= expression_for_id (j
);
2693 if (TREE_CODE (expr
) == SSA_NAME
)
2695 vn_add (expr
, eprime
);
2696 pre_stats
.constified
++;
2704 VEC_free (tree
, heap
, exprs
);
2709 /* Perform insertion for partially anticipatable expressions. There
2710 is only one case we will perform insertion for these. This case is
2711 if the expression is partially anticipatable, and fully available.
2712 In this case, we know that putting it earlier will enable us to
2713 remove the later computation. */
/* NOTE(review): damaged extraction -- statements split across lines, gaps in
   the embedded numbering.  Comments only; all original tokens unchanged.  */
/* Like do_regular_insertion, but for partially anticipatable expressions
   (PA_IN): insertion happens only when the value is available along every
   predecessor (by_all), hoisting the computation earlier so the later one
   can be removed.  */
2717 do_partial_partial_insertion (basic_block block
, basic_block dom
)
2719 bool new_stuff
= false;
2720 VEC (tree
, heap
) *exprs
= sorted_array_from_bitmap_set (PA_IN (block
));
2724 for (i
= 0; VEC_iterate (tree
, exprs
, i
, expr
); i
++)
2726 if (can_PRE_operation (expr
) && !AGGREGATE_TYPE_P (TREE_TYPE (expr
)))
2731 bool cant_insert
= false;
2734 tree eprime
= NULL_TREE
;
2737 val
= get_value_handle (expr
);
/* Same skip conditions as the regular case: PHI-generated values and
   values fully redundant with the dominator.  */
2738 if (bitmap_set_contains_value (PHI_GEN (block
), val
))
2740 if (bitmap_set_contains_value (AVAIL_OUT (dom
), val
))
2743 avail
= XCNEWVEC (tree
, last_basic_block
);
2744 FOR_EACH_EDGE (pred
, ei
, block
->preds
)
2749 /* This can happen in the very weird case
2750 that our fake infinite loop edges have caused a
2751 critical edge to appear. */
2752 if (EDGE_CRITICAL_P (pred
))
2758 eprime
= phi_translate (expr
, ANTIC_IN (block
),
2762 /* eprime will generally only be NULL if the
2763 value of the expression, translated
2764 through the PHI for this predecessor, is
2765 undefined. If that is the case, we can't
2766 make the expression fully redundant,
2767 because its value is undefined along a
2768 predecessor path. We can thus break out
2769 early because it doesn't matter what the
2770 rest of the results are. */
2777 eprime
= fully_constant_expression (eprime
);
2778 vprime
= get_value_handle (eprime
);
2779 gcc_assert (vprime
);
2780 edoubleprime
= bitmap_find_leader (AVAIL_OUT (bprime
),
2782 if (edoubleprime
== NULL
)
2788 avail
[bprime
->index
] = edoubleprime
;
2792 /* If we can insert it, it's not the same value
2793 already existing along every predecessor, and
2794 it's defined by some predecessor, it is
2795 partially redundant. */
2796 if (!cant_insert
&& by_all
)
2798 pre_stats
.pa_insert
++;
2799 if (insert_into_preds_of_block (block
, get_expression_id (expr
),
2807 VEC_free (tree
, heap
, exprs
);
/* NOTE(review): damaged extraction -- gaps in the embedded numbering mean
   braces and some statements are missing from view.  Comments only; all
   original tokens unchanged.  */
/* Recursive driver of insertion: propagate the dominator's NEW_SETS into
   BLOCK, run the two insertion routines on multi-predecessor blocks, then
   recurse over BLOCK's dominator children.  Returns whether anything new
   was inserted anywhere in the subtree.  */
2812 insert_aux (basic_block block
)
2815 bool new_stuff
= false;
2820 dom
= get_immediate_dominator (CDI_DOMINATORS
, block
);
2825 bitmap_set_t newset
= NEW_SETS (dom
);
2828 /* Note that we need to value_replace both NEW_SETS, and
2829 AVAIL_OUT. For both the case of NEW_SETS, the value may be
2830 represented by some non-simple expression here that we want
2831 to replace it with. */
2832 FOR_EACH_EXPR_ID_IN_SET (newset
, i
, bi
)
2834 tree expr
= expression_for_id (i
);
2835 bitmap_value_replace_in_set (NEW_SETS (block
), expr
);
2836 bitmap_value_replace_in_set (AVAIL_OUT (block
), expr
);
/* Insertion is only meaningful at merge points (multiple preds).  */
2839 if (!single_pred_p (block
))
2841 new_stuff
|= do_regular_insertion (block
, dom
);
2842 if (do_partial_partial
)
2843 new_stuff
|= do_partial_partial_insertion (block
, dom
);
/* Recurse over the dominator tree children.  */
2847 for (son
= first_dom_son (CDI_DOMINATORS
, block
);
2849 son
= next_dom_son (CDI_DOMINATORS
, son
))
2851 new_stuff
|= insert_aux (son
);
/* NOTE(review): the function header line itself is missing from this
   extraction (numbering jumps from 2857 to 2862); from the visible body this
   is presumably the top-level `insert' routine -- confirm against pristine
   source.  It iterates insert_aux from the entry block to a fixed point,
   counting iterations.  Comments only; all original tokens unchanged.  */
2857 /* Perform insertion of partially redundant values. */
2862 bool new_stuff
= true;
2864 int num_iterations
= 0;
/* Fresh NEW_SETS for every block before iterating.  */
2867 NEW_SETS (bb
) = bitmap_set_new ();
2873 new_stuff
= insert_aux (ENTRY_BLOCK_PTR
);
2875 if (num_iterations
> 2 && dump_file
&& (dump_flags
& TDF_STATS
))
2876 fprintf (dump_file
, "insert required %d iterations\n", num_iterations
);
/* NOTE(review): damaged extraction -- the return type line and braces are
   missing from view.  Comments only; all original tokens unchanged.  */
/* Predicate: EXPR is an SSA name whose definition is an empty statement and
   whose underlying variable is not a PARM_DECL (parameters are live on
   entry, hence defined).  */
2880 /* Return true if VAR is an SSA variable with no defining statement in
2881 this procedure, *AND* isn't a live-on-entry parameter. */
2884 is_undefined_value (tree expr
)
2886 return (TREE_CODE (expr
) == SSA_NAME
2887 && IS_EMPTY_STMT (SSA_NAME_DEF_STMT (expr
))
2888 /* PARM_DECLs and hard registers are always defined. */
2889 && TREE_CODE (SSA_NAME_VAR (expr
)) != PARM_DECL
);
/* NOTE(review): damaged extraction -- parameter list is truncated (s2 is
   used below but its declaration line is missing from view) and braces are
   absent.  Comments only; all original tokens unchanged.  */
/* Value-number EXPR (via vn_lookup_or_add), add VAR to set S1 and VAR's
   value to set S2, and -- outside FRE, for non-PHI definitions -- to the
   maximal set.  */
2893 /* Given an SSA variable VAR and an expression EXPR, compute the value
2894 number for EXPR and create a value handle (VAL) for it. If VAR and
2895 EXPR are not the same, associate VAL with VAR. Finally, add VAR to
2896 S1 and its value handle to S2, and to the maximal set if
2897 ADD_TO_MAXIMAL is true.
2899 VUSES represent the virtual use operands associated with EXPR (if
2903 add_to_sets (tree var
, tree expr
, tree stmt
, bitmap_set_t s1
,
2906 tree val
= vn_lookup_or_add (expr
, stmt
);
2908 /* VAR and EXPR may be the same when processing statements for which
2909 we are not computing value numbers (e.g., non-assignments, or
2910 statements that make aliased stores). In those cases, we are
2911 only interested in making VAR available as its own value. */
2916 bitmap_insert_into_set (s1
, var
);
2918 /* PHI nodes can't go in the maximal sets because they are not in
2919 TMP_GEN, so it is possible to get into non-monotonic situations
2920 during ANTIC calculation, because it will *add* bits. */
2921 if (!in_fre
&& TREE_CODE (SSA_NAME_DEF_STMT (var
)) != PHI_NODE
)
2922 bitmap_value_insert_into_set (maximal_set
, var
);
2923 bitmap_value_insert_into_set (s2
, var
);
/* NOTE(review): damaged extraction -- the vh declaration, NULL-check and
   return statements are missing from view.  Comments only; all original
   tokens unchanged.  */
/* Look up T's value handle (with STMT context for reference-class trees,
   without for others) and scan the handle's expression set for an existing
   expression equal to T.  */
2926 /* Find existing value expression that is the same as T,
2927 and return it if it exists. */
2930 find_existing_value_expr (tree t
, tree stmt
)
2935 bitmap_set_t exprset
;
2937 if (REFERENCE_CLASS_P (t
))
2938 vh
= vn_lookup (t
, stmt
);
2940 vh
= vn_lookup (t
, NULL
);
2944 exprset
= VALUE_HANDLE_EXPR_SET (vh
);
2945 FOR_EACH_EXPR_ID_IN_SET (exprset
, bii
, bi
)
2947 tree efi
= expression_for_id (bii
);
2948 if (expressions_equal_p (t
, efi
))
/* NOTE(review): damaged extraction -- declarations of vexpr, op, val, efi
   and several control-flow lines are missing from view (gaps at 2963-2964,
   2966, 2968-2969, 2987, 2989, ...).  Comments only; all original tokens
   unchanged.  */
/* Build a pool-allocated copy of EXPR with each operand replaced by its
   value handle; operands are also inserted into EXP_GEN (BLOCK).  Reuses an
   existing equal value expression when find_existing_value_expr finds one.  */
2954 /* Given a unary or binary expression EXPR, create and return a new
2955 expression with the same structure as EXPR but with its operands
2956 replaced with the value handles of each of the operands of EXPR.
2958 VUSES represent the virtual use operands associated with EXPR (if
2959 any). Insert EXPR's operands into the EXP_GEN set for BLOCK. */
2962 create_value_expr_from (tree expr
, basic_block block
, tree stmt
)
2965 enum tree_code code
= TREE_CODE (expr
);
2967 alloc_pool pool
= NULL
;
2970 gcc_assert (TREE_CODE_CLASS (code
) == tcc_unary
2971 || TREE_CODE_CLASS (code
) == tcc_binary
2972 || TREE_CODE_CLASS (code
) == tcc_comparison
2973 || TREE_CODE_CLASS (code
) == tcc_reference
2974 || TREE_CODE_CLASS (code
) == tcc_expression
2975 || TREE_CODE_CLASS (code
) == tcc_vl_exp
2976 || TREE_CODE_CLASS (code
) == tcc_exceptional
2977 || TREE_CODE_CLASS (code
) == tcc_declaration
);
/* Select the allocation pool matching the tree-code class.  */
2979 if (TREE_CODE_CLASS (code
) == tcc_unary
)
2980 pool
= unary_node_pool
;
2981 else if (TREE_CODE_CLASS (code
) == tcc_reference
)
2982 pool
= reference_node_pool
;
2983 else if (TREE_CODE_CLASS (code
) == tcc_binary
)
2984 pool
= binary_node_pool
;
2985 else if (TREE_CODE_CLASS (code
) == tcc_comparison
)
2986 pool
= comparison_node_pool
;
/* Anything without a dedicated pool must be a CALL_EXPR.  */
2988 gcc_assert (code
== CALL_EXPR
);
2990 if (code
== CALL_EXPR
)
2991 vexpr
= temp_copy_call_expr (expr
);
2994 vexpr
= (tree
) pool_alloc (pool
);
2995 memcpy (vexpr
, expr
, tree_size (expr
));
2998 for (i
= 0; i
< TREE_OPERAND_LENGTH (expr
); i
++)
3002 op
= TREE_OPERAND (expr
, i
);
3003 if (op
== NULL_TREE
)
3006 /* Recursively value-numberize reference ops and tree lists. */
3007 if (REFERENCE_CLASS_P (op
))
3009 tree tempop
= create_value_expr_from (op
, block
, stmt
);
3010 op
= tempop
? tempop
: op
;
3011 val
= vn_lookup_or_add (op
, stmt
);
3014 /* Create a value handle for OP and add it to VEXPR. */
3015 val
= vn_lookup_or_add (op
, NULL
);
3017 if (!is_undefined_value (op
) && TREE_CODE (op
) != TREE_LIST
)
3018 bitmap_value_insert_into_set (EXP_GEN (block
), op
);
3020 if (TREE_CODE (val
) == VALUE_HANDLE
)
3021 TREE_TYPE (val
) = TREE_TYPE (TREE_OPERAND (vexpr
, i
));
3023 TREE_OPERAND (vexpr
, i
) = val
;
3025 efi
= find_existing_value_expr (vexpr
, stmt
);
3028 get_or_alloc_expression_id (vexpr
);
/* NOTE(review): damaged extraction -- several condition and return lines
   are missing from view (gaps at 3038, 3040-3043, 3056, 3059-3060, ...).
   Comments only; all original tokens unchanged.  */
/* For a load STMT reading MEM_REF into LHS: if every VUSE of STMT reaches
   a single store statement to the exact same location, record the stored
   RHS as an equivalence for LHS in the block's sets.  */
3032 /* Given a statement STMT and its right hand side which is a load, try
3033 to look for the expression stored in the location for the load, and
3034 return true if a useful equivalence was recorded for LHS. */
3037 try_look_through_load (tree lhs
, tree mem_ref
, tree stmt
, basic_block block
)
3039 tree store_stmt
= NULL
;
3044 FOR_EACH_SSA_TREE_OPERAND (vuse
, stmt
, i
, SSA_OP_VIRTUAL_USES
)
3048 gcc_assert (TREE_CODE (vuse
) == SSA_NAME
);
3049 def_stmt
= SSA_NAME_DEF_STMT (vuse
);
3051 /* If there is no useful statement for this VUSE, we'll not find a
3052 useful expression to return either. Likewise, if there is a
3053 statement but it is not a simple assignment or it has virtual
3054 uses, we can stop right here. Note that this means we do
3055 not look through PHI nodes, which is intentional. */
3057 || TREE_CODE (def_stmt
) != GIMPLE_MODIFY_STMT
3058 || !ZERO_SSA_OPERANDS (def_stmt
, SSA_OP_VIRTUAL_USES
))
3061 /* If this is not the same statement as one we have looked at for
3062 another VUSE of STMT already, we have two statements producing
3063 something that reaches our STMT. */
3064 if (store_stmt
&& store_stmt
!= def_stmt
)
3068 /* Is this a store to the exact same location as the one we are
3069 loading from in STMT? */
3070 if (!operand_equal_p (GIMPLE_STMT_OPERAND (def_stmt
, 0), mem_ref
, 0))
3073 /* Otherwise remember this statement and see if all other VUSEs
3074 come from the same statement. */
3075 store_stmt
= def_stmt
;
3079 /* Alright then, we have visited all VUSEs of STMT and we've determined
3080 that all of them come from the same statement STORE_STMT. See if there
3081 is a useful expression we can deduce from STORE_STMT. */
3082 rhs
= GIMPLE_STMT_OPERAND (store_stmt
, 1);
3083 if ((TREE_CODE (rhs
) == SSA_NAME
3084 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs
))
3085 || is_gimple_min_invariant (rhs
)
3086 || TREE_CODE (rhs
) == ADDR_EXPR
3087 || TREE_INVARIANT (rhs
))
3090 /* Yay! Compute a value number for the RHS of the statement and
3091 add its value to the AVAIL_OUT set for the block. Add the LHS
3093 add_to_sets (lhs
, rhs
, store_stmt
, TMP_GEN (block
), AVAIL_OUT (block
));
3094 if (TREE_CODE (rhs
) == SSA_NAME
3095 && !is_undefined_value (rhs
))
3096 bitmap_value_insert_into_set (EXP_GEN (block
), rhs
);
/* NOTE(review): damaged extraction -- case labels for the first switch arm,
   return statements and the default arm are missing from view (gaps at
   3109, 3111-3113, 3117-3119, 3128+).  Comments only; all original tokens
   unchanged.  */
/* Deep-copy NODE into the temporary alloc pools, dispatching on tree code;
   the visible arms handle a reference-class node and GIMPLE_MODIFY_STMT,
   recursively poolifying their operands.  */
3103 /* Return a copy of NODE that is stored in the temporary alloc_pool's.
3104 This is made recursively true, so that the operands are stored in
3105 the pool as well. */
3108 poolify_tree (tree node
)
3110 switch (TREE_CODE (node
))
3114 tree temp
= (tree
) pool_alloc (reference_node_pool
);
3115 memcpy (temp
, node
, tree_size (node
));
3116 TREE_OPERAND (temp
, 0) = poolify_tree (TREE_OPERAND (temp
, 0));
3120 case GIMPLE_MODIFY_STMT
:
3122 tree temp
= (tree
) pool_alloc (modify_expr_node_pool
);
3123 memcpy (temp
, node
, tree_size (node
));
3124 GIMPLE_STMT_OPERAND (temp
, 0) =
3125 poolify_tree (GIMPLE_STMT_OPERAND (temp
, 0));
3126 GIMPLE_STMT_OPERAND (temp
, 1) =
3127 poolify_tree (GIMPLE_STMT_OPERAND (temp
, 1));
/* Cached template statement, reused across calls to avoid repeated GC
   allocation; only its two operands are rewritten per call.  */
3144 static tree modify_expr_template
;
/* NOTE(review): damaged extraction -- the return type line and braces are
   missing from view.  Comments only; all original tokens unchanged.  */
3146 /* Allocate a GIMPLE_MODIFY_STMT with TYPE, and operands OP1, OP2 in the
3147 alloc pools and return it. */
3149 poolify_modify_stmt (tree op1
, tree op2
)
3151 if (modify_expr_template
== NULL
)
3152 modify_expr_template
= build_gimple_modify_stmt (op1
, op2
);
3154 GIMPLE_STMT_OPERAND (modify_expr_template
, 0) = op1
;
3155 GIMPLE_STMT_OPERAND (modify_expr_template
, 1) = op2
;
/* poolify_tree copies the (shared) template into pool storage.  */
3157 return poolify_tree (modify_expr_template
);
/* NOTE(review): damaged extraction -- the basic-block loop header,
   declarations of `new', iterator setup and several guard lines are missing
   from view (gaps at 3175-3179, 3194-3196, 3206-3214, 3221-3222, ...).
   Comments only; all original tokens unchanged.  */
/* After each eligible store *a = value, insert a pool-allocated fake load
   storetmp_<version> = *a so AVAIL computation can see the store's result;
   the fakes are realified or discarded later (see realify_fake_stores).  */
3161 /* For each real store operation of the form
3162 *a = <value> that we see, create a corresponding fake store of the
3163 form storetmp_<version> = *a.
3165 This enables AVAIL computation to mark the results of stores as
3166 available. Without this, you'd need to do some computation to
3167 mark the result of stores as ANTIC and AVAIL at all the right
3169 To save memory, we keep the store
3170 statements pool allocated until we decide whether they are
3171 necessary or not. */
3174 insert_fake_stores (void)
3180 block_stmt_iterator bsi
;
3181 for (bsi
= bsi_start (block
); !bsi_end_p (bsi
); bsi_next (&bsi
))
3183 tree stmt
= bsi_stmt (bsi
);
3185 /* We can't generate SSA names for stores that are complex
3186 or aggregate. We also want to ignore things whose
3187 virtual uses occur in abnormal phis. */
3189 if (TREE_CODE (stmt
) == GIMPLE_MODIFY_STMT
3190 && TREE_CODE (GIMPLE_STMT_OPERAND (stmt
, 0)) == INDIRECT_REF
3191 && !AGGREGATE_TYPE_P (TREE_TYPE (GIMPLE_STMT_OPERAND (stmt
, 0)))
3192 && TREE_CODE (TREE_TYPE (GIMPLE_STMT_OPERAND
3193 (stmt
, 0))) != COMPLEX_TYPE
)
3197 tree lhs
= GIMPLE_STMT_OPERAND (stmt
, 0);
3198 tree rhs
= GIMPLE_STMT_OPERAND (stmt
, 1);
3200 bool notokay
= false;
3202 FOR_EACH_SSA_DEF_OPERAND (defp
, stmt
, iter
, SSA_OP_VIRTUAL_DEFS
)
3204 tree defvar
= DEF_FROM_PTR (defp
);
3205 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (defvar
))
/* Reuse the cached storetemp when its type matches the RHS.  */
3215 if (!storetemp
|| TREE_TYPE (rhs
) != TREE_TYPE (storetemp
))
3217 storetemp
= create_tmp_var (TREE_TYPE (rhs
), "storetmp");
3218 if (TREE_CODE (TREE_TYPE (storetemp
)) == VECTOR_TYPE
)
3219 DECL_GIMPLE_REG_P (storetemp
) = 1;
3220 get_var_ann (storetemp
);
3223 new = poolify_modify_stmt (storetemp
, lhs
);
3225 lhs
= make_ssa_name (storetemp
, new);
3226 GIMPLE_STMT_OPERAND (new, 0) = lhs
;
3227 create_ssa_artificial_load_stmt (new, stmt
);
3229 NECESSARY (new) = 0;
3230 VEC_safe_push (tree
, heap
, inserted_exprs
, new);
3231 VEC_safe_push (tree
, heap
, need_creation
, new);
3232 bsi_insert_after (&bsi
, new, BSI_NEW_STMT
);
/* NOTE(review): damaged extraction -- declarations of i/stmt/tmp/newstmt,
   the else-branch and closing braces are missing from view.  Comments only;
   all original tokens unchanged.  */
/* For each fake store in need_creation that proved NECESSARY, rebuild it as
   a real GC-allocated GIMPLE_MODIFY_STMT placed before the original store,
   and remove the pool-allocated placeholder.  */
3238 /* Turn the pool allocated fake stores that we created back into real
3239 GC allocated ones if they turned out to be necessary to PRE some
3243 realify_fake_stores (void)
3248 for (i
= 0; VEC_iterate (tree
, need_creation
, i
, stmt
); i
++)
3250 if (NECESSARY (stmt
))
3252 block_stmt_iterator bsi
;
3255 /* Mark the temp variable as referenced */
3256 add_referenced_var (SSA_NAME_VAR (GIMPLE_STMT_OPERAND (stmt
, 0)));
3258 /* Put the new statement in GC memory, fix up the
3259 SSA_NAME_DEF_STMT on it, and then put it in place of
3260 the old statement before the store in the IR stream
3261 as a plain ssa name copy. */
3262 bsi
= bsi_for_stmt (stmt
);
3264 tmp
= GIMPLE_STMT_OPERAND (bsi_stmt (bsi
), 1);
3265 newstmt
= build_gimple_modify_stmt (GIMPLE_STMT_OPERAND (stmt
, 0),
3267 SSA_NAME_DEF_STMT (GIMPLE_STMT_OPERAND (newstmt
, 0)) = newstmt
;
3268 bsi_insert_before (&bsi
, newstmt
, BSI_SAME_STMT
);
3269 bsi
= bsi_for_stmt (stmt
);
3270 bsi_remove (&bsi
, true);
3273 release_defs (stmt
);
/* NOTE(review): damaged extraction -- the declaration of t, the early
   returns and final return are missing from view (gaps at 3286, 3288,
   3298-3299, 3304-3306, 3316+).  Comments only; all original tokens
   unchanged.  */
/* Attempt to fold a conversion-like value expression (NOP/CONVERT/
   REALPART/IMAGPART over a VUSE-free value handle) against the first
   expression of its operand's value set.  */
3277 /* Tree-combine a value number expression *EXPR_P that does a type
3278 conversion with the value number expression of its operand.
3279 Returns true, if *EXPR_P simplifies to a value number or
3280 gimple min-invariant expression different from EXPR_P and
3281 sets *EXPR_P to the simplified expression value number.
3282 Otherwise returns false and does not change *EXPR_P. */
3285 try_combine_conversion (tree
*expr_p
)
3287 tree expr
= *expr_p
;
3289 bitmap_set_t exprset
;
3290 unsigned int firstbit
;
3292 if (!((TREE_CODE (expr
) == NOP_EXPR
3293 || TREE_CODE (expr
) == CONVERT_EXPR
3294 || TREE_CODE (expr
) == REALPART_EXPR
3295 || TREE_CODE (expr
) == IMAGPART_EXPR
)
3296 && TREE_CODE (TREE_OPERAND (expr
, 0)) == VALUE_HANDLE
3297 && !VALUE_HANDLE_VUSES (TREE_OPERAND (expr
, 0))))
3300 exprset
= VALUE_HANDLE_EXPR_SET (TREE_OPERAND (expr
, 0));
3301 firstbit
= bitmap_first_set_bit (exprset
->expressions
);
3302 t
= fold_unary (TREE_CODE (expr
), TREE_TYPE (expr
),
3303 expression_for_id (firstbit
));
3307 /* Strip useless type conversions, which is safe in the optimizers but
3308 not generally in fold. */
3309 STRIP_USELESS_TYPE_CONVERSION (t
);
3311 /* Disallow value expressions we have no value number for already, as
3312 we would miss a leader for it here. */
3313 if (!(TREE_CODE (t
) == VALUE_HANDLE
3314 || is_gimple_min_invariant (t
)))
3315 t
= vn_lookup (t
, NULL
);
/* NOTE(review): damaged extraction -- numerous lines are missing from view
   (the param/phi/stmt declarations, loop condition pieces, the worklist
   `while (sp)' header, closing braces, free of worklist, ...).  Comments
   only; all original tokens unchanged.  */
/* Single dominator-order walk that value-numbers every statement and fills
   AVAIL_OUT, TMP_GEN, PHI_GEN, EXP_GEN and the maximal set per block.  */
3325 /* Compute the AVAIL set for all basic blocks.
3327 This function performs value numbering of the statements in each basic
3328 block. The AVAIL sets are built from information we glean while doing
3329 this value numbering, since the AVAIL sets contain only one entry per
3332 AVAIL_IN[BLOCK] = AVAIL_OUT[dom(BLOCK)].
3333 AVAIL_OUT[BLOCK] = AVAIL_IN[BLOCK] U PHI_GEN[BLOCK] U TMP_GEN[BLOCK]. */
3336 compute_avail (void)
3338 basic_block block
, son
;
3339 basic_block
*worklist
;
3342 /* For arguments with default definitions, we pretend they are
3343 defined in the entry block. */
3344 for (param
= DECL_ARGUMENTS (current_function_decl
);
3346 param
= TREE_CHAIN (param
))
3348 if (gimple_default_def (cfun
, param
) != NULL
)
3350 tree def
= gimple_default_def (cfun
, param
);
3352 vn_lookup_or_add (def
, NULL
);
3353 bitmap_insert_into_set (TMP_GEN (ENTRY_BLOCK_PTR
), def
);
3355 bitmap_value_insert_into_set (maximal_set
, def
);
3356 bitmap_value_insert_into_set (AVAIL_OUT (ENTRY_BLOCK_PTR
), def
);
3360 /* Likewise for the static chain decl. */
3361 if (cfun
->static_chain_decl
)
3363 param
= cfun
->static_chain_decl
;
3364 if (gimple_default_def (cfun
, param
) != NULL
)
3366 tree def
= gimple_default_def (cfun
, param
);
3368 vn_lookup_or_add (def
, NULL
);
3369 bitmap_insert_into_set (TMP_GEN (ENTRY_BLOCK_PTR
), def
);
3371 bitmap_value_insert_into_set (maximal_set
, def
);
3372 bitmap_value_insert_into_set (AVAIL_OUT (ENTRY_BLOCK_PTR
), def
);
3376 /* Allocate the worklist. */
3377 worklist
= XNEWVEC (basic_block
, n_basic_blocks
);
3379 /* Seed the algorithm by putting the dominator children of the entry
3380 block on the worklist. */
3381 for (son
= first_dom_son (CDI_DOMINATORS
, ENTRY_BLOCK_PTR
);
3383 son
= next_dom_son (CDI_DOMINATORS
, son
))
3384 worklist
[sp
++] = son
;
3386 /* Loop until the worklist is empty. */
3389 block_stmt_iterator bsi
;
3392 unsigned int stmt_uid
= 1;
3394 /* Pick a block from the worklist. */
3395 block
= worklist
[--sp
];
3397 /* Initially, the set of available values in BLOCK is that of
3398 its immediate dominator. */
3399 dom
= get_immediate_dominator (CDI_DOMINATORS
, block
);
3401 bitmap_set_copy (AVAIL_OUT (block
), AVAIL_OUT (dom
));
3403 /* Generate values for PHI nodes. */
3404 for (phi
= phi_nodes (block
); phi
; phi
= PHI_CHAIN (phi
))
3406 /* We have no need for virtual phis, as they don't represent
3407 actual computations. */
3408 if (is_gimple_reg (PHI_RESULT (phi
)))
3410 add_to_sets (PHI_RESULT (phi
), PHI_RESULT (phi
), NULL
,
3411 PHI_GEN (block
), AVAIL_OUT (block
));
3415 /* Now compute value numbers and populate value sets with all
3416 the expressions computed in BLOCK. */
3417 for (bsi
= bsi_start (block
); !bsi_end_p (bsi
); bsi_next (&bsi
))
3423 stmt
= bsi_stmt (bsi
);
3424 ann
= stmt_ann (stmt
);
3426 ann
->uid
= stmt_uid
++;
3428 /* For regular value numbering, we are only interested in
3429 assignments of the form X_i = EXPR, where EXPR represents
3430 an "interesting" computation, it has no volatile operands
3431 and X_i doesn't flow through an abnormal edge. */
/* RETURN_EXPR wrapping an assignment: unwrap and record its RHS.  */
3432 if (TREE_CODE (stmt
) == RETURN_EXPR
3433 && !ann
->has_volatile_ops
)
3435 tree realstmt
= stmt
;
3439 stmt
= TREE_OPERAND (stmt
, 0);
3440 if (stmt
&& TREE_CODE (stmt
) == GIMPLE_MODIFY_STMT
)
3442 lhs
= GIMPLE_STMT_OPERAND (stmt
, 0);
3443 rhs
= GIMPLE_STMT_OPERAND (stmt
, 1);
3444 if (TREE_CODE (rhs
) == SSA_NAME
3445 && !is_undefined_value (rhs
))
3446 bitmap_value_insert_into_set (EXP_GEN (block
), rhs
);
3448 FOR_EACH_SSA_TREE_OPERAND (op
, realstmt
, iter
, SSA_OP_DEF
)
3449 add_to_sets (op
, op
, NULL
, TMP_GEN (block
),
/* Plain scalar assignment: the main value-numbering case.  */
3455 else if (TREE_CODE (stmt
) == GIMPLE_MODIFY_STMT
3456 && !ann
->has_volatile_ops
3457 && TREE_CODE (GIMPLE_STMT_OPERAND (stmt
, 0)) == SSA_NAME
3458 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI
3459 (GIMPLE_STMT_OPERAND (stmt
, 0)))
3461 tree lhs
= GIMPLE_STMT_OPERAND (stmt
, 0);
3462 tree rhs
= GIMPLE_STMT_OPERAND (stmt
, 1);
3464 /* Try to look through loads. */
3465 if (TREE_CODE (lhs
) == SSA_NAME
3466 && !ZERO_SSA_OPERANDS (stmt
, SSA_OP_VIRTUAL_USES
)
3467 && try_look_through_load (lhs
, rhs
, stmt
, block
))
3470 STRIP_USELESS_TYPE_CONVERSION (rhs
);
3471 if (can_value_number_operation (rhs
))
3473 /* For value numberable operation, create a
3474 duplicate expression with the operands replaced
3475 with the value handles of the original RHS. */
3476 tree newt
= create_value_expr_from (rhs
, block
, stmt
);
3479 /* If we can combine a conversion expression
3480 with the expression for its operand just
3481 record the value number for it. */
3482 if (try_combine_conversion (&newt
))
3486 tree val
= vn_lookup_or_add (newt
, stmt
);
3489 bitmap_value_insert_into_set (maximal_set
, newt
);
3490 bitmap_value_insert_into_set (EXP_GEN (block
), newt
);
3492 bitmap_insert_into_set (TMP_GEN (block
), lhs
);
3493 bitmap_value_insert_into_set (AVAIL_OUT (block
), lhs
);
/* Simple copies/invariants: value-number directly.  */
3497 else if ((TREE_CODE (rhs
) == SSA_NAME
3498 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs
))
3499 || is_gimple_min_invariant (rhs
)
3500 || TREE_CODE (rhs
) == ADDR_EXPR
3501 || TREE_INVARIANT (rhs
)
3504 /* Compute a value number for the RHS of the statement
3505 and add its value to the AVAIL_OUT set for the block.
3506 Add the LHS to TMP_GEN. */
3507 add_to_sets (lhs
, rhs
, stmt
, TMP_GEN (block
),
3510 if (TREE_CODE (rhs
) == SSA_NAME
3511 && !is_undefined_value (rhs
))
3512 bitmap_value_insert_into_set (EXP_GEN (block
), rhs
);
3517 /* For any other statement that we don't recognize, simply
3518 make the names generated by the statement available in
3519 AVAIL_OUT and TMP_GEN. */
3520 FOR_EACH_SSA_TREE_OPERAND (op
, stmt
, iter
, SSA_OP_DEF
)
3521 add_to_sets (op
, op
, NULL
, TMP_GEN (block
), AVAIL_OUT (block
));
3523 FOR_EACH_SSA_TREE_OPERAND (op
, stmt
, iter
, SSA_OP_USE
)
3524 add_to_sets (op
, op
, NULL
, NULL
, AVAIL_OUT (block
));
3527 /* Put the dominator children of BLOCK on the worklist of blocks
3528 to compute available sets for. */
3529 for (son
= first_dom_son (CDI_DOMINATORS
, block
);
3531 son
= next_dom_son (CDI_DOMINATORS
, son
))
3532 worklist
[sp
++] = son
;
/* NOTE(review): the function header is missing from this extraction
   (numbering jumps from 3539 to 3548); from the visible body this is
   presumably the `eliminate' routine iterating over basic block b --
   confirm against pristine source.  Comments only; all original tokens
   unchanged.  */
3539 /* Eliminate fully redundant computations. */
3548 block_stmt_iterator i
;
3550 for (i
= bsi_start (b
); !bsi_end_p (i
); bsi_next (&i
))
3552 tree stmt
= bsi_stmt (i
);
3554 /* Lookup the RHS of the expression, see if we have an
3555 available computation for it. If so, replace the RHS with
3556 the available computation. */
3557 if (TREE_CODE (stmt
) == GIMPLE_MODIFY_STMT
3558 && TREE_CODE (GIMPLE_STMT_OPERAND (stmt
, 0)) == SSA_NAME
3559 && TREE_CODE (GIMPLE_STMT_OPERAND (stmt
, 1)) != SSA_NAME
3560 && !is_gimple_min_invariant (GIMPLE_STMT_OPERAND (stmt
, 1))
3561 && !stmt_ann (stmt
)->has_volatile_ops
)
3563 tree lhs
= GIMPLE_STMT_OPERAND (stmt
, 0);
3564 tree
*rhs_p
= &GIMPLE_STMT_OPERAND (stmt
, 1);
/* The leader is any available expression with the LHS's value.  */
3567 sprime
= bitmap_find_leader (AVAIL_OUT (b
),
3568 vn_lookup (lhs
, NULL
));
3571 && (TREE_CODE (*rhs_p
) != SSA_NAME
3572 || may_propagate_copy (*rhs_p
, sprime
)))
3574 gcc_assert (sprime
!= *rhs_p
);
3576 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3578 fprintf (dump_file
, "Replaced ");
3579 print_generic_expr (dump_file
, *rhs_p
, 0);
3580 fprintf (dump_file
, " with ");
3581 print_generic_expr (dump_file
, sprime
, 0);
3582 fprintf (dump_file
, " in ");
3583 print_generic_stmt (dump_file
, stmt
, 0);
/* Keep the leader's defining statement for the DCE phase.  */
3586 if (TREE_CODE (sprime
) == SSA_NAME
)
3587 NECESSARY (SSA_NAME_DEF_STMT (sprime
)) = 1;
3588 /* We need to make sure the new and old types actually match,
3589 which may require adding a simple cast, which fold_convert
3591 if (TREE_CODE (*rhs_p
) != SSA_NAME
3592 && !tree_ssa_useless_type_conversion_1 (TREE_TYPE (*rhs_p
),
3593 TREE_TYPE (sprime
)))
3594 sprime
= fold_convert (TREE_TYPE (*rhs_p
), sprime
);
3596 pre_stats
.eliminations
++;
3597 propagate_tree_value (rhs_p
, sprime
);
3600 /* If we removed EH side effects from the statement, clean
3601 its EH information. */
3602 if (maybe_clean_or_replace_eh_stmt (stmt
, stmt
))
3604 bitmap_set_bit (need_eh_cleanup
,
3605 bb_for_stmt (stmt
)->index
);
3606 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3607 fprintf (dump_file
, " Removed EH side effects.\n");
/* NOTE(review): damaged extraction -- the return type, gcc_assert, early
   returns and final return are missing from view (gaps at 3621-3623,
   3625-3629, 3631-3632, 3634-3635, 3638-3639, 3641+).  Comments only; all
   original tokens unchanged.  */
3615 /* Borrow a bit of tree-ssa-dce.c for the moment.
3616 XXX: In 4.1, we should be able to just run a DCE pass after PRE, though
3617 this may be a bit faster, and we may want critical edges kept split. */
3619 /* If OP's defining statement has not already been determined to be necessary,
3620 mark that statement necessary. Return the stmt, if it is newly
3624 mark_operand_necessary (tree op
)
3630 if (TREE_CODE (op
) != SSA_NAME
)
3633 stmt
= SSA_NAME_DEF_STMT (op
);
/* Already-necessary or empty definitions need no further marking.  */
3636 if (NECESSARY (stmt
)
3637 || IS_EMPTY_STMT (stmt
))
3640 NECESSARY (stmt
) = 1;
/* NOTE(review): damaged extraction -- declarations of i/t, NECESSARY guard
   conditions, the release_defs/ssa-name release calls and closing braces
   are missing from view.  Comments only; all original tokens unchanged.  */
/* Mark-and-sweep over inserted_exprs: propagate necessity backwards from
   inserted statements through their operands, then delete inserted PHIs
   and statements that never became necessary.  */
3644 /* Because we don't follow exactly the standard PRE algorithm, and decide not
3645 to insert PHI nodes sometimes, and because value numbering of casts isn't
3646 perfect, we sometimes end up inserting dead code. This simple DCE-like
3647 pass removes any insertions we made that weren't actually used. */
3650 remove_dead_inserted_code (void)
3652 VEC(tree
,heap
) *worklist
= NULL
;
3656 worklist
= VEC_alloc (tree
, heap
, VEC_length (tree
, inserted_exprs
));
3657 for (i
= 0; VEC_iterate (tree
, inserted_exprs
, i
, t
); i
++)
3660 VEC_quick_push (tree
, worklist
, t
);
3662 while (VEC_length (tree
, worklist
) > 0)
3664 t
= VEC_pop (tree
, worklist
);
3666 /* PHI nodes are somewhat special in that each PHI alternative has
3667 data and control dependencies. All the statements feeding the
3668 PHI node's arguments are always necessary. */
3669 if (TREE_CODE (t
) == PHI_NODE
)
3673 VEC_reserve (tree
, heap
, worklist
, PHI_NUM_ARGS (t
));
3674 for (k
= 0; k
< PHI_NUM_ARGS (t
); k
++)
3676 tree arg
= PHI_ARG_DEF (t
, k
);
3677 if (TREE_CODE (arg
) == SSA_NAME
)
3679 arg
= mark_operand_necessary (arg
);
3681 VEC_quick_push (tree
, worklist
, arg
);
3687 /* Propagate through the operands. Examine all the USE, VUSE and
3688 VDEF operands in this statement. Mark all the statements
3689 which feed this statement's uses as necessary. */
3693 /* The operands of VDEF expressions are also needed as they
3694 represent potential definitions that may reach this
3695 statement (VDEF operands allow us to follow def-def
3698 FOR_EACH_SSA_TREE_OPERAND (use
, t
, iter
, SSA_OP_ALL_USES
)
3700 tree n
= mark_operand_necessary (use
);
3702 VEC_safe_push (tree
, heap
, worklist
, n
);
/* Sweep: remove insertions that never became necessary.  */
3707 for (i
= 0; VEC_iterate (tree
, inserted_exprs
, i
, t
); i
++)
3711 block_stmt_iterator bsi
;
3713 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3715 fprintf (dump_file
, "Removing unnecessary insertion:");
3716 print_generic_stmt (dump_file
, t
, 0);
3719 if (TREE_CODE (t
) == PHI_NODE
)
3721 remove_phi_node (t
, NULL
, true);
3725 bsi
= bsi_for_stmt (t
);
3726 bsi_remove (&bsi
, true);
3731 VEC_free (tree
, heap
, worklist
);
3734 /* Initialize data structures used by PRE. */
3737 init_pre (bool do_fre
)
3741 next_expression_id
= 0;
3745 inserted_exprs
= NULL
;
3746 need_creation
= NULL
;
3747 pretemp
= NULL_TREE
;
3748 storetemp
= NULL_TREE
;
3749 mergephitemp
= NULL_TREE
;
3750 prephitemp
= NULL_TREE
;
3754 loop_optimizer_init (LOOPS_NORMAL
);
3756 connect_infinite_loops_to_exit ();
3757 memset (&pre_stats
, 0, sizeof (pre_stats
));
3760 postorder
= XNEWVEC (int, n_basic_blocks
- NUM_FIXED_BLOCKS
);
3761 post_order_compute (postorder
, false);
3764 bb
->aux
= xcalloc (1, sizeof (struct bb_bitmap_sets
));
3766 calculate_dominance_info (CDI_POST_DOMINATORS
);
3767 calculate_dominance_info (CDI_DOMINATORS
);
3769 bitmap_obstack_initialize (&grand_bitmap_obstack
);
3770 phi_translate_table
= htab_create (5110, expr_pred_trans_hash
,
3771 expr_pred_trans_eq
, free
);
3772 bitmap_set_pool
= create_alloc_pool ("Bitmap sets",
3773 sizeof (struct bitmap_set
), 30);
3774 binary_node_pool
= create_alloc_pool ("Binary tree nodes",
3775 tree_code_size (PLUS_EXPR
), 30);
3776 unary_node_pool
= create_alloc_pool ("Unary tree nodes",
3777 tree_code_size (NEGATE_EXPR
), 30);
3778 reference_node_pool
= create_alloc_pool ("Reference tree nodes",
3779 tree_code_size (ARRAY_REF
), 30);
3780 comparison_node_pool
= create_alloc_pool ("Comparison tree nodes",
3781 tree_code_size (EQ_EXPR
), 30);
3782 modify_expr_node_pool
= create_alloc_pool ("GIMPLE_MODIFY_STMT nodes",
3783 tree_code_size (GIMPLE_MODIFY_STMT
),
3785 obstack_init (&temp_call_expr_obstack
);
3786 modify_expr_template
= NULL
;
3790 EXP_GEN (bb
) = bitmap_set_new ();
3791 PHI_GEN (bb
) = bitmap_set_new ();
3792 TMP_GEN (bb
) = bitmap_set_new ();
3793 AVAIL_OUT (bb
) = bitmap_set_new ();
3795 maximal_set
= in_fre
? NULL
: bitmap_set_new ();
3797 need_eh_cleanup
= BITMAP_ALLOC (NULL
);
3801 /* Deallocate data structures used by PRE. */
3804 fini_pre (bool do_fre
)
3810 VEC_free (tree
, heap
, inserted_exprs
);
3811 VEC_free (tree
, heap
, need_creation
);
3812 bitmap_obstack_release (&grand_bitmap_obstack
);
3813 free_alloc_pool (bitmap_set_pool
);
3814 free_alloc_pool (binary_node_pool
);
3815 free_alloc_pool (reference_node_pool
);
3816 free_alloc_pool (unary_node_pool
);
3817 free_alloc_pool (comparison_node_pool
);
3818 free_alloc_pool (modify_expr_node_pool
);
3819 htab_delete (phi_translate_table
);
3820 remove_fake_exit_edges ();
3828 free_dominance_info (CDI_POST_DOMINATORS
);
3831 if (!bitmap_empty_p (need_eh_cleanup
))
3833 tree_purge_all_dead_eh_edges (need_eh_cleanup
);
3834 cleanup_tree_cfg ();
3837 BITMAP_FREE (need_eh_cleanup
);
3839 /* Wipe out pointers to VALUE_HANDLEs. In the not terribly distant
3840 future we will want them to be persistent though. */
3841 for (i
= 0; i
< num_ssa_names
; i
++)
3843 tree name
= ssa_name (i
);
3848 if (SSA_NAME_VALUE (name
)
3849 && TREE_CODE (SSA_NAME_VALUE (name
)) == VALUE_HANDLE
)
3850 SSA_NAME_VALUE (name
) = NULL
;
3852 if (!do_fre
&& current_loops
)
3853 loop_optimizer_finalize ();
3856 /* Main entry point to the SSA-PRE pass. DO_FRE is true if the caller
3857 only wants to do full redundancy elimination. */
3860 execute_pre (bool do_fre
)
3863 do_partial_partial
= optimize
> 2;
3867 insert_fake_stores ();
3869 /* Collect and value number expressions computed in each basic block. */
3872 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3878 print_bitmap_set (dump_file
, EXP_GEN (bb
), "exp_gen", bb
->index
);
3879 print_bitmap_set (dump_file
, TMP_GEN (bb
), "tmp_gen",
3881 print_bitmap_set (dump_file
, AVAIL_OUT (bb
), "avail_out",
3886 /* Insert can get quite slow on an incredibly large number of basic
3887 blocks due to some quadratic behavior. Until this behavior is
3888 fixed, don't run it when he have an incredibly large number of
3889 bb's. If we aren't going to run insert, there is no point in
3890 computing ANTIC, either, even though it's plenty fast. */
3891 if (!do_fre
&& n_basic_blocks
< 4000)
3893 compute_antic_safe ();
3898 /* Remove all the redundant expressions. */
3901 if (dump_file
&& (dump_flags
& TDF_STATS
))
3903 fprintf (dump_file
, "Insertions: %d\n", pre_stats
.insertions
);
3904 fprintf (dump_file
, "PA inserted: %d\n", pre_stats
.pa_insert
);
3905 fprintf (dump_file
, "New PHIs: %d\n", pre_stats
.phis
);
3906 fprintf (dump_file
, "Eliminated: %d\n", pre_stats
.eliminations
);
3907 fprintf (dump_file
, "Constified: %d\n", pre_stats
.constified
);
3909 bsi_commit_edge_inserts ();
3911 clear_expression_ids ();
3914 remove_dead_inserted_code ();
3915 realify_fake_stores ();
3921 /* Gate and execute functions for PRE. */
3926 execute_pre (false);
3933 return flag_tree_pre
!= 0;
3936 struct tree_opt_pass pass_pre
=
3939 gate_pre
, /* gate */
3940 do_pre
, /* execute */
3943 0, /* static_pass_number */
3944 TV_TREE_PRE
, /* tv_id */
3945 PROP_no_crit_edges
| PROP_cfg
3946 | PROP_ssa
| PROP_alias
, /* properties_required */
3947 0, /* properties_provided */
3948 0, /* properties_destroyed */
3949 0, /* todo_flags_start */
3950 TODO_update_ssa_only_virtuals
| TODO_dump_func
| TODO_ggc_collect
3951 | TODO_verify_ssa
, /* todo_flags_finish */
3956 /* Gate and execute functions for FRE. */
3968 return flag_tree_fre
!= 0;
3971 struct tree_opt_pass pass_fre
=
3974 gate_fre
, /* gate */
3975 execute_fre
, /* execute */
3978 0, /* static_pass_number */
3979 TV_TREE_FRE
, /* tv_id */
3980 PROP_cfg
| PROP_ssa
| PROP_alias
, /* properties_required */
3981 0, /* properties_provided */
3982 0, /* properties_destroyed */
3983 0, /* todo_flags_start */
3984 TODO_dump_func
| TODO_ggc_collect
| TODO_verify_ssa
, /* todo_flags_finish */