2 Copyright (C) 2001-2013 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dan@dberlin.org> and Steven Bosscher
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
27 #include "basic-block.h"
28 #include "gimple-pretty-print.h"
29 #include "tree-inline.h"
30 #include "tree-flow.h"
32 #include "hash-table.h"
33 #include "tree-iterator.h"
34 #include "alloc-pool.h"
36 #include "tree-pass.h"
39 #include "langhooks.h"
41 #include "tree-ssa-sccvn.h"
42 #include "tree-scalar-evolution.h"
49 1. Avail sets can be shared by making an avail_find_leader that
50 walks up the dominator tree and looks in those avail sets.
51 This might affect code optimality; it's unclear right now.
52 2. Strength reduction can be performed by anticipating expressions
53 we can repair later on.
54 3. We can do back-substitution or smarter value numbering to catch
55 commutative expressions split up over multiple statements.
58 /* For ease of terminology, "expression node" in the below refers to
59 every expression node but GIMPLE_ASSIGN, because GIMPLE_ASSIGNs
60 represent the actual statement containing the expressions we care about,
61 and we cache the value number by putting it in the expression. */
65 First we walk the statements to generate the AVAIL sets, the
66 EXP_GEN sets, and the tmp_gen sets. EXP_GEN sets represent the
67 generation of values/expressions by a given block. We use them
68 when computing the ANTIC sets. The AVAIL sets consist of
69 SSA_NAME's that represent values, so we know what values are
70 available in what blocks. AVAIL is a forward dataflow problem. In
71 SSA, values are never killed, so we don't need a kill set, or a
72 fixpoint iteration, in order to calculate the AVAIL sets. In
70 traditional parlance, AVAIL sets tell us the downsafety of the expressions/values.
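   (Illustrative example, not part of the original description: because
   values are never killed in SSA, a computation such as

      x_1 = a_2 + b_3;

   makes the value of a_2 + b_3, represented by x_1, AVAIL in this block
   and in every block it dominates, with no kill set or fixpoint needed.)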
76 Next, we generate the ANTIC sets. These sets represent the
77 anticipatable expressions. ANTIC is a backwards dataflow
78 problem. An expression is anticipatable in a given block if it could
79 be generated in that block. This means that if we had to perform
80 an insertion in that block, of the value of that expression, we
81 could. Calculating the ANTIC sets requires phi translation of
82 expressions, because the flow goes backwards through phis. We must
83 iterate to a fixpoint of the ANTIC sets, because we have a kill
84 set. Even in SSA form, values are not live over the entire
85 function, only from their definition point onwards. So we have to
86 remove values from the ANTIC set once we go past the definition
87 point of the leaders that make them up.
88 compute_antic/compute_antic_aux performs this computation.
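   (Illustrative example, not part of the original description: in

      if (c_1)
        x_2 = a_3 + b_4;
      else
        y_5 = a_3 + b_4;

   the expression a_3 + b_4 is generated in both successors of the
   condition block, so it is anticipatable (ANTIC_IN) at the head of the
   if: we could safely compute its value there using leaders for a_3 and
   b_4.)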
90 Third, we perform insertions to make partially redundant
91 expressions fully redundant.
93 An expression is partially redundant (excluding partial anticipation) if:
96 1. It is AVAIL in some, but not all, of the predecessors of a given block.
98 2. It is ANTIC in all the predecessors.
100 In order to make it fully redundant, we insert the expression into
101 the predecessors where it is not available, but is ANTIC.
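   (A minimal source-level sketch of the transformation; illustrative
   only, names made up:

      if (c)
        x = a + b;
      else
        bar ();
      y = a + b;   <- partially redundant: AVAIL only on the then-edge

   becomes, after inserting a + b on the else-edge,

      if (c)
        x = a + b;
      else
        { bar (); pretmp = a + b; }
      y = PHI <x, pretmp>;   <- now fully redundant and eliminated.)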
103 For the partial anticipation case, we only perform insertion if it
104 is partially anticipated in some block, and fully available in all of the predecessors.
107 insert/insert_aux/do_regular_insertion/do_partial_partial_insertion
108 performs these steps.
110 Fourth, we eliminate fully redundant expressions.
111 This is a simple statement walk that replaces redundant
112 calculations with the now available values. */
114 /* Representations of value numbers:
116 Value numbers are represented by a representative SSA_NAME. We
117 will create fake SSA_NAME's in situations where we need a
118 representative but do not have one (because it is a complex
119 expression). In order to facilitate storing the value numbers in
120 bitmaps, and keep the number of wasted SSA_NAME's down, we also
121 associate a value_id with each value number, and create full blown
122 ssa_name's only where we actually need them (IE in operands of
123 existing expressions).
125 Theoretically you could replace all the value_id's with
126 SSA_NAME_VERSION, but this would allocate a large number of
127 SSA_NAME's (which are each > 30 bytes) just to get a 4 byte number.
128 It would also require an additional indirection at each point we use the value id. */
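   (A tiny worked illustration, with made-up numbers: if x_1 and the
   expression a_2 + b_3 share value-id 17, then value_expressions[17] is
   a bitmap holding the expression-ids of both, and a set can record
   "value 17 is present" with a single bit instead of materializing a
   fresh SSA_NAME for that value.)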
131 /* Representation of expressions on value numbers:
133 Expressions consisting of value numbers are represented the same
134 way as our VN internally represents them, with an additional
135 "pre_expr" wrapping around them in order to facilitate storing all
136 of the expressions in the same sets. */
138 /* Representation of sets:
140 The dataflow sets do not need to be sorted in any particular order
141 for the majority of their lifetime, and are simply represented as two
142 bitmaps, one that keeps track of values present in the set, and one
143 that keeps track of expressions present in the set.
145 When we need them in topological order, we produce it on demand by
146 transforming the bitmap into an array and sorting it into topo order. */
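   (Sketch of the invariant, illustrative only: inserting the expression
   a_2 + b_3 with value-id 17 and expression-id 42 into a set sets bit 17
   in the values bitmap and bit 42 in the expressions bitmap; testing
   whether a value is in the set is then a single bit test, while the
   expressions bitmap remembers which particular expressions carry it.)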
149 /* Type of expression, used to know which member of the PRE_EXPR union
160 typedef union pre_expr_union_d
165 vn_reference_t reference;
168 typedef struct pre_expr_d : typed_noop_remove <pre_expr_d>
170 enum pre_expr_kind kind;
174 /* hash_table support. */
175 typedef pre_expr_d value_type;
176 typedef pre_expr_d compare_type;
177 static inline hashval_t hash (const pre_expr_d *);
178 static inline int equal (const pre_expr_d *, const pre_expr_d *);
181 #define PRE_EXPR_NAME(e) (e)->u.name
182 #define PRE_EXPR_NARY(e) (e)->u.nary
183 #define PRE_EXPR_REFERENCE(e) (e)->u.reference
184 #define PRE_EXPR_CONSTANT(e) (e)->u.constant
186 /* Compare E1 and E2 for equality. */
189 pre_expr_d::equal (const value_type *e1, const compare_type *e2)
191 if (e1->kind != e2->kind)
197 return vn_constant_eq_with_type (PRE_EXPR_CONSTANT (e1),
198 PRE_EXPR_CONSTANT (e2));
200 return PRE_EXPR_NAME (e1) == PRE_EXPR_NAME (e2);
202 return vn_nary_op_eq (PRE_EXPR_NARY (e1), PRE_EXPR_NARY (e2));
204 return vn_reference_eq (PRE_EXPR_REFERENCE (e1),
205 PRE_EXPR_REFERENCE (e2));
214 pre_expr_d::hash (const value_type *e)
219 return vn_hash_constant_with_type (PRE_EXPR_CONSTANT (e));
221 return SSA_NAME_VERSION (PRE_EXPR_NAME (e));
223 return PRE_EXPR_NARY (e)->hashcode;
225 return PRE_EXPR_REFERENCE (e)->hashcode;
231 /* Next global expression id number. */
232 static unsigned int next_expression_id;
234 /* Mapping from expression to id number we can use in bitmap sets. */
235 static vec<pre_expr> expressions;
236 static hash_table <pre_expr_d> expression_to_id;
237 static vec<unsigned> name_to_id;
239 /* Allocate an expression id for EXPR. */
241 static inline unsigned int
242 alloc_expression_id (pre_expr expr)
244 struct pre_expr_d **slot;
245 /* Make sure we won't overflow. */
246 gcc_assert (next_expression_id + 1 > next_expression_id);
247 expr->id = next_expression_id++;
248 expressions.safe_push (expr);
249 if (expr->kind == NAME)
251 unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr));
252 /* vec::safe_grow_cleared allocates no headroom. Avoid frequent
253 re-allocations by using vec::reserve upfront. There is no
254 vec::quick_grow_cleared unfortunately. */
255 unsigned old_len = name_to_id.length ();
256 name_to_id.reserve (num_ssa_names - old_len);
257 name_to_id.safe_grow_cleared (num_ssa_names);
258 gcc_assert (name_to_id[version] == 0);
259 name_to_id[version] = expr->id;
263 slot = expression_to_id.find_slot (expr, INSERT);
267 return next_expression_id - 1;
270 /* Return the expression id for tree EXPR. */
272 static inline unsigned int
273 get_expression_id (const pre_expr expr)
278 static inline unsigned int
279 lookup_expression_id (const pre_expr expr)
281 struct pre_expr_d **slot;
283 if (expr->kind == NAME)
285 unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr));
286 if (name_to_id.length () <= version)
288 return name_to_id[version];
292 slot = expression_to_id.find_slot (expr, NO_INSERT);
295 return ((pre_expr)*slot)->id;
299 /* Return the existing expression id for EXPR, or create one if one
300 does not exist yet. */
302 static inline unsigned int
303 get_or_alloc_expression_id (pre_expr expr)
305 unsigned int id = lookup_expression_id (expr);
307 return alloc_expression_id (expr);
308 return expr->id = id;
311 /* Return the expression that has expression id ID */
313 static inline pre_expr
314 expression_for_id (unsigned int id)
316 return expressions[id];
319 /* Free the expression id field in all of our expressions,
320 and then destroy the expressions array. */
323 clear_expression_ids (void)
325 expressions.release ();
328 static alloc_pool pre_expr_pool;
330 /* Given an SSA_NAME NAME, get or create a pre_expr to represent it. */
333 get_or_alloc_expr_for_name (tree name)
335 struct pre_expr_d expr;
337 unsigned int result_id;
341 PRE_EXPR_NAME (&expr) = name;
342 result_id = lookup_expression_id (&expr);
344 return expression_for_id (result_id);
346 result = (pre_expr) pool_alloc (pre_expr_pool);
348 PRE_EXPR_NAME (result) = name;
349 alloc_expression_id (result);
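/* Illustrative usage of the id machinery above (a sketch, not code that
   is called anywhere): a pre_expr round-trips through its id, and NAMEs
   take the fast path through name_to_id rather than the hash table.

     pre_expr e = get_or_alloc_expr_for_name (name);
     unsigned id = get_or_alloc_expression_id (e);
     gcc_checking_assert (expression_for_id (id) == e);  */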
353 /* An unordered bitmap set. One bitmap tracks values, the other, expressions. */
355 typedef struct bitmap_set
357 bitmap_head expressions;
361 #define FOR_EACH_EXPR_ID_IN_SET(set, id, bi) \
362 EXECUTE_IF_SET_IN_BITMAP(&(set)->expressions, 0, (id), (bi))
364 #define FOR_EACH_VALUE_ID_IN_SET(set, id, bi) \
365 EXECUTE_IF_SET_IN_BITMAP(&(set)->values, 0, (id), (bi))
367 /* Mapping from value id to expressions with that value_id. */
368 static vec<bitmap> value_expressions;
370 /* Sets that we need to keep track of. */
371 typedef struct bb_bitmap_sets
373 /* The EXP_GEN set, which represents expressions/values generated in a basic block. */
375 bitmap_set_t exp_gen;
377 /* The PHI_GEN set, which represents PHI results generated in a basic block. */
379 bitmap_set_t phi_gen;
381 /* The TMP_GEN set, which represents results/temporaries generated
382 in a basic block. IE the LHS of an expression. */
383 bitmap_set_t tmp_gen;
385 /* The AVAIL_OUT set, which represents which values are available in
386 a given basic block. */
387 bitmap_set_t avail_out;
389 /* The ANTIC_IN set, which represents which values are anticipatable
390 in a given basic block. */
391 bitmap_set_t antic_in;
393 /* The PA_IN set, which represents which values are
394 partially anticipatable in a given basic block. */
397 /* The NEW_SETS set, which is used during insertion to augment the
398 AVAIL_OUT set of blocks with the new insertions performed during
399 the current iteration. */
400 bitmap_set_t new_sets;
402 /* A cache for value_dies_in_block_x. */
405 /* True if we have visited this block during ANTIC calculation. */
406 unsigned int visited : 1;
408 /* True if we have deferred processing this block during ANTIC
409 calculation until its successor is processed. */
410 unsigned int deferred : 1;
412 /* True when the block contains a call that might not return. */
413 unsigned int contains_may_not_return_call : 1;
416 #define EXP_GEN(BB) ((bb_value_sets_t) ((BB)->aux))->exp_gen
417 #define PHI_GEN(BB) ((bb_value_sets_t) ((BB)->aux))->phi_gen
418 #define TMP_GEN(BB) ((bb_value_sets_t) ((BB)->aux))->tmp_gen
419 #define AVAIL_OUT(BB) ((bb_value_sets_t) ((BB)->aux))->avail_out
420 #define ANTIC_IN(BB) ((bb_value_sets_t) ((BB)->aux))->antic_in
421 #define PA_IN(BB) ((bb_value_sets_t) ((BB)->aux))->pa_in
422 #define NEW_SETS(BB) ((bb_value_sets_t) ((BB)->aux))->new_sets
423 #define EXPR_DIES(BB) ((bb_value_sets_t) ((BB)->aux))->expr_dies
424 #define BB_VISITED(BB) ((bb_value_sets_t) ((BB)->aux))->visited
425 #define BB_DEFERRED(BB) ((bb_value_sets_t) ((BB)->aux))->deferred
426 #define BB_MAY_NOTRETURN(BB) ((bb_value_sets_t) ((BB)->aux))->contains_may_not_return_call
429 /* Basic block list in postorder. */
430 static int *postorder;
431 static int postorder_num;
433 /* This structure is used to keep track of statistics on what
434 optimization PRE was able to perform. */
437 /* The number of RHS computations eliminated by PRE. */
440 /* The number of new expressions/temporaries generated by PRE. */
443 /* The number of inserts found due to partial anticipation */
446 /* The number of new PHI nodes added by PRE. */
450 static bool do_partial_partial;
451 static pre_expr bitmap_find_leader (bitmap_set_t, unsigned int);
452 static void bitmap_value_insert_into_set (bitmap_set_t, pre_expr);
453 static void bitmap_value_replace_in_set (bitmap_set_t, pre_expr);
454 static void bitmap_set_copy (bitmap_set_t, bitmap_set_t);
455 static bool bitmap_set_contains_value (bitmap_set_t, unsigned int);
456 static void bitmap_insert_into_set (bitmap_set_t, pre_expr);
457 static void bitmap_insert_into_set_1 (bitmap_set_t, pre_expr,
459 static bitmap_set_t bitmap_set_new (void);
460 static tree create_expression_by_pieces (basic_block, pre_expr, gimple_seq *,
462 static tree find_or_generate_expression (basic_block, tree, gimple_seq *);
463 static unsigned int get_expr_value_id (pre_expr);
465 /* We can add and remove elements and entries to and from sets
466 and hash tables, so we use alloc pools for them. */
468 static alloc_pool bitmap_set_pool;
469 static bitmap_obstack grand_bitmap_obstack;
471 /* Set of blocks with statements that have had their EH properties changed. */
472 static bitmap need_eh_cleanup;
474 /* Set of blocks with statements that have had their AB properties changed. */
475 static bitmap need_ab_cleanup;
477 /* A three-tuple {e, pred, v} used to cache phi translations in the
478 phi_translate_table. */
480 typedef struct expr_pred_trans_d : typed_free_remove <expr_pred_trans_d>
482 /* The expression. */
485 /* The predecessor block along which we translated the expression. */
488 /* The value that resulted from the translation. */
491 /* The hashcode for the expression, pred pair. This is cached for speed reasons. */
495 /* hash_table support. */
496 typedef expr_pred_trans_d value_type;
497 typedef expr_pred_trans_d compare_type;
498 static inline hashval_t hash (const value_type *);
499 static inline int equal (const value_type *, const compare_type *);
500 } *expr_pred_trans_t;
501 typedef const struct expr_pred_trans_d *const_expr_pred_trans_t;
504 expr_pred_trans_d::hash (const expr_pred_trans_d *e)
510 expr_pred_trans_d::equal (const value_type *ve1,
511 const compare_type *ve2)
513 basic_block b1 = ve1->pred;
514 basic_block b2 = ve2->pred;
516 /* If they are not translations for the same basic block, they can't be equal. */
520 return pre_expr_d::equal (ve1->e, ve2->e);
523 /* The phi_translate_table caches phi translations for a given
524 expression and predecessor. */
525 static hash_table <expr_pred_trans_d> phi_translate_table;
527 /* Search in the phi translation table for the translation of
528 expression E in basic block PRED.
529 Return the translated value, if found, NULL otherwise. */
531 static inline pre_expr
532 phi_trans_lookup (pre_expr e, basic_block pred)
534 expr_pred_trans_t *slot;
535 struct expr_pred_trans_d ept;
539 ept.hashcode = iterative_hash_hashval_t (pre_expr_d::hash (e), pred->index);
540 slot = phi_translate_table.find_slot_with_hash (&ept, ept.hashcode,
549 /* Add the tuple mapping from {expression E, basic block PRED} to
550 value V, to the phi translation table. */
553 phi_trans_add (pre_expr e, pre_expr v, basic_block pred)
555 expr_pred_trans_t *slot;
556 expr_pred_trans_t new_pair = XNEW (struct expr_pred_trans_d);
558 new_pair->pred = pred;
560 new_pair->hashcode = iterative_hash_hashval_t (pre_expr_d::hash (e),
563 slot = phi_translate_table.find_slot_with_hash (new_pair,
564 new_pair->hashcode, INSERT);
570 /* Add expression E to the expression set of value id V. */
573 add_to_value (unsigned int v, pre_expr e)
577 gcc_checking_assert (get_expr_value_id (e) == v);
579 if (v >= value_expressions.length ())
581 value_expressions.safe_grow_cleared (v + 1);
584 set = value_expressions[v];
587 set = BITMAP_ALLOC (&grand_bitmap_obstack);
588 value_expressions[v] = set;
591 bitmap_set_bit (set, get_or_alloc_expression_id (e));
594 /* Create a new bitmap set and return it. */
597 bitmap_set_new (void)
599 bitmap_set_t ret
= (bitmap_set_t
) pool_alloc (bitmap_set_pool
);
600 bitmap_initialize (&ret
->expressions
, &grand_bitmap_obstack
);
601 bitmap_initialize (&ret
->values
, &grand_bitmap_obstack
);
605 /* Return the value id for a PRE expression EXPR. */
608 get_expr_value_id (pre_expr expr
)
614 id
= get_constant_value_id (PRE_EXPR_CONSTANT (expr
));
617 id
= VN_INFO (PRE_EXPR_NAME (expr
))->value_id
;
620 id
= PRE_EXPR_NARY (expr
)->value_id
;
623 id
= PRE_EXPR_REFERENCE (expr
)->value_id
;
628 /* ??? We cannot assert that expr has a value-id (it can be 0), because
629 we assign value-ids only to expressions that have a result
630 in set_hashtable_value_ids. */
634 /* Return a SCCVN valnum (SSA name or constant) for the PRE value-id VAL. */
637 sccvn_valnum_from_value_id (unsigned int val
)
641 bitmap exprset
= value_expressions
[val
];
642 EXECUTE_IF_SET_IN_BITMAP (exprset
, 0, i
, bi
)
644 pre_expr vexpr
= expression_for_id (i
);
645 if (vexpr
->kind
== NAME
)
646 return VN_INFO (PRE_EXPR_NAME (vexpr
))->valnum
;
647 else if (vexpr
->kind
== CONSTANT
)
648 return PRE_EXPR_CONSTANT (vexpr
);
653 /* Remove an expression EXPR from a bitmapped set. */
656 bitmap_remove_from_set (bitmap_set_t set
, pre_expr expr
)
658 unsigned int val
= get_expr_value_id (expr
);
659 if (!value_id_constant_p (val
))
661 bitmap_clear_bit (&set
->values
, val
);
662 bitmap_clear_bit (&set
->expressions
, get_expression_id (expr
));
667 bitmap_insert_into_set_1 (bitmap_set_t set
, pre_expr expr
,
668 unsigned int val
, bool allow_constants
)
670 if (allow_constants
|| !value_id_constant_p (val
))
672 /* We specifically expect this and only this function to be able to
673 insert constants into a set. */
674 bitmap_set_bit (&set
->values
, val
);
675 bitmap_set_bit (&set
->expressions
, get_or_alloc_expression_id (expr
));
679 /* Insert an expression EXPR into a bitmapped set. */
682 bitmap_insert_into_set (bitmap_set_t set
, pre_expr expr
)
684 bitmap_insert_into_set_1 (set
, expr
, get_expr_value_id (expr
), false);
687 /* Copy a bitmapped set ORIG, into bitmapped set DEST. */
690 bitmap_set_copy (bitmap_set_t dest
, bitmap_set_t orig
)
692 bitmap_copy (&dest
->expressions
, &orig
->expressions
);
693 bitmap_copy (&dest
->values
, &orig
->values
);
697 /* Free memory used up by SET. */
699 bitmap_set_free (bitmap_set_t set
)
701 bitmap_clear (&set
->expressions
);
702 bitmap_clear (&set
->values
);
706 /* Generate a topologically-ordered array from bitmap set SET. */
709 sorted_array_from_bitmap_set (bitmap_set_t set
)
712 bitmap_iterator bi
, bj
;
713 vec
<pre_expr
> result
;
715 /* Pre-allocate roughly enough space for the array. */
716 result
.create (bitmap_count_bits (&set
->values
));
718 FOR_EACH_VALUE_ID_IN_SET (set
, i
, bi
)
720 /* The number of expressions having a given value is usually
721 relatively small. Thus, rather than making a vector of all
722 the expressions and sorting it by value-id, we walk the values
723 and check in the reverse mapping that tells us what expressions
724 have a given value, to filter those in our set. As a result,
725 the expressions are inserted in value-id order, which means topological order.
728 If this is somehow a significant loss for some cases, we can
729 choose which set to walk based on the set size. */
730 bitmap exprset
= value_expressions
[i
];
731 EXECUTE_IF_SET_IN_BITMAP (exprset
, 0, j
, bj
)
733 if (bitmap_bit_p (&set
->expressions
, j
))
734 result
.safe_push (expression_for_id (j
));
741 /* Perform bitmapped set operation DEST &= ORIG. */
744 bitmap_set_and (bitmap_set_t dest
, bitmap_set_t orig
)
752 bitmap_initialize (&temp
, &grand_bitmap_obstack
);
754 bitmap_and_into (&dest
->values
, &orig
->values
);
755 bitmap_copy (&temp
, &dest
->expressions
);
756 EXECUTE_IF_SET_IN_BITMAP (&temp
, 0, i
, bi
)
758 pre_expr expr
= expression_for_id (i
);
759 unsigned int value_id
= get_expr_value_id (expr
);
760 if (!bitmap_bit_p (&dest
->values
, value_id
))
761 bitmap_clear_bit (&dest
->expressions
, i
);
763 bitmap_clear (&temp
);
767 /* Subtract all values and expressions contained in ORIG from DEST. */
770 bitmap_set_subtract (bitmap_set_t dest
, bitmap_set_t orig
)
772 bitmap_set_t result
= bitmap_set_new ();
776 bitmap_and_compl (&result
->expressions
, &dest
->expressions
,
779 FOR_EACH_EXPR_ID_IN_SET (result
, i
, bi
)
781 pre_expr expr
= expression_for_id (i
);
782 unsigned int value_id
= get_expr_value_id (expr
);
783 bitmap_set_bit (&result
->values
, value_id
);
789 /* Subtract all the values in bitmap set B from bitmap set A. */
792 bitmap_set_subtract_values (bitmap_set_t a
, bitmap_set_t b
)
798 bitmap_initialize (&temp
, &grand_bitmap_obstack
);
800 bitmap_copy (&temp
, &a
->expressions
);
801 EXECUTE_IF_SET_IN_BITMAP (&temp
, 0, i
, bi
)
803 pre_expr expr
= expression_for_id (i
);
804 if (bitmap_set_contains_value (b
, get_expr_value_id (expr
)))
805 bitmap_remove_from_set (a
, expr
);
807 bitmap_clear (&temp
);
811 /* Return true if bitmapped set SET contains the value VALUE_ID. */
814 bitmap_set_contains_value (bitmap_set_t set
, unsigned int value_id
)
816 if (value_id_constant_p (value_id
))
819 if (!set
|| bitmap_empty_p (&set
->expressions
))
822 return bitmap_bit_p (&set
->values
, value_id
);
826 bitmap_set_contains_expr (bitmap_set_t set
, const pre_expr expr
)
828 return bitmap_bit_p (&set
->expressions
, get_expression_id (expr
));
831 /* Replace an instance of value LOOKFOR with expression EXPR in SET. */
834 bitmap_set_replace_value (bitmap_set_t set
, unsigned int lookfor
,
841 if (value_id_constant_p (lookfor
))
844 if (!bitmap_set_contains_value (set
, lookfor
))
847 /* The number of expressions having a given value is usually
848 significantly less than the total number of expressions in SET.
849 Thus, rather than check, for each expression in SET, whether it
850 has the value LOOKFOR, we walk the reverse mapping that tells us
851 what expressions have a given value, and see if any of those
852 expressions are in our set. For large testcases, this is about
853 5-10x faster than walking the bitmap. If this is somehow a
854 significant loss for some cases, we can choose which set to walk
855 based on the set size. */
856 exprset
= value_expressions
[lookfor
];
857 EXECUTE_IF_SET_IN_BITMAP (exprset
, 0, i
, bi
)
859 if (bitmap_clear_bit (&set
->expressions
, i
))
861 bitmap_set_bit (&set
->expressions
, get_expression_id (expr
));
869 /* Return true if two bitmap sets are equal. */
872 bitmap_set_equal (bitmap_set_t a
, bitmap_set_t b
)
874 return bitmap_equal_p (&a
->values
, &b
->values
);
877 /* Replace an instance of EXPR's VALUE with EXPR in SET if it exists,
878 and add it otherwise. */
881 bitmap_value_replace_in_set (bitmap_set_t set
, pre_expr expr
)
883 unsigned int val
= get_expr_value_id (expr
);
885 if (bitmap_set_contains_value (set
, val
))
886 bitmap_set_replace_value (set
, val
, expr
);
888 bitmap_insert_into_set (set
, expr
);
891 /* Insert EXPR into SET if EXPR's value is not already present in the set. */
895 bitmap_value_insert_into_set (bitmap_set_t set
, pre_expr expr
)
897 unsigned int val
= get_expr_value_id (expr
);
899 gcc_checking_assert (expr
->id
== get_or_alloc_expression_id (expr
));
901 /* Constant values are always considered to be part of the set. */
902 if (value_id_constant_p (val
))
905 /* If the value membership changed, add the expression. */
906 if (bitmap_set_bit (&set
->values
, val
))
907 bitmap_set_bit (&set
->expressions
, expr
->id
);
910 /* Print out EXPR to outfile. */
913 print_pre_expr (FILE *outfile
, const pre_expr expr
)
918 print_generic_expr (outfile
, PRE_EXPR_CONSTANT (expr
), 0);
921 print_generic_expr (outfile
, PRE_EXPR_NAME (expr
), 0);
926 vn_nary_op_t nary
= PRE_EXPR_NARY (expr
);
927 fprintf (outfile
, "{%s,", tree_code_name
[nary
->opcode
]);
928 for (i
= 0; i
< nary
->length
; i
++)
930 print_generic_expr (outfile
, nary
->op
[i
], 0);
931 if (i
!= (unsigned) nary
->length
- 1)
932 fprintf (outfile
, ",");
934 fprintf (outfile
, "}");
940 vn_reference_op_t vro
;
942 vn_reference_t ref
= PRE_EXPR_REFERENCE (expr
);
943 fprintf (outfile
, "{");
945 ref
->operands
.iterate (i
, &vro
);
948 bool closebrace
= false;
949 if (vro
->opcode
!= SSA_NAME
950 && TREE_CODE_CLASS (vro
->opcode
) != tcc_declaration
)
952 fprintf (outfile
, "%s", tree_code_name
[vro
->opcode
]);
955 fprintf (outfile
, "<");
961 print_generic_expr (outfile
, vro
->op0
, 0);
964 fprintf (outfile
, ",");
965 print_generic_expr (outfile
, vro
->op1
, 0);
969 fprintf (outfile
, ",");
970 print_generic_expr (outfile
, vro
->op2
, 0);
974 fprintf (outfile
, ">");
975 if (i
!= ref
->operands
.length () - 1)
976 fprintf (outfile
, ",");
978 fprintf (outfile
, "}");
981 fprintf (outfile
, "@");
982 print_generic_expr (outfile
, ref
->vuse
, 0);
988 void debug_pre_expr (pre_expr
);
990 /* Like print_pre_expr but always prints to stderr. */
992 debug_pre_expr (pre_expr e
)
994 print_pre_expr (stderr
, e
);
995 fprintf (stderr
, "\n");
998 /* Print out SET to OUTFILE. */
1001 print_bitmap_set (FILE *outfile
, bitmap_set_t set
,
1002 const char *setname
, int blockindex
)
1004 fprintf (outfile
, "%s[%d] := { ", setname
, blockindex
);
1011 FOR_EACH_EXPR_ID_IN_SET (set
, i
, bi
)
1013 const pre_expr expr
= expression_for_id (i
);
1016 fprintf (outfile
, ", ");
1018 print_pre_expr (outfile
, expr
);
1020 fprintf (outfile
, " (%04d)", get_expr_value_id (expr
));
1023 fprintf (outfile
, " }\n");
1026 void debug_bitmap_set (bitmap_set_t
);
1029 debug_bitmap_set (bitmap_set_t set
)
1031 print_bitmap_set (stderr
, set
, "debug", 0);
1034 void debug_bitmap_sets_for (basic_block
);
1037 debug_bitmap_sets_for (basic_block bb
)
1039 print_bitmap_set (stderr
, AVAIL_OUT (bb
), "avail_out", bb
->index
);
1040 print_bitmap_set (stderr
, EXP_GEN (bb
), "exp_gen", bb
->index
);
1041 print_bitmap_set (stderr
, PHI_GEN (bb
), "phi_gen", bb
->index
);
1042 print_bitmap_set (stderr
, TMP_GEN (bb
), "tmp_gen", bb
->index
);
1043 print_bitmap_set (stderr
, ANTIC_IN (bb
), "antic_in", bb
->index
);
1044 if (do_partial_partial
)
1045 print_bitmap_set (stderr
, PA_IN (bb
), "pa_in", bb
->index
);
1046 print_bitmap_set (stderr
, NEW_SETS (bb
), "new_sets", bb
->index
);
1049 /* Print out the expressions that have VAL to OUTFILE. */
1052 print_value_expressions (FILE *outfile
, unsigned int val
)
1054 bitmap set
= value_expressions
[val
];
1059 sprintf (s
, "%04d", val
);
1060 x
.expressions
= *set
;
1061 print_bitmap_set (outfile
, &x
, s
, 0);
1067 debug_value_expressions (unsigned int val
)
1069 print_value_expressions (stderr
, val
);
1072 /* Given a CONSTANT, allocate a new CONSTANT type PRE_EXPR to represent it. */
1076 get_or_alloc_expr_for_constant (tree constant
)
1078 unsigned int result_id
;
1079 unsigned int value_id
;
1080 struct pre_expr_d expr
;
1083 expr
.kind
= CONSTANT
;
1084 PRE_EXPR_CONSTANT (&expr
) = constant
;
1085 result_id
= lookup_expression_id (&expr
);
1087 return expression_for_id (result_id
);
1089 newexpr
= (pre_expr
) pool_alloc (pre_expr_pool
);
1090 newexpr
->kind
= CONSTANT
;
1091 PRE_EXPR_CONSTANT (newexpr
) = constant
;
1092 alloc_expression_id (newexpr
);
1093 value_id
= get_or_alloc_constant_value_id (constant
);
1094 add_to_value (value_id
, newexpr
);
1098 /* Given a value id V, find the actual tree representing the constant
1099 value if there is one, and return it. Return NULL if we can't find one. */
1103 get_constant_for_value_id (unsigned int v
)
1105 if (value_id_constant_p (v
))
1109 bitmap exprset
= value_expressions
[v
];
1111 EXECUTE_IF_SET_IN_BITMAP (exprset
, 0, i
, bi
)
1113 pre_expr expr
= expression_for_id (i
);
1114 if (expr
->kind
== CONSTANT
)
1115 return PRE_EXPR_CONSTANT (expr
);
1121 /* Get or allocate a pre_expr for a piece of GIMPLE, and return it.
1122 Currently only supports constants and SSA_NAMES. */
1124 get_or_alloc_expr_for (tree t
)
1126 if (TREE_CODE (t
) == SSA_NAME
)
1127 return get_or_alloc_expr_for_name (t
);
1128 else if (is_gimple_min_invariant (t
))
1129 return get_or_alloc_expr_for_constant (t
);
1132 /* More complex expressions can result from SCCVN expression
1133 simplification that inserts values for them. As they all
1134 do not have VOPs they get handled by the nary ops struct. */
1135 vn_nary_op_t result
;
1136 unsigned int result_id
;
1137 vn_nary_op_lookup (t
, &result
);
1140 pre_expr e
= (pre_expr
) pool_alloc (pre_expr_pool
);
1142 PRE_EXPR_NARY (e
) = result
;
1143 result_id
= lookup_expression_id (e
);
1146 pool_free (pre_expr_pool
, e
);
1147 e
= expression_for_id (result_id
);
1150 alloc_expression_id (e
);
1157 /* Return the folded version of T if T, when folded, is a gimple
1158 min_invariant. Otherwise, return T. */
1161 fully_constant_expression (pre_expr e
)
1169 vn_nary_op_t nary
= PRE_EXPR_NARY (e
);
1170 switch (TREE_CODE_CLASS (nary
->opcode
))
1173 case tcc_comparison
:
1175 /* We have to go from trees to pre exprs to value ids to constants. */
1177 tree naryop0
= nary
->op
[0];
1178 tree naryop1
= nary
->op
[1];
1180 if (!is_gimple_min_invariant (naryop0
))
1182 pre_expr rep0
= get_or_alloc_expr_for (naryop0
);
1183 unsigned int vrep0
= get_expr_value_id (rep0
);
1184 tree const0
= get_constant_for_value_id (vrep0
);
1186 naryop0
= fold_convert (TREE_TYPE (naryop0
), const0
);
1188 if (!is_gimple_min_invariant (naryop1
))
1190 pre_expr rep1
= get_or_alloc_expr_for (naryop1
);
1191 unsigned int vrep1
= get_expr_value_id (rep1
);
1192 tree const1
= get_constant_for_value_id (vrep1
);
1194 naryop1
= fold_convert (TREE_TYPE (naryop1
), const1
);
1196 result
= fold_binary (nary
->opcode
, nary
->type
,
1198 if (result
&& is_gimple_min_invariant (result
))
1199 return get_or_alloc_expr_for_constant (result
);
1200 /* We might have simplified the expression to a
1201 SSA_NAME for example from x_1 * 1. But we cannot
1202 insert a PHI for x_1 unconditionally as x_1 might
1203 not be available readily. */
1207 if (nary
->opcode
!= REALPART_EXPR
1208 && nary
->opcode
!= IMAGPART_EXPR
1209 && nary
->opcode
!= VIEW_CONVERT_EXPR
)
1214 /* We have to go from trees to pre exprs to value ids to constants. */
1216 tree naryop0
= nary
->op
[0];
1217 tree const0
, result
;
1218 if (is_gimple_min_invariant (naryop0
))
1222 pre_expr rep0
= get_or_alloc_expr_for (naryop0
);
1223 unsigned int vrep0
= get_expr_value_id (rep0
);
1224 const0
= get_constant_for_value_id (vrep0
);
1229 tree type1
= TREE_TYPE (nary
->op
[0]);
1230 const0
= fold_convert (type1
, const0
);
1231 result
= fold_unary (nary
->opcode
, nary
->type
, const0
);
1233 if (result
&& is_gimple_min_invariant (result
))
1234 return get_or_alloc_expr_for_constant (result
);
1243 vn_reference_t ref
= PRE_EXPR_REFERENCE (e
);
1245 if ((folded
= fully_constant_vn_reference_p (ref
)))
1246 return get_or_alloc_expr_for_constant (folded
);
1255 /* Translate the VUSE backwards through phi nodes in PHIBLOCK, so that
1256 it has the value it would have in BLOCK. Set *SAME_VALID to true
1257 in case the new vuse doesn't change the value id of the OPERANDS. */
1260 translate_vuse_through_block (vec
<vn_reference_op_s
> operands
,
1261 alias_set_type set
, tree type
, tree vuse
,
1262 basic_block phiblock
,
1263 basic_block block
, bool *same_valid
)
1265 gimple phi
= SSA_NAME_DEF_STMT (vuse
);
1272 if (gimple_bb (phi
) != phiblock
)
1275 use_oracle
= ao_ref_init_from_vn_reference (&ref
, set
, type
, operands
);
1277 /* Use the alias-oracle to find either the PHI node in this block,
1278 the first VUSE used in this block that is equivalent to vuse or
1279 the first VUSE whose definition in this block kills the value. */
1280 if (gimple_code (phi
) == GIMPLE_PHI
)
1281 e
= find_edge (block
, phiblock
);
1282 else if (use_oracle
)
1283 while (!stmt_may_clobber_ref_p_1 (phi
, &ref
))
1285 vuse
= gimple_vuse (phi
);
1286 phi
= SSA_NAME_DEF_STMT (vuse
);
1287 if (gimple_bb (phi
) != phiblock
)
1289 if (gimple_code (phi
) == GIMPLE_PHI
)
1291 e
= find_edge (block
, phiblock
);
1302 bitmap visited
= NULL
;
1304 /* Try to find a vuse that dominates this phi node by skipping
1305 non-clobbering statements. */
1306 vuse
= get_continuation_for_phi (phi
, &ref
, &cnt
, &visited
, false);
1308 BITMAP_FREE (visited
);
1314 /* If we didn't find any, the value ID can't stay the same,
1315 but return the translated vuse. */
1316 *same_valid
= false;
1317 vuse
= PHI_ARG_DEF (phi
, e
->dest_idx
);
1319 /* ??? We would like to return vuse here as this is the canonical
1320 upmost vdef that this reference is associated with. But during
1321 insertion of the references into the hash tables we only ever
1322 directly insert with their direct gimple_vuse, hence returning
1323 something else would make us not find the other expression. */
1324 return PHI_ARG_DEF (phi
, e
->dest_idx
);
1330 /* Like bitmap_find_leader, but checks for the value existing in SET1 *or*
1331 SET2. This is used to avoid making a set consisting of the union
1332 of PA_IN and ANTIC_IN during insert. */
1334 static inline pre_expr
1335 find_leader_in_sets (unsigned int val
, bitmap_set_t set1
, bitmap_set_t set2
)
1339 result
= bitmap_find_leader (set1
, val
);
1340 if (!result
&& set2
)
1341 result
= bitmap_find_leader (set2
, val
);
1345 /* Get the tree type for our PRE expression e. */
1348 get_expr_type (const pre_expr e
)
1353 return TREE_TYPE (PRE_EXPR_NAME (e
));
1355 return TREE_TYPE (PRE_EXPR_CONSTANT (e
));
1357 return PRE_EXPR_REFERENCE (e
)->type
;
1359 return PRE_EXPR_NARY (e
)->type
;
1364 /* Get a representative SSA_NAME for a given expression.
1365 Since all of our sub-expressions are treated as values, we require
1366 them to be SSA_NAME's for simplicity.
1367 Prior versions of GVNPRE used to use "value handles" here, so that
1368 an expression would be VH.11 + VH.10 instead of d_3 + e_6. In
1369 either case, the operands are really values (IE we do not expect
1370 them to be usable without finding leaders). */
1373 get_representative_for (const pre_expr e
)
1376 unsigned int value_id
= get_expr_value_id (e
);
1381 return PRE_EXPR_NAME (e
);
1383 return PRE_EXPR_CONSTANT (e
);
1387 /* Go through all of the expressions representing this value
1388 and pick out an SSA_NAME. */
1391 bitmap exprs
= value_expressions
[value_id
];
1392 EXECUTE_IF_SET_IN_BITMAP (exprs
, 0, i
, bi
)
1394 pre_expr rep
= expression_for_id (i
);
1395 if (rep
->kind
== NAME
)
1396 return PRE_EXPR_NAME (rep
);
1397 else if (rep
->kind
== CONSTANT
)
1398 return PRE_EXPR_CONSTANT (rep
);
1404 /* If we reached here we couldn't find an SSA_NAME. This can
1405 happen when we've discovered a value that has never appeared in
1406 the program as set to an SSA_NAME, as the result of phi translation.
1408 ??? We should be able to re-use this when we insert the statement to compute it. */
1410 name
= make_temp_ssa_name (get_expr_type (e
), gimple_build_nop (), "pretmp");
1411 VN_INFO_GET (name
)->value_id
= value_id
;
1412 VN_INFO (name
)->valnum
= name
;
1413 /* ??? For now mark this SSA name for release by SCCVN. */
1414 VN_INFO (name
)->needs_insertion
= true;
1415 add_to_value (value_id
, get_or_alloc_expr_for_name (name
));
1416 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1418 fprintf (dump_file
, "Created SSA_NAME representative ");
1419 print_generic_expr (dump_file
, name
, 0);
1420 fprintf (dump_file
, " for expression:");
1421 print_pre_expr (dump_file
, e
);
1422 fprintf (dump_file
, "\n");
1431 phi_translate (pre_expr expr
, bitmap_set_t set1
, bitmap_set_t set2
,
1432 basic_block pred
, basic_block phiblock
);
1434 /* Translate EXPR using phis in PHIBLOCK, so that it has the values of
1435 the phis in PRED. Return NULL if we can't find a leader for each part
1436 of the translated expression. */
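/* For example (an added illustration, not from the original comment):
   with

     # a_1 = PHI <a_2 (pred), a_3 (other)>

   in PHIBLOCK, translating the expression a_1 + 5 towards PRED replaces
   a_1 by a_2, yielding a_2 + 5 (or a constant if that folds), i.e. the
   value the expression would have when computed in PRED.  */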
1439 phi_translate_1 (pre_expr expr
, bitmap_set_t set1
, bitmap_set_t set2
,
1440 basic_block pred
, basic_block phiblock
)
1447 bool changed
= false;
1448 vn_nary_op_t nary
= PRE_EXPR_NARY (expr
);
1449 vn_nary_op_t newnary
= XALLOCAVAR (struct vn_nary_op_s
,
1450 sizeof_vn_nary_op (nary
->length
));
1451 memcpy (newnary
, nary
, sizeof_vn_nary_op (nary
->length
));
1453 for (i
= 0; i
< newnary
->length
; i
++)
1455 if (TREE_CODE (newnary
->op
[i
]) != SSA_NAME
)
1459 pre_expr leader
, result
;
1460 unsigned int op_val_id
= VN_INFO (newnary
->op
[i
])->value_id
;
1461 leader
= find_leader_in_sets (op_val_id
, set1
, set2
);
1462 result
= phi_translate (leader
, set1
, set2
, pred
, phiblock
);
1463 if (result
&& result
!= leader
)
1465 tree name
= get_representative_for (result
);
1468 newnary
->op
[i
] = name
;
1473 changed
|= newnary
->op
[i
] != nary
->op
[i
];
1479 unsigned int new_val_id
;
1481 tree result
= vn_nary_op_lookup_pieces (newnary
->length
,
1486 if (result
&& is_gimple_min_invariant (result
))
1487 return get_or_alloc_expr_for_constant (result
);
1489 expr
= (pre_expr
) pool_alloc (pre_expr_pool
);
1494 PRE_EXPR_NARY (expr
) = nary
;
1495 constant
= fully_constant_expression (expr
);
1496 if (constant
!= expr
)
1499 new_val_id
= nary
->value_id
;
1500 get_or_alloc_expression_id (expr
);
1504 new_val_id
= get_next_value_id ();
1505 value_expressions
.safe_grow_cleared (get_max_value_id() + 1);
1506 nary
= vn_nary_op_insert_pieces (newnary
->length
,
1510 result
, new_val_id
);
1511 PRE_EXPR_NARY (expr
) = nary
;
1512 constant
= fully_constant_expression (expr
);
1513 if (constant
!= expr
)
1515 get_or_alloc_expression_id (expr
);
1517 add_to_value (new_val_id
, expr
);
1525 vn_reference_t ref
= PRE_EXPR_REFERENCE (expr
);
1526 vec
<vn_reference_op_s
> operands
= ref
->operands
;
1527 tree vuse
= ref
->vuse
;
1528 tree newvuse
= vuse
;
1529 vec
<vn_reference_op_s
> newoperands
= vNULL
;
1530 bool changed
= false, same_valid
= true;
1531 unsigned int i
, j
, n
;
1532 vn_reference_op_t operand
;
1533 vn_reference_t newref
;
1536 operands
.iterate (i
, &operand
); i
++, j
++)
1541 tree type
= operand
->type
;
1542 vn_reference_op_s newop
= *operand
;
1543 op
[0] = operand
->op0
;
1544 op
[1] = operand
->op1
;
1545 op
[2] = operand
->op2
;
1546 for (n
= 0; n
< 3; ++n
)
1548 unsigned int op_val_id
;
1551 if (TREE_CODE (op
[n
]) != SSA_NAME
)
1553 /* We can't possibly insert these. */
1555 && !is_gimple_min_invariant (op
[n
]))
1559 op_val_id
= VN_INFO (op
[n
])->value_id
;
1560 leader
= find_leader_in_sets (op_val_id
, set1
, set2
);
1563 /* Make sure we do not recursively translate ourselves
1564 like for translating a[n_1] with the leader for
1565 n_1 being a[n_1]. */
1566 if (get_expression_id (leader
) != get_expression_id (expr
))
1568 opresult
= phi_translate (leader
, set1
, set2
,
1572 if (opresult
!= leader
)
1574 tree name
= get_representative_for (opresult
);
1577 changed
|= name
!= op
[n
];
1584 newoperands
.release ();
1587 if (!newoperands
.exists ())
1588 newoperands
= operands
.copy ();
1589 /* We may have changed from an SSA_NAME to a constant */
1590 if (newop
.opcode
== SSA_NAME
&& TREE_CODE (op
[0]) != SSA_NAME
)
1591 newop
.opcode
= TREE_CODE (op
[0]);
1596 /* If it transforms a non-constant ARRAY_REF into a constant
1597 one, adjust the constant offset. */
1598 if (newop
.opcode
== ARRAY_REF
1600 && TREE_CODE (op
[0]) == INTEGER_CST
1601 && TREE_CODE (op
[1]) == INTEGER_CST
1602 && TREE_CODE (op
[2]) == INTEGER_CST
)
1604 double_int off
= tree_to_double_int (op
[0]);
1605 off
+= -tree_to_double_int (op
[1]);
1606 off
*= tree_to_double_int (op
[2]);
1607 if (off
.fits_shwi ())
1608 newop
.off
= off
.low
;
1610 newoperands
[j
] = newop
;
1611 /* If it transforms from an SSA_NAME to an address, fold with
1612 a preceding indirect reference. */
1613 if (j
> 0 && op
[0] && TREE_CODE (op
[0]) == ADDR_EXPR
1614 && newoperands
[j
- 1].opcode
== MEM_REF
)
1615 vn_reference_fold_indirect (&newoperands
, &j
);
1617 if (i
!= operands
.length ())
1619 newoperands
.release ();
1625 newvuse
= translate_vuse_through_block (newoperands
,
1626 ref
->set
, ref
->type
,
1627 vuse
, phiblock
, pred
,
1629 if (newvuse
== NULL_TREE
)
1631 newoperands
.release ();
1636 if (changed
|| newvuse
!= vuse
)
1638 unsigned int new_val_id
;
1641 tree result
= vn_reference_lookup_pieces (newvuse
, ref
->set
,
1646 newoperands
.release ();
1648 /* We can always insert constants, so if we have a partial
1649 redundant constant load of another type try to translate it
1650 to a constant of appropriate type. */
1651 if (result
&& is_gimple_min_invariant (result
))
1654 if (!useless_type_conversion_p (ref
->type
, TREE_TYPE (result
)))
1656 tem
= fold_unary (VIEW_CONVERT_EXPR
, ref
->type
, result
);
1657 if (tem
&& !is_gimple_min_invariant (tem
))
1661 return get_or_alloc_expr_for_constant (tem
);
1664 /* If we'd have to convert things we would need to validate
1665 if we can insert the translated expression. So fail
1666 here for now - we cannot insert an alias with a different
1667 type in the VN tables either, as that would assert. */
1669 && !useless_type_conversion_p (ref
->type
, TREE_TYPE (result
)))
1671 else if (!result
&& newref
1672 && !useless_type_conversion_p (ref
->type
, newref
->type
))
1674 newoperands
.release ();
1678 expr
= (pre_expr
) pool_alloc (pre_expr_pool
);
1679 expr
->kind
= REFERENCE
;
1684 PRE_EXPR_REFERENCE (expr
) = newref
;
1685 constant
= fully_constant_expression (expr
);
1686 if (constant
!= expr
)
1689 new_val_id
= newref
->value_id
;
1690 get_or_alloc_expression_id (expr
);
1694 if (changed
|| !same_valid
)
1696 new_val_id
= get_next_value_id ();
1697 value_expressions
.safe_grow_cleared(get_max_value_id() + 1);
1700 new_val_id
= ref
->value_id
;
1701 newref
= vn_reference_insert_pieces (newvuse
, ref
->set
,
1704 result
, new_val_id
);
1705 newoperands
.create (0);
1706 PRE_EXPR_REFERENCE (expr
) = newref
;
1707 constant
= fully_constant_expression (expr
);
1708 if (constant
!= expr
)
1710 get_or_alloc_expression_id (expr
);
1712 add_to_value (new_val_id
, expr
);
1714 newoperands
.release ();
1721 tree name
= PRE_EXPR_NAME (expr
);
1722 gimple def_stmt
= SSA_NAME_DEF_STMT (name
);
1723 /* If the SSA name is defined by a PHI node in this block, translate it. */
1725 if (gimple_code (def_stmt
) == GIMPLE_PHI
1726 && gimple_bb (def_stmt
) == phiblock
)
1728 edge e
= find_edge (pred
, gimple_bb (def_stmt
));
1729 tree def
= PHI_ARG_DEF (def_stmt
, e
->dest_idx
);
1731 /* Handle constant. */
1732 if (is_gimple_min_invariant (def
))
1733 return get_or_alloc_expr_for_constant (def
);
1735 return get_or_alloc_expr_for_name (def
);
1737 /* Otherwise return it unchanged - it will get cleaned if its
1738 value is not available in PREDs AVAIL_OUT set of expressions. */
1747 /* Wrapper around phi_translate_1 providing caching functionality. */
1750 phi_translate (pre_expr expr
, bitmap_set_t set1
, bitmap_set_t set2
,
1751 basic_block pred
, basic_block phiblock
)
1758 /* Constants contain no values that need translation. */
1759 if (expr
->kind
== CONSTANT
)
1762 if (value_id_constant_p (get_expr_value_id (expr
)))
1765 if (expr
->kind
!= NAME
)
1767 phitrans
= phi_trans_lookup (expr
, pred
);
1773 phitrans
= phi_translate_1 (expr
, set1
, set2
, pred
, phiblock
);
1775 /* Don't add empty translations to the cache. Neither add
1776 translations of NAMEs as those are cheap to translate. */
1778 && expr
->kind
!= NAME
)
1779 phi_trans_add (expr
, phitrans
, pred
);
1785 /* For each expression in SET, translate the values through phi nodes
1786 in PHIBLOCK using edge PHIBLOCK->PRED, and store the resulting
1787 expressions in DEST. */
1790 phi_translate_set (bitmap_set_t dest
, bitmap_set_t set
, basic_block pred
,
1791 basic_block phiblock
)
1793 vec
<pre_expr
> exprs
;
1797 if (gimple_seq_empty_p (phi_nodes (phiblock
)))
1799 bitmap_set_copy (dest
, set
);
1803 exprs
= sorted_array_from_bitmap_set (set
);
1804 FOR_EACH_VEC_ELT (exprs
, i
, expr
)
1806 pre_expr translated
;
1807 translated
= phi_translate (expr
, set
, NULL
, pred
, phiblock
);
1811 /* We might end up with multiple expressions from SET being
1812 translated to the same value. In this case we do not want
1813 to retain the NARY or REFERENCE expression but prefer a NAME
1814 which would be the leader. */
1815 if (translated
->kind
== NAME
)
1816 bitmap_value_replace_in_set (dest
, translated
);
1818 bitmap_value_insert_into_set (dest
, translated
);
1823 /* Find the leader for a value (i.e., the name representing that
1824 value) in a given set, and return it. If STMT is non-NULL it
1825 makes sure the defining statement for the leader dominates it.
1826 Return NULL if no leader is found. */
1829 bitmap_find_leader (bitmap_set_t set
, unsigned int val
)
1831 if (value_id_constant_p (val
))
1835 bitmap exprset
= value_expressions
[val
];
1837 EXECUTE_IF_SET_IN_BITMAP (exprset
, 0, i
, bi
)
1839 pre_expr expr
= expression_for_id (i
);
1840 if (expr
->kind
== CONSTANT
)
1844 if (bitmap_set_contains_value (set
, val
))
1846 /* Rather than walk the entire bitmap of expressions, and see
1847 whether any of them has the value we are looking for, we look
1848 at the reverse mapping, which tells us the set of expressions
1849 that have a given value (IE value->expressions with that
1850 value) and see if any of those expressions are in our set.
1851 The number of expressions per value is usually significantly
1852 less than the number of expressions in the set. In fact, for
1853 large testcases, doing it this way is roughly 5-10x faster
1854 than walking the bitmap.
1855 If this is somehow a significant loss for some cases, we can
1856 choose which set to walk based on which set is smaller. */
1859 bitmap exprset
= value_expressions
[val
];
1861 EXECUTE_IF_AND_IN_BITMAP (exprset
, &set
->expressions
, 0, i
, bi
)
1862 return expression_for_id (i
);
1867 /* Determine if EXPR, a memory expression, is ANTIC_IN at the top of
1868 BLOCK by seeing if it is not killed in the block. Note that we are
1869 only determining whether there is a store that kills it. Because
1870 of the order in which clean iterates over values, we are guaranteed
1871 that altered operands will have caused us to be eliminated from the
1872 ANTIC_IN set already. */
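/* For instance (illustrative only): if BLOCK contains

     *p_1 = 0;        <- may clobber *q_2
     ... = *q_2;

   then a memory expression {*q_2, VUSE} coming into the block dies here,
   because the store may change its value before an equivalent VUSE
   re-establishes it, so it must not remain in ANTIC_IN.  */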
1875 value_dies_in_block_x (pre_expr expr
, basic_block block
)
1877 tree vuse
= PRE_EXPR_REFERENCE (expr
)->vuse
;
1878 vn_reference_t refx
= PRE_EXPR_REFERENCE (expr
);
1880 gimple_stmt_iterator gsi
;
1881 unsigned id
= get_expression_id (expr
);
1888 /* Lookup a previously calculated result. */
1889 if (EXPR_DIES (block
)
1890 && bitmap_bit_p (EXPR_DIES (block
), id
* 2))
1891 return bitmap_bit_p (EXPR_DIES (block
), id
* 2 + 1);
1893 /* A memory expression {e, VUSE} dies in the block if there is a
1894 statement that may clobber e. If, starting statement walk from the
1895 top of the basic block, a statement uses VUSE there can be no kill
1896 in between that use and the original statement that loaded {e, VUSE},
1897 so we can stop walking. */
1898 ref
.base
= NULL_TREE
;
1899 for (gsi
= gsi_start_bb (block
); !gsi_end_p (gsi
); gsi_next (&gsi
))
1901 tree def_vuse
, def_vdef
;
1902 def
= gsi_stmt (gsi
);
1903 def_vuse
= gimple_vuse (def
);
1904 def_vdef
= gimple_vdef (def
);
1906 /* Not a memory statement. */
1910 /* Not a may-def. */
1913 /* A load with the same VUSE, we're done. */
1914 if (def_vuse
== vuse
)
1920 /* Init ref only if we really need it. */
1921 if (ref
.base
== NULL_TREE
1922 && !ao_ref_init_from_vn_reference (&ref
, refx
->set
, refx
->type
,
1928 /* If the statement may clobber expr, it dies. */
1929 if (stmt_may_clobber_ref_p_1 (def
, &ref
))
1936 /* Remember the result. */
1937 if (!EXPR_DIES (block
))
1938 EXPR_DIES (block
) = BITMAP_ALLOC (&grand_bitmap_obstack
);
1939 bitmap_set_bit (EXPR_DIES (block
), id
* 2);
1941 bitmap_set_bit (EXPR_DIES (block
), id
* 2 + 1);
1947 /* Determine if OP is valid in SET1 U SET2, which it is when the union
1948 contains its value-id. */
1951 op_valid_in_sets (bitmap_set_t set1
, bitmap_set_t set2
, tree op
)
1953 if (op
&& TREE_CODE (op
) == SSA_NAME
)
1955 unsigned int value_id
= VN_INFO (op
)->value_id
;
1956 if (!(bitmap_set_contains_value (set1
, value_id
)
1957 || (set2
&& bitmap_set_contains_value (set2
, value_id
))))
1963 /* Determine if the expression EXPR is valid in SET1 U SET2.
1964 ONLY SET2 CAN BE NULL.
1965 This means that we have a leader for each part of the expression
1966 (if it consists of values), or the expression is an SSA_NAME.
1967 For loads/calls, we also see if the vuse is killed in this block. */
1970 valid_in_sets (bitmap_set_t set1
, bitmap_set_t set2
, pre_expr expr
,
1976 return bitmap_find_leader (AVAIL_OUT (block
),
1977 get_expr_value_id (expr
)) != NULL
;
1981 vn_nary_op_t nary
= PRE_EXPR_NARY (expr
);
1982 for (i
= 0; i
< nary
->length
; i
++)
1983 if (!op_valid_in_sets (set1
, set2
, nary
->op
[i
]))
1990 vn_reference_t ref
= PRE_EXPR_REFERENCE (expr
);
1991 vn_reference_op_t vro
;
1994 FOR_EACH_VEC_ELT (ref
->operands
, i
, vro
)
1996 if (!op_valid_in_sets (set1
, set2
, vro
->op0
)
1997 || !op_valid_in_sets (set1
, set2
, vro
->op1
)
1998 || !op_valid_in_sets (set1
, set2
, vro
->op2
))
2008 /* Clean the set of expressions that are no longer valid in SET1 or
2009 SET2. This means expressions that are made up of values we have no
2010 leaders for in SET1 or SET2. This version is used for partial
2011 anticipation, which means it is not valid in either ANTIC_IN or
2015 dependent_clean (bitmap_set_t set1
, bitmap_set_t set2
, basic_block block
)
2017 vec
<pre_expr
> exprs
= sorted_array_from_bitmap_set (set1
);
2021 FOR_EACH_VEC_ELT (exprs
, i
, expr
)
2023 if (!valid_in_sets (set1
, set2
, expr
, block
))
2024 bitmap_remove_from_set (set1
, expr
);
2029 /* Clean the set of expressions that are no longer valid in SET. This
2030 means expressions that are made up of values we have no leaders for in SET. */
2034 clean (bitmap_set_t set
, basic_block block
)
2036 vec
<pre_expr
> exprs
= sorted_array_from_bitmap_set (set
);
2040 FOR_EACH_VEC_ELT (exprs
, i
, expr
)
2042 if (!valid_in_sets (set
, NULL
, expr
, block
))
2043 bitmap_remove_from_set (set
, expr
);
2048 /* Clean the set of expressions that are no longer valid in SET because
2049 they are clobbered in BLOCK or because they trap and may not be executed. */
2052 prune_clobbered_mems (bitmap_set_t set
, basic_block block
)
2057 FOR_EACH_EXPR_ID_IN_SET (set
, i
, bi
)
2059 pre_expr expr
= expression_for_id (i
);
2060 if (expr
->kind
== REFERENCE
)
2062 vn_reference_t ref
= PRE_EXPR_REFERENCE (expr
);
2065 gimple def_stmt
= SSA_NAME_DEF_STMT (ref
->vuse
);
2066 if (!gimple_nop_p (def_stmt
)
2067 && ((gimple_bb (def_stmt
) != block
2068 && !dominated_by_p (CDI_DOMINATORS
,
2069 block
, gimple_bb (def_stmt
)))
2070 || (gimple_bb (def_stmt
) == block
2071 && value_dies_in_block_x (expr
, block
))))
2072 bitmap_remove_from_set (set
, expr
);
2075 else if (expr
->kind
== NARY
)
2077 vn_nary_op_t nary
= PRE_EXPR_NARY (expr
);
2078 /* If the NARY may trap make sure the block does not contain
2079 a possible exit point.
2080 ??? This is overly conservative if we translate AVAIL_OUT
2081 as the available expression might be after the exit point. */
2082 if (BB_MAY_NOTRETURN (block
)
2083 && vn_nary_may_trap (nary
))
2084 bitmap_remove_from_set (set
, expr
);
2089 static sbitmap has_abnormal_preds
;
2091 /* List of blocks that may have changed during ANTIC computation and
2092 thus need to be iterated over. */
2094 static sbitmap changed_blocks
;
2096 /* Decide whether to defer a block for a later iteration, or PHI
2097 translate SOURCE to DEST using phis in PHIBLOCK. Return false if we
2098 should defer the block, and true if we processed it. */
2101 defer_or_phi_translate_block (bitmap_set_t dest
, bitmap_set_t source
,
2102 basic_block block
, basic_block phiblock
)
2104 if (!BB_VISITED (phiblock
))
2106 bitmap_set_bit (changed_blocks
, block
->index
);
2107 BB_VISITED (block
) = 0;
2108 BB_DEFERRED (block
) = 1;
2112 phi_translate_set (dest
, source
, block
, phiblock
);
2116 /* Compute the ANTIC set for BLOCK.
2118 If succs(BLOCK) > 1 then
2119 ANTIC_OUT[BLOCK] = intersection of ANTIC_IN[b] for all succ(BLOCK)
2120 else if succs(BLOCK) == 1 then
2121 ANTIC_OUT[BLOCK] = phi_translate (ANTIC_IN[succ(BLOCK)])
2123 ANTIC_IN[BLOCK] = clean(ANTIC_OUT[BLOCK] U EXP_GEN[BLOCK] - TMP_GEN[BLOCK])
2127 compute_antic_aux (basic_block block
, bool block_has_abnormal_pred_edge
)
2129 bool changed
= false;
2130 bitmap_set_t S
, old
, ANTIC_OUT
;
2136 old
= ANTIC_OUT
= S
= NULL
;
2137 BB_VISITED (block
) = 1;
2139 /* If any edges from predecessors are abnormal, antic_in is empty, so do nothing. */
2141 if (block_has_abnormal_pred_edge
)
2142 goto maybe_dump_sets
;
2144 old
= ANTIC_IN (block
);
2145 ANTIC_OUT
= bitmap_set_new ();
2147 /* If the block has no successors, ANTIC_OUT is empty. */
2148 if (EDGE_COUNT (block
->succs
) == 0)
2150 /* If we have one successor, we could have some phi nodes to
2151 translate through. */
2152 else if (single_succ_p (block
))
2154 basic_block succ_bb
= single_succ (block
);
2156 /* We trade iterations of the dataflow equations for having to
2157 phi translate the maximal set, which is incredibly slow
2158 (since the maximal set often has 300+ members, even when you
2159 have a small number of blocks).
2160 Basically, we defer the computation of ANTIC for this block
2161 until we have processed its successor, which will inevitably
2162 have a *much* smaller set of values to phi translate once
2163 clean has been run on it.
2164 The cost of doing this is that we technically perform more
2165 iterations, however, they are lower cost iterations.
2167 Timings for PRE on tramp3d-v4:
2168 without maximal set fix: 11 seconds
2169 with maximal set fix/without deferring: 26 seconds
2170 with maximal set fix/with deferring: 11 seconds
2173 if (!defer_or_phi_translate_block (ANTIC_OUT
, ANTIC_IN (succ_bb
),
2177 goto maybe_dump_sets
;
2180 /* If we have multiple successors, we take the intersection of all of
2181 them. Note that in the case of loop exit phi nodes, we may have
2182 phis to translate through. */
2185 vec
<basic_block
> worklist
;
2187 basic_block bprime
, first
= NULL
;
2189 worklist
.create (EDGE_COUNT (block
->succs
));
2190 FOR_EACH_EDGE (e
, ei
, block
->succs
)
2193 && BB_VISITED (e
->dest
))
2195 else if (BB_VISITED (e
->dest
))
2196 worklist
.quick_push (e
->dest
);
2199 /* Of multiple successors we have to have visited one already. */
2202 bitmap_set_bit (changed_blocks
, block
->index
);
2203 BB_VISITED (block
) = 0;
2204 BB_DEFERRED (block
) = 1;
2206 worklist
.release ();
2207 goto maybe_dump_sets
;
2210 if (!gimple_seq_empty_p (phi_nodes (first
)))
2211 phi_translate_set (ANTIC_OUT
, ANTIC_IN (first
), block
, first
);
2213 bitmap_set_copy (ANTIC_OUT
, ANTIC_IN (first
));
2215 FOR_EACH_VEC_ELT (worklist
, i
, bprime
)
2217 if (!gimple_seq_empty_p (phi_nodes (bprime
)))
2219 bitmap_set_t tmp
= bitmap_set_new ();
2220 phi_translate_set (tmp
, ANTIC_IN (bprime
), block
, bprime
);
2221 bitmap_set_and (ANTIC_OUT
, tmp
);
2222 bitmap_set_free (tmp
);
2225 bitmap_set_and (ANTIC_OUT
, ANTIC_IN (bprime
));
2227 worklist
.release ();
2230 /* Prune expressions that are clobbered in block and thus become
2231 invalid if translated from ANTIC_OUT to ANTIC_IN. */
2232 prune_clobbered_mems (ANTIC_OUT
, block
);
2234 /* Generate ANTIC_OUT - TMP_GEN. */
2235 S
= bitmap_set_subtract (ANTIC_OUT
, TMP_GEN (block
));
2237 /* Start ANTIC_IN with EXP_GEN - TMP_GEN. */
2238 ANTIC_IN (block
) = bitmap_set_subtract (EXP_GEN (block
),
2241 /* Then union in the ANTIC_OUT - TMP_GEN values,
2242 to get ANTIC_OUT U EXP_GEN - TMP_GEN */
2243 FOR_EACH_EXPR_ID_IN_SET (S
, bii
, bi
)
2244 bitmap_value_insert_into_set (ANTIC_IN (block
),
2245 expression_for_id (bii
));
2247 clean (ANTIC_IN (block
), block
);
2249 if (!bitmap_set_equal (old
, ANTIC_IN (block
)))
2252 bitmap_set_bit (changed_blocks
, block
->index
);
2253 FOR_EACH_EDGE (e
, ei
, block
->preds
)
2254 bitmap_set_bit (changed_blocks
, e
->src
->index
);
2257 bitmap_clear_bit (changed_blocks
, block
->index
);
2260 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2262 if (!BB_DEFERRED (block
) || BB_VISITED (block
))
2265 print_bitmap_set (dump_file
, ANTIC_OUT
, "ANTIC_OUT", block
->index
);
2267 print_bitmap_set (dump_file
, ANTIC_IN (block
), "ANTIC_IN",
2271 print_bitmap_set (dump_file
, S
, "S", block
->index
);
2276 "Block %d was deferred for a future iteration.\n",
2281 bitmap_set_free (old
);
2283 bitmap_set_free (S
);
2285 bitmap_set_free (ANTIC_OUT
);
2289 /* Compute PARTIAL_ANTIC for BLOCK.
2291 If succs(BLOCK) > 1 then
2292 PA_OUT[BLOCK] = value-wise union of PA_IN[b] + all ANTIC_IN not
2293 in ANTIC_OUT for all succ(BLOCK)
2294 else if succs(BLOCK) == 1 then
2295 PA_OUT[BLOCK] = phi_translate (PA_IN[succ(BLOCK)])
2297 PA_IN[BLOCK] = dependent_clean(PA_OUT[BLOCK] - TMP_GEN[BLOCK] - ANTIC_IN[BLOCK])
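   As an illustrative source-level example (a sketch, not derived from
   the code below): in

     if (cond)
       y = a + b;
     else
       y = 0;

   a + b is anticipatable along only one successor path of the block
   containing the condition, so it ends up in PA_OUT/PA_IN rather than
   in the full ANTIC sets, and is only acted on later by the
   partial-partial insertion phase.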
2302 compute_partial_antic_aux (basic_block block
,
2303 bool block_has_abnormal_pred_edge
)
2305 bool changed
= false;
2306 bitmap_set_t old_PA_IN
;
2307 bitmap_set_t PA_OUT
;
2310 unsigned long max_pa
= PARAM_VALUE (PARAM_MAX_PARTIAL_ANTIC_LENGTH
);
2312 old_PA_IN
= PA_OUT
= NULL
;
2314 /* If any edges from predecessors are abnormal, antic_in is empty,
2316 if (block_has_abnormal_pred_edge
)
2317 goto maybe_dump_sets
;
2319 /* If there are too many partially anticipatable values in the
2320 block, phi_translate_set can take an exponential time: stop
2321 before the translation starts. */
2323 && single_succ_p (block
)
2324 && bitmap_count_bits (&PA_IN (single_succ (block
))->values
) > max_pa
)
2325 goto maybe_dump_sets
;
2327 old_PA_IN
= PA_IN (block
);
2328 PA_OUT
= bitmap_set_new ();
2330 /* If the block has no successors, ANTIC_OUT is empty. */
2331 if (EDGE_COUNT (block
->succs
) == 0)
2333 /* If we have one successor, we could have some phi nodes to
2334 translate through. Note that we can't phi translate across DFS
2335 back edges in partial antic, because it uses a union operation on
2336 the successors. For recurrences like IV's, we will end up
2337 generating a new value in the set on each go around (i + 3 (VH.1)
2338 VH.1 + 1 (VH.2), VH.2 + 1 (VH.3), etc), forever. */
2339 else if (single_succ_p (block
))
2341 basic_block succ
= single_succ (block
);
2342 if (!(single_succ_edge (block
)->flags
& EDGE_DFS_BACK
))
2343 phi_translate_set (PA_OUT
, PA_IN (succ
), block
, succ
);
2345 /* If we have multiple successors, we take the union of all of
2349 vec
<basic_block
> worklist
;
2353 worklist
.create (EDGE_COUNT (block
->succs
));
2354 FOR_EACH_EDGE (e
, ei
, block
->succs
)
2356 if (e
->flags
& EDGE_DFS_BACK
)
2358 worklist
.quick_push (e
->dest
);
2360 if (worklist
.length () > 0)
2362 FOR_EACH_VEC_ELT (worklist
, i
, bprime
)
2367 FOR_EACH_EXPR_ID_IN_SET (ANTIC_IN (bprime
), i
, bi
)
2368 bitmap_value_insert_into_set (PA_OUT
,
2369 expression_for_id (i
));
2370 if (!gimple_seq_empty_p (phi_nodes (bprime
)))
2372 bitmap_set_t pa_in
= bitmap_set_new ();
2373 phi_translate_set (pa_in
, PA_IN (bprime
), block
, bprime
);
2374 FOR_EACH_EXPR_ID_IN_SET (pa_in
, i
, bi
)
2375 bitmap_value_insert_into_set (PA_OUT
,
2376 expression_for_id (i
));
2377 bitmap_set_free (pa_in
);
2380 FOR_EACH_EXPR_ID_IN_SET (PA_IN (bprime
), i
, bi
)
2381 bitmap_value_insert_into_set (PA_OUT
,
2382 expression_for_id (i
));
2385 worklist
.release ();
2388 /* Prune expressions that are clobbered in block and thus become
2389 invalid if translated from PA_OUT to PA_IN. */
2390 prune_clobbered_mems (PA_OUT
, block
);
2392 /* PA_IN starts with PA_OUT - TMP_GEN.
2393 Then we subtract things from ANTIC_IN. */
2394 PA_IN (block
) = bitmap_set_subtract (PA_OUT
, TMP_GEN (block
));
2396 /* For partial antic, we want to put back in the phi results, since
2397 we will properly avoid making them partially antic over backedges. */
2398 bitmap_ior_into (&PA_IN (block
)->values
, &PHI_GEN (block
)->values
);
2399 bitmap_ior_into (&PA_IN (block
)->expressions
, &PHI_GEN (block
)->expressions
);
2401 /* PA_IN[block] = PA_IN[block] - ANTIC_IN[block] */
2402 bitmap_set_subtract_values (PA_IN (block
), ANTIC_IN (block
));
2404 dependent_clean (PA_IN (block
), ANTIC_IN (block
), block
);
2406 if (!bitmap_set_equal (old_PA_IN
, PA_IN (block
)))
2409 bitmap_set_bit (changed_blocks
, block
->index
);
2410 FOR_EACH_EDGE (e
, ei
, block
->preds
)
2411 bitmap_set_bit (changed_blocks
, e
->src
->index
);
2414 bitmap_clear_bit (changed_blocks
, block
->index
);
2417 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2420 print_bitmap_set (dump_file
, PA_OUT
, "PA_OUT", block
->index
);
2422 print_bitmap_set (dump_file
, PA_IN (block
), "PA_IN", block
->index
);
2425 bitmap_set_free (old_PA_IN
);
2427 bitmap_set_free (PA_OUT
);
2431 /* Compute ANTIC and partial ANTIC sets. */
2434 compute_antic (void)
2436 bool changed
= true;
2437 int num_iterations
= 0;
2441 /* If any predecessor edges are abnormal, we punt, so antic_in is empty.
2442 We pre-build the map of blocks with incoming abnormal edges here. */
2443 has_abnormal_preds
= sbitmap_alloc (last_basic_block
);
2444 bitmap_clear (has_abnormal_preds
);
2451 FOR_EACH_EDGE (e
, ei
, block
->preds
)
2453 e
->flags
&= ~EDGE_DFS_BACK
;
2454 if (e
->flags
& EDGE_ABNORMAL
)
2456 bitmap_set_bit (has_abnormal_preds
, block
->index
);
2461 BB_VISITED (block
) = 0;
2462 BB_DEFERRED (block
) = 0;
2464 /* While we are here, give empty ANTIC_IN sets to each block. */
2465 ANTIC_IN (block
) = bitmap_set_new ();
2466 PA_IN (block
) = bitmap_set_new ();
2469 /* At the exit block we anticipate nothing. */
2470 BB_VISITED (EXIT_BLOCK_PTR
) = 1;
2472 changed_blocks
= sbitmap_alloc (last_basic_block
+ 1);
2473 bitmap_ones (changed_blocks
);
2476 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2477 fprintf (dump_file
, "Starting iteration %d\n", num_iterations
);
2478 /* ??? We need to clear our PHI translation cache here as the
2479 ANTIC sets shrink and we restrict valid translations to
2480 those having operands with leaders in ANTIC. Same below
2481 for PA ANTIC computation. */
2484 for (i
= postorder_num
- 1; i
>= 0; i
--)
2486 if (bitmap_bit_p (changed_blocks
, postorder
[i
]))
2488 basic_block block
= BASIC_BLOCK (postorder
[i
]);
2489 changed
|= compute_antic_aux (block
,
2490 bitmap_bit_p (has_abnormal_preds
,
2494 /* Theoretically possible, but *highly* unlikely. */
2495 gcc_checking_assert (num_iterations
< 500);
2498 statistics_histogram_event (cfun
, "compute_antic iterations",
2501 if (do_partial_partial
)
2503 bitmap_ones (changed_blocks
);
2504 mark_dfs_back_edges ();
2509 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2510 fprintf (dump_file
, "Starting iteration %d\n", num_iterations
);
2513 for (i
= postorder_num
- 1 ; i
>= 0; i
--)
2515 if (bitmap_bit_p (changed_blocks
, postorder
[i
]))
2517 basic_block block
= BASIC_BLOCK (postorder
[i
]);
2519 |= compute_partial_antic_aux (block
,
2520 bitmap_bit_p (has_abnormal_preds
,
2524 /* Theoretically possible, but *highly* unlikely. */
2525 gcc_checking_assert (num_iterations
< 500);
2527 statistics_histogram_event (cfun
, "compute_partial_antic iterations",
2530 sbitmap_free (has_abnormal_preds
);
2531 sbitmap_free (changed_blocks
);
2535 /* Inserted expressions are placed onto this worklist, which is used
2536 for performing quick dead code elimination of insertions we made
2537 that didn't turn out to be necessary. */
2538 static bitmap inserted_exprs
;
2540 /* The actual worker for create_component_ref_by_pieces. */
2543 create_component_ref_by_pieces_1 (basic_block block
, vn_reference_t ref
,
2544 unsigned int *operand
, gimple_seq
*stmts
)
2546 vn_reference_op_t currop
= &ref
->operands
[*operand
];
2549 switch (currop
->opcode
)
2553 tree folded
, sc
= NULL_TREE
;
2554 unsigned int nargs
= 0;
2556 if (TREE_CODE (currop
->op0
) == FUNCTION_DECL
)
2559 fn
= find_or_generate_expression (block
, currop
->op0
, stmts
);
2564 sc
= find_or_generate_expression (block
, currop
->op1
, stmts
);
2568 args
= XNEWVEC (tree
, ref
->operands
.length () - 1);
2569 while (*operand
< ref
->operands
.length ())
2571 args
[nargs
] = create_component_ref_by_pieces_1 (block
, ref
,
2577 folded
= build_call_array (currop
->type
,
2578 (TREE_CODE (fn
) == FUNCTION_DECL
2579 ? build_fold_addr_expr (fn
) : fn
),
2583 CALL_EXPR_STATIC_CHAIN (folded
) = sc
;
2589 tree baseop
= create_component_ref_by_pieces_1 (block
, ref
, operand
,
2593 tree offset
= currop
->op0
;
2594 if (TREE_CODE (baseop
) == ADDR_EXPR
2595 && handled_component_p (TREE_OPERAND (baseop
, 0)))
2599 base
= get_addr_base_and_unit_offset (TREE_OPERAND (baseop
, 0),
2602 offset
= int_const_binop (PLUS_EXPR
, offset
,
2603 build_int_cst (TREE_TYPE (offset
),
2605 baseop
= build_fold_addr_expr (base
);
2607 return fold_build2 (MEM_REF
, currop
->type
, baseop
, offset
);
2610 case TARGET_MEM_REF
:
2612 tree genop0
= NULL_TREE
, genop1
= NULL_TREE
;
2613 vn_reference_op_t nextop
= &ref
->operands
[++*operand
];
2614 tree baseop
= create_component_ref_by_pieces_1 (block
, ref
, operand
,
2620 genop0
= find_or_generate_expression (block
, currop
->op0
, stmts
);
2626 genop1
= find_or_generate_expression (block
, nextop
->op0
, stmts
);
2630 return build5 (TARGET_MEM_REF
, currop
->type
,
2631 baseop
, currop
->op2
, genop0
, currop
->op1
, genop1
);
2637 gcc_assert (is_gimple_min_invariant (currop
->op0
));
2643 case VIEW_CONVERT_EXPR
:
2645 tree genop0
= create_component_ref_by_pieces_1 (block
, ref
, operand
,
2649 return fold_build1 (currop
->opcode
, currop
->type
, genop0
);
2652 case WITH_SIZE_EXPR
:
2654 tree genop0
= create_component_ref_by_pieces_1 (block
, ref
, operand
,
2658 tree genop1
= find_or_generate_expression (block
, currop
->op0
, stmts
);
2661 return fold_build2 (currop
->opcode
, currop
->type
, genop0
, genop1
);
2666 tree genop0
= create_component_ref_by_pieces_1 (block
, ref
, operand
,
2670 tree op1
= currop
->op0
;
2671 tree op2
= currop
->op1
;
2672 return fold_build3 (BIT_FIELD_REF
, currop
->type
, genop0
, op1
, op2
);
2675 /* For array ref vn_reference_op's, operand 1 of the array ref
2676 is op0 of the reference op and operand 3 of the array ref is op1. */
2678 case ARRAY_RANGE_REF
:
2682 tree genop1
= currop
->op0
;
2683 tree genop2
= currop
->op1
;
2684 tree genop3
= currop
->op2
;
2685 genop0
= create_component_ref_by_pieces_1 (block
, ref
, operand
,
2689 genop1
= find_or_generate_expression (block
, genop1
, stmts
);
2694 tree domain_type
= TYPE_DOMAIN (TREE_TYPE (genop0
));
2695 /* Drop zero minimum index if redundant. */
2696 if (integer_zerop (genop2
)
2698 || integer_zerop (TYPE_MIN_VALUE (domain_type
))))
2702 genop2
= find_or_generate_expression (block
, genop2
, stmts
);
2709 tree elmt_type
= TREE_TYPE (TREE_TYPE (genop0
));
2710 /* We can't always put a size in units of the element alignment
2711 here as the element alignment may not be visible. See
2712 PR43783. Simply drop the element size for constant
2714 if (tree_int_cst_equal (genop3
, TYPE_SIZE_UNIT (elmt_type
)))
2718 genop3
= size_binop (EXACT_DIV_EXPR
, genop3
,
2719 size_int (TYPE_ALIGN_UNIT (elmt_type
)));
2720 genop3
= find_or_generate_expression (block
, genop3
, stmts
);
2725 return build4 (currop
->opcode
, currop
->type
, genop0
, genop1
,
2732 tree genop2
= currop
->op1
;
2733 op0
= create_component_ref_by_pieces_1 (block
, ref
, operand
, stmts
);
2736 /* op1 should be a FIELD_DECL, which are represented by themselves. */
2740 genop2
= find_or_generate_expression (block
, genop2
, stmts
);
2744 return fold_build3 (COMPONENT_REF
, TREE_TYPE (op1
), op0
, op1
, genop2
);
2749 genop
= find_or_generate_expression (block
, currop
->op0
, stmts
);
2770 /* For COMPONENT_REF's and ARRAY_REF's, we can't have any intermediates for the
2771 COMPONENT_REF or MEM_REF or ARRAY_REF portion, because we'd end up
2772 trying to rename aggregates into ssa form directly, which is a no-no.
2774 Thus, this routine doesn't create temporaries, it just builds a
2775 single access expression for the array, calling
2776 find_or_generate_expression to build the innermost pieces.
2778 This function is a subroutine of create_expression_by_pieces, and
2779 should not be called on its own unless you really know what you are doing. */
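/* Illustrative sketch (an assumed example, not taken from the sources):
   for a reference such as

     x = a.f[i].g;

   the recorded vn_reference operands are reassembled into one access
   tree (COMPONENT_REF/ARRAY_REF around the base), and only leaf
   operands such as the index i go through find_or_generate_expression
   and may cause separate statements to be emitted.  */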
2783 create_component_ref_by_pieces (basic_block block
, vn_reference_t ref
,
2786 unsigned int op
= 0;
2787 return create_component_ref_by_pieces_1 (block
, ref
, &op
, stmts
);
2790 /* Find a simple leader for an expression, or generate one using
2791 create_expression_by_pieces from a NARY expression for the value.
2792 BLOCK is the basic_block we are looking for leaders in.
2793 OP is the tree expression to find a leader for or generate.
2794 Returns the leader or NULL_TREE on failure. */
2797 find_or_generate_expression (basic_block block
, tree op
, gimple_seq
*stmts
)
2799 pre_expr expr
= get_or_alloc_expr_for (op
);
2800 unsigned int lookfor
= get_expr_value_id (expr
);
2801 pre_expr leader
= bitmap_find_leader (AVAIL_OUT (block
), lookfor
);
2804 if (leader
->kind
== NAME
)
2805 return PRE_EXPR_NAME (leader
);
2806 else if (leader
->kind
== CONSTANT
)
2807 return PRE_EXPR_CONSTANT (leader
);
2813 /* It must be a complex expression, so generate it recursively. Note
2814 that this is only necessary to handle gcc.dg/tree-ssa/ssa-pre28.c
2815 where the insert algorithm fails to insert a required expression. */
2816 bitmap exprset
= value_expressions
[lookfor
];
2819 EXECUTE_IF_SET_IN_BITMAP (exprset
, 0, i
, bi
)
2821 pre_expr temp
= expression_for_id (i
);
2822 /* We cannot insert random REFERENCE expressions at arbitrary
2823 places. We can insert NARYs which eventually re-materialize
2824 their operand values. */
2825 if (temp
->kind
== NARY
)
2826 return create_expression_by_pieces (block
, temp
, stmts
,
2827 get_expr_type (expr
));
2834 #define NECESSARY GF_PLF_1
2836 /* Create an expression in pieces, so that we can handle very complex
2837 expressions that may be ANTIC, but not necessarily GIMPLE.
2838 BLOCK is the basic block the expression will be inserted into,
2839 EXPR is the expression to insert (in value form)
2840 STMTS is a statement list to append the necessary insertions into.
2842 This function will die if we hit some value that shouldn't be
2843 ANTIC but is (i.e. there is no leader for it, or its components).
2844 The function returns NULL_TREE in case a different antic expression
2845 has to be inserted first.
2846 This function may also generate expressions that are themselves
2847 partially or fully redundant. Those that are will be either made
2848 fully redundant during the next iteration of insert (for partially
2849 redundant ones), or eliminated by eliminate (for fully redundant ones). */
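/* Hedged illustrative example (the SSA names are made up): for the
   value expression {plus_expr, a_1, b_2} this function would emit
   something like

     pretmp_5 = a_1 + b_2;

   into STMTS, give pretmp_5 the value number of the original
   expression, and return pretmp_5 so that the caller can use it as a
   PHI argument or as a replacement leader.  */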
2853 create_expression_by_pieces (basic_block block
, pre_expr expr
,
2854 gimple_seq
*stmts
, tree type
)
2858 gimple_seq forced_stmts
= NULL
;
2859 unsigned int value_id
;
2860 gimple_stmt_iterator gsi
;
2861 tree exprtype
= type
? type
: get_expr_type (expr
);
2867 /* We may hit the NAME/CONSTANT case if we have to convert types
2868 that value numbering saw through. */
2870 folded
= PRE_EXPR_NAME (expr
);
2873 folded
= PRE_EXPR_CONSTANT (expr
);
2877 vn_reference_t ref
= PRE_EXPR_REFERENCE (expr
);
2878 folded
= create_component_ref_by_pieces (block
, ref
, stmts
);
2885 vn_nary_op_t nary
= PRE_EXPR_NARY (expr
);
2886 tree
*genop
= XALLOCAVEC (tree
, nary
->length
);
2888 for (i
= 0; i
< nary
->length
; ++i
)
2890 genop
[i
] = find_or_generate_expression (block
, nary
->op
[i
], stmts
);
2893 /* Ensure genop[] is properly typed for POINTER_PLUS_EXPR. It
2894 may have conversions stripped. */
2895 if (nary
->opcode
== POINTER_PLUS_EXPR
)
2898 genop
[i
] = fold_convert (nary
->type
, genop
[i
]);
2900 genop
[i
] = convert_to_ptrofftype (genop
[i
]);
2903 genop
[i
] = fold_convert (TREE_TYPE (nary
->op
[i
]), genop
[i
]);
2905 if (nary
->opcode
== CONSTRUCTOR
)
2907 vec
<constructor_elt
, va_gc
> *elts
= NULL
;
2908 for (i
= 0; i
< nary
->length
; ++i
)
2909 CONSTRUCTOR_APPEND_ELT (elts
, NULL_TREE
, genop
[i
]);
2910 folded
= build_constructor (nary
->type
, elts
);
2914 switch (nary
->length
)
2917 folded
= fold_build1 (nary
->opcode
, nary
->type
,
2921 folded
= fold_build2 (nary
->opcode
, nary
->type
,
2922 genop
[0], genop
[1]);
2925 folded
= fold_build3 (nary
->opcode
, nary
->type
,
2926 genop
[0], genop
[1], genop
[2]);
2938 if (!useless_type_conversion_p (exprtype
, TREE_TYPE (folded
)))
2939 folded
= fold_convert (exprtype
, folded
);
2941 /* Force the generated expression to be a sequence of GIMPLE
2943 We have to call unshare_expr because force_gimple_operand may
2944 modify the tree we pass to it. */
2945 folded
= force_gimple_operand (unshare_expr (folded
), &forced_stmts
,
2948 /* If we have any intermediate expressions to the value sets, add them
2949 to the value sets and chain them in the instruction stream. */
2952 gsi
= gsi_start (forced_stmts
);
2953 for (; !gsi_end_p (gsi
); gsi_next (&gsi
))
2955 gimple stmt
= gsi_stmt (gsi
);
2956 tree forcedname
= gimple_get_lhs (stmt
);
2959 if (TREE_CODE (forcedname
) == SSA_NAME
)
2961 bitmap_set_bit (inserted_exprs
, SSA_NAME_VERSION (forcedname
));
2962 VN_INFO_GET (forcedname
)->valnum
= forcedname
;
2963 VN_INFO (forcedname
)->value_id
= get_next_value_id ();
2964 nameexpr
= get_or_alloc_expr_for_name (forcedname
);
2965 add_to_value (VN_INFO (forcedname
)->value_id
, nameexpr
);
2966 bitmap_value_replace_in_set (NEW_SETS (block
), nameexpr
);
2967 bitmap_value_replace_in_set (AVAIL_OUT (block
), nameexpr
);
2970 gimple_seq_add_seq (stmts
, forced_stmts
);
2973 name
= make_temp_ssa_name (exprtype
, NULL
, "pretmp");
2974 newstmt
= gimple_build_assign (name
, folded
);
2975 gimple_set_plf (newstmt
, NECESSARY
, false);
2977 gimple_seq_add_stmt (stmts
, newstmt
);
2978 bitmap_set_bit (inserted_exprs
, SSA_NAME_VERSION (name
));
2980 /* Fold the last statement. */
2981 gsi
= gsi_last (*stmts
);
2982 if (fold_stmt_inplace (&gsi
))
2983 update_stmt (gsi_stmt (gsi
));
2985 /* Add a value number to the temporary.
2986 The value may already exist in either NEW_SETS, or AVAIL_OUT, because
2987 we are creating the expression by pieces, and this particular piece of
2988 the expression may have been represented. There is no harm in replacing it here. */
2990 value_id
= get_expr_value_id (expr
);
2991 VN_INFO_GET (name
)->value_id
= value_id
;
2992 VN_INFO (name
)->valnum
= sccvn_valnum_from_value_id (value_id
);
2993 if (VN_INFO (name
)->valnum
== NULL_TREE
)
2994 VN_INFO (name
)->valnum
= name
;
2995 gcc_assert (VN_INFO (name
)->valnum
!= NULL_TREE
);
2996 nameexpr
= get_or_alloc_expr_for_name (name
);
2997 add_to_value (value_id
, nameexpr
);
2998 if (NEW_SETS (block
))
2999 bitmap_value_replace_in_set (NEW_SETS (block
), nameexpr
);
3000 bitmap_value_replace_in_set (AVAIL_OUT (block
), nameexpr
);
3002 pre_stats
.insertions
++;
3003 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3005 fprintf (dump_file
, "Inserted ");
3006 print_gimple_stmt (dump_file
, newstmt
, 0, 0);
3007 fprintf (dump_file
, " in predecessor %d\n", block
->index
);
3014 /* Returns true if we want to inhibit the insertions of PHI nodes
3015 for the given EXPR for basic block BB (a member of a loop).
3016 We want to do this when we fear that the induction variable we
3017 create might inhibit vectorization. */
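/* Illustrative sketch (assumed example): for

     for (i = 0; i < n; i++)
       sum += a[i];

   the address of a[i] is a simple induction variable of the loop.
   Inserting a PHI for it here would materialize that IV explicitly and
   could obscure the access pattern the vectorizer wants to see, so in
   this situation we prefer to skip the PRE insertion.  */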
3020 inhibit_phi_insertion (basic_block bb
, pre_expr expr
)
3022 vn_reference_t vr
= PRE_EXPR_REFERENCE (expr
);
3023 vec
<vn_reference_op_s
> ops
= vr
->operands
;
3024 vn_reference_op_t op
;
3027 /* If we aren't going to vectorize we don't inhibit anything. */
3028 if (!flag_tree_vectorize
)
3031 /* Otherwise we inhibit the insertion when the address of the
3032 memory reference is a simple induction variable. In other
3033 cases the vectorizer won't do anything anyway (either it's
3034 loop invariant or a complicated expression). */
3035 FOR_EACH_VEC_ELT (ops
, i
, op
)
3040 /* Calls are not a problem. */
3044 case ARRAY_RANGE_REF
:
3045 if (TREE_CODE (op
->op0
) != SSA_NAME
)
3050 basic_block defbb
= gimple_bb (SSA_NAME_DEF_STMT (op
->op0
));
3052 /* Default defs are loop invariant. */
3055 /* Defined outside this loop, also loop invariant. */
3056 if (!flow_bb_inside_loop_p (bb
->loop_father
, defbb
))
3058 /* If it's a simple induction variable inhibit insertion,
3059 the vectorizer might be interested in this one. */
3060 if (simple_iv (bb
->loop_father
, bb
->loop_father
,
3061 op
->op0
, &iv
, true))
3063 /* No simple IV, vectorizer can't do anything, hence no
3064 reason to inhibit the transformation for this operand. */
3074 /* Insert the to-be-made-available values of expression EXPRNUM for each
3075 predecessor, stored in AVAIL, into the predecessors of BLOCK, and
3076 merge the result with a phi node, given the same value number as
3077 NODE. Return true if we have inserted new stuff. */
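/* A hedged sketch (made-up SSA names): if the value of a_1 + 1 is
   available as t_4 in the first predecessor but not in the second, we
   generate t_7 = a_1 + 1 on the second incoming edge and then merge
   the two with

     prephitmp_8 = PHI <t_4, t_7>;

   giving prephitmp_8 the value number of the original expression.  */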
3080 insert_into_preds_of_block (basic_block block
, unsigned int exprnum
,
3081 vec
<pre_expr
> avail
)
3083 pre_expr expr
= expression_for_id (exprnum
);
3085 unsigned int val
= get_expr_value_id (expr
);
3087 bool insertions
= false;
3092 tree type
= get_expr_type (expr
);
3096 /* Make sure we aren't creating an induction variable. */
3097 if (bb_loop_depth (block
) > 0 && EDGE_COUNT (block
->preds
) == 2)
3099 bool firstinsideloop
= false;
3100 bool secondinsideloop
= false;
3101 firstinsideloop
= flow_bb_inside_loop_p (block
->loop_father
,
3102 EDGE_PRED (block
, 0)->src
);
3103 secondinsideloop
= flow_bb_inside_loop_p (block
->loop_father
,
3104 EDGE_PRED (block
, 1)->src
);
3105 /* Induction variables only have one edge inside the loop. */
3106 if ((firstinsideloop
^ secondinsideloop
)
3107 && (expr
->kind
!= REFERENCE
3108 || inhibit_phi_insertion (block
, expr
)))
3110 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3111 fprintf (dump_file
, "Skipping insertion of phi for partial redundancy: Looks like an induction variable\n");
3116 /* Make the necessary insertions. */
3117 FOR_EACH_EDGE (pred
, ei
, block
->preds
)
3119 gimple_seq stmts
= NULL
;
3122 eprime
= avail
[pred
->dest_idx
];
3124 if (eprime
->kind
!= NAME
&& eprime
->kind
!= CONSTANT
)
3126 builtexpr
= create_expression_by_pieces (bprime
, eprime
,
3128 gcc_assert (!(pred
->flags
& EDGE_ABNORMAL
));
3129 gsi_insert_seq_on_edge (pred
, stmts
);
3132 /* We cannot insert a PHI node if we failed to insert
3137 avail
[pred
->dest_idx
] = get_or_alloc_expr_for_name (builtexpr
);
3140 else if (eprime
->kind
== CONSTANT
)
3142 /* Constants may not have the right type, fold_convert
3143 should give us back a constant with the right type. */
3144 tree constant
= PRE_EXPR_CONSTANT (eprime
);
3145 if (!useless_type_conversion_p (type
, TREE_TYPE (constant
)))
3147 tree builtexpr
= fold_convert (type
, constant
);
3148 if (!is_gimple_min_invariant (builtexpr
))
3150 tree forcedexpr
= force_gimple_operand (builtexpr
,
3153 if (!is_gimple_min_invariant (forcedexpr
))
3155 if (forcedexpr
!= builtexpr
)
3157 VN_INFO_GET (forcedexpr
)->valnum
= PRE_EXPR_CONSTANT (eprime
);
3158 VN_INFO (forcedexpr
)->value_id
= get_expr_value_id (eprime
);
3162 gimple_stmt_iterator gsi
;
3163 gsi
= gsi_start (stmts
);
3164 for (; !gsi_end_p (gsi
); gsi_next (&gsi
))
3166 gimple stmt
= gsi_stmt (gsi
);
3167 tree lhs
= gimple_get_lhs (stmt
);
3168 if (TREE_CODE (lhs
) == SSA_NAME
)
3169 bitmap_set_bit (inserted_exprs
,
3170 SSA_NAME_VERSION (lhs
));
3171 gimple_set_plf (stmt
, NECESSARY
, false);
3173 gsi_insert_seq_on_edge (pred
, stmts
);
3175 avail
[pred
->dest_idx
]
3176 = get_or_alloc_expr_for_name (forcedexpr
);
3180 avail
[pred
->dest_idx
]
3181 = get_or_alloc_expr_for_constant (builtexpr
);
3184 else if (eprime
->kind
== NAME
)
3186 /* We may have to do a conversion because our value
3187 numbering can look through types in certain cases, but
3188 our IL requires all operands of a phi node have the same
3190 tree name
= PRE_EXPR_NAME (eprime
);
3191 if (!useless_type_conversion_p (type
, TREE_TYPE (name
)))
3195 builtexpr
= fold_convert (type
, name
);
3196 forcedexpr
= force_gimple_operand (builtexpr
,
3200 if (forcedexpr
!= name
)
3202 VN_INFO_GET (forcedexpr
)->valnum
= VN_INFO (name
)->valnum
;
3203 VN_INFO (forcedexpr
)->value_id
= VN_INFO (name
)->value_id
;
3208 gimple_stmt_iterator gsi
;
3209 gsi
= gsi_start (stmts
);
3210 for (; !gsi_end_p (gsi
); gsi_next (&gsi
))
3212 gimple stmt
= gsi_stmt (gsi
);
3213 tree lhs
= gimple_get_lhs (stmt
);
3214 if (TREE_CODE (lhs
) == SSA_NAME
)
3215 bitmap_set_bit (inserted_exprs
, SSA_NAME_VERSION (lhs
));
3216 gimple_set_plf (stmt
, NECESSARY
, false);
3218 gsi_insert_seq_on_edge (pred
, stmts
);
3220 avail
[pred
->dest_idx
] = get_or_alloc_expr_for_name (forcedexpr
);
3224 /* If we didn't want a phi node, and we made insertions, we still have
3225 inserted new stuff, and thus return true. If we didn't want a phi node,
3226 and didn't make insertions, we haven't added anything new, so return
3228 if (nophi
&& insertions
)
3230 else if (nophi
&& !insertions
)
3233 /* Now build a phi for the new variable. */
3234 temp
= make_temp_ssa_name (type
, NULL
, "prephitmp");
3235 phi
= create_phi_node (temp
, block
);
3237 gimple_set_plf (phi
, NECESSARY
, false);
3238 VN_INFO_GET (temp
)->value_id
= val
;
3239 VN_INFO (temp
)->valnum
= sccvn_valnum_from_value_id (val
);
3240 if (VN_INFO (temp
)->valnum
== NULL_TREE
)
3241 VN_INFO (temp
)->valnum
= temp
;
3242 bitmap_set_bit (inserted_exprs
, SSA_NAME_VERSION (temp
));
3243 FOR_EACH_EDGE (pred
, ei
, block
->preds
)
3245 pre_expr ae
= avail
[pred
->dest_idx
];
3246 gcc_assert (get_expr_type (ae
) == type
3247 || useless_type_conversion_p (type
, get_expr_type (ae
)));
3248 if (ae
->kind
== CONSTANT
)
3249 add_phi_arg (phi
, unshare_expr (PRE_EXPR_CONSTANT (ae
)),
3250 pred
, UNKNOWN_LOCATION
);
3252 add_phi_arg (phi
, PRE_EXPR_NAME (ae
), pred
, UNKNOWN_LOCATION
);
3255 newphi
= get_or_alloc_expr_for_name (temp
);
3256 add_to_value (val
, newphi
);
3258 /* The value should *not* exist in PHI_GEN, or else we wouldn't be doing
3259 this insertion, since we test for the existence of this value in PHI_GEN
3260 before proceeding with the partial redundancy checks in insert_aux.
3262 The value may exist in AVAIL_OUT, in particular, it could be represented
3263 by the expression we are trying to eliminate, in which case we want the
3264 replacement to occur. If it does not exist in AVAIL_OUT, we want it
3267 Similarly to the PHI_GEN case, the value should not exist in NEW_SETS of
3268 this block, because if it did, it would have existed in our dominator's
3269 AVAIL_OUT, and would have been skipped due to the full redundancy check.
3272 bitmap_insert_into_set (PHI_GEN (block
), newphi
);
3273 bitmap_value_replace_in_set (AVAIL_OUT (block
),
3275 bitmap_insert_into_set (NEW_SETS (block
),
3278 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3280 fprintf (dump_file
, "Created phi ");
3281 print_gimple_stmt (dump_file
, phi
, 0, 0);
3282 fprintf (dump_file
, " in block %d\n", block
->index
);
3290 /* Perform insertion of partially redundant values.
3291 For BLOCK, do the following:
3292 1. Propagate the NEW_SETS of the dominator into the current block.
3293 If the block has multiple predecessors,
3294 2a. Iterate over the ANTIC expressions for the block to see if
3295 any of them are partially redundant.
3296 2b. If so, insert them into the necessary predecessors to make
3297 the expression fully redundant.
3298 2c. Insert a new PHI merging the values of the predecessors.
3299 2d. Insert the new PHI, and the new expressions, into the
3301 3. Recursively call ourselves on the dominator children of BLOCK.
3303 Steps 1, 2a, and 3 are done by insert_aux. 2b, 2c and 2d are done by
3304 do_regular_insertion and do_partial_partial_insertion.
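   As a source-level illustration (an assumed example):

     if (cond)
       x = a + b;
     ...
     y = a + b;

   a + b is partially redundant at the join; step 2b inserts a + b on
   the edge that did not compute it, step 2c merges the two copies with
   a PHI, and the later y = a + b becomes fully redundant for
   eliminate () to remove.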
3309 do_regular_insertion (basic_block block
, basic_block dom
)
3311 bool new_stuff
= false;
3312 vec
<pre_expr
> exprs
;
3314 vec
<pre_expr
> avail
= vNULL
;
3317 exprs
= sorted_array_from_bitmap_set (ANTIC_IN (block
));
3318 avail
.safe_grow (EDGE_COUNT (block
->preds
));
3320 FOR_EACH_VEC_ELT (exprs
, i
, expr
)
3322 if (expr
->kind
== NARY
3323 || expr
->kind
== REFERENCE
)
3326 bool by_some
= false;
3327 bool cant_insert
= false;
3328 bool all_same
= true;
3329 pre_expr first_s
= NULL
;
3332 pre_expr eprime
= NULL
;
3334 pre_expr edoubleprime
= NULL
;
3335 bool do_insertion
= false;
3337 val
= get_expr_value_id (expr
);
3338 if (bitmap_set_contains_value (PHI_GEN (block
), val
))
3340 if (bitmap_set_contains_value (AVAIL_OUT (dom
), val
))
3342 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3343 fprintf (dump_file
, "Found fully redundant value\n");
3347 FOR_EACH_EDGE (pred
, ei
, block
->preds
)
3349 unsigned int vprime
;
3351 /* We should never run insertion for the exit block
3352 and so not come across fake pred edges. */
3353 gcc_assert (!(pred
->flags
& EDGE_FAKE
));
3355 eprime
= phi_translate (expr
, ANTIC_IN (block
), NULL
,
3358 /* eprime will generally only be NULL if the
3359 value of the expression, translated
3360 through the PHI for this predecessor, is
3361 undefined. If that is the case, we can't
3362 make the expression fully redundant,
3363 because its value is undefined along a
3364 predecessor path. We can thus break out
3365 early because it doesn't matter what the
3366 rest of the results are. */
3369 avail
[pred
->dest_idx
] = NULL
;
3374 eprime
= fully_constant_expression (eprime
);
3375 vprime
= get_expr_value_id (eprime
);
3376 edoubleprime
= bitmap_find_leader (AVAIL_OUT (bprime
),
3378 if (edoubleprime
== NULL
)
3380 avail
[pred
->dest_idx
] = eprime
;
3385 avail
[pred
->dest_idx
] = edoubleprime
;
3387 /* We want to perform insertions to remove a redundancy on
3388 a path in the CFG we want to optimize for speed. */
3389 if (optimize_edge_for_speed_p (pred
))
3390 do_insertion
= true;
3391 if (first_s
== NULL
)
3392 first_s
= edoubleprime
;
3393 else if (!pre_expr_d::equal (first_s
, edoubleprime
))
3397 /* If we can insert it, it's not the same value
3398 already existing along every predecessor, and
3399 it's defined by some predecessor, it is
3400 partially redundant. */
3401 if (!cant_insert
&& !all_same
&& by_some
)
3405 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3407 fprintf (dump_file
, "Skipping partial redundancy for "
3409 print_pre_expr (dump_file
, expr
);
3410 fprintf (dump_file
, " (%04d), no redundancy on to be "
3411 "optimized for speed edge\n", val
);
3414 else if (dbg_cnt (treepre_insert
))
3416 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3418 fprintf (dump_file
, "Found partial redundancy for "
3420 print_pre_expr (dump_file
, expr
);
3421 fprintf (dump_file
, " (%04d)\n",
3422 get_expr_value_id (expr
));
3424 if (insert_into_preds_of_block (block
,
3425 get_expression_id (expr
),
3430 /* If all edges produce the same value and that value is
3431 an invariant, then the PHI has the same value on all
3432 edges. Note this. */
3433 else if (!cant_insert
&& all_same
)
3435 gcc_assert (edoubleprime
->kind
== CONSTANT
3436 || edoubleprime
->kind
== NAME
);
3438 tree temp
= make_temp_ssa_name (get_expr_type (expr
),
3440 gimple assign
= gimple_build_assign (temp
,
3441 edoubleprime
->kind
== CONSTANT
? PRE_EXPR_CONSTANT (edoubleprime
) : PRE_EXPR_NAME (edoubleprime
));
3442 gimple_stmt_iterator gsi
= gsi_after_labels (block
);
3443 gsi_insert_before (&gsi
, assign
, GSI_NEW_STMT
);
3445 gimple_set_plf (assign
, NECESSARY
, false);
3446 VN_INFO_GET (temp
)->value_id
= val
;
3447 VN_INFO (temp
)->valnum
= sccvn_valnum_from_value_id (val
);
3448 if (VN_INFO (temp
)->valnum
== NULL_TREE
)
3449 VN_INFO (temp
)->valnum
= temp
;
3450 bitmap_set_bit (inserted_exprs
, SSA_NAME_VERSION (temp
));
3451 pre_expr newe
= get_or_alloc_expr_for_name (temp
);
3452 add_to_value (val
, newe
);
3453 bitmap_value_replace_in_set (AVAIL_OUT (block
), newe
);
3454 bitmap_insert_into_set (NEW_SETS (block
), newe
);
3465 /* Perform insertion for partially anticipatable expressions. There
3466 is only one case we will perform insertion for these. This case is
3467 if the expression is partially anticipatable, and fully available.
3468 In this case, we know that putting it earlier will enable us to
3469 remove the later computation. */
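/* A hedged illustration (assumed example): in

     if (p)  t1 = a + b;
     else    t2 = a + b;
     ...                    join: a + b available from both predecessors
     if (q)  t3 = a + b;

   a + b is not in ANTIC_IN of the join (only the q path recomputes it),
   so regular insertion never fires; but it is partially anticipated and
   fully available, so we merge t1/t2 with a PHI here, which lets the
   later t3 = a + b be eliminated.  */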
3473 do_partial_partial_insertion (basic_block block
, basic_block dom
)
3475 bool new_stuff
= false;
3476 vec
<pre_expr
> exprs
;
3478 vec
<pre_expr
> avail
= vNULL
;
3481 exprs
= sorted_array_from_bitmap_set (PA_IN (block
));
3482 avail
.safe_grow (EDGE_COUNT (block
->preds
));
3484 FOR_EACH_VEC_ELT (exprs
, i
, expr
)
3486 if (expr
->kind
== NARY
3487 || expr
->kind
== REFERENCE
)
3491 bool cant_insert
= false;
3494 pre_expr eprime
= NULL
;
3497 val
= get_expr_value_id (expr
);
3498 if (bitmap_set_contains_value (PHI_GEN (block
), val
))
3500 if (bitmap_set_contains_value (AVAIL_OUT (dom
), val
))
3503 FOR_EACH_EDGE (pred
, ei
, block
->preds
)
3505 unsigned int vprime
;
3506 pre_expr edoubleprime
;
3508 /* We should never run insertion for the exit block
3509 and so not come across fake pred edges. */
3510 gcc_assert (!(pred
->flags
& EDGE_FAKE
));
3512 eprime
= phi_translate (expr
, ANTIC_IN (block
),
3516 /* eprime will generally only be NULL if the
3517 value of the expression, translated
3518 through the PHI for this predecessor, is
3519 undefined. If that is the case, we can't
3520 make the expression fully redundant,
3521 because its value is undefined along a
3522 predecessor path. We can thus break out
3523 early because it doesn't matter what the
3524 rest of the results are. */
3527 avail
[pred
->dest_idx
] = NULL
;
3532 eprime
= fully_constant_expression (eprime
);
3533 vprime
= get_expr_value_id (eprime
);
3534 edoubleprime
= bitmap_find_leader (AVAIL_OUT (bprime
), vprime
);
3535 avail
[pred
->dest_idx
] = edoubleprime
;
3536 if (edoubleprime
== NULL
)
3543 /* If we can insert it and the value is fully available --
3544 i.e. already computed along every predecessor -- while only
3545 being partially anticipated here, inserting the merging PHI
3546 lets the later computation be removed. */
3547 if (!cant_insert
&& by_all
)
3550 bool do_insertion
= false;
3552 /* Insert only if we can remove a later expression on a path
3553 that we want to optimize for speed.
3554 The phi node that we will be inserting in BLOCK is not free,
3555 and inserting it for the sake of !optimize_for_speed successor
3556 may cause regressions on the speed path. */
3557 FOR_EACH_EDGE (succ
, ei
, block
->succs
)
3559 if (bitmap_set_contains_value (PA_IN (succ
->dest
), val
)
3560 || bitmap_set_contains_value (ANTIC_IN (succ
->dest
), val
))
3562 if (optimize_edge_for_speed_p (succ
))
3563 do_insertion
= true;
3569 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3571 fprintf (dump_file
, "Skipping partial partial redundancy "
3573 print_pre_expr (dump_file
, expr
);
3574 fprintf (dump_file
, " (%04d), not (partially) anticipated "
3575 "on any to be optimized for speed edges\n", val
);
3578 else if (dbg_cnt (treepre_insert
))
3580 pre_stats
.pa_insert
++;
3581 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3583 fprintf (dump_file
, "Found partial partial redundancy "
3585 print_pre_expr (dump_file
, expr
);
3586 fprintf (dump_file
, " (%04d)\n",
3587 get_expr_value_id (expr
));
3589 if (insert_into_preds_of_block (block
,
3590 get_expression_id (expr
),
3604 insert_aux (basic_block block
)
3607 bool new_stuff
= false;
3612 dom
= get_immediate_dominator (CDI_DOMINATORS
, block
);
3617 bitmap_set_t newset
= NEW_SETS (dom
);
3620 /* Note that we need to value_replace both NEW_SETS, and
3621 AVAIL_OUT. In both sets the value may currently be
3622 represented by some non-simple expression that we want
3623 to replace with the new leader. */
3624 FOR_EACH_EXPR_ID_IN_SET (newset
, i
, bi
)
3626 pre_expr expr
= expression_for_id (i
);
3627 bitmap_value_replace_in_set (NEW_SETS (block
), expr
);
3628 bitmap_value_replace_in_set (AVAIL_OUT (block
), expr
);
3631 if (!single_pred_p (block
))
3633 new_stuff
|= do_regular_insertion (block
, dom
);
3634 if (do_partial_partial
)
3635 new_stuff
|= do_partial_partial_insertion (block
, dom
);
3639 for (son
= first_dom_son (CDI_DOMINATORS
, block
);
3641 son
= next_dom_son (CDI_DOMINATORS
, son
))
3643 new_stuff
|= insert_aux (son
);
3649 /* Perform insertion of partially redundant values. */
3654 bool new_stuff
= true;
3656 int num_iterations
= 0;
3659 NEW_SETS (bb
) = bitmap_set_new ();
3664 if (dump_file
&& dump_flags
& TDF_DETAILS
)
3665 fprintf (dump_file
, "Starting insert iteration %d\n", num_iterations
);
3666 new_stuff
= insert_aux (ENTRY_BLOCK_PTR
);
3668 statistics_histogram_event (cfun
, "insert iterations", num_iterations
);
3672 /* Compute the AVAIL set for all basic blocks.
3674 This function performs value numbering of the statements in each basic
3675 block. The AVAIL sets are built from information we glean while doing
3676 this value numbering, since the AVAIL sets contain only one entry per
3679 AVAIL_IN[BLOCK] = AVAIL_OUT[dom(BLOCK)].
3680 AVAIL_OUT[BLOCK] = AVAIL_IN[BLOCK] U PHI_GEN[BLOCK] U TMP_GEN[BLOCK]. */
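/* A small worked sketch of the equations above (assumed CFG): in a
   diamond

	B1
       /  \
      B2  B3
       \  /
	B4

   AVAIL_IN (B4) is AVAIL_OUT (B1), its immediate dominator, not the
   union or intersection of B2 and B3; only what B4 itself generates
   (PHI_GEN and TMP_GEN) is added on top to form AVAIL_OUT (B4).  */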
3683 compute_avail (void)
3686 basic_block block
, son
;
3687 basic_block
*worklist
;
3691 /* We pretend that default definitions are defined in the entry block.
3692 This includes function arguments and the static chain decl. */
3693 for (i
= 1; i
< num_ssa_names
; ++i
)
3695 tree name
= ssa_name (i
);
3698 || !SSA_NAME_IS_DEFAULT_DEF (name
)
3699 || has_zero_uses (name
)
3700 || virtual_operand_p (name
))
3703 e
= get_or_alloc_expr_for_name (name
);
3704 add_to_value (get_expr_value_id (e
), e
);
3705 bitmap_insert_into_set (TMP_GEN (ENTRY_BLOCK_PTR
), e
);
3706 bitmap_value_insert_into_set (AVAIL_OUT (ENTRY_BLOCK_PTR
), e
);
3709 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3711 print_bitmap_set (dump_file
, TMP_GEN (ENTRY_BLOCK_PTR
),
3712 "tmp_gen", ENTRY_BLOCK
);
3713 print_bitmap_set (dump_file
, AVAIL_OUT (ENTRY_BLOCK_PTR
),
3714 "avail_out", ENTRY_BLOCK
);
3717 /* Allocate the worklist. */
3718 worklist
= XNEWVEC (basic_block
, n_basic_blocks
);
3720 /* Seed the algorithm by putting the dominator children of the entry
3721 block on the worklist. */
3722 for (son
= first_dom_son (CDI_DOMINATORS
, ENTRY_BLOCK_PTR
);
3724 son
= next_dom_son (CDI_DOMINATORS
, son
))
3725 worklist
[sp
++] = son
;
3727 /* Loop until the worklist is empty. */
3730 gimple_stmt_iterator gsi
;
3734 /* Pick a block from the worklist. */
3735 block
= worklist
[--sp
];
3737 /* Initially, the set of available values in BLOCK is that of
3738 its immediate dominator. */
3739 dom
= get_immediate_dominator (CDI_DOMINATORS
, block
);
3741 bitmap_set_copy (AVAIL_OUT (block
), AVAIL_OUT (dom
));
3743 /* Generate values for PHI nodes. */
3744 for (gsi
= gsi_start_phis (block
); !gsi_end_p (gsi
); gsi_next (&gsi
))
3746 tree result
= gimple_phi_result (gsi_stmt (gsi
));
3748 /* We have no need for virtual phis, as they don't represent
3749 actual computations. */
3750 if (virtual_operand_p (result
))
3753 pre_expr e
= get_or_alloc_expr_for_name (result
);
3754 add_to_value (get_expr_value_id (e
), e
);
3755 bitmap_value_insert_into_set (AVAIL_OUT (block
), e
);
3756 bitmap_insert_into_set (PHI_GEN (block
), e
);
3759 BB_MAY_NOTRETURN (block
) = 0;
3761 /* Now compute value numbers and populate value sets with all
3762 the expressions computed in BLOCK. */
3763 for (gsi
= gsi_start_bb (block
); !gsi_end_p (gsi
); gsi_next (&gsi
))
3768 stmt
= gsi_stmt (gsi
);
3770 /* Cache whether the basic-block has any non-visible side-effect
3772 If this isn't a call or it is the last stmt in the
3773 basic-block then the CFG represents things correctly. */
3774 if (is_gimple_call (stmt
) && !stmt_ends_bb_p (stmt
))
3776 /* Non-looping const functions always return normally.
3777 Otherwise the call might not return or have side-effects
3778 that forbid hoisting possibly trapping expressions
3780 int flags
= gimple_call_flags (stmt
);
3781 if (!(flags
& ECF_CONST
)
3782 || (flags
& ECF_LOOPING_CONST_OR_PURE
))
3783 BB_MAY_NOTRETURN (block
) = 1;
3786 FOR_EACH_SSA_TREE_OPERAND (op
, stmt
, iter
, SSA_OP_DEF
)
3788 pre_expr e
= get_or_alloc_expr_for_name (op
);
3790 add_to_value (get_expr_value_id (e
), e
);
3791 bitmap_insert_into_set (TMP_GEN (block
), e
);
3792 bitmap_value_insert_into_set (AVAIL_OUT (block
), e
);
3795 if (gimple_has_side_effects (stmt
)
3796 || stmt_could_throw_p (stmt
)
3797 || is_gimple_debug (stmt
))
3800 FOR_EACH_SSA_TREE_OPERAND (op
, stmt
, iter
, SSA_OP_USE
)
3802 if (ssa_undefined_value_p (op
))
3804 pre_expr e
= get_or_alloc_expr_for_name (op
);
3805 bitmap_value_insert_into_set (EXP_GEN (block
), e
);
3808 switch (gimple_code (stmt
))
3816 pre_expr result
= NULL
;
3817 vec
<vn_reference_op_s
> ops
= vNULL
;
3819 /* We can value number only calls to real functions. */
3820 if (gimple_call_internal_p (stmt
))
3823 copy_reference_ops_from_call (stmt
, &ops
);
3824 vn_reference_lookup_pieces (gimple_vuse (stmt
), 0,
3825 gimple_expr_type (stmt
),
3826 ops
, &ref
, VN_NOWALK
);
3831 /* If the value of the call is not invalidated in
3832 this block until it is computed, add the expression
3834 if (!gimple_vuse (stmt
)
3836 (SSA_NAME_DEF_STMT (gimple_vuse (stmt
))) == GIMPLE_PHI
3837 || gimple_bb (SSA_NAME_DEF_STMT
3838 (gimple_vuse (stmt
))) != block
)
3840 result
= (pre_expr
) pool_alloc (pre_expr_pool
);
3841 result
->kind
= REFERENCE
;
3843 PRE_EXPR_REFERENCE (result
) = ref
;
3845 get_or_alloc_expression_id (result
);
3846 add_to_value (get_expr_value_id (result
), result
);
3847 bitmap_value_insert_into_set (EXP_GEN (block
), result
);
3854 pre_expr result
= NULL
;
3855 switch (vn_get_stmt_kind (stmt
))
3859 enum tree_code code
= gimple_assign_rhs_code (stmt
);
3862 /* COND_EXPR and VEC_COND_EXPR are awkward in
3863 that they contain an embedded complex expression.
3864 Don't even try to shove those through PRE. */
3865 if (code
== COND_EXPR
3866 || code
== VEC_COND_EXPR
)
3869 vn_nary_op_lookup_stmt (stmt
, &nary
);
3873 /* If the NARY traps and there was a preceding
3874 point in the block that might not return avoid
3875 adding the nary to EXP_GEN. */
3876 if (BB_MAY_NOTRETURN (block
)
3877 && vn_nary_may_trap (nary
))
3880 result
= (pre_expr
) pool_alloc (pre_expr_pool
);
3881 result
->kind
= NARY
;
3883 PRE_EXPR_NARY (result
) = nary
;
3890 vn_reference_lookup (gimple_assign_rhs1 (stmt
),
3896 /* If the value of the reference is not invalidated in
3897 this block until it is computed, add the expression
3899 if (gimple_vuse (stmt
))
3903 def_stmt
= SSA_NAME_DEF_STMT (gimple_vuse (stmt
));
3904 while (!gimple_nop_p (def_stmt
)
3905 && gimple_code (def_stmt
) != GIMPLE_PHI
3906 && gimple_bb (def_stmt
) == block
)
3908 if (stmt_may_clobber_ref_p
3909 (def_stmt
, gimple_assign_rhs1 (stmt
)))
3915 = SSA_NAME_DEF_STMT (gimple_vuse (def_stmt
));
3921 result
= (pre_expr
) pool_alloc (pre_expr_pool
);
3922 result
->kind
= REFERENCE
;
3924 PRE_EXPR_REFERENCE (result
) = ref
;
3932 get_or_alloc_expression_id (result
);
3933 add_to_value (get_expr_value_id (result
), result
);
3934 bitmap_value_insert_into_set (EXP_GEN (block
), result
);
3942 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3944 print_bitmap_set (dump_file
, EXP_GEN (block
),
3945 "exp_gen", block
->index
);
3946 print_bitmap_set (dump_file
, PHI_GEN (block
),
3947 "phi_gen", block
->index
);
3948 print_bitmap_set (dump_file
, TMP_GEN (block
),
3949 "tmp_gen", block
->index
);
3950 print_bitmap_set (dump_file
, AVAIL_OUT (block
),
3951 "avail_out", block
->index
);
3954 /* Put the dominator children of BLOCK on the worklist of blocks
3955 to compute available sets for. */
3956 for (son
= first_dom_son (CDI_DOMINATORS
, block
);
3958 son
= next_dom_son (CDI_DOMINATORS
, son
))
3959 worklist
[sp
++] = son
;
3966 /* Local state for the eliminate domwalk. */
3967 static vec
<gimple
> el_to_remove
;
3968 static vec
<gimple
> el_to_update
;
3969 static unsigned int el_todo
;
3970 static vec
<tree
> el_avail
;
3971 static vec
<tree
> el_avail_stack
;
3973 /* Return a leader for OP that is available at the current point of the
3974 eliminate domwalk. */
3977 eliminate_avail (tree op)
3979 tree valnum = VN_INFO (op)->valnum;
3980 if (TREE_CODE (valnum) == SSA_NAME)
3982 if (SSA_NAME_IS_DEFAULT_DEF (valnum))
3984 if (el_avail.length () > SSA_NAME_VERSION (valnum))
3985 return el_avail[SSA_NAME_VERSION (valnum)];
3987 else if (is_gimple_min_invariant (valnum))
3992 /* At the current point of the eliminate domwalk make OP available. */
3995 eliminate_push_avail (tree op)
3997 tree valnum = VN_INFO (op)->valnum;
3998 if (TREE_CODE (valnum) == SSA_NAME)
4000 if (el_avail.length () <= SSA_NAME_VERSION (valnum))
4001 el_avail.safe_grow_cleared (SSA_NAME_VERSION (valnum) + 1);
4002 el_avail[SSA_NAME_VERSION (valnum)] = op;
4003 el_avail_stack.safe_push (op);
4007 /* Insert the expression recorded by SCCVN for VAL at *GSI. Returns
4008 the leader for the expression if insertion was successful. */
4011 eliminate_insert (gimple_stmt_iterator
*gsi
, tree val
)
4013 tree expr
= vn_get_expr_for (val
);
4014 if (!CONVERT_EXPR_P (expr
)
4015 && TREE_CODE (expr
) != VIEW_CONVERT_EXPR
)
4018 tree op
= TREE_OPERAND (expr
, 0);
4019 tree leader
= TREE_CODE (op
) == SSA_NAME
? eliminate_avail (op
) : op
;
4023 tree res
= make_temp_ssa_name (TREE_TYPE (val
), NULL
, "pretmp");
4024 gimple tem
= gimple_build_assign (res
,
4025 fold_build1 (TREE_CODE (expr
),
4026 TREE_TYPE (expr
), leader
));
4027 gsi_insert_before (gsi
, tem
, GSI_SAME_STMT
);
4028 VN_INFO_GET (res
)->valnum
= val
;
4030 if (TREE_CODE (leader
) == SSA_NAME
)
4031 gimple_set_plf (SSA_NAME_DEF_STMT (leader
), NECESSARY
, true);
4033 pre_stats
.insertions
++;
4034 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4036 fprintf (dump_file
, "Inserted ");
4037 print_gimple_stmt (dump_file
, tem
, 0, 0);
4043 /* Perform elimination for the basic-block B during the domwalk. */
4046 eliminate_bb (dom_walk_data
*, basic_block b
)
4048 gimple_stmt_iterator gsi
;
4052 el_avail_stack
.safe_push (NULL_TREE
);
4054 for (gsi
= gsi_start_phis (b
); !gsi_end_p (gsi
);)
4056 gimple stmt
, phi
= gsi_stmt (gsi
);
4057 tree sprime
= NULL_TREE
, res
= PHI_RESULT (phi
);
4058 gimple_stmt_iterator gsi2
;
4060 /* We want to perform redundant PHI elimination. Do so by
4061 replacing the PHI with a single copy if possible.
4062 Do not touch inserted, single-argument or virtual PHIs. */
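/* Illustrative sketch (made-up SSA names): if value numbering shows
   that x_3 = PHI <y_1(2), y_1(3)> has the same value as y_1, the PHI
   is removed and replaced by the copy x_3 = y_1, which is queued in
   el_to_remove and typically propagated away afterwards.  */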
4063 if (gimple_phi_num_args (phi
) == 1
4064 || virtual_operand_p (res
))
4070 sprime
= eliminate_avail (res
);
4074 eliminate_push_avail (res
);
4078 else if (is_gimple_min_invariant (sprime
))
4080 if (!useless_type_conversion_p (TREE_TYPE (res
),
4081 TREE_TYPE (sprime
)))
4082 sprime
= fold_convert (TREE_TYPE (res
), sprime
);
4085 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4087 fprintf (dump_file
, "Replaced redundant PHI node defining ");
4088 print_generic_expr (dump_file
, res
, 0);
4089 fprintf (dump_file
, " with ");
4090 print_generic_expr (dump_file
, sprime
, 0);
4091 fprintf (dump_file
, "\n");
4094 remove_phi_node (&gsi
, false);
4097 && !bitmap_bit_p (inserted_exprs
, SSA_NAME_VERSION (res
))
4098 && TREE_CODE (sprime
) == SSA_NAME
)
4099 gimple_set_plf (SSA_NAME_DEF_STMT (sprime
), NECESSARY
, true);
4101 if (!useless_type_conversion_p (TREE_TYPE (res
), TREE_TYPE (sprime
)))
4102 sprime
= fold_convert (TREE_TYPE (res
), sprime
);
4103 stmt
= gimple_build_assign (res
, sprime
);
4104 SSA_NAME_DEF_STMT (res
) = stmt
;
4105 gimple_set_plf (stmt
, NECESSARY
, gimple_plf (phi
, NECESSARY
));
4107 gsi2
= gsi_after_labels (b
);
4108 gsi_insert_before (&gsi2
, stmt
, GSI_NEW_STMT
);
4109 /* Queue the copy for eventual removal. */
4110 el_to_remove
.safe_push (stmt
);
4111 /* If we inserted this PHI node ourself, it's not an elimination. */
4113 && bitmap_bit_p (inserted_exprs
, SSA_NAME_VERSION (res
)))
4116 pre_stats
.eliminations
++;
4119 for (gsi
= gsi_start_bb (b
); !gsi_end_p (gsi
); gsi_next (&gsi
))
4121 tree lhs
= NULL_TREE
;
4122 tree rhs
= NULL_TREE
;
4124 stmt
= gsi_stmt (gsi
);
4126 if (gimple_has_lhs (stmt
))
4127 lhs
= gimple_get_lhs (stmt
);
4129 if (gimple_assign_single_p (stmt
))
4130 rhs
= gimple_assign_rhs1 (stmt
);
4132 /* Lookup the RHS of the expression, see if we have an
4133 available computation for it. If so, replace the RHS with
4134 the available computation. */
4135 if (gimple_has_lhs (stmt
)
4136 && TREE_CODE (lhs
) == SSA_NAME
4137 && !gimple_has_volatile_ops (stmt
))
4140 gimple orig_stmt
= stmt
;
4142 sprime
= eliminate_avail (lhs
);
4143 /* If there is no usable leader mark lhs as leader for its value. */
4145 eliminate_push_avail (lhs
);
4147 /* See PR43491. Do not replace a global register variable when
4148 it is the RHS of an assignment. Do replace local register
4149 variables since gcc does not guarantee a local variable will
4150 be allocated in a register.
4151 Do not perform copy propagation or undo constant propagation. */
4152 if (gimple_assign_single_p (stmt
)
4153 && (TREE_CODE (rhs
) == SSA_NAME
4154 || is_gimple_min_invariant (rhs
)
4155 || (TREE_CODE (rhs
) == VAR_DECL
4156 && is_global_var (rhs
)
4157 && DECL_HARD_REGISTER (rhs
))))
4162 /* If there is no existing usable leader but SCCVN thinks
4163 it has an expression it wants to use as replacement,
4165 tree val
= VN_INFO (lhs
)->valnum
;
4167 && TREE_CODE (val
) == SSA_NAME
4168 && VN_INFO (val
)->needs_insertion
4169 && VN_INFO (val
)->expr
!= NULL_TREE
4170 && (sprime
= eliminate_insert (&gsi
, val
)) != NULL_TREE
)
4171 eliminate_push_avail (sprime
);
4173 else if (is_gimple_min_invariant (sprime
))
4175 /* If there is no existing leader but SCCVN knows this
4176 value is constant, use that constant. */
4177 if (!useless_type_conversion_p (TREE_TYPE (lhs
),
4178 TREE_TYPE (sprime
)))
4179 sprime
= fold_convert (TREE_TYPE (lhs
), sprime
);
4181 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4183 fprintf (dump_file
, "Replaced ");
4184 print_gimple_expr (dump_file
, stmt
, 0, 0);
4185 fprintf (dump_file
, " with ");
4186 print_generic_expr (dump_file
, sprime
, 0);
4187 fprintf (dump_file
, " in ");
4188 print_gimple_stmt (dump_file
, stmt
, 0, 0);
4190 pre_stats
.eliminations
++;
4191 propagate_tree_value_into_stmt (&gsi
, sprime
);
4192 stmt
= gsi_stmt (gsi
);
4195 /* If we removed EH side-effects from the statement, clean
4196 its EH information. */
4197 if (maybe_clean_or_replace_eh_stmt (orig_stmt
, stmt
))
4199 bitmap_set_bit (need_eh_cleanup
,
4200 gimple_bb (stmt
)->index
);
4201 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4202 fprintf (dump_file
, " Removed EH side-effects.\n");
4209 && (rhs
== NULL_TREE
4210 || TREE_CODE (rhs
) != SSA_NAME
4211 || may_propagate_copy (rhs
, sprime
)))
4213 bool can_make_abnormal_goto
4214 = is_gimple_call (stmt
)
4215 && stmt_can_make_abnormal_goto (stmt
);
4217 gcc_assert (sprime
!= rhs
);
4219 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4221 fprintf (dump_file
, "Replaced ");
4222 print_gimple_expr (dump_file
, stmt
, 0, 0);
4223 fprintf (dump_file
, " with ");
4224 print_generic_expr (dump_file
, sprime
, 0);
4225 fprintf (dump_file
, " in ");
4226 print_gimple_stmt (dump_file
, stmt
, 0, 0);
4229 if (TREE_CODE (sprime
) == SSA_NAME
)
4230 gimple_set_plf (SSA_NAME_DEF_STMT (sprime
),
4232 /* We need to make sure the new and old types actually match,
4233 which may require adding a simple cast, which fold_convert
4235 if ((!rhs
|| TREE_CODE (rhs
) != SSA_NAME
)
4236 && !useless_type_conversion_p (gimple_expr_type (stmt
),
4237 TREE_TYPE (sprime
)))
4238 sprime
= fold_convert (gimple_expr_type (stmt
), sprime
);
4240 pre_stats
.eliminations
++;
4241 propagate_tree_value_into_stmt (&gsi
, sprime
);
4242 stmt
= gsi_stmt (gsi
);
4245 /* If we removed EH side-effects from the statement, clean
4246 its EH information. */
4247 if (maybe_clean_or_replace_eh_stmt (orig_stmt
, stmt
))
4249 bitmap_set_bit (need_eh_cleanup
,
4250 gimple_bb (stmt
)->index
);
4251 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4252 fprintf (dump_file
, " Removed EH side-effects.\n");
4255 /* Likewise for AB side-effects. */
4256 if (can_make_abnormal_goto
4257 && !stmt_can_make_abnormal_goto (stmt
))
4259 bitmap_set_bit (need_ab_cleanup
,
4260 gimple_bb (stmt
)->index
);
4261 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4262 fprintf (dump_file
, " Removed AB side-effects.\n");
4266 /* If the statement is a scalar store, see if the expression
4267 has the same value number as its rhs. If so, the store is redundant. */
4269 else if (gimple_assign_single_p (stmt
)
4270 && !gimple_has_volatile_ops (stmt
)
4271 && !is_gimple_reg (gimple_assign_lhs (stmt
))
4272 && (TREE_CODE (rhs
) == SSA_NAME
4273 || is_gimple_min_invariant (rhs
)))
4276 val
= vn_reference_lookup (gimple_assign_lhs (stmt
),
4277 gimple_vuse (stmt
), VN_WALK
, NULL
);
4278 if (TREE_CODE (rhs
) == SSA_NAME
)
4279 rhs
= VN_INFO (rhs
)->valnum
;
4281 && operand_equal_p (val
, rhs
, 0))
4283 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4285 fprintf (dump_file
, "Deleted redundant store ");
4286 print_gimple_stmt (dump_file
, stmt
, 0, 0);
4289 /* Queue stmt for removal. */
4290 el_to_remove
.safe_push (stmt
);
4293 /* Visit COND_EXPRs and fold the comparison with the
4294 available value-numbers. */
4295 else if (gimple_code (stmt
) == GIMPLE_COND
)
4297 tree op0
= gimple_cond_lhs (stmt
);
4298 tree op1
= gimple_cond_rhs (stmt
);
4301 if (TREE_CODE (op0
) == SSA_NAME
)
4302 op0
= VN_INFO (op0
)->valnum
;
4303 if (TREE_CODE (op1
) == SSA_NAME
)
4304 op1
= VN_INFO (op1
)->valnum
;
4305 result
= fold_binary (gimple_cond_code (stmt
), boolean_type_node
,
4307 if (result
&& TREE_CODE (result
) == INTEGER_CST
)
4309 if (integer_zerop (result
))
4310 gimple_cond_make_false (stmt
);
4312 gimple_cond_make_true (stmt
);
4314 el_todo
= TODO_cleanup_cfg
;
4317 /* Visit indirect calls and turn them into direct calls if
4319 if (is_gimple_call (stmt
))
4321 tree orig_fn
= gimple_call_fn (stmt
);
4325 if (TREE_CODE (orig_fn
) == SSA_NAME
)
4326 fn
= VN_INFO (orig_fn
)->valnum
;
4327 else if (TREE_CODE (orig_fn
) == OBJ_TYPE_REF
4328 && TREE_CODE (OBJ_TYPE_REF_EXPR (orig_fn
)) == SSA_NAME
)
4329 fn
= VN_INFO (OBJ_TYPE_REF_EXPR (orig_fn
))->valnum
;
4332 if (gimple_call_addr_fndecl (fn
) != NULL_TREE
4333 && useless_type_conversion_p (TREE_TYPE (orig_fn
),
4336 bool can_make_abnormal_goto
4337 = stmt_can_make_abnormal_goto (stmt
);
4338 bool was_noreturn
= gimple_call_noreturn_p (stmt
);
4340 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4342 fprintf (dump_file
, "Replacing call target with ");
4343 print_generic_expr (dump_file
, fn
, 0);
4344 fprintf (dump_file
, " in ");
4345 print_gimple_stmt (dump_file
, stmt
, 0, 0);
4348 gimple_call_set_fn (stmt
, fn
);
4349 el_to_update
.safe_push (stmt
);
4351 /* When changing a call into a noreturn call, cfg cleanup
4352 is needed to fix up the noreturn call. */
4353 if (!was_noreturn
&& gimple_call_noreturn_p (stmt
))
4354 el_todo
|= TODO_cleanup_cfg
;
4356 /* If we removed EH side-effects from the statement, clean
4357 its EH information. */
4358 if (maybe_clean_or_replace_eh_stmt (stmt
, stmt
))
4360 bitmap_set_bit (need_eh_cleanup
,
4361 gimple_bb (stmt
)->index
);
4362 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4363 fprintf (dump_file
, " Removed EH side-effects.\n");
4366 /* Likewise for AB side-effects. */
4367 if (can_make_abnormal_goto
4368 && !stmt_can_make_abnormal_goto (stmt
))
4370 bitmap_set_bit (need_ab_cleanup
,
4371 gimple_bb (stmt
)->index
);
4372 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4373 fprintf (dump_file
, " Removed AB side-effects.\n");
4376 /* Changing an indirect call to a direct call may
4377 have exposed different semantics. This may
4378 require an SSA update. */
4379 el_todo
|= TODO_update_ssa_only_virtuals
;
4385 /* Make no longer available leaders no longer available. */
4388 eliminate_leave_block (dom_walk_data
*, basic_block
)
4391 while ((entry
= el_avail_stack
.pop ()) != NULL_TREE
)
4392 el_avail
[SSA_NAME_VERSION (VN_INFO (entry
)->valnum
)] = NULL_TREE
;
/* Eliminate fully redundant computations.  */

static unsigned int
eliminate (void)
{
  struct dom_walk_data walk_data;
  gimple_stmt_iterator gsi;
  gimple stmt;
  unsigned i;

  need_eh_cleanup = BITMAP_ALLOC (NULL);
  need_ab_cleanup = BITMAP_ALLOC (NULL);

  el_to_remove.create (0);
  el_to_update.create (0);
  el_todo = 0;
  el_avail.create (0);
  el_avail_stack.create (0);

  walk_data.dom_direction = CDI_DOMINATORS;
  walk_data.initialize_block_local_data = NULL;
  walk_data.before_dom_children = eliminate_bb;
  walk_data.after_dom_children = eliminate_leave_block;
  walk_data.global_data = NULL;
  walk_data.block_local_data_size = 0;

  init_walk_dominator_tree (&walk_data);
  walk_dominator_tree (&walk_data, ENTRY_BLOCK_PTR);
  fini_walk_dominator_tree (&walk_data);

  el_avail.release ();
  el_avail_stack.release ();

  /* We cannot remove stmts during BB walk, especially not release SSA
     names there as this confuses the VN machinery.  The stmts ending
     up in el_to_remove are either stores or simple copies.  */
  FOR_EACH_VEC_ELT (el_to_remove, i, stmt)
    {
      tree lhs = gimple_assign_lhs (stmt);
      tree rhs = gimple_assign_rhs1 (stmt);
      use_operand_p use_p;
      gimple use_stmt;

      /* If there is a single use only, propagate the equivalency
         instead of keeping the copy.  */
      if (TREE_CODE (lhs) == SSA_NAME
          && TREE_CODE (rhs) == SSA_NAME
          && single_imm_use (lhs, &use_p, &use_stmt)
          && may_propagate_copy (USE_FROM_PTR (use_p), rhs))
        {
          SET_USE (use_p, rhs);
          update_stmt (use_stmt);
          if (inserted_exprs
              && bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (lhs))
              && TREE_CODE (rhs) == SSA_NAME)
            gimple_set_plf (SSA_NAME_DEF_STMT (rhs), NECESSARY, true);
        }

      /* If this is a store or a now unused copy, remove it.  */
      if (TREE_CODE (lhs) != SSA_NAME
          || has_zero_uses (lhs))
        {
          basic_block bb = gimple_bb (stmt);
          gsi = gsi_for_stmt (stmt);
          unlink_stmt_vdef (stmt);
          if (gsi_remove (&gsi, true))
            bitmap_set_bit (need_eh_cleanup, bb->index);
          if (inserted_exprs
              && TREE_CODE (lhs) == SSA_NAME)
            bitmap_clear_bit (inserted_exprs, SSA_NAME_VERSION (lhs));
          release_defs (stmt);
        }
    }
  el_to_remove.release ();

  /* We cannot update call statements with virtual operands during
     SSA walk.  This might remove them which in turn makes our
     VN lattice invalid.  */
  FOR_EACH_VEC_ELT (el_to_update, i, stmt)
    update_stmt (stmt);
  el_to_update.release ();

  return el_todo;
}
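/* For illustration (hypothetical GIMPLE): a copy queued in el_to_remove
   such as

       x_1 = y_2;
       z_3 = x_1 + 1;    <-- only use of x_1

   has its single use rewritten to z_3 = y_2 + 1, after which x_1 has zero
   uses and the copy is deleted in the loop above rather than during the
   dominator walk, so no SSA names are released while the VN lattice is
   still live.  */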
/* Perform CFG cleanups made necessary by elimination.  */

static unsigned
fini_eliminate (void)
{
  bool do_eh_cleanup = !bitmap_empty_p (need_eh_cleanup);
  bool do_ab_cleanup = !bitmap_empty_p (need_ab_cleanup);

  if (do_eh_cleanup)
    gimple_purge_all_dead_eh_edges (need_eh_cleanup);

  if (do_ab_cleanup)
    gimple_purge_all_dead_abnormal_call_edges (need_ab_cleanup);

  BITMAP_FREE (need_eh_cleanup);
  BITMAP_FREE (need_ab_cleanup);

  if (do_eh_cleanup || do_ab_cleanup)
    return TODO_cleanup_cfg;
  return 0;
}
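/* For illustration: need_eh_cleanup and need_ab_cleanup hold the indices of
   blocks in which elimination stripped a statement of its EH information or
   of its ability to make an abnormal goto; purging the now-dead EH and
   abnormal edges can change the CFG, hence the TODO_cleanup_cfg request.  */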
/* Borrow a bit of tree-ssa-dce.c for the moment.
   XXX: In 4.1, we should be able to just run a DCE pass after PRE, though
   this may be a bit faster, and we may want critical edges kept split.  */

/* If OP's defining statement has not already been determined to be necessary,
   mark that statement necessary.  Return the stmt, if it is newly
   necessary.  */

static inline gimple
mark_operand_necessary (tree op)
{
  gimple stmt;

  if (TREE_CODE (op) != SSA_NAME)
    return NULL;

  stmt = SSA_NAME_DEF_STMT (op);

  if (gimple_plf (stmt, NECESSARY)
      || gimple_nop_p (stmt))
    return NULL;

  gimple_set_plf (stmt, NECESSARY, true);
  return stmt;
}
/* Because we don't follow exactly the standard PRE algorithm, and decide not
   to insert PHI nodes sometimes, and because value numbering of casts isn't
   perfect, we sometimes end up inserting dead code.  This simple DCE-like
   pass removes any insertions we made that weren't actually used.  */

static void
remove_dead_inserted_code (void)
{
  bitmap worklist;
  unsigned i;
  bitmap_iterator bi;
  gimple t;

  worklist = BITMAP_ALLOC (NULL);
  EXECUTE_IF_SET_IN_BITMAP (inserted_exprs, 0, i, bi)
    {
      t = SSA_NAME_DEF_STMT (ssa_name (i));
      if (gimple_plf (t, NECESSARY))
        bitmap_set_bit (worklist, i);
    }
  while (!bitmap_empty_p (worklist))
    {
      i = bitmap_first_set_bit (worklist);
      bitmap_clear_bit (worklist, i);
      t = SSA_NAME_DEF_STMT (ssa_name (i));

      /* PHI nodes are somewhat special in that each PHI alternative has
         data and control dependencies.  All the statements feeding the
         PHI node's arguments are always necessary.  */
      if (gimple_code (t) == GIMPLE_PHI)
        {
          unsigned k;

          for (k = 0; k < gimple_phi_num_args (t); k++)
            {
              tree arg = PHI_ARG_DEF (t, k);
              if (TREE_CODE (arg) == SSA_NAME)
                {
                  gimple n = mark_operand_necessary (arg);
                  if (n)
                    bitmap_set_bit (worklist, SSA_NAME_VERSION (arg));
                }
            }
        }
      else
        {
          /* Propagate through the operands.  Examine all the USE, VUSE and
             VDEF operands in this statement.  Mark all the statements
             which feed this statement's uses as necessary.  */
          ssa_op_iter iter;
          tree use;

          /* The operands of VDEF expressions are also needed as they
             represent potential definitions that may reach this
             statement (VDEF operands allow us to follow def-def
             links).  */
          FOR_EACH_SSA_TREE_OPERAND (use, t, iter, SSA_OP_ALL_USES)
            {
              gimple n = mark_operand_necessary (use);
              if (n)
                bitmap_set_bit (worklist, SSA_NAME_VERSION (use));
            }
        }
    }

  EXECUTE_IF_SET_IN_BITMAP (inserted_exprs, 0, i, bi)
    {
      t = SSA_NAME_DEF_STMT (ssa_name (i));
      if (!gimple_plf (t, NECESSARY))
        {
          gimple_stmt_iterator gsi;

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Removing unnecessary insertion:");
              print_gimple_stmt (dump_file, t, 0, 0);
            }

          gsi = gsi_for_stmt (t);
          if (gimple_code (t) == GIMPLE_PHI)
            remove_phi_node (&gsi, true);
          else
            {
              gsi_remove (&gsi, true);
              release_defs (t);
            }
        }
    }
  BITMAP_FREE (worklist);
}
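/* For illustration (hypothetical GIMPLE): if insert () created

       pretmp_5 = a_1 + b_2;

   to make a partially redundant computation fully redundant, but elimination
   later rewrote every use of pretmp_5 to an existing leader, pretmp_5 is
   never marked NECESSARY and the insertion is deleted again by the walk
   above.  */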
/* Initialize data structures used by PRE.  */

static void
init_pre (void)
{
  basic_block bb;

  next_expression_id = 1;
  expressions.create (0);
  expressions.safe_push (NULL);
  value_expressions.create (get_max_value_id () + 1);
  value_expressions.safe_grow_cleared (get_max_value_id () + 1);
  name_to_id.create (0);

  inserted_exprs = BITMAP_ALLOC (NULL);

  connect_infinite_loops_to_exit ();
  memset (&pre_stats, 0, sizeof (pre_stats));

  postorder = XNEWVEC (int, n_basic_blocks);
  postorder_num = inverted_post_order_compute (postorder);

  alloc_aux_for_blocks (sizeof (struct bb_bitmap_sets));

  calculate_dominance_info (CDI_POST_DOMINATORS);
  calculate_dominance_info (CDI_DOMINATORS);

  bitmap_obstack_initialize (&grand_bitmap_obstack);
  phi_translate_table.create (5110);
  expression_to_id.create (num_ssa_names * 3);
  bitmap_set_pool = create_alloc_pool ("Bitmap sets",
                                       sizeof (struct bitmap_set), 30);
  pre_expr_pool = create_alloc_pool ("pre_expr nodes",
                                     sizeof (struct pre_expr_d), 30);
  FOR_ALL_BB (bb)
    {
      EXP_GEN (bb) = bitmap_set_new ();
      PHI_GEN (bb) = bitmap_set_new ();
      TMP_GEN (bb) = bitmap_set_new ();
      AVAIL_OUT (bb) = bitmap_set_new ();
    }
}
/* Deallocate data structures used by PRE.  */

static void
fini_pre ()
{
  free (postorder);
  value_expressions.release ();
  BITMAP_FREE (inserted_exprs);
  bitmap_obstack_release (&grand_bitmap_obstack);
  free_alloc_pool (bitmap_set_pool);
  free_alloc_pool (pre_expr_pool);
  phi_translate_table.dispose ();
  expression_to_id.dispose ();
  name_to_id.release ();

  free_aux_for_blocks ();

  free_dominance_info (CDI_POST_DOMINATORS);
}
/* Gate and execute functions for PRE.  */

static unsigned int
do_pre (void)
{
  unsigned int todo = 0;

  do_partial_partial =
    flag_tree_partial_pre && optimize_function_for_speed_p (cfun);

  /* This has to happen before SCCVN runs because
     loop_optimizer_init may create new phis, etc.  */
  loop_optimizer_init (LOOPS_NORMAL);

  if (!run_scc_vn (VN_WALK))
    {
      loop_optimizer_finalize ();
      return 0;
    }

  init_pre ();
  scev_initialize ();

  /* Collect and value number expressions computed in each basic block.  */
  compute_avail ();

  /* Insert can get quite slow on an incredibly large number of basic
     blocks due to some quadratic behavior.  Until this behavior is
     fixed, don't run it when we have an incredibly large number of
     bb's.  If we aren't going to run insert, there is no point in
     computing ANTIC, either, even though it's plenty fast.  */
  if (n_basic_blocks < 4000)
    {
      compute_antic ();
      insert ();
    }

  /* Make sure to remove fake edges before committing our inserts.
     This makes sure we don't end up with extra critical edges that
     we would need to split.  */
  remove_fake_exit_edges ();
  gsi_commit_edge_inserts ();

  /* Remove all the redundant expressions.  */
  todo |= eliminate ();

  statistics_counter_event (cfun, "Insertions", pre_stats.insertions);
  statistics_counter_event (cfun, "PA inserted", pre_stats.pa_insert);
  statistics_counter_event (cfun, "New PHIs", pre_stats.phis);
  statistics_counter_event (cfun, "Eliminated", pre_stats.eliminations);

  clear_expression_ids ();
  remove_dead_inserted_code ();
  todo |= TODO_verify_flow;

  scev_finalize ();
  fini_pre ();
  todo |= fini_eliminate ();
  loop_optimizer_finalize ();

  /* TODO: tail_merge_optimize may merge all predecessors of a block, in which
     case we can merge the block with the remaining predecessor of the block.
     It should either:
     - call merge_blocks after each tail merge iteration
     - call merge_blocks after all tail merge iterations
     - mark TODO_cleanup_cfg when necessary
     - share the cfg cleanup with fini_pre.  */
  todo |= tail_merge_optimize (todo);

  free_scc_vn ();

  /* Tail merging invalidates the virtual SSA web; together with
     cfg-cleanup opportunities exposed by PRE, this will wreck the
     SSA updating machinery.  So make sure to run update-ssa
     manually, before eventually scheduling cfg-cleanup as part of
     the todo.  */
  update_ssa (TODO_update_ssa_only_virtuals);

  return todo;
}

static bool
gate_pre (void)
{
  return flag_tree_pre != 0;
}
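/* For illustration (hypothetical source): in

       if (cond)
         x = a + b;
       ...
       y = a + b;

   the second computation of a + b is redundant only on the path through the
   if.  compute_avail and compute_antic discover this, insert () adds an
   evaluation of a + b on the path where it was missing, and eliminate ()
   replaces the second computation with the resulting leader, so a + b is
   computed at most once on every path.  */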
struct gimple_opt_pass pass_pre =
{
 {
  GIMPLE_PASS,
  "pre",				/* name */
  OPTGROUP_NONE,			/* optinfo_flags */
  gate_pre,				/* gate */
  do_pre,				/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_PRE,				/* tv_id */
  PROP_no_crit_edges | PROP_cfg
    | PROP_ssa,				/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  TODO_rebuild_alias,			/* todo_flags_start */
  TODO_ggc_collect | TODO_verify_ssa	/* todo_flags_finish */
 }
};
/* Gate and execute functions for FRE.  */

static unsigned int
execute_fre (void)
{
  unsigned int todo = 0;

  if (!run_scc_vn (VN_WALKREWRITE))
    return 0;

  memset (&pre_stats, 0, sizeof (pre_stats));

  /* Remove all the redundant expressions.  */
  todo |= eliminate ();

  todo |= fini_eliminate ();

  free_scc_vn ();

  statistics_counter_event (cfun, "Insertions", pre_stats.insertions);
  statistics_counter_event (cfun, "Eliminated", pre_stats.eliminations);

  return todo;
}

static bool
gate_fre (void)
{
  return flag_tree_fre != 0;
}
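/* For illustration (hypothetical source): FRE performs no insertions, so it
   only removes computations that are redundant on every incoming path:

       x = a + b;
       ...
       y = a + b;    <-- same value everywhere, replaced by x

   Because nothing is inserted, pass_fre below only requires PROP_cfg and
   PROP_ssa, not PROP_no_crit_edges as pass_pre does.  */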
struct gimple_opt_pass pass_fre =
{
 {
  GIMPLE_PASS,
  "fre",				/* name */
  OPTGROUP_NONE,			/* optinfo_flags */
  gate_fre,				/* gate */
  execute_fre,				/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_FRE,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_ggc_collect | TODO_verify_ssa	/* todo_flags_finish */