/* SSA-PRE for trees.
   Copyright (C) 2001-2013 Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org> and Steven Bosscher
   <stevenb@suse.de>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "basic-block.h"
#include "gimple-pretty-print.h"
#include "tree-inline.h"
#include "hash-table.h"
#include "tree-iterator.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "langhooks.h"
#include "tree-ssa-sccvn.h"
#include "tree-scalar-evolution.h"
/* TODO:

   1. Avail sets can be shared by making an avail_find_leader that
      walks up the dominator tree and looks in those avail sets.
      This might affect code optimality; it's unclear right now.
   2. Strength reduction can be performed by anticipating expressions
      we can repair later on.
   3. We can do back-substitution or smarter value numbering to catch
      commutative expressions split up over multiple statements.
*/
/* For ease of terminology, "expression node" in the below refers to
   every expression node but GIMPLE_ASSIGN, because GIMPLE_ASSIGNs
   represent the actual statement containing the expressions we care about,
   and we cache the value number by putting it in the expression.  */
/* Basic algorithm:

   First we walk the statements to generate the AVAIL sets, the
   EXP_GEN sets, and the tmp_gen sets.  EXP_GEN sets represent the
   generation of values/expressions by a given block.  We use them
   when computing the ANTIC sets.  The AVAIL sets consist of
   SSA_NAME's that represent values, so we know what values are
   available in what blocks.  AVAIL is a forward dataflow problem.  In
   SSA, values are never killed, so we don't need a kill set, or a
   fixpoint iteration, in order to calculate the AVAIL sets.  In
   traditional parlance, AVAIL sets tell us the downsafety of the
   expressions/values.

   Next, we generate the ANTIC sets.  These sets represent the
   anticipatable expressions.  ANTIC is a backwards dataflow
   problem.  An expression is anticipatable in a given block if it could
   be generated in that block.  This means that if we had to perform
   an insertion in that block, of the value of that expression, we
   could.  Calculating the ANTIC sets requires phi translation of
   expressions, because the flow goes backwards through phis.  We must
   iterate to a fixpoint of the ANTIC sets, because we have a kill
   set.  Even in SSA form, values are not live over the entire
   function, only from their definition point onwards.  So we have to
   remove values from the ANTIC set once we go past the definition
   point of the leaders that make them up.
   compute_antic/compute_antic_aux performs this computation.

   Third, we perform insertions to make partially redundant
   expressions fully redundant.

   An expression is partially redundant (excluding partial
   anticipation) if:

   1. It is AVAIL in some, but not all, of the predecessors of a
      given block.
   2. It is ANTIC in all the predecessors.

   In order to make it fully redundant, we insert the expression into
   the predecessors where it is not available, but is ANTIC.

   For the partial anticipation case, we only perform insertion if it
   is partially anticipated in some block, and fully available in all
   of the predecessors.

   insert/insert_aux/do_regular_insertion/do_partial_partial_insertion
   performs these steps.

   Fourth, we eliminate fully redundant expressions.
   This is a simple statement walk that replaces redundant
   calculations with the now available values.  */
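/* As an illustrative sketch (not part of the implementation; t1 and
   t2 are hypothetical temporaries), the net effect of these steps on
   a diamond looks like:

       if (cond)                     if (cond)
         x = a + b;                    x = a + b;
       else              ===>        else
         ;                             t1 = a + b;
       y = a + b;                    t2 = PHI <x, t1>;
                                     y = t2;

   Here a + b is AVAIL in one predecessor of the join and ANTIC in
   both, so inserting it into the other predecessor makes it fully
   redundant, and elimination then reuses the merged value.  */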
/* Representations of value numbers:

   Value numbers are represented by a representative SSA_NAME.  We
   will create fake SSA_NAME's in situations where we need a
   representative but do not have one (because it is a complex
   expression).  In order to facilitate storing the value numbers in
   bitmaps, and keep the number of wasted SSA_NAME's down, we also
   associate a value_id with each value number, and create full blown
   ssa_name's only where we actually need them (IE in operands of
   existing expressions).

   Theoretically you could replace all the value_id's with
   SSA_NAME_VERSION, but this would allocate a large number of
   SSA_NAME's (which are each > 30 bytes) just to get a 4 byte number.
   It would also require an additional indirection at each point we
   use the value id.  */
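/* Purely as an example: if a_1, b_3 and the expression a_1 + 0 all
   compute the same value, they share one value number whose
   representative might be a_1, and the small integer value_id
   associated with it (say 7) is what the dataflow bitmaps store; no
   extra SSA_NAME is allocated just to have a number.  */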
/* Representation of expressions on value numbers:

   Expressions consisting of value numbers are represented the same
   way as our VN internally represents them, with an additional
   "pre_expr" wrapping around them in order to facilitate storing all
   of the expressions in the same sets.  */
/* Representation of sets:

   The dataflow sets do not need to be sorted in any particular order
   for the majority of their lifetime, and are simply represented as two
   bitmaps, one that keeps track of values present in the set, and one
   that keeps track of expressions present in the set.

   When we need them in topological order, we produce it on demand by
   transforming the bitmap into an array and sorting it into topo
   order.  */
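/* For example (illustrative only), a set holding expressions e17 and
   e23 that share value 5, plus e4 with value 9, is stored as the
   bitmap pair expressions = {4, 17, 23} and values = {5, 9}, indexed
   by expression id and value_id respectively.  */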
/* Type of expression, used to know which member of the PRE_EXPR union
   is valid.  */

enum pre_expr_kind
{
    CONSTANT,
    NAME,
    NARY,
    REFERENCE
};

typedef union pre_expr_union_d
{
  tree name;
  tree constant;
  vn_nary_op_t nary;
  vn_reference_t reference;
} pre_expr_union;

typedef struct pre_expr_d : typed_noop_remove <pre_expr_d>
{
  enum pre_expr_kind kind;
  unsigned int id;
  pre_expr_union u;

  /* hash_table support.  */
  typedef pre_expr_d value_type;
  typedef pre_expr_d compare_type;
  static inline hashval_t hash (const pre_expr_d *);
  static inline int equal (const pre_expr_d *, const pre_expr_d *);
} *pre_expr;
#define PRE_EXPR_NAME(e) (e)->u.name
#define PRE_EXPR_NARY(e) (e)->u.nary
#define PRE_EXPR_REFERENCE(e) (e)->u.reference
#define PRE_EXPR_CONSTANT(e) (e)->u.constant
/* Compare E1 and E2 for equality.  */

inline int
pre_expr_d::equal (const value_type *e1, const compare_type *e2)
{
  if (e1->kind != e2->kind)
    return false;

  switch (e1->kind)
    {
    case CONSTANT:
      return vn_constant_eq_with_type (PRE_EXPR_CONSTANT (e1),
                                       PRE_EXPR_CONSTANT (e2));
    case NAME:
      return PRE_EXPR_NAME (e1) == PRE_EXPR_NAME (e2);
    case NARY:
      return vn_nary_op_eq (PRE_EXPR_NARY (e1), PRE_EXPR_NARY (e2));
    case REFERENCE:
      return vn_reference_eq (PRE_EXPR_REFERENCE (e1),
                              PRE_EXPR_REFERENCE (e2));
    default:
      gcc_unreachable ();
    }
}
/* Hash E.  */

inline hashval_t
pre_expr_d::hash (const value_type *e)
{
  switch (e->kind)
    {
    case CONSTANT:
      return vn_hash_constant_with_type (PRE_EXPR_CONSTANT (e));
    case NAME:
      return SSA_NAME_VERSION (PRE_EXPR_NAME (e));
    case NARY:
      return PRE_EXPR_NARY (e)->hashcode;
    case REFERENCE:
      return PRE_EXPR_REFERENCE (e)->hashcode;
    default:
      gcc_unreachable ();
    }
}
/* Next global expression id number.  */
static unsigned int next_expression_id;

/* Mapping from expression to id number we can use in bitmap sets.  */
static vec<pre_expr> expressions;
static hash_table <pre_expr_d> expression_to_id;
static vec<unsigned> name_to_id;
/* Allocate an expression id for EXPR.  */

static inline unsigned int
alloc_expression_id (pre_expr expr)
{
  struct pre_expr_d **slot;
  /* Make sure we won't overflow.  */
  gcc_assert (next_expression_id + 1 > next_expression_id);
  expr->id = next_expression_id++;
  expressions.safe_push (expr);
  if (expr->kind == NAME)
    {
      unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr));
      /* vec::safe_grow_cleared allocates no headroom.  Avoid frequent
         re-allocations by using vec::reserve upfront.  There is no
         vec::quick_grow_cleared unfortunately.  */
      unsigned old_len = name_to_id.length ();
      name_to_id.reserve (num_ssa_names - old_len);
      name_to_id.safe_grow_cleared (num_ssa_names);
      gcc_assert (name_to_id[version] == 0);
      name_to_id[version] = expr->id;
    }
  else
    {
      slot = expression_to_id.find_slot (expr, INSERT);
      gcc_assert (!*slot);
      *slot = expr;
    }
  return next_expression_id - 1;
}
/* Return the expression id for tree EXPR.  */

static inline unsigned int
get_expression_id (const pre_expr expr)
{
  return expr->id;
}

static inline unsigned int
lookup_expression_id (const pre_expr expr)
{
  struct pre_expr_d **slot;

  if (expr->kind == NAME)
    {
      unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr));
      if (name_to_id.length () <= version)
        return 0;
      return name_to_id[version];
    }
  else
    {
      slot = expression_to_id.find_slot (expr, NO_INSERT);
      if (!slot)
        return 0;
      return ((pre_expr)*slot)->id;
    }
}

/* Return the existing expression id for EXPR, or create one if one
   does not exist yet.  */

static inline unsigned int
get_or_alloc_expression_id (pre_expr expr)
{
  unsigned int id = lookup_expression_id (expr);
  if (id == 0)
    return alloc_expression_id (expr);
  return expr->id = id;
}
/* Return the expression that has expression id ID.  */

static inline pre_expr
expression_for_id (unsigned int id)
{
  return expressions[id];
}

/* Free the expression id field in all of our expressions,
   and then destroy the expressions array.  */

static void
clear_expression_ids (void)
{
  expressions.release ();
}

static alloc_pool pre_expr_pool;
/* Given an SSA_NAME NAME, get or create a pre_expr to represent it.  */

static pre_expr
get_or_alloc_expr_for_name (tree name)
{
  struct pre_expr_d expr;
  pre_expr result;
  unsigned int result_id;

  expr.kind = NAME;
  expr.id = 0;
  PRE_EXPR_NAME (&expr) = name;
  result_id = lookup_expression_id (&expr);
  if (result_id != 0)
    return expression_for_id (result_id);

  result = (pre_expr) pool_alloc (pre_expr_pool);
  result->kind = NAME;
  PRE_EXPR_NAME (result) = name;
  alloc_expression_id (result);
  return result;
}
/* An unordered bitmap set.  One bitmap tracks values, the other,
   expressions.  */
typedef struct bitmap_set
{
  bitmap_head expressions;
  bitmap_head values;
} *bitmap_set_t;

#define FOR_EACH_EXPR_ID_IN_SET(set, id, bi)		\
  EXECUTE_IF_SET_IN_BITMAP(&(set)->expressions, 0, (id), (bi))

#define FOR_EACH_VALUE_ID_IN_SET(set, id, bi)		\
  EXECUTE_IF_SET_IN_BITMAP(&(set)->values, 0, (id), (bi))
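/* A typical use of these iterators, sketched after the pattern used
   throughout this file:

     unsigned int i;
     bitmap_iterator bi;

     FOR_EACH_EXPR_ID_IN_SET (set, i, bi)
       {
         pre_expr expr = expression_for_id (i);
         ...
       }
*/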
/* Mapping from value id to expressions with that value_id.  */
static vec<bitmap> value_expressions;
/* Sets that we need to keep track of.  */
typedef struct bb_bitmap_sets
{
  /* The EXP_GEN set, which represents expressions/values generated in
     a basic block.  */
  bitmap_set_t exp_gen;

  /* The PHI_GEN set, which represents PHI results generated in a
     basic block.  */
  bitmap_set_t phi_gen;

  /* The TMP_GEN set, which represents results/temporaries generated
     in a basic block.  IE the LHS of an expression.  */
  bitmap_set_t tmp_gen;

  /* The AVAIL_OUT set, which represents which values are available in
     a given basic block.  */
  bitmap_set_t avail_out;

  /* The ANTIC_IN set, which represents which values are anticipatable
     in a given basic block.  */
  bitmap_set_t antic_in;

  /* The PA_IN set, which represents which values are
     partially anticipatable in a given basic block.  */
  bitmap_set_t pa_in;

  /* The NEW_SETS set, which is used during insertion to augment the
     AVAIL_OUT set of blocks with the new insertions performed during
     the current iteration.  */
  bitmap_set_t new_sets;

  /* A cache for value_dies_in_block_x.  */
  bitmap expr_dies;

  /* True if we have visited this block during ANTIC calculation.  */
  unsigned int visited : 1;

  /* True if we have deferred processing this block during ANTIC
     calculation until its successor is processed.  */
  unsigned int deferred : 1;

  /* True when the block contains a call that might not return.  */
  unsigned int contains_may_not_return_call : 1;
} *bb_value_sets_t;
#define EXP_GEN(BB)	((bb_value_sets_t) ((BB)->aux))->exp_gen
#define PHI_GEN(BB)	((bb_value_sets_t) ((BB)->aux))->phi_gen
#define TMP_GEN(BB)	((bb_value_sets_t) ((BB)->aux))->tmp_gen
#define AVAIL_OUT(BB)	((bb_value_sets_t) ((BB)->aux))->avail_out
#define ANTIC_IN(BB)	((bb_value_sets_t) ((BB)->aux))->antic_in
#define PA_IN(BB)	((bb_value_sets_t) ((BB)->aux))->pa_in
#define NEW_SETS(BB)	((bb_value_sets_t) ((BB)->aux))->new_sets
#define EXPR_DIES(BB)	((bb_value_sets_t) ((BB)->aux))->expr_dies
#define BB_VISITED(BB)	((bb_value_sets_t) ((BB)->aux))->visited
#define BB_DEFERRED(BB) ((bb_value_sets_t) ((BB)->aux))->deferred
#define BB_MAY_NOTRETURN(BB) ((bb_value_sets_t) ((BB)->aux))->contains_may_not_return_call
/* Basic block list in postorder.  */
static int *postorder;
static int postorder_num;
/* This structure is used to keep track of statistics on what
   optimization PRE was able to perform.  */
static struct
{
  /* The number of RHS computations eliminated by PRE.  */
  int eliminations;

  /* The number of new expressions/temporaries generated by PRE.  */
  int insertions;

  /* The number of inserts found due to partial anticipation.  */
  int pa_insert;

  /* The number of new PHI nodes added by PRE.  */
  int phis;
} pre_stats;
static bool do_partial_partial;
static pre_expr bitmap_find_leader (bitmap_set_t, unsigned int);
static void bitmap_value_insert_into_set (bitmap_set_t, pre_expr);
static void bitmap_value_replace_in_set (bitmap_set_t, pre_expr);
static void bitmap_set_copy (bitmap_set_t, bitmap_set_t);
static bool bitmap_set_contains_value (bitmap_set_t, unsigned int);
static void bitmap_insert_into_set (bitmap_set_t, pre_expr);
static void bitmap_insert_into_set_1 (bitmap_set_t, pre_expr,
				      unsigned int, bool);
static bitmap_set_t bitmap_set_new (void);
static tree create_expression_by_pieces (basic_block, pre_expr, gimple_seq *,
					 tree);
static tree find_or_generate_expression (basic_block, tree, gimple_seq *);
static unsigned int get_expr_value_id (pre_expr);
/* We can add and remove elements and entries to and from sets
   and hash tables, so we use alloc pools for them.  */

static alloc_pool bitmap_set_pool;
static bitmap_obstack grand_bitmap_obstack;

/* Set of blocks with statements that have had their EH properties changed.  */
static bitmap need_eh_cleanup;

/* Set of blocks with statements that have had their AB properties changed.  */
static bitmap need_ab_cleanup;
/* A three tuple {e, pred, v} used to cache phi translations in the
   phi_translate_table.  */

typedef struct expr_pred_trans_d : typed_free_remove <expr_pred_trans_d>
{
  /* The expression.  */
  pre_expr e;

  /* The predecessor block along which we translated the expression.  */
  basic_block pred;

  /* The value that resulted from the translation.  */
  pre_expr v;

  /* The hashcode for the expression, pred pair.  This is cached for
     speed reasons.  */
  hashval_t hashcode;

  /* hash_table support.  */
  typedef expr_pred_trans_d value_type;
  typedef expr_pred_trans_d compare_type;
  static inline hashval_t hash (const value_type *);
  static inline int equal (const value_type *, const compare_type *);
} *expr_pred_trans_t;
typedef const struct expr_pred_trans_d *const_expr_pred_trans_t;
inline hashval_t
expr_pred_trans_d::hash (const expr_pred_trans_d *e)
{
  return e->hashcode;
}

inline int
expr_pred_trans_d::equal (const value_type *ve1,
			  const compare_type *ve2)
{
  basic_block b1 = ve1->pred;
  basic_block b2 = ve2->pred;

  /* If they are not translations for the same basic block, they can't
     be equal.  */
  if (b1 != b2)
    return false;
  return pre_expr_d::equal (ve1->e, ve2->e);
}
/* The phi_translate_table caches phi translations for a given
   expression and predecessor.  */
static hash_table <expr_pred_trans_d> phi_translate_table;
/* Add the tuple mapping from {expression E, basic block PRED} to
   the phi translation table and return whether it pre-existed.  */

static inline bool
phi_trans_add (expr_pred_trans_t *entry, pre_expr e, basic_block pred)
{
  expr_pred_trans_t *slot;
  expr_pred_trans_d tem;
  hashval_t hash = iterative_hash_hashval_t (pre_expr_d::hash (e),
					     pred->index);
  tem.e = e;
  tem.pred = pred;
  tem.hashcode = hash;
  slot = phi_translate_table.find_slot_with_hash (&tem, hash, INSERT);
  if (*slot)
    {
      *entry = *slot;
      return true;
    }

  *entry = *slot = XNEW (struct expr_pred_trans_d);
  (*entry)->e = e;
  (*entry)->pred = pred;
  (*entry)->hashcode = hash;
  return false;
}
/* Add expression E to the expression set of value id V.  */

static void
add_to_value (unsigned int v, pre_expr e)
{
  bitmap set;

  gcc_checking_assert (get_expr_value_id (e) == v);

  if (v >= value_expressions.length ())
    value_expressions.safe_grow_cleared (v + 1);

  set = value_expressions[v];
  if (!set)
    {
      set = BITMAP_ALLOC (&grand_bitmap_obstack);
      value_expressions[v] = set;
    }

  bitmap_set_bit (set, get_or_alloc_expression_id (e));
}
/* Create a new bitmap set and return it.  */

static bitmap_set_t
bitmap_set_new (void)
{
  bitmap_set_t ret = (bitmap_set_t) pool_alloc (bitmap_set_pool);
  bitmap_initialize (&ret->expressions, &grand_bitmap_obstack);
  bitmap_initialize (&ret->values, &grand_bitmap_obstack);
  return ret;
}
/* Return the value id for a PRE expression EXPR.  */

static unsigned int
get_expr_value_id (pre_expr expr)
{
  unsigned int id;
  switch (expr->kind)
    {
    case CONSTANT:
      id = get_constant_value_id (PRE_EXPR_CONSTANT (expr));
      break;
    case NAME:
      id = VN_INFO (PRE_EXPR_NAME (expr))->value_id;
      break;
    case NARY:
      id = PRE_EXPR_NARY (expr)->value_id;
      break;
    case REFERENCE:
      id = PRE_EXPR_REFERENCE (expr)->value_id;
      break;
    default:
      gcc_unreachable ();
    }
  /* ??? We cannot assert that expr has a value-id (it can be 0), because
     we assign value-ids only to expressions that have a result
     in set_hashtable_value_ids.  */
  return id;
}
/* Return a SCCVN valnum (SSA name or constant) for the PRE value-id VAL.  */

static tree
sccvn_valnum_from_value_id (unsigned int val)
{
  bitmap_iterator bi;
  unsigned i;
  bitmap exprset = value_expressions[val];
  EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
    {
      pre_expr vexpr = expression_for_id (i);
      if (vexpr->kind == NAME)
	return VN_INFO (PRE_EXPR_NAME (vexpr))->valnum;
      else if (vexpr->kind == CONSTANT)
	return PRE_EXPR_CONSTANT (vexpr);
    }
  return NULL_TREE;
}
/* Remove an expression EXPR from a bitmapped set.  */

static void
bitmap_remove_from_set (bitmap_set_t set, pre_expr expr)
{
  unsigned int val = get_expr_value_id (expr);
  if (!value_id_constant_p (val))
    {
      bitmap_clear_bit (&set->values, val);
      bitmap_clear_bit (&set->expressions, get_expression_id (expr));
    }
}

static void
bitmap_insert_into_set_1 (bitmap_set_t set, pre_expr expr,
			  unsigned int val, bool allow_constants)
{
  if (allow_constants || !value_id_constant_p (val))
    {
      /* We specifically expect this and only this function to be able to
	 insert constants into a set.  */
      bitmap_set_bit (&set->values, val);
      bitmap_set_bit (&set->expressions, get_or_alloc_expression_id (expr));
    }
}

/* Insert an expression EXPR into a bitmapped set.  */

static void
bitmap_insert_into_set (bitmap_set_t set, pre_expr expr)
{
  bitmap_insert_into_set_1 (set, expr, get_expr_value_id (expr), false);
}
/* Copy a bitmapped set ORIG into bitmapped set DEST.  */

static void
bitmap_set_copy (bitmap_set_t dest, bitmap_set_t orig)
{
  bitmap_copy (&dest->expressions, &orig->expressions);
  bitmap_copy (&dest->values, &orig->values);
}

/* Free memory used up by SET.  */

static void
bitmap_set_free (bitmap_set_t set)
{
  bitmap_clear (&set->expressions);
  bitmap_clear (&set->values);
}
/* Generate a topologically ordered array of bitmap set SET.  */

static vec<pre_expr>
sorted_array_from_bitmap_set (bitmap_set_t set)
{
  unsigned int i, j;
  bitmap_iterator bi, bj;
  vec<pre_expr> result;

  /* Pre-allocate roughly enough space for the array.  */
  result.create (bitmap_count_bits (&set->values));

  FOR_EACH_VALUE_ID_IN_SET (set, i, bi)
    {
      /* The number of expressions having a given value is usually
	 relatively small.  Thus, rather than making a vector of all
	 the expressions and sorting it by value-id, we walk the values
	 and check in the reverse mapping that tells us what expressions
	 have a given value, to filter those in our set.  As a result,
	 the expressions are inserted in value-id order, which means
	 topological order.

	 If this is somehow a significant lose for some cases, we can
	 choose which set to walk based on the set size.  */
      bitmap exprset = value_expressions[i];
      EXECUTE_IF_SET_IN_BITMAP (exprset, 0, j, bj)
	{
	  if (bitmap_bit_p (&set->expressions, j))
	    result.safe_push (expression_for_id (j));
	}
    }

  return result;
}
/* Perform bitmapped set operation DEST &= ORIG.  */

static void
bitmap_set_and (bitmap_set_t dest, bitmap_set_t orig)
{
  bitmap_iterator bi;
  unsigned int i;

  if (dest != orig)
    {
      bitmap_head temp;
      bitmap_initialize (&temp, &grand_bitmap_obstack);

      bitmap_and_into (&dest->values, &orig->values);
      bitmap_copy (&temp, &dest->expressions);
      EXECUTE_IF_SET_IN_BITMAP (&temp, 0, i, bi)
	{
	  pre_expr expr = expression_for_id (i);
	  unsigned int value_id = get_expr_value_id (expr);
	  if (!bitmap_bit_p (&dest->values, value_id))
	    bitmap_clear_bit (&dest->expressions, i);
	}
      bitmap_clear (&temp);
    }
}
/* Subtract all values and expressions contained in ORIG from DEST.  */

static bitmap_set_t
bitmap_set_subtract (bitmap_set_t dest, bitmap_set_t orig)
{
  bitmap_set_t result = bitmap_set_new ();
  bitmap_iterator bi;
  unsigned int i;

  bitmap_and_compl (&result->expressions, &dest->expressions,
		    &orig->expressions);

  FOR_EACH_EXPR_ID_IN_SET (result, i, bi)
    {
      pre_expr expr = expression_for_id (i);
      unsigned int value_id = get_expr_value_id (expr);
      bitmap_set_bit (&result->values, value_id);
    }

  return result;
}
/* Subtract all the values in bitmap set B from bitmap set A.  */

static void
bitmap_set_subtract_values (bitmap_set_t a, bitmap_set_t b)
{
  unsigned int i;
  bitmap_iterator bi;
  bitmap_head temp;

  bitmap_initialize (&temp, &grand_bitmap_obstack);

  bitmap_copy (&temp, &a->expressions);
  EXECUTE_IF_SET_IN_BITMAP (&temp, 0, i, bi)
    {
      pre_expr expr = expression_for_id (i);
      if (bitmap_set_contains_value (b, get_expr_value_id (expr)))
	bitmap_remove_from_set (a, expr);
    }
  bitmap_clear (&temp);
}
/* Return true if bitmapped set SET contains the value VALUE_ID.  */

static bool
bitmap_set_contains_value (bitmap_set_t set, unsigned int value_id)
{
  if (value_id_constant_p (value_id))
    return true;

  if (!set || bitmap_empty_p (&set->expressions))
    return false;

  return bitmap_bit_p (&set->values, value_id);
}

static inline bool
bitmap_set_contains_expr (bitmap_set_t set, const pre_expr expr)
{
  return bitmap_bit_p (&set->expressions, get_expression_id (expr));
}
/* Replace an instance of value LOOKFOR with expression EXPR in SET.  */

static void
bitmap_set_replace_value (bitmap_set_t set, unsigned int lookfor,
			  const pre_expr expr)
{
  bitmap exprset;
  unsigned int i;
  bitmap_iterator bi;

  if (value_id_constant_p (lookfor))
    return;

  if (!bitmap_set_contains_value (set, lookfor))
    return;

  /* The number of expressions having a given value is usually
     significantly less than the total number of expressions in SET.
     Thus, rather than check, for each expression in SET, whether it
     has the value LOOKFOR, we walk the reverse mapping that tells us
     what expressions have a given value, and see if any of those
     expressions are in our set.  For large testcases, this is about
     5-10x faster than walking the bitmap.  If this is somehow a
     significant lose for some cases, we can choose which set to walk
     based on the set size.  */
  exprset = value_expressions[lookfor];
  EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
    {
      if (bitmap_clear_bit (&set->expressions, i))
	{
	  bitmap_set_bit (&set->expressions, get_expression_id (expr));
	  return;
	}
    }
}
/* Return true if two bitmap sets are equal.  */

static bool
bitmap_set_equal (bitmap_set_t a, bitmap_set_t b)
{
  return bitmap_equal_p (&a->values, &b->values);
}

/* Replace an instance of EXPR's VALUE with EXPR in SET if it exists,
   and add it otherwise.  */

static void
bitmap_value_replace_in_set (bitmap_set_t set, pre_expr expr)
{
  unsigned int val = get_expr_value_id (expr);

  if (bitmap_set_contains_value (set, val))
    bitmap_set_replace_value (set, val, expr);
  else
    bitmap_insert_into_set (set, expr);
}
/* Insert EXPR into SET if EXPR's value is not already present in
   SET.  */

static void
bitmap_value_insert_into_set (bitmap_set_t set, pre_expr expr)
{
  unsigned int val = get_expr_value_id (expr);

  gcc_checking_assert (expr->id == get_or_alloc_expression_id (expr));

  /* Constant values are always considered to be part of the set.  */
  if (value_id_constant_p (val))
    return;

  /* If the value membership changed, add the expression.  */
  if (bitmap_set_bit (&set->values, val))
    bitmap_set_bit (&set->expressions, expr->id);
}
/* Print out EXPR to outfile.  */

static void
print_pre_expr (FILE *outfile, const pre_expr expr)
{
  switch (expr->kind)
    {
    case CONSTANT:
      print_generic_expr (outfile, PRE_EXPR_CONSTANT (expr), 0);
      break;
    case NAME:
      print_generic_expr (outfile, PRE_EXPR_NAME (expr), 0);
      break;
    case NARY:
      {
	unsigned int i;
	vn_nary_op_t nary = PRE_EXPR_NARY (expr);
	fprintf (outfile, "{%s,", tree_code_name [nary->opcode]);
	for (i = 0; i < nary->length; i++)
	  {
	    print_generic_expr (outfile, nary->op[i], 0);
	    if (i != (unsigned) nary->length - 1)
	      fprintf (outfile, ",");
	  }
	fprintf (outfile, "}");
      }
      break;

    case REFERENCE:
      {
	vn_reference_op_t vro;
	unsigned int i;
	vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
	fprintf (outfile, "{");
	for (i = 0;
	     ref->operands.iterate (i, &vro);
	     i++)
	  {
	    bool closebrace = false;
	    if (vro->opcode != SSA_NAME
		&& TREE_CODE_CLASS (vro->opcode) != tcc_declaration)
	      {
		fprintf (outfile, "%s", tree_code_name [vro->opcode]);
		if (vro->op0)
		  {
		    fprintf (outfile, "<");
		    closebrace = true;
		  }
	      }
	    if (vro->op0)
	      {
		print_generic_expr (outfile, vro->op0, 0);
		if (vro->op1)
		  {
		    fprintf (outfile, ",");
		    print_generic_expr (outfile, vro->op1, 0);
		  }
		if (vro->op2)
		  {
		    fprintf (outfile, ",");
		    print_generic_expr (outfile, vro->op2, 0);
		  }
	      }
	    if (closebrace)
	      fprintf (outfile, ">");
	    if (i != ref->operands.length () - 1)
	      fprintf (outfile, ",");
	  }
	fprintf (outfile, "}");
	if (ref->vuse)
	  {
	    fprintf (outfile, "@");
	    print_generic_expr (outfile, ref->vuse, 0);
	  }
      }
      break;
    }
}
void debug_pre_expr (pre_expr);

/* Like print_pre_expr but always prints to stderr.  */
DEBUG_FUNCTION void
debug_pre_expr (pre_expr e)
{
  print_pre_expr (stderr, e);
  fprintf (stderr, "\n");
}
/* Print out SET to OUTFILE.  */

static void
print_bitmap_set (FILE *outfile, bitmap_set_t set,
		  const char *setname, int blockindex)
{
  fprintf (outfile, "%s[%d] := { ", setname, blockindex);
  if (set)
    {
      bool first = true;
      unsigned i;
      bitmap_iterator bi;

      FOR_EACH_EXPR_ID_IN_SET (set, i, bi)
	{
	  const pre_expr expr = expression_for_id (i);

	  if (!first)
	    fprintf (outfile, ", ");
	  first = false;
	  print_pre_expr (outfile, expr);

	  fprintf (outfile, " (%04d)", get_expr_value_id (expr));
	}
    }
  fprintf (outfile, " }\n");
}
void debug_bitmap_set (bitmap_set_t);

DEBUG_FUNCTION void
debug_bitmap_set (bitmap_set_t set)
{
  print_bitmap_set (stderr, set, "debug", 0);
}

void debug_bitmap_sets_for (basic_block);

DEBUG_FUNCTION void
debug_bitmap_sets_for (basic_block bb)
{
  print_bitmap_set (stderr, AVAIL_OUT (bb), "avail_out", bb->index);
  print_bitmap_set (stderr, EXP_GEN (bb), "exp_gen", bb->index);
  print_bitmap_set (stderr, PHI_GEN (bb), "phi_gen", bb->index);
  print_bitmap_set (stderr, TMP_GEN (bb), "tmp_gen", bb->index);
  print_bitmap_set (stderr, ANTIC_IN (bb), "antic_in", bb->index);
  if (do_partial_partial)
    print_bitmap_set (stderr, PA_IN (bb), "pa_in", bb->index);
  print_bitmap_set (stderr, NEW_SETS (bb), "new_sets", bb->index);
}
/* Print out the expressions that have VAL to OUTFILE.  */

static void
print_value_expressions (FILE *outfile, unsigned int val)
{
  bitmap set = value_expressions[val];
  if (set)
    {
      bitmap_set x;
      char s[10];
      sprintf (s, "%04d", val);
      x.expressions = *set;
      print_bitmap_set (outfile, &x, s, 0);
    }
}

DEBUG_FUNCTION void
debug_value_expressions (unsigned int val)
{
  print_value_expressions (stderr, val);
}
/* Given a CONSTANT, allocate a new CONSTANT type PRE_EXPR to
   represent it.  */

static pre_expr
get_or_alloc_expr_for_constant (tree constant)
{
  unsigned int result_id;
  unsigned int value_id;
  struct pre_expr_d expr;
  pre_expr newexpr;

  expr.kind = CONSTANT;
  PRE_EXPR_CONSTANT (&expr) = constant;
  result_id = lookup_expression_id (&expr);
  if (result_id != 0)
    return expression_for_id (result_id);

  newexpr = (pre_expr) pool_alloc (pre_expr_pool);
  newexpr->kind = CONSTANT;
  PRE_EXPR_CONSTANT (newexpr) = constant;
  alloc_expression_id (newexpr);
  value_id = get_or_alloc_constant_value_id (constant);
  add_to_value (value_id, newexpr);
  return newexpr;
}
/* Given a value id V, find the actual tree representing the constant
   value if there is one, and return it.  Return NULL if we can't find
   a constant.  */

static tree
get_constant_for_value_id (unsigned int v)
{
  if (value_id_constant_p (v))
    {
      unsigned int i;
      bitmap_iterator bi;
      bitmap exprset = value_expressions[v];

      EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
	{
	  pre_expr expr = expression_for_id (i);
	  if (expr->kind == CONSTANT)
	    return PRE_EXPR_CONSTANT (expr);
	}
    }
  return NULL;
}
/* Get or allocate a pre_expr for a piece of GIMPLE, and return it.
   Currently only supports constants and SSA_NAMES.  */
static pre_expr
get_or_alloc_expr_for (tree t)
{
  if (TREE_CODE (t) == SSA_NAME)
    return get_or_alloc_expr_for_name (t);
  else if (is_gimple_min_invariant (t))
    return get_or_alloc_expr_for_constant (t);
  else
    {
      /* More complex expressions can result from SCCVN expression
	 simplification that inserts values for them.  As they all
	 do not have VOPs, they get handled by the nary ops struct.  */
      vn_nary_op_t result;
      unsigned int result_id;
      vn_nary_op_lookup (t, &result);
      if (result != NULL)
	{
	  pre_expr e = (pre_expr) pool_alloc (pre_expr_pool);
	  e->kind = NARY;
	  PRE_EXPR_NARY (e) = result;
	  result_id = lookup_expression_id (e);
	  if (result_id != 0)
	    {
	      pool_free (pre_expr_pool, e);
	      e = expression_for_id (result_id);
	      return e;
	    }
	  alloc_expression_id (e);
	  return e;
	}
    }
  return NULL;
}
/* Return the folded version of T if T, when folded, is a gimple
   min_invariant.  Otherwise, return T.  */

static pre_expr
fully_constant_expression (pre_expr e)
{
  switch (e->kind)
    {
    case CONSTANT:
      return e;
    case NARY:
      {
	vn_nary_op_t nary = PRE_EXPR_NARY (e);
	switch (TREE_CODE_CLASS (nary->opcode))
	  {
	  case tcc_binary:
	  case tcc_comparison:
	    {
	      /* We have to go from trees to pre exprs to value ids to
		 constants.  */
	      tree naryop0 = nary->op[0];
	      tree naryop1 = nary->op[1];
	      tree result;
	      if (!is_gimple_min_invariant (naryop0))
		{
		  pre_expr rep0 = get_or_alloc_expr_for (naryop0);
		  unsigned int vrep0 = get_expr_value_id (rep0);
		  tree const0 = get_constant_for_value_id (vrep0);
		  if (const0)
		    naryop0 = fold_convert (TREE_TYPE (naryop0), const0);
		}
	      if (!is_gimple_min_invariant (naryop1))
		{
		  pre_expr rep1 = get_or_alloc_expr_for (naryop1);
		  unsigned int vrep1 = get_expr_value_id (rep1);
		  tree const1 = get_constant_for_value_id (vrep1);
		  if (const1)
		    naryop1 = fold_convert (TREE_TYPE (naryop1), const1);
		}
	      result = fold_binary (nary->opcode, nary->type,
				    naryop0, naryop1);
	      if (result && is_gimple_min_invariant (result))
		return get_or_alloc_expr_for_constant (result);
	      /* We might have simplified the expression to a
		 SSA_NAME for example from x_1 * 1.  But we cannot
		 insert a PHI for x_1 unconditionally as x_1 might
		 not be available readily.  */
	      return e;
	    }
	  case tcc_reference:
	    if (nary->opcode != REALPART_EXPR
		&& nary->opcode != IMAGPART_EXPR
		&& nary->opcode != VIEW_CONVERT_EXPR)
	      return e;
	    /* Fallthrough.  */
	  case tcc_unary:
	    {
	      /* We have to go from trees to pre exprs to value ids to
		 constants.  */
	      tree naryop0 = nary->op[0];
	      tree const0, result;
	      if (is_gimple_min_invariant (naryop0))
		const0 = naryop0;
	      else
		{
		  pre_expr rep0 = get_or_alloc_expr_for (naryop0);
		  unsigned int vrep0 = get_expr_value_id (rep0);
		  const0 = get_constant_for_value_id (vrep0);
		}
	      result = NULL;
	      if (const0)
		{
		  tree type1 = TREE_TYPE (nary->op[0]);
		  const0 = fold_convert (type1, const0);
		  result = fold_unary (nary->opcode, nary->type, const0);
		}
	      if (result && is_gimple_min_invariant (result))
		return get_or_alloc_expr_for_constant (result);
	      return e;
	    }
	  default:
	    return e;
	  }
      }
    case REFERENCE:
      {
	vn_reference_t ref = PRE_EXPR_REFERENCE (e);
	tree folded;
	if ((folded = fully_constant_vn_reference_p (ref)))
	  return get_or_alloc_expr_for_constant (folded);
	return e;
      }
    default:
      return e;
    }
}
/* Translate the VUSE backwards through phi nodes in PHIBLOCK, so that
   it has the value it would have in BLOCK.  Set *SAME_VALID to true
   in case the new vuse doesn't change the value id of the OPERANDS.  */

static tree
translate_vuse_through_block (vec<vn_reference_op_s> operands,
			      alias_set_type set, tree type, tree vuse,
			      basic_block phiblock,
			      basic_block block, bool *same_valid)
{
  gimple phi = SSA_NAME_DEF_STMT (vuse);
  ao_ref ref;
  edge e = NULL;
  bool use_oracle;

  *same_valid = true;

  if (gimple_bb (phi) != phiblock)
    return vuse;

  use_oracle = ao_ref_init_from_vn_reference (&ref, set, type, operands);

  /* Use the alias-oracle to find either the PHI node in this block,
     the first VUSE used in this block that is equivalent to vuse or
     the first VUSE which definition in this block kills the value.  */
  if (gimple_code (phi) == GIMPLE_PHI)
    e = find_edge (block, phiblock);
  else if (use_oracle)
    while (!stmt_may_clobber_ref_p_1 (phi, &ref))
      {
	vuse = gimple_vuse (phi);
	phi = SSA_NAME_DEF_STMT (vuse);
	if (gimple_bb (phi) != phiblock)
	  return vuse;
	if (gimple_code (phi) == GIMPLE_PHI)
	  {
	    e = find_edge (block, phiblock);
	    break;
	  }
      }
  else
    return NULL_TREE;

  if (e)
    {
      if (use_oracle)
	{
	  bitmap visited = NULL;
	  unsigned int cnt;
	  /* Try to find a vuse that dominates this phi node by skipping
	     non-clobbering statements.  */
	  vuse = get_continuation_for_phi (phi, &ref, &cnt, &visited, false);
	  if (visited)
	    BITMAP_FREE (visited);
	}
      else
	vuse = NULL_TREE;
      if (!vuse)
	{
	  /* If we didn't find any, the value ID can't stay the same,
	     but return the translated vuse.  */
	  *same_valid = false;
	  vuse = PHI_ARG_DEF (phi, e->dest_idx);
	}
      /* ??? We would like to return vuse here as this is the canonical
	 upmost vdef that this reference is associated with.  But during
	 insertion of the references into the hash tables we only ever
	 directly insert with their direct gimple_vuse, hence returning
	 something else would make us not find the other expression.  */
      return PHI_ARG_DEF (phi, e->dest_idx);
    }

  return NULL_TREE;
}
/* Like bitmap_find_leader, but checks for the value existing in SET1 *or*
   SET2.  This is used to avoid making a set consisting of the union
   of PA_IN and ANTIC_IN during insert.  */

static inline pre_expr
find_leader_in_sets (unsigned int val, bitmap_set_t set1, bitmap_set_t set2)
{
  pre_expr result;

  result = bitmap_find_leader (set1, val);
  if (!result && set2)
    result = bitmap_find_leader (set2, val);
  return result;
}

/* Get the tree type for our PRE expression e.  */

static tree
get_expr_type (const pre_expr e)
{
  switch (e->kind)
    {
    case NAME:
      return TREE_TYPE (PRE_EXPR_NAME (e));
    case CONSTANT:
      return TREE_TYPE (PRE_EXPR_CONSTANT (e));
    case REFERENCE:
      return PRE_EXPR_REFERENCE (e)->type;
    case NARY:
      return PRE_EXPR_NARY (e)->type;
    default:
      gcc_unreachable ();
    }
}
/* Get a representative SSA_NAME for a given expression.
   Since all of our sub-expressions are treated as values, we require
   them to be SSA_NAME's for simplicity.
   Prior versions of GVNPRE used to use "value handles" here, so that
   an expression would be VH.11 + VH.10 instead of d_3 + e_6.  In
   either case, the operands are really values (IE we do not expect
   them to be usable without finding leaders).  */

static tree
get_representative_for (const pre_expr e)
{
  tree name;
  unsigned int value_id = get_expr_value_id (e);

  switch (e->kind)
    {
    case NAME:
      return PRE_EXPR_NAME (e);
    case CONSTANT:
      return PRE_EXPR_CONSTANT (e);
    case NARY:
    case REFERENCE:
      {
	/* Go through all of the expressions representing this value
	   and pick out an SSA_NAME.  */
	unsigned int i;
	bitmap_iterator bi;
	bitmap exprs = value_expressions[value_id];
	EXECUTE_IF_SET_IN_BITMAP (exprs, 0, i, bi)
	  {
	    pre_expr rep = expression_for_id (i);
	    if (rep->kind == NAME)
	      return PRE_EXPR_NAME (rep);
	    else if (rep->kind == CONSTANT)
	      return PRE_EXPR_CONSTANT (rep);
	  }
      }
      break;
    }

  /* If we reached here we couldn't find an SSA_NAME.  This can
     happen when we've discovered a value that has never appeared in
     the program as set to an SSA_NAME, as the result of phi translation.
     Create one here.
     ??? We should be able to re-use this when we insert the statement
     to compute it.  */
  name = make_temp_ssa_name (get_expr_type (e), gimple_build_nop (), "pretmp");
  VN_INFO_GET (name)->value_id = value_id;
  VN_INFO (name)->valnum = name;
  /* ??? For now mark this SSA name for release by SCCVN.  */
  VN_INFO (name)->needs_insertion = true;
  add_to_value (value_id, get_or_alloc_expr_for_name (name));
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Created SSA_NAME representative ");
      print_generic_expr (dump_file, name, 0);
      fprintf (dump_file, " for expression:");
      print_pre_expr (dump_file, e);
      fprintf (dump_file, " (%04d)\n", value_id);
    }

  return name;
}
static pre_expr
phi_translate (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
	       basic_block pred, basic_block phiblock);
/* Translate EXPR using phis in PHIBLOCK, so that it has the values of
   the phis in PRED.  Return NULL if we can't find a leader for each part
   of the translated expression.  */

static pre_expr
phi_translate_1 (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
		 basic_block pred, basic_block phiblock)
{
  switch (expr->kind)
    {
    case NARY:
      {
	unsigned int i;
	bool changed = false;
	vn_nary_op_t nary = PRE_EXPR_NARY (expr);
	vn_nary_op_t newnary = XALLOCAVAR (struct vn_nary_op_s,
					   sizeof_vn_nary_op (nary->length));
	memcpy (newnary, nary, sizeof_vn_nary_op (nary->length));

	for (i = 0; i < newnary->length; i++)
	  {
	    if (TREE_CODE (newnary->op[i]) != SSA_NAME)
	      continue;
	    else
	      {
		pre_expr leader, result;
		unsigned int op_val_id = VN_INFO (newnary->op[i])->value_id;
		leader = find_leader_in_sets (op_val_id, set1, set2);
		result = phi_translate (leader, set1, set2, pred, phiblock);
		if (result && result != leader)
		  {
		    tree name = get_representative_for (result);
		    if (!name)
		      return NULL;
		    newnary->op[i] = name;
		  }
		else if (!result)
		  return NULL;

		changed |= newnary->op[i] != nary->op[i];
	      }
	  }
	if (changed)
	  {
	    pre_expr constant;
	    unsigned int new_val_id;

	    tree result = vn_nary_op_lookup_pieces (newnary->length,
						    newnary->opcode,
						    newnary->type,
						    &newnary->op[0],
						    &nary);
	    if (result && is_gimple_min_invariant (result))
	      return get_or_alloc_expr_for_constant (result);

	    expr = (pre_expr) pool_alloc (pre_expr_pool);
	    expr->kind = NARY;
	    expr->id = 0;
	    if (nary)
	      {
		PRE_EXPR_NARY (expr) = nary;
		constant = fully_constant_expression (expr);
		if (constant != expr)
		  return constant;

		new_val_id = nary->value_id;
		get_or_alloc_expression_id (expr);
	      }
	    else
	      {
		new_val_id = get_next_value_id ();
		value_expressions.safe_grow_cleared (get_max_value_id() + 1);
		nary = vn_nary_op_insert_pieces (newnary->length,
						 newnary->opcode,
						 newnary->type,
						 &newnary->op[0],
						 result, new_val_id);
		PRE_EXPR_NARY (expr) = nary;
		constant = fully_constant_expression (expr);
		if (constant != expr)
		  return constant;
		get_or_alloc_expression_id (expr);
	      }
	    add_to_value (new_val_id, expr);
	  }
	return expr;
      }
      break;

    case REFERENCE:
      {
	vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
	vec<vn_reference_op_s> operands = ref->operands;
	tree vuse = ref->vuse;
	tree newvuse = vuse;
	vec<vn_reference_op_s> newoperands = vNULL;
	bool changed = false, same_valid = true;
	unsigned int i, j, n;
	vn_reference_op_t operand;
	vn_reference_t newref;

	for (i = 0, j = 0;
	     operands.iterate (i, &operand); i++, j++)
	  {
	    pre_expr opresult;
	    pre_expr leader;
	    tree op[3];
	    tree type = operand->type;
	    vn_reference_op_s newop = *operand;
	    op[0] = operand->op0;
	    op[1] = operand->op1;
	    op[2] = operand->op2;
	    for (n = 0; n < 3; ++n)
	      {
		unsigned int op_val_id;
		if (!op[n])
		  continue;
		if (TREE_CODE (op[n]) != SSA_NAME)
		  {
		    /* We can't possibly insert these.  */
		    if (TREE_CODE (op[n]) != INTEGER_CST
			&& !is_gimple_min_invariant (op[n]))
		      return NULL;
		    continue;
		  }
		op_val_id = VN_INFO (op[n])->value_id;
		leader = find_leader_in_sets (op_val_id, set1, set2);
		if (!leader)
		  return NULL;
		opresult = phi_translate (leader, set1, set2, pred, phiblock);
		if (!opresult)
		  return NULL;
		if (opresult != leader)
		  {
		    tree name = get_representative_for (opresult);
		    if (!name)
		      return NULL;
		    changed |= name != op[n];
		    op[n] = name;
		  }
	      }
	    if (n != 3)
	      {
		newoperands.release ();
		return NULL;
	      }
	    if (!newoperands.exists ())
	      newoperands = operands.copy ();
	    /* We may have changed from an SSA_NAME to a constant.  */
	    if (newop.opcode == SSA_NAME && TREE_CODE (op[0]) != SSA_NAME)
	      newop.opcode = TREE_CODE (op[0]);
	    newop.type = type;
	    newop.op0 = op[0];
	    newop.op1 = op[1];
	    newop.op2 = op[2];
	    /* If it transforms a non-constant ARRAY_REF into a constant
	       one, adjust the constant offset.  */
	    if (newop.opcode == ARRAY_REF
		&& newop.off == -1
		&& TREE_CODE (op[0]) == INTEGER_CST
		&& TREE_CODE (op[1]) == INTEGER_CST
		&& TREE_CODE (op[2]) == INTEGER_CST)
	      {
		double_int off = tree_to_double_int (op[0]);
		off += -tree_to_double_int (op[1]);
		off *= tree_to_double_int (op[2]);
		if (off.fits_shwi ())
		  newop.off = off.low;
	      }
	    newoperands[j] = newop;
	    /* If it transforms from an SSA_NAME to an address, fold with
	       a preceding indirect reference.  */
	    if (j > 0 && op[0] && TREE_CODE (op[0]) == ADDR_EXPR
		&& newoperands[j - 1].opcode == MEM_REF)
	      vn_reference_fold_indirect (&newoperands, &j);
	  }
	if (i != operands.length ())
	  {
	    newoperands.release ();
	    return NULL;
	  }

	if (vuse)
	  {
	    newvuse = translate_vuse_through_block (newoperands,
						    ref->set, ref->type,
						    vuse, phiblock, pred,
						    &same_valid);
	    if (newvuse == NULL_TREE)
	      {
		newoperands.release ();
		return NULL;
	      }
	  }

	if (changed || newvuse != vuse)
	  {
	    unsigned int new_val_id;
	    pre_expr constant;

	    tree result = vn_reference_lookup_pieces (newvuse, ref->set,
						      ref->type,
						      newoperands,
						      &newref, VN_WALK);
	    if (result)
	      newoperands.release ();

	    /* We can always insert constants, so if we have a partial
	       redundant constant load of another type try to translate it
	       to a constant of appropriate type.  */
	    if (result && is_gimple_min_invariant (result))
	      {
		tree tem = result;
		if (!useless_type_conversion_p (ref->type, TREE_TYPE (result)))
		  {
		    tem = fold_unary (VIEW_CONVERT_EXPR, ref->type, result);
		    if (tem && !is_gimple_min_invariant (tem))
		      tem = NULL_TREE;
		  }
		if (tem)
		  return get_or_alloc_expr_for_constant (tem);
	      }

	    /* If we'd have to convert things we would need to validate
	       if we can insert the translated expression.  So fail
	       here for now - we cannot insert an alias with a different
	       type in the VN tables either, as that would assert.  */
	    if (result
		&& !useless_type_conversion_p (ref->type, TREE_TYPE (result)))
	      return NULL;
	    else if (!result && newref
		     && !useless_type_conversion_p (ref->type, newref->type))
	      {
		newoperands.release ();
		return NULL;
	      }

	    expr = (pre_expr) pool_alloc (pre_expr_pool);
	    expr->kind = REFERENCE;
	    expr->id = 0;

	    if (newref)
	      {
		PRE_EXPR_REFERENCE (expr) = newref;
		constant = fully_constant_expression (expr);
		if (constant != expr)
		  return constant;

		new_val_id = newref->value_id;
		get_or_alloc_expression_id (expr);
	      }
	    else
	      {
		if (changed || !same_valid)
		  {
		    new_val_id = get_next_value_id ();
		    value_expressions.safe_grow_cleared (get_max_value_id() + 1);
		  }
		else
		  new_val_id = ref->value_id;
		newref = vn_reference_insert_pieces (newvuse, ref->set,
						     ref->type,
						     newoperands,
						     result, new_val_id);
		newoperands.create (0);
		PRE_EXPR_REFERENCE (expr) = newref;
		constant = fully_constant_expression (expr);
		if (constant != expr)
		  return constant;
		get_or_alloc_expression_id (expr);
	      }
	    add_to_value (new_val_id, expr);
	  }
	newoperands.release ();
	return expr;
      }
      break;

    case NAME:
      {
	tree name = PRE_EXPR_NAME (expr);
	gimple def_stmt = SSA_NAME_DEF_STMT (name);
	/* If the SSA name is defined by a PHI node in this block,
	   translate it.  */
	if (gimple_code (def_stmt) == GIMPLE_PHI
	    && gimple_bb (def_stmt) == phiblock)
	  {
	    edge e = find_edge (pred, gimple_bb (def_stmt));
	    tree def = PHI_ARG_DEF (def_stmt, e->dest_idx);

	    /* Handle constant.  */
	    if (is_gimple_min_invariant (def))
	      return get_or_alloc_expr_for_constant (def);

	    return get_or_alloc_expr_for_name (def);
	  }
	/* Otherwise return it unchanged - it will get cleaned if its
	   value is not available in PRED's AVAIL_OUT set of expressions.  */
	return expr;
      }

    default:
      gcc_unreachable ();
    }
}
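/* To illustrate phi translation with a made-up example: if PHIBLOCK
   contains x_1 = PHI <x_5 (2), x_7 (3)> and the NARY x_1 + 1 is
   anticipatable there, translating it towards predecessor block 2
   yields x_5 + 1, the form the same value takes on that incoming
   edge.  */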
/* Wrapper around phi_translate_1 providing caching functionality.  */

static pre_expr
phi_translate (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
	       basic_block pred, basic_block phiblock)
{
  expr_pred_trans_t slot = NULL;
  pre_expr phitrans;

  if (!expr)
    return NULL;

  /* Constants contain no values that need translation.  */
  if (expr->kind == CONSTANT)
    return expr;

  if (value_id_constant_p (get_expr_value_id (expr)))
    return expr;

  /* Don't add translations of NAMEs as those are cheap to translate.  */
  if (expr->kind != NAME)
    {
      if (phi_trans_add (&slot, expr, pred))
	return slot->v;
      /* Store NULL for the value we want to return in the case of
	 recursing.  */
      slot->v = NULL;
    }

  /* Translate.  */
  phitrans = phi_translate_1 (expr, set1, set2, pred, phiblock);

  if (slot)
    slot->v = phitrans;

  return phitrans;
}
/* For each expression in SET, translate the values through phi nodes
   in PHIBLOCK using edge PHIBLOCK->PRED, and store the resulting
   expressions in DEST.  */

static void
phi_translate_set (bitmap_set_t dest, bitmap_set_t set, basic_block pred,
		   basic_block phiblock)
{
  vec<pre_expr> exprs;
  pre_expr expr;
  int i;

  if (gimple_seq_empty_p (phi_nodes (phiblock)))
    {
      bitmap_set_copy (dest, set);
      return;
    }

  exprs = sorted_array_from_bitmap_set (set);
  FOR_EACH_VEC_ELT (exprs, i, expr)
    {
      pre_expr translated;
      translated = phi_translate (expr, set, NULL, pred, phiblock);
      if (!translated)
	continue;

      /* We might end up with multiple expressions from SET being
	 translated to the same value.  In this case we do not want
	 to retain the NARY or REFERENCE expression but prefer a NAME
	 which would be the leader.  */
      if (translated->kind == NAME)
	bitmap_value_replace_in_set (dest, translated);
      else
	bitmap_value_insert_into_set (dest, translated);
    }
  exprs.release ();
}
/* Find the leader for a value (i.e., the name representing that
   value) in a given set, and return it.  Return NULL if no leader
   is found.  */

static pre_expr
bitmap_find_leader (bitmap_set_t set, unsigned int val)
{
  if (value_id_constant_p (val))
    {
      unsigned int i;
      bitmap_iterator bi;
      bitmap exprset = value_expressions[val];

      EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
	{
	  pre_expr expr = expression_for_id (i);
	  if (expr->kind == CONSTANT)
	    return expr;
	}
    }
  if (bitmap_set_contains_value (set, val))
    {
      /* Rather than walk the entire bitmap of expressions, and see
	 whether any of them has the value we are looking for, we look
	 at the reverse mapping, which tells us the set of expressions
	 that have a given value (IE value->expressions with that
	 value) and see if any of those expressions are in our set.
	 The number of expressions per value is usually significantly
	 less than the number of expressions in the set.  In fact, for
	 large testcases, doing it this way is roughly 5-10x faster
	 than walking the bitmap.
	 If this is somehow a significant lose for some cases, we can
	 choose which set to walk based on which set is smaller.  */
      unsigned int i;
      bitmap_iterator bi;
      bitmap exprset = value_expressions[val];

      EXECUTE_IF_AND_IN_BITMAP (exprset, &set->expressions, 0, i, bi)
	return expression_for_id (i);
    }
  return NULL;
}
/* Determine if EXPR, a memory expression, is ANTIC_IN at the top of
   BLOCK by seeing if it is not killed in the block.  Note that we are
   only determining whether there is a store that kills it.  Because
   of the order in which clean iterates over values, we are guaranteed
   that altered operands will have caused us to be eliminated from the
   ANTIC_IN set already.  */

static bool
value_dies_in_block_x (pre_expr expr, basic_block block)
{
  tree vuse = PRE_EXPR_REFERENCE (expr)->vuse;
  vn_reference_t refx = PRE_EXPR_REFERENCE (expr);
  gimple def;
  gimple_stmt_iterator gsi;
  unsigned id = get_expression_id (expr);
  bool res = false;
  ao_ref ref;

  if (!vuse)
    return false;

  /* Lookup a previously calculated result.  */
  if (EXPR_DIES (block)
      && bitmap_bit_p (EXPR_DIES (block), id * 2))
    return bitmap_bit_p (EXPR_DIES (block), id * 2 + 1);

  /* A memory expression {e, VUSE} dies in the block if there is a
     statement that may clobber e.  If, starting statement walk from the
     top of the basic block, a statement uses VUSE there can be no kill
     inbetween that use and the original statement that loaded {e, VUSE},
     so we can stop walking.  */
  ref.base = NULL_TREE;
  for (gsi = gsi_start_bb (block); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      tree def_vuse, def_vdef;
      def = gsi_stmt (gsi);
      def_vuse = gimple_vuse (def);
      def_vdef = gimple_vdef (def);

      /* Not a memory statement.  */
      if (!def_vuse)
	continue;

      /* Not a may-def.  */
      if (!def_vdef)
	{
	  /* A load with the same VUSE, we're done.  */
	  if (def_vuse == vuse)
	    break;

	  continue;
	}

      /* Init ref only if we really need it.  */
      if (ref.base == NULL_TREE
	  && !ao_ref_init_from_vn_reference (&ref, refx->set, refx->type,
					     refx->operands))
	{
	  res = true;
	  break;
	}
      /* If the statement may clobber expr, it dies.  */
      if (stmt_may_clobber_ref_p_1 (def, &ref))
	{
	  res = true;
	  break;
	}
    }

  /* Remember the result.  */
  if (!EXPR_DIES (block))
    EXPR_DIES (block) = BITMAP_ALLOC (&grand_bitmap_obstack);
  bitmap_set_bit (EXPR_DIES (block), id * 2);
  if (res)
    bitmap_set_bit (EXPR_DIES (block), id * 2 + 1);

  return res;
}
/* Determine if OP is valid in SET1 U SET2, which it is when the union
   contains its value-id.  */

static bool
op_valid_in_sets (bitmap_set_t set1, bitmap_set_t set2, tree op)
{
  if (op && TREE_CODE (op) == SSA_NAME)
    {
      unsigned int value_id = VN_INFO (op)->value_id;
      if (!(bitmap_set_contains_value (set1, value_id)
	    || (set2 && bitmap_set_contains_value (set2, value_id))))
	return false;
    }
  return true;
}

/* Determine if the expression EXPR is valid in SET1 U SET2.
   ONLY SET2 CAN BE NULL.
   This means that we have a leader for each part of the expression
   (if it consists of values), or the expression is an SSA_NAME.
   For loads/calls, we also see if the vuse is killed in this block.  */

static bool
valid_in_sets (bitmap_set_t set1, bitmap_set_t set2, pre_expr expr,
	       basic_block block)
{
  switch (expr->kind)
    {
    case NAME:
      return bitmap_find_leader (AVAIL_OUT (block),
				 get_expr_value_id (expr)) != NULL;
    case NARY:
      {
	unsigned int i;
	vn_nary_op_t nary = PRE_EXPR_NARY (expr);
	for (i = 0; i < nary->length; i++)
	  if (!op_valid_in_sets (set1, set2, nary->op[i]))
	    return false;
	return true;
      }
      break;
    case REFERENCE:
      {
	vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
	vn_reference_op_t vro;
	unsigned int i;

	FOR_EACH_VEC_ELT (ref->operands, i, vro)
	  {
	    if (!op_valid_in_sets (set1, set2, vro->op0)
		|| !op_valid_in_sets (set1, set2, vro->op1)
		|| !op_valid_in_sets (set1, set2, vro->op2))
	      return false;
	  }
	return true;
      }
    default:
      gcc_unreachable ();
    }
}
/* Clean the set of expressions that are no longer valid in SET1 or
   SET2.  This means expressions that are made up of values we have no
   leaders for in SET1 or SET2.  This version is used for partial
   anticipation, which means it is not valid in either ANTIC_IN or
   PA_IN.  */

static void
dependent_clean (bitmap_set_t set1, bitmap_set_t set2, basic_block block)
{
  vec<pre_expr> exprs = sorted_array_from_bitmap_set (set1);
  pre_expr expr;
  int i;

  FOR_EACH_VEC_ELT (exprs, i, expr)
    {
      if (!valid_in_sets (set1, set2, expr, block))
	bitmap_remove_from_set (set1, expr);
    }
  exprs.release ();
}

/* Clean the set of expressions that are no longer valid in SET.  This
   means expressions that are made up of values we have no leaders for
   in SET.  */

static void
clean (bitmap_set_t set, basic_block block)
{
  vec<pre_expr> exprs = sorted_array_from_bitmap_set (set);
  pre_expr expr;
  int i;

  FOR_EACH_VEC_ELT (exprs, i, expr)
    {
      if (!valid_in_sets (set, NULL, expr, block))
	bitmap_remove_from_set (set, expr);
    }
  exprs.release ();
}
/* Clean the set of expressions that are no longer valid in SET because
   they are clobbered in BLOCK or because they trap and may not be executed.  */

static void
prune_clobbered_mems (bitmap_set_t set, basic_block block)
{
  bitmap_iterator bi;
  unsigned i;

  FOR_EACH_EXPR_ID_IN_SET (set, i, bi)
    {
      pre_expr expr = expression_for_id (i);
      if (expr->kind == REFERENCE)
	{
	  vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
	  if (ref->vuse)
	    {
	      gimple def_stmt = SSA_NAME_DEF_STMT (ref->vuse);
	      if (!gimple_nop_p (def_stmt)
		  && ((gimple_bb (def_stmt) != block
		       && !dominated_by_p (CDI_DOMINATORS,
					   block, gimple_bb (def_stmt)))
		      || (gimple_bb (def_stmt) == block
			  && value_dies_in_block_x (expr, block))))
		bitmap_remove_from_set (set, expr);
	    }
	}
      else if (expr->kind == NARY)
	{
	  vn_nary_op_t nary = PRE_EXPR_NARY (expr);
	  /* If the NARY may trap make sure the block does not contain
	     a possible exit point.
	     ??? This is overly conservative if we translate AVAIL_OUT
	     as the available expression might be after the exit point.  */
	  if (BB_MAY_NOTRETURN (block)
	      && vn_nary_may_trap (nary))
	    bitmap_remove_from_set (set, expr);
	}
    }
}
static sbitmap has_abnormal_preds;

/* List of blocks that may have changed during ANTIC computation and
   thus need to be iterated over.  */

static sbitmap changed_blocks;
/* Decide whether to defer a block for a later iteration, or PHI
   translate SOURCE to DEST using phis in PHIBLOCK.  Return false if we
   should defer the block, and true if we processed it.  */

static bool
defer_or_phi_translate_block (bitmap_set_t dest, bitmap_set_t source,
			      basic_block block, basic_block phiblock)
{
  if (!BB_VISITED (phiblock))
    {
      bitmap_set_bit (changed_blocks, block->index);
      BB_VISITED (block) = 0;
      BB_DEFERRED (block) = 1;
      return false;
    }
  else
    phi_translate_set (dest, source, block, phiblock);
  return true;
}
/* Compute the ANTIC set for BLOCK.

   If succs(BLOCK) > 1 then
     ANTIC_OUT[BLOCK] = intersection of ANTIC_IN[b] for all succ(BLOCK)
   else if succs(BLOCK) == 1 then
     ANTIC_OUT[BLOCK] = phi_translate (ANTIC_IN[succ(BLOCK)])

   ANTIC_IN[BLOCK] = clean(ANTIC_OUT[BLOCK] U EXP_GEN[BLOCK] - TMP_GEN[BLOCK])
*/

static bool
compute_antic_aux (basic_block block, bool block_has_abnormal_pred_edge)
{
  bool changed = false;
  bitmap_set_t S, old, ANTIC_OUT;
  bitmap_iterator bi;
  unsigned int bii;
  edge e;
  edge_iterator ei;

  old = ANTIC_OUT = S = NULL;
  BB_VISITED (block) = 1;

  /* If any edges from predecessors are abnormal, antic_in is empty,
     so do nothing.  */
  if (block_has_abnormal_pred_edge)
    goto maybe_dump_sets;

  old = ANTIC_IN (block);
  ANTIC_OUT = bitmap_set_new ();

  /* If the block has no successors, ANTIC_OUT is empty.  */
  if (EDGE_COUNT (block->succs) == 0)
    ;
  /* If we have one successor, we could have some phi nodes to
     translate through.  */
  else if (single_succ_p (block))
    {
      basic_block succ_bb = single_succ (block);

      /* We trade iterations of the dataflow equations for having to
	 phi translate the maximal set, which is incredibly slow
	 (since the maximal set often has 300+ members, even when you
	 have a small number of blocks).
	 Basically, we defer the computation of ANTIC for this block
	 until we have processed its successor, which will inevitably
	 have a *much* smaller set of values to phi translate once
	 clean has been run on it.
	 The cost of doing this is that we technically perform more
	 iterations, however, they are lower cost iterations.

	 Timings for PRE on tramp3d-v4:
	 without maximal set fix: 11 seconds
	 with maximal set fix/without deferring: 26 seconds
	 with maximal set fix/with deferring: 11 seconds
     */

      if (!defer_or_phi_translate_block (ANTIC_OUT, ANTIC_IN (succ_bb),
					 block, succ_bb))
	{
	  changed = true;
	  goto maybe_dump_sets;
	}
    }
  /* If we have multiple successors, we take the intersection of all of
     them.  Note that in the case of loop exit phi nodes, we may have
     phis to translate through.  */
  else
    {
      vec<basic_block> worklist;
      size_t i;
      basic_block bprime, first = NULL;

      worklist.create (EDGE_COUNT (block->succs));
      FOR_EACH_EDGE (e, ei, block->succs)
	{
	  if (!first
	      && BB_VISITED (e->dest))
	    first = e->dest;
	  else if (BB_VISITED (e->dest))
	    worklist.quick_push (e->dest);
	}

      /* Of multiple successors we have to have visited one already.  */
      if (!first)
	{
	  bitmap_set_bit (changed_blocks, block->index);
	  BB_VISITED (block) = 0;
	  BB_DEFERRED (block) = 1;
	  changed = true;
	  worklist.release ();
	  goto maybe_dump_sets;
	}

      if (!gimple_seq_empty_p (phi_nodes (first)))
	phi_translate_set (ANTIC_OUT, ANTIC_IN (first), block, first);
      else
	bitmap_set_copy (ANTIC_OUT, ANTIC_IN (first));

      FOR_EACH_VEC_ELT (worklist, i, bprime)
	{
	  if (!gimple_seq_empty_p (phi_nodes (bprime)))
	    {
	      bitmap_set_t tmp = bitmap_set_new ();
	      phi_translate_set (tmp, ANTIC_IN (bprime), block, bprime);
	      bitmap_set_and (ANTIC_OUT, tmp);
	      bitmap_set_free (tmp);
	    }
	  else
	    bitmap_set_and (ANTIC_OUT, ANTIC_IN (bprime));
	}
      worklist.release ();
    }

  /* Prune expressions that are clobbered in block and thus become
     invalid if translated from ANTIC_OUT to ANTIC_IN.  */
  prune_clobbered_mems (ANTIC_OUT, block);

  /* Generate ANTIC_OUT - TMP_GEN.  */
  S = bitmap_set_subtract (ANTIC_OUT, TMP_GEN (block));

  /* Start ANTIC_IN with EXP_GEN - TMP_GEN.  */
  ANTIC_IN (block) = bitmap_set_subtract (EXP_GEN (block),
					  TMP_GEN (block));

  /* Then union in the ANTIC_OUT - TMP_GEN values,
     to get ANTIC_OUT U EXP_GEN - TMP_GEN.  */
  FOR_EACH_EXPR_ID_IN_SET (S, bii, bi)
    bitmap_value_insert_into_set (ANTIC_IN (block),
				  expression_for_id (bii));

  clean (ANTIC_IN (block), block);

  if (!bitmap_set_equal (old, ANTIC_IN (block)))
    {
      changed = true;
      bitmap_set_bit (changed_blocks, block->index);
      FOR_EACH_EDGE (e, ei, block->preds)
	bitmap_set_bit (changed_blocks, e->src->index);
    }
  else
    bitmap_clear_bit (changed_blocks, block->index);

 maybe_dump_sets:
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      if (!BB_DEFERRED (block) || BB_VISITED (block))
	{
	  if (ANTIC_OUT)
	    print_bitmap_set (dump_file, ANTIC_OUT, "ANTIC_OUT", block->index);

	  print_bitmap_set (dump_file, ANTIC_IN (block), "ANTIC_IN",
			    block->index);

	  if (S)
	    print_bitmap_set (dump_file, S, "S", block->index);
	}
      else
	fprintf (dump_file,
		 "Block %d was deferred for a future iteration.\n",
		 block->index);
    }
  if (old)
    bitmap_set_free (old);
  if (S)
    bitmap_set_free (S);
  if (ANTIC_OUT)
    bitmap_set_free (ANTIC_OUT);
  return changed;
}
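/* A small worked instance of the equations above (illustrative): for
   a block B with successors S1 and S2, ANTIC_IN (S1) = {a + b, c} and
   ANTIC_IN (S2) = {a + b} give ANTIC_OUT (B) = {a + b}, their
   value-wise intersection.  If B itself evaluates c (EXP_GEN (B) =
   {c}) and defines none of the operands (TMP_GEN (B) empty), then
   ANTIC_IN (B) = clean ({a + b} U {c}) = {a + b, c}.  */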
2268 /* Compute PARTIAL_ANTIC for BLOCK.
2270 If succs(BLOCK) > 1 then
2271 PA_OUT[BLOCK] = value wise union of PA_IN[b] + all ANTIC_IN not
2272 in ANTIC_OUT for all succ(BLOCK)
2273 else if succs(BLOCK) == 1 then
2274 PA_OUT[BLOCK] = phi_translate (PA_IN[succ(BLOCK)])
2276 PA_IN[BLOCK] = dependent_clean(PA_OUT[BLOCK] - TMP_GEN[BLOCK]
static bool
compute_partial_antic_aux (basic_block block,
			   bool block_has_abnormal_pred_edge)
{
  bool changed = false;
  bitmap_set_t old_PA_IN;
  bitmap_set_t PA_OUT;
  edge e;
  edge_iterator ei;
  unsigned long max_pa = PARAM_VALUE (PARAM_MAX_PARTIAL_ANTIC_LENGTH);

  old_PA_IN = PA_OUT = NULL;

  /* If any edges from predecessors are abnormal, antic_in is empty,
     so do nothing.  */
  if (block_has_abnormal_pred_edge)
    goto maybe_dump_sets;

  /* If there are too many partially anticipatable values in the
     block, phi_translate_set can take an exponential time: stop
     before the translation starts.  */
  if (max_pa
      && single_succ_p (block)
      && bitmap_count_bits (&PA_IN (single_succ (block))->values) > max_pa)
    goto maybe_dump_sets;

  old_PA_IN = PA_IN (block);
  PA_OUT = bitmap_set_new ();

  /* If the block has no successors, ANTIC_OUT is empty.  */
  if (EDGE_COUNT (block->succs) == 0)
    ;
  /* If we have one successor, we could have some phi nodes to
     translate through.  Note that we can't phi translate across DFS
     back edges in partial antic, because it uses a union operation on
     the successors.  For recurrences like IV's, we will end up
     generating a new value in the set on each go around (i + 3 (VH.1)
     VH.1 + 1 (VH.2), VH.2 + 1 (VH.3), etc), forever.  */
  else if (single_succ_p (block))
    {
      basic_block succ = single_succ (block);
      if (!(single_succ_edge (block)->flags & EDGE_DFS_BACK))
	phi_translate_set (PA_OUT, PA_IN (succ), block, succ);
    }
  /* If we have multiple successors, we take the union of all of
     them.  */
  else
    {
      vec<basic_block> worklist;
      size_t i;
      basic_block bprime;

      worklist.create (EDGE_COUNT (block->succs));
      FOR_EACH_EDGE (e, ei, block->succs)
	{
	  if (e->flags & EDGE_DFS_BACK)
	    continue;
	  worklist.quick_push (e->dest);
	}
      if (worklist.length () > 0)
	{
	  FOR_EACH_VEC_ELT (worklist, i, bprime)
	    {
	      unsigned int i;
	      bitmap_iterator bi;

	      FOR_EACH_EXPR_ID_IN_SET (ANTIC_IN (bprime), i, bi)
		bitmap_value_insert_into_set (PA_OUT,
					      expression_for_id (i));
	      if (!gimple_seq_empty_p (phi_nodes (bprime)))
		{
		  bitmap_set_t pa_in = bitmap_set_new ();
		  phi_translate_set (pa_in, PA_IN (bprime), block, bprime);
		  FOR_EACH_EXPR_ID_IN_SET (pa_in, i, bi)
		    bitmap_value_insert_into_set (PA_OUT,
						  expression_for_id (i));
		  bitmap_set_free (pa_in);
		}
	      else
		FOR_EACH_EXPR_ID_IN_SET (PA_IN (bprime), i, bi)
		  bitmap_value_insert_into_set (PA_OUT,
						expression_for_id (i));
	    }
	}
      worklist.release ();
    }

  /* Prune expressions that are clobbered in block and thus become
     invalid if translated from PA_OUT to PA_IN.  */
  prune_clobbered_mems (PA_OUT, block);

  /* PA_IN starts with PA_OUT - TMP_GEN.
     Then we subtract things from ANTIC_IN.  */
  PA_IN (block) = bitmap_set_subtract (PA_OUT, TMP_GEN (block));

  /* For partial antic, we want to put back in the phi results, since
     we will properly avoid making them partially antic over backedges.  */
  bitmap_ior_into (&PA_IN (block)->values, &PHI_GEN (block)->values);
  bitmap_ior_into (&PA_IN (block)->expressions, &PHI_GEN (block)->expressions);

  /* PA_IN[block] = PA_IN[block] - ANTIC_IN[block] */
  bitmap_set_subtract_values (PA_IN (block), ANTIC_IN (block));

  dependent_clean (PA_IN (block), ANTIC_IN (block), block);

  if (!bitmap_set_equal (old_PA_IN, PA_IN (block)))
    {
      changed = true;
      bitmap_set_bit (changed_blocks, block->index);
      FOR_EACH_EDGE (e, ei, block->preds)
	bitmap_set_bit (changed_blocks, e->src->index);
    }
  else
    bitmap_clear_bit (changed_blocks, block->index);

 maybe_dump_sets:
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      if (PA_OUT)
	print_bitmap_set (dump_file, PA_OUT, "PA_OUT", block->index);

      print_bitmap_set (dump_file, PA_IN (block), "PA_IN", block->index);
    }
  if (old_PA_IN)
    bitmap_set_free (old_PA_IN);
  if (PA_OUT)
    bitmap_set_free (PA_OUT);
  return changed;
}
/* Compute ANTIC and partial ANTIC sets.  */

static void
compute_antic (void)
{
  bool changed = true;
  int num_iterations = 0;
  basic_block block;
  int i;

  /* If any predecessor edges are abnormal, we punt, so antic_in is empty.
     We pre-build the map of blocks with incoming abnormal edges here.  */
  has_abnormal_preds = sbitmap_alloc (last_basic_block);
  bitmap_clear (has_abnormal_preds);

  FOR_ALL_BB (block)
    {
      edge_iterator ei;
      edge e;

      FOR_EACH_EDGE (e, ei, block->preds)
	{
	  e->flags &= ~EDGE_DFS_BACK;
	  if (e->flags & EDGE_ABNORMAL)
	    {
	      bitmap_set_bit (has_abnormal_preds, block->index);
	      break;
	    }
	}

      BB_VISITED (block) = 0;
      BB_DEFERRED (block) = 0;

      /* While we are here, give empty ANTIC_IN sets to each block.  */
      ANTIC_IN (block) = bitmap_set_new ();
      PA_IN (block) = bitmap_set_new ();
    }

  /* At the exit block we anticipate nothing.  */
  BB_VISITED (EXIT_BLOCK_PTR) = 1;

  changed_blocks = sbitmap_alloc (last_basic_block + 1);
  bitmap_ones (changed_blocks);
  while (changed)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Starting iteration %d\n", num_iterations);
      /* ???  We need to clear our PHI translation cache here as the
	 ANTIC sets shrink and we restrict valid translations to
	 those having operands with leaders in ANTIC.  Same below
	 for PA ANTIC computation.  */
      num_iterations++;
      changed = false;
      for (i = postorder_num - 1; i >= 0; i--)
	{
	  if (bitmap_bit_p (changed_blocks, postorder[i]))
	    {
	      basic_block block = BASIC_BLOCK (postorder[i]);
	      changed |= compute_antic_aux (block,
					    bitmap_bit_p (has_abnormal_preds,
							  block->index));
	    }
	}
      /* Theoretically possible, but *highly* unlikely.  */
      gcc_checking_assert (num_iterations < 500);
    }

  statistics_histogram_event (cfun, "compute_antic iterations",
			      num_iterations);

  if (do_partial_partial)
    {
      bitmap_ones (changed_blocks);
      mark_dfs_back_edges ();
      num_iterations = 0;
      changed = true;
      while (changed)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Starting iteration %d\n", num_iterations);
	  num_iterations++;
	  changed = false;
	  for (i = postorder_num - 1 ; i >= 0; i--)
	    {
	      if (bitmap_bit_p (changed_blocks, postorder[i]))
		{
		  basic_block block = BASIC_BLOCK (postorder[i]);
		  changed
		    |= compute_partial_antic_aux (block,
						  bitmap_bit_p (has_abnormal_preds,
								block->index));
		}
	    }
	  /* Theoretically possible, but *highly* unlikely.  */
	  gcc_checking_assert (num_iterations < 500);
	}
      statistics_histogram_event (cfun, "compute_partial_antic iterations",
				  num_iterations);
    }
  sbitmap_free (has_abnormal_preds);
  sbitmap_free (changed_blocks);
}
/* Inserted expressions are placed onto this worklist, which is used
   for performing quick dead code elimination of insertions we made
   that didn't turn out to be necessary.  */
static bitmap inserted_exprs;
/* The actual worker for create_component_ref_by_pieces.  */

static tree
create_component_ref_by_pieces_1 (basic_block block, vn_reference_t ref,
				  unsigned int *operand, gimple_seq *stmts)
{
  vn_reference_op_t currop = &ref->operands[*operand];
  tree genop;
  ++*operand;
  switch (currop->opcode)
    {
    case CALL_EXPR:
      {
	tree folded, sc = NULL_TREE;
	unsigned int nargs = 0;
	tree fn, *args;
	if (TREE_CODE (currop->op0) == FUNCTION_DECL)
	  fn = currop->op0;
	else
	  {
	    fn = find_or_generate_expression (block, currop->op0, stmts);
	    if (!fn)
	      return NULL_TREE;
	  }
	if (currop->op1)
	  {
	    sc = find_or_generate_expression (block, currop->op1, stmts);
	    if (!sc)
	      return NULL_TREE;
	  }
	args = XNEWVEC (tree, ref->operands.length () - 1);
	while (*operand < ref->operands.length ())
	  {
	    args[nargs] = create_component_ref_by_pieces_1 (block, ref,
							    operand, stmts);
	    if (!args[nargs])
	      return NULL_TREE;
	    nargs++;
	  }
	folded = build_call_array (currop->type,
				   (TREE_CODE (fn) == FUNCTION_DECL
				    ? build_fold_addr_expr (fn) : fn),
				   nargs, args);
	free (args);
	if (sc)
	  CALL_EXPR_STATIC_CHAIN (folded) = sc;
	return folded;
      }

    case MEM_REF:
      {
	tree baseop = create_component_ref_by_pieces_1 (block, ref, operand,
							stmts);
	if (!baseop)
	  return NULL_TREE;
	tree offset = currop->op0;
	if (TREE_CODE (baseop) == ADDR_EXPR
	    && handled_component_p (TREE_OPERAND (baseop, 0)))
	  {
	    HOST_WIDE_INT off;
	    tree base;
	    base = get_addr_base_and_unit_offset (TREE_OPERAND (baseop, 0),
						  &off);
	    gcc_assert (base);
	    offset = int_const_binop (PLUS_EXPR, offset,
				      build_int_cst (TREE_TYPE (offset),
						     off));
	    baseop = build_fold_addr_expr (base);
	  }
	return fold_build2 (MEM_REF, currop->type, baseop, offset);
      }

    case TARGET_MEM_REF:
      {
	tree genop0 = NULL_TREE, genop1 = NULL_TREE;
	vn_reference_op_t nextop = &ref->operands[++*operand];
	tree baseop = create_component_ref_by_pieces_1 (block, ref, operand,
							stmts);
	if (!baseop)
	  return NULL_TREE;
	if (currop->op0)
	  {
	    genop0 = find_or_generate_expression (block, currop->op0, stmts);
	    if (!genop0)
	      return NULL_TREE;
	  }
	if (nextop->op0)
	  {
	    genop1 = find_or_generate_expression (block, nextop->op0, stmts);
	    if (!genop1)
	      return NULL_TREE;
	  }
	return build5 (TARGET_MEM_REF, currop->type,
		       baseop, currop->op2, genop0, currop->op1, genop1);
      }

    case ADDR_EXPR:
      if (currop->op0)
	{
	  gcc_assert (is_gimple_min_invariant (currop->op0));
	  return currop->op0;
	}
      /* Fallthrough.  */
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case VIEW_CONVERT_EXPR:
      {
	tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
							stmts);
	if (!genop0)
	  return NULL_TREE;
	return fold_build1 (currop->opcode, currop->type, genop0);
      }

    case WITH_SIZE_EXPR:
      {
	tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
							stmts);
	if (!genop0)
	  return NULL_TREE;
	tree genop1 = find_or_generate_expression (block, currop->op0, stmts);
	if (!genop1)
	  return NULL_TREE;
	return fold_build2 (currop->opcode, currop->type, genop0, genop1);
      }

    case BIT_FIELD_REF:
      {
	tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
							stmts);
	if (!genop0)
	  return NULL_TREE;
	tree op1 = currop->op0;
	tree op2 = currop->op1;
	return fold_build3 (BIT_FIELD_REF, currop->type, genop0, op1, op2);
      }

      /* For array ref vn_reference_op's, operand 1 of the array ref
	 is op0 of the reference op and operand 3 of the array ref is
	 op1.  */
    case ARRAY_RANGE_REF:
    case ARRAY_REF:
      {
	tree genop0;
	tree genop1 = currop->op0;
	tree genop2 = currop->op1;
	tree genop3 = currop->op2;
	genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
						   stmts);
	if (!genop0)
	  return NULL_TREE;
	genop1 = find_or_generate_expression (block, genop1, stmts);
	if (!genop1)
	  return NULL_TREE;
	if (genop2)
	  {
	    tree domain_type = TYPE_DOMAIN (TREE_TYPE (genop0));
	    /* Drop zero minimum index if redundant.  */
	    if (integer_zerop (genop2)
		&& (!domain_type
		    || integer_zerop (TYPE_MIN_VALUE (domain_type))))
	      genop2 = NULL_TREE;
	    else
	      {
		genop2 = find_or_generate_expression (block, genop2, stmts);
		if (!genop2)
		  return NULL_TREE;
	      }
	  }
	if (genop3)
	  {
	    tree elmt_type = TREE_TYPE (TREE_TYPE (genop0));
	    /* We can't always put a size in units of the element alignment
	       here as the element alignment may be not visible.  See
	       PR43783.  Simply drop the element size for constant
	       sizes.  */
	    if (tree_int_cst_equal (genop3, TYPE_SIZE_UNIT (elmt_type)))
	      genop3 = NULL_TREE;
	    else
	      {
		genop3 = size_binop (EXACT_DIV_EXPR, genop3,
				     size_int (TYPE_ALIGN_UNIT (elmt_type)));
		genop3 = find_or_generate_expression (block, genop3, stmts);
		if (!genop3)
		  return NULL_TREE;
	      }
	  }
	return build4 (currop->opcode, currop->type, genop0, genop1,
		       genop2, genop3);
      }

    case COMPONENT_REF:
      {
	tree op0;
	tree op1;
	tree genop2 = currop->op1;
	op0 = create_component_ref_by_pieces_1 (block, ref, operand, stmts);
	if (!op0)
	  return NULL_TREE;
	/* op1 should be a FIELD_DECL, which are represented by themselves.  */
	op1 = currop->op0;
	if (genop2)
	  {
	    genop2 = find_or_generate_expression (block, genop2, stmts);
	    if (!genop2)
	      return NULL_TREE;
	  }
	return fold_build3 (COMPONENT_REF, TREE_TYPE (op1), op0, op1, genop2);
      }

    case SSA_NAME:
      {
	genop = find_or_generate_expression (block, currop->op0, stmts);
	return genop;
      }

    default:
      gcc_unreachable ();
    }
}
/* For COMPONENT_REF's and ARRAY_REF's, we can't have any intermediates for the
   COMPONENT_REF, MEM_REF or ARRAY_REF portion, because we'd end up
   trying to rename aggregates into ssa form directly, which is a no-no.

   Thus, this routine doesn't create temporaries, it just builds a
   single access expression for the array, calling
   find_or_generate_expression to build the innermost pieces.

   This function is a subroutine of create_expression_by_pieces, and
   should not be called on its own unless you really know what you
   are doing.  */

static tree
create_component_ref_by_pieces (basic_block block, vn_reference_t ref,
				gimple_seq *stmts)
{
  unsigned int op = 0;
  return create_component_ref_by_pieces_1 (block, ref, &op, stmts);
}
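
/* For instance (purely illustrative), rebuilding a reference like
   a[i_1].f from its vn_reference_op pieces proceeds outside-in: the
   COMPONENT_REF operand asks for its base via the recursion above,
   the ARRAY_REF operand finds or generates a leader for i_1, and the
   innermost SSA_NAME or decl operand terminates the recursion,
   yielding a single tree a[i_1].f with any needed index computations
   appended to STMTS.  */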
/* Find a simple leader for an expression, or generate one using
   create_expression_by_pieces from a NARY expression for the value.
   BLOCK is the basic_block we are looking for leaders in.
   OP is the tree expression to find a leader for or generate.
   Returns the leader or NULL_TREE on failure.  */

static tree
find_or_generate_expression (basic_block block, tree op, gimple_seq *stmts)
{
  pre_expr expr = get_or_alloc_expr_for (op);
  unsigned int lookfor = get_expr_value_id (expr);
  pre_expr leader = bitmap_find_leader (AVAIL_OUT (block), lookfor);
  if (leader)
    {
      if (leader->kind == NAME)
	return PRE_EXPR_NAME (leader);
      else if (leader->kind == CONSTANT)
	return PRE_EXPR_CONSTANT (leader);

      /* Defer.  */
      return NULL_TREE;
    }

  /* It must be a complex expression, so generate it recursively.  Note
     that this is only necessary to handle gcc.dg/tree-ssa/ssa-pre28.c
     where the insert algorithm fails to insert a required expression.  */
  bitmap exprset = value_expressions[lookfor];
  unsigned int i;
  bitmap_iterator bi;
  EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
    {
      pre_expr temp = expression_for_id (i);
      /* We cannot insert random REFERENCE expressions at arbitrary
	 places.  We can insert NARYs, which eventually re-materialize
	 their operand values.  */
      if (temp->kind == NARY)
	return create_expression_by_pieces (block, temp, stmts,
					    get_expr_type (expr));
    }

  /* Defer.  */
  return NULL_TREE;
}
#define NECESSARY GF_PLF_1

/* Create an expression in pieces, so that we can handle very complex
   expressions that may be ANTIC, but not necessarily GIMPLE.
   BLOCK is the basic block the expression will be inserted into,
   EXPR is the expression to insert (in value form),
   STMTS is a statement list to append the necessary insertions into.

   This function will die if we hit some value that shouldn't be
   ANTIC but is (IE there is no leader for it, or its components).
   The function returns NULL_TREE in case a different antic expression
   has to be inserted first.
   This function may also generate expressions that are themselves
   partially or fully redundant.  Those that are will be either made
   fully redundant during the next iteration of insert (for partially
   redundant ones), or eliminated by eliminate (for fully redundant
   ones).  */
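
/* Sketch of the effect (names illustrative): asked to materialize the
   value of x + y * z at the end of BLOCK, this appends something like

     pretmp_10 = y_2 * z_3;
     pretmp_11 = x_1 + pretmp_10;

   to STMTS, registers both temporaries with the value table, and
   returns pretmp_11 as the leader for the requested value.  */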
static tree
create_expression_by_pieces (basic_block block, pre_expr expr,
			     gimple_seq *stmts, tree type)
{
  tree name;
  tree folded;
  gimple_seq forced_stmts = NULL;
  unsigned int value_id;
  gimple_stmt_iterator gsi;
  tree exprtype = type ? type : get_expr_type (expr);
  pre_expr nameexpr;
  gimple newstmt;

  switch (expr->kind)
    {
    /* We may hit the NAME/CONSTANT case if we have to convert types
       that value numbering saw through.  */
    case NAME:
      folded = PRE_EXPR_NAME (expr);
      break;
    case CONSTANT:
      folded = PRE_EXPR_CONSTANT (expr);
      break;
    case REFERENCE:
      {
	vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
	folded = create_component_ref_by_pieces (block, ref, stmts);
	if (!folded)
	  return NULL_TREE;
      }
      break;
    case NARY:
      {
	vn_nary_op_t nary = PRE_EXPR_NARY (expr);
	tree *genop = XALLOCAVEC (tree, nary->length);
	unsigned i;
	for (i = 0; i < nary->length; ++i)
	  {
	    genop[i] = find_or_generate_expression (block, nary->op[i], stmts);
	    if (!genop[i])
	      return NULL_TREE;
	    /* Ensure genop[] is properly typed for POINTER_PLUS_EXPR.  It
	       may have conversions stripped.  */
	    if (nary->opcode == POINTER_PLUS_EXPR)
	      {
		if (i == 0)
		  genop[i] = fold_convert (nary->type, genop[i]);
		else if (i == 1)
		  genop[i] = convert_to_ptrofftype (genop[i]);
	      }
	    else
	      genop[i] = fold_convert (TREE_TYPE (nary->op[i]), genop[i]);
	  }
	if (nary->opcode == CONSTRUCTOR)
	  {
	    vec<constructor_elt, va_gc> *elts = NULL;
	    for (i = 0; i < nary->length; ++i)
	      CONSTRUCTOR_APPEND_ELT (elts, NULL_TREE, genop[i]);
	    folded = build_constructor (nary->type, elts);
	  }
	else
	  {
	    switch (nary->length)
	      {
	      case 1:
		folded = fold_build1 (nary->opcode, nary->type,
				      genop[0]);
		break;
	      case 2:
		folded = fold_build2 (nary->opcode, nary->type,
				      genop[0], genop[1]);
		break;
	      case 3:
		folded = fold_build3 (nary->opcode, nary->type,
				      genop[0], genop[1], genop[2]);
		break;
	      default:
		gcc_unreachable ();
	      }
	  }
      }
      break;
    default:
      gcc_unreachable ();
    }

  if (!useless_type_conversion_p (exprtype, TREE_TYPE (folded)))
    folded = fold_convert (exprtype, folded);

  /* Force the generated expression to be a sequence of GIMPLE
     statements.
     We have to call unshare_expr because force_gimple_operand may
     modify the tree we pass to it.  */
  folded = force_gimple_operand (unshare_expr (folded), &forced_stmts,
				 false, NULL);

  /* If we have any intermediate expressions to the value sets, add them
     to the value sets and chain them in the instruction stream.  */
  if (forced_stmts)
    {
      gsi = gsi_start (forced_stmts);
      for (; !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  tree forcedname = gimple_get_lhs (stmt);
	  pre_expr nameexpr;

	  if (TREE_CODE (forcedname) == SSA_NAME)
	    {
	      bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (forcedname));
	      VN_INFO_GET (forcedname)->valnum = forcedname;
	      VN_INFO (forcedname)->value_id = get_next_value_id ();
	      nameexpr = get_or_alloc_expr_for_name (forcedname);
	      add_to_value (VN_INFO (forcedname)->value_id, nameexpr);
	      bitmap_value_replace_in_set (NEW_SETS (block), nameexpr);
	      bitmap_value_replace_in_set (AVAIL_OUT (block), nameexpr);
	    }
	}
      gimple_seq_add_seq (stmts, forced_stmts);
    }

  name = make_temp_ssa_name (exprtype, NULL, "pretmp");
  newstmt = gimple_build_assign (name, folded);
  gimple_set_plf (newstmt, NECESSARY, false);

  gimple_seq_add_stmt (stmts, newstmt);
  bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (name));

  /* Fold the last statement.  */
  gsi = gsi_last (*stmts);
  if (fold_stmt_inplace (&gsi))
    update_stmt (gsi_stmt (gsi));

  /* Add a value number to the temporary.
     The value may already exist in either NEW_SETS, or AVAIL_OUT, because
     we are creating the expression by pieces, and this particular piece of
     the expression may have been represented.  There is no harm in replacing
     here.  */
  value_id = get_expr_value_id (expr);
  VN_INFO_GET (name)->value_id = value_id;
  VN_INFO (name)->valnum = sccvn_valnum_from_value_id (value_id);
  if (VN_INFO (name)->valnum == NULL_TREE)
    VN_INFO (name)->valnum = name;
  gcc_assert (VN_INFO (name)->valnum != NULL_TREE);
  nameexpr = get_or_alloc_expr_for_name (name);
  add_to_value (value_id, nameexpr);
  if (NEW_SETS (block))
    bitmap_value_replace_in_set (NEW_SETS (block), nameexpr);
  bitmap_value_replace_in_set (AVAIL_OUT (block), nameexpr);

  pre_stats.insertions++;
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Inserted ");
      print_gimple_stmt (dump_file, newstmt, 0, 0);
      fprintf (dump_file, " in predecessor %d (%04d)\n",
	       block->index, value_id);
    }

  return name;
}
/* Returns true if we want to inhibit the insertions of PHI nodes
   for the given EXPR for basic block BB (a member of a loop).
   We want to do this when we fear that the induction variable we
   create might inhibit vectorization.  */
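
/* For instance (illustrative): in

     for (i = 0; i < n; ++i)
       sum += a[i];

   inserting a PHI for the address computation of a[i] would
   materialize an extra pointer induction variable; since the
   vectorizer recognizes a[i] by its simple IV, we prefer to leave
   the reference alone.  */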
static bool
inhibit_phi_insertion (basic_block bb, pre_expr expr)
{
  vn_reference_t vr = PRE_EXPR_REFERENCE (expr);
  vec<vn_reference_op_s> ops = vr->operands;
  vn_reference_op_t op;
  unsigned i;

  /* If we aren't going to vectorize we don't inhibit anything.  */
  if (!flag_tree_loop_vectorize)
    return false;

  /* Otherwise we inhibit the insertion when the address of the
     memory reference is a simple induction variable.  In other
     cases the vectorizer won't do anything anyway (either it's
     loop invariant or a complicated expression).  */
  FOR_EACH_VEC_ELT (ops, i, op)
    {
      switch (op->opcode)
	{
	case CALL_EXPR:
	  /* Calls are not a problem.  */
	  return false;

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  if (TREE_CODE (op->op0) != SSA_NAME)
	    break;
	  /* Fallthru.  */
	case SSA_NAME:
	  {
	    basic_block defbb = gimple_bb (SSA_NAME_DEF_STMT (op->op0));
	    affine_iv iv;
	    /* Default defs are loop invariant.  */
	    if (!defbb)
	      break;
	    /* Defined outside this loop, also loop invariant.  */
	    if (!flow_bb_inside_loop_p (bb->loop_father, defbb))
	      break;
	    /* If it's a simple induction variable inhibit insertion,
	       the vectorizer might be interested in this one.  */
	    if (simple_iv (bb->loop_father, bb->loop_father,
			   op->op0, &iv, true))
	      return true;
	    /* No simple IV, vectorizer can't do anything, hence no
	       reason to inhibit the transformation for this operand.  */
	    break;
	  }
	default:
	  break;
	}
    }
  return false;
}
/* Insert the to-be-made-available values of expression EXPRNUM for each
   predecessor, stored in AVAIL, into the predecessors of BLOCK, and
   merge the result with a phi node, given the same value number as
   NODE.  Return true if we have inserted new stuff.  */
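
/* Illustration (names made up): if x + y is available as t_4 in one
   predecessor and ANTIC but unavailable in the other, we generate
   pretmp_7 = x_1 + y_2 on the second incoming edge and then merge with

     prephitmp_8 = PHI <t_4 (pred1), pretmp_7 (pred2)>

   giving prephitmp_8 the value number of x + y.  */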
static bool
insert_into_preds_of_block (basic_block block, unsigned int exprnum,
			    vec<pre_expr> avail)
{
  pre_expr expr = expression_for_id (exprnum);
  pre_expr newphi;
  unsigned int val = get_expr_value_id (expr);
  edge pred;
  bool insertions = false;
  bool nophi = false;
  basic_block bprime;
  pre_expr eprime;
  edge_iterator ei;
  tree type = get_expr_type (expr);
  tree temp;
  gimple phi;

  /* Make sure we aren't creating an induction variable.  */
  if (bb_loop_depth (block) > 0 && EDGE_COUNT (block->preds) == 2)
    {
      bool firstinsideloop = false;
      bool secondinsideloop = false;
      firstinsideloop = flow_bb_inside_loop_p (block->loop_father,
					       EDGE_PRED (block, 0)->src);
      secondinsideloop = flow_bb_inside_loop_p (block->loop_father,
						EDGE_PRED (block, 1)->src);
      /* Induction variables only have one edge inside the loop.  */
      if ((firstinsideloop ^ secondinsideloop)
	  && (expr->kind != REFERENCE
	      || inhibit_phi_insertion (block, expr)))
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Skipping insertion of phi for partial "
		     "redundancy: Looks like an induction variable\n");
	  nophi = true;
	}
    }

  /* Make the necessary insertions.  */
  FOR_EACH_EDGE (pred, ei, block->preds)
    {
      gimple_seq stmts = NULL;
      tree builtexpr;
      bprime = pred->src;
      eprime = avail[pred->dest_idx];

      if (eprime->kind != NAME && eprime->kind != CONSTANT)
	{
	  builtexpr = create_expression_by_pieces (bprime, eprime,
						   &stmts, type);
	  gcc_assert (!(pred->flags & EDGE_ABNORMAL));
	  gsi_insert_seq_on_edge (pred, stmts);
	  if (!builtexpr)
	    {
	      /* We cannot insert a PHI node if we failed to insert
		 on one edge.  */
	      nophi = true;
	      continue;
	    }
	  avail[pred->dest_idx] = get_or_alloc_expr_for_name (builtexpr);
	  insertions = true;
	}
      else if (eprime->kind == CONSTANT)
	{
	  /* Constants may not have the right type, fold_convert
	     should give us back a constant with the right type.  */
	  tree constant = PRE_EXPR_CONSTANT (eprime);
	  if (!useless_type_conversion_p (type, TREE_TYPE (constant)))
	    {
	      tree builtexpr = fold_convert (type, constant);
	      if (!is_gimple_min_invariant (builtexpr))
		{
		  tree forcedexpr = force_gimple_operand (builtexpr,
							  &stmts, false,
							  NULL);
		  if (!is_gimple_min_invariant (forcedexpr))
		    {
		      if (forcedexpr != builtexpr)
			{
			  VN_INFO_GET (forcedexpr)->valnum = PRE_EXPR_CONSTANT (eprime);
			  VN_INFO (forcedexpr)->value_id = get_expr_value_id (eprime);
			}
		      if (stmts)
			{
			  gimple_stmt_iterator gsi;
			  gsi = gsi_start (stmts);
			  for (; !gsi_end_p (gsi); gsi_next (&gsi))
			    {
			      gimple stmt = gsi_stmt (gsi);
			      tree lhs = gimple_get_lhs (stmt);
			      if (TREE_CODE (lhs) == SSA_NAME)
				bitmap_set_bit (inserted_exprs,
						SSA_NAME_VERSION (lhs));
			      gimple_set_plf (stmt, NECESSARY, false);
			    }
			  gsi_insert_seq_on_edge (pred, stmts);
			}
		      avail[pred->dest_idx]
			= get_or_alloc_expr_for_name (forcedexpr);
		    }
		}
	      else
		avail[pred->dest_idx]
		  = get_or_alloc_expr_for_constant (builtexpr);
	    }
	}
      else if (eprime->kind == NAME)
	{
	  /* We may have to do a conversion because our value
	     numbering can look through types in certain cases, but
	     our IL requires all operands of a phi node have the same
	     type.  */
	  tree name = PRE_EXPR_NAME (eprime);
	  if (!useless_type_conversion_p (type, TREE_TYPE (name)))
	    {
	      tree builtexpr;
	      tree forcedexpr;
	      builtexpr = fold_convert (type, name);
	      forcedexpr = force_gimple_operand (builtexpr,
						 &stmts, false,
						 NULL);

	      if (forcedexpr != name)
		{
		  VN_INFO_GET (forcedexpr)->valnum = VN_INFO (name)->valnum;
		  VN_INFO (forcedexpr)->value_id = VN_INFO (name)->value_id;
		}

	      if (stmts)
		{
		  gimple_stmt_iterator gsi;
		  gsi = gsi_start (stmts);
		  for (; !gsi_end_p (gsi); gsi_next (&gsi))
		    {
		      gimple stmt = gsi_stmt (gsi);
		      tree lhs = gimple_get_lhs (stmt);
		      if (TREE_CODE (lhs) == SSA_NAME)
			bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (lhs));
		      gimple_set_plf (stmt, NECESSARY, false);
		    }
		  gsi_insert_seq_on_edge (pred, stmts);
		}
	      avail[pred->dest_idx] = get_or_alloc_expr_for_name (forcedexpr);
	    }
	}
    }
  /* If we didn't want a phi node, and we made insertions, we still have
     inserted new stuff, and thus return true.  If we didn't want a phi node,
     and didn't make insertions, we haven't added anything new, so return
     false.  */
  if (nophi && insertions)
    return true;
  else if (nophi && !insertions)
    return false;

  /* Now build a phi for the new variable.  */
  temp = make_temp_ssa_name (type, NULL, "prephitmp");
  phi = create_phi_node (temp, block);

  gimple_set_plf (phi, NECESSARY, false);
  VN_INFO_GET (temp)->value_id = val;
  VN_INFO (temp)->valnum = sccvn_valnum_from_value_id (val);
  if (VN_INFO (temp)->valnum == NULL_TREE)
    VN_INFO (temp)->valnum = temp;
  bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (temp));
  FOR_EACH_EDGE (pred, ei, block->preds)
    {
      pre_expr ae = avail[pred->dest_idx];
      gcc_assert (get_expr_type (ae) == type
		  || useless_type_conversion_p (type, get_expr_type (ae)));
      if (ae->kind == CONSTANT)
	add_phi_arg (phi, unshare_expr (PRE_EXPR_CONSTANT (ae)),
		     pred, UNKNOWN_LOCATION);
      else
	add_phi_arg (phi, PRE_EXPR_NAME (ae), pred, UNKNOWN_LOCATION);
    }

  newphi = get_or_alloc_expr_for_name (temp);
  add_to_value (val, newphi);

  /* The value should *not* exist in PHI_GEN, or else we wouldn't be doing
     this insertion, since we test for the existence of this value in PHI_GEN
     before proceeding with the partial redundancy checks in insert_aux.

     The value may exist in AVAIL_OUT, in particular, it could be represented
     by the expression we are trying to eliminate, in which case we want the
     replacement to occur.  If it's not existing in AVAIL_OUT, we want it
     added.

     Similarly to the PHI_GEN case, the value should not exist in NEW_SETS of
     this block, because if it did, it would have existed in our dominator's
     AVAIL_OUT, and would have been skipped due to the full redundancy check.
  */

  bitmap_insert_into_set (PHI_GEN (block), newphi);
  bitmap_value_replace_in_set (AVAIL_OUT (block),
			       newphi);
  bitmap_insert_into_set (NEW_SETS (block),
			  newphi);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Created phi ");
      print_gimple_stmt (dump_file, phi, 0, 0);
      fprintf (dump_file, " in block %d (%04d)\n", block->index, val);
    }
  pre_stats.phis++;
  return true;
}
/* Perform insertion of partially redundant values.
   For BLOCK, do the following:
   1.  Propagate the NEW_SETS of the dominator into the current block.
   If the block has multiple predecessors,
       2a. Iterate over the ANTIC expressions for the block to see if
	   any of them are partially redundant.
       2b. If so, insert them into the necessary predecessors to make
	   the expression fully redundant.
       2c. Insert a new PHI merging the values of the predecessors.
       2d. Insert the new PHI, and the new expressions, into the
	   NEW_SETS set.
   3. Recursively call ourselves on the dominator children of BLOCK.

   Steps 1, 2a, and 3 are done by insert_aux.  2b, 2c and 2d are done by
   do_regular_insertion and do_partial_insertion.  */
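
/* For example (illustrative CFG): in

     if (...)  { t_4 = x_1 + y_2; }    <- one predecessor computes it
     else      { ; }                   <- the other does not
     u_5 = x_1 + y_2;                  <- partially redundant here

   step 2b inserts pretmp = x_1 + y_2 into the empty predecessor,
   step 2c creates prephitmp = PHI <t_4, pretmp>, and the later
   elimination rewrites u_5's computation to use prephitmp.  */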
static bool
do_regular_insertion (basic_block block, basic_block dom)
{
  bool new_stuff = false;
  vec<pre_expr> exprs;
  pre_expr expr;
  vec<pre_expr> avail = vNULL;
  int i;

  exprs = sorted_array_from_bitmap_set (ANTIC_IN (block));
  avail.safe_grow (EDGE_COUNT (block->preds));

  FOR_EACH_VEC_ELT (exprs, i, expr)
    {
      if (expr->kind == NARY
	  || expr->kind == REFERENCE)
	{
	  unsigned int val;
	  bool by_some = false;
	  bool cant_insert = false;
	  bool all_same = true;
	  pre_expr first_s = NULL;
	  edge pred;
	  basic_block bprime;
	  pre_expr eprime = NULL;
	  edge_iterator ei;
	  pre_expr edoubleprime = NULL;
	  bool do_insertion = false;

	  val = get_expr_value_id (expr);
	  if (bitmap_set_contains_value (PHI_GEN (block), val))
	    continue;
	  if (bitmap_set_contains_value (AVAIL_OUT (dom), val))
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fprintf (dump_file, "Found fully redundant value: ");
		  print_pre_expr (dump_file, expr);
		  fprintf (dump_file, "\n");
		}
	      continue;
	    }

	  FOR_EACH_EDGE (pred, ei, block->preds)
	    {
	      unsigned int vprime;

	      /* We should never run insertion for the exit block
		 and so not come across fake pred edges.  */
	      gcc_assert (!(pred->flags & EDGE_FAKE));
	      bprime = pred->src;
	      eprime = phi_translate (expr, ANTIC_IN (block), NULL,
				      bprime, block);

	      /* eprime will generally only be NULL if the
		 value of the expression, translated
		 through the PHI for this predecessor, is
		 undefined.  If that is the case, we can't
		 make the expression fully redundant,
		 because its value is undefined along a
		 predecessor path.  We can thus break out
		 early because it doesn't matter what the
		 rest of the results are.  */
	      if (eprime == NULL)
		{
		  avail[pred->dest_idx] = NULL;
		  cant_insert = true;
		  break;
		}

	      eprime = fully_constant_expression (eprime);
	      vprime = get_expr_value_id (eprime);
	      edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime),
						 vprime);
	      if (edoubleprime == NULL)
		{
		  avail[pred->dest_idx] = eprime;
		  all_same = false;
		}
	      else
		{
		  avail[pred->dest_idx] = edoubleprime;
		  by_some = true;
		  /* We want to perform insertions to remove a redundancy on
		     a path in the CFG we want to optimize for speed.  */
		  if (optimize_edge_for_speed_p (pred))
		    do_insertion = true;
		  if (first_s == NULL)
		    first_s = edoubleprime;
		  else if (!pre_expr_d::equal (first_s, edoubleprime))
		    all_same = false;
		}
	    }
	  /* If we can insert it, it's not the same value
	     already existing along every predecessor, and
	     it's defined by some predecessor, it is
	     partially redundant.  */
	  if (!cant_insert && !all_same && by_some)
	    {
	      if (!do_insertion)
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fprintf (dump_file, "Skipping partial redundancy for "
			       "expression ");
		      print_pre_expr (dump_file, expr);
		      fprintf (dump_file, " (%04d), no redundancy on to be "
			       "optimized for speed edge\n", val);
		    }
		}
	      else if (dbg_cnt (treepre_insert))
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fprintf (dump_file, "Found partial redundancy for "
			       "expression ");
		      print_pre_expr (dump_file, expr);
		      fprintf (dump_file, " (%04d)\n",
			       get_expr_value_id (expr));
		    }
		  if (insert_into_preds_of_block (block,
						  get_expression_id (expr),
						  avail))
		    new_stuff = true;
		}
	    }
	  /* If all edges produce the same value and that value is
	     an invariant, then the PHI has the same value on all
	     edges.  Note this.  */
	  else if (!cant_insert && all_same)
	    {
	      gcc_assert (edoubleprime->kind == CONSTANT
			  || edoubleprime->kind == NAME);

	      tree temp = make_temp_ssa_name (get_expr_type (expr),
					      NULL, "pretmp");
	      gimple assign = gimple_build_assign (temp,
						   edoubleprime->kind == CONSTANT
						   ? PRE_EXPR_CONSTANT (edoubleprime)
						   : PRE_EXPR_NAME (edoubleprime));
	      gimple_stmt_iterator gsi = gsi_after_labels (block);
	      gsi_insert_before (&gsi, assign, GSI_NEW_STMT);

	      gimple_set_plf (assign, NECESSARY, false);
	      VN_INFO_GET (temp)->value_id = val;
	      VN_INFO (temp)->valnum = sccvn_valnum_from_value_id (val);
	      if (VN_INFO (temp)->valnum == NULL_TREE)
		VN_INFO (temp)->valnum = temp;
	      bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (temp));
	      pre_expr newe = get_or_alloc_expr_for_name (temp);
	      add_to_value (val, newe);
	      bitmap_value_replace_in_set (AVAIL_OUT (block), newe);
	      bitmap_insert_into_set (NEW_SETS (block), newe);
	    }
	}
    }

  exprs.release ();
  avail.release ();
  return new_stuff;
}
/* Perform insertion for partially anticipatable expressions.  There
   is only one case we will perform insertion for these.  This case is
   if the expression is partially anticipatable, and fully available.
   In this case, we know that putting it earlier will enable us to
   remove the later computation.  */
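
/* Typical case (illustrative): a value computed on every path reaching
   a loop header and recomputed inside the loop; it is fully available
   from all predecessors yet only partially anticipatable because of
   the back edge, so inserting the merge here lets the later
   computation be eliminated.  */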
static bool
do_partial_partial_insertion (basic_block block, basic_block dom)
{
  bool new_stuff = false;
  vec<pre_expr> exprs;
  pre_expr expr;
  vec<pre_expr> avail = vNULL;
  int i;

  exprs = sorted_array_from_bitmap_set (PA_IN (block));
  avail.safe_grow (EDGE_COUNT (block->preds));

  FOR_EACH_VEC_ELT (exprs, i, expr)
    {
      if (expr->kind == NARY
	  || expr->kind == REFERENCE)
	{
	  unsigned int val;
	  bool by_all = true;
	  bool cant_insert = false;
	  edge pred;
	  basic_block bprime;
	  pre_expr eprime = NULL;
	  edge_iterator ei;

	  val = get_expr_value_id (expr);
	  if (bitmap_set_contains_value (PHI_GEN (block), val))
	    continue;
	  if (bitmap_set_contains_value (AVAIL_OUT (dom), val))
	    continue;

	  FOR_EACH_EDGE (pred, ei, block->preds)
	    {
	      unsigned int vprime;
	      pre_expr edoubleprime;

	      /* We should never run insertion for the exit block
		 and so not come across fake pred edges.  */
	      gcc_assert (!(pred->flags & EDGE_FAKE));
	      bprime = pred->src;
	      eprime = phi_translate (expr, ANTIC_IN (block),
				      PA_IN (block),
				      bprime, block);

	      /* eprime will generally only be NULL if the
		 value of the expression, translated
		 through the PHI for this predecessor, is
		 undefined.  If that is the case, we can't
		 make the expression fully redundant,
		 because its value is undefined along a
		 predecessor path.  We can thus break out
		 early because it doesn't matter what the
		 rest of the results are.  */
	      if (eprime == NULL)
		{
		  avail[pred->dest_idx] = NULL;
		  cant_insert = true;
		  break;
		}

	      eprime = fully_constant_expression (eprime);
	      vprime = get_expr_value_id (eprime);
	      edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime), vprime);
	      avail[pred->dest_idx] = edoubleprime;
	      if (edoubleprime == NULL)
		{
		  by_all = false;
		  break;
		}
	    }

	  /* If we can insert it, it's not the same value
	     already existing along every predecessor, and
	     it's defined by some predecessor, it is
	     partially redundant.  */
	  if (!cant_insert && by_all)
	    {
	      edge succ;
	      bool do_insertion = false;

	      /* Insert only if we can remove a later expression on a path
		 that we want to optimize for speed.
		 The phi node that we will be inserting in BLOCK is not free,
		 and inserting it for the sake of !optimize_for_speed successor
		 may cause regressions on the speed path.  */
	      FOR_EACH_EDGE (succ, ei, block->succs)
		{
		  if (bitmap_set_contains_value (PA_IN (succ->dest), val)
		      || bitmap_set_contains_value (ANTIC_IN (succ->dest), val))
		    {
		      if (optimize_edge_for_speed_p (succ))
			do_insertion = true;
		    }
		}

	      if (!do_insertion)
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fprintf (dump_file, "Skipping partial partial redundancy "
			       "for expression ");
		      print_pre_expr (dump_file, expr);
		      fprintf (dump_file, " (%04d), not (partially) anticipated "
			       "on any to be optimized for speed edges\n", val);
		    }
		}
	      else if (dbg_cnt (treepre_insert))
		{
		  pre_stats.pa_insert++;
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fprintf (dump_file, "Found partial partial redundancy "
			       "for expression ");
		      print_pre_expr (dump_file, expr);
		      fprintf (dump_file, " (%04d)\n",
			       get_expr_value_id (expr));
		    }
		  if (insert_into_preds_of_block (block,
						  get_expression_id (expr),
						  avail))
		    new_stuff = true;
		}
	    }
	}
    }

  exprs.release ();
  avail.release ();
  return new_stuff;
}
static bool
insert_aux (basic_block block)
{
  basic_block son;
  bool new_stuff = false;

  if (block)
    {
      basic_block dom;
      dom = get_immediate_dominator (CDI_DOMINATORS, block);
      if (dom)
	{
	  unsigned i;
	  bitmap_iterator bi;
	  bitmap_set_t newset = NEW_SETS (dom);
	  if (newset)
	    {
	      /* Note that we need to value_replace both NEW_SETS and
		 AVAIL_OUT.  In both cases, the value may be represented
		 by some non-simple expression here that we want
		 to replace it with.  */
	      FOR_EACH_EXPR_ID_IN_SET (newset, i, bi)
		{
		  pre_expr expr = expression_for_id (i);
		  bitmap_value_replace_in_set (NEW_SETS (block), expr);
		  bitmap_value_replace_in_set (AVAIL_OUT (block), expr);
		}
	    }
	  if (!single_pred_p (block))
	    {
	      new_stuff |= do_regular_insertion (block, dom);
	      if (do_partial_partial)
		new_stuff |= do_partial_partial_insertion (block, dom);
	    }
	}
    }
  for (son = first_dom_son (CDI_DOMINATORS, block);
       son;
       son = next_dom_son (CDI_DOMINATORS, son))
    new_stuff |= insert_aux (son);

  return new_stuff;
}
/* Perform insertion of partially redundant values.  */

static void
insert (void)
{
  bool new_stuff = true;
  basic_block bb;
  int num_iterations = 0;

  FOR_ALL_BB (bb)
    NEW_SETS (bb) = bitmap_set_new ();

  while (new_stuff)
    {
      num_iterations++;
      if (dump_file && dump_flags & TDF_DETAILS)
	fprintf (dump_file, "Starting insert iteration %d\n", num_iterations);
      new_stuff = insert_aux (ENTRY_BLOCK_PTR);

      /* Clear the NEW sets before the next iteration.  We have already
	 fully propagated their contents.  */
      if (new_stuff)
	FOR_ALL_BB (bb)
	  bitmap_set_free (NEW_SETS (bb));
    }
  statistics_histogram_event (cfun, "insert iterations", num_iterations);
}
/* Compute the AVAIL set for all basic blocks.

   This function performs value numbering of the statements in each basic
   block.  The AVAIL sets are built from information we glean while doing
   this value numbering, since the AVAIL sets contain only one entry per
   value.

   AVAIL_IN[BLOCK] = AVAIL_OUT[dom(BLOCK)].
   AVAIL_OUT[BLOCK] = AVAIL_IN[BLOCK] U PHI_GEN[BLOCK] U TMP_GEN[BLOCK].  */
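
/* Illustratively (names made up): if the dominator makes
   t_3 = x_1 + y_2 available and BLOCK defines p_5 = PHI <...> and
   q_6 = t_3 * 2, then AVAIL_OUT (BLOCK) holds leaders for the values
   of t_3, p_5 and q_6, and a later occurrence of x + y anywhere
   dominated by BLOCK finds t_3 as its leader.  */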
static void
compute_avail (void)
{
  basic_block block, son;
  basic_block *worklist;
  size_t sp = 0;
  unsigned i;

  /* We pretend that default definitions are defined in the entry block.
     This includes function arguments and the static chain decl.  */
  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      pre_expr e;
      if (!name
	  || !SSA_NAME_IS_DEFAULT_DEF (name)
	  || has_zero_uses (name)
	  || virtual_operand_p (name))
	continue;

      e = get_or_alloc_expr_for_name (name);
      add_to_value (get_expr_value_id (e), e);
      bitmap_insert_into_set (TMP_GEN (ENTRY_BLOCK_PTR), e);
      bitmap_value_insert_into_set (AVAIL_OUT (ENTRY_BLOCK_PTR), e);
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      print_bitmap_set (dump_file, TMP_GEN (ENTRY_BLOCK_PTR),
			"tmp_gen", ENTRY_BLOCK);
      print_bitmap_set (dump_file, AVAIL_OUT (ENTRY_BLOCK_PTR),
			"avail_out", ENTRY_BLOCK);
    }

  /* Allocate the worklist.  */
  worklist = XNEWVEC (basic_block, n_basic_blocks);

  /* Seed the algorithm by putting the dominator children of the entry
     block on the worklist.  */
  for (son = first_dom_son (CDI_DOMINATORS, ENTRY_BLOCK_PTR);
       son;
       son = next_dom_son (CDI_DOMINATORS, son))
    worklist[sp++] = son;

  /* Loop until the worklist is empty.  */
  while (sp)
    {
      gimple_stmt_iterator gsi;
      gimple stmt;
      basic_block dom;

      /* Pick a block from the worklist.  */
      block = worklist[--sp];

      /* Initially, the set of available values in BLOCK is that of
	 its immediate dominator.  */
      dom = get_immediate_dominator (CDI_DOMINATORS, block);
      if (dom)
	bitmap_set_copy (AVAIL_OUT (block), AVAIL_OUT (dom));

      /* Generate values for PHI nodes.  */
      for (gsi = gsi_start_phis (block); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  tree result = gimple_phi_result (gsi_stmt (gsi));

	  /* We have no need for virtual phis, as they don't represent
	     actual computations.  */
	  if (virtual_operand_p (result))
	    continue;

	  pre_expr e = get_or_alloc_expr_for_name (result);
	  add_to_value (get_expr_value_id (e), e);
	  bitmap_value_insert_into_set (AVAIL_OUT (block), e);
	  bitmap_insert_into_set (PHI_GEN (block), e);
	}

      BB_MAY_NOTRETURN (block) = 0;

      /* Now compute value numbers and populate value sets with all
	 the expressions computed in BLOCK.  */
      for (gsi = gsi_start_bb (block); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  ssa_op_iter iter;
	  tree op;

	  stmt = gsi_stmt (gsi);

	  /* Cache whether the basic-block has any non-visible side-effect
	     or control flow.
	     If this isn't a call or it is the last stmt in the
	     basic-block then the CFG represents things correctly.  */
	  if (is_gimple_call (stmt) && !stmt_ends_bb_p (stmt))
	    {
	      /* Non-looping const functions always return normally.
		 Otherwise the call might not return or have side-effects
		 that forbid hoisting possibly trapping expressions
		 before it.  */
	      int flags = gimple_call_flags (stmt);
	      if (!(flags & ECF_CONST)
		  || (flags & ECF_LOOPING_CONST_OR_PURE))
		BB_MAY_NOTRETURN (block) = 1;
	    }

	  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF)
	    {
	      pre_expr e = get_or_alloc_expr_for_name (op);

	      add_to_value (get_expr_value_id (e), e);
	      bitmap_insert_into_set (TMP_GEN (block), e);
	      bitmap_value_insert_into_set (AVAIL_OUT (block), e);
	    }

	  if (gimple_has_side_effects (stmt)
	      || stmt_could_throw_p (stmt)
	      || is_gimple_debug (stmt))
	    continue;

	  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
	    {
	      if (ssa_undefined_value_p (op))
		continue;
	      pre_expr e = get_or_alloc_expr_for_name (op);
	      bitmap_value_insert_into_set (EXP_GEN (block), e);
	    }

	  switch (gimple_code (stmt))
	    {
	    case GIMPLE_CALL:
	      {
		vn_reference_t ref;
		pre_expr result = NULL;
		vec<vn_reference_op_s> ops = vNULL;

		/* We can value number only calls to real functions.  */
		if (gimple_call_internal_p (stmt))
		  continue;

		copy_reference_ops_from_call (stmt, &ops);
		vn_reference_lookup_pieces (gimple_vuse (stmt), 0,
					    gimple_expr_type (stmt),
					    ops, &ref, VN_NOWALK);
		ops.release ();
		if (!ref)
		  continue;

		/* If the value of the call is not invalidated in
		   this block until it is computed, add the expression
		   to EXP_GEN.  */
		if (!gimple_vuse (stmt)
		    || gimple_code
			 (SSA_NAME_DEF_STMT (gimple_vuse (stmt))) == GIMPLE_PHI
		    || gimple_bb (SSA_NAME_DEF_STMT
				    (gimple_vuse (stmt))) != block)
		  {
		    result = (pre_expr) pool_alloc (pre_expr_pool);
		    result->kind = REFERENCE;
		    result->id = 0;
		    PRE_EXPR_REFERENCE (result) = ref;

		    get_or_alloc_expression_id (result);
		    add_to_value (get_expr_value_id (result), result);
		    bitmap_value_insert_into_set (EXP_GEN (block), result);
		  }
		continue;
	      }

	    case GIMPLE_ASSIGN:
	      {
		pre_expr result = NULL;
		switch (vn_get_stmt_kind (stmt))
		  {
		  case VN_NARY:
		    {
		      enum tree_code code = gimple_assign_rhs_code (stmt);
		      vn_nary_op_t nary;

		      /* COND_EXPR and VEC_COND_EXPR are awkward in
			 that they contain an embedded complex expression.
			 Don't even try to shove those through PRE.  */
		      if (code == COND_EXPR
			  || code == VEC_COND_EXPR)
			continue;

		      vn_nary_op_lookup_stmt (stmt, &nary);
		      if (!nary)
			continue;

		      /* If the NARY traps and there was a preceding
			 point in the block that might not return avoid
			 adding the nary to EXP_GEN.  */
		      if (BB_MAY_NOTRETURN (block)
			  && vn_nary_may_trap (nary))
			continue;

		      result = (pre_expr) pool_alloc (pre_expr_pool);
		      result->kind = NARY;
		      result->id = 0;
		      PRE_EXPR_NARY (result) = nary;
		      break;
		    }

		  case VN_REFERENCE:
		    {
		      vn_reference_t ref;
		      vn_reference_lookup (gimple_assign_rhs1 (stmt),
					   gimple_vuse (stmt),
					   VN_WALK, &ref);
		      if (!ref)
			continue;

		      /* If the value of the reference is not invalidated in
			 this block until it is computed, add the expression
			 to EXP_GEN.  */
		      if (gimple_vuse (stmt))
			{
			  gimple def_stmt;
			  bool ok = true;
			  def_stmt = SSA_NAME_DEF_STMT (gimple_vuse (stmt));
			  while (!gimple_nop_p (def_stmt)
				 && gimple_code (def_stmt) != GIMPLE_PHI
				 && gimple_bb (def_stmt) == block)
			    {
			      if (stmt_may_clobber_ref_p
				    (def_stmt, gimple_assign_rhs1 (stmt)))
				{
				  ok = false;
				  break;
				}
			      def_stmt
				= SSA_NAME_DEF_STMT (gimple_vuse (def_stmt));
			    }
			  if (!ok)
			    continue;
			}

		      result = (pre_expr) pool_alloc (pre_expr_pool);
		      result->kind = REFERENCE;
		      result->id = 0;
		      PRE_EXPR_REFERENCE (result) = ref;
		      break;
		    }

		  default:
		    continue;
		  }

		get_or_alloc_expression_id (result);
		add_to_value (get_expr_value_id (result), result);
		bitmap_value_insert_into_set (EXP_GEN (block), result);
		continue;
	      }
	    default:
	      break;
	    }
	}

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  print_bitmap_set (dump_file, EXP_GEN (block),
			    "exp_gen", block->index);
	  print_bitmap_set (dump_file, PHI_GEN (block),
			    "phi_gen", block->index);
	  print_bitmap_set (dump_file, TMP_GEN (block),
			    "tmp_gen", block->index);
	  print_bitmap_set (dump_file, AVAIL_OUT (block),
			    "avail_out", block->index);
	}

      /* Put the dominator children of BLOCK on the worklist of blocks
	 to compute available sets for.  */
      for (son = first_dom_son (CDI_DOMINATORS, block);
	   son;
	   son = next_dom_son (CDI_DOMINATORS, son))
	worklist[sp++] = son;
    }

  free (worklist);
}
/* Local state for the eliminate domwalk.  */
static vec<gimple> el_to_remove;
static vec<gimple> el_to_update;
static unsigned int el_todo;
static vec<tree> el_avail;
static vec<tree> el_avail_stack;

/* Return a leader for OP that is available at the current point of the
   eliminate domwalk.  */
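
/* E.g. (illustrative): with VN_INFO (a_7)->valnum == t_3 and some
   leader currently pushed for t_3's SSA version, eliminate_avail (a_7)
   returns that leader, letting the domwalk below rewrite uses of a_7
   into uses of it.  */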
static tree
eliminate_avail (tree op)
{
  tree valnum = VN_INFO (op)->valnum;
  if (TREE_CODE (valnum) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (valnum))
	return valnum;
      if (el_avail.length () > SSA_NAME_VERSION (valnum))
	return el_avail[SSA_NAME_VERSION (valnum)];
    }
  else if (is_gimple_min_invariant (valnum))
    return valnum;
  return NULL_TREE;
}

/* At the current point of the eliminate domwalk make OP available.  */

static void
eliminate_push_avail (tree op)
{
  tree valnum = VN_INFO (op)->valnum;
  if (TREE_CODE (valnum) == SSA_NAME)
    {
      if (el_avail.length () <= SSA_NAME_VERSION (valnum))
	el_avail.safe_grow_cleared (SSA_NAME_VERSION (valnum) + 1);
      el_avail[SSA_NAME_VERSION (valnum)] = op;
      el_avail_stack.safe_push (op);
    }
}
/* Insert the expression recorded by SCCVN for VAL at *GSI.  Returns
   the leader for the expression if insertion was successful.  */

static tree
eliminate_insert (gimple_stmt_iterator *gsi, tree val)
{
  tree expr = vn_get_expr_for (val);
  if (!CONVERT_EXPR_P (expr)
      && TREE_CODE (expr) != VIEW_CONVERT_EXPR)
    return NULL_TREE;

  tree op = TREE_OPERAND (expr, 0);
  tree leader = TREE_CODE (op) == SSA_NAME ? eliminate_avail (op) : op;
  if (!leader)
    return NULL_TREE;

  tree res = make_temp_ssa_name (TREE_TYPE (val), NULL, "pretmp");
  gimple tem = gimple_build_assign (res,
				    fold_build1 (TREE_CODE (expr),
						 TREE_TYPE (expr), leader));
  gsi_insert_before (gsi, tem, GSI_SAME_STMT);
  VN_INFO_GET (res)->valnum = val;

  if (TREE_CODE (leader) == SSA_NAME)
    gimple_set_plf (SSA_NAME_DEF_STMT (leader), NECESSARY, true);

  pre_stats.insertions++;
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Inserted ");
      print_gimple_stmt (dump_file, tem, 0, 0);
    }

  return res;
}
class eliminate_dom_walker : public dom_walker
{
public:
  eliminate_dom_walker (cdi_direction direction) : dom_walker (direction) {}

  virtual void before_dom_children (basic_block);
  virtual void after_dom_children (basic_block);
};
4045 eliminate_dom_walker::before_dom_children (basic_block b
)
4047 gimple_stmt_iterator gsi
;
4051 el_avail_stack
.safe_push (NULL_TREE
);
4053 for (gsi
= gsi_start_phis (b
); !gsi_end_p (gsi
);)
4055 gimple stmt
, phi
= gsi_stmt (gsi
);
4056 tree sprime
= NULL_TREE
, res
= PHI_RESULT (phi
);
4057 gimple_stmt_iterator gsi2
;
4059 /* We want to perform redundant PHI elimination. Do so by
4060 replacing the PHI with a single copy if possible.
4061 Do not touch inserted, single-argument or virtual PHIs. */
4062 if (gimple_phi_num_args (phi
) == 1
4063 || virtual_operand_p (res
))
4069 sprime
= eliminate_avail (res
);
4073 eliminate_push_avail (res
);
4077 else if (is_gimple_min_invariant (sprime
))
4079 if (!useless_type_conversion_p (TREE_TYPE (res
),
4080 TREE_TYPE (sprime
)))
4081 sprime
= fold_convert (TREE_TYPE (res
), sprime
);
4084 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4086 fprintf (dump_file
, "Replaced redundant PHI node defining ");
4087 print_generic_expr (dump_file
, res
, 0);
4088 fprintf (dump_file
, " with ");
4089 print_generic_expr (dump_file
, sprime
, 0);
4090 fprintf (dump_file
, "\n");
4093 remove_phi_node (&gsi
, false);
4096 && !bitmap_bit_p (inserted_exprs
, SSA_NAME_VERSION (res
))
4097 && TREE_CODE (sprime
) == SSA_NAME
)
4098 gimple_set_plf (SSA_NAME_DEF_STMT (sprime
), NECESSARY
, true);
4100 if (!useless_type_conversion_p (TREE_TYPE (res
), TREE_TYPE (sprime
)))
4101 sprime
= fold_convert (TREE_TYPE (res
), sprime
);
4102 stmt
= gimple_build_assign (res
, sprime
);
4103 SSA_NAME_DEF_STMT (res
) = stmt
;
4104 gimple_set_plf (stmt
, NECESSARY
, gimple_plf (phi
, NECESSARY
));
4106 gsi2
= gsi_after_labels (b
);
4107 gsi_insert_before (&gsi2
, stmt
, GSI_NEW_STMT
);
4108 /* Queue the copy for eventual removal. */
4109 el_to_remove
.safe_push (stmt
);
4110 /* If we inserted this PHI node ourself, it's not an elimination. */
4112 && bitmap_bit_p (inserted_exprs
, SSA_NAME_VERSION (res
)))
4115 pre_stats
.eliminations
++;
4118 for (gsi
= gsi_start_bb (b
); !gsi_end_p (gsi
); gsi_next (&gsi
))
4120 tree lhs
= NULL_TREE
;
4121 tree rhs
= NULL_TREE
;
4123 stmt
= gsi_stmt (gsi
);
4125 if (gimple_has_lhs (stmt
))
4126 lhs
= gimple_get_lhs (stmt
);
4128 if (gimple_assign_single_p (stmt
))
4129 rhs
= gimple_assign_rhs1 (stmt
);
4131 /* Lookup the RHS of the expression, see if we have an
4132 available computation for it. If so, replace the RHS with
4133 the available computation. */
4134 if (gimple_has_lhs (stmt
)
4135 && TREE_CODE (lhs
) == SSA_NAME
4136 && !gimple_has_volatile_ops (stmt
))
4139 gimple orig_stmt
= stmt
;
4141 sprime
= eliminate_avail (lhs
);
4142 /* If there is no usable leader mark lhs as leader for its value. */
4144 eliminate_push_avail (lhs
);
4146 /* See PR43491. Do not replace a global register variable when
4147 it is a the RHS of an assignment. Do replace local register
4148 variables since gcc does not guarantee a local variable will
4149 be allocated in register.
4150 Do not perform copy propagation or undo constant propagation. */
4151 if (gimple_assign_single_p (stmt
)
4152 && (TREE_CODE (rhs
) == SSA_NAME
4153 || is_gimple_min_invariant (rhs
)
4154 || (TREE_CODE (rhs
) == VAR_DECL
4155 && is_global_var (rhs
)
4156 && DECL_HARD_REGISTER (rhs
))))
4161 /* If there is no existing usable leader but SCCVN thinks
4162 it has an expression it wants to use as replacement,
4164 tree val
= VN_INFO (lhs
)->valnum
;
4166 && TREE_CODE (val
) == SSA_NAME
4167 && VN_INFO (val
)->needs_insertion
4168 && VN_INFO (val
)->expr
!= NULL_TREE
4169 && (sprime
= eliminate_insert (&gsi
, val
)) != NULL_TREE
)
4170 eliminate_push_avail (sprime
);
4172 else if (is_gimple_min_invariant (sprime
))
4174 /* If there is no existing leader but SCCVN knows this
4175 value is constant, use that constant. */
4176 if (!useless_type_conversion_p (TREE_TYPE (lhs
),
4177 TREE_TYPE (sprime
)))
4178 sprime
= fold_convert (TREE_TYPE (lhs
), sprime
);
4180 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4182 fprintf (dump_file
, "Replaced ");
4183 print_gimple_expr (dump_file
, stmt
, 0, 0);
4184 fprintf (dump_file
, " with ");
4185 print_generic_expr (dump_file
, sprime
, 0);
4186 fprintf (dump_file
, " in ");
4187 print_gimple_stmt (dump_file
, stmt
, 0, 0);
4189 pre_stats
.eliminations
++;
4190 propagate_tree_value_into_stmt (&gsi
, sprime
);
4191 stmt
= gsi_stmt (gsi
);
4194 /* If we removed EH side-effects from the statement, clean
4195 its EH information. */
4196 if (maybe_clean_or_replace_eh_stmt (orig_stmt
, stmt
))
4198 bitmap_set_bit (need_eh_cleanup
,
4199 gimple_bb (stmt
)->index
);
4200 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4201 fprintf (dump_file
, " Removed EH side-effects.\n");
4208 && (rhs
== NULL_TREE
4209 || TREE_CODE (rhs
) != SSA_NAME
4210 || may_propagate_copy (rhs
, sprime
)))
4212 bool can_make_abnormal_goto
4213 = is_gimple_call (stmt
)
4214 && stmt_can_make_abnormal_goto (stmt
);
4216 gcc_assert (sprime
!= rhs
);
4218 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4220 fprintf (dump_file
, "Replaced ");
4221 print_gimple_expr (dump_file
, stmt
, 0, 0);
4222 fprintf (dump_file
, " with ");
4223 print_generic_expr (dump_file
, sprime
, 0);
4224 fprintf (dump_file
, " in ");
4225 print_gimple_stmt (dump_file
, stmt
, 0, 0);
4228 if (TREE_CODE (sprime
) == SSA_NAME
)
4229 gimple_set_plf (SSA_NAME_DEF_STMT (sprime
),
4231 /* We need to make sure the new and old types actually match,
4232 which may require adding a simple cast, which fold_convert
4234 if ((!rhs
|| TREE_CODE (rhs
) != SSA_NAME
)
4235 && !useless_type_conversion_p (gimple_expr_type (stmt
),
4236 TREE_TYPE (sprime
)))
4237 sprime
= fold_convert (gimple_expr_type (stmt
), sprime
);
4239 pre_stats
.eliminations
++;
4240 propagate_tree_value_into_stmt (&gsi
, sprime
);
4241 stmt
= gsi_stmt (gsi
);
4244 /* If we removed EH side-effects from the statement, clean
4245 its EH information. */
4246 if (maybe_clean_or_replace_eh_stmt (orig_stmt
, stmt
))
4248 bitmap_set_bit (need_eh_cleanup
,
4249 gimple_bb (stmt
)->index
);
4250 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4251 fprintf (dump_file
, " Removed EH side-effects.\n");
4254 /* Likewise for AB side-effects. */
4255 if (can_make_abnormal_goto
4256 && !stmt_can_make_abnormal_goto (stmt
))
4258 bitmap_set_bit (need_ab_cleanup
,
4259 gimple_bb (stmt
)->index
);
4260 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4261 fprintf (dump_file
, " Removed AB side-effects.\n");
4265 /* If the statement is a scalar store, see if the expression
4266 has the same value number as its rhs. If so, the store is
4268 else if (gimple_assign_single_p (stmt
)
4269 && !gimple_has_volatile_ops (stmt
)
4270 && !is_gimple_reg (gimple_assign_lhs (stmt
))
4271 && (TREE_CODE (rhs
) == SSA_NAME
4272 || is_gimple_min_invariant (rhs
)))
4275 val
= vn_reference_lookup (gimple_assign_lhs (stmt
),
4276 gimple_vuse (stmt
), VN_WALK
, NULL
);
4277 if (TREE_CODE (rhs
) == SSA_NAME
)
4278 rhs
= VN_INFO (rhs
)->valnum
;
4280 && operand_equal_p (val
, rhs
, 0))
4282 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4284 fprintf (dump_file
, "Deleted redundant store ");
4285 print_gimple_stmt (dump_file
, stmt
, 0, 0);
4288 /* Queue stmt for removal. */
4289 el_to_remove
.safe_push (stmt
);
4292 /* Visit COND_EXPRs and fold the comparison with the
4293 available value-numbers. */
4294 else if (gimple_code (stmt
) == GIMPLE_COND
)
4296 tree op0
= gimple_cond_lhs (stmt
);
4297 tree op1
= gimple_cond_rhs (stmt
);
4300 if (TREE_CODE (op0
) == SSA_NAME
)
4301 op0
= VN_INFO (op0
)->valnum
;
4302 if (TREE_CODE (op1
) == SSA_NAME
)
4303 op1
= VN_INFO (op1
)->valnum
;
4304 result
= fold_binary (gimple_cond_code (stmt
), boolean_type_node
,
4306 if (result
&& TREE_CODE (result
) == INTEGER_CST
)
4308 if (integer_zerop (result
))
4309 gimple_cond_make_false (stmt
);
4311 gimple_cond_make_true (stmt
);
4313 el_todo
= TODO_cleanup_cfg
;
4316 /* Visit indirect calls and turn them into direct calls if
4318 if (is_gimple_call (stmt
))
4320 tree orig_fn
= gimple_call_fn (stmt
);
4324 if (TREE_CODE (orig_fn
) == SSA_NAME
)
4325 fn
= VN_INFO (orig_fn
)->valnum
;
4326 else if (TREE_CODE (orig_fn
) == OBJ_TYPE_REF
4327 && TREE_CODE (OBJ_TYPE_REF_EXPR (orig_fn
)) == SSA_NAME
)
4329 fn
= VN_INFO (OBJ_TYPE_REF_EXPR (orig_fn
))->valnum
;
4330 if (!gimple_call_addr_fndecl (fn
))
4332 fn
= ipa_intraprocedural_devirtualization (stmt
);
4334 fn
= build_fold_addr_expr (fn
);
4339 if (gimple_call_addr_fndecl (fn
) != NULL_TREE
4340 && useless_type_conversion_p (TREE_TYPE (orig_fn
),
4343 bool can_make_abnormal_goto
4344 = stmt_can_make_abnormal_goto (stmt
);
4345 bool was_noreturn
= gimple_call_noreturn_p (stmt
);
4347 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4349 fprintf (dump_file
, "Replacing call target with ");
4350 print_generic_expr (dump_file
, fn
, 0);
4351 fprintf (dump_file
, " in ");
4352 print_gimple_stmt (dump_file
, stmt
, 0, 0);
4355 gimple_call_set_fn (stmt
, fn
);
4356 el_to_update
.safe_push (stmt
);
4358 /* When changing a call into a noreturn call, cfg cleanup
4359 is needed to fix up the noreturn call. */
4360 if (!was_noreturn
&& gimple_call_noreturn_p (stmt
))
4361 el_todo
|= TODO_cleanup_cfg
;
4363 /* If we removed EH side-effects from the statement, clean
4364 its EH information. */
4365 if (maybe_clean_or_replace_eh_stmt (stmt
, stmt
))
4367 bitmap_set_bit (need_eh_cleanup
,
4368 gimple_bb (stmt
)->index
);
4369 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4370 fprintf (dump_file
, " Removed EH side-effects.\n");
4373 /* Likewise for AB side-effects. */
4374 if (can_make_abnormal_goto
4375 && !stmt_can_make_abnormal_goto (stmt
))
4377 bitmap_set_bit (need_ab_cleanup
,
4378 gimple_bb (stmt
)->index
);
4379 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4380 fprintf (dump_file
, " Removed AB side-effects.\n");
4383 /* Changing an indirect call to a direct call may
4384 have exposed different semantics. This may
4385 require an SSA update. */
4386 el_todo
|= TODO_update_ssa_only_virtuals
;
/* Make no longer available leaders no longer available.  */

void
eliminate_dom_walker::after_dom_children (basic_block)
{
  tree entry;
  while ((entry = el_avail_stack.pop ()) != NULL_TREE)
    el_avail[SSA_NAME_VERSION (VN_INFO (entry)->valnum)] = NULL_TREE;
}
/* Eliminate fully redundant computations.  */

static unsigned int
eliminate (void)
{
  gimple_stmt_iterator gsi;
  gimple stmt;
  unsigned i;

  need_eh_cleanup = BITMAP_ALLOC (NULL);
  need_ab_cleanup = BITMAP_ALLOC (NULL);

  el_to_remove.create (0);
  el_to_update.create (0);
  el_todo = 0;
  el_avail.create (0);
  el_avail_stack.create (0);

  eliminate_dom_walker (CDI_DOMINATORS).walk (cfun->cfg->x_entry_block_ptr);

  el_avail.release ();
  el_avail_stack.release ();

  /* We cannot remove stmts during BB walk, especially not release SSA
     names there as this confuses the VN machinery.  The stmts ending
     up in el_to_remove are either stores or simple copies.  */
  FOR_EACH_VEC_ELT (el_to_remove, i, stmt)
    {
      tree lhs = gimple_assign_lhs (stmt);
      tree rhs = gimple_assign_rhs1 (stmt);
      use_operand_p use_p;
      gimple use_stmt;

      /* If there is a single use only, propagate the equivalency
	 instead of keeping the copy.  */
      if (TREE_CODE (lhs) == SSA_NAME
	  && TREE_CODE (rhs) == SSA_NAME
	  && single_imm_use (lhs, &use_p, &use_stmt)
	  && may_propagate_copy (USE_FROM_PTR (use_p), rhs))
	{
	  SET_USE (use_p, rhs);
	  update_stmt (use_stmt);
	  if (inserted_exprs
	      && bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (lhs))
	      && TREE_CODE (rhs) == SSA_NAME)
	    gimple_set_plf (SSA_NAME_DEF_STMT (rhs), NECESSARY, true);
	}

      /* If this is a store or a now unused copy, remove it.  */
      if (TREE_CODE (lhs) != SSA_NAME
	  || has_zero_uses (lhs))
	{
	  basic_block bb = gimple_bb (stmt);
	  gsi = gsi_for_stmt (stmt);
	  unlink_stmt_vdef (stmt);
	  if (gsi_remove (&gsi, true))
	    bitmap_set_bit (need_eh_cleanup, bb->index);
	  if (inserted_exprs
	      && TREE_CODE (lhs) == SSA_NAME)
	    bitmap_clear_bit (inserted_exprs, SSA_NAME_VERSION (lhs));
	  release_defs (stmt);
	}
    }
  el_to_remove.release ();

  /* We cannot update call statements with virtual operands during
     SSA walk.  This might remove them which in turn makes our
     VN lattice invalid.  */
  FOR_EACH_VEC_ELT (el_to_update, i, stmt)
    update_stmt (stmt);
  el_to_update.release ();

  return el_todo;
}
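/* As a worked example (SSA names made up for illustration): if the
   dominator walk queued the copy

     x_2 = y_1;

   in el_to_remove and x_2 has the single use

     z_3 = x_2 + 1;

   the loop above rewrites that use to z_3 = y_1 + 1, which leaves
   x_2 with zero uses, so the copy itself is then deleted via the
   has_zero_uses path.  */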
/* Perform CFG cleanups made necessary by elimination.  */

static unsigned
fini_eliminate (void)
{
  bool do_eh_cleanup = !bitmap_empty_p (need_eh_cleanup);
  bool do_ab_cleanup = !bitmap_empty_p (need_ab_cleanup);

  if (do_eh_cleanup)
    gimple_purge_all_dead_eh_edges (need_eh_cleanup);

  if (do_ab_cleanup)
    gimple_purge_all_dead_abnormal_call_edges (need_ab_cleanup);

  BITMAP_FREE (need_eh_cleanup);
  BITMAP_FREE (need_ab_cleanup);

  if (do_eh_cleanup || do_ab_cleanup)
    return TODO_cleanup_cfg;
  return 0;
}
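/* The TODO_cleanup_cfg returned here is OR'ed into the pass's todo
   flags by the callers (do_pre and execute_fre below), so the pass
   manager performs the CFG cleanup once, after the pass completes.  */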
/* Borrow a bit of tree-ssa-dce.c for the moment.
   XXX: In 4.1, we should be able to just run a DCE pass after PRE, though
   this may be a bit faster, and we may want critical edges kept split.  */

/* If OP's defining statement has not already been determined to be necessary,
   mark that statement necessary.  Return the stmt, if it is newly
   necessary.  */

static inline gimple
mark_operand_necessary (tree op)
{
  gimple stmt;

  gcc_assert (op);

  if (TREE_CODE (op) != SSA_NAME)
    return NULL;

  stmt = SSA_NAME_DEF_STMT (op);

  if (gimple_plf (stmt, NECESSARY)
      || gimple_nop_p (stmt))
    return NULL;

  gimple_set_plf (stmt, NECESSARY, true);
  return stmt;
}
/* Because we don't follow exactly the standard PRE algorithm, and decide not
   to insert PHI nodes sometimes, and because value numbering of casts isn't
   perfect, we sometimes end up inserting dead code.  This simple DCE-like
   pass removes any insertions we made that weren't actually used.  */

static void
remove_dead_inserted_code (void)
{
  bitmap worklist;
  unsigned i;
  bitmap_iterator bi;
  gimple t;

  worklist = BITMAP_ALLOC (NULL);
  EXECUTE_IF_SET_IN_BITMAP (inserted_exprs, 0, i, bi)
    {
      t = SSA_NAME_DEF_STMT (ssa_name (i));
      if (gimple_plf (t, NECESSARY))
	bitmap_set_bit (worklist, i);
    }
  while (!bitmap_empty_p (worklist))
    {
      i = bitmap_first_set_bit (worklist);
      bitmap_clear_bit (worklist, i);
      t = SSA_NAME_DEF_STMT (ssa_name (i));

      /* PHI nodes are somewhat special in that each PHI alternative has
	 data and control dependencies.  All the statements feeding the
	 PHI node's arguments are always necessary.  */
      if (gimple_code (t) == GIMPLE_PHI)
	{
	  unsigned k;

	  for (k = 0; k < gimple_phi_num_args (t); k++)
	    {
	      tree arg = PHI_ARG_DEF (t, k);
	      if (TREE_CODE (arg) == SSA_NAME)
		{
		  gimple n = mark_operand_necessary (arg);
		  if (n)
		    bitmap_set_bit (worklist, SSA_NAME_VERSION (arg));
		}
	    }
	}
      else
	{
	  /* Propagate through the operands.  Examine all the USE, VUSE and
	     VDEF operands in this statement.  Mark all the statements
	     which feed this statement's uses as necessary.  */
	  ssa_op_iter iter;
	  tree use;

	  /* The operands of VDEF expressions are also needed as they
	     represent potential definitions that may reach this
	     statement (VDEF operands allow us to follow def-def
	     links).  */
	  FOR_EACH_SSA_TREE_OPERAND (use, t, iter, SSA_OP_ALL_USES)
	    {
	      gimple n = mark_operand_necessary (use);
	      if (n)
		bitmap_set_bit (worklist, SSA_NAME_VERSION (use));
	    }
	}
    }

  EXECUTE_IF_SET_IN_BITMAP (inserted_exprs, 0, i, bi)
    {
      t = SSA_NAME_DEF_STMT (ssa_name (i));
      if (!gimple_plf (t, NECESSARY))
	{
	  gimple_stmt_iterator gsi;

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Removing unnecessary insertion:");
	      print_gimple_stmt (dump_file, t, 0, 0);
	    }

	  gsi = gsi_for_stmt (t);
	  if (gimple_code (t) == GIMPLE_PHI)
	    remove_phi_node (&gsi, true);
	  else
	    {
	      gsi_remove (&gsi, true);
	      release_defs (t);
	    }
	}
    }
  BITMAP_FREE (worklist);
}
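/* For instance (made-up SSA names): if insert () created

     pretmp_5 = a_1 + b_2;

   to make an expression fully redundant but eliminate () never
   rewrote any use to pretmp_5, its NECESSARY flag stays clear and
   the statement is removed here rather than lingering until a later
   DCE pass.  */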
/* Initialize data structures used by PRE.  */

static void
init_pre (void)
{
  basic_block bb;

  next_expression_id = 1;
  expressions.create (0);
  expressions.safe_push (NULL);
  value_expressions.create (get_max_value_id () + 1);
  value_expressions.safe_grow_cleared (get_max_value_id () + 1);
  name_to_id.create (0);

  inserted_exprs = BITMAP_ALLOC (NULL);

  connect_infinite_loops_to_exit ();
  memset (&pre_stats, 0, sizeof (pre_stats));

  postorder = XNEWVEC (int, n_basic_blocks);
  postorder_num = inverted_post_order_compute (postorder);

  alloc_aux_for_blocks (sizeof (struct bb_bitmap_sets));

  calculate_dominance_info (CDI_POST_DOMINATORS);
  calculate_dominance_info (CDI_DOMINATORS);

  bitmap_obstack_initialize (&grand_bitmap_obstack);
  phi_translate_table.create (5110);
  expression_to_id.create (num_ssa_names * 3);
  bitmap_set_pool = create_alloc_pool ("Bitmap sets",
				       sizeof (struct bitmap_set), 30);
  pre_expr_pool = create_alloc_pool ("pre_expr nodes",
				     sizeof (struct pre_expr_d), 30);
  FOR_ALL_BB (bb)
    {
      EXP_GEN (bb) = bitmap_set_new ();
      PHI_GEN (bb) = bitmap_set_new ();
      TMP_GEN (bb) = bitmap_set_new ();
      AVAIL_OUT (bb) = bitmap_set_new ();
    }
}
/* Deallocate data structures used by PRE.  */

static void
fini_pre ()
{
  free (postorder);
  value_expressions.release ();
  BITMAP_FREE (inserted_exprs);
  bitmap_obstack_release (&grand_bitmap_obstack);
  free_alloc_pool (bitmap_set_pool);
  free_alloc_pool (pre_expr_pool);
  phi_translate_table.dispose ();
  expression_to_id.dispose ();
  name_to_id.release ();

  free_aux_for_blocks ();

  free_dominance_info (CDI_POST_DOMINATORS);
}
/* Gate and execute functions for PRE.  */

static unsigned int
do_pre (void)
{
  unsigned int todo = 0;

  do_partial_partial =
    flag_tree_partial_pre && optimize_function_for_speed_p (cfun);

  /* This has to happen before SCCVN runs because
     loop_optimizer_init may create new phis, etc.  */
  loop_optimizer_init (LOOPS_NORMAL);

  if (!run_scc_vn (VN_WALK))
    {
      loop_optimizer_finalize ();
      return 0;
    }

  init_pre ();
  scev_initialize ();

  /* Collect and value number expressions computed in each basic block.  */
  compute_avail ();

  /* Insert can get quite slow on an incredibly large number of basic
     blocks due to some quadratic behavior.  Until this behavior is
     fixed, don't run it when we have an incredibly large number of
     bb's.  If we aren't going to run insert, there is no point in
     computing ANTIC, either, even though it's plenty fast.  */
  if (n_basic_blocks < 4000)
    {
      compute_antic ();
      insert ();
    }

  /* Make sure to remove fake edges before committing our inserts.
     This makes sure we don't end up with extra critical edges that
     we would need to split.  */
  remove_fake_exit_edges ();
  gsi_commit_edge_inserts ();

  /* Remove all the redundant expressions.  */
  todo |= eliminate ();

  statistics_counter_event (cfun, "Insertions", pre_stats.insertions);
  statistics_counter_event (cfun, "PA inserted", pre_stats.pa_insert);
  statistics_counter_event (cfun, "New PHIs", pre_stats.phis);
  statistics_counter_event (cfun, "Eliminated", pre_stats.eliminations);

  clear_expression_ids ();
  remove_dead_inserted_code ();
  todo |= TODO_verify_flow;

  scev_finalize ();
  fini_pre ();
  todo |= fini_eliminate ();
  loop_optimizer_finalize ();

  /* TODO: tail_merge_optimize may merge all predecessors of a block, in which
     case we can merge the block with the remaining predecessor of the block.
     It should instead
     - call merge_blocks after each tail merge iteration
     - call merge_blocks after all tail merge iterations
     - mark TODO_cleanup_cfg when necessary
     - share the cfg cleanup with fini_pre.  */
  todo |= tail_merge_optimize (todo);

  free_scc_vn ();

  /* Tail merging invalidates the virtual SSA web; together with the
     cfg-cleanup opportunities exposed by PRE, this would wreck the
     SSA updating machinery.  So make sure to run update-ssa
     manually, before eventually scheduling cfg-cleanup as part of
     the todo.  */
  update_ssa (TODO_update_ssa_only_virtuals);

  return todo;
}

static bool
gate_pre (void)
{
  return flag_tree_pre != 0;
}

namespace {

const pass_data pass_data_pre =
{
  GIMPLE_PASS, /* type */
  "pre", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_TREE_PRE, /* tv_id */
  ( PROP_no_crit_edges | PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  TODO_rebuild_alias, /* todo_flags_start */
  TODO_verify_ssa, /* todo_flags_finish */
};

class pass_pre : public gimple_opt_pass
{
public:
  pass_pre (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_pre, ctxt)
  {}

  /* opt_pass methods: */
  bool gate () { return gate_pre (); }
  unsigned int execute () { return do_pre (); }

}; // class pass_pre

} // anon namespace

gimple_opt_pass *
make_pass_pre (gcc::context *ctxt)
{
  return new pass_pre (ctxt);
}
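/* make_pass_pre is the factory the pass manager invokes when
   instantiating the pipeline; the pass itself is scheduled in
   passes.def via NEXT_PASS (pass_pre).  */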
/* Gate and execute functions for FRE.  */

static unsigned int
execute_fre (void)
{
  unsigned int todo = 0;

  if (!run_scc_vn (VN_WALKREWRITE))
    return 0;

  memset (&pre_stats, 0, sizeof (pre_stats));

  /* Remove all the redundant expressions.  */
  todo |= eliminate ();

  todo |= fini_eliminate ();

  free_scc_vn ();

  statistics_counter_event (cfun, "Insertions", pre_stats.insertions);
  statistics_counter_event (cfun, "Eliminated", pre_stats.eliminations);

  return todo;
}

static bool
gate_fre (void)
{
  return flag_tree_fre != 0;
}

namespace {

const pass_data pass_data_fre =
{
  GIMPLE_PASS, /* type */
  "fre", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_TREE_FRE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_verify_ssa, /* todo_flags_finish */
};

class pass_fre : public gimple_opt_pass
{
public:
  pass_fre (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_fre, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_fre (ctxt_); }
  bool gate () { return gate_fre (); }
  unsigned int execute () { return execute_fre (); }

}; // class pass_fre

} // anon namespace

gimple_opt_pass *
make_pass_fre (gcc::context *ctxt)
{
  return new pass_fre (ctxt);
}
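/* Unlike pass_pre, pass_fre implements clone () because FRE is
   scheduled more than once in the pipeline; each occurrence needs
   its own opt_pass instance.  */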